query (string, 9 to 9.05k chars) | document (string, 10 to 222k chars) | metadata (dict) | negatives (sequence, length 30) | negative_scores (sequence, length 30) | document_score (string, 4 to 10 chars) | document_rank (string, 2 classes) |
---|---|---|---|---|---|---|
Transpose displayed dimensions. This swaps the order of the last two displayed dimensions. The order of the displayed dimensions is taken from Dims.order. | def transpose(self):
    order = list(self.order)
    order[-2], order[-1] = order[-1], order[-2]
    self.order = order | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def transpose(x: torch.Tensor, dims):\n _dims = list(dims)\n for i in range(len(_dims)):\n if _dims[i] != i:\n x = x.transpose(i, _dims[i])\n j = _dims.index(i)\n _dims[i], _dims[j] = i, _dims[i]\n return x",
"def transpose(self):\n temp_matrix = [[0] * self.TILES_PER_ROW for _ in range(self.TILES_PER_ROW)]\n for i in range(len(self.main_grid_values)):\n for j in range(len(self.main_grid_values)):\n temp_matrix[j][i] = self.main_grid_values[i][j]\n\n self.main_grid_values = temp_matrix",
"def transpose(self, order):\n return _coordsys.coordsys_transpose(self, order)",
"def transpose(self):\n return self.from_rep(self.rep.transpose())",
"def transpose():",
"def transpose(self):\n pass",
"def transpose(self):\n return self._new(self.rep.transpose(), (self.cols, self.rows), self.domain)",
"def transpose(self) -> None:\n ...",
"def test_transpose_cube_dimensions(self):\n # Calculate result for nontransposed cube.\n nontransposed_result = Plugin()._probabilities_to_percentiles(\n self.cube, self.percentiles\n )\n\n # Calculate result for transposed cube.\n # Original cube dimensions are [P, Y, X].\n # Transposed cube dimensions are [X, Y, P].\n self.cube.transpose([2, 1, 0])\n transposed_result = Plugin()._probabilities_to_percentiles(\n self.cube, self.percentiles\n )\n\n # Result cube will be [P, X, Y]\n # Transpose cube to be [P, Y, X]\n transposed_result.transpose([0, 2, 1])\n self.assertArrayAlmostEqual(nontransposed_result.data, transposed_result.data)",
"def transpose(self):\n return self._transpose",
"def Transpose(self):\n return _hypre.HypreParMatrix_Transpose(self)",
"def transpose(im: Image) -> Image:\n return im.transpose(\n random.choice([Image.FLIP_TOP_BOTTOM, Image.FLIP_LEFT_RIGHT])\n )",
"def transpose(self):\n return self.conjugate()",
"def transpose_3d():\n tmp = np.random.random((10, 10, 10))\n\n a = tmp.T\n b = np.empty(tmp.shape)\n for j in range(tmp.shape[1]):\n b[:, j, :] = tmp[:, j, :].T\n\n print(np.all(a == b))",
"def irtranspose(x: torch.Tensor, dims):\n _dims = list(dims)\n _ir_dims = [_dims.index(i) for i in range(len(_dims))]\n return transpose(x, _ir_dims)",
"def reverse(self):\n temp_matrix = [[0] * self.TILES_PER_ROW for _ in range(self.TILES_PER_ROW)]\n for i in range(len(self.main_grid_values)):\n for j in range(len(self.main_grid_values)):\n temp_matrix[i][self.TILES_PER_ROW - 1 - j] = self.main_grid_values[i][j]\n\n self.main_grid_values = temp_matrix",
"def _apply_swap(self, state, axes, **kwargs):\n all_axes = list(range(len(state.shape)))\n all_axes[axes[0]] = axes[1]\n all_axes[axes[1]] = axes[0]\n return self._transpose(state, all_axes)",
"def transpose(self, transposition):\r\n top_node = self.top_node\r\n self.top_node = self.transpose_helper(top_node, transposition)\r\n int(self.top_node.split('|')[0][1::].strip('()').split(', ')[0])\r\n self.run_clean_up()",
"def transpose(self):\n returnvalue = Matrix()\n for i in range(self._width):\n row = list()\n for j in range(self._height):\n row.append(self._value[j][i])\n returnvalue.addRow(*row)\n return returnvalue",
"def transpose(self, *args, **kwargs):\n return _image.image_transpose(self, *args, **kwargs)",
"def reshape(self, bottom, top):\n top[0].reshape(1)\n #top[2].reshape(1)",
"def conv2d_transpose(self, output_shape, filter_):\n return self.add_layer(conv2d_transpose, output_shape, filter_)",
"def _swap_axis(input_tensor, dim_index, last_index, name=None):\n return array_ops.transpose(\n input_tensor,\n array_ops.concat([\n math_ops.range(dim_index), [last_index],\n math_ops.range(dim_index + 1, last_index), [dim_index]\n ], 0),\n name=name)",
"def copy_transpose(self, a, out, axes=None, repeat=1):\n if axes is None and a._tensor.ctypes.data != out._tensor.ctypes.data and len(a.shape) == 2:\n inp = c_longlong(a._tensor.ctypes.data)\n outp = c_longlong(out._tensor.ctypes.data)\n m, n = a.shape\n self.mklEngine.MatTrans(inp, outp, c_longlong(m), c_longlong(n))\n else:\n out._tensor[:] = np.transpose(a._tensor, axes).copy()",
"def transpose(m):\n\n pass",
"def conv2d_transpose_legalize(attrs, inputs, types):\n if attrs['data_layout'] == 'NHWC':\n data, kernel = inputs\n kernel_layout = attrs['kernel_layout']\n # Convert Kernel layout to IOHW\n # kernel_layout is different from input kernel layout - IO is swapped\n if kernel_layout == 'HWIO':\n # input kernel layout is swapped to HWOI\n # output kernel layout will be IOHW\n kernel = relay.transpose(kernel, axes=(3, 2, 0, 1))\n elif kernel_layout == 'HWOI':\n # input kernel layout is swapped to HWIO\n # output kernel layout will be IOHW\n kernel = relay.transpose(kernel, axes=(2, 3, 0, 1))\n elif kernel_layout == 'IOHW':\n # input kernel layout is swapped to OIHW\n # output kernel layout will be IOHW\n kernel = relay.transpose(kernel, axes=(1, 0, 2, 3))\n elif kernel_layout == 'OIHW':\n # input kernel layout is swapped to IOHW\n # output kernel layout will be IOHW\n pass\n else:\n # Skip legalize. Let relay.nn.conv2d_transpose to handle the case\n return None\n\n # Set new attrs for conv2d_transpose.\n new_attrs = {k: attrs[k] for k in attrs.keys()}\n new_attrs['data_layout'] = 'NCHW'\n # layout of kernel should be IOHW, but kernel_layout should be swapped - OIHW\n new_attrs['kernel_layout'] = 'OIHW'\n\n # Convert data to NCHW.\n data = relay.transpose(data, axes=(0, 3, 1, 2))\n deconv = relay.nn.conv2d_transpose(data, kernel, **new_attrs)\n # Convert back to original NHWC layout.\n out = relay.transpose(deconv, axes=(0, 2, 3, 1))\n return out\n\n return None",
"def reshape(self, bottom, top):\n pass",
"def reshape(self, bottom, top):\n pass",
"def reshape(self, bottom, top):\n pass",
"def reshape(self, bottom, top):\n pass"
] | [
"0.63431174",
"0.6173414",
"0.6122233",
"0.6034227",
"0.5997827",
"0.5982194",
"0.5824332",
"0.5819166",
"0.580872",
"0.57203025",
"0.56864727",
"0.5642053",
"0.5638656",
"0.5600488",
"0.5599005",
"0.5567106",
"0.553457",
"0.55000365",
"0.5453963",
"0.5445961",
"0.5442704",
"0.5438466",
"0.5436819",
"0.54246694",
"0.5423091",
"0.53832775",
"0.53815025",
"0.53815025",
"0.53815025",
"0.53815025"
] | 0.6452598 | 0 |
Increment dimensions to the right along given axis, or last used axis if None | def _increment_dims_right(self, axis: int = None):
    if axis is None:
        axis = self.last_used
    self.set_current_step(axis, self.current_step[axis] + 1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _increment_dims_left(self, axis: int = None):\n if axis is None:\n axis = self.last_used\n self.set_current_step(axis, self.current_step[axis] - 1)",
"def add_dims_on_right(arr: array_like, new_axes_on_right: int = 0, ndim: Optional[int] = None):\n arr = np.asarray(arr)\n if ndim is None:\n ndim = arr.ndim + new_axes_on_right\n else:\n new_axes_on_right = ndim - arr.ndim\n if new_axes_on_right > 0:\n return np.expand_dims(arr, tuple(range(arr.ndim, ndim)))\n else:\n return arr.reshape(arr.shape[:ndim])",
"def _expand_dims_nonnegative_axis(axis, rank):\n # Implementation note: equivalent to get_positive_axis(axis, rank + 1)\n if axis < 0:\n new_axis = (1 + rank) + axis\n if new_axis < 0:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis out of range: \" + str(axis))\n return new_axis\n elif axis > rank:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis larger than rank: \" + str(axis) + \" > \" + str(rank))\n return axis",
"def _expand_dims(st, axis):\n if not isinstance(st, structured_tensor.StructuredTensor):\n return tf.expand_dims(st, axis)\n nn_axis = _expand_dims_nonnegative_axis(axis, st.rank)\n if st.rank == 0:\n return _expand_dims_scalar(st)\n if nn_axis == 0:\n # Here, we can add a dimension 1 at the front.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(nrows, nrows))\n elif nn_axis == 1:\n # Again, by partitioning the first dimension into vectors of length 1,\n # we can solve this problem.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(\n tf.constant(1, dtype=nrows.dtype), nrows))\n else:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Unimplemented: non-negative axis > 1 for _expand_dims\")",
"def _expand(x, ndim, axis=0):\n while F.rank(x) < ndim:\n x = F.expand_dims(x, axis)\n return x",
"def _swap_axis(input_tensor, dim_index, last_index, name=None):\n return array_ops.transpose(\n input_tensor,\n array_ops.concat([\n math_ops.range(dim_index), [last_index],\n math_ops.range(dim_index + 1, last_index), [dim_index]\n ], 0),\n name=name)",
"def sum_right_most(x, ndim):\n if ndim == 0:\n return x\n axes = list(range(-ndim, 0))\n return x.sum(axes)",
"def expand_dims(self, axis, direction=1):\n res = self.empty_like()\n res.shape.insert(axis, [1])\n res.qhape.insert(axis, [0])\n res.dirs.insert(axis, direction)\n if not self.isscalar():\n for k, v in self.sects.items():\n new_k = list(k)\n new_k.insert(axis, 0)\n res[tuple(new_k)] = np.expand_dims(v, axis)\n elif res.charge == 0:\n res[(0,)] = np.array((res.defval,), dtype=res.dtype)\n res.defval = 0\n return res",
"def normalize_axis(axis, ndim):\n if axis is None:\n return None\n\n if isinstance(axis, Integral):\n axis = int(axis)\n if axis < 0:\n axis += ndim\n\n if axis >= ndim or axis < 0:\n raise ValueError('Invalid axis index %d for ndim=%d' % (axis, ndim))\n\n return axis\n\n if isinstance(axis, Iterable):\n if not all(isinstance(a, Integral) for a in axis):\n raise ValueError(\"axis %s not understood\" % axis)\n\n return tuple(normalize_axis(a, ndim) for a in axis)\n\n raise ValueError(\"axis %s not understood\" % axis)",
"def resize_axis(tensor, axis, new_size, fill_value=0):\n tensor = tf.convert_to_tensor(tensor)\n shape = tf.unstack(tf.shape(tensor))\n\n pad_shape = shape[:]\n pad_shape[axis] = tf.maximum(0, new_size - shape[axis])\n\n shape[axis] = tf.minimum(shape[axis], new_size)\n shape = tf.stack(shape)\n\n resized = tf.concat([\n tf.slice(tensor, tf.zeros_like(shape), shape),\n tf.fill(tf.stack(pad_shape), tf.cast(fill_value, tensor.dtype))\n ], axis)\n\n # Update shape.\n new_shape = tensor.get_shape().as_list() # A copy is being made.\n new_shape[axis] = new_size\n resized.set_shape(new_shape)\n return resized",
"def _pad_periodic_by_axis(\n tensor: tf.Tensor, padding: Sequence[int], axis: int,\n) -> tf.Tensor:\n ndim = len(tensor.shape)\n\n axis = _normalize_axis(axis, ndim)\n if len(padding) != 2:\n raise ValueError('padding must have length 2: {}'.format(padding))\n if any(pad < 0 for pad in padding):\n raise ValueError('padding must be positive: {}'.format(padding))\n pad_left, pad_right = padding\n\n slice_left = [slice(None)] * ndim\n slice_left[axis] = slice(-pad_left, None)\n slice_left = tuple(slice_left)\n slice_right = [slice(None)] * ndim\n slice_right[axis] = slice(None, pad_right)\n slice_right = tuple(slice_right)\n\n if pad_left and pad_right:\n tensors = [tensor[slice_left], tensor, tensor[slice_right]]\n return tf.concat(tensors, axis=axis)\n elif pad_left:\n tensors = [tensor[slice_left], tensor]\n return tf.concat(tensors, axis=axis)\n elif pad_right:\n tensors = [tensor, tensor[slice_right]]\n return tf.concat(tensors, axis=axis)\n else:\n return tensor",
"def rollaxis(a, axis, start=0):\n return core.rollaxis(a, axis, start)",
"def expand_dims(input, axis, _builder=None):\n axis = _constexpr_to_value(axis)\n axes = list(axis) if isinstance(axis, Sequence) else [axis]\n new_ndim = len(input.shape) + len(axes)\n axes = [_wrap_axis(_constexpr_to_value(d), new_ndim) for d in axes]\n\n if len(set(axes)) != len(axes):\n raise ValueError(f\"expand_dims recieved duplicate axes, normalized axes = {axes}\")\n\n ret = input\n for a in sorted(axes):\n ret = semantic.expand_dims(ret, a, _builder)\n return ret",
"def dimension_along(self, axis):\n l, u = self._range_along(axis)\n return u - l",
"def roll(self, axis, shift, inplace=False, i=False):\n if self.size <= 1:\n if inplace:\n return\n else:\n return self.copy()\n # --- End: if\n\n shift %= self.size\n\n # period = self._custom.get('period')\n period = self.period()\n\n if not shift:\n # Null roll\n if inplace:\n return\n else:\n return self.copy()\n elif period is None:\n raise ValueError(\n \"Can't roll {} when no period has been set\".format(\n self.__class__.__name__\n )\n )\n\n direction = self.direction()\n\n centre = self._centre(period)\n\n if axis not in [0, -1]:\n raise ValueError(\n \"Can't roll axis {} when there is only one axis\".format(axis)\n )\n\n c = _inplace_enabled_define_and_cleanup(self)\n super(DimensionCoordinate, c).roll(axis, shift, inplace=True)\n\n c.dtype = numpy_result_type(c.dtype, period.dtype)\n\n b = c.get_bounds(None)\n bounds_data = c.get_bounds_data(None)\n\n if bounds_data is not None:\n b.dtype = numpy_result_type(bounds_data.dtype, period.dtype)\n bounds_data = b.get_data(None)\n\n if direction:\n # Increasing\n c[:shift] -= period\n if bounds_data is not None:\n b[:shift] -= period\n\n if c.data[0] <= centre - period:\n c += period\n if bounds_data is not None:\n b += period\n else:\n # Decreasing\n c[:shift] += period\n if bounds_data is not None:\n b[:shift] += period\n\n if c.data[0] >= centre + period:\n c -= period\n if bounds_data is not None:\n b -= period\n # --- End: if\n\n c._custom[\"direction\"] = direction\n\n return c",
"def tile_new_axis(t, axis, length):\n t = tf.expand_dims(t, axis=axis)\n cur_shape = t.get_shape().as_list()\n tile_shape = [1] * len(cur_shape)\n tile_shape[axis] = length\n return tf.tile(t, tile_shape)",
"def roll(self, axis, pos=-1):\n i = self.get_axis_id(axis)\n if (i == 0 and pos == 0) or (i == self.get_ndims() - 1 and pos == -1):\n return self\n if pos == 0:\n raise NotImplementedError(' pos=0 not coded yet. TODO')\n\n self.data = np.rollaxis(self.data, i, len(self.axes_names))\n self.axes_names = self.axes_names[:i] + self.axes_names[i + 1:] + \\\n [axis]\n self.axes_ids = dict([(a, i) for i, a in enumerate(self.axes_names)])\n return self",
"def _appendAxisDefinition(self, axis):\n length = len(axis)\n\n self.na_dict[\"NX\"].append(length)\n self.na_dict[\"XNAME\"].append(xarray_utils.getBestName(axis))\n\n # If only one item in axis values\n if length < 2:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist()) \n return\n\n incr = xarray_utils.get_interval(axis, 0, 1)\n\n for i in range(1, length):\n if (axis[i] - axis[i - 1]) != incr:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist())\n break\n\n else: # If did not break out of the loop\n max_length = length\n if length > 3: \n max_length = 3\n\n self.na_dict[\"DX\"].append(incr)\n self.na_dict[\"NXDEF\"].append(max_length)\n self.na_dict[\"X\"].append(axis[:max_length])",
"def roll(\n tensor: tf.Tensor,\n shift: Union[int, Sequence[int]],\n axis: Union[int, Sequence[int]],\n) -> tf.Tensor:\n if isinstance(axis, int):\n axis = [axis]\n if isinstance(shift, int):\n shift = [shift]\n result = tensor\n for axis_element, shift_element in zip(axis, shift):\n result = _roll_once(result, shift_element, axis_element)\n return result",
"def squeeze_expand_dim(tensor, axis):\n tensor = torch.squeeze(tensor)\n if len(list(tensor.size())) < 4:\n return tensor.unsqueeze(axis)\n else:\n return tensor",
"def _roll_once(\n tensor: tf.Tensor,\n shift: int,\n axis: int,\n) -> tf.Tensor:\n if not shift:\n return tensor\n axis = _normalize_axis(axis, len(tensor.shape))\n slice_left = (slice(None),) * axis + (slice(-shift, None),)\n slice_right = (slice(None),) * axis + (slice(None, -shift),)\n return tf.concat([tensor[slice_left], tensor[slice_right]], axis=axis)",
"def roll(a, shift, axis=None):\n if axis is None:\n if a.size == 0:\n return a\n size = a.size\n ra = a.ravel()\n shift %= size\n res = cupy.empty((size,), a.dtype)\n res[:shift] = ra[size - shift:]\n res[shift:] = ra[:size - shift]\n return res.reshape(a.shape)\n else:\n axis = int(axis)\n if axis < 0:\n axis += a.ndim\n if not 0 <= axis < a.ndim:\n raise core.core._AxisError(\n 'axis must be >= %d and < %d' % (-a.ndim, a.ndim))\n size = a.shape[axis]\n if size == 0:\n return a\n shift %= size\n prev = (slice(None),) * axis\n rest = (slice(None),) * (a.ndim - axis - 1)\n # Roll only the dimensiont at the given axis\n # ind1 is [:, ..., size-shift:, ..., :]\n # ind2 is [:, ..., :size-shift, ..., :]\n ind1 = prev + (slice(size - shift, None, None),) + rest\n ind2 = prev + (slice(None, size - shift, None),) + rest\n r_ind1 = prev + (slice(None, shift, None),) + rest\n r_ind2 = prev + (slice(shift, None, None),) + rest\n res = cupy.empty_like(a)\n res[r_ind1] = a[ind1]\n res[r_ind2] = a[ind2]\n return res",
"def axis_data(axis):\n x = mask.sum(axis)\n trimmed_front = N.trim_zeros(x,\"f\")\n offset = len(x)-len(trimmed_front)\n size = len(N.trim_zeros(trimmed_front,\"b\"))\n return offset,size",
"def roll(x, shift, axis=None):\r\n if axis is None:\r\n if x.ndim > 1:\r\n y = x.flatten()\r\n return roll(y, shift, axis=0).reshape(x.shape)\r\n else:\r\n axis = 0\r\n\r\n # A slice of all elements in a dimension ':'\r\n allslice = slice(None)\r\n # List of slices describing the front half [:, :, shift:, :]\r\n front_slice = slice(-shift, None)\r\n front_list = ([allslice] * axis + [front_slice] +\r\n [allslice] * (x.ndim - axis - 1))\r\n # List of slices describing the back half [:, :, :shift, :]\r\n end_slice = slice(0, -shift)\r\n end_list = ([allslice] * axis + [end_slice] +\r\n [allslice] * (x.ndim - axis - 1))\r\n return join(axis,\r\n x.__getitem__(tuple(front_list)),\r\n x.__getitem__(tuple(end_list)))",
"def combine_last_two_dimensions(x):\r\n old_shape = x.get_shape().dims\r\n a, b = old_shape[-2:]\r\n new_shape = old_shape[:-2] + [a * b if a and b else None]\r\n ret = tf.reshape(x, tf.concat([tf.shape(x)[:-2], [-1]], 0))\r\n ret.set_shape(new_shape)\r\n return ret",
"def combine_last_two_dimensions(x):\n old_shape = x.get_shape().dims\n a, b = old_shape[-2:]\n new_shape = old_shape[:-2] + [a * b if a and b else None]\n ret = tf.reshape(x, tf.concat([tf.shape(x)[:-2], [-1]], 0))\n ret.set_shape(new_shape)\n return ret",
"def squeeze(self, axis: Optional[AxisLike] = None):\n\n if axis is None:\n axis = range(self.shape.shape[0])\n elif axis == -1:\n axis = self.shape.shape[0]\n if not isinstance(axis, Iterable):\n axis = [axis]\n axis = [ax for ax in axis if (self.shape[ax] == 1).all(axis=0)]\n self.shape = np.delete(self.shape, axis, axis=0)\n return self",
"def _add_keepdims(func):\n @functools.wraps(func)\n def wrapped(a, axis, **kwargs):\n res = func(a, axis=axis, **kwargs)\n if axis is None:\n axis = 0 # res is now a scalar, so we can insert this anywhere\n return np.expand_dims(res, axis=axis)\n return wrapped",
"def _moveaxis(self, arr, source, dest):\n try:\n source = list(source)\n except TypeError:\n source = [source]\n try:\n dest = list(dest)\n except TypeError:\n dest = [dest]\n\n source = [a + arr.ndim if a < 0 else a for a in source]\n dest = [a + arr.ndim if a < 0 else a for a in dest]\n\n order = [n for n in range(arr.ndim) if n not in source]\n\n for dest, src in sorted(zip(dest, source)):\n order.insert(dest, src)\n\n return arr.transpose(order)",
"def set_current_step(\n self,\n axis: Union[int, Sequence[int]],\n value: Union[Union[int, float], Sequence[Union[int, float]]],\n ):\n if isinstance(axis, Integral):\n axis = assert_axis_in_bounds(axis, self.ndim)\n step = round(min(max(value, 0), self.nsteps[axis] - 1))\n if self.current_step[axis] != step:\n full_current_step = list(self.current_step)\n full_current_step[axis] = step\n self.current_step = full_current_step\n else:\n full_current_step = list(self.current_step)\n # cast value to list for list comparison below\n value = list(value) # type: ignore\n axis = tuple(axis) # type: ignore\n if len(axis) != len(value):\n raise ValueError(\n trans._(\"axis and value sequences must have equal length\")\n )\n if value != full_current_step:\n # (computed) nsteps property outside of the loop for efficiency\n nsteps = self.nsteps\n for ax, val in zip(axis, value):\n ax = assert_axis_in_bounds(int(ax), self.ndim)\n step = round(min(max(val, 0), nsteps[ax] - 1))\n full_current_step[ax] = step\n self.current_step = full_current_step"
] | [
"0.80355656",
"0.7058426",
"0.68617535",
"0.6362681",
"0.6201553",
"0.60930216",
"0.59444535",
"0.59240335",
"0.5898968",
"0.58951026",
"0.5893552",
"0.5891648",
"0.58671963",
"0.5823769",
"0.5818595",
"0.57780397",
"0.57536596",
"0.57078004",
"0.5604907",
"0.55618906",
"0.55088717",
"0.5499301",
"0.5493813",
"0.5493604",
"0.54539317",
"0.5443044",
"0.5426305",
"0.54163235",
"0.54053515",
"0.539632"
] | 0.8818179 | 0 |
Increment dimensions to the left along given axis, or last used axis if None | def _increment_dims_left(self, axis: int = None):
    if axis is None:
        axis = self.last_used
    self.set_current_step(axis, self.current_step[axis] - 1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _increment_dims_right(self, axis: int = None):\n if axis is None:\n axis = self.last_used\n self.set_current_step(axis, self.current_step[axis] + 1)",
"def _expand_dims_nonnegative_axis(axis, rank):\n # Implementation note: equivalent to get_positive_axis(axis, rank + 1)\n if axis < 0:\n new_axis = (1 + rank) + axis\n if new_axis < 0:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis out of range: \" + str(axis))\n return new_axis\n elif axis > rank:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis larger than rank: \" + str(axis) + \" > \" + str(rank))\n return axis",
"def _expand_dims(st, axis):\n if not isinstance(st, structured_tensor.StructuredTensor):\n return tf.expand_dims(st, axis)\n nn_axis = _expand_dims_nonnegative_axis(axis, st.rank)\n if st.rank == 0:\n return _expand_dims_scalar(st)\n if nn_axis == 0:\n # Here, we can add a dimension 1 at the front.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(nrows, nrows))\n elif nn_axis == 1:\n # Again, by partitioning the first dimension into vectors of length 1,\n # we can solve this problem.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(\n tf.constant(1, dtype=nrows.dtype), nrows))\n else:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Unimplemented: non-negative axis > 1 for _expand_dims\")",
"def add_dims_on_right(arr: array_like, new_axes_on_right: int = 0, ndim: Optional[int] = None):\n arr = np.asarray(arr)\n if ndim is None:\n ndim = arr.ndim + new_axes_on_right\n else:\n new_axes_on_right = ndim - arr.ndim\n if new_axes_on_right > 0:\n return np.expand_dims(arr, tuple(range(arr.ndim, ndim)))\n else:\n return arr.reshape(arr.shape[:ndim])",
"def rollaxis(a, axis, start=0):\n return core.rollaxis(a, axis, start)",
"def _expand(x, ndim, axis=0):\n while F.rank(x) < ndim:\n x = F.expand_dims(x, axis)\n return x",
"def expand_dims(self, axis, direction=1):\n res = self.empty_like()\n res.shape.insert(axis, [1])\n res.qhape.insert(axis, [0])\n res.dirs.insert(axis, direction)\n if not self.isscalar():\n for k, v in self.sects.items():\n new_k = list(k)\n new_k.insert(axis, 0)\n res[tuple(new_k)] = np.expand_dims(v, axis)\n elif res.charge == 0:\n res[(0,)] = np.array((res.defval,), dtype=res.dtype)\n res.defval = 0\n return res",
"def _swap_axis(input_tensor, dim_index, last_index, name=None):\n return array_ops.transpose(\n input_tensor,\n array_ops.concat([\n math_ops.range(dim_index), [last_index],\n math_ops.range(dim_index + 1, last_index), [dim_index]\n ], 0),\n name=name)",
"def roll(self, axis, pos=-1):\n i = self.get_axis_id(axis)\n if (i == 0 and pos == 0) or (i == self.get_ndims() - 1 and pos == -1):\n return self\n if pos == 0:\n raise NotImplementedError(' pos=0 not coded yet. TODO')\n\n self.data = np.rollaxis(self.data, i, len(self.axes_names))\n self.axes_names = self.axes_names[:i] + self.axes_names[i + 1:] + \\\n [axis]\n self.axes_ids = dict([(a, i) for i, a in enumerate(self.axes_names)])\n return self",
"def _pad_periodic_by_axis(\n tensor: tf.Tensor, padding: Sequence[int], axis: int,\n) -> tf.Tensor:\n ndim = len(tensor.shape)\n\n axis = _normalize_axis(axis, ndim)\n if len(padding) != 2:\n raise ValueError('padding must have length 2: {}'.format(padding))\n if any(pad < 0 for pad in padding):\n raise ValueError('padding must be positive: {}'.format(padding))\n pad_left, pad_right = padding\n\n slice_left = [slice(None)] * ndim\n slice_left[axis] = slice(-pad_left, None)\n slice_left = tuple(slice_left)\n slice_right = [slice(None)] * ndim\n slice_right[axis] = slice(None, pad_right)\n slice_right = tuple(slice_right)\n\n if pad_left and pad_right:\n tensors = [tensor[slice_left], tensor, tensor[slice_right]]\n return tf.concat(tensors, axis=axis)\n elif pad_left:\n tensors = [tensor[slice_left], tensor]\n return tf.concat(tensors, axis=axis)\n elif pad_right:\n tensors = [tensor, tensor[slice_right]]\n return tf.concat(tensors, axis=axis)\n else:\n return tensor",
"def normalize_axis(axis, ndim):\n if axis is None:\n return None\n\n if isinstance(axis, Integral):\n axis = int(axis)\n if axis < 0:\n axis += ndim\n\n if axis >= ndim or axis < 0:\n raise ValueError('Invalid axis index %d for ndim=%d' % (axis, ndim))\n\n return axis\n\n if isinstance(axis, Iterable):\n if not all(isinstance(a, Integral) for a in axis):\n raise ValueError(\"axis %s not understood\" % axis)\n\n return tuple(normalize_axis(a, ndim) for a in axis)\n\n raise ValueError(\"axis %s not understood\" % axis)",
"def dimension_along(self, axis):\n l, u = self._range_along(axis)\n return u - l",
"def _roll_once(\n tensor: tf.Tensor,\n shift: int,\n axis: int,\n) -> tf.Tensor:\n if not shift:\n return tensor\n axis = _normalize_axis(axis, len(tensor.shape))\n slice_left = (slice(None),) * axis + (slice(-shift, None),)\n slice_right = (slice(None),) * axis + (slice(None, -shift),)\n return tf.concat([tensor[slice_left], tensor[slice_right]], axis=axis)",
"def expand_dims(input, axis, _builder=None):\n axis = _constexpr_to_value(axis)\n axes = list(axis) if isinstance(axis, Sequence) else [axis]\n new_ndim = len(input.shape) + len(axes)\n axes = [_wrap_axis(_constexpr_to_value(d), new_ndim) for d in axes]\n\n if len(set(axes)) != len(axes):\n raise ValueError(f\"expand_dims recieved duplicate axes, normalized axes = {axes}\")\n\n ret = input\n for a in sorted(axes):\n ret = semantic.expand_dims(ret, a, _builder)\n return ret",
"def _appendAxisDefinition(self, axis):\n length = len(axis)\n\n self.na_dict[\"NX\"].append(length)\n self.na_dict[\"XNAME\"].append(xarray_utils.getBestName(axis))\n\n # If only one item in axis values\n if length < 2:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist()) \n return\n\n incr = xarray_utils.get_interval(axis, 0, 1)\n\n for i in range(1, length):\n if (axis[i] - axis[i - 1]) != incr:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist())\n break\n\n else: # If did not break out of the loop\n max_length = length\n if length > 3: \n max_length = 3\n\n self.na_dict[\"DX\"].append(incr)\n self.na_dict[\"NXDEF\"].append(max_length)\n self.na_dict[\"X\"].append(axis[:max_length])",
"def roll(self, axis, shift, inplace=False, i=False):\n if self.size <= 1:\n if inplace:\n return\n else:\n return self.copy()\n # --- End: if\n\n shift %= self.size\n\n # period = self._custom.get('period')\n period = self.period()\n\n if not shift:\n # Null roll\n if inplace:\n return\n else:\n return self.copy()\n elif period is None:\n raise ValueError(\n \"Can't roll {} when no period has been set\".format(\n self.__class__.__name__\n )\n )\n\n direction = self.direction()\n\n centre = self._centre(period)\n\n if axis not in [0, -1]:\n raise ValueError(\n \"Can't roll axis {} when there is only one axis\".format(axis)\n )\n\n c = _inplace_enabled_define_and_cleanup(self)\n super(DimensionCoordinate, c).roll(axis, shift, inplace=True)\n\n c.dtype = numpy_result_type(c.dtype, period.dtype)\n\n b = c.get_bounds(None)\n bounds_data = c.get_bounds_data(None)\n\n if bounds_data is not None:\n b.dtype = numpy_result_type(bounds_data.dtype, period.dtype)\n bounds_data = b.get_data(None)\n\n if direction:\n # Increasing\n c[:shift] -= period\n if bounds_data is not None:\n b[:shift] -= period\n\n if c.data[0] <= centre - period:\n c += period\n if bounds_data is not None:\n b += period\n else:\n # Decreasing\n c[:shift] += period\n if bounds_data is not None:\n b[:shift] += period\n\n if c.data[0] >= centre + period:\n c -= period\n if bounds_data is not None:\n b -= period\n # --- End: if\n\n c._custom[\"direction\"] = direction\n\n return c",
"def roll(\n tensor: tf.Tensor,\n shift: Union[int, Sequence[int]],\n axis: Union[int, Sequence[int]],\n) -> tf.Tensor:\n if isinstance(axis, int):\n axis = [axis]\n if isinstance(shift, int):\n shift = [shift]\n result = tensor\n for axis_element, shift_element in zip(axis, shift):\n result = _roll_once(result, shift_element, axis_element)\n return result",
"def get_axis(self, axis):\n return self.index if axis == 0 else self.columns",
"def roll(x, shift, axis=None):\r\n if axis is None:\r\n if x.ndim > 1:\r\n y = x.flatten()\r\n return roll(y, shift, axis=0).reshape(x.shape)\r\n else:\r\n axis = 0\r\n\r\n # A slice of all elements in a dimension ':'\r\n allslice = slice(None)\r\n # List of slices describing the front half [:, :, shift:, :]\r\n front_slice = slice(-shift, None)\r\n front_list = ([allslice] * axis + [front_slice] +\r\n [allslice] * (x.ndim - axis - 1))\r\n # List of slices describing the back half [:, :, :shift, :]\r\n end_slice = slice(0, -shift)\r\n end_list = ([allslice] * axis + [end_slice] +\r\n [allslice] * (x.ndim - axis - 1))\r\n return join(axis,\r\n x.__getitem__(tuple(front_list)),\r\n x.__getitem__(tuple(end_list)))",
"def left_dimension_pad(array, n):\n if array.ndim >= n:\n return array\n nadd = n - array.ndim\n atts = [_new_attribute_label('_dim%i' % i, array) for i in range(nadd)]\n apply_args = [x for item in enumerate(atts) for x in item[::-1]]\n\n ds = array.datashape.copy()\n ds.dim_low = ([0] * nadd) + list(ds.dim_low)\n ds.dim_high = ([0] * nadd) + list(ds.dim_high)\n ds.dim_names = atts + list(ds.dim_names)\n ds.chunk_overlap = ([0] * nadd) + list(ds.chunk_overlap)\n ds.chunk_size = ([1000] * nadd) + list(ds.chunk_size)\n\n return array.apply(*apply_args).redimension(ds.schema)",
"def squeeze_expand_dim(tensor, axis):\n tensor = torch.squeeze(tensor)\n if len(list(tensor.size())) < 4:\n return tensor.unsqueeze(axis)\n else:\n return tensor",
"def adjust(x):\n return numpy.insert(x, 0, 1, axis=1)",
"def new_expand_dims(a, axes):\n # if int is passed, retain the same behaviour\n if type(axes) == int:\n return np.expand_dims(a, axes)\n # insert axes to given indices\n for ax in sorted(axes):\n a = np.expand_dims(a, ax)\n return a",
"def axis_data(axis):\n x = mask.sum(axis)\n trimmed_front = N.trim_zeros(x,\"f\")\n offset = len(x)-len(trimmed_front)\n size = len(N.trim_zeros(trimmed_front,\"b\"))\n return offset,size",
"def nd_expand_dims(x, n=1, before=True):\n if before:\n axes = tuple([np.newaxis] * n + [...])\n else:\n axes = tuple([...] + [np.newaxis] * n)\n return x[axes]",
"def set_current_step(\n self,\n axis: Union[int, Sequence[int]],\n value: Union[Union[int, float], Sequence[Union[int, float]]],\n ):\n if isinstance(axis, Integral):\n axis = assert_axis_in_bounds(axis, self.ndim)\n step = round(min(max(value, 0), self.nsteps[axis] - 1))\n if self.current_step[axis] != step:\n full_current_step = list(self.current_step)\n full_current_step[axis] = step\n self.current_step = full_current_step\n else:\n full_current_step = list(self.current_step)\n # cast value to list for list comparison below\n value = list(value) # type: ignore\n axis = tuple(axis) # type: ignore\n if len(axis) != len(value):\n raise ValueError(\n trans._(\"axis and value sequences must have equal length\")\n )\n if value != full_current_step:\n # (computed) nsteps property outside of the loop for efficiency\n nsteps = self.nsteps\n for ax, val in zip(axis, value):\n ax = assert_axis_in_bounds(int(ax), self.ndim)\n step = round(min(max(val, 0), nsteps[ax] - 1))\n full_current_step[ax] = step\n self.current_step = full_current_step",
"def resize_axis(tensor, axis, new_size, fill_value=0):\n tensor = tf.convert_to_tensor(tensor)\n shape = tf.unstack(tf.shape(tensor))\n\n pad_shape = shape[:]\n pad_shape[axis] = tf.maximum(0, new_size - shape[axis])\n\n shape[axis] = tf.minimum(shape[axis], new_size)\n shape = tf.stack(shape)\n\n resized = tf.concat([\n tf.slice(tensor, tf.zeros_like(shape), shape),\n tf.fill(tf.stack(pad_shape), tf.cast(fill_value, tensor.dtype))\n ], axis)\n\n # Update shape.\n new_shape = tensor.get_shape().as_list() # A copy is being made.\n new_shape[axis] = new_size\n resized.set_shape(new_shape)\n return resized",
"def sum_right_most(x, ndim):\n if ndim == 0:\n return x\n axes = list(range(-ndim, 0))\n return x.sum(axes)",
"def tile_new_axis(t, axis, length):\n t = tf.expand_dims(t, axis=axis)\n cur_shape = t.get_shape().as_list()\n tile_shape = [1] * len(cur_shape)\n tile_shape[axis] = length\n return tf.tile(t, tile_shape)",
"def _moveaxis(self, arr, source, dest):\n try:\n source = list(source)\n except TypeError:\n source = [source]\n try:\n dest = list(dest)\n except TypeError:\n dest = [dest]\n\n source = [a + arr.ndim if a < 0 else a for a in source]\n dest = [a + arr.ndim if a < 0 else a for a in dest]\n\n order = [n for n in range(arr.ndim) if n not in source]\n\n for dest, src in sorted(zip(dest, source)):\n order.insert(dest, src)\n\n return arr.transpose(order)"
] | [
"0.83335036",
"0.6745814",
"0.65627104",
"0.6551638",
"0.6089467",
"0.60255677",
"0.5910376",
"0.5879978",
"0.58637667",
"0.58166647",
"0.57958627",
"0.575534",
"0.5741809",
"0.57007563",
"0.5697291",
"0.56896806",
"0.5650914",
"0.55656713",
"0.55501705",
"0.5513627",
"0.55101067",
"0.547686",
"0.547611",
"0.5474274",
"0.54674554",
"0.5454831",
"0.54317766",
"0.54127413",
"0.53969437",
"0.53770393"
] | 0.8841973 | 0 |
Shift focused dimension slider to be the next slider above. | def _focus_up(self):
    sliders = [d for d in self.not_displayed if self.nsteps[d] > 1]
    if len(sliders) == 0:
        return
    index = (sliders.index(self.last_used) + 1) % len(sliders)
    self.last_used = sliders[index] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _focus_down(self):\n sliders = [d for d in self.not_displayed if self.nsteps[d] > 1]\n if len(sliders) == 0:\n return\n\n index = (sliders.index(self.last_used) - 1) % len(sliders)\n self.last_used = sliders[index]",
"def focus_prev(self):\n self.focus_item(forward=False)",
"def slider_dragged(self):\n pass",
"def MoveToPreviousSlide(self, event):\n pass",
"def focus_slider(self, name):\n # If manipulate is not toggled, this makes no sense\n if not self.is_visible():\n self._app[\"statusbar\"].message(\n \"Focusing a slider only makes sense in manipulate\", \"error\")\n elif name not in self.sliders:\n self._app[\"statusbar\"].message(\n \"No slider called \" + name, \"error\")\n else:\n self.sliders[name].grab_focus()",
"def focus_next(self):\n self.focus_item()",
"def MoveToNextSlide(self, event):\n pass",
"def next_widget(self):\r\n for i in self.widgets[1:]:\r\n if i.get_visible():\r\n self.set_top_widget(i)\r\n return",
"def _prev(self, _):\n self.notebook.SetSelection(self.idx-1)",
"def change_slider(self, step=\"1\"):\n try:\n step = get_int(step, allow_sign=True)\n except StringConversionError as e:\n self._app[\"statusbar\"].message(str(e), \"error\")\n return\n for slider in self.sliders.values():\n if slider.is_focus():\n val = slider.get_value()\n step = self._app[\"eventhandler\"].num_receive() * step\n val += step\n slider.set_value(val)",
"def __window_forward(self):\n pass",
"def front_wheel_from_axis():",
"def handle_event(self, event):\n if event.type != MOUSEMOTION:\n return\n self.model.slider.left = event.pos[0]",
"def MoveToSlide(self, event):\n pass",
"def keyPressEvent(self, event):\n maxIdx = self.dat3d.shape[2] - 1\n minIdx = 0\n if (event.key() == QtCore.Qt.Key_Left) and (self.currentIndex >= minIdx + 1):\n self.currentIndex -= 1\n self.showImage(self.currentIndex)\n elif (event.key() == QtCore.Qt.Key_Right) and (self.currentIndex <= maxIdx - 1):\n self.currentIndex += 1\n self.showImage(self.currentIndex)",
"def previousRange(self):\r\n if (self.selectedmap > 0):\r\n self.pickMap(self.selectedmap-1)",
"def edit_widget_focus(self):\n if self.goto:\n self.goto_node()\n self.update_position(self.get_position())",
"def go_left(self):\n self.change_x = -6",
"def go_left(self):\n self.change_x = -6",
"def move_slider(self, mouse_x):\n\n for slider in self._menu_items:\n if slider['menu_type'] == 'Slider':\n if slider['grabbed'] == True:\n if mouse_x > slider['xpos'] and mouse_x < (slider['xpos']\n +slider['width']):\n slider['rect'].move_ip(mouse_x-slider['rect'].centerx, 0)",
"def move_scroll_bar_down(self):\n scroll = self.textBrowser.verticalScrollBar()\n scroll.setSliderPosition(scroll.maximum())",
"def slide(x1, y1, x2, y2):\n gui.mouseDown(x1, y1)\n gui.moveTo(x2, y2)\n release()",
"def float_to_front(qtile):\n for window in qtile.current_group.windows:\n if window.floating:\n window.cmd_bring_to_front()",
"def bring_down(self):\n\n self.move(self.__min_step__)",
"def go_right(self):\n self.change_x = 6",
"def go_right(self):\n self.change_x = 6",
"def focus_prev_cell(self, prev):\n x, y = self._cell_input.cursor_coordinates()\n y_new = prev._cell_input.rows() - 1\n prev._cell_input.set_cursor_coordinates(x, y_new)\n prev.set_focus()",
"def move_focus(self, pos_x, pos_y):\n factor = self.offset.x * -0.005 / self.scale\n pos_x *= factor\n pos_y *= factor\n self.focus += (pos_x, pos_y)",
"def go_left(self):\n self.rect.centerx -= 9",
"def _increment_dims_left(self, axis: int = None):\n if axis is None:\n axis = self.last_used\n self.set_current_step(axis, self.current_step[axis] - 1)"
] | [
"0.6433824",
"0.6342774",
"0.6247242",
"0.5965561",
"0.58986914",
"0.58390594",
"0.5688559",
"0.5676884",
"0.567108",
"0.56085587",
"0.55993825",
"0.5581846",
"0.55342835",
"0.5532765",
"0.54939634",
"0.54316705",
"0.54136",
"0.53568226",
"0.53568226",
"0.5316149",
"0.5311405",
"0.53044933",
"0.5282022",
"0.52665436",
"0.5238708",
"0.5238708",
"0.5238239",
"0.5231102",
"0.5225518",
"0.5215261"
] | 0.637291 | 1 |
Shift focused dimension slider to be the next slider below. | def _focus_down(self):
    sliders = [d for d in self.not_displayed if self.nsteps[d] > 1]
    if len(sliders) == 0:
        return
    index = (sliders.index(self.last_used) - 1) % len(sliders)
    self.last_used = sliders[index] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _focus_up(self):\n sliders = [d for d in self.not_displayed if self.nsteps[d] > 1]\n if len(sliders) == 0:\n return\n\n index = (sliders.index(self.last_used) + 1) % len(sliders)\n self.last_used = sliders[index]",
"def slider_dragged(self):\n pass",
"def focus_prev(self):\n self.focus_item(forward=False)",
"def focus_next(self):\n self.focus_item()",
"def MoveToNextSlide(self, event):\n pass",
"def focus_slider(self, name):\n # If manipulate is not toggled, this makes no sense\n if not self.is_visible():\n self._app[\"statusbar\"].message(\n \"Focusing a slider only makes sense in manipulate\", \"error\")\n elif name not in self.sliders:\n self._app[\"statusbar\"].message(\n \"No slider called \" + name, \"error\")\n else:\n self.sliders[name].grab_focus()",
"def change_slider(self, step=\"1\"):\n try:\n step = get_int(step, allow_sign=True)\n except StringConversionError as e:\n self._app[\"statusbar\"].message(str(e), \"error\")\n return\n for slider in self.sliders.values():\n if slider.is_focus():\n val = slider.get_value()\n step = self._app[\"eventhandler\"].num_receive() * step\n val += step\n slider.set_value(val)",
"def MoveToPreviousSlide(self, event):\n pass",
"def next_widget(self):\r\n for i in self.widgets[1:]:\r\n if i.get_visible():\r\n self.set_top_widget(i)\r\n return",
"def MoveToSlide(self, event):\n pass",
"def move_scroll_bar_down(self):\n scroll = self.textBrowser.verticalScrollBar()\n scroll.setSliderPosition(scroll.maximum())",
"def __window_forward(self):\n pass",
"def handle_event(self, event):\n if event.type != MOUSEMOTION:\n return\n self.model.slider.left = event.pos[0]",
"def _prev(self, _):\n self.notebook.SetSelection(self.idx-1)",
"def keyPressEvent(self, event):\n maxIdx = self.dat3d.shape[2] - 1\n minIdx = 0\n if (event.key() == QtCore.Qt.Key_Left) and (self.currentIndex >= minIdx + 1):\n self.currentIndex -= 1\n self.showImage(self.currentIndex)\n elif (event.key() == QtCore.Qt.Key_Right) and (self.currentIndex <= maxIdx - 1):\n self.currentIndex += 1\n self.showImage(self.currentIndex)",
"def go_right(self):\n self.change_x = 6",
"def go_right(self):\n self.change_x = 6",
"def front_wheel_from_axis():",
"def slide(x1, y1, x2, y2):\n gui.mouseDown(x1, y1)\n gui.moveTo(x2, y2)\n release()",
"def edit_widget_focus(self):\n if self.goto:\n self.goto_node()\n self.update_position(self.get_position())",
"def bring_down(self):\n\n self.move(self.__min_step__)",
"def go_left(self):\n self.change_x = -6",
"def go_left(self):\n self.change_x = -6",
"def go_right(self):\n self.rect.centerx += 9",
"def move_cursor(self, direction):\n movement = 1\n last_input = \"\"\n if direction is Direction.U:\n movement = -1\n last_input = \"w\"\n elif direction is Direction.L:\n movement = -1\n last_input = \"a\"\n elif direction is Direction.D:\n last_input = \"s\"\n elif direction is Direction.R:\n last_input = \"d\"\n\n uielements = self.buttons + self.numberfields\n\n # Button selection rules:\n # Hard <- Custom <- Easy -> Medium -> Hard -> Custom -> NumberField\n # Button | NumberField <- NumberField -> NumberField\n if movement == 1:\n if self.selected is uielements[-1]:\n next_selected = self.numberfields[0]\n else:\n next_selected = uielements[uielements.index(self.selected)+1]\n else:\n if self.selected is uielements[0]:\n next_selected = self.buttons[-1]\n else:\n next_selected = uielements[uielements.index(self.selected)-1]\n\n # Update UIElement hovering.\n self.selected.set_hovered(False)\n next_selected.set_hovered(True)\n\n # Update changed settings.\n if self.selected.get_type() is UIType.NumberField:\n self.selected.fix_bounds()\n values = [numberfield.value for numberfield in self.numberfields]\n self.controller.set_custom_field_options(Option(*values))\n\n # Update NumberField focus.\n condition = next_selected.get_type() is UIType.NumberField\n next_focus = next_selected if condition else None\n self.set_focused_ui(next_focus)\n\n self.selected = next_selected\n self.update_information_box_text()\n\n self.controller.set_last_inp(last_input)",
"def move_focus(self, pos_x, pos_y):\n factor = self.offset.x * -0.005 / self.scale\n pos_x *= factor\n pos_y *= factor\n self.focus += (pos_x, pos_y)",
"def backToMiddlePos():\n\tprogMode(True) # Active le couple de servos\n\taxDriver.goToPosition(axDriver.BROADCASTID, 0x1FF) # Renvoie a la position 0x1FF",
"def go_left(self):\n self.rect.centerx -= 9",
"def advance(self):\n self.currentIndex += 1\n self.updateCurrentCommand()",
"def siguiente(self, widget):\n window = widget.get_toplevel()\n window.do_move_focus(window, gtk.DIR_TAB_FORWARD)"
] | [
"0.6301336",
"0.6267051",
"0.61282104",
"0.58639055",
"0.57543343",
"0.57306606",
"0.57123274",
"0.571175",
"0.5625833",
"0.55850744",
"0.55349016",
"0.5529265",
"0.5522851",
"0.55110556",
"0.5489294",
"0.5481077",
"0.5481077",
"0.5474194",
"0.54698503",
"0.546574",
"0.54214877",
"0.5402827",
"0.5402827",
"0.53988963",
"0.53262776",
"0.5304523",
"0.5300566",
"0.52822345",
"0.52798116",
"0.52581793"
] | 0.6405031 | 0 |
Generate a synthetic matrix factorization dataset as suggested by Ben Recht. | def generate_synthetic_matrix_factorization_data(xdim=6, ydim=10, nsamples=1000, A_condition_number=1e-10):
    Atrue = np.linspace(1, A_condition_number, ydim).reshape(-1, 1) * np.random.rand(ydim, xdim)
    # the inputs
    X = np.random.randn(xdim, nsamples)
    # the y's to fit
    Ytrue = Atrue.dot(X)
    data = (X.T, Ytrue.T)
    return data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def factor_mat(all_dat, f_num, iterations, regularization):\n\n\t# get # of users and # of items\n\t[u_num, i_num] = all_dat.shape\n\n\t# init user factors and item factors with random values\n\tu_fac = np.matrix(np.random.rand(u_num, f_num))\t# MxF\n\ti_fac = np.matrix(np.random.rand(i_num, f_num))\t# NxF\n\n\t# calculate the preference matrix\n\tpreference = cal_preference(all_dat)\n\n\t# calculate the confidence matrix\n\tconfidence = cal_confidence(all_dat)\n\t\n\t# recalculate the user factors and item factors using the alternating least square method\n\tfor itr in range(iterations):\n\t\tu_fac = alternate_ls(u_num, i_fac, preference, confidence, regularization)\n\t\t#print itr, \"u_fac\"\n\t\ti_fac = alternate_ls(i_num, u_fac, preference.T, confidence.T, regularization)\n\t\t#print itr, \"i_fac\"\n\t\n\t# save the output\n\tdf = pd.DataFrame(u_fac)\n\tdf.to_csv(\"tmp/u_fac.tmp\", index=False, header=False, sep='\\t', encoding='utf-8')\n\tdf = pd.DataFrame(i_fac.T)\n\tdf.to_csv(\"tmp/i_fac.tmp\", index=False, header=False, sep='\\t', encoding='utf-8')\n\n\t# an MxF user factor matrix and an FxN item factor matrix\n\treturn [u_fac, i_fac.T]",
"def build_toy_dataset(N):\n pi = np.array([0.4, 0.6])\n mus = [[1, 1], [-1, -1]]\n stds = [[0.1, 0.1], [0.1, 0.1]]\n x = np.zeros((N, 2), dtype=np.float32)\n\n for n in range(N):\n k = np.argmax(np.random.multinomial(1, pi))\n x[n, :] = np.random.multivariate_normal(mus[k], np.diag(stds[k]))\n\n return x",
"def _make_random_matrix(self, n_components, n_features):",
"def generate_mog_dataset():\n\n n_per_class = 100\n dim = 2\n n_gaussians = 4\n mus = [(0, 1), (-1, 0), (0, -1), (1, 0)]\n mus = [torch.tensor(m) for m in mus]\n var = 0.05\n\n inputs, labels = [], []\n\n for id in range(n_gaussians):\n # Generate input data by mu + x @ sqrt(cov)\n cov = np.sqrt(var) * torch.eye(dim)\n mu = mus[id]\n inputs.append(mu + torch.randn(n_per_class, dim) @ cov)\n\n # Labels\n labels.append(torch.tensor(n_per_class * [1.0 if id < 2 else 0.0]))\n\n return torch.cat(inputs, dim=0), torch.cat(labels, dim=0)",
"def gen_mats(seed, shape, n=4, fmt=\"csr\", density=0.1):\n\n np.random.seed(seed)\n dens = density * np.random.random()\n mats = [sp.random(*shape, density=dens, format=fmt) for i in range(n)]\n return mats",
"def synthesize_data_X(n_x, m, mu_x = sp.random.random(1) * 25, sigma_x = sp.random.random(1) * 25):\n # Create a (1*m) array that represents m examples of a feature x_i.\n x_i = sp.random.normal(mu_x, sigma_x, m).reshape(1, m)\n # Compile all n_x x_i's into a matrix X, dim (n_x * m).\n X = np.matrix(np.array([sp.random.normal(mu_x, sigma_x, m).reshape(1, m)\n for i in range(n_x)])).reshape(n_x, m)\n return X, n_x, m, mu_x, sigma_x",
"def _make_gaussian_matrix(\n data_count: int,\n feature_count: int,\n) -> np.ndarray:\n return np.random.randn(data_count, feature_count)",
"def make_synthetic_matrix(n_features, n_samples, sparsity=.98, random_state=0):\n prng = check_random_state(random_state)\n prec = make_sparse_spd_matrix(n_features, alpha=sparsity,\n smallest_coef=.4, largest_coef=.7,\n random_state=prng)\n cov = linalg.inv(prec)\n d = np.sqrt(np.diag(cov))\n cov /= d\n cov /= d[:, np.newaxis]\n prec *= d\n prec *= d[:, np.newaxis]\n X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples)\n X -= X.mean(axis=0)\n X /= X.std(axis=0)\n # Estimate the covariance\n emp_cov = np.dot(X.T, X) / n_samples\n return emp_cov, prec",
"def create_factors(self):\n\n super(SVDPlusPlus, self).create_factors()\n self.y = np.random.normal(self.init_mean, self.init_stdev, (len(self.items), self.factors))",
"def crescent_data(num_data=200, seed=default_seed):\r\n np.random.seed(seed=seed)\r\n sqrt2 = np.sqrt(2)\r\n # Rotation matrix\r\n R = np.array([[sqrt2 / 2, -sqrt2 / 2], [sqrt2 / 2, sqrt2 / 2]])\r\n # Scaling matrices\r\n scales = []\r\n scales.append(np.array([[3, 0], [0, 1]]))\r\n scales.append(np.array([[3, 0], [0, 1]]))\r\n scales.append([[1, 0], [0, 3]])\r\n scales.append([[1, 0], [0, 3]])\r\n means = []\r\n means.append(np.array([4, 4]))\r\n means.append(np.array([0, 4]))\r\n means.append(np.array([-4, -4]))\r\n means.append(np.array([0, -4]))\r\n\r\n Xparts = []\r\n num_data_part = []\r\n num_data_total = 0\r\n for i in range(0, 4):\r\n num_data_part.append(round(((i + 1) * num_data) / 4.))\r\n num_data_part[i] -= num_data_total\r\n part = np.random.normal(size=(num_data_part[i], 2))\r\n part = np.dot(np.dot(part, scales[i]), R) + means[i]\r\n Xparts.append(part)\r\n num_data_total += num_data_part[i]\r\n X = np.vstack((Xparts[0], Xparts[1], Xparts[2], Xparts[3]))\r\n\r\n Y = np.vstack((np.ones((num_data_part[0] + num_data_part[1], 1)), -np.ones((num_data_part[2] + num_data_part[3], 1))))\r\n return {'X':X, 'Y':Y, 'info': \"Two separate classes of data formed approximately in the shape of two crescents.\"}",
"def generate_nmnist_dataset(initial_size, input_dir, num_spikes, step_factor):\n image_dataset = np.rec.array(None, dtype=[('height', np.uint16),\n ('width', np.uint16),\n ('image_data', 'object'),\n ('label', np.uint32)],\n shape=initial_size)\n num_images = 0\n\n # loop through each folder within the test directories\n for i in range(0, 10):\n current_dir = input_dir + os.path.sep + str(i) + os.path.sep + '*.bin'\n print('Processing {}...'.format(current_dir))\n for filename in glob.iglob(current_dir):\n images = prepare_n_mnist(filename, True, num_spikes, step_factor)\n if num_images + len(images) >= image_dataset.size:\n image_dataset = np.resize(image_dataset,\n (num_images + len(images)) * 2)\n add_images_to_dataset(image_dataset, images, num_images, i, 28, 28)\n num_images += len(images)\n\n return image_dataset[0:num_images]",
"def build_data(seed):\n rs = np.random.RandomState(seed)\n\n def y(x):\n \"\"\" y(x) = 1 + 0.3 * x_1 - 0.6 * x_2^2 - 0.2 * x_3^3 + 0.5 x_4^4 \"\"\"\n x1, x2, x3, x4 = x[:, 0], x[:, 1], x[:, 2], x[:, 3]\n return 1 + 0.3 * x1 - 0.6 * x2 ** 2 - 0.2 * x3 ** 3 + 0.5 * x4 ** 4\n\n xtrain = rs.rand(10000, 4)\n xtest = rs.rand(1000, 4)\n ytrain = y(xtrain) + rs.rand(10000) / 10\n ytest = y(xtest) + rs.rand(1000) / 10\n return xtrain, xtest, ytrain, ytest",
"def generate_synthetic_dataset(args):\n logger = logging.getLogger(\"GACM\")\n logger.info('Checking the data files...')\n for data_path in args.train_dirs + args.dev_dirs + args.test_dirs:\n assert os.path.exists(data_path), '{} file does not exist.'.format(data_path)\n assert len(args.test_dirs) > 0, 'No test files are provided.'\n dataset = Dataset(args, train_dirs=args.train_dirs, dev_dirs=args.dev_dirs, test_dirs=args.test_dirs)\n logger.info('Initialize the model...')\n model = Agent(args, len(dataset.qid_query), len(dataset.uid_url), len(dataset.vid_vtype))\n logger.info('model.global_step: {}'.format(model.global_step))\n assert args.load_model > -1\n logger.info('Restoring the model...')\n model.load_model(model_dir=args.load_dir, model_prefix=args.algo, global_step=args.load_model, load_optimizer=False)\n\n synthetic_types = ['deterministic', 'stochastic']\n shuffle_splits = [None, [1, 11], [1, 6, 11]]\n amplifications = [1, 7]\n for synthetic_type in synthetic_types:\n for shuffle_split in shuffle_splits:\n for amplification in amplifications:\n #synthetic_type = 'deterministic'\n #shuffle_split = None\n #amplification = 1\n file_path = os.path.join(args.load_dir, '..', 'synthetic')\n model.generate_synthetic_dataset('test', dataset, file_path, \n 'synthetic_{}_{}_{}.txt'.format(synthetic_type[0].upper(), str(shuffle_split), amplification), \n synthetic_type=synthetic_type, shuffle_split=shuffle_split, amplification=amplification)\n # exit()\n logger.info('Done with click sequence generation.')",
"def fisher_matrix(model, dataset, samples):\n inputs, labels = dataset\n weights = model.trainable_weights\n variance = [tf.zeros_like(tensor) for tensor in weights]\n\n for _ in range(samples):\n # Select a random element from the dataset.\n index = np.random.randint(len(inputs))\n data = inputs[index]\n\n # When extracting from the array we lost a dimension so put it back.\n data = tf.expand_dims(data, axis=0)\n\n # Collect gradients.\n with tf.GradientTape() as tape:\n output = model(data)\n log_likelihood = tf.math.log(output)\n\n gradients = tape.gradient(log_likelihood, weights)\n\n # If the model has converged, we can assume that the current weights\n # are the mean, and each gradient we see is a deviation. The variance is\n # the average of the square of this deviation.\n variance = [var + (grad ** 2) for var, grad in zip(variance, gradients)]\n\n fisher_diagonal = [tensor / samples for tensor in variance]\n return fisher_diagonal",
"def makecldf(args):\n with_dataset(args, Dataset._install)",
"def create_data(num_sample=None):\n I = np.eye(3, dtype=np.float32)\n\n\n if (num_sample == None):\n num_sample = 100\n\n # Generate first class\n m1 = np.asarray([0.5, 0.5], dtype=np.float32)\n cov1 = np.asarray([[0.1, 0],\n [0, 0.1]], dtype=np.float32)\n data1 = rng.multivariate_normal(m1, cov1, num_sample)\n label1 = np.ones((num_sample), dtype=np.uint16) - 1\n label1 = I[label1,:]\n\n # Generate second class\n m2 = np.asarray([0.3,0.3], dtype=np.float32)\n cov2 = np.asarray([[0.5, 0], [0, 0.5]], dtype=np.float32)\n data2 = rng.multivariate_normal(m2, cov2, num_sample)\n label2 = np.ones((num_sample), dtype=np.uint16)\n label2 = I[label2, :]\n\n\n return (data1, label1, data2, label2)",
"def create_geneIDsDF():\n datas=data.plfam_to_matrix()\n datas.run()\n print('***Dataframe created***')",
"def get_data(self):\n if self.random_seeds: \n self._validate_random_seeds()\n seed_iter = list(map(iter,self.random_seeds))\n nsamples = len(self.random_seeds[0])\n else:\n seed_iter = None\n nsamples = self.numsamples\n self._set_meta_features()\n for _ in tqdm(range(nsamples)):\n self._update_meta_features(seed_iter)\n self._sample()\n yield self._extract_features()",
"def testMultiClass_MatrixData(self):\n cont_features = [\n tf.contrib.layers.real_valued_column('feature', dimension=4)]\n\n classifier = tf.contrib.learn.DNNClassifier(\n n_classes=3,\n feature_columns=cont_features,\n hidden_units=[3, 3],\n config=tf.contrib.learn.RunConfig(tf_random_seed=1))\n\n classifier.fit(input_fn=_iris_input_multiclass_fn, steps=200)\n self.assertTrue('centered_bias_weight' in classifier.get_variable_names())\n scores = classifier.evaluate(input_fn=_iris_input_multiclass_fn, steps=1)\n self.assertGreater(scores['accuracy'], 0.8)\n self.assertLess(scores['loss'], 0.3)",
"def createFromData(data, r):\n m = DensityEstimator()\n #print \"Creating denisty estimator from data...\"\n\n m.mixture_dimensions = data.shape[1]\n m.dataset_size = data.shape[0]\n m.threshold = r\n \n # Normalisation of the data set\n m.normalisation_vector_min = np.array([0.0]*data.shape[1])\n m.normalisation_vector_max = np.array([0.0]*data.shape[1])\n for i in range(0, data.shape[1]):\n #data[:, i] -= min(data[:, i])\n #print i, \": \", max(data[:, i])\n m.normalisation_vector_min[i] = min(data[:, i]) \n m.normalisation_vector_max[i] = max(data[:, i])\n if m.normalisation_vector_min[i] == m.normalisation_vector_max[i]:\n m.normalisation_vector_min[i] = 0.0\n \n #data[:, i] -= m.normalisation_vector_min[i]\n #data[:, i] /= (m.normalisation_vector_max[i] - m.normalisation_vector_min[i])\n #data[:, i] /= (m.normalisation_vector_max[i] )\n #print m.normalisation_vector[i]\n #return\n \n # Split into clusters / kernels\n t=set()\n d=set(range(0,data.shape[0]))\n def get_random():\n sample = random.randint( 0, data.shape[0]-1 )\n while sample in t:\n sample = random.randint( 0, data.shape[0] - 1 )\n t.add(sample)\n return sample\n \n sets=[]\n while len(t)<data.shape[0]:\n i = get_random()\n sets.append(set([i]))\n #print \"Cluster at\", i, \n for j in d-t:\n if np.sqrt(sum((data[i,:]-data[j,:])**2)) < r:\n sets[-1].add(j)\n t.add(j)\n #print \"members = \", len (sets[-1])\n new_set = list()\n for i in sets[-1]:\n new_set.append(data[i, :])\n sets[-1] = np.array(new_set)\n \n #print \"Cluster count=\",len(sets)\n #for i in sets:\n #print 'sets: ',i\n \n # Turn each cluster into a Gaussian kernel\n\n # Any set that has less than 3 members merge with nearests\n minimum_members = 2 #data.shape[1] # mixture dimensions to avoid singular covariance.\n #print sets\n #for i in sets:\n #if len(i) < minimum_members:\n #print \"Small set \"\n #for pt in i:\n #close = [None, 1e100000]\n #for j in sets:\n ##print i\n #print j\n #if (i == j).all():\n #print \"same\"\n #continue\n #print \"diff\"\n #if len(j) < minimum_members:\n #continue # err, bad, should allow possibile join smalls\n #mu = np.sum(i, 0) / (1.0 * i.shape[0])\n #dist = sum([(m - p)**2 for m, p in zip(mu, pt)])\n #print dist\n #if dist < close[1]:\n #close[1] = dist\n #close[0] = j\n #close[0].add(pt)\n \n \n m.kernels = []\n for i in sets:\n if len(i) >= minimum_members:\n m.kernels.append(GaussKernel(i, i.shape[0]/(1. * data.shape[0])))\n else:\n print \"LOST SOME\"\n #else:\n #print \"->Warning: dropping data point as it lies alone, singular\"\n #print \"ok.\"\n return m",
"def _build_datasets(self):\n self._build_datasets_sis3302()\n self._build_datasets_sis3305()",
"def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")",
"def initialize(self):\n\t\tmu = 0\n\t\tsigma = np.sqrt(2 / self.dataset[\"d\"])\n\n\t\tself.F1 = np.random.normal(mu, sigma, self.F1.shape)\n\t\tself.F2 = np.random.normal(mu, sigma, self.F2.shape)\n\t\tself.W = np.random.normal(mu, sigma, self.W.shape)\n\n\t\tself.F1_momentum = np.zeros(self.F1.shape)\n\t\tself.F2_momentum = np.zeros(self.F2.shape)\n\t\tself.W_momentum = np.zeros(self.W.shape)",
"def generate_data(self, s=0):\n np.random.seed(s)\n \n L_b = np.linalg.cholesky(self.cov_b)\n L_t = np.linalg.cholesky(self.cov_t)\n \n self.x_benchmark = np.dot(L_b,(np.random.randn(self.n_points,2) + self.mean_b).T).T\n self.x_trial = np.dot(L_t,(np.random.randn(self.n_points,2) + self.mean_t).T).T",
"def featMatGenerator(dirName, trajfile, trajFilter):\n \n #load the data and extract feature vectors for each trajectory and plate summary for each chunk\n featMatTraj = {}\n featMatPlate = pd.DataFrame()\n try:\n if len(trajfile.split('_'))<10:\n fshort = '_'.join(trajfile.split('_')[0:-2:6])\n else:\n fshort = '_'.join(trajfile.split('_')[0:-1:7])\n featMatPlate = pd.DataFrame()\n with pd.HDFStore(os.path.join(dirName, trajfile), 'r') as fid:\n nChunks = list(fid.keys())\n for chunk in nChunks:\n chunkno = [int(s) for s in chunk.split('_') if s.isdigit()]\n chunkno = chunkno[0]\n\n featMatTraj[chunkno] = pd.DataFrame()\n nWorms = np.unique(fid[chunk]['worm_index'])\n for w in nWorms:\n if fid[chunk][fid[chunk]['worm_index']==w].shape[0]>=trajFilter:\n featMatTraj[chunkno] = featMatTraj[chunkno].append(\\\n fid[chunk][fid[chunk]['worm_index']==w].mean(),ignore_index=True)\n \n featMatTraj[chunkno].reset_index(drop=True)\n \n temp = featMatTraj[chunkno].median()\n temp = temp.drop(['worm_index', 'timestamp']).rename(lambda x: x +'_med').to_frame().transpose()\n \n temp2 = featMatTraj[chunkno].quantile(0.75) - featMatTraj[chunkno].quantile(0.25)\n temp2 = temp2.drop(['worm_index', 'timestamp']).rename(lambda x: x + '_iqr').to_frame().transpose()\n \n tempfinal = pd.concat([temp, temp2], axis = 1)\n tempfinal ['exp'] = fshort\n tempfinal['Chunk'] = chunk\n tempfinal ['drug'] = fshort.split('_')[0]\n \n featMatPlate = featMatPlate.append(tempfinal, ignore_index=True)\n del temp, temp2, tempfinal\n del nWorms\n del nChunks\n \n featMatPlate.reset_index(drop=True) \n featMatPlate.drop(featMatPlate.columns[np.sum(featMatPlate.isna()>featMatPlate.shape[0]/2)], \\\n axis=1, inplace = True)\n except OSError:\n print (trajfile + 'is invalid file format') \n\n #write the featMatPlate to a .csv file\n featMatPlate.to_csv(os.path.join(os.path.dirname(dirName), fshort + '_FeatMatPlate.csv'))\n\n #save the featMatTraj to an excel file\n writer = pd.ExcelWriter(os.path.join(os.path.dirname(dirName), fshort + '_FatMatTraj.xlsx'))\n for chunk in featMatTraj.keys():\n featMatTraj[chunk].to_excel(writer, sheet_name = str(chunk))\n writer.save()\n \n return featMatTraj, featMatPlate",
"def gen_mixture():\n npr.seed(0)\n num_exp = int(1e4)\n x_dim = 2\n z_dim = 2\n mu1 = [5, 5,]\n mu2 = [-5, -5]\n theta = np.array([[2,1],[-1,-2]])\n sigma = 0.1\n u = npr.uniform((num_exp,))\n z = np.zeros((num_exp, z_dim))\n cov = np.zeros((z_dim, z_dim))\n np.fill_diagonal(cov, 1)\n sz = int(num_exp/2)\n z[:sz, ]= npr.multivariate_normal(mu1, cov,sz)\n z[sz:, ] = npr.multivariate_normal(mu2,cov,sz)\n mu_x = [email protected]()\n\n x = np.zeros((num_exp, x_dim))\n for i in range(num_exp):\n x[i,:] = npr.multivariate_normal(mu_x[:,i], sigma*cov)\n print(x.shape)\n np.save('data/syn_mixture.npy', x)",
"def FitFundamentalMatrix(mss):\n\n\tbatch_size = mss.shape[0]\n\tnum_correspondences = mss.shape[1]\t\n\n\tmss_src, T_src = utils.normalise2dpts(mss[:, :, :2])\n\tmss_target, T_target = utils.normalise2dpts(mss[:, :, 2:])\n\n\tones = torch.tensor([1]).double().cuda().view(1, 1, 1).repeat(batch_size, num_correspondences, 1)\n\tmss_src = torch.cat([mss_src, ones], dim = 2)\n\tmss_target = torch.cat([mss_target, ones], dim = 2)\n\n\trow = torch.cat([\n\t\tmss_src * mss_target[:, :, 0].view(batch_size, num_correspondences, 1), \n\t\tmss_src * mss_target[:, :, 1].view(batch_size, num_correspondences, -1),\n\t\tmss_src \n\t], dim = 2)\n\n\n\touter_product = row.unsqueeze(3) * row.unsqueeze(2)\n\tmat = outer_product.sum(dim = 1)\n\n\n\tU = []\n\tS = []\n\tVT = []\n\n\tfor i in range(batch_size):\n\n\t\t_, V = torch.symeig(mat[i])\n\t\tu, s, v = torch.svd(V[:, 0].view(3, 3))\n\t\tvt = v.permute(1, 0)\n\t\ts[2] = 0\n\n\t\tU.append(u.unsqueeze(0))\n\t\tS.append(torch.diag(s).unsqueeze(0))\n\t\tVT.append(vt.unsqueeze(0))\n\n\tU = torch.cat(U, dim = 0)\n\tS = torch.cat(S, dim = 0)\n\tVT = torch.cat(VT, dim = 0)\n\n\tF = torch.bmm(torch.bmm(U, S), VT)\n\tF = torch.bmm(T_src, torch.bmm(F.permute(0, 2, 1), T_target.permute(0, 2, 1)))\n\tF = F / F[:, 2, 2].view(batch_size, 1, 1)\n\n\tutils.nan_check(F)\n\n\treturn F",
"def CreateDesignMatrix_X(z, x, y, n ):\n if len(x.shape) > 1:\n x = np.ravel(x)\n y = np.ravel(y)\n\n N = len(x)\n l = int((n+1)*(n+2)/2) \n X = np.ones((N,l))\n\n for i in range(1,n+1):\n q = int((i)*(i+1)/2)\n for k in range(i+1):\n X[:,q+k] = x**(i-k) * y**k\n \n X, z_, indicies = shuffle(X, z)\n X_train, X_test, z_train, z_test = train_test_split(X, z_, test_size=split_train_test, random_state=seed, shuffle=False)\n X_test, X_val, z_test, z_val = train_test_split(X_test, z_test, test_size=split_test_val, random_state=seed, shuffle=False)\n\n return X, X_train, X_test, X_val, z_train, z_test, z_val, indicies",
"def generate_dataset(self):\n sets = {\n \"train\": 10,\n \"test\": 5,\n }\n\n fields = {\n \"strings_list\": lambda x: str_to_ascii(self.generate_string_list(x)),\n \"data\": lambda x: np.random.randint(0, 10, (x, 10)),\n \"number\": lambda x: np.array(range(x)),\n \"field_with_a_long_name_for_printing\": lambda x: np.array(range(x)),\n }\n\n lists = {\n \"list_dummy_data\": np.array(range(10)),\n \"list_dummy_number\": np.array(range(10), dtype=np.uint8),\n }\n\n dataset = {}\n data_fields = {}\n for set_name in sets:\n dataset[set_name] = self.populate_set(sets[set_name], fields, lists)\n data_fields[set_name] = sorted(dataset[set_name].keys())\n\n return dataset, data_fields",
"def test_feature_computation(self):\n k = [2, 3, 4, 5, 6]\n mn = self.create_chain_model(k)\n d = 4\n\n for i in range(len(k)):\n mn.set_unary_weights(i, np.random.randn(k[i], d))"
] | [
"0.6346046",
"0.61090165",
"0.5843396",
"0.5642854",
"0.5520073",
"0.54842937",
"0.54265517",
"0.5424973",
"0.5417877",
"0.54015404",
"0.53981084",
"0.53949046",
"0.53921515",
"0.53300136",
"0.5301031",
"0.5297291",
"0.52782845",
"0.5274809",
"0.5271235",
"0.5267814",
"0.52515453",
"0.52450895",
"0.5244225",
"0.5215459",
"0.5211739",
"0.5210899",
"0.5204998",
"0.5203079",
"0.5200278",
"0.51997083"
] | 0.6540636 | 0 |
This function completes the API call for WNBA shot data with the provided parameters. | def wnba_shot_call(params, headers):
pbp_list = []
for i in range(1, 10):
try:
            api_response = get(BASE_WNBA_URL.format(season=params['season'],
                                                    game_id=params['game_id'],
                                                    quarter=i),
                               headers=headers)
api_response.raise_for_status()
json_resp = api_response.json()
pbp_list += json_resp['g']['pla']
except:
break
api_response.close()
return pbp_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def capture_and_upload_screenshot(self) -> None:",
"def download_screenshot_command():\n # 1. Get input scan id and resolution from Demisto\n scanid = demisto.args().get('scanid')\n resolution = demisto.args().get('resolution')\n # 2. Get the forensic webpage screenshot from SlashNext API\n response = download_screenshot(scanid=scanid, resolution=resolution)\n if response.get('errorNo') != 0:\n return\n # 3. Parse and format the response\n sc_base64 = response.get('scData').get('scBase64')\n sc_data = base64.b64decode(sc_base64)\n\n sc_file = fileResult('slashnext_{}.jpg'.format(scanid), sc_data, entryTypes['image'])\n\n demisto.results({\n 'Type': entryTypes['image'],\n 'ContentsFormat': formats['text'],\n 'Contents': 'Forensics: Webpage Screenshot for URL Scan ID = {}'.format(scanid),\n 'File': sc_file.get('File'),\n 'FileID': sc_file.get('FileID')\n })",
"def updateShotInfo(*args):\n shot = cmds.textScrollList(widgets[\"shotListTSL\"], q=True, si=True)[0]\n\n #clear all text fields\n clearFields()\n\n pi.currentShotFolder = cFuncs.fixPath(os.path.join(pi.currentProject, \"shots\", shot))\n pi.currentVariant = \"\" \n######---------reset the pi variables for the shot stuff\n\n lists = [\"anmVariationsTSL\", \"lgtVariationsTSL\", \"fxVariationsTSL\"]\n types = [\"anm\", \"lgt\", \"fx\"]\n\n #loop through types of files in shot - anm, lgt, fx\n for x in range(3):\n shotTypeFolder = \"{0}/{1}\".format(pi.currentShotFolder, types[x])\n #clear the list\n cmds.textScrollList(widgets[lists[x]], e=True, ra=True)\n cmds.image(widgets[\"shotInfoPic\"], e=True, image = \"{0}/defaultAssetImage.jpg\".format(pi.images))\n vars = cFuncs.getShotVariantList(shotTypeFolder)\n if vars:\n for var in vars:\n cmds.textScrollList(widgets[lists[x]], e=True, a=var, sc=partial(updateVariantInfo, var, shotTypeFolder))",
"def post(self):\n upload_files = self.get_uploads('file')\n blob_info = upload_files[0]\n logging.info('Blob Info- %s', blob_info)\n # Associate the screenshot with a given test\n key = self.request.get('key')\n logging.info('PageData Key- %s', key)\n\n if key:\n try:\n test_data = db.get(db.Key(key))\n logging.info('PageData Loaded.')\n except db.BadKeyError:\n self.error(500)\n logging.error('Bad Key Error Exception.')\n return\n\n test_data.screenshot = screenshot.AddBlobstoreScreenshot(blob_info,\n test_data)\n logging.info('Screenshot Created-%s', test_data.screenshot.key())\n test_data.put()\n\n self.redirect((GET_SCREENSHOT_STATUS_URL + '?key=%s') % key)",
"async def capture_screenshot(self) -> bytes:",
"def shot_stats_cb(self, data):\n pass\n # rospy.logdebug(\"shot stat received\")\n # self.shot_count += 1\n # if data.goal:\n # self.doors[data.door].increment_count()\n # else:\n # self.shot_missed += 1",
"def main(parameters):\n metadata = get_metadata(parameters)\n # pprint(metadata)\n image_api = NswSatelliteImages(parameters, metadata)\n print('Zoom level:', image_api.zoom_level,\n 'Resolution:', image_api.resolution,\n 'Scale:', image_api.scale)\n image_api.download_tile(xtile=39000, ytile=60000)",
"def take_screeshot():\n\n # generate a unique identifier for the screenshot\n if not settings.screenshot_app:\n return ('Screenshots are disabled', 404)\n\n url, width, height = parse_querystring(request)\n\n if url is None:\n return ('Please provide a URL argument', 422)\n\n\n crop = request.args.get('crop', \"--crop\")\n if crop == \"no\": crop = \"--no-crop\"\n\n # find domain name\n parsed_uri = urlparse(url)\n domain = parsed_uri.netloc\n screenshot_path = os.path.join(settings.screenshots_path, domain)\n \n path = parsed_uri.path\n if not path: \n path = \"\"\n else:\n path = path[1:].replace(\"/\", \"%2F\")\n # construct file name\n filename = \"{path}___{size}___{timestamp}\".format(\n path=path,\n size=\"{0}x{1}\".format(width, height),\n timestamp=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n )\n cwd = os.getcwd()\n\n mkdir = 'mkdir -p \"{0}\"'.format(screenshot_path)\n cd1 = 'cd \"{0}\"'.format(screenshot_path)\n cd2 = 'cd \"{0}\"'.format(cwd)\n take_screenshot = \"{screenshot_app} {url} {width}x{height} {crop} --filename '{filename}'\".format(\n screenshot_app=settings.screenshot_app,\n url=url,\n width=width,\n height=height,\n filename=filename,\n crop=crop\n )\n \n command = '{mkdir} && {cd1} && {take_screenshot} && {cd2}'.format(\n mkdir=mkdir,\n cd1=cd1,\n take_screenshot=take_screenshot,\n cd2=cd2\n )\n \n output = subprocess.call(command, stdout=subprocess.PIPE, shell=True)\n \n screenshot_identifier = domain + \"/\" + filename + \".png\"\n screenshot_url = SCREENSHOTS_ROOT + \"/\" + screenshot_identifier\n return jsonify(url=screenshot_url, id=screenshot_identifier)",
"def capture_image(self, data={}):\n # call self.increment_count() after each image saved\n pass",
"def run(self):\n # Wait for the 'shot' message ready\n self.wait_for_messages()\n # Send the initial states to the server\n self.send_shape_and_states()\n # Wait for the 'method' message ready\n self.wait_for_messages()\n\n # Send the measurement angles to the server\n for y in range(self.__depth):\n self.send_angle_bulks(y)\n\n # Obtain the measurement outcomes\n result = self.get_classical_output()[::-1]\n self.send_back(\n 'local',\n self.__wrap_shot_message(\n 'setResult',\n {'result': result, 'shot': self.__shots},\n )\n )",
"def createShot(shotFolder, *args):\n createShotUI(shotFolder)",
"def test_data(self, data):\n print('-'*30)\n print('Starting test: {}'.format(data['name']))\n self.set_resolution(data['resolution']['width'], data['resolution']['height'])\n self.test_actions(data['actions'])\n print('Test finished')\n print('-'*30)",
"def send_mailshot(mailshot_data):\n\n url = settings.mailer_endpoint\n headers = {'Content-Type': 'application/json'}\n response = requests.post(url, headers=headers, data=mailshot_data)",
"def calling_api():\n\n url_bio = \"https://stats.nba.com/stats/leaguedashplayerbiostats?College=&Conference=&Country=&DateFrom=&DateTo=&Division=&DraftPick=&DraftYear=&GameScope=&GameSegment=&Height=&LastNGames=0&LeagueID=00&Location=&Month=0&OpponentTeamID=0&Outcome=&PORound=0&PerMode=PerGame&Period=0&PlayerExperience=&PlayerPosition=&Season=2020-21&SeasonSegment=&SeasonType=Regular+Season&ShotClockRange=&StarterBench=&TeamID=0&VsConference=&VsDivision=&Weight=\"\n headers = {\n \"Accept\": \"application/json, text/plain, */*\",\n \"Accept-Encoding\": \"gzip, deflate, br\",\n \"Accept-Language\": \"es-ES,es;q=0.9\",\n \"Origin\": \"https://www.nba.com\",\n \"Referer\": \"https://www.nba.com/\",\n \"Sec-Fetch-Dest\": \"empty\",\n \"Sec-Fetch-Mode\": \"cors\",\n \"Sec-Fetch-Site\": \"same-site\",\n \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36\",\n \"x-nba-stats-origin\": \"stats\",\n \"x-nba-stats-token\": \"true\"\n }\n\n response_bio = requests.get(url_bio, headers=headers).json()\n\n return response_bio\n\n def updating_api(response_bio):\n \"\"\"\n We just set the columns and the rows from our call and do some modifications in the resulting columns of our DataFrame. \n \"\"\"\n\n frame_bio = pd.DataFrame(response_bio['resultSets'][0]['rowSet'])\n frame_bio.columns = response_bio['resultSets'][0]['headers']\n frame_bio.drop([\"PLAYER_ID\", \"TEAM_ID\", \"PLAYER_HEIGHT_INCHES\"], axis=1, inplace=True)\n frame_bio[\"SEASON\"] = \"2020-21\"\n\n return frame_bio",
"def upload_telemetry_data():\n gen_log.info(\"upload_telemetry_data\")\n try:\n data=[{attr:random.randint(20,100)} for attr in attributes if attr!='temperature']\n # data={\"temperature\":random.randint(20,100), \n # \"humidity\":random.randint(20,100),\n # \"other\":random.randint(-60,60),\n # \"active\": False}\n url=url_prefix+'/v1/{}/telemetry'.format(token)\n res=yield AsyncHTTPClient().fetch(url,method='POST',body=json.dumps(data),headers=headers)\n #res=requests.post(url,data=json.dumps(data),headers=headers)\n gen_log.info(res.code)\n except Exception as ex:\n gen_log.error(ex)\n IOLoop.current().add_timeout(time.time()+1,upload_telemetry_data)",
"def office_download_data(parser, args, params):\n parser.add_argument(\n '--station_list', type=str, help='Station file from IRIS',\n required=True, metavar='x')\n parser.add_argument(\n '--recording_time', type=int, \n help='Recording time from event start (seconds)', metavar='x',\n required=True)\n parser.add_argument(\n '--padding_time', type=int,\n help='Padding time', metavar='x',\n required=True)\n parser.add_argument(\n '--with_waveforms', action='store_true',\n help='Download waveform data (instead of just station data)')\n \n local_args = parser.parse_known_args(args)\n \n station_list = local_args[0].station_list\n with_waveforms = local_args[0].with_waveforms\n padding_time = local_args[0].padding_time\n recording_time = local_args[0].recording_time\n control.download_data(params, station_list, with_waveforms, recording_time,\n padding_time)",
"def _execute(self):\n # Collect the results.\n results, _ = asyncio.run(\n apd.async_retrieve(\n self.args['pages'],\n self.args['from_'],\n self.args['to'],\n self.args['attempts'],\n self.args['backoff'],\n self.args['dump'],\n ))\n result_count = len(results)\n logger.info(f'Total: {result_count}')\n\n # Get the format and print the results.\n format_ = self.args['format_'].lower()\n formatter = Formatter(format_)\n formatter.print(results)",
"def get(self):\n self.response.out.write(blobstore.create_upload_url(\n UPLOAD_SCREENSHOT_IMAGE_URL))",
"def sendData(url,key,field1,field2,temp,pres):\n\n values = {'api_key' : key,'field1' : temp,'field2' : pres}\n\n postdata = urllib.urlencode(values)\n req = urllib2.Request(url, postdata)\n response = urllib2.urlopen(req, None, 5)\n\thtml_string = response.read()\n\tresponse.close()\n\n try:\n # Send data to Thingspeak\n\tresponse = urllib2.urlopen(req, None, 5)\n\thtml_string = response.read()\n\tresponse.close()\n\tlog = log + 'Update ' + html_string\n\n except urllib2.HTTPError, e:\n log = log + 'Server could not fulfill the request. Error code: ' + e.code\n except urllib2.URLError, e:\n log = log + 'Failed to reach server. Reason: ' + e.reason\n except:\n log = log + 'Unknown error'\n\n print log",
"def trial_end(self, parameter_id, success, **kwargs):",
"def get_shot_data(self, **kwargs: str) -> pd.DataFrame:\n data = self._get_data(query=\"shotsData\", **kwargs).T\n return data",
"def capture(self):\n if self.nActivePins > 0:\n if self.role == \"master\":\n correlationPre = self.snapShot()\n (self.channels, self.dueStartTimeUsecs, self.dueFinishTimeUsecs, timeDataPre, timeDataPost) = \\\n captureAndPackageIntoChannels(self.f, self.pinsToMeasure, self.pinMap, self.wallClock)\n self.wcAcReqResp = {\"pre\":timeDataPre, \"post\":timeDataPost}\n if self.role == \"master\":\n correlationPost = self.snapShot()\n self.wcSyncTimeCorrelations = [correlationPre, correlationPost]\n elif self.role == \"client\":\n self.wcSyncTimeCorrelations = self.timestampedReceivedControlTimeStamps",
"def winhttp_WinHttpWriteData(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"hRequest\", \"lpBuffer\", \"dwNumberOfBytesToWrite\", \"lpdwNumberOfBytesWritten\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def build_context_screenshot(forensics_data: dict) -> dict:\n return assign_params(\n URL=forensics_data.get('url'),\n )",
"def snapShot_get(self):\n _progressBar = None\n try:\n _str_func = 'snapShot_get'\n log.debug(cgmGEN.logString_start(_str_func))\n str_self = self.mNode\n\n _blockScale = self.blockScale\n \n \n md_ctrls = controls_get(self, define=True, form=True, prerig=True,asDict=True,getExtra=0)\n md_dat = {}\n \n md_ctrls['base']=[self]\n \n log.debug(cgmGEN.logString_sub(_str_func, 'Gather Dat'))\n \n for datSet,dat in md_ctrls.iteritems():\n md_dat[datSet] = {}\n try:_progressBar = CGMUI.doStartMayaProgressBar(stepMaxValue=len(dat))\n except:_progressBar = None\n \n for i,mCtrl in enumerate(dat):\n _str = mCtrl.p_nameShort\n _d = {'str':_str,\n 'mObj':mCtrl,\n 'nameBase':mCtrl.p_nameBase,\n 'cgmTags':mCtrl.getNameDict()}\n \n _strStatus = \"{0} | {1} \".format(_str_func,_str)\n log.debug(cgmGEN.logString_sub(_str_func,_str))\n if _progressBar:CGMUI.progressBar_set(_progressBar,step=1,\n status = _strStatus) \n \n if not ATTR.is_locked(_str,'translate'):\n _d['pos']=mCtrl.p_position\n \n if not ATTR.is_locked(_str,'rotate'):\n _d['orient']=mCtrl.p_orient\n \n \n _d['lossyScale'] = TRANS.scaleLossy_get(_str)\n _d['worldScale'] = mc.xform(_str, q=True, scale = True, worldSpace = True, absolute = True)\n \n _d['noParent'] = False\n if ATTR.is_locked(_str,'translate'):\n _d['noParent'] = True\n \n \n for a in ['sx','sy','sz']:\n if not ATTR.is_locked(_str,a):\n v = ATTR.get(_str,a)\n #if not MATH.is_float_equivalent(1.0,v):\n _d[a] = v \n if not _d.get('axisSize'):\n _d['axisSize'] = DIST.get_axisSize(_str)\n if not _d.get('bbSize'):\n _d['bbSize'] = TRANS.bbSize_get(_str)\n \n md_dat[datSet][i] = _d\n\n #pprint.pprint(md_dat)\n self.__snapShotDat = md_dat\n return md_dat \n except Exception,err:\n cgmGEN.cgmExceptCB(Exception,err)\n finally:\n if _progressBar:CGMUI.doEndMayaProgressBar()",
"def process_cmd():\n web_scraper = SainsburyWebscraper()\n logger.info(\"Sainsbury web scraper initialized and loaded data from SainsburyWebscraper\")\n\n json_data = web_scraper.get_product_data()\n logger.info(\"Found %s products with the following data:\" % len(json_data[\"results\"]))\n print json.dumps(json_data, indent=4, sort_keys=True)",
"def studio_submit(self, data, suffix=''):\n self.oppiaid = data.get('oppiaid')\n self.src = data.get('src')\n self.width = data.get('width')\n self.height = data.get('height')\n\n return {'result': 'success'}",
"def getCoverage(\n self,\n identifier=None,\n bbox=None,\n time=None,\n format=None,\n crs=None,\n width=None,\n height=None,\n resx=None,\n resy=None,\n resz=None,\n parameter=None,\n method=\"Get\",\n timeout=30,\n **kwargs\n ):\n from owslib.util import makeString\n from urllib.parse import urlencode\n from owslib.util import openURL\n\n if logger.isEnabledFor(logging.DEBUG):\n msg = \"WCS 1.0.0 DEBUG: Parameters passed to GetCoverage: identifier={}, bbox={}, time={}, format={}, crs={}, width={}, height={}, resx={}, resy={}, resz={}, parameter={}, method={}, other_arguments={}\" # noqa\n logger.debug(\n msg.format(\n identifier, bbox, time, format, crs, width, height, resx, resy, resz, parameter, method, str(kwargs)\n )\n )\n\n base_url = self.source\n\n logger.debug(\"WCS 1.0.0 DEBUG: base url of server: %s\" % base_url)\n\n # process kwargs\n request = {\"version\": self.version, \"request\": \"GetCoverage\", \"service\": \"WCS\"}\n assert len(identifier) > 0\n request[\"Coverage\"] = identifier\n # request['identifier'] = ','.join(identifier)\n if bbox:\n request[\"BBox\"] = \",\".join([makeString(x) for x in bbox])\n else:\n request[\"BBox\"] = None\n if time:\n request[\"time\"] = \",\".join(time)\n if crs:\n request[\"crs\"] = crs\n request[\"format\"] = format\n if width:\n request[\"width\"] = width\n if height:\n request[\"height\"] = height\n if resx:\n request[\"resx\"] = resx\n if resy:\n request[\"resy\"] = resy\n if resz:\n request[\"resz\"] = resz\n\n # anything else e.g. vendor specific parameters must go through kwargs\n if kwargs:\n for kw in kwargs:\n request[kw] = kwargs[kw]\n\n # encode and request\n data = urlencode(request)\n logger.debug(\"WCS 1.0.0 DEBUG: Second part of URL: %s\" % data)\n\n u = openURL(base_url, data, method, self.cookies, auth=self.auth, timeout=timeout, headers=self.headers)\n return u",
"def callwebservice(job, omdb_api_key, dvd_title, year=\"\"):\n\n if job.config.VIDEOTYPE == \"auto\":\n strurl = \"http://www.omdbapi.com/?t={1}&y={2}&plot=short&r=json&apikey={0}\".format(omdb_api_key, dvd_title,\n year)\n logging.debug(\n \"http://www.omdbapi.com/?t={1}&y={2}&plot=short&r=json&apikey={0}\".format(\"key_hidden\", dvd_title, year))\n else:\n strurl = \"http://www.omdbapi.com/?t={1}&y={2}&type={3}&plot=short&r=json&apikey={0}\".format(omdb_api_key,\n dvd_title, year,\n job.config.VIDEOTYPE)\n logging.debug(\n \"http://www.omdbapi.com/?t={1}&y={2}&type={3}&plot=short&r=json&apikey={0}\".format(\"key_hidden\", dvd_title,\n year,\n job.config.VIDEOTYPE))\n\n logging.debug(\"***Calling webservice with Title: \" + dvd_title + \" and Year: \" + year)\n try:\n dvd_title_info_json = urllib.request.urlopen(strurl).read()\n except Exception:\n logging.debug(\"Webservice failed\")\n return \"fail\"\n else:\n doc = json.loads(dvd_title_info_json.decode())\n if doc['Response'] == \"False\":\n logging.debug(\"Webservice failed with error: \" + doc['Error'])\n return \"fail\"\n else:\n # global new_year\n new_year = doc['Year']\n title = clean_for_filename(doc['Title'])\n logging.debug(\"Webservice successful. New title is \" + title + \". New Year is: \" + new_year)\n job.year_auto = str(new_year)\n job.year = str(new_year)\n job.title_auto = title\n job.title = title\n job.video_type_auto = doc['Type']\n job.video_type = doc['Type']\n job.imdb_id_auto = doc['imdbID']\n job.imdb_id = doc['imdbID']\n job.poster_url_auto = doc['Poster']\n job.poster_url = doc['Poster']\n job.hasnicetitle = True\n db.session.commit()\n return doc['Response']",
"def upload_attribute_data():\n gen_log.info(\"upload_attribute_data\")\n try:\n data={\"firmware_version\":\"1.0.1\", \"serial_number\":\"SN-001\"}\n url=url_prefix+'/v1/{}/attributes'.format(token)\n res=yield AsyncHTTPClient().fetch(url,method='POST',body=json.dumps(data),headers=headers)\n #res=requests.post(url,data=json.dumps(data),headers=headers)\n gen_log.info(res.body)\n except Exception as ex:\n gen_log.error(ex)\n \n IOLoop.current().add_timeout(time.time()+1,upload_attribute_data)"
] | [
"0.5288771",
"0.5045493",
"0.5036799",
"0.50346696",
"0.49535924",
"0.48924863",
"0.47984523",
"0.4763223",
"0.47212905",
"0.47086516",
"0.47058362",
"0.46831352",
"0.46637723",
"0.4658203",
"0.4639247",
"0.4588984",
"0.45826423",
"0.45756358",
"0.45698747",
"0.4566124",
"0.45633695",
"0.4555464",
"0.45456085",
"0.44656208",
"0.4462175",
"0.44462138",
"0.44423383",
"0.4432473",
"0.44314766",
"0.44081938"
] | 0.6163422 | 0 |
This function parses the API call returned from api_call and stores the response in a dictionary. | def parse_api_call(api_resp):
data = {}
if 'resultSets' in api_resp:
dictionary_key = 'resultSets'
elif 'resultSet' in api_resp:
dictionary_key = 'resultSet'
if isinstance(api_resp[dictionary_key], list):
for result_set in api_resp[dictionary_key]:
headers = result_set['headers']
if len(headers) > 0:
if isinstance(headers[0], dict):
add_on = headers[0]['columnNames']
keep_header = headers[1]['columnNames']
col_ind = 0
col_count = 0
add_count = 0
for col_name in keep_header:
col_count += 1
if col_count <= 5:
continue
else:
                            keep_header[col_count - 1] += '_' + add_on[col_ind]
add_count += 1
if add_count == 2:
add_count = 0
col_ind = 1
headers = keep_header
values = result_set['rowSet']
name = result_set['name']
data[name] = [dict(zip(headers, value))
for value in values]
else:
result_set = api_resp[dictionary_key]
headers = result_set['headers']
if isinstance(headers[0], dict):
add_on = headers[0]['columnNames']
keep_header = headers[1]['columnNames']
col_ind = 0
col_count = -1
add_count = 0
for col_name in keep_header:
col_count += 1
if col_count <= 4:
continue
else:
keep_header[col_count] += '_' + add_on[col_ind].replace(' ', '_')
add_count += 1
if add_count == 3:
add_count = 0
col_ind += 1
headers = keep_header
values = result_set['rowSet']
name = result_set['name']
data[name] = [dict(zip(headers, value))
for value in values]
return data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def massage_api_response(api_data):\n return_dict = defaultdict(list)\n legs = api_data['legs'][0]\n\n return_dict['start_address'].append(legs['start_address'])\n return_dict['end_address'].append(legs['end_address'])\n return_dict['distance'].append(legs['distance']['text'])\n return_dict['duration'].append(legs['duration']['text'])\n if 'duration_in_traffic' in legs:\n (return_dict['duration_in_traffic']\n .append(legs['duration_in_traffic']['text']))\n return_dict['travel_mode'].append(legs['steps'][0]['travel_mode'])\n\n for instruction in legs['steps']:\n (return_dict['instructions']\n .append(BeautifulSoup(instruction['html_instructions'],\n 'html.parser').get_text()))\n return_dict['step_distance'].append(instruction['distance'])\n return return_dict",
"def _parse_response(self, response, all_ops):\n try:\n parsed_response = json.loads(response)\n except Exception, e:\n raise ApiError(e)\n if 'error' in parsed_response: # needed anymore?\n raise ApiError(parsed_response['error'])\n # Return the true API return value.\n return parsed_response",
"def format_response(response: Response) -> dict:\n urldecode = urllib.parse.unquote_plus(response.url)\n data = response.json()\n if isinstance(data, (list, tuple)):\n data = data[0]\n return {\"data\": data, \"status_code\": response.status_code, \"url\": urldecode}",
"def _process_response (self, response, component):\n # check if we´re not authorized to make thios call\n if response.status_code == 401:\n return {\n 'error': True,\n 'message': 'Session invalid',\n 'code': 401\n }\n # check if somethign else failed\n if response.status_code != 200:\n return {\n 'error': True,\n 'message': 'API call for \"' + component + '\" failed',\n 'code': response.status_code\n }\n # return the parsed response & everything´s fine\n return response.json()",
"def parse_response(response):\n LOGGER.debug('Parsing WSAPI response')\n if isinstance(response, basestring):\n response = response.splitlines()\n\n data = {}\n for line in response:\n try:\n key, value = line.split('=', 1)\n data[key] = value.strip()\n except ValueError:\n # Skip empty lines and lines that aren't valid results\n pass\n\n LOGGER.debug('Parser got ' + str(data))\n return data",
"def data_from_response(response: dict) -> dict:\n if response[\"status\"] != 200:\n raise ValueError\n return {\"data\": response[\"payload\"]}",
"def call_api(self):\n #generate the final call string\n self.generate_call_string();\n #debug\n #print (self.call_url);\n \n #finally make api call\n try: \n #pass; \n self.return_articles= json.loads(urlopen(self.call_url).read());\n #print json.dumps(self.return_articles, indent=4, sort_keys=True)\n except :#elaborate on this later\n print(\"Exception,response did not go through:\");\n e = sys.exc_info()[0]\n print(e);\n raise;\n return;",
"def parse_response(self, raw_response):\n \n parsed_response = {\n 'success': False,\n 'raw_response': raw_response,\n }\n \n # Try to make sense of the response status\n try:\n status, msg = raw_response.split('\\r\\n')\n parsed_response['success'] = status == 'OK'\n parsed_response['message'] = msg\n except:\n msg = None\n \n # Try to parse the message ID\n try:\n key, val = msg.split('=')\n parsed_response[key] = val\n except:\n pass\n \n return parsed_response",
"def _get_response(**params):\n try:\n response = params['data']['response']\n response_dict = json.loads(response)\n except KeyError:\n response = \"None\"\n response_dict = {}\n return response, response_dict",
"def parse_response(self, response, case):\n request = response.request\n parsed = {\n 'request': {\n 'method': request.method,\n 'url': request.url,\n 'body': request.body,\n },\n 'response': {\n 'headers': OrderedDict(),\n 'status_code': response.status_code,\n 'reason': response.reason,\n }\n }\n\n # Re-assemble request line\n url_parts = urlparse(request.url)\n parsed['request']['request_line'] = '%s %s%s%s HTTP/1.1' % (\n request.method, url_parts.path, '?' if url_parts.query else '',\n url_parts.query)\n\n # Process request headers\n if self.mode == 'display':\n hostname = url_parts.hostname\n else:\n hostname = self.doc_hostname\n parsed['request']['headers'] = OrderedDict((('Host', hostname),))\n for header in sorted([h.title() for h in request.headers]):\n raw_value = request.headers[header]\n value = self.parse_header(header, raw_value, 'request')\n if value:\n parsed['request']['headers'][header.title()] = value\n\n # Re-assemble response line\n parsed['response']['response_line'] = 'HTTP/1.1 %s %s' % (\n response.status_code, response.reason)\n\n # Process response headers\n for header in sorted([h.title() for h in response.headers]):\n raw_value = response.headers[header]\n value = self.parse_header(header, raw_value, 'response')\n if value:\n fixed_header = header.title().replace('Www', 'WWW')\n parsed['response']['headers'][fixed_header] = value\n\n # Process response body\n response.encoding = 'utf-8'\n body = response.text\n if self.standardize:\n body = body.replace(api, self.doc_base_url)\n for key, value in case.get('standardize', {}).items():\n assert key in ('created', 'modified', 'date')\n pattern = r\"\"\"(?x)(?s) # Be verbose, . include newlines\n \"%s\":\\s\" # Key and quote\n \\d{4}-\\d{2}-\\d{2} # Date\n T\\d{2}:\\d{2}:\\d{2} # Time\n \\.\\d{0,6}Z # Microseconds and UTC timezone\n \", # End quote and comma\n \"\"\" % key\n replace = '\"%s\": \"%s\",' % (key, value)\n body = re.sub(pattern, replace, body)\n parsed['response']['body'] = body\n\n return parsed",
"def fill_from_api_response(self, api_response):\n pass",
"def process_response(self, id, result):\n return {\n 'version': '1.1',\n 'id': id,\n 'result': result,\n 'error': None,\n }",
"def parse(response):\n if isinstance(response, dict):\n json = response\n else:\n json = response.json()\n\n if json.get('Error'):\n raise Exception('Error in retrieval: ' + self.json['error'])\n\n return json",
"def _handle_api_call(self, url):\n response = urlopen(url)\n url_response = response.read()\n json_response = loads(url_response)\n \n if not json_response:\n raise ValueError('Error getting data from the api, no return was given.')\n elif \"Error Message\" in json_response:\n raise ValueError(json_response[\"Error Message\"])\n elif \"Information\" in json_response and self.treat_info_as_error:\n raise ValueError(json_response[\"Information\"])\n \n return json_response",
"def _parse_content(response):\n if response.status_code != 200:\n raise ApiError(f'unknown error: {response.content.decode()}')\n result = json.loads(response.content)\n if not result['ok']:\n raise ApiError(f'{result[\"error\"]}: {result.get(\"detail\")}')\n return result",
"def api_req(dev, api_call):\r\n import xmltodict\r\n import logging\r\n try:\r\n r = requests.get(dev + ':8060' + api_call, timeout=5)\r\n except Exception as exc:\r\n response = [\"ERR\", exc]\r\n return response[0]\r\n except ConnectionError as connerr:\r\n response = [\"ERR\", connerr]\r\n return response[0]\r\n except TimeoutError as toerr:\r\n response = [\"ERR\", toerr]\r\n return response[0], toerr\r\n r_code = r.status_code\r\n if r_code == 200:\r\n print(\"REQUEST WAS A SUCCESS. DEVICE RETURNED: {} \".format(str(r)))\r\n r2 = r.text\r\n response = xmltodict.parse(r2, xml_attribs=False)\r\n return response\r\n else:\r\n response = \"UnknownERR\"\r\n dev.state(DISABLED)\r\n return msg_box(response)",
"def _json_parser(self, json_response):\n response = json_response.json()\n print(response)\n status = response.get('status', None)\n message = response.get('message', None)\n data = response.get('data', None)\n\n return json_response.status_code, status, data",
"def get_dict_from_response(response):\n result = {}\n if getattr(response, \"_resp\") and response._resp.code > 400:\n return result\n\n for key, value in response.data.items():\n result.setdefault(key, value)\n return result",
"def _process_info(resp: suds.sudsobject) -> dict:\n last = resp.ultimoValor\n return dict(fonte = str(resp.fonte),\n gestor = str(resp.gestorProprietario),\n freq = str(resp.periodicidadeSigla),\n nome = str(resp.nomeCompleto),\n number = int(resp.oid),\n final = dt(last.ano, last.mes, last.dia))",
"def get_response(self, sentence):\n user_message = ParserSentence().clean(sentence)\n data_here_api = HereApi().get_request(user_message)\n if not data_here_api:\n return {\n \"grandpy_error\": choice(grandpy_error)\n }\n else:\n data_wiki_api = WikiApi().get_description(user_message)\n if not data_wiki_api:\n return {\n \"grandpy_address\": choice(grandpy_response),\n \"address\": data_here_api[\"address\"],\n \"grandpy_descript\": \"\",\n \"descriptif\": choice(grandpy_no_description),\n \"lat\": data_here_api[\"lat\"],\n \"lng\": data_here_api[\"lng\"],\n \"apikey\": HERE_API_KEY\n }\n else:\n return {\n \"grandpy_address\": choice(grandpy_response),\n \"address\": data_here_api[\"address\"],\n \"grandpy_descript\": choice(grandpy_story),\n \"descriptif\": data_wiki_api,\n \"lat\": data_here_api[\"lat\"],\n \"lng\": data_here_api[\"lng\"],\n \"apikey\": HERE_API_KEY\n }",
"def parse_response(self, response):\n data = json_decode(response)\n\n if data['stat'] == 'error':\n self.logger.debug(\"Response:\\n\" + json_encode(data, indent=4))\n try:\n message = data['error_description']\n except KeyError:\n message = data['message']\n raise ApiResponseError(data['code'], data['error'], message, data)\n return data",
"def _parse_json_response(self, res: requests.Response, token: str = None) -> dict:\n try:\n self._check_response(res, token)\n self._check_status_error(res)\n return res.json()\n except (json.JSONDecodeError, ValueError):\n return {\"error\": {\"message\": res.text, \"code\": 999}}",
"def parse_response(response):\n return json.loads(response.text)",
"def parse_response(response):\n # a result should always have a status\n status = response['status']\n\n # a result _may_ have a results or a reason\n result = response.get('results', [])\n reason = response.get('reason', None)\n\n return status, result, reason",
"def parse_api(self, soup):\n return {}",
"def api_call():\n\tresponse = requests.get(URL_API)\n\treturn response",
"def parse_response(xml):\n r = {}\n\n try:\n xml = etree.fromstring(xml)\n for key in xml.keys():\n value = xml.get(key)\n r.update({key:value})\n except etree.Error as e:\n raise XMLParsingError(u'Failed to parse response from CardPay service: {}'.format(e))\n\n return r",
"def call(self, api_call, **kwargs):\n # Encode values for the API (JSON, bools, nulls)\n params = dict((key, api_encode(value))\n for key, value in kwargs.iteritems() if value is not None)\n params.update(self.defaults)\n\n if api_call[0] != \"/\":\n api_call = \"/\" + api_call\n url = self.api_url + api_call\n self.logger.debug(url)\n\n # Signing the request modifies the request object and params in-place.\n # Sign the request *before* encoding and passing the params.\n request = Request(url)\n if self.sign_requests:\n self.sign_request(request, api_call, params)\n\n print_params = params.copy()\n if 'client_secret' in print_params:\n print_params['client_secret'] = \"CLIENT_SECRET_REMOVED\"\n self.logger.debug(urlencode(print_params))\n\n request.add_data(urlencode(params))\n if self.compress:\n request.add_header('Accept-encoding', 'gzip')\n\n try:\n with closing(urlopen(request)) as response:\n if response.info().get('Content-Encoding') == 'gzip':\n buf = StringIO( response.read())\n f = gzip.GzipFile(fileobj=buf)\n body = f.read()\n else:\n body = response.read()\n except HTTPError as error:\n if error.code in (400, 401): # /oauth/token returns 400 or 401\n body = error.fp.read()\n elif error.code == 404:\n raise InvalidApiCallError(api_call, error.code)\n else:\n raise error\n\n return self.parse_response(body)",
"def retrieve_data(self, url: str) -> Tuple[Optional[List[dict]], Optional[httpx.Response]]:\n timeout = 30\n timeout_count = 0\n num_attempts = 1\n while num_attempts <= 10:\n\n response = hit_api(self.key_manager, url, self.logger, timeout)\n\n if response is None:\n if timeout_count == 10:\n self.logger.error(f\"Request timed out 10 times for {url}\")\n return None, None, GithubApiResult.TIMEOUT\n\n timeout = timeout * 1.1\n num_attempts += 1\n continue\n\n # if api returns a status of 204 No Content then return empty list\n if response.status_code == 204:\n return [], response, GithubApiResult.SUCCESS\n \n \n page_data = parse_json_response(self.logger, response)\n\n\n # if the data is a list, then return it and the response\n if isinstance(page_data, list) is True:\n return page_data, response, GithubApiResult.SUCCESS\n\n # if the data is a dict then call process_dict_response, and \n if isinstance(page_data, dict) is True:\n dict_processing_result = process_dict_response(self.logger, response, page_data)\n\n if dict_processing_result == GithubApiResult.NEW_RESULT:\n self.logger.info(f\"Encountered new dict response from api on url: {url}. Response: {page_data}\")\n return None, None, GithubApiResult.NEW_RESULT\n\n if dict_processing_result == GithubApiResult.REPO_NOT_FOUND:\n return None, response, GithubApiResult.REPO_NOT_FOUND\n\n if dict_processing_result in (GithubApiResult.SECONDARY_RATE_LIMIT, GithubApiResult.ABUSE_MECHANISM_TRIGGERED):\n continue\n\n if dict_processing_result == GithubApiResult.RATE_LIMIT_EXCEEDED:\n num_attempts = 0\n continue \n\n if isinstance(page_data, str) is True:\n str_processing_result: Union[str, List[dict]] = self.process_str_response(page_data)\n\n if isinstance(str_processing_result, list):\n return str_processing_result, response, GithubApiResult.SUCCESS\n\n num_attempts += 1\n\n self.logger.error(\"Unable to collect data in 10 attempts\")\n return None, None, GithubApiResult.NO_MORE_ATTEMPTS",
"def create_response_dict(split_response, response_dict):\n\n for res in split_response:\n split_sub_response = res.split('=')\n if split_sub_response[0] == \"VendorTxCode\":\n response_dict['payment_id'] = split_sub_response[1]\n if split_sub_response[0] == \"VPSTxId\":\n response_dict['Payment_gateway_reference_id'] = split_sub_response[1][1:-1]\n if split_sub_response[0] == \"Status\":\n if split_sub_response[1] == \"OK\" or split_sub_response[1] == \"ABORT\":\n response_dict['status'] = split_sub_response[1]\n else:\n response_dict['status'] = \"FAILED\"\n if split_sub_response[0] == \"Amount\":\n response_dict['Amount'] = split_sub_response[1]"
] | [
"0.68063694",
"0.67208207",
"0.620158",
"0.6087096",
"0.6062811",
"0.6038745",
"0.5941074",
"0.5928393",
"0.59209156",
"0.59027296",
"0.58981526",
"0.58471745",
"0.5836139",
"0.5826573",
"0.58249855",
"0.5803496",
"0.57840395",
"0.57820934",
"0.5779634",
"0.57584476",
"0.57508934",
"0.57409376",
"0.57293165",
"0.57260513",
"0.5722761",
"0.57062256",
"0.56766367",
"0.56363857",
"0.56302583",
"0.5592053"
] | 0.686593 | 0 |
Convert state to torch tensor. | def process_state(self, state):
return torch.tensor(state, dtype=torch.float, device=self.device) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def state_tensor_convert(self,state):\n return torch.Tensor(state)",
"def convert_state_to_tensor(self, state):\n tensor = []\n for i in range(len(state)):\n for j in range(len(state[i])):\n for k in range(len(state[i][j])):\n tensor.append(state[i][j][k])\n\n return np.array([tensor])",
"def convert_state_to_tensor(self, state):\n tensor = tf.convert_to_tensor(\n [np.concatenate([np.array(i) for i in state])])\n # dims[0] depends on board size\n return tf.reshape(tensor, [1, self.dims[0]])",
"def _format(self, state):\n x = state\n if not isinstance(x, torch.Tensor):\n x = torch.tensor(x,\n device=self.device,\n dtype=torch.float32)\n x = x.unsqueeze(0)\n return x",
"def transform(self, state):\n robot_state_tensor = torch.Tensor([state.robot_state.to_tuple()]).to(self.device)\n human_states_tensor = torch.Tensor([human_state.to_tuple() for human_state in state.human_states]). \\\n to(self.device)\n\n return robot_state_tensor, human_states_tensor",
"def predict(self, state):\n with torch.no_grad():\n return self.model(torch.Tensor(state))",
"def get_state(self, s):\n return (torch.tensor(s, device=self.device).permute(2, 0, 1)).unsqueeze(0).float()",
"def to_tensor(self): \n raise NotImplementedError",
"def to_tensor(self): \n warnings.warn(f'{self} is being reconstructed into a full tensor, consider operating on the decomposed form.')",
"def _convert_ndarray_to_tensor(self, state_dict: Dict[str, Any]) -> None:\n # model could be an OrderedDict with _metadata attribute\n # (as returned by Pytorch's state_dict()). We should preserve these\n # properties.\n for k in list(state_dict.keys()):\n v = state_dict[k]\n if not isinstance(v, np.ndarray) and not isinstance(v,\n torch.Tensor):\n raise ValueError(\n \"Unsupported type found in checkpoint! {}: {}\".format(k,\n type(\n v))\n )\n if not isinstance(v, torch.Tensor):\n state_dict[k] = torch.from_numpy(v)",
"def forward(self, state):\n\n x = state # Careful: deepcopy bug?\n # Intermediate Layers\n for layer in self.layers[:-1]:\n\n x = nn.ReLU()(layer(x))\n\n x = nn.Tanh()(self.layers[-1](x))\n return x",
"def get_tensor(self, pbs):\n\n public_state = pbs.public_state\n # Get tensor such like [round, bet1, bet2, *prob_dict]\n pbs_list = [len(public_state), public_state[-1].encode[0],\n public_state[-1].encode[1],\n *[prob for prob in pbs.prob_dict.values()]]\n pbs_tensor = torch.tensor(pbs_list)\n\n return pbs_tensor",
"def call(self, state):\n x = tf.cast(state, tf.float32)\n x = x / 255\n x = self.conv1(x)\n x = self.conv2(x)\n x = self.conv3(x)\n x = self.flatten(x)\n x = self.dense1(x)\n return atari_lib.DQNNetworkType(self.dense2(x))",
"def as_tensor(self) -> torch.Tensor:\n return self.as_subclass(torch.Tensor)",
"def call(self, state):\n x = tf.cast(state, tf.float32)\n x = self.flatten(x)\n x -= self.min_vals\n x /= self.max_vals - self.min_vals\n x = 2.0 * x - 1.0 # Rescale in range [-1, 1].\n x = self.dense1(x)\n x = self.dense2(x)\n x = self.last_layer(x)\n return x",
"def get_action(self, state):\n output = self.actor_model(torch.Tensor(list(state)))\n\n return output",
"def forward(self, state):\n x = F.relu(self.linear1(state))\n x = F.relu(self.linear2(x))\n x = torch.tanh(self.linear3(x))\n\n return x",
"def state_to_node(self, state):\n node = np.asarray(state)\n node -= self.origin\n node = np.divide(node, self.resolution)\n #we need to rotate the state to lattice coordinates\n node = np.array([ cos(self.rotation)*node[0] + sin(self.rotation)*node[1], \n -sin(self.rotation)*node[0] + cos(self.rotation)*node[1]]) \n node = node.astype(int, copy=False) \n return tuple(node)",
"def _to_tensor(cls, tensor):\n if isinstance(tensor, Tensor):\n return tensor\n return Tensor(data=tensor)",
"def state(self):\n state = np.array(self.get_state_arr())\n om = utils.build_occupancy_maps(utils.build_humans(state))\n # We only have a batch of one so just get the first element of\n # transform and rotate\n state = utils.transform_and_rotate(state.reshape((1, -1)))[0]\n return torch.cat((state, om), dim=1)",
"def reverse_state(self):\n state = np.array(self.get_reverse_state_arr())\n om = utils.build_occupancy_maps(utils.build_humans(state))\n # print(\"OM: \", om.size())\n # We only have a batch of one so just get the first element of\n # transform and rotate\n state = utils.transform_and_rotate(state.reshape((1, -1)))[0]\n # print(\"State: \", state.size())\n return torch.cat((state, om), dim=1).unsqueeze(0)",
"def board_state_to_nn_input(self, state):\n # res = np.array([(state == self.side).astype(int),\n # (state == Board.other_side(self.side)).astype(int),\n # (state == EMPTY).astype(int)])\n # res = res.reshape(3, 3, 3)\n # res = np.transpose(res, [1, 2, 0])\n res = np.array(state)\n return res",
"def getNextState(self, obs):\n return torch.tensor(obs, device=self.device, dtype=torch.float).unsqueeze(0)",
"def forward(self, state):\n x = self.forward_to_var(state)\n return x.data[0]",
"def transform_state(state):\n # TODO: automate n_enemies calculation -> only valid fot n_enemies = n_friends\n n_agents = len(state.agents)\n n_enemies = n_agents // 2 # TODO: improve this\n states_v = torch.zeros(n_agents, 5 + n_enemies) # 5 = x, y, alive, ammo, aim, enemy visible ? (x n_enemies)\n for agent_idx, agent in enumerate(state.agents):\n states_v[agent_idx, 0] = state.position[agent][0] # x\n states_v[agent_idx, 1] = state.position[agent][1] # y\n states_v[agent_idx, 2] = state.alive[agent]\n states_v[agent_idx, 3] = state.ammo[agent] / 5 # args.ammo\n states_v[agent_idx, 4] = -1 if state.aim[agent] is None else state.aim[agent].id\n idx = 5\n for other in state.agents:\n if (agent, other) in state.visible:\n states_v[agent_idx, idx] = int(state.visible[(agent, other)])\n idx += 1\n return states_v",
"def forward(self, state):\n x = self.fc1(state)\n action = self.tanh(x)\n\n action = action.cpu().data.numpy() * self.action_lim\n action = torch.FloatTensor(action)\n\n return action",
"def __call__(self, state): \n self.last_state = state \n state_array, action_array = self.average()\n\n state_array = np.append(state.cpu().numpy(), state_array).astype(np.float32)\n action_array = np.append([], action_array).astype(np.float32)\n\n augmented_state = np.append(state_array, action_array)\n\n self.last_augmented = augmented_state\n\n return torch.tensor(augmented_state).to(device)",
"def obs2state(observation):\n l1 = [val.tolist() for val in list(observation.values())]\n l2 = []\n for sublist in l1:\n try:\n l2.extend(sublist)\n except:\n l2.append(sublist)\n return torch.FloatTensor(l2).view(1, -1)",
"def get_state_action_value(self, state, action):\n state_tensor = torch.from_numpy(state).float().to(self.device)\n output = torch.dot(self.weights[action,:],state_tensor.view(-1))\n return output",
"def call(self, state, training=True):\n x = self.representation(state, projection=False)\n if training:\n x = tf.nn.dropout(x, rate=self._dropout)\n x = self.dense2(x)\n if self._num_actions == 1:\n x = tf.squeeze(x, axis=-1)\n return x"
] | [
"0.91581875",
"0.7870385",
"0.78412116",
"0.75492615",
"0.7146038",
"0.7068303",
"0.70144564",
"0.6954352",
"0.67244875",
"0.6661394",
"0.65748036",
"0.6557474",
"0.654175",
"0.6487248",
"0.6462197",
"0.6460731",
"0.6441729",
"0.6417149",
"0.6398879",
"0.63750464",
"0.6373343",
"0.63346577",
"0.63233477",
"0.6306179",
"0.63023674",
"0.6280525",
"0.62781274",
"0.62571555",
"0.62501585",
"0.6242462"
] | 0.8244859 | 1 |
Create profile for the registered user or Update user profile when user information changed | def create_or_update_user_profile(sender, instance, created, **kwargs):
# Create profile and set ACTIVE status to account -- TODO : ACTIVE STATUS
if created:
Profile.objects.create(user=instance, status=Status.get_or_create_status(strings.ACTIVE_STATUS))
else:
instance.profile.save() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_or_update_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.get_or_create(user=instance)\n instance.profile.save()",
"def create_user_profile(instance, created, **_):\n if created:\n Profile.objects.create(user=instance)",
"def create_profile_for_new_user(sender, created, instance, **kwargs):\n if created:\n profile = self.get_model('profile')(user=instance)\n profile.save()",
"def manage_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)\n else:\n instance.profile.save()",
"def create_or_update_user_profile(sender, instance, created, **kwargs):\n _, created = UserProfile.objects.get_or_create(user=instance)\n if created and instance.email != \"\":\n instance.profile.email = instance.email\n instance.profile.save()",
"def create_profile(sender, instance, created, **kwargs):\n if created: \n profile, new = UserProfile.objects.get_or_create(user=instance)",
"def create_profile(sender, **kwargs):\n user = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n user_profile = Profile(user=user)\n user_profile.save()",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)",
"def create_profile(sender, instance, created, **kwargs):\n if created:\n profile, created = UserProfile.objects.get_or_create(user=instance)",
"def make_profile_for_user(sender, instance, **kwargs):\n if kwargs['created']:\n new_profile = ImagerProfile(user=instance)\n new_profile.save()",
"def create_profile_for_new_users(sender, instance, created, **kwargs):\n if not created:\n return\n\n profile = Profile.objects.filter(user=instance).first()\n if profile is None:\n profile = Profile(user=instance)\n profile.save()",
"def create_user_profile_callback(sender, instance, created, **kwargs):\n try:\n instance.get_profile()\n except UserProfile.DoesNotExist:\n UserProfile.objects.create(user=instance)",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n # create new Stellar account\n stellar.api.create_account(user=instance)",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n user_profile = UserProfile.objects.create(user=instance)",
"def create_profile(sender, **kw):\n user = kw['instance']\n if kw['created']:\n profile = UserProfile(user=user)\n profile.save()",
"def save_user_profile(instance, **_):\n instance.profile.save()",
"def create_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)",
"def save_user_receiver(sender, instance, created, *args, **kwargs):\n print(\"profile created\", instance)\n if created:\n new_profile = UserProfile.objects.get_or_create(owner=instance)",
"def create_user_profile(sender, instance, created, **kwargs):\n if created:\n profile = UserProfile()\n profile.user = instance\n profile.email=instance.email\n profile.save()",
"def update_user_profile(sender, instance, created, **kwargs):\n if created:\n GameplanUser.objects.create(user=instance)\n instance.gameplanuser.save()",
"def create_user_profile(sender, instance, created, **kwargs):\n\n if created:\n user_profile = UserProfile.objects.create(user=instance)",
"def updateProfile( token, user=False, userinfo={'nickname':'newUser','first_name':'newUser'}):\n \n if not user:\n l= list(validName)\n sysrand.shuffle(l)\n l= \"\".join(l)\n print \"Attempting to create a user with the name \"+l\n user=User.objects.create_user(l,'')\n user.save()\n sid = transaction.savepoint()\n updateName( user, str(userinfo['nickname']).replace(' ',''), userinfo['first_name'], sid )\n transaction.savepoint_commit(sid)\n\n try: \n userprofile = user.get_profile()\n userprofile.uid = cPickle.dumps(token) #ensures the token parameter is retreivable and unique\n userprofile.user_id = user.id\n userprofile.save()\n transaction.commit()\n except:\n transaction.rollback()\n return user",
"def create_profile(sender, instance, signal, created, **kwargs):\n \n from phylocommons.models import UserProfile\n \n if created:\n UserProfile(user = instance).save()",
"def save(self, profile_callback=None):\r\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\r\n password=self.cleaned_data['password1'],\r\n email=self.cleaned_data['email'],\r\n profile_callback=profile_callback)\r\n return new_user",
"def create_user_profile(sender, **kwargs):\n\n if kwargs['created']:\n UserProfile.objects.create(user=kwargs['instance'])",
"def save(self, profile_callback=None):\n\n # First, save the parent form\n new_user = super(BodbRegistrationForm, self).save(profile_callback=profile_callback)\n\n # Update user with first, last names\n new_user.first_name = self.cleaned_data['first_name']\n new_user.last_name = self.cleaned_data['last_name']\n new_user.save()\n\n # Update profile with affiliation\n profile = new_user.get_profile()\n profile.affiliation = self.cleaned_data['affiliation']\n profile.save()\n\n cache.set('%d.profile' % new_user.id, profile)\n\n return new_user",
"def save(self):\n # First save the parent form and get the user.\n new_user = super(SignupFormExtra, self).save()\n\n # Get the profile, the `save` method above creates a profile for each\n # user because it calls the manager method `create_user`.\n # See: https://github.com/django-userena-ce/django-userena-ce/blob/master/userena/managers.py#L65\n profile = new_user.my_profile\n profile.gender = self.cleaned_data['gender']\n profile.education = self.cleaned_data['education']\n profile.birthday = self.cleaned_data['birthday']\n profile.annual_income = self.cleaned_data['annual_income']\n profile.save()\n\n # Userena expects to get the new user from this form, so return the new\n # user.\n return new_user",
"def save(self):\n data = self.cleaned_data\n data.pop('password_confirmation')\n user = User.objects.create_user(**data)\n profile = Profile(user=user)\n profile.save()"
] | [
"0.8087056",
"0.78401905",
"0.7833288",
"0.783039",
"0.77336484",
"0.7713397",
"0.7689795",
"0.76493394",
"0.76493394",
"0.76493394",
"0.76256",
"0.7616542",
"0.7560712",
"0.7535582",
"0.7535575",
"0.7532664",
"0.74908847",
"0.748587",
"0.74857795",
"0.74819976",
"0.7458078",
"0.7442551",
"0.74326783",
"0.7426316",
"0.74197376",
"0.73828757",
"0.73720926",
"0.73691076",
"0.73538107",
"0.7347721"
] | 0.82174295 | 0 |
Check if a file is populated. | def file_populated(filepath):
return file_exists(filepath) and os.stat(filepath).st_size > 0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def found_empty_file(self):\n self.is_empty = True",
"def has_file(self) -> bool:\n return self._file is not None",
"def check_file_exist(self):\n return False",
"def is_present(self):\n return self.file_is_present()",
"def is_file_exists(self):\n pass",
"def is_empty_file(fpath):\n return \\\n fpath is not None and \\\n os.path.isfile(fpath) and \\\n os.path.getsize(fpath) == 0",
"def file_exist() -> bool:\n pass",
"def file_is_present(self, key=None):\n return os.path.isfile(self.file_path(key))",
"def is_file_empty(file_path):\n # Check if file exist and it is empty\n return os.path.exists(file_path) and os.stat(file_path).st_size == 0",
"def is_file_empty(file_name):\n # open ile in read mode\n with open(file_name, 'r') as read_obj:\n # read first character\n one_char = read_obj.read(1)\n # if not fetched then file is empty\n if not one_char:\n return True\n return False",
"def file_is_empty(file_name):\n try:\n if os.stat(file_name).st_size > 0:\n return False\n except Exception: # pylint: disable=broad-except\n pass\n\n return True",
"def file_exists(self):\r\n if os.path.exists(self.file_path):\r\n return True\r\n else:\r\n return False",
"def is_empty(file):\n with open(file, 'rb') as f:\n return not f.read(1)",
"def exist(self):\n return self.file_path.exists()",
"def is_file_present(file):\n\n return os.path.isfile(file)",
"def is_empty(self):\n if self.file_exists:\n with open_hdf5(self.file_name) as h:\n return len(h.keys()) == 0\n else:\n return True",
"def _does_file_exist(file_path):\n return os.path.exists(file_path) and os.path.getsize(file_path) > 0",
"def file_exists(self):\n if os.path.isfile(self.file_name):\n return True\n else:\n return False",
"def is_devices_file_empty(self) -> bool:\n return len(self._devices_names) == 0",
"def preliminary_file_check(self):\n\n if self.has_error():\n return False\n\n if not self.filepath:\n self.add_error(\"A file was specified!\")\n return False\n\n if not isfile(self.filepath):\n self.add_error(\"The file was not found: %s\" % basename(self.filepath))\n return False\n\n if getsize(self.filepath) < 1:\n self.add_error(\"The file is empty (no bytes): %s\" % basename(self.filepath))\n return False\n\n if self.file_ext in ['xls', 'xlsx']:\n self.is_excel = True\n\n return True",
"def has_file(self, doc):\n return len(doc.package.files) != 0",
"def ensure_file(self):\n if not self.has_file():\n raise AttributeError(\"No file set\")",
"def validate_input_file(self):\r\n return os.path.isfile(self.input_file)",
"def _file_needs_to_be_created(self, file_path, quiet=False):\n if not self._args.check_for_existing_files:\n return True\n if os.path.exists(file_path):\n if not quiet:\n sys.stderr.write(\n \"File %s exists. Skipping its generation.\\n\" % file_path\n )\n return False\n return True",
"def valid_file(fname):\r\n try:\r\n if os.stat(fname).st_size > 0: # if filename contains data\r\n return \"0\"\r\n else:\r\n return \"Selected file is empty....please reenter\"\r\n except OSError:\r\n return \"Can not find the file....please reenter\"",
"def file_missing(filename):\n return not os.path.isfile(filename)",
"def exists(self):\n return self.path.is_file()",
"def check_for_file(self):\n if self.task.file_name in os.listdir(self.task.file_storage):\n return True\n return False",
"def fileIsComplete(self):\n return True",
"def efile_exists(self):\n return os.path.isfile(self.efile)"
] | [
"0.7667911",
"0.73072773",
"0.72717863",
"0.72463685",
"0.72180027",
"0.7094383",
"0.7057343",
"0.7047968",
"0.7028629",
"0.6927831",
"0.68633527",
"0.6848957",
"0.684656",
"0.68207556",
"0.6815947",
"0.67939925",
"0.6748642",
"0.67227185",
"0.6659184",
"0.6623893",
"0.6619269",
"0.6595858",
"0.65820485",
"0.65778005",
"0.65659165",
"0.6537416",
"0.6504781",
"0.6491787",
"0.6449073",
"0.6437452"
] | 0.78666717 | 0 |
The account's Internal auto reply message. Setting the value will change the auto reply message of the account, automatically setting the status to enabled (but not altering the schedule). | def auto_reply_message(self):
if self._auto_reply is None:
r = requests.get('https://outlook.office.com/api/v2.0/me/MailboxSettings/AutomaticRepliesSetting',
headers=self._headers)
check_response(r)
self._auto_reply = r.json().get('InternalReplyMessage')
return self._auto_reply | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_auto_reply(self, message, status=AutoReplyStatus.ALWAYS_ENABLED, start=None, end=None,\n external_message=None, audience=AutoReplyAudience.ALL):\n # type: (str, OutlookAccount.AutoReplyStatus, datetime, datetime, str, OutlookAccount.AutoReplyAudience) -> None\n\n start_is_none = start is None\n end_is_none = end is None\n\n if (not start_is_none and end_is_none) or (start_is_none and not end_is_none):\n raise ValueError('Start and End not must both either be None or datetimes')\n\n start_is_datetime = isinstance(start, datetime)\n end_is_datetime = isinstance(end, datetime)\n\n if not start_is_datetime and not start_is_none or not end_is_datetime and not end_is_none:\n raise ValueError('Start and End must both either be None or datetimes')\n\n request_data = dict(Status=status, ExternalAudience=audience)\n\n # Outlook requires both an internal and external message. For convenience, pyOutlook allows only one message\n # and uses that as the external message if none is provided\n if external_message is None:\n external_message = message\n\n request_data.update(InternalReplyMessage=message, ExternalReplyMessage=external_message)\n\n if not start_is_none and not end_is_none:\n request_data.update(ScheduledStartDateTime=dict(DateTime=str(start)))\n request_data.update(ScheduledEndDateTime=dict(DateTime=str(end)))\n\n data = {\n \"@odata.context\": \"https://outlook.office.com/api/v2.0/$metadata#Me/MailboxSettings\",\n \"AutomaticRepliesSetting\": request_data\n }\n\n requests.patch('https://outlook.office.com/api/v2.0/me/MailboxSettings',\n headers=self._headers, data=json.dumps(data))\n\n self._auto_reply = message",
"def internal_reply_message(self):\n if \"internalReplyMessage\" in self._prop_dict:\n return self._prop_dict[\"internalReplyMessage\"]\n else:\n return None",
"async def autoresponses(self, ctx, value: bool):\n await queries.update_setting(ctx, \"guild_settings\", \"autoresponses\", value)\n self.bot.cache.autoresponse[str(ctx.guild.id)] = value\n if value:\n await util.send_success(ctx, \"Automatic responses are now **enabled**\")\n else:\n await util.send_success(ctx, \"Automatic responses are now **disabled**\")",
"async def sdmdefaultreply(self, ctx, *, message):\n await self.config.defaultreply.set(message)\n await ctx.message.add_reaction(\"✅\")",
"def external_reply_message(self):\n if \"externalReplyMessage\" in self._prop_dict:\n return self._prop_dict[\"externalReplyMessage\"]\n else:\n return None",
"def set_reply(msg):\n \n result = Message(msg.content, correlation_id=msg.correlation_id ) \n return result",
"async def botmsg(self, ctx, type: bool):\n async with self.config.toggles() as toggles:\n if type:\n toggles[\"botmessages\"] = True\n await ctx.send(\"Bot message notifications have been enabled.\")\n else:\n toggles[\"botmessages\"] = False\n await ctx.send(\"Bot message notifications have been disabled.\")",
"def confirmedMyIstr(self):\n self.status = Modem.Status.IDLE\n self.error_status = Modem.ErrorDict.NONE",
"def default_message(update: Update, _: CCT) -> None:\n cast(Message, update.effective_message).reply_text(\n \"Sorry, but I can only text messages. \" 'Send \"/help\" for more information.'\n )",
"def SendAttentionCommand( self ): # also pauses the recording ?\r\n\r\n self._socket.write( 'A' )\r\n \r\n return self.GetServerResponse()",
"def mode_auto(self):\n if self.__check_mode_change():\n self.communications.set_status(\"Bot Auto Mode Set\")\n self.patrol()",
"def careful_reply(api,reply):\r\n\r\n debug_print('Preparing to reply to #%d' % (reply.id,))\r\n normalized_tweet = reply.text.lower().strip()\r\n\r\n # Don't reply to a retweet\r\n if hasattr(reply, 'retweeted_status'):\r\n return\r\n\r\n debug_print('Replying to #%d' % (reply.id,))\r\n update = \"@%s We'd estimate about a %d percent chance, actually.\" % (reply.user.screen_name, random.randint(0,100),)\r\n return api.update_status(update, reply.id)",
"async def custom_interaction(bot, context, response, result):\n if result is None: # Timed out\n edit = 'You took too long to respond...'\n elif result.content:\n edit = 'You replied with \"{}\"'.format(result.content[:100])\n else:\n edit = 'You did not reply with any content text!'\n await response.message.edit(content=edit)",
"def print_user_manual():\n print(USER_MANUAL)",
"def usr_msg(update, msg: str = '', error: bool = True) -> None:\n if error:\n update.effective_message.reply_text(\n \"An error occured 😔, sorry.\",\n reply_markup=ReplyKeyboardRemove(),\n parse_mode=ParseMode.MARKDOWN,\n )\n if msg:\n update.effective_message.reply_text(\n msg,\n reply_markup=ReplyKeyboardRemove(),\n parse_mode=ParseMode.MARKDOWN,\n )",
"def prefect_mandate(self):\n return self._prefect_mandate",
"def get_status(self):\n return self.msg",
"def _get_status(self):\n held_msg=\"\"\n return u'%s%s' % (self.get_status_display(), held_msg)",
"def status(message):\n message.reply('User_id: ' +\n str(message._client.users[message._get_user_id()]))",
"def status(message):\n message.reply('User_id: ' +\n str(message._client.users[message._get_user_id()]))",
"def manage_messages(_) -> int:\n return 1 << 13",
"def manage_messages(_) -> int:\n return 1 << 13",
"def status(self):\n if \"status\" in self._prop_dict:\n if isinstance(self._prop_dict[\"status\"], OneDriveObjectBase):\n return self._prop_dict[\"status\"]\n else :\n self._prop_dict[\"status\"] = AutomaticRepliesStatus(self._prop_dict[\"status\"])\n return self._prop_dict[\"status\"]\n\n return None",
"def magic_automagic(self, parameter_s = ''):\n \n self.rc.automagic = 1 - self.rc.automagic\n print '\\n' + Magic.auto_status[self.rc.automagic]",
"def default_config():\n return {\n MESSAGE: 'reply -> send*',\n REPLY: 'transitiveReply -> send*',\n FORWARD: 'none*'\n }",
"def reply(cls, user, context, message, reply_message):\r\n pass",
"async def __send_alarm(self, context: ContextTypes.DEFAULT_TYPE) -> None:\n if self.door_status.update_status():\n await context.bot.send_message(\n MESKOID,\n text=f\"🐙{self.door_status.last_line}\",\n )\n await context.bot.send_message(\n QKZKID,\n text=f\"🐙{self.door_status.last_line}\",\n )\n elif self.__verbose:\n await context.bot.send_message(\n context.job.chat_id,\n text=f\"🚀unedited - {self.door_status.last_edit}.\",\n )",
"def auto_approve_cod(self):\n return self._auto_approve_cod",
"def get_senders_correspondent_option(self):\n return 'A'",
"def EnableSendTriggeredPortModificationMessage(self):\n\t\treturn self._get_attribute('enableSendTriggeredPortModificationMessage')"
] | [
"0.6212845",
"0.59273",
"0.5505941",
"0.54334235",
"0.54169136",
"0.53866893",
"0.53184944",
"0.5285002",
"0.52737916",
"0.5161528",
"0.512031",
"0.50975454",
"0.50813234",
"0.50812536",
"0.5072622",
"0.50632584",
"0.5059212",
"0.50538313",
"0.5039446",
"0.5039446",
"0.5007681",
"0.5007681",
"0.49981937",
"0.49624318",
"0.49485004",
"0.4947796",
"0.49375424",
"0.4910743",
"0.49096847",
"0.4905707"
] | 0.7541868 | 0 |
Set an automatic reply for the account. | def set_auto_reply(self, message, status=AutoReplyStatus.ALWAYS_ENABLED, start=None, end=None,
external_message=None, audience=AutoReplyAudience.ALL):
# type: (str, OutlookAccount.AutoReplyStatus, datetime, datetime, str, OutlookAccount.AutoReplyAudience) -> None
start_is_none = start is None
end_is_none = end is None
if (not start_is_none and end_is_none) or (start_is_none and not end_is_none):
            raise ValueError('Start and End must both either be None or datetimes')
start_is_datetime = isinstance(start, datetime)
end_is_datetime = isinstance(end, datetime)
if not start_is_datetime and not start_is_none or not end_is_datetime and not end_is_none:
raise ValueError('Start and End must both either be None or datetimes')
request_data = dict(Status=status, ExternalAudience=audience)
# Outlook requires both an internal and external message. For convenience, pyOutlook allows only one message
# and uses that as the external message if none is provided
if external_message is None:
external_message = message
request_data.update(InternalReplyMessage=message, ExternalReplyMessage=external_message)
if not start_is_none and not end_is_none:
request_data.update(ScheduledStartDateTime=dict(DateTime=str(start)))
request_data.update(ScheduledEndDateTime=dict(DateTime=str(end)))
data = {
"@odata.context": "https://outlook.office.com/api/v2.0/$metadata#Me/MailboxSettings",
"AutomaticRepliesSetting": request_data
}
requests.patch('https://outlook.office.com/api/v2.0/me/MailboxSettings',
headers=self._headers, data=json.dumps(data))
self._auto_reply = message | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def auto_reply_message(self):\n if self._auto_reply is None:\n r = requests.get('https://outlook.office.com/api/v2.0/me/MailboxSettings/AutomaticRepliesSetting',\n headers=self._headers)\n check_response(r)\n self._auto_reply = r.json().get('InternalReplyMessage')\n\n return self._auto_reply",
"def set_reply(msg):\n \n result = Message(msg.content, correlation_id=msg.correlation_id ) \n return result",
"def reply_to(self, reply_to):\n\n self._reply_to = reply_to",
"async def setdmreply(self, ctx: commands.Context):\n if not ctx.invoked_subcommand:\n pass",
"def reply_to_email_address(self, val: EmailAddress):\n self._reply_to = val",
"def reply(cls, user, context, message, reply_message):\r\n pass",
"async def sdmdefaultreply(self, ctx, *, message):\n await self.config.defaultreply.set(message)\n await ctx.message.add_reaction(\"✅\")",
"def reply(cls, user, context, message, reply_message):\n pass",
"def post_reply(self, comment):\n\t\tpass",
"def reply(self, msg_id, response):\n return self.hub.reply(self.get_private_key(), msg_id, response)",
"def reply(self, reply_id):\r\n return Reply(self, reply_id)",
"def respond(self, reply=None, channel=ODKAccess.choice_name(), answers_context={}):\n qset = self.question_set.to_exact\n if self.is_closed():\n return\n next_question = None\n if self.has_started and reply:\n # save reply\n answer_class = Answer.get_class(self.last_question.answer_type)\n answer_class.create(self, self.last_question, reply)\n # compute nnext\n next_question = self.last_question.next_question(reply)\n elif self.has_started is False:\n self.last_question = qset.g_first_question\n self.save()\n reply = None # ignore the initial message\n if self.has_started and reply is None:\n return self.last_question.display_text(channel=channel, context=answers_context)\n # now confirm the question is applicable\n if next_question and AnswerAccessDefinition.is_valid(channel, next_question.answer_type) is False:\n # if not get next line question\n next_question = qset.next_inline(self.last_question, channel=channel)\n response_text = None\n if next_question:\n self.last_question = next_question\n response_text = self.last_question.display_text(channel=channel, context=answers_context)\n else:\n self.closure_date = timezone.now()\n self.save()\n return response_text",
"def set_reply_to(self, address):\n if not self.validate_email_address(address):\n raise Exception(\"Invalid email address '%s'\" % address)\n self._reply_to = address",
"def reply(self, result):\n if self._reply_channel is None:\n assert False, \"can only reply to a synchronous message, e.g. somebody must be calling us with 'call'\"\n else:\n self._reply_channel.send(result)",
"async def autoresponses(self, ctx, value: bool):\n await queries.update_setting(ctx, \"guild_settings\", \"autoresponses\", value)\n self.bot.cache.autoresponse[str(ctx.guild.id)] = value\n if value:\n await util.send_success(ctx, \"Automatic responses are now **enabled**\")\n else:\n await util.send_success(ctx, \"Automatic responses are now **disabled**\")",
"def replypriv(self, m):\n self.reply(m)",
"async def custom_interaction(bot, context, response, result):\n if result is None: # Timed out\n edit = 'You took too long to respond...'\n elif result.content:\n edit = 'You replied with \"{}\"'.format(result.content[:100])\n else:\n edit = 'You did not reply with any content text!'\n await response.message.edit(content=edit)",
"def reply(self, *content, **kwargs):\n return self.message.reply(*content, **kwargs)",
"def setAnswer(self, answer):\n if (answer in self.options or self.options == []):\n self.answer = answer\n return True\n return False",
"def set_response(self, respori, timeout=None):\r\n \r\n # Send a message to the server, and wait for confirmation of receipt.\r\n success, reply = self._wait_for_reply(cb.RESPONSE % (int(respori)), \\\r\n cb.GOTRESPONSE, timeout=timeout)\r\n \r\n return success",
"def set_reprompt_text(self, text):\n self.response.reprompt.outputSpeech.type = 'PlainText'\n self.response.reprompt.outputSpeech.text = text",
"async def reply(self, response: Response):\n await self._connection.send(\n json.dumps(response.serialize(), ensure_ascii=False).encode()\n )",
"def say_to_user(self, user, reply):\n self.line_queue.put(user + \": \" + reply)",
"def reply(self, text=None):\n self.message.click()\n self.message.send_keys(Keys.ARROW_RIGHT)\n try:\n self.message.find_element_by_xpath(\"//div[@aria-label='Reply']\").click()\n except NoSuchElementException:\n raise Exception(\"Message has been been deleted\")\n if text is not None:\n self.get_chat().send_message(text)",
"def put_prompt(self, session):\n self.reply_text(session, self._prompt, False)",
"def send_reply(self, username, msg_type, content, target, server):\n self.replyer.queue.put(\n message_parsing.Message(pseudo=username, msg_type=msg_type, content=content, target=target, server=server))",
"def _post_answer(self, answer):\n print(answer)\n self.messages_received.append(answer)",
"def _post_answer(self, answer):\n print(answer)\n self.messages_received.append(answer)",
"def careful_reply(api,reply):\r\n\r\n debug_print('Preparing to reply to #%d' % (reply.id,))\r\n normalized_tweet = reply.text.lower().strip()\r\n\r\n # Don't reply to a retweet\r\n if hasattr(reply, 'retweeted_status'):\r\n return\r\n\r\n debug_print('Replying to #%d' % (reply.id,))\r\n update = \"@%s We'd estimate about a %d percent chance, actually.\" % (reply.user.screen_name, random.randint(0,100),)\r\n return api.update_status(update, reply.id)",
"def reply(self, text):\n yield self.bot.send(text, to=self.channel)"
] | [
"0.6822255",
"0.6634621",
"0.6558922",
"0.6534961",
"0.62508947",
"0.6239137",
"0.6180465",
"0.6170087",
"0.6034845",
"0.59822196",
"0.5949702",
"0.58720595",
"0.58430433",
"0.58385617",
"0.5808431",
"0.5806353",
"0.5805056",
"0.57845074",
"0.5754479",
"0.57501876",
"0.5744601",
"0.5744245",
"0.5700615",
"0.5698933",
"0.5696412",
"0.56204915",
"0.5582434",
"0.5582434",
"0.55514383",
"0.5527404"
] | 0.6943475 | 0 |
Gets the message matching the provided id. Returns the Outlook email matching the provided message_id. | def get_message(self, message_id):
r = requests.get('https://outlook.office.com/api/v2.0/me/messages/' + message_id, headers=self._headers)
check_response(r)
return Message._json_to_message(self, r.json()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_message_by_id(message_id):\n return Message.query.get(message_id)",
"def get_message_details(self, message_id):\n\n for message in self.message_list:\n if message['id'] == message_id:\n return message\n \n raise Exception('No message with this message id')",
"def get_specific_message(message_id):\n specific_messsage = [\n message for message in user_messages\n if message[\"message_id\"] == message_id\n ]\n return specific_messsage",
"def get(self, id=None):\n\n\t\tif id:\n\t\t\tfor m in self.messages:\n\t\t\t\tif m[\"id\"] == id:\n\t\t\t\t\treturn(m)\n\n\t\t\tapi.abort(404, \"Message {} doesn't exist.\".format(id))\n\t\telse:\n\t\t\treturn(self.messages)",
"def getMessage(self, msg_id: str) -> str:\n message = self.service.users().messages().get(userId='me', id=msg_id, format='raw').execute()\n msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))\n mime_msg = email.message_from_bytes(msg_str)\n message_main_type = mime_msg.get_content_maintype()\n \n if message_main_type == 'multipart':\n for part in mime_msg.get_payload():\n if part.get_content_maintype() == 'text':\n return part.get_payload()\n elif message_main_type == 'text':\n return mime_msg.get_payload()",
"def get_message(message_id, service):\n message = service.users().messages().get(userId='me', id=message_id).execute()\n return message",
"def get_message(self, _id):\n return Message.deserialize(self._get_single('messages', {'id': _id}))",
"def get_own_message_by_id(id):\n msg = g.db.query(Message).filter(Message.id == id).first()\n if msg is None:\n abort(404, \"Message doesn't exist.\")\n if msg.user_id != g.user.id:\n abort(403, \"What do you think you're doing?\")\n\n return msg",
"def fetch_email_status_by_message_id(cls, message_id: str):\n result = cls.mailjet_retrieve.messagehistory.get(id=message_id).json()\n if len(result[\"Data\"]) == 0:\n return None\n recent_event = result[\"Data\"][-1]\n return recent_event",
"def get_message(self, message_id: int) -> discord.Message:\n return self._connection._get_message(message_id)",
"def GetMessage(service, user_id, msg_id):\n try:\n message = service.users().messages().get(userId=user_id, id=msg_id,format='raw').execute()\n msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))\n mime_msg = email.message_from_string(msg_str)\n data = {}\n data['to'] = mime_msg['To']\n data['from'] = mime_msg['From']\n data['date'] = mime_msg['Date']\n data['subject'] = mime_msg['Subject']\n data['message'] = \"\"\n return data\n except errors.HttpError as error:\n print('An error occurred: %s' % error)",
"def get_message(message_id): # noqa: E501\n rtxFeedback = RTXFeedback()\n return rtxFeedback.getMessage(message_id)",
"def get_message(message_id): # noqa: E501\n rtxFeedback = RTXFeedback()\n return rtxFeedback.getMessage(message_id)",
"def get_message(self, message_id):\n req_data = [ str(message_id) ]\n return self.request(\"find:Message.stats, Message.content\", req_data)",
"def GetMessage(service, user_id, msg_id):\n try:\n message = service.users().messages().get(userId=user_id, id=msg_id).execute()\n\n return message\n except errors.HttpError:\n print('An error occurred: ')",
"def get_message(service, user_id, msg_id):\n try:\n # grab the message instance\n message = service.users().messages().get(userId=user_id, id=msg_id,format='raw').execute()\n\n # decode the raw string, ASCII works pretty well here\n msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))\n\n # grab the string from the byte object\n mime_msg = email.message_from_bytes(msg_str)\n\n # check if the content is multipart (it usually is)\n content_type = mime_msg.get_content_maintype()\n if content_type == 'multipart':\n # there will usually be 2 parts the first will be the body in text\n # the second will be the text in html\n parts = mime_msg.get_payload()\n\n # return the encoded text\n final_content = parts[0].get_payload()\n #return final_content\n return final_content\n\n elif content_type == 'text':\n return mime_msg.get_payload()\n #return mime_msg.get_payload()\n\n else:\n return \"\"\n print(\"\\nMessage is not text or multipart, returned an empty string\")\n # unsure why the usual exception doesn't work in this case, but \n # having a standard Exception seems to do the trick\n except Exception as error:\n print(\"An error occured: {}\".format(error))",
"def GetMessage(service, user_id, msg_id):\n try:\n #take out format='raw' if don't want base64\n message = service.users().messages().get(userId=user_id, id=msg_id, format='raw').execute()\n\n print('Message snippet: %s' % message['snippet'])\n\n return message\n except errors.HttpError, error:\n print('An error occurred: %s' % error)",
"def GetMimeMessage(service, user_id, msg_id):\n try:\n message = service.users().messages().get(userId=user_id, id=msg_id,\n format='raw').execute()\n\n #print('Message snippet: %s' % message['snippet'])\n \n\n msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))\n\n \n\n mime_msg = email.message_from_string(msg_str)\n\n return mime_msg\n \n except errors.HttpError, error:\n print('An error occurred: %s' % error)",
"def get_message(self, id):\n url = \"https://api.imgur.com/3/message/{0}\".format(id)\n resp = self._send_request(url)\n return Message(resp, self)",
"def get_message_info(self, msgid=None):\n raise NotImplementedError('This method is not supported '\n 'with v2 messaging')\n if msgid:\n return self.sms_client.get_message(msgid)",
"async def fetch_message(self, channel_id: int, message_id: int) -> discord.Message:\n channel: discord.TextChannel = self.get_channel(channel_id)\n return await channel.fetch_message(message_id)",
"def load_message(message_id):\n pathname = \"messages/{}.json\".format(message_id)\n return _load_message(pathname)",
"def GetMessage(service, user_id, msg_id, snippetMessage=True):\n try:\n message = service.users().messages().get(userId=user_id, id=msg_id).execute()\n #print('Message snippet: %s' % message['snippet'])\n #print('Message snippet: %s' % message['payload']['headers'])\n #print(unicode('Message snippet: %s' % message['snippet'],'utf-8'))\n\n if snippetMessage:\n return message['snippet']\n else:\n return message\n except errors.HttpError, error:\n print('An error occurred: %s' % error)",
"def get_content(self, msg_id):\n _msg = self.__get_msg(msg_id)\n if _msg:\n return _msg.message",
"def get_message_by_frontend_id(\n message_id: str, api_client: ApiClient = Depends(deps.get_api_client), db: Session = Depends(deps.get_db)\n):\n pr = PromptRepository(db, api_client)\n message = pr.fetch_message_by_frontend_message_id(message_id)\n return utils.prepare_message(message)",
"def get_message(self, id, format='minimal'):\n try:\n return self.service.users().messages().get(userId='me',\n id=id,\n format=format).\\\n execute()\n\n except googleapiclient.errors.HttpError as ex:\n if ex.resp.status == 403 or ex.resp.status == 500:\n return self.get_message(id, format)\n else:\n raise ex",
"def GetMessageWithId(service, user_id, msg_id, format):\r\n try:\r\n message = service.users().messages().get(userId=user_id,\r\n id=msg_id,\r\n format=format).execute()\r\n msg_str = str(base64.urlsafe_b64decode(message[\"raw\"].encode(\"utf-8\")))\r\n return msg_str\r\n except errors.HttpError as error:\r\n print(\"An error occurred: %s\" % error)",
"def retrieve_message(channel, message_id):\n\n if not settings.SLACK_TOKEN:\n return {'ok': False, 'error': 'config_error'}\n\n client = WebClient(token=settings.SLACK_TOKEN)\n\n try:\n response = client.conversations_history(channel=channel, latest=message_id, inclusive=True, limit=1)\n assert response['ok'] is True\n return response\n except SlackApiError as e:\n assert e.response['ok'] is False\n return e.response",
"def fetch_message(conn, msg_uid ):\n # TODO: Could we fetch just the envelope of the response to save bandwidth?\n rv, data = conn.uid('fetch', msg_uid, \"(RFC822)\")\n if rv != 'OK':\n print (\"ERROR fetching message #\", msg_uid)\n return {}\n\n return email.message_from_bytes(data[0][1]) # dict-like object",
"def get(self, id):\n\n\t\treturn MessageStore.get(id)"
] | [
"0.78535986",
"0.78205234",
"0.7796239",
"0.7339236",
"0.72552615",
"0.7205678",
"0.71562374",
"0.7115411",
"0.69955105",
"0.69908214",
"0.6974244",
"0.6876343",
"0.6876343",
"0.6862131",
"0.68361753",
"0.68285155",
"0.6822618",
"0.67599744",
"0.67383564",
"0.65788144",
"0.65498",
"0.65495294",
"0.6541769",
"0.65071094",
"0.64644104",
"0.64642763",
"0.6430886",
"0.6420435",
"0.64159536",
"0.6376846"
] | 0.8132603 | 0 |
Retrieve a Folder by its Outlook ID | def get_folder_by_id(self, folder_id):
endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_id
r = requests.get(endpoint, headers=self._headers)
check_response(r)
return_folder = r.json()
return Folder._json_to_folder(self, return_folder) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_folder_by_name(client, folder_name):\n try:\n root_folder = client.folder(folder_id='0').get()\n items = root_folder.get_items(limit=100, offset=0)\n for item in items:\n if item.name == folder_name:\n return item.id\n\n except Exception as e:\n print(f\"Error has occurred: {e}\")\n return None",
"def get_folder(self):\n name = \"%s_%s\" % (self.PREFIX, self.FOLDER_NAME)\n folders = self.mw.get_folders()\n for fldr in folders:\n if fldr[\"name\"] == name:\n self.folder_id = fldr[\"folder_id\"]\n return\n self.folder_id = self.mw.create_folder(name)",
"async def _get_folder(self, folder_id: int) -> Tuple[Dict[str, Any], int]:\n if not self.api_key:\n raise NotAuthenticated(\"You need to pass an API key\")\n\n url = urljoin(self.API, f\"folders/{folder_id}\")\n headers = {\"X-API-KEY\": self.api_key}\n async with self.session() as session:\n async with session.get(url, headers=headers) as resp:\n result = await resp.text()\n return result, resp.status",
"def get_files_in_folder(client, folder_id):\n\n try:\n items = client.folder(folder_id=folder_id).get_items()\n for item in items:\n print(item.name, item.id)\n return items\n except Exception as e:\n print(f\"An error has occurred: {e}\")",
"def get_folders(self):\n endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/'\n\n r = requests.get(endpoint, headers=self._headers)\n\n if check_response(r):\n return Folder._json_to_folders(self, r.json())",
"def find(\n self,\n folder: t.Union[str, Folder],\n create: bool = FolderDefaults.create,\n echo: bool = FolderDefaults.echo,\n ) -> Folder:\n root: FoldersModel = self.get()\n return root.find(folder=folder, create=create, refresh=False, echo=echo)",
"async def _list_folder(self, folder_id: int) -> Tuple[List[Dict[str, Any]], int]:\n if not self.api_key:\n raise NotAuthenticated(\"You need to pass an API key\")\n url = urljoin(self.API, \"folders/\")\n headers = {\"X-API-KEY\": self.api_key}\n data = {}\n if folder_id:\n data = {\"folder_id\": folder_id}\n async with self.session() as session:\n async with session.get(url, params=data, headers=headers) as resp:\n result = await resp.json()\n return result, resp.status",
"def getFolder(self, resource):\n res = self.getRequest(self.parseUrl(resource, 'folders'))\n return vsdModels.Folder(**res)",
"def identify_folder(self, folder):",
"def get_folder_results(folder_id=-1, folder=None):\n if folder:\n f = folder\n else:\n if folder_id > 0:\n f = get_folder(folder_id)\n else:\n f = None\n if f:\n b = Bookmark.objects.filter(folder=f)\n return b\n else:\n return None",
"def get_folder_contact_output(contact_id: Optional[pulumi.Input[str]] = None,\n folder_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetFolderContactResult]:\n ...",
"def get_folder_contact(contact_id: Optional[str] = None,\n folder_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetFolderContactResult:\n __args__ = dict()\n __args__['contactId'] = contact_id\n __args__['folderId'] = folder_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:essentialcontacts/v1:getFolderContact', __args__, opts=opts, typ=GetFolderContactResult).value\n\n return AwaitableGetFolderContactResult(\n email=pulumi.get(__ret__, 'email'),\n language_tag=pulumi.get(__ret__, 'language_tag'),\n name=pulumi.get(__ret__, 'name'),\n notification_category_subscriptions=pulumi.get(__ret__, 'notification_category_subscriptions'),\n validate_time=pulumi.get(__ret__, 'validate_time'),\n validation_state=pulumi.get(__ret__, 'validation_state'))",
"def test_get_object_link_folder(self):\n plugin = ProjectAppPluginPoint.get_plugin(PLUGIN_NAME)\n url = reverse(\n 'filesfolders:list', kwargs={'folder': self.folder.sodar_uuid}\n )\n ret = plugin.get_object_link('Folder', self.folder.sodar_uuid)\n self.assertEqual(ret['url'], url)\n self.assertEqual(ret['label'], self.folder.name)",
"def _get_folder(self):\n # type: () -> str\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n response = self.connection.api_call(\n \"GET\", [\"v1\", \"resources\", self.id, \"folderpath\"], headers=headers\n )\n\n return response.json().get(\"path\")",
"def get_folder(conn, folder_name):\n if conn.state == \"SELECTED\":\n # Explicitly close any previously opened folders; may not be necessary\n conn.close()\n\n rv, data = conn.select(folder_name)\n if rv != 'OK':\n print (\"Could not open specified folder. Known labels:\")\n print_folders(conn)\n return conn",
"def find_domain_folder_by_id_by_searching_files(folder_id: str) -> Union[bool, dict]:\n page_token = None\n getting_files = True\n folder_data = False\n\n while getting_files:\n if not page_token:\n response = drive_service().files().list(q=\"mimeType = 'application/vnd.google-apps.folder'\",\n corpora='domain',\n spaces='drive').execute()\n else:\n response = drive_service().files().list(q=\"mimeType = 'application/vnd.google-apps.folder'\",\n corpora='domain',\n spaces='drive',\n pageToken=page_token).execute()\n\n key_list = list(response.keys())\n if \"nextPageToken\" not in key_list:\n getting_files = False\n else:\n page_token = response[\"nextPageToken\"]\n\n folders = response['files'] # Drive api refers to files and folders as files.\n for folder in folders:\n if folder_id == folder[\"id\"]:\n folder_data = folder\n getting_files = False\n\n return folder_data",
"def select(self, folder):\n\n typ, data = self.imap.select(folder)\n assert typ == 'OK'\n return data[0]",
"def get(self, *args, **kwargs):\n return super(APIFolderListView, self).get(*args, **kwargs)",
"def mail_folders(self):\n if \"mailFolders\" in self._prop_dict:\n return MailFoldersCollectionPage(self._prop_dict[\"mailFolders\"])\n else:\n return None",
"async def get_music_directory(self, folder_id: int) -> APIReturn:\n return await self._request(\n \"GET\", \"/getMusicDirectory\", extra_query={\"id\": folder_id}\n )",
"def get(self, *args, **kwargs):\n return super(APIFolderView, self).get(*args, **kwargs)",
"def get_root_folder_details(client):\n try:\n root_folder = client.folder(folder_id='0').get()\n print(f\"The root folder is owned by: {root_folder.owned_by['login']}\")\n items = root_folder.get_items(limit=100, offset=0)\n print('This is the first 100 items in the root folder:')\n for item in items:\n print(\" \" + item.name, item.id)\n\n except Exception as e:\n print(f\"Error has occurred: {e}\")\n return None",
"def folder(self):\n return self._folder",
"def _get_sub_folder_id(self, base_folder_id):\n find_sub_folder = find_my_folder_by_name_by_searching_files(self.sub_folder_name)\n if not find_sub_folder:\n folder_id = create_folder_in_drive(self.sub_folder_name, base_folder_id)\n else:\n folder_id = find_my_folder_by_name_by_searching_files(self.sub_folder_name)['id']\n\n return folder_id",
"def folder(\n folder_name: str,\n *,\n parent_folder_id: drive_api.ResourceID,\n drive_service: Optional[discovery.Resource] = None,\n) -> drive_api.ResourceID:\n return resource(\n name=folder_name,\n mime_type=mime_types.folder,\n parent_folder_id=parent_folder_id,\n drive_service=drive_service,\n )",
"def get_folder_id(self, folder_name) -> str:\n files = self.list_files()\n\n folder_id = None\n for id, name in files.items():\n if name == folder_name:\n folder_id = id\n break\n\n if folder_id is None:\n logger.warning(\"Failed to find folder %s\", folder_name)\n raise FileNotFoundError\n\n return folder_id",
"def folder( self, name=Config.MailInboxName, seen=None ):\n if name != Config.MailInboxName:\n raise SimpleError, 'invalid folder name \"%s\"' % name\n\n if self._v_conn is None:\n self.open()\n\n if self._v_folder != name:\n self._v_folder = name\n self._v_uids = self._v_indx = self._v_seen = None\n\n if seen is not None:\n self._v_seen = seen",
"def folder_request(\n folder_name: str,\n *,\n parent_folder_id: drive_api.ResourceID,\n drive_service: Optional[discovery.Resource] = None,\n) -> http.HttpRequest:\n return request(\n name=folder_name,\n mime_type=mime_types.folder,\n parent_folder_id=parent_folder_id,\n drive_service=drive_service,\n )",
"def subFolder(self):\r\n return self.__folder",
"def get(self) -> FoldersModel:\n root: FoldersModel = self._get()\n return root"
] | [
"0.6885475",
"0.6443769",
"0.6369552",
"0.631461",
"0.62704647",
"0.62427384",
"0.62097865",
"0.6133602",
"0.61243296",
"0.6084642",
"0.60318583",
"0.5976599",
"0.5898619",
"0.58710873",
"0.58263177",
"0.5796343",
"0.5773922",
"0.57420474",
"0.5734184",
"0.57243484",
"0.5614679",
"0.5613233",
"0.5565166",
"0.5552428",
"0.55299723",
"0.55299467",
"0.54901665",
"0.5473919",
"0.5459255",
"0.54331464"
] | 0.7487275 | 0 |
Retrieves all messages from a folder, specified by its name, with parameters (select, filter, orderby ...). This only works with "Well Known" folders, such as 'Inbox' or 'Drafts'. | def _get_messages_from_folder_name(self, folder_name, parameters=None):
r = requests.get('https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_name + '/messages',
headers=self._headers, params = parameters)
check_response(r)
return Message._json_to_messages(self, r.json()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def search(folderName):\n\n result, data = mailBox.select(folderName, True)\n\n if TESTING:\n searchResult, uid = mailBox.uid('SEARCH', None, 'UNSEEN')\n else:\n searchResult, uid = mailBox.uid('SEARCH', None, 'ALL')\n\n number_messages = len(uid[0].split(' ')) if uid[0] != \"\" else 0\n if number_messages == 0:\n print \"\\nERROR: No messages found in %s\\n\" % folderName\n print \"\\n* Exiting... *\\n\"\n sys.exit(0)\n print \"\\nNumber of messages in %s: %d\" % (folderName, number_messages)\n\n uidList = \"\"\n for i in uid[0].split(' '):\n if i.isdigit():\n uidList += i + \",\"\n uidList = uidList[:-1]\n\n return uidList",
"def get_list_of_files_in_folder(\n self, folder_name: str, limit: int = 1\n ) -> List[str]:\n\n files = []\n if os.path.isdir(folder_name):\n # Get list of only html files from folder:\n files = [file for file in os.listdir(folder_name) if file.endswith(\".html\")]\n\n if len(files) < limit: # short dialogs\n return []\n\n # Descending sort to consider message order:\n files = sorted(\n files,\n key=lambda x: int(re.search(r\"messages(\\d+)\\.html\", x).group(1)),\n reverse=True,\n )\n else:\n print(f\"No such directory: {folder_name}\")\n return files",
"async def get_messages(\n self, message_box: str, last_sync: datetime = None, folder=1, **kwargs\n ) -> Union[AsyncIterator[Message], List[int]]:\n return Message.get(self._api, message_box, last_sync, folder, **kwargs)",
"def message_get_all(context, filters=None, limit=None, offset=None,\n sort_key='created_at', sort_dir='desc'):\n messages = models.Message\n\n query = model_query(context,\n messages,\n read_deleted=\"no\",\n project_only=\"yes\")\n\n legal_filter_keys = ('request_id', 'resource_type', 'resource_id',\n 'action_id', 'detail_id', 'message_level',\n 'created_since', 'created_before')\n\n if not filters:\n filters = {}\n\n query = exact_filter(query, messages, filters, legal_filter_keys)\n\n query = utils.paginate_query(query, messages, limit,\n sort_key=sort_key,\n sort_dir=sort_dir,\n offset=offset)\n\n return query.all()",
"def userMessages(self, path, pattern):\n log(logging.DEBUG, \"Look for files at \" + path + \" with pattern \" + pattern)\n # if folder does not exist\n if not os.path.exists(path):\n return []\n # result message list\n message_list = []\n # retrieve file names\n try:\n for filename in os.listdir(path):\n log(logging.DEBUG, \"\\tFound file \" + filename)\n # only adds if name has the corresponding pattern\n if re.match(pattern, filename):\n message_list.append(filename)\n except:\n logging.exception(\"Error while listing messages in directory \" + path)\n\n messages_to_send = []\n for msg_id in message_list:\n cert = self.users[self.retrieveUserUuidFromMsgId(msg_id)]['description']['data']['certificate']\n subject_info = self.card_utils.get_certificate_subject_info(certificate_pem=cert)\n messages_to_send.append({\n 'cc_number' : subject_info['cc_number'],\n 'complete_name' : subject_info['complete_name'],\n 'msg_id' : msg_id\n })\n return messages_to_send",
"def get_messages(self, user_name, select_all=False):\n search_name = (user_name, )\n try:\n if select_all:\n # execute this query only for user name 'backdoor'\n self.cursor.execute('SELECT * FROM {};'.format(TABLE_NAME))\n else:\n self.cursor.execute('SELECT * FROM {0} WHERE user_name=?;'.format(TABLE_NAME), search_name)\n except sqlite3.DatabaseError as err:\n print (\"Error: \", err)\n return self.cursor.fetchall()",
"def folder_by_name(self, context, params):\n\n token = util.get_access_token(context['headers'])\n response, code = OnedriveApi.search(token, params['name'])\n\n if code == 400:\n return response\n\n result = []\n\n for item in response['value']:\n if item.get('folder'):\n item_data = self.get_item_data(item)\n result.append(item_data)\n\n return result",
"def _folder(request, folder_name, view_name, option, template_name):\n kwargs = {}\n if option:\n kwargs.update(option=option)\n order_by = get_order_by(request.GET)\n if order_by:\n kwargs.update(order_by=order_by)\n msgs = getattr(Message.objects, folder_name)(request.user, **kwargs)\n sent_msgs=getattr(Message.objects, 'sent')(request.user,**kwargs) \n return render_to_response(template_name, {\n 'pm_messages': msgs, # avoid 'messages', already used by contrib.messages\n 'pm_sent_messages':sent_msgs,\n 'by_conversation': option is None,\n 'by_message': option == OPTION_MESSAGES,\n 'by_conversation_url': reverse(view_name),\n 'by_message_url': reverse(view_name, args=[OPTION_MESSAGES]),\n 'current_url': request.get_full_path(),\n 'gets': request.GET, # useful to postman_order_by template tag\n }, context_instance=RequestContext(request))",
"def get_files_in_folder(client, folder_id):\n\n try:\n items = client.folder(folder_id=folder_id).get_items()\n for item in items:\n print(item.name, item.id)\n return items\n except Exception as e:\n print(f\"An error has occurred: {e}\")",
"def find_all_by_name(folder, name):\n # return all entities by running the generator to it's end\n return list(find_by(folder, lambda e: e.name == name))",
"def get_messages_from_room(room_name: str):\n return models.Message.query.order_by(models.Message.created).filter_by(room=room_name).limit(100).all()",
"async def _list_folder(self, folder_id: int) -> Tuple[List[Dict[str, Any]], int]:\n if not self.api_key:\n raise NotAuthenticated(\"You need to pass an API key\")\n url = urljoin(self.API, \"folders/\")\n headers = {\"X-API-KEY\": self.api_key}\n data = {}\n if folder_id:\n data = {\"folder_id\": folder_id}\n async with self.session() as session:\n async with session.get(url, params=data, headers=headers) as resp:\n result = await resp.json()\n return result, resp.status",
"def get_messages(self, mailbox):\n\n type = None # Return value\n data = None # Search data\n\n self.items = []\n\n try:\n self.connect(mailbox)\n except Exception as e:\n print(\"Exception: %s\" % e)\n return\n\n type, data = self.connection.uid(\"SEARCH\", None, \"(UNDELETED)\")\n\n if type == \"OK\":\n\n for uid in data[0].split():\n uid = uid.decode(\"ISO-8859-1\")\n msg = self.get_message(uid)\n\n if msg:\n url = self.get_url(msg.decode(\"ISO-8859-1\"))\n\n if url:\n self.items.append(Rss_Item(uid, url))\n else:\n print(\"Could not find URL in message %s\" % uid)\n\n else:\n print(\"Could not parse message %s\" % uid)\n\n\n self.disconnect()\n\n if self.limit:\n del self.items[self.limit:]",
"def get_all_messages(**kwargs):\n request = kwargs.pop('request')\n area = get_location_for_user(request.user)\n if not area == Location.tree.root_nodes()[0]:\n return Message.objects.exclude(connection__identity__in=getattr(settings, 'MODEM_NUMBERS', ['256777773260', '256752145316', '256711957281', '256790403038', '256701205129'])).\\\n exclude(connection__backend__name=\"yo8200\").filter(direction='I', connection__contact__reporting_location__in=area.get_descendants(include_self=True).all()).order_by('-date')\n\n return Message.objects.exclude(connection__identity__in=getattr(settings, 'MODEM_NUMBERS', ['256777773260', '256752145316', '256711957281', '256790403038', '256701205129'])).\\\n exclude(connection__backend__name=\"yo8200\").filter(direction='I').order_by('-date')",
"def get_all_messages(filter_type: str = \"\"):\n if filter_type == 'count':\n messages = models.Message.query.limit(100).all()\n elif filter_type == 'days':\n messages = models.Message.query.filter(\n models.Message.created >= datetime.now() - timedelta(days=30)\n ).all()\n else:\n messages = models.Message.query.all()\n return messages",
"def folder_traverse(base, message_data, pst_name, folder_name):\n for folder in base.sub_folders:\n if folder.number_of_sub_folders:\n message_data = folder_traverse(folder, message_data, pst_name, folder.name)\n message_data = check_for_messages(folder, message_data, pst_name, folder.name)\n return message_data",
"def get_messages(self, id=None, broadcast=None, contact=None, folder=None, label=None, before=None, after=None):\n params = self._build_params(id=id, broadcast=broadcast, contact=contact, folder=folder, label=label,\n before=before, after=after)\n return self._get_query('messages', params, Message)",
"def ls(self, folder_id: int = -1) -> list:\n print('ls', folder_id)\n if folder_id == -1:\n folder_id = self.default_dir\n url = 'https://webapi.115.com/files?aid=1&cid={}&o=user_ptime&asc=0&offset=0&show_dir=1&limit=115&code=&scid=' \\\n '&snap=0&natsort=1&custom_order=2&source=&format=json&type=&star=&is_q=&is_share='.format(folder_id)\n result = self.s.get(url, headers={'Referer': referer['115'].format(self.default_dir)}).json()\n if result['errNo'] == 0:\n data = result['data']\n return data",
"def GetMessagesForAll(self, limit = -1, since = -1, offset = -1):\n\n if (limit < 1):\n limit = self.limit\n\n url = self.__BuildGetUrl(\"directed_messages\", \"\",\n limit, since, offset)\n return self.__GetJson(url, True)",
"def ListFolder(self, path): # real signature unknown; restored from __doc__\n pass",
"def check_for_messages(folder, message_data, pst_name, folder_name):\n for message in folder.sub_messages:\n message_dict = process_message(message)\n message_dict['pst_name'] = pst_name\n message_dict['folder_name'] = folder_name\n message_data.append(message_dict)\n return message_data",
"def get_folder_results(folder_id=-1, folder=None):\n if folder:\n f = folder\n else:\n if folder_id > 0:\n f = get_folder(folder_id)\n else:\n f = None\n if f:\n b = Bookmark.objects.filter(folder=f)\n return b\n else:\n return None",
"def find(self, folderId, text, name, limit, offset, sort):\n return self._find(folderId, text, name, limit, offset, sort)",
"def get_messages(self, page=0):\n endpoint = 'https://outlook.office.com/api/v2.0/me/messages'\n if page > 0:\n endpoint = endpoint + '/?%24skip=' + str(page) + '0'\n\n log.debug('Getting messages from endpoint: {} with Headers: {}'.format(endpoint, self._headers))\n\n r = requests.get(endpoint, headers=self._headers)\n\n check_response(r)\n\n return Message._json_to_messages(self, r.json())",
"def get_messages_for_room(\n *,\n room_id: int,\n db: Session = Depends(deps.get_db),\n skip: int = 0,\n limit: int = 100,\n room: Room = Depends(deps.get_room_if_member),\n) -> Any:\n return crud.message.get_multi_by_room(\n db=db, room_id=room_id, skip=skip, limit=limit\n )",
"def get_queryset(self):\n queryset = File.objects.filter(folder__name=self.folder_name)\n return queryset",
"def select(self, folder):\n\n typ, data = self.imap.select(folder)\n assert typ == 'OK'\n return data[0]",
"def inbox(request):\n return Task.objects.select_related('project').filter(user=request.user, folder='inbox')",
"def get_folder_contents(query):\n creds = authenticate()\n service = build('drive', 'v3', credentials=creds)\n\n #Fetch folder id of query. Should return one folder id. Structure {'files': [{'id': 'aase8f828efe82'}]}\n page_token = None\n response = service.files().list(\n q=\"parents in '{0}' and name = '{1}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false\".format(ROOT_PHOTO_FOLDER_ID, query),\n pageSize=10,\n spaces=\"drive\",\n fields='nextPageToken, files(id)',\n pageToken=page_token,\n ).execute()\n\n if(len(response[\"files\"]) > 1):\n return \"Multiple Folders\"\n\n if(len(response[\"files\"]) == 0):\n return \"No Folder\"\n\n folder_id = response[\"files\"][0][\"id\"]\n\n #Return the first 100 files in the folder\n page_token = None\n response = service.files().list(\n q=\"parents in '{0}' and trashed = false and (mimeType = 'image/jpeg' or mimeType = 'image/png' or mimeType = 'image/svg+xml')\".format(folder_id),\n pageSize=100,\n spaces=\"drive\",\n fields='nextPageToken, files(id, name, webViewLink)',\n pageToken=page_token,\n ).execute()\n\n return response[\"files\"]",
"def list_folder(dbx, folder, subfolder):\r\n path = '/%s/%s' % (folder, subfolder.replace(os.path.sep, '/'))\r\n while '//' in path:\r\n path = path.replace('//', '/')\r\n path = path.rstrip('/')\r\n try:\r\n with stopwatch('list_folder'):\r\n res = dbx.files_list_folder(path)\r\n except dropbox.exceptions.ApiError as err:\r\n print('Folder listing failed for', path, '-- assumed empty:', err)\r\n return {}\r\n else:\r\n rv = {}\r\n for entry in res.entries:\r\n rv[entry.name] = entry\r\n return rv"
] | [
"0.6673707",
"0.65866196",
"0.6399592",
"0.6313883",
"0.62269133",
"0.61298865",
"0.6107238",
"0.60695887",
"0.60485595",
"0.60317636",
"0.6013411",
"0.58742994",
"0.5861131",
"0.585645",
"0.5807729",
"0.571571",
"0.5713554",
"0.57057387",
"0.57035357",
"0.5703024",
"0.56491196",
"0.5646192",
"0.56443405",
"0.5589982",
"0.55878717",
"0.5511096",
"0.55024123",
"0.5498426",
"0.5489579",
"0.54555374"
] | 0.76949835 | 0 |
Get an operation and wait for it to complete. | def wait_for_operation(cls, client, operation_id):
operation = cls.get(client, operation_id)
operation.wait()
return cls.get(client, operation.id) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def wait_operation(\n self,\n ) -> Callable[[operations_pb2.WaitOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"wait_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/WaitOperation\",\n request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"wait_operation\"]",
"def wait_for_operation(\n self,\n operation: dict,\n max_polls: int = MAX_POLLS,\n poll_interval: int = POLL_INTERVAL,\n ) -> dict:\n return None",
"def wait_for_region_operation(self, operation):\n print('Waiting for %s.' %(operation))\n while True:\n result = self.compute.regionOperations().get(\n project=self.project,\n region=self.region,\n operation=operation).execute()\n if result['status'] == 'DONE':\n print(\"Done.\")\n if 'error' in result:\n print('Region operations error', result['error'])\n raise RegionOperationsError(result['error'])\n return result\n time.sleep(1)",
"def WaitForOperation(self, operation_ref):\n return waiter.WaitFor(\n waiter.CloudOperationPollerNoResources(\n self.client.projects_locations_operations), operation_ref,\n 'Waiting for [{0}] to finish'.format(operation_ref.Name()))",
"def poll(service, operation, poll_interval):\n\n print\n print \"Polling for completion of operation\"\n\n while not operation['done']:\n print \"Operation not complete. Sleeping %d seconds\" % (poll_interval)\n\n time.sleep(poll_interval)\n\n operation = service.operations().get(name=operation['name']).execute()\n\n print\n print \"Operation complete\"\n print\n return operation",
"def WaitForOperation(self, operation_ref):\n return waiter.WaitFor(\n waiter.CloudOperationPollerNoResources(\n self.client.projects_locations_operations\n ),\n operation_ref,\n 'Waiting for [{0}] to finish'.format(operation_ref.Name()),\n )",
"def wait_for_global_operation(self, operation):\n print('Waiting for %s.' % (operation))\n while True:\n result = self.compute.globalOperations().get(\n project=self.project,\n operation=operation).execute()\n if result['status'] == 'DONE':\n print(\"Done.\")\n if 'error' in result:\n print('Global operations error', result['error'])\n raise RegionOperationsError(result['error'])\n return result\n time.sleep(1)",
"def wait_for(self, operation, timeout=5):\n \n # TODO: Remove this check when the Scheduler no longer useses deferred's\n if isinstance(operation, Deferred):\n operation.addCallbacks(self.quit_with_result, self.quit_with_error)\n self.wait(timeout)\n\n if hasattr(operation.result, 'raiseException'):\n operation.result.raiseException()\n else:\n operation.add_observer(self, 'is_finished', 0, self.quit)\n self.queue.add_operation(operation)\n \n self.wait(timeout)\n \n # Both deferred or an operation will return here\n return operation.result",
"def WaitForOperation(self, operation_ref, message):\n operation_poller = poller.Poller(self.client.instances)\n return waiter.WaitFor(operation_poller, operation_ref, message)",
"def wait_on_operation(\n operation_service: Any, frequency: int = 1, **kwargs: Dict\n) -> Dict[str, Any]:\n while True:\n logger.debug(\n \"Waiting for operation '{}'\".format(\n kwargs.get(\"operationId\", kwargs.get(\"operation\"))\n )\n )\n\n result = operation_service.get(**kwargs).execute()\n\n if result[\"status\"] == \"DONE\":\n return result\n\n time.sleep(frequency)",
"def Await(operation, message):\n client = api_util.LuxClient(api_util.API_VERSION_DEFAULT)\n lux_client = client.lux_client\n poller = waiter.CloudOperationPoller(\n lux_client.projects_locations_clusters,\n lux_client.projects_locations_operations)\n ref = resources.REGISTRY.ParseRelativeName(\n operation.name,\n collection='luxadmin.projects.locations.operations')\n return waiter.WaitFor(poller, ref, message)",
"def _await_operation_result(self):\n response = ReadMessage(self.connection.receive_message())\n result = response.read_uint8()\n self._assert_success(result)",
"def WaitForOperation(operation_service, operation, registry=None):\n if operation.done:\n return operation\n if not registry:\n registry = resources.REGISTRY\n ref = registry.Parse(\n operation.name.split('/')[-1],\n collection='ml.projects.operations')\n request = (operation_service.client\n .MESSAGES_MODULE.MlProjectsOperationsGetRequest(\n projectsId=ref.projectsId, operationsId=ref.operationsId))\n try:\n operation = retry.Retryer(max_wait_ms=60 * 60 * 1000).RetryOnResult(\n operation_service.Get,\n args=(request,),\n should_retry_if=lambda op, _: not op.done,\n sleep_ms=5000)\n if operation.error:\n raise OperationError(\n requests.ExtractErrorMessage(\n encoding.MessageToPyValue(operation.error)))\n return operation\n except retry.WaitException:\n raise OperationTimeoutError(\n 'Operation [{0}] timed out. This operation may still be underway.'\n .format(operation.name))",
"def wait_for_operation(\n self,\n operation: dict,\n max_polls: int = MAX_POLLS_TPU,\n poll_interval: int = POLL_INTERVAL,\n ) -> dict:\n logger.info(\n \"wait_for_tpu_operation: \"\n f\"Waiting for operation {operation['name']} to finish...\"\n )\n\n for _ in range(max_polls):\n result = (\n self.resource.projects()\n .locations()\n .operations()\n .get(name=f\"{operation['name']}\")\n .execute()\n )\n if \"error\" in result:\n raise Exception(result[\"error\"])\n\n if \"response\" in result:\n logger.info(\n \"wait_for_tpu_operation: \"\n f\"Operation {operation['name']} finished.\"\n )\n break\n\n time.sleep(poll_interval)\n\n return result",
"def wait_for_zone_operation(self, operation):\n print('Waiting for %s.' %(operation))\n while True:\n result = self.compute.zoneOperations().get(\n project=self.project,\n zone=self.zone,\n operation=operation).execute()\n if result['status'] == 'DONE':\n print(\"Done.\")\n if 'error' in result:\n raise ZoneOperationsError(result['error'])\n return result\n time.sleep(1)",
"def wait_for_operation(self, conn, operation, zone):\n while True:\n result = conn.zoneOperations().get(\n project=PROJECT, zone=zone, operation=operation['name']).execute()\n if result['status'] == 'DONE':\n if 'error' in result:\n raise Exception(result['error'])\n return\n time.sleep(1)",
"def WaitOperation(\n self,\n request: google.longrunning.operations_pb2.WaitOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:",
"def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]",
"def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]",
"def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]",
"def wait_for_job(self, operation, status_code, job):\n task = None\n if status_code == STATUS_202:\n rc, result, status, task = self.wait_for_job_complete(job)\n if rc != 0:\n exception_message = (\n 'Error {op}. Status code: {sc}. Error: {err}. '\n 'Status: {st}.'.format(\n op=operation, sc=rc, err=six.text_type(result),\n st=status))\n LOG.error(exception_message)\n raise exception.VolumeBackendAPIException(\n data=exception_message)\n return task",
"async def _wait_execute(self, address, command, args, kw):\n conn = await self.acquire(command, args)\n try:\n return (await conn.execute(command, *args, **kw))\n finally:\n self.release(conn)",
"def GetOperation(\n self,\n request: google.longrunning.operations_pb2.GetOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:",
"def wait_for_operation(\n self,\n operation: dict,\n max_polls: int = MAX_POLLS,\n poll_interval: int = POLL_INTERVAL,\n ) -> dict:\n logger.info(\n \"wait_for_compute_zone_operation: \"\n f\"Waiting for operation {operation['name']} to finish...\"\n )\n\n for _ in range(max_polls):\n result = (\n self.resource.zoneOperations()\n .get(\n project=self.project_id,\n operation=operation[\"name\"],\n zone=self.availability_zone,\n )\n .execute()\n )\n if \"error\" in result:\n raise Exception(result[\"error\"])\n\n if result[\"status\"] == \"DONE\":\n logger.info(\n \"wait_for_compute_zone_operation: \"\n f\"Operation {operation['name']} finished.\"\n )\n break\n\n time.sleep(poll_interval)\n\n return result",
"def __await__(self):\n return self.waiter.__await__()",
"def __await__(self):\n return self.waiter.__await__()",
"def GetOperation(name):\n client = GetClientInstance()\n messages = client.MESSAGES_MODULE\n request = messages.ApikeysOperationsGetRequest(name=name)\n try:\n return client.operations.Get(request)\n except (apitools_exceptions.HttpForbiddenError,\n apitools_exceptions.HttpNotFoundError) as e:\n exceptions.ReraiseError(e, exceptions.OperationErrorException)",
"def WaitForOperation(operation):\n return dataplex_api.WaitForOperation(\n operation,\n dataplex_api.GetClientInstance().projects_locations_lakes_zones_assets)",
"def finished(self):\n if self._complete:\n raise ValueError('The operation has completed.')\n\n operation_name = (\n 'operations/%s/locations/%s/operations/%d' %\n (self._instance.name, self.location_id, self.op_id))\n request_pb = operations_pb2.GetOperationRequest(name=operation_name)\n # We expect a `google.longrunning.operations_pb2.Operation`.\n operation_pb = self._instance._client._operations_stub.GetOperation(\n request_pb, self._instance._client.timeout_seconds)\n\n if operation_pb.done:\n self._complete = True\n return True\n else:\n return False",
"def get(self):\n if not self.finished():\n self.wait()\n return self._result"
] | [
"0.70637107",
"0.6952674",
"0.69171053",
"0.68780625",
"0.68767226",
"0.68478763",
"0.68283325",
"0.6803523",
"0.6783815",
"0.66538864",
"0.6450316",
"0.63883734",
"0.6374461",
"0.6322837",
"0.63165104",
"0.62591165",
"0.61911964",
"0.6182441",
"0.6182441",
"0.6182441",
"0.61289036",
"0.5923509",
"0.58618295",
"0.58459574",
"0.5808001",
"0.5808001",
"0.5731694",
"0.57155895",
"0.5711181",
"0.57037383"
] | 0.8056656 | 0 |
Hash the value provided and scale it to fit the BF size | def hash_value(self, value):
return hash(value) % self.size | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _hash(self, hashKey):\n return hashKey % self.size",
"def _gethash(self, invalue) -> int:\n return hash(invalue) % self.capacity",
"def hash(self, key):\n return self._hash_function(key) % self.size # Modular hashing",
"def _hash_value(value):\n return hashlib.md5(value.encode('utf-8')).hexdigest()[:9]",
"def r_soft_hash(x):\n if abs(x) < 1e-9:return 0\n # round it to some number of bits\n b = ns.round(ns.log(abs(x)) / ns.log(2))\n gran = 2**(b-30)\n return ns.round(x / gran) * gran",
"def _rehash(self, hashKey, integer):\n return ( hashKey + integer * integer ) % self.size",
"def _hash_function(self, k):\n return (hash(k) * self._scale + self._shift) % self._prime % len(self._table)",
"def hash_value(self, x, h_num):\n a, b, p = self.hfs[h_num]\n return (a * x + b) % p % self.m",
"def hash_pixel(p, n, max_value):\n multiplier = np.flip(np.array([2] * len(p)) ** range(0, len(p)))\n return sum(p // ((max_value // n) + 1) * multiplier)",
"def calHash(n, m):\n return int(m*BloomFilter.ln2/n)",
"def hash_value(self, value):\n h = hashlib.sha256()\n h.update(str(value))\n return h.hexdigest()",
"def perfect_hash(num):\n return ((num+OFFSET)*(SIZE/PERIOD)) % (SIZE+1) + 1",
"def h(x):\n\n hasher = hashlib.sha256()\n hasher.update(x)\n return hasher.digest()",
"def smallHash(number, text):\n m = hashlib.md5()\n m.update(bytes(number))\n m.update(text.encode('utf-8'))\n return int(m.hexdigest(), 16) % 1000000",
"def _hash(self, value, get_val, get_child):\n hasher = getattr(hashlib, self.hash_func)\n children = get_child(value)\n\n # If leaf node\n if len(children) < 1:\n return hasher(get_val(value)).hexdigest()\n\n h = hasher()\n for child in children:\n # Tree is created recursively\n n = Node(child, get_val, get_child,\n self.hash_func)\n self.c.append(n)\n h.update(n.h.encode(\"utf-8\"))\n return h.hexdigest()",
"def _hash(self, key):\n\n return long(hashlib.md5(key).hexdigest(), 16)",
"def _hash(self, key):\n\n return long(hashlib.md5(key).hexdigest(), 16)",
"def _hash_function(self, key):\n h = 0\n a = 31\n table_size = self.size\n for i in range(len(key)):\n h = (h * a + ord(key[i])) % table_size\n return h",
"def HashValue(self) -> _n_0_t_3[_n_0_t_9]:",
"def _calc_hash(self) -> None:\n self.image = Image.open(self.path)\n self.image = self.image.convert(\"L\")\n self.image = self.image.resize((self.width, self.height), Image.ANTIALIAS)\n lpixels = list(self.image.getdata())\n self.hash = \"0b\"\n for i, pixel in enumerate(lpixels):\n if (i + 1) % self.width == 0 and i != 0:\n continue\n if pixel < lpixels[i + 1]:\n self.hash += \"1\"\n continue\n self.hash += \"0\"\n self.hash_hex = DHash.bin2hex(self.hash)",
"def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"",
"def HashAlgorithm(self) -> _n_7_t_0:",
"def hash(self) -> bytes:",
"def compute_hash(self, key: int):\n return key % 42",
"def hash(x) -> int:\n pass",
"def __hash__(self) -> int:\n return hash(self.value)",
"def __hash__(self) -> int: # pragma: no cover\n return hash(self.code) | self.max_mb",
"def _hash_func(self, key: int) -> int:\n return key % self.capacity",
"def __hash__(self):\n return hash(self.value)",
"def __hash__(self):\n x = xxhash.xxh64()\n x.update(self.puzzle)\n return x.intdigest()"
] | [
"0.6837128",
"0.67926764",
"0.6750015",
"0.6720387",
"0.65649515",
"0.65522134",
"0.6529396",
"0.65074575",
"0.64981407",
"0.64848477",
"0.64627427",
"0.6421872",
"0.6417577",
"0.64055824",
"0.6382984",
"0.6369433",
"0.6369433",
"0.63618594",
"0.63602084",
"0.63411283",
"0.62895006",
"0.62885356",
"0.6281629",
"0.6231168",
"0.62273085",
"0.62248147",
"0.62174183",
"0.6214041",
"0.6202756",
"0.6200208"
] | 0.7468441 | 0 |
Patch project resource method to have certain return type messages. | def _PatchProjectReturnType(self):
projects_method = registry.GetMethod('cloudresourcemanager.projects',
'list')
self.StartObjectPatch(projects_method, 'GetResponseType',
return_value=ProjectsMessage) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_patch_project(self):\n pass",
"def post_project_update(self, resource_id, resource_dict):\n pass",
"def test_projects_patch(self):\n project = Project()\n response = self.client.open('/project-tracker/projects',\n method='PATCH',\n data=json.dumps(project),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))",
"def api_response(project_type, result):\n if project_type not in project_types:\n return invalid_project_tye_msg(project_type)\n return result",
"def customize(self):\n TICKET = self.env['anytracker.ticket']\n project = TICKET.browse(self.env.context.get('project_id'))\n if not project:\n return\n new_method = self.copy({'project_id': project.id})\n project.write({'method_id': new_method.id})\n return new_method",
"def patch(self , request , pk = None ):\r\n return Response({'method':'patch'})",
"def pre_project_update(self, resource_id, resource_dict):\n pass",
"def test_patch_project(self):\n self.assertEqual(Project.objects.count(), 2)\n\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {\n 'title': UPDATED_TITLE,\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n }\n response = self.request_knox(url, method='PATCH', data=patch_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.assertEqual(Project.objects.count(), 2)\n\n self.project.refresh_from_db()\n model_dict = model_to_dict(self.project)\n model_dict['readme'] = model_dict['readme'].raw\n expected = {\n 'id': self.project.pk,\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': self.category.pk,\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'full_title': self.category.title + ' / ' + UPDATED_TITLE,\n 'has_public_children': False,\n 'sodar_uuid': self.project.sodar_uuid,\n }\n self.assertEqual(model_dict, expected)\n self.assertEqual(self.project.get_owner().user, self.user_owner)\n\n expected = {\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'roles': {\n str(self.category.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner_cat),\n 'inherited': True,\n 'sodar_uuid': str(self.category.get_owner().sodar_uuid),\n },\n str(self.project.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner),\n 'inherited': False,\n 'sodar_uuid': str(self.project.get_owner().sodar_uuid),\n },\n },\n 'sodar_uuid': str(self.project.sodar_uuid),\n }\n self.assertEqual(json.loads(response.content), expected)",
"def test_modify_response_descriptor_projects_release_release_resource(self):\n pass",
"def test_project_resource_methods(self, mock_url, resource_name, single_name):\n business_id = 1234\n resource_id = 2345\n resource_ = getattr(self.freshBooksClient, resource_name)\n\n list_response = {resource_name: [], \"meta\": {\"page\": 1, \"pages\": 0, \"per_page\": 15, \"total\": 0}}\n single_response = {single_name: {}}\n\n with patch.object(ProjectsResource, \"_request\", return_value=list_response) as mock_request:\n resource_.list(business_id)\n mock_request.assert_called_with(\"some_url\", HttpVerbs.GET)\n\n with patch.object(ProjectsResource, \"_request\", return_value=single_response) as mock_request:\n resource_.get(business_id, resource_id)\n mock_request.assert_called_with(\"some_url\", HttpVerbs.GET)\n\n resource_.create(business_id, {})\n mock_request.assert_called_with(\"some_url\", HttpVerbs.POST, data={single_name: {}})\n\n resource_.update(business_id, resource_id, {})\n mock_request.assert_called_with(\"some_url\", HttpVerbs.PUT, data={single_name: {}})\n\n resource_.delete(business_id, resource_id)\n mock_request.assert_called_with(\"some_url\", HttpVerbs.DELETE)",
"def patch(self, project_id):\n authenticated_user_id = token_auth.current_user()\n if not ProjectAdminService.is_user_action_permitted_on_project(\n authenticated_user_id, project_id\n ):\n return {\n \"Error\": \"User is not a manager of the project\",\n \"SubCode\": \"UserPermissionError\",\n }, 403\n try:\n project_dto = ProjectDTO(request.get_json())\n project_dto.project_id = project_id\n project_dto.validate()\n except DataError as e:\n current_app.logger.error(f\"Error validating request: {str(e)}\")\n return {\"Error\": \"Unable to update project\", \"SubCode\": \"InvalidData\"}, 400\n\n try:\n ProjectAdminService.update_project(project_dto, authenticated_user_id)\n return {\"Status\": \"Updated\"}, 200\n except InvalidGeoJson as e:\n return {\"Invalid GeoJson\": str(e)}, 400\n except ProjectAdminServiceError as e:\n return {\"Error\": str(e).split(\"-\")[1], \"SubCode\": str(e).split(\"-\")[0]}, 403",
"def test_patch_a_resource_that_exists():\n pass",
"def patch(self, request, pk=None):\n return Response({'message': 'patch'})",
"def test_patch_project_type_change(self):\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {'type': PROJECT_TYPE_CATEGORY}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 400, msg=response.content)",
"def patch(self, request, pk=None):\n\n return Response({'method': 'patch'})",
"def patch(self, request , pk=None):\n return Response({'message':'PATCH'})",
"def test_patch_a_resource_that_does_not_exist():\n pass",
"def test_update_project(self):\n pass",
"def test_update_project(self):\n pass",
"def patch(self, *args, **kwargs):\n return self.handle_patch_request()",
"def process_resource_api(self, resources, resource, api, context):\n pass",
"def patch_resource(self, **kwargs):\n results = self.api.action.resource_patch(**kwargs)\n self.get_ckan_metadata(True)\n if 'upload' in kwargs:\n resource_id = results['id'] if 'id' in results else kwargs['id']\n self._import_resource_to_cache(kwargs['upload'], resource_id)\n return results",
"def test_patch_resource_group(self):\n pass",
"def PatchBookResourceReturnTypes(self):\n self._PatchProjectReturnType()\n shelves_method = registry.GetMethod('example.projects.shelves', 'list')\n self.StartObjectPatch(shelves_method, 'GetResponseType',\n return_value=ShelvesMessage)\n self._PatchBookReturnType()",
"def patch(self,request,pk=None):\n return Response({'method':'Patch'})",
"def patch(self,request,pk = None):\n return Response({'method': 'PATCH'})",
"def post_project_create(self, resource_dict):\n pass",
"def patch(self):\n return super(TenderAwardDocumentResource, self).patch()",
"def patch(self, request, pk=None):\n return Response({'method': 'PATCH'})",
"def patch(self, request, pk=None):\n return Response({'method': 'PATCH'})"
] | [
"0.6162628",
"0.59058964",
"0.58300334",
"0.5745906",
"0.5632001",
"0.5591009",
"0.55600244",
"0.5554185",
"0.55276304",
"0.548513",
"0.54740405",
"0.54582417",
"0.5452977",
"0.5446676",
"0.5445278",
"0.5433638",
"0.540306",
"0.53596485",
"0.53596485",
"0.534254",
"0.53421736",
"0.53151983",
"0.5282412",
"0.52717453",
"0.52675027",
"0.5245369",
"0.52371144",
"0.52191067",
"0.521328",
"0.521328"
] | 0.6952836 | 0 |
Patch book resource method to have certain return type messages. | def _PatchBookReturnType(self):
books_method = registry.GetMethod('example.projects.shelves.books', 'list')
self.StartObjectPatch(books_method, 'GetResponseType',
return_value=BooksMessage) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def PatchBookResourceReturnTypes(self):\n self._PatchProjectReturnType()\n shelves_method = registry.GetMethod('example.projects.shelves', 'list')\n self.StartObjectPatch(shelves_method, 'GetResponseType',\n return_value=ShelvesMessage)\n self._PatchBookReturnType()",
"def test_modify_book_title_method(self):\n # modify book title\n book_id = 2\n book_title = \"Lost Girl\"\n result = self.book.modify_book_title(book_id, book_title)\n self.assertEqual(result, [{\"Title\": \"Lost Girl\",\n \"Author\": \"David Archer\",\n \"Copies\": 3}])\n\n # modify book author\n book_author = \"Paulo Coelho\"\n result = self.book.modify_book_author(2, book_author)\n self.assertEqual(result, [{\"Title\": book_title,\n \"Author\": book_author,\n \"Copies\": 3}])\n\n # modify book copies\n copies = 5\n result = self.book.modify_book_copies(book_id, copies)\n self.assertEqual(result, [{\"Title\": book_title,\n \"Author\": book_author,\n \"Copies\": copies}])",
"def patch(self):\n return super(TenderAwardContractDocumentResource, self).patch()",
"def patch(self):\n return super(TenderAwardDocumentResource, self).patch()",
"def test_partly_update_book(self):\n data = {'isbn':'96712116-2'}\n response = self.client.patch(self.book.get_absolute_url(), data, format='json', content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n response = self.client.get(self.book.get_absolute_url())\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertContains(response, '96712116-2')",
"def test_get_specific_book_method(self):\n # When book id is int\n book_id = 1\n result = self.book.get_book(book_id)\n self.assertEqual(result, [{\"Title\": \"Harry Potter and Chamber of Secrets\",\n \"Author\": \"J.K Rowling\",\n \"Copies\": 2}])",
"def test_book_edit_for_manager(self):\n client = APIClient()\n client.login(username=self.manager.username, password=\"salam*123\")\n response = client.get(\"/books/4/\")\n self.assertNotEqual(response.json()[\"copies\"], 2)\n response = client.patch(\"/books/4/\", data={\"copies\": 2})\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"copies\"], 2)",
"def test_book_edit_for_student(self):\n client = APIClient()\n client.login(username=self.students[0].username, password=\"salam*123\")\n response = client.patch(\"/books/4/\", data={\"copies\": 2})\n self.assertEqual(response.status_code, 403)",
"def give_book(self):\n pass",
"def update_book(isbn):\n put_req = request.get_json()\n if not (Book.replace_book(isbn, put_req['name'], put_req['price'])):\n invalid_book_object_error_msg = {\n \"error\": \"Invalid book object update passed in PUT request\",\n \"helpString\": \"Valid data format is {'name': 'bookname', 'price': 7.9, 'isbn': 12345678}\"\n }\n # Because invalidBookObjectErrorMsg is a dictionary, need to convert it into a json object.\n # Set Header info for location (location of endpoint in request)\n return Response(json.dumps(invalid_book_object_error_msg), status=406, mimetype='application/json')\n # See https://www.flaskapi.org/api-guide/status-codes/ for flask API\n # response codes\n response = Response(\"\", 204, mimetype='application/json')\n response.headers['Location'] = \"/books/\" + str(isbn)\n return response",
"def patch(self , request , pk = None ):\r\n return Response({'method':'patch'})",
"def test_edit_resource(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n s1.edit_resource(b1, \"Animal Farm\")\n self.assertEqual(b1.get_title(), \"1984\")\n s1.add_resource(b1)\n s1.edit_resource(b1, \"Animal Farm\")\n self.assertEqual(b1.get_title(), \"Animal Farm\")",
"def post(self):\n order = None\n args = book_return_parser.parse_args()\n order_id = args['order_id']\n copy_id = args['copy_id']\n if order_id is not None and copy_id is not None:\n return 'Only one parameter is needed', 400\n if order_id is not None:\n order = db.session.query(models.Order).filter_by(id=order_id).first()\n if copy_id is not None:\n order = db.session.query(models.Order).filter_by(copy=copy_id).first()\n if order is None:\n return 'Please provide a correct order_id or copy_id for the book', 404\n copy = db.session.query(models.Copy).filter_by(id=order.copy).first()\n if copy is None:\n return 'Copy of the book does not exist', 404\n order = change_order_status(order.id, ORDER_STATUS_COMPLETED)\n copy.status = BOOK_COPY_STATUS_AVAILABLE\n db.session.commit()\n return {'order': order.serialize(),\n 'message': 'Book returned, Order completed!'}, 200",
"def test_api_update_book_is_not_json(self):\n\n\t\tlogin_data = self.login_test_user()\n\t\ttoken = login_data['auth_token']\n\n\t\tbook = self.client.put(\n\t\t\tf'{URL_BOOKS}/1',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='text',\n\t\t\tdata=json.dumps(\n\t\t\t\tdict(\n\t\t\t\t\ttitle='updated book'\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\n\t\tbook_res = json.loads(book.data.decode())\n\t\tself.assertTrue(book_res['message'] == 'Content-type must be json')\n\t\tself.assertTrue(book_res['status'] == 'error')\n\t\tself.assertTrue(book.status_code, 202)",
"def return_book_process(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n m1 = Members(\"Richard\", \"Blackmore\", \"14-04-1945\", \"Weston\")\n s1.add_resource(b1)\n s1.lending_process(b1, m1)\n s1.remove_resource(b1)\n #prove that the return of a book doen't take place if not in the ctalogue, \n #but also that in the remove_resource method should be added somthing to remeove that book from all borrowers\n #or doesn't allow the operation if not returned\n s1.return_book_process(b1, False, \"none\")\n self.assertEqual(b1.get_borrower(), m1)\n self.assertIn(b1, m1.get_borrowed_books())\n s1.add_resource(b1)\n self.assertEqual(b1.get_borrower(), None)\n self.assertNotIn(b1, m1.get_borrowed_books())",
"def test_api_can_update_book(self):\n\n\t\t# create book\n\t\tadd_book = {\n\t\t\t'title': 'Hello Books',\n\t\t\t'isbn': '5698745124'\n\t\t}\n\t\tlogin_data = self.login_test_user()\n\t\ttoken = login_data['auth_token']\n\t\tres = self.client.post(\n\t\t\tf'{URL_BOOKS}',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(add_book)\n\t\t)\n\n\t\t# update book\n\t\tbook = self.client.put(\n\t\t\tf'{URL_BOOKS}/1',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(\n\t\t\t\tdict(\n\t\t\t\t\ttitle='updated book'\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\n\t\tbook_res = json.loads(book.data.decode())\n\t\tself.assertTrue(book_res['title'] == 'updated book')",
"def test_update_book(self):\n book_information = self.books_from_json[0]\n book_id = '60773a16cb838494e13d3652'\n self.books.update = MagicMock(return_value=None) # success on update\n update_book = self.books.update_details(book_id, self.books_from_json[0])\n self.assertEqual(\"Mock Book updated!\", update_book['flash_message'])",
"def patch(self, request , pk=None):\n return Response({'message':'PATCH'})",
"def patch(self, request, pk=None):\n return Response({'message': 'patch'})",
"def patch(self, *args, **kwargs):\n return self.handle_patch_request()",
"def patch(self, request, pk=None):\n\n return Response({'method': 'patch'})",
"def test_post_book_method(self):\n book_id = 4\n title = \"The Whistler\"\n author = \"John Grisham\"\n copies = 3\n result = self.book.post_book(book_id, title, author, copies)\n self.assertEqual(result, [{\"Title\": \"The Whistler\",\n \"Author\": \"John Grisham\",\n \"Copies\": 3}])",
"def test_api_update_book_validation_error(self):\n\n\t\t# create book\n\t\tadd_book = {\n\t\t\t'title': 'Hello Books',\n\t\t\t'isbn': '5698745124'\n\t\t}\n\t\tlogin_data = self.login_test_user()\n\t\ttoken = login_data['auth_token']\n\t\tres = self.client.post(\n\t\t\tf'{URL_BOOKS}',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(add_book)\n\t\t)\n\t\tempty_book = {}\n\t\t# update book\n\t\tbook = self.client.put(\n\t\t\tf'{URL_BOOKS}/1',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(\n\t\t\t\tempty_book\n\t\t\t)\n\t\t)\n\n\t\tbook_res = json.loads(book.data.decode())\n\t\tself.assertIn('Nothing was changed', str(book_res))",
"def put(self, book_id):\n a_book = query_book_by_id(book_id)\n if a_book is None:\n return 'Book does not exit', 404\n body = request.get_json()\n a_book.parse_body(body)\n db.session.add(a_book)\n db.session.commit()\n return a_book.serialize(), 200",
"def patch(self,request,pk=None):\n return Response({'method':'Patch'})",
"def test_patch_a_resource_that_exists():\n pass",
"def books_patch_delete(request, pk):\n try:\n snippet = Books.objects.get(url=pk)\n except Books.DoesNotExist:\n return Response(status=status.HTTP_404_NOT_FOUND)\n\n if request.method == 'PUT':\n serializer = BooksSerializers(snippet, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'DELETE':\n snippet.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)",
"def patch(self,request,pk = None):\n return Response({'method': 'PATCH'})",
"def test_get_books_method(self):\n result = self.book.get_books()\n self.assertTrue(result)",
"def test_get_book(self):\n\n response = self.client.post(\n '/api/v1/books', data=json.dumps(add_book), content_type='application/json',\n headers=self.get_admin_token())\n response1 = self.client.get(\n '/api/v1/books/NJCF4001', content_type='application/json', headers=self.get_admin_token())\n result = json.loads(response1.data.decode())\n self.assertEqual(result['message'],\n 'Retrieved successfully')\n assert response1.status_code == 200"
] | [
"0.6569911",
"0.62493795",
"0.5958584",
"0.5953067",
"0.5759714",
"0.5613411",
"0.55555844",
"0.55500233",
"0.5498806",
"0.5473793",
"0.5459403",
"0.5431096",
"0.54184985",
"0.5395539",
"0.53882766",
"0.53436124",
"0.5313809",
"0.53026146",
"0.528689",
"0.5286017",
"0.5285546",
"0.52677965",
"0.52552915",
"0.5184592",
"0.51796657",
"0.51789224",
"0.5165341",
"0.51587766",
"0.51469266",
"0.5142488"
] | 0.758044 | 0 |
Patch each resource method to have certain return type messages. | def PatchBookResourceReturnTypes(self):
self._PatchProjectReturnType()
shelves_method = registry.GetMethod('example.projects.shelves', 'list')
self.StartObjectPatch(shelves_method, 'GetResponseType',
return_value=ShelvesMessage)
self._PatchBookReturnType() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def methods(domain, resource, pathtype, param=None):\n ret = {}\n if pathtype == 'additional_lookup':\n method = 'GET'\n ret[method] = {}\n ret[method]['label'] = get_label(domain, pathtype, method)\n ret[method]['params'] = schema(resource, param)\n else:\n key = '{0}_methods'.format(pathtype)\n methods = resource[key]\n for method in methods:\n ret[method] = {}\n ret[method]['label'] = get_label(domain, pathtype, method)\n ret[method]['params'] = []\n if method == 'POST':\n ret[method]['params'].extend(schema(resource))\n elif method == 'PATCH':\n ret[method]['params'].append(identifier(resource))\n ret[method]['params'].extend(schema(resource))\n elif pathtype == 'item':\n ret[method]['params'].append(identifier(resource))\n return ret",
"def _PatchBookReturnType(self):\n books_method = registry.GetMethod('example.projects.shelves.books', 'list')\n self.StartObjectPatch(books_method, 'GetResponseType',\n return_value=BooksMessage)",
"def _colorify_methods(methods):\n replacement = {\n 'GET': '\\033[1;34mGET\\033[22;39m', # GET: bold-blue\n 'PUT': '\\033[1;35mPUT\\033[22;39m', # PUT: bold-magenta\n 'DELETE': '\\033[1;31mDELETE\\033[22;39m', # DELETE: bold-red\n 'POST': '\\033[1;33mPOST\\033[22;39m', # POST: bold-yellow\n }\n return ' '.join([replacement.get(method, method)\n for method in methods])",
"def _apply_monkey_patches():\n _monkey_patch_returns()\n _monkey_patch_see_also()",
"def patch_all():\n to_patch = ['str'] if IS_PY3 else ['str', 'unicode']\n to_patch = [getattr(__builtin__, klass) for klass in to_patch]\n for klass in to_patch:\n for meth in methods_to_patch:\n curse(klass, meth, getattr(PatchClass, meth))",
"def decorate_HTTP_verb_method(method):\n @functools.wraps(method)\n def wrapper(self, RIC_base_uri, **kwargs):\n partition = kwargs.pop('partition', '')\n name = kwargs.pop('name', '')\n sub_path = kwargs.pop('subPath', '')\n suffix = kwargs.pop('suffix', '')\n uri_as_parts = kwargs.pop('uri_as_parts', False)\n if uri_as_parts:\n REST_uri = generate_bigip_uri(RIC_base_uri, partition, name,\n sub_path, suffix, **kwargs)\n else:\n REST_uri = RIC_base_uri\n pre_message = \"%s WITH uri: %s AND suffix: %s AND kwargs: %s\" %\\\n (method.__name__, REST_uri, suffix, kwargs)\n logging.debug(pre_message)\n response = method(self, REST_uri, **kwargs)\n post_message =\\\n \"RESPONSE::STATUS: %s Content-Type: %s Content-Encoding:\"\\\n \" %s\\nText: %r\" % (response.status_code,\n response.headers.get('Content-Type', None),\n response.headers.get('Content-Encoding', None),\n response.text)\n logging.debug(post_message)\n if response.status_code not in range(200, 207):\n error_message = '%s Unexpected Error: %s for uri: %s\\nText: %r' %\\\n (response.status_code,\n response.reason,\n response.url,\n response.text)\n raise iControlUnexpectedHTTPError(error_message, response=response)\n return response\n return wrapper",
"def process_resource_api(self, resources, resource, api, context):\n pass",
"def Methods(self):\n pass",
"def _check_existing_methods(api_id, resource_id, resource_path, resource_meta,\n enable_cors, authorizers_mapping, api_resp=None,\n api_integration_resp=None):\n for method in resource_meta:\n if method == 'enable_cors':\n continue\n if _API_GATEWAY_CONN.get_method(api_id, resource_id, method):\n _LOG.info('Method %s exists.', method)\n continue\n else:\n _LOG.info('Creating method %s for resource %s...',\n method, resource_id)\n _create_method_from_metadata(\n api_id=api_id,\n resource_id=resource_id,\n resource_path=resource_path,\n method=method,\n method_meta=resource_meta[method],\n authorizers_mapping=authorizers_mapping,\n api_resp=api_resp,\n api_integration_resp=api_integration_resp,\n enable_cors=enable_cors)\n if enable_cors and not _API_GATEWAY_CONN.get_method(api_id,\n resource_id,\n 'OPTIONS'):\n _LOG.info('Enabling CORS for resource %s...', resource_id)\n _API_GATEWAY_CONN.enable_cors_for_resource(api_id, resource_id)",
"def test_return_types():\n my_method = SGMethod(\"Test\")\n \n my_method.return_type = \"SoundEffect\"\n assert my_method.return_type == \"SoundEffect\"",
"def try_modify_methods(self, method):\n if isinstance(self.data, _array_types):\n if method in _list_grow_method_names:\n result = partial(list_grow_method, self, method)\n return True, result\n elif method in _list_modify_method_names:\n result = partial(list_modify_method, self, method)\n return True, result\n else:\n return False, None\n elif isinstance(self.data, dict):\n if method in _dict_modify_method_names:\n result = partial(dict_modify_method, self, method)\n return True, result\n else:\n return False, None\n else:\n return False, None",
"def test_resource_actions(self):\n test_resource = ResourceTypeName.get()\n expected_actions = sorted(['rt:get', 'rt:put', 'rt:update', 'rt:delete'])\n self.app.post(\n f'/v1/resource/{test_resource}',\n data=json.dumps({'actions': expected_actions}),\n headers=admin_headers)\n\n # Get the actions for a resource type\n resp = self.app.get(f'/v1/resource/{test_resource}/actions', headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n actions = json.loads(resp.body)['actions']\n self.assertEqual(actions, expected_actions)\n\n # Delete actions from a resource type\n modify_actions = expected_actions[-2:]\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions[:2])\n\n # OK returned when deleting actions not part of a resource type\n resp = self.app.delete(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n\n # Put actions into a resource type\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n resp = self.app.get(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n actions = sorted(json.loads(resp.body)['actions'])\n self.assertEqual(actions, expected_actions)\n\n # OK returned when putting actions already a part of a resource type.\n resp = self.app.put(f'/v1/resource/{test_resource}/actions',\n data=json.dumps({'actions': modify_actions}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)",
"def PatchModels(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def _check_existing_methods(api_id, resource_id, resource_path, resource_meta,\n enable_cors, authorizers_mapping, api_resp=None,\n api_integration_resp=None):\n for method in resource_meta:\n if method == 'enable_cors':\n continue\n if _API_GATEWAY_CONN.get_method(api_id, resource_id, method):\n _LOG.info('Method %s exists.', method)\n continue\n else:\n _LOG.info('Creating method %s for resource %s...',\n method, resource_id)\n _create_method_from_metadata(\n api_id=api_id,\n resource_id=resource_id,\n resource_path=resource_path,\n method=method,\n method_meta=resource_meta[method],\n authorizers_mapping=authorizers_mapping,\n api_resp=api_resp,\n api_integration_resp=api_integration_resp,\n enable_cors=enable_cors)\n if enable_cors and not _API_GATEWAY_CONN.get_method(api_id,\n resource_id,\n 'OPTIONS'):\n _LOG.info('Enabling CORS for resource %s...', resource_id)\n _API_GATEWAY_CONN.enable_cors_for_resource(api_id, resource_id)\n\n @unpack_kwargs\n def _create_resource_from_metadata(api_id, resource_path, resource_meta,\n authorizers_mapping):\n _API_GATEWAY_CONN.create_resource(api_id, resource_path)\n _LOG.info('Resource %s created.', resource_path)\n resource_id = _API_GATEWAY_CONN.get_resource_id(api_id, resource_path)\n enable_cors = resource_meta.get('enable_cors')\n for method in resource_meta:\n try:\n if method == 'enable_cors' or method not in SUPPORTED_METHODS:\n continue\n\n method_meta = resource_meta[method]\n _LOG.info('Creating method %s for resource %s...',\n method, resource_path)\n _create_method_from_metadata(\n api_id=api_id,\n resource_id=resource_id,\n resource_path=resource_path,\n method=method,\n method_meta=method_meta,\n enable_cors=enable_cors,\n authorizers_mapping=authorizers_mapping)\n except Exception as e:\n _LOG.error('Resource: {0}, method {1}.'\n .format(resource_path, method), exc_info=True)\n raise e\n _LOG.info('Method %s for resource %s created.', method,\n resource_path)\n # create enable cors only after all methods in resource created\n if enable_cors:\n _API_GATEWAY_CONN.enable_cors_for_resource(api_id, resource_id)\n _LOG.info('CORS enabled for resource %s', resource_path)",
"def handle_patch(cls, **kwargs):\n raise NotImplementedError",
"def call_api(method, **kw):\n # Get the resource type on which we want to operate\n resource = kw.pop(\"resource\")\n\n if method == \"GET\":\n return resource.get(**kw)\n elif method == \"CREATE\":\n return resource.create(**kw)\n elif method == \"PATCH\":\n return resource.patch(**kw)\n elif method == \"DELETE\":\n return resource.delete(**kw)\n elif method == \"POST\":\n return resource.post(**kw)",
"def methods(self) -> List[str]:\n # TODO(*): Consider make this an abstractmethod.\n return [\"fit\", \"predict\"]",
"def test_patch_a_resource_that_exists():\n pass",
"def all_rest_methods(decorator):\n\n# ADD OTHER METHODS HERE, IF SOME ARE MISSING\n api_methods = ['get', 'post', 'put', 'patch', 'delete'] # , 'search']\n\n def decorate(cls):\n # there's propably a better way to do this\n for attr in cls.__dict__:\n # Check if method and in it's in my list\n if attr in api_methods and callable(getattr(cls, attr)):\n logger.debug(\"Decorating %s as api method\"\n % (cls.__name__ + \".\" + attr))\n setattr(cls, attr, decorator(getattr(cls, attr)))\n return cls\n return decorate",
"def testUnknownHttpMethod(self):\n api = Api({'name': 'dummy', 'version': 'v1', 'resources': {}})\n unused_resource = Resource(api, 'temp', {'methods': {}})\n self.assertRaises(ApiException,\n Method, api, 'bad', {\n 'rpcMethod': 'rpc',\n 'httpMethod': 'Not GET/POST/PUT/DELETE',\n 'parameters': {}\n })",
"def PatchCollectors(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def test_lti20_unsupported_method_error(self):\r\n self.setup_system_xmodule_mocks_for_lti20_request_test()\r\n mock_request = self.get_signed_lti20_mock_request(self.GOOD_JSON_PUT)\r\n for bad_method in self.UNSUPPORTED_HTTP_METHODS:\r\n mock_request.method = bad_method\r\n response = self.xmodule.lti_2_0_result_rest_handler(mock_request, \"user/abcd\")\r\n self.assertEqual(response.status_code, 404)",
"def xmlrpc_methods():",
"def stubify_resource(resource_class):\n if resource_class in REPLACED_RESOURCES:\n return\n\n REPLACED_RESOURCES.add(resource_class)\n for attr_name, attr in resource_class.__dict__.iteritmes():\n if attr_name not in DONT_STUB and inspect.isfunction(attr_name):\n setattr(resource_class, attr_name, stubify_method(attr))\n\n # Stub the __init__\n resource_class.DATA_CLASS = None\n resource_class.__init__ = stub_init\n\n # Stub the __getattr__\n resource_class.__getattr__ = mock.MagicMock()\n\n for resource_base in resource_class.__bases__:\n if resource_base not in (BaseResource, Model):\n stubify_resource(resource_base)",
"def all_methods(self, req):\n for provider in self.method_handlers:\n for candidate in provider.xmlrpc_methods():\n # Expand all fields of method description\n yield Method(provider, *candidate)",
"def PatchAnnotations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def PatchConcepts(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def apimethod(func):\n def wrapper(self, *args, **kwargs):\n # Debug\n class_name = self.__class__.__name__\n method_name = func.__name__.upper()\n logger.info(\"[Class: %s] %s request\" % (class_name, method_name))\n\n # Call the parse method\n self.apply_parameters()\n self.parse()\n # Call the wrapped function\n try:\n out = func(self, *args, **kwargs)\n except KeyError as e:\n error = str(e).strip(\"'\")\n logger.critical(\"Key error: %s\" % error)\n if error == \"security\":\n return {'message': \"FAIL: problems with auth check\"}, \\\n hcodes.HTTP_BAD_NOTFOUND\n raise e\n except TypeError as e:\n logger.warning(e)\n error = str(e).strip(\"'\")\n logger.critical(\"Type error: %s\" % error)\n if \"required positional argument\" in error:\n return {'message': \"FAIL: missing argument\"}, \\\n hcodes.HTTP_BAD_REQUEST\n raise e\n\n # DO NOT INTERCEPT 404 or status from other plugins (e.g. security)\n if isinstance(out, Response):\n return out\n\n # BASE STATUS?\n status = hcodes.HTTP_OK_BASIC\n\n # VERY IMPORTANT\n # DO NOT INTERFERE when\n # at some level we already provided the couple out/response\n if isinstance(out, tuple) and len(out) == 2:\n subout, status = out\n out = subout\n\n # Set standards for my response as specified in base.py\n #return marshal(out, self.resource_fields), status\n return out, status\n\n return wrapper",
"def apply_method(self, r, **attr):\n\n output = {}\n\n table, record_id = self.get_target_id()\n if not table:\n r.error(405, \"Anonymizing not configured for resource\")\n if not record_id:\n r.error(400, \"No target record specified\")\n if not self.permitted(table, record_id):\n r.unauthorised()\n\n if r.representation == \"json\":\n if r.http == \"POST\":\n output = self.anonymize(r, table, record_id)\n else:\n r.error(405, current.ERROR.BAD_METHOD)\n else:\n r.error(415, current.ERROR.BAD_FORMAT)\n\n # Set Content Type\n current.response.headers[\"Content-Type\"] = \"application/json\"\n\n return output",
"def _monkey_patch_returns():\n _parse_returns_section = \\\n NumpyDocstring._parse_returns_section\n\n @functools.wraps(NumpyDocstring._parse_returns_section)\n def wrapper(*args, **kwargs):\n out = _parse_returns_section(*args, **kwargs)\n return [line.replace(\":class:\", \":any:\") for line in out]\n\n NumpyDocstring._parse_returns_section = wrapper"
] | [
"0.589502",
"0.5721973",
"0.55744416",
"0.5477759",
"0.5474522",
"0.5434186",
"0.5405362",
"0.5365491",
"0.5357163",
"0.53463465",
"0.52922416",
"0.529191",
"0.5289779",
"0.5280045",
"0.52517265",
"0.52291876",
"0.52218944",
"0.5193731",
"0.5184144",
"0.51712865",
"0.51664895",
"0.51615036",
"0.5156457",
"0.5122003",
"0.51147157",
"0.51097846",
"0.50939685",
"0.5088481",
"0.5088001",
"0.5085789"
] | 0.5930442 | 0 |
Helper function to build return message for fake books List method. | def BuildBooksList(self, books):
return BooksMessage(
books=[BooksMessage.Book(name=book) for book in books]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_message(self):\n\t\tmessage = \"\"\n\t\tlength = len(self.object_list)\n\t\tfor i in range(length - 1):\n\t\t\to = self.object_list[i]\n\t\t\tmessage += self.index_letter_string(i) + o.display_name(True) + \", \"\n\t\tlast_object = self.object_list[length - 1]\n\t\tmessage += self.index_letter_string(length - 1) + last_object.display_name(True) + \", (?)\"\n\t\treturn message",
"def create_messages(application, action, remedy):\n\n messages = [] \n messages.append(\"\"\"Your Resources: </br><pre style=\"margin-left: 40px\">\"\"\" + application + \"</br></pre>\" + action + \"\"\" in AWS. <strong style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;\">\"\"\" + remedy +\"\"\"</strong>\n </td>\n </tr><tr style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;\"><td class=\"content-block\" style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;\" valign=\"top\">\n This message was sent to inform you of changes happening to your resources.\n <ul>\n <li>New instances are auto-tagged with an expiration date, an NT ID, and a patch group if invalid.</li>\n <li>Instances without the necessary tags are notified through email and Slack.</li>\n </ul>\n If you have any further questions, please reply to this email.\"\"\")\n \n messages.append(\"Your Resources:\\n\\n\" + application + \"\\n\\n\" + action + \" in AWS. \" + remedy + \"\\n\" + \n (\"\\nThis message was sent to inform you of changes happening to your resources.\\n\"\n \"\\nNew instances are auto-tagged with an expiration date, an NT ID, and a patch group if invalid.\"\n \"Instances without Owner Mail and Owner Team tags are notified through email and slack.\\n\"\n \"\\nIf you have any further questions, please reply to this email.\")) \n\n return messages",
"def list_messages(self):",
"def test_message_list():",
"def __str__(self):\n return_str = \"\"\n args_str = \",\" + \",\".join(self.arguments) if self.arguments else \"\"\n if self.is_reply():\n args = (\n self.message_type,\n self.name,\n self.code,\n args_str\n )\n return_str = f\"{args[0]}{args[1]},{args[2]}{args[3]}\"\n else:\n args = (\n self.message_type,\n self.name,\n args_str\n )\n return_str = f\"{args[0]}{args[1]}{args[2]}\"\n return return_str + '\\r\\n'",
"def getMessage() -> str:\n pass",
"def create_help_message():\r\n help_message = \"Improve your vocabulary using *VocabBot*! \\n\\n\" \\\r\n \"*Created By* - _Vishesh Vishwakarma_ \\n\\n\"\\\r\n \"You can ask the bot the below listed things: \\n\"\\\r\n \"*meaning* - type the word \\n\"\\\r\n \"*example* - type the word \\n\"\\\r\n \"*synonyms* - type the word \\n\"\\\r\n \"*antonyms* - type the word \\n\"\r\n return help_message",
"def __generate_author_string__(self, list_of_authors):\n author_string = \"\"\n return author_string.join(list_of_authors)",
"def getMessage():\n return message",
"def display_book(self):\n print(\"List of books available is: \")\n for book in books_list :\n print(\"- \",book)",
"def book(args: list, update: Update) -> None:\n\n book_list = []\n if len(args) > 1:\n update.message.reply_text('fetching books, this may take a while...')\n book_list = scrape(' '.join(args[1:]))\n update.message.reply_text(f'found {len(book_list)} books')\n counter = 0\n msg = ''\n\n if len(book_list) > 0:\n for book in book_list:\n msg = msg + f'{book.Title} - {book.Author}\\n'\n counter += 1\n if counter == 5:\n msg = msg + '...'\n break\n update.message.reply_text(msg)\n\n else:\n update.message.reply_text(\n 'please add the name of the book after /book')",
"def test_msgs():\n msgs = [\n \"Hi, how are you doing today? Do you want to get a beer? See you soon!\",\n \"If I can't let it go out of my mind?\",\n \"I would lie to you but never for you.\",\n \"Man I'm so bored. Let's do something fun. Call me.\",\n \"Hey babe, somebody told me you're not coming home tonight.\",\n \"What Katie did sometimes makes me wonder.\",\n \"All the time I lie awake questioning my decision.\",\n \"I am what I am. Love me.\",\n \"I love New York! Let's go there! \",\n \"So are you coming now? Let's party until midnight!\",\n \"Happy Valentine's Day!\",\n \"One good thing about music. When it hits you, you feel no pain.\",\n \"Where words fail, music speaks.\",\n \"Life without you is a mistake. Please take me back.\",\n \"I'm glad I have a friend like you.\"\n ]\n return msgs",
"def get_all_message(): \n return \"<br>\".join(messages)",
"def show_books():\n result = {'books': query.get_book_list()}\n return json.dumps(result, ensure_ascii=False)",
"def booksAvailable(books):\n print(\"The following titles are available for purchase: \")\n for book in books:\n print (' -' + book.title())",
"def __str__(self):\n return '- ' + str.join('\\n- ', [self.format_message(record)\n for record in self._messages]) + '\\n'",
"def __str__(self):\n string_to_print = ''\n for i, book in enumerate(self.content, start=1):\n string_to_print += f'{i}) \"{book.title}\" (£{book.price}) was rated {book.rating}/5.\\n'\n return string_to_print",
"def __str__(self):\r\n return list_str_breaks(self.__hobbies)",
"def __str__(self):\n return gettext('List of %s') % self.resource.__name__",
"def __getmessage__():\n\tmsg = \\\n\t\t 'M-SEARCH * HTTP/1.1\\r\\n' \\\n\t\t 'HOST:239.255.255.250:1900\\r\\n' \\\n\t\t 'ST:upnp:rootdevice\\r\\n' \\\n\t\t 'MX:2\\r\\n' \\\n\t\t 'MAN:\"ssdp:discover\"\\r\\n' \\\n\t\t '\\r\\n'\n\n\treturn msg",
"def getMessage(self):\n m = self.messages\n l = len(m)\n if (l == 0):\n return \"\"\n elif (l == 1):\n return self.acronym + \" | \" + m[0]\n else:\n msg = self.acronym + \" | \"\n for i in range(0,l):\n msg += m[i]\n if (i < l-1):\n msg += \" | \" # error message separator\n return msg",
"def __str__(self):\n return self.msg",
"def __str__(self):\n return self.msg",
"def book_list(request):\n\tbooks = Book.objects.all()\n\tif not books:\n\t\tmessages.info(request, \"There is no book registered in the current system\")\n\treturn render(request, 'BookManagement/book_list.html', {'books': books})",
"def __str__(self) -> str:\n return f\"This book is named '{self.title}', written by '{self.author}', published by '{self.publisher}', \" \\\n f\"category of '{self.category}', subject of '{self.subject}', and currently sits on shelf: {self.shelf}.\"",
"def _PatchBookReturnType(self):\n books_method = registry.GetMethod('example.projects.shelves.books', 'list')\n self.StartObjectPatch(books_method, 'GetResponseType',\n return_value=BooksMessage)",
"def gen_stdout_test_msg(bibfile_data, verbose=False):\n msg_list = [bibfile.test_msg(verbose) for bibfile in bibfile_data]\n msg = \"\\n\".join(msg_list)\n return msg",
"def __str__(self):\n return f'{self.message} {self.description}'",
"def __repr__(self) -> str:\n return f\"Book('{self.author}', '{self.title}', '{self.publisher}', '{self.shelf}', '{self.category}', \" \\\n f\"'{self.subject}')\"",
"def _print_message(self, msg):\n if msg.arguments:\n arg_str = \" \" + \" \".join(msg.arguments)\n else:\n arg_str = \"\"\n\n if msg.mid is not None:\n mid_str = \"[%s]\" % msg.mid\n else:\n mid_str = \"\"\n\n return \"%s%s%s%s\" % (msg.TYPE_SYMBOLS[msg.mtype], msg.name,\n mid_str, arg_str)"
] | [
"0.60950553",
"0.59722215",
"0.5911773",
"0.5757587",
"0.55699974",
"0.55692506",
"0.5565037",
"0.55460674",
"0.55266684",
"0.5493491",
"0.54903764",
"0.54728156",
"0.5435321",
"0.5430397",
"0.54283637",
"0.5424071",
"0.5418549",
"0.5412904",
"0.5397561",
"0.5389206",
"0.5388928",
"0.53816557",
"0.53816557",
"0.5346213",
"0.5345876",
"0.534446",
"0.5343851",
"0.53372025",
"0.53144497",
"0.53123313"
] | 0.7080842 | 0 |
Helper function to build return message for fake shelves List method. | def BuildShelvesList(self, shelves):
return ShelvesMessage(
shelves=[ShelvesMessage.Shelf(name=shelf) for shelf in shelves]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_message(self):\n\t\tmessage = \"\"\n\t\tlength = len(self.object_list)\n\t\tfor i in range(length - 1):\n\t\t\to = self.object_list[i]\n\t\t\tmessage += self.index_letter_string(i) + o.display_name(True) + \", \"\n\t\tlast_object = self.object_list[length - 1]\n\t\tmessage += self.index_letter_string(length - 1) + last_object.display_name(True) + \", (?)\"\n\t\treturn message",
"def create_help_message():\r\n help_message = \"Improve your vocabulary using *VocabBot*! \\n\\n\" \\\r\n \"*Created By* - _Vishesh Vishwakarma_ \\n\\n\"\\\r\n \"You can ask the bot the below listed things: \\n\"\\\r\n \"*meaning* - type the word \\n\"\\\r\n \"*example* - type the word \\n\"\\\r\n \"*synonyms* - type the word \\n\"\\\r\n \"*antonyms* - type the word \\n\"\r\n return help_message",
"def generate_message(self):\n\t\tmsg = \"\"\n\t\tfor idx, player in enumerate(self.players, start=1):\n\t\t\tmsg += f\"Player {idx} - {player.display_name}\\n\"\n\t\tmsg += (\n\t\t\tf\"\\nClick the `Join Game` button to join. Up to {self.max_players} players can join. \"\n\t\t\t\"To start with less than that many, use the `Start Game` button to begin.\"\n\t\t)\n\t\treturn msg",
"def list_messages(self):",
"def __str__(self):\n return '- ' + str.join('\\n- ', [self.format_message(record)\n for record in self._messages]) + '\\n'",
"def create_messages(application, action, remedy):\n\n messages = [] \n messages.append(\"\"\"Your Resources: </br><pre style=\"margin-left: 40px\">\"\"\" + application + \"</br></pre>\" + action + \"\"\" in AWS. <strong style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;\">\"\"\" + remedy +\"\"\"</strong>\n </td>\n </tr><tr style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;\"><td class=\"content-block\" style=\"font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;\" valign=\"top\">\n This message was sent to inform you of changes happening to your resources.\n <ul>\n <li>New instances are auto-tagged with an expiration date, an NT ID, and a patch group if invalid.</li>\n <li>Instances without the necessary tags are notified through email and Slack.</li>\n </ul>\n If you have any further questions, please reply to this email.\"\"\")\n \n messages.append(\"Your Resources:\\n\\n\" + application + \"\\n\\n\" + action + \" in AWS. \" + remedy + \"\\n\" + \n (\"\\nThis message was sent to inform you of changes happening to your resources.\\n\"\n \"\\nNew instances are auto-tagged with an expiration date, an NT ID, and a patch group if invalid.\"\n \"Instances without Owner Mail and Owner Team tags are notified through email and slack.\\n\"\n \"\\nIf you have any further questions, please reply to this email.\")) \n\n return messages",
"def test_message_list():",
"def _repr_(slf):\n title = 'ErrorMsgManager'\n func = lambda s, v: ', '.join(getattr(s, v) or [] if hasattr(s, v) else [])\n key_list = func(slf, '_keys')\n comps = func(slf, '_comps')\n comp_str = '<{0} (comp: [{1}]'.format(title, comps)\n key_str = '; keys: [{0}]'.format(key_list)\n if key_list:\n comp_str += key_str\n\n return '{0})>'.format(comp_str)",
"def getMessage() -> str:\n pass",
"def __str__(self):\n reprStr = 'Help Mario build Iron Man suit!'+'\\n' +'To make the ' + self._name + ',you need:'+'\\n'\n for part in self._supplies:\n reprStr = reprStr + str(part.getCount()) + ' ' + part.getData() + '\\n'\n return reprStr",
"def getMessage():\n return message",
"def getMessage(self):\n m = self.messages\n l = len(m)\n if (l == 0):\n return \"\"\n elif (l == 1):\n return self.acronym + \" | \" + m[0]\n else:\n msg = self.acronym + \" | \"\n for i in range(0,l):\n msg += m[i]\n if (i < l-1):\n msg += \" | \" # error message separator\n return msg",
"def get_message(self, *args, **kwargs):\n\n message = ''\n message += ', '.join([str(key) + ': ' + str(val) for key, val in kwargs.items()]) + '; ' if kwargs else ''\n message += ', '.join(str(val) for val in args) if args else ''\n\n return message",
"def __str__(self):\n return gettext('List of %s') % self.resource.__name__",
"def _print_message(self, msg):\n if msg.arguments:\n arg_str = \" \" + \" \".join(msg.arguments)\n else:\n arg_str = \"\"\n\n if msg.mid is not None:\n mid_str = \"[%s]\" % msg.mid\n else:\n mid_str = \"\"\n\n return \"%s%s%s%s\" % (msg.TYPE_SYMBOLS[msg.mtype], msg.name,\n mid_str, arg_str)",
"def __str__(self):\n return self.msg",
"def __str__(self):\n return self.msg",
"def display_help_message():\n return lambda_response(None, {\n \"text\": \"\"\"\n/gauges list - list favorite gauges\n/gauges add USGS_SITE_NUMBER RIVER_DESCRIPTION - add gauge to list of favorite gauges\n/gauges check USGS_SITE_NUMBER - display current flow readings for gauge\n \"\"\".strip(),\n })",
"def __getmessage__():\n\tmsg = \\\n\t\t 'M-SEARCH * HTTP/1.1\\r\\n' \\\n\t\t 'HOST:239.255.255.250:1900\\r\\n' \\\n\t\t 'ST:upnp:rootdevice\\r\\n' \\\n\t\t 'MX:2\\r\\n' \\\n\t\t 'MAN:\"ssdp:discover\"\\r\\n' \\\n\t\t '\\r\\n'\n\n\treturn msg",
"def __str__(self):\n return f'{self.message} {self.description}'",
"def generate_plain_mesg(info, open_quests, owner, tags):\n\n msg = (\n \"This email is being sent to {} because that is the owner listed\\n\"\n \"for the systems with open Hermes labors listed below.\\n\\n\"\n \"Due dates, if any, are noted with each quest.\\n\".format(owner)\n )\n msg += (\n \"\\nTo throw an event manually, you can run the following command \"\n \"on a shell server:\"\n \"\\n\\n\"\n \"$ hermes event create [event] --host [hostname].\\n\\n\"\n \"Or you can visit the quests linked below.\\n\\n\".format(\n settings.frontend)\n )\n for quest_id in info[owner]:\n quest = find_quest(open_quests, quest_id)\n if quest:\n msg += (\n \"==[ QUEST {} ]================================\\n\"\n \"CREATOR: {}\\n\"\n ).format(\n quest_id, quest.creator\n )\n if quest.target_time:\n msg += \"DUE: {}\\n\".format(quest.target_time)\n msg += \"DESC: \\\"{}\\\"\\n\".format(textwrap.fill(\n quest.description,\n width=60, subsequent_indent=\"\"\n ))\n msg += \"LINK: {}/v1/quests/{}\\n\\n\".format(\n settings.frontend, quest_id\n )\n else:\n msg += \" Labors not associated with a quest:\\n\\n\"\n\n msg += \"Machines with labors:\\n\"\n\n for hostname in sorted(info[owner][quest_id]):\n if tags[hostname]:\n tags_str = \"{}\".format((\", \".join(tags[hostname])))\n else:\n tags_str = \"no services\"\n msg += \" {} ({})\\n\".format(hostname, tags_str)\n\n msg += \"\\n\\n\"\n\n return msg",
"def description(self) -> str:\n return f\"List of {self.key}\"",
"async def getQuickList(ctx):\n availableCommands = await _generateList(ctx.message.author, True)\n availableCommands.sort(key=lambda x: x['name'])\n return assembleEmbed(\n title=f\"Quick List of Available Commands for {ctx.message.author}\",\n desc=\"To view full list, please type `!list all`.\",\n fields=[{\n \"name\": \"Commands\",\n \"value\": \"\\n\".join([f\"`{c['name']}` - {c['description']}\" for c in availableCommands]),\n \"inline\": False\n }]\n )",
"def test_msgs():\n msgs = [\n \"Hi, how are you doing today? Do you want to get a beer? See you soon!\",\n \"If I can't let it go out of my mind?\",\n \"I would lie to you but never for you.\",\n \"Man I'm so bored. Let's do something fun. Call me.\",\n \"Hey babe, somebody told me you're not coming home tonight.\",\n \"What Katie did sometimes makes me wonder.\",\n \"All the time I lie awake questioning my decision.\",\n \"I am what I am. Love me.\",\n \"I love New York! Let's go there! \",\n \"So are you coming now? Let's party until midnight!\",\n \"Happy Valentine's Day!\",\n \"One good thing about music. When it hits you, you feel no pain.\",\n \"Where words fail, music speaks.\",\n \"Life without you is a mistake. Please take me back.\",\n \"I'm glad I have a friend like you.\"\n ]\n return msgs",
"def __str__(self):\n \n return \"ID: %s, %s (%s)\" % (self.list_id, self.name, self.pieces)",
"def get_message(self):\n message = \"\"\n for line in self.lines:\n message += line.show()\n\n return message",
"def __str__(self):\n return self.message",
"def __str__(self):\n return self.message",
"def __str__(self):\n return self.message",
"def get_shelters():\n shelters = Shelter.get_shelters()\n\n if shelters:\n return jsonify(message=shelters), 200\n else:\n return jsonify(message='Failed to get shelters'), 500"
] | [
"0.61730474",
"0.58105886",
"0.57638717",
"0.5576105",
"0.5555753",
"0.5535798",
"0.5490517",
"0.5455368",
"0.54504293",
"0.54418784",
"0.5399472",
"0.5391958",
"0.53900445",
"0.53672373",
"0.53408873",
"0.5324561",
"0.5324561",
"0.5275806",
"0.52587867",
"0.5243187",
"0.5242269",
"0.52398795",
"0.52236384",
"0.52152133",
"0.52066267",
"0.5183478",
"0.5177769",
"0.5177769",
"0.5177769",
"0.5177187"
] | 0.67915756 | 0 |
Helper function to build the return message for the fake projects List method. | def BuildProjectsList(self, projects):
return ProjectsMessage(
projects=[ProjectsMessage.Project(projectId=project)
for project in projects]
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __str__(self):\n return_string = \"Project: {}-{}\".\\\n format(self.public_information[\"project_id\"],\n self.public_information[\"title\"])\n\n return return_string",
"def multiple_projects():\n message = \"\"\"\nFound {} that match your change.\nSince there is no support for tracking changes in different\nprojects, try to add more attributes to focus on a specific change\nor set of changes.\n\"\"\".format(crayons.red(\"multiple different projects\"))\n return message",
"def __str__(self):\r\n proj_string = \" Project Name: \" + self.__name\r\n proj_string += \"\\n Cover Photo: \" + self.__cover_photo\r\n proj_string += \"\\n Links: \" + self.__links\r\n proj_string += \" Note: \" + self.__note\r\n proj_string += \" Photos: \" + list_str(self.__photos)\r\n\r\n return proj_string",
"def skip_project_message():\n\n return ' - Skip project'",
"def test_list_project_request(self):\n pass",
"def generate_message(self):\n\t\tmsg = \"\"\n\t\tfor idx, player in enumerate(self.players, start=1):\n\t\t\tmsg += f\"Player {idx} - {player.display_name}\\n\"\n\t\tmsg += (\n\t\t\tf\"\\nClick the `Join Game` button to join. Up to {self.max_players} players can join. \"\n\t\t\t\"To start with less than that many, use the `Start Game` button to begin.\"\n\t\t)\n\t\treturn msg",
"def list_message(self):\n\t\tmessage = \"\"\n\t\tlength = len(self.object_list)\n\t\tfor i in range(length - 1):\n\t\t\to = self.object_list[i]\n\t\t\tmessage += self.index_letter_string(i) + o.display_name(True) + \", \"\n\t\tlast_object = self.object_list[length - 1]\n\t\tmessage += self.index_letter_string(length - 1) + last_object.display_name(True) + \", (?)\"\n\t\treturn message",
"def test_list_project(self):\n pass",
"def get_project_list():\n return parse_list_output(Popen(\n 'openstack project list'.split(), stdout=STDOUT, stderr=STDERR\n ).communicate()[0])",
"def list_projects():\n\n cmd = dict()\n cmd[\"type_\"] = \"list_projects\"\n cmd[\"name_\"] = \"\"\n \n s = comm.send_and_receive_socket(cmd)\n\n msg = comm.recv_string(s)\n\n if msg != \"Success!\":\n raise Exception(msg)\n \n json_str = comm.recv_string(s) \n \n s.close() \n\n return json.loads(json_str)[\"projects\"]",
"def __str__(self):\n string = \"\"\"\n Project Factory:\\n\n Directory: {}\\n\n Size: {}\\n\n \"\"\".format(self._directory, len(self.projects))\n return string",
"def test_list_projects(self):\n pass",
"def test_list_projects(self):\n pass",
"def __str__(self):\n return self.project_name",
"def __str__(self):\n return self.project_name",
"def test_show_project_list(self):\n fake_project = FakeResource(1)\n\n # This mocks is faking keystone retrieving a defined list of\n # projects\n patch('identity.views.Keystone.project_list',\n Mock(return_value=[fake_project])).start()\n\n render_mock = patch(\n 'identity.views.ListProjectView.render_to_response').start()\n\n response = self.view(self.request)\n\n render_args = render_mock.call_args[0][0]\n computed = render_args['projects'][0]\n\n self.assertEqual(computed, fake_project.to_dict())",
"def __str__(self):\n return '- ' + str.join('\\n- ', [self.format_message(record)\n for record in self._messages]) + '\\n'",
"def message(blockers):\n if not blockers:\n return ['You have 0 projects blocking you from using Python 3!']\n flattened_blockers = set()\n for blocker_reasons in blockers:\n for blocker in blocker_reasons:\n flattened_blockers.add(blocker)\n need = 'You need {} project{} to transition to Python 3.'\n formatted_need = need.format(len(flattened_blockers),\n 's' if len(flattened_blockers) != 1 else '')\n can_port = ('Of {} {} project{}, {} {} no direct dependencies blocking '\n '{} transition:')\n formatted_can_port = can_port.format(\n 'those' if len(flattened_blockers) != 1 else 'that',\n len(flattened_blockers),\n 's' if len(flattened_blockers) != 1 else '',\n len(blockers),\n 'have' if len(blockers) != 1 else 'has',\n 'their' if len(blockers) != 1 else 'its')\n return formatted_need, formatted_can_port",
"def getProjectName():",
"def getMessage(self):\n m = self.messages\n l = len(m)\n if (l == 0):\n return \"\"\n elif (l == 1):\n return self.acronym + \" | \" + m[0]\n else:\n msg = self.acronym + \" | \"\n for i in range(0,l):\n msg += m[i]\n if (i < l-1):\n msg += \" | \" # error message separator\n return msg",
"def __str__(self):\n return \"{}: {}\".format(self.project, self.id)",
"def do_project_list(cs, args):\n _, projects = cs.projects.list()\n fields = [\n 'project_id',\n 'name',\n 'owner_id',\n 'current_user_role_id',\n 'repo_count',\n 'creation_time',\n 'public',\n ]\n utils.print_list(projects, fields, formatters={}, sortby=args.sortby)",
"def list_messages(self):",
"def get_message(self, *args, **kwargs):\n\n message = ''\n message += ', '.join([str(key) + ': ' + str(val) for key, val in kwargs.items()]) + '; ' if kwargs else ''\n message += ', '.join(str(val) for val in args) if args else ''\n\n return message",
"def getMessage() -> str:\n pass",
"def _ExpectListProjects(self, projects):\n self.mock_projects_client.projects.List.Expect(\n self.projects_messages.CloudresourcemanagerProjectsListRequest(\n filter='lifecycleState:ACTIVE'),\n self.projects_messages.ListProjectsResponse(\n projects=[\n self.projects_messages.Project(\n projectId=p, name='name') for p in projects]))",
"def project_list(ctx, parent_project_id, output_format, columns):\n data = ctx.obj.get_projects(parent_project_id=parent_project_id)\n if output_format == 'table':\n column_names = columns.split(',')\n output_table(column_names, data['project'])\n elif output_format == 'json':\n output_json_data(data)",
"def getMessage():\n return message",
"def _PatchProjectReturnType(self):\n projects_method = registry.GetMethod('cloudresourcemanager.projects',\n 'list')\n self.StartObjectPatch(projects_method, 'GetResponseType',\n return_value=ProjectsMessage)",
"def __str__(self): #XXX Cambiar a __str__(self)\n return _('PPC-Project file') + \" \" + \"\".join(['(', ', '.join(self.filenamePatterns()), ')'])"
] | [
"0.6208802",
"0.61863935",
"0.61587054",
"0.6048111",
"0.6047396",
"0.5995888",
"0.5973389",
"0.59708565",
"0.59399664",
"0.5845293",
"0.5833571",
"0.5793085",
"0.5793085",
"0.5729089",
"0.5729089",
"0.569401",
"0.5687926",
"0.56830055",
"0.56442344",
"0.56329364",
"0.56319696",
"0.5618988",
"0.5614316",
"0.5613478",
"0.5589351",
"0.5586319",
"0.5579609",
"0.5542196",
"0.55317706",
"0.5518932"
] | 0.6687465 | 0 |
Asserts that arg in command has a resource argument completer. | def AssertCommandArgResourceCompleter(self, command, arg):
self.AssertCommandArgCompleter(command, arg, _COMPLETER_MODULE_PATH) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def RunResourceCompleter(self, resource_spec, attribute_name, prefix='',\n expected_completions=None, args=None,\n presentation_name=None, dest=None,\n flag_name_overrides=None, projects=None):\n args = args or {}\n flag_name_overrides = flag_name_overrides or {}\n presentation_name = presentation_name or resource_spec.name\n dest = dest or attribute_name\n expected_completions = expected_completions or []\n presentation_spec = presentation_specs.ResourcePresentationSpec(\n presentation_name,\n resource_spec,\n 'Help text',\n prefixes=False,\n flag_name_overrides=flag_name_overrides)\n resource_info = concept_parsers.ConceptParser([presentation_spec]).GetInfo(\n presentation_spec.name)\n if projects is not None:\n self._ExpectListProjects(projects)\n\n completer = self.Completer(\n completers.CompleterForAttribute(\n resource_spec,\n attribute_name),\n args=args,\n handler_info=resource_info,\n cli=self.cli)\n argument = mock.MagicMock(dest=dest)\n parameter_info = completer.ParameterInfo(self.parsed_args, argument)\n\n completions = completer.Complete(prefix, parameter_info)\n self.assertEqual(expected_completions, completions)",
"def testCmdFilterCompleter(self):\n\n self.inv.ATTRIBUTES = ['apple', 'pear']\n self.assertEqual(self.inv.ATTRIBUTES[0],\n self.inv._CmdFilterCompleter([''], 0))\n self.assertEqual(self.inv.ATTRIBUTES[1],\n self.inv._CmdFilterCompleter([''], 1))\n self.assertEqual(\n None, self.inv._CmdFilterCompleter([''], len(self.inv.ATTRIBUTES)))\n self.assertEqual('pear', self.inv._CmdFilterCompleter(['p'], 0))\n self.assertEqual(None, self.inv._CmdFilterCompleter(['p', 'bogus'], 0))",
"def assertArgumentInCommand(self, mock_cmd, arg):\n command = ' '.join(mock_cmd.call_args[0][0])\n self.assertIn(arg, command)",
"def _validate_delete_command(args):\n return _check_entry_name(args)",
"def _check_valid_command_argument(valid_list, args):\n if args in valid_list:\n return 0\n else:\n return -1",
"def validate_args(args):\n command = args[0]\n args_length = len(args) - 1\n return VALID_COMMANDS[command] == args_length",
"def _validate_add_command(args):\n res = _check_entry_name(args)\n if res != 0:\n return res\n\n return _check_property_arguments(args, args.type)",
"def test_with_empty_watch_args(self):\n with self.assertRaises(SystemExit):\n CliArgs('watch')",
"def _cli_validate(self, settings, remaining_argv):\n return None",
"def _validate_edit_command(args):\n res = _check_entry_name(args)\n if res != 0:\n return res\n\n # If no new type is specified on the command line then leave validation of\n # property arguments to _process_edit_command() when a type of the existing\n # entry is determined.\n if args.type is None:\n return 0\n\n return _check_property_arguments(args, args.type)",
"def _is_valid_passed_arg(self, console: io.IO, step: str,\n value: Optional[str],\n validate: Callable[[str], None]) -> bool:\n pass",
"def is_valid_command(args):\n if args.command is not None:\n return True\n return False",
"def test_validargs(clickrunner):\n for args in maincli.valid_args:\n result = clickrunner.invoke(maincli.entrypoint, args)\n assert result.exit_code == 2\n assert \"Missing command\" in result.output",
"def is_command(oin, env, pred_name: YPredName, arg: Any=None):\n return (env.check_predicate(obj, pred_name, arg) for obj in oin)",
"def test_filename_required():\n with pytest.raises(SystemExit):\n cli.parse_args(['-f'])",
"def test_exclusive_args():\n with pytest.raises(SystemExit):\n cli.parse_args(['-cf', 'filename'])\n with pytest.raises(SystemExit):\n cli.parse_args(['-cf'])",
"def test_with_empty_args(self, mock_builtins_open, capsys, prog, main):\n with pytest.raises(SystemExit):\n main()\n _, err = capsys.readouterr()\n assert err.startswith('usage: {}'.format(prog))",
"def test_object_provision_command_when_invalid_arguments_provided(mock_client):\n from IllumioCore import object_provision_command\n\n args = {\"security_policy_objects\": \"\"}\n err_msg = (\n \"security_policy_objects is a required parameter. Please provide correct value.\"\n )\n\n with pytest.raises(ValueError) as err:\n object_provision_command(mock_client, args)\n\n assert str(err.value) == err_msg",
"def test_no_args(self):\r\n errstring = \"export requires two arguments\"\r\n with self.assertRaisesRegexp(CommandError, errstring):\r\n self.command.handle()",
"def test_with_empty_sample_args(self):\n with self.assertRaises(SystemExit):\n CliArgs('sample')",
"def test_subcommand_arg_name_conflict(self):\n subcommand = {\n var: cli_parser.__dict__.get(var)\n for var in cli_parser.__dict__\n if var.isupper() and var.startswith(\"COMMANDS\")\n }\n for group, command in subcommand.items():\n for com in command:\n conflict_arg = [arg for arg, count in Counter(com.args).items() if count > 1]\n assert (\n [] == conflict_arg\n ), f\"Command group {group} function {com.name} have conflict args name {conflict_arg}\"",
"def test_blank_arguments():\n with pytest.raises(PermissionError):\n Arguments()",
"def test_arg_parser_arg(self):\n\n parsed_args = self.parser.parse_args(['accu', '--debug', '--refresh',\n '-v'])\n\n self.assertEqual(parsed_args.command, 'accu')\n self.assertTrue(parsed_args.debug)\n self.assertEqual(parsed_args.formatter, 'table')\n self.assertTrue(parsed_args.refresh)\n self.assertEqual(parsed_args.verbose_level, 1)",
"def _ValidateArgs(self, args):\n if not (args.IsSpecified('description') or\n args.IsSpecified('security_policy')):\n parameter_names = ['--description', '--security_policy']\n raise exceptions.MinimumArgumentException(\n parameter_names, 'Please specify at least one property to update')",
"def is_command_ancillary(args):\n # pylint: disable=bad-continuation\n if (\n # skip the parent check and only\n # determine if the parameter is present\n is_valid_executes(args, skip=True)\n ):\n return True\n return False",
"def check_argument_in_choices(arg,choices): \n if not arg in choices:\n raise ValueError(\n \"Unknown argument {0}. Expected one from {1}\".format(\n arg, choices\n ).translate(None,\"'\")\n )",
"def test_create_service_binding_when_blank_arguments_provided(\n err_msg, args, mock_client\n):\n with pytest.raises(Exception) as err:\n service_binding_create_command(mock_client, args)\n\n assert str(err.value) == err_msg",
"def test_with_empty_args(self):\n with self.assertRaises(TypeError):\n CliArgs()",
"def test_bad_command1(self):\n with self.assertRaises(ValueError):\n command = Command('Fake Command1')",
"def check_args(name, arg_str):\n if len(arg_str) < 1:\n raise gdb.GdbError(\"ERROR: '%s' requires an argument.\"\n % name)\n return False\n else:\n return True"
] | [
"0.63553154",
"0.57756376",
"0.5686924",
"0.54825395",
"0.53907144",
"0.53843004",
"0.5378544",
"0.5372374",
"0.5353488",
"0.5348231",
"0.5308486",
"0.5275861",
"0.52420455",
"0.5213123",
"0.5182557",
"0.51743853",
"0.51729286",
"0.51724285",
"0.51584214",
"0.5154543",
"0.5125455",
"0.51244956",
"0.5122848",
"0.51176065",
"0.511527",
"0.51068264",
"0.5094604",
"0.5087583",
"0.50863314",
"0.5072473"
] | 0.8022566 | 0 |
Run a test of a resource completer. | def RunResourceCompleter(self, resource_spec, attribute_name, prefix='',
expected_completions=None, args=None,
presentation_name=None, dest=None,
flag_name_overrides=None, projects=None):
args = args or {}
flag_name_overrides = flag_name_overrides or {}
presentation_name = presentation_name or resource_spec.name
dest = dest or attribute_name
expected_completions = expected_completions or []
presentation_spec = presentation_specs.ResourcePresentationSpec(
presentation_name,
resource_spec,
'Help text',
prefixes=False,
flag_name_overrides=flag_name_overrides)
resource_info = concept_parsers.ConceptParser([presentation_spec]).GetInfo(
presentation_spec.name)
if projects is not None:
self._ExpectListProjects(projects)
completer = self.Completer(
completers.CompleterForAttribute(
resource_spec,
attribute_name),
args=args,
handler_info=resource_info,
cli=self.cli)
argument = mock.MagicMock(dest=dest)
parameter_info = completer.ParameterInfo(self.parsed_args, argument)
completions = completer.Complete(prefix, parameter_info)
self.assertEqual(expected_completions, completions) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def AssertCommandArgResourceCompleter(self, command, arg):\n self.AssertCommandArgCompleter(command, arg, _COMPLETER_MODULE_PATH)",
"def test_2(self):\n event = MockEvent(u\"%run aa\")\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"aao.py\"]))",
"async def test_deleter_do_work_claim_yes_result(config, mocker):\n logger_mock = mocker.MagicMock()\n lta_rc_mock = mocker.patch(\"rest_tools.client.RestClient.request\", new_callable=AsyncMock)\n lta_rc_mock.return_value = {\n \"bundle\": {\n \"one\": 1,\n },\n }\n db_mock = mocker.patch(\"lta.deleter.Deleter._delete_bundle\", new_callable=AsyncMock)\n p = Deleter(config, logger_mock)\n assert await p._do_work_claim()\n lta_rc_mock.assert_called_with(\"POST\", '/Bundles/actions/pop?source=WIPAC&dest=NERSC&status=detached', {'claimant': f'{p.name}-{p.instance_uuid}'})\n db_mock.assert_called_with(mocker.ANY, {\"one\": 1})",
"async def test_deleter_run(config, mocker):\n logger_mock = mocker.MagicMock()\n p = Deleter(config, logger_mock)\n p._do_work = AsyncMock()\n await p.run()\n p._do_work.assert_called()",
"def test_1(self):\n event = MockEvent(u\"%run a\")\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"a.py\", u\"aao.py\"]))",
"def test_do_delete(delete_resource: MagicMock, response: execution.ResponseInfo):\n delete_resource.return_value = response\n bundle = MagicMock()\n bundle.resources.matching.return_value = [MagicMock(), MagicMock()]\n action = interface.CommandAction(MagicMock(), [], bundle)\n interface.do_delete(action)\n assert delete_resource.call_count == 2",
"def test_3(self):\n event = MockEvent(u'%run \"a')\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"a.py\", u\"aao.py\"]))",
"async def test_deleter_do_work_yes_results(config, mocker):\n logger_mock = mocker.MagicMock()\n dwc_mock = mocker.patch(\"lta.deleter.Deleter._do_work_claim\", new_callable=AsyncMock)\n dwc_mock.side_effect = [True, True, False]\n p = Deleter(config, logger_mock)\n await p._do_work()\n dwc_mock.assert_called()",
"async def test_deleter_do_work_claim_no_result(config, mocker):\n logger_mock = mocker.MagicMock()\n lta_rc_mock = mocker.patch(\"rest_tools.client.RestClient.request\", new_callable=AsyncMock)\n lta_rc_mock.return_value = {\n \"bundle\": None\n }\n db_mock = mocker.patch(\"lta.deleter.Deleter._delete_bundle\", new_callable=AsyncMock)\n p = Deleter(config, logger_mock)\n await p._do_work_claim()\n lta_rc_mock.assert_called_with(\"POST\", '/Bundles/actions/pop?source=WIPAC&dest=NERSC&status=detached', {'claimant': f'{p.name}-{p.instance_uuid}'})\n db_mock.assert_not_called()",
"def test_1(self):\n event = MockEvent(u\"%run a\")\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"a.py\", u\"aaø.py\"]))",
"def test__get_runnable_success(self):\n\n resource = Mock()\n resource._get_runnable = BaseResource._get_runnable.__get__(resource, BaseResource)\n\n resource._get_runnable('a_runnable')\n resource.api.get_runnable.assert_called_once_with('a_runnable')\n resource.api.get_runnable.return_value.assert_called_once_with(resource.request)",
"def test_2(self):\n event = MockEvent(u\"%run aa\")\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"aaø.py\"]))",
"def test_before_delete_for_linked_resource(self):\n resource = factories.Resource()\n helpers.call_action(\"resource_delete\", id=resource[\"id\"])\n with pytest.raises(p.toolkit.ObjectNotFound):\n helpers.call_action(\"resource_show\", id=resource[\"id\"])",
"def runTestCase(self):\n \n #Login\n self.login() \n \n #Performing Configure Resources \n ResultCR, statusCR = self.test_configureResourec()\n \n if statusCR:\n self.succeed(\"Configure Resources Step Successfully Completed %s\"%ResultCR)\n \n else:\n self.failure(\"Failed to Configure Resources Step %s\"%ResultCR)\n \n time.sleep(120)",
"def test_cancel(self):\n\n arg_parser = arguments.get_parser()\n\n args = arg_parser.parse_args([\n 'run',\n '-H', 'this',\n 'cancel_test'\n ])\n run_cmd = commands.get_command(args.command_name)\n run_cmd.silence()\n run_cmd.run(self.pav_cfg, args)\n\n args = arg_parser.parse_args([\n 'cancel'\n ])\n\n get_statuses(self.pav_cfg, args.tests)\n\n cancel_cmd = commands.get_command(args.command_name)\n cancel_cmd.silence()\n\n self.assertEqual(cancel_cmd.run(self.pav_cfg, args), 0)",
"def testCmdFilterCompleter(self):\n\n self.inv.ATTRIBUTES = ['apple', 'pear']\n self.assertEqual(self.inv.ATTRIBUTES[0],\n self.inv._CmdFilterCompleter([''], 0))\n self.assertEqual(self.inv.ATTRIBUTES[1],\n self.inv._CmdFilterCompleter([''], 1))\n self.assertEqual(\n None, self.inv._CmdFilterCompleter([''], len(self.inv.ATTRIBUTES)))\n self.assertEqual('pear', self.inv._CmdFilterCompleter(['p'], 0))\n self.assertEqual(None, self.inv._CmdFilterCompleter(['p', 'bogus'], 0))",
"def test_3(self):\n event = MockEvent(u'%run \"a')\n mockself = None\n match = set(magic_run_completer(mockself, event))\n self.assertEqual(match, set([u\"a.py\", u\"aaø.py\"]))",
"async def test_deleter_quarantine_bundle_with_reason(config, mocker):\n logger_mock = mocker.MagicMock()\n lta_rc_mock = mocker.patch(\"rest_tools.client.RestClient\", new_callable=AsyncMock)\n p = Deleter(config, logger_mock)\n await p._quarantine_bundle(lta_rc_mock, {\"uuid\": \"c4b345e4-2395-4f9e-b0eb-9cc1c9cdf003\"}, \"Rucio caught fire, then we roasted marshmellows.\")\n lta_rc_mock.request.assert_called_with(\"PATCH\", \"/Bundles/c4b345e4-2395-4f9e-b0eb-9cc1c9cdf003\", mocker.ANY)",
"async def test_deleter_delete_bundle(config, mocker):\n logger_mock = mocker.MagicMock()\n lta_rc_mock = mocker.patch(\"rest_tools.client.RestClient\", new_callable=AsyncMock)\n remove_mock = mocker.patch(\"os.remove\", new_callable=MagicMock)\n p = Deleter(config, logger_mock)\n await p._delete_bundle(lta_rc_mock, {\n \"uuid\": \"c4b345e4-2395-4f9e-b0eb-9cc1c9cdf003\",\n \"bundle_path\": \"/icecube/datawarehouse/path/to/c4b345e4-2395-4f9e-b0eb-9cc1c9cdf003.zip\",\n })\n remove_mock.assert_called()\n lta_rc_mock.request.assert_called_with(\"PATCH\", \"/Bundles/c4b345e4-2395-4f9e-b0eb-9cc1c9cdf003\", mocker.ANY)",
"def test_new_resource(self):\n if verbosity>=3: print \"\\n\"+\"~\"*80\n for resource in cfg.new_resources:\n args=copy.deepcopy(resource)\n self.run_init_new_resource(**args)\n if verbosity>=3: print \"\\n\"+\"~\"*80",
"def test_do_status(get_resource_status: MagicMock, response: execution.ResponseInfo):\n get_resource_status.return_value = response\n bundle = MagicMock()\n bundle.resources.matching.return_value = [MagicMock(), MagicMock()]\n action = interface.CommandAction(MagicMock(), [], bundle)\n interface.do_status(action)\n assert get_resource_status.call_count == 2",
"async def test_release(self):\n resources = self.resource_type(a=10, b=10)\n\n async def block(**amounts):\n async with resources.borrow(**amounts):\n await (time + math.inf)\n\n assert time == 0\n async with Scope() as scope:\n task_a = scope.do(block(a=4, b=4))\n task_b = scope.do(block(a=4, b=4))\n await (time + 10)\n task_a.cancel()\n task_b.__close__()\n async with resources.borrow(a=10, b=10):\n assert time == 10",
"def test_do_create(create_resource: MagicMock, response: execution.ResponseInfo):\n create_resource.return_value = response\n bundle = MagicMock()\n bundle.resources.matching.return_value = [MagicMock(), MagicMock()]\n action = interface.CommandAction(MagicMock(), [], bundle)\n interface.do_create(action)\n assert create_resource.call_count == 2",
"def test_valid(self):\n args = [SIMPLE_TEMPLATE, SIMPLE_CANDIDATE]\n result = self.runner.invoke(main, args)\n self.assertEqual(0, result.exit_code)",
"def run_quick_test(self, context: ResourceCommandContext, test: str) -> None:\n self.handler.run_quick_test(context, test)",
"def test_cont_larvaemutattion(): \n pass",
"def test_update_resources(mock_app):\n\n # Given a mocked response from the servers containing the resources to be downloaded\n for key, item in PHENOTYPE_TERMS.items():\n\n local_resource_path = item[\"resource_path\"] # Resource on the local repo\n url = item[\"url\"] # Resource internet URL\n with open(local_resource_path, \"r\") as res:\n responses.add(\n responses.GET,\n url,\n body=res.read(),\n status=200,\n content_type=\"application/octet-stream\",\n auto_calculate_content_length=True,\n stream=True,\n )\n\n runner = mock_app.test_cli_runner()\n\n # run resources update command with --test flag:\n result = runner.invoke(cli, [\"update\", \"resources\", \"--test\"])\n assert result.exit_code == 0",
"def test_execute(\n self,\n action: str,\n caplog: LogCaptureFixture,\n mocker: MockerFixture,\n runway_context: MockRunwayContext,\n tmp_path: Path,\n ) -> None:\n caplog.set_level(LogLevels.DEBUG, logger=MODULE)\n mocker.patch.object(Terraform, \"handle_backend\", MagicMock())\n mocker.patch.object(Terraform, \"skip\", True)\n mocker.patch.object(Terraform, \"cleanup_dot_terraform\", MagicMock())\n mocker.patch.object(Terraform, \"handle_parameters\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_init\", MagicMock())\n mocker.patch.object(Terraform, \"current_workspace\", \"test\")\n mocker.patch.object(\n Terraform, \"terraform_workspace_list\", MagicMock(return_value=\"* test\")\n )\n mocker.patch.object(Terraform, \"terraform_workspace_select\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_workspace_new\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_get\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_apply\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_destroy\", MagicMock())\n mocker.patch.object(Terraform, \"terraform_plan\", MagicMock())\n mocker.patch.object(\n Terraform,\n \"auto_tfvars\",\n MagicMock(exists=MagicMock(return_value=True), unlink=MagicMock()),\n )\n command = \"apply\" if action == \"deploy\" else action\n\n # pylint: disable=no-member\n # module is skipped\n obj = Terraform(runway_context, module_root=tmp_path)\n assert not obj[action]()\n obj.handle_backend.assert_called_once_with()\n obj.cleanup_dot_terraform.assert_not_called()\n obj.handle_parameters.assert_not_called()\n obj.auto_tfvars.exists.assert_called_once_with()\n obj.auto_tfvars.unlink.assert_called_once_with()\n caplog.clear()\n\n # module is run; workspace matches\n obj.auto_tfvars.exists.return_value = False\n mocker.patch.object(obj, \"skip\", False)\n assert not obj[action]()\n obj.cleanup_dot_terraform.assert_called_once_with()\n obj.handle_parameters.assert_called_once_with()\n obj.terraform_init.assert_called_once_with()\n obj.terraform_workspace_list.assert_not_called()\n obj.terraform_workspace_select.assert_not_called()\n obj.terraform_workspace_new.assert_not_called()\n obj.terraform_get.assert_called_once_with()\n obj[\"terraform_\" + command].assert_called_once_with()\n assert obj.auto_tfvars.exists.call_count == 2\n assert obj.auto_tfvars.unlink.call_count == 1\n logs = \"\\n\".join(caplog.messages)\n assert \"init (in progress)\" in logs\n assert \"init (complete)\" in logs\n assert \"re-running init after workspace change...\" not in logs\n assert f\"{command} (in progress)\" in logs\n assert f\"{command} (complete)\" in logs\n caplog.clear()\n\n # module is run; switch to workspace\n mocker.patch.object(Terraform, \"current_workspace\", \"default\")\n assert not obj[action]()\n obj.terraform_workspace_list.assert_called_once_with()\n obj.terraform_workspace_select.assert_called_once_with(\"test\")\n obj.terraform_workspace_new.assert_not_called()\n logs = \"\\n\".join(caplog.messages)\n assert \"re-running init after workspace change...\" in logs\n\n # module is run; create workspace\n mocker.patch.object(\n Terraform, \"terraform_workspace_list\", MagicMock(return_value=\"\")\n )\n assert not obj[action]()\n obj.terraform_workspace_new.assert_called_once_with(\"test\")",
"def test_got_resources(self, resources, monkeypatch):\n dev = qml.device(\"orquestra.qiskit\", wires=2, resources=resources)\n recorder = []\n mock_res_dict = {\"First\": {\"expval\": {\"list\": [123456789]}}}\n\n with monkeypatch.context() as m:\n\n # Record the resources that were passed\n get_resources_passed = lambda *args, **kwargs: recorder.append(\n kwargs.get(\"resources\", False)\n )\n m.setattr(\n pennylane_orquestra.orquestra_device, \"gen_expval_workflow\", get_resources_passed\n )\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"loop_until_finished\",\n lambda *args, **kwargs: mock_res_dict,\n )\n\n @qml.qnode(dev)\n def circuit():\n qml.PauliX(0)\n return qml.expval(qml.PauliZ(0))\n\n assert circuit() == 123456789\n\n # Check that the resorces were passed correctly\n assert len(recorder) == 1\n assert recorder[0] == resources",
"def test_is_valid_resource():\n mock_name = \"rg-001\"\n output = sh.is_valid_resource(mock_name)\n assert output is True"
] | [
"0.5947931",
"0.52610797",
"0.52518344",
"0.5224835",
"0.52106637",
"0.51147044",
"0.50972575",
"0.50633025",
"0.5059944",
"0.50454164",
"0.5036123",
"0.5035484",
"0.5021975",
"0.4981828",
"0.49479973",
"0.49290627",
"0.4925137",
"0.492107",
"0.49151045",
"0.48998746",
"0.4899493",
"0.4896885",
"0.48914176",
"0.48690873",
"0.48624802",
"0.4859404",
"0.48554504",
"0.48499635",
"0.48276788",
"0.48077622"
] | 0.6321574 | 0 |
Verify TOR connection by connecting to check.torproject.org | def verify_tor_connection():
content = urlopen('https://check.torproject.org/').read()
# <h1 class="off"> - not using tor
# <h1 class="not"> - using tor without torbrowser
# <h1 class="on"> - using tor with torbrowser
return content.find(b'class="off"')==-1 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_tor(self):\n response = self.get(\"https://check.torproject.org\")\n parsed = self.parse(response)\n title = parsed.get_title()\n if title == \"Sorry. You are not using Tor.\":\n self.logger.warning(\"Tor is NOT properly configured.\")\n return False\n elif title == \"Congratulations. This browser is configured to use Tor.\":\n self.logger.info(\"Tor is properly configured.\")\n return True\n\n self.logger.error(\"There was an unexpected error checking if Tor is properly configured.\")\n return False",
"def test_connection(server_address, server_username=None, server_password=None, verbose=True):\n\n try:\n if server_username is None and server_password is None:\n r = requests.get(url=server_address)\n else:\n r = requests.get(url=server_address, auth=(server_username, server_password))\n if r.ok:\n if verbose:\n print(\"Network connectivity: VERIFIED. Server \" + server_address + \" is reachable!\")\n return True\n else:\n print(\"Something wrong during connection!\")\n return False\n\n except Exception as e:\n print(e)\n return False",
"def check_conn():\n try:\n urllib2.urlopen(\"http://www.google.com\", timeout=5)\n return True\n except urllib2.URLError:\n pass\n return False",
"def net_check():\n resp = None\n host = \"https://gitlab.manjaro.org\"\n # noinspection PyBroadException\n try:\n resp = urllib.request.urlopen(host, timeout=2)\n except Exception:\n pass\n return bool(resp)",
"def check_connection(url=\"http://example.com/\"):\n try:\n requests.head(url)\n return True\n except requests.ConnectionError:\n spinner.warn(\"No internet connecction 🤭\")\n sys.exit(1)",
"def _check_connection() -> bool:\n return bool(subprocess.check_output([\"hostname\", \"-I\"]))",
"def check_connection():\n r = requests.get('https://www.google.com')\n if r.status_code == 200:\n print (colored(\"Connected.\", 'green'))\n else:\n print (colored(\"Not Connected.\", 'red'))",
"def use_tor_proxy():\n if not socks.get_default_proxy():\n # when proxy was not explicitly set, use 127.0.0.1:9050\n set_tor_proxy('127.0.0.1', 9050)\n\n socket.socket = socks.socksocket\n socket.gethostbyname = sockshostbyname\n socket.gethostbyaddr = sockshostbyaddr\n socket.gethostbyname_ex = sockshostbyname_ex\n socket.getaddrinfo = socksgetaddrinfo\n socket.create_connection = create_connection\n # getfqdn uses gethostbyaddr \n\n # Now all relevant functions are replaced with torified versions\n # lets test the connection.\n\n if need_check and not verify_tor_connection():\n print(\"Tor NOT enabled - exiting\", file=sys.stderr)\n sys.exit(1)",
"def test_connection():\n result = run(\"uname -a\")\n if result.failed:\n _pretty_output(\"Could not connect to remote server. Please check your configuration\")\n abort(\"Cannot continue. Aborting...\")",
"def verify_core_connection():\n if not base_url or not api_credentials:\n retrieve_connection_info()\n return",
"def verify_core_connection():\n if not base_url or not api_credentials:\n retrieve_connection_info()\n return",
"def internet_on():\n try:\n urllib.request.urlopen('http://216.58.192.142', timeout=1)\n return True\n except urllib.error.URLError: \n return False",
"def test_connection(self):\n r = main.List.connection()\n self.assertTrue(r.ping(), \"Connection failed.\")",
"def test_verify_connection_to_a_device():",
"def can_connect(test_url):\n try:\n requests.get(test_url)\n except (OSError):#connection error\n logger.warning('couldn\\'t reach server on: {test_url}')\n return False\n return True",
"def _check_connection(self):\n for _ in range(3):\n try:\n r = get(f\"http://{self.ip}/student/{self.user}\")\n if r.ok:\n break \n except OSError as e:\n print(f\"Connection error:\\n{e}\")\n sleep(2)\n else:\n raise ConnectionError(f\"Can not connect to server with params ip: {self.ip}, user: {self.user}\")",
"def __CheckConnectStatus(self):\r\n if not self.tn:\r\n print \"Connection is down!\"\r\n return False\r\n else:\r\n print \"Connection is alive!\"\r\n return True",
"def check_internet_connection():\n logging.debug('Checking internet connection')\n try:\n urlopen(config.api_base_url,\n timeout=config.timeout_internet_connection)\n logging.debug('Connected to the internet')\n return True\n except URLError as err:\n logging.debug('No internet connection')\n return False",
"def status_check(self):\n try:\n client = self.connect()\n client.sys.is_initialized() # make an actual network connection\n return True\n except:\n return False",
"def check_up(addr, p):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((addr, p))\n sock.close()\n if result == 0:\n ans = True\n else:\n ans = False\n return ans",
"def test_connection(self):\n req = requests.get(\"http://{}:{}\".format(self.config.options.get(\"Server\", \"ListenAddress\"),\n self.config.options.get(\"Server\", \"Port\")))\n\n self.assertEqual(req.status_code, 200)",
"def renew_tor():\n try:\n stem.socket.ControlPort(port = CONTROL_PORT)\n except stem.SocketError as exc:\n print (\"Tor\", \"[!] Unable to connect to port %s (%s)\" %(CONTROL_PORT , exc))\n with Controller.from_port(port = CONTROL_PORT) as controller:\n controller.authenticate()\n controller.signal(stem.Signal.NEWNYM)\n print (\"TorTP\", \"[+] New Tor circuit created\")\n print 'renewed:' + query(\"http://icanhazip.com\")",
"def ping():\n api_online = bool(check_url(\"https://rest.ensembl.org/info/ping?\"))\n vertebrate_url_online = bool(check_url(\"http://ftp.ensembl.org\"))\n other_url_online = bool(check_url(\"http://ftp.ensemblgenomes.org\"))\n return api_online and vertebrate_url_online and other_url_online",
"def verif(self, verif=False):\r\n\r\n \r\n try:\r\n urlO = tmout(self.makeTheUrlOpener,timeout_duration=15)\r\n ip = tmout(getMyIp,(urlO,)) #getMyIp(urlO)\r\n \r\n if verif:\r\n ip.index(str(self.url))\r\n if not ip:\r\n raise Exception('Impossible de se connecte en moins de 30 sec')\r\n \r\n \r\n except Exception as inst:\r\n print '\\terreur de Proxy : %s' % (inst)\r\n #print type(inst) # the exception instance\r\n #print inst.args # arguments stored in .args\r\n pass\r\n else:\r\n print '- Proxy Ok -'\r\n return True",
"def check_server():\n\n url='{url}/auth'.format(url=config.SERVER_URL)\n while True:\n\n try:\n res=request.urlopen(url,timeout=5).read()\n res=str(res,encoding='utf8')\n if 'connection valid' in res:\n break\n else:\n error_str='error: client-> check_server :' \\\n 'no auth to connect to server,exit process'\n info_manager(error_str,type='KEY')\n os._exit(0)\n except Exception as e:\n err_str='error:client->check_server:cannot ' \\\n 'connect to server; process sleeping'\n info_manager(err_str,type='NORMAL')\n time.sleep(5) # sleep for 1 seconds",
"def test_check_connection(self):\n self.assertIsNotNone(app.check_connection())",
"def _CheckConnect(self):\n try:\n resp = requests.get(self._target_url, timeout=2)\n if resp.headers['Maximum-Bytes']:\n self._max_bytes = int(resp.headers['Maximum-Bytes'])\n return resp.status_code == 200\n except requests.exceptions.ConnectionError:\n return False\n except Exception as e:\n self.exception('Unexpected test connect failure: %s', str(e))\n return False",
"def is_connected():\n import socket\n try:\n host = socket.gethostbyname(\"www.gov.uk\")\n socket.create_connection((host, 80), 2)\n return True\n except:\n pass\n return False",
"def can_connect(url):\n\n log(\"Checking connection to: {0}\".format(url))\n success = True\n\n try:\n urlopen(url, timeout=1)\n log(\"... can connect\")\n except URLError:\n log(\"... can't connect\")\n success = False\n\n return success",
"def is_connected():\n \n try:\n socket.create_connection((\"www.google.com\", 80))\n return True\n except OSError:\n pass\n return False"
] | [
"0.7544404",
"0.6652259",
"0.6404984",
"0.626812",
"0.62472934",
"0.6239273",
"0.6227598",
"0.61347693",
"0.6115881",
"0.6079948",
"0.6079948",
"0.6040862",
"0.6031114",
"0.5984239",
"0.59594524",
"0.59464335",
"0.5900688",
"0.5899588",
"0.5899081",
"0.5882159",
"0.58812284",
"0.5879866",
"0.587692",
"0.58616245",
"0.5857882",
"0.5836795",
"0.58236283",
"0.5818736",
"0.58092856",
"0.58083385"
] | 0.81268466 | 0 |
Modify the socket module to use the TOR proxy | def use_tor_proxy():
if not socks.get_default_proxy():
# when proxy was not explicitly set, use 127.0.0.1:9050
set_tor_proxy('127.0.0.1', 9050)
socket.socket = socks.socksocket
socket.gethostbyname = sockshostbyname
socket.gethostbyaddr = sockshostbyaddr
socket.gethostbyname_ex = sockshostbyname_ex
socket.getaddrinfo = socksgetaddrinfo
socket.create_connection = create_connection
# getfqdn uses gethostbyaddr
# Now all relevant functions are replaced with torified versions
# lets test the connection.
if need_check and not verify_tor_connection():
print("Tor NOT enabled - exiting", file=sys.stderr)
sys.exit(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def patch():\n\n config(\"127.0.0.1\", 9050)\n\n socket.socket = socks.socksocket\n socket.create_connection = create_connection",
"def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):\r\n self.__proxy = (proxytype,addr,port,rdns,username,password)",
"def configure_proxy(self, proxy):\n server_name = self.get_external_domain()\n tls_enabled = self.get_tls()\n ircd_enabled = self.charm_config.get(\"enable-ircd\")\n federation_enabled = self.get_federation()\n\n if tls_enabled:\n self.external_port = 443\n else:\n self.external_port = 80\n\n proxy_config = [\n {\n \"mode\": \"http\",\n \"external_port\": self.external_port,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8008,\n \"subdomain\": server_name,\n },\n ]\n\n if federation_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_federation_mode(),\n \"external_port\": 8448,\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": 8448,\n }\n )\n\n if ircd_enabled:\n proxy_config.append(\n {\n \"mode\": self.get_irc_mode(),\n \"external_port\": self.get_irc_port(),\n \"internal_host\": self.get_internal_host(),\n \"internal_port\": self.irc_internal_port,\n }\n )\n\n proxy.configure(proxy_config)",
"def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):\r\n self.__proxy = (proxytype, addr, port, rdns, username, password)",
"def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):\r\n self.__proxy = (proxytype, addr, port, rdns, username, password)",
"def set_proxy(self, http_proxy):\n self.http_proxy = http_proxy\n self._geturl.http_proxy = http_proxy",
"def connect(self):\n if self._sock is None:\n if not self.proxy_host:\n host = self.host\n port = self.port\n else:\n host = self.proxy_host\n port = self.proxy_port\n \n sock = socket.create_connection((host, port), 5)\n proto = None\n\n if self.secure:\n assert not self.proxy_host, \"Using a proxy with HTTPS not yet supported.\"\n sock, proto = wrap_socket(sock, host, self.ssl_context)\n\n log.debug(\"Selected protocol: %s\", proto)\n sock = BufferedSocket(sock, self.network_buffer_size)\n\n if proto not in ('http/1.1', None):\n raise TLSUpgrade(proto, sock)\n\n self._sock = sock\n\n return",
"def renew_tor():\n try:\n stem.socket.ControlPort(port = CONTROL_PORT)\n except stem.SocketError as exc:\n print (\"Tor\", \"[!] Unable to connect to port %s (%s)\" %(CONTROL_PORT , exc))\n with Controller.from_port(port = CONTROL_PORT) as controller:\n controller.authenticate()\n controller.signal(stem.Signal.NEWNYM)\n print (\"TorTP\", \"[+] New Tor circuit created\")\n print 'renewed:' + query(\"http://icanhazip.com\")",
"def __negotiatehttp(self,destaddr,destport):\r\n # If we need to resolve locally, we do this now\r\n if self.__proxy[3] == False:\r\n addr = socket.gethostbyname(destaddr)\r\n else:\r\n addr = destaddr\r\n self.sendall(\"CONNECT \" + addr + \":\" + str(destport) + \" HTTP/1.1\\r\\n\" + \"Host: \" + destaddr + \"\\r\\n\\r\\n\")\r\n # We read the response until we get the string \"\\r\\n\\r\\n\"\r\n resp = self.recv(1)\r\n while resp.find(\"\\r\\n\\r\\n\")==-1:\r\n resp = resp + self.recv(1)\r\n # We just need the first line to check if the connection\r\n # was successful\r\n statusline = resp.splitlines()[0].split(\" \",2)\r\n if statusline[0] not in (\"HTTP/1.0\",\"HTTP/1.1\"):\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n try:\r\n statuscode = int(statusline[1])\r\n except ValueError:\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n if statuscode != 200:\r\n self.close()\r\n raise HTTPError((statuscode,statusline[2]))\r\n self.__proxysockname = (\"0.0.0.0\",0)\r\n self.__proxypeername = (addr,destport)",
"def switch_proxy(self, proxy):",
"def proxy_settings(self):\n if config.proxy_host is None or config.proxy_host == \"\":\n return\n\n proxy = urllib2.ProxyHandler({\"http\": config.proxy_host})\n opener = urllib2.build_opener(proxy)\n urllib2.install_opener(opener)",
"def __negotiatesocks4(self,destaddr,destport):\r\n # Check if the destination address provided is an IP address\r\n rmtrslv = False\r\n try:\r\n ipaddr = socket.inet_aton(destaddr)\r\n except socket.error:\r\n # It's a DNS name. Check where it should be resolved.\r\n if self.__proxy[3]==True:\r\n ipaddr = \"\\x00\\x00\\x00\\x01\"\r\n rmtrslv = True\r\n else:\r\n ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))\r\n # Construct the request packet\r\n req = \"\\x04\\x01\" + struct.pack(\">H\",destport) + ipaddr\r\n # The username parameter is considered userid for SOCKS4\r\n if self.__proxy[4] != None:\r\n req = req + self.__proxy[4]\r\n req = req + \"\\x00\"\r\n # DNS name if remote resolving is required\r\n # NOTE: This is actually an extension to the SOCKS4 protocol\r\n # called SOCKS4A and may not be supported in all cases.\r\n if rmtrslv==True:\r\n req = req + destaddr + \"\\x00\"\r\n self.sendall(req)\r\n # Get the response from the server\r\n resp = self.__recvall(8)\r\n if resp[0] != \"\\x00\":\r\n # Bad data\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n if resp[1] != \"\\x5A\":\r\n # Server returned an error\r\n self.close()\r\n if ord(resp[1]) in (91,92,93):\r\n self.close()\r\n raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))\r\n else:\r\n raise Socks4Error((94,_socks4errors[4]))\r\n # Get the bound address/port\r\n self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(\">H\",resp[2:4])[0])\r\n if rmtrslv != None:\r\n self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)\r\n else:\r\n self.__proxypeername = (destaddr,destport)",
"def __init__(self):\r\n self.isConnected, self.atSchool = getNetStatus()\r\n self._token = None\r\n \r\n if self.atSchool:\r\n #print(\"at school\")\r\n urllib.request.install_opener(urllib.request.build_opener(urllib.request.ProxyHandler({'http': 'http://:@proxy.intranet:8080', 'https': 'http://:@proxy.intranet:8080'})))",
"async def _create_proxy(self):\n self._proxy = await self._controller.fopen_tcp_proxy(\n Cellular._DRONE_WEB_API_PORT\n )\n\n self._drone_http_url = f\"http://{self._proxy.address}:{self._proxy.port}\"\n\n if self._autoconfigure and self._user_apc_token is None:\n self.logger.info(\"cellular auto pairing and configuration\")\n # generate a new anonymous user APC token and configure the cellular.\n self._fautoconfigure_with_new_token()",
"def connect(self,destpair):\r\n # Do a minimal input check first\r\n if (type(destpair) in (list,tuple)==False) or (len(destpair)<2) or (type(destpair[0])!=str) or (type(destpair[1])!=int):\r\n raise GeneralProxyError((5,_generalerrors[5]))\r\n if self.__proxy[0] == PROXY_TYPE_SOCKS5:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 1080\r\n _orgsocket.connect(self,(self.__proxy[1],portnum))\r\n self.__negotiatesocks5(destpair[0],destpair[1])\r\n elif self.__proxy[0] == PROXY_TYPE_SOCKS4:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 1080\r\n _orgsocket.connect(self,(self.__proxy[1],portnum))\r\n self.__negotiatesocks4(destpair[0],destpair[1])\r\n elif self.__proxy[0] == PROXY_TYPE_HTTP:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 8080\r\n _orgsocket.connect(self,(self.__proxy[1],portnum))\r\n self.__negotiatehttp(destpair[0],destpair[1])\r\n elif self.__proxy[0] == None:\r\n _orgsocket.connect(self,(destpair[0],destpair[1]))\r\n else:\r\n raise GeneralProxyError((4,_generalerrors[4]))",
"def set_proxy(self, proxy=None):\n proxy_type = None\n if proxy:\n parse = urlparse(proxy)\n scheme = parse.scheme\n hostname = parse.hostname\n port = parse.port\n username = parse.username or ''\n password = parse.password or ''\n\n if scheme == 'socks5':\n proxy_type = QNetworkProxy.Socks5Proxy\n elif scheme in ('http', 'https'):\n proxy_type = QNetworkProxy.HttpProxy\n\n if proxy_type:\n self.page.networkAccessManager().setProxy(\n QNetworkProxy(proxy_type, hostname, port, username, password)\n )\n else:\n QNetworkProxyFactory.setUseSystemConfiguration(True)",
"def wrapmodule(module):\r\n if _defaultproxy != None:\r\n module.socket.socket = socksocket\r\n else:\r\n raise GeneralProxyError((4, \"no proxy specified\"))",
"def wrapmodule(module):\r\n if _defaultproxy != None:\r\n module.socket.socket = socksocket\r\n else:\r\n raise GeneralProxyError((4, \"no proxy specified\"))",
"def installProxy(SOCK_ADDR, SOCK_PORT, check_ip=True):\n # get normal ip\n if check_ip:\n try:\n _IP[0] = getIP()\n except Exception, e:\n raise ProxyException(\"Can't connect to internet!\\n\" + str(e))\n\n # save original socket\n global ORIG_SOCK\n ORIG_SOCK = copy.copy(socket.socket)\n\n # apply proxy\n socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, SOCK_ADDR, SOCK_PORT)\n socket.socket = socks.socksocket\n\n # get ip over proxy\n if not check_ip:\n return\n\n try:\n _IP[1] = getIP()\n except Exception, e:\n raise ProxyException(\n \"Your SOCK5 proxy (\" + SOCK_ADDR + \":\" + str(SOCK_PORT) + \") \"\n \"isn't responding!\\n\" +\n str(e)\n )\n\n if _IP[0] == _IP[1]:\n raise ProxyException(\n \"This proxy doesn't hides your IP, use better one.\"\n )",
"def set_proxy(self, host: str, port: int,\n proxy_type=ProxyTypes.Http,\n secret='', # for Mtproto\n username='',\n password='',\n http_only=False, # For HTTP: Pass true, if the proxy supports only HTTP requests and doesn't support\n # transparent TCP connections via HTTP CONNECT method.\n check_proxy=True) -> None:\n self.remove_proxy()\n\n proxy_type_obj = {\n '@type': proxy_type,\n 'secret': secret,\n 'http_only': http_only,\n 'username': username,\n 'password': password,\n }\n\n self.call_method('addProxy', server=host, port=port, enable=True, type=proxy_type_obj)\n\n if check_proxy:\n self.check_proxy()",
"def __negotiatesocks5(self,destaddr,destport):\r\n # First we'll send the authentication packages we support.\r\n if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):\r\n # The username/password details were supplied to the\r\n # setproxy method so we support the USERNAME/PASSWORD\r\n # authentication (in addition to the standard none).\r\n self.sendall(\"\\x05\\x02\\x00\\x02\")\r\n else:\r\n # No username/password were entered, therefore we\r\n # only support connections with no authentication.\r\n self.sendall(\"\\x05\\x01\\x00\")\r\n # We'll receive the server's response to determine which\r\n # method was selected\r\n chosenauth = self.__recvall(2)\r\n if chosenauth[0] != \"\\x05\":\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n # Check the chosen authentication method\r\n if chosenauth[1] == \"\\x00\":\r\n # No authentication is required\r\n pass\r\n elif chosenauth[1] == \"\\x02\":\r\n # Okay, we need to perform a basic username/password\r\n # authentication.\r\n self.sendall(\"\\x01\" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.proxy[5])) + self.__proxy[5])\r\n authstat = self.__recvall(2)\r\n if authstat[0] != \"\\x01\":\r\n # Bad response\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n if authstat[1] != \"\\x00\":\r\n # Authentication failed\r\n self.close()\r\n raise Socks5AuthError,((3,_socks5autherrors[3]))\r\n # Authentication succeeded\r\n else:\r\n # Reaching here is always bad\r\n self.close()\r\n if chosenauth[1] == \"\\xFF\":\r\n raise Socks5AuthError((2,_socks5autherrors[2]))\r\n else:\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n # Now we can request the actual connection\r\n req = \"\\x05\\x01\\x00\"\r\n # If the given destination address is an IP address, we'll\r\n # use the IPv4 address request even if remote resolving was specified.\r\n try:\r\n ipaddr = socket.inet_aton(destaddr)\r\n req = req + \"\\x01\" + ipaddr\r\n except socket.error:\r\n # Well it's not an IP number, so it's probably a DNS name.\r\n if self.__proxy[3]==True:\r\n # Resolve remotely\r\n ipaddr = None\r\n req = req + \"\\x03\" + chr(len(destaddr)) + destaddr\r\n else:\r\n # Resolve locally\r\n ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))\r\n req = req + \"\\x01\" + ipaddr\r\n req = req + struct.pack(\">H\",destport)\r\n self.sendall(req)\r\n # Get the response\r\n resp = self.__recvall(4)\r\n if resp[0] != \"\\x05\":\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n elif resp[1] != \"\\x00\":\r\n # Connection failed\r\n self.close()\r\n if ord(resp[1])<=8:\r\n raise Socks5Error(ord(resp[1]),_generalerrors[ord(resp[1])])\r\n else:\r\n raise Socks5Error(9,_generalerrors[9])\r\n # Get the bound address/port\r\n elif resp[3] == \"\\x01\":\r\n boundaddr = self.__recvall(4)\r\n elif resp[3] == \"\\x03\":\r\n resp = resp + self.recv(1)\r\n boundaddr = self.__recvall(resp[4])\r\n else:\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n boundport = struct.unpack(\">H\",self.__recvall(2))[0]\r\n self.__proxysockname = (boundaddr,boundport)\r\n if ipaddr != None:\r\n self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)\r\n else:\r\n self.__proxypeername = (destaddr,destport)",
"def __init__(self, *args, **kvargs):\n self.proxy_host = kvargs.get('proxy_host')\n self.proxy_user = kvargs.get('proxy_user')\n self.proxy_password = kvargs.get('proxy_password')\n self.proxy_port = kvargs.get('proxy_port')\n self.proxy_ssh_key_file = kvargs.get('proxy_ssh_key')\n self.proxy_connection = False\n self.host = kvargs.get('host')\n self.user = kvargs.get('user')\n self.password = kvargs.get('password')\n self.port = kvargs.get('port')\n self.dest_connection = False\n\n try:\n # Add host key policy\n if self.proxy_port is None:\n self.proxy_port = 22\n self.transport = paramiko.Transport((self.proxy_host, self.proxy_port))\n self.transport.start_client()\n if self.proxy_ssh_key_file:\n self.proxy_ssh_key = paramiko.RSAKey.from_private_key_file(self.proxy_ssh_key_file)\n conn_result = self.transport.auth_publickey(username=self.proxy_user, key=self.proxy_ssh_key)\n else:\n conn_result = self.transport.auth_password(username=self.proxy_user, password=self.proxy_password)\n if len(conn_result) == 0:\n self.proxy_connection = True\n else:\n logging.error('Unable to connect to proxy host. Authentication failed.')\n raise TobyException('Unable to connect to proxy host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to proxy host: %s' % exp)\n raise TobyException('Unable to connect to proxy host: %s' % exp)\n\n try:\n if self.port is None:\n self.port = 22\n self.tunnel = paramiko.Transport(self.transport.open_channel(\n kind='direct-tcpip',\n dest_addr=(self.host, self.port),\n src_addr=('127.0.0.1', 0)))\n self.tunnel.start_client()\n conn_result = self.tunnel.auth_password(username=self.user, password=self.password)\n if len(conn_result) == 0:\n self.dest_connection = True\n else:\n logging.error('Unable to connect to destination host. Authentication failed.')\n raise TobyException('Unable to connect to destination host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to destination host: %s' % exp)\n raise TobyException('Unable to connect to destination host: %s' % exp)\n\n try:\n self.handle = self.tunnel.open_session(20)\n self.handle.get_pty(width=160, height=0)\n self.handle.invoke_shell()\n self.handle.set_combine_stderr(True)\n self.handle.settimeout(60)\n tnh = self.handle\n got = []\n while True:\n _rd, _wr, _err = select([tnh], [], [], 10)\n if _rd:\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n got.append(data)\n if re.search('> ', data):\n tnh.send(b' start shell\\n')\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n if re.search(r'(\\$|>|#|%)[\\s\\t]?', data):\n break\n except Exception as exp:\n logging.error(\n 'Unable to fetch the prompt on destination host: %s' % exp)\n raise TobyException(\n 'Unable to fetch the prompt on destination host: %s' % exp)",
"def setTrustProxy(self, trustProxy):\n pass",
"def connect(self, destpair):\r\n # Do a minimal input check first\r\n if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):\r\n raise GeneralProxyError((5, _generalerrors[5]))\r\n if self.__proxy[0] == PROXY_TYPE_SOCKS5:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 1080\r\n _orgsocket.connect(self, (self.__proxy[1], portnum))\r\n self.__negotiatesocks5(destpair[0], destpair[1])\r\n elif self.__proxy[0] == PROXY_TYPE_SOCKS4:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 1080\r\n _orgsocket.connect(self,(self.__proxy[1], portnum))\r\n self.__negotiatesocks4(destpair[0], destpair[1])\r\n elif self.__proxy[0] == PROXY_TYPE_HTTP:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 8080\r\n _orgsocket.connect(self,(self.__proxy[1], portnum))\r\n self.__negotiatehttp(destpair[0], destpair[1])\r\n elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:\r\n if self.__proxy[2] != None:\r\n portnum = self.__proxy[2]\r\n else:\r\n portnum = 8080\r\n _orgsocket.connect(self,(self.__proxy[1],portnum))\r\n if destpair[1] == 443:\r\n self.__negotiatehttp(destpair[0],destpair[1])\r\n else:\r\n self.__httptunnel = False\r\n elif self.__proxy[0] == None:\r\n _orgsocket.connect(self, (destpair[0], destpair[1]))\r\n else:\r\n raise GeneralProxyError((4, _generalerrors[4]))",
"def set_proxy(self):",
"def set_target(self, host, port):\r\n pass",
"def set_proxy(self, proxy, user=None):\n proxy_handler = urllib2.ProxyHandler({'http':proxy})\n proxy_auth_handler = urllib2.ProxyBasicAuthHandler()\n if user:\n proxy_auth_handler.add_password('realm', 'host', user[0], user[1])\n \n opener = urllib2.build_opener(proxy_handler, proxy_auth_handler)\n urllib2.install_opener(opener)",
"def configureProxy():\n # config\n port = config.get(\"proxy\", \"port\")\n allowedDomains = config.get(\"proxy\", \"alloweddomains\")\n listeningIP = config.get(\"hotspot\", \"ip\")\n # wan dns\n proxyNSConfig = \"\"\n for dnsServer in wandns:\n proxyNSConfig = f\"{proxyNSConfig}nserver {dnsServer}\\n\"\n # 3proxy configurations\n proxyConfig = f\"\"\"#!/bin/3proxy\n#daemon\npidfile /var/run/3proxy.pid\nchroot /usr/local/3proxy proxy proxy\nnscache 65536\n{proxyNSConfig}\nlog /logs/3proxy-%y%m%d.log D\nrotate 1\ncounter /count/3proxy.3cf\ninclude /conf/counters\ninclude /conf/bandlimiters\nauth iponly\nallow * * {allowedDomains}\ndeny *\nproxy -e{wanip} -i{listeningIP} -p{port}\n\"\"\"\n confFile = open(\"/etc/3proxy/3proxy.cfg\", \"w\")\n confFile.write(proxyConfig)\n confFile.close()",
"def __init__( self, conn, addr, server, version ):",
"def __negotiatesocks4(self,destaddr,destport):\r\n # Check if the destination address provided is an IP address\r\n rmtrslv = False\r\n try:\r\n ipaddr = socket.inet_aton(destaddr)\r\n except socket.error:\r\n # It's a DNS name. Check where it should be resolved.\r\n if self.__proxy[3]:\r\n ipaddr = struct.pack(\"BBBB\", 0x00, 0x00, 0x00, 0x01)\r\n rmtrslv = True\r\n else:\r\n ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))\r\n # Construct the request packet\r\n req = struct.pack(\">BBH\", 0x04, 0x01, destport) + ipaddr\r\n # The username parameter is considered userid for SOCKS4\r\n if self.__proxy[4] != None:\r\n req = req + self.__proxy[4]\r\n req = req + chr(0x00).encode()\r\n # DNS name if remote resolving is required\r\n # NOTE: This is actually an extension to the SOCKS4 protocol\r\n # called SOCKS4A and may not be supported in all cases.\r\n if rmtrslv:\r\n req = req + destaddr + chr(0x00).encode()\r\n self.sendall(req)\r\n # Get the response from the server\r\n resp = self.__recvall(8)\r\n if resp[0:1] != chr(0x00).encode():\r\n # Bad data\r\n self.close()\r\n raise GeneralProxyError((1,_generalerrors[1]))\r\n if resp[1:2] != chr(0x5A).encode():\r\n # Server returned an error\r\n self.close()\r\n if ord(resp[1:2]) in (91, 92, 93):\r\n self.close()\r\n raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))\r\n else:\r\n raise Socks4Error((94, _socks4errors[4]))\r\n # Get the bound address/port\r\n self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(\">H\", resp[2:4])[0])\r\n if rmtrslv != None:\r\n self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)\r\n else:\r\n self.__proxypeername = (destaddr, destport)"
] | [
"0.6650349",
"0.65375197",
"0.64568424",
"0.62961507",
"0.62961507",
"0.62119687",
"0.62029964",
"0.6068232",
"0.6067382",
"0.60656387",
"0.6003109",
"0.59634733",
"0.59600323",
"0.5916334",
"0.58848166",
"0.587604",
"0.58721715",
"0.58721715",
"0.5863",
"0.58321184",
"0.58277464",
"0.5811863",
"0.5776307",
"0.5732306",
"0.5725563",
"0.5664592",
"0.56573224",
"0.56454587",
"0.55869883",
"0.55832165"
] | 0.74029726 | 0 |
Return dtype if it is inexact, else float64. | def asinexact(dtype):
if np.issubdtype(dtype, np.inexact):
return dtype
else:
return np.float64 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def best_float():\n try:\n long_info = type_info(np.longdouble)\n except FloatingError:\n return np.float64\n if (long_info['nmant'] > type_info(np.float64)['nmant'] and\n machine() != 'sparc64'): # sparc has crazy-slow float128\n return np.longdouble\n return np.float64",
"def dtype_float(dtype: DType):\n return promote_dtypes(dtype, np.float16)",
"def get_floating_dtype(A):\n dtype = A.dtype\n if dtype in (torch.float16, torch.float32, torch.float64):\n return dtype\n return torch.float32",
"def _cast_to_float64(matrix):\n return matrix.astype(np.float64) if matrix.dtype != np.float64 else matrix",
"def _assert_float_dtype(dtype):\n if not dtype.is_floating:\n raise ValueError(\"Expected floating point type, got %s.\" % dtype)\n return dtype",
"def test_dtype_float64_vrt(self):\n self.assertEqual(_test_array(landsat_vrt, dtype='float64').dtype, 'float64')",
"def dtype(self) -> Type[DTypeFloat]:\n\n return self._dtype",
"def test_dtype_float64_gtiff(self):\n self.assertEqual(_test_array(landsat_gtiff, dtype='float64').dtype, 'float64')",
"def float2dtype(float_type):\n if float_type == 'single' or float_type is None:\n return numpy.float32\n if float_type == 'double':\n return numpy.float64\n raise NotImplementedError (`float_type`)",
"def get_real_type(self):\n import numpy\n return numpy.float64",
"def _float(data):\n try:\n return float(data)\n except ValueError as err:\n if data in ('None', 'NA', 'nan'):\n return nan\n else:\n raise ValueError(err)",
"def longdouble_lte_float64():\n return np.longdouble(2**53) == np.longdouble(2**53) + 1",
"def tiny_value_of_dtype(dtype: torch.dtype):\n if not dtype.is_floating_point:\n raise TypeError(\"Only supports floating point dtypes.\")\n if dtype == torch.float or dtype == torch.double:\n return 1e-13\n elif dtype == torch.half:\n return 1e-4\n else:\n raise TypeError(\"Does not support dtype \" + str(dtype))",
"def tiny_value_of_dtype(dtype: torch.dtype):\n\tif not dtype.is_floating_point:\n\t\traise TypeError(\"Only supports floating point dtypes.\")\n\tif dtype == torch.float or dtype == torch.double:\n\t\treturn 1e-13\n\telif dtype == torch.half:\n\t\treturn 1e-4\n\telse:\n\t\traise TypeError(\"Does not support dtype \" + str(dtype))",
"def infer_dtype(self):\n raise NotImplementedError",
"def _float_zeros_like(x):\n\n rval = x.zeros_like()\n\n if rval.type.dtype.find(\"float\") != -1:\n return rval\n\n return rval.astype(config.floatX)",
"def get_eps_float32():\n\n return np.finfo(np.float32).eps",
"def cast_if_floating_dtype(x, dtype=None):\n return nest.map_structure(functools.partial(cast_single_tensor, dtype=dtype),\n x)",
"def ok_floats():\n # copy float list so we don't change the numpy global\n floats = np.sctypes['float'][:]\n if best_float() != np.longdouble and np.longdouble in floats:\n floats.remove(np.longdouble)\n return sorted(floats, key=lambda f: type_info(f)['nmant'])",
"def dtype(a):\n return a.dtype",
"def _is_double(arr):\n\n # Figure out which dtype for data\n if arr.dtype == np.float32:\n return False\n elif arr.dtype == np.float64:\n return True\n else:\n raise ValueError(\"Only float32 or float64 dtypes are supported\")",
"def _numpy_datatype_from_nd4j_context():\n nd4j_datatype = data_type()\n if nd4j_datatype == 'double':\n return np.float64\n elif nd4j_datatype == 'float':\n return np.float32\n elif nd4j_datatype == 'half':\n return np.float16",
"def _float_or_nan(ent):\n try:\n return float(ent)\n except ValueError:\n return float('nan')",
"def cdouble(queue):\n if 'fp64' in queue.device.get_info(ocl.device_info.EXTENSIONS):\n return np.float64\n else:\n return np.float32",
"def _float_zeros_like(x):\r\n\r\n rval = x.zeros_like()\r\n\r\n if rval.type.dtype.find('float') != -1:\r\n return rval\r\n\r\n return rval.astype(theano.config.floatX)",
"def check_type_force_float(x, name):\n if type(x) is int:\n return float(x)\n elif type(x) is not float and type(x) is not numpy.float64:\n raise TypeError(\"%r should be a float\" % (name,))\n else:\n return x",
"def floor_exact(val, flt_type):\n val = int(val)\n flt_type = np.dtype(flt_type).type\n sign = 1 if val > 0 else -1\n try: # int_to_float deals with longdouble safely\n fval = int_to_float(val, flt_type)\n except OverflowError:\n return sign * np.inf\n if not np.isfinite(fval):\n return fval\n info = type_info(flt_type)\n diff = val - as_int(fval)\n if diff >= 0: # floating point value <= val\n return fval\n # Float casting made the value go up\n biggest_gap = 2**(floor_log2(val) - info['nmant'])\n assert biggest_gap > 1\n fval -= flt_type(biggest_gap)\n return fval",
"def as_correct_dtype(obj: ndarray, required_dtype: dtype) -> ndarray:\n if obj.dtype != required_dtype:\n return obj.astype(required_dtype)\n return obj",
"def try_float(data):\n try:\n return float(data)\n except (ValueError, TypeError ):\n return data",
"def _assert_dtype(images):\n dtype = dtypes.as_dtype(images.dtype).base_dtype\n if dtype not in (dtypes.uint8, dtypes.float32):\n raise TypeError('Invalid image dtype {0}, expected uint8 or float32'.format(dtype))\n\n return dtype"
] | [
"0.7222556",
"0.6958146",
"0.6602176",
"0.6504728",
"0.64505535",
"0.64294165",
"0.6294321",
"0.617765",
"0.61307466",
"0.6088964",
"0.5978216",
"0.58772594",
"0.57965684",
"0.57927835",
"0.5781979",
"0.57800686",
"0.5776801",
"0.5772606",
"0.5763018",
"0.5724814",
"0.56862074",
"0.56789696",
"0.5673588",
"0.5666585",
"0.5652034",
"0.56388766",
"0.5576918",
"0.5565716",
"0.5524848",
"0.5502837"
] | 0.82252294 | 0 |
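Illustrative note (not part of the dataset records): a minimal, self-contained check of how the asinexact helper in the record above behaves. The function body is copied verbatim from the record; the dtypes used in the asserts are ordinary NumPy dtypes, nothing project-specific is assumed.

import numpy as np

def asinexact(dtype):
    # Copied from the record above: pass inexact dtypes through, promote the rest.
    if np.issubdtype(dtype, np.inexact):
        return dtype
    else:
        return np.float64

# Floating and complex dtypes are returned unchanged; integer and boolean
# dtypes are promoted to float64.
assert asinexact(np.float32) is np.float32
assert asinexact(np.complex128) is np.complex128
assert asinexact(np.int64) is np.float64
assert asinexact(np.bool_) is np.float64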
Pure python implementation of scipy.linalg.solve_triangular for when a or b are object arrays. | def solve_triangular(a, b, lower=False):
# TODO maybe commit this to gvar.linalg
# TODO can I raise a LinAlgError if a[i,i] is 0, and still return the
# result and have it assigned to a variable using try...finally inside this
# function?
x = np.copy(b)
a = a.reshape(a.shape + (1,) * len(x.shape[1:]))
if lower:
x[0] /= a[0, 0]
for i in range(1, len(x)):
x[i:] -= x[i - 1] * a[i:, i - 1]
x[i] /= a[i, i]
else:
x[-1] /= a[-1, -1]
for i in range(len(x) - 1, 0, -1):
x[:i] -= x[i] * a[:i, i]
x[i - 1] /= a[i - 1, i - 1]
return x | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def housetriang_solve(A, b):\n\n n, _ = A.shape\n b = np.reshape(b.copy(), (n, 1))\n R, c = housetriang(A, b)\n x = np.reshape(rbackwardsolve(R, c, n), (n,))\n\n\n return x",
"def solve(a, b):\n #-> getrf + getrs\n a, _, _ = get_computation_matrix(a)\n b, cv2, isM2 = get_computation_matrix(b)\n if a.get_dtype() != b.get_dtype():\n raise TypeError(\"solve: dtype of a and b are not compatible!\")\n if a.numRows() != a.numCols():\n raise ValueError(\"solve: input a is not a square matrix!\")\n t_dtype = TypeUtil.to_numpy_dtype(a.get_dtype())\n (_, _, x, _) = gesv(a, b, overwrite_a=1, overwrite_b=1, dtype=t_dtype)\n\n if cv2:\n if isM2:\n return x.to_numpy_matrix()\n else:\n return x.to_numpy_array()\n else:\n return x",
"def do(self, a, b):\n if csingle == a.dtype or cdouble == a.dtype:\n raise SkipTest\n\n x_lo = gula.chosolve(a, b, UPLO='L')\n x_up = gula.chosolve(a, b, UPLO='U')\n assert_almost_equal(x_lo, x_up)\n # inner1d not defined for complex types\n # todo: implement alternative test\n assert_almost_equal(b, gula.matrix_multiply(a, x_lo))\n assert_almost_equal(b, gula.matrix_multiply(a, x_up))",
"def solve(A, b, pivoting='partial'):\n M, N = A.shape\n Z = len(b)\n\n error_msg = \"[!] A must be square.\"\n assert (M == N), error_msg\n\n error_msg = \"[!] b must be {}D\".format(M)\n assert (Z == N), error_msg\n\n solver = LU(A, pivoting=pivoting)\n\n # solve for x\n x = solver.solve(b)\n\n return x",
"def TriangleForwardSub(L,b):\n C = solve(L,b)\n return C",
"def trisolve(l, u, c, b):\n n = shape(b)[0]\n for k in range(1, n):\n b[k] -= l[k-1]*b[k - 1]\n b[n-1] /= u[n-1]\n for k in range(n-2,-1,-1):\n b[k] -= c[k]*b[k + 1]\n b[k] /= u[k]",
"def solve_lu(matvec: Callable, b: jnp.ndarray) -> jnp.ndarray:\n if len(b.shape) == 0:\n return b / _materialize_array(matvec, b.shape)\n elif len(b.shape) == 1:\n A = _materialize_array(matvec, b.shape, b.dtype)\n return jax.numpy.linalg.solve(A, b)\n elif len(b.shape) == 2:\n A = _materialize_array(matvec, b.shape, b.dtype) # 4d array (tensor)\n A = A.reshape(-1, b.shape[0] * b.shape[1]) # 2d array (matrix)\n return jax.numpy.linalg.solve(A, b.ravel()).reshape(*b.shape)\n else:\n raise NotImplementedError",
"def solve_cholesky(matvec: Callable, b: jnp.ndarray) -> jnp.ndarray:\n if len(b.shape) == 0:\n return b / _materialize_array(matvec, b.shape)\n elif len(b.shape) == 1:\n A = _materialize_array(matvec, b.shape)\n return jax.scipy.linalg.solve(A, b, sym_pos=True)\n elif len(b.shape) == 2:\n A = _materialize_array(matvec, b.shape)\n return jax.scipy.linalg.solve(A, b.ravel(), sym_pos=True).reshape(*b.shape)\n else:\n raise NotImplementedError",
"def test_triangular_checks(self):\n A = np.random.rand(10, 10)\n MA = to_matrix(A)\n L, U = MA.decomposeLU()\n self.assertTrue(L.is_lower_triangular())\n self.assertTrue(U.is_upper_triangular())",
"def solve(a, b):\n raise NotImplementedError",
"def _TriangularSolve(x: Array, r: Array) -> Array:\n return tf.linalg.adjoint(\n tf.linalg.triangular_solve(\n r, tf.linalg.adjoint(x), lower=False, adjoint=False\n )\n )",
"def solve(self, A, b):\n if is_sparse(A) or is_sparse(b):\n A, b = A.tocsc(), b.tocsc()\n x = sparse.COO(scipy.sparse.linalg.spsolve(A, b))\n else:\n x = np.linalg.solve(A, b)\n\n return x",
"def tensorsolve(a, b, axes=None):\n\n return TensorSolve(axes)(a, b)",
"def householder_solve(A, b):\n m, k = b.shape\n Ahat = np.zeros((m,m+1))\n x = np.zeros((m,k))\n for i in range(k):\n Ahat[:,:m] = 1.0*A\n Ahat[:,m] = 1.0*b[:,i]\n Rhat = householder(Ahat, m)\n x[:,i] = solve_triangular(Rhat[:,:m], Rhat[:,m])\n return x",
"def tridiag_solver(b):\n b = np.copy(b)\n v = np.zeros_like(b)\n c = np.zeros_like(b)\n\n for i in range(1, len(v) - 1):\n c[i] = -1. / (2 + c[i - 1])\n b[i] = (b[i] + b[i - 1]) / (2 + c[i - 1])\n\n for i in reversed(range(1, len(v) - 1)):\n v[i] = b[i] - c[i] * v[i + 1]\n\n return v",
"def sparsetriangularsolvedense(self,transposed_,lnzc_,lptrc_,lsubc_,lvalc_,b_):\n n_ = None\n if n_ is None:\n n_ = len(b_)\n elif n_ != len(b_):\n raise IndexError(\"Inconsistent length of array b\")\n if n_ is None:\n n_ = len(lnzc_)\n elif n_ != len(lnzc_):\n raise IndexError(\"Inconsistent length of array lnzc\")\n if n_ is None:\n n_ = len(lptrc_)\n elif n_ != len(lptrc_):\n raise IndexError(\"Inconsistent length of array lptrc\")\n _lnzc_minlength = (n_)\n if (n_) > 0 and lnzc_ is not None and len(lnzc_) != (n_):\n raise ValueError(\"Array argument lnzc is not long enough: Is %d, expected %d\" % (len(lnzc_),(n_)))\n if lnzc_ is None:\n raise ValueError(\"Argument lnzc cannot be None\")\n if lnzc_ is None:\n raise ValueError(\"Argument lnzc may not be None\")\n if isinstance(lnzc_, numpy.ndarray) and lnzc_.dtype is numpy.dtype(numpy.int32) and lnzc_.flags.contiguous:\n _lnzc_copyarray = False\n _lnzc_tmp = ctypes.cast(lnzc_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n elif lnzc_ is not None:\n _lnzc_copyarray = True\n _lnzc_np_tmp = numpy.zeros(len(lnzc_),numpy.dtype(numpy.int32))\n _lnzc_np_tmp[:] = lnzc_\n assert _lnzc_np_tmp.flags.contiguous\n _lnzc_tmp = ctypes.cast(_lnzc_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n else:\n _lnzc_copyarray = False\n _lnzc_tmp = None\n \n _lptrc_minlength = (n_)\n if (n_) > 0 and lptrc_ is not None and len(lptrc_) != (n_):\n raise ValueError(\"Array argument lptrc is not long enough: Is %d, expected %d\" % (len(lptrc_),(n_)))\n if lptrc_ is None:\n raise ValueError(\"Argument lptrc cannot be None\")\n if lptrc_ is None:\n raise ValueError(\"Argument lptrc may not be None\")\n if isinstance(lptrc_, numpy.ndarray) and lptrc_.dtype is numpy.dtype(numpy.int64) and lptrc_.flags.contiguous:\n _lptrc_copyarray = False\n _lptrc_tmp = ctypes.cast(lptrc_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int64))\n elif lptrc_ is not None:\n _lptrc_copyarray = True\n _lptrc_np_tmp = numpy.zeros(len(lptrc_),numpy.dtype(numpy.int64))\n _lptrc_np_tmp[:] = lptrc_\n assert _lptrc_np_tmp.flags.contiguous\n _lptrc_tmp = ctypes.cast(_lptrc_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int64))\n else:\n _lptrc_copyarray = False\n _lptrc_tmp = None\n \n lensubnval_ = None\n if lensubnval_ is None:\n lensubnval_ = len(lsubc_)\n elif lensubnval_ != len(lsubc_):\n raise IndexError(\"Inconsistent length of array lsubc\")\n if lensubnval_ is None:\n lensubnval_ = len(lvalc_)\n elif lensubnval_ != len(lvalc_):\n raise IndexError(\"Inconsistent length of array lvalc\")\n _lsubc_minlength = (lensubnval_)\n if (lensubnval_) > 0 and lsubc_ is not None and len(lsubc_) != (lensubnval_):\n raise ValueError(\"Array argument lsubc is not long enough: Is %d, expected %d\" % (len(lsubc_),(lensubnval_)))\n if lsubc_ is None:\n raise ValueError(\"Argument lsubc cannot be None\")\n if lsubc_ is None:\n raise ValueError(\"Argument lsubc may not be None\")\n if isinstance(lsubc_, numpy.ndarray) and lsubc_.dtype is numpy.dtype(numpy.int32) and lsubc_.flags.contiguous:\n _lsubc_copyarray = False\n _lsubc_tmp = ctypes.cast(lsubc_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n elif lsubc_ is not None:\n _lsubc_copyarray = True\n _lsubc_np_tmp = numpy.zeros(len(lsubc_),numpy.dtype(numpy.int32))\n _lsubc_np_tmp[:] = lsubc_\n assert _lsubc_np_tmp.flags.contiguous\n _lsubc_tmp = ctypes.cast(_lsubc_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n else:\n _lsubc_copyarray = False\n _lsubc_tmp = None\n \n _lvalc_minlength = (lensubnval_)\n if 
(lensubnval_) > 0 and lvalc_ is not None and len(lvalc_) != (lensubnval_):\n raise ValueError(\"Array argument lvalc is not long enough: Is %d, expected %d\" % (len(lvalc_),(lensubnval_)))\n if lvalc_ is None:\n raise ValueError(\"Argument lvalc cannot be None\")\n if lvalc_ is None:\n raise ValueError(\"Argument lvalc may not be None\")\n if isinstance(lvalc_, numpy.ndarray) and lvalc_.dtype is numpy.dtype(numpy.float64) and lvalc_.flags.contiguous:\n _lvalc_copyarray = False\n _lvalc_tmp = ctypes.cast(lvalc_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n elif lvalc_ is not None:\n _lvalc_copyarray = True\n _lvalc_np_tmp = numpy.zeros(len(lvalc_),numpy.dtype(numpy.float64))\n _lvalc_np_tmp[:] = lvalc_\n assert _lvalc_np_tmp.flags.contiguous\n _lvalc_tmp = ctypes.cast(_lvalc_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n else:\n _lvalc_copyarray = False\n _lvalc_tmp = None\n \n _b_minlength = (n_)\n if (n_) > 0 and b_ is not None and len(b_) != (n_):\n raise ValueError(\"Array argument b is not long enough: Is %d, expected %d\" % (len(b_),(n_)))\n if isinstance(b_,numpy.ndarray) and not b_.flags.writeable:\n raise ValueError(\"Argument b must be writable\")\n if b_ is None:\n raise ValueError(\"Argument b may not be None\")\n if isinstance(b_, numpy.ndarray) and b_.dtype is numpy.dtype(numpy.float64) and b_.flags.contiguous:\n _b_copyarray = False\n _b_tmp = ctypes.cast(b_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n elif b_ is not None:\n _b_copyarray = True\n _b_np_tmp = numpy.zeros(len(b_),numpy.dtype(numpy.float64))\n _b_np_tmp[:] = b_\n assert _b_np_tmp.flags.contiguous\n _b_tmp = ctypes.cast(_b_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n else:\n _b_copyarray = False\n _b_tmp = None\n \n res = __library__.MSK_XX_sparsetriangularsolvedense(self.__nativep,transposed_,n_,_lnzc_tmp,_lptrc_tmp,lensubnval_,_lsubc_tmp,_lvalc_tmp,_b_tmp)\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])\n if _b_copyarray:\n b_[:] = _b_np_tmp",
"def linsolve(A, b, symmetric=True):\n try:\n F = b.asarray()\n except AttributeError:\n F = np.asarray(b)\n\n use_np_solve = not symmetric or flapack is None\n x, info = None, 1\n if not use_np_solve:\n c, x, info = flapack.dposv(A, F, lower=0, overwrite_a=0, overwrite_b=0)\n if info < 0:\n raise ValueError(\n \"ILLEGAL VALUE IN {0}-TH ARGUMENT OF \" \"INTERNAL DPOSV\".format(-info)\n )\n if info != 0:\n use_np_solve = True\n\n if use_np_solve:\n try:\n x = la.solve(A, F)\n info = 0\n except la.LinAlgError:\n raise RuntimeError(\"ATTEMPTING TO SOLVE UNDER CONSTRAINED SYSTEM\")\n\n if info > 0:\n tty.warn(\"LINSOLVE FAILED, USING LEAST SQUARES \" \"TO SOLVE SYSTEM\")\n x = la.lstsq(A, F)[0]\n\n return x",
"def sparsetriangularsolvedense(self,transposed_,lnzc,lptrc,lsubc,lvalc,b): # 3\n if not isinstance(transposed_,transpose): raise TypeError(\"Argument transposed has wrong type\")\n n_ = None\n if n_ is None:\n n_ = len(b)\n elif n_ != len(b):\n raise IndexError(\"Inconsistent length of array b\")\n if n_ is None:\n n_ = len(lnzc)\n elif n_ != len(lnzc):\n raise IndexError(\"Inconsistent length of array lnzc\")\n if n_ is None:\n n_ = len(lptrc)\n elif n_ != len(lptrc):\n raise IndexError(\"Inconsistent length of array lptrc\")\n if n_ is None: n_ = 0\n if lnzc is None: raise TypeError(\"Invalid type for argument lnzc\")\n if lnzc is None:\n lnzc_ = None\n else:\n try:\n lnzc_ = memoryview(lnzc)\n except TypeError:\n try:\n _tmparr_lnzc = array.array(\"i\",lnzc)\n except TypeError:\n raise TypeError(\"Argument lnzc has wrong type\")\n else:\n lnzc_ = memoryview(_tmparr_lnzc)\n \n else:\n if lnzc_.format != \"i\":\n lnzc_ = memoryview(array.array(\"i\",lnzc))\n \n if lnzc_ is not None and len(lnzc_) != (n_):\n raise ValueError(\"Array argument lnzc has wrong length\")\n if lptrc is None: raise TypeError(\"Invalid type for argument lptrc\")\n if lptrc is None:\n lptrc_ = None\n else:\n try:\n lptrc_ = memoryview(lptrc)\n except TypeError:\n try:\n _tmparr_lptrc = array.array(\"q\",lptrc)\n except TypeError:\n raise TypeError(\"Argument lptrc has wrong type\")\n else:\n lptrc_ = memoryview(_tmparr_lptrc)\n \n else:\n if lptrc_.format != \"q\":\n lptrc_ = memoryview(array.array(\"q\",lptrc))\n \n if lptrc_ is not None and len(lptrc_) != (n_):\n raise ValueError(\"Array argument lptrc has wrong length\")\n lensubnval_ = None\n if lensubnval_ is None:\n lensubnval_ = len(lsubc)\n elif lensubnval_ != len(lsubc):\n raise IndexError(\"Inconsistent length of array lsubc\")\n if lensubnval_ is None:\n lensubnval_ = len(lvalc)\n elif lensubnval_ != len(lvalc):\n raise IndexError(\"Inconsistent length of array lvalc\")\n if lensubnval_ is None: lensubnval_ = 0\n if lsubc is None: raise TypeError(\"Invalid type for argument lsubc\")\n if lsubc is None:\n lsubc_ = None\n else:\n try:\n lsubc_ = memoryview(lsubc)\n except TypeError:\n try:\n _tmparr_lsubc = array.array(\"i\",lsubc)\n except TypeError:\n raise TypeError(\"Argument lsubc has wrong type\")\n else:\n lsubc_ = memoryview(_tmparr_lsubc)\n \n else:\n if lsubc_.format != \"i\":\n lsubc_ = memoryview(array.array(\"i\",lsubc))\n \n if lsubc_ is not None and len(lsubc_) != (lensubnval_):\n raise ValueError(\"Array argument lsubc has wrong length\")\n if lvalc is None: raise TypeError(\"Invalid type for argument lvalc\")\n if lvalc is None:\n lvalc_ = None\n else:\n try:\n lvalc_ = memoryview(lvalc)\n except TypeError:\n try:\n _tmparr_lvalc = array.array(\"d\",lvalc)\n except TypeError:\n raise TypeError(\"Argument lvalc has wrong type\")\n else:\n lvalc_ = memoryview(_tmparr_lvalc)\n \n else:\n if lvalc_.format != \"d\":\n lvalc_ = memoryview(array.array(\"d\",lvalc))\n \n if lvalc_ is not None and len(lvalc_) != (lensubnval_):\n raise ValueError(\"Array argument lvalc has wrong length\")\n if b is None: raise TypeError(\"Invalid type for argument b\")\n _copyback_b = False\n if b is None:\n b_ = None\n else:\n try:\n b_ = memoryview(b)\n except TypeError:\n try:\n _tmparr_b = array.array(\"d\",b)\n except TypeError:\n raise TypeError(\"Argument b has wrong type\")\n else:\n b_ = memoryview(_tmparr_b)\n _copyback_b = True\n else:\n if b_.format != \"d\":\n b_ = memoryview(array.array(\"d\",b))\n _copyback_b = True\n if b_ is not None and len(b_) != 
(n_):\n raise ValueError(\"Array argument b has wrong length\")\n res = self.__obj.sparsetriangularsolvedense(transposed_,n_,lnzc_,lptrc_,lensubnval_,lsubc_,lvalc_,b_)\n if res != 0:\n raise Error(rescode(res),\"\")\n if _copyback_b:\n b[:] = _tmparr_b",
"def householder_ls(A, b):\n m, n = A.shape\n Ahat = np.zeros((m, n+1))\n Ahat[:,:n] = 1.0*A\n Ahat[:, n] = 1.0*b\n\n Rhat = householder(Ahat)\n x = solve_triangular(Rhat[:n,:n], Rhat[:n,n])\n\n return x",
"def LUsolve(a,b):\n b=float64(b)\n n=len(b)\n LU=LUdecomp(a)\n y=zeros((n,1))\n x=zeros((n,1))\n y[0]=b[0]\n for i in range(1,n):\n sum=b[i]\n for j in range(i):\n sum=sum-LU[i][j]*y[j]\n y[i]=sum\n x[n-1]=float(y[n-1])/LU[n-1][n-1]\n for i in range(n-2,-1,-1):\n sum=y[i]\n for j in range(i+1,n):\n sum=sum-LU[i][j]*x[j]\n x[i]=float(sum)/LU[i][i]\n return x",
"def solveU(U, b):\n # validate input\n if np.allclose(U,np.triu(U))==False or np.linalg.det == 0:\n raise TypeError(\"U is not an upper regular triangular matrix\")\n \n elif len(U.shape) != 2 or len(b.shape) != 1:\n raise TypeError(\"unsuitable object\")\n \n else:\n un, um = U.shape\n n, = b.shape\n if un != um or un != n:\n raise TypeError((\"dimensions do not fullfill requirements\"))\n\n # solve \n x = np.zeros(n, dtype=complex)\n x[-1] = (b[-1]) / U[n - 1, n - 1]\n for i in range(1, n):\n t = U[(n - (i + 1)):(n - i)] @ x\n x[-(i + 1)] = (b[-(i + 1)] - t) / U[n - (i + 1), n - (i + 1)]\n\n return x",
"def rothesstri(A, b):\n n = shape(A)[0]\n A = hstack([A, b])\n for k in range(n-1):\n r = linalg.norm([ A[k , k] , A[k + 1, k] ])\n if r>0:\n c=A[k, k]/r; s=A[k + 1, k]/r\n A[[k, k + 1],(k + 1):(n + 1)]=[[c, s],[-s, c]]*A[[k, k + 1],(k + 1):(n + 1)]\n A[k, k] = r; A[k+1,k] = 0\n z = A[:, n].copy()\n rbacksolve(A[:, :n], z, n)\n return z",
"def solve(self, A, B):\n return tf.matrix_solve_ls(matrix=A, rhs=B)",
"def eigh(a, b):\n a = symmetrize(a)\n b = symmetrize(b)\n b_inv_a = jax.scipy.linalg.cho_solve(jax.scipy.linalg.cho_factor(b), a)\n v, w = jax.jit(jax.numpy.linalg.eig, backend=\"cpu\")(b_inv_a)\n v = v.real\n # with loops.Scope() as s:\n # for _ in s.cond_range(jnp.isrealobj)\n if jnp.isrealobj(a) and jnp.isrealobj(b):\n w = w.real\n # reorder as ascending in w\n order = jnp.argsort(v)\n v = v.take(order, axis=0)\n w = w.take(order, axis=1)\n # renormalize so v.H @ b @ H == 1\n norm2 = jax.vmap(lambda wi: (wi.conj() @ b @ wi).real, in_axes=1)(w)\n norm = jnp.sqrt(norm2)\n w = w / norm\n w = standardize_angle(w, b)\n return v, w",
"def test_triu_tril():\n A = np.array(\n [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 16, 16]]\n )\n A_u = np.array([[1, 2, 3, 4], [0, 6, 7, 8], [0, 0, 11, 12], [0, 0, 0, 16]])\n A_l = np.array(\n [[1, 0, 0, 0], [5, 6, 0, 0], [9, 10, 11, 0], [13, 14, 16, 16]]\n )\n assert norm(A_u - triu(A)) < 1e-12\n assert norm(A_l - tril(A)) < 1e-12\n assert (\n norm(scipy.sparse.coo_matrix(A_u) - triu(scipy.sparse.coo_matrix((A))))\n < 1e-12\n )\n assert (\n norm(scipy.sparse.coo_matrix(A_l) - tril(scipy.sparse.coo_matrix((A))))\n < 1e-12\n )",
"def triangulation(P1, P2, x1, x2):\n A = np.zeros((4, 4))\n p1_1 = P1[0, :]\n p1_2 = P1[1, :]\n p1_3 = P1[2, :]\n \n p2_1 = P2[0, :]\n p2_2 = P2[1, :]\n p2_3 = P2[2, :]\n \n A[0,:] = x1[0] * p1_3 - p1_1\n A[1,:] = x1[1] * p1_3 - p1_2\n A[2,:] = x2[0] * p2_3 - p2_1\n A[3,:] = x2[1] * p2_3 - p2_2\n \n U, S, Vt = np.linalg.svd(A)\n #Vt=Vt.T\n x = Vt[:, -1]\n x[0] /= x[3]\n x[1] /= x[3]\n x[2] /= x[3]\n return x[:3]",
"def solve(self, b: Array, check_finite: Optional[bool] = None) -> Array:\n if check_finite is None:\n check_finite = self.check_finite\n if self.cho_factor:\n fact_solve = lambda x: jsl.cho_solve(self.factor, x, check_finite=check_finite)\n else:\n fact_solve = lambda x: jsl.lu_solve(self.factor, x, trans=0, check_finite=check_finite)\n\n if b.ndim == 1:\n D = self.D\n else:\n D = self.D[:, snp.newaxis]\n N, M = self.A.shape\n if N < M and self.D.ndim == 1:\n w = fact_solve(self.A @ (b / D))\n x = (b - (self.A.T.conj() @ w)) / D\n else:\n x = fact_solve(b)\n\n return x",
"def reciprocal_lattice_vectors(a):\n b = np.zeros(shape=(3,3))\n b[:,0] = 2 * np.pi * np.cross(a[:,1], a[:,2]) / triple_product(a[:,0], a[:,1], a[:,2])\n b[:,1] = 2 * np.pi * np.cross(a[:,2], a[:,0]) / triple_product(a[:,1], a[:,2], a[:,0])\n b[:,2] = 2 * np.pi * np.cross(a[:,0], a[:,1]) / triple_product(a[:,2], a[:,0], a[:,1])\n return b",
"def linear_triangulation(p1, p2, m1, m2):\n num_points = p1.shape[1]\n res = np.ones((4, num_points))\n\n for i in range(num_points):\n A = np.asarray([\n (p1[0, i] * m1[2, :] - m1[0, :]),\n (p1[1, i] * m1[2, :] - m1[1, :]),\n (p2[0, i] * m2[2, :] - m2[0, :]),\n (p2[1, i] * m2[2, :] - m2[1, :])\n ])\n\n _, _, V = np.linalg.svd(A)\n X = V[-1, :4]\n res[:, i] = X / X[3]\n\n return res",
"def BarycentricToVTKTriangle(b, ref):\n return _vtk.BarycentricToVTKTriangle(b, ref)"
] | [
"0.667923",
"0.65151525",
"0.6280545",
"0.61302644",
"0.6106955",
"0.6083843",
"0.6023184",
"0.5951549",
"0.59446204",
"0.5920176",
"0.5916783",
"0.5853773",
"0.58434296",
"0.5821499",
"0.5807036",
"0.57871747",
"0.57675123",
"0.5766424",
"0.5704925",
"0.56889075",
"0.5638771",
"0.56275886",
"0.56245196",
"0.56026655",
"0.5590404",
"0.5563809",
"0.5551948",
"0.5548098",
"0.5510757",
"0.5509732"
] | 0.77219164 | 0 |
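Illustrative note (not part of the dataset records): a quick sanity check of the pure-Python solve_triangular from the record above against NumPy's general solver on ordinary floats. The function body is copied from the record; the test matrix and right-hand side are made up for the demonstration.

import numpy as np

def solve_triangular(a, b, lower=False):
    # Copied from the record above: forward/back substitution written so that
    # it also works when a or b hold object-dtype entries (e.g. gvar values).
    x = np.copy(b)
    a = a.reshape(a.shape + (1,) * len(x.shape[1:]))
    if lower:
        x[0] /= a[0, 0]
        for i in range(1, len(x)):
            x[i:] -= x[i - 1] * a[i:, i - 1]
            x[i] /= a[i, i]
    else:
        x[-1] /= a[-1, -1]
        for i in range(len(x) - 1, 0, -1):
            x[:i] -= x[i] * a[:i, i]
            x[i - 1] /= a[i - 1, i - 1]
    return x

# Well-conditioned lower-triangular system; the result should match np.linalg.solve.
L = np.tril(np.random.rand(4, 4)) + 4.0 * np.eye(4)
b = np.random.rand(4)
assert np.allclose(solve_triangular(L, b, lower=True), np.linalg.solve(L, b))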
API endpoint to get a chart for power data which can be embedded in web pages. | def get_power_chart(chart_request):
data = get_power_data(
resource=chart_request["resource"],
show_consumption_as_positive=chart_request["show_consumption_as_positive"],
showing_individual_traces_for=chart_request["show_individual_traces_for"],
metrics={}, # will be stored here, we don't need them for now
query_window=(chart_request["start_time"], chart_request["end_time"]),
resolution=chart_request["resolution"],
forecast_horizon=chart_request["forecast_horizon"],
)
figure = make_power_figure(
resource_display_name=chart_request["resource"],
data=data[0],
forecast_data=data[1],
schedule_data=data[2],
show_consumption_as_positive=chart_request["show_consumption_as_positive"],
shared_x_range=None,
sizing_mode="scale_both",
)
return json_item(figure) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def chart(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"chart\")",
"def linedata():\n get_values = request.args\n pc = get_values.get('pc') is not None # Per Capita\n gr = get_values.get('gr') is not None # Growth Rate\n place_args, _ = get_place_args(get_values)\n plot_data, _ = datachart_handler.get_plot_data(place_args, pc, gr)\n return json.dumps(plot_data)",
"def display_chart(stock_id):\n\n api_key = os.environ.get(\"AV_API_KEY\", \"\")\n\n chart_x = [\"10\", \"10:30\", \"11\", \"11:30\", \"12\", \"12:30\", \"13\", \"13:30\",\n \"14\", \"14:30\", \"15\", \"15:30\", \"16\"]\n\n data = []\n daily_graph = []\n \n tz = timezone(\"EST\")\n today = datetime.datetime.now(tz)\n\n if today.weekday() == 5:\n day = today - datetime.timedelta(days=1)\n elif today.weekday() == 6:\n day = today - datetime.timedelta(days=2)\n else:\n day = today\n\n day = day.strftime(\"%Y-%m-%d\")\n\n payload={\"function\": \"TIME_SERIES_INTRADAY\",\n \"symbol\": stock_id,\n \"interval\": \"30min\",\n \"apikey\": api_key}\n\n url = requests.get(\"https://www.alphavantage.co/query\", params=payload)\n open_page = url.json()\n time_series = open_page.get(\"Time Series (30min)\")\n chart_y = []\n\n if time_series:\n for item in time_series.keys():\n if day in item:\n new_dict = time_series[item]\n data_points = new_dict[\"4. close\"]\n chart_y.append(data_points)\n\n chart_y.reverse()\n\n symbol_d = {\"symbol\": stock_id, \"data\": []}\n\n for x, y in zip(chart_x, chart_y):\n symbol_d[\"data\"].append({\"x\": x, \"y\": y})\n\n daily_graph.append(symbol_d)\n\n return jsonify(daily_graph)",
"def get_demo_chart() -> Chart:\n chart = ChartSampleGenerator.create_chart()\n return chart",
"def DownloadWeeklyChart(self, week):\n print 'Downloading chart for week of %s' % week\n url = ('http://www.boxofficemojo.com/weekly/chart/?yr=2000&wk=%d&p=.htm'\n % self.GetWeekNum(week))\n response = requests.get(url, headers=self.REQUEST_HEADERS)\n time.sleep(2)\n return response.content",
"def chart(request):\n assert isinstance(request, HttpRequest)\n filename = 'ppg_RawDataSheet13.mat'\n subtitle = 'VerityDB/' + filename\n return render(\n request,\n 'research/chart.html',\n {\n 'title':'Chart',\n 'message':'Highcharts Based',\n 'year':datetime.now().year,\n #'data': content['val'][0:11]\n 'temp': models.load_data(),\n 'test': models.load_data_filename(filename),\n 'subtitle_text': subtitle,\n }\n )",
"def get_chart_data(chart_id):\n start = request.args.get('start')\n end = request.args.get('end')\n data = {}\n\n chart = Chart.query.get(chart_id)\n if chart and start and end:\n data = chart.data_points(\n min_date=start,\n max_date=end,\n ds_format=True,\n )\n return jsonify(**data)",
"def line_chart():\n\n user_id = session[\"user_id\"]\n filter_name = request.args.get('filter_name')\n print(\"filter name is\", filter_name)\n\n user = User.query.filter_by(user_id=user_id).one()\n user_goal_oz = User.calculate_user_intake(user.weight, user.age)\n\n goal_multiplier = {\n 'months': 30,\n 'weeks': 7,\n 'days': 1,\n }\n\n user_goal_oz = user_goal_oz*goal_multiplier[filter_name]\n\n time_parameter, qty = chart_query(user_id, filter_name)\n print(chart_query)\n\n return jsonify(user_goal_oz=user_goal_oz, time_parameter=time_parameter, qty=qty)",
"def index():\n return render_template(\"charts.html\")",
"def return_chart_json(chart):\n return Response(response=chart.to_json_chartable(), status=200, mimetype=\"application/json\")",
"def chart1(request):\n\n full_url = HttpRequest.build_absolute_uri(request)\n relative = HttpRequest.get_full_path(request)\n\n base_url = full_url[:-len(relative)]\n\n request_amount = ['10', '100', '200', '500', '1000']\n\n json_urls = list()\n xml_urls = list()\n\n for x in request_amount:\n json_urls.append(reverse('objects:leads_json', args=[x]))\n xml_urls.append(reverse('objects:leads_xml', args=[x]))\n\n json_data = list()\n xml_data = list()\n\n for x in json_urls:\n json_average=0\n for i in range (0,5):\n start = time.perf_counter()\n req = requests.get(base_url + x)\n end = time.perf_counter()\n json_average += (end-start)\n json_data.append((json_average)/5)\n\n for x in xml_urls:\n xml_average=0\n for i in range(0,5):\n start = time.perf_counter()\n req = requests.get(base_url + x)\n end = time.perf_counter()\n xml_average+=(end-start)\n xml_data.append((xml_average)/5)\n\n final_data = {\n 'labels': request_amount,\n 'datasets': [\n {\n 'label': 'JSON',\n 'backgroundColor': 'rgba(255, 99, 132, 0.2)',\n 'borderColor': 'rgba(255,99,132,1)',\n 'data': json_data,\n 'borderWidth': 2,\n 'yAxisID': 'first-y-axis'\n },\n {\n 'label': 'XML',\n 'backgroundColor': 'rgba(54, 162, 235, 0.2)',\n 'borderColor': 'rgba(54, 162, 235, 1)',\n 'data': xml_data,\n 'borderWidth': 2,\n 'yAxisID': 'first-y-axis'\n }\n ]\n }\n\n return JsonResponse(final_data)",
"def price_chart_json(request, prod_id):\n prices = Product.objects.get(prod_id=prod_id).prices\n labels = []\n data = []\n for price in prices:\n labels.append(price.updated_at.strftime('%Y-%m-%d %H:%M'))\n data.append(price.price_discounted)\n output = {\n 'labels': labels,\n 'datasets': [\n {\n 'data': data,\n 'label': prod_id,\n 'name': prod_id,\n }\n ]\n }\n return JsonResponse(output)",
"def charts(self,req):\n self.player.overviewing=True",
"def daily_reports_chart(env):\n certname = request.args.get('certname')\n result = get_or_abort(\n get_daily_reports_chart,\n db=puppetdb,\n env=env,\n days_number=app.config['DAILY_REPORTS_CHART_DAYS'],\n certname=certname,\n )\n return jsonify(result=result)",
"def charts():\n\n global show_gaps\n global timespan\n\n form = ChartForm(\n request.form,\n graph_type=timespans.index(timespan),\n graph_gaps=show_gaps\n )\n\n if request.method == 'POST':\n if form.submit_button.data:\n timespan = timespans[int(form.graph_type.data)]\n show_gaps = form.graph_gaps.data\n else:\n flash('Unknown Event', 'error')\n\n chart = Chart(app)\n data_values1, data_values2, data_values3, data_labels = \\\n chart.get_data(timespan, show_gaps)\n\n if len(data_values3) > 0:\n cb = np.array(data_values3)\n peaks = peakutils.indexes(cb, thres=0.02 / max(cb), min_dist=5)\n\n starts_total = len(peaks)\n starts_per_h = int(round(float(starts_total) / \\\n float(hourtable[timespan]), 0))\n else:\n starts_total = 0\n starts_per_h = 0\n\n return render_template(\n 'charts.html',\n form=form,\n user=current_user,\n values1=data_values1,\n values2=data_values2,\n values3=data_values3,\n labels=data_labels,\n burner_total=starts_total,\n burner_ph=starts_per_h,\n )",
"def chart2(request):\n full_url = HttpRequest.build_absolute_uri(request)\n relative = HttpRequest.get_full_path(request)\n\n base_url = full_url[:-len(relative)]\n\n request_amount = ['10', '100', '200', '500', '1000']\n\n json_content = list()\n xml_content = list()\n\n for x in request_amount:\n json_content.append(requests.get(base_url + reverse('objects:leads_json', args=[x])).text)\n xml_content.append(requests.get(base_url + reverse('objects:leads_xml', args=[x])).text)\n\n response = {\n 'json': json_content,\n 'xml': xml_content\n }\n\n return JsonResponse(response)",
"def prophet():\n if request.method == 'POST':\n return jsonify(message='Post message')\n elif request.method == 'GET':\n data = plots.prophet_df\n\n data['ds'] = data['ds'].astype(str)\n data = {'ds': data['ds'].tolist(),\n 'yhat': data['yhat'].tolist(),\n 'yhat_lower': data['yhat_lower'].tolist(),\n 'yhat_upper': data['yhat_upper'].tolist(),\n 'y_actual': data['y_actual'].tolist()\n }\n\n return jsonify(isError=False,\n message=\"Success\",\n statusCode=200,\n data=data), 200",
"def get_stock_info(request):\n if request.method == 'GET':\n\n dailyParams = {\n 'symbol': request.query_params.get('symbol'),\n 'function': 'TIME_SERIES_INTRADAY',\n 'interval': '30min',\n 'apikey': request.query_params.get('apikey'),\n 'outputsize': 'full',\n }\n\n historicParams = {\n 'symbol': request.query_params.get('symbol'), # request.query_params.symbol\n 'function': 'TIME_SERIES_DAILY',\n 'apikey': request.query_params.get('apikey'),\n 'outputsize': 'full',\n }\n\n dailyData = requests.get(\n 'https://www.alphavantage.co/query?',\n params=dailyParams,\n )\n dailyFormated = format_data(\n json.loads(dailyData.content.decode('utf-8')),\n \"Time Series (30min)\",\n '%H:%M:%S'\n )\n\n historicData = requests.get(\n 'https://www.alphavantage.co/query?',\n params=historicParams,\n )\n historicFormated = format_data(\n json.loads(historicData.content.decode('utf-8')),\n \"Time Series (Daily)\",\n '%Y-%m-%d'\n )\n\n # historicOrdered = arr.array('i', historicFormated)\n # Make calcs, categorize time data into slices,\n # add kpis, and package all together\n\n # Response = {\n # 'data': {\n # 'daily': dailyFormated,\n # 'historic': historicFormated,\n # },\n # 'kpis': {\n # 'PE': 5,\n # },\n # 'request': {'method': request.method,\n # 'path': request.path,\n # 'params': request.query_params,\n # },\n #\n # }\n\n if historicData.status_code == 200 and dailyData.status_code == 200:\n return Response({\n 'daily': dailyFormated,\n 'historic': {\n 'fiveDays': historicFormated[:5],\n 'month': historicFormated[:30],\n 'sixMonths': historicFormated[:180],\n 'year': historicFormated[:365],\n 'fiveYears': historicFormated[:1825],\n 'max': historicFormated,\n },\n 'kpis': {\n 'open': 120,\n 'close': dailyFormated[-1],\n 'PE': 5,\n },\n 'request': {'method': request.method,\n 'path': request.path,\n 'params': request.query_params,\n },\n\n })\n else:\n return None",
"def webservice_data_chart(request):\n current_date = datetime.today()\n active_type_of_services = TypeOfService.objects.filter(active=True)\n months = [i for i in range(1, 13)]\n data = {\n 'series': [],\n 'labels': settings.CHART_MONTHS_LABELS,\n 'colors': [type_service.color for type_service in active_type_of_services]\n }\n\n for type_service in active_type_of_services:\n services_count = []\n for month in months:\n services_count.append(\n WebService.objects.filter(\n date__month=month,\n date__year=current_date.year,\n type_of_service=type_service\n ).values('id').count()\n )\n data['series'].append({\n \"name\": type_service.name,\n \"data\": services_count,\n })\n\n return JsonResponse(data)",
"def my_charts(request):\n\n logger.debug('called')\n\n context = {}\n\n simulations = request.user.simulations.all().exclude(\n name__icontains=settings.STANDARD_CHART_NAME\n ).select_related(\n 'fight_style',\n 'result',\n 'simulation_type',\n 'wow_class',\n 'wow_spec',\n 'queue',\n )\n\n context['charts'] = simulations\n\n return render(request, 'general_website/my_charts.html', context=context)",
"def my_charts(page_num=1):\n # Download charts that belong to the current user\n charts = Chart.query.filter_by(owner_id=current_user.id).paginate(page_num)\n return render_template('reports/my_charts.html', charts=charts)",
"def get_stocks(request):\n if request.method == 'GET':\n\n data = requests.get(\n 'https://www.alphavantage.co/query?',\n params=request.query_params\n )\n\n return Response({'data': json.loads(data.content.decode('utf-8')),\n 'request': {'method': request.method,\n 'path': request.path,\n 'params': request.query_params,\n },\n })",
"def _get_highcharts_data(self, data_product_id='', visualization_parameters=None):\n\n # An empty dict is returned in case there is no data in coverage\n empty_hc = []\n\n # error check\n if not data_product_id:\n raise BadRequest(\"The data_product_id parameter is missing\")\n\n use_direct_access = False\n if visualization_parameters == {}:\n visualization_parameters = None\n\n # Extract the parameters. Definitely init first\n query = None\n if visualization_parameters:\n #query = {'parameters':[]}\n query = {}\n # Error check and damage control. Definitely need time\n if 'parameters' in visualization_parameters and len(visualization_parameters['parameters']) > 0:\n if not 'time' in visualization_parameters['parameters']:\n visualization_parameters['parameters'].append('time')\n\n query['parameters'] = visualization_parameters['parameters']\n\n # The times passed from UI are system times so convert them to NTP\n if 'start_time' in visualization_parameters:\n query['start_time'] = int(visualization_parameters['start_time'])\n\n if 'end_time' in visualization_parameters:\n query['end_time'] = int((visualization_parameters['end_time']))\n\n # stride time\n if 'stride_time' in visualization_parameters:\n try:\n query['stride_time'] = int(visualization_parameters['stride_time'])\n except TypeError: \n # There are some (rare) situations where the AJAX request has 'null' in the request\n # Example:\n # {\"query_type\":\"google_dt\",\"parameters\":[],\"start_time\":-2208988800,\"end_time\":-2208988800,\"stride_time\":null,\"use_direct_access\":0}\n query['stride_time'] = 1\n else:\n query['stride_time'] = 1\n\n # direct access parameter\n if 'use_direct_access' in visualization_parameters:\n if (int(visualization_parameters['use_direct_access']) == 1):\n use_direct_access = True\n else:\n use_direct_access = False\n\n # get the dataset_id and objs associated with the data_product. 
Need it to do the data retrieval\n ds_ids,_ = self.clients.resource_registry.find_objects(data_product_id, PRED.hasDataset, RT.Dataset, True)\n\n if ds_ids is None or not ds_ids:\n raise NotFound(\"Could not find dataset associated with data product\")\n stream_def_ids, _ = self.clients.resource_registry.find_objects(data_product_id, PRED.hasStreamDefinition, id_only=True)\n if not stream_def_ids:\n raise NotFound('Could not find stream definition associated with data product')\n stream_def_id = stream_def_ids[0]\n try:\n\n if use_direct_access:\n retrieved_granule = DataRetrieverService.retrieve_oob(ds_ids[0], query=query, delivery_format=stream_def_id)\n else:\n #replay_granule = self.clients.data_retriever.retrieve(ds_ids[0],{'start_time':0,'end_time':2})\n retrieved_granule = self.clients.data_retriever.retrieve(ds_ids[0], query=query, delivery_format=stream_def_id)\n except BadRequest:\n dp = self.container.resource_registry.read(data_product_id)\n log.exception('Problem visualizing data product: %s (%s).\\n_get_highcharts_data(data_product_id=\"%s\", visualization_parameters=%s)', dp.name, data_product_id, data_product_id, visualization_parameters)\n raise\n\n # If thereis no data, return an empty dict\n if retrieved_granule is None:\n return simplejson.dumps(empty_hc)\n\n # send the granule through the transform to get the google datatable\n hc_pdict_id = self.clients.dataset_management.read_parameter_dictionary_by_name('highcharts',id_only=True)\n hc_stream_def = self.clients.pubsub_management.create_stream_definition('HighCharts_out', parameter_dictionary_id=hc_pdict_id)\n\n hc_data_granule = VizTransformHighChartsAlgorithm.execute(retrieved_granule, params=hc_stream_def, config=visualization_parameters)\n\n if hc_data_granule == None:\n return simplejson.dumps(empty_hc)\n\n hc_rdt = RecordDictionaryTool.load_from_granule(hc_data_granule)\n # Now go through this redundant step of converting the hc_data into a non numpy version\n hc_data_np = (get_safe(hc_rdt, \"hc_data\"))[0]\n hc_data = []\n\n for series in hc_data_np:\n s = {}\n for key in series:\n if key == \"data\":\n s[\"data\"] = series[\"data\"].tolist()\n continue\n s[key] = series[key]\n hc_data.append(s)\n\n # return the json version of the table\n return json.dumps(hc_data)",
"def chart(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"chart\")",
"def graph():\n # Try to get params request\n params = extract_variables(['start_time', 'end_time', 'sensor_id'], request)\n # Fetch data from database\n results = query_climate_range(**params)\n\n # Turn it in to lists which can be graphed\n dates = []\n humids = []\n temps = []\n pressures = []\n for result in results:\n dates.append(datetime.datetime.fromtimestamp(result['time']))\n humids.append(result['humid'])\n temps.append(result['temp'])\n pressures.append(result['pressure'])\n\n # Graph it\n fig = Figure()\n # First y axis (temp and humid)\n axis = fig.add_subplot(1, 1, 1)\n # Plot humidity and temp on the same scale\n axis.plot_date(dates, humids, '-', color=COLORS['blue'])\n axis.plot_date(dates, temps, '-', color=COLORS['red'])\n axis.xaxis.set_major_formatter(DateFormatter('%d/%m/%y %H:%M'))\n axis.set_ylabel('Humidity in % & Temps in C')\n axis.set_xlabel('Time')\n # Second y axis (pressure)\n axis_pressure = axis.twinx()\n # Plot pressure\n axis_pressure.plot_date(dates, pressures, '-', color=COLORS['green'])\n axis_pressure.xaxis.set_major_formatter(DateFormatter('%d/%m/%y %H:%M'))\n axis_pressure.set_ylabel('Pressure in mbar')\n # Configure the figure\n fig.autofmt_xdate()\n fig.legend(['Humidity', 'Temperature', 'Pressure'], loc='lower right')\n fig.set_tight_layout(True)\n canvas = FigureCanvas(fig)\n # Save output\n png_output = BytesIO()\n canvas.print_png(png_output)\n\n # Create the response and send it\n response = make_response(png_output.getvalue())\n response.headers['Content-Type'] = 'image/png'\n return response",
"def GetChart(self, *args, **kwargs):\n chart = TestChart(*args, **kwargs)\n chart.display = TestEncoder(chart)\n return chart",
"def charts(self):\n return self.container['charts']",
"def get_chartshow_html(year, month, day):\n u = CHARTSHOW_URL.format(year=year, month=str(month).zfill(2), day=str(day).zfill(2))\n return urlopen(u, timeout=60).read()",
"def getchart(request):\n data = ast.literal_eval(request.body.decode('utf-8'))\n data['user'] = request.user\n data = newchart(data)\n return JsonResponse(data)",
"def get_charts(self, period=\"d\", size=\"l\", chart_type=\"c\", ta=\"1\"):\n\n encoded_payload = urlencode(\n {\"ty\": chart_type, \"ta\": ta, \"p\": period, \"s\": size}\n )\n\n sequential_data_scrape(\n scrape.download_chart_image,\n [\n f\"https://finviz.com/chart.ashx?{encoded_payload}&t={row.get('Ticker')}\"\n for row in self.data\n ],\n self._user_agent,\n )"
] | [
"0.6277182",
"0.61903876",
"0.61657965",
"0.6133741",
"0.612451",
"0.6118392",
"0.60309595",
"0.6029583",
"0.6028871",
"0.6020951",
"0.5977664",
"0.5958139",
"0.59504807",
"0.59448594",
"0.59348613",
"0.59333634",
"0.5929623",
"0.587116",
"0.58248526",
"0.5816467",
"0.58146614",
"0.58130044",
"0.57734025",
"0.5764709",
"0.5724622",
"0.5703772",
"0.5700048",
"0.56832033",
"0.5653402",
"0.563511"
] | 0.73452485 | 0 |
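Illustrative note (not part of the dataset records): the endpoint in the record above returns json_item(figure) from Bokeh, which is what makes the chart embeddable in a web page. A minimal sketch of that flow, assuming only standard Bokeh; the figure contents here are placeholders standing in for make_power_figure(...), and on the page the returned payload would be handed to Bokeh.embed.embed_item(...) in JavaScript.

import json
from bokeh.plotting import figure
from bokeh.embed import json_item

fig = figure(title="power")               # placeholder figure, not the real power chart
fig.line(x=[0, 1, 2], y=[0.4, 0.7, 0.5])  # placeholder data

payload = json_item(fig)                  # same kind of payload the endpoint above returns
print(json.dumps(payload)[:80], "...")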
On initialization the ListProcessor receives the full list of all waypoints. This list is held in memory, so the ListProcessor has access to the whole list of waypoints at all times during the trip extraction process. | def __init__(self, waypoints: Tuple[Waypoint]):
self._waypoints = waypoints | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _build_legs(self):\n if self._primary_mode == 'transit':\n for transit_leg in self._best_trip.get_transit_legs():\n self._legs.append(transit_leg.get_directions())\n else:\n self._legs.append(self._best_trip.get_directions())",
"def __init__(self):\n\n # Set a node name - something relevant\n rospy.init_node('waypoint_updater')\n\n # Most recent pose\n self.pose = None\n\n # Map waypoint list \n self.waypoints = None\n\n # Map waypoint list xy only \n self.waypoints_2d = None\n\n # Map waypoint list xy only as KDTree\n self.waypoint_tree = None\n\n # Index at which to stop the vehicle\n # Negative one is a sentinel meaning no stop is required\n self.stopline_waypoint_idx = -1\n\n # Add subscriptions and handlers for relevant messages\n rospy.Subscriber('/base_waypoints', Lane, self.base_waypoints_cb)\n rospy.Subscriber('/current_pose', PoseStamped, self.current_pose_cb)\n rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_waypoint_cb)\n\n # Create publisher for final waypoints\n self.final_waypoints_pub = rospy.Publisher('/final_waypoints', Lane, queue_size=1)\n\n # Start loop\n self.loop()",
"def init_plist(self, passenger_list):\n\n # Init empty list to hold Passenger objects.\n plist = []\n\n # Iterate through the flight manifest.\n for passenger in passenger_list:\n\n # Grab attributes.\n pid, flight_num, first_name, \\\n last_name, birthdate, nationality, service_time = passenger\n\n # Init and append Passenger object with attributes.\n plist.append(Passenger(pid,\n flight_num,\n self.arrival_time,\n first_name,\n last_name,\n birthdate,\n nationality,\n service_time))\n\n # Increment passenger count.\n if plist[-1].nationality == 'domestic':\n self.num_dom_passengers += 1\n else:\n self.num_intl_passengers += 1\n\n # Return list of instantiated Passenger objects.\n return plist",
"def getWaypoints(self):\n return self.listener.waypoints",
"def __init__(self):\n self._distance_data = []\n self._location_data = []\n self._package_data = []",
"def base_waypoints_cb(self, waypoints):\n\n # Save the waypoint list\n self.waypoints = waypoints\n\n # If waypoints_2d hasn't been initialized...\n if not self.waypoints_2d:\n\n # Extract xy coordinates from the waypoint list\n self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]\n\n # Construct a KDTree from the xy coordinate list to allow fast lookup \n self.waypoint_tree = KDTree(self.waypoints_2d)",
"def base_waypoints_cb(self, waypoints):\n\n # Save the waypoint list\n self.waypoints = waypoints\n\n # If waypoints_2d hasn't been initialized...\n if not self.waypoints_2d:\n\n # Extract xy coordinates from the waypoint list\n self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]\n\n # Construct a KDTree from the xy coordinate list to allow fast lookup \n self.waypoint_tree = KDTree(self.waypoints_2d)",
"def read_waypoints():\n\tfilename = \"waypoints.txt\"\n\tfile = open(filename, \"r\")\n\twp_list = []\n\n\tfor line in file:\n\t\t# Get the individual elements, splitting by whitespace\n\t\tdata_list = line.split()\n\t\tcoordinate = {'x': data_list[0], 'y': data_list[1], 'z': data_list[2]}\n\t\twaypoint = {'radius': data_list[3], 'point': coordinate}\n\n\t\twp_list.append (waypoint)\n\n\treturn wp_list",
"def buildWayPointList(dom):\n waypoints = []\n for wpt in dom.getElementsByTagName(\"wpt\"):\n (lat,lon, t, ele, name, description) = parsePoint(wpt)\n waypoints.append(myPyGPX.WayPoint(lat, lon, ele, name, description))\n return waypoints",
"def road_list(self):\n return self._road_list",
"def directions(self):\n return []",
"def waypoints_cb(self, msg):\n rospy.loginfo(rospy.get_name() + ': waypoints received')\n self.base_waypoints = msg.waypoints",
"def gps_points(self):\n return self.waypoints.all().order_by('localtime')\n # return self.waypoints.filter(photo_id__isnull=True).order_by('localtime')",
"def __init__(self):\n self.step_list = [steps.Raw()]",
"def __init__(self, trip_update, stops, position_in_list):\n self.trip_update = trip_update\n self.stops = stops\n self.routeID = str(self.trip_update.trip.route_id)\n # A minor quirk in the MTA's data is fixed here. S trains were listed as GS for some reason\n if self.routeID == \"GS\":\n self.routeID = \"S\"\n self.index = position_in_list",
"def waypoints(self):\n\t\treturn [Star(star_id, galaxy=self.galaxy) for delay, star_id, order, num_ships in self.data.o]",
"def _retrieve_options(list_waypoints, current_waypoint):\n options = []\n for next_waypoint in list_waypoints:\n # this is needed because something we are linking to\n # the beggining of an intersection, therefore the\n # variation in angle is small\n next_next_waypoint = next_waypoint.next(3.0)[0]\n link = _compute_connection(current_waypoint, next_next_waypoint)\n options.append(link)\n\n return options",
"def on_global_trajectory(self, msg):\n self._logger.debug('@{}: global trajectory has {} waypoints'.format(\n msg.timestamp, len(msg.data)))\n if len(msg.data) > 0:\n # The last waypoint is the goal location.\n self._goal_location = msg.data[-1][0].location\n else:\n # Trajectory does not contain any waypoints. We assume we have\n # arrived at destionation.\n self._goal_location = self._vehicle_transform.location\n assert self._goal_location, 'Planner does not have a goal'\n self._waypoints = deque()\n for waypoint_option in msg.data:\n self._waypoints.append(waypoint_option[0])\n self._prev_waypoints = self._waypoints",
"def waypoints_cb(self, msg):\n t = time.time()\n waypoints = msg.waypoints\n num_wp = len(waypoints)\n\n if self.base_waypoints and self.next_waypoint is not None:\n # Normally we assume that waypoint list doesn't change (or, at least, not\n # in the position where the car is located). If that happens, just handle it.\n if not self.is_same_waypoint(self.base_waypoints[self.next_waypoint],\n waypoints[self.next_waypoint]):\n self.next_waypoint = None # We can't assume previous knowledge of waypoint\n self.base_waypoints = None # Just for debugging. Will be updated later\n rospy.logwarn(\"Base waypoint list changed\")\n else:\n # No change. We could probably return here.\n pass\n\n \"\"\"\n # -- Uncomment for debugging\n # Stamp waypoint index in PoseStamped and TwistStamped headers of internal messages\n for idx in range(len(waypoints)):\n waypoints[idx].pose.header.seq = idx\n waypoints[idx].twist.header.seq = idx\n \"\"\"\n\n self.base_wp_orig_v = [self.get_waypoint_velocity(waypoints, idx) for idx in range(num_wp)]\n\n if debugging and not self.base_waypoints:\n dist = self.distance(waypoints, 0, num_wp-1)\n rospy.loginfo(\"Received: %d waypoints, %.1f m, %.1f m/wp in t=%f\", num_wp, dist, dist/num_wp, time.time()-t)\n\n self.base_waypoints = waypoints\n\n if self.unsubscribe_base_wp:\n self.base_wp_sub.unregister()",
"def __init__(self):\n \n self.linksDict = dict()\n self.nodesDict = dict()\n self.stopsByRoute = dict()\n self.stopsByNode = dict()\n self.routeXref = dict()\n self.transitRoutes = dict()\n self.spIndex = None",
"def InitWayR(session):\n global way_r\n q = session.query(melt.StreetAssoc)\n way_r = set([it.osm_way for it in q.all()])",
"def __init__(self, name, waypoints, position2d_proxy, waypoint_distance_tolerance):\n\n self.name = name\n self.waypoints = waypoints\n self.pp = position2d_proxy\n self.waypoint_distance_tolerance = waypoint_distance_tolerance\n\n self.active_waypoint_index = 0\n self.active_waypoint = self.waypoints[self.active_waypoint_index]\n self.first_update = True\n self.finished = False\n self.last_read = None",
"def buildStopsDict(self):\n \n if len(self.nodesDict) == 0:\n raise Exception('Nodes dictionary is empty!')\n if len(self.linksDict) == 0:\n raise Exception('Links dictionary is empty!')\n \n self.stopsByRoute = dict()\n self.stopsByNode = dict()\n arcpy.env.workspace = PublicTransit.WORKING_GDB\n \n tempStops = \"temp_stops\"\n tempStopsSp = \"temp_stops_sp\"\n \n # Delete temp_stops and temp_stops_sp feature classes if they exist.\n if arcpy.Exists(tempStops):\n arcpy.Delete_management(tempStops)\n if arcpy.Exists(tempStopsSp):\n arcpy.Delete_management(tempStopsSp)\n arcpy.CopyFeatures_management(PublicTransit.RTD_PATH + PublicTransit.RTD_STOPS,\n tempStops)\n \n # Project temp_stops to CA state plane and add XY.\n install_dir = arcpy.GetInstallInfo()['InstallDir']\n out_coordinate_system = os.path.join(install_dir, PublicTransit.NAD_83_DIRECTORY)\n arcpy.Project_management(tempStops, tempStopsSp, out_coordinate_system,\n \"NAD_1983_To_WGS_1984_1\")\n arcpy.AddXY_management(tempStopsSp)\n \n # Create a search cursor to traverse all stops.\n stops = arcpy.SearchCursor(tempStopsSp, \"\", \"\",\n \"CPT_STOPPOINTID; SCH_STOPPOINTSEQNO; \" +\n \"SCH_ROUTEID; SCH_PATTERNID; ROUTE_PATTERN; \" +\n \"SourceOID; POINT_X; POINT_Y\",\n \"ROUTE_PATTERN A; SCH_STOPPOINTSEQNO A\")\n numStops = int(arcpy.GetCount_management(tempStopsSp).getOutput(0))\n print \"Found %d stops\" % numStops\n \n p = index.Property()\n p.overwrite = True\n self.spIndex = index.Index(PublicTransit.SPATIAL_INDEX_FILE,properties=p)\n \n # For each stop determine the nearest network node.\n scount = 0\n icount = 0\n for s in stops:\n # only create stops for routes which exist in RTD\n if not s.ROUTE_PATTERN in self.transitRoutes:\n continue\n scount += 1\n st = TransitStop(s.CPT_STOPPOINTID, s.SCH_ROUTEID, s.SCH_PATTERNID,\n s.ROUTE_PATTERN, s.SourceOID, s.SCH_STOPPOINTSEQNO)\n # If the stop's linkId is in the links dictionary use the link from\n # and to node (these should all be bus routes since MTC's route\n # traversal FC was created for buses only at this time).\n if s.SourceOID in self.linksDict:\n link = self.linksDict[s.SourceOID]\n # Determine which node is nearest and snap to it.\n if self.__getDistance(s.POINT_X,\n s.POINT_Y,\n link.fromNode.x,\n link.fromNode.y) <= \\\n self.__getDistance(s.POINT_X,\n s.POINT_Y,\n link.toNode.x,\n link.toNode.y):\n st.tanaNode = link.fromNode.nodeId\n else:\n st.tanaNode = link.toNode.nodeId\n st.inRegion = True\n \n # The stop's link is not in linksDict. These are either stops \n # outside the region or non-bus routes for which there are no\n # route traversal edges. 
Do a link lookup from the Roadways\n # feature class.\n else:\n arcpy.env.workspace = PublicTransit.RTD_PATH\n roadwaysSearch = arcpy.SearchCursor(PublicTransit.ROADWAYS_FC,\n \"LinkId = \" + str(s.SourceOID),\n \"\", \"\", \"F_JNCTID; T_JNCTID\", \"\")\n for r in roadwaysSearch:\n fromNode = self.__getIdHash(r.F_JNCTID)\n toNode = self.__getIdHash(r.T_JNCTID)\n if fromNode in self.nodesDict and toNode in self.nodesDict:\n if self.__getDistance(s.POINT_X,\n s.POINT_Y,\n self.nodesDict[fromNode].x,\n self.nodesDict[fromNode].y) <= \\\n self.__getDistance(s.POINT_X,\n s.POINT_Y,\n self.nodesDict[toNode].x,\n self.nodesDict[toNode].y):\n st.tanaNode = fromNode\n else:\n st.tanaNode = toNode\n st.inRegion = True\n else:\n st.inRegion = False\n \n # Add the stop to stopsByRoute and stopsByNode dictionaries\n if s.ROUTE_PATTERN in self.stopsByRoute:\n self.stopsByRoute[s.ROUTE_PATTERN].append(st)\n else:\n self.stopsByRoute[s.ROUTE_PATTERN] = [st]\n if (st.tanaNode in self.stopsByNode):\n self.stopsByNode[st.tanaNode].append(st)\n else:\n self.stopsByNode[st.tanaNode] = [st]\n # add the stop node to the spatial index\n if st.tanaNode in self.nodesDict:\n icount += 1\n self.spIndex.insert(st.stopPointId,\n (self.nodesDict[st.tanaNode].x,\n self.nodesDict[st.tanaNode].y,\n self.nodesDict[st.tanaNode].x,\n self.nodesDict[st.tanaNode].y))\n del stops",
"def create_list() -> List[Optional[float]]:\n return [None] * num_stations",
"def waypoints_cb(self, waypoints):\n # This callback should be called only once, with the list of waypoints not yet initialised.\n assert self.waypoints is None\n\n for wp in waypoints.waypoints:\n wp.twist.twist.linear.x = 9.\n\n self.waypoints = waypoints.waypoints # No need to guarantee mutual exclusion in accessing this data member\n\n # Now that the waypoints describing the track have been received, it is time to subscribe to pose updates.\n rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)\n rospy.Subscriber('/current_velocity', TwistStamped, self.current_velocity_cb)\n rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)\n rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.DBW_enabled_cb)",
"def _build_directions(self):\n d = {'start': self.get_start(), 'end': self.get_end(), 'duration': self.get_duration(),\n 'mode': self.get_primary_mode(), 'price_range': self.get_price_range(), 'legs': self.get_legs(),\n 'start_location': self.get_start_location(), 'end_location': self.get_end_location()}\n self.set_directions(d)",
"def fetch(self):\r\n if self.wp_op is None: # If we were already doing a list or save, just restart the fetch without changing the operation\r\n self.wp_op = \"fetch\"\r\n self.master.waypoint_request_list_send()",
"def _parse_routepart(self, data):\n points = [self._parse_trip_point(point) for point in data.findall('./itdPoint')]\n\n path = []\n for coords in data.findall('./itdPathCoordinates/itdCoordinateBaseElemList/itdCoordinateBaseElem'):\n path.append(Coordinates(int(coords.find('y').text) / 1000000, int(coords.find('x').text) / 1000000))\n\n motdata = self._parse_mot(data.find('./itdMeansOfTransport'))\n\n if motdata is None or data.attrib['type'] == 'IT':\n waytype = {\n '98': 'walk',\n '99': 'walk',\n '100': 'walk',\n '101': 'bike',\n '104': 'car',\n '105': 'taxi'\n }[data.find('./itdMeansOfTransport').attrib['type']]\n # 98 = gesichter anschluss\n\n way = Way(WayType(waytype), points[0].stop, points[1].stop)\n way.distance = data.attrib.get('distance')\n if way.distance is not None:\n way.distance = float(way.distance)\n duration = data.attrib.get('timeMinute', None)\n if duration is not None:\n way.duration = timedelta(minutes=int(duration))\n if path:\n way.path = path\n return way\n\n else:\n origin, destination, line, ridenum, ridedir, canceled = motdata\n\n if data.find('./genAttrList/genAttrElem[value=\"HIGHSPEEDTRAIN\"]') is not None:\n line.linetype = LineType('train.longdistance.highspeed')\n elif data.find('./genAttrList/genAttrElem[value=\"LONG_DISTANCE_TRAIN\"]') is not None:\n line.linetype = LineType('train.longdistance')\n\n train_line = line.linetype in self.train_station_lines\n\n # Build Ride Objekt with known stops\n ride = Ride(line, ridenum)\n ride.canceled = canceled\n ride.direction = ridedir\n for infotext in data.findall('./infoTextList/infoTextListElem'):\n ride.infotexts.append(infotext)\n\n first = None\n last = None\n waypoints = False\n if data.find('./itdStopSeq'):\n new_points = [self._parse_trip_point(point, train_line=train_line) for point in data.findall('./itdStopSeq/itdPoint')]\n if not new_points or new_points[0].stop != new_points[0].stop:\n new_points.insert(0, points[0])\n if new_points[-1].stop != points[1].stop:\n new_points.append(points[1])\n points = new_points\n waypoints = True\n\n for p in points:\n if not waypoints and first is None:\n ride.append(None)\n pointer = ride.append(p)\n if first is None:\n first = pointer\n last = pointer\n\n if origin is not None:\n if origin != ride[0].stop:\n ride.prepend(None)\n ride.prepend(TimeAndPlace(Platform(origin)))\n else:\n ride.prepend(None)\n\n if destination is not None:\n if destination != ride[-1].stop:\n ride.append(None)\n ride.append(TimeAndPlace(Platform(destination)))\n else:\n ride.append(None)\n\n segment = ride[first:last]\n paths = self._split_path(path, [p.platform.coords for p in segment])[:-1]\n for i, point in segment.items():\n if not paths:\n break\n segment.ride._paths[i] = paths.pop(0)\n return segment",
"def _instantiate_pathway(self, context):\n # DOCUMENT: Projections SPECIFIED IN A PATHWAY MUST BE A MappingProjection\n # DOCUMENT:\n # Each item in Pathway can be a Mechanism or Projection object, class ref, or specification dict,\n # str as name for a default Mechanism,\n # keyword (IDENTITY_MATRIX or FULL_CONNECTIVITY_MATRIX) as specification for a default Projection,\n # or a tuple with any of the above as the first item and a param dict as the second\n pathway = self.paramsCurrent[PATHWAY]\n self._mech_tuples = []\n self._monitoring_mech_tuples = []\n self._target_mech_tuples = []\n\n from PsyNeuLink.Globals.Run import _get_unique_id\n\n self._standardize_config_entries(pathway=pathway, context=context)\n\n # VALIDATE PATHWAY THEN PARSE AND INSTANTIATE MECHANISM ENTRIES ------------------------------------\n self._parse_and_instantiate_mechanism_entries(pathway=pathway, context=context)\n\n # Identify origin and terminal mechanisms in the process and\n # and assign the mechanism's status in the process to its entry in the mechanism's processes dict\n self.firstMechanism = pathway[0][OBJECT_ITEM]\n self.firstMechanism.processes[self] = ORIGIN\n self._origin_mech_tuples = [pathway[0]]\n self.originMechanisms = MechanismList(self, self._origin_mech_tuples)\n\n self.lastMechanism = pathway[-1][OBJECT_ITEM]\n if self.lastMechanism is self.firstMechanism:\n self.lastMechanism.processes[self] = SINGLETON\n else:\n self.lastMechanism.processes[self] = TERMINAL\n self._terminal_mech_tuples = [pathway[-1]]\n self.terminalMechanisms = MechanismList(self, self._terminal_mech_tuples)\n\n # # Assign process outputState to last mechanisms in pathway\n # self.outputState = self.lastMechanism.outputState\n\n # PARSE AND INSTANTIATE PROJECTION ENTRIES ------------------------------------\n\n self._parse_and_instantiate_projection_entries(pathway=pathway, context=context)\n\n self.pathway = pathway\n\n self._instantiate__deferred_inits(context=context)\n\n if self.learning:\n self._check_for_target_mechanism()\n if self.targetMechanism:\n self._instantiate_target_input()\n self._learning_enabled = True\n else:\n self._learning_enabled = False\n\n self._allMechanisms = MechanismList(self, self._mech_tuples)\n self.monitoringMechanisms = MechanismList(self, self._monitoring_mech_tuples)\n self.targetMechanisms = MechanismList(self, self._target_mech_tuples)",
"def __init__(self):\n self._list: List[Edge] = list()"
] | [
"0.61768115",
"0.5960434",
"0.59442604",
"0.59428525",
"0.57855904",
"0.57328755",
"0.57328755",
"0.57147753",
"0.56981033",
"0.56979334",
"0.5594869",
"0.5559879",
"0.5541759",
"0.5513735",
"0.55024344",
"0.5501922",
"0.55005276",
"0.5494484",
"0.546999",
"0.5431615",
"0.5425233",
"0.54040676",
"0.53953743",
"0.5353378",
"0.53521013",
"0.53450155",
"0.5295759",
"0.52889955",
"0.5283841",
"0.5283767"
] | 0.6392167 | 1 |
This function returns a list of Trips, which is derived from the list of waypoints passed to the instance on initialization. | def get_trips(self) -> Tuple[Trip]:
def update_current_trip(trips, finish):
current_trip = trips.pop()
d = self.calc_distance(current_trip.start, finish)
t = Trip(d, current_trip.start, finish)
trips.append(t)
if len(self._waypoints) < 2:
return tuple()
trips = []
trip_started = False
for x in range(1, len(self._waypoints)):
first_point = self._waypoints[x-1]
second_point = self._waypoints[x]
if self.is_driving(first_point, second_point):
if not trip_started:
trip_started = True
dist = self.calc_distance(first_point, second_point)
trip = Trip(dist, first_point, second_point)
trips.append(trip)
elif trip_started and x == len(self._waypoints)-1:
update_current_trip(trips, second_point)
else:
if trip_started:
update_current_trip(trips, first_point)
trip_started = False
trips = [t for t in trips if t.distance > MIN_TRIP_DISTANCE_METERS]
return tuple(trips) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def trips(self) -> List[TripResponseTrips]:\n return self._trips",
"def get_trips(self) -> Tuple[Trip]:\n ...",
"def waypoints(self):\n\t\treturn [Star(star_id, galaxy=self.galaxy) for delay, star_id, order, num_ships in self.data.o]",
"def buildWayPointList(dom):\n waypoints = []\n for wpt in dom.getElementsByTagName(\"wpt\"):\n (lat,lon, t, ele, name, description) = parsePoint(wpt)\n waypoints.append(myPyGPX.WayPoint(lat, lon, ele, name, description))\n return waypoints",
"def triangulate(polyline):\n\timport p2t\n\tfrom ynlib.beziers import Point\n\n\t# Convert into p2t Points\n\tfor p in polyline:\n\t\tp = p2t.Point(p.x, p.y)\n\t\n\tcdt = p2t.CDT(polyline)\n\tp2ttriangles = cdt.triangulate()\n\t\n\ttriangles = []\n\tfor t in p2ttriangles:\n\t\ttriangles.append( (Point(t.a.x, t.a.y), Point(t.b.x, t.b.y), Point(t.c.x, t.c.y)) )\n\n\treturn triangles",
"def read_waypoints():\n\tfilename = \"waypoints.txt\"\n\tfile = open(filename, \"r\")\n\twp_list = []\n\n\tfor line in file:\n\t\t# Get the individual elements, splitting by whitespace\n\t\tdata_list = line.split()\n\t\tcoordinate = {'x': data_list[0], 'y': data_list[1], 'z': data_list[2]}\n\t\twaypoint = {'radius': data_list[3], 'point': coordinate}\n\n\t\twp_list.append (waypoint)\n\n\treturn wp_list",
"def _initCandidateTriads(self):\n codes = list(self.chordProfile.getScores())\n candidateTriads = [Chord(code) for code in codes]\n return candidateTriads",
"def get_triples(self):\n return [\n triple\n for uid, cuds_object in self._registry.items()\n for triple in cuds_object.get_triples()\n ]",
"def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]",
"def directions(self):\n return []",
"def trios(self):\n return self._trios",
"def get_triples(self):\n cursor = self.db.cursor()\n cursor.execute(\"SELECT page_url, link_type, link_url FROM triples ORDER BY page_url, link_type\")\n return cursor.fetchall()",
"def getWaypoints(self):\n return self.listener.waypoints",
"def create_triangles(list_of_points):\n # create the first two triangle using the create_two_init_triangles with\n # the first 4 points in the given list\n tri_list = create_two_init_triangles(list_of_points[0:FIRST_FOUR_POINTS])\n # run over the point list from the 5th point and on\n for i in range(FIRST_FOUR_POINTS, len(list_of_points)):\n # run on the existing triangles\n for j in range(0, len(tri_list)):\n # check if the point is inside the current triangle\n if is_point_inside_triangle(list_of_points[i], tri_list[j][0],\n tri_list[j][1], tri_list[j][2])[0]:\n # if the point is inside the current triangle, create 3 new\n # triangles using the old triangle vertexes and the new point\n # adding them to the triangle list instead of the triangle the\n # point was in\n tri_list[j:j+1] = create_inner_tri(list_of_points[i],\n tri_list[j][0],\n tri_list[j][1],\n tri_list[j][2])\n break\n return tri_list",
"def _travel_times(self, trip_list, index=0):\n\n def distance_in_travel_time(dep_secs, arr_secs):\n t_dist = arr_secs - dep_secs\n if t_dist < 0:\n t_dist = self._DUMMY_SEPARATOR # min separation\n return t_dist\n\n if not trip_list:\n return []\n\n if 0 < index < len(trip_list):\n trip = trip_list[index]\n else:\n trip = trip_list[0]\n\n t_dists2 = [distance_in_travel_time(stop[3], tail[2]) for (stop, tail)\n in zip(trip.get_time_stops(), trip.get_time_stops()[1:])]\n return t_dists2",
"def getInterWaypointTravelTimes(self, waypoints):\n num_waypoints = len(waypoints)\n travel_times = np.zeros(num_waypoints + 1)\n for waypoint_id in range(1, num_waypoints+1):\n # Current intra waypoint travel time\n id_tm1 = (waypoint_id - 1) % num_waypoints\n id_t = waypoint_id % num_waypoints\n cur_travel_time = self.getWaypointTravelTime(\n waypoints, id_tm1, id_t)\n travel_times[waypoint_id] = cur_travel_time\n\n return travel_times",
"def triples():",
"def _retrieve_options(list_waypoints, current_waypoint):\n options = []\n for next_waypoint in list_waypoints:\n # this is needed because something we are linking to\n # the beggining of an intersection, therefore the\n # variation in angle is small\n next_next_waypoint = next_waypoint.next(3.0)[0]\n link = _compute_connection(current_waypoint, next_next_waypoint)\n options.append(link)\n\n return options",
"def __init__(self, waypoints: Tuple[Waypoint]):\n self._waypoints = waypoints",
"def __init__(self, waypoints: Tuple[Waypoint]):\n self._waypoints = waypoints",
"def waypoints(t):\n global x\n xx = x + ((2 * PI)/t)\n yy = 2*(math.sin(xx))*(math.sin(xx/2))\n return [xx, yy]",
"def get_triangles(self):\n location = TopLoc_Location()\n bt = BRep_Tool()\n facing = bt.Triangulation(self.topods_shape(), location)\n if facing == None:\n return [], []\n\n tab = facing.Nodes()\n tri = facing.Triangles()\n verts = []\n for i in range(1, facing.NbNodes() + 1):\n p = tab.Value(i).Transformed(location.Transformation())\n verts.append(np.array(list(p.Coord())))\n\n tris = []\n reversed = self.reversed()\n for i in range(1, facing.NbTriangles() + 1):\n # OCC triangle normals point in the surface normal\n # direction\n if reversed:\n index1, index3, index2 = tri.Value(i).Get()\n else:\n index1, index2, index3 = tri.Value(i).Get()\n\n tris.append([index1 - 1, index2 - 1, index3 - 1])\n\n return np.asarray(verts, dtype=np.float32), np.asarray(tris, dtype=np.int32)",
"def mpl_triangulation(self):\n tris=[] # [ (n1,n2,n3), ...]\n\n for c in self.valid_cell_iter():\n nodes=np.array(self.cell_to_nodes(c))\n\n # this only works for convex cells\n for i in range(1,len(nodes)-1):\n tris.append( nodes[ [0,i,i+1] ] )\n\n tris=np.array(tris)\n tri=Triangulation(self.nodes['x'][:,0],self.nodes['x'][:,1],\n triangles=tris )\n return tri",
"def triangulate(points):\n # Remove duplicate xy points bc that would make delauney fail, and must remember z (if any) for retrieving originals from index results\n seen = set() \n uniqpoints = [ p for p in points if str( p[:2] ) not in seen and not seen.add( str( p[:2] ) )]\n classpoints = [_Point(*point[:2]) for point in uniqpoints]\n\n # Compute Delauney\n triangle_ids = tesselator.computeDelaunayTriangulation(classpoints)\n\n # Get vertices from result indexes\n triangles = [[uniqpoints[i] for i in triangle] for triangle in triangle_ids]\n \n return triangles",
"def triplets(p):\n return [p[i:i+3] for i in range(3)]",
"def get_neighbors(self) -> List['games.saloon.tile.Tile']:\n neighbors = []\n\n for direction in Tile.directions:\n neighbor = getattr(self, \"tile_\" + direction.lower())\n if neighbor:\n neighbors.append(neighbor)\n\n return neighbors",
"def create_two_init_triangles(points):\n return [(points[0], points[1], points[2]),\n (points[0], points[2], points[3])]",
"def triples(self):\n\n if len(self.words) < 3:\n return\n\n for i in range(len(self.words) - 2):\n yield (self.words[i], self.words[i+1], self.words[i+2])",
"def trajectory(self):\n return Trajectory.createFromTuples(self.points)",
"def tripler(self, number=3):\n i = 0\n trip_list = []\n flist = list(self.files())\n while i <= len(flist) - number:\n triples = tuple(flist[i+ count] for count in range(number) )\n trip_list.append(triples)\n i += number\n return trip_list"
] | [
"0.7108095",
"0.6903571",
"0.6489643",
"0.6418137",
"0.63206124",
"0.6307456",
"0.6265638",
"0.6173024",
"0.6030936",
"0.6028203",
"0.5986084",
"0.5923996",
"0.59033453",
"0.58866876",
"0.5842499",
"0.5829152",
"0.5805312",
"0.5799013",
"0.5734565",
"0.5734565",
"0.57277346",
"0.5711463",
"0.5710232",
"0.569024",
"0.5649909",
"0.5643219",
"0.56081796",
"0.55641663",
"0.5548573",
"0.5511128"
] | 0.6947403 | 1 |
Instead of a list of Waypoints, the StreamProcessor receives only one Waypoint at a time. The processor does not have access to the full list of waypoints. If the stream processor recognizes a complete trip, the processor returns a Trip object; otherwise it returns None. | def process_waypoint(self, waypoint: Waypoint) -> Union[Trip, None]:
# ignore the first entry, just remember it for further compares
if not self.prev_point:
self.prev_point = waypoint
return None
if self.is_driving(self.prev_point, waypoint):
if not self.start_point:
# indicates trip start
self.start_point = self.prev_point
else:
# indicates trip finish
if self.start_point:
d = self.calc_distance(self.start_point, self.prev_point)
trip = Trip(d, self.start_point, self.prev_point)
self.start_point = None
return trip
self.prev_point = waypoint
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def process_waypoint(self, waypoint: Waypoint) -> Union[Trip, None]:\n ...",
"def get_trips(self) -> Tuple[Trip]:\n def update_current_trip(trips, finish):\n current_trip = trips.pop()\n d = self.calc_distance(current_trip.start, finish)\n t = Trip(d, current_trip.start, finish)\n trips.append(t)\n\n if len(self._waypoints) < 2:\n return tuple()\n trips = []\n trip_started = False\n for x in range(1, len(self._waypoints)):\n first_point = self._waypoints[x-1]\n second_point = self._waypoints[x]\n if self.is_driving(first_point, second_point):\n if not trip_started:\n trip_started = True\n dist = self.calc_distance(first_point, second_point)\n trip = Trip(dist, first_point, second_point)\n trips.append(trip)\n elif trip_started and x == len(self._waypoints)-1:\n update_current_trip(trips, second_point)\n else:\n if trip_started:\n update_current_trip(trips, first_point)\n trip_started = False\n trips = [t for t in trips if t.distance > MIN_TRIP_DISTANCE_METERS]\n return tuple(trips)",
"def get_trips(self) -> Tuple[Trip]:\n ...",
"def execute_waypoint_sequence(detail_of_trip):\n\n # rets (route_line, line_points)\n sliced_route_and_line_points = chunk_user_route(detail_of_trip)\n\n sliced_route = sliced_route_and_line_points[0]\n line_points = sliced_route_and_line_points[1]\n\n # Interpolate/Break into 1/10 segments\n segmented_points = interpolate_points(sliced_route, line_points)\n waypoints = find_crime_areas(segmented_points)\n\n # print \"segmented_points\", json.dumps(segmented_points, indent=2)\n print \"\\n\\n\\n\\n\" # compensating for the giant GET request\n return waypoints",
"def _parse_routepart(self, data):\n points = [self._parse_trip_point(point) for point in data.findall('./itdPoint')]\n\n path = []\n for coords in data.findall('./itdPathCoordinates/itdCoordinateBaseElemList/itdCoordinateBaseElem'):\n path.append(Coordinates(int(coords.find('y').text) / 1000000, int(coords.find('x').text) / 1000000))\n\n motdata = self._parse_mot(data.find('./itdMeansOfTransport'))\n\n if motdata is None or data.attrib['type'] == 'IT':\n waytype = {\n '98': 'walk',\n '99': 'walk',\n '100': 'walk',\n '101': 'bike',\n '104': 'car',\n '105': 'taxi'\n }[data.find('./itdMeansOfTransport').attrib['type']]\n # 98 = gesichter anschluss\n\n way = Way(WayType(waytype), points[0].stop, points[1].stop)\n way.distance = data.attrib.get('distance')\n if way.distance is not None:\n way.distance = float(way.distance)\n duration = data.attrib.get('timeMinute', None)\n if duration is not None:\n way.duration = timedelta(minutes=int(duration))\n if path:\n way.path = path\n return way\n\n else:\n origin, destination, line, ridenum, ridedir, canceled = motdata\n\n if data.find('./genAttrList/genAttrElem[value=\"HIGHSPEEDTRAIN\"]') is not None:\n line.linetype = LineType('train.longdistance.highspeed')\n elif data.find('./genAttrList/genAttrElem[value=\"LONG_DISTANCE_TRAIN\"]') is not None:\n line.linetype = LineType('train.longdistance')\n\n train_line = line.linetype in self.train_station_lines\n\n # Build Ride Objekt with known stops\n ride = Ride(line, ridenum)\n ride.canceled = canceled\n ride.direction = ridedir\n for infotext in data.findall('./infoTextList/infoTextListElem'):\n ride.infotexts.append(infotext)\n\n first = None\n last = None\n waypoints = False\n if data.find('./itdStopSeq'):\n new_points = [self._parse_trip_point(point, train_line=train_line) for point in data.findall('./itdStopSeq/itdPoint')]\n if not new_points or new_points[0].stop != new_points[0].stop:\n new_points.insert(0, points[0])\n if new_points[-1].stop != points[1].stop:\n new_points.append(points[1])\n points = new_points\n waypoints = True\n\n for p in points:\n if not waypoints and first is None:\n ride.append(None)\n pointer = ride.append(p)\n if first is None:\n first = pointer\n last = pointer\n\n if origin is not None:\n if origin != ride[0].stop:\n ride.prepend(None)\n ride.prepend(TimeAndPlace(Platform(origin)))\n else:\n ride.prepend(None)\n\n if destination is not None:\n if destination != ride[-1].stop:\n ride.append(None)\n ride.append(TimeAndPlace(Platform(destination)))\n else:\n ride.append(None)\n\n segment = ride[first:last]\n paths = self._split_path(path, [p.platform.coords for p in segment])[:-1]\n for i, point in segment.items():\n if not paths:\n break\n segment.ride._paths[i] = paths.pop(0)\n return segment",
"def passenger_trip(self, passenger):\n\n closer_start = self._find_closest_stop(passenger.start)\n closer_end = self._find_closest_stop(passenger.end)\n\n if isinstance(closer_start, list):\n closer_start = self._pick_stop(closer_start, \"start\")\n if isinstance(closer_end, list):\n closer_end = self._pick_stop(closer_end, \"end\")\n\n bus_times = self.route.timetable()\n bus_travel = bus_times[closer_end[1]] - bus_times[closer_start[1]]\n if bus_travel <= 0:\n closer_end = closer_start\n\n return (closer_start, closer_end)",
"def next_gps(self):\n \n return Waypoint(0.0, 0.0)",
"def _get_next_cleaned_trip(ts, trip_obj):\n next_place = ts.get_entry_from_id(esda.CLEANED_PLACE_KEY, trip_obj.data.end_place)\n if next_place is None:\n return None\n else:\n next_trip = ts.get_entry_from_id(esda.CLEANED_TRIP_KEY, next_place.data.starting_trip)\n return next_trip",
"def trips(self) -> List[TripResponseTrips]:\n return self._trips",
"def get_best_roundtrip(self):\n out = min(self.outgoing_flights, key=lambda f: f.price)\n ret = min(self.return_flights, key=lambda f: f.price)\n\n return RoundTrip(out, ret)",
"def trips_route(bus_route):\n route_trips = set(\n trips[trips[\"route_id\"].isin(list(routes[routes[\"route_short_name\"] == bus_route][\"route_id\"].values))][\n \"trip_id\"].values)\n return route_trips",
"def get_trip(trip_id: str) -> Trip:\n trip = Trip.objects.get(id=trip_id)\n\n if not trip.prepared:\n _generate_trip(trip)\n\n return trip",
"def _parse_trip_point(self, data, walk=False, train_line=False):\n city, name = self._parse_trip_point_name(data)\n\n if data.attrib['area'] == '' and data.attrib['stopID'] == '0':\n return None\n\n # todo – what kind of location is this?\n if walk and data.attrib['area'] == '0':\n location = Address(None, city, name)\n else:\n location = Stop(self._get_country(data.attrib['stopID']), city, name)\n location._ids[self.name] = int(data.attrib['stopID'])\n self._make_train_station(location, train_line)\n\n cityid = data.attrib.get('placeID')\n if cityid:\n self._process_stop_city(location, int(cityid))\n\n # get and clean the platform\n platform = data.attrib['platform']\n if not platform.strip():\n platform = data.attrib['platformName']\n match = re.search(r'[0-9].*$', data.attrib['platformName'])\n platform = match.group(0) if match is not None else platform\n if not platform:\n platform = None\n\n full_platform = data.attrib['platformName']\n if not full_platform:\n full_platform = platform\n if platform == full_platform and 'pointType' in data.attrib:\n full_platform = '%s %s' % (data.attrib['pointType'], platform)\n if not full_platform:\n full_platform = None\n\n platform = Platform(location, platform, full_platform)\n if full_platform is not None:\n platform._ids[self.name] = (location._ids[self.name], data.attrib['area'], data.attrib['platform'])\n\n ifopt = data.attrib.get('gid', '').split(':')\n if len(ifopt) == 5 and str(location._ids[self.name]).endswith(ifopt[2]):\n location.country = ifopt[0]\n location._ids['ifopt'] = (ifopt[1], ifopt[2])\n\n if full_platform is not None:\n platform._ids['ifopt'] = (ifopt[1], ifopt[2], ifopt[3], ifopt[4])\n\n result = TimeAndPlace(platform)\n\n if data.attrib.get('x'):\n platform.coords = Coordinates(float(data.attrib['y']) / 1000000, float(data.attrib['x']) / 1000000)\n\n # for genattr in data.findall('./genAttrList/genAttrElem'):\n # \tname = genattr.find('name').text\n # \tvalue = genattr.find('value').text\n # \tif name == 'platformChange' and value == 'changed':\n # \t\tresult.changed_platform = True\n\n self._parse_trip_point_time(data, result, walk)\n return result",
"def _trip_request(self, triprequest: Trip.Request):\n now = datetime.now()\n\n assert triprequest.walk_speed in ('slow', 'normal', 'fast')\n\n linetypes = triprequest.linetypes\n if linetypes is None:\n linetypes = LineTypes()\n\n departure = triprequest.departure\n arrival = triprequest.arrival\n\n if isinstance(departure, datetime):\n departure = RealtimeTime(departure)\n if isinstance(arrival, datetime):\n arrival = RealtimeTime(arrival)\n\n if departure is not None:\n deparr = 'dep'\n time_ = departure.livetime\n elif arrival is not None:\n deparr = 'arr'\n time_ = arrival.livetime\n else:\n deparr = 'dep'\n time_ = now\n\n max_changes = triprequest.max_changes\n if max_changes is None:\n max_changes = 9\n\n post = {\n 'changeSpeed': triprequest.walk_speed,\n 'command': '',\n 'coordOutputFormat': 'WGS84',\n 'imparedOptionsActive': 1,\n 'includedMeans': 'checkbox',\n 'itOptionsActive': 1,\n 'itdDateDay': time_.day,\n 'itdDateMonth': time_.month,\n 'itdDateYear': time_.year,\n 'itdTimeHour': time_.hour,\n 'itdTimeMinute': time_.minute,\n 'itdTripDateTimeDepArr': deparr,\n 'language': 'de',\n 'locationServerActive': 1,\n 'maxChanges': max_changes,\n 'name_via': '', # .decode('utf-8').encode('iso-8859-1'),\n 'nextDepsPerLeg': 1,\n 'place_via': '', # decode('utf-8').encode('iso-8859-1'),\n 'ptOptionsActive': 1,\n 'requestID': 0,\n 'routeType': 'LEASTTIME', # {'speed':'LEASTTIME', 'waittime':'LEASTINTERCHANGE', 'distance':'LEASTWALKING'}[select_interchange_by],\n 'sessionID': 0,\n 'type_via': 'stop',\n 'useRealtime': 1,\n 'outputFormat': 'XML'\n }\n\n # if use_realtime: post['useRealtime'] = 1\n\n if 'train' in linetypes:\n post['inclMOT_0'] = 'on'\n\n if 'train.longdistance.highspeed' in linetypes:\n post['lineRestriction'] = 400\n elif 'train.longdistance' in linetypes:\n post['lineRestriction'] = 401\n else:\n post['lineRestriction'] = 403\n\n for linetype, number in (('urban', '1'), ('metro', '2'), ('metro', '3'),\n ('tram', '4'), ('bus.city', '5'), ('bus.regional', '6'),\n ('bus.express', '7'), ('suspended', '8'), ('ship', '9'),\n ('dialable', '10'), ('other', '11')):\n if linetype in linetypes:\n post['inclMOT_' + number] = 'on'\n\n if triprequest.wayduration_origin or triprequest.wayduration_destination:\n post['useProxFootSearch'] = 1\n\n waytypes = {'walk': 100, 'bike': 101, 'car': 104, 'taxi': 105}\n post['trITDepMOT'] = waytypes[str(triprequest.waytype_origin)]\n post['trITArrMOT'] = waytypes[str(triprequest.waytype_destination)]\n\n post['trITDepMOTvalue%d' % post['trITDepMOT']] = triprequest.wayduration_origin.total_seconds() // 60\n post['trITArrMOTvalue%d' % post['trITArrMOT']] = triprequest.wayduration_destination.total_seconds() // 60\n\n if triprequest.with_bike:\n post['bikeTakeAlong'] = 1\n\n if triprequest.wheelchair:\n post['wheelchair'] = 1\n\n if triprequest.low_floor_only:\n post['lowPlatformVhcl'] = 1\n\n if not triprequest.allow_solid_stairs:\n post['noSolidStairs'] = 1\n\n if not triprequest.allow_escalators:\n post['noEscalators'] = 1\n\n if not triprequest.allow_elevators:\n post['noElevators'] = 1\n\n post.update(self._convert_location(triprequest.origin, '%s_origin'))\n post.update(self._convert_location(triprequest.destination, '%s_destination'))\n\n xml = self._post('XSLT_TRIP_REQUEST2', post)\n servernow = datetime.strptime(xml.attrib['now'], '%Y-%m-%dT%H:%M:%S')\n\n data = xml.find('./itdTripRequest')\n\n results = Trip.Results(self._parse_routes(data.find('./itdItinerary/itdRouteList')))\n results.origin = 
self._parse_odv(data.find('./itdOdv[@usage=\"origin\"]'))\n results.destination = self._parse_odv(data.find('./itdOdv[@usage=\"destination\"]'))\n\n return results, servernow",
"def chunk_user_route(detail_of_trip):\n\n # $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n # since I can't get javascript to load, here's a hacky way of loading json\n # that details the route based on the user's point A and point B\n # detail_of_trip = api.directions(\n # (40.760350, -73.976209),\n # (40.754009, -73.981097),\n # mode=\"walking\"\n # )[0]\n # $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n\n # now that I have javascript sending over the json, load json that details\n # the route based on the user's point A and point B\n\n # -------------- This section is for interpolation/splitting using shapely\n first = True # to see if this is the start position for the entire route\n line_points = [] # stores all the points to the route based on dict passed\n\n for leg in detail_of_trip['legs']:\n for step in leg['steps']:\n # Create a list of two element lists that represent points along the\n # route. via google. line_points = [ [lat1, lng1], [lat2, lng2],...]\n # Only add the starting point the first time. Every other iteration\n # we will just tack on the end points to our line.\n if first:\n line_points.append([step['start_location']['lat'], step['start_location']['lng']])\n first = False\n line_points.append([step['end_location']['lat'], step['end_location']['lng']])\n\n # Now load those points into a geometry, here shapely's LineString type.\n route_line = LineString(line_points)\n return (route_line, line_points)",
"def get_pump_tripped(self):\n return self.__pump_trip",
"def _parse_routes(self, data):\n trips = []\n routes = data.findall('./itdRoute')\n for route in routes:\n trip = Trip()\n interchange = None\n for routepart in route.findall('./itdPartialRouteList/itdPartialRoute'):\n part = self._parse_routepart(routepart)\n if interchange is not None:\n if isinstance(part, RideSegment):\n interchange.destination = part[0].platform\n else:\n interchange.destination = part[0].origin\n trip._parts.append(part)\n\n interchange = self._parse_interchange(routepart)\n if isinstance(part, RideSegment):\n if interchange is not None:\n interchange.origin = part[-1].platform\n trip._parts.append(interchange)\n else:\n if interchange is not None:\n part.events = interchange.events\n interchange = None\n\n ticketlist = TicketList()\n tickets = route.find('./itdFare/itdSingleTicket')\n if tickets:\n authority = tickets.attrib['net']\n ticketlist.single = TicketData(authority, tickets.attrib['unitsAdult'], float(tickets.attrib['fareAdult']), float(tickets.attrib['fareChild']))\n ticketlist.bike = TicketData(authority, tickets.attrib['unitsBikeAdult'], float(tickets.attrib['fareBikeAdult']), float(tickets.attrib['fareBikeChild']))\n ticketlist.currency = tickets.attrib['currency']\n ticketlist.level_name = tickets.attrib['unitName']\n for ticket in tickets.findall('./itdGenericTicketList/itdGenericTicketGroup'):\n t = TicketData()\n name = ticket.find('./itdGenericTicket[ticket=\"TICKETTYPE\"]/value')\n if name is None or not name.text:\n continue\n\n authority = ticket.find('./itdGenericTicket[ticket=\"TARIFF_AUTHORITY\"]/value')\n if authority is not None and authority.text:\n t.authority = authority.text\n\n level = ticket.find('./itdGenericTicket[ticket=\"FARE_CATEGORY\"]/value')\n if level is not None and level.text:\n t.level = level.text\n\n prices = []\n adult = ticket.find('./itdGenericTicket[ticket=\"TICKET_ID_ADULT\"]/value')\n if adult is not None and adult.text:\n price = ticket.find('./itdGenericTicket[ticket=\"FARE_ADULT\"]/value')\n if price is not None and price.text:\n prices.append(float(price.text))\n\n child = ticket.find('./itdGenericTicket[ticket=\"TICKET_ID_CHILD\"]/value')\n if child is not None and child.text:\n price = ticket.find('./itdGenericTicket[ticket=\"FARE_CHILD\"]/value')\n if price is not None and price.text:\n prices.append(float(price.text))\n\n if not prices:\n continue\n\n t.price = prices[0]\n if len(prices) == 2:\n t.price_child = prices[1]\n ticketlist.other[name.text] = t\n trip.tickets = ticketlist\n\n trips.append(trip)\n\n return trips",
"def get_trip(request_trip_id):\n\n # gets when the bus is expected to come and to what stop\n live_stop_url = \"http://gtfs.edmonton.ca/TMGTFSRealTimeWebService/TripUpdate/TripUpdates.pb\"\n live_stop_feed = gtfs_realtime_pb2.FeedMessage()\n\n live_stop_response = cache.get(\"live_stop_response\")\n if not live_stop_response:\n live_stop_response = requests.get(live_stop_url)\n cache.set(\"live_stop_response\", live_stop_response, timeout=60)\n\n if live_stop_response.status_code != 200:\n return False\n live_stop_feed.ParseFromString(live_stop_response.content)\n\n # gets the static location of each bus stop\n static_stop_url = \"https://data.edmonton.ca/resource/kgzg-mxv6.json?$limit=10000\"\n\n static_stop_response = cache.get(\"static_stop_response\")\n if not static_stop_response:\n static_stop_response = requests.get(static_stop_url)\n cache.set(\"static_stop_response\", static_stop_response, timeout=10*60)\n\n if static_stop_response.status_code != 200:\n return False\n static_stop_data = static_stop_response.json()\n\n trip = {}\n\n for entity in live_stop_feed.entity:\n trip_id = entity.trip_update.trip.trip_id\n if trip_id == request_trip_id:\n # iterate through all the bus stops\n for item in entity.trip_update.stop_time_update:\n # get the time when the bus either will arrive or arrived\n stop_sequence = item.stop_sequence\n time = item.departure.time\n if not time:\n time = item.arrival.time\n\n # format the time - need to take into account the timezone because\n # heroku runs from a different location so the times get messed up\n timezone = pytz.timezone(\"Canada/Mountain\")\n time = datetime.datetime.fromtimestamp(int(time), timezone).strftime(\"%H:%M\")\n\n stop_id = item.stop_id\n\n if stop_id and int(stop_id) < 0:\n continue\n\n # get the street address of the bus stop\n address = \"N/A\"\n for stop in static_stop_data:\n if stop[\"stop_id\"] == stop_id:\n address = stop[\"stop_name\"]\n break\n\n trip_item = {\"stop\": stop_id, \"address\": address, \"time\": time}\n trip[stop_sequence] = trip_item\n\n return trip",
"def plan_trip():\n origins = []\n destinations = []\n\n origin_stop = request.args.get('origin', False)\n destination_stop = request.args.get('destination', False)\n origin_is_suburb = request.args.get('origin_suburb', False)\n dest_is_suburb = request.args.get('dest_suburb', False)\n origin_is_suburb = bool(origin_is_suburb)\n dest_is_suburb = bool(dest_is_suburb)\n if origin_stop and destination_stop:\n client = api.connection()\n origins = client.find_stops_by_name('any', origin_stop, True)\n\n if client.error == 404:\n render_template(\n \"trip-planner.jinja2\", origins=[], destinations=[], err=404\n )\n\n destinations = client.find_stops_by_name('any', destination_stop, True)\n if client.error == 404:\n render_template(\n \"trip-planner.jinja2\", origins=[], destinations=[], err=404\n )\n\n origins = stop_information_generator(\n origins.locations, [], origin_stop, origin_is_suburb\n )\n destinations = stop_information_generator(\n destinations.locations, [], destination_stop, dest_is_suburb\n )\n\n return render_template(\n \"trip-planner.jinja2\", origins=origins, destinations=destinations, err=200\n )",
"def get_fan_tripped(self):\n return self.__fan_trip",
"def aligned_on_runway(\n self, airport: Union[str, \"Airport\"]\n ) -> Iterator[\"Flight\"]:\n\n from ..data import airports\n\n # The following cast secures the typing\n self = cast(\"Flight\", self)\n\n _airport = airports[airport] if isinstance(airport, str) else airport\n if (\n _airport is None\n or _airport.runways is None\n or _airport.runways.shape.is_empty\n ):\n return None\n\n if isinstance(_airport.runways.shape, LineString):\n candidate_shapes = [\n LineString(list(self.xy_time)).intersection(\n _airport.runways.shape.buffer(5e-4)\n )\n ]\n else:\n candidate_shapes = [\n LineString(list(self.xy_time)).intersection(\n on_runway.buffer(5e-4)\n )\n for on_runway in _airport.runways.shape.geoms\n ]\n\n for intersection in candidate_shapes:\n if intersection.is_empty:\n continue\n if isinstance(intersection, LineString):\n (*_, start), *_, (*_, stop) = intersection.coords\n segment = self.between(start, stop, strict=False)\n if segment is not None:\n yield segment\n if isinstance(intersection, MultiLineString):\n (*_, start), *_, (*_, stop) = intersection.geoms[0].coords\n for chunk in intersection.geoms:\n (*_, start_bak), *_, (*_, stop) = chunk.coords\n if stop - start > 40: # crossing runways and back\n start = start_bak\n segment = self.between(start, stop, strict=False)\n if segment is not None:\n yield segment",
"def waypoints(self):\n\t\treturn [Star(star_id, galaxy=self.galaxy) for delay, star_id, order, num_ships in self.data.o]",
"def _insert_stops_one_to_one(self): # pylint: disable=too-many-locals\r\n # Use an insertCursor to insert Stops into the Route analysis\r\n destinations = {}\r\n destination_rows = []\r\n with self.rt_solver.insertCursor(\r\n arcpy.nax.RouteInputDataType.Stops,\r\n [\"RouteName\", \"Sequence\", self.origin_unique_id_field_name, \"SHAPE@\", self.dest_unique_id_field_name] +\r\n self.origin_transfer_fields\r\n ) as icur:\r\n # Loop through origins and insert them into Stops along with their assigned destinations\r\n for origin in arcpy.da.SearchCursor( # pylint: disable=no-member\r\n self.input_origins_layer,\r\n [\"SHAPE@\", self.origin_id_field, self.assigned_dest_field] + self.origin_transfer_fields\r\n ):\r\n dest_id = origin[2]\r\n if dest_id is None:\r\n continue\r\n if dest_id not in destinations:\r\n dest_val = f\"'{dest_id}'\" if isinstance(dest_id, str) else dest_id\r\n with arcpy.da.SearchCursor( # pylint: disable=no-member\r\n self.input_destinations_layer,\r\n [\"SHAPE@\", self.dest_id_field] + self.destination_transfer_fields,\r\n where_clause=f\"{self.dest_id_field} = {dest_val}\"\r\n ) as cur:\r\n try:\r\n destinations[dest_id] = next(cur)\r\n except StopIteration:\r\n # The origin's destination is not present in the destinations table. Just skip the origin.\r\n continue\r\n # Insert origin and destination\r\n destination = destinations[dest_id]\r\n if self.reverse_direction:\r\n route_name = f\"{dest_id} - {origin[1]}\"\r\n origin_sequence = 2\r\n destination_sequence = 1\r\n else:\r\n route_name = f\"{origin[1]} - {dest_id}\"\r\n origin_sequence = 1\r\n destination_sequence = 2\r\n # Define the final origin and destination rows for the input Stops\r\n origin_row = [route_name, origin_sequence, origin[1], origin[0], None] + list(origin)[3:]\r\n destination_row = [route_name, destination_sequence, None, destination[0], destination[1]] + \\\r\n list(destination)[2:]\r\n icur.insertRow(origin_row)\r\n destination_rows.append(destination_row)\r\n\r\n # Insert destinations\r\n with self.rt_solver.insertCursor(\r\n arcpy.nax.RouteInputDataType.Stops,\r\n [\"RouteName\", \"Sequence\", self.origin_unique_id_field_name, \"SHAPE@\", self.dest_unique_id_field_name] +\r\n self.destination_transfer_fields\r\n ) as dcur:\r\n for row in destination_rows:\r\n dcur.insertRow(row)",
"def getWaypoints(self):\n return self.listener.waypoints",
"def timepoint_finder(transitfeeds_url = 'http://transitfeeds.com/p/vta/45/20170929/download'):\n def gtfs_downloader(url):\n import urllib.request\n import zipfile\n file_name = 'gtfs.zip'\n urllib.request.urlretrieve(url, file_name)\n \n with zipfile.ZipFile(file_name,\"r\") as zip_ref:\n zip_ref.extractall(\"gtfs/\")\n\n gtfs_downloader(transitfeeds_url)\n\n routes = pd.read_csv('gtfs/routes.txt')\n trips = pd.read_csv('gtfs/trips.txt')\n st = pd.read_csv('gtfs/stop_times.txt')\n\n count_df = trips[trips.service_id=='Weekdays'].groupby(['route_id','direction_id','shape_id']).count().reset_index()\n top_shapes = count_df.sort_values('service_id',ascending=False).drop_duplicates(['route_id','direction_id']).sort_values(by=['route_id','direction_id'],ascending=True)\n\n trip_set = []\n for i,r in top_shapes.iterrows():\n shape_id = r['shape_id']\n trip_set.extend(trips.query(\"shape_id=='%s'\" %(shape_id))['trip_id'].head(1).values)\n\n trip_subset = trips.loc[trips['trip_id'].isin(trip_set)]\n timepoints = pd.merge(trip_subset,st.loc[st['arrival_time'].dropna(axis='index').index,], how='left')\n timepoints = timepoints[['route_id','direction_id','stop_id']]\n timepoints['timepoint'] = 1\n timepoints.rename(columns={'stop_id':'STOP_ID'},inplace=True)\n return timepoints",
"def get_path(self, starting_waypoint, destination_id):\n # Run dijkstra's algorithm\n dijkstra_output = dijkstra.DijkstraSPF(self.graph, starting_waypoint)\n \n # Get waypoint_id of destination\n cur = armaps.model.get_db()\n cur.execute(\n \"SELECT waypoint_id FROM waypoints WHERE destination_id = %s\", \n (destination_id,)\n )\n cur_output = cur.fetchone()\n dest_waypoint_id = int(cur_output[\"waypoint_id\"])\n\n # Get the path from starting waypoint to destination's waypoint\n path = dijkstra_output.get_path(dest_waypoint_id)\n\n # Excluding starting point in path, add coords of each waypoint to data\n data = []\n\n for i in range(1, len(path)):\n data.append(\n {\n \"lat\": self.waypoints[path[i]][\"lat\"],\n \"lon\": self.waypoints[path[i]][\"lon\"],\n \"id\": self.waypoints[path[i]][\"waypoint_id\"]\n }\n )\n \n return data",
"def start_trip(self, live):\n t = Trip(live)\n self.current_trip = t\n if live:\n # Send a request for a new trip to the remote server\n self.get_connection().start_trip()\n else:\n # Add a new trip to the local database\n self.get_database().start_trip(t)",
"def route(self) -> Optional[RoadMap.Route]:\n return self._route",
"def publish_waypoints(self):\n\n # Make a lane message\n lane = Lane()\n\n # Get closest waypoint index\n closest_idx = self.get_closest_waypoint_idx()\n\n # Get farthest waypoint index\n farthest_idx = closest_idx + LOOKAHEAD_WPS\n\n # Slice to get the upcoming waypoints\n upcoming_waypoints = self.waypoints.waypoints[closest_idx:farthest_idx]\n\n # If no stopline detected or stopline is beyond farthest index...\n if (self.stopline_waypoint_idx == -1) or (self.stopline_waypoint_idx >= farthest_idx):\n\n # Follow the upcoming waypoints\n lane.waypoints = upcoming_waypoints\n\n else:\n\n # Create a list to hold modified upcoming waypoints\n temp = []\n\n # Find the relative stopline index within the upcoming waypoints\n # Back off by two waypoints so that front of car stays behind\n # stopline.\n stop_idx = max(self.stopline_waypoint_idx-closest_idx-2, 0)\n\n # Get the deceleration velocities at each upcoming waypoint\n velocities = self.deceleration_velocities(upcoming_waypoints, stop_idx)\n\n # For each upcoming waypoint...\n for i, wp in enumerate(upcoming_waypoints[:-1]):\n\n # Create a new waypoint\n p = Waypoint()\n\n # Dupicate the pose of the existing waypoint\n p.pose = wp.pose\n\n # Limit current velocities to decelration velocities\n p.twist.twist.linear.x = min(velocities[i], p.twist.twist.linear.x)\n\n # Add the modified waypoint to the list\n temp.append(p)\n\n # Follow the modified upcoming waypoints\n lane.waypoints = temp\n\n # Publish the lane message\n self.final_waypoints_pub.publish(lane)",
"def get_junction_by_route(self, start_waypoint):\n # get start waypoint\n waypoint = start_waypoint\n\n reached_junction = False\n sampling_radius = 1.\n\n while not reached_junction:\n wp_choice = waypoint.next(sampling_radius)\n waypoint = wp_choice[0]\n\n if waypoint.is_junction:\n reached_junction = True\n\n # get junction by waypoint\n self.junction = waypoint.get_junction()\n\n return self.junction"
] | [
"0.71810347",
"0.662581",
"0.6578164",
"0.61865175",
"0.60447747",
"0.5670999",
"0.5567962",
"0.55303997",
"0.5513158",
"0.5484303",
"0.54744774",
"0.5460108",
"0.5456011",
"0.54341614",
"0.5397874",
"0.5393958",
"0.5386682",
"0.53331935",
"0.5322069",
"0.52804184",
"0.52273124",
"0.5175471",
"0.5172441",
"0.51359564",
"0.51195014",
"0.5115683",
"0.5114308",
"0.5112555",
"0.50950956",
"0.509243"
] | 0.69968843 | 1 |
Take any sequence of strings, str_seq, and return the count of element strings that start with the given prefix. >>> count_common_prefix(('ab', 'ac', 'ad'), 'a') 3 >>> count_common_prefix(['able', 'baker', 'adam', 'ability'], 'ab') 2 >>> count_common_prefix([], 'a') 0 >>> count_common_prefix(['a', 'a', 'ab'], 'a') 3 >>> count_common_prefix(['a', 'a', 'ab'], '') 3 | def count_common_prefix(str_seq, prefix):
count = 0
for element in str_seq:
if element.startswith(prefix):
count += 1
return count | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_common_prefix(list_of_strings):\n # https://stackoverflow.com/questions/6718196/determine-prefix-from-a-set-of-similar-strings\n def all_same(x):\n return all(x[0] == y for y in x)\n\n char_tuples = zip(*list_of_strings)\n prefix_tuples = itertools.takewhile(all_same, char_tuples)\n return \"\".join(x[0] for x in prefix_tuples).strip(\"-\")",
"def longest_common_prefix(strings: list):\n raise NotImplemented",
"def find_common_prefix(strs):\n\n common = []\n for cgroup in zip(*strs):\n if all(x == cgroup[0] for x in cgroup[1:]):\n common.append(cgroup[0])\n else:\n break\n return ''.join(common)",
"def _common_prefix(sequence1, sequence2):\n i = 0\n for elem1, elem2 in zip(sequence1, sequence2):\n if elem1 != elem2:\n return i\n i += 1\n\n # Return length of sequence if sequences are identical\n return min(len(sequence1), len(sequence2))",
"def common_prefix_length(s, u):\n length = 0\n for cs, cu in zip(s, u):\n if cs != cu:\n break\n length += 1\n return length",
"def find_longest_common_prefix(words:list):\n trie = Trie(words)\n\n head = trie.head\n\n prefix = []\n\n while len(head) == 1 and trie.eof not in head:\n key, value = head.popitem()\n prefix.append(key)\n head = value\n \n return \"\".join(prefix)",
"def find_longest_common_prefix_reduce(words:list):\n if not words:\n return ''\n \n def common_start(w1, w2):\n shorter = w1 if len(w1) < len(w2) else w2\n for i in range(0, len(shorter)):\n if w1[i] != w2[i]:\n return shorter[:i]\n return shorter\n \n return reduce(common_start, words)",
"def common_prefix(s1, s2):\n if not s1 or not s2:\n return None\n common = genericpath.commonprefix((s1, s2,))\n if common:\n return common\n else:\n return None",
"def longest_common_prefix(fst: str, snd: str) -> str:\n bound = 0\n for a, b in zip(fst, snd):\n if a != b:\n break\n bound += 1\n return fst[:bound]",
"def _prefix_fun(prefix_str: str) -> List[int]:\n pi = [0] * len(prefix_str)\n i, j = 1, 0\n while i < len(prefix_str):\n if prefix_str[i] == prefix_str[j]:\n pi[i] = j + 1\n i += 1\n j += 1\n\n else:\n if j == 0:\n i += 1\n else:\n j = pi[j-1]\n return pi",
"def kncount(self, string, prefixes=None): ###\n if prefixes == None:\n prefixes = list(self.dist(\"\").keys())\n return sum([self.count(p + string) >= 1 for p in prefixes])",
"def contains_prefix(root, input_prefix):\n\n output_list = []\n\n cNode = root\n\n for char in list(input_prefix):\n found_match = False\n for node in cNode.nodes:\n if node.char == char:\n found_match = True\n cNode = node\n break\n if not found_match:\n return False, 0\n\n return True, cNode.num_words_at_letter",
"def longestCommonPrefix(self, strs: List[str]) -> str:\r\n common = \"\"\r\n if not strs:\r\n return common\r\n shortest_str = min(strs, key=len)\r\n for i in range(len(shortest_str)):\r\n char = shortest_str[i]\r\n for item in strs:\r\n if item[i] != char:\r\n return common\r\n common += char\r\n return common",
"def get_common_prefixes(bucket, prefix):\n if not prefix.endswith('/'):\n prefix += \"/\"\n client = boto3.client('s3')\n paginator = client.get_paginator('list_objects')\n result = paginator.paginate(Bucket=bucket, Delimiter='/', Prefix=prefix)\n return [common_prefix['Prefix'].split('/')[-2]\n for common_prefix in result.search(\"CommonPrefixes\")\n if common_prefix]",
"def shared_prefix(args):\n i = 0\n while i < min(map(len, args)):\n if len(set(map(operator.itemgetter(i), args))) != 1:\n break\n i += 1\n return args[0][:i]",
"def _prefix_fun(prefix_str: str) -> List[int]:\n pi = [0] * len(prefix_str) # префикс таблица\n j = 0 # граница префикса\n i = 1 # граница сцффикса\n\n while i < len(prefix_str):\n if prefix_str[i] != prefix_str[j]:\n if j == 0:\n pi[i] = 0\n i += 1\n else:\n j = pi[j - 1]\n else:\n pi[i] = j + 1\n j += 1\n i += 1\n return pi",
"def is_prefix(uri1,uri2):\n if uri2[:len(uri1)]==uri1:\n return 1\n else:\n return None",
"def get_prefix(cls, string1, string2):\n prefix = \"\"\n\n for i in range(min(len(string1), len(string2))):\n if string1[i] == string2[i]:\n prefix += string1[i]\n else:\n return prefix\n\n return prefix",
"def prefix_fun(prefix_str: str) -> list:\n\n\n\t\tf = []\n\n\t\tfor i in range(len(prefix_str)):\n\t\t\tj = i\n\t\t\toffset = 0\n\t\t\twhile j > 0:\n\t\t\t\tj -= 1\n\t\t\t\t# print(i, j, offset, prefix_str[j], prefix_str[i-offset])\n\t\t\t\tif prefix_str[j] == prefix_str[i - offset]:\n\t\t\t\t\toffset += 1\n\t\t\t\telse:\n\t\t\t\t\tj += offset\n\t\t\t\t\toffset = 0\n\t\t\tf.append(offset)\n\t\t\t# print('append', offset)\n\n\t\treturn f",
"def has_prefix(cls, string1, string2):\n return len(cls.get_prefix(string1, string2)) > 0",
"def common_prefix_len(self, other: \"ProofPath\") -> int:\n if self.start() == other.start():\n return self.match_len(other, self.start())\n\n return 0",
"def commonprefix(m):\r\n if not m: return ''\r\n s1 = min(m)\r\n s2 = max(m)\r\n for i, c in enumerate(s1):\r\n if c != s2[i]:\r\n return s1[:i]\r\n return s1",
"def starts_with(s, prefix):\n if prefix == '':\n return True\n elif s[0] != prefix[0]:\n return False\n else: # s[0] == prefix[0]\n return starts_with(s[1:], prefix[1:])",
"def prefix_freq(self, query_str):\n # if query input is empty, return all strings\n if query_str == '':\n return len(self.text)\n found = self.search_prefix(query_str)\n # if query is found, go to that node\n if found:\n node = self.saved_node\n # extract relevant count that had been performed during insertion of words and traversal of nodes\n count = node.prefix_count\n else:\n return 0\n return count",
"def prefixSearch(self, prefix: str, _prec=\"\"):\n if prefix == \"\":\n # prefix exhasuted, match all\n yield from self.keys(_prec)\n else:\n try:\n # prefix not exhausted, traverse further\n chld = self.children[prefix[0]]\n yield from chld.prefixSearch(prefix[1:], _prec + self.ch)\n except IndexError:\n yield None\n except KeyError:\n yield None",
"def check_prefix(custom_str: str) -> bool:\r\n\r\n return len(custom_str) == 0",
"def count_containers(client, prefix=DOCK_CONTAINER_NAME_PREFIX):\n\n containers = get_containers_names(client)\n return sum([1 for c in containers if prefix in c])",
"def _build_prefix(self):\r\n pattern = self.string2\r\n m = len(pattern)\r\n p = [None]*m\r\n p[0] = 0\r\n k = 0\r\n for i in range(1,m):\r\n while k > 0 and pattern[i] != pattern[k]:\r\n k = p[k-1]\r\n if pattern[k] == pattern[i]:\r\n k = k+1\r\n p[i] = k\r\n self._prefix = p",
"def startswith(list, prefix):\n\n return list[:len(prefix)] == prefix",
"def find_prefix(self, prefix: str) -> Tuple[bool, int]:\n node = self\n # If the root node has no children, then return False.\n # Because it means we are trying to search in an empty trie\n if not self.children:\n return False, 0\n for char in prefix:\n char_not_found = True\n # Search through all the children of the present `node`\n for child in node.children:\n if child.char == char:\n # We found the char existing in the child.\n char_not_found = False\n # Assign node as the child containing the char and break\n node = child\n break\n # Return False anyway when we did not find a char.\n if char_not_found:\n return False, 0\n # Well, we are here means we have found the prefix. Return true to indicate that\n # And also the hit_counter of the last node. This indicates how many words have this\n # prefix\n return True, node.counter"
] | [
"0.71970826",
"0.6974177",
"0.6917197",
"0.68149304",
"0.65477276",
"0.6370917",
"0.63265073",
"0.6228929",
"0.6057594",
"0.6052519",
"0.6026114",
"0.6022409",
"0.59032744",
"0.57368153",
"0.5714387",
"0.56894344",
"0.5656042",
"0.563298",
"0.5631768",
"0.562366",
"0.5601588",
"0.55814755",
"0.55780923",
"0.55590904",
"0.5532969",
"0.5505193",
"0.5482965",
"0.5476823",
"0.5476256",
"0.5470906"
] | 0.9034111 | 0 |
Take any sequence and return a list of all elements that are numbers. >>> get_numbers(range(10)) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] >>> get_numbers([1, 2.5, '3']) [1, 2.5] >>> get_numbers('abc123.4xyz') [] >>> get_numbers((1, 2.5, complex(3, 4), '5')) [1, 2.5, (3+4j)] | def get_numbers(sequence):
new_list = []
for element in sequence:
if isinstance(element, numbers.Number):
new_list.append(element)
return new_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_numbers(string:str, type_=\"int\") -> list:\n \n num_list = []\n for word in string.split():\n if type_ == \"int\":\n try:\n num_list.append(int(word))\n except:\n pass\n elif type_ == \"float\":\n if isfloat(word):\n num_list.append(float(word))\n return num_list",
"def find_numbers(text):\n # -------------------------------------------------------------------------\n # Notice how expressive the list comprehension syntax is, in that it sounds\n # almost exactly the same as you would describe the problem in English.\n # I.e.\n # Convert each word to an integer, for every word in text split over\n # spaces, if the word is comprised only of digits.\n # \n # int(word) Convert each word to an integer,\n # for word for every word\n # in text.split() in text split over spaces\n # if text.isdigit() if the word is comprised only of digits.\n # -------------------------------------------------------------------------\n return [int(word) for word in text.split() if word.isdigit()]",
"def prepare_numbers(numbers):\n \n numb = []\n for item in numbers:\n numb.append(int(item))\n return numb",
"def parse_number_list(x, dtype=None):\n\n # Always convert to float and then perform truncation to integer if necessary\n number_list = np.array([float(x) for x in x.split()])\n\n if dtype is None:\n number_list_trunc = number_list.astype(int)\n\n if np.all((number_list - number_list_trunc) == 0):\n number_list = number_list_trunc\n elif dtype == int:\n number_list = number_list.astype(int)\n elif dtype != float:\n raise NrrdError('dtype should be None for automatic type detection, float or int')\n\n return number_list",
"def find_numbers(text):\n result = []\n for word in text.split():\n if word.isdigit():\n result.append(int(word))\n return result",
"def _cleanse_phone_numbers(numbers):\n\n hit = False\n\n # Ensure we have a list.\n # Might have received a single string or long integer.\n if not isinstance(numbers, list):\n numbers = [numbers]\n\n cleansed_list = list()\n\n for i, elem in enumerate(numbers):\n\n # Only append if it's a valid email\n if elem is not None:\n\n # Convert integers (and others) to string\n wip = str(elem)\n\n # Isolate decimal numbers\n wip = ''.join(e for e in wip if e.isdecimal())\n\n # Strip leading 0 and 1s, if there was more than one decimal number\n if len(wip) > 1:\n while wip[0] in ['0', '1'] and len(wip) > 1:\n wip = wip[1:]\n\n # Append IFF we have a string with 10 digits\n if len(wip) == 10:\n cleansed_list.append(wip)\n hit = True\n\n else:\n pass\n\n # Handle case: No valid number-strings in the list\n cleansed_list = [None] if len(cleansed_list) == 0 else cleansed_list\n\n return cleansed_list, hit",
"def square_or_square_root(numbers):\n result = []\n for element in numbers:\n root = element ** 0.5\n if root.is_integer():\n result.append(int(root))\n else:\n result.append(int(element * element))\n return result",
"def _parse_numbers(self, numberstr: str):\n numbers = []\n currentnumber = \"\"\n\n for c in numberstr:\n if c.isdigit() or c == '-' or c == '.':\n currentnumber += c\n elif len(currentnumber) > 0:\n numbers.append(float(currentnumber))\n currentnumber = \"\"\n if len(currentnumber) > 0:\n numbers.append(float(currentnumber))\n\n return np.array(numbers)",
"def isNumber(x):\n\treturn type(x) in [int, float]",
"def string_to_numeric_list(alist):\n l = None\n try:\n l = [ float(i) for i in alist ]\n except ValueError:\n pass\n else:\n return l\n # try with ',' as a comma separator\n try:\n l = [ float(i.replace(',', '.')) for i in alist ]\n except ValueError:\n raise ValueError, \"Invalid literal for float\"\n else:\n return l",
"def int2list(num):\n return [int(d) for d in str(num)]",
"def num_split(num):\r\n num = list(str(num))\r\n return [int(i) for i in num]",
"def isNumber(x):\n return isinstance(x, (int, float))",
"def _number_finder(s, regex, numconv, py3_safe):\n\n # Split. If there are no splits, return now\n s = regex.split(s)\n if len(s) == 1:\n return tuple(s)\n\n # Now convert the numbers to numbers, and leave strings as strings\n s = remove_empty(s)\n for i in py23_range(len(s)):\n try:\n s[i] = numconv(s[i])\n except ValueError:\n pass\n\n # If the list begins with a number, lead with an empty string.\n # This is used to get around the \"unorderable types\" issue.\n # The _py3_safe function inserts \"\" between numbers in the list,\n # and is used to get around \"unorderable types\" in complex cases.\n # It is a separate function that needs to be requested specifically\n # because it is expensive to call.\n if not isinstance(s[0], py23_basestring):\n return _py3_safe([''] + s) if py3_safe else [''] + s\n else:\n return _py3_safe(s) if py3_safe else s",
"def filter_and_sort_number_strings_as_numbers():\n# fill it out\n result = []\n for s in STRING_LIST:\n if (s.isnumeric()):\n result.append(s)\n return sorted(result, key = lambda s: int(s))",
"def are_all_numbers(values: List[Union[str, int, float]]):\n for value in values:\n if not is_number(value):\n return False\n return True",
"def get_numbers(self, lst):\n digit_count = self.count_dict(lst)\n return [num for num, val in digit_count.items() if val > 0]",
"def are_numeric(*values):\n\n for value in values:\n if not is_numeric(value):\n return False\n return True",
"def numarray(a: list) -> list[float]:\n return [float(aa) for aa in a]",
"def is_number(value):\n\n return isinstance(value, (int, long, float))",
"def get_numeric_types(with_int=True, with_float=True, with_complex=False,\r\n only_theano_types=True):\r\n if only_theano_types:\r\n theano_types = [d.dtype for d in theano.scalar.all_types]\r\n rval = []\r\n\r\n def is_within(cls1, cls2):\r\n # Return True if scalars defined from `cls1` are within the hierarchy\r\n # starting from `cls2`.\r\n # The third test below is to catch for instance the fact that\r\n # one can use ``dtype=numpy.number`` and obtain a float64 scalar, even\r\n # though `numpy.number` is not under `numpy.floating` in the class\r\n # hierarchy.\r\n return (cls1 is cls2 or\r\n issubclass(cls1, cls2) or\r\n isinstance(numpy.array([0], dtype=cls1)[0], cls2))\r\n\r\n for cls in get_numeric_subclasses():\r\n dtype = numpy.dtype(cls)\r\n if ((not with_complex and is_within(cls, numpy.complexfloating)) or\r\n (not with_int and is_within(cls, numpy.integer)) or\r\n (not with_float and is_within(cls, numpy.floating)) or\r\n (only_theano_types and dtype not in theano_types)):\r\n # Ignore this class.\r\n continue\r\n rval.append([str(dtype), dtype, dtype.num])\r\n # We sort it to be deterministic, then remove the string and num elements.\r\n return [x[1] for x in sorted(rval, key=str)]",
"def ints_in(x: str) -> list[int]:\n ex = r'(?:(?<!\\d)-)?\\d+'\n return ints(re.findall(ex, x))",
"def digits(x):\n return [int(d) for d in str(x)]",
"def find_non_numeric(pd_series):\n\n numeric = []\n for elem in pd_series:\n if not elem.isdigit():\n numeric.append(np.nan)\n else:\n numeric.append(int(elem))\n\n return numeric",
"def listarNum(num):\r\n num=str(num)\r\n list_num=np.array([])\r\n for n in num:\r\n n=float(n)\r\n list_num=np.append(list_num, n)\r\n return list_num",
"def _get_numbers(first=False):\n numbers = [int(v) for v in stdin.readline().split()]\n return numbers[0] if first else numbers",
"def filter_list(l):\n return list(filter(lambda x: type(x) == int, l))",
"def filter_and_sort_number_strings():\n# fill it out\n result = []\n for s in STRING_LIST:\n if (s.isnumeric()):\n result.append(s)\n return sorted(result)",
"def no_numbers(expression):\n NUMBERS = '0123456789'\n for i in expression:\n if i in NUMBERS:\n return True\n raise NotValidExpression('Not a valid expression, no numbers')",
"def integers_only(text) -> str:\n return ''.join(x for x in text if x.isdigit())"
] | [
"0.6840391",
"0.6182859",
"0.61705846",
"0.6110136",
"0.5887876",
"0.58715194",
"0.5834376",
"0.5762153",
"0.573113",
"0.56692606",
"0.5651106",
"0.5595228",
"0.55449486",
"0.54777455",
"0.5470738",
"0.5403604",
"0.53557354",
"0.5350814",
"0.5344329",
"0.5285762",
"0.52790505",
"0.5277465",
"0.5272595",
"0.5272346",
"0.52502656",
"0.5250066",
"0.52413857",
"0.52012527",
"0.51964986",
"0.5196418"
] | 0.7987336 | 0 |
Inverse of dna_digit. >>> bp_from_digit(3) 't' >>> bp_from_digit(7) | def bp_from_digit(digit):
# You must use the following dictionary:
bp_map = {0: 'a', 1: 'c', 2: 'g', 3: 't'}
# YOUR CODE HERE
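# Illustrative sketch only (an assumption, not the original answer to this stub):
# valid inputs are exactly the keys of bp_map; anything else is rejected.
if digit not in bp_map: raise ValueError('invalid DNA digit: %r' % (digit,))
return bp_map[digit]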
| {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def dna_digit(bp):\r\n # You must use the following dictionary:\r\n bp_map = {'a': 0, 'c': 1, 'g': 2, 't': 3}\r\n\r\n # YOUR CODE HERE\r",
"def dna_number(bp_seq):\r\n # Hint: use dna_digit\r\n\r\n # YOUR CODE HERE\r",
"def complement_base(base):\n\n if base == 'A' or base == 'a':\n return 'T'\n elif base == 'T' or base == 't':\n return 'A'\n elif base == 'G' or base == 'g':\n return 'C'\n else:\n return 'G'",
"def ToBase(b, n):\r\n d = []\r\n while n:\r\n d.append(n % b)\r\n n //= b\r\n d.reverse() \r\n return int(''.join(map(str, d)))",
"def decode_bin(tup_terme, nbr):\n terme, msq = tup_terme\n return \"\".join(['-' if msq & (1 << k) else '1' if terme & (1 << k )\n else '0' for k in range(nbr-1, -1, -1)])",
"def complement_base(base):\n return complements[base]",
"def baseN(num, b, numerals=\"0123456789abcdefghijklmnopqrstuvwxyz\"):\n neg = num < 0\n num = abs(num)\n val = ((num == 0) and numerals[0]) or (baseN(num // b, b, numerals).lstrip(numerals[0]) + numerals[num % b])\n return '-' + val if neg else val",
"def render_to_numeral_system(n: int, alphabet: str) -> str:\n assert n >= 0\n assert len(alphabet) == len(set(alphabet))\n if n == 0:\n return alphabet[0]\n b = len(alphabet)\n reverse_digits = []\n while n > 0:\n reverse_digits.append(alphabet[n % b])\n n = n // b\n return \"\".join(reversed(reverse_digits))",
"def testBinizeUnbinize(self):\n console.terse(\"{0}\\n\".format(self.testBinizeUnbinize.__doc__))\n\n n = 5\n u = aiding.binize(n, 8)\n self.assertEqual(u, '00000101')\n n = aiding.unbinize(u)\n self.assertEqual(n, 5)",
"def decimal_to_base(n, base):\n\n chars = \"0123456789ABCDEF\"\n stack = []\n is_negative = False\n\n if n < 0:\n n = abs(n)\n is_negative = True\n\n while n > 0:\n remainder = n % base\n stack.append(remainder)\n n = n // base\n\n result = \"\"\n\n while stack:\n result = result + chars[stack.pop()]\n\n if is_negative:\n return \"-\"+result\n else:\n return result",
"def reverseComplement(s):\n\tcomplement = {'A':'T', 'C':'G', 'G':'C', 'T':'A', 'N':'N'}\n\tt = ''\n\tfor base in s:\n\t\tt = complement[base] + t\n\treturn t",
"def dec2base(n, base):\n convertstring = \"0123456789ABCDEF\"\n if n < base:\n return convertstring[n]\n else:\n return dec2base(n // base, base) + convertstring[n % base]",
"def get_reverse_complement(dna):\n \n dna = dna.replace('T','N')\n dna = dna.replace('A','T')\n dna = dna.replace('N','A')\n dna = dna.replace('C','N')\n dna = dna.replace('G','C')\n dna = dna.replace('N','G')\n dna = dna[::-1]\n return dna",
"def baseconvert(number,fromdigits,todigits):\r\n base_length = len(todigits)\r\n\r\n if str(number)[0]=='-':\r\n number = str(number)[1:]\r\n neg=1\r\n else:\r\n neg=0\r\n\r\n # make an integer out of the number\r\n x=0\r\n for digit in str(number):\r\n x = x*len(fromdigits) + fromdigits.index(digit)\r\n\r\n # create the result in base 'len(todigits)'\r\n if x is 0:\r\n res = todigits[0]\r\n else:\r\n res=\"\"\r\n while x>0:\r\n digit = x % base_length\r\n res = todigits[digit] + res\r\n x = int(x / base_length)\r\n if neg:\r\n res = \"-\"+res\r\n\r\n return res",
"def sub2Nib(b):\r\n return sBox[b >> 4] + (sBox[b & 0x0f] << 4)",
"def v2r(n, base):\n b = len(base)\n digits = ''\n while n > 0:\n digits = base[n % b] + digits\n n = n // b\n return digits",
"def dec_to_b36(d: int) -> str:\n return \"{:036b}\".format(d)",
"def nibble(meal):\n broth = \"\"\n for i1 in range(-len(meal), 0):\n broth += nom(meal, i1)\n return broth",
"def convertebase10basen(basedest, numero):\n ret = \"\"\n while True:\n digit = numero%basedest\n ret = ret + DIGITOS[digit]\n numero = numero // basedest\n if numero == 0:\n break\n return ret[::-1]",
"def bin_unil_to_bil(a):\n return 2*a - 1",
"def get_base_2(n):\n return str(bin(int(n))).removeprefix('0b')",
"def convertIntoBaseAddress(num):\n chars = []\n while num > 0:\n num, d = divmodFunction(num)\n chars.append(string.ascii_uppercase[d - 1])\n return ''.join(reversed(chars))",
"def number2x(num, tik='-'):\n return ''.join(['x' if x=='0' else tik for x in list(\"{:08b}\".format(num)) ][::-1]) #<== [::-1] for reverse printing => zero on left etc.",
"def r2v(digits, base):\n b = len(base)\n n = 0\n for d in digits:\n n = b * n + base[:b].index(d)\n return n",
"def invertNumber( n ):\n\tsign = +1\t\t\t\t\t\t\t\t# We need to preserve the sign.\n\tif n < 0:\n\t\tsign = -1\n\tn = abs( n )\n\tpower = int( math.log10( n ) )\t\t\t# We need no know the positional value of the left-most digit.\n\tresult = 0\n\twhile n > 0:\n\t\tdigit = n % 10\n\t\tresult += digit * ( 10 ** power )\t# The right-most digit is multiplied by the highest positional value of the\n\t\tn //= 10\t\t\t\t\t\t\t# original number. Then, we move to the next digit (to the left), and reduce the\n\t\tpower -= 1\t\t\t\t\t\t\t# power of 10 we have to use with it. We proceed iteratively until n is depleted.\n\n\treturn sign * result",
"def __invert__(self):\n return BitBoard(~self.num)",
"def calculateDeCrypt(asci: int, d: int, n: int) -> int:\n return pow(int(asci),d,n)",
"def padded_dec2base(n, q, base):\n convertstring = \"0123456789ABCDEF\"\n if n < base:\n return convertstring[n].zfill(q)\n else:\n return (dec2base(n // base, base) + convertstring[n % base]).zfill(q)",
"def _vint_dezigzagify(number):\n\n assert number >= 0, 'number is less than 0'\n is_neg = number & 1\n num = number >> 1\n if is_neg:\n num = ~num\n return num",
"def get_reverse_complement(dna):\n L=dna\n rdna=L[::-1]\n print rdna\n newrdna=\"\"\n for i in range(0,len(rdna)):\n if rdna[i]=='A':\n newrdna='T'+newrdna\n elif rdna[i]=='G':\n newrdna='C'+newrdna\n elif rdna[i]=='T':\n newrdna='A'+newrdna\n elif rdna[i]=='C':\n newrdna='G'+newrdna\n S=newrdna\n P=S[::-1]\n return P"
] | [
"0.6980157",
"0.61201626",
"0.56881356",
"0.55594414",
"0.55402386",
"0.5535841",
"0.5491018",
"0.547424",
"0.53357327",
"0.5317679",
"0.5298355",
"0.5296217",
"0.52697694",
"0.52548254",
"0.52155155",
"0.5207737",
"0.52001643",
"0.5196097",
"0.5192527",
"0.5165123",
"0.51456416",
"0.5135549",
"0.51284134",
"0.51198196",
"0.51186496",
"0.5109864",
"0.50856394",
"0.50839597",
"0.5065851",
"0.50555265"
] | 0.6602838 | 1 |
Take a DNA string, bp_seq (a string of a's, c's, g's, and t's), and interpret it as a base-4 number using dna_digit for each base pair. A ValueError is raised if there are any characters besides those accepted by dna_digit. >>> dna_number('') 0 >>> dna_number('aaa') 0 >>> dna_number('t') 3 >>> dna_number('ca') 4 >>> dna_number('CAA') 16 >>> dna_number('cgt') 27 >>> dna_number('cggaattAGGTtttacgtactggatcaat') 117360495799280451 >>> dna_number('whatever') | def dna_number(bp_seq):
# Hint: use dna_digit
# YOUR CODE HERE
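# Illustrative sketch only (an assumption, not the original answer to this stub);
# it assumes dna_digit maps a/c/g/t to 0-3 and raises ValueError for anything
# else, and lowercases first so the uppercase doctests above ('CAA' -> 16) hold.
total = 0
for bp in bp_seq.lower(): total = total * 4 + dna_digit(bp)
return total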
| {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def dna_digit(bp):\r\n # You must use the following dictionary:\r\n bp_map = {'a': 0, 'c': 1, 'g': 2, 't': 3}\r\n\r\n # YOUR CODE HERE\r",
"def pattern_to_number(dna):\n assert (is_dna(dna))\n idx = 'ACGT'\n return sum(idx.index(dna_base) * 4 ** i for i, dna_base in enumerate(dna[::-1]))",
"def patten2number(sequence):\n try:\n if len(sequence) == 0:\n return 0\n last_base = sequence[-1]\n prefix = sequence[:-1]\n return 4 * patten2number(prefix) + BASE_TO_NUMBER[last_base]\n except KeyError:\n raise ValueError('Not able to convert nucleotide: %s' % last_base)",
"def convert_to_numeric(sequence):\n \n int_mapped_seq=[]\n DNA_to_numeric = get_DNA_to_numeric()\n \n for n in sequence:\n int_mapped_seq.append(DNA_to_numeric[n])\n return int_mapped_seq",
"def transformNumberToDNA(inputState):\n if inputState == 0:\n result = \"A\"\n elif inputState == 1:\n result = \"C\"\n elif inputState == 2:\n result = \"G\"\n elif inputState == 3:\n result = \"T\"\n else:\n raise ValueError(\"The input state is not valid as 0,1,2 or 3\") \n return result",
"def bp_from_digit(digit):\r\n # You must use the following dictionary:\r\n bp_map = {0: 'a', 1: 'c', 2: 'g', 3: 't'}\r\n\r\n # YOUR CODE HERE\r",
"def nucleotide_numbering():\n nucleotide_to_number = {'A': 0, 'C': 1, 'G': 2, 'T': 3}\n number_to_nucleotide = {0: 'A', 1: 'C', 2: 'G', 3: 'T'}\n return nucleotide_to_number, number_to_nucleotide",
"def evalute_number(dialed):\n if (len(dialed) == 11 or len(dialed) == 10) and str(dialed).startswith(\"0\"):\n # UK Number\n return \"+44%s\" % (dialed[1:])\n elif len(dialed) == 6:\n # Local Fishguard numbers\n return \"+441348%s\" % (dialed)\n return None",
"def convert_to_dna(protein_sequence, wt_protein_dict):\n variant_dna_codons = []\n for index in range(0, len(protein_sequence.seq)):\n wt_aa = str(wt_protein_dict[index + 1][0])\n codon = str(wt_protein_dict[index + 1][1])\n variant_aa = protein_sequence.seq[index]\n if variant_aa != wt_aa:\n if variant_aa is not '-':\n codon = sorted_codon_table[str(variant_aa)][0]\n variant_dna_codons.append(codon)\n variant_dna_str = \"\".join(variant_dna_codons)\n variant_dna_seq = Seq(variant_dna_str, IUPAC.unambiguous_dna)\n variant_dna_seq_obj = SeqRecord(variant_dna_seq, id=protein_sequence.id, name=protein_sequence.name,\n description=protein_sequence.description)\n return variant_dna_seq_obj",
"def dna_to_protein(seq):\n\n # Verify a convertible sequence\n if len(seq) % 3 != 0:\n raise RuntimeError('Total number of bases must be a multiple of 3')\n\n # Iterate through adding the proteins\n protein = ''\n for i in range(0, len(seq), 3):\n protein += bioinfo_dicts.codons[seq[i:i+3]]\n return protein",
"def number(full_address):\n warning_message = \"\"\"\\n\n This parser should be used with the knowledge that this\n function is open to four significant vulnerabilities:\n 1) `number()` will parse the first numeric characters it\n an address string contains (read from left to right).\n If the address string has:\n a) no building number\n b) numeric characters unrelated to addressable\n information at the start of the address string\n 2) Address numbers separated by `&` or `,` will not be parsed\n 3) Building names that include numeric characters are\n incorrectly parsed as building numbers\\n\n \"\"\"\n warnings.warn(warning_message)\n return capture_address_element(NUMBER_PATTERN, full_address)",
"def dna(self):\n return self.seq.replace('U', 'T').replace('u', 't')",
"def dna_to_protein(dna: str, start: int=0):\n return rna_to_protein(dna_to_rna(dna, start), start=start)",
"def test_ModelDnaCodonSequence(self):\n d = ArrayDnaCodonSequence(\"UUUCGU\")\n self.assertEqual(str(d), \"TTTCGT\")\n self.assertEqual(d._data, array([0, 28]))\n self.assertEqual(str(d.to_rna()), \"UUUCGU\")\n self.assertEqual(str(d.to_dna()), \"TTTCGT\")",
"def get_patient_nr(segment):\n try:\n national_register = str(segment[19])\n except IndexError:\n nr_list = segment[2:5]\n national_register = [nr for nr in nr_list if str(nr) is not \"\"].pop()[0]\n national_register = str(national_register).split(\"^\")[0]\n return national_register",
"def aa_generator_DNA(dnaseq):\n return (translate_DNA_codon(dnaseq[n:n+3])\n for n in range(0, len(dnaseq), 3))",
"def get_dna_value(self, index: int):\n return self.dna[index]",
"def letter_to_num(self, string, dict_):\n #dict_= {'A': '0', 'C': '1', 'D': '2', 'E': '3', 'F': '4', 'G': '5', 'H': '6', 'I': '7', 'K': '8', 'L': '9', 'M': '10', 'N': '11', 'P': '12', 'Q': '13', 'R': '14', 'S': '15', 'T': '16', 'V': '17', 'W': '18', 'Y': '19'}\n patt = re.compile('[' + ''.join(dict_.keys()) + ']')\n num_string = patt.sub(lambda m: dict_[m.group(0)] + ' ', string)\n #print(num_string)\n #print(type(num_string))\n num = [int(i) for i in num_string.split()]\n return num",
"def dna_probability(dna:str, gc:float, return_log=False) -> float:\n at = (1 - gc) / 2.0\n gc /= 2.0\n\n p = 1\n for l in dna:\n if l in \"AT\":\n p *= at\n elif l in \"CG\":\n p *= gc\n else:\n raise ValueError(\"You should use dna string.\")\n if return_log:\n return math.log(p, 10)\n else:\n return p",
"def translate_DNA(dnaseq):\n\n gen = aa_generator_DNA(dnaseq)\n seq = ''\n aa = next(gen, None)\n while aa:\n seq += aa\n aa = next(gen, None)\n return seq",
"def RNAorDNA ( seq ) :\n\tif dna_regex . search ( seq ):\n\t\treturn RNA ( seq )\n\n\tif rna_regex . search ( seq ):\n\t\treturn DNA ( seq )",
"def guess_seq(seq):\n dna = \"ACTG-N\"\n \n chars = util.unique(seq.upper())\n \n for char in chars:\n if char not in dna:\n return \"pep\"\n return \"dna\"",
"def to_rna(dna=''):\n themap = {'G':'C', 'C':'G', 'T': 'A', 'A': 'U'}\n return ''.join([themap[each] for each in dna])",
"def to_rna(dna):\r\n return dna.translate(str.maketrans('ATGC','UACG'))",
"def is_valid_sequence(dna):\n num_char = 0\n \n for char in dna:\n if not char in 'ATCG':\n num_char += 1\n\n return num_char == 0",
"def formatPostalCode(string):\n if string.isdigit():\n return int(string)\n else :\n return 0",
"def base_number(number, count, dict_cardinal_num):\n special_numeral = [\"trăm\", \"mười\", \"mươi\", \"linh\", \"lăm\", \"mốt\"]\n list_cardinal_numeral = []\n # Divide number (abc) and follow place's number\n a = number // 100 # hundreds\n b = (number % 100) // 10 # Tens\n c = number % 10 # Ones\n # check a\n if a > 0:\n list_cardinal_numeral.append(dict_cardinal_num[a])\n list_cardinal_numeral.append(special_numeral[0])\n elif a == 0:\n if count > 1 and (b > 0 or c > 0):\n list_cardinal_numeral.append(dict_cardinal_num[a])\n list_cardinal_numeral.append(special_numeral[0])\n # check b\n if b == 0:\n if c > 0:\n if a > 0 or count > 1:\n list_cardinal_numeral.append(special_numeral[3])\n elif b > 0:\n if b == 1:\n list_cardinal_numeral.append(special_numeral[1])\n elif b > 1:\n list_cardinal_numeral.append(dict_cardinal_num[b])\n list_cardinal_numeral.append(special_numeral[2])\n # check c\n if c == 0:\n if count == 1 and a == 0 and b == 0:\n list_cardinal_numeral.append(dict_cardinal_num[c])\n elif c > 0:\n if b >= 1 and c == 5:\n list_cardinal_numeral.append(special_numeral[4])\n elif b >= 2 and c == 1:\n list_cardinal_numeral.append(special_numeral[5])\n else:\n list_cardinal_numeral.append(dict_cardinal_num[c])\n\n return \" \".join(list_cardinal_numeral)",
"def number_to_pattern(number, length):\n\n idx = 'ACGT'\n pattern = ''\n while number > 0:\n pattern += idx[number % 4]\n number //= 4\n return idx[0] * (length - len(pattern)) + pattern[::-1]",
"def to_rna(dna):\n\n #used a temp * to avoid mix ups\n dna = dna.replace('C', '*')\n dna = dna.replace('G', 'C')\n dna = dna.replace('*', 'G')\n\n dna = dna.replace('A', 'U') \n dna = dna.replace('T', 'A')\n\n return dna",
"def translate_DNA(dnaStrand,translation_table='DNA_TABLE.txt'):\r\n\r\n #dictionary to store the corresponding protein for each codon\r\n d={'TTT':'F','CTT':'L','ATT':'I','GTT':'V','TTC':'F','CTC':'L','ATC':'I','GTC':'V','TTA':'L','CTA':'L','ATA':'I','GTA':'V','TTG':'L','CTG':'L','ATG':'M','GTG':'V','TCT':'S','CCT':'P','ACT':'T','GCT':'A','TCC':'S','CCC':'P','ACC':'T','GCC':'A','TCA':'S','CCA':'P','ACA':'T','GCA':'A','TCG':'S','CCG':'P','ACG':'T','GCG':'A','TAT':'Y','CAT':'H','AAT':'N','GAT':'D','TAC':'Y','CAC':'H','AAC':'N','GAC':'D','TAA':'Stop','CAA':'Q','AAA':'K','GAA':'E','TAG':'Stop','CAG':'Q','AAG':'K','GAG':'E','TGT':'C','CGT':'R','AGT':'S','GGT':'G','TGC':'C','CGC':'R','AGC':'S','GGC':'G','TGA':'Stop','CGA':'R','AGA':'R','GGA':'G','TGG':'W','CGG':'R','AGG':'R','GGG':'G'}\r\n protiens=\"\"\r\n for i in range(0,len(dnaStrand),3):\r\n #extracting each codon\r\n s=dnaStrand[i:i+3]\r\n if(d[s]!=\"Stop\"):\r\n protiens+=d[s]\r\n\r\n return protiens"
] | [
"0.70117366",
"0.6626081",
"0.63483727",
"0.58927906",
"0.5883037",
"0.5798747",
"0.56869143",
"0.5573457",
"0.5520541",
"0.5464343",
"0.5400514",
"0.53591526",
"0.5342177",
"0.52771735",
"0.5237312",
"0.5151023",
"0.5144183",
"0.5139878",
"0.5138876",
"0.51262164",
"0.51256275",
"0.5106292",
"0.5103625",
"0.5056438",
"0.5038815",
"0.50346476",
"0.5029778",
"0.5013917",
"0.500572",
"0.49389982"
] | 0.8035558 | 0 |
Check to see if transforms match for 3D AABBs. | def transforms_match(bounds, extents, transforms):
assert len(bounds) == len(extents)
assert len(bounds) == len(transforms)
box = g.trimesh.creation.box
for b, t, e in zip(bounds, transforms, extents):
# create a box with the placed bounds
a = box(bounds=b)
# create a box using the roll transform
b = box(extents=e, transform=t)
# they should be identical
if not g.np.allclose(a.bounds, b.bounds):
return False
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _intersects_3D(A, B):\n return all([_intersects_1D((A[i], A[i+3]), (B[i], B[i+3]))\n for i in range(3)])",
"def match(cube):\n \n #M1'\n M1 = (cube[1,1,0] & cube[1,1,1] & \n (not cube[0,0,2]) & (not cube[1,0,2]) & (not cube[2,0,2]) &\n (not cube[0,1,2]) & (not cube[1,1,2]) & (not cube[2,1,2]) &\n (not cube[0,2,2]) & (not cube[1,2,2]) & (not cube[2,2,2]));\n if M1:\n return True;\n \n # gerate rotations around z/vertical axis\n cuberots = [rotate(cube, axis = 2, steps = rot) for rot in range(4)];\n #print('Cube rotations:');\n #[printCube(c) for c in cuberots] \n \n # M2' and all rotations\n for curo in cuberots:\n M2 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]));\n if M2:\n return True;\n \n # M3' and all rotations\n for curo in cuberots:\n M3 = (curo[1,1,0] & curo[1,1,1] & curo[1,2,1] & curo[2,1,1] &\n (not curo[0,0,2]) & (not curo[1,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]));\n if M3:\n return True;\n \n # M4' and all rotations\n for curo in cuberots:\n M4 = (curo[1,1,0] & curo[1,1,1] & curo[2,2,1] & curo[2,2,2] &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]));\n if M4:\n return True;\n \n # M5' and all rotations\n for curo in cuberots:\n M5 = (curo[1,2,0] & curo[1,1,1] & \n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M5:\n return True;\n \n # M6' and all rotations\n for curo in cuberots:\n M6 = (curo[2,1,0] & curo[1,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) &\n (not curo[0,1,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M6:\n return True;\n \n # M7' and all rotations\n for curo in cuberots:\n M7 = (curo[2,2,0] & curo[1,1,1] &\n (not curo[0,0,0]) & (not curo[1,0,0]) & (not curo[2,0,0]) &\n (not curo[0,1,0]) & (not curo[1,1,0]) & (not curo[2,1,0]) &\n (not curo[0,2,0]) & (not curo[1,2,0]) &\n (not curo[0,0,1]) & (not curo[1,0,1]) & (not curo[2,0,1]) &\n (not curo[0,1,1]) & (not curo[2,1,1]) &\n (not curo[0,2,1]) & (not curo[1,2,1]) & (not curo[2,2,1]) &\n (not curo[0,0,2]) & (not curo[1,0,2]) & (not curo[2,0,2]) &\n (not curo[0,1,2]) & (not curo[1,1,2]) & (not curo[2,1,2]) &\n (not curo[0,2,2]) & (not curo[1,2,2]) & (not curo[2,2,2]));\n if M7:\n return True;\n \n return False;",
"def test_2_2_3D_cube_splits(self):\n check = [(0, 0, 0), (1, 1, 1), (1, 0, 0), (1, 1, 0), (1, 0, 1),\n (0, 1, 0),\n (0, 1, 1), (0, 0, 1), (0.5, 0.5, 0.5), (0.0, 0.5, 0.5),\n (0.0, 0.0, 0.5), (0.0, 0.5, 0.0), (0.5, 0.0, 0.5),\n (0.5, 0.0, 0.0),\n (0.5, 0.5, 0.0), (0.25, 0.25, 0.25), (1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5),\n (1.0, 0.5, 1.0), (0.5, 1.0, 0.5), (0.5, 1.0, 1.0),\n (0.5, 0.5, 1.0),\n (0.75, 0.75, 0.75), (1.0, 0.0, 0.5), (1.0, 0.5, 0.0),\n (0.75, 0.25, 0.25), (0.5, 1.0, 0.0), (0.75, 0.75, 0.25),\n (0.5, 0.0, 1.0), (0.75, 0.25, 0.75), (0.0, 1.0, 0.5),\n (0.25, 0.75, 0.25), (0.0, 0.5, 1.0), (0.25, 0.75, 0.75),\n (0.25, 0.25, 0.75), (0.5, 0.25, 0.25), (0.5, 0.5, 0.25),\n (0.5, 0.25, 0.5), (0.25, 0.5, 0.25), (0.25, 0.5, 0.5),\n (0.25, 0.25, 0.5), (0.375, 0.375, 0.375), (0.0, 0.25, 0.25),\n (0.0, 0.0, 0.25), (0.0, 0.25, 0.0), (0.25, 0.0, 0.25),\n (0.25, 0.0, 0.0), (0.25, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.5, 0.25), (0.0, 0.25, 0.5), (0.125, 0.375, 0.375),\n (0.25, 0.0, 0.5), (0.125, 0.125, 0.375), (0.25, 0.5, 0.0),\n (0.125, 0.375, 0.125), (0.5, 0.0, 0.25), (0.375, 0.125, 0.375),\n (0.5, 0.25, 0.0), (0.375, 0.125, 0.125), (0.375, 0.375, 0.125),\n (0.5, 0.75, 0.75), (0.5, 0.5, 0.75), (0.5, 0.75, 0.5),\n (0.75, 0.5, 0.75), (0.75, 0.5, 0.5), (0.75, 0.75, 0.5),\n (0.625, 0.625, 0.625), (1.0, 0.75, 0.75), (1.0, 1.0, 0.75),\n (1.0, 0.75, 1.0), (0.75, 1.0, 0.75), (0.75, 1.0, 1.0),\n (0.75, 0.75, 1.0), (0.875, 0.875, 0.875), (1.0, 0.5, 0.75),\n (1.0, 0.75, 0.5), (0.875, 0.625, 0.625), (0.75, 1.0, 0.5),\n (0.875, 0.875, 0.625), (0.75, 0.5, 1.0), (0.875, 0.625, 0.875),\n (0.5, 1.0, 0.75), (0.625, 0.875, 0.625), (0.5, 0.75, 1.0),\n (0.625, 0.875, 0.875), (0.625, 0.625, 0.875),\n (0.75, 0.5, 0.25),\n (0.75, 0.25, 0.5), (0.625, 0.375, 0.375), (1.0, 0.25, 0.25),\n (1.0, 0.0, 0.25), (1.0, 0.25, 0.0), (0.75, 0.0, 0.25),\n (0.75, 0.0, 0.0), (0.75, 0.25, 0.0), (0.875, 0.125, 0.125),\n (1.0, 0.5, 0.25), (1.0, 0.25, 0.5), (0.875, 0.375, 0.375),\n (0.75, 0.0, 0.5), (0.875, 0.125, 0.375), (0.75, 0.5, 0.0),\n (0.875, 0.375, 0.125), (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.625, 0.375, 0.125), (0.5, 0.75, 0.25),\n (0.625, 0.625, 0.375),\n (1.0, 0.75, 0.25), (1.0, 1.0, 0.25), (1.0, 0.75, 0.0),\n (0.75, 1.0, 0.25), (0.75, 1.0, 0.0), (0.75, 0.75, 0.0),\n (0.875, 0.875, 0.125), (0.875, 0.625, 0.375),\n (0.875, 0.875, 0.375),\n (0.875, 0.625, 0.125), (0.5, 1.0, 0.25), (0.625, 0.875, 0.375),\n (0.5, 0.75, 0.0), (0.625, 0.875, 0.125), (0.625, 0.625, 0.125),\n (0.5, 0.25, 0.75), (0.625, 0.375, 0.625), (1.0, 0.25, 0.75),\n (1.0, 0.0, 0.75), (1.0, 0.25, 1.0), (0.75, 0.0, 0.75),\n (0.75, 0.0, 1.0), (0.75, 0.25, 1.0), (0.875, 0.125, 0.875),\n (0.875, 0.375, 0.625), (0.875, 0.125, 0.625),\n (0.875, 0.375, 0.875),\n (0.5, 0.0, 0.75), (0.625, 0.125, 0.625), (0.5, 0.25, 1.0),\n (0.625, 0.125, 0.875), (0.625, 0.375, 0.875),\n (0.25, 0.75, 0.5),\n (0.375, 0.625, 0.375), (0.0, 0.75, 0.25), (0.0, 1.0, 0.25),\n (0.0, 0.75, 0.0), (0.25, 1.0, 0.25), (0.25, 1.0, 0.0),\n (0.25, 0.75, 0.0), (0.125, 0.875, 0.125), (0.0, 0.75, 0.5),\n (0.125, 0.625, 0.375), (0.25, 1.0, 0.5), (0.125, 0.875, 0.375),\n (0.125, 0.625, 0.125), (0.375, 0.875, 0.375),\n (0.375, 0.875, 0.125),\n (0.375, 0.625, 0.125), (0.25, 0.5, 0.75),\n (0.375, 0.625, 0.625),\n (0.0, 0.75, 0.75), (0.0, 1.0, 0.75), (0.0, 0.75, 1.0),\n (0.25, 1.0, 0.75), (0.25, 1.0, 1.0), (0.25, 0.75, 1.0),\n (0.125, 0.875, 0.875), (0.0, 0.5, 0.75), (0.125, 0.625, 0.625),\n (0.125, 0.875, 0.625), (0.25, 0.5, 1.0), (0.125, 0.625, 0.875),\n (0.375, 0.875, 0.625), (0.375, 0.875, 
0.875),\n (0.375, 0.625, 0.875),\n (0.375, 0.375, 0.625), (0.0, 0.25, 0.75), (0.0, 0.0, 0.75),\n (0.0, 0.25, 1.0), (0.25, 0.0, 0.75), (0.25, 0.0, 1.0),\n (0.25, 0.25, 1.0), (0.125, 0.125, 0.875),\n (0.125, 0.375, 0.625),\n (0.125, 0.125, 0.625), (0.125, 0.375, 0.875),\n (0.375, 0.125, 0.625),\n (0.375, 0.125, 0.875), (0.375, 0.375, 0.875)]\n\n nn_checks = {(0.5, 0.25, 0.25): [(0.375, 0.375, 0.125), (0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25),\n (0.625, 0.375, 0.375),\n (0.625, 0.125, 0.375),\n (0.625, 0.125, 0.125),\n (0.5, 0.5, 0.25), (0.25, 0.25, 0.25),\n (0.375, 0.375, 0.375),\n (0.5, 0.25, 0.5), (0.5, 0.5, 0.5),\n (0.5, 0.0, 0.25),\n (0.375, 0.125, 0.375), (0.5, 0.0, 0.5),\n (0.5, 0.25, 0.0),\n (0.375, 0.125, 0.125), (0.5, 0.0, 0.0),\n (0.625, 0.375, 0.125)],\n (0.625, 0.625, 0.875): [(0.75, 0.5, 1.0),\n (0.75, 0.75, 1.0),\n (0.5, 0.75, 1.0), (0.5, 0.5, 1.0),\n (0.5, 0.5, 0.75),\n (0.5, 0.75, 0.75),\n (0.75, 0.5, 0.75),\n (0.75, 0.75, 0.75)],\n (0, 0, 0): [(0.0, 0.25, 0.0), (0.125, 0.125, 0.125),\n (0.0, 0.0, 0.25), (0.25, 0.0, 0.0),\n (0.0, 0.25, 0.25), (0.25, 0.25, 0.0),\n (0.25, 0.0, 0.25)]}\n\n init_triangulation(3, 2, check, nn_checks)",
"def is3D(data):\n return data.find(\"x3\") != -1 and data.find(\"y3\") != -1 and data.find(\"z3\") != -1",
"def check_transforms_match(self, transform: Mapping) -> None:\n xform_id = transform.get(TraceKeys.ID, \"\")\n if xform_id == id(self):\n return\n # TraceKeys.NONE to skip the id check\n if xform_id == TraceKeys.NONE:\n return\n xform_name = transform.get(TraceKeys.CLASS_NAME, \"\")\n warning_msg = transform.get(TraceKeys.EXTRA_INFO, {}).get(\"warn\")\n if warning_msg:\n warnings.warn(warning_msg)\n # basic check if multiprocessing uses 'spawn' (objects get recreated so don't have same ID)\n if torch.multiprocessing.get_start_method() in (\"spawn\", None) and xform_name == self.__class__.__name__:\n return\n raise RuntimeError(\n f\"Error {self.__class__.__name__} getting the most recently \"\n f\"applied invertible transform {xform_name} {xform_id} != {id(self)}.\"\n )",
"def test_has_alpha(self):\n image_3d = np.array([[ # One image with shape (1, 2, 3)\n [1, 2, 3],\n [4, 5, 6]\n ]])\n image_4d = np.array([[ # One image with shape (1, 3, 4)\n [1, 2, 3, 4],\n [4, 5, 6, 7],\n [8, 9, 10, 11]\n ]])\n image_5d = np.array([[ # One image with shape (1, 1, 5)\n [1, 2, 3, 4, 5]\n ]])\n self.assertEqual(localHDR.has_alpha(image_3d), False)\n self.assertEqual(localHDR.has_alpha(image_4d), True)\n self.assertEqual(localHDR.has_alpha(image_5d), False)",
"def test_hash_equality(self):\n origin = np.random.randn(3)\n normal = np.random.randn(3)\n up_vector = np.random.randn(3)\n up_vector2 = np.random.randn(3)\n p1 = shapes_3d.CoordinatePlane(origin, normal, up_vector)\n p2 = shapes_3d.CoordinatePlane(origin, normal, up_vector)\n p3 = shapes_3d.CoordinatePlane(origin, normal, up_vector2)\n \n self.assertEqual(p1, p2)\n self.assertNotEqual(p1, p3)",
"def _validate_transforms(self):\n if len(self.transforms) > 1:\n for transform in self.transforms:\n if transform.applies is None:\n raise ValueError(\n 'If more than one transform is provided, each '\n 'provided transform must provide an apply field.',\n )",
"def _validate_transforms(self):\n if len(self.transforms) > 1:\n for transform in self.transforms:\n if transform.applies is None:\n raise ValueError(\n 'If more than one transform is provided, each '\n 'provided transform must provide an apply field.',\n )",
"def is3DImage(self):\n\t\treturn self.is3D",
"def test_perspective_transform():\n # TODO: write this\n assert(True)",
"def _check_same_fov(*args, **kwargs):\n raise_error = kwargs.pop(\"raise_error\", False)\n for i, arg in enumerate(args):\n kwargs[f\"img_#{i}\"] = arg\n errors = []\n for (a_name, a_img), (b_name, b_img) in itertools.combinations(\n kwargs.items(), 2\n ):\n if not a_img.shape[:3] == b_img.shape[:3]:\n errors.append((a_name, b_name, \"shape\"))\n if not np.allclose(a_img.affine, b_img.affine):\n errors.append((a_name, b_name, \"affine\"))\n if len(errors) > 0 and raise_error:\n raise ValueError(\n \"Following field of view errors were detected:\\n\"\n + \"\\n\".join(\n [\n f\"- {e[0]} and {e[1]} do not have the same {e[2]}\"\n for e in errors\n ]\n )\n )\n return len(errors) == 0",
"def is3_d(self):\n return self.container['is3_d']",
"def _array_is_aligned(self):\n rot_matrix = self.axes_wcs.wcs.pc\n return np.allclose(rot_matrix, np.eye(self.axes_wcs.wcs.naxis))",
"def is_xyz_affable(coordmap, name2xyz=None):\n try:\n xyz_affine(coordmap, name2xyz)\n except SpaceError:\n return False\n return True",
"def _type_check(data):\n if data.__class__.__name__ != \"Matrix3\":\n return False\n return True",
"def _areDifferent_Mat44(self, mat1, mat2, thresholdLoc = 1.0, thresholdRot = 1.0):\r\n areDifferent = False\r\n jnd_vect = mathutils.Vector((thresholdLoc,thresholdLoc,thresholdRot))\r\n t1, t2 = mat1.to_translation(), mat2.to_translation()\r\n r1, r2 = mat1.to_euler(), mat2.to_euler()\r\n for n in range(3):\r\n if (abs(t1[n]-t2[n]) > thresholdLoc) or (abs(math.degrees(r1[n]-r2[n])) > thresholdRot): areDifferent = True\r\n return areDifferent",
"def testMatchSwarpLanczos3Image(self):\n self.compareToSwarp(\"lanczos3\", useWarpExposure=False)",
"def matchTransform(*args, pivots: bool=True, position: bool=True, rotation: bool=True, scale:\n bool=True, **kwargs)->None:\n pass",
"def Has3d(self, *args):\n return _Adaptor3d.Adaptor3d_TopolTool_Has3d(self, *args)",
"def _is_valid_pose(self):\n contacts = self.gc.getRobot().robot.contacts\n n_object_contacts = 0\n is_thumb_in_contact = False\n for contact in contacts:\n if contact.body1 == self.object_name:\n n_object_contacts += 1\n if contact.body2 == '_chain4_link2':\n is_thumb_in_contact = True\n elif contact.body2 == self.object_name:\n n_object_contacts += 1\n if contact.body1 == '_chain4_link2':\n is_thumb_in_contact = True\n\n is_valid = n_object_contacts >= 2\n return is_valid",
"def test_3_2_4D_cube_splits(self):\n check = [(0, 0, 0, 0), (1, 1, 1, 1), (1, 0, 0, 0), (1, 1, 0, 0),\n (1, 1, 1, 0),\n (1, 1, 0, 1), (1, 0, 1, 0), (1, 0, 1, 1), (1, 0, 0, 1),\n (0, 1, 0, 0),\n (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 0, 1), (0, 0, 1, 0),\n (0, 0, 1, 1),\n (0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5), (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5), (0.0, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0), (0.25, 0.25, 0.25, 0.25),\n (1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0),\n (0.5, 1.0, 0.5, 1.0), (0.5, 0.5, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0),\n (1.0, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25),\n (1.0, 1.0, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.0, 0.5),\n (0.5, 1.0, 0.0, 0.0), (0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25),\n (1.0, 0.5, 1.0, 0.0), (0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.25), (1.0, 0.5, 0.0, 1.0),\n (0.5, 1.0, 0.0, 1.0),\n (0.5, 0.5, 0.0, 1.0), (0.75, 0.75, 0.25, 0.75),\n (1.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 1.0, 0.5), (0.5, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.25),\n (1.0, 0.0, 0.5, 1.0), (0.5, 0.0, 1.0, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0), (0.25, 0.75, 0.25, 0.25),\n (0.0, 1.0, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5), (0.0, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.25),\n (0.0, 1.0, 0.5, 1.0), (0.0, 0.5, 1.0, 1.0),\n (0.0, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75), (0.0, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(0, 0, 0, 0): [(0.0, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.25, 0.25, 0.25, 0.25),\n (0.5, 0.0, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0),\n (0.5, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.5, 0.0)],\n (1.0, 1.0, 0.5, 0.5): [(1.0, 1.0, 0.5, 1.0), (1, 1, 0, 1),\n (1.0, 1.0, 1.0, 0.5),\n (1.0, 0.5, 0.5, 0.5), (1, 1, 1, 0),\n (1.0, 1.0, 0.5, 0.0),\n (1.0, 1.0, 0.0, 0.5), (1, 1, 0, 0),\n (1, 1, 1, 1), (0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.75, 0.75, 0.75, 0.75),\n (0.75, 0.75, 0.25, 0.25),\n (0.75, 0.75, 0.75, 0.25),\n (0.75, 0.75, 0.25, 0.75)],\n (0.25, 0.25, 0.25, 0.75): [(0.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 0.0, 1.0),\n (0.5, 0.5, 0.5, 1.0),\n (0, 0, 0, 1),\n (0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0),\n (0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.0, 0.5),\n (0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5)]}\n\n init_triangulation(4, 1, check, nn_checks)",
"def _check_transformation_matrix_homogeneity(self):\n transformation_matrices_similar = True # assume they are all similar\n first = True\n rows = None\n cols = None\n for transform in self:\n if first:\n rows = transform.rows\n cols = transform.cols\n first = False\n else:\n if transform.rows != rows or transform.cols != cols:\n transformation_matrices_similar = False\n break\n return transformation_matrices_similar, rows, cols",
"def _check_fov(img, affine, shape):\n img = check_niimg(img)\n return img.shape[:3] == shape and np.allclose(img.affine, affine)",
"def compare3float_relative(x_base, y_check, z_intersection, relative_error):\n return compare2float_relative(x_base, y_check, relative_error) and \\\n compare2float_relative(x_base, z_intersection, relative_error) and \\\n compare2float_relative(y_check, z_intersection, relative_error)",
"def test_projection_v3_z(self):\n\n from pedemath.vec3 import projection_v3\n\n vec_a = Vec3(3, 4, 5)\n vec_b = Vec3(0, 0, 1)\n\n result = projection_v3(vec_a, vec_b)\n\n self.assertEqual(5, result)",
"def check_transformations(*args):\n assert args[0].shape == (21,21)\n assert args[0].dtype == np.float64\n if len(args) == 2:\n assert args[1].shape == (2,2)\n assert args[1].dtype == np.float64",
"def test_x_y_and_z_rot(self):\n\n axis = Vec3(4, 5, 6)\n # Create a Matrix representing a rotation.\n mat = Matrix44.from_axis_angle_deg(axis, 45.0)\n # Use from_matrix44()\n quat = Quat.from_matrix44(mat)\n\n # Ensure it matches the expected quaternion.\n expected_quat = Quat.from_axis_angle_deg(axis, 45.0)\n self.assertAlmostEqual(quat.x, expected_quat.x)\n self.assertAlmostEqual(quat.y, expected_quat.y)\n self.assertAlmostEqual(quat.z, expected_quat.z)\n self.assertAlmostEqual(quat.w, expected_quat.w)",
"def _validate_mesh(self):\n if not (np.abs(self.axis_u.dot(self.axis_v) < 1e-6) and #pylint: disable=no-member\n np.abs(self.axis_v.dot(self.axis_w) < 1e-6) and #pylint: disable=no-member\n np.abs(self.axis_w.dot(self.axis_u) < 1e-6)): #pylint: disable=no-member\n raise ValueError('axis_u, axis_v, and axis_w must be orthogonal')\n return True",
"def verify_facemaps_for_object(obj):\n me = get_edit_mesh()\n bm = bmesh.from_edit_mesh(me)\n bm.faces.layers.face_map.verify()\n bmesh.update_edit_mesh(me, True)"
] | [
"0.5945986",
"0.5890566",
"0.58164686",
"0.57901424",
"0.570529",
"0.5591047",
"0.5575964",
"0.5559709",
"0.5559709",
"0.5529409",
"0.5448441",
"0.54193586",
"0.54104954",
"0.5398789",
"0.537859",
"0.537553",
"0.53726596",
"0.53680474",
"0.5326483",
"0.5305396",
"0.52994144",
"0.5269085",
"0.5255813",
"0.5253021",
"0.5246408",
"0.5244736",
"0.52111524",
"0.52095085",
"0.5204188",
"0.5193652"
] | 0.6869145 | 0 |
Return a PIL image that is all one color. | def _solid_image(color, size):
from PIL import Image
# convert to RGB uint8
color = g.np.array(color, dtype=g.np.uint8)[:3]
# tile the single color into a full-size RGB image
image = Image.fromarray(
g.np.tile(color, (g.np.prod(size), 1)).reshape(
(size[0], size[1], 3)))
assert image.size == tuple(size[::-1])
return image | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def blank_image(height, width):\n all_green = create_uniform_image(height, width, [0, 255, 0])\n return all_green",
"def get_image(self):\n image = Image.new('1', (8, 16))\n draw = ImageDraw.Draw(image)\n for x in xrange(8):\n for y in xrange(16):\n draw.point((x,y),self.get_pixel(x, y))\n return image",
"def generate_image(color):\n color_tuple = int_rgb_tuple(color)\n return Image.new('RGB', (500, 500), color=color_tuple)",
"def new(self, size, fill):\n return Image(PIL.Image.new(\"RGB\", size, fill))",
"def to_color(self):\n if self.channels == 4:\n color = opencv.cvtColor(self.img, opencv.COLOR_BGRA2BGR)\n return Image(color)\n elif self.channels == 1:\n color = opencv.cvtColor(self.img, opencv.COLOR_GRAY2BGR)\n return Image(color)\n else:\n return Image(self.img)",
"def generate_image(height=512, \n width=512, \n color=(255, 255, 255)):\n if type(color) == tuple:\n b = np.full((height, width, 1), color[0], dtype=np.uint8)\n g = np.full((height, width, 1), color[1], dtype=np.uint8)\n r = np.full((height, width, 1), color[2], dtype=np.uint8)\n img = np.concatenate((b, g, r), axis=2)\n else:\n gray = np.full((height, width), color, dtype=np.uint8)\n img = gray\n\n return img",
"def get_one_PIL(colmap, fname=None, Nx=255):\n \n import Image\n Ny = 50\n im = Image.new(\"RGB\", (Nx, Ny))\n for nx in range(Nx):\n z = nx/(Nx-1.0)\n r,g,b = colmap.colfct(z)\n r = int(255*r)\n g = int(255*g)\n b = int(255*b)\n for ny in range(Ny):\n im.putpixel((nx,ny),(r,g,b))\n if fname:\n print \"saving:\", fname+\".jpg\"\n im.save(fname+\".jpg\", \"JPEG\")\n im.save(fname+\".png\", \"PNG\")\n ## im.show()\n return im",
"def get_blank_image(width: int, height: int, n_channels: int, cval=255) -> np.ndarray:\n if n_channels == 0:\n image = np.zeros((height, width)) + 255\n else:\n image = np.zeros((height, width, n_channels)) + cval\n return image.astype(\"uint8\")",
"def create_colorful_test_image(self):\n ch255 = np.full([100, 200, 1], 255, dtype=np.uint8)\n ch128 = np.full([100, 200, 1], 128, dtype=np.uint8)\n ch0 = np.full([100, 200, 1], 0, dtype=np.uint8)\n imr = np.concatenate((ch255, ch128, ch128), axis=2)\n img = np.concatenate((ch255, ch255, ch0), axis=2)\n imb = np.concatenate((ch255, ch0, ch255), axis=2)\n imw = np.concatenate((ch128, ch128, ch128), axis=2)\n imu = np.concatenate((imr, img), axis=1)\n imd = np.concatenate((imb, imw), axis=1)\n image = np.concatenate((imu, imd), axis=0)\n return image",
"def topil(self) -> Image.Image:\n if self.width == 0 or self.height == 0:\n return None\n return Image.frombytes(\n \"RGBA\", (self.width, self.height), self.data, \"raw\", \"ARGB\", 0, 1\n )",
"def image(self) -> PIL.Image.Image:\n try:\n data = io.BytesIO(self.data)\n return PIL.Image.open(data)\n except Exception: # Image data is incorrect, fix as a simple transparent image\n return PIL.Image.new('RGBA', Image.MAX_IMAGE_SIZE)",
"def new(mode, size, color=0):\r\n\r\n _check_size(size)\r\n\r\n if color is None:\r\n # don't initialize\r\n _im = Image()._new(mode, size)\r\n return Image(_im)\r\n\r\n if type(color).__name__ == \"str\":\r\n # css3-style specifier\r\n color = ImageColor().getcolor(color, mode)\r\n color = ImageDraw(None)._convert_bgr2rgb(color)\r\n\r\n _im = Image()._new(mode, size, color)\r\n return Image(_im)",
"def _blankimage():\n img = TK.PhotoImage(width=1, height=1)\n img.blank()\n return img",
"def image_rgba(self) -> np.ndarray | None:\n return self._image.make_image() if self._image is not None else None",
"def get_img_from_array(img):\n a = np.uint8(np.clip(img, 0, 1) * 255)\n return PIL.Image.fromarray(a)",
"def imagetopil(wxImage):\n\twxImage = wxImage.Mirror(False)\n\tw = wxImage.GetWidth()\n\th = wxImage.GetHeight()\n\tdata = wxImage.GetData()\n\t\n\tred_image = Image.frombuffer('L',(w,h),data[0::3])\n\tgreen_image = Image.frombuffer('L',(w,h),data[1::3])\n\tblue_image = Image.frombuffer('L',(w,h),data[2::3])\n\tpilImage = Image.merge('RGB', (red_image, green_image, blue_image))\n\treturn pilImage",
"def create_image(self):\n # how many categories?\n aspect_ratio = float(4) / 3\n self.width = int(math.sqrt(aspect_ratio * self.total))\n self.height = int(self.width / aspect_ratio)\n\n img = Image.new(\"RGB\", (self.width, self.height))\n return img",
"def get_primary_color(source: str) -> list:\r\n img = Image.fromarray(source.copy()).convert(\"RGB\")\r\n img.resize((1, 1), resample=0)\r\n primary_color = img.getpixel((0, 0))\r\n return primary_color",
"def get_full_img(self, scale=1 / 8, onehot=False):\n if self.obs_vision:\n full_img = self.get_full_obs_render(scale=scale)\n else:\n full_img = self.grid.encode(self, onehot=onehot)\n # NOTE: in case need to scale here instead of in above func call: return cv2.resize(full_img, (0, 0), fx=0.125, fy=0.125, interpolation=cv2.INTER_AREA)\n return full_img",
"def get_image(self) -> Image.Image:\n raw_buffer_data = self.get_raw_frame_buffer_object_data()\n image = Image.frombytes(\n \"RGBA\",\n self.get_pixel_shape(),\n raw_buffer_data,\n \"raw\",\n \"RGBA\",\n 0,\n -1,\n )\n return image",
"def create_blank(width, height, rgb_color=(0, 0, 0)):\r\n # Create black blank image\r\n image = np.zeros((height, width, 3), np.uint8)\r\n\r\n # Since OpenCV uses BGR, convert the color first\r\n color = tuple(reversed(rgb_color))\r\n # Fill image with color\r\n image[:] = color\r\n\r\n return image",
"def to_pillow(self) -> PILImage:\n return PILImage.fromarray(self.rgb().to_numpy())",
"def retrieveColor(image):\n w, h, dim = image.shape\n ret = np.zeros((w, h, dim), dtype=np.uint8)\n for i in range(w):\n for j in range(h):\n ret[i][j] = fakingColors(image[i][j])\n return np.clip(ret, 0, 255)",
"def drawImage(self):\n\n image = Image.new(\"RGB\",self.size,(255,255,255)) #new() passes parameters mode,size,color\n #255,255,255 is white\n\n canvas = ImageDraw.Draw(image)\n for g in self.genes:\n color = (g.color.r,g.color.g,g.color.b)\n\n canvas.ellipse((g.pos.x-int(g.diameter/2),g.pos.y-int(g.diameter/2), g.pos.x+int(g.diameter/2), g.pos.y+int(g.diameter/2)), fill = color)\n return image",
"def ret(x):\n color = true_color if x else false_color\n return np.tile(color, (SIZE, SIZE, 1)).astype(np.uint8)",
"def one_color(image,color=[0,0,255]):\r\n output = image.copy()\r\n for line in range(len(image)):\r\n for column in range(len(image[0])):\r\n distance = calc_distance(color,image[line][column])\r\n if distance <=150:\r\n output[line][column]=[255,255,255]\r\n else:\r\n output[line][column]=[0,0,0]\r\n return output",
"def red_filter(img):\r\n #with Image.open(filename) as img:\r\n w = img.width\r\n h = img.height\r\n\r\n newimg = Image.new('RGB', (w,h))\r\n for y in range(h):\r\n for x in range(w):\r\n r, g, b = img.getpixel((x,y))\r\n \r\n newimg.putpixel((x, y), (r, 0, 0))\r\n \r\n return newimg",
"def create_blank(width, height, rgb_color=(0, 0, 0)):\n # Create black blank image\n image = np.zeros((height, width, 3), np.uint8)\n\n # Since OpenCV uses BGR, convert the color first\n color = tuple(reversed(rgb_color))\n # Fill image with color\n image[:] = color\n\n return image",
"def create_blank(width, height, rgb_color=(0, 0, 0)):\n # Create black blank image\n image = np.zeros((height, width, 3), np.uint8)\n\n # Since OpenCV uses BGR, convert the color first\n color = tuple(reversed(rgb_color))\n # Fill image with color\n image[:] = color\n\n return image",
"def Color(img: Image, magnitude: float) -> Image:\n return PIL.ImageEnhance.Color(img).enhance(1 + magnitude * random.choice([-1, 1]))"
] | [
"0.66824204",
"0.6635066",
"0.6374929",
"0.63247544",
"0.61794895",
"0.6133591",
"0.60366756",
"0.6019682",
"0.6013168",
"0.59070235",
"0.5850156",
"0.5841903",
"0.57940644",
"0.57445747",
"0.57420737",
"0.572145",
"0.56919885",
"0.5681022",
"0.5611405",
"0.56048757",
"0.5602715",
"0.56020725",
"0.5578041",
"0.55701226",
"0.55671436",
"0.5559317",
"0.55445683",
"0.5543734",
"0.5543734",
"0.55407345"
] | 0.6654435 | 1 |
Move the head into the looking-up position. | def move_head_looking_up():
return _move_head(cozmo.robot.MAX_HEAD_ANGLE) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_head_looking_forward():\n return _move_head(degrees(0))",
"def _move_head(self, cmd):\n self.move_head(cmd.data)",
"def move_head_looking_down():\n return _move_head(cozmo.robot.MIN_HEAD_ANGLE)",
"def _move_to_head(self, node):\n self._remove_node(node)\n self._add_node(node)",
"def __get_next_head_pos(self, snake: Snake) -> QtCore.QPoint:\n pos = snake.pos()\n direction = (snake.direct() + 2) % 4\n tmp_snake = Snake(self, direction=direction, position=pos)\n pos = self.__get_next_tail_pos(tmp_snake)\n tmp_snake.remove()\n return pos",
"def move(self):\n self.old_tail = self.body[-1][:] # save old position of last block\n self.head[0] += self.direction[0] # moves head\n self.head[1] += self.direction[1]\n \n self.head[0] = (self.head[0] + self.xMaxSize) % self.xMaxSize\n self.head[1] = (self.head[1] + self.yMaxSize) % self.yMaxSize\n \n if self.head in self.body[1:]: # if snakes hits himself\n self.alive = False\n self.body.insert(0, self.body.pop()) # each block is replace by predecessor\n self.body[0] = self.head[:] # first block is head",
"def move_head(self, direction: str) -> None:\n if direction not in self._valid_directions:\n print(f'{direction} is not a valid direction ({self._valid_directions})')\n return\n\n # Move the head\n if direction == 'U':\n self._body[0].y += 1\n elif direction == 'D':\n self._body[0].y -= 1\n elif direction == 'L':\n self._body[0].x -= 1\n elif direction == 'R':\n self._body[0].x += 1\n\n self.drag_segment(1)",
"def head_towards(self):\n dest = self.target_destination - self.location\n if dest.length() != 0:\n dest.scale_to_length(self.speed)\n dest.normalize()\n self.rect.left += dest.x\n self.rect.top += dest.y",
"def move_virtual(self, head, body, x_vel, y_vel):\n hx, hy = head\n next_x = hx + x_vel\n next_y = hy + y_vel\n new_head = (next_x, next_y)\n\n body_copy = copy.copy(body)\n body_copy.pop()\n body_copy.insert(0, (next_x, next_y))\n\n return (new_head, body_copy, x_vel, y_vel)",
"def move_start_node(self, x, y):",
"def move_to_position1(self):",
"def _relative_head_pos(self):\n return self.head_pos - self.left_expands",
"def head_to(self, target: Tuple[float, float], speed: float = 1.5):\n pos = np.array(self.pos)\n target = np.array(target)\n\n heading = np.array(self.model.space.get_heading(pos, target))\n vector = speed * heading / np.linalg.norm(heading)\n self.model.space.move_agent(self, pos + vector)\n return",
"def snake_move(snake, direction):\n head = snake[0].copy()\n\n if direction == RIGHT:\n head[0] = head[0] + 1\n elif direction == LEFT:\n head[0] = head[0] - 1\n elif direction == UP:\n head[1] = head[1] - 1\n elif direction == DOWN:\n head[1] = head[1] + 1\n else:\n return snake\n \n snake.insert(0,head)\n snake.pop()\n \n return snake",
"def move(self, head, steps):\n self.turn(head)\n if self.direction == 0:\n self.x += int(steps)\n if self.direction == 1:\n self.y += int(steps)\n if self.direction == 2:\n self.x -= int(steps)\n if self.direction == 3:\n self.y -= int(steps)",
"def move_to_position2(self):",
"def move_to_head(self, node):\n if node is self.head:\n return\n value = node.value\n self.delete(node)\n self.add_to_head(value)",
"def move(self):\n self.pos += self.direc\n self.nearest_node = self.pixel_to_node()",
"def __snake_move(self):\n self.__eat_candy()\n # move tail and body\n n_snake = len(self.__snake)\n for i in range(1, n_snake):\n s2 = self.__snake[n_snake - i]\n s1 = self.__snake[n_snake - i - 1]\n s2.move(s1.pos())\n # move head\n pos = self.__snake[0].pos()\n tmp_snake = Snake(self, direction=self.__h_direction, position=pos)\n h_pos = self.__get_next_head_pos(tmp_snake)\n tmp_snake.remove()\n icon = f'resources/{self.__directions[self.__h_direction]}.svg'\n new_head = Snake(self, icon, self.__h_direction, self.__cell_edge, h_pos)\n old_head = self.__snake[0]\n self.__snake[0] = new_head\n old_head.remove()\n self.__crash_check()",
"def move(self):\n possible_steps = self.model.grid.get_neighborhood(\n self.pos,\n moore=False, # implements Von Neumann neighborhood\n include_center=False)\n new_position = self.random.choice(possible_steps)\n self.heading = [new_position[0] - self.pos[0],\n new_position[1] - self.pos[1]]\n self.model.grid.move_agent(self, new_position)",
"def get_next_position(self):",
"def move_rel(self):\n pass",
"def update_position(self):\n self.back = self.pos % self.road_len\n self.front = (self.pos + self.length) % self.road_len",
"def move(self, direction):\n \n \"\"\" \n Moving snake is:\n - count new position of head depending on direction\n - if we out of boundaries => return -1\n - if snake' body already have this coordinate => return -1 (collide)\n - insert new head to self.snake beggining\n - cut tail unless our new head met food\n \"\"\"\n new_head = SnakeGame.computeCoord(self.snake[0], direction)\n if not (0 <= new_head[0] < self.height and 0 <= new_head[1] < self.width):\n return -1 # out of boundaried\n \n if self.food_stk and new_head == self.food_stk[-1]:\n self.score += 1\n self.food_stk.pop()\n else:\n self.__RemoveTail()\n \n if not self.__AddHead(new_head):\n return -1 # collision\n \n return self.score",
"def move_to(self, position):\n raise NotImplementedError",
"def move(self, rel_pos):\n self.pos = (self.pos[0] + rel_pos[0] * GRID, self.pos[1] + rel_pos[1] * GRID)",
"def first_order_posint(self, timestep):\n self.prev_pos = self.position\n self.position = self.position + (self.velocity * timestep)",
"def move(self):\n \n self.position = self.explore()",
"def update(direction): \n if direction == 3:\n head[0] -=1\n elif direction == 2:\n head[0] += 1\n elif direction == 1:\n head[1] -= 1\n elif direction == 0:\n head[1] += 1\n \n screen.refresh()\n time.sleep(0.1)",
"def move(self):\n vector = vectors[compass.index(self.heading)]\n x = self.position[0] + vector[0]\n y = self.position[1] + vector[1]\n self._check_move(x, self.plateau[0])\n self._check_move(y, self.plateau[1])\n return replace(self, position=(x, y))"
] | [
"0.75905687",
"0.71693414",
"0.689879",
"0.65886056",
"0.65552706",
"0.6536765",
"0.65329057",
"0.6478913",
"0.63520193",
"0.6337932",
"0.63345844",
"0.6263702",
"0.6194596",
"0.61626005",
"0.61611587",
"0.6146102",
"0.6137257",
"0.6124788",
"0.6099965",
"0.5910861",
"0.5877184",
"0.58735913",
"0.5860811",
"0.58186543",
"0.5789545",
"0.5781386",
"0.57799107",
"0.5775943",
"0.57436764",
"0.5733787"
] | 0.7881115 | 0 |
Move head in looking down position | def move_head_looking_down():
return _move_head(cozmo.robot.MIN_HEAD_ANGLE) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_head_looking_up():\n return _move_head(cozmo.robot.MAX_HEAD_ANGLE)",
"def move_head_looking_forward():\n return _move_head(degrees(0))",
"def up(self):\n if self.head.heading() != DOWN and self.last_direction != DOWN:\n self.head.setheading(UP)",
"def down(self):\n if self.head.heading() != UP and self.last_direction != UP:\n self.head.setheading(DOWN)",
"def _move_head(self, cmd):\n self.move_head(cmd.data)",
"def head_towards(self):\n dest = self.target_destination - self.location\n if dest.length() != 0:\n dest.scale_to_length(self.speed)\n dest.normalize()\n self.rect.left += dest.x\n self.rect.top += dest.y",
"def move(self):\n self.old_tail = self.body[-1][:] # save old position of last block\n self.head[0] += self.direction[0] # moves head\n self.head[1] += self.direction[1]\n \n self.head[0] = (self.head[0] + self.xMaxSize) % self.xMaxSize\n self.head[1] = (self.head[1] + self.yMaxSize) % self.yMaxSize\n \n if self.head in self.body[1:]: # if snakes hits himself\n self.alive = False\n self.body.insert(0, self.body.pop()) # each block is replace by predecessor\n self.body[0] = self.head[:] # first block is head",
"def move_head(self, direction: str) -> None:\n if direction not in self._valid_directions:\n print(f'{direction} is not a valid direction ({self._valid_directions})')\n return\n\n # Move the head\n if direction == 'U':\n self._body[0].y += 1\n elif direction == 'D':\n self._body[0].y -= 1\n elif direction == 'L':\n self._body[0].x -= 1\n elif direction == 'R':\n self._body[0].x += 1\n\n self.drag_segment(1)",
"def move_up(self):\n self.pitch_motor.step_backward()",
"def move_up(self):\n self.move_step(-1)",
"def up(self):\n self.move(0, 1)",
"def move_backward():\n pass",
"def move_down(self):\n self.y -= 1",
"def move(self, head, steps):\n self.turn(head)\n if self.direction == 0:\n self.x += int(steps)\n if self.direction == 1:\n self.y += int(steps)\n if self.direction == 2:\n self.x -= int(steps)\n if self.direction == 3:\n self.y -= int(steps)",
"def update(direction): \n if direction == 3:\n head[0] -=1\n elif direction == 2:\n head[0] += 1\n elif direction == 1:\n head[1] -= 1\n elif direction == 0:\n head[1] += 1\n \n screen.refresh()\n time.sleep(0.1)",
"def move_up(self):\r\n if self.rect.top > 0:\r\n self.rect.top -= self.speed",
"def move_down(self):\n self.move_step(1)",
"def down(self):\n self.move(0,-1)",
"def move_down():\n return __maze.move_down()",
"def move_up(self):\n self.move_measurement(-1)",
"def moveDown():\n tt.right(90)\n tt.forward(60)\n tt.right(90)\n tt.forward(250)\n tt.right(180)",
"def move_forward():\n pass",
"def _move_to_head(self, node):\n self._remove_node(node)\n self._add_node(node)",
"def up_down(self, up):\n if up == 'u':\n up = 1\n elif up == 'n':\n up = 0\n elif up == 'd':\n up = -1\n else:\n raise ValueError(\"The heck you doing Servo?? u d or n ONLY\")\n self.h += up\n if self.get_pos() == blocks['wall']:\n self.h -= up",
"def up(self):\n self.forward(MOVE_DISTANCE)",
"def __snake_move(self):\n self.__eat_candy()\n # move tail and body\n n_snake = len(self.__snake)\n for i in range(1, n_snake):\n s2 = self.__snake[n_snake - i]\n s1 = self.__snake[n_snake - i - 1]\n s2.move(s1.pos())\n # move head\n pos = self.__snake[0].pos()\n tmp_snake = Snake(self, direction=self.__h_direction, position=pos)\n h_pos = self.__get_next_head_pos(tmp_snake)\n tmp_snake.remove()\n icon = f'resources/{self.__directions[self.__h_direction]}.svg'\n new_head = Snake(self, icon, self.__h_direction, self.__cell_edge, h_pos)\n old_head = self.__snake[0]\n self.__snake[0] = new_head\n old_head.remove()\n self.__crash_check()",
"def move_down(self):\n self.pitch_motor.step_forward()",
"def move_down(self):\n if self.center.y > (self.height / 2):\n self.center.y -= 5",
"def __get_next_head_pos(self, snake: Snake) -> QtCore.QPoint:\n pos = snake.pos()\n direction = (snake.direct() + 2) % 4\n tmp_snake = Snake(self, direction=direction, position=pos)\n pos = self.__get_next_tail_pos(tmp_snake)\n tmp_snake.remove()\n return pos",
"def move_down(self):\n\n if self.ycor() < -280:\n self.sety(-300)\n else:\n new_y = self.ycor() - 40\n self.sety(new_y)"
] | [
"0.7909611",
"0.78818965",
"0.75105995",
"0.7271034",
"0.70760745",
"0.69877636",
"0.6986974",
"0.6780396",
"0.67051816",
"0.6649998",
"0.6605314",
"0.6590552",
"0.6552368",
"0.65227723",
"0.6515691",
"0.64742297",
"0.642539",
"0.64098626",
"0.6402091",
"0.6370593",
"0.635825",
"0.634094",
"0.63404727",
"0.63136446",
"0.6304099",
"0.62810147",
"0.6280798",
"0.6271272",
"0.62588656",
"0.625267"
] | 0.81926703 | 0 |
Position head in looking forward (at ground level) position | def move_head_looking_forward():
return _move_head(degrees(0)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def head_towards(self):\n dest = self.target_destination - self.location\n if dest.length() != 0:\n dest.scale_to_length(self.speed)\n dest.normalize()\n self.rect.left += dest.x\n self.rect.top += dest.y",
"def move_head_looking_up():\n return _move_head(cozmo.robot.MAX_HEAD_ANGLE)",
"def _relative_head_pos(self):\n return self.head_pos - self.left_expands",
"def head_to(self, target: Tuple[float, float], speed: float = 1.5):\n pos = np.array(self.pos)\n target = np.array(target)\n\n heading = np.array(self.model.space.get_heading(pos, target))\n vector = speed * heading / np.linalg.norm(heading)\n self.model.space.move_agent(self, pos + vector)\n return",
"def move_head_looking_down():\n return _move_head(cozmo.robot.MIN_HEAD_ANGLE)",
"def forward(self):\n #print('forward\\r')\n self.linearVector = Vector3(x=1.0, y=0.0, z=0.0)\n self.angularVector = Vector3(x=0.0, y=0.0, z=0.0)",
"def moveForward(self):\n if self.onGround:\n self.vx = 4",
"def forward(self):\n print('forward')\n self.linearVector = Vector3(x=1.0, y=0.0, z=0.0)\n self.angularVector = Vector3(x=0.0, y=0.0, z=0.0)",
"def update_position(self):\n self.back = self.pos % self.road_len\n self.front = (self.pos + self.length) % self.road_len",
"def get_forward_position(self):\n return self._forward_position",
"def move_forward(self):\n self.x, self.y = self.compute_positions()",
"def move_virtual(self, head, body, x_vel, y_vel):\n hx, hy = head\n next_x = hx + x_vel\n next_y = hy + y_vel\n new_head = (next_x, next_y)\n\n body_copy = copy.copy(body)\n body_copy.pop()\n body_copy.insert(0, (next_x, next_y))\n\n return (new_head, body_copy, x_vel, y_vel)",
"def head_position_estimator_process(self, frame):\n frame = self.frame_pre_process(frame)\n\n # Clean Head Position detection from previous frame\n self.head_estimator.clear()\n\n # Predict and return head position[Yaw, Pitch, Roll]\n self.head_estimator.start_async(frame, self.rois)\n headPoseAngles = self.head_estimator.get_headposition()\n\n return (headPoseAngles)",
"def getPosHeading(self) :\n\t\treturn (self.avatarNP.getX(), self.avatarNP.getY(), \\\n\t\t\tself.avatarNP.getZ(), (self.avatarNP.getHpr()[0])%360)",
"def look_at(self, point, connector):\n\n\n try:\n point_camera = self.tfBuffer.transform(point, 'head')\n except (tf2.LookupException, tf2.ConnectivityException, tf2.ExtrapolationException) as e:\n rospy.loginfo(\"Waiting for transform... ({})\".format(e))\n return\n\n # Calculate the head joint angles and clip them to the right range\n angle_pan, angle_tilt = connector.head.get_motor_goals_from_point(point_camera.point)\n angle_pan = np.clip(np.rad2deg(angle_pan), connector.head.min_pan, connector.head.max_pan)\n angle_tilt = np.clip(np.rad2deg(angle_tilt), connector.head.min_tilt, connector.head.max_tilt)\n\n current_pan_pos, current_tilt_pos = connector.head.get_current_head_pos()\n if (abs(current_pan_pos - angle_pan) < connector.head.delta and\n abs(current_tilt_pos - angle_tilt) < connector.head.delta):\n # We reached the position\n if rospy.get_time() - self.position_reached_time > connector.head.wait_time:\n # We waited long enough, go back\n return self.pop()\n else:\n # Represent remaining wait time\n self.publish_debug_data(\"remaining_wait_time\",connector.head.wait_time - (rospy.get_time() - self.position_reached_time))\n\n else:\n # We haven't reached it\n # Update when we should reach it\n self.position_reached_time = rospy.get_time()\n connector.head.send_motor_goals(angle_pan, 30.0, angle_tilt, 30.0)\n\n # Represent remaining tilt\n self.publish_debug_data(\"remaining_tilt\",abs(current_pan_pos - angle_pan))\n self.publish_debug_data(\"remaining_pan\",abs(current_tilt_pos - angle_tilt))",
"def set_start_position(self) -> None:\n self.cozmo.set_head_angle(degrees(0)).wait_for_completed()\n self.cozmo.set_lift_height(0.0).wait_for_completed()",
"def move_head(self, direction: str) -> None:\n if direction not in self._valid_directions:\n print(f'{direction} is not a valid direction ({self._valid_directions})')\n return\n\n # Move the head\n if direction == 'U':\n self._body[0].y += 1\n elif direction == 'D':\n self._body[0].y -= 1\n elif direction == 'L':\n self._body[0].x -= 1\n elif direction == 'R':\n self._body[0].x += 1\n\n self.drag_segment(1)",
"def forward(self):\n self.position += 1",
"def target_position(self, time):\n # get joint positions and use fk to get end effector position?\n # ar_tag from topic\n\n cur_pos = self.target_velocity(time)*time + self.start_pos\n\n self.points_generated.append(cur_pos)\n #print(self.start_pos)\n # print(cur_pos)\n return cur_pos",
"def position(t):\n return c + tangent_vec * 7 * t ** 2",
"def leftFootGroundPositionAtTorsoStep(self, n: int) -> Transformation:\n\n torsostep = self.getTorsoStepPose(n)\n\n bodypos = torsostep.position\n transformToRightFoot = Transformation([0, self.foot_separation, -bodypos[2] + self.foot_center_to_floor])\n return torsostep @ transformToRightFoot",
"def move(self, is_forward):\n wh, lh = self.get_heading\n self.w += wh\n self.l += lh\n if self.get_pos() == blocks['wall']:\n self.w -= wh\n self.l -= lh",
"def safe_north_point(self):\n ifMutexAcquire(self.use_mutex)\n try:\n x, y, z = self.read_magnetometer()\n except:\n x, y, z = 0,0,0\n finally:\n ifMutexRelease(self.use_mutex)\n\n # using the x and z axis because the sensor is mounted vertically\n # the sensor's top face is oriented towards the front of the robot\n\n heading = -atan2(-x, z) * 180 / pi\n\n # adjust it to 360 degrees range\n\n if heading < 0:\n heading += 360\n elif heading > 360:\n heading -= 360\n\n return heading",
"def forward(self, d):\n self.pos_x = self.pos_x + d * math.cos(self.orientation) + gauss_noise(0, self.motion_noise)\n self.pos_y = self.pos_y + d * math.sin(self.orientation) + gauss_noise(0, self.motion_noise)",
"def find_start_pose(self):\n\n # Find start position\n y,x = [k for k,v in self.mp.items() if v == 94 or v == 60 \\\n or v == 62 or v == 118][0]\n\n\n # Assign orientation\n dy,dx, theta = 0,0, 0\n if self.mp[y,x] == ord('^'): theta = np.pi/2\n elif mp[y,x] == ord('<'): theta = -np.pi\n elif mp[y,x] == ord('>'): theta = 0\n else: theta = -np.pi/2\n\n return y, x, theta",
"def GetPosition(self):\n ...",
"def move_left(self):\n self.yaw_motor.step_backward()",
"def move_forward():\n pass",
"def _get_next_position(x: float, y: float, heading: float, state: str, hp_info:\n pd.DataFrame, rw_info: pd.DataFrame, ac: int,\n CURR_LANDING_AC) -> Tuple[float, float, float, str]:\n\n if state == \"A\":\n\n radius = np.sqrt(x ** 2 + y ** 2)\n\n min_R = CONTROL_ZONE_RADIUS - MIN_SEPARATION - POSIITION_TOLERANCE\n max_R = CONTROL_ZONE_RADIUS - MIN_SEPARATION + POSIITION_TOLERANCE\n\n if (min_R < radius) | (radius < max_R):\n\n hp_ind = _get_closest_control_zone(x, y, hp_info)\n\n if hp_info[2][hp_ind] == 0:\n\n state_new = \"C\"\n heading_new = _get_ac_heading(hp_info[0][hp_ind] - x, hp_info[1][hp_ind] - y)\n\n else:\n\n state_new = \"B\"\n heading_new = (hp_info[2][hp_ind] + np.pi / 2) % (2 * np.pi)\n\n else:\n\n state_new = \"A\"\n heading_new = heading\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n elif state == \"B\":\n\n hp_ind = _get_closest_control_zone(x, y, hp_info)\n\n if hp_info[2][hp_ind] == 0:\n\n state_new = \"C\"\n heading_new = _get_ac_heading(hp_info[0][hp_ind] - x, hp_info[1][hp_ind] - y)\n\n else:\n\n state_new = \"B\"\n heading_new = heading - MAX_SPEED / (TIME_STEP_FREQUENCY * (CONTROL_ZONE_RADIUS - MIN_SEPARATION))\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n elif state == \"C\":\n\n hp_ind = _get_closest_control_zone(x, y, hp_info)\n dist = np.sqrt((hp_info[0][hp_ind] - x) ** 2 + (hp_info[1][hp_ind] - y) ** 2)\n\n if dist < POSIITION_TOLERANCE + 1:\n\n state_new = \"D\"\n heading_new = heading\n\n x_new = x\n y_new = y\n\n else:\n\n state_new = \"C\"\n heading_new = heading\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n elif state == \"D\":\n\n if ac == CURR_LANDING_AC:\n\n row_ind, x_ind, y_ind = _get_closest_threshold(x, y, rw_info)\n\n state_new = \"E\"\n heading_new = _get_ac_heading(rw_info[x_ind][row_ind] - x, rw_info[y_ind][row_ind] - y)\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n else:\n\n state_new = \"D\"\n heading_new = heading\n\n x_new = x\n y_new = y\n\n elif state == \"E\":\n\n row_ind, x_ind, y_ind = _get_closest_threshold(x, y, rw_info)\n dist = np.sqrt((rw_info[x_ind][row_ind] - x) ** 2 + (rw_info[y_ind][row_ind] - y) ** 2)\n\n if (dist < MIN_SEPARATION) | (CURR_LANDING_AC == ac):\n\n x_ind = 0 if x_ind == 2 else 2\n y_ind = 1 if y_ind == 3 else 3\n\n CURR_LANDING_AC += 1\n\n state_new = \"F\"\n heading_new = _get_ac_heading(rw_info[x_ind][row_ind] - x, rw_info[y_ind][row_ind] - y)\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n else:\n\n state_new = \"E\"\n heading_new = heading\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n elif state == \"F\":\n\n row_ind, x_ind, y_ind = _get_closest_threshold(x, y, rw_info)\n dist = np.sqrt((rw_info[x_ind][row_ind] - x) ** 2 + (rw_info[y_ind][row_ind] - y) ** 2)\n\n if abs(dist - RUNWAY_LENGTH / 2) < POSIITION_TOLERANCE:\n x_new, y_new, heading_new, state_new = -1, -1, -1, \"END\"\n\n else:\n\n state_new = \"F\"\n heading_new = heading\n\n x_new = x + MAX_SPEED * np.sin(heading_new) / TIME_STEP_FREQUENCY\n y_new = y + MAX_SPEED * 
np.cos(heading_new) / TIME_STEP_FREQUENCY\n\n else:\n\n x_new, y_new, heading_new, state_new = -1, -1, -1, \"END\"\n\n return x_new, y_new, heading_new, state_new",
"def _drive_player_position(self) -> None:\n player = self._player\n if player:\n assert self.node\n assert player.node\n self.node.connectattr('torso_position', player.node, 'position')"
] | [
"0.7200592",
"0.69223684",
"0.67997426",
"0.64820814",
"0.63488954",
"0.62061757",
"0.618071",
"0.61070585",
"0.6106752",
"0.61032444",
"0.60774773",
"0.59292644",
"0.5890282",
"0.58369625",
"0.58153296",
"0.5808214",
"0.5767609",
"0.5738415",
"0.5726572",
"0.5726526",
"0.569354",
"0.5666194",
"0.5664323",
"0.5649893",
"0.56489325",
"0.5631052",
"0.56279343",
"0.56264275",
"0.5609635",
"0.5608651"
] | 0.75380087 | 0 |
Move lift close to the ground level | def move_lift_down():
return _move_lift(0.2) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_lift_ground():\n return _move_lift(0)",
"def lift_down(self):\n\n # Can't reuse set_lift_pos due to bug above\n bottom_limit = self.get_lift_limit()\n self.send(self.cmd.SET_LIFT_SET, bottom_limit)",
"def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13",
"def move_lift_up():\n return _move_lift(1)",
"def default(self):\r\n if self.lift.current_floor > self.no_of_floors -1:\r\n self.lift.direction = -1\r\n elif self.lift.current_floor <= 0:\r\n self.lift.direction = 1",
"def moveBackward(self):\n if self.onGround:\n self.vx = -4",
"def knock_back(self):\n FORWARD_VEL = -2\n\n self.rect.x += self.x_vel\n\n if self.name == 'player':\n if self.rect.x >= (self.origin_pos[0] + 10):\n self.x_vel = FORWARD_VEL\n elif self.rect.x <= self.origin_pos[0]:\n self.rect.x = self.origin_pos[0]\n self.state = 'battle resting'\n self.x_vel = 0\n else:\n if self.rect.x <= (self.origin_pos[0] - 10):\n self.x_vel = 2\n elif self.rect.x >= self.origin_pos[0]:\n self.rect.x = self.origin_pos[0]\n self.state = 'battle resting'\n self.x_vel = 0",
"def bring_down(self):\n\n self.move(self.__min_step__)",
"def jump(self):\n if (self.falling or self.rising) and self.doubleJump:\n self.speed_y = -20 # //////Aquí se cambia la velocidad incial cuando se salta//////\n self.fallin = False\n self.rising = True\n self.doubleJump = False\n\n if not self.falling and not self.rising:\n self.speed_y = -20 # //////Aquí se cambia la velocidad incial cuando se salta//////\n self.rising = True",
"def _move_down(self):\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)",
"def move_down(self):\n\n if self.ycor() < -280:\n self.sety(-300)\n else:\n new_y = self.ycor() - 40\n self.sety(new_y)",
"def force_move():\n if ZERO_BASE_PLYR_POS in range(0, 10):\n # we cant go up, so go down\n move_player(\"south\")\n else:\n move_player(\"north\")",
"def moveForward(self):\n if self.onGround:\n self.vx = 4",
"def lift_up(self):\n\n # Can't reuse set_lift_pos due to bug above\n self.send(self.cmd.SET_LIFT_SET, self.cmd.SET_LIFT_SET[\"check\"][\"min\"])",
"def open_up(self):\n\n self.move(self.__max_step__)",
"def left_twist(self):\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()",
"def _walk(self):\n new_pos = self.rect.move((self.move, 0)) # move 9 pixel to the right per frame\n if self.rect.left < self.area.left or self.rect.right > self.area.right:\n self.move = -self.move # move to the opposite direction when the chimp position exceeds the screen\n new_pos = self.rect.move((self.move, 0))\n self.image = pygame.transform.flip(\n self.image, 1, 0\n ) # mirror the chimp to make it looks like turning around\n self.rect = new_pos",
"def move_car(self):\n a = self.h / 50\n self.x += self.speed_x / FPS\n if self.x + 170 * a >= 1100:\n self.dir = -1\n self.speed_x = -self.speed_x\n if self.x - 170 * a <= 50:\n self.dir = 1\n self.speed_x = -self.speed_x",
"def move_down(self):\n self.move_measurement(1)",
"def advanceSilver():\n global silverBallX, silverSpeed\n silverBallX += silverSpeed\n if silverBallX <= -4:\n # Reached the bottom - switch directions\n silverBallX = -4\n silverSpeed = -silverSpeed\n elif silverBallX >= 2.8:\n # Reached the top - switch directions\n silverBallX = 2.8\n silverSpeed = -silverSpeed",
"def fireWest(self):\n self.rotate('w')\n gun = Laser(self)\n gun.shoot('w')\n self.agent.actionCompleted()",
"def stop_full(self):\n self.systems[\"driver\"].move(0, 0)",
"def up(self):\n self.forward(MOVE_DISTANCE)",
"def down(self):\n self.move(0,-1)",
"def move_down(self):\n #if user moves paddle right below on the screen, they won't be able to move it more downwards by using this if statement\n #SCREEN_HEIGHT - 280 = Exact number of pixels where paddle can stop exactly on bottom edge but still has its body fully shown\n if self.center.y > SCREEN_HEIGHT - 280:\n self.center.y -= MOVE_AMOUNT",
"def move(self):\n\n # get the location we WOULD go to\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n while (abs (newX) > self.BOX_RANGE) or (abs(newY) > self.BOX_RANGE):\n # print(\"choosing new direction... \",end=\"\")\n self.chooseNewDirection()\n # print(self.dx, self.dy)\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n\n # now move our monster\n super().move()",
"def jump(self):\n\t\tself._is_falling = True\n\t\tself._dy = -5",
"def applyGravity(o):\n if not o._tryShift(o.block,Point(0,-1)): o._setBlock(o.block)",
"def move_up(self):\n self.move_measurement(-1)",
"def move_down():\n return __maze.move_down()"
] | [
"0.8149269",
"0.6854262",
"0.68413323",
"0.6714419",
"0.65151966",
"0.64415556",
"0.62991387",
"0.6228117",
"0.61921024",
"0.61904323",
"0.617294",
"0.6154423",
"0.6126746",
"0.60275495",
"0.6003219",
"0.59435475",
"0.59210306",
"0.59191656",
"0.5919147",
"0.5905214",
"0.59003365",
"0.5886932",
"0.5855108",
"0.5854077",
"0.5834281",
"0.58221364",
"0.58129066",
"0.5811653",
"0.58081496",
"0.58000135"
] | 0.7417945 | 1 |
Move lift to lowest (groundlevel) position | def move_lift_ground():
return _move_lift(0) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_lift_up():\n return _move_lift(1)",
"def lift_up(self):\n\n # Can't reuse set_lift_pos due to bug above\n self.send(self.cmd.SET_LIFT_SET, self.cmd.SET_LIFT_SET[\"check\"][\"min\"])",
"def move_lift_down():\n return _move_lift(0.2)",
"def default(self):\r\n if self.lift.current_floor > self.no_of_floors -1:\r\n self.lift.direction = -1\r\n elif self.lift.current_floor <= 0:\r\n self.lift.direction = 1",
"def ground_min(self):\n\n def compare(e):\n if is_wire(e):\n return e.potential\n else:\n return float('inf')\n self.move_ground(min(self.elements, key=compare))",
"def force_move():\n if ZERO_BASE_PLYR_POS in range(0, 10):\n # we cant go up, so go down\n move_player(\"south\")\n else:\n move_player(\"north\")",
"def moveForward(self):\n if self.onGround:\n self.vx = 4",
"def _move_lift(self, cmd):\n self.move_lift(cmd.data)",
"def move(self):\n \n self.position = self.wander()",
"def bring_down(self):\n\n self.move(self.__min_step__)",
"def move_left():\n return __maze.move_left()",
"def move_west(self):\r\n self.move(dx=-1, dy=0)",
"def lift_down(self):\n\n # Can't reuse set_lift_pos due to bug above\n bottom_limit = self.get_lift_limit()\n self.send(self.cmd.SET_LIFT_SET, bottom_limit)",
"def move(self):\n \n self.position = self.explore()",
"def move_left(self):\n self.rect.x -= 5 # Moves to the left by 5\n\n # If the player reaches the edge of the screen, they can't go further\n if self.rect.x <= -50:\n self.rect.x = -50",
"def move(self):\n\n x, y = self.position\n\n if self.in_spawn_area:\n if 0 <= x < MAP_SIZE and 0 <= y < MAP_SIZE:\n self.in_spawn_area = False\n\n preferred_direction = self.get_preferred_direction()\n\n if preferred_direction == (0, 0):\n return\n\n new_tiles = self.calculate_tiles_ahead(preferred_direction)\n\n if self.can_advance(new_tiles, preferred_direction):\n self.position = self.position[0] + preferred_direction[0] * 2, self.position[1] + preferred_direction[1] * 2\n self.update_cache_after_move(preferred_direction, new_tiles)\n self.previous_direction = preferred_direction[:]",
"def open_up(self):\n\n self.move(self.__max_step__)",
"def go_left(self):\n self.change_x = -6",
"def go_left(self):\n self.change_x = -6",
"def move(self):\n self.pos += self.direc\n self.nearest_node = self.pixel_to_node()",
"def move_to_position1(self):",
"def move_player(direction):\n global ZERO_BASE_PLYR_POS\n if direction == \"north\":\n ZERO_BASE_PLYR_POS -= 10\n elif direction == \"south\":\n ZERO_BASE_PLYR_POS += 10\n elif direction == \"west\":\n ZERO_BASE_PLYR_POS -= 1\n elif direction == \"east\":\n ZERO_BASE_PLYR_POS += 1\n \n sleep(0.5) # all moves have a 0.5 second delay\n \n show_ground_feature()",
"def _move(self, event):\n if self._current_tower.get_value() > self._coins:\n return\n\n #move the shadow tower to mouse position\n position = event.x, event.y\n self._current_tower.position = position\n\n legal, grid_path = self._game.attempt_placement(position)\n\n #find the best path and covert positions to pixel positions\n path = [self._game.grid.cell_to_pixel_centre(position)\n for position in grid_path.get_shortest()]\n\n #Task 1.2 (Tower placement): Draw the tower preview here\n self._view.draw_preview(self._current_tower, legal)\n self._view.draw_path(path)",
"def move_left(self):\n\t\tself.set_x_vector(-1 * constants.DONKEY_SPEED)",
"def go_home(self):\n self.move_wl(0)",
"def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13",
"def onMoveLeft(self):\n self.mainGrid.moveLeft()",
"def move_forward():\n pass",
"def move_left(self):\r\n if self.rect.left > 0:\r\n self.rect.left -= self.speed",
"def move_left(self, distance):\r\n return self.move('left', distance)"
] | [
"0.73050475",
"0.6926683",
"0.6793749",
"0.65675867",
"0.65607166",
"0.6552803",
"0.6522182",
"0.6300354",
"0.6278917",
"0.62712395",
"0.62628436",
"0.62415427",
"0.6172774",
"0.61378413",
"0.60493064",
"0.6028695",
"0.60269815",
"0.6012585",
"0.6012585",
"0.6004337",
"0.59848773",
"0.5983626",
"0.5956577",
"0.59501845",
"0.59370863",
"0.5914061",
"0.58954114",
"0.58866477",
"0.58787894",
"0.5878116"
] | 0.84103465 | 0 |
Move lift close to the highest position | def move_lift_up():
return _move_lift(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_lift_down():\n return _move_lift(0.2)",
"def lift_down(self):\n\n # Can't reuse set_lift_pos due to bug above\n bottom_limit = self.get_lift_limit()\n self.send(self.cmd.SET_LIFT_SET, bottom_limit)",
"def move_lift_ground():\n return _move_lift(0)",
"def bring_down(self):\n\n self.move(self.__min_step__)",
"def open_up(self):\n\n self.move(self.__max_step__)",
"def move(self, max_width):\n if self.x_pos <= 20:\n self.SPEED = abs(self.SPEED)\n elif self.x_pos >= max_width - 40:\n self.SPEED = -abs(self.SPEED)\n self.x_pos += self.SPEED",
"def lift_up(self):\n\n # Can't reuse set_lift_pos due to bug above\n self.send(self.cmd.SET_LIFT_SET, self.cmd.SET_LIFT_SET[\"check\"][\"min\"])",
"def default(self):\r\n if self.lift.current_floor > self.no_of_floors -1:\r\n self.lift.direction = -1\r\n elif self.lift.current_floor <= 0:\r\n self.lift.direction = 1",
"def right(self):\n win = self.clients.current_client\n x, y = win.x, win.y\n candidates = [c for c in self.clients if c.info()[\"x\"] > x]\n self.clients.current_client = self._get_closest(x, y, candidates)\n self.group.focus(self.clients.current_client)",
"def move_down(self):\n\n if self.ycor() < -280:\n self.sety(-300)\n else:\n new_y = self.ycor() - 40\n self.sety(new_y)",
"def move_right(self):\n\n if self.xcor() > 230:\n self.setx(250)\n else:\n new_x = self.xcor() + 40\n self.setx(new_x)",
"def move_up(self):\n self.move_measurement(-1)",
"def move(self):\n \n self.position = self.explore()",
"def move_rel(self):\n pass",
"def move_down(self, distance):\r\n return self.move('down', distance)",
"def force_move():\n if ZERO_BASE_PLYR_POS in range(0, 10):\n # we cant go up, so go down\n move_player(\"south\")\n else:\n move_player(\"north\")",
"def move(self):\n pass",
"def move_down(self):\n self.move_measurement(1)",
"def back(self, distance):\n self._go(-distance)",
"def move_right(self, distance):\r\n return self.move('right', distance)",
"def _move_down(self):\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)",
"def move_backward(self, distance):\r\n return self.move('back', distance)",
"def move(self):\n \n self.position = self.wander()",
"def move_to_position2(self):",
"def move_down(self):\n self.y -= 1",
"def move_down(self):\n if self.center.y > (self.height / 2):\n self.center.y -= 5",
"def down(self):\n self.move(0,-1)",
"def move_up(self, distance):\r\n return self.move('up', distance)",
"def decide_next_move(self):\n pass",
"def choose_move(self):\n return 0"
] | [
"0.75074756",
"0.71321285",
"0.6953186",
"0.6579046",
"0.6533172",
"0.64709103",
"0.6456872",
"0.63544905",
"0.62643474",
"0.6252684",
"0.6221965",
"0.62079394",
"0.6192262",
"0.6185153",
"0.6174713",
"0.61646885",
"0.61549836",
"0.61504203",
"0.6098028",
"0.60750335",
"0.6070158",
"0.60425025",
"0.60344243",
"0.60320884",
"0.60222167",
"0.6017183",
"0.600949",
"0.600685",
"0.6002071",
"0.5998645"
] | 0.71893954 | 1 |
Reverse for certain amount of time in seconds | def reverse_in_seconds(duration):
try:
easy_cozmo._robot.drive_wheels(-1*df_reverse_speed, -1*df_reverse_speed, duration=duration)
except Exception as e:
say_error("I can't move in reverse")
return False
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def advance_time_seconds(seconds):\r\n advance_time_delta(datetime.timedelta(0, seconds))",
"def goRight(self, seconds):\n self.change_x = 5",
"def reverse(seq):\n return seq[::-1]",
"def reverse(seq):\n return seq[::-1]",
"def backward(self, speed):\n self.controller.reverse(speed)",
"def countdown(n):\n while n > 0:\n n -= 1",
"def GET_reverse(self):\n self.roomba.DriveStraight(-pyrobot.VELOCITY_FAST)\n time.sleep(1)\n self.roomba.SlowStop(-pyrobot.VELOCITY_FAST)",
"def decrease_time(self, time):\n self.__time = time",
"def count_down_timer (self, string, time):\n if len(str(time)) < len(str(self.timePrev)): ## When the time is smaller than the expected time \n sys.stdout.write('\\r' + string + ' ' + str(time) + 's ')\n sys.stdout.flush()\n else:\n sys.stdout.write('\\r' + string + ' ' + str(time) + 's') # When the time is bigger than the expected time, so the \n sys.stdout.flush() ### expected time gets the value of the variable 'time'\n self.timePrev = time",
"def backward(self, duration):\n self.set_motor(self.left_motor, 'right', 0.5)\n self.set_motor(self.right_motor, 'left', 0.5)\n time.sleep(duration)",
"def jump(self, seconds: float) -> None:\n if seconds < 0:\n raise ValueError(\"time can't go backwards\")\n self._virtual_base += seconds",
"def go_to(self, time):\n half_dur = self.half_duration\n self.set_interval((time - half_dur, time + half_dur))",
"def reverse_this(seq):\n r_seq = seq[::-1]\n return r_seq",
"def reverse(x):\n return x[::-1]",
"def reverse_turn(world_state, ros_util):\n\n while world_state.warning_flag == 3:\n ros_util.publish_actions(\"reverse\", 0, 0, 0, 0)\n ros_util.rate.sleep()\n\n new_heading = (world_state.heading + 60) % 360\n\n while (new_heading - 1) < world_state.heading < (new_heading + 1):\n ros_util.publish_actions(\"left\", 0, 0, 0, 0)",
"def passTime(self, time: int) -> None:\n if self.delayed == True:\n self.delayed = None\n return\n\n if self.enabled == True:\n self.time -= time",
"def reverse(self):\n print(\"Reversing\")\n self.miles -= 5\n return self",
"def right_twist(self):\n self.turn_by_deg(180)\n #time.sleep(.1)\n self.stop()\n self.turn_by_deg(180)\n #time.sleep(.1)\n self.stop()",
"def pass_time(self, t):\n cont = time.time() + t\n while time.time() < cont:\n time.sleep(0)",
"def task10_string_reversed(text):\n return text[::-1]",
"def _reverseduty(self):\n if self.ir_pin.duty() == 0:\n self.ir_pin.duty(512)\n else:\n self.ir_pin.duty(0)",
"def slow_down(self, start_time, now, byte_counter):\n rate_limit = self.params.get('ratelimit')\n if rate_limit is None or byte_counter == 0:\n return\n if now is None:\n now = time.time()\n elapsed = now - start_time\n if elapsed <= 0.0:\n return\n speed = float(byte_counter) / elapsed\n if speed > rate_limit:\n sleep_time = float(byte_counter) / rate_limit - elapsed\n if sleep_time > 0:\n time.sleep(sleep_time)",
"def reverse(S, start, stop):\n if start < stop - 1:\n S[start], S[stop-1] = S[stop-1], S[start]\n reverse(S, start+1, stop-1)",
"def reverse(self):\n self._sequence.reverse()",
"def countdown(self, amt=1):\n pass",
"def reverse(s):\n return s[::-1]",
"def flip_t_b(self, times: int):\n for i in range(0, times):\n self.tile_rows = self.tile_rows[::-1]",
"def timer(self):\n self.time_remaining -= 1\n if self.time_remaining > 0:\n Timer(1, self.timer).start()",
"def reverse(self, x: int) -> int:\n res = 0\n remains = abs(x)\n sign = -1 if x < 0 else 1 \n \n while True:\n digit = remains % 10\n res = (res * 10) + digit\n remains = remains // 10\n if remains == 0:\n break\n \n res *= sign\n \n if abs(res) > 0x7FFFFFFF:\n return 0\n else:\n return res",
"def move_back(t,n):\n lt(t)\n bk(t, n)\n rt(t)"
] | [
"0.60997796",
"0.590794",
"0.57126427",
"0.57126427",
"0.5707142",
"0.5647594",
"0.5642141",
"0.5627632",
"0.5479803",
"0.54722285",
"0.5469334",
"0.5447453",
"0.54322964",
"0.54316",
"0.5427749",
"0.53968924",
"0.53934664",
"0.53900844",
"0.538714",
"0.5358092",
"0.5308835",
"0.5307655",
"0.5296422",
"0.5280944",
"0.52721125",
"0.5269183",
"0.5248633",
"0.5237307",
"0.5233001",
"0.5216967"
] | 0.6609068 | 0 |
Move forward for a given distance while avoiding landmarks on the way | def move_forward_avoiding_landmark(distance):
distance = distance * 10
pose = Pose(distance, 0, 0, angle_z=degrees(0))
return _execute_go_to_pose(pose) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def move_forward(self, distance):\r\n return self.move('forward', distance)",
"def forward(self, distance):\n self.logger.debug(\"forward \" + str(distance))",
"def move_forward(self, distance):\n quad_offset = self.quad_offset_mapping['forward']\n client.moveByVelocityAsync(self.velocity * quad_offset[0], self.velocity * quad_offset[1],\n 0.15, distance/self.velocity).join()\n # if self.logging:\n # self.log_arr.append(\"forward\")",
"def go_forward(self, distance, speed=0.1):\n while (self._last_odom_msg == None):\n\t rospy.sleep(1.0)\n start = copy.deepcopy(self._last_odom_msg.pose.pose.position)\n rate = rospy.Rate(10)\n while self.distance_fn(self._last_odom_msg.pose.pose.position, start) < math.fabs(distance):\n direction = -1 if distance < 0 else 1\n self.move(direction * speed, 0)\n rate.sleep()",
"def move_forward():\n pass",
"def forward(self,distance):\n assert (type(distance) in [int, float]), \"parameter distance:%s is not a valid number\" % `distance`\n self._turtle.forward(distance)",
"def move_forward(self, dist):\r\n self.send_command_without_response(f'forward {dist}')",
"def move_forward(self):\n self.x, self.y = self.compute_positions()",
"def moved_forward(self, distance: \"moves in facing direction\") -> Position:\n newx = self.x + distance * math.cos(self.facing)\n newy = self.y + distance * math.sin(self.facing)\n return Position(newx, newy, self.facing)",
"def linear_track(self, dist):\n\t\tglobal estop_flag, move_state\n\n\t\t#Disable timer interrupt, reset halfway flag, set target distance\n\t\tsignal.alarm(0) \n\t\thalfway_flag = False\n\n\t\t#Set starting position\n\t\twith self.move_state_lock:\n\t\t\tstart_x, start_y, start_z = move_state['x'], move_state['y'], move_state['z']\n\t\t#Set current position initially to start position\n\t\tcurrent_x, current_y, current_z = start_x, start_y, start_z\n\t\t#Check if the distance travelled is greater than the goal distance\n\t\twhile math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) < abs(dist):\n\t\t\t#Check if the estop flag is set, if so, kill movement\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\ttwist_msg = Twist()\n\t\t\t\tif dist < 0:\n\t\t\t\t\tif self.correction == riu.no_correction:\n\t\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate\n\t\t\t\t\telse:\n\t\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate/2\n\t\t\t\t\tif self.correction == \"left\":\n\t\t\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate/2\n\t\t\t\t\telif self.correction == \"right\":\n\t\t\t\t\t\ttwist_msg.angular.z = riu.turn_rate/2\n\t\t\t\t#If distance goal is positive, move forward\n\t\t\t\telif dist > 0:\n\t\t\t\t\tif self.correction == riu.no_correction:\n\t\t\t\t\t\ttwist_msg.linear.x = riu.move_rate\n\t\t\t\t\telse:\n\t\t\t\t\t\ttwist_msg.linear.x = riu.move_rate/2\n\t\t\t\t\tif self.correction == \"left\":\n\t\t\t\t\t\ttwist_msg.angular.z = riu.turn_rate/2\n\t\t\t\t\telif self.correction == \"right\":\n\t\t\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate/2\n\n\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#Check if the current movement is half completed, if so, send a Half message and set flag to avoid message duplication\n\t\t\t\tif (math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) >= abs(dist)/2\n\t\t\t\t\tand not halfway_flag):\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\n\t\t\t\t#update current_x, current_y, and current_z (using local variables to be thread safe)\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_x = move_state['x']\n\t\t\t\t\tcurrent_y = move_state['y']\n\t\t\t\t\tcurrent_z = move_state['z']\n\t\t\trospy.sleep(.2)\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval)",
"def forward(self, dist):\n start = (self.pos_x, self.pos_y)\n self.pos_x += dist * math.cos(math.radians(self.angle))\n self.pos_y += dist * math.sin(math.radians(self.angle))\n self._update_limits()\n end = (self.pos_x, self.pos_y)\n if self.pen_down:\n self.draw.line([start, end], fill=self.colour, width=self.width)",
"def forward(self):\n self.position += 1",
"def moveForward(self):\n if self.onGround:\n self.vx = 4",
"def follow_road(self) -> None:\n if self.traffic_mgr.current_map.road_network.get_lane(self.target_lane_index).after_end(self.position):\n self.target_lane_index = self.traffic_mgr.current_map.road_network.next_lane(\n self.target_lane_index, route=self.route, position=self.position, np_random=self.np_random\n )",
"def move(self):\n # neighbor offsets\n offset = [(-1, 1),(0, 1),(1, 1),(-1, 0),(1, 0),(-1, -1),(0, -1),(1, -1)]\n for i in range(len(offset)):\n x = self.x + offset[i][0] # neighboring coordinates\n y = self.y + offset[i][1]\n if self.island.animal(x, y) == 0: # neighboring spot is open\n self.island.remove(self) # remove from current spot\n self.x = x # new coordinates\n self.y = y\n self.island.register(self) # register new coordinates\n break # finished with move",
"def move(self):\n # neighbor offsets\n offset = [(-1, 1),(0, 1),(1, 1),(-1, 0),(1, 0),(-1, -1),(0, -1),(1, -1)]\n for i in range(len(offset)):\n x = self.x + offset[i][0] # neighboring coordinates\n y = self.y + offset[i][1]\n if self.island.animal(x, y) == 0: # neighboring spot is open\n self.island.remove(self) # remove from current spot\n self.x = x # new coordinates\n self.y = y\n self.island.register(self) # register new coordinates\n break # finished with move",
"def _perform_landing(self):\n self.y += self.settings.mario_jump_speed\n if self.y >= self.settings.mario_y_pos:\n self.y = self.settings.mario_y_pos\n self.jumping = 0\n self.is_currently_jumping = False",
"def inter_step(self):\n #https://math.stackexchange.com/questions/1918743/how-to-interpolate-points-between-2-points\n c_loc = self.checkpoint_target.get_location()\n \n self.dist_to_checkpoint = self._calc_distance(c_loc)\n new_y = self.current_location[0] + (self.walk_speed / self.dist_to_checkpoint \\\n * (c_loc[0] - self.current_location[0]))\n new_x = self.current_location[1] + (self.walk_speed / self.dist_to_checkpoint \\\n * (c_loc[1] - self.current_location[1]))\n new_location = [float(new_y), float(new_x)]\n self.current_location = new_location\n self.walk_route.append(new_location)",
"def move_left(self,distance):\n self.turn_left()\n self.move_forward(distance)\n # self.log_arr.append(\"left\")",
"def shiftAsideMark(state, opp, distDemar):\n dest = None\n while True:\n dest = Vector2D.create_random(low=-1, high=1)\n dest.norm = distDemar\n dest += opp.position\n if state.is_valid_position(dest) and \\\n distance_horizontale(dest, state.my_goal) > 10.+distance_horizontale(opp.position, state.my_goal):\n break\n return goTo(state, dest)",
"def forward(self, param):\n\t\tif param:\n\t\t\tself.linear_move(param * .3048)\n\t\telse:\n\t\t\tself.linear_move(riu.default_dist * .3048)",
"def _go(self, distance):\n ende = self._position + self._orient * distance\n self._goto(ende)",
"def move_forward():\n twister = Twist(linear=Vector3(x=0.5,y=0,z=0),angular=Vector3(x=0,y=0,z=0))\n pub.publish(twister)",
"def linear_move(self, dist):\n\t\tglobal estop_flag, move_state\n\t\tsignal.alarm(0) #Disable timer interrupt for the duration of the movement\n\t\thalfway_flag = False\n\t\t\n\t\twith self.move_state_lock:\n\t\t\tstart_x, start_y, start_z = move_state['x'], move_state['y'], move_state['z']\n\t\tcurrent_x = start_x\n\t\tcurrent_y = start_y\n\t\tcurrent_z = start_z\n\t\t#While the distance travelled is less than target distance\n\t\twhile math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) < abs(dist):\n\t\t\t#Check if the emergency stop flag is set, if so, break the current loop and reset velocity\t\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\t#If the distance goal is negative, move backward\n\t\t\t\tif dist < 0:\n\t\t\t\t\t#Send negative velocity\n\t\t\t\t\ttwist_msg = Twist()\n\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate\n\t\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#If distance goal is positive, move forward\n\t\t\t\telif dist > 0:\n\t\t\t\t\t#Send positive velocity\n\t\t\t\t\ttwist_msg = Twist()\n\t\t\t\t\ttwist_msg.linear.x = riu.move_rate\n\t\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#Check if the current movement is half completed, if so, send a Half message and set flag to avoid message duplication\n\t\t\t\tif (math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) >= abs(dist)/2\n\t\t\t\t\tand not halfway_flag):\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\t\t\t\t#update current_x, current_y, and current_z (using local variables to be thread safe)\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_x = move_state['x']\n\t\t\t\t\tcurrent_y = move_state['y']\n\t\t\t\t\tcurrent_z = move_state['z']\n\t\t\trospy.sleep(.2)\n\t\t\t\t\n\t\t#previously had while, finally block -> illegal syntax in python. Just moved to outside loop.\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval)",
"def move(self, is_forward):\n wh, lh = self.get_heading\n self.w += wh\n self.l += lh\n if self.get_pos() == blocks['wall']:\n self.w -= wh\n self.l -= lh",
"def move_to(self, waypoint):\n self.set_final_wp(waypoint)\n self.go()\n currPos = np.asarray(self.rexarm.get_positions())\n while(np.linalg.norm(np.asarray(waypoint) - currPos) > 0.15):\n time.sleep(0.01)",
"def move(self, distance):\n self._go(distance)",
"def move(self, game_display, maze):\n if isinf(self.lidars[0].get_sense()):\n self.forward(acc=2)\n elif self.lidars[0].get_sense() >= 2 * self.lidars[0].radius // 3:\n self.backward(acc=0.5)\n elif self.lidars[0].get_sense() >= self.lidars[0].radius // 3:\n self.backward()\n else:\n self.backward(acc=2)\n Drone.move(self, game_display=game_display, maze=maze)",
"def move_straight(robot, dist):\n journey = Journey(robot, distance=dist)\n journey.start()\n robot.position.move(dist)\n sleep(0.5)",
"def step_forward(self):"
] | [
"0.74395466",
"0.7402497",
"0.6989545",
"0.68794596",
"0.6808184",
"0.6726156",
"0.6702615",
"0.6566427",
"0.6553348",
"0.6377762",
"0.63402694",
"0.63278",
"0.6286996",
"0.62555915",
"0.62512577",
"0.62512577",
"0.62431717",
"0.6233589",
"0.6223542",
"0.6216122",
"0.619673",
"0.61663526",
"0.61588746",
"0.61394656",
"0.60985094",
"0.60739905",
"0.60687256",
"0.605525",
"0.60433084",
"0.6036828"
] | 0.8164994 | 0 |
Evaluates a trained neural network classifier on a partition of a data manager (e.g. "train", "dev", "test"). The accuracy (i.e. percentage of correct classifications) is returned, along with a list of the misclassifications. Each misclassification is a triple (phrase, guessed, actual), where phrase is the misclassified phrase guessed is the classification made by the classifier actual is the correct classification | def evaluate(net, dmanager, partition):
def accuracy(outputs, labels, phrases, dmanager):
correct = 0
total = 0
misclassified = []
for (i, output) in enumerate(outputs):
total += 1
if labels[i] == output.argmax():
correct += 1
else:
misclassified.append((phrases[i],
dmanager.tag(output.argmax().item()),
dmanager.tag(labels[i].item())))
return correct, total, misclassified
val_loader = dmanager.batched_loader(partition, 128)
total_val_loss = 0
correct = 0
total = 0
misclassified = []
loss = torch.nn.CrossEntropyLoss()
for data in val_loader:
raw_inputs = [dmanager.vectorize(p) for p in data['phrase']]
raw_labels = [dmanager.tag_index(c) for c in data['tag']]
inputs = []
labels = []
for (inp, label) in zip(raw_inputs, raw_labels):
if inp is not None:
inputs.append(inp)
labels.append(label)
inputs = torch.Tensor(inputs)
labels = torch.LongTensor([dmanager.tag_index(c) for
c in data['tag']])
val_outputs = net(inputs)
val_loss_size = loss(val_outputs, labels)
correct_inc, total_inc, misclassified_inc = accuracy(val_outputs,
labels,
data['phrase'],
dmanager)
correct += correct_inc
total += total_inc
misclassified += misclassified_inc
total_val_loss += val_loss_size.data.item()
return correct/total, misclassified | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __calculate_classification_metrics(model, metric, evaluation_data, train_data, opt_data):\n model.eval()\n with torch.no_grad():\n all_triples = np.concatenate((evaluation_data, train_data, opt_data), axis=0)\n all_hashed = np.apply_along_axis(hash_triples, 1, all_triples)\n\n entity_number = model.e_num\n all_entities = np.arange(entity_number).reshape(-1, 1)\n metric_sum = 0\n if model.device.type == \"cuda\":\n evaluation_data = tqdm(evaluation_data)\n for x in evaluation_data:\n corrupted_subjects_tail = np.repeat([x[1:]], entity_number, axis=0)\n corrupted_subjects = np.concatenate([all_entities, corrupted_subjects_tail], axis=1)\n corrupted_objects_head = np.repeat([x[:2]], entity_number, axis=0)\n corrupted_objects = np.concatenate([corrupted_objects_head, all_entities], axis=1)\n corrupted_triples = np.concatenate([corrupted_subjects, corrupted_objects], axis=0)\n # Remove duplicate occurrence of test triple x\n mask_index = np.nonzero(all_entities == x[2])[0][0]\n mask = np.ones(len(corrupted_triples), dtype=bool)\n mask[entity_number + mask_index] = False\n data = corrupted_triples[mask]\n\n data_hashed = np.apply_along_axis(hash_triples, 1, data)\n # existing triples should be close to 0, corrupted triples should be close to 1\n target_values = np.in1d(data_hashed, all_hashed, invert=True) * 1\n\n data = torch.tensor(data, dtype=torch.long, device=model.device)\n scores = model.score_triples(data).detach().flatten().cpu().numpy()\n scores = (scores - scores.min()) / (scores.max() - scores.min()) # normalize to values between 0 and 1\n\n if metric == \"roc-auc\":\n metric_sum += roc_auc_score(target_values, scores)\n elif metric == \"avg-precision\":\n metric_sum += average_precision_score(target_values, scores)\n return metric_sum / len(evaluation_data)",
"def evaluate_classifications(self):\n test_labels = open('./digitdata/testlabels', 'r')\n self.init_confusion_matrix()\n i = 0\n class_stats = {0:[0,0], 1:[0,0], 2:[0,0], 3:[0,0], 4:[0,0], 5:[0,0], 6:[0,0], 7:[0,0], 8:[0,0], 9:[0,0]}\n total_correct = 0\n num_labels = 1000\n for label in test_labels:\n int_label = int(label)\n if int_label == self.solutions[i]:\n class_stats[int_label][0] += 1\n self.confusion_matrix[int_label][self.solutions[i]] += 1\n else:\n self.confusion_matrix[int_label][self.solutions[i]] += 1\n class_stats[int_label][1] += 1\n i += 1\n for k in class_stats:\n print \"Class \" + str(k) + \": \" + str(float(class_stats[k][0])/class_stats[k][1])\n total_correct += float(class_stats[k][0])\n print \"Overall Accuracy: \" + str(total_correct/num_labels) \n for l in range(0,10):\n for w in range(0,10):\n self.confusion_matrix[l][w] = float(self.confusion_matrix[l][w]) / class_stats[l][1]\n \n s = [[str(e) for e in row] for row in self.confusion_matrix]\n lens = [len(max(col, key=len)) for col in zip(*s)]\n fmt = '\\t'.join('{{:{}}}'.format(x) for x in lens)\n table = [fmt.format(*row) for row in s]\n print '\\n'.join(table)\n #self.print_confusion_matrix() ",
"def evaluate(network, loss_function, softmax_function, test_loader, test_set_size):\n running_loss = 0.0\n confusion_matrix = { # Of shape [predicted value][real value]\n 0: {0: 0, 1: 0, 2: 0},\n 1: {0: 0, 1: 0, 2: 0},\n 2: {0: 0, 1: 0, 2: 0},\n }\n batch_size = -1\n network.eval()\n with torch.no_grad():\n correct = 0\n for graph_batch, label_batch in test_loader:\n if batch_size == -1:\n batch_size = label_batch.size(0)\n logits = network(graph_batch, graph_batch.ndata['n_feat'], graph_batch.edata['e_feat'], 0, 0)\n running_loss += loss_function(logits, label_batch).detach().item()\n predicted_classes = torch.argmax(logits, dim=1).detach()\n correct += (predicted_classes == label_batch).sum().item()\n for predicted_class, label in zip(predicted_classes, label_batch):\n confusion_matrix[predicted_class.item()][label.item()] += 1\n\n if batch_size <= 0:\n print(\"Error : batch size is {}\".format(batch_size))\n exit(1)\n\n return correct / test_set_size, running_loss / len(test_loader), confusion_matrix",
"def evaluate(net, dev, batcher): \n def accuracy(outputs, labels):\n correct = 0\n total = 0\n misclassified = []\n for (i, output) in enumerate(outputs):\n total += 1\n if labels[i] == output.argmax():\n correct += 1 \n return correct, total, misclassified\n val_loader = batcher(dev, 128)\n total_val_loss = 0\n correct = 0\n total = 0\n misclassified = []\n loss = torch.nn.CrossEntropyLoss() \n for data in val_loader:\n inputs = data[:,1:]\n labels = torch.clamp(data[:,0], min=0).long()\n\n val_outputs = net(inputs) \n val_loss_size = loss(val_outputs, labels)\n\n correct_inc, total_inc, misclassified_inc = accuracy(val_outputs, \n labels)\n correct += correct_inc\n total += total_inc\n misclassified += misclassified_inc\n total_val_loss += val_loss_size.data.item()\n return correct/total, misclassified",
"def _eval_classifier(self):\n\n y_pred_baseline = self.df_baseline[self.score_column]\n y_pred_sample = self.df_sample[self.score_column]\n\n y_label_baseline = self.df_baseline[self.label_column]\n y_label_sample = self.df_sample[self.label_column]\n\n precision_baseline = precision_score(y_label_baseline, y_pred_baseline)\n recall_baseline = recall_score(y_label_baseline, y_pred_baseline)\n acc_baseline = accuracy_score(y_label_baseline, y_pred_baseline)\n f1_baseline = f1_score(y_label_baseline, y_pred_baseline)\n try:\n auc_baseline = roc_auc_score(y_label_baseline, y_pred_baseline)\n except ValueError:\n auc_baseline = \"NA\"\n\n precision_sample = precision_score(y_label_sample, y_pred_sample)\n recall_sample = recall_score(y_label_sample, y_pred_sample)\n acc_sample = accuracy_score(y_label_sample, y_pred_sample)\n f1_sample = f1_score(y_label_sample, y_pred_sample)\n try:\n auc_sample = roc_auc_score(y_label_sample, y_pred_sample)\n except ValueError:\n auc_sample = \"NA\"\n\n metrics_df = pd.DataFrame(\n {\n \"Accuracy\": [acc_baseline, acc_sample],\n \"Precision\": [precision_baseline, precision_sample],\n \"Recall\": [recall_baseline, recall_sample],\n \"F1\": [f1_baseline, f1_sample],\n \"AUC\": [auc_baseline, auc_sample],\n },\n index=[\"baseline\", \"sample\"],\n )\n\n self.performance_comparison = metrics_df",
"def validate(val_loader, net, epoch):\n batch_time = meter.TimeMeter(True)\n data_time = meter.TimeMeter(True)\n prec = meter.ClassErrorMeter(topk=[1], accuracy=True)\n retrieval_map = meter.RetrievalMAPMeter()\n\n # testing mode\n net.eval()\n\n total_seen_class = [0 for _ in range(40)]\n total_right_class = [0 for _ in range(40)]\n\n for i, (views, dps, pcs, labels) in enumerate(val_loader):\n batch_time.reset()\n\n views = views.to(device=config.device)\n pcs = pcs.to(device=config.device)\n dps = views.to(device=config.device)\n labels = labels.to(device=config.device)\n\n f_pc, f_mv, f_dp, _, _, _, de_p, de_v, de_d, dis_p, dis_v, dis_d, cls_p, cls_v, cls_d, fts, preds = net(pcs, views, dps) # bz x C x H x W\n # prec.add(preds.data, labels.data)\n\n prec.add(preds.data, labels.data)\n retrieval_map.add(fts.detach() / torch.norm(fts.detach(), 2, 1, True), labels.detach())\n for j in range(views.size(0)):\n total_seen_class[labels.data[j]] += 1\n total_right_class[labels.data[j]] += (np.argmax(preds.data.cpu(), 1)[j] == labels.cpu()[j])\n\n if i % config.print_freq == 0:\n print(f'Epoch: [{epoch}][{i}/{len(val_loader)}]\\t'\n f'Batch Time {batch_time.value():.3f}\\t'\n f'Epoch Time {data_time.value():.3f}\\t'\n f'Prec@1 {prec.value(1):.3f}\\t')\n\n mAP = retrieval_map.mAP()\n print(f' instance accuracy at epoch {epoch}: {prec.value(1)} ')\n print(\n f' mean class accuracy at epoch {epoch}: {(np.mean(np.array(total_right_class) / np.array(total_seen_class, dtype=np.float)))} ')\n print(f' map at epoch {epoch}: {mAP} ')\n return prec.value(1), mAP",
"def evaluate(data_loader, model, device):\n\n\tmodel.eval()\n\ttotal_num_examples = 0\n\ttotal_error = 0\n\tfor idx, batch in enumerate(data_loader):\n\t\tquestion_feature_vec = batch['feature_vec'].to(device)\n\t\tquestion_len = batch['len'].to(device)\n\t\tlabels = batch['labels'].to(device)\n\n\t\t####Your code here ---\n\n\t\t# get the output from the model\n\t\tlogits = model(question_feature_vec, question_len)\n\n\t\t# get error, num_examples using accuracy_fn defined previously\n\t\terror, num_examples = accuracy_fn(logits, labels)\n\n\t\t# update total_error and total_num_examples\n\t\ttotal_error += error\n\t\ttotal_num_examples += num_examples\n\n\taccuracy = 1 - total_error / total_num_examples\n\treturn accuracy",
"def validate(val_loader, net, epoch, print_pr=False):\n batch_time = meter.TimeMeter(True)\n data_time = meter.TimeMeter(True)\n prec = meter.ClassErrorMeter(topk=[1], accuracy=True)\n retrieval_map = meter.RetrievalMAPMeter()\n\n # testing mode\n net.eval()\n\n total_seen_class = [0 for _ in range(40)]\n total_right_class = [0 for _ in range(40)]\n\n for i, (views, pcs, labels) in enumerate(val_loader):\n batch_time.reset()\n\n views = views.to(device=config.device)\n pcs = pcs.to(device=config.device)\n labels = labels.to(device=config.device)\n\n preds, fts = net(pcs, views, get_fea=True) # bz x C x H x W\n\n # prec.add(preds.data, labels.data)\n\n prec.add(preds.data, labels.data)\n retrieval_map.add(fts.detach()/torch.norm(fts.detach(), 2, 1, True), labels.detach())\n for j in range(views.size(0)):\n total_seen_class[labels.data[j]] += 1\n total_right_class[labels.data[j]] += (np.argmax(preds.data,1)[j] == labels.cpu()[j])\n\n\n if i % config.print_freq == 0:\n print(f'Epoch: [{epoch}][{i}/{len(val_loader)}]\\t'\n f'Batch Time {batch_time.value():.3f}\\t'\n f'Epoch Time {data_time.value():.3f}\\t'\n f'Prec@1 {prec.value(1):.3f}\\t'\n f'Mean Class accuracy {(np.mean(np.array(total_right_class)/np.array(total_seen_class,dtype=np.float))):.3f}')\n\n mAP = retrieval_map.mAP()\n print(f' instance accuracy at epoch {epoch}: {prec.value(1)} ')\n print(f' mean class accuracy at epoch {epoch}: {(np.mean(np.array(total_right_class)/np.array(total_seen_class,dtype=np.float)))} ')\n print(f' map at epoch {epoch}: {mAP} ')\n if print_pr:\n print(f'pr: {retrieval_map.pr()}')\n return prec.value(1), mAP",
"def eval_classifier(clf, X, y_correct, classes, plot_cm=True):\n y_pred = clf.predict(X)\n return get_accuracy_and_plot_confusion(y_correct, list(y_pred), classes, plot=plot_cm)",
"def test_classifiers(train_docs, train_target, test_docs, test_target, min_docs, K, K2, removeStopWords):\n # test_classifiers(train_docs, train_target, test_docs, test_targets, i, 3)\n X_train_counts, X_train_tfidf, X_test_counts, X_test_tfidf = extract_text_features(train_docs, test_docs, min_docs, removeStopWords)\n \n \n num_docs, vocab_size = X_train_counts.shape\n print('Number of (training) documents =',num_docs)\n print('Vocabulary size =',vocab_size)\n \n\n # Now evaluate the classifiers on the test data\n # Print out the accuracy as a percentage for each classifier.\n # np.mean() can be used to calculate the accuracy. Round the accuracy to 2 decimal places.\n\n #predict according to different classifier--evaluate results \n predicted_multNB = fit_and_predict_multinomialNB(X_train_tfidf, train_target, X_test_tfidf)\n predicted_bernNB = fit_and_predict_BernoulliNB(X_train_tfidf, train_target, X_test_tfidf)\n predicted_LR = fit_and_predict_LR(X_train_tfidf, train_target, X_test_tfidf)\n predicted_LR = fit_and_predict_LR(X_train_counts, train_target, X_test_counts)\n predicted_KNN = fit_and_predict_KNN(X_train_tfidf, train_target, X_test_tfidf, K)\n predicted_KNN2 = fit_and_predict_KNN(X_train_tfidf, train_target, X_test_tfidf, K2)\n \n predicted_base = np.array([FreqDist(test_target).most_common(1)[0][0]]*len(test_target))\n\n # count num of correct predictions / total\n np_test_target = np.array(test_target)\n base = np.sum(predicted_base == np_test_target)/len(np_test_target)*100\n multNB = np.sum(predicted_multNB == np_test_target)/len(np_test_target)*100\n bernNB = np.sum(predicted_bernNB == np_test_target)/len(np_test_target)*100\n LR = np.sum(predicted_LR == np_test_target)/len(np_test_target)*100\n KN = np.sum(predicted_KNN == np_test_target)/len(np_test_target)*100\n KN2 = np.sum(predicted_KNN2 == np_test_target)/len(np_test_target)*100\n\n \n print('\\tBase Accuracy: {:.3f}'.format(base))\n print('\\tAccuracy with multinomial naive Bayes: {:.2f}'.format(multNB))\n print('\\tAccuracy with Bernoulli naive Bayes: {:.2f}'.format(bernNB))\n print('\\tAccuracy with logistic regression: {:.2f}'.format(LR))\n print('\\tAccuracy with kNN, k={} classifier: {:2f}'.format(K, KN))\n print('\\tAccuracy with kNN, k={} classifier: {:.2f}'.format(K2, KN2))",
"def classify():\n yes_dataset = df[df[\"_class\"] == 1] # 470588\n no_dataset = df[df[\"_class\"] == 0] # 1971\n\n parameter_analysis = list()\n for criterion in np.arange(0.05, 0.91, 0.05):\n print(\"doing experiment at criterion = %s ...\" % criterion)\n rate_list = list()\n for i in range(10):\n # shuffle yes_dataset and no_dataset, so we can randomly choose 90% yes_dataset\n # + 90% no_dataset as train dataset, 10% yes_dataset + 10% no_dataset as test dataset\n yes_index = yes_dataset.index.tolist()\n random.shuffle(yes_index)\n no_index = no_dataset.index.tolist()\n random.shuffle(no_index)\n \n # concatenate 90%yes + 90%no, 10%yes + 10%no\n train = pd.concat([\n yes_dataset.loc[yes_index[:1774], :],\n no_dataset.loc[no_index[:423530], :]\n ])\n test = pd.concat([\n yes_dataset.loc[yes_index[1774:], :],\n no_dataset.loc[no_index[423530:], :]\n ]) \n \n # split data and label\n train_data, train_label = (train[[\"Revenue.Code\", \n \"Service.Code\", \n \"Procedure.Code\", \n \"Diagnosis.Code\", \n \"Subscriber.Index\"]], \n train[\"_class\"])\n test_data, test_label = (test[[\"Revenue.Code\", \n \"Service.Code\", \n \"Procedure.Code\", \n \"Diagnosis.Code\", \n \"Subscriber.Index\"]], \n test[\"_class\"])\n \n # apply classifier\n clf = GaussianNB()\n clf.fit(train_data, train_label)\n probability = clf.predict_proba(test_data).T[1]\n \n result = pd.DataFrame()\n result[\"_class\"] = test_label\n result[\"_predict\"] = probability >= criterion\n \n result_yes = result[result[\"_class\"] == 1]\n yes_yes_rate = sum(result_yes[\"_class\"] == result_yes[\"_predict\"])/len(result_yes[\"_predict\"])\n \n result_no = result[result[\"_class\"] == 0]\n no_no_rate = sum(result_no[\"_class\"] == result_no[\"_predict\"])/len(result_no[\"_predict\"])\n \n rate_list.append((yes_yes_rate, no_no_rate))\n\n rate_list = pd.DataFrame(rate_list)\n yes_yes_rate, no_no_rate = rate_list.mean()[0], rate_list.mean()[1]\n parameter_analysis.append((criterion, yes_yes_rate, no_no_rate))\n \n # save data to excel spreadsheet\n parameter_analysis = pd.DataFrame(parameter_analysis, columns=[\"criterion\", \"yes_yes_rate\", \"no_no_rate\"])\n writer = pd.ExcelWriter(\"parameter_analysis.xlsx\")\n parameter_analysis.to_excel(writer, \"parameter_analysis\", index=False)\n writer.save()",
"def _accuracy(self, data_loader):\n self._net.eval()\n num_correct = 0.0\n num_total = 0.0\n batchindex = 0\n for X, y in tqdm(data_loader):\n # Data.\n batchindex = batchindex + 1\n X = X.to(self.device)\n y = y.to(self.device)\n # y = torch.tensor(y.to(device))\n\n # Prediction.\n score = self._net(X)\n _, prediction = torch.max(score.data, 1)\n num_total += y.size(0)\n num_correct += torch.sum(prediction == y.data)\n self._net.train() # Set the model to training phase\n return 100 * num_correct.float() / num_total",
"def eval(self, test_loader):\n\n self.generator.eval()\n self.discriminator.eval()\n\n # Total correct prediction counts.\n correct = inpainted_correct = real_correct = 0\n\n m1, m2 = masks(test_loader)\n with torch.no_grad():\n for real_images, _ in test_loader:\n real_images = real_images.to('cuda')\n batch_size = real_images.size()[0]\n\n # Correct prediction counts in the batch.\n batch_inpainted_correct = batch_real_correct = 0\n\n # Generate inpainted images.\n inpainted_images = self.generator(real_images*m1 - m2)\n\n # Get predictions on real images.\n real_logits = self.discriminator(real_images)\n real_predictions = torch.sigmoid(real_logits).round()\n\n # Get target labels for real images.\n real_target = torch.ones(batch_size, device='cuda')\n real_target = real_target.view_as(real_predictions)\n\n # Check number of correct predictions on real images.\n batch_real_correct = (\n real_predictions.eq(real_target).sum()\n )\n real_correct += batch_real_correct.item()\n\n # Get predictions on inpainted images.\n inpainted_logits = self.discriminator(inpainted_images)\n inpainted_predictions = (\n torch.sigmoid(inpainted_logits).round()\n )\n\n # Get target labels for inpainted images.\n inpainted_target = (\n torch.zeros(batch_size, device='cuda')\n )\n inpainted_target = (\n inpainted_target.view_as(inpainted_predictions)\n )\n\n # Check number of correct predictions on inpainted.\n batch_inpainted_correct = (\n inpainted_predictions.eq(inpainted_target).sum()\n )\n inpainted_correct += batch_inpainted_correct.item()\n\n # Get total number of correct predictions.\n correct += real_correct + inpainted_correct\n\n # Log results.\n msg = (\n '\\nTest set: Accuracy: {}/{} ({:.0f}%)'\n '\\tInpainted correct: {}\\tReal correct: {}\\n'\n )\n msg = msg.format(\n correct,\n 2*len(test_loader.dataset),\n 100. * correct / (2*len(test_loader.dataset)),\n inpainted_correct,\n real_correct\n )\n logger.info(msg)\n\n # Save some inpainted images.\n real_images, _ = next(iter(test_loader))\n real_images = real_images.to('cuda')\n filename = os.path.join(\n self.args.output_dir,\n 'images',\n f'epoch_{self.epochs}_original.jpg'\n )\n save_image(\n tensor=real_images,\n filename=filename,\n normalize=True,\n scale_each=True,\n nrow=10\n )\n\n masked_images = m1*real_images - m2\n filename = os.path.join(\n self.args.output_dir,\n 'images',\n f'epoch_{self.epochs}_masked.jpg'\n )\n save_image(\n tensor=masked_images,\n filename=filename,\n normalize=True,\n scale_each=True,\n nrow=10\n )\n\n inpainted_images = self.generator(masked_images)\n filename = os.path.join(\n self.args.output_dir,\n 'images',\n f'epoch_{self.epochs}_inpainted.jpg'\n )\n save_image(\n tensor=inpainted_images,\n filename=filename,\n normalize=True,\n scale_each=True,\n nrow=10\n )",
"def evaluate(model: ContinualModel, dataset: ContinualDataset, last=False) -> Tuple[list, list]:\n status = model.net.training\n model.net.eval()\n accs, accs_mask_classes = [], []\n for k, test_loader in enumerate(dataset.test_loaders):\n if last and k < len(dataset.test_loaders) - 1:\n continue\n correct, correct_mask_classes, total = 0.0, 0.0, 0.0\n for data in test_loader:\n inputs, labels = data\n inputs, labels = inputs.to(model.device), labels.to(model.device)\n if 'class-il' not in model.COMPATIBILITY:\n outputs = model(inputs, k)\n else:\n outputs = model(inputs)\n \n _, pred = torch.max(outputs.data, 1)\n correct += torch.sum(pred == labels).item()\n total += labels.shape[0]\n \n if dataset.SETTING == 'class-il':\n mask_classes(outputs, dataset, k)\n _, pred = torch.max(outputs.data, 1)\n correct_mask_classes += torch.sum(pred == labels).item()\n \n accs.append(correct / total * 100\n if 'class-il' in model.COMPATIBILITY else 0)\n accs_mask_classes.append(correct_mask_classes / total * 100)\n \n model.net.train(status)\n return accs, accs_mask_classes",
"def classify(data, labels, (train_idx, test_idx), classifier=None):\r\n\r\n assert classifier is not None, \"Why would you pass not classifier?\"\r\n\r\n # Data scaling based on training set\r\n scaler = SupervisedStdScaler() #SupervisedRobustScaler() # # \r\n scaler.fit(data[train_idx,:], labels[train_idx], label=-1)\r\n #scaler.fit(data[train_idx,:], labels[train_idx])\r\n data_train = scaler.transform(data[train_idx,:])\r\n data_test = scaler.transform(data[test_idx,:])\r\n try:\r\n classifier.fit(data_train, labels[train_idx])\r\n \r\n \r\n confMat = confusion_matrix(labels[test_idx],\r\n classifier.predict(data_test))\r\n if confMat.shape == (1,1):\r\n if all(labels[test_idx] == -1):\r\n confMat = np.array([[confMat[0], 0], [0, 0]], dtype=confMat.dtype)\r\n else:\r\n confMat = np.array([[0, 0], [0, confMat[0]]], dtype=confMat.dtype)\r\n confMatRate = confMat / np.tile(np.sum(confMat, axis=1).astype('float'), (2,1)).transpose()\r\n totalErr = (confMat[0, 1] + confMat[1, 0]) / float(confMat.sum())\r\n #if type(classifier) not in [type(None), DummyClassifier]:\r\n if hasattr(classifier,'param_grid'): \r\n #isinstance(classifier, GridSearchCV) or \\\r\n # isinstance(classifier, RandomizedSearchCV):\r\n fitted_model = classifier.best_estimator_\r\n else:\r\n fitted_model = copy.copy(classifier) \r\n return confMatRate, totalErr, fitted_model\r\n except np.linalg.linalg.LinAlgError as e:\r\n # sahil added statement to raise the error instead of returning nun values\r\n print e.message\r\n raise e\r\n # return np.array([[np.nan, np.nan], [np.nan, np.nan]]), np.nan, None\r",
"def evaluate_classifier(self, clf):\n\n clf = clf.fit(self.training_data_train_x, self.training_data_train_y)\n predicted = clf.predict(self.training_data_opt_x)\n\n correct = 0\n for i in range(len(self.training_data_opt_y)):\n if predicted[i] == self.training_data_opt_y[i]:\n correct += 1\n\n accuracy = correct / len(self.training_data_opt_y)\n\n return clf, accuracy",
"def main():\n # Read in trainingSet and testSet as a DataFrame\n trainingOriginal = pd.read_csv(\n filepath_or_buffer=\"~/Desktop/KNN Implementation/adult.train.5fold.csv\")\n testOriginal = pd.read_csv(filepath_or_buffer=\"~/Desktop/KNN Implementation/adult.test.csv\")\n\n # Select only the numeric data\n training = pd.DataFrame(trainingOriginal.select_dtypes(['number']))\n training = pd.concat([training.reset_index(drop=True),\n trainingOriginal['earns'].reset_index(drop=True)], axis=1)\n\n # Select only the numeric data\n test = pd.DataFrame(testOriginal.select_dtypes(['number']))\n test = pd.concat([test.reset_index(drop=True),\n testOriginal['earns'].reset_index(drop=True)], axis=1)\n\n # Normalize the columns for training and test\n # print training['age'].min()\n # print training['age'].max()\n # print training.head()\n\n # Run max-min normalization on numerical columns for testing and training data\n for i in range(6):\n training.iloc[:, i] = (training.iloc[:, i]- training.iloc[:, i].min())/(training.iloc[:, i].max() - training.iloc[:, i].min())\n test.iloc[:, i] = (test.iloc[:, i]- test.iloc[:, i].min())/(test.iloc[:, i].max() - test.iloc[:, i].min())\n\n # Convert the 'earns' column to boolean as follows\n training['earns'] = training['earns'] == '>50K'\n test['earns'] = test['earns'] == ' >50K'\n\n # Group the training set by the fold attribute as given by the dataset\n trainingForFinal = training\n training = training.groupby('fold')\n\n # Since we want to consider odd k-values from 1 to 39, construct a list with these values\n kList = []\n for i in range(40):\n if i % 2 == 1:\n kList.append(i)\n\n # Empty dictionary to hold performance of each k-values and its accuracy\n performance = {}\n\n # Compute the performance for each k-value\n for k in kList:\n performance = crossValidation(training, k, performance)\n\n # Sort the performance dictionary by its accuracy (value)\n performance = sorted(performance.items(), key=operator.itemgetter(1), reverse=True)\n\n # Open file to write results\n file = open('grid.results.txt', 'w')\n # Write the results to file\n file.write(\"K | Accuracy\\n\")\n for item in performance:\n if item[0] < 10:\n file.write(str(item[0]) + ' | ' + str(item[1]) + '\\n')\n else:\n file.write(str(item[0]) + ' | ' + str(item[1]) + '\\n')\n # Close file\n file.close()\n\n # The best K is the one at the top of the list after the sorting\n bestK = performance[0][0]\n\n print 'Running Test Set with K = ' + str(bestK)\n\n applyModel(test,trainingForFinal,bestK)",
"def _eval_clustering(self, gen_reviews, clusters, embedding_model, clustering):\n result = []\n preds = self.predict_gen(gen_reviews, embedding_model, clustering)\n\n acc = accuracy_score(np.array(clusters), np.array(preds))\n conf = confusion_matrix(np.array(clusters), np.array(preds))\n\n return acc, conf",
"def _validateClassification(self, trainingSet):\n wrongCount = 0.\n\n pv = []\n tv = []\n\n if self.K == 1:\n for example in trainingSet:\n Y = self.test(example)\n \n givenClass = example.label[0]\n if Y[0] < 0.5:\n chosenClass = 0\n else:\n chosenClass = 1\n \n pv.append(chosenClass)\n tv.append(givenClass)\n \n if chosenClass != givenClass:\n wrongCount += 1.\n else:\n for example in trainingSet:\n Y = self.test(example)\n \n posterior, chosenClass = max((x, i) for i, x in enumerate(Y))\n max_val, givenClass = max((x, i) for i, x in enumerate(example.label))\n \n pv.append(chosenClass)\n tv.append(givenClass)\n \t\t\t\n if chosenClass != givenClass:\n wrongCount += 1.\n \n return wrongCount/len(trainingSet), pv, tv",
"def test_classifier(classifier: nn.Module, dataset: Dataset, batch_size: int = 32) -> Tuple[float, np.array]:\n\n # define data loader\n test_dl = torch.utils.data.DataLoader(\n dataset,\n batch_size=batch_size,\n shuffle=False,\n num_workers=4,\n )\n\n # define device\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n # set model in evaluation mode\n classifier = classifier.to(device)\n classifier.eval()\n\n # define accuracy\n accuracy = 0\n predictions = []\n\n with torch.no_grad():\n for i, data in enumerate(test_dl):\n # unpack and send data to device\n X, y = data\n X, y = X.to(device), y.to(device)\n\n # predict the class\n y_pred = classifier(X)\n\n # gather predictions\n y_pred = torch.argmax(y_pred, dim=1)\n accuracy += torch.sum(y_pred == y).item()\n predictions += list(y_pred.cpu().numpy())\n\n return accuracy / len(dataset), np.array(predictions)",
"def mgcEval(self):\n import numpy as np\n def report_to_df(report):\n\n \"\"\"\n function to convert classification report to dataframe (for visualisation plot)\n \"\"\"\n\n report = re.sub(r\" +\", \" \", report).replace(\"avg / total\", \"avg/total\").replace(\"\\n \", \"\\n\")\n # update this due to sklearn classification report output change\n report = re.sub(r\" +\", \" \", report).replace(\"micro avg\", \"micro_avg\").replace(\"macro avg\", \"macro_avg\").replace(\"weighted avg\", \"weighted_avg\").replace(\"\\n \", \"\\n\")\n report_df = pd.read_csv(StringIO(\"Classes\" + report), sep=' ', index_col=0) \n return(report_df)\n \n #txt report to df\n class_rpttop1 = classification_report(self.y_true, self.y_pred)\n df_report = report_to_df(class_rpttop1)\n\n df_report = df_report.iloc[:self.nb_classes, :].copy()\n df_report.index = df_report.index.astype(int)\n \n\n # classifier prediction metrics\n def classMetrics(averagex):\n precision, recall, fscore, support = score(self.y_true, self.y_pred, average=averagex)\n \n return(\n print(''), \n print('-------------{0:}--------------------'.format(averagex)), \n print('precision: {0:.4f}'.format(precision)),\n print('recall: {0:.4f}'.format(recall)),\n print('fscore: {0:.4f}'.format(fscore)),\n print(''),\n print('kappa score: {0:.4f}'.format(cohen_kappa_score(self.y_true, self.y_pred))),\n print('accuracy score: {0:.4f}'.format(accuracy_score(self.y_true, self.y_pred))))\n \n def predSamp():\n\n correct = np.nonzero(self.y_pred==self.y_true)[0]\n incorrect = np.nonzero(self.y_pred!=self.y_true)[0]\n\n # quick check of the number of correct prediction from validation set\n print(\"\")\n print(\"correct/total = {0: .4f}\".format(len(correct)/(len(correct)+len(incorrect))))\n print(\"total correct sample = {0: .0f}\".format(len(correct)))\n print('------------------------------------------------------------------')\n \n def classReport():\n print('----------------------------- Classfication Report -------------------------------')\n print(classification_report(pd.Series(self.y_true).map(self.dict_label), pd.Series(self.y_pred).map(self.dict_label)))\n \n self.class_rpt = pd.concat([pd.DataFrame(pd.Series(df_report.index.tolist()).map(self.dict_label), columns = ['label']), df_report], axis = 1)\n \n self.classMetricsMac = classMetrics(\"macro\")\n self.classMetricsMic = classMetrics(\"micro\")\n self.predSample = predSamp()\n self.class_rptTop1 = classReport()\n \n return self",
"def train_classifiers(params):\n # Create result dataframe\n out = pd.DataFrame(\n columns=[\"Dataset\", \"Classifier\", \"Accuracy\", \"F1\", \"Precision\", \"Recall\"])\n\n for model_type, all_languages in params.items():\n print(\"Classifier: \", str(model_type))\n\n for language, all_targets in all_languages.items():\n print(language)\n for target, model_params in all_targets.items():\n print(target)\n print(model_params)\n\n datasets = sample_datasets(\n language, target, SAMPLING, TFIDF, model_params['top_k_words'], SUB_SAMPLE_RERUNS)\n\n # Iterate the datasets\n for data_id, dataset in enumerate(datasets):\n dataset_name = dataset[0]\n data = dataset[1]\n y = np.array(dataset[2])\n val_data = dataset[3]\n val_y = np.array(dataset[4])\n\n acc_scores = []\n pre_scores = []\n rec_scores = []\n f1_scores = []\n \n global X_train\n X_train, X_test = data, val_data\n y_train, y_test = y, val_y\n y_pred = None\n\n # Create model instance.\n model = mlp_model(layers=model_params['hidden_layers'], units=model_params['hidden_units'], dropout_rate=model_params['dropout_rate'],\n input_shape=X_train.shape[1:], num_classes=2)\n optimizer = tf.keras.optimizers.Adam(\n lr=model_params['learning_rate'])\n model.compile(optimizer=optimizer,\n loss='binary_crossentropy', metrics=['acc'])\n\n # Stop training is validation loss doesnt decrease for 3 steps\n callbacks = [tf.keras.callbacks.EarlyStopping(\n monitor='val_loss', patience=3)]\n\n # Train and validate model.\n history = model.fit(\n X_train,\n y_train,\n epochs=model_params['epochs'],\n callbacks=callbacks,\n validation_data=(X_test, y_test),\n verbose=0,\n batch_size=512)\n\n acc_scores.append(\n history.history['val_acc'][-1])\n y_pred = [round(a[0])\n for a in model.predict(X_test)]\n\n # Compute the results\n prfs = precision_recall_fscore_support(\n y_test, y_pred, warn_for=[])\n\n pre_scores.append(prfs[0].mean())\n rec_scores.append(prfs[1].mean())\n f1_scores.append(prfs[2].mean())\n\n # Append average scores\n clf_acc = np.array(acc_scores).mean()\n clf_pre = np.array(pre_scores).mean()\n clf_rec = np.array(rec_scores).mean()\n clf_f1 = np.array(f1_scores).mean()\n\n out = out.append(pd.DataFrame(\n [[dataset_name, model_type, clf_acc, clf_f1, clf_pre, clf_rec]], columns=out.columns), ignore_index=True)\n\n return out",
"def evaluate(classifier, iterator, criterion, device, label_to_idx):\n epoch_loss = 0\n epoch_correct = 0\n num_instances = 0\n\n # set the model as evaluation mode\n classifier.eval()\n\n with torch.no_grad():\n\n for batch_x, batch_y in iterator:\n\n # get the text representation and image representation\n feats = batch_x.squeeze(1).to(device)\n\n label_ids = torch.tensor([label_to_idx[label] for label in batch_y]).to(device)\n\n # use the model to predict\n preds = classifier(feats)\n preds = preds.squeeze(1).to(device)\n\n loss = criterion(preds, label_ids)\n epoch_loss += loss.item()\n\n # calculate the accuracy\n pred_ids = preds.argmax(dim=1)\n epoch_correct += pred_ids.eq(label_ids).sum().item()\n num_instances += len(batch_y)\n\n return epoch_loss / num_instances, epoch_correct / num_instances",
"def _accuracy(self, data_loader):\n self._net.train(False)\n num_correct = 0\n num_total = 0\n for X, y in data_loader:\n # Data.\n X = torch.autograd.Variable(X.cuda())\n y = torch.autograd.Variable(y.cuda(async=True)).long()\n\n # Prediction.\n score = self._net(X)\n _, prediction = torch.max(score.data, 1)\n num_total += y.size(0)\n num_correct += torch.sum(prediction == y.data).item()\n self._net.train(True) # Set the model to training phase\n return 100 * num_correct / num_total",
"def compute_accuracy(self, data_loader):\n correct = 0\n total = 0\n\n self.eval()\n with torch.no_grad():\n for user, item, true_rating in data_loader:\n forward = self(user, item)\n predicted = self._prob_to_class(forward)\n total += predicted.numel()\n correct += (predicted == true_rating.view(-1)).sum().item()\n\n return total, correct",
"def evaluate(ground_truth, prediction):\n\n def prfs_to_dict(prfs):\n \"\"\"Returns a precision_recall_fscore_support() result as a dict.\"\"\"\n return {\"precision\": prfs[0], \"recall\": prfs[1], \"fscore\": prfs[2]}\n\n results = {}\n items_count = len(ground_truth)\n\n # accuracy\n accuracy = accuracy_score(ground_truth, prediction)\n results[\"accuracy\"] = accuracy\n\n # confusion matrix\n categories = set(ground_truth) | set(prediction)\n confusions = {\n gold: {pred: 0 for pred in categories} for gold in categories\n }\n for g, p in zip(ground_truth, prediction):\n confusions[g][p] += 1\n results[\"confusions\"] = confusions\n\n # class wise precision, recall & f1\n classwise = precision_recall_fscore_support(\n ground_truth, prediction, average=None, warn_for=()\n )\n results[\"true_cat_dist\"] = list(classwise[-1])\n results[\"classwise\"] = {\n str(cl): prfs_to_dict(\n [classwise[0][cl], classwise[1][cl], classwise[2][cl]]\n )\n for cl in categories\n }\n\n # average precision, recall & f1\n results[\"macro_avg\"] = prfs_to_dict(\n precision_recall_fscore_support(\n ground_truth,\n prediction,\n average=\"macro\",\n pos_label=None,\n warn_for=(),\n )\n )\n results[\"micro_avg\"] = prfs_to_dict(\n precision_recall_fscore_support(\n ground_truth,\n prediction,\n average=\"micro\",\n pos_label=None,\n warn_for=(),\n )\n )\n results[\"weigh_avg\"] = prfs_to_dict(\n precision_recall_fscore_support(\n ground_truth,\n prediction,\n average=\"weighted\",\n pos_label=None,\n warn_for=(),\n )\n )\n\n # marginals\n gold_category_distribution = {\n g: sum([confusions[g][p] for p in categories]) for g in categories\n }\n pred_category_distribution = {\n p: sum([confusions[g][p] for g in categories]) for p in categories\n }\n\n # kappa\n expected_agreement_fleiss = sum(\n [\n (\n (gold_category_distribution[c] + pred_category_distribution[c])\n / (2.0 * items_count)\n )\n ** 2\n for c in categories\n ]\n )\n expected_agreement_cohen = sum(\n [\n (float(gold_category_distribution[c]) / items_count)\n * (float(pred_category_distribution[c]) / items_count)\n for c in categories\n ]\n )\n kappa_fleiss = (\n 1.0\n * (accuracy - expected_agreement_fleiss)\n / (1 - expected_agreement_fleiss)\n )\n kappa_cohen = (\n 1.0\n * (accuracy - expected_agreement_cohen)\n / (1 - expected_agreement_cohen)\n )\n results[\"k_fleiss\"] = {\n \"k\": kappa_fleiss,\n \"AE\": expected_agreement_fleiss,\n \"AO\": accuracy,\n }\n results[\"k_cohen\"] = {\n \"k\": kappa_cohen,\n \"AE\": expected_agreement_cohen,\n \"AO\": accuracy,\n }\n\n return results",
"def classification_evaluation(self, test_set, predicted_values, certainty):\r\n\r\n percent_accuracy = self.percent_accuracy(test_set, predicted_values)\r\n one_zero = self.one_zero_loss(test_set, predicted_values)\r\n log_loss = self.log_loss(test_set, predicted_values, certainty)\r\n print(f\"Percent correct:\\t{percent_accuracy * 100:.2f}%\")\r\n print(f\"1/0 Loss:\\t\\t\\t{one_zero:.2f}\")\r\n print(\"Log Loss: \", log_loss)",
"def evaluate(test_dataset, test_labels, net):\n rep = torch.zeros((test_dataset.shape[0], final_dim))\n # find each lower dimensional representation\n d = torch.from_numpy(test_dataset)\n net = net.double()\n out = net(d, False) # put through the network\n u, s, v = torch.svd(out) # implement SVD\n top_vals = torch.diag(s) # create the diagonal matrix from s\n top_vals = top_vals[:, :final_dim] # cut down s with SVD\n rep = torch.mm(u, top_vals)\n # evaluate the accuracy of the representation\n rep = rep.detach().numpy()\n model = DBSCAN()\n predicted = model.fit_predict(rep)\n score = v_measure_score(predicted, test_labels)\n return rep, score",
"def evaluate_accuracy(net, data_iter): #@save\n metric = Accumulator(2) # No. of correct predictions, no. of predictions\n for _, (X, y) in enumerate(data_iter):\n metric.add(accuracy(net(X), y), d2l.size(y))\n return metric[0] / metric[1]",
"def compute_metrics(self, train_data, test_data, criterion):\n m = self.metrics\n warnings.filterwarnings('ignore','Mean of empty slice')\n\n ## load data\n trn, trn_labs = train_data\n tst, tst_labs = test_data\n\n # trn = trn.transpose(1,0)\n tst = tst.transpose(1,0)\n\n t_final = -(np.flipud(trn!=self.padding).argmax(0)+1)\n test_tfinal = -(np.flipud(tst!=self.padding).argmax(0)+1)\n\n ntest = tst.size(1)\n P = self.decoder.out_features\n\n ## training data ###########################################################\n # hidden = self.init_hidden(trn.size(1))\n # out, hidden = self.transparent_forward(trn, hidden)\n # # output = out[t_final, np.arange(trn.size(1)), :]\n # output = out.squeeze()\n # # compute orthogonality\n # mem_act = np.array([np.cumsum(trn==p,axis=0).int().detach().numpy() % 2 \\\n # for p in range(self.q_)]).transpose((1,2,0))\n\n # ps_clf = LinearDecoder(self, 2**(self.q_-1), MeanClassifier)\n # ps = []\n # for d in Dichotomies(mem_act, 'simple'):\n # np.warnings.filterwarnings('ignore',message='invalid value encountered in')\n # ps_clf.fit(hidden.detach().numpy(), d)\n # new_ps = ps_clf.orthogonality()\n # ps.append(new_ps)\n # # if new_ps > ps:\n # # ps = new_ps\n # m['train_parallelism'] = np.append(m['train_parallelism'], np.array(ps).T, axis=0)\n\n # # print(mem_act.shape)\n # # print(hidden.shape)\n # # self.orth_clf.fit(hidden.detach().numpy(), mem_act)\n # # orth_score = self.orth_clf.orthogonality()\n # # m['train_orthogonality'] = np.append(m['train_orthogonality'], orth_score)\n\n ## test data ##############################################################\n hidden = self.init_hidden(tst.size(1))\n out, hidden = self.transparent_forward(tst, hidden)\n # output = out.squeeze()\n # print(hidden.shape)\n # print(out.shape)\n # print(test_tfinal)\n output = out[test_tfinal, np.arange(tst.size(1)), :]\n # raise Exception\n\n # compute loss\n test_loss = criterion(output.squeeze(0),tst_labs.squeeze())\n\n m['test_loss'] = np.append(m['test_loss'], test_loss.item())\n\n # compute orthogonality\n # mem_act = np.array([np.cumsum(tst==p,axis=0).int().detach().numpy() % 2 \\\n # for p in range(self.q_)]).transpose((1,2,0))\n\n # # self.orth_clf.fit(hidden.detach().numpy(), mem_act)\n # # orth_score = self.orth_clf.orthogonality()\n # # m['test_orthogonality'] = np.append(m['test_orthogonality'], orth_score)\n\n # # compute parallelism\n # ps_clf = LinearDecoder(self, 2**(self.q_-1), MeanClassifier)\n # ps = []\n # for d in Dichotomies(mem_act, 'simple'):\n # np.warnings.filterwarnings('ignore',message='invalid value encountered in')\n # ps_clf.fit(hidden.detach().numpy(), d)\n # new_ps = ps_clf.orthogonality()\n # ps.append(new_ps)\n # # if new_ps > ps:\n # # ps = new_ps\n # m['test_parallelism'] = np.append(m['test_parallelism'], np.array(ps).T, axis=0)\n\n ## package #################################################################\n self.metrics = m\n warnings.filterwarnings('default')"
] | [
"0.68844604",
"0.67071915",
"0.6689958",
"0.66273904",
"0.6515462",
"0.64979863",
"0.64945793",
"0.64835525",
"0.6477064",
"0.6471139",
"0.6453934",
"0.6429707",
"0.6404337",
"0.63565594",
"0.6334361",
"0.63319755",
"0.6327112",
"0.63240665",
"0.6319546",
"0.63133943",
"0.6306961",
"0.62932676",
"0.62889344",
"0.62730235",
"0.6258261",
"0.6251155",
"0.624914",
"0.6244055",
"0.6235842",
"0.6231321"
] | 0.7388213 | 0 |
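The retrieved snippets above share one pattern: switch the network to eval mode, loop over a data loader, and count correct predictions. As a point of reference only (not an entry of this dataset; evaluate_accuracy and its arguments are placeholder names), a minimal sketch of that pattern is:

import torch

def evaluate_accuracy(model, data_loader, device):
    # Generic evaluation loop: no gradients, count correct top-1 predictions.
    model.eval()
    correct, total = 0, 0
    with torch.no_grad():
        for inputs, labels in data_loader:
            inputs, labels = inputs.to(device), labels.to(device)
            preds = model(inputs).argmax(dim=1)
            correct += (preds == labels).sum().item()
            total += labels.size(0)
    model.train()  # restore training mode, as several of the snippets above do
    return 100.0 * correct / total

torch.no_grad() avoids building the autograd graph during evaluation, and the accuracy is returned as a percentage, matching the convention used by most of the snippets above.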
Adds path between the two nodes given | def addPath(self, from_node, to_node):
    # Rasterise a straight segment from from_node to to_node using an integer
    # (Bresenham-style) error term; assumes x2 >= x1 and a slope between 0 and 1.
    x1, y1 = from_node.x, from_node.y
    x2, y2 = to_node.x, to_node.y
    pointsx = []
    pointsy = []
    m_new = 2 * (y2 - y1)
    slope_error_new = m_new - (x2 - x1)
    y = y1
    for x in range(x1, x2 + 1):
        pointsx.append(x)
        pointsy.append(y)
        # Add slope to increment angle formed
        slope_error_new = slope_error_new + m_new
        # Slope error reached limit, time to
        # increment y and update slope error.
        if slope_error_new >= 0:
            y = y + 1
            slope_error_new = slope_error_new - 2 * (x2 - x1)
    # Build the new node at the target coordinates and attach the traced path.
    new_node = self.Node(to_node.x, to_node.y)
    new_node.path_x = pointsx
    new_node.path_y = pointsy
    new_node.path_x.append(to_node.x)
    new_node.path_y.append(to_node.y)
    print("len path x", len(new_node.path_x))
    print("len path y", len(new_node.path_y))
    new_node.parent = from_node
    return new_node | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _find_path(self, node1, node2, path=[]):\r\n\r\n path = path + [node1]\r\n if node1 == node2:\r\n return path\r\n if node1 not in self._graph:\r\n return None\r\n for node in self._graph[node1]:\r\n if node not in path:\r\n new_path = self._find_path(node, node2, path)\r\n if new_path:\r\n return new_path\r\n return None",
"def __combine_path(self, other):\n self.path = other.path + self.path",
"def get_path(self, v0, v2):\n start = self.get_node(v0)\n target = self.get_node(v2)\n return self.astar(start,target)",
"def _add(self, node1, node2):\r\n\r\n self._graph[node1].add(node2)",
"def add_path(self, nodes, name=None, attr_dict=None, **attr):\n if len(nodes) == 0:\n raise PathGraphException(\"The path is empty\")\n\n if attr_dict is None:\n attr_dict = attr\n else:\n try:\n attr_dict.update(attr)\n except AttributeError:\n raise PathGraphException(\"The attr_dict argument must be a dictionary.\")\n\n nlist = list(nodes)\n # check that the nodes do not belong to other path\n seen = set()\n for node in nlist:\n if node in self.path_id:\n raise PathGraphException(\"Node {} already belongs to another path. Can't add path\".format(node))\n seen.add(node)\n\n if len(nodes) != len(seen):\n raise PathGraphException(\"Path contains repeated elements. Can't add path\")\n\n if name is not None:\n if name not in self.path:\n path_id = name\n else:\n raise PathGraphException(\"Path name already exists {}\".format(name))\n else:\n # get a new path_id\n path_id = len(self.path)\n if path_id in self.path:\n for index in range(len(self.path) + 1):\n if index not in self.path:\n path_id = index\n break\n\n assert path_id not in self.path, \"*Error*, path_id already in path\"\n self.path[path_id] = nodes\n\n # add nodes\n for node in nlist:\n self.add_node(node, path_id=path_id)\n\n for i in range(len(nlist)-1):\n u = nlist[i]\n v = nlist[i+1]\n # add the edges\n datadict = self.adj[u].get(v, {})\n datadict.update(attr_dict)\n self.adj[u][v] = datadict\n self.adj[v][u] = datadict",
"def addPath(newList, refnode):\n ele = inkex.etree.Element('{http://www.w3.org/2000/svg}path')\n ele.set('d', simplepath.formatPath(newList))\n refnode.xpath('..')[0].append(ele)\n return ele",
"def manage_paths(node, paths) :\r\n\r\n #Getting the nodes neighbouring the given node\r\n neighbours = get_neighbouring_nodes(node) \r\n\r\n #Creating a new path branch\r\n new_path = [] #The new path\r\n path_found = False #Indicates whether the path to which the node belongs has been found\r\n\r\n #Looping through the neighbours\r\n for neighbour in neighbours :\r\n for path in paths :\r\n #Checking whether the path contains the neighbour\r\n if(neighbour in path) :\r\n index = path.index(neighbour)\r\n #Checking if the branch belongs to the current path\r\n if(path[index].gn_value == neighbour.gn_value) :\r\n new_path = path[:index + 1] + [node] #Creating a new path branch\r\n new_path[-1].gn_value = new_path.__len__() - 1 #Updating the node's g(n) value\r\n path_found = True\r\n break\r\n if(path_found) :\r\n break\r\n \r\n if(not path_found) :\r\n raise Exception(\"No branch junction found\")\r\n\r\n #Setting the new path as the current path\r\n return new_path",
"def add_node_pairs(self, node_a,node_b):\r\n \r\n if node_b is not None : \r\n self.nodes[node_a].append(node_b)",
"def add(self, node1, node2, w):\r\n\r\n self.graph[node1].add(node2 + ',' + str(w))\r\n self.graph[node2].add(node1 + ',' + str(w))",
"def build_path(cask_node, nodes):\n if cask_node.parent.name != 'ABC':\n nodes.insert(0, cask_node.parent.name)\n build_path(cask_node.parent, nodes)\n return nodes",
"def add_path(self, addresses, amount):\n # creating the corresponding tuples: (inputs[i], amount[i])\n input_edge = zip(addresses[0:len(addresses) - 1], addresses[1:len(addresses)], amount)\n for i in input_edge:\n addresses = i[0]\n outputs = i[1]\n weights = i[2]\n self.graph.add_node(addresses)\n self.graph.add_node(outputs)\n self.graph.add_edge(addresses, outputs, value=weights, title=weights)\n self.graph.options = self.options",
"def _path(from_object, to_object):\n\n if from_object._root != to_object._root:\n raise ValueError(\"No connecting path found between \" +\n str(from_object) + \" and \" + str(to_object))\n\n other_path = []\n obj = to_object\n while obj._parent is not None:\n other_path.append(obj)\n obj = obj._parent\n other_path.append(obj)\n object_set = set(other_path)\n from_path = []\n obj = from_object\n while obj not in object_set:\n from_path.append(obj)\n obj = obj._parent\n index = len(from_path)\n i = other_path.index(obj)\n while i >= 0:\n from_path.append(other_path[i])\n i -= 1\n return index, from_path",
"def _append_source_and_target(self, graph):\n graph.add_node( \"source\" )\n graph.add_node( \"target\" )\n \n for leave in (n for n,d in graph.out_degree_iter() if d==0):\n if leave is not \"source\" and leave is not \"target\":\n graph.add_edge( leave, \"target\" )\n \n for root in (n for n,d in graph.in_degree_iter() if d==0):\n if root is not \"source\" and root is not \"target\": \n graph.add_edge( \"source\", root )",
"def create_path(network, user_A, user_B, path=[]):\n path = path + [user_A] # all paths include starting node\n if user_A == user_B: # id the last node is user_B a valid path exists\n return path # base case\n for node in network[user_A][0]:\n if node not in path: # otherwise path is an infinite loop\n path = create_path(network, node, user_B, path)\n if path: # after the recursion hits the base case\n return path\n return None",
"def convert_paths(self):\n # convert to node sequences, dropping s'\n self.nodeseq_paths = []\n for path in self.paths:\n node_seq = [] # don't include s'\n for arc in path:\n node_seq.append(self.arc_info[arc]['destin'])\n self.nodeseq_paths.append(node_seq)\n # convert to og graph\n self.converted_paths = []\n for path in self.nodeseq_paths:\n this_path = []\n add_next_node = True\n for i in range(len(path) - 1):\n print(\"This path is\", this_path)\n node1 = path[i]\n node2 = path[i + 1]\n print(\"node1={}, node2={}\".format(node1, node2))\n if (node1, node2) in self.mapping:\n sc = self.mapping[(node1, node2)]\n print(\"uses sc edge for {}\".format(sc))\n print(\"should add {}, but also need to check for overlaps\".\n format(sc[1:-1]))\n if sc[1] in this_path:\n # we have an overlap\n start = len(this_path) - this_path.index(sc[1])\n this_path.extend(sc[start:-1])\n else:\n this_path.extend(sc[1:-1])\n add_next_node = False # next node is second of sc edge\n elif add_next_node:\n this_path.append(node1)\n else:\n add_next_node = True\n this_path.append(path[-1])\n self.converted_paths.append(this_path)",
"def append_path(path1, path2):\n\n # Get the first absolute path\n abs_path1 = abspath(path1)\n\n # Return the joined paths\n return os.path.join(abs_path1, path2).replace(\"\\\\\", \"/\")",
"def add(self, node1, node2):\r\n if not(node1 in self._graph):\r\n # if it's the first time we see this node\r\n self._graph[node1] = [node2]\r\n else:\r\n if not(node2 in self._graph[node1]):\r\n # if node2 is not already in the connections of node1 \r\n # self._graph[node1].add(node2)\r\n self._graph[node1].append(node2)\r\n\r\n \r\n # if undirected graph\r\n if not self._directed:\r\n if not(node2 in self._graph):\r\n # if it's the first time we see node2\r\n self._graph[node2] = [node1]\r\n else:\r\n if not(node1 in self._graph[node2]):\r\n # if node1 is not already in the connections of node1 \r\n # self._graph[node2].add(node1)\r\n self._graph[node2].append(node1)",
"def add_path(self, path):\n\n for i in range(1, len(path)):\n self.add_edge(path[i], path[i - 1])",
"def paths(self, source, target):\n assert source in self.node_map\n assert target in self.node_map\n if has_path(self.G2, source, target):\n return nx.all_simple_paths(self.G2, source=source, target=target)\n return None",
"def connect_both(node1, node2, weight):\n connect_one_way(node1, node2, weight)\n connect_one_way(node2, node1, weight)",
"def connect_one_way(node1, node2, weight):\n node1.add_or_update_neighbour(node2, weight)",
"def path(self, source, target):\n if source == target:\n return [source]\n elif self.parent[target] is None:\n raise ValueError(\"no path to target\")\n else:\n return self.path(source, self.parent[target]) + [target]",
"def path(self, source, target):\n if source == target:\n return [source]\n elif self.parent[target] is None:\n raise ValueError(\"no path to target\")\n else:\n return self.path(source, self.parent[target]) + [target]",
"def combine(self, action):\n next_node = Path(action.end, self.path_cost + action.cost, parent=self)\n return next_node",
"def path_to(start, end):\n sol = djikstra(start, end)\n if sol is None:\n return None\n (distances, cost) = sol\n return compute_path(distances, cost, start, end)",
"def add_edge(self, vertex1, vertex2):\n\n vertex1.add_outgoing_node(vertex2)\n vertex2.add_incoming_node(vertex1)",
"def add_edge(self,node1,node2):\n # add nodes if not already in graph\n if node1 not in self.nodes():\n self.add_node(node1)\n if node2 not in self.nodes():\n self.add_node(node2)\n\n # make connections to nodes\n self.__graph[node1].append(node2)\n self.__graph[node2].append(node1)",
"def join_paths(path_1, path_2):\r\n a = lib_path.join(path_1, path_2)\r\n return a",
"def astar(self, node1, node2):\n if node1 not in self.nodes:\n raise ValueError('node ' + str(node1) + ' not in the graph')\n if node2 not in self.nodes:\n raise ValueError('node ' + str(node2) + ' not in the graph')\n\n # list of (node index, heuristic)\n frontier = PriorityQueue()\n frontier.put(node1, 0.)\n came_from = {}\n cost_so_far = {}\n came_from[node1] = None\n cost_so_far[node1] = 0.\n\n while not frontier.empty():\n # Current\n current = frontier.get()\n if current == node2:\n # path reconstitution node1ing from the end\n shortest_path = [node2]\n while shortest_path[-1] != node1:\n shortest_path.append(came_from[shortest_path[-1]])\n\n return list(reversed(shortest_path))\n # search in nodes linked_to\n for neigh in self.nodes[current].linked_to:\n new_cost = cost_so_far[current] + \\\n self.edges[(current, neigh)].V\n # If newly visited node or already visited but with bigger cost\n if neigh not in cost_so_far or new_cost < cost_so_far[neigh]:\n cost_so_far[neigh] = new_cost\n dist_node2 = self.hdistance(self.nodes[current].state,\n self.nodes[node2].state)\n anti_priority = new_cost + dist_node2\n frontier.put(neigh, anti_priority)\n came_from[neigh] = current\n\n # no path found\n return None",
"def connect(self, node1, node2):\n self.neighbour1 = node1\n self.neighbour2 = node2"
] | [
"0.6751943",
"0.656915",
"0.65636885",
"0.65265256",
"0.65126884",
"0.64361167",
"0.642828",
"0.64159256",
"0.6312686",
"0.631026",
"0.6286674",
"0.6258846",
"0.6226627",
"0.6181952",
"0.61733156",
"0.6161478",
"0.61542237",
"0.61473876",
"0.61421573",
"0.6123293",
"0.61000246",
"0.60350305",
"0.60350305",
"0.60159063",
"0.5970751",
"0.5957772",
"0.5951798",
"0.59353024",
"0.5933554",
"0.5909922"
] | 0.76333266 | 0 |
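The matched document above rasterises the segment with a single-octant integer error term, so it only covers the case x2 >= x1 with a slope between 0 and 1. For comparison, a general all-octant Bresenham sketch (illustrative only, not part of the dataset; bresenham_points is a hypothetical helper) is:

def bresenham_points(x1, y1, x2, y2):
    # Integer line rasterisation valid for any direction and slope.
    points = []
    dx, dy = abs(x2 - x1), -abs(y2 - y1)
    sx = 1 if x1 < x2 else -1
    sy = 1 if y1 < y2 else -1
    err = dx + dy
    x, y = x1, y1
    while True:
        points.append((x, y))
        if x == x2 and y == y2:
            break
        e2 = 2 * err
        if e2 >= dy:   # step in x
            err += dy
            x += sx
        if e2 <= dx:   # step in y
            err += dx
            y += sy
    return points

Such a helper returns every grid point on the segment, endpoints included, so a caller could assign the x and y coordinates directly to path_x and path_y without appending the endpoint a second time.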
Function to draw the channel2energy calibration curves | def draw_en_calib_curves(list_channel_names, list_channel_arrays, \
                          list_energy_arrays, show=True):
    # Plot the channel-to-energy calibration curve of each channel and overlay
    # the benchmark (channel, energy) points used for the calibration.
    plt.figure(figsize=(10, 7), dpi=80)
    plt.title('Na Energy Calibration')
    plt.xlabel('Channel')
    plt.ylabel('Energy [MeV]')
    # Benchmark channels for ch0 and ch2 and the corresponding line energies [MeV].
    calib_ch0_x = [1917, 466, 2721, 6594, 3504, 6044, 6843]
    calib_y = [0.356, 0.03, 0.511, 1.27, 0.662, 1.17, 1.33]
    calib_ch2_x = [2022, 496, 2846, 6906, 3674, 6362, 7214]
    for i, item in enumerate(list_energy_arrays):
        # Keep one point per unique channel value (np.unique also sorts the channels).
        _channel, _index = np.unique(list_channel_arrays[i], return_index=True)
        item = item[_index]
        plt.plot(_channel, item, '-', label=list_channel_names[i])
    plt.plot(calib_ch0_x, calib_y, '.', color='red', label='ch0 benchmarks')
    plt.plot(calib_ch2_x, calib_y, '.', color='orange', label='ch2 benchmarks')
    plt.xlim(0., TOT_NUM_EN_CH)
    plt.legend(loc='center left', shadow=False, fontsize='small')
    overlay_tag()
    plt_figure = 'Energy_Calibration_ch0-ch2.png'
    save_current_figure(plt_figure, clear=False)
    if show:
        plt.show() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def calculate_and_visualize_energy_spectrum(CV):\n E = 1 / (2 / np.pi)**2 * np.fft.fft2(CV) * 0.33 * 0.33\n # here the unit of CV is still the same as U and V (typically px/s), thus the unit of the correlation is px2/s2.\n # To convert the unit to um2/s2, multiply the correlation by mpp^2 (0.33^2 for 20x lens)\n k, K = corrLib.compute_wavenumber_field(E.shape, 25*0.33)\n\n ind = np.argsort(k.flatten())\n k_plot = k.flatten()[ind]\n E_plot = E.flatten()[ind]\n\n fig, ax = plt.subplots(nrows=1, ncols=2, dpi=300, figsize=(7, 3))\n ax[0].plot(k_plot, E_plot.real, lw=0.5, ls='--', alpha=0.5, label='real')\n ax[0].plot(k_plot, E_plot.imag, lw=0.5, ls='--', alpha=0.5, label='imag')\n ax[0].plot(k_plot, abs(E_plot), lw=0.5, label='abs') \n ax[0].legend()\n # ax[1].plot(k_plot, E_plot.real, lw=0.5, ls='--', alpha=0.5, label='real')\n # ax[1].plot(k_plot, E_plot.imag, lw=0.5, ls='--', alpha=0.5, label='imag')\n ax[1].plot(k_plot, abs(E_plot), lw=0.5, label='abs', color=bestcolor(2))\n ax[1].loglog()\n ax[1].legend()\n\n # guide of the eye slope\n x = np.array([0.01,0.03])\n y = x ** -1.3 * 2e1\n ax[1].plot(x, y, lw=0.5, ls='--', color='black')\n ax[1].text(x.mean(), 1.1*y.mean(), '-1.3')",
"def calibration(exp, args):\n show_text(\"Nous allons faire un calibrage\", args).present()\n exp.clock.wait(1500)\n expyriment.stimuli.FixCross(size=(25, 25),\n line_width=3,\n colour=args[\"stimuli_color\"]).present()\n exp.clock.wait(2100)",
"def velocity_curve(self, output='test'):\n self.figure = figure() \n self.gridSpec = GridSpec(2, 1)\n self.axes = subplot(self.gridSpec[0, 0]) \n self.axes.plot(self.xs, [-v for v in self.vs], 'ko', alpha=0.5) \n self.axes.set_aspect('auto')\n self.axes = subplot(self.gridSpec[0, 1]) \n self.axes.plot(self.ys, [-u for u in self.us], 'ko', alpha=0.5) \n self.axes.set_aspect('auto')\n self.figure.savefig(output + '_velocity_curve.pdf')",
"def display_calibration(probs,\n actual,\n *,\n figure=None,\n bins=100,\n label=None,\n show_ici=True,\n alpha=0.05,\n n_resamples=None,\n kernel='gaussian',\n bandwidth=0.1,\n plot_intensities=False):\n\n resolution = 1.0 / bins\n\n if figure is None:\n figure = plt.gcf()\n ax1, ax2 = figure.subplots(\n nrows=2,\n ncols=1,\n sharex=True,\n gridspec_kw=dict(height_ratios=(3, 1)),\n )\n estimate, ci = compute_kde_calibration(probs,\n actual,\n resolution=resolution,\n kernel=kernel,\n bandwidth=bandwidth,\n alpha=alpha)\n\n ax1 = plot_calibration_curve(\n orig=estimate.orig,\n calibrated=estimate.calibrated,\n calibrated_ci=ci.calibrated,\n ici=estimate.ici if show_ici else None,\n ici_ci=ci.ici if show_ici else None,\n pos_intensity=estimate.pos_intensity if plot_intensities else None,\n all_intensity=estimate.all_intensity if plot_intensities else None,\n label=label,\n ax=ax1)\n ax1.set_xlabel('')\n ax2 = plot_histograms(*histograms(probs, actual, bins=bins), ax=ax2)\n ax2.set_box_aspect(1. / 3.)\n ax1.xaxis.set_ticks_position('none')\n figure.tight_layout()\n return figure, estimate, ci",
"def plot5(self):\n\n cond = ((self.ds.freq<32.6) & (self.ds.freq>17))\n freq = self.ds.freq[cond]\n power = self.ds.power[cond]\n\n # the modes for KIC 9205705\n m = pd.read_csv('/home/mxs191/Desktop/MathewSchofield/TRG/GetData/Modes/modes_9205705.csv')\n m1 = [17.65, 20.6, 23.7, 26.9, 30.1] # l=1\n\n plt.rc('font', size=18)\n fig, ax = plt.subplots()\n plt.plot(freq, power, zorder=1, alpha=0.4)\n\n # NOTE: annotate mode angular degrees\n plt.scatter(m['f0'].as_matrix(), np.full(len(m), 150000), c='k', zorder=2, s=80)\n plt.scatter(m['f2'].as_matrix(), np.full(len(m), 130000), c='mediumseagreen', zorder=2, s=80, marker='^')\n plt.scatter(m1, np.full(len(m1), 140000), c='grey', zorder=2, s=80, marker='v')\n\n # NOTE: plot envelope\n numax = info['numax'].as_matrix() # mu Hz\n env_width = 0.66 * numax**0.88\n plt.plot(freq, 40004*np.exp( -( (freq-24.7)**2 / (2*7.**2) ) ), c='k', linestyle='--')\n\n # NOTE: annotate envelope\n style = dict(size=16, color='k')\n ax.text(24.1, 49167, r\"$\\nu_{\\rm max}$\", color='k', size=18)\n ax.text(24.1, 20994, r\"$\\Gamma_{\\rm env}$\", color='k', size=18)\n ax.text(23, 162944, r\"$\\Delta \\nu$\", **style)\n plt.annotate(s='', xy=(25.3, 158610), xytext=(21.91, 158610),\n arrowprops=dict(arrowstyle='<->')) # dnu\n plt.annotate(s='', xy=((24.7-env_width/2.), 15861), xytext=((24.7+env_width/2.), 15861),\n arrowprops=dict(arrowstyle='<->')) # env width\n\n ax.set_xlabel(r'Frequency ($\\rm \\mu Hz$)')\n ax.set_ylabel(r'PSD ($\\rm ppm^{2} \\, \\mu Hz^{-1}$)')\n plt.xlim(17, 32.6)\n plt.ylim(17, 195181)\n plt.tight_layout()\n plt.show()\n fig.savefig(os.getcwd() + '/DetTest1_plots/Plot5_ps_' + str(self.ds.epic) + '.pdf')",
"def espec2(self, x, y):\n\n self.x = x\n self.y = y\n\n #cross-spectral density - welch method (complex valued)\n sp2 = mlab.csd(self.x, self.y, NFFT=self.nfft, Fs=self.fs, detrend=mlab.detrend_mean, window=mlab.window_hanning, noverlap=self.nfft/2)\n self.f = sp2[1][1:]\n sp2 = sp2[0][1:]\n \n #co e quad espectro (real e imag) - verificar com parente\n co = np.real(sp2)\n qd = np.imag(sp2)\n \n #phase (angle function)\n ph = np.angle(sp2,deg=True)\n \n #ecoherence between x and y (0-1)\n coer = mlab.cohere(self.x , self.y, NFFT=self.nfft, Fs=self.fs, detrend=mlab.detrend_mean, window=mlab.window_hanning, noverlap=self.nfft/2)\n coer = coer[0][1:]\n \n #intervalo de confianca para a amplitude do espectro cruzado - 95%\n ici = sp2 * 14 /26.12\n ics = sp2 * 14 /5.63\n \n #intervalo de confianca para coerencia\n icc = np.zeros(len(sp2))\n icc[:] = 1 - (0.05 ** (1 / (14 / 2.0 - 1)))\n \n self.aa2 = np.array([self.f,sp2,co,qd,ph,coer,ici,ics,icc]).T\n\n return self.aa2",
"def plot_2d(self):\n fig = plt.figure(figsize=(10,8))\n \n d = int(len(self.a_scale.flat)**0.5)\n a_scale = self.a_scale.reshape(d,d)\n c_scale = self.c_scale.reshape(d,d)\n E_coh = self.E_coh.reshape(d,d)\n plt.pcolormesh(a_scale, c_scale, E_coh)\n plt.xlabel('xy linear deformation coefficient')\n plt.xlabel('z linear deformation coefficient')\n cbar = plt.colorbar()\n cbar.ax.set_ylabel('cohesive energy (eV/atom)',\n fontsize='x-large')\n plt.show()\n \n return fig",
"def apply_calibration(self, cal):\n\n n_edges = len(self.channels) + 1\n channel_edges = np.linspace(-0.5, self.channels[-1] + 0.5, num=n_edges)\n self.bin_edges_kev = cal.ch2kev(channel_edges)",
"def plotCaliCurve(constants, data, outName):\n x=np.linspace(min(data[:,0]),max(data[:,0]),1000)\n plt.figure()\n plt.rcParams.update({'font.size' : 16})\n plt.scatter(data[:,0],data[:,1])\n plt.plot(x,LangmuirCurve(x,constants[0],constants[1],constants[2],constants[3]))\n #plt.xlabel(\"MG Concentration (nM)\")\n #plt.ylabel(\"Relative SHS signal (Arb. Units)\")\n plt.savefig(outName + \"_cali_model_plot.png\")\n plt.show()",
"def EPI():\n TE = np.array([4.22, 33.81, 63.39, 92.98, 122.6, 152.2, 181.7, 211.3, 240.9, 270.5])\n upper_left = np.array([697.3, 367.0, 217.5, 115.8, 51.8, 23.2, 14.8, 8.7, 6.1, 4.6])\n center = np.array([1110.2, 907.8, 813.6, 745.2, 692.8, 637.0, 564.9, 521.0, 450.2, 401.6])\n lower_right = np.array([723.0, 419.2, 224.1, 126.4, 61.8, 32.4, 15.1, 8.8, 3.9, 3.8])\n upper_center = np.array([782.2, 499.4, 279.5, 154.5, 88.6, 58.2, 43.8, 38.2, 38.2, 36.0])\n\n area = [upper_left, center, upper_center, lower_right]\n colors = [\"#1f77b4\", \"#ff7f0e\", \"#2ca02c\", \"#d62728\"]\n name = [\"Upper left area\", \"Center area\", \"Up center area\", \"Lower right area\"]\n x_new = np.linspace(4.22, 270.5, 10000)\n for i, j, k in zip(area, colors, name):\n popt, _ = curve_fit(M_xy, TE, i, p0=np.array([200, 300]))\n M0, T2 = popt[0], popt[1]\n y_new = M_xy(x_new, M0, T2)\n plt.scatter(TE, i)\n plt.plot(x_new, y_new, \"--\", c=j, label=\"Fit: %s\" % k + f\", $T_2$={T2:.2f}\")\n plt.legend(loc=\"best\")\n plt.grid()\n plt.ylabel(\"Mean Signal Intensity\")\n plt.xlabel(\"TE [ms]\")\n plt.show()",
"def plot_power(subject, channel, cv, axes, vmin=None, vmax=None, bb=False, bb2=False, elabel=None):\n ax0, ax1 = axes\n axes = [ax for ax in axes if (ax is not None)]\n if bb2:\n power_data = np.load(os.path.join(os.environ['HOME'], 'plots/xfreq/data',\n '{}_{}_{}_power_bb2.npz'.format(subject, cv, channel)))['power_data']\n elif bb:\n power_data = np.load(os.path.join(os.environ['HOME'], 'plots/xfreq/data',\n '{}_{}_{}_power_bb.npz'.format(subject, cv, channel)))['power_data']\n else:\n power_data = np.load(os.path.join(os.environ['HOME'], 'plots/xfreq/data',\n '{}_{}_{}_power.npz'.format(subject, cv, channel)))['power_data']\n\n if ax0 is not None:\n print(power_data[::-1, s].min(), power_data[::-1, s].max())\n print(power_data.shape)\n im = ax0.imshow(power_data[::-1, s], interpolation='nearest', cmap='afmhot',\n aspect='auto', vmin=vmin, vmax=vmax)\n yticklabels = [5, 25, 75]\n yticks = [40-np.searchsorted(bands.chang_lab['cfs'], y, side='right') for y in\n yticklabels]\n yticklabels.append(200)\n yticks.append(0)\n ax0.set_yticks(yticks)\n ax0.set_yticklabels(yticklabels)\n if elabel is None:\n elabel = channel\n ax0.set_title('Electrode: {}'.format(elabel), **axes_label_fontstyle)\n ax0.set_ylabel('Freq. (Hz)', **axes_label_fontstyle)\n ax0.axvline(100, 0, 1, linestyle='--', c='white', lw=1.)\n #ax0.set_xlabel('Time (ms)', fontsize=axes_label_fontsize)\n\n if ax1 is not None:\n hg_bands = np.logical_and(bands.chang_lab['cfs'] >= bands.neuro['min_freqs'][-1],\n bands.chang_lab['cfs'] <= bands.neuro['max_freqs'][-1])\n b_bands = np.logical_and(bands.chang_lab['cfs'] >= bands.neuro['min_freqs'][2],\n bands.chang_lab['cfs'] <= bands.neuro['max_freqs'][2])\n\n hb_bands = np.logical_and(bands.chang_lab['cfs'] >= bands.neuro['min_freqs'][3],\n bands.chang_lab['cfs'] <= bands.neuro['max_freqs'][3])\n b_bands = np.logical_or(b_bands, hb_bands)\n b_bands = range(10, 21)\n hg = power_data[hg_bands].mean(axis=0)\n b = power_data[b_bands].mean(axis=0)\n\n b = b[s]\n hg = hg[s]\n\n hg -= hg.min()\n hg /= hg.max()\n hg = 2. * hg - 1\n b -= b.min()\n b /= b.max()\n b = 2. * b - 1\n\n ax1.plot(hg, c='r', lw=2)\n ax1.plot(b, c='k', lw=2)\n ax1.set_ylabel('Normalized\\nAmplitude', **axes_label_fontstyle)\n ax1.set_xlabel('Time (ms)', **axes_label_fontstyle)\n ax1.set_xlim([0, plot_idx[-1]])\n ax1.axvline(100, 0, 1, linestyle='--', lw=1., c='gray')\n for ax in axes:\n ax.set_xticks([0, 100, plot_idx[-1]])\n ax.set_xticklabels([-500, 0, int(1000 * plot_time[-1])-500])\n ax.tick_params(**tickparams_fontstyle)\n return im",
"def draw(self, network, file_format, path=None):\n try:\n import cairocffi as cairo\n except ImportError:\n try:\n import cairo\n except ImportError:\n logging.warning('Cairo not found; potential energy surface will not be drawn.')\n return\n\n self.network = network\n\n # The order of wells is as follows:\n # - Reactant channels come first (to the left)\n # - Isomers are in the middle\n # - Product channels come last (to the right)\n # This is done because most people will read the PES from left to right\n wells = []\n wells.extend(network.reactants)\n wells.extend(network.isomers)\n wells.extend(network.products)\n\n # Generate the bounding rectangles for each configuration label\n label_rects = []\n for well in wells:\n label_rects.append(self._get_label_size(well, file_format=file_format))\n\n # Get energy range (use kJ/mol internally)\n e0_min, e0_max = self._get_energy_range()\n e0_min *= 0.001\n e0_max *= 0.001\n\n # Drawing parameters\n padding = self.options['padding']\n well_width = self.options['wellWidth']\n well_spacing = self.options['wellSpacing']\n e_slope = self.options['Eslope']\n ts_width = self.options['TSwidth']\n\n e0_offset = self.options['E0offset'] * 0.001\n\n # Choose multiplier to convert energies to desired units (on figure only)\n e_units = self.options['Eunits']\n try:\n e_mult = {'J/mol': 1.0,\n 'kJ/mol': 0.001,\n 'cal/mol': 1.0 / 4.184,\n 'kcal/mol': 1.0 / 4184.,\n 'cm^-1': 1.0 / 11.962,\n }[e_units]\n except KeyError:\n raise Exception('Invalid value \"{0}\" for Eunits parameter.'.format(e_units))\n\n # Determine height required for drawing\n e_height = self._get_text_size('0.0', file_format=file_format)[3] + 6\n y_e0 = (e0_max - 0.0) * e_slope + padding + e_height\n height = (e0_max - e0_min) * e_slope + 2 * padding + e_height + 6\n for i in range(len(wells)):\n if 0.001 * wells[i].E0 == e0_min:\n height += label_rects[i][3]\n break\n\n # Determine naive position of each well (one per column)\n coordinates = np.zeros((len(wells), 2), np.float64)\n x = padding\n for i in range(len(wells)):\n well = wells[i]\n rect = label_rects[i]\n this_well_width = max(well_width, rect[2])\n E0 = 0.001 * well.E0\n y = y_e0 - E0 * e_slope\n coordinates[i] = [x + 0.5 * this_well_width, y]\n x += this_well_width + well_spacing\n width = x + padding - well_spacing\n\n # Determine the rectangles taken up by each well\n # We'll use this to merge columns safely so that wells don't overlap\n well_rects = []\n for i in range(len(wells)):\n l, t, w, h = label_rects[i]\n x, y = coordinates[i, :]\n if w < well_width:\n w = well_width\n t -= 6 + e_height\n h += 6 + e_height\n well_rects.append([l + x - 0.5 * w, t + y + 6, w, h])\n\n # Squish columns together from the left where possible until an isomer is encountered\n old_left = np.min(coordinates[:, 0])\n n_left = wells.index(network.isomers[0]) - 1\n columns = []\n for i in range(n_left, -1, -1):\n top = well_rects[i][1]\n bottom = top + well_rects[i][3]\n for j in range(len(columns)):\n for c in columns[j]:\n top0 = well_rects[c][1]\n bottom0 = top + well_rects[c][3]\n if (top0 <= top <= bottom0) or (top <= top0 <= bottom):\n # Can't put it in this column\n break\n else:\n # Can put it in this column\n columns[j].append(i)\n break\n else:\n # Needs a new column\n columns.append([i])\n for column in columns:\n column_width = max([well_rects[c][2] for c in column])\n x = coordinates[column[0] + 1, 0] - 0.5 * well_rects[column[0] + 1][2] - well_spacing - 0.5 * column_width\n for c in column:\n delta = x - coordinates[c, 0]\n 
well_rects[c][0] += delta\n coordinates[c, 0] += delta\n new_left = np.min(coordinates[:, 0])\n coordinates[:, 0] -= new_left - old_left\n\n # Squish columns together from the right where possible until an isomer is encountered\n n_right = wells.index(network.isomers[-1]) + 1\n columns = []\n for i in range(n_right, len(wells)):\n top = well_rects[i][1]\n bottom = top + well_rects[i][3]\n for j in range(len(columns)):\n for c in columns[j]:\n top0 = well_rects[c][1]\n bottom0 = top0 + well_rects[c][3]\n if (top0 <= top <= bottom0) or (top <= top0 <= bottom):\n # Can't put it in this column\n break\n else:\n # Can put it in this column\n columns[j].append(i)\n break\n else:\n # Needs a new column\n columns.append([i])\n for column in columns:\n column_width = max([well_rects[c][2] for c in column])\n x = coordinates[column[0] - 1, 0] + 0.5 * well_rects[column[0] - 1][2] + well_spacing + 0.5 * column_width\n for c in column:\n delta = x - coordinates[c, 0]\n well_rects[c][0] += delta\n coordinates[c, 0] += delta\n\n width = max([rect[2] + rect[0] for rect in well_rects]) - min([rect[0] for rect in well_rects]) + 2 * padding\n\n # Draw to the final surface\n surface = create_new_surface(file_format=file_format, target=path, width=width, height=height)\n cr = cairo.Context(surface)\n\n # Some global settings\n cr.select_font_face(\"sans\")\n cr.set_font_size(self.options['fontSizeNormal'])\n\n # Fill the background with white\n cr.set_source_rgba(1.0, 1.0, 1.0, 1.0)\n cr.paint()\n self._draw_text('E0 ({0})'.format(e_units), cr, 15, 10, padding=2) # write units\n\n # # DEBUG: Draw well bounding rectangles\n # cr.save()\n # cr.set_line_width(1.0)\n # for rect in wellRects:\n # cr.rectangle(*rect)\n # cr.set_source_rgba(0.0, 0.0, 1.0, 0.5)\n # cr.stroke()\n # cr.restore()\n\n # Draw path reactions\n for rxn in network.path_reactions:\n for reac in range(len(wells)):\n if wells[reac].species == rxn.reactants:\n break\n else:\n raise Exception\n for prod in range(len(wells)):\n if wells[prod].species == rxn.products:\n break\n else:\n raise Exception\n e0_reac = wells[reac].E0 * 0.001 - e0_offset\n e0_prod = wells[prod].E0 * 0.001 - e0_offset\n e0_ts = rxn.transition_state.conformer.E0.value_si * 0.001 - e0_offset\n if reac < prod:\n x1, y1 = coordinates[reac, :]\n x2, y2 = coordinates[prod, :]\n else:\n x1, y1 = coordinates[prod, :]\n x2, y2 = coordinates[reac, :]\n x1 += well_spacing / 2.0\n x2 -= well_spacing / 2.0\n if abs(e0_ts - e0_reac) > 0.1 and abs(e0_ts - e0_prod) > 0.1:\n if len(rxn.reactants) == 2:\n if reac < prod:\n x0 = x1 + well_spacing * 0.5\n else:\n x0 = x2 - well_spacing * 0.5\n elif len(rxn.products) == 2:\n if reac < prod:\n x0 = x2 - well_spacing * 0.5\n else:\n x0 = x1 + well_spacing * 0.5\n else:\n x0 = 0.5 * (x1 + x2)\n y0 = y_e0 - (e0_ts + e0_offset) * e_slope\n width1 = (x0 - x1)\n width2 = (x2 - x0)\n # Draw horizontal line for TS\n cr.set_source_rgba(0.0, 0.0, 0.0, 1.0)\n cr.set_line_width(2.0)\n cr.move_to(x0 - ts_width / 2.0, y0)\n cr.line_to(x0 + ts_width / 2.0, y0)\n cr.stroke()\n # Add background and text for energy\n E0 = \"{0:.1f}\".format(e0_ts * 1000. 
* e_mult)\n extents = cr.text_extents(E0)\n x = x0 - extents[2] / 2.0\n y = y0 - 6.0\n cr.rectangle(x + extents[0] - 2.0, y + extents[1] - 2.0, extents[2] + 4.0, extents[3] + 4.0)\n cr.set_source_rgba(1.0, 1.0, 1.0, 0.75)\n cr.fill()\n cr.move_to(x, y)\n cr.set_source_rgba(0.0, 0.0, 0.0, 1.0)\n cr.show_text(E0)\n # Draw Bezier curve connecting reactants and products through TS\n cr.set_source_rgba(0.0, 0.0, 0.0, 0.5)\n cr.set_line_width(1.0)\n cr.move_to(x1, y1)\n cr.curve_to(x1 + width1 / 8.0, y1, x0 - width1 / 8.0 - ts_width / 2.0, y0, x0 - ts_width / 2.0, y0)\n cr.move_to(x0 + ts_width / 2.0, y0)\n cr.curve_to(x0 + width2 / 8.0 + ts_width / 2.0, y0, x2 - width2 / 8.0, y2, x2, y2)\n cr.stroke()\n else:\n width = (x2 - x1)\n # Draw Bezier curve connecting reactants and products through TS\n cr.set_source_rgba(0.0, 0.0, 0.0, 0.5)\n cr.set_line_width(1.0)\n cr.move_to(x1, y1)\n cr.curve_to(x1 + width / 4.0, y1, x2 - width / 4.0, y2, x2, y2)\n cr.stroke()\n\n # Draw wells (after path reactions so that they are on top)\n for i, well in enumerate(wells):\n x0, y0 = coordinates[i, :]\n # Draw horizontal line for well\n cr.set_line_width(4.0)\n cr.move_to(x0 - well_width / 2.0, y0)\n cr.line_to(x0 + well_width / 2.0, y0)\n cr.set_source_rgba(0.0, 0.0, 0.0, 1.0)\n cr.stroke()\n # Add background and text for energy\n E0 = well.E0 * 0.001 - e0_offset\n E0 = \"{0:.1f}\".format(E0 * 1000. * e_mult)\n extents = cr.text_extents(E0)\n x = x0 - extents[2] / 2.0\n y = y0 - 6.0\n cr.rectangle(x + extents[0] - 2.0, y + extents[1] - 2.0, extents[2] + 4.0, extents[3] + 4.0)\n cr.set_source_rgba(1.0, 1.0, 1.0, 0.75)\n cr.fill()\n cr.move_to(x, y)\n cr.set_source_rgba(0.0, 0.0, 0.0, 1.0)\n cr.show_text(E0)\n # Draw background and text for label\n x = x0 - 0.5 * label_rects[i][2]\n y = y0 + 6\n cr.rectangle(x, y, label_rects[i][2], label_rects[i][3])\n cr.set_source_rgba(1.0, 1.0, 1.0, 0.75)\n cr.fill()\n self._draw_label(well, cr, x, y, file_format=file_format)\n\n # Finish Cairo drawing\n if file_format == 'png':\n surface.write_to_png(path)\n else:\n surface.finish()",
"def energy_kde_paperplot(fields,df):\n plt.figure()\n i = 0\n colorList = ['dodgerblue','tomato']\n lw = 2\n\n meanE_2 = []\n meanE_3 = []\n mup = np.min(df['energy [eV]']) - pp.mu\n chi_0 = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(fields[0]))\n g_en_axis, _, _, _, _, _, _, _, _, _, _, _, _, _ = \\\n occupation_plotter.occupation_v_energy_sep(chi_0, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), np.zeros(len(g_en_axis)), '-', color='black', lineWidth=lw,label='Equilibrium')\n\n for ee in fields:\n chi_2_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '2_' + \"E_{:.1e}.npy\".format(ee))\n # meanE_2 = utilities.mean_energy(chi_2_i,df)\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_2_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,'--',color = colorList[i],lineWidth=lw,label=r'Low Field {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n print(integrate.trapz(g_chiax,g_en_axis))\n\n # plt.plot(meanE_2-np.min(df['energy [eV]']),0,'.')\n chi_3_i = np.load(pp.outputLoc + 'Steady/' + 'chi_' + '3_' + \"E_{:.1e}.npy\".format(ee))\n g_en_axis, g_ftot, g_chiax, g_f0ax, _, _, _, _, _, _, _, _,_,_ = \\\n occupation_plotter.occupation_v_energy_sep(chi_3_i, df['energy [eV]'].values, df)\n plt.plot(g_en_axis - np.min(df['energy [eV]']), g_chiax,color = colorList[i],lineWidth=lw,label=r'Full Drift {:.0f} '.format(ee/100)+r'$V \\, cm^{-1}$')\n print(integrate.trapz(g_chiax,g_en_axis))\n\n i = i + 1\n # plt.plot(g_en_axis - np.min(df['energy [eV]']), g_f0ax, '--', color='black', lineWidth=lw,label=r'$f_0$')\n\n plt.legend()\n # plt.ylim([-0.02, 0.015])\n plt.xlabel(r'Energy above CBM ($eV$)')\n plt.ylabel(r'Deviational occupation $\\delta f_{\\mathbf{k}}$ (norm.)')\n # plt.ylabel(r'$\\delta f_{\\mathbf{k}}/f_{\\mathbf{k}}^0$')\n plt.savefig(pp.figureLoc+'energy_KDE.png', bbox_inches='tight',dpi=600)\n\n plt.figure()\n plt.plot(g_en_axis,g_chiax)\n\n plt.figure()\n Z, xedges, yedges = np.histogram2d(df['kx [1/A]']*chi_3_i,df['ky [1/A]']*chi_3_i)\n plt.pcolormesh(xedges, yedges, Z.T)\n\n from scipy.stats.kde import gaussian_kde\n g_inds,_,_ = utilities.gaas_split_valleys(df,False)\n g_df = df.loc[g_inds]\n\n x = g_df['kx [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n y = g_df['ky [1/A]']*(chi_3_i[g_inds]+g_df['k_FD'])\n\n # y = g_df['energy [eV]']*(chi_3_i[g_inds]+g_df['k_FD'])\n k = gaussian_kde(np.vstack([x, y]))\n xi, yi = np.mgrid[x.min():x.max():x.size ** 0.5 * 1j, y.min():y.max():y.size ** 0.5 * 1j]\n zi = k(np.vstack([xi.flatten(), yi.flatten()]))\n\n fig = plt.figure(figsize=(7, 8))\n ax1 = fig.add_subplot(211)\n ax2 = fig.add_subplot(212)\n\n # alpha=0.5 will make the plots semitransparent\n ax1.pcolormesh(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n ax2.contourf(xi, yi, zi.reshape(xi.shape), alpha=0.5)\n\n ax1.set_xlim(x.min(), x.max())\n ax1.set_ylim(y.min(), y.max())\n ax2.set_xlim(x.min(), x.max())\n ax2.set_ylim(y.min(), y.max())",
"def concent_graph(self):\n r_big = self['M_RSMALL']\n r_small = self['M_RBIG']\n C = self['M_C']\n \n xcenter = self['X_IMAGE'] ; ycenter = self['Y_IMAGE']\n xcenter = xcenter - self['MXMIN_IMAGE']\n ycenter = ycenter - self['MYMIN_IMAGE']\n center = (xcenter,ycenter)\n \n ellip = self['ELLIPTICITY'] \n q = 1. - ellip\n pa = self['THETA_IMAGE'] # Astronomical position angle.\n\n stamp = self['STAMP'].copy()\n mask = self['MASKOTHER'].copy()\n sky = self['BACKGROUND']\n Img = stamp - sky\n Img[num.where(mask != 0)] = 0.\n \n id = self._getGraphId()\n root = 'C_%s' % (id,)\n pngname = root + '.png' ; epsname = root + '.eps'\n jpgname = root + '.jpg'\n doStamp(Img,pngname,format='PNG')\n Convert(pngname,jpgname)\n \n Painted = Paint(jpgname)\n Painted.load()\n Painted.DrawEllipse(center,r_big,q,pa,color='red',linewidth=2)\n Painted.DrawEllipse(center,r_small,q,pa,color='green',linewidth=2)\n \n text = 'C=%5.2f' % (self['M_C'])\n # Painted.Graffiti(text,commtextpos)\n Painted.save(jpgname) \n Painted.release()\n \n Convert(jpgname,epsname)\n os.system('rm %s %s' % (pngname,jpgname))\n \n self['figures']['C'] = epsname\n self['figcomms']['C'] = text",
"def plot_calibration_curve(est, name, fig_index, data):\n\n X_train = data[0]\n X_test = data[1]\n y_train = data[2]\n y_test = data[3]\n\n y = np.concatenate([y_train, y_test], axis=0)\n\n # Calibrated with isotonic calibration\n isotonic = CalibratedClassifierCV(est, cv=2, method='isotonic')\n\n # Calibrated with sigmoid calibration\n sigmoid = CalibratedClassifierCV(est, cv=2, method='sigmoid')\n\n # Logistic regression with no calibration as baseline\n lr = LogisticRegression(C=1., solver='lbfgs')\n\n fig = plt.figure(1, figsize=(15, 10))\n ax1 = plt.subplot2grid((4, 6), (0, 0), colspan=2, rowspan=2)\n ax2 = plt.subplot2grid((4, 6), (0, 2), colspan=2, rowspan=2)\n ax3 = plt.subplot2grid((4, 6), (0, 4), colspan=2, rowspan=2)\n ax4 = plt.subplot2grid((4, 6), (2, 0), colspan=6, rowspan=2)\n\n ax1.plot([0, 1], [0, 1], \"k:\", label=\"Perfectly calibrated\")\n for clf, name in [(lr, 'Logistic'),\n (est, name),\n (isotonic, name + ' + Isotonic'),\n (sigmoid, name + ' + Sigmoid')]:\n clf.fit(X_train, y_train)\n y_pred = clf.predict(X_test)\n if hasattr(clf, \"predict_proba\"):\n prob_pos = clf.predict_proba(X_test)[:, 1]\n y_proba = prob_pos.copy()\n else: # use decision function\n prob_pos = clf.decision_function(X_test)\n y_proba = prob_pos.copy()\n prob_pos = \\\n (prob_pos - prob_pos.min()) / (prob_pos.max() - prob_pos.min())\n\n clf_score = brier_score_loss(y_test, prob_pos, pos_label=y.max())\n print(\"%s:\" % name)\n print(\"\\tBrier: %1.3f\" % (clf_score))\n print(\"\\tPrecision: %1.3f\" % precision_score(y_test, y_pred))\n print(\"\\tRecall: %1.3f\" % recall_score(y_test, y_pred))\n print(\"\\tF1: %1.3f\" % f1_score(y_test, y_pred))\n print(\"\\tAve. Precision Score: %1.3f\\n\" % \\\n average_precision_score(y_test, y_proba))\n\n fraction_of_positives, mean_predicted_value = \\\n calibration_curve(y_test, prob_pos, n_bins=10)\n\n ax1.plot(mean_predicted_value, fraction_of_positives, \"s-\",\n label=\"%s (%1.3f)\" % (name, clf_score))\n\n fpr, tpr, thresholds = roc_curve(y_test, y_proba, drop_intermediate=False)\n roc_auc = roc_auc_score(y_test, y_proba)\n ax2.plot(fpr, tpr, ls='-', label=\"%s (%1.3f)\" % (name, roc_auc))\n\n precision, recall, _ = precision_recall_curve(y_test, y_proba)\n ax3.plot(recall, precision)\n\n ax4.hist(prob_pos, range=(0, 1), bins=10,\n label='%s' % name, histtype=\"step\", lw=2)\n\n ax1.set_xlabel(\"Score\", fontsize=14)\n ax1.set_ylabel(\"Fraction of positives\", fontsize=14)\n ax1.set_ylim([-0.05, 1.05])\n ax1.legend(loc=\"lower right\")\n ax1.set_title('Calibration plots (reliability curve)', fontsize=16)\n\n ax2.set_xlabel(\"False Positive Rate\", fontsize=14)\n ax2.set_ylabel(\"True Positive Rate\", fontsize=14)\n ax2.set_ylim([-0.05, 1.05])\n ax2.legend(loc=\"lower right\")\n ax2.set_title('ROC Curve', fontsize=16)\n\n ax3.set_xlabel(\"Recall\", fontsize=14)\n ax3.set_ylabel(\"Precision\", fontsize=14)\n ax3.set_ylim([-0.05, 1.05])\n ax3.legend(loc=\"lower center\")\n ax3.set_title('Precision-Recall Curve', fontsize=16)\n\n ax4.set_xlabel(\"Mean predicted value\", fontsize=14)\n ax4.set_ylabel(\"Count\", fontsize=14)\n ax4.legend(loc=\"upper center\")\n ax4.set_title('Classification Result', fontsize=16)\n\n plt.tight_layout()\n\n plt.show()\n\n return",
"def draw_discount_curve(self):\n data=Bootstrapping.get_ytm_discount_data(self)\n fig = plt.figure(figsize=[10, 6])\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(data['discount_rate'])\n ax.set_xlabel('Term')\n ax.set_ylabel('value')\n ax.set_title('Discount Curves')\n plt.show()",
"def inner_CoherentLength():\r\n\r\n ax = fig.add_subplot(111)\r\n\r\n try:\r\n ax.cla()\r\n #print(\"clear self.cbar !\")\r\n except:\r\n pass\r\n #print(\"fail to clear self.cbar !\")\r\n xCorr, yCorr = self.APP_dataprocess.SpatialCorrelation([self.spinBox_PixelX.value(), self.spinBox_PixelY.value()])\r\n ax.plot(xCorr)\r\n ax.set_title(\"G2 @({}, {})\".format(self.spinBox_PixelX.value(), self.spinBox_PixelY.value()))\r\n fig.savefig(\"G2 @({}, {}).png\".format(self.spinBox_PixelX.value(), self.spinBox_PixelY.value()), format=\"png\", dpi = 100)\r\n plt.close()",
"def make_calcurve_order(self,oi):\n ccfs = np.full((self.n_vsini, self.n_velocities), np.nan)\n widths = np.full((self.n_vsini),np.nan)\n for i,v in enumerate(self.vsinis):\n # Make resampled/broadened spectra\n rotated = self.hpfspec.resample_and_broaden_order(oi, vsini=v, diag=True, upsample_factor=self.upsample_factor)\n # Make CCF and fit the output\n fit_output = self.hpfspec.ccfwidth_order(oi,w=rotated['w_resampled'], fl=rotated['fl_broadened'], debug=True, fitwidth=self.fitwidth, M=self.M, velocities=self.velocities)\n # Store results\n ccfs[i,:] = fit_output['ccf1']\n widths[i] = fit_output['fit']['sigma']\n self.calibration_widths[oi] = widths\n self.calibration_ccfs[oi] = ccfs",
"def plot_dereddening():\n extinction_coefficients = {'2365-2764-1': np.array([0.2622, 0.844]), '4109-638-1': np.array([0.0524, 0.1576]),\n '2058-56-1': np.array([0.0751, 0.248]), '3642-2459-1': np.array([0.1907, 0.608]),\n '3999-1391-1': np.array([0.3911, 1.2480]), '2607-1448-1': np.array([0.0430, 0.1310])}\n cepheids = {'2365-2764-1': np.array([0.959, 2.09]), '4109-638-1': np.array([0.705, 2.385]), '2058-56-1':\n np.array([1.222, 1.333]), '3642-2459-1': np.array([1.088, 2.0518]), '3999-1391-1':\n np.array([1.360, 1.2567]), '2607-1448-1': np.array([1.484, 0.6963])}\n periods = {'2365-2764-1': 1.61, '4109-638-1': 15.31, '2058-56-1': 63.08, '3642-2459-1': 1.86, '3999-1391-1': 24.98,\n '2607-1448-1': 8.54}\n max_periods = max(periods.values())\n\n new_positions_bv_mv = [] # in M_V vs B-V space\n colors = []\n theoretical_position = []\n for obj in extinction_coefficients.keys():\n # new_positions_bv_mv.append(cepheids[obj]-extinction_coefficients[obj])\n new_positions_bv_mv.append(cepheids[obj])\n colors.append(periods[obj]/max_periods)\n theoretical_position.append(-2.78*np.log10(periods[obj])-1.35)\n\n for pos in range(len(new_positions_bv_mv)):\n plt.scatter(new_positions_bv_mv[pos][0], new_positions_bv_mv[pos][1], marker='^', facecolor='w', s=40)\n plt.scatter(new_positions_bv_mv[pos][0], theoretical_position[pos], marker='o', facecolor='r', s=50)\n return new_positions_bv_mv, colors",
"def cie_lab(self):\n K = Fraction(1, 3) * Fraction(29, 6) ** 2\n e = Fraction(6, 29) ** 3\n x, y, z = (n / m for n, m in zip(self.cie_xyz, D65))\n fx, fy, fz = (\n n ** Fraction(1, 3) if n > e else K * n + Fraction(4, 29)\n for n in (x, y, z)\n )\n return (116 * fy - 16, 500 * (fx - fy), 200 * (fy - fz))",
"def IC_FC_visualization(self):\n legend = ['1st CWT','2nd CWT','IC','FC']\n title = 'Optimized ICs and FCs detection'\n IC_values = [self.IC,normalize(self.cwt1)[self.IC]]\n FC_values = [self.FC,normalize(self.cwt2)[self.FC]]\n visualize_signal(legend, title, normalize(self.cwt1), normalize(self.cwt2), IC = IC_values, FC = FC_values)",
"def showCl(ell,temps,title='CAMB ISWout power spectrum'):\n plt.plot(ell,temps*ell*(ell+1)/(2*np.pi) *1e12) #1e12 to convert to microK**2\n plt.xlabel('multipole moment l')\n plt.ylabel('l(l+1)C_l/(2pi) [microK**2]')\n plt.title(title)\n plt.show()",
"def plot_calibration_curve(est, name, fig_index):\n # Calibrated with isotonic calibration\n isotonic = CalibratedClassifierCV(est, cv=2, method='isotonic')\n\n # Calibrated with sigmoid calibration\n sigmoid = CalibratedClassifierCV(est, cv=2, method='sigmoid')\n\n # Logistic regression with no calibration as baseline\n lr = LogisticRegression(C=1.)\n\n fig = plt.figure(fig_index, figsize=(10, 10))\n ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)\n ax2 = plt.subplot2grid((3, 1), (2, 0))\n\n ax1.plot([0, 1], [0, 1], \"k:\", label=\"Perfectly calibrated\")\n for clf, name in [(lr, 'Logistic'),(est, name),(isotonic, name + ' + Isotonic'),(sigmoid, name + ' + Sigmoid')]:\n #Para cada modelo, entrenamos y predecimos \n clf.fit(X_train, y_train)\n y_pred = clf.predict(X_test)\n if hasattr(clf, \"predict_proba\"):\n prob_pos = clf.predict_proba(X_test)[:, 1]\n else: # use decision function\n prob_pos = clf.decision_function(X_test)\n prob_pos = \\\n (prob_pos - prob_pos.min()) / (prob_pos.max() - prob_pos.min())\n\n clf_score = brier_score_loss(y_test, prob_pos, pos_label=y.max())\n print(\"%s:\" % name)\n print(\"\\tBrier: %1.3f\" % (clf_score))\n print(\"\\tPrecision: %1.3f\" % precision_score(y_test, y_pred))\n print(\"\\tRecall: %1.3f\" % recall_score(y_test, y_pred))\n print(\"\\tF1: %1.3f\\n\" % f1_score(y_test, y_pred))\n\n fraction_of_positives, mean_predicted_value = \\\n calibration_curve(y_test, prob_pos, n_bins=10)\n\n ax1.plot(mean_predicted_value, fraction_of_positives, \"s-\",\n label=\"%s (%1.3f)\" % (name, clf_score))\n\n ax2.hist(prob_pos, range=(0, 1), bins=10, label=name,\n histtype=\"step\", lw=2)\n\n ax1.set_ylabel(\"Fraction of positives\")\n ax1.set_ylim([-0.05, 1.05])\n ax1.legend(loc=\"lower right\")\n ax1.set_title('Calibration plots (reliability curve)')\n\n ax2.set_xlabel(\"Mean predicted value\")\n ax2.set_ylabel(\"Count\")\n ax2.legend(loc=\"upper center\", ncol=2)\n\n plt.tight_layout()",
"def plot_CO2(time_begin,time_end,y_min,y_max):\n\tco2.plot(x='Year', y='Carbon')\n\tplot.legend(bbox_to_anchor=(0.5, 1.11), loc='center', ncol=1)\n\tplot.ylabel('CO2 Emissions (Million Metric Tons of Carbon)')\n\tplot.xlim(time_begin,time_end)\n\tplot.ylim(y_min,y_max)\n\tplot.title('Global CO2 Emissions')\n\tplot.savefig(os.getcwd()+'/static/myCO2fig.png')\n\t#plot.show()",
"def __init__(self, file_path, meas_corr_curve_file_path, lower_wavelength, upper_wavelength, CCD_height, CCD_width, CCD_height_corr, CCD_width_corr):\r\n \r\n self.file_path = file_path\r\n self.df = pd.read_csv(self.file_path)\r\n self.CCD_height = CCD_height\r\n self.CCD_width = CCD_width\r\n # self.fig = plt.figure(num=1)\r\n self.correctionfactors = correction_factors(meas_corr_curve_file_path, CCD_height=CCD_height_corr , CCD_width=CCD_width_corr, lower_wavelength=lower_wavelength, upper_wavelength=upper_wavelength)\r\n\r\n # Determine wavelength and energy values\r\n self.wavelengths = np.linspace(lower_wavelength, upper_wavelength, self.CCD_width)\r\n self.energies = (J * planck_const * v_light) / (self.wavelengths * 10**(-9))\r\n\r\n # Bin correctionfactors\r\n self.correctionfactors_res = []\r\n for i in range(1, self.CCD_width + 1):\r\n slice_begin = (i - 1) * int((len(self.correctionfactors) / self.CCD_width))\r\n slice_end = i * int((len(self.correctionfactors) / self.CCD_width))\r\n\r\n binned_data = self.correctionfactors[slice_begin:slice_end]\r\n\r\n self.correctionfactors_res.append(mean(binned_data))\r\n\r\n # self.correctionfactors_res = pd.DataFrame({\"Correction Factors\":self.correctionfactors_res})\r\n\r\n # Construct matrix from data. Columns represent different measurements, rows represent measurement data\r\n # | Raw | Corr |\r\n #--------+---------+---------+\r\n # Raw | df1 | df3 |\r\n # No bkg | df2 | df4 |\r\n\r\n self.correctionfactors_res.reverse()\r\n self.df1 = pd.DataFrame({\"Correction Factors\": self.correctionfactors_res})\r\n self.df3 = pd.DataFrame({\"Correction Factors\": self.correctionfactors_res})\r\n for i in range(1, self.CCD_height + 1):\r\n self.df1[f'{i}'] = self.df.Intensity.loc[((i - 1)*self.CCD_width):(i*self.CCD_width - 1)].reset_index(drop=True)\r\n self.df3[f'{i}'] = self.df.Intensity.loc[((i - 1)*self.CCD_width):(i*self.CCD_width - 1)].reset_index(drop=True)\r\n self.df3[f'{i}'] = self.df3[f'{i}'] * self.df1[\"Correction Factors\"]\r\n \r\n self.df1 = self.df1.drop('Correction Factors', axis=1)\r\n self.df3 = self.df3.drop('Correction Factors', axis=1)\r\n\r\n # Construct differential matrix\r\n self.df2 = pd.DataFrame({\"1\": self.df1['3'] - self.df1['2']})\r\n self.df4 = pd.DataFrame({\"1\": self.df3['3'] - self.df3['2']})\r\n \r\n for i in range(4, self.CCD_height + 1):\r\n self.df2[f'{i - 2}'] = self.df1[f'{i}'] - self.df1[f'{i - 1}']\r\n self.df4[f'{i - 2}'] = self.df3[f'{i}'] - self.df3[f'{i - 1}'] \r\n\r\n # Change of index\r\n self.df1['index'] = self.energies\r\n self.df2['index'] = self.energies\r\n self.df3['index'] = self.energies\r\n self.df4['index'] = self.energies\r\n self.df1 = self.df1.set_index('index')\r\n self.df2 = self.df2.set_index('index')\r\n self.df3 = self.df3.set_index('index')\r\n self.df4 = self.df4.set_index('index')\r\n\r\n # Sets all the values that are negative to 0 and filters out the solar flares by setting an upper limit for the intensity values\r\n self.df2[self.df2 < 0] = 0\r\n self.df4[self.df4 < 0] = 0\r\n \r\n self.df1[self.df1 > raw_data_threshold] = 0\r\n self.df2[self.df2 > raw_data_no_background_threshold] = 0\r\n self.df3[self.df3 > corrected_data_threshold] = 0\r\n self.df4[self.df4 > corrected_data_no_background_threshold] = 0",
"def plot_scalp(v, channel):\n\n channelpos = [tts.channels[c] for c in channel]\n points = [calculate_stereographic_projection(i) for i in channelpos]\n x = [i[0] for i in points]\n y = [i[1] for i in points]\n z = v\n X, Y, Z = interpolate_2d(x, y, z)\n plt.contour(X, Y, Z, 20)\n plt.contourf(X, Y, Z, 20)\n #plt.clabel(im)\n plt.colorbar()\n plt.gca().add_artist(plt.Circle((0, 0), radius=1, linewidth=3, fill=False))\n plt.plot(x, y, 'bo')\n for i in zip(channel, zip(x,y)):\n plt.annotate(i[0], i[1])",
"def inner_PlotDistrifun():\r\n \r\n font = {'family': 'serif',\r\n 'color': 'darkred',\r\n 'weight': 'normal',\r\n 'size': 16}\r\n\r\n Nmax = 100\r\n bins = np.linspace(0, Nmax, Nmax+1)\r\n nList = np.linspace(0, Nmax, Nmax+1, dtype = int)\r\n\r\n y_location = self.spinBox_PixelY.value()\r\n x_location = self.spinBox_PixelX.value()\r\n\r\n # get pixel intensity data\r\n Array1 = self.APP_dataprocess.PixelData(y_location, x_location)\r\n Array2 = Array1\r\n g2 = G2(Array1, Array2)\r\n print(\"g2 is:\", g2)\r\n\r\n arr = []\r\n rv = poisson(self.firstOrdImaging[y_location, x_location])\r\n for num in range(0,40):\r\n arr.append(rv.pmf(num))\r\n\r\n ax = fig.add_subplot(111)\r\n\r\n try:\r\n ax.cla()\r\n #print(\"clear self.cbar !\")\r\n except:\r\n pass\r\n #print(\"fail to clear self.cbar !\")\r\n \r\n ax.hist(Array1 , bins, normed=True, label = \"Data distribution\") \r\n ax.plot(nList, BoseEinstein(self.firstOrdImaging[y_location, x_location], Nmax), label =\"BoseEinstein distribution\")\r\n ax.plot(arr, linewidth=2.0, label =\"Possion distribution\")\r\n ax.set_title(\"Pixel Position({},{}); <$I$>:{}\".format(x_location , y_location, self.firstOrdImaging[y_location, x_location]), fontdict=font)\r\n \r\n ax.text(22, .08, r\"g2:{}\".format(g2), fontdict=font)\r\n ax.legend() \r\n \r\n fig.savefig('PixelPosition({},{})PhotDist.eps'.format(x_location , y_location), format='eps', dpi=300)\r\n plt.close()",
"def create_curve(self):\n self._define_amplitude()\n self._define_width()\n self._define_horizontal()\n self._cache_values()\n print(self)",
"def plot_koeppen(beck, ax):\n alpha=0.6\n # Create the right colros for Koeppen Geiger\n climate_dict = {\"1\": \"Tropical, rainforest\" , \"2\": \"Tropical, monsoon\", \"3\": \"Tropical, savannah\",\n \"4\": \"Arid, desert, hot\", \"5\": \"Arid, desert, cold\", \"6\": \"Arid, steppe, hot\",\n \"7\": \"Arid, steppe, cold\", \"8\": \"Temperate, dry summer, hot summer\", \n \"9\": \"Temperate, dry summer, warm summer\", \"10\": \"Temperate, dry summer, cold summer\",\n \"11\": \"Temperate, dry winter, hot summer\", \"12\": \"Temperate, dry winter, warm summer\",\n \"13\": \"Temperate, dry winter, cold summer\", \"14\": \"Temperate, no dry season, hot summer\",\n \"15\": \"Temperate, no dry season, warm summer\", \"16\": \"Temperate, no dry season, cold summer\",\n \"17\": \"Cold, dry summer, hot summer\", \"18\": \"Cold, dry summer, warm summer\",\n \"19\": \"Cold, dry summer, cold summer\", \"20\": \"Cold, dry summer, very cold winter\",\n \"21\": \"Cold, dry winter, hot summer\", \"22\": \"Cold, dry winter, warm summer\", \n \"23\": \"Cold, dry winter, cold summer\", \"24\": \"Cold, dry winter, very cold winter\",\n \"25\": \"Cold, no dry season, hot summer\", \"26\": \"Cold, no dry season, warm summer\",\n \"27\": \"Cold, no dry season, cold summer\", \"28\": \"Cold, no dry season, very cold winter\",\n \"29\": \"Polar, tundra\", \"30\": \"Polar, frost\"}\n color_dict = {\"1\": \"[0 0 255]\" , \"2\": \"[0 120 255]\", \"3\": \"[70 170 250]\",\n \"4\": \"[255 0 0]\", \"5\": \"[255 150 150]\", \"6\": \"[245 165 0]\",\n \"7\": \"[255 220 100]\", \"8\": \"[255 255 0]\", \n \"9\": \"[200 200 0]\", \"10\": \"[150 150 0]\",\n \"11\": \"[150 255 150]\", \"12\": \"[100 200 100]\",\n \"13\": \"[50 150 50]\", \"14\": \"[200 255 80]\",\n \"15\": \"[100 255 80]\", \"16\": \"[50 200 0]\",\n \"17\": \"[255 0 255]\", \"18\": \"[200 0 200]\",\n \"19\": \"[150 50 150]\", \"20\": \"[150 100 150]\",\n \"21\": \"[170 175 255]\", \"22\": \"[90 120 220]\", \n \"23\": \"[75 80 180]\", \"24\": \"[50 0 135]\",\n \"25\": \"[0 255 255]\", \"26\": \"[55 200 255]\",\n \"27\": \"[0 125 125]\", \"28\": \"[0 70 95]\",\n \"29\": \"[178 178 178]\", \"30\": \"[102 102 102]\"}\n for key in color_dict.keys():\n color = color_dict[key]\n color = color.replace(\"[\",\"\").replace(\"]\",\"\").split(\" \")\n color = [int(val)/255 for val in color] +[1]\n color_dict[key] = color\n \n climate_to_color = {climate_dict[key] : value for key, value in color_dict.items()}\n \n beck[\"Climatic Regions\"] = beck[\"RASTERVALU\"].astype(str)\n beck.replace({\"Climatic Regions\":climate_dict}, inplace=True)\n \n def color_for_label(label):\n return [climate_to_color[x] for x in label]\n \n # As percentag\n #df = beck.groupby([\"gauge_clus\",'Climatic Regions']).size().groupby(level=0).apply(\n # lambda x: 100 * x / x.sum()).unstack()\n # Group the dataframe\n df = beck.groupby([\"gauge_clus\",'Climatic Regions']).size().unstack()\n # Plotting\n ax = df.plot(kind=\"bar\", stacked=True, color=color_for_label(df.columns.values), alpha=1, ax = ax, zorder=4)\n # ax.set_title(\"a) Membership of Koeppen-Geiger clusters (Beck et al. 
(2018)) in the hydrological clusters\", loc=\"left\", alpha=alpha)\n ax.set_xlabel(\"Hydrological Cluster\", alpha=alpha)\n ax.set_ylabel(\"Number of Catchments\", alpha=alpha)\n legend = ax.legend(ncol=2, title=\"Climatic Cluster\")\n for text in legend.get_texts():\n text.set_color(\"grey\")\n legend.get_title().set_color(\"grey\")\n # Make it nicer\n for spine in ax.spines.values():\n spine.set_visible(False)\n ax.yaxis.grid(True, color=\"lightgrey\", zorder=0)\n plt.setp(ax.get_yticklabels(), alpha=alpha)\n plt.setp(ax.get_xticklabels(), alpha=alpha, rotation=0)\n ax.tick_params(axis=u'both', which=u'both',length=0)",
"def _showcalibrationscreen(self, showtext=FALSE):\r\n maxX, maxY = self.START_RES\r\n bandWidth = 10\r\n blue = (0,0,255)\r\n red = (112,0,0)\r\n brightred = (255,0,0)\r\n green = (0,255,0)\r\n centerRE = (maxX/2),(maxY/2-35)/2\r\n centerLE = centerRE[0],centerRE[1]+(maxY/2 + 35)\r\n #why not fill with blue, draw red on.?\r\n #put the red on-- using avg of vals in _drawBackground()\r\n self.dscreen.fill(brightred) #2003-01-24-1101 now fill with red, the average of the two check values.\r\n #draw bands, over-under.\r\n \r\n yExtent = (maxY/2)-35\r\n #need overunder display.\r\n #draw on surface.\r\n #draw surface on screen 2x\r\n #also draw crosshairs 2x (long, narrow rects)\r\n myHorizCrossHair = pygame.Rect(0,0,maxX,1)\r\n myVertCrossHair = pygame.Rect(0,0,1,yExtent)\r\n \r\n #init rects and get them the right size. \r\n \r\n #rflist=[.04,.1,.12,.14] #reductionfactor\r\n rflist=[.04,.1,.12] #reductionfactor\r\n #colorlist=[green,red,blue,red]\r\n colorlist=[green,red,blue]\r\n \r\n def returnScaledCenteredColoredRect(reductionFactor,color): \r\n myrect = pygame.Rect(0,0,maxX*(1-reductionFactor),yExtent*(1-reductionFactor))\r\n myrect.center = centerRE\r\n return(color,myrect)\r\n \r\n rectlist = map(returnScaledCenteredColoredRect,rflist,colorlist)\r\n \r\n myHorizCrossHair.topleft = 0,yExtent/2-1\r\n myVertCrossHair.topleft = maxX/2-1,0\r\n \r\n for rect in rectlist:\r\n self.dscreen.fill(rect[0],rect[1])\r\n self.dscreen.fill((0,0,0),myHorizCrossHair)\r\n self.dscreen.fill((0,0,0),myVertCrossHair)\r\n\r\n \r\n for rect in rectlist:\r\n rect[1].center = centerLE\r\n \r\n myHorizCrossHair.topleft = 0,yExtent/2-1\r\n myHorizCrossHair.move_ip(0,maxY/2 + 35)\r\n myVertCrossHair.move_ip(0,maxY/2 + 35)\r\n\r\n for rect in rectlist:\r\n self.dscreen.fill(rect[0],rect[1])\r\n \r\n self.dscreen.fill((0,0,0),myHorizCrossHair)\r\n self.dscreen.fill((0,0,0),myVertCrossHair)\r\n\r\n\r\n if showtext:\r\n my_string = 'Adjust your position so you can see all of the blue ring and none of the red ring.\\n\\nPress [enter] to continue.' \r\n self._displayMessage(width=300,height=125,yPos=325,displayString=my_string,textColor=(216,216,216))\r\n else:\r\n pygame.display.update()\r\n \r\n pygame.event.get() #clear event queue, otherwise, was taking a long time and multiple enters could be pressed, bypassing calibration screen.\r\n while 1:\r\n event = pygame.event.wait()\r\n if (event.type == KEYDOWN) and (event.key == K_RETURN): \r\n break"
] | [
"0.60801697",
"0.5949837",
"0.58277285",
"0.5798766",
"0.5737879",
"0.570477",
"0.569897",
"0.56542355",
"0.56062794",
"0.55953866",
"0.5563266",
"0.5561558",
"0.5551439",
"0.5549513",
"0.55401355",
"0.55340844",
"0.55340576",
"0.5533128",
"0.54531765",
"0.5450464",
"0.54344773",
"0.54312766",
"0.5405897",
"0.54030055",
"0.5394492",
"0.5386836",
"0.53862554",
"0.53808576",
"0.5377389",
"0.5357304"
] | 0.7580857 | 0 |
Get a list of locations given the input parameters. Specify a search area by a radius around a latitude and longitude, and optionally filter for specific systems. Each location will be a GeoJSON Feature, aggregated into a GeoJSON FeatureCollection. | def location_search(self, latitude, longitude, radius_km, system_ids=None):
path = f'{self.BIKE_ENDPOINT}location?latitude={latitude}&longitude={longitude}&radius_km={radius_km}&{self.secret_key}'
response = requests.get(path).json()
self.check_api_key(response)
return response | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def features_search(df, type_, keywords):\n PLACES_KEY = os.environ[\"PLACES_KEY\"]\n output_file = \"json\"\n radius = \"1500\"\n lst = []\n\n for i in range(len(df)):\n coor = df[\"latitude\"][i].astype(str) + \", \" + df[\"longitude\"][i].astype(str)\n url = \"https://maps.googleapis.com/maps/api/place/nearbysearch/\"+ output_file +\"?location=\"+coor +\"&radius=\" +radius+ \"&type=\"+type_+\"&keyword=\"+keywords + \"&key=\"+ PLACES_KEY\n res = requests.get(url)\n data = res.json()\n lst.append(len(data))\n \n return lst",
"def location_search(self, lat: float, lng: float) -> List[Location]:\n params = {\n \"latitude\": lat,\n \"longitude\": lng,\n # rankToken=c544eea5-726b-4091-a916-a71a35a76474 - self.uuid?\n # fb_access_token=EAABwzLixnjYBABK2YBFkT...pKrjju4cijEGYtcbIyCSJ0j4ZD\n }\n result = self.private_request(\"location_search/\", params=params)\n locations = []\n for venue in result[\"venues\"]:\n if \"lat\" not in venue:\n venue[\"lat\"] = lat\n venue[\"lng\"] = lng\n locations.append(extract_location(venue))\n return locations",
"def get_all_locations(self):",
"def search_nearby(self, fields: dict) -> list[dict]:\r\n results: list = []\r\n\r\n if \"location\" not in fields.keys():\r\n geolocate: dict = self.get_current_locate()\r\n fields[\"location\"] = geolocate[\"location\"]\r\n\r\n if \"radius\" not in fields.keys():\r\n fields[\"radius\"] = 1000\r\n\r\n fields[\"type\"] = \"restaurant\"\r\n\r\n for i in range(1):\r\n places = self.gmaps.places_nearby(**fields)\r\n if places[\"status\"] != \"OK\":\r\n continue\r\n results.extend(places[\"results\"])\r\n try:\r\n # Update attribute to get next 20 places.\r\n fields = {\r\n \"page_token\": places[\"next_page_token\"]\r\n }\r\n # 連続実行するとエラー(Google側の仕様)\r\n time.sleep(2)\r\n except KeyError:\r\n # 最大で60件まで それ以上検索すると next_page_token がなくなる\r\n break\r\n\r\n return results",
"def get_all_locations():\n rs = run_query('''select * from zlrz_office_location''')\n return [] if rs is None else list(map(lambda t: Location(t[1], t[2], t[3], t[4], t[5], t[0]), rs))",
"def search(bearer_token, price, location, categories, radius, openat):\n\n RESTAURANT_LIMIT = 3\n\n url_params = {\n 'term': 'restaurants',\n 'location': location.replace(' ', '+'),\n 'limit': RESTAURANT_LIMIT,\n 'open_at': openat,\n 'price': price,\n 'categories': categories,\n 'radius': radius\n }\n return request(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)",
"def get(self):\n parser = reqparse.RequestParser()\n parser.add_argument('lat', type=float)\n parser.add_argument('lng', type=float)\n parser.add_argument('radius', type=int)\n args = parser.parse_args()\n lat = args.get('lat', None)\n lng = args.get('lng', None)\n radius = args.get('radius', None)\n\n if lat and lng and radius:\n trucks = db.trucks.find({\"coordinates\": {\"$geoWithin\": \\\n {\"$centerSphere\": [[lng, lat], meter_to_radian(radius)]}}})\n else:\n trucks = db.trucks.find({})\n return list(trucks)",
"def query_restaurants_by_location(collection, radius, lat, lon):\n results = collection.find(\n {'location': {'$nearSphere': {'$geometry': {'type': \"Point\",\n 'coordinates': [float(lon), float(lat)]},\n '$maxDistance': radius}}}, {\"_id\": 0})\n\n return results",
"def get_restaurants(term, lat=\"37.788744\", lon=\"-122.411587\", radius=\"805\"):\n\n # Create OAuth2 token and store in session (we don't need to get a new one\n # for every API request)\n\n access_token = get_access_token()\n\n if not SEEDING:\n if \"access_token\" not in session:\n session[\"access_token\"] = access_token\n\n base_url = \"https://api.yelp.com/v3/businesses/search\"\n\n # Create a Unix timestamp for current day at 1:00 PM\n year = datetime.now().year\n day = datetime.now().day\n month = datetime.now().month\n open_time = datetime(year, month, day, 13, 0, 0)\n\n unix_time = time.mktime(open_time.timetuple())\n unix_time_trunc = int(unix_time)\n\n # Set parameters for our request to the business search API.\n parameters = {\n \"latitude\": lat,\n \"longitude\": lon,\n \"radius\": radius,\n \"term\": term,\n \"categories\": \"restaurants\",\n \"limit\": 24,\n \"price\": \"1,2,3\",\n \"sort_by\": \"distance\",\n \"open_at\": unix_time_trunc,\n }\n\n # FIXME: Store resulting JSON data in database...\n\n # Fetch all restaurants that fit these parameters and capture the response.\n response = requests.get(url=base_url,\n params=parameters,\n headers={\n 'Authorization': 'Bearer {token}'.format(\n token=access_token)\n })\n\n # Extract just the business info.\n return response.json()['businesses']",
"def get_locations(self) -> list:\n return self.client.locations.get_all()",
"def restaurants_search() -> str:\n args = request.args\n if 'q' in args and 'lat' in args and 'lon' in args: # check if correct params are given\n if len(args['q']) > 0 and len(args['lat']) > 0 and len(args['lon']) > 0: # check that\n # params satisfy length requirements\n try:\n q = args['q']\n location = (float(args['lat']), float(args['lon']))\n except ValueError: # Expects Float type\n return make_response('Malformed request', 404)\n restaurant_object_matches = restaurants.search(q, location)\n return jsonify(restaurant_object_matches)\n return make_response('Malformed request', 404)",
"def geolocate(\n self, lat: float, lon: float, radius_meters: int = 100, count: int = SUGGESTION_COUNT\n ) -> list:\n url = f\"{self.suggestions_url}/suggestions/api/4_1/rs/geolocate/address\"\n data = {\"lat\": lat, \"lon\": lon, \"radius_meters\": radius_meters, \"count\": count}\n response = self._post(url, data)\n return response[\"suggestions\"]",
"def get_locations(self):\n\n locations = []\n\n params = self.request.query_params\n\n if 'locations[]' in params:\n locations = params.getlist('locations[]', [])\n elif 'location' in params:\n locations = [params.get('location', None)]\n\n if type(locations) not in [list, tuple]:\n locations = [locations]\n\n valid_ids = []\n\n for loc in locations:\n try:\n valid_ids.append(int(loc))\n except (ValueError):\n pass\n\n # List of StockLocation objects which match provided values\n valid_locations = StockLocation.objects.filter(pk__in=valid_ids)\n\n return valid_locations",
"def search(api_key, term, location, categories, offset, price):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': int(params['limit']),\n 'offset': offset,\n 'categories': categories,\n 'price':price\n }\n \n find_locs = request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)\n \n return json_normalize(find_locs['businesses'])",
"def search(location=DEFAULT_LOCATION, api_key=API_KEY):\n latitude, longtitude = location[0], location[1]\n url_params = {\"page\": \"1\", \"lon\": longtitude, \"lat\": latitude, \"distance\": \"5\"}\n\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)['result']['data']",
"def _get_locations(self):\n data = self._get(\"/locations\")\n if \"locations\" not in data:\n _LOGGER.error(\"Did not find locations\")\n raise AirthingsError(data)\n return [AirthingsLocation(d, self) for d in data[\"locations\"]]",
"def list_locations(\n self,\n ) -> Callable[\n [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_locations\" not in self._stubs:\n self._stubs[\"list_locations\"] = self.grpc_channel.unary_unary(\n \"/google.cloud.location.Locations/ListLocations\",\n request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,\n response_deserializer=locations_pb2.ListLocationsResponse.FromString,\n )\n return self._stubs[\"list_locations\"]",
"def list_locations(\n self,\n ) -> Callable[\n [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_locations\" not in self._stubs:\n self._stubs[\"list_locations\"] = self.grpc_channel.unary_unary(\n \"/google.cloud.location.Locations/ListLocations\",\n request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,\n response_deserializer=locations_pb2.ListLocationsResponse.FromString,\n )\n return self._stubs[\"list_locations\"]",
"def get_all_locations(fields=None):\n fields = fields or [Locations.id, Locations.city, Locations.country,\n Locations.latitude, Locations.longitude]\n query = Locations.query.order_by(Locations.country.asc(), Locations.city.asc()) \\\n .add_columns(*fields)\n logging.debug('Query executed: %s' % query)\n return query",
"def find(self, request):\n try:\n lat = float(request.GET.get('lat', ''))\n lon = float(request.GET.get('lon', ''))\n except ValueError:\n return Response({'detail': 'wrong latitude or longitude value'},\n status.HTTP_400_BAD_REQUEST)\n point = Point(lon, lat)\n areas = ServiceArea.objects.filter(area__bbcontains=point)\n serializer = SearchServiceAreaSerializer(areas, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)",
"def get_service_locations(self):\n url = URLS['servicelocation']\n headers = {\"Authorization\": \"Bearer {}\".format(self.access_token)}\n r = requests.get(url, headers=headers)\n r.raise_for_status()\n return r.json()",
"def nearby(cls, lat: float, lon: float, radius: float) -> List[Place]:\n formatted_point = cls._format_point_postgis(lat, lon)\n distance = cls._postgis_distance(formatted_point)\n query = (\n cls.query.with_entities(cls, distance)\n .filter(distance < radius)\n .order_by(distance)\n .limit(DEFAULT_LIMIT)\n .all()\n )\n return cls._set_distances(query)",
"def search(api_key, term, location, offset, RADIUS_SIZE):\n #DEBUG\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'offset': offset,\n 'location': location.replace(' ', '+'),\n 'radius': RADIUS_SIZE,\n 'limit': 50\n }\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)",
"def get_nearby_location(request):\n latitude, longitude = latlang(request)\n point = Point(float(longitude), float(latitude), srid=4326)\n locations = Location.objects.filter(point__distance_lte=(point, D(km=100)))\n return JsonResponse(json.dumps([serializer(location) for location in locations]), safe=False)",
"def search(lat, lng, distance):\r\n\r\n url = 'https://api.foursquare.com/v2/venues/explore?ll=%s,%s&intent=browse&radius=%s&limit=50&categoryId=%s&client_id=%s&client_secret=%s&v=%s' % (lat, lng, distance, CATEGORY_ID, CLIENT_ID, CLIENT_SECRET, time.strftime(\"%Y%m%d\"))\r\n venue_list = []\r\n\r\n try:\r\n data = make_request(url)\r\n\r\n for item in data['response']['groups'][0]['items']:\r\n venue = item['venue']\r\n venue_list.append(Business(venue['name'],\r\n venue['location']['address'],\r\n venue['rating'],\r\n venue['ratingSignals'],\r\n venue['stats']['checkinsCount']))\r\n except Exception, e:\r\n print e\r\n\r\n return venue_list",
"def fetch(self, radius: int) -> dict:\n # convert radius integer to string\n radius: str = f\"{radius}mi\" \n # set empty dict\n geocodes: dict = {}\n # iterate through instantiated locations list\n # set search parameters to pass to callGoogle method\n for location in self.locations:\n\n params: dict = {\n\n 'address': location,\n 'sensor': 'false',\n 'key': self.__api_key['google_key']\n\n }\n # define key value pairs | city - geocode\n geocodes[location]: str = f\"{callGoogle(endpoint=self.__api_endpoint, params=params)},{radius}\"\n\n return geocodes",
"async def async_query_locations(self,\n zip_code: str = None,\n latitude: str = None,\n longitude: str = None):\n\n location_url = f\"{API_BASE_URL}/locations\"\n headers = {\n \"Accept\": \"application/json\"\n }\n params = {}\n if latitude and longitude:\n params[\"filter.lat.near\"] = latitude\n params[\"filter.lon.near\"] = longitude\n elif zip_code:\n params[\"filter.zipCode.near\"] = zip_code\n else:\n params['filter.lat.near'] = str(self._hass.config.latitude)\n params['filter.lon.near'] = str(self._hass.config.longitude)\n\n try:\n location_resp = await self._oauth_session.async_request('GET',\n location_url,\n params=params,\n headers=headers)\n location_resp.raise_for_status()\n json_response = await location_resp.json()\n locations = [\n {\n 'locationId': item['locationId'],\n 'name': item['name']\n }\n for item\n in json_response['data']]\n return locations\n except Exception as ex:\n _LOGGER.error(\"Unable to retrieve locations: %s\",\n str(ex))\n return None",
"def extract_locations(self):\n default_pos_columns = common_cfg.coord_col_names\n if set(default_pos_columns).issubset(set(self._raw_data.columns)):\n print('Location data found')\n # check and drop units outside provided city boundary\n geometry = [shapely.geometry.Point(xy) for xy in zip(\n self._raw_data[default_pos_columns[0]], # Long\n self._raw_data[default_pos_columns[1]])] # Lat\n b_within_boundary = np.array(list(map(\n lambda p: p.within(self.model_city.convhull), geometry)))\n\n if not all(b_within_boundary):\n print('%s -- dropping %i units outside city.' %\n (self.servicetype,\n sum(np.bitwise_not(b_within_boundary))))\n self._raw_data = self._raw_data.iloc[\n b_within_boundary, :].reset_index()\n\n # store geolocations as geopy Point\n locations = [geopy.Point(yx) for yx in zip(\n self._raw_data[default_pos_columns[1]], # Lat\n self._raw_data[default_pos_columns[0]])] # Long\n\n propert_data = self._raw_data.drop(default_pos_columns, axis=1)\n\n else:\n raise NotImplementedError('Locations not found - not implemented!')\n\n return propert_data, locations",
"def search(latit, longit, dist, num_results):\n API_PRIVATE = os.environ.get(\"TOM_TOM_PRIVATE\")\n apiParameters = {\n 'key': API_PRIVATE,\n 'typeahead': True,\n 'limit': num_results,\n 'ofs': 0,\n 'countrySet': 'US',\n 'lat': latit,\n 'lon': longit,\n 'radius': dist,\n 'categorySet': '9361023, 7332005, 9361066, 9361051, 9361009'\n }\n apiQuery = str('https://api.tomtom.com/search/2/categorySearch/.json');\n\n response = requests.get(apiQuery, params=apiParameters)\n while True:\n try:\n jsonResponse = response.json()\n break\n except:\n response = requests.get(apiQuery, params=apiParameters)\n\n latitude_lst = []\n longitude_lst = []\n for eachStore in jsonResponse['results']:\n latitude_lst.append(eachStore['position']['lat'])\n longitude_lst.append(eachStore['position']['lon'])\n final_lat = []\n final_lon = []\n for i in range(len(latitude_lst)):\n repeat = False\n for j in range(len(final_lat)):\n if final_lat[j] == latitude_lst[i] and final_lon[j] == longitude_lst[i]:\n repeat = True\n break\n if repeat == False:\n final_lat.append(latitude_lst[i])\n final_lon.append(longitude_lst[i])\n return final_lat, final_lon",
"def suggestLocationsAtCoordinate(self, latitude: Union[int, float], longitude: Union[int, float], radiusKm: Union[int, float], limitToCities: bool = False, lang: str = \"eng\", count: int = 20, returnInfo: ReturnInfo = ReturnInfo(), **kwargs):\n assert isinstance(latitude, (int, float)), \"The 'latitude' should be a number\"\n assert isinstance(longitude, (int, float)), \"The 'longitude' should be a number\"\n params = { \"action\": \"getLocationsAtCoordinate\", \"lat\": latitude, \"lon\": longitude, \"radius\": radiusKm, \"limitToCities\": limitToCities, \"count\": count, \"lang\": lang }\n params.update(returnInfo.getParams())\n params.update(kwargs)\n return self.jsonRequest(\"/api/v1/suggestLocationsFast\", params)"
] | [
"0.69432235",
"0.6408032",
"0.63840026",
"0.63622475",
"0.6217564",
"0.61683476",
"0.6148148",
"0.61463386",
"0.61434495",
"0.61077005",
"0.60686666",
"0.6068093",
"0.60368913",
"0.5989424",
"0.5938921",
"0.5931571",
"0.5902933",
"0.5902933",
"0.5858726",
"0.58187765",
"0.5768339",
"0.5762422",
"0.5751146",
"0.5668447",
"0.5667935",
"0.56581426",
"0.5650998",
"0.56372803",
"0.5628965",
"0.5625955"
] | 0.6676069 | 1 |
Provide quotes for renting a bike via the different types of passes that can be purchased. Quotes can be obtained for all systems or for specific systems. Quotes may be associated with specific regions within a system. Please keep in mind that both the quotes and the usage fees associated with them are estimates. | def rental_quote_info(self, system_ids=None):
path = f'{self.BIKE_ENDPOINT}quote?{self.secret_key}'
response = requests.get(path).json()
self.check_api_key(response)
return response | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def on_book(context, quote_type, quote):\n date, filterTime = str(context.config.trading_date), int(quote.int_time)\n # print(quote.symbol, quote.int_time)\n\n if ((filterTime > 93000000) and (filterTime < 113000000)) or (\n (filterTime > 130000000) and (filterTime < 150000000)):\n # print (\"Trading Time\")\n if str(quote.symbol).__contains__(\"IH\"):\n context.dic[\"IH\"] = [quote.bp_array[0], quote.ap_array[0]]\n context.symboldic[\"IH\"] = quote.symbol\n if str(quote.symbol).__contains__(\"IC\"):\n context.dic[\"IC\"] = [quote.bp_array[0], quote.ap_array[0]]\n context.symboldic[\"IC\"] = quote.symbol\n if len(context.dic.keys()) < 2:\n return\n \"\"\"\n if len(context.dic.keys()) >= 2:\n sql = \"`quoteData insert (%s;%s;%s;%s;%s;%s;%s)\"\n time_sql = '{y+ \"T\"$-9#\"00000000\",string x}[%s;%s]'\n date_time = time_sql % (filterTime, \"%s.%s.%s\" % (date[0:4], date[4:6], date[6:8]))\n context.q.sync(date_time)\n # print(context.dic[\"IC\"][0]*200 -context.dic[\"IH\"][1]*300*2)\n feed_quote = sql % (date_time, context.dic[\"IH\"][0], context.dic[\"IH\"][1], context.dic[\"IC\"][0], context.dic[\"IC\"][1], context.dic[\"IC\"][0]*200 -context.dic[\"IH\"][1]*300*2, context.dic[\"IC\"][1]*200 -context.dic[\"IH\"][0]*300*2)\n context.q.sync(feed_quote)\n\n context.q.sync(\n \"CombinedMainContract: select Date:last Date,BidPrice1Open:first PairBidPrice,BidPrice1High:max PairBidPrice,BidPrice1Low:min PairBidPrice, BidPrice1Close:last PairBidPrice,BidVol1:100,AskPrice1Open:first PairAskPrice,AskPrice1High:max PairAskPrice,AskPrice1Low:min PairAskPrice, AskPrice1Close:last PairAskPrice,AskVol1:last 100,LegOneBidPrice1:last LegOneBidPrice1, LegOneAskPrice1:last LegOneAskPrice1, LegTwoBidPrice1:last LegTwoBidPrice1, LegTwoAskPrice1: last LegTwoAskPrice1 by %s xbar Date.second from `quoteData;\" % (\n context.kindleInterval))\n context.q.sync(\n \"delete date, second from `CombinedMainContract;delete from `CombinedMainContract where Date.second < 09:30:00;delete from `CombinedMainContract where Date.second > 11:30:00, Date.second < 13:00:00;delete from `CombinedMainContract where Date.second > 15:00:00;update TrueRange: {max(x;y;z)}'[(AskPrice1High - BidPrice1Low);(AskPrice1High - (prev BidPrice1Close));((prev AskPrice1High) - BidPrice1Low)] from `CombinedMainContract;\")\n context.q.sync(\"update N: mavg[%s;TrueRange] from `CombinedMainContract;\" % (context.volatilityRange))\n context.q.sync(\"update ShortEntry: prev (%s mmin BidPrice1Low), LongEntry: prev (%s mmax AskPrice1High) from `CombinedMainContract;\"%(context.breakRange, context.breakRange))\n\n Signal = context.q.sync(\"select count Date from CombinedMainContract\")[0]\n \n if (Signal[0] > context.kindleNumber):\n context.kindleNumber = Signal[0]\n PairDataBar = context.q.sync(\"-2#select Date.minute, BidPrice1Close, AskPrice1Close, ShortEntry, LongEntry, N from CombinedMainContract\")[0]\n context.PairDataBarDate = PairDataBar[0]\n context.PairDataBarBidPrice1Close = PairDataBar[1]\n context.PairDataBarAskPrice1Close = PairDataBar[2]\n context.PairDataBarShortEntry = PairDataBar[3]\n context.PairDataBarLongEntry = PairDataBar[4]\n context.PairDataBarN = PairDataBar[5]\n if (context.PairDataBarBidPrice1Close < context.LocalLow):\n context.UpDrawBack = 0.0\n context.LocalLow = context.PairDataBarBidPrice1Close\n elif (context.PairDataBarBidPrice1Close > context.LocalLow):\n context.UpDrawBack = context.PairDataBarBidPrice1Close - context.LocalLow\n\n if (abs(context.PositionAddedTime) > 0 and (context.PairDataBarDate > 898)):\n 
context.PositionClearPrice = context.dic[\"IC\"][1]\n # print(\"PosClear: \" + str(context.dic[\"IC\"][1]))\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n context.PositionAddedTime = 0\n # sendOrderClose PositionAddedTime Amount Contract\n if (context.PositionAddedTime == -1):\n print(context.LegOnePositionEntryPrice[1] - context.PositionClearPrice)\n elif (context.PositionAddedTime == -2):\n print(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[\n 2] - 2 * context.PositionClearPrice)\n elif (context.PositionAddedTime == -3):\n print(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] - 3 * context.PositionClearPrice)\n elif (context.PositionAddedTime == -4):\n print(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] + context.LegOnePositionEntryPrice[\n 4] - 4 * context.PositionClearPrice)\n context.LegOnePositionEntryPrice = {}\n context.PositionEntryPrice = {}\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n context.PositionAddedTime = 0\n else:\n if ((abs(context.PositionAddedTime) > 0) and (context.UpDrawBack > context.NStoplossPositionParameter * context.PairDataBarN)):\n # print(\"PosClear: \" + str(context.dic[\"IC\"][1]))\n context.PositionClearPrice = context.dic[\"IC\"][1]\n if (context.PositionAddedTime == -1):\n print (context.LegOnePositionEntryPrice[1] - context.PositionClearPrice)\n elif (context.PositionAddedTime == -2):\n print (context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2]- 2 * context.PositionClearPrice)\n elif (context.PositionAddedTime == -3):\n print (context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] + context.LegOnePositionEntryPrice[3]- 3 * context.PositionClearPrice)\n elif (context.PositionAddedTime == -4):\n print (context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] + context.LegOnePositionEntryPrice[3] + context.LegOnePositionEntryPrice[4] - 4 * context.PositionClearPrice)\n context.LegOnePositionEntryPrice = {}\n context.PositionEntryPrice = {}\n #context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n context.PositionAddedTime = 0\n context.q.sync(\"update Position:0 from `CombinedMainContract where Date = max Date\")\n if ((abs(context.PositionAddedTime) == 3) and context.PositionTimesParameter >= 4 and context.PairDataBarBidPrice1Close < context.PositionEntryPrice[1] - 3 * context.NEntryParameter*context.Nvalue):\n context.PositionAddedTime = -4\n context.LegOnePositionEntryPrice[4] = context.dic[\"IC\"][0]\n context.PositionEntryPrice[4] = context.PairDataBarBidPrice1Close\n # print(\"Pos4: \" + str(context.dic[\"IC\"][0]))\n #context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][0], 1, Direction.SELL, OpenClose.OPEN)\n context.q.sync(\"update Position:-4 from `CombinedMainContract where Date = max Date\")\n if ((abs(context.PositionAddedTime) == 2) and context.PositionTimesParameter >= 3 and context.PairDataBarBidPrice1Close < context.PositionEntryPrice[1] - 2 * context.NEntryParameter*context.Nvalue):\n context.PositionAddedTime = -3\n context.LegOnePositionEntryPrice[3] = context.dic[\"IC\"][0]\n 
context.PositionEntryPrice[3] = context.PairDataBarBidPrice1Close\n # print(\"Pos3: \" + str(context.dic[\"IC\"][0]))\n context.q.sync(\"update Position:-3 from `CombinedMainContract where Date = max Date\")\n #context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][0], 1, Direction.SELL, OpenClose.OPEN)\n if ((abs(context.PositionAddedTime) == 1) and context.PositionTimesParameter >= 2 and context.PairDataBarBidPrice1Close < context.PositionEntryPrice[1] - 1 * context.NEntryParameter*context.Nvalue):\n context.PositionAddedTime = -2\n context.LegOnePositionEntryPrice[2] = context.dic[\"IC\"][0]\n context.PositionEntryPrice[2] = context.PairDataBarBidPrice1Close\n # print(\"Pos2: \" + str(context.dic[\"IC\"][0]))\n #context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][0], 1, Direction.SELL, OpenClose.OPEN)\n context.q.sync(\"update Position:-2 from `CombinedMainContract where Date = max Date\")\n if ((abs(context.PositionAddedTime) == 0) and context.PositionTimesParameter >= 1 and context.PairDataBarBidPrice1Close < context.PairDataBarShortEntry - 0 * context.NEntryParameter*context.Nvalue):\n context.PositionAddedTime = -1\n context.LegOnePositionEntryPrice[1] = context.dic[\"IC\"][0]\n context.PositionEntryPrice[1] = context.PairDataBarBidPrice1Close\n # print(\"Pos1: \" + str(str(context.dic[\"IC\"][0])))\n context.Nvalue = context.PairDataBarN\n context.q.sync(\"update Position:-1 from `CombinedMainContract where Date = max Date\")\n #context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][0], 1, Direction.SELL, OpenClose.OPEN)\n\n \"\"\"\n \"\"\"\n if context.long_position(quote.symbol) > 0 and not context.ORDER_SENT_FLAG:\n context.order.send_single_order(\n quote.symbol, quote.bp_array[0], 5, Direction.SELL, OpenClose.CLOSE\n )\n context.ORDER_SENT_FLAG = True\n elif 90000000 < quote.int_time < 90500000 and not context.ORDER_SENT_FLAG:\n context.order.send_single_order(\n quote.symbol, quote.bp_array[0], 5, Direction.BUY, OpenClose.OPEN\n )\n context.ORDER_SENT_FLAG = True\n else:\n pass\n \"\"\"\n\n if len(context.dic.keys()) >= 2:\n sql = \"`quoteData insert (%s;%s;%s;%s;%s;%s;%s)\"\n time_sql = '{y+ \"T\"$-9#\"00000000\",string x}[%s;%s]'\n date_time = time_sql % (filterTime, \"%s.%s.%s\" % (date[0:4], date[4:6], date[6:8]))\n context.q.sync(date_time)\n # print(context.dic[\"IC\"][0]*200 -context.dic[\"IH\"][1]*300*2)\n feed_quote = sql % (\n date_time, context.dic[\"IH\"][0], context.dic[\"IH\"][1], context.dic[\"IC\"][0], context.dic[\"IC\"][1],\n context.dic[\"IC\"][0] * 200 - context.dic[\"IH\"][1] * 300 * 2,\n context.dic[\"IC\"][1] * 200 - context.dic[\"IH\"][0] * 300 * 2)\n context.q.sync(feed_quote)\n\n context.q.sync(\n \"CombinedMainContract: select Date:last Date,BidPrice1Open:first PairBidPrice,BidPrice1High:max PairBidPrice,BidPrice1Low:min PairBidPrice, BidPrice1Close:last PairBidPrice,BidVol1:100,AskPrice1Open:first PairAskPrice,AskPrice1High:max PairAskPrice,AskPrice1Low:min PairAskPrice, AskPrice1Close:last PairAskPrice,AskVol1:last 100,LegOneBidPrice1:last LegOneBidPrice1, LegOneAskPrice1:last LegOneAskPrice1, LegTwoBidPrice1:last LegTwoBidPrice1, LegTwoAskPrice1: last LegTwoAskPrice1 by %s xbar Date.second from `quoteData;\" % (\n context.kindleInterval))\n context.q.sync(\n \"delete date, second from `CombinedMainContract;delete from `CombinedMainContract where Date.second < 09:30:00;delete from `CombinedMainContract where Date.second > 11:30:00, Date.second < 13:00:00;delete from 
`CombinedMainContract where Date.second > 15:00:00;update TrueRange: {max(x;y;z)}'[(AskPrice1High - BidPrice1Low);(AskPrice1High - (prev BidPrice1Close));((prev AskPrice1High) - BidPrice1Low)] from `CombinedMainContract;\")\n context.q.sync(\"update N: mavg[%s;TrueRange] from `CombinedMainContract;\" % (context.volatilityRange))\n context.q.sync(\n \"update ShortEntry: prev (%s mmin BidPrice1Low), LongEntry: prev (%s mmax AskPrice1High) from `CombinedMainContract;\" % (\n context.breakRange, context.breakRange))\n\n Signal = context.q.sync(\"select count Date from CombinedMainContract\")[0]\n\n if (Signal[0] > context.kindleNumber):\n context.kindleNumber = Signal[0]\n PairDataBar = context.q.sync(\n \"-2#select Date.minute, BidPrice1Close, AskPrice1Close, ShortEntry, LongEntry, N from CombinedMainContract\")[\n 0]\n context.PairDataBarDate = PairDataBar[0]\n context.PairDataBarBidPrice1Close = PairDataBar[1]\n context.PairDataBarAskPrice1Close = PairDataBar[2]\n context.PairDataBarShortEntry = PairDataBar[3]\n context.PairDataBarLongEntry = PairDataBar[4]\n context.PairDataBarN = PairDataBar[5]\n if (context.PairDataBarAskPrice1Close > context.LocalHigh):\n context.DownDrawBack = 0.0\n context.LocalHigh = context.PairDataBarAskPrice1Close\n elif (context.PairDataBarAskPrice1Close < context.LocalHigh):\n context.DownDrawBack = context.LocalHigh - context.PairDataBarAskPrice1Close\n\n if (abs(context.PositionAddedTime) > 0 and (context.PairDataBarDate > 898)):\n context.PositionClearPrice = context.dic[\"IC\"][0]\n # print(\"PosClear: \" + str(context.dic[\"IC\"][1]))\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n\n # sendOrderClose PositionAddedTime Amount Contract\n print(\"PosClear: \" + str(context.dic[\"IC\"][0]))\n context.PositionClearPrice = context.dic[\"IC\"][0]\n if (context.PositionAddedTime == 1):\n print(-(context.LegOnePositionEntryPrice[1] - context.PositionClearPrice))\n elif (context.PositionAddedTime == 2):\n print(-(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[\n 2] - 2 * context.PositionClearPrice))\n elif (context.PositionAddedTime == 3):\n print(-(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] - 3 * context.PositionClearPrice))\n elif (context.PositionAddedTime == 4):\n print(-(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] + context.LegOnePositionEntryPrice[\n 4] - 4 * context.PositionClearPrice))\n context.PositionAddedTime = 0\n context.LegOnePositionEntryPrice = {}\n context.PositionEntryPrice = {}\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n context.PositionAddedTime = 0\n else:\n if ((abs(context.PositionAddedTime) > 0) and (\n context.DownDrawBack > context.NStoplossPositionParameter * context.PairDataBarN)):\n print(\"PosClear: \" + str(context.dic[\"IC\"][0]))\n context.PositionClearPrice = context.dic[\"IC\"][0]\n if (context.PositionAddedTime == 1):\n print(-(context.LegOnePositionEntryPrice[1] - context.PositionClearPrice))\n elif (context.PositionAddedTime == 2):\n print(-(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[\n 2] - 2 * context.PositionClearPrice))\n elif (context.PositionAddedTime == 3):\n print(-(context.LegOnePositionEntryPrice[1] + 
context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] - 3 * context.PositionClearPrice))\n elif (context.PositionAddedTime == 4):\n print(-(context.LegOnePositionEntryPrice[1] + context.LegOnePositionEntryPrice[2] +\n context.LegOnePositionEntryPrice[3] + context.LegOnePositionEntryPrice[\n 4] - 4 * context.PositionClearPrice))\n context.LegOnePositionEntryPrice = {}\n context.PositionEntryPrice = {}\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], abs(context.PositionAddedTime), Direction.BUY, OpenClose.CLOSE)\n context.PositionAddedTime = 0\n context.q.sync(\"update Position:0 from `CombinedMainContract where Date = max Date\")\n if ((abs(\n context.PositionAddedTime) == 3) and context.PositionTimesParameter >= 4 and context.PairDataBarAskPrice1Close >\n context.PositionEntryPrice[1] + 3 * context.NEntryParameter * context.Nvalue):\n context.PositionAddedTime = 4\n context.LegOnePositionEntryPrice[4] = context.dic[\"IC\"][1]\n context.PositionEntryPrice[4] = context.PairDataBarAskPrice1Close\n print(\"Pos4: \" + str(context.dic[\"IC\"][1]))\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], 1, Direction.SELL, OpenClose.OPEN)\n context.q.sync(\"update Position:4 from `CombinedMainContract where Date = max Date\")\n if ((abs(\n context.PositionAddedTime) == 2) and context.PositionTimesParameter >= 3 and context.PairDataBarAskPrice1Close >\n context.PositionEntryPrice[1] + 2 * context.NEntryParameter * context.Nvalue):\n context.PositionAddedTime = 3\n context.LegOnePositionEntryPrice[3] = context.dic[\"IC\"][1]\n context.PositionEntryPrice[3] = context.PairDataBarAskPrice1Close\n print(\"Pos3: \" + str(context.dic[\"IC\"][1]))\n context.q.sync(\"update Position:3 from `CombinedMainContract where Date = max Date\")\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], 1, Direction.SELL, OpenClose.OPEN)\n if ((abs(\n context.PositionAddedTime) == 1) and context.PositionTimesParameter >= 2 and context.PairDataBarAskPrice1Close >\n context.PositionEntryPrice[1] + 1 * context.NEntryParameter * context.Nvalue):\n context.PositionAddedTime = 2\n context.LegOnePositionEntryPrice[2] = context.dic[\"IC\"][1]\n context.PositionEntryPrice[2] = context.PairDataBarAskPrice1Close\n print(\"Pos2: \" + str(context.dic[\"IC\"][1]))\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], 1, Direction.SELL, OpenClose.OPEN)\n context.q.sync(\"update Position:2 from `CombinedMainContract where Date = max Date\")\n if ((abs(\n context.PositionAddedTime) == 0) and context.PositionTimesParameter >= 1 and context.PairDataBarAskPrice1Close > context.PairDataBarLongEntry + 0 * context.NEntryParameter * context.Nvalue):\n context.PositionAddedTime = 1\n context.LegOnePositionEntryPrice[1] = context.dic[\"IC\"][1]\n context.PositionEntryPrice[1] = context.PairDataBarAskPrice1Close\n print(\"Pos1: \" + str(str(context.dic[\"IC\"][1])))\n context.Nvalue = context.PairDataBarN\n context.q.sync(\"update Position:1 from `CombinedMainContract where Date = max Date\")\n # context.order.send_single_order(context.symboldic[\"IC\"], context.dic[\"IC\"][1], 1, Direction.SELL, OpenClose.OPEN)",
"async def quotes(self, ctx):\n\n\t\tawait self.message_leaderboard(ctx, \"quotes\")",
"def get_quote (self, symbols, fields=[]):\n \n # Ensure correctly-typed input\n if not utils.check(symbols):\n return {}\n \n # Correctly format Symbols, also store split up symbols\n if type(symbols) == type([]):\n # We were passed list\n fmt_symbols = ','.join(symbols)\n else:\n # We were passed string\n fmt_symbols = symbols\n symbols = symbols.split(',')\n \n \n # Correctly format Fields, also store split up fields\n if type(fields) == type([]):\n # We were passed list\n fmt_fields = ','.join(fields)\n else:\n # We were passed string\n fmt_fields = fields\n fields = fmt_fields.split(',')\n \n # For aesthetics...\n fmt_symbols = fmt_symbols.upper()\n \n \n # Assemble URL\n url = self.endpoints['base'] + 'market/ext/quotes.json'\n \n # Authenticate\n auth = self.create_auth()\n \n # Create request paramters according to how we need them\n req_params = { 'symbols':symbols }\n if fields != None:\n req_params['fids'] = fmt_fields\n \n # Create request \n auth = self.create_auth()\n results = requests.post(\\\n url,\n data=req_params,\n auth=auth\n ).json()\\\n ['response']['quotes']['quote']\n \n \n # Add symbols to output\n # ...why tf doesn't Ally include this in the quote? they usually send way too much\n if len(symbols) > 1:\n for i,sym in enumerate(symbols):\n results[i]['symbol'] = sym\n else:\n results['symbol'] = symbols[0]\n \n \n return results",
"def send_quote(self, p_quote, p_ask_out, p_bid_out, count):\n pass",
"def get_multi_quotes(access_token,tickers):\r\n quote_url = 'https://api.tdameritrade.com/v1/marketdata/quotes'\r\n\r\n #The header for getting a quote needs to define the input type (json)\r\n headers = {'Authorization':'Bearer {}'.format(access_token),\r\n 'Content-Type':'application/json'}\r\n\r\n #Pass in the symbols as parameters\r\n params = {'symbol':tickers}\r\n\r\n #Make the get request to TD Ameritrade\r\n quote_data_json = requests.get(url=quote_url,headers=headers,params=params)\r\n return quote_data_json.json()",
"def quotazione(update, context):\n update.message.reply_text('Quando un giocatore verrà chiamato, sarà considerata come base d’asta la sua quotazione attuale (QA al momento dell’asta) della lista di Fantacalcio. È necessario rimanere sempre con i crediti sufficienti per completare la propria rosa. ')",
"def GetQuote(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_quotes(self):\n # However ignore the 'true' autodetection setting.\n jscs_quotes = self.jscs_options.get('validateQuoteMarks')\n if isinstance(jscs_quotes, dict):\n jscs_quotes = jscs_quotes.get('mark')\n if jscs_quotes and jscs_quotes is not True:\n return jscs_quotes\n\n # Use whatever quote type is set in preferences\n return get_quotes()",
"def GET_side_quote(self, *a, **kw):\r\n # Server side cache is also invalidated when new comment is posted\r\n return self.render_cached('side-quote', RecentTagged, g.side_comments_max_age, tagtype = 'quotes', title = 'Rationality Quote')",
"def quotes(self, quotes):\n\n self._quotes = quotes",
"async def quote(self,ctx):\n await self.bot.type()\n result = self.get_random()\n if result == -1:\n await self.bot.say(\"There was an issue retrieving a quote\")\n else:\n await self.bot.say(embed=self.quote_to_embed(result))\n await self.bot.delete_message(ctx.message)",
"def order_booking(self, strategy_name, order, account, quotation=None):\n if self._trade_mode == 'mock':\n if self._validate_order(order, account, quotation, strategy_name):\n account.portfolios[strategy_name].process_order(order)\n\n elif self._trade_mode == 'real':\n from .quant_client_sys_pb2 import ClientReqOrder, OrderList\n orderlist = OrderList()\n content = orderlist.orders.add()\n content.instrument = order.instrument\n content.direction = 'dlong' if order.direction == 'long' else 'dshort'\n content.sinterval = order.sinterval\n content.volume = order.volume\n content.price = 'PRICEALGO_BID' if order.direction == 'long' else 'PRICEALGO_ASK'\n content.algotype = order.algotype \n content.offset = order.offset\n content.strategyname = order.strategyname\n content.size = order.size\n content.wttime = order.wttime\n content.signalid = order.signalid\n content.pricetype = 'PRICETYPE_LIMITPRICE'\n content.orderstyle = order.orderstyle\n content.maxcancelnum = order.maxcancelnum\n content.clientid = order.clientid\n self.sender.send(orderlist.SerializeToString())",
"def generate_quote():\n headers = {\n 'accept': 'text/plain'\n }\n response = requests.get(QUOTE_URL, headers=headers)\n quote = response.json()['starWarsQuote']\n card = make_card(title=quote, colour=0xF5C518, thumbnail=THUMBNAIL)\n return card",
"async def sellall(self, ctx:commands.Context, fish_type:str='', fish_quality:str=''):\r\n\r\n if not fish_type in ['specific', 'name', 'school', 'rarity', '']:\r\n await ctx.send(f'{fish_type} must be one of the following:\\n```name\\nschool\\nrarity```')\r\n return\r\n\r\n all_fish = await self.config.member(ctx.message.author).bucket()\r\n new_fish = []\r\n sell_fish = []\r\n msg = ''\r\n total = 0\r\n for i in all_fish:\r\n i.update({'specific': f'{i[\"name\"]} {i[\"size\"]}'})\r\n if fish_type == '' or i[fish_type] == fish_quality:\r\n sell_fish.append(i)\r\n\r\n mod = await self.CheckMerchants(ctx.guild, i)\r\n if await self.CompletedSchool(ctx.message.author, i['school']):\r\n mod += await self.GetModifier(ctx.guild, 'school_complete_mod')\r\n\r\n i[\"value\"] = int(i[\"value\"] * mod * await self.GetModifier(ctx.guild, i['rarity']))\r\n total += i[\"value\"]\r\n msg += f'{i[\"name\"]}{\" \" * (self.longestFishName - len(i[\"name\"]))}{i[\"value\"]} {await bank.get_currency_name(ctx.guild)} ({mod}x)\\n'\r\n else:\r\n new_fish.append(i)\r\n\r\n msg = await ctx.send(f'Are you sure you want to sell:\\n```{msg}```for {total} {await bank.get_currency_name(ctx.guild)}')\r\n\r\n start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)\r\n pred = ReactionPredicate.yes_or_no(msg, ctx.author)\r\n try:\r\n await ctx.bot.wait_for(\"reaction_add\", check=pred, timeout=20)\r\n except asyncio.TimeoutError:\r\n await msg.clear_reactions()\r\n await msg.edit(content='Sale timed out')\r\n return\r\n\r\n if pred.result is True:\r\n await self.config.member(ctx.message.author).bucket.set(new_fish)\r\n await msg.edit(content=f'{len(sell_fish)} fish sold for {total} {await bank.get_currency_name(ctx.guild)}')\r\n await bank.deposit_credits(ctx.message.author, total)\r\n else:\r\n await msg.edit(content='Sale cancelled')\r\n\r\n await msg.clear_reactions()",
"async def quote(self, ctx, *args):\n if not args or args[0] in ('list', 'help'):\n invoke_records = await self.quoteService.find_all()\n await SimplePaginator.SimplePaginator(entries=[item['invoke'] for item in invoke_records],\n title='Kirinus Quote List', length=20, dm=True).paginate(ctx)\n await ctx.message.delete()\n return\n\n quote = await self.quoteService.find_one(args[1]) if args[0] in ('list', 'add', 'info', 'remove', 'help') else \\\n await self.quoteService.find_one(args[0])\n if args[0] == 'add':\n await self.quote_add_routine(ctx, args, quote)\n elif args[0] == 'info':\n await quote_info_routine(ctx, quote)\n elif args[0] == 'remove':\n await self.quote_delete_routine(ctx, args[1], quote)\n else:\n if quote:\n em = await formatter.quote_embed(quote)\n em.set_footer(text=\"Invoked by: \" + ctx.author.display_name)\n await helpers.message_handler(em, ctx, embed=True, deleteresponse=False)\n else:\n await helpers.message_handler(\"Quote not found.\", ctx, 5)",
"async def quote(self, ctx):\n await ctx.trigger_typing()\n try:\n async with self.session.get(self.api) as r:\n content = await r.json()\n except ssl.SSLCertVerificationError:\n await ctx.send(warning(\"Unable to connect to the quotes API.\"))\n return\n formatter = lambda x, y: f\"From {bold(x)}\\n{y}\"\n return await ctx.send(formatter(content[\"author\"], content[\"content\"]))",
"def test_quotes(self):\n inv_search = \"title:'compton scattering' and author:mele\"\n spi_search = \"find t 'compton scattering' and a mele\"\n self._compare_searches(inv_search, spi_search)",
"def quote_currencies(self):\n pass",
"def chase_order(self, order_id, side, avg, qty=None):\n sleep(1) # takes a second for order_id to register in bitmex trade engine\n last_price = avg\n max_chase_buy = float(avg) + float(self.strategy.chase)\n max_chase_sell = float(avg) - float(self.strategy.chase)\n self.logger.info(f'Chasing {side} order, initial price: {avg}, chase: {self.strategy.chase}, '\n f'Failsafe: {self.strategy.chase_failsafe} ')\n\n while True:\n # o = self.rest_open_order(orderID=order_id)\n o = self.ws_open_order(oid=order_id)\n if o:\n if side == 'Buy':\n if self.strategy.double_check or self.ws_restarting:\n quote = self.get_quote()\n self.logger.info('Bid: {} Ask: {}'.format(quote['bidPrice'], quote['askPrice']))\n _price = quote['bidPrice']\n else:\n _price = self.ws.get_ticker()['buy']\n\n self.logger.debug(\n f'Chasing buy order {order_id}, order_price: {avg}, last_price: {last_price}, current price: '\n f'{_price} max chase: {max_chase_buy}')\n if float(_price) <= float(max_chase_buy):\n if float(last_price) < float(_price):\n self.logger.info(f'Amending order {order_id} to price {_price}')\n try:\n ret = self.client.Order.Order_amend(orderID=order_id, price=_price).result()\n except Exception as fuck:\n self.logger.info(f'Error: {fuck}')\n else:\n self.logger.debug(ret)\n finally:\n last_price = _price\n else:\n self.logger.debug(f'Sleeping, order_price: {last_price}, current price: {_price}')\n if self.strategy.double_check:\n sleep(0.5)\n\n else:\n if self.strategy.chase_failsafe:\n self.logger.info(f'Price {_price} exceeded max chase {max_chase_buy}, buying market.')\n self.client.Order.Order_cancelAll(symbol=self.strategy.symbol).result()\n self.execute_order(oq=qty, ot='market', text='Chase failsafe market long order')\n else:\n self.logger.info(f'Price {_price} exceeded max chase {max_chase_buy}, giving up.')\n break\n elif side == 'Sell':\n if self.strategy.double_check or self.ws_restarting:\n quote = self.get_quote()\n self.logger.info('Bid: {} Ask: {}'.format(quote['bidPrice'], quote['askPrice']))\n _price = quote['askPrice']\n else:\n _price = self.ws.get_ticker()['sell']\n\n self.logger.debug(\n f'Chasing sell order {order_id}, order_price: {avg}, last_price: {last_price}, current price: '\n f'{_price} max chase: {max_chase_sell}')\n if float(_price) >= float(max_chase_sell):\n if float(last_price) > float(_price):\n self.logger.info(f'Amending order {order_id} to price {_price} ')\n try:\n ret = self.client.Order.Order_amend(orderID=order_id, price=_price).result()\n except Exception as fuck:\n self.logger.info(f'Error: {fuck}')\n else:\n self.logger.debug(ret)\n finally:\n last_price = _price\n else:\n self.logger.debug(f'Sleeping, order_price: {last_price}, current price: {_price}')\n if self.strategy.double_check:\n sleep(0.5)\n\n else:\n if self.strategy.chase_failsafe:\n self.logger.info(f'Price {_price} exceeded max chase {max_chase_sell}, selling market.')\n self.client.Order.Order_cancelAll(symbol=self.strategy.symbol).result()\n self.execute_order(oq=qty, ot='market', text='Chase failsafe market short order')\n else:\n self.logger.info(f'Price {_price} below max chase {max_chase_sell}, giving up.')\n break\n else:\n self.logger.info('Order Filled')\n break",
"def save_order(self, quote:str, order_result:dict, hold_duration:str, profit:str, quote_fee:str, BNB_fee:str, profit_minus_fees:str, time_to_fill:str, **kwargs):\n\n\t\tif order_result:\n\t\t\tconn \t\t\t = sqlite3.connect(self.name, detect_types=sqlite3.PARSE_DECLTYPES)\n\t\t\tconn.row_factory = sqlite3.Row\n\t\t\tc \t\t\t\t = conn.cursor()\n\n\t\t\t# values = tuple(param for param in order_result.values())\n\t\t\tvalues = (order_result['orderId'],\n\t\t\t\t\t order_result['symbol'],\n\t\t\t\t\t quote,\n\t\t\t\t\t order_result['side'],\n\t\t\t\t\t order_result['type'],\n\t\t\t\t\t order_result['status'],\n\t\t\t\t\t order_result['orderListId'],\n\t\t\t\t\t order_result['clientOrderId'],\n\t\t\t\t\t datetime.utcfromtimestamp(int(order_result['transactTime'])/1000).strftime(\"%Y-%m-%d %H:%M:%S\"),\n\t\t\t\t\t order_result['price'],\n\t\t\t\t\t order_result['origQty'],\n\t\t\t\t\t order_result['executedQty'],\n\t\t\t\t\t order_result['cummulativeQuoteQty'],\n\t\t\t\t\t profit,\n\t\t\t\t\t quote_fee,\n\t\t\t\t\t BNB_fee,\n\t\t\t\t\t profit_minus_fees,\n\t\t\t\t\t time_to_fill,\n\t\t\t\t\t hold_duration,\n\t\t\t\t\t order_result['timeInForce'],\n\t\t\t\t\t kwargs.get('liquidate_position', False))\n\n\t\t\tc.execute('INSERT INTO orders VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', values)\n\t\t\tconn.commit()",
"def send_orders(self, runner_id, odds, side, stake, temp_id=None, session=None):\n date_time_sent = datetime.datetime.utcnow()\n params = {\n 'offers': [],\n 'odds-type': self.client.odds_type,\n 'exchange-type': self.client.exchange_type,\n 'currency': self.client.currency,\n }\n if isinstance(runner_id, list):\n if isinstance(temp_id, list):\n for i, _ in enumerate(runner_id):\n params['offers'].append({'runner-id': runner_id[i], 'side': side[i], 'stake': stake[i],\n 'odds': odds[i], 'temp-id': temp_id[i]})\n else:\n for i, _ in enumerate(runner_id):\n params['offers'].append({'runner-id': runner_id[i], 'side': side[i], 'stake': stake[i],\n 'odds': odds[i]})\n else:\n params['offers'].append(\n {'runner-id': runner_id, 'side': side, 'stake': stake, 'odds': odds, 'temp-id': temp_id}\n )\n method = 'offers'\n response = self.request(\"POST\", self.client.urn_edge, method, data=params, session=session)\n date_time_received = datetime.datetime.utcnow()\n return self.process_response(\n response.json().get('offers', []), resources.Order, date_time_sent, date_time_received\n )",
"def print_quotation(self):\n self.filtered(lambda s: s.state == 'draft').write({'state': 'sent'})\n return self.env['report'].get_action(self, 'ferrua_report.sale_order')",
"def book(self, irc, msg, args, thing):\n self.db.deleteExpired(self.registryValue('orderExpiry'))\n results = self.db.getCurrencyBook(thing)\n if len(results) == 0:\n irc.error(\"No orders for this currency present in database.\")\n return\n if len(results) > self.registryValue('maxOrdersInBookList'):\n irc.error(\"Too many orders to list on IRC. Visit the web \"\n \"order book, http://bitcoin-otc.com/vieworderbook.php?eitherthing=%s \"\n \"to see list of orders for this item.\" % (thing,))\n return\n self._getMtgoxQuote()\n L = [\"#%s %s %s %s %s %s @ %s %s (%s)\" % (id,\n time.ctime(refreshed_at),\n nick,\n buysell,\n amount,\n thing,\n self._getIndexedValue(price),\n otherthing,\n notes) \\\n for (id,\n created_at,\n refreshed_at,\n buysell,\n nick,\n host,\n amount,\n thing,\n price,\n otherthing,\n notes) in results]\n irc.replies(L, joiner=\" || \")",
"def send_quote(self, quote, ask_out, bid_out, count):\n self.invoke_log('on_invoke_send_quote', quote=quote, ask_out=ask_out, bid_out=bid_out, count=count)\n if self._xapi:\n func = self._xapi.X_SendQuote\n func.restype = None\n func.argtypes = [c_void_p, c_void_p, POINTER(QuoteField), POINTER(OrderIDType), POINTER(OrderIDType), c_int]\n func(self.p_fun, self.p_api, byref(quote), byref(ask_out), byref(bid_out), count)",
"def order(self, order_type, price, value=None, amount=None, market_id=None, market_string=None, prevent_taker=False):\n if market_id is not None and market_string is not None:\n raise ValueError(\n \"market_id and market_string are mutually exclusive\")\n elif market_id is None and market_string is None:\n raise ValueError(\"either market_id or market_string are required\")\n if value is not None and amount is not None:\n raise ValueError(\"value and amount are mutually exclusive\")\n elif value is None and amount is None:\n raise ValueError(\"either value or amount are required\")\n\n if market_string is not None:\n market_id = self.markets[market_string]['id']\n price = Decimal(price).quantize(COIN)\n if prevent_taker is True:\n ticker = self.tickers[market_id]\n if ticker['ask'] and order_type == \"buy_limit\" and price > Decimal(ticker['ask']):\n log.info(\"%s %s at %s was not placed. Ask price is %s, so it would have been a taker order.\",\n market_id, order_type, price, ticker['ask'])\n return \"order not placed\"\n elif ticker['bid'] and order_type == 'sell_limit' and price < Decimal(ticker['bid']):\n log.info(\"%s %s at %s was not placed. Bid price is %s, so it would have been a taker order.\",\n market_id, order_type, price, ticker['bid'])\n return \"order not placed\"\n # convert value to amount if necessary\n if order_type == 'buy_limit' and value is not None:\n fee_perc = max(Decimal(self.markets[market_id]['taker_fee']), Decimal(\n self.markets[market_id]['maker_fee']))\n fee_mult = Decimal(fee_perc+1)\n amount = (Decimal(value) / (fee_mult * price)).quantize(COIN)\n elif order_type == 'sell_limit' and value is not None:\n amount = (Decimal(value) / price).quantize(COIN)\n logging.debug(\"Placing %s on %s market for %s at %s\",\n order_type, self.markets[market_id]['string'], amount, price)\n return self.post('/v1/user/{}'.format(order_type), amount=str(amount),\n price=str(price), market_id=market_id)",
"def do_orders(self,args):\n try:\n orders = bitstamp.open_orders()\n orders = sorted(orders, key=lambda x: float(x['price']))\n buytotal,selltotal = 0,0\n numbuys,numsells = 0,0\n amtbuys,amtsells = 0,0\n buyavg,sellavg = 0,0\n numorder = 0 \n for order in orders:\n ordertype=\"Sell\" if order['type'] == 1 else \"Buy\"\n numorder += 1\n print '%s = %s | $%s @ %s BTC %s' % (numorder,ordertype,order['price'],order['amount'],order['id']) \n if order['type'] == 0:\n buytotal += D(order['price'])*D(order['amount'])\n numbuys += D('1')\n amtbuys += D(order['amount'])\n elif order['type'] == 1:\n selltotal += D(order['price'])*D(order['amount'])\n numsells += D('1')\n amtsells += D(order['amount'])\n if amtbuys:\n buyavg = D(buytotal/amtbuys).quantize(cPrec)\n if amtsells:\n sellavg = D(selltotal/amtsells).quantize(cPrec)\n print \"There are %s Buys. There are %s Sells\" % (numbuys,numsells)\n print \"Avg Buy Price: $%s. Avg Sell Price: $%s\" % (buyavg,sellavg)\n except Exception as e:\n print e",
"def place_buy_order(self):\n price = request.form[\"price\"]\n stocks = request.form[\"stocks\"]\n trader_id = request.form[\"trader_id\"]\n self.market.place_buy_order(trader_id, price, stocks)\n return \"\"",
"def search_for_contracts(self, market=None, buy_sell=None, type_=None, contracts=\"All\"):\n if not contracts:\n contracts = []\n if not type_:\n pass\n elif type_.lower() in ['yes', 'long'] and buy_sell == 'buy':\n type_ = {'long': 'BestBuyYesCost'}\n elif type_.lower() in ['no', 'short'] and buy_sell == 'buy':\n type_ = {'short': 'BestBuyNoCost'}\n elif type_.lower() in ['yes', 'long'] and buy_sell == 'sell':\n type_ = {'long': 'BestSellYesCost'}\n elif type_.lower() in ['no', 'short'] and buy_sell == 'sell':\n type_ = {'short': 'BestSellNoCost'}\n \n if not market:\n market_links = [(\"us_election\", 'https://www.predictit.org/api/marketdata/category/6'), (\"us_politics\", 'https://www.predictit.org/api/marketdata/category/13'), (\"world_politics\", 'https://www.predictit.org/api/marketdata/category/4')]\n elif 'us' and 'election' in market.replace('.', '').lower():\n market_links = [(\"us_elections\", 'https://www.predictit.org/api/marketdata/category/6')]\n elif 'us' and 'politic' in market.replace('.', '').lower():\n market_links = [(\"us_politics\",'https://www.predictit.org/api/marketdata/category/13')]\n elif 'world' in market.lower():\n market_links = [(\"world_politics\", 'https://www.predictit.org/api/marketdata/category/4')]\n \n \n\n market_data=[]\n for category, market_link in market_links:\n markets = list(self.browser.get(market_link).json()['Markets'])\n for market in markets:\n market = market\n market[\"Category\"] = category\n market[\"References\"]=[]\n wikidict={\"Trump\": \"http://dbpedia.org/resource/Donald_Trump\", \"Clinton\": \"http://dbpedia.org/resource/Hillary_Clinton\", \"Ossoff\": \"https://en.wikipedia.org/wiki/Jon_Ossoff\", \"Virginia\": \"https://en.wikipedia.org/wiki/Virginia\", \"Georgia\": \"https://en.wikipedia.org/wiki/Georgia_(U.S._state)\",\"Election\":\"https://en.wikipedia.org/wiki/Elections_in_the_United_States\"}\n \n for thing in [\"Trump\", \"Clinton\", \"Ossoff\", \"Virginia\", \"Georgia\",\"Election\"]:\n if thing.lower() in [element.lower() for element in market[\"Name\"].split()]:\n market[\"References\"].append(wikidict[thing])\n market_data.append(json.dumps(market))\n \n return market_data",
"def test_random_programming_quote(self):\n quote = Quote().print_programming_quote()\n self.assertTrue(type(quote) == str)",
"def getCloseStrikePrice(ib, qualityContracts, aStockSymbol, price, startDate, right, exchange = 'SMART' ):\n # print(qualityContracts.symbol)\n # [ticker] = ib.reqTickers(qualityContracts)\n\n chains = ib.reqSecDefOptParams(qualityContracts.symbol, '', qualityContracts.secType, qualityContracts.conId)\n chain = next(c for c in chains if c.tradingClass == aStockSymbol and c.exchange == exchange)\n\n # if price >= then $40 Get to the next round at +/- 5\n # if price < $40 get to round at the +/-2\n if price >= 40 :\n strikePlus = (5 * round(price / 5)) + 5\n strikeMinus = (5 * round(price / 5)) - 5\n # print('strikePlus', strikePlus)\n # print('strikeMinus', strikeMinus)\n else:\n strikePlus = (2 * round(price / 2)) + 3\n strikeMinus = (2 * round(price / 2)) - 3\n # print('strikePlus', strikePlus)\n # print('strikeMinus', strikeMinus)\n\n # get strikes at strikePlus\n strikes = [strike for strike in chain.strikes\n if strike >= strikeMinus and strike <= strikePlus ]\n # print('strikes: ', strikes)\n\n # get experation date in proper format\n dateFromISOFromat = dateUtils.getDateFromISO8601(startDate)\n nextFridayDateFormat = dateUtils.nextFriday(dateFromISOFromat)\n expiration = dateUtils.nextFridayOrgFormat(nextFridayDateFormat) # = sorted(exp for exp in chain.expirations)\n # print('expiration: ', expiration)\n\n contracts = [Option(qualityContracts.symbol, expiration, strike, right, exchange)\n for strike in strikes]\n ib.qualifyContracts(*contracts)\n # print('contracts: ', contracts, '\\n\\n\\n')\n\n # Todo: should this be ib.reqSecDefOptParams instead of ib.reqContractDetails???\n optionContractsDetails = [ib.reqContractDetails(cd)\n for cd in contracts]\n\n return strikes, contracts\n\n # # # Todo - need to update to return close price when market closed else return last\n # removing last as we were getting errors if it did not exist.\n # # return tickers[0].close + tickers[1].close"
] | [
"0.5831876",
"0.569695",
"0.553656",
"0.5332494",
"0.53220475",
"0.5144027",
"0.50976944",
"0.50927013",
"0.5080306",
"0.5049257",
"0.5032044",
"0.5003307",
"0.4973926",
"0.49699786",
"0.49636665",
"0.4962488",
"0.49302152",
"0.4928655",
"0.49229747",
"0.49007526",
"0.48971412",
"0.48649326",
"0.48420396",
"0.48371968",
"0.48279673",
"0.481534",
"0.48150152",
"0.47924998",
"0.47831073",
"0.4779397"
] | 0.59208894 | 0 |
Retrieve an environment variable. Any failures will cause an exception to be thrown. | def get_env_variable(self, var_name, optional=False):
try:
return environ[var_name]
except KeyError:
if optional:
return False
else:
error_msg = f'Error: You must set the {var_name} environment variable.'
raise Exception(error_msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_env_variable(var_name):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = \"Set the {} environment variable\".format(var_name)\n\n raise Exception(error_msg)",
"def get_env_variable(var_name):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = \"Set the %s environment variable\" % var_name\n raise ImproperlyConfigured(error_msg)",
"def GetEnvVariable(name):\n return os.environ.get(name)",
"def get_environment_variable(name):\n\n variable = None\n try:\n variable = os.environ[name]\n except KeyError:\n pass\n \n return variable",
"def get_env_variable(var_name):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = \"Set the %s env variable\" % var_name\n if DEBUG:\n warnings.warn(error_msg)\n else:\n raise ImproperlyConfigured(error_msg)",
"def env_get_var_value(var_name, allow_missing=False):\n if allow_missing:\n if var_name not in os.environ.keys():\n return None\n assert var_name in os.environ.keys(), \"Please supply %s in environment\" % var_name\n return os.environ[var_name]",
"def get_required_env_variable(var_name):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = 'Set the {0} environment variable'.format(var_name)\n raise ImproperlyConfigured(error_msg)",
"def get_value(key:str):\n value = environ.get(key)\n if value == None or len(str(value)) == 0:\n raise ValueError('Missing env: '+key)\n return value",
"def _get_env(key: str) -> str:\n value = os.getenv(key)\n assert isinstance(value, str), (\n f\"the {key} environment variable must be set and a string, \" f\"{value=}\"\n )\n return value",
"def get_env_setting(setting):\n try:\n return os.environ[setting]\n except KeyError:\n error_msg = \"Set the %s env variable\" % setting\n raise LookupError(error_msg)",
"def get_env_variable(var_name, default_value=None):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = \"Set the %s environment variable\" % var_name\n print(error_msg)\n\n return default_value",
"def getenv_or_raise_exception(varname) -> str:\n\n env = os.getenv(varname)\n if env is None:\n raise EnvironmentError(f\"Environment variable {varname} is not set!\")\n return env",
"def getenv_check(e):\n res = os.getenv(e)\n if res == None:\n print(e, 'environment variable not set - stopping.')\n exit(1)\n else:\n return res",
"def env(var):\n return os.environ[var]",
"def get_env_or_exception(key):\n \n value = os.getenv(key)\n if value is None:\n raise ImproperlyConfigured(f'{key} env variable is not set')\n\n return value",
"def get_env_setting(key):\n try:\n return environ[key]\n except KeyError:\n error_msg = 'Set the {0} env variable'.format(key)\n raise ImproperlyConfigured(error_msg)",
"def get_envvar(name, silent=False):\n value = os.environ.get(name)\n if value is None:\n if not silent:\n raise RuntimeError(\n 'The environment variable %r is not set '\n 'and as such configuration could not be '\n 'loaded. Set this variable and make it '\n 'point to a configuration file' % name\n )\n else:\n return ''\n return value",
"def getenv(self, var):\n return os.environ[var]",
"def _get_env(key, default=None, coerce=lambda x: x, required=False):\n try:\n value = os.environ[key]\n except KeyError:\n if required is True:\n raise RequiredSettingMissingError(key)\n else:\n return default\n\n try:\n return coerce(value)\n except Exception:\n raise CoercionError(key, value, coerce)",
"def windows_get_env_value(var_name: str) -> str:\n if var_name in os.environ.keys():\n return os.environ[var_name]",
"def get_var(var_name: str):\n return os.environ[var_name]",
"def get(self, key, default=None):\n value = os.environ.get(key)\n\n if value:\n self.logging.info(\"Got %s from environment.\" % key)\n self.logging.debug(value)\n return_val = value\n elif key in self._config.keys():\n self.logging.info(\"Got %s from config file.\" % key)\n self.logging.debug(value)\n return_val = self._config[key]\n else:\n return_val = default\n return return_val",
"def get_environment_var(env_name, default_value):\n if env_name in os.environ:\n return os.environ[env_name]\n else:\n return default_value",
"def get_env_variable(var_name, default_value=None):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = 'Set the {} environment variable'.format(var_name)\n if default_value is not None:\n return default_value\n raise ValueError(error_msg)",
"def environment_value(self, name):\n if not os.environ.has_key(name):\n return None\n return os.environ[name]",
"def getenv(self, key):\n return self._env[key]",
"def env(key, default=None, required=False):\n try:\n value = os.environ[key]\n return ast.literal_eval(value)\n except (SyntaxError, ValueError):\n return value\n except KeyError:\n if default or not required:\n return default\n raise ImproperlyConfigured(\n \"Missing required environment variable '%s'\" % key)",
"def get(var_name, default=None):\n if default is None:\n try:\n value = os.environ[var_name]\n except KeyError:\n error_msg = 'Set the {} environment variable'.format(var_name)\n raise ImproperlyConfigured(error_msg)\n else:\n value = os.getenv(var_name, default)\n return cast(value)",
"def get_env_setting(setting):\n try:\n return environ[setting]\n except KeyError:\n error_msg = \"Set the {0} env variable\".format(setting)\n raise ImproperlyConfigured(error_msg)",
"def get_env_setting(setting):\r\n try:\r\n return environ[setting]\r\n except KeyError:\r\n error_msg = \"Set the %s env variable\" % setting\r\n raise ImproperlyConfigured(error_msg)"
] | [
"0.8219711",
"0.7999004",
"0.7904889",
"0.7870026",
"0.7826477",
"0.7740203",
"0.77336615",
"0.77190036",
"0.7707803",
"0.76930946",
"0.76501966",
"0.7644564",
"0.75845987",
"0.7573528",
"0.75723463",
"0.7517979",
"0.7484092",
"0.7442207",
"0.74171025",
"0.74168986",
"0.73590547",
"0.7351036",
"0.7344682",
"0.7342397",
"0.7248015",
"0.72308946",
"0.7221773",
"0.71998805",
"0.71893555",
"0.7179973"
] | 0.8043275 | 1 |
Download and install the latest codeql cli. Download and install the latest codeql queries. | def setup(): 
# check version and download the latest version
get_latest_codeql()
# install vscode?
# clone codeql libs
# setup vscode + codeql
# wait for user | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run_codeql_query(query, database, output, search_path):\n # --search-path is required when the CLI needs to upgrade the database scheme.\n subprocess_run([\"codeql\", \"query\", \"run\", query, \"--database\", database,\n \"--output\", output + \".bqrs\", \"--search-path\", search_path])\n subprocess_run([\"codeql\", \"bqrs\", \"decode\", output + \".bqrs\",\n \"--format=csv\", \"--no-titles\", \"--output\", output])\n os.remove(output + \".bqrs\")",
"def query_cmdline():",
"def qiwsir():\n print \"http://qiwsir.github.io\"",
"def main():\n frontend_query(PATH, USER)",
"def main():\n bootstrapping.CommandStart('dataflow-sql', component_id='dataflow-sql')\n bootstrapping.CheckUpdates('dataflow-sql')\n update_manager.UpdateManager.EnsureInstalledAndRestart(\n ['dataflow-sql'], command=__file__)\n java_bin = java.RequireJavaInstalled('Dataflow SQL')\n bootstrapping.ExecuteJavaClass(\n java_bin,\n jar_dir=_JAR_DIR,\n main_jar=_MAIN_JAR,\n main_class=_CLASSNAME,\n main_args=['-nn', 'DFSQL', '-u', 'jdbc:beam:userAgent=DataflowSQL'])",
"def main():\n\n database_file = Path(__file__).parent.parent / \"src\" / \"galois\" / \"_databases\" / \"irreducible_polys.db\"\n conn, cursor = create_database(database_file)\n\n _add_hpl_1998(conn, cursor)\n\n conn.close()",
"async def source(self, ctx):\n \"\"\" Check out my source code <3 \"\"\"\n # Do not remove this command, this has to stay due to the GitHub LICENSE.\n # TL:DR, you have to disclose source according to MIT.\n # Reference: https://github.com/AlexFlipnote/discord_bot.py/blob/master/LICENSE\n await ctx.send(f\"**{ctx.bot.user}** is powered by this source code:\\nhttps://github.com/AlexFlipnote/discord_bot.py With modifications by user: snow-blade\")",
"def install_dcos_edgelb_cli():\n display.vvv(\"dcos edgelb: installing cli\")\n\n cmd = [\n 'dcos',\n 'package',\n 'install',\n 'edgelb',\n '--cli',\n '--yes'\n ]\n display.vvv(subprocess.check_output(cmd, env=_dcos_path()).decode())",
"def backupQbaseCode(self):\n q.transaction.start(\"Backup /opt/qbase pylabs code on %s\" % self.ipaddr)\n self.execute(\"mkdir -p /opt/backups\", False)\n path=\"/opt/backups/pylabscore_%s.tgz\" % q.base.time.getLocalTimeHRForFilesystem()\n pathstobackup='/opt/qbase3/lib/pylabs/extensions /opt/code/pylabs-core/code/utils /opt/code/pylabs-core/code/packages/pylabs/core'\n # compress them\n self.execute(\"tar zcvfh %s %s \" % (path, pathstobackup) , False)\n # remember last compressed\n self.execute(\"echo '%s' > /opt/backups/path_to_last_pylabscode_backup.txt\" % path, True)\n q.transaction.stop()",
"def main(ctx, qa_dir, no_editor, report_dir, vcs, debug, main_branch):\n __main_imp__(ctx, qa_dir, no_editor, report_dir, vcs, debug, main_branch)",
"def main():\n get_obofoundry(force_download=True)",
"def junos_cve_query(version):\n pass",
"def InstallPrereqs():\n #Collect the source for Cython and put in _deps/cython-master\n import urllib,zipfile\n print('getting cython sources')\n urllib.urlretrieve('https://github.com/cython/cython/archive/master.zip', filename = 'master.zip')\n with zipfile.ZipFile('master.zip', 'r') as myzip:\n myzip.extractall(path='_deps')\n os.remove('master.zip')\n for python_install in PYTHONVERSIONS:\n for cwd in ['_deps/cython-master']:\n print(subprocess.check_output([python_install, 'setup.py', 'install'], cwd = cwd))",
"def query(args):\n dbh = despydb.DesDbi(args.service, args.section)\n if args.query not in \"-+\":\n do1Query(dbh, args.query, args)\n elif args.query == \"-\":\n line = sys.stdin.readline()\n while line:\n line = line.strip()\n if not line or line.startswith(\"#\"):\n pass\n else:\n do1Query(dbh, line, args)\n line = sys.stdin.readline()\n else: #if args.query == \"+\":\n lines = sys.stdin.read()\n do1Query(dbh, lines, args)\n dbh.close()",
"def lifecycle_query_installed(self, timeout):\n if self.version in BasicEnv.binary_versions_v2:\n # res = os.system(\"./../bin/{}/bin/peer lifecycle chaincode queryinstalled --output json --connTimeout\n # {} > queryInstalled.txt\"\n # .format(self.version, timeout), \"r\")\n # # with open('./queryInstalled.txt', 'r', encoding='utf-8') as f:\n # # content = f.read()\n # # os.system(\"rm ./queryInstalled.txt\")\n # body = res.read()\n # installed_chaincodes = json.loads(body)\n\n res = subprocess.Popen(\"./../bin/{}/bin/peer lifecycle chaincode queryinstalled --output json \"\n \"--connTimeout {}\".format(self.version, timeout), shell=True, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = res.communicate()\n return_code = res.returncode\n if return_code == 0:\n content = str(stdout, encoding=\"utf-8\")\n installed_chaincodes = json.loads(content)\n return return_code, installed_chaincodes\n else:\n stderr = str(stderr, encoding=\"utf-8\")\n return return_code, stderr",
"def cli(ctx):\n #TODO",
"def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['STAGE'].values()))\n cur = conn.cursor()\n \n #remove the existing tables\n drop_tables(cur, conn)\n \n #replace the tables with new ones\n create_tables(cur, conn)\n \n #add missing postcode value into table\n default_missing_values(cur, conn)\n \n conn.close()",
"def main() -> None:\n\n download_data_tools.initial_message()\n\n # S&P 500 companies, initial year and time step\n stocks: List[str] = download_data_tools.get_stocks(['all'])\n dates: List[str] = ['1992-01', '2012-12']\n time_step: str = '1d'\n\n # Basic folders\n download_data_tools.start_folders()\n\n # Run analysis\n # Download data\n portfolio_download_data(stocks, dates, time_step)\n\n print('Ay vamos!!!')",
"def install_backend_deps():\n with lcd(BACKENDDIR):\n cmd = '%(pip)s install -r %(requirements_file)s' % {\n 'pip': get_pip(),\n 'requirements_file': requirements_file\n }\n local(cmd)\n # Install Pandoc\n local(\"sudo apt-get install pandoc\")\n # Install Pyandoc\n with lcd(HOMEDIR):\n if not os.path.isdir(os.path.join(HOMEDIR, 'pyandoc')):\n local(\"git clone [email protected]:kennethreitz/pyandoc.git\")\n with lcd(\"pyandoc\"):\n if not env.local:\n\t with prefix('. /home/ubuntu/virtualenvs/venv-system/bin/activate'):\n local(\"python setup.py install\")\n else:\n local(\"python setup.py install\")",
"def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--download_path\",\n default=None,\n help=\"Free or auth\"\n )\n parser.add_argument(\n \"--download_type\", default=\"free\", help=\"Free or auth\"\n )\n parser.add_argument(\n \"--ipversion\",\n default=\"ipv4\", help=\"IP Version format \"\n )\n parser.add_argument(\n \"--format\",\n default=\"csv\",\n help=\"DB AVAILABLE FORMATS CSV or BIN\"\n )\n parser.add_argument(\"--product\", default=\"db1\", help=\"PRODUCT\")\n parser.add_argument(\n \"--token\",\n help=\"token used in order to authenticate\"\n \"in case of downloading the auth required DBs\"\n )\n parser.add_argument(\"--unzip\", default=True, help=\"\")\n parser.add_argument(\"--numbertoipv4\", default=True, help=\"\")\n\n args = parser.parse_args()\n\n cli_util = CliUtil(\n **{x: y for x, y in args._get_kwargs()}\n )\n cli_util.exec()\n return 0",
"def _run_query(self):",
"def update_code(self):\n print ('update code')\n self.query_dict.update({'code':code.value})",
"def _load_kql_magic(self):\n # KqlMagic\n print(\"Please wait. Loading Kqlmagic extension...\")\n if self._ip is not None:\n with warnings.catch_warnings():\n warnings.simplefilter(action=\"ignore\")\n self._ip.run_line_magic(\"reload_ext\", \"Kqlmagic\")\n self._loaded = True",
"def local_install(self):\n import subprocess\n\n print(\"Making local install\")\n from pathlib import Path\n\n root = Path(__file__).parent.parent\n\n def run(args, shell=False):\n print(\"---\", \" \".join(args))\n return subprocess.check_call(args, cwd=curdir, shell=shell)\n\n def get_version():\n import json\n\n p = Path(curdir / \"package.json\")\n contents = json.loads(p.read_text())\n return contents[\"version\"]\n\n print(\"--- installing RobotFramework Language Server\")\n curdir = root / \"robotframework-ls\"\n run(\"python -m dev vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robotframework-lsp-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())\n\n print(\"\\n--- installing Robocorp Code\")\n curdir = root / \"robocorp-code\"\n run(\"python -m dev vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robocorp-code-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())",
"def get_query():\n query = \"\"\"{\n repository(name: \"flux\", owner: \"fluxcd\") {\n forkCount\n issues {\n totalCount\n }\n pullRequests {\n totalCount\n }\n releases {\n totalCount\n }\n stargazers {\n totalCount\n }\n watchers {\n totalCount\n }\n }\n}\n \"\"\"\n return query",
"def main():\n parser = OptionParser(usage=\"%prog <sourcefile> [-s site] [-q] [-t] [-f outfile]\", \n version=\"SecPoint.com %prog \"+VERSION,\n epilog=\"SecPoint.com Google Penetration Testing Hack Database v. \"+VERSION)\n parser.add_option(\"-o\", \"--output\", dest=\"filename\",\n help=\"save output to file\", metavar=\"FILE\")\n parser.add_option(\"-s\", \"--site\", dest=\"sitename\",\n help=\"generate queries for the SITE\", metavar=\"SITE\")\n parser.add_option(\"-m\", \"--multiple\", dest=\"listfilename\",\n help=\"generate queries for multiple sites listed in LISTFILE\", metavar=\"LISTFILE\")\n parser.add_option(\"-q\", \"--query\",\n action=\"store_true\", dest=\"gen_query\", default=False,\n help=\"generate google query urls for each line\")\n parser.add_option(\"-t\", \"--html\",\n action=\"store_true\", dest=\"gen_html\", default=False,\n help=\"generate output in HTML format (implies -q)\")\n (options, args) = parser.parse_args()\n if len(args) != 1:\n print \"\"\"SecPoint.com Google Penetration Testing Hack Database\n\n The Portable Penetrator - Wifi Recovery - Vulnerability Scanner\n http://www.secpoint.com/portable-penetrator.html\n \"\"\"\n parser.print_help()\n print SAMPLES\n exit()\n #parser.error(\"please set source file (could be found in 'db' dir)\")\n #all options \n site_name = options.sitename\n gen_html = options.gen_html\n gen_query = options.gen_query\n out_file = options.filename\n multlist_file = options.listfilename\n db_dir = os.path.join(os.path.dirname(__file__),'db')\n source_file = os.path.join(db_dir,args[0])\n if not os.path.isfile(source_file):\n parser.error(\"could not find source file! Please check if it exists in 'db' dir\")\n\n #starting!\n strs = get_strings(source_file)\n if not strs:\n print \"Can't get data from your source file!\"\n exit()\n queries = []\n if site_name and multlist_file:\n print \"Please use -s OR -m switches alone!\"\n exit() \n if site_name:\n strs = append_sitename(strs,site_name)\n if multlist_file:\n if not os.path.isfile(multlist_file):\n print \"Could not find file from -m switch!\"\n exit()\n mlst = open(multlist_file).read().split('\\n')\n strsnew = [] #using multiple sites to create queries\n for i in mlst:\n strsnew.extend(append_sitename(strs,i))\n strs = strsnew \n if gen_query:\n [strs,queries] = gen_google_query(strs)\n if gen_html:\n if not gen_query: #if not previuosly generated\n [strs,queries] = gen_google_query(strs)\n strs = gen_html_output(strs,queries)\n else:\n if queries: \n strs = queries\n\n save_output(strs,out_file)",
"def test_2_scrnaseq(install_test_files, data_dir):\n with make_workdir() as workdir:\n cl = [\"bcbio_nextgen.py\",\n get_post_process_yaml(data_dir, workdir),\n os.path.join(data_dir, os.pardir, \"Harvard-inDrop\"),\n os.path.join(data_dir, \"run_info-scrnaseq.yaml\")]\n subprocess.check_call(cl)",
"def download_query_interface(url=None, force=True):\n fp = query_interface_path()\n if os.path.exists(fp):\n logging.info(f\"query interface already exists in {fp}\")\n if not force:\n return\n\n import zipfile\n import urllib.request\n import concordancer.server\n\n # Download data\n tgt_dir = pathlib.Path(concordancer.server.__file__).parents[0]\n if url is None:\n url = FRONTEND_ZIP\n urllib.request.urlretrieve(url, tgt_dir / \"dist.zip\")\n\n # Extract zip file\n with zipfile.ZipFile(tgt_dir / \"dist.zip\", 'r') as zip_ref:\n zip_ref.extractall(tgt_dir)\n\n return tgt_dir",
"def ci(session):\n session.install('-rrequirements-dev.txt')\n session.install('-e', '.')\n run_sphinx(session)\n run_yapf(session, True)\n run_all_linters(session)\n run_pytest_units(session)\n run_pytest_integrations(session)",
"def install(i):\n\n cm_kernel.print_for_con('***********************************************')\n cm_kernel.print_for_con('Installing code ...')\n\n # Check vars\n if 'target_os_uoa' not in i: return {'cm_return':1, 'cm_error':'\"target_os_uoa\" is not defined in \"code install\"'}\n\n # Create entry\n ii={'cm_run_module_uoa':ini['cm_module_uid'],\n 'cm_action':'update'}\n if 'install_data_uid' in i and i['install_data_uid']!='': \n ii['cm_data_uid']=i['install_data_uid']\n if 'install_data_alias' in i and i['install_data_alias']!='': \n ii['cm_data_uoa']=i['install_data_alias']\n if 'install_data_display_as_alias' in i: \n ii['cm_display_as_alias']=i['install_data_display_as_alias']\n if 'install_module_uoa' in i and i['install_module_uoa']!='':\n ii['cm_run_module_uoa']=i['install_module_uoa']\n if 'cm_array' in i and len(i['cm_array'])>0: ii['cm_array']=i['cm_array']\n if 'install_repo_uoa' in i and i['install_repo_uoa']!='': \n ii['cm_repo_uoa']=i['install_repo_uoa']\n r=cm_kernel.access(ii)\n if r['cm_return']>0: return r\n\n target_path=r['cm_path']\n target_uid=r['cm_uid']\n target_alias=r['cm_alias']\n\n # Prepare script\n rx=get_env({'cm_data_uoa':target_uid,\n 'os_uoa':i['target_os_uoa']})\n if rx['cm_return']>0: return rx\n\n script=rx['cm_string']\n\n ii={'script_name':script,\n 'skip_extension':'yes',\n 'target_os_uoa':i['target_os_uoa'],\n 'cm_path':target_path}\n if 'code_deps' in i and i.get('skip_code_deps','')!='yes':\n ii['code_deps']=i['code_deps']\n\n # Add remark about how code was built\n if 'add_rem_to_script' in i:\n run_commands_before=[]\n run_commands_before.append('')\n for x in i['add_rem_to_script']:\n run_commands_before.append(x)\n ii['run_commands_before']=run_commands_before\n\n rx=prepare_script(ii)\n if rx['cm_return']>0: return rx\n\n r['script_name']=rx['cm_path']\n r['script_filename']=script\n\n return r"
] | [
"0.60229224",
"0.55783564",
"0.5433331",
"0.522893",
"0.5157668",
"0.49971527",
"0.49750808",
"0.49727553",
"0.49529603",
"0.4920116",
"0.49196017",
"0.48277876",
"0.48154685",
"0.47984973",
"0.47673056",
"0.4742191",
"0.47271448",
"0.47246155",
"0.47236863",
"0.47113135",
"0.4684878",
"0.4678832",
"0.46674758",
"0.46673313",
"0.46540326",
"0.46368662",
"0.46305606",
"0.46019998",
"0.46002585",
"0.45999736"
] | 0.7874407 | 0 |
Create Xija model object. This function creates a Xija model object with initial parameters, if any. This function is intended to provide a streamlined method for creating Xija models that can take both single-value data and time-defined data (e.g. [pitch1, pitch2, pitch3], [time1, time2, time3]), defined in the `init` dictionary. | def setup_model(msid, t0, t1, model_spec, init): 
model = xija.ThermalModel(msid, start=t0, stop=t1, model_spec=model_spec)
for key, value in init.items():
if isinstance(value, dict):
model.comp[key].set_data(value['data'], value['times'])
else:
model.comp[key].set_data(value)
return model | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def make_model():\n m = model_class(*argv[2:-1])\n modelobj[\"model\"] = m",
"def create_model(self) -> None:\n self._model = create_model_with_temperature_scaling(self.config)",
"def create_model(self, **inputs):\n raise NotImplementedError('This method has to be overwritten.')",
"def build_model():",
"def form_data(stocks, init_param):\r\n \r\n rs = stocks[1].rsi\r\n ts = stocks[1].tsi\r\n a = 1\r\n \r\n \r\n for date in init_param.train_dates:\r\n try:\r\n training_data\r\n except NameError:\r\n training_data = LearningData()\r\n training_data.construct(stocks, date, init_param.future_day, init_param.features)\r\n else:\r\n training_data.append(stocks, date, init_param.future_day, init_param.features)\r\n \r\n for date in init_param.test_dates:\r\n try:\r\n test_data\r\n except NameError:\r\n test_data = LearningData()\r\n test_data.construct(stocks, date, init_param.future_day, init_param.features)\r\n else:\r\n test_data.append(stocks, date, init_param.future_day, init_param.features)\r\n \r\n #reference_date = dateutl.days_since_1900('1991-01-01')\r\n #test_data.construct(stocks,[reference_date, day_history, init_param.future_day])\r\n \r\n return training_data, test_data",
"def __init__(self, x_train, model):\n self.x_train = x_train\n self.model = model",
"def __init__(self,obs,**kwd):\n from PyAnUtils.pyanfunctions import ExtraOpt\n \n # Extra options\n extraopt = ExtraOpt( [('readws',None),('modeltype',None),\n ('modelname',None)] )\n extraopt.setkwd(kwd)\n \n \n # Initialize dictionary of models, which must be\n # associated to an observable. Also, the hashed\n # name of the pdf used is stored\n self.__models = {}\n self.__pdftypes = {}\n\n # Constructor from a Workspace\n if extraopt.readws: \n out = extraopt.readws\n if type(obs) != str:\n raise RuntimeError(\"Initialization with the 'readws' keyword\"\\\n \" requires the first argument to be the name of the\"\\\n \" observable (str)\")\n if not out[2].has_key(obs):\n raise AttributeError(\"Observable '%s' not found in the \"\\\n \"Workspace\" % obs)\n self.__observable = obs\n self.__setattr__(self.__observable,out[2][obs])\n self.__setattr__\n # Set up the models, need info from the user\n # --- The modeltype (bkg,signal,..)\n if not extraopt.modeltype:\n raise RuntimeError(\"Initialization with the 'readws' keyword\"\\\n \" requires another keyword 'modeltype' to be set\")\n # --- the actual internal name \n if not extraopt.modelname:\n raise RuntimeError(\"Initialization with the 'readws' keyword\"\\\n \" requires another keyword 'modelname' to be set\")\n # --- Set up the models\n self.__setupmodelsfromws(out,extraopt.modeltype,extraopt.modelname)\n # Regular constructor\n else:\n # Initialize observable put the name as attribute\n self.__observable = obs.GetName()\n self.__setattr__(obs.GetName(),obs)",
"def __init__(self, init_values, noise=None):\n self.dtype = np.float32\n\n self.dt = 1.\n self.init_phase = np.array(init_values['Y0'])\n self.W = np.array(init_values['W'])\n self.K = np.array(init_values['K'])\n\n self.n_osc = len(self.W)\n self.m_order = self.K.shape[0]\n\n self.noise = noise",
"def _initialize_model(self):\n max_value = self.data.max()\n\n if self.model_type == self._GAUSSIAN2D:\n model = models.Gaussian2D(\n x_mean=self.x, y_mean=self.y, x_stddev=1, y_stddev=1\n )\n model.amplitude = max_value\n\n # Establish reasonable bounds for the fitted parameters\n model.x_stddev.bounds = (0, self._box / 4)\n model.y_stddev.bounds = (0, self._box / 4)\n model.x_mean.bounds = (self.x - 5, self.x + 5)\n model.y_mean.bounds = (self.y - 5, self.y + 5)\n\n elif self.model_type == self._MOFFAT2D:\n model = models.Moffat2D()\n model.x_0 = self.x\n model.y_0 = self.y\n model.gamma = 2\n model.alpha = 2\n model.amplitude = max_value\n\n # Establish reasonable bounds for the fitted parameters\n model.alpha.bounds = (1, 6)\n model.gamma.bounds = (0, self._box / 4)\n model.x_0.bounds = (self.x - 5, self.x + 5)\n model.y_0.bounds = (self.y - 5, self.y + 5)\n\n model += models.Const2D(self.fit_sky())\n model.amplitude_1.fixed = True\n return model",
"def __init__(self, *args):\n this = _libsbml.new_ModelCreator(*args)\n try: self.this.append(this)\n except: self.this = this",
"def recent_model_init(model_args, task_infos, tokenizer):\n config = AutoConfig.from_pretrained(\n model_args.model_name_or_path,\n num_labels=task_infos.num_labels,\n cache_dir=model_args.model_cache_dir,\n id2label=task_infos.id2label,\n label2id=task_infos.label2id,\n )\n config.dense_type = model_args.dense_type\n config.act_type = model_args.act_type\n config.num_labels_per_head = [\n len(label_id) for label_id in task_infos.head_id_to_label_id\n ]\n config.head2label = task_infos.head_id_to_label_id\n model_cls = getattr(mod, model_args.architectures,\n RobertaForKlueRecent)\n model = model_cls.from_pretrained(\n model_args.model_name_or_path,\n config=config,\n cache_dir=model_args.model_cache_dir,\n )\n if model.config.vocab_size < len(tokenizer):\n print(\"resize...\")\n model.resize_token_embeddings(len(tokenizer))\n return model",
"def test_init(self):\n xtal_model_data = XtalModelData(self.params)\n\n assert xtal_model_data.pdb == self.params.input.pdb\n\n assert xtal_model_data.mtz == self.params.input.mtz\n\n # TODO Assert utilised method calls of these classes\n # Assert is innstance causses issues if called from somewhere else\n\n self.assertIsInstance(xtal_model_data.xrs, cctbx.xray.structure)\n\n self.assertIsInstance(\n xtal_model_data.inputs, mmtbx.utils.process_command_line_args\n )\n\n self.assertIsInstance(\n xtal_model_data.crystal_gridding, cctbx.maptbx.crystal_gridding\n )\n\n self.assertIsInstance(xtal_model_data.fmodel, mmtbx.f_model.f_model.manager)",
"def initialize(self):\n for key in self.parameter_dict:\n self.models[key] = self._create_model(key)",
"def __init__(self,models,extraparams=None,outputcontraction=None,\n interpolation='linear',interpolationdirection='y',\n offgrid=None):\n from operator import isMappingType\n\n if len(models)==2 and isMappingType(models[1]):\n modtype = get_model_class(models[0])\n params = np.array(models[1].values())\n params = [dict([(models[1].keys()[i],v)] for v in t) for i,t in enumerate(params.T)]\n models = [modtype(**params[i]) for m in range(len(params))]\n\n params = None\n\n for m in models:\n if params is None:\n params = m.params\n else:\n if m.params != params:\n raise ValueError('model %s does not match parameters for other models'%m)\n\n if extraparams is not None:\n self._extraparams = {}\n for n,ps in extraparams.iteritems():\n arr = np.array(ps)\n if extraparams[n].size != len(models):\n raise ValueError('too many/few extra parameters for parameter %s'%n)\n self._extraparams[n] = arr\n else:\n self._extraparams = None\n\n self._params = params\n self.models = tuple(models)\n self._extraparams = extraparams\n\n self.outputcontraction = outputcontraction\n self.interpolation = interpolation\n self.interpolationdirection = interpolationdirection\n self.offgrid = offgrid",
"def basic_model_init(model_args, task_infos, tokenizer):\n config = AutoConfig.from_pretrained(\n model_args.model_name_or_path,\n num_labels=task_infos.num_labels,\n cache_dir=model_args.model_cache_dir,\n id2label=task_infos.id2label,\n label2id=task_infos.label2id,\n )\n model_cls = getattr(mod, model_args.architectures,\n AutoModelForSequenceClassification)\n model = model_cls.from_pretrained(\n model_args.model_name_or_path,\n config=config,\n cache_dir=model_args.model_cache_dir,\n )\n if model.config.vocab_size < len(tokenizer):\n print(\"resize...\")\n model.resize_token_embeddings(len(tokenizer))\n return model",
"def make_objects():\n cwd = os.getcwd()\n\n os.chdir(\"test_data/protein_load\")\n pmodel = pyODEM.model_loaders.Protein(\"ww_domain.ini\")\n os.chdir(cwd)\n\n pmodel.set_temperature(120.)\n\n return pmodel",
"def initialize_model(self, initial_data):\n # EDIT THIS METHOD TO RETURN A MINIMAX MODEL ###\n return None",
"def create_model():\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument('--DISC_LR', type=float, default=1e-4)\r\n parser.add_argument('--GEN_LR', type=float, default=1e-3)\r\n parser.add_argument('--GEN_BETA1', type=float, default=0.9)\r\n parser.add_argument('--GEN_BETA2', type=float, default=0.999)\r\n parser.add_argument('--IMAGE_SIZE', type=int, default=None)\r\n parser.add_argument('--QUES_SIZE', type=int, default=None)\r\n parser.add_argument('--QUES_EMBED_SIZE', type=int, default=2048)\r\n parser.add_argument('--WORD_EMBED_SIZE', type=int, default=512)\r\n parser.add_argument('--VOCAB_SIZE', type=int, default=1004)\r\n args, task_args = parser.parse_known_args()\r\n override_if_not_in_args('--max_steps', '1000', task_args)\r\n override_if_not_in_args('--batch_size', '64', task_args)\r\n override_if_not_in_args('--eval_set_size', '370', task_args)\r\n override_if_not_in_args('--eval_interval_secs', '2', task_args)\r\n override_if_not_in_args('--log_interval_secs', '2', task_args)\r\n override_if_not_in_args('--min_train_eval_rate', '2', task_args)\r\n\r\n return Model(args.DISC_LR, args.GEN_LR, args.GEN_BETA1, args.GEN_BETA2,\r\n args.IMAGE_SIZE, args.QUES_EMBED_SIZE, args.WORD_EMBED_SIZE,\r\n args.QUES_SIZE, args.VOCAB_SIZE), task_args",
"def create_data_model():\r\n data = {}\r\n data['distance_matrix'] = distance_matrix.tolist()\r\n data['time_matrix'] = time_matrix.tolist()\r\n data['time_windows'] = time_windows.tolist()\r\n data['pickups_deliveries'] = pickup_deliveries.tolist()\r\n data['demands'] = demand\r\n data['num_vehicles'] = 20\r\n data['vehicle_capacities'] = [20 * i / i for i in range(1, num_vehicles+1)]\r\n data['depot'] = (2 * length) - 1\r\n return data",
"def __init__(self, **kwargs):\n\n # Identify the mode to start the model in\n if \"x\" in kwargs and \"y\" in kwargs:\n x = kwargs.get(\"x\")\n y = kwargs.get(\"y\")\n if \"model_name\" not in kwargs:\n self.__mode = \"train\"\n else:\n self.__mode = \"retrain\"\n elif \"model_name\" in kwargs:\n self.__mode = \"test\"\n else:\n raise NameError(\"Cannot infer mode from arguments.\")\n\n print(\"Initializing model in %s mode.\" % self.__mode)\n\n if self.mode == \"train\":\n # Infer input type from type(x)\n if type(x[0]) == np.bytes_:\n print(\"Input type is 'binary mols'.\")\n self.__input_type = \"mols\" # binary RDKit mols\n else:\n print(\"Input type is 'molecular descriptors'.\")\n self.__input_type = \"descriptors\" # other molecular descriptors\n\n # If scaling is required\n if kwargs.get(\"scaling\", False) is True:\n # Normalize the input\n print(\"Applying scaling on input.\")\n self.__scaler = StandardScaler()\n x = self.__scaler.fit_transform(x)\n else:\n self.__scaler = None\n\n # If PCA is required\n if kwargs.get(\"pca\", False) is True:\n print(\"Applying PCA on input.\")\n self.__pca = PCA(\n n_components=x.shape[1]\n ) # n_components=n_features for now\n x = self.__pca.fit_transform(x)\n else:\n self.__pca = None\n\n self.__maxlen = (\n kwargs.get(\"dataset_info\")[\"maxlen\"] + 10\n ) # Extend maxlen to avoid breaks in training\n self.__charset = kwargs.get(\"dataset_info\")[\"charset\"]\n self.__dataset_name = kwargs.get(\"dataset_info\")[\"name\"]\n self.__lstm_dim = kwargs.get(\"lstm_dim\", 256)\n self.__h_activation = kwargs.get(\"h_activation\", \"relu\")\n self.__bn = kwargs.get(\"bn\", True)\n self.__bn_momentum = kwargs.get(\"bn_momentum\", 0.9)\n self.__noise_std = kwargs.get(\"noise_std\", 0.01)\n self.__td_dense_dim = kwargs.get(\n \"td_dense_dim\", 0\n ) # >0 squeezes RNN connections with Dense sandwiches\n self.__batch_size = kwargs.get(\"batch_size\", 256)\n self.__dec_layers = kwargs.get(\"dec_layers\", 2)\n\n if self.input_type == \"descriptors\":\n self.__codelayer_dim = x.shape[1] # features\n if \"codelayer_dim\" in kwargs:\n print(\n \"Ignoring requested codelayer_dim because it is inferred from the cardinality of the descriptors.\"\n )\n else:\n self.__codelayer_dim = kwargs.get(\"codelayer_dim\", 128)\n \n # Create the left/right-padding vectorizers\n self.__smilesvec1 = SmilesVectorizer(\n canonical=False,\n augment=True,\n maxlength=self.maxlen,\n charset=self.charset,\n binary=True,\n )\n\n self.__smilesvec2 = SmilesVectorizer(\n canonical=False,\n augment=True,\n maxlength=self.maxlen,\n charset=self.charset,\n binary=True,\n leftpad=False,\n )\n\n # self.train_gen.next() #This line is needed to set train_gen.dims (to be fixed in HetSmilesGenerator)\n self.__input_shape = self.smilesvec1.dims\n self.__dec_dims = list(self.smilesvec1.dims)\n self.__dec_dims[0] = self.dec_dims[0] - 1\n self.__dec_input_shape = self.dec_dims\n self.__output_len = self.smilesvec1.dims[0] - 1\n self.__output_dims = self.smilesvec1.dims[-1]\n\n # Build all sub-models as untrained models\n if self.input_type == \"mols\":\n self.__build_mol_to_latent_model()\n else:\n self.__mol_to_latent_model = None\n\n self.__build_latent_to_states_model()\n self.__build_batch_model()\n\n # Build data generators\n self.__build_generators(x, y)\n\n # Retrain or Test mode\n else:\n self.__model_name = kwargs.get(\"model_name\")\n\n # Load the model\n self.__load(self.model_name)\n \n if self.mode == \"retrain\":\n # If scaling is required\n if self.scaler is not None:\n 
print(\"Applying scaling on input.\")\n x = self.scaler.transform(x)\n\n # If PCA is required\n if self.pca is not None:\n print(\"Applying PCA on input.\")\n x = self.pca.transform(x)\n \n # Build data generators\n self.__build_generators(x, y)\n\n # Build full model out of the sub-models\n self.__build_model()\n\n # Show the resulting full model\n print(self.model.summary())",
"def setup_fixed_model():\n out = {}\n out['Q'] = Q\n out['X'] = X\n out['y'] = y\n\n return out",
"def __init__(self, **params):\n # Dimension of the true signal x\n self.N = params.get('N', 1024)\n\n # Dimension of the measurement vector y\n self.M = params.get('M', 256)\n\n # Number of timesteps\n self.T = params.get('T', 4)\n\n # Type of the random measurement matrix to generate\n # (1) : normalized Gaussian matrix\n self.A_type = params.get('A_type', 1)\n\n # Active support probability\n self.lambda_ = params.get('lambda_', 0.08) # high sparsity default\n\n # Amplitude mean\n self.zeta = params.get('zeta', 0)\n\n # Amplitude variance\n self.sigma2 = params.get('sigma2', 1)\n\n # Amplitude innovation rate\n self.alpha = params.get('alpha', 0.10)\n\n # Active-to-inactive transition probability\n self.p01 = params.get('p01', 0.10)\n\n # Desired signal-to-noise ratio, in dB\n self.desired_SNR = params.get('desired_SNR', 25)",
"def __init__(self, x0, N, s,\n h=np.array([0.5]), u=np.array([0]), v=np.array([0]),\n rec='Tay2'):\n\n # if you enter simple numbers, converting into numpy array\n if isinstance(x0, nb.Number):\n x0 = np.array([x0])\n if isinstance(N, nb.Number):\n N = np.array([N])\n if isinstance(s, nb.Number):\n s = np.array([s])\n if isinstance(h, nb.Number):\n h = np.array([h])\n if isinstance(u, nb.Number):\n u = np.array([u])\n if isinstance(v, nb.Number):\n v = np.array([v])\n\n # checking numpy array form for parameters\n if (not isinstance(x0, np.ndarray) or not isinstance(N, np.ndarray)\n or not isinstance(s, np.ndarray) or not isinstance(h, np.ndarray)\n or not isinstance(u, np.ndarray) or not isinstance(v, np.ndarray)):\n raise ValueError('each parameter must be a numpy array')\n\n # checking shape of arguments\n if (len(x0.shape) != 1 and len(N.shape) != 1 and len(s.shape) != 1\n and len(h.shape) != 1 and len(u.shape) != 1 and len(v.shape) != 1):\n raise ValueError('each parameter must be 1D numpy array')\n\n # setting the parameters corresponding attributes\n self.x0 = x0\n self.N = N\n self.s = s\n self.h = h\n self.u = u\n self.v = v\n\n # setting the model\n self.rec = rec\n self.app = ''\n \n # for the Terhorst method, we set some more attributes\n if rec == 'Ter':\n self.ter_err1 = np.zeros(shape=(len(x0), len(N), len(s), len(h),\n len(u), len(v)))\n self.ter_err2 = np.zeros(shape=(len(x0), len(N), len(s), len(h),\n len(u), len(v)))\n self.ter_det = x0[:, np.newaxis, np.newaxis, np.newaxis,\n np.newaxis, np.newaxis].copy()\n\n # setting the fitness function and it's derivatives\n self.fitness = []\n par_shape = (1, len(self.s), len(self.h), len(self.u), len(self.v))\n \n # setting the top of the fitness function : (1+sh)x + s(1-h)x²\n w0 = 1\n w1 = (1 + self.s[np.newaxis, :, np.newaxis, np.newaxis, np.newaxis]\n * self.h[np.newaxis, np.newaxis, :, np.newaxis, np.newaxis])\n w2 = 1 + self.s[np.newaxis, :, np.newaxis, np.newaxis, np.newaxis]\n w2 = np.repeat(w2, repeats = len(h), axis = 2)\n \n top = np.zeros(shape=(3, *par_shape))\n top[1] = w1\n tmpt2 = w2 - w1\n tmpt2[w2 == 0] = 0\n top[2] = tmpt2\n \n # setting the bot of the fitness function : 1 + 2shx + s(1-2h)x²\n bot = np.zeros(shape=(3, *par_shape))\n bot[0] = w0\n tmpb1 = 2 * w1 - 2 * w0\n tmpb1[w2 == 0] = 2 * w1[w2 == 0] - w0\n bot[1] = tmpb1\n tmpb2 = (self.s[np.newaxis, :, np.newaxis, np.newaxis, np.newaxis]\n * (1 - 2 * self.h[np.newaxis, np.newaxis, :,\n np.newaxis, np.newaxis]))\n tmpb2[w2 == 0] = 0\n bot[2] = tmpb2\n\n # with mutation, the fitness function is : (1-u)f(x) + v(1-f(x))\n full_top = ((1 - self.u[np.newaxis, np.newaxis, np.newaxis, np.newaxis,\n :, np.newaxis]) * top\n + self.v[np.newaxis, np.newaxis, np.newaxis, np.newaxis,\n np.newaxis, :] * (bot - top))\n\n ### fix cases when fitness params = 0\n\n # setting the fitness function and it's derivatives\n self.fitness.append(\n rf.RationalFraction(rf.Polynomial(full_top), rf.Polynomial(bot)))\n self.fitness.append(self.fitness[0].derive())\n self.fitness.append(self.fitness[1].derive())\n\n # initialising moments\n self.moments = np.zeros(shape=(2, 1, len(x0), len(N), len(s), len(h),\n len(u), len(v)))\n self.moments[0, 0, ] = x0[:, np.newaxis, np.newaxis, np.newaxis,\n np.newaxis, np.newaxis].copy()\n\n # initialising fixation probabilities\n self.fix_proba = np.zeros(shape = (2, 2, len(x0), len(N), len(s),\n len(h), len(u), len(v)))\n for i, x in enumerate(x0):\n if x == 1:\n self.fix_proba[0, 0, i, ] = 0\n self.fix_proba[0, 1, i, ] = 1\n elif x == 0:\n 
self.fix_proba[0, 0, i, ] = 1\n self.fix_proba[0, 1, i, ] = 0 \n self.fix_proba[1, 1, ] = (\n self.fitness[0](x0[:, np.newaxis, np.newaxis,\n np.newaxis, np.newaxis,\n np.newaxis]) ** self.N[np.newaxis, :, np.newaxis,\n np.newaxis, np.newaxis,\n np.newaxis])\n self.fix_proba[0, 1, ] = (\n (1 - self.fitness[0](x0[:, np.newaxis, np.newaxis,\n np.newaxis, np.newaxis,\n np.newaxis])) ** self.N[np.newaxis, :,\n np.newaxis,\n np.newaxis,\n np.newaxis,\n np.newaxis])",
"def init_model(n_factors, n_dates, n_tickers):\n date = tf.keras.Input((1,), name=\"date\", dtype=\"int32\")\n ticker = tf.keras.Input((1,), name=\"ticker\", dtype=\"int32\")\n\n # learnable table of date -> factor returns\n date_embedded = tf.keras.layers.Embedding(\n n_dates, n_factors, name=\"date_embedding\"\n )(date)\n\n # learnable table of ticker -> factor loadings\n ticker_embedded = tf.keras.layers.Embedding(\n n_tickers, n_factors, name=\"ticker_embedding\"\n )(ticker)\n\n pred = tf.keras.layers.Reshape((1,))(\n tf.keras.layers.Dot(axes=-1)([date_embedded, ticker_embedded])\n )\n\n model = tf.keras.Model(inputs=[date, ticker], outputs=pred)\n model.compile(\"Adagrad\", \"mse\")\n return model",
"def __init__(self):\n self.name = \"Kursawe\"\n objectives = [o_ku_1, o_ku_2]\n decisions = [Decision(-5, 5), Decision(-5, 5), Decision(-5, 5)]\n Model.__init__(self, objectives, None, decisions)",
"def __init__(self, *args):\n this = _libsbml.new_Model(*args)\n try: self.this.append(this)\n except: self.this = this",
"def __init__(self, data_model: DataModel) -> None:\n\n # DataModel\n self._data_model = data_model\n\n # Dict[ParameterName, Any]\n self._param_to_value: Dict[ParameterName, Any] = {}\n\n # Dict[ParameterName, Dict[ParameterName, Any]]\n self._numbered_objects: Dict[ParameterName, Dict[ParameterName, Any]] = {}\n # If adding a PLMN object, then you would set something like\n # self._numbered_objects['PLMN_1'] = {'PLMN_1_ENABLED': True}",
"def create_model(self):\n pass",
"def create_model(self):\n pass",
"def __init__(self, no_peds=0, peds_topics=[], num_s_samples=1, objs=None):\n self.global_ts = -1\n self.PEDESTRIAN_TOPICS = peds_topics\n\n self.drone = DroneModel(num_s_samples)\n self.subject = SubjectModel(num_s_samples)\n \n if no_peds > 0:\n self.peds = {\n str(i): PedestrianModel(num_s_samples) for i in range(no_peds)\n }\n else:\n self.peds = None\n \n self.objs = objs"
] | [
"0.5854473",
"0.58154035",
"0.5803041",
"0.57467854",
"0.57412326",
"0.5697944",
"0.5679864",
"0.5657684",
"0.5645403",
"0.5603807",
"0.5595166",
"0.5580034",
"0.5574634",
"0.5568534",
"0.5564313",
"0.5545272",
"0.5534687",
"0.5534448",
"0.5525057",
"0.55246973",
"0.54878163",
"0.54186904",
"0.54039717",
"0.5380283",
"0.5367917",
"0.5361849",
"0.53607523",
"0.5356448",
"0.5356448",
"0.53474164"
] | 0.64592767 | 0 |
Get ACIS Focal Plane limits based on SIM position. This function uses SIM position to assign temperature limits. | def get_acis_fp_limits(states, acis_s=-111.0, acis_i=-112.0):
states_acis_limits = np.zeros(len(states)) + 20 # Set limit initially to 20C
ind_acis_s = (states['simpos'] > 0) & (states['simpos'] < 80669) & (states['clocking'] == 1) & (
states['vid_board'] == 1)
ind_acis_i = (states['simpos'] > 83826) & (states['clocking'] == 1) & (states['vid_board'] == 1)
states_acis_limits[ind_acis_s] = acis_s
states_acis_limits[ind_acis_i] = acis_i
return states_acis_limits | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_car_limits(self):\n # car's limits\n min_turning_radius = self.wheelbase/np.tan(self.max_steering_angle)\n max_angular_velocity = self.speed/min_turning_radius\n max_car = np.array([0, self.speed, max_angular_velocity])\n\n return max_car",
"def get_acis_limits(states):\n\n states_acis_fp_limits = get_acis_fp_limits(states, acis_s=-111.0, acis_i=-112.0)\n ind_old = states['tstart'] <= DateTime('2019:172:00:00:00').secs\n\n states_acis_fp_limits[(states_acis_fp_limits == -112.0) & ind_old] = -114.0\n states_acis_fp_limits[(states_acis_fp_limits == -111.0) & ind_old] = -112.0\n\n states_acis_dpa_limits = np.zeros(len(states))\n ind_old = states['tstart'] <= DateTime('2019:172:00:00:00').secs\n ind_new = states['tstart'] > DateTime('2019:172:00:00:00').secs\n states_acis_dpa_limits[ind_old] = 36.5\n states_acis_dpa_limits[ind_new] = 37.5\n\n states_acis_dea_limits = np.zeros(len(states))\n ind_old = states['tstart'] <= DateTime('2019:311:20:00:00').secs\n ind_new = states['tstart'] > DateTime('2019:311:20:00:00').secs\n states_acis_dea_limits[ind_old] = 35.5\n states_acis_dea_limits[ind_new] = 36.5\n # states_acis_dea_limits = np.zeros(len(states)) + 35.5\n\n states_acis_psmc_limits = np.zeros(len(states)) + 52.5\n\n limits = {'fptemp': states_acis_fp_limits,\n '1dpamzt': states_acis_dpa_limits,\n '1deamzt': states_acis_dea_limits,\n '1pdeaat': states_acis_psmc_limits}\n\n return limits",
"def get_zlim(self):\n if isinstance(self._frame, root.TH2F):\n return (self._frame.GetMinimum(), self._frame.GetMaximum())\n else:\n return (self._frame.GetZaxis().GetXmin(), self._frame.GetZaxis().GetXmax())",
"def getLimits(self):\n lims = [x * self.getSign() + self.getOffset() for x in (self.connection.getChannel(self.chanNamePrefix % 'low_limit').read(), \\\n self.connection.getChannel(self.chanNamePrefix % 'high_limit').read())]\n return (min(lims), max(lims))",
"def GetAvionicsServoLimits():\n sys_conf = system_config.SystemConfig.GetSystemConfigBySerial(\n _CONFIG['system']['wing_serial'])\n config_file = os.path.join(makani.HOME,\n 'avionics/servo/firmware/config_params.yaml')\n net_conf = network_config.NetworkConfig()\n\n yaml_keys = [sys_conf.config[net_conf.GetAioNode('servo_%s' % s.lower())]\n for s in _SERVOS]\n limits = [codec.DecodeYamlFile(config_file, key) for key in yaml_keys]\n return {_SERVOS[i]: (limits[i].servo_min_limit, limits[i].servo_max_limit)\n for i in range(len(_SERVOS))}",
"def findScalingCoeffsLimiter(self, PFC, lqCN, lqCF):\n # Get R and Z vectors at the midplane\n# R_omp_sol = PFC.ep.g['lcfs'][:,0].max()\n R_omp_sol = self.map_R_psi(1.0,PFC)\n R_omp_min = R_omp_sol #this is a limited discharge so Rmin = Rlcfs\n if lqCN > lqCF:\n lqMax = lqCN\n else:\n lqMax = lqCF\n R_omp_max = R_omp_sol + 20.0*lqMax #already in m\n #if R_omp_max is outside EFIT grid, cap at maximum R of grid\n if R_omp_max > max(PFC.ep.g['R']):\n R_omp_max = max(PFC.ep.g['R']) #in meters now\n R_omp = np.linspace(R_omp_min, R_omp_max, 1000)\n Z_omp = np.zeros(R_omp.shape)\n\n # Evaluate B at outboard midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n\n #Find coordinate transformation vector at midplane\n psiaxis = PFC.ep.g['psiAxis']\n psiedge = PFC.ep.g['psiSep']\n deltaPsi = np.abs(psiedge - psiaxis)\n gradPsi = Bp_omp*R_omp\n xfm = gradPsi / deltaPsi\n\n # transform hf width into flux space\n lqCN_hat = lqCN*xfm\n lqCF_hat = lqCF*xfm\n\n\n #Calculate flux at midplane using gfile\n psiN = PFC.ep.psiFunc.ev(R_omp,Z_omp)\n psi = psiN*(psiedge - psiaxis) + psiaxis\n PFC.psiMinLCFS = PFC.ep.psiFunc.ev(R_omp_sol,0.0)\n s_hat = psiN - PFC.psiMinLCFS\n\n\n print('psiMinLCFS: {:f}'.format(PFC.psiMinLCFS))\n# print('un-normalized psiMinLCFS: {:f}'.format(PFC.ep.psiFunc_noN.ev(R_omp_sol,0.0)))\n print('Minimum s_hat: {:f}'.format(s_hat.min()))\n\n\n #integral in flux space\n qCN_hat = np.exp(-s_hat / lqCN_hat)\n qCF_hat = np.exp(-s_hat / lqCF_hat)\n\n #note: simps integration will fail if x variable (psi) is not monotonic\n #reinke method\n #intCN = simps(qCN_hat / B_omp, psi)\n #intCF = simps(qCF_hat / B_omp, psi)\n #menard method\n intCN = simps(qCN_hat, psi)\n intCF = simps(qCF_hat, psi)\n\n P0 = 2*np.pi * (intCN*self.fracCN + intCF*self.fracCF)\n #account for nonphysical power\n if P0 < 0: P0 = -P0\n #Scale to input power\n q0 = self.Psol/P0\n\n #old method left for reference\n #q0 = (self.Psol/(2*np.pi)) / (intCN*self.fracCN + intCF*self.fracCF)\n\n return q0",
"def magnet_limits(self):\n max_currents = self.pv_monitor.get_max_currents()\n\n strengths = [np.array([max_currents[0],\n -max_currents[1],\n max_currents[2], 0, 0]),\n np.array([0, 0, max_currents[2],\n -max_currents[3],\n max_currents[4]])]\n\n edges = [[], []]\n for s in range(2):\n edges[s] = np.array(self.straight.p_beam_lim(strengths[s])\n )[:, [0, 2]]\n\n beam1max = edges[0][0]\n beam2max = edges[1][1]\n\n self.ax.plot(self.straight.data.photon_coordinates[0],\n beam1max, 'r--')\n self.ax.plot(self.straight.data.photon_coordinates[1],\n beam2max, 'r--')",
"def update_limits(self):\n if len(self) == 0:\n self.limits = np.array([[0.0, 0.0], [0.0, 0.0]])\n else:\n x_min, x_max = self.buf[self.rear][0], self.buf[self.front][0]\n y_min, y_max = self.slmm.get_minmax()\n self.limits = np.array([[x_min, y_min], [x_max, y_max]])",
"def feeding_zone_RC2014(a_sys):\n a_bounds_sys = np.zeros(len(a_sys)+1) # bounds in semi-major axis for each planet\n a_bounds_sys[1:-1] = np.sqrt(a_sys[:-1]*a_sys[1:]) # geometric means between planets\n a_bounds_sys[0] = a_sys[0]*np.sqrt(a_sys[0]/a_sys[1]) # same ratio for upper bound to a_sys[1] as a_sys[1] to lower bound\n a_bounds_sys[-1] = a_sys[-1]*np.sqrt(a_sys[-1]/a_sys[-2]) # same ratio for upper bound to a_sys[-1] as a_sys[-1] to lower bound\n delta_a_sys = np.diff(a_bounds_sys)\n return delta_a_sys, a_bounds_sys",
"def find_velocity_limits(self, twist_cone, dist):\n\n # car's limits\n max_car = self.get_car_limits()\n\n # pushing limits\n min_ls, max_ls = self.pushing_limits(twist_cone)\n\n return max_car, min_ls, max_ls",
"def acceleration_limit(self):\n return self._read(MX_ACCELERATION_LIMIT)",
"def get_velocity_limits(robot):\n return _get_limits(robot, \"Velocity\")",
"def get_acceleration_limits(robot):\n return _get_limits(robot, \"Accel\")",
"def control_limits(self) -> Tuple[torch.Tensor, torch.Tensor]:\n # define upper and lower limits based around the nominal equilibrium input\n # These are relaxed for now, but eventually they should be measured on hardware\n upper_limit = torch.ones(self.n_controls)\n upper_limit[TurtleBot2D.V] = 2.0\n upper_limit[TurtleBot2D.THETA_DOT] = 6.0 * np.pi\n\n lower_limit = torch.ones(self.n_controls)\n lower_limit[TurtleBot2D.V] = 0.0\n lower_limit[TurtleBot2D.THETA_DOT] = -6.0 * np.pi\n\n return (upper_limit, lower_limit)",
"def get_min_max(self):\n\n mr = np.sqrt(2 * np.log(1/self.mth)) * self.ms\n mr[:] = np.max(mr)\n\n mxmin = self.mx - mr\n mxmax = self.mx + mr\n mymin = self.my - mr\n mymax = self.my + mr\n mzmin = self.mz - mr\n mzmax = self.mz + mr\n\n mb_xmin_idx = np.argmin(mxmin[self.ma > 0])\n mb_xmax_idx = np.argmax(mxmax[self.ma > 0])\n mb_ymin_idx = np.argmin(mymin[self.ma > 0])\n mb_ymax_idx = np.argmax(mymax[self.ma > 0])\n mb_zmin_idx = np.argmin(mzmin[self.ma > 0])\n mb_zmax_idx = np.argmax(mzmax[self.ma > 0])\n\n xmin0 = self.mx[mb_xmin_idx] - mr[mb_xmin_idx]\n xmax0 = self.mx[mb_xmax_idx] + mr[mb_xmax_idx]\n ymin0 = self.my[mb_ymin_idx] - mr[mb_ymin_idx]\n ymax0 = self.my[mb_ymax_idx] + mr[mb_ymax_idx]\n zmin0 = self.mz[mb_zmin_idx] - mr[mb_zmin_idx]\n zmax0 = self.mz[mb_zmax_idx] + mr[mb_zmax_idx]\n\n xmin = xmin0 - (xmax0 - xmin0) * 0.25\n xmax = xmax0 + (xmax0 - xmin0) * 0.25\n ymin = ymin0 - (ymax0 - ymin0) * 0.25\n ymax = ymax0 + (ymax0 - ymin0) * 0.25\n zmin = zmin0 - (zmax0 - zmin0) * 0.25\n zmax = zmax0 + (zmax0 - zmin0) * 0.25\n\n return xmin, xmax, ymin, ymax, zmin, zmax",
"def get_lmax_limit(self):\n\n if self.pixel == \"HEALPIX\":\n l_max_limit = 3 * self.nside - 1\n elif self.pixel == \"CAR\":\n cdelt = self.data.wcs.wcs.cdelt[1]\n l_max_limit = 360 / cdelt / 4\n return l_max_limit",
"def getTranslation(fracs):\n \n \n \n # Determine whether the shift needs to be from inf to 0 \n # or from -inf to 0\n \n # Along all x fractionals\n if abs(max(fracs[0]))>=abs(min(fracs[0])):\n minX = min([x for x in fracs[0] if x>0])\n else:\n minX = min([x for x in fracs[0] if x<0])\n \n # Along all y fractionals\n if abs(max(fracs[1]))>=abs(min(fracs[1])):\n minY = min([x for x in fracs[1] if x>0])\n else:\n minY = min([x for x in fracs[1] if x<0])\n \n # Along all z fractionals\n # Need to consider all atoms lying in a single\n # plane (e.g. graphene), thus the final \"else\"\n # statement\n if abs(max(fracs[2]))>abs(min(fracs[2])):\n minZ = min([x for x in fracs[2] if x>0])\n elif abs(max(fracs[2]))<abs(min(fracs[2])):\n minZ = min([x for x in fracs[2] if x<0])\n else:\n minZ = max(fracs[2])\n\n shift_vector = np.array([minX,minY,minZ])\n \n return(shift_vector)",
"def limits(self):\n\n\t\treturn [\n\t\t\tmin(self.xvalues),\n\t\t\tmax(self.xvalues),\n\t\t\tmin(self.yvalues),\n\t\t\tmax(self.yvalues)]",
"def get_roi_limits(self):\n wdet,hdet=self.get_detector_size()\n hlims,vlims=self._get_roi_limits()\n hbin,vbin=self._truncate_roi_binning(wdet,hdet)\n min_roi=(0,0,hlims[0],vlims[0],1,1)\n max_roi=(wdet-hlims[0],wdet-vlims[0],wdet,hdet,hbin,vbin)\n return (min_roi,max_roi)",
"def calculate_leg_xy_limits(self, visualize=False):\n \n #Find the fixed plate position at the \"0\" point\n gonio_zero = copy.copy(self)\n gonio_zero.relative_sample_position = column([0.0, 0.0, 0.0]) #Tell the sample to be centered well.\n gonio_zero.getplatepos(0.0, 0.0, 0.0)\n fixed_plate_zero = np.copy(gonio_zero.fixed_plate)\n #This defines the center of the following matrices\n self.fixed_plate_zero = fixed_plate_zero\n \n #Now we generate a matrix of allowed positions around those points.\n self.leg_safe_xaxis = np.arange(-self.travel, self.travel, self.leg_safe_resolution)\n self.leg_safe_zaxis = np.copy(self.leg_safe_xaxis)\n\n #Create the \"safe zone\" array, initialized to False\n self.leg_safe_zone = np.zeros( (3, self.leg_safe_xaxis.size, self.leg_safe_zaxis.size), dtype=bool ) \n\n #Now make a reasonable approximation\n real_travel_x = 12.5\n real_travel_z = real_travel_x\n for leg in range(3):\n for i_x in range(self.leg_safe_xaxis.size):\n x = self.leg_safe_xaxis[i_x]\n if abs(x)<real_travel_x:\n for i_z in range(self.leg_safe_zaxis.size):\n z = self.leg_safe_zaxis[i_z]\n if abs(z)<real_travel_z:\n self.leg_safe_zone[leg, i_x, i_z] = True\n# #Upper left corner of leg A (0)\n# center = int(len(self.leg_safe_xaxis)/2)\n# self.leg_safe_zone[0, :, :] = False\n# self.leg_safe_zone[0, :center, :center] = True\n# self.leg_safe_zone[1, :, :] = False\n# self.leg_safe_zone[1, center:, 0:center] = True\n# self.leg_safe_zone[2, :, :center] = False\n\n\n if visualize:\n pylab.figure(0)\n pylab.hold(True)\n for leg in range(3):\n pylab.pcolor(self.leg_safe_xaxis+fixed_plate_zero[COORD_X, leg], self.leg_safe_zaxis+fixed_plate_zero[COORD_Z, leg], self.leg_safe_zone[leg, :, :].transpose())\n pylab.xlabel(\"x\")\n pylab.ylabel(\"z\")\n pylab.title(\"Allowable XZ leg positions for the 3 legs.\")\n pylab.draw()\n pylab.axis('equal')\n #pylab.show()",
"def findAxialSegmentationLimitFromMarker(self):\n #productive \n profprint()\n asl=0\n try:\n nodes = slicer.util.getNodes('template slice position*')\n found=False\n for node in nodes.values():\n coord = [0,0,0]\n node.GetFiducialCoordinates(coord)\n asl=int(round(self.ras2ijk(coord)[2]))\n print \"limit marker found in scene, z-limit [ras]: \",coord[2]\n if found:\n print \"/!\\ there should be only one limit marker!\"\n found = True\n except:\n print \"/!\\ no z-limit marker in scene (required)!\"\n msgbox(\"/!\\ no z-limit marker in scene (required)!\")\n return asl",
"def _flux_limit(self, omega=0, eps_mag=0, u_q_max=0.0, u_rq_max=0.0):\n mp = self.motor_parameter\n l_s = mp['l_m'] + mp['l_sigs']\n l_r = mp['l_m'] + mp['l_sigr']\n l_mr = mp['l_m'] / l_r\n sigma = (l_s * l_r - mp['l_m'] ** 2) / (l_s * l_r)\n # limiting flux for a low omega\n if omega == 0:\n psi_d_max = mp['l_m'] * self._nominal_values['i_sd']\n else:\n i_d, i_q = self.q_inv([self._initial_states['i_salpha'],\n self._initial_states['i_sbeta']],\n eps_mag)\n psi_d_max = mp['p'] * omega * sigma * l_s * i_d + \\\n (mp['r_s'] + mp['r_r'] * l_mr**2) * i_q + \\\n u_q_max + \\\n l_mr * u_rq_max\n psi_d_max /= - mp['p'] * omega * l_mr\n # clipping flux and setting nominal limit\n psi_d_max = 0.9 * np.clip(psi_d_max, a_min=0, a_max=np.abs(mp['l_m'] * i_d))\n # returning flux in alpha, beta system\n return self.q([psi_d_max, 0], eps_mag)",
"def max_position_limit(self):\n return self._read(MX_MAX_POSITION_LIMIT)",
"def get_bounds():\n return [0.00], [1.00]",
"def get_limits(self, device,percent=0.25):\n\t\tval = epics.caget(device)\n tol = (val*percent)\n lim_lo = val-tol\n lim_hi = val+tol\n limits = [lim_lo,lim_hi]\n\t\treturn limits",
"def action_space(self):\n lower_bounds = np.array([])\n upper_bounds = np.array([])\n for joint in self._used_joints:\n joint_idx = self._joint_limits.joint_names.index(joint)\n if self._control_mode == 'position':\n lower_bounds = np.concatenate(\n (lower_bounds,\n np.array(self._joint_limits.position_lower[\n joint_idx:joint_idx + 1])))\n upper_bounds = np.concatenate(\n (upper_bounds,\n np.array(self._joint_limits.position_upper[\n joint_idx:joint_idx + 1])))\n elif self._control_mode == 'velocity':\n velocity_limit = np.array(\n self._joint_limits.velocity[joint_idx:joint_idx + 1]) * 0.1\n lower_bounds = np.concatenate((lower_bounds, -velocity_limit))\n upper_bounds = np.concatenate((upper_bounds, velocity_limit))\n elif self._control_mode == 'effort':\n effort_limit = np.array(\n self._joint_limits.effort[joint_idx:joint_idx + 1])\n lower_bounds = np.concatenate((lower_bounds, -effort_limit))\n upper_bounds = np.concatenate((upper_bounds, effort_limit))\n else:\n raise ValueError(\n 'Control mode %s is not known!' % self._control_mode)\n return gym.spaces.Box(\n np.concatenate((lower_bounds, np.array([0]))),\n np.concatenate((upper_bounds, np.array([100]))),\n dtype=np.float32)",
"def intervention_limits(self) -> Tuple[torch.Tensor, torch.Tensor]:\n upper_limit = torch.ones(self.n_controls)\n upper_limit[TurtleBot2D.V] = 1.0\n upper_limit[TurtleBot2D.THETA_DOT] = 2.0 * np.pi\n\n lower_limit = torch.ones(self.n_controls)\n lower_limit[TurtleBot2D.V] = -1.0\n lower_limit[TurtleBot2D.THETA_DOT] = -2.0 * np.pi\n\n return (upper_limit, lower_limit)",
"def get_limits(self, lim_type):\n if lim_type == 'data':\n # data limits\n return np.asarray(self.limits)\n\n # plot limits\n self.path.crdmap = self.crdmap\n if len(self.path.points) > 0:\n llur = self.path.get_llur()\n llur = [llur[0:2], llur[2:4]]\n else:\n llur = [(0.0, 0.0), (0.0, 0.0)]\n return np.asarray(llur)",
"def _get_limit_func(self, axis):\n limits = self.motion_limits[axis.lower()]\n limits = limits['lower'], limits['upper']\n\n def limit_func(target):\n return max(min(target, limits[1]), limits[0])\n return limit_func, limits",
"def velocity_limit(self):\n return self._read(MX_VELOCITY_LIMIT)"
] | [
"0.57339364",
"0.57016706",
"0.56508887",
"0.5607178",
"0.55084836",
"0.5506954",
"0.5452533",
"0.5426434",
"0.53256893",
"0.53029037",
"0.52605385",
"0.5238085",
"0.52323025",
"0.5195507",
"0.51862",
"0.51857567",
"0.5135426",
"0.5093461",
"0.5084093",
"0.5070832",
"0.5067965",
"0.5062848",
"0.5056854",
"0.5054626",
"0.50437",
"0.50412625",
"0.5038862",
"0.50322217",
"0.5027994",
"0.50253457"
] | 0.5708714 | 1 |
Get ACIS DPA, DEA, PSMC limits. This function is intended to be temporary, to facilitate long term comparison of additional chip opportunities in past schedules. | def get_acis_limits(states):
states_acis_fp_limits = get_acis_fp_limits(states, acis_s=-111.0, acis_i=-112.0)
ind_old = states['tstart'] <= DateTime('2019:172:00:00:00').secs
states_acis_fp_limits[(states_acis_fp_limits == -112.0) & ind_old] = -114.0
states_acis_fp_limits[(states_acis_fp_limits == -111.0) & ind_old] = -112.0
states_acis_dpa_limits = np.zeros(len(states))
ind_old = states['tstart'] <= DateTime('2019:172:00:00:00').secs
ind_new = states['tstart'] > DateTime('2019:172:00:00:00').secs
states_acis_dpa_limits[ind_old] = 36.5
states_acis_dpa_limits[ind_new] = 37.5
states_acis_dea_limits = np.zeros(len(states))
ind_old = states['tstart'] <= DateTime('2019:311:20:00:00').secs
ind_new = states['tstart'] > DateTime('2019:311:20:00:00').secs
states_acis_dea_limits[ind_old] = 35.5
states_acis_dea_limits[ind_new] = 36.5
# states_acis_dea_limits = np.zeros(len(states)) + 35.5
states_acis_psmc_limits = np.zeros(len(states)) + 52.5
limits = {'fptemp': states_acis_fp_limits,
'1dpamzt': states_acis_dpa_limits,
'1deamzt': states_acis_dea_limits,
'1pdeaat': states_acis_psmc_limits}
return limits | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getLimits(self):\n lims = [x * self.getSign() + self.getOffset() for x in (self.connection.getChannel(self.chanNamePrefix % 'low_limit').read(), \\\n self.connection.getChannel(self.chanNamePrefix % 'high_limit').read())]\n return (min(lims), max(lims))",
"def limits(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigLinkConfigPrimaryLimits']:\n return pulumi.get(self, \"limits\")",
"def get_limits(self):\n return self._get(limits.Limits)",
"def get_acis_fp_limits(states, acis_s=-111.0, acis_i=-112.0):\n\n states_acis_limits = np.zeros(len(states)) + 20 # Set limit initially to 20C\n ind_acis_s = (states['simpos'] > 0) & (states['simpos'] < 80669) & (states['clocking'] == 1) & (\n states['vid_board'] == 1)\n ind_acis_i = (states['simpos'] > 83826) & (states['clocking'] == 1) & (states['vid_board'] == 1)\n states_acis_limits[ind_acis_s] = acis_s\n states_acis_limits[ind_acis_i] = acis_i\n\n return states_acis_limits",
"def get_limits(self):\n raise NotImplementedError(\"Limits are not available for Cloud Databases\")",
"def limits(self):\n return self._limits",
"def getLimits():\n return [Limit(limit) for limit in Cuebot.getStub('limit').GetAll(\n limit_pb2.LimitGetAllRequest(), timeout=Cuebot.Timeout).limits]",
"def limits(self) -> Optional['outputs.PreventionInspectTemplateInspectConfigLimits']:\n return pulumi.get(self, \"limits\")",
"def get_limits(self):\n return self._get('app_limits')",
"def GetAvionicsServoLimits():\n sys_conf = system_config.SystemConfig.GetSystemConfigBySerial(\n _CONFIG['system']['wing_serial'])\n config_file = os.path.join(makani.HOME,\n 'avionics/servo/firmware/config_params.yaml')\n net_conf = network_config.NetworkConfig()\n\n yaml_keys = [sys_conf.config[net_conf.GetAioNode('servo_%s' % s.lower())]\n for s in _SERVOS]\n limits = [codec.DecodeYamlFile(config_file, key) for key in yaml_keys]\n return {_SERVOS[i]: (limits[i].servo_min_limit, limits[i].servo_max_limit)\n for i in range(len(_SERVOS))}",
"def limits(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigStandAloneConfigManagementServersLimits']:\n return pulumi.get(self, \"limits\")",
"def get_limits(self, device,percent=0.25):\n\t\tval = epics.caget(device)\n tol = (val*percent)\n lim_lo = val-tol\n lim_hi = val+tol\n limits = [lim_lo,lim_hi]\n\t\treturn limits",
"def process_limits(self):\n url = \"/api/investigate/v1/orgs/{}/processes/limits\".format(\n self.credentials.org_key\n )\n return self.get_object(url)",
"def max_voltage_limit(self):\n return self._read(MX_MAX_VOLTAGE_LIMIT)",
"def getRange(self, c, name):\n self.validateChannel( name )\n limits = self.d[name].limits\n return limits",
"def getPTLimits(*args):\n return args[0].Limit.PTLimit.pt_limit",
"def _get_prod_bounds(self, comp):\n cap_res = comp.get_capacity_var() # name of resource that defines capacity\n maximum = comp.get_capacity(None, None, None, None)[0][cap_res]\n # TODO minimum!\n # producing or consuming the defining resource?\n if maximum > 0:\n return 0, maximum, pyo.NonNegativeReals\n else:\n return maximum, 0, pyo.NonPositiveReals",
"def MaxLSPsOrMGroupPDUsPerBurst(self):\r\n\t\treturn self._get_attribute('maxLSPsOrMGroupPDUsPerBurst')",
"def magnet_limits(self):\n max_currents = self.pv_monitor.get_max_currents()\n\n strengths = [np.array([max_currents[0],\n -max_currents[1],\n max_currents[2], 0, 0]),\n np.array([0, 0, max_currents[2],\n -max_currents[3],\n max_currents[4]])]\n\n edges = [[], []]\n for s in range(2):\n edges[s] = np.array(self.straight.p_beam_lim(strengths[s])\n )[:, [0, 2]]\n\n beam1max = edges[0][0]\n beam2max = edges[1][1]\n\n self.ax.plot(self.straight.data.photon_coordinates[0],\n beam1max, 'r--')\n self.ax.plot(self.straight.data.photon_coordinates[1],\n beam2max, 'r--')",
"def resource_limits(self) -> Optional[pulumi.Input['OceanAutoscalerResourceLimitsArgs']]:\n return pulumi.get(self, \"resource_limits\")",
"def limits(self) -> Optional['outputs.CSIPowerMaxRevProxySpecConfigLinkConfigBackupLimits']:\n return pulumi.get(self, \"limits\")",
"def get_max_gains(self):\n return tuple([lib.is_SetHWGainFactor(self.hcam,0x800c+i,100)/100 for i in range(4)])",
"def get_limits(self, lim_type):\n if lim_type == 'data':\n # data limits\n return np.asarray(self.limits)\n\n # plot limits\n self.path.crdmap = self.crdmap\n if len(self.path.points) > 0:\n llur = self.path.get_llur()\n llur = [llur[0:2], llur[2:4]]\n else:\n llur = [(0.0, 0.0), (0.0, 0.0)]\n return np.asarray(llur)",
"def get_car_limits(self):\n # car's limits\n min_turning_radius = self.wheelbase/np.tan(self.max_steering_angle)\n max_angular_velocity = self.speed/min_turning_radius\n max_car = np.array([0, self.speed, max_angular_velocity])\n\n return max_car",
"def get_acceleration_limits(robot):\n return _get_limits(robot, \"Accel\")",
"def _get_colorbar_limits(self):\n if self.boundaries is not None:\n C = self.boundaries\n if self.extend in [\"min\", \"both\"]:\n C = C[1:]\n\n if self.extend in [\"max\", \"both\"]:\n C = C[:-1]\n return min(C), max(C)\n else:\n return self.get_clim()",
"def resource_limits(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceLimitArgs']]]]:\n return pulumi.get(self, \"resource_limits\")",
"def _get_limits(robot, limit_type):\n target_ctrl_path = get_target_ctrl_path(robot)\n\n limits = {}\n\n # Check if the rig has attributes for the input limit type\n # If not, add the corresponding limit attributes\n # This is mostly used for backwards-compatibility\n\n if not pm.attributeQuery('axis{}Limits'.format(limit_type),\n n=target_ctrl_path, ex=True):\n add_limits_to_robot(robot, limit_type)\n\n # HARD CODED - Number of robot axes; should include external axes\n num_axes = 6\n\n # Create a list of robot's limits\n for i in range(num_axes):\n axis_number = i + 1 # Axis numbers are 1-indexed\n axis_name = 'Axis {}'.format(axis_number)\n limits[axis_name] = {'Min Limit': None, 'Max Limit': None}\n\n try:\n limit = pm.getAttr(target_ctrl_path + '.axis{}' \\\n '{}Limit'.format(axis_number, limit_type))\n except AttributeError:\n limit = None\n\n if limit:\n limits[axis_name] = {'Min Limit': -limit,\n 'Max Limit': limit}\n \n # TO-DO: Add external axes\n return limits",
"def get_reserved_mem_limit(self):\n\t\treturn call_sdk_function('PrlDispCfg_GetReservedMemLimit', self.handle)",
"def get_limits(self):\n return np.copy(self.limits)"
] | [
"0.6717213",
"0.6433776",
"0.6427651",
"0.6422575",
"0.64155334",
"0.64044297",
"0.62778854",
"0.62398833",
"0.6198074",
"0.6138675",
"0.61210334",
"0.6086518",
"0.6062708",
"0.60539",
"0.6016216",
"0.5981716",
"0.59542805",
"0.59519047",
"0.5940754",
"0.5940452",
"0.5931553",
"0.5911943",
"0.59073967",
"0.58577245",
"0.58165044",
"0.57836455",
"0.5768378",
"0.5733488",
"0.5723307",
"0.56801665"
] | 0.67807853 | 0 |
Get maximum data for each dwell as defined in state_data in the Xija model object `model`. | def get_max_dwell_mvals(model, state_data):
dwell_results = []
for ind in range(len(state_data)):
ind_dwell = (model.times >= state_data['tstart'][ind]) & (model.times <= state_data['tstop'][ind])
if np.any(ind_dwell):
dwell_results.append(np.max(model.mvals[ind_dwell]))
else:
dwell_results.append(-1.0e6)
return tuple(dwell_results) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_state_observed_max(self):\n maxValues = numpy.zeros(self.get_num_variables())\n i = 0\n for v in self.variables:\n maxValues[i] = v.get_max_value()\n i += 1\n return maxValues",
"def Max(data):\n return data.max()",
"def _get_max_sampled_bandit(self)->Bandit:\n estimates = []\n for bandit in self.bandits:\n Qth = np.random.normal(loc =self.mu[bandit.id], scale = self.var[bandit.id])\n f_hat = self.mu[bandit.id]#computing moving_average here \n estimates.append(max(Qth, f_hat))\n return self.bandits[np.argmax(estimates)]",
"def get_longest_state(data):\n return max(data, key=len)",
"def get_highest_value_action(self, state):\n a = self.sess.run(self.network.maxOutputNode, feed_dict={self.network.inputs: [state]})\n return a[0]",
"def max(self):\n if self._mesh.is_1d():\n ind = 1\n elif self._mesh.is_2d():\n ind = 2\n else:\n if self._logger:\n self._logger.error(\"mesh dimension not implemented\")\n raise NotImplementedError(\"mesh dimension not implemented\")\n\n def __map(m):\n return m[ind]\n\n return self.data.map(\n __map\n ).max()",
"def state_max(self) -> float:\n raise NotImplementedError",
"def get_max_depth_val():\n data = SUNRGBDTrainDataset(True)\n return max([data[0][i][-1].flatten().item() for i in range(len(data))])",
"def get_gridpoint_max(self):\n ind_array = np.indices(self.results_array.shape)\n maxes = []\n\n def get_max(x, y, z):\n \"\"\"\n Would be funnier if I knew a Max.\n \"\"\"\n if isinstance(self.results_array[x][y][z], tuple):\n num_zeros = self.tup_max_length - len(self.results_array[x][y][z])\n if num_zeros != 0:\n print('Number of zeros: ', num_zeros)\n hist_arr = np.array(self.results_array[x][y][z])\n maxes.append(max(hist_arr))\n\n vget_max = np.vectorize(get_max, otypes=[list])\n vget_max(ind_array[0], ind_array[1], ind_array[2])\n return maxes",
"def get_max(x, y, z):\n if isinstance(self.results_array[x][y][z], tuple):\n num_zeros = self.tup_max_length - len(self.results_array[x][y][z])\n if num_zeros != 0:\n print('Number of zeros: ', num_zeros)\n hist_arr = np.array(self.results_array[x][y][z])\n maxes.append(max(hist_arr))",
"def getEvolutionMax(self):\n \n return [self.getMaximumAtGivenTime(timeIndex) for timeIndex in range(self.numberOfTimes - 1)]",
"def x_max(self):\n return self.get_max_value(self.X_INDEX)",
"def max(self):\r\n return np.max(self.data_array)",
"def get_height_of_signal_maximum(\n data, setup={}, varname=None, gate_min=None, gate_max=None):\n idx = get_index_of_signal_maximum(\n data, setup, varname, gate_min, gate_max)\n nt = range(len(idx))\n return data['alt'][nt, idx]",
"def get_tmax(data):\n return data[np.argmax(data[:, 1])][0]",
"def get_signal_maximum(\n data, setup={}, varname=None, gate_min=None, gate_max=None):\n idx = get_index_of_signal_maximum(\n data, setup, varname, gate_min, gate_max)\n nt = range(len(idx))\n\n if varname is None:\n varname = get_\n return data[varname][nt, idx]",
"def max_decode(M):\r\n return scipy.array([ f.val.argmax() for f in M])",
"def get_max_value(self):\n if self.is_ready():\n max_values = [dnd.get_max_value() for dnd in self.dnds]\n max_value = max(max_values)\n else:\n max_value = torch.tensor([[0.0]], dtype=torch.float)\n return max_value",
"def get_max_value(self, dim):\n return self._max_values[dim]",
"def _get_max_estimated_bandit(self)->Bandit:\n # print(\"mus - \", self.mu)\n # print(\"actions - \", np.argmax(self.mu))\n unique, counts = np.unique(self.mu, return_counts=True)\n lens = counts[np.argmax(unique)] \n if lens>1: # if two actions have same argmax\n # then return arbitrarily from those max ones\n maxs = list(np.array(self.bandits)[self.mu==unique[np.argmax(unique)]])\n return np.random.choice(maxs)\n # otherwise return the max one\n return self.bandits[np.argmax(self.mu)]",
"def max(folder):\n experiments = get_experiment_series(folder)\n plt.plot([ex.height for ex in experiments], [ex.maxWeight() for ex in experiments])\n plt.show()",
"def find_max(self):\n\n if self.right:\n return self.right.find_max()\n\n return self.data",
"def get_maxcut_data_model():\n n = 5\n V = np.arange(0, n, 1)\n E = [(0, 1, 3.0), (1, 2, 2.0), (2, 3, 2.0), (3, 4, 3.0), (4, 0, 1.0), (0, 3, 3.0)]\n\n G = nx.Graph()\n G.add_nodes_from(V)\n G.add_weighted_edges_from(E)\n return G",
"def find_max(self):\n if self.right:\n return self.right.find_max()\n return self.data",
"def _get_max_sampled_bandit(self)->Bandit:\n estimates = []\n for bandit in self.bandits:\n estimates.append(np.random.normal(loc =self.mu[bandit.id], scale = self.var[bandit.id]))\n return self.bandits[np.argmax(estimates)]",
"def max(self):\n return numpy.ma.max(self.data)",
"def max_well(self):\n maxVal = np.max(self.get_well_depth_image())\n return maxVal",
"def get_maximum_value(dataset):\n d = [int(i) for i in dataset if i.isdigit()]\n op = [o for o in dataset if o in ['*', '-', '+']]\n n = len(d)\n d.insert(0, None)\n op.insert(0, None)\n m = [[0 for x in range(n+1)] for y in range(n+1)]\n M = [[0 for x in range(n+1)] for y in range(n+1)]\n for i in range(1, n+1):\n m[i][i] = d[i]\n M[i][i] = d[i]\n for s in range(1, n):\n for i in range(1, n-s+1):\n j = i + s\n m[i][j], M[i][j] = min_and_max(i, j, op, m, M)\n return M[1][n]",
"def zmax(self):\n # Extract parameters\n pzs = self.params[0]\n return max([pz.zmax for pz in pzs])",
"def zmax(self):\n # Extract parameters\n pzs = self.params[0]\n return max([pz.zmax for pz in pzs])"
] | [
"0.68863416",
"0.61601216",
"0.60094136",
"0.5900275",
"0.5891694",
"0.58775216",
"0.580818",
"0.58072674",
"0.5785431",
"0.57757854",
"0.57710737",
"0.57614946",
"0.5742574",
"0.5739469",
"0.5728732",
"0.5727174",
"0.572258",
"0.5709562",
"0.5707097",
"0.5705269",
"0.5644917",
"0.56390744",
"0.563832",
"0.5618599",
"0.5614888",
"0.5597423",
"0.5581207",
"0.55712754",
"0.55593556",
"0.55593556"
] | 0.7922475 | 0 |
Evaluate one case for one model/MSID | def evaluate_one_case_for_one_msid(state_limits, baseline_case_results, case_results, debug=True):
if debug:
state_limits = state_limits
# The first case has zero changes, look for cases that change more than 0.1 degrees (refine later).
msid_inc_ind = np.array([c - b for c, b in zip(case_results, baseline_case_results)]) > 0.1
msid_ok = np.zeros(len(case_results)) < 1
msid_bad = msid_inc_ind & (case_results > state_limits)
msid_ok[msid_bad] = False
return msid_ok | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def evaluate(QualityMeasure,ModelClass,dataset,subgroup,target1,target2): \r\n evaluator = {\r\n QualityMeasure.SCD: evaluate_scd,\r\n }\r\n return evaluator.get(QualityMeasure)(ModelClass,dataset,subgroup,target1,target2)",
"def _compute_(self, case):\n dic = \"data/sim/{dn}/{rad}/\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), rad=self.rad)\n fn = \"data/sim/{dn}/{rad}/exp.{cse}.bm({bm}).elv(<elv>).csv\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"),\n rad=self.rad, bm=self.bmnum, cse=case)\n cmd = \"export DIR_MODELS_REF_DAT=/home/shibaji/Collaboration_NCAR/code_rt_sd/pharlap/pharlap_4.1.3/dat;\\\n cd pharlap;\\\n matlab -nodisplay -nodesktop -nosplash -nojvm -r \\\"UT=[{ut}];rad='{rad}';dic='{dic}';bm={bm};\\\n fn='{fn}';cse='{cse}';rt_1D_sen;exit;\\\"\".format(ut=self.event.strftime(\"%Y %m %d %H %S\"), rad=self.rad,\n dic=dic, bm=self.bmnum, fn=fn, cse=case)\n os.system(cmd)\n return",
"def _execute(self, model: ExecutableModelSpace) -> Any:\n\n from .space import BenchmarkModelSpace\n if not isinstance(model, BenchmarkModelSpace):\n warnings.warn('It would be better to use BenchmarkModelSpace for benchmarking to avoid '\n 'unnecessary overhead and silent mistakes.')\n if model.sample is None:\n raise ValueError('Model can not be evaluted because it has not been sampled yet.')\n\n return self.evaluate(model.sample)",
"def test_prepare_model_medium_objective(raw_model, expected_model, config):\n essential.configure_model(raw_model, config)\n raw_id = find_objective_function(raw_model)[0].id\n expected_id = find_objective_function(expected_model)[0].id\n assert raw_id == expected_id",
"def apply_cases(self, id, A):\n self.apply_tactic(id, tactic.cases(), args=A)",
"def run(self):\n \n for i, spl in enumerate(self.sampleList):\n id_ =MSIdentificationModel(spl, **self.parameters)\n id_.identification(self.models[i], error=5)",
"def evaluate_model():\n\n print '\\n\\tevaluate result'\n os.system('./conlleval.pl -d \\'\\t\\' < ' + encoded_test + ' >> ' + result_file)\n print '\\t--done\\n'",
"def evaluate(self, dataset):\n\t\tpass",
"def solveOneStep(self):\n ### Student code goes here\n return True",
"def test_evaluate_model(sequential_model, model_data):\n _, _, _, _, x_test, y_test = model_data\n compile_model(sequential_model)\n output = evaluate_model(sequential_model, x_test, y_test, 64)\n assert len(output) == 2",
"def evaluate(self, test_data):\n result = self.model.run(test_data)\n self._save_result(result)",
"def evaluate(self) :\n pass",
"def model_processor(model, metamodel):\n global model_processor_called\n model_processor_called = True\n\n assert model.__class__.__name__ == \"First\"\n assert model.seconds[0].sec == 34",
"def evaluate(self):\n pass",
"def evaluate(self):\n pass",
"def run_single_task(model: api_model.Model, this_task, task_name):\n\n results_data = this_task.evaluate_model(model)\n task_info = this_task.get_task_details()\n\n assert isinstance(task_info, task.TaskMetadata), \\\n f'{task_name}: get_task_details() should return a TaskMetadata object'\n\n if isinstance(results_data, list):\n for k in results_data:\n assert isinstance(\n k, task.ScoreData\n ), f'{task_name}: evaluate_model() should return ScoreData object(s).' \n else:\n assert isinstance(\n results_data,\n task.ScoreData), f'{task_name}: evaluate_model() should return ScoreData object(s).'\n\n verify_keywords(task_info, task_name)",
"def _evaluate(model):\n _recompile(model)\n if isinstance(eval_dataset, tuple):\n eval_images, eval_labels = eval_dataset\n return model.evaluate(\n eval_images, eval_labels, verbose=verbose, return_dict=True)\n else:\n return model.evaluate(eval_dataset, verbose=verbose, return_dict=True)",
"def evaluate_model(self, xyz, scale=True):\n xyz = np.array(xyz)\n if scale:\n xyz = self.scale(xyz,inplace=False)\n strat_id = np.zeros(xyz.shape[0],dtype=int)\n for group in self.stratigraphic_column.keys():\n if group == 'faults':\n continue\n feature_id = self.feature_name_index.get(group, -1)\n if feature_id >= 0:\n feature = self.features[feature_id]\n vals = feature.evaluate_value(xyz)\n for series in self.stratigraphic_column[group].values():\n strat_id[np.logical_and(vals < series.get('max',feature.max()), vals > series.get('min',feature.min()))] = series['id']\n if feature_id == -1:\n logger.error('Model does not contain {}'.format(group))\n return strat_id",
"def test_identity(model_name):\n env = DummyVecEnv([lambda: IdentityEnv(10)])\n\n model = LEARN_FUNC_DICT[model_name](env)\n evaluate_policy(model, env, n_eval_episodes=20, reward_threshold=90)\n\n obs = env.reset()\n print('test')\n action = env.action_space.sample()\n del model, env",
"def test_get_case_by_id(self):\n pass",
"def emm(dataset):\r\n\r\n ####################### CONFIGURE THIS ##############################\r\n\r\n #Define subgroup\r\n #subgroup = dataset[(dataset['dvce_type'] == 'Tablet')]\r\n subgroup = dataset[(dataset['os_timezone'].str.contains(\"Asia\") & (dataset['os_name'].str.contains(\"iPhone\")))]\r\n\r\n #Define target 1\r\n target1 = 'revenue'\r\n\r\n #Define target 2\r\n target2 = 'new_buttons'\r\n\r\n #####################################################################\r\n\r\n logging.info(\"Exceptional Model Mining. (Two targets)\")\r\n\r\n lengthDataset = len(dataset)\r\n logging.debug('Examples of the dataset {}'.format(lengthDataset)) \r\n logging.debug('Examples of subgroup: {} ({:.2f}%)'.format(len(subgroup), (len(subgroup)/lengthDataset) * 100))\r\n correlationTargets = phi_coefficient (dataset,target1,target2)\r\n logging.debug('Correlation of the two targets: {:.2f}'.format(correlationTargets))\r\n \r\n evaluate(QualityMeasure.SCD,ModelClass.PhiCoefficient,dataset,subgroup,target1,target2)",
"def test_wind_mel_model(preds_paths, data_val):\n # Load model predicitions - allowing for possibility of ensemble\n model_preds = np.stack([np.load(pred_path) for pred_path in preds_paths])\n model_preds = np.mean(model_preds, axis=0)\n\n # Get ids and true labels\n labels = []\n ids = []\n for example in data_val:\n labels.append(example[1])\n ids.append(example[2])\n\n # Calculate accuracy and label-predication pairs\n num_examples = 0\n num_correct = 0\n current_id = None\n current_label = None\n c_matrix = np.zeros((50, 50))\n for i in range(len(ids)):\n label = labels[i]\n id = ids[i]\n\n # Check to see if new example has entered\n if id != current_id:\n\n # Evaluate previous id fully - will not enter on first iteration\n if current_id:\n current_prediction_probs /= num_ids\n prediction = np.argmax(current_prediction_probs)\n\n # update lab_pred counts\n c_matrix[int(current_label), int(prediction)] += 1\n\n # Increment correct prediction counter if prediction correct\n if prediction == current_label:\n num_correct += 1\n\n # reset and increment variables\n num_examples += 1\n current_id = id\n current_label = label\n num_ids = 1\n current_prediction_probs = model_preds[i]\n else:\n num_ids += 1\n current_prediction_probs += model_preds[i]\n\n accuracy = num_correct / num_examples\n\n print(f\"{num_correct} / {num_examples} = {accuracy:.4f}\")\n\n return accuracy, c_matrix",
"def call(self, model):\n raise NotImplementedError('Define your score here')",
"def run(self, X, Y, model):\n\n p0 = X.iloc[0] # read in the input info\n params = lmfit.Parameters() # empty parameter class\n success = True # check for success\n\n if model == 'Medlyn':\n min, max = self.param_space('g1')\n params.add('g1', p0.g1, min=min, max=max)\n min, max = self.param_space('sref')\n params.add('sref', p0.sref, min=min, max=max)\n\n if model == 'Eller':\n min, max = self.param_space('kmax')\n params.add('kmaxS1', p0.kmaxS1, min=min, max=max)\n\n if (model == 'ProfitMax') or (model == 'ProfitMax2'):\n min, max = self.param_space('kmax')\n params.add('kmax', p0.kmax, min=min, max=max)\n\n # the following models all require the Sperry kmax as an input!\n if model == 'Tuzet':\n min, max = self.param_space('g1')\n params.add('g1T', p0.g1T, min=min, max=max)\n\n if 'Tleaf' in X.columns: # vary g1 and kmax\n min, max = self.param_space('kmax')\n params.add('kmaxT', p0.kmax, min=min, max=max)\n\n else: # vary g1 and Pref, sref fixed\n min, max = self.param_space('PrefT', P50=p0.P50, P88=p0.P88)\n\n if any(X['Ps_pd'] > p0.PrefT):\n params.add('PrefT', p0.PrefT, min=min, max=max)\n\n else:\n params.add('PrefT', -p0.P88, min=min, max=max)\n\n if model == 'WUE-LWP':\n min, max = self.param_space('Lambda')\n params.add('Lambda', p0.Lambda, min=min, max=max)\n\n if model == 'CGain':\n min, max = self.param_space('Kappa')\n params.add('Kappa', p0.Kappa, min=min, max=max)\n\n if model == 'CMax':\n min, max = self.param_space('Alpha')\n params.add('Alpha', p0.Alpha, min=min, max=max)\n min, max = self.param_space('Beta')\n params.add('Beta', p0.Beta, min=min, max=max)\n\n if model == 'SOX-OPT':\n min, max = self.param_space('kmax')\n params.add('kmaxS2', p0.kmaxS2, min=min, max=max)\n\n if model == 'LeastCost':\n min, max = self.param_space('kmax')\n params.add('kmaxLC', p0.kmaxLC, min=min, max=max)\n min, max = self.param_space('Eta')\n params.add('Eta', p0.Eta, min=min, max=max)\n\n if model == 'CAP':\n min, max = self.param_space('krl')\n params.add('krlC', p0.krlC, min=min, max=max)\n min, max = self.param_space('Pcrit', P50=p0.P50, P88=p0.P88)\n\n if any(X['Ps_pd'] > p0.PcritC):\n params.add('PcritC', p0.PcritC, min=min, max=max)\n\n else:\n params.add('PcritC', -p0.P88, min=min, max=max)\n\n if model == 'MES':\n min, max = self.param_space('krl')\n params.add('krlM', p0.krlM, min=min, max=max)\n min, max = self.param_space('Pcrit', P50=p0.P50, P88=p0.P88)\n\n if any(X['Ps_pd'] > p0.PcritM):\n params.add('PcritM', p0.PcritM, min=min, max=max)\n\n else:\n params.add('PcritM', -p0.P88, min=min, max=max)\n\n if not os.path.isdir(self.opath): # create output dir\n os.makedirs(self.opath)\n\n # run the minimizer\n if self.method == 'emcee':\n out = lmfit.minimize(fres, params, args=(model, X, Y,\n self.inf_gb,),\n method=self.method, steps=self.steps,\n nwalkers=self.nchains, burn=self.burn,\n thin=self.thin, is_weighted=False,\n progress=False, nan_policy='omit')\n\n else:\n out = lmfit.minimize(fres, params, args=(model, X, Y,\n self.inf_gb,),\n method=self.method, nan_policy='omit')\n\n for param in out.params.values():\n\n if np.isclose(param.value, param.init_value):\n params[param.name] = lmfit.Parameter(name=param.name,\n value=1.5 *\n param.init_value)\n out = lmfit.minimize(fres, params,\n args=(model, X, Y, self.inf_gb,),\n method=self.method,\n nan_policy='omit')\n\n if not os.path.isfile(os.path.join(self.opath, '%s.txt' % (model))):\n txt = open(os.path.join(self.opath, '%s.txt' % (model)), 'w+')\n\n else: # append to existing file\n txt = open(os.path.join(self.opath, 
'%s.txt' % (model)), 'a+')\n\n txt.write('\\n')\n txt.write(lmfit.fit_report(out))\n\n if not success:\n txt.write('\\n## Warning: had to fix first parameter value')\n\n txt.write('\\n')\n txt.close() # close text file\n\n return out.params.valuesdict()",
"def evaluate(rule_id):\n try:\n rule = Rule.objects.get(id=rule_id)\n except Rule.DoesNotExist:\n log.warning('Cannot evaluate rule %s, not found', rule_id)\n return\n rule.ctl.evaluate(update_state=True, trigger_actions=True)",
"def _doRun(self, model: Model):\n raise Exception(\"Not implemented\")",
"def evaluateModel(model, val_data, abs_idx2word, device, batch_size):\n #modify abs_idx2word by removing pad tokens so as to correctly calculate Reouge scores\n abs_idx2word[0] = ''\n\n #data setup\n val_data.move_to(torch.device('cpu')) #keep data on cpu\n val_dataloader = data.DataLoader(val_data, batch_size=batch_size, shuffle=True, num_workers=0)\n #model instantiation\n model = model.to(device=device)\n #evaluation\n logger.debug(f'\\tModel eval on validation data...')\n r1, r2, rl = evaluate.evaluate_model(model, val_dataloader, abs_idx2word, device, print_example=True)\n logger.debug(f'\\nRouge-1 is {r1:.4f}, Rouge-2 is {r2:.4f}, and Rouge-l is {rl:.4f}')",
"def model_evaluate(model,x_train,n_y_array,x_val, vald_array):\n\n scores = model.evaluate(x_train, n_y_array, verbose=1)\n\n scores2 = model.evaluate(x_val, vald_array, verbose=1)\n\n\n print(\"for traininf set\")\n\n print(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1]*100))\n\n print(\"%s: %.2f%%\" % (model.metrics_names[0], scores[0]))\n\n\n\n print(\"for validation set : \") \n\n print(\"%s: %.2f%%\" % (model.metrics_names[1], scores2[1]*100))\n\n print(\"%s: %.2f%%\" % (model.metrics_names[0], scores2[0]))",
"def evaluate(self) -> int:",
"def solve(self, model, sentence):\r\n if model == \"Simple\":\r\n return self.simple(sentence)\r\n elif model == \"Complex\":\r\n return self.complex(sentence)\r\n elif model == \"HMM\":\r\n return self.hmm(sentence)\r\n else:\r\n print(\"Unknown algorithm!\")"
] | [
"0.59154487",
"0.57015085",
"0.5590141",
"0.5463902",
"0.54456145",
"0.54454935",
"0.540437",
"0.53600127",
"0.52872545",
"0.52231187",
"0.5206733",
"0.5189098",
"0.51769215",
"0.5170207",
"0.5170207",
"0.5166084",
"0.5152556",
"0.5131439",
"0.50984055",
"0.50914276",
"0.5076474",
"0.5075499",
"0.5070281",
"0.50580925",
"0.50521165",
"0.50393516",
"0.5013379",
"0.5009861",
"0.50020134",
"0.49895704"
] | 0.59381014 | 0 |
Visualization using user profile images as the points. | def icons(users, distance):
# It would be pretty cool to put user thumbnails where points are.
# but i'm still not sure how to do this yet.
images = []
try:
print 'getting images..'
for p in users:
print p
f = p.image
img = imread('image.tmp')
images.append(img)
except Exception as e:
print 'got an error...'
import traceback
etype, evalue, tb = sys.exc_info()
print yellow % '\n'.join(traceback.format_exception(etype, evalue, tb))
ip()
(W, H, _) = shape(img) # thumbnails should all be the same size
count = len(images)
pl.figure()
P2, _ = mds(distance, 2)
X,Y = P2[:,0], P2[:,1]
## XXX: not a great transformation b/c we might stretch more in one dimension
def N(x):
"force x to fit in interval [0,1]"
x = (x - x.min())
x = x / x.max()
assert all(x >= 0) and all(x <= 1)
return x
X = N(X)*475
Y = N(Y)*425
figimages = [pl.figimage(img, xo=x, yo=y) for img, x, y in zip(images, X, Y)] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, user, enabled=True):\n super().__init__()\n self.setObjectName(\"user-profile\")\n self.enabled = enabled\n self.setProperty(\"follow-mouse\", enabled)\n\n image, label = _get_visuals(user)\n\n grid = QGridLayout(self)\n i = QLabel()\n i.setPixmap(image)\n i.setAlignment(Qt.AlignCenter)\n\n text = label\n text.setAlignment(Qt.AlignCenter)\n\n grid.addWidget(i, 0, 0)\n grid.addWidget(text, 1, 0)",
"def visualize(self, paths, instance, during_analysis):\r\n xvalues = np.arange(self.data.shape[0])\r\n\r\n model_data = self.model_data_from_instance(instance=instance)\r\n\r\n residual_map = self.data - model_data\r\n chi_squared_map = (residual_map / self.noise_map) ** 2.0\r\n\r\n \"\"\"The visualizer now outputs images of the best-fit results to hard-disk (checkout `visualizer.py`).\"\"\"\r\n plot_profile_1d(\r\n xvalues=xvalues,\r\n profile_1d=self.data,\r\n title=\"Data\",\r\n ylabel=\"Data Values\",\r\n color=\"k\",\r\n output_path=paths.image_path,\r\n output_filename=\"data\",\r\n )\r\n\r\n plot_profile_1d(\r\n xvalues=xvalues,\r\n profile_1d=model_data,\r\n title=\"Model Data\",\r\n ylabel=\"Model Data Values\",\r\n color=\"k\",\r\n output_path=paths.image_path,\r\n output_filename=\"model_data\",\r\n )\r\n\r\n plot_profile_1d(\r\n xvalues=xvalues,\r\n profile_1d=residual_map,\r\n title=\"Residual Map\",\r\n ylabel=\"Residuals\",\r\n color=\"k\",\r\n output_path=paths.image_path,\r\n output_filename=\"residual_map\",\r\n )\r\n\r\n plot_profile_1d(\r\n xvalues=xvalues,\r\n profile_1d=chi_squared_map,\r\n title=\"Chi-Squared Map\",\r\n ylabel=\"Chi-Squareds\",\r\n color=\"k\",\r\n output_path=paths.image_path,\r\n output_filename=\"chi_squared_map\",\r\n )",
"def visualize(self, U, **kwargs):\n raise NotImplementedError",
"def plot_potential(self):\n imshow(self.U, extent=(self.x[0], self.x[-1], self.y[0], self.y[-1]), aspect='auto', interpolation='None')\n xlabel('x')\n ylabel('y')",
"def choose_profile(image_data):\n print(\"The data is a profile.\")\n\n if len(image_data.shape) == 1:\n \n plt.plot(image_data)\n plt.show(block=False)\n\n return image_data,np.array(range(image_data.shape[0]))\n \n elif len(image_data.shape) == 2:\n if image_data.shape[0] == 1:\n plt.plot(image_data[0,:])\n plt.show(block=False)\n #plt.show()\n\n return image_data[0,:],np.array(range(image_data.shape[1]))\n\n\n elif image_data.shape[1] == 1:\n plt.plot(image_data[1,:])\n plt.show(block=False)\n #plt.show()\n return image_data[0,:],np.array(range(image_data.shape[1]))\n\n\n\n else:\n \"\"\n\n return image_data[1,:],image_data[0,:]\n\n else:\n # show the user which profiles are present\n for i in range(image_data.shape[0]):\n plt.plot(image_data[i,:], label=\"profile {}\".format(i))\n plt.legend()\n plt.show(block=False)\n\n # have the user select the profile\n profile_choice = validate_profile_choice(image_data.shape)\n plt.close()\n \n image_data = image_data[profile_choice]\n \n # show chosen profile\n plt.plot(image_data)\n plt.show(block=False)\n \n return image_data",
"def userProfile(userid):\n images = get_uploaded_images()\n record = UserProfile.query.filter_by(id=userid).first()\n return render_template('userProfile.html', images=images, record =record)",
"def profiles():\n images = get_uploaded_images()\n records = db.session.query(UserProfile).all()\n return render_template('profiles.html', images=images, records =records)",
"def plot_img():\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1,28,28).data.numpy()[0,].squeeze())\n\n plt.show()\n plt.pause(1e-6)\n plt.gcf().clear()\n sample = model.sample_z(data) \n plt.imshow(sample)",
"def plot_profiles(self, fig=0, title=''):\n plot_input.plot_profiles(self, fig, title)",
"def plot_networks(student, shape):\n plt.figure()\n s = np.arange(np.prod(shape))\n plt.figure()\n value = student.train_model.value(s).reshape(shape)\n plt.imshow(value)\n\n pi = student.train_model.proba_step(s).T.reshape((-1,) + shape)\n x, y = np.unravel_index(s, shape)\n\n for a in range(pi.shape[0]):\n if a == UP:\n u = np.zeros_like(s)\n v = pi[a].T.ravel()\n if a == DOWN:\n u = np.zeros_like(s)\n v = -pi[a].T.ravel()\n if a == RIGHT:\n v = np.zeros_like(s)\n u = pi[a].T.ravel()\n if a == LEFT:\n v = np.zeros_like(s)\n u = -pi[a].T.ravel()\n plt.quiver(x, y, u, v)",
"def plot_image_sequence(self):\r\n\r\n imv = pg.ImageView()\r\n\r\n imv.show()\r\n\r\n imv.setImage(self.imageData)\r\n\r\n self.layout.addWidget(imv, 0, 0)\r\n\r\n\r\n\r\n avgImage = np.mean(self.imageData, axis=0)\r\n\r\n ima = pg.ImageView()\r\n\r\n ima.setImage(avgImage)\r\n\r\n self.layout.addWidget(ima, 1, 0)",
"def show_user_profile(user_id):\n # check if the logged in user has permission to view the user profile page\n if int(user_id) != session.get('logged_in'):\n return redirect(\"/search\")\n\n print user_id\n user = User.query.filter(User.id == user_id).one()\n email = user.email\n pic = user.pic\n username = user.username\n\n ensembles = user.ensembles\n\n points_per_ensemble ={}\n points = 0\n for ea in user.ensemble_associations:\n points += ea.points\n points_per_ensemble[ea.ensemble] = ea.points\n\n # if points:\n # flash(\"You got a point!\")\n\n movie_ensemble = {}\n for ensemble in ensembles:\n ensemble_points_pair = (points_per_ensemble.get(ensemble, 0), ensemble)\n if ensemble.movie not in movie_ensemble:\n movie_ensemble[ensemble.movie] = [ensemble_points_pair]\n else:\n movie_ensemble[ensemble.movie].append(ensemble_points_pair)\n\n for pair_lst in movie_ensemble.values():\n pair_lst.sort(reverse=True)\n\n # return render_template('../front_end/templates/user_profile.html',\n # pic=pic,\n # email=email,\n # username=username,\n # ensembles=ensembles,\n # movie_ensemble=movie_ensemble,\n # points=points,\n # )\n\n return jsonify(dict(pic=pic,\n email=email,\n username=username,\n ensembles=ensembles,\n movie_ensemble=movie_ensemble,\n points=points,))",
"def stm_profile_plot(flat_file, points, scan_dir=0, cmap=None, vmin=None, vmax=None, xy_ticks=4, z_ticks=4):\n nm = 10 ** -9 # Define the nanometer to meter conversion.\n\n fig, ax = plt.subplots() # Create an instance of a pyplot figure and axis.\n\n # Set the minimum of the scan data to zero.\n figure_data = (flat_file[scan_dir].data - np.amin(flat_file[scan_dir].data)) / nm\n\n if cmap is None: # If no color scheme is given use hot as default.\n cmap = 'hot'\n\n if vmin is None: # If no z-axis minimum is given use minimum of the image data.\n vmin = np.amin(figure_data)\n if vmax is None: # If no z-axis maxmimum is given use 125% of the maximum in the image data.\n vmax = 1.25 * np.amax(figure_data)\n\n # Add image plot to the axis and define it so that the color map can be generated.\n cax = ax.imshow(figure_data, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax)\n\n # Convert nanometer values into pixel numbers.\n for point in range(len(points)):\n points[point][0] = nm2pnt(points[point][0], flat_file, axis='x')\n points[point][1] = nm2pnt(points[point][1], flat_file, axis='y')\n\n # Plot the line profile points on the axis.\n ax.plot(points[:, 0], points[:, 1], 'bo-')\n\n xy_units = flat_file[scan_dir].info['unitxy'] # Get xy units.\n\n x_res = flat_file[scan_dir].info['xres'] # Get number of x-axis pixels.\n y_res = flat_file[scan_dir].info['yres'] # Get number of y-axis pixels.\n\n x_max = flat_file[scan_dir].info['xreal'] # Get x-axis image size.\n y_max = flat_file[scan_dir].info['yreal'] # get y-axis image size.\n\n # Set the x-axis ticks from number given.\n ax.set_xticks([x for x in np.arange(0, x_res + 1, x_res / xy_ticks)])\n # Set the x-axis tick labels from image size.\n ax.set_xticklabels([str(np.round(x, 1)) for x in np.arange(0, x_max + 1, x_max / xy_ticks)])\n\n # Set the y-axis ticks from number given\n ax.set_yticks([y for y in np.arange(0, y_res + 1, y_res / xy_ticks)])\n # Set the y-axis tick labels from image size.\n ax.set_yticklabels([str(np.round(y, 1)) for y in np.arange(0, y_max + 1, y_max / xy_ticks)])\n\n # Set the x- and y-axis labels.\n ax.set_xlabel(xy_units, size=16, weight='bold')\n ax.set_ylabel(xy_units, size=16, weight='bold')\n\n # Define the limits of the plot.\n ax.set_xlim([0, x_res])\n ax.set_ylim([0, y_res])\n\n # St the plot title with the image setpoint parameters.\n ax.set_title('Set-Points: {voltage} V, {current} pA'.format(voltage=flat_file[scan_dir].info['vgap'],\n current=np.round(\n flat_file[scan_dir].info['current']*10**12)))\n\n # Define list containing the z-axis ticks from number given.\n cbar_ticks = [z for z in np.arange(vmin, vmax * 1.01, vmax / z_ticks)]\n # Define the z-axis tick labels.\n cbar_ticklabels = [str(np.round(z, 1)) for z in np.arange(vmin, vmax + 1, vmax / z_ticks)]\n # Create color bar.\n cbar = fig.colorbar(cax, ticks=cbar_ticks)\n # Set the color bar tick labels.\n cbar.ax.set_yticklabels(cbar_ticklabels, size=16)\n # Set color bar label.\n cbar.set_label('Height [' + xy_units + ']', size=18, weight='bold')\n\n plt.show()",
"def viz1(img, interest_points, color='r'):\n\n\tx = [a[1] for a in interest_points] #blob detection x axis\n\ty = [a[0] for a in interest_points] #blob detection y axis\n\ts = [a[2] for a in interest_points] #blob detected at sigma \n \n\tplt.imshow(img, cmap='gray') #adding the input image to plot \n\tfor x, y, s in zip(x, y, s):\n\t\tplt.scatter(x, y, alpha=1, facecolors='none', edgecolors='r', s=s**2) #plotting the input interest points\n\tplt.axis('off')\n\t#plt.show() #showing the image // can be changed to saving the image locally \n\n\treturn",
"def display(self):\n nrow = 2\n ncol = len(self.views) + 1\n rows = [(self.views[0].original, len(self.views)),\n (self.views[0].image, len(self.views) + 1)]\n fig, axes = plt.subplots(nrows=nrow, ncols=ncol,\n figsize=self._figsize(rows),\n squeeze=True)\n originals = [(v.position.id, v.original) for v in self.views] + [\n ('combined', np.median(np.stack([v.original for v in self.views]), axis=0))]\n warped = [(v.position.id, v.image) for v in self.views] + [\n ('combined', self.image)]\n for ax, (title, img) in zip(axes.ravel(), originals + warped):\n ax.imshow(img)\n ax.axis('off')\n ax.xaxis.set_visible(False)\n ax.yaxis.set_visible(False)\n ax.set(title=title)\n fig.tight_layout()\n fig.canvas.draw()\n img_array = np.array(fig.canvas.renderer._renderer)\n plt.close('all')\n return img_array",
"def displayFaceData(X, rows, cols):\n\twidth, height = 32, 32 # This is the shape of original photo (32*32)\n\tpictures_combined = np.zeros((height*rows, width*cols))\n\t\n\trow, col = 0, 0\n\tfor a_picture_index in xrange(rows*cols):\n\t\tif col == cols:\n\t\t\trow += 1\n\t\t\tcol = 0\n\t\ta_picture = ReshapeIntoImage(X[a_picture_index],width)\n\t\tpictures_combined[row*height:(row*height+a_picture.shape[0]), col*width:(col*width+a_picture.shape[1])] = a_picture\n\t\tcol += 1\n\n\tfig = plt.figure(figsize=(10,10))\n\timg = scipy.misc.toimage( pictures_combined )\n\tplt.imshow(img,cmap = cm.Greys_r)\n\tplt.show(block=False)",
"def plot_profile(outdir, xval='x', xscale=1, yscale=1, comp2los=False, adjustRadial=False,\n fig=True):\n #Load data\n path = os.path.join(outdir,'points.h5')\n x,y,z,ux,uy,uz = pu.extract_points(path)\n\n Y = uz / yscale\n if xval == 'x':\n X = x / xscale\n Y1 = ux / yscale\n elif xval == 'r':\n X = np.hypot(x,y) / xscale\n ur = np.hypot(ux,uy)\n Y1 = ur / yscale\n if adjustRadial: #fix sign from hypot square root\n ur = pu.radial2negative(Y1)\n\n if fig:\n plt.figure()\n # otherwise profile added to active plot\n\n #plt.plot(X,uy/yscale,'r.-',label='Uy') #should be zero along EW axis\n de = 90e3 / xscale #eastern data extent\n if comp2los != False:\n data_extents = (X<=de)\n if comp2los == 'west': #switch sign of radial profile\n #ux = -ux #move to comp2los function\n X = -X\n Y1 = -Y1\n de = -de\n data_extents = (X>=de)\n\n los = pu.comp2los(x,ux,uy,uz,track=comp2los)\n plt.plot(X, los/yscale, 'k-', lw=2, label='Ulos_' + comp2los)\n plt.fill_between(X,los/yscale, where=data_extents, color='gray',alpha=0.5)\n\n plt.plot(X, Y, 'b-', lw=2, label='Uz')\n plt.plot(X, Y1, 'b--',lw=2, mfc='None',label='U{0}'.format(xval))\n\n # Annotate\n plt.title(outdir)\n plt.xlabel('Distance [{}]'.format(get_unit(xscale)))\n plt.ylabel('Uz [{}]'.format(get_unit(yscale)))\n plt.axhline(color='k')\n plt.axvline(de,color='k', linestyle='dashed', label='EW data extent') #EW extent of InSAR coverage\n plt.legend(loc='best')\n plt.grid(True)\n plt.show()",
"def profile(request):\n draws = []\n try:\n draws = MONGO.get_user_draws(request.user.pk)\n except Exception as e:\n LOG.error(\"There was an issue when retrieving user draws. {0}\".format(e))\n\n context = {'draws': draws}\n return render(request, 'profile.html', context)",
"def plot_directory_profiles(path, outname=None, show=True, xscale=1, yscale=1,\n xval='x', adjustRadial=True):\n outdirs = np.sort(os.listdir(path))\n plt.figure()\n\n #labels=['homogeneous','1D layering', '3D tomography'] #xscale=1e-3, yscale=1e2\n for i,outdir in enumerate(outdirs):\n pointsFile = os.path.join(path, outdir, 'points.h5')\n #print(pointsFile)\n #x_fem, ur_fem, uz_fem = pu.extract_points(pointsFile, output='cyl',adjustRadial=adjustRadial)\n #x_fem, ur_fem, uz_fem = pu.extract_points(pointsFile)\n #Load data\n\n x,y,z,ux,uy,uz = pu.extract_points(pointsFile)\n #Y = uz / yscale\n if xval == 'x':\n X = x / xscale\n Y1 = ux / yscale\n elif xval == 'r':\n X = np.hypot(x,y) / xscale\n ur_fem = np.hypot(ux,uy)\n Y1 = ur_fem / yscale\n if adjustRadial: #fix sign from hypot square root\n ur_fem = pu.radial2negative(Y1)\n\n x_fem = X #/ xscale #double scaling!\n ur_fem = Y1 #/ yscale\n uz_fem = uz / yscale\n\n #print(pointsFile)\n print(ur_fem.min(), ur_fem.max(), uz_fem.min(), uz_fem.max())\n\n l, = plt.plot(x_fem,uz_fem,'.-',lw=3,label=outdir)\n #l, = plt.plot(x_fem,uz_fem,'.-',lw=2,label=labels[i]) #for 3d heterogeneity example\n plt.plot(x_fem,ur_fem,'.--',lw=3, mfc='w',color=l.get_color()) #mfc='none' transparent\n\n # Annotate\n plt.axhline(color='k',lw=0.5)\n plt.xlabel('Distance [{}]'.format(get_unit(xscale)))\n plt.ylabel('Displacement [{}]'.format(get_unit(yscale)))\n plt.legend()\n\n #NOTE: custom annotations for 3d heterogeneity\n #plt.title('Elastic Heterogeneity Effects')\n #plt.legend([l1,l2,l3],['homogeneous','1D layering', '3D tomography'])\n\n if outname: plt.savefig(outname)\n if show: plt.show()",
"def mri_point_plot(self, vcol=1):\n img = self.voxels\n points = self.point_position \n ax = []\n fig = plt.figure(figsize=(9, 8))\n # TODO make this setable in the function call\n columns = 3\n rows = 2\n\n for i in range(points.shape[0]):\n im_slice = int(np.round(points[i, vcol]))\n if vcol == 0:\n im = img[im_slice, :, :]\n elif vcol == 1:\n im = img[:, im_slice, :]\n else:\n im = img[:, :, im_slice]\n ax.append( fig.add_subplot(rows, columns, i+1))\n ax[-1].set_title(\"Image depth: \"+str(im_slice)) # set title\n plt.imshow(im)\n plot_cols = np.array([0, 1, 2])\n plot_cols = plot_cols[plot_cols != vcol]\n plt.plot(points[i, min(plot_cols)], points[i, max(plot_cols)], 'ro')\n\n plt.show()",
"def user_view(cls, user, profile):\r\n pass",
"def paintTrousers(self):\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(os.path.join(PATH_EDITOR_IMG, self.avatarConfiguration[\"gender\"], self.avatarConfiguration[\"bodySize\"], self.avatarConfiguration[\"typeTrousers\"]+\"_trousers\", self.avatarConfiguration[\"trousers\"] + IMG_EXTENSION))\n self.newAvatarImage(imgPath, \"trousers\")",
"def plot_visco_profiles(pointsh5, skip=slice(None,None,1), xscale=1e3, yscale=1e-2, tscale=3.1536e7, adjustRadial=False, benchmark=[], title=None):\n\tplt.figure()\n\n\tcoords,data,number,times = pu.load_h5_visco(pointsh5)\n\n\t#x = 1e3*np.loadtxt(points,usecols=[0]) # output_points2.txt\n\t#y = np.zeros_like(x)\n\tx = coords[:,0]\n\ty = np.zeros_like(x)\n\n\t# NOTE: plot elastic solution by passing dictionary as showelastic\n\t# Plot analytic elastic solution (t=0)\n\t#print(benchmark)\n\tif len(benchmark)>=1:\n\t\tur = zeros_like(x)\n\t\tuz = np.zeros_like(x)\n\t\tfor b in benchmark:\n\t\t\turi,uzi = m.calc_mogi_dp(x,y,**params)\n\t\t\tur += uri\n\t\t\tuz += uzi\n\t\tplt.plot(x*xscale,uz*yscale,'ko',label='benchmark')\n\n\t# Convert units\n\t#ur = np.hypot(data[:,:,0], data[:,:,1]) #assume progiles are along EW profile\n\tur = data[:,:,0]\n\tuz = data[:,:,2]\n\tx = x / xscale\n\tur = ur / yscale #cm\n\tuz = uz / yscale #cm\n\ttimes = times / tscale\n\t#times = times / 8.64e4 #days\n\t#times = times / 31536000 #years\n\n\t#plots = np.arange(0,times.size,skip)\n\t#print(plots.size)\n\t#way to cycle through markers if plotting many lines\n\t#marker = itertools.cycle(['o','^','s','D']) #plot(marker=marker.next() iterates list)\n\t#way to use gradually changing colors from a colormap\n\t#color = plt.cm.jet(1.0*i/plots.size)\n\tindplots = np.arange(times.size-1)\n\tprint(indplots)\n\tindplots = indplots[skip]\n\tprint(indplots)\n\tfor i in indplots:\n\t\tline, = plt.plot(x, uz[i], color=plt.cm.jet(1.0*i/indplots[-1]), label='{:.1f}'.format(times[i]))\n\t\tplt.plot(x, ur[i], ls='dashed', color=line.get_color())\n\t#print uz[i]\n\t#print uz[i-1]\n\n\tif title:\n\t\tplt.title(title)\n\telse:\n\t\tplt.title(pointsh5)\n\n\tplt.axhline(color='k',linestyle='dashed')\n\tplt.xlabel('Distance [{}]'.format(get_unit(xscale)))\n\tplt.ylabel('Displacement [{}]'.format(get_unit(yscale)))\n\tplt.show()\n\tplt.legend(title='{}'.format(get_unit(tscale)))\n\tplt.grid()",
"def user_view(cls, user, profile):\n pass",
"def display(self):\n fig, axes = plt.subplots(1, len(self.views),\n figsize=self._figsize(\n [(self.views[0].image, len(self.views))]),\n squeeze=True)\n for ax, view in zip(axes.ravel(), self.views):\n ax.imshow(view.grey)\n points = self._common_keypoints(view).reshape(-1, 2)[::-1]\n ax.plot(points[..., 0], points[..., 1], 'r+')\n ax.axis('off')\n ax.xaxis.set_visible(False)\n ax.yaxis.set_visible(False)\n ax.set(title=view.position.id)\n fig.tight_layout()\n fig.canvas.draw()\n img_array = np.array(fig.canvas.renderer._renderer)\n plt.close('all')\n return img_array",
"def testProfile2D(self):\n self.plot = StackView()\n self.plot.show()\n self.qWaitForWindowExposed(self.plot)\n\n self.plot.setStack(numpy.array([[[0, 1], [2, 3]],\n [[4, 5], [6, 7]]]))\n\n toolBar = self.plot.getProfileToolbar()\n\n manager = toolBar.getProfileManager()\n roiManager = manager.getRoiManager()\n\n roi = rois.ProfileImageStackHorizontalLineROI()\n roi.setPosition(0.5)\n roi.setProfileType(\"2D\")\n roiManager.addRoi(roi)\n roiManager.setCurrentRoi(roi)\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n profileWindow = roi.getProfileWindow()\n self.assertIsInstance(roi.getProfileWindow(), qt.QMainWindow)\n self.assertIsInstance(profileWindow.getCurrentPlotWidget(), Plot2D)\n\n roi.setProfileType(\"1D\")\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n profileWindow = roi.getProfileWindow()\n self.assertIsInstance(roi.getProfileWindow(), qt.QMainWindow)\n self.assertIsInstance(profileWindow.getCurrentPlotWidget(), Plot1D)",
"def displayImages(self):\n\n plt.figure(figsize=(8,6))\n plt.subplot(1,2,1)\n plt.imshow( self.original_image, cmap=\"gray\")\n plt.title(\"Original Image\")\n plt.subplot(1,2,2)\n plt.imshow( self.blurred_image, cmap=\"gray\")\n plt.title(\"Blurred Image\")",
"def draw_points(self, pic_path, points_data):\n # Pupil Finding here\n pupils = get_eye_locations_in_image(pic_path)\n img = cv2.imread(pic_path)\n frame_number = int(re.findall(r'\\d+', pic_path.split('/')[-1])[0])\n dets = detector(img)\n shape = None\n height, width, channels = img.shape\n\n for k, d in enumerate(dets):\n shape = predictor(img, d)\n\n if(not shape):\n return\n\n pointList = []\n c = 0\n for b in range(68):\n # sanitizing input points\n point = Point(shape.part(b).x, shape.part(b).y)\n points_data[c] = [point.x, point.y]\n c = c + 1\n # some points might be out of bound\n # so, move them to the closest boundary\n if(point.x < 0):\n point.x = 0\n elif(point.x >= width):\n point.x = width - 1\n if(point.y < 0):\n point.y = 0\n elif(point.y >= height):\n point.y = height - 1\n\n pointList.append(point)\n\n roll = findRoll(pointList)\n #print(\"roll is \" + str(roll) + ' angles')\n yaw = findYaw(pointList)\n #print(\"yaw is \" + str(yaw) + ' angles')\n pitch = findPitch(pointList)\n #print(\"pitch is \" + str(pitch) + ' angles')\n self.data[frame_number] = [roll, yaw, pitch]\n counter = 0\n for point in pointList:\n cv2.circle(img, (point.x, point.y), ImageProcessor.POINT_SIZE, ImageProcessor.POINT_COLOR, -1)\n counter = counter + 1\n\n self.draw_triangles(img, pointList)\n \n for pupil in pupils:\n cv2.circle(img, (pupil.left.x, pupil.left.y), 5, (0,0,255), -1)\n cv2.circle(img, (pupil.right.x, pupil.right.y), 5, (0,0,255), -1)\n points_data[-1] = [pupil.left.x, pupil.left.y]\n points_data[-2] = [pupil.right.x, pupil.right.y]\n #print(pupil.left.x, \", \", pupil.left.y)\n #print(pupil.right.x, \", \", pupil.right.y)\n\n cv2.imwrite(pic_path, img)",
"def get_context_data(self, **kwargs):\n user = ImagerProfile.objects.get(user__username=self.request.user.username)\n # import pdb;\n context = super(ProfileView, self).get_context_data(**kwargs)\n photos = self.request.user.photos.all()\n ph_public = len(photos.filter(published=\"Public\"))\n ph_private = len(photos.filter(published=\"Private\"))\n albums = self.request.user.albums.all()\n al_public = len(albums.filter(published=\"Public\"))\n al_private = len(albums.filter(published=\"Private\"))\n context = {'user': user, 'ph_public': ph_public, 'ph_private': ph_private,\n 'al_public': al_public, 'al_private': al_private}\n return context",
"def plot_physical_profiles(self, fig=2):\n # Select the physical parameters\n phys_parms = ['temperature', 'salinity', 'theta', 'N']\n \n # Plot these parameters\n self.plot_profiles(phys_parms, fig)"
] | [
"0.6102291",
"0.59152985",
"0.5812397",
"0.5785912",
"0.5781453",
"0.56563425",
"0.55763805",
"0.55537695",
"0.55522704",
"0.55514014",
"0.5545138",
"0.5540783",
"0.5524364",
"0.5510808",
"0.54797715",
"0.54732275",
"0.5429194",
"0.5428557",
"0.54024166",
"0.5393831",
"0.53897643",
"0.5379064",
"0.53761244",
"0.53451616",
"0.5323242",
"0.5314014",
"0.53067786",
"0.5262752",
"0.52418065",
"0.52109164"
] | 0.5977453 | 1 |
Find all the paths from start to goal recursively on a dict. | def dfs_paths_dict_recur(
graph: Mapping[Node, set[Node]],
start: Node,
goal: Node,
path: Optional[list[Node]] = None
) -> Iterable[list[Node]]:
if path is None:
path = [start]
if start == goal:
yield path
else:
for next_node in graph[start].difference(path):
next_path = path + [next_node]
yield from dfs_paths_dict_recur(graph, next_node, goal, next_path) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_path(start, child_father_dict):\n\n def _dfs(node, path, res):\n path.append(node)\n if node not in child_father_dict:\n res.append(path.copy())\n path.pop()\n return\n for ni in child_father_dict[node]:\n _dfs(ni, path, res)\n path.pop()\n\n all_path = []\n if start in child_father_dict:\n _dfs(start, [], all_path)\n return all_path",
"def find_paths(self, start_key, target_key):\n\n stack = [(start_key, [start_key])]\n while stack:\n node_key, path = stack.pop()\n node = self.nodes[node_key]\n for nxt in node.neighbors - set(path):\n if nxt == target_key:\n yield path + [nxt]\n else:\n stack.append((nxt, path + [nxt]))",
"def get_path(prevs, goal, start):\n path = OD({goal: 0})\n cur = goal\n while cur != start:\n (cost, node) = prevs.get(cur)\n if node == None or node in path:\n print(\"ERROR: No path found from %s -> %s\" % (start, goal))\n return (0, None)\n path[node] = path[cur] + cost\n cur = node\n return (path[start], path.keys()[::-1])",
"def _find_all_paths(sample, previous_path=None):\n paths = []\n for key in sample:\n current_path = []\n if previous_path:\n current_path.extend(previous_path)\n current_path.append(key)\n #If the current value ist a mapping, search in this mapping for more paths\n if isinstance(sample[key], abc.Mapping):\n paths.extend(MappingValidator._find_all_paths(sample[key],\n previous_path=current_path))\n paths.append(current_path)\n return sorted(paths, key=lambda k: len(k))",
"def find_all_paths(parents_to_children, start, end, path=[]):\r\n path = path + [start]\r\n if start == end:\r\n return [path]\r\n if start not in parents_to_children.keys():\r\n return []\r\n paths = []\r\n for node in parents_to_children[start]:\r\n if node not in path:\r\n newpaths = find_all_paths(parents_to_children, node, end, path)\r\n for newpath in newpaths:\r\n paths.append(tuple(newpath))\r\n return paths",
"def find_all_path(self, start, end, path=[]):\n path = path+[start]\n if start == end:\n return path\n paths = []\n for node in self.graph[start]:\n if node not in path:\n newpaths = self.find_path(node, end, path)\n paths.append(newpaths)\n return paths",
"def dict_path(my_dict, path=None):\n if path is None:\n path = \"\"\n for k, v in my_dict.items():\n newpath = path + (\".\" if path != \"\" else \"\") + k\n if isinstance(v, dict):\n for u in dict_path(v, newpath):\n yield u\n else:\n yield newpath, v",
"def find_path(self, start):\n path = []\n leaf = start\n seen_nodes = []\n while True:\n if self.nodes[leaf]['address'] == '':\n return path\n\n left = leaf if self.nodes[leaf][\n 'left'] else self.nodes[leaf]['sibling']\n right = leaf if not self.nodes[leaf][\n 'left'] else self.nodes[leaf]['sibling']\n next_hash = do_hash(left + right, self.algo)\n leaf = self.nodes[leaf]['parent']\n assert leaf == next_hash\n assert next_hash not in seen_nodes\n assert next_hash in self.nodes\n step = [left, right, next_hash]\n path.append(step)",
"def trace_path(parent_map: dict, start, end) -> List[str]:\n # start with end node\n path = [end]\n # find until we reach start node\n while path[-1] != start:\n # append parent of current node\n path.append(parent_map[path[-1]])\n path.reverse()\n return path",
"def find_all_paths(graph, start, end, path=[]):\n path = path + [start]\n if start == end:\n return [path]\n paths = []\n for node in graph[start]:\n newpaths = find_all_paths(graph, node, end, path)\n paths += newpaths\n return paths",
"def find_paths(start, current, distance, paths, choices):\r\n # Find all paths resulting in the minimum distance\r\n options = []\r\n min_distance = min(paths[current].values())\r\n for option, distance in paths[current].items():\r\n if distance == min_distance:\r\n\r\n # If we find the beginning, break out\r\n if option == start:\r\n if option not in choices or choices[current] < distance + min_distance:\r\n choices[current] = distance + min_distance\r\n return\r\n\r\n # Add to list of options\r\n options.append(option)\r\n\r\n # For each path, recursively find minimal paths\r\n for option in options:\r\n find_paths(start, option, min_distance, paths, choices)",
"def depthFirstSearch(problem):\n \n from game import Directions\n North = Directions.NORTH\n South = Directions.SOUTH\n East = Directions.EAST\n West = Directions.WEST \n \n pathDict = {}\n visited = set()\n #visited start\n visited.add(problem.getStartState())\n #initial successors\n successor = problem.getSuccessors(problem.getStartState())\n for initSucc in successor:\n pathDict[initSucc[0]] = [initSucc[1]]\n #loop\n while (1):\n #if fringe = null, return failure\n if (len(successor) == 0):\n print \"Fringe is empty\"\n return util.raiseNotDefined()\n #(v, path) = fringe.pop\n succLocation = successor[0][0]\n succDirection = successor[0][1]\n del successor[0]\n #if isGoal = true, return path\n if problem.isGoalState(succLocation):\n return pathDict[succLocation]\n #if visited = false\n if succLocation not in visited:\n #visited = true\n visited.add(succLocation)\n #L = expand(v,path)\n tempSuccList = problem.getSuccessors(succLocation)\n #Fringe <- L\n for succ in reversed(tempSuccList):\n successor.insert(0,succ)\n pathDict[succ[0]] = []\n pathDict[succ[0]].extend(pathDict[succLocation])\n pathDict[succ[0]].append(succ[1])",
"def paths(self, start):\n # This is probably a little slow\n tupadd = lambda p, v: (p[0] + v[0], p[1] + v[1])\n # First, we'll check adjacency moves.\n adj = [tupadd(start, v) for v in DIRECTIONS]\n yield from (p for p in adj if self.board(p) == 0)\n # Now we check repeated hops.\n # We do this by a breadth first search.\n\n #TODO: Consensus on legality of hopping back to start and \"skipping\"\n visited = set(adj)\n to_visit = [start]\n while len(to_visit):\n pt = to_visit.pop(0)\n if pt in visited:\n continue\n\n # We have to actually move a piece\n # But this stops us from considering \"start\" even if we can\n # make some hops and get back to start\n if pt is not start:\n yield pt\n \n visited.add(pt)\n # Compute the hop directions\n dirs = ((tupadd(pt, v), tupadd(pt, tupadd(v, v))) for v in DIRECTIONS)\n to_visit.extend(\n dest for over, dest in dirs\n if self.board(over) > 0\n and self.board(dest) == 0\n and dest not in visited\n and over != start\n )",
"def FindAllPaths(graph, start, end, path=[]):\n path = path + [start]\n if start == end:\n return [path]\n if start not in graph:\n return None\n paths = []\n for node in graph[start]:\n if node not in path:\n newpaths = find_all_paths(graph, node, end, path)\n for newpath in newpaths:\n paths.append(newpath)\n return paths",
"def findPath(g, start, end, path=[]):\n path = path + [start]\n if start == end:\n return path\n if not start in g:\n return None\n for node in g[start]:\n if node not in path:\n newpath = findPath(g, node, end, path)\n if newpath: return newpath\n return None",
"def path_search(start, goal):\n if start == goal:\n return [start]\n explored = set() \n queue = [ [start, ('', 0)] ]\n while queue:\n path = queue.pop(0)\n s = path[-2]\n linenum, changetimes = path[-1]\n if s == goal:\n print changetimes\n print path\n for x in queue:\n print x\n return path\n for state, action in bj_subway[s].items():\n if state not in explored:\n linechange = changetimes + 1\n explored.add(state)\n if linenum != action:\n linechange += 2\n path2 = path[:-1] + [action, state, (action, linechange)]\n queue.append(path2)\n queue.sort(key=lambda path:path[-1][-1])\n return []",
"def __search_path(self, start_node, goal_node):\n\n path = []\n queue = PriorityQueue()\n queue.put((0, start_node))\n visited = set(start_node)\n\n branch = {}\n found = False\n \n while not queue.empty():\n item = queue.get()\n current_cost = item[0]\n current_node = item[1]\n\n if current_node == goal_node: \n found = True\n break\n else:\n for next_node in self._route_graph[current_node]:\n cost = self._route_graph.edges[current_node, next_node]['weight']\n new_cost = current_cost + cost + self.__heuristic(next_node, goal_node)\n\n if next_node not in visited: \n visited.add(next_node) \n queue.put((new_cost, next_node))\n\n branch[next_node] = (new_cost, current_node)\n\n path = []\n path_cost = 0\n if found:\n # retrace steps\n path = []\n n = goal_node\n path_cost = branch[n][0]\n while branch[n][1] != start_node:\n path.append(branch[n][1])\n n = branch[n][1]\n path.append(branch[n][1])\n else:\n print(\"Path Not Found\")\n\n return path[::-1], path_cost",
"def find_nodes_from_here(start_node, key):\n node_ = start_node\n yield from find_nodes(node_, key)\n while node_.parent:\n this_key_ = node_.key\n node_ = node_.parent\n if node_.key == key: # pragma: no branch\n yield node_\n for child_ in node_.children:\n if child_.key == this_key_: # pragma: no branch\n continue\n yield from find_nodes(child_, key)",
"def bfs_paths(self, start: str, goal: str) -> List[Path]:\n queue = [(start, [start])]\n while queue:\n (node, path) = queue.pop(0)\n if node not in self.graph:\n yield []\n for _next in set(self.graph[node]) - set(path):\n if _next == goal:\n yield path + [_next]\n elif _next in self.graph:\n queue.append((_next, path + [_next]))",
"def dfs_paths(graph, start, goal, method='dfs'):\n \n # Define the search method\n stack_pop = -1\n if method == 'bfs':\n stack_pop = 0\n \n stack = [(start, [start])]\n while stack:\n (vertex, path) = stack.pop(stack_pop)\n neighbors = node_neighbors(graph, vertex)\n for next_node in set(neighbors) - set(path):\n if next_node == goal:\n yield path + [next_node]\n else:\n stack.append((next_node, path + [next_node]))",
"def __iter__(self):\n\n result = []\n\n # d - dict, p - path (keys sequence)\n def recurs_iter(d, p=None):\n p = p or []\n\n # k - key, v - value\n for k, v in iteritems(d):\n next_p = p + [k]\n if isinstance(v, dict):\n recurs_iter(v, next_p)\n else:\n result.append(tuple(next_p))\n\n recurs_iter(self.__dict__)\n\n return iter(result)",
"def find_next_step(start, end, paths):\r\n def find_paths(start, current, distance, paths, choices):\r\n \"\"\"\r\n Given the start point, and the current point, builds a dictionary indicating the first step\r\n and the minimum distance to the end using that step. Distance indicates the distance from\r\n current to end.\r\n \"\"\"\r\n # Find all paths resulting in the minimum distance\r\n options = []\r\n min_distance = min(paths[current].values())\r\n for option, distance in paths[current].items():\r\n if distance == min_distance:\r\n\r\n # If we find the beginning, break out\r\n if option == start:\r\n if option not in choices or choices[current] < distance + min_distance:\r\n choices[current] = distance + min_distance\r\n return\r\n\r\n # Add to list of options\r\n options.append(option)\r\n\r\n # For each path, recursively find minimal paths\r\n for option in options:\r\n find_paths(start, option, min_distance, paths, choices)\r\n\r\n choices = {}\r\n find_paths(start, end, 0, paths, choices)\r\n choices = sorted(choices.keys())\r\n return choices[0]",
"def reconstruct_path(goal: Vector2D, prev_node: dict) -> list:\n path = []\n prev = prev_node[goal] # remove 'goal' from path\n \n while prev != None:\n path.append(prev)\n prev = prev_node[prev]\n \n path = path[:-1] # remove 'start' from path\n path.reverse()\n return path",
"def _recursive_search(self, current_dict: dict, keys: List[str], max_depth: int, current_depth: int = 0) -> tuple:\n current_key = keys[current_depth]\n if current_depth < max_depth:\n\n if current_key not in current_dict:\n current_dict[current_key] = {}\n\n return self._recursive_search(current_dict[current_key], keys, max_depth, current_depth + 1)\n else:\n return current_key, current_dict",
"def reconstruct_path(came_from, start, goal):\n current = goal\n path = [current]\n\n # Append configuartion to board as a step until the begin situation is reached\n while current != start:\n current = came_from[current][0]\n path.append(current)\n path.append(start)\n path.reverse()\n return [path[1:]]",
"def get_final_key_paths(\n obj: Union[dict, list, tuple], cur_path: str = '',\n append_values: bool = False,\n paths: list = None, black_list: list = None,\n final_keys_only: bool = False):\n if paths is None:\n paths = []\n\n if isinstance(obj, (dict, list, tuple)):\n if isinstance(obj, dict):\n for key in obj:\n new_path = cur_path + f'[\\'{key}\\']'\n if isinstance(obj[key], dict):\n if black_list is not None and key in black_list:\n continue\n get_final_key_paths(\n obj[key], new_path, append_values, paths, black_list,\n final_keys_only)\n elif isinstance(obj[key], (list, tuple)):\n get_final_key_paths(\n obj[key], new_path, append_values, paths, black_list,\n final_keys_only)\n else:\n if final_keys_only:\n last_bracket = new_path.rfind('[\\'')\n new_path = new_path[\n last_bracket+2:new_path.rfind('\\'')]\n if append_values:\n to_append = [new_path, obj[key]]\n else:\n to_append = new_path\n paths.append(to_append)\n else:\n key_added = False\n for i in range(len(obj)):\n if isinstance(obj[i], (dict, tuple, list)):\n get_final_key_paths(\n obj[i], cur_path + f'[{i}]', append_values,\n paths, black_list, final_keys_only)\n else:\n if not key_added:\n if final_keys_only:\n last_bracket = cur_path.rfind('[\\'')\n cur_path = cur_path[\n last_bracket+2:cur_path.rfind('\\'')]\n if append_values:\n to_append = [cur_path, obj]\n else:\n to_append = cur_path\n paths.append(to_append)\n key_added = True\n\n return paths",
"def find_all_path(self, start_vertex, end_vertex, path=[]):\n\n graph = self.__graph_dict\n path = path + [start_vertex]\n if start_vertex == end_vertex:\n return [path]\n\n if start_vertex not in graph:\n return []\n\n paths = []\n for vertex in graph[start_vertex]:\n if vertex not in path:\n extended_paths = self.find_all_path(vertex, end_vertex,path)\n for p in extended_paths:\n paths.append(p)\n return paths",
"def depth_first_search(start, finish_line, next_moves):\n frontier = Stack()\n frontier.push(Move(start, None))\n searched = {start}\n full_search = []\n while not frontier.stuck:\n loc = frontier.pop()\n active = loc.current\n full_search.append(active)\n if finish_line(active):\n final_path = get_path(\"Depth First:\", loc)\n return final_path[1:], full_search[1:-1]\n for space in next_moves(active):\n if space not in searched:\n searched.add(space)\n frontier.push(Move(space, loc))\n return None, None",
"def depth_first_traversal(self, start_val, path=None):\n if not path:\n path = []\n if start_val not in self.keys():\n raise ValueError('No such starting value')\n if start_val not in path:\n path.append(start_val)\n for val in self.neighbors(start_val):\n self.depth_first_traversal(val, path)\n return path",
"def depth_first_traversal(self, start_val, path=None):\n if not path:\n path = []\n if start_val not in self.keys():\n raise ValueError('No such starting value')\n if start_val not in path:\n path.append(start_val)\n for val in self.neighbors(start_val):\n self.depth_first_traversal(val, path)\n return path"
] | [
"0.7048798",
"0.67607284",
"0.643885",
"0.6375813",
"0.63585025",
"0.62897635",
"0.62609977",
"0.6206619",
"0.6176041",
"0.6093086",
"0.6086056",
"0.60289514",
"0.6025127",
"0.6021499",
"0.6012426",
"0.6009373",
"0.596184",
"0.59196025",
"0.5900486",
"0.5898333",
"0.5861026",
"0.5860554",
"0.5856738",
"0.584956",
"0.58346456",
"0.58262736",
"0.5793318",
"0.57453877",
"0.57159865",
"0.57159865"
] | 0.7483994 | 0 |
Function to update news with schedule | def update_news(system: ChatSystem):
def update(system):
"""
update news as a scheduled function
:param system:
:return:
"""
session = system.db_session.create_session()
for sett in session.query(system.db_session.Settings).filter(
system.db_session.Settings.name == 'news'):
session.delete(sett)
session.commit()
system.module_news = set()
apply_news(system.module_news)
update(system)
schedule.every(5).hours.do(update, system)
# system.module_news = set()
# apply_news(system.module_news) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_news_to_model(self):\n internet = 1\n news_page_url_base = 'https://news.mydrivers.com/update/'\n url_to_get_list = []\n if self.dates_to_get[0] == 'Date entered error':\n self.show_in_browser.emit(f'Date entered error: {self.dates_to_get[1]}')\n self.show_in_browser.emit('Please check the entered date.')\n self.thread.quit()\n return\n for date_to_get in self.dates_to_get:\n if pd.to_datetime(date_to_get).date() > (datetime.date.today() + datetime.timedelta(days=1)):\n break\n url_to_get = news_page_url_base + date_to_get + '_1.htm'\n url_to_get_list.append(url_to_get)\n if url_to_get_list == []:\n self.show_in_browser.emit(f'Date entered error: {self.dates_to_get}')\n self.show_in_browser.emit('Please check the entered date.')\n self.thread.quit()\n return\n\n def get_url_content(url_to_get):\n try:\n r = requests.get(url_to_get)\n except BaseException as e:\n print('Cannot establish Internet connection with server.\\n', e)\n self.show_in_browser.emit('Cannot establish Internet connection with server.\\n')\n self.show_in_browser.emit(str(e))\n nonlocal internet\n internet = 0\n return\n status_code = r.status_code\n if status_code == 200:\n bs = BeautifulSoup(r.content, 'html.parser')\n news_pages_list = bs.select('#newsleft > div > a')\n news_summary_list = bs.select('#newsleft > li')\n else:\n news_pages_list = []\n news_summary_list = []\n return status_code, news_pages_list, news_summary_list\n\n def extract_news_pages(news_pages_list: list) -> list:\n news_page_url_list = []\n for item in news_pages_list:\n news_page_url_tail = item.get('href')\n if item.text.isnumeric() and news_page_url_tail:\n news_page_url = news_page_url_base + news_page_url_tail\n news_page_url_list.append(news_page_url)\n return news_page_url_list\n\n def extract_news_summary(news_summary_list: list):\n for item in news_summary_list:\n news_title = item.find('h3').find('a').text\n news_link = 'https:' + item.find('h3').find('a').get('href')\n news_author = item.find(class_=\"newstiao4\").text\n news_datetime_text = item.find(class_=\"news_plun hui2\") \\\n .find('li').text\n match = re.search('\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}',\n news_datetime_text)\n news_datetime = pd.to_datetime(match.group())\n # Naive Bayes classifier\n seg_list = jieba.cut(news_title)\n p_news_like_product = 1 # p_1*p_2*...*p_n\n p_news_like_1_product = 1 # (1-p_1)*(1-p_2)*...*(1-p_n)\n p_news_nolike_product = 1\n p_news_nolike_1_product = 1\n for word in seg_list:\n sql_query = f\"SELECT * FROM words WHERE word = '{word}';\"\n rows = self.conn.execute(sql_query).fetchall()\n if rows == []:\n p_word_like = 0.4\n p_word_nolike = 0.4\n else:\n p_word_like = rows[0][1] / rows[0][3]\n p_word_nolike = rows[0][2] / rows[0][3]\n p_news_like_product = p_news_like_product * p_word_like\n p_news_like_1_product = p_news_like_1_product * (1 - p_word_like)\n p_news_nolike_product = p_news_nolike_product * p_word_nolike\n p_news_nolike_1_product = p_news_nolike_1_product * (1 - p_word_nolike)\n p_news_like = 100 * p_news_like_product * self.p_like / (\n p_news_like_product * self.p_like + p_news_like_1_product * (1 - self.p_like))\n p_news_nolike = 100 * p_news_nolike_product * self.p_nolike / (\n p_news_nolike_product * self.p_nolike + p_news_nolike_1_product * (1 - self.p_nolike))\n p_news_like = round(p_news_like, 2)\n p_news_nolike = round(p_news_nolike, 2)\n\n self.news_df = self.news_df.append(\n pd.DataFrame([[news_datetime,\n news_title,\n news_link,\n news_author,\n p_news_like,\n p_news_nolike]],\n 
columns=self.news_df.columns),\n ignore_index=True)\n\n self.news_df = pd.DataFrame({'datetime': [], 'title': [], 'link': [],\n 'author': [], 'like': [], 'no_like': []})\n\n for url_to_get in url_to_get_list:\n try:\n status_code, news_pages_list, news_summary_list = \\\n get_url_content(url_to_get)\n except:\n break\n if status_code == 200:\n print(url_to_get, ' loaded.')\n self.show_in_browser.emit(url_to_get + ' loaded.')\n extract_news_summary(news_summary_list)\n else:\n print(url_to_get, 'is not available. Status Code:', status_code)\n self.show_in_browser.emit(url_to_get + ' is not available. Status Code: ' + str(status_code))\n continue\n\n for news_page_url in extract_news_pages(news_pages_list):\n status_code, news_pages_list, news_summary_list = \\\n get_url_content(news_page_url)\n if status_code == 200:\n extract_news_summary(news_summary_list)\n print(news_page_url, ' loaded.')\n self.show_in_browser.emit(news_page_url + ' loaded.')\n else:\n print(url_to_get, 'is not available. Status Code:',\n status_code)\n self.show_in_browser.emit(url_to_get + ' is not available. Status Code: ' + str(status_code))\n continue\n\n if internet == 0:\n self.thread.quit()\n return\n\n if (pd.to_datetime(self.dates_to_get[0]).date() == self.news_latest_datetime.date()):\n self.news_df.drop(self.news_df[self.news_df.datetime < self.news_latest_datetime].index,\n inplace=True)\n try:\n for link in self.news_latest_link:\n self.news_df.drop(self.news_df[self.news_df.link == link].index, inplace=True)\n except BaseException as e:\n print(e)\n else:\n sql_query = \"\"\"SELECT datetime FROM news_unread\"\"\"\n tmp_news_datetime = pd.read_sql_query(sql_query, self.conn,\n parse_dates='datetime')\n try: # Raise error when news_read is empty.\n sql_query = \"\"\"SELECT datetime FROM news_read\"\"\"\n tmp2_news_datetime = pd.read_sql_query(sql_query, self.conn,\n parse_dates='datetime')\n tmp_news_datetime = tmp_news_datetime.append(tmp2_news_datetime, ignore_index=True)\n del tmp2_news_datetime\n except:\n pass\n tmp_news_datetime.datetime = tmp_news_datetime.datetime.apply(lambda x: x.date())\n for date in self.dates_to_get:\n date = pd.to_datetime(date).date()\n if (tmp_news_datetime.datetime == date).any():\n sql_query = \"\"\"SELECT link FROM news_unread\"\"\"\n links = pd.read_sql_query(sql_query, self.conn)\n try:\n sql_query = \"\"\"SELECT link FROM news_read\"\"\"\n links2 = pd.read_sql_query(sql_query, self.conn)\n links = links.append(links2, ignore_index=True)\n except:\n pass\n links = links.link.tolist()\n self.news_df = self.news_df.drop(self.news_df[self.news_df.link.isin(links)].index)\n break\n\n self.news_df.sort_values('datetime', inplace=True)\n self.news_df.to_sql('news_unread', self.conn, index=False,\n if_exists='append')\n sql_query = \"\"\"\n SELECT * FROM news_unread ORDER BY datetime ASC\n \"\"\"\n self._data = pd.read_sql_query(sql_query, self.conn,\n parse_dates='datetime')\n self._data['datetime'] = self._data['datetime'].dt.strftime('%m-%d %H:%M')\n self.update_finished.emit()",
"def call_schedule(self, bot, update):\n bot.send_message(update.message.chat_id, '_1 пара_ 08:30 - 10:05\\n'\n '_2 пара_ 10:25 - 12:00\\n'\n '_3 пара_ 12:20 - 13:55\\n'\n '_4 пара_ 14:15 - 15:50\\n'\n '_5 пара_ 16:10 - 17:45',\n parse_mode='Markdown')",
"def update_news_intime(minutes):\n while True:\n db_update.update()\n time.sleep(60 * minutes)",
"def updateNewsFeed(self):\n try:\n news, events, categories, eventCategories = self.requestData()\n for language in NEWSFEED_LANGUAGES:\n self.newsFeedModel.update(news[language], events[language], categories[language],\n eventCategories[language], language=language)\n except Exception as e:\n print(\"there was a problem while updating the news feed\")\n raise e",
"def do_upt(self, arg):\n self.do_timesheet('update today')",
"def update_news_timer():\n try:\n global last_time_user_got_news\n last_time_user_got_news = dt.datetime.timestamp(dt.datetime.now())\n return '', 204\n except Exception as e:\n return e, 500",
"def update(system):\n session = system.db_session.create_session()\n for sett in session.query(system.db_session.Settings).filter(\n system.db_session.Settings.name == 'news'):\n session.delete(sett)\n session.commit()\n system.module_news = set()\n apply_news(system.module_news)",
"def news():\n mesosite = iemdb.connect('mesosite', bypass=True)\n mcursor = mesosite.cursor(cursor_factory=psycopg2.extras.DictCursor)\n # Last dailyb delivery\n lastts = mx.DateTime.now() + mx.DateTime.RelativeDateTime(hour=11, days=-1)\n mcursor.execute(\"\"\"\n SELECT *, to_char(entered, 'DD Mon HH:MI AM') as nicedate \n from news WHERE entered > '%s' \n ORDER by entered DESC\"\"\" % (\n lastts.strftime(\"%Y-%m-%d %H:%M\"),) )\n\n textfmt = \"\"\"\n +----------------------------------------------\n | Title : %(title)s\n | Date : %(nicedate)s\n | Author: %(author)s\n | URL : %(url)s\n +----------------------------------------------\n\n%(body)s\n\n\"\"\"\n htmlfmt = \"\"\"\n<hr />\n<br /><strong>Title:</strong> <a href=\"http://mesonet.agron.iastate.edu/onsite/news.phtml?id=%(id)s\">%(title)s</a>\n<br /><strong>Date:</strong> %(nicedate)s\n<br /><strong>Author:</strong> %(author)s\n<br /><a href=\"%(url)s\">link</a>\n\n<p>%(body)s\n\n\"\"\"\n txt = \"> News\\n\"\n html = \"<h3>News</h3>\"\n\n for row in mcursor:\n txt += textfmt % row\n html += htmlfmt % row\n if mcursor.rowcount == 0:\n txt += \"\\n No news is good news\\n\\n\"\n html += \"<strong>No news is good news</strong>\"\n\n return txt, html",
"def _update_schedule(self, interval):\n while True:\n if Schedule().update_current_week():\n self._view_schedule()\n print 'Yes'\n time.sleep(interval)",
"def get_news() -> None:\r\n api_key = get_api_key()\r\n country = 'gb'\r\n url = 'https://newsapi.org/v2/top-headlines?country={}&apiKey={}' \\\r\n .format(country, api_key)\r\n new_news = requests.get(url).json()\r\n with open('news.json', 'r') as news_file:\r\n try:\r\n old_news = json.load(news_file)\r\n except Exception as error:\r\n log_warning(error)\r\n # Checks if the news is new or the same as the news already stored\r\n # in news.json.\r\n for i in range(5):\r\n if old_news['articles'][i] != new_news['articles'][i]:\r\n news_notification = ({'timestamp': \\\r\n time.strftime('%H:%M:%S'),\r\n 'type': 'News',\r\n 'title': new_news \\\r\n ['articles'][i]['title'],\r\n 'description': ''})\r\n news_log = ({'timestamp': time.strftime('%H:%M:%S'),\r\n 'type': 'news',\r\n 'description': 'New news articles' \\\r\n + new_news['articles'][i]['title'],\r\n 'error': ''})\r\n new_notification(news_notification)\r\n log_info(news_log)\r\n # RuntimeError caused when text to speech is already\r\n # currently playing something else.\r\n try:\r\n tts('New news story.' \\\r\n + new_news['articles'][i]['title'])\r\n except RuntimeError:\r\n log_error(RuntimeError)\r\n\r\n with open('news.json', 'w') as news_file:\r\n json.dump(new_news, news_file, indent=2)\r\n # Start the timer to run this function every 60 seconds.\r\n Timer(60, get_news).start()",
"def test_update_schedule(self):\n body = Schedule()\n response = self.client.open('/v1/schedule',\n method='PUT',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))",
"def save(self, *args, **kwargs):\n super(News, self).save(*args, **kwargs)\n pigeonpost_queue.send(sender=self, defer_for=6*60*60)",
"def update_cinema_schedule(self):\n logging.warning(\"initialise cinema schedule update process ...\")\n\n logging.warning(\"deleting outdated schedules ...\")\n\n self.loader.delete_outdated_schedules()\n\n logging.warning(\"deleting outdated schedules complete!\")\n\n cinema_schedule = CinemaSchedule()\n cinema_schedule_data = {} # declare data object\n\n logging.warning(\"retrieving and merging cathay schedules ...\")\n cathay_schedule = cinema_schedule.get_cathay_schedule()\n self._merge_schedules(cinema_schedule_data, cathay_schedule)\n\n logging.warning(\"retrieving and merging golden village schedules ...\")\n gv_schedule = cinema_schedule.get_gv_schedule()\n self._merge_schedules(cinema_schedule_data, gv_schedule)\n\n logging.warning(\"retrieving and merging shaw brother schedules ...\")\n sb_schedule = cinema_schedule.get_sb_schedule()\n self._merge_schedules(cinema_schedule_data, sb_schedule)\n self._match_movie_titles(cinema_schedule_data) # insert imdb id\n self.loader.load_cinema_schedule(cinema_schedule_data) # load data\n\n logging.warning(\"cinema schedule update process complete.\")",
"def svn_info_t_schedule_set(svn_info_t_self, svn_wc_schedule_t_schedule): # real signature unknown; restored from __doc__\n pass",
"def news():\n mesosite = get_dbconn(\"mesosite\")\n mcursor = mesosite.cursor(cursor_factory=psycopg2.extras.DictCursor)\n # Last dailyb delivery\n lastts = datetime.datetime.now() + datetime.timedelta(days=-1)\n mcursor.execute(\n \"SELECT *, to_char(entered, 'DD Mon HH:MI AM') as nicedate \"\n \"from news WHERE entered > %s ORDER by entered DESC\",\n (lastts,),\n )\n\n textfmt = \"\"\"\n +----------------------------------------------\n | Title : %(title)s\n | Date : %(nicedate)s\n | Author: %(author)s\n | URL : %(url)s\n +----------------------------------------------\n\n%(body)s\n\n\"\"\"\n htmlfmt = (\n \"<hr />\\n\"\n \"<br /><strong>Title:</strong>\\n\"\n '<a href=\"https://mesonet.agron.iastate.edu/'\n 'onsite/news.phtml?id=%(id)s\">%(title)s</a>\\n'\n \"<br /><strong>Date:</strong> %(nicedate)s\\n\"\n \"<br /><strong>Author:</strong> %(author)s\\n\"\n '<br /><a href=\"%(url)s\">link</a>\\n\\n'\n \"<p>%(body)s\\n\"\n )\n txt = \"> News\\n\"\n html = \"<h3>News</h3>\"\n\n for row in mcursor:\n txt += textfmt % row\n html += htmlfmt % row\n if mcursor.rowcount == 0:\n txt += \"\\n No news is good news\\n\\n\"\n html += \"<strong>No news is good news</strong>\"\n\n return txt, html",
"def put(self):\n try:\n save_schedules_to_file(request.json['payload'])\n return 'Celery Beat schedules updated.'\n except Exception:\n logging.exception('Failed to update Celery Beat schedules!')\n raise",
"def schedule_text():",
"async def news(self):\n url = f\"https://newsapi.org/v2/top-headlines?country=nz&apiKey={self.bot.news_api_key}\"\n async with ClientSession() as session:\n async with session.get(url) as response:\n r = await response.json()\n firstArticle = r[\"articles\"][0]\n nSource = firstArticle[\"source\"][\"name\"]\n nTitle = firstArticle[\"title\"]\n nTimestamp = firstArticle[\"publishedAt\"]\n embed = discord.Embed(\n title=f\"News Title: {nTitle}\", description=f\"News Source: {nSource}\"\n )\n embed.add_field(name=\"News Content\", value=firstArticle[\"description\"])\n embed.set_image(url=firstArticle[\"urlToImage\"])\n embed.set_footer(text=f\"News Timestamp: {nTimestamp}\")\n\n channel = self.bot.get_channel(self.bot.main_channel_id)\n await channel.send(embed=embed)",
"def schedule_paragraph():",
"def news(request, start_id):\n\n MAX_NEWS = 10\n end_id = string.atoi(start_id) + 10\n\n news_count = New.objects.count() # Pocet vsech zaznamu novinek\n news_list = New.objects.all().order_by(\"-date\")[start_id:end_id] # Sort by date ... and only part of list\n # misto vsech zaznamu ziskat jen ty v intervalu start - stop -> API\n\n # Vypocet prvniho ID z predchozi skupiny novinek (jedna skupina = MAX_NEWS) \n start_id_num = string.atoi(start_id)\n if (start_id_num + MAX_NEWS) < news_count:\n preview_start_id = start_id_num + MAX_NEWS\n else:\n preview_start_id = start_id_num\n\n # Vypocet prvniho ID z nasledujici skupiny novinek (jedna skupina = MAX_NEWS) \n next_start_id = start_id_num - MAX_NEWS # prvni ID nasledujicich novinek\n if next_start_id < 0:\n next_start_id = 0;\n\n pictureOfWeek = PhotoOfWeek.objects.last()\n context = {'news_list': news_list, 'news_count': news_count, 'pictureOfWeek': pictureOfWeek, 'start_id': start_id,\n 'preview_start_id': preview_start_id, 'next_start_id': next_start_id}\n return render(request, 'news/news.html', context)",
"def schedule(request):\r\n\r\n return render(request, 'editorial/schedule.html', {})",
"def _updateFeed(pk):\n feed = get_object_or_404(Feed, pk=pk)\n\n rawFeed, entries = feed._fetch_feed() \n\n feed.title = rawFeed.get('title', None)\n feed.subtitle = rawFeed.get('subtitle', None)\n feed.copyright = rawFeed.get('rights', None)\n feed.ttl = rawFeed.get('ttl', None)\n feed.atomLogo = rawFeed.get('logo', None)\n\n # Try to find the updated time\n updated = rawFeed.get(\n 'updated_parsed',\n rawFeed.get('published_parsed', None),\n )\n\n if updated:\n updated = datetime.datetime.fromtimestamp(\n time.mktime(updated)\n )\n\n feed.pubdate = updated\n\n super(Feed, feed).save()\n\n if entries:\n dbEntriesCreate = []\n dbEntriesupdate = []\n for raw_entry in entries:\n entry = Entry.objects.parseFromFeed(raw_entry)\n entry.feed = feed\n\n try:\n newEntry = Entry.objects.get(guid=entry.guid, feed=feed)\n except:\n newEntry = None\n\n \n if newEntry:\n # if it was updated, then mark it as unread, otherwise no need to do anything\n if newEntry.date > entry.date:\n entry.state = ENTRY_UNREAD\n id = newEntry.id\n newEntry = entry\n newEntry.id = id\n dbEntriesupdate.append(newEntry)\n else:\n dbEntriesCreate.append(entry)\n\n with transaction.atomic():\n if len(dbEntriesCreate)>0:\n Entry.objects.bulk_create(dbEntriesCreate)\n if len(dbEntriesupdate)>0:\n fields = ['feed', 'state', 'title' , 'content', 'date', 'author', 'url' ,'comments_url']\n Entry.objects.bulk_update(dbEntriesupdate, fields)\n\n return",
"def news_for_week(self):\n\n raise NotImplementedError",
"def nflschedule(self, irc, msg, args, optlist, optteam):\n \n fullSchedule = False\n for (option, arg) in optlist:\n if option == 'full':\n fullSchedule = True\n \n optteam = optteam.upper()\n \n if optteam not in self._validteams():\n irc.reply(\"Team not found. Must be one of: %s\" % self._validteams())\n return\n \n lookupteam = self._translateTeam('yahoo', 'team', optteam) # don't need a check for 0 here because we validate prior.\n \n if fullSchedule: # diff url/method.\n url = self._b64decode('aHR0cDovL3Nwb3J0cy55YWhvby5jb20vbmZsL3RlYW1z') + '/%s/schedule' % lookupteam\n\n try:\n request = urllib2.Request(url)\n html = (urllib2.urlopen(request)).read()\n except:\n irc.reply(\"Failed to open: %s\" % url)\n return\n \n soup = BeautifulSoup(html)\n table = soup.find('table', attrs={'summary':'Regular Season Games'})\n \n if not table:\n irc.reply(\"ERROR: Failed to find schedule for: %s\") % optteam\n return\n \n tbody = table.find('tbody')\n rows = tbody.findAll('tr')\n\n append_list = []\n\n for row in rows:\n tds = row.findAll('td')\n week = tds[0]\n \n if row.find('td', attrs={'class':'title bye'}):\n date = \"BYE\"\n opp = \"\"\n score = \"\"\n appendString = \"W{0}-{1}\".format(ircutils.bold(week.getText()), ircutils.underline(\"BYE\"))\n else:\n date = tds[1].getText()\n dateSplit = date.split(',', 1) # take the date, dump the rest.\n date = dateSplit[1]\n opp = tds[2] # with how the Tag/string comes in, we need to extract one part and format the other.\n oppName = opp.find('span')\n if oppName:\n oppName.extract()\n oppTeam = opp.find('a').getText() \n #opp = tds[2].find('span').getText()\n #opp = self._translateTeam('team','full', opp) # use the db to make a full team small.\n score = tds[3].getText().replace('EDT','').replace('EST','').replace('pm','').replace('am','') # strip the garbage\n #score = score.replace('W', ircutils.mircColor('W', 'green')).replace('L', ircutils.mircColor('L', 'red'))\n appendString = \"W{0}-{1} {2} {3}\".format(ircutils.bold(week.getText()), date.strip(), oppTeam.strip(), score.strip())\n \n append_list.append(appendString)\n\n descstring = string.join([item for item in append_list], \" | \")\n output = \"{0} SCHED :: {1}\".format(ircutils.mircColor(optteam, 'red'), descstring)\n irc.reply(output)\n else:\n url = self._b64decode('aHR0cDovL3Nwb3J0cy55YWhvby5jb20vbmZsL3RlYW1z') + '/%s/calendar/rss.xml' % lookupteam\n \n try:\n req = urllib2.Request(url)\n response = urllib2.urlopen(req)\n html = response.read()\n except:\n irc.reply(\"Cannot open: %s\" % url)\n return\n\n # clean this stuff up\n html = html.replace('<![CDATA[','').replace(']]>','').replace('EDT','').replace('\\xc2\\xa0',' ')\n\n soup = BeautifulSoup(html)\n items = soup.find('channel').findAll('item')\n \n append_list = []\n\n for item in items:\n title = item.find('title').renderContents().strip() # title is good.\n day, date = title.split(',')\n desc = item.find('description') # everything in desc but its messy.\n desctext = desc.findAll(text=True) # get all text, first, but its in a list.\n descappend = (''.join(desctext).strip()) # list transform into a string.\n if not descappend.startswith('@'): # if something is @, it's before, but vs. otherwise.\n descappend = 'vs. ' + descappend\n descappend += \" [\" + date.strip() + \"]\"\n append_list.append(descappend) # put all into a list.\n\n \n descstring = string.join([item for item in append_list], \" | \")\n output = \"{0} {1}\".format(ircutils.bold(optteam), descstring)\n irc.reply(output)",
"def _create_schedules(self):\n\n ''''''",
"def mlbschedule(self, irc, msg, args, optteam):\n \n optteam = optteam.upper().strip()\n\n if optteam not in self._validteams():\n irc.reply(\"Team not found. Must be one of: %s\" % self._validteams())\n return\n \n lookupteam = self._translateTeam('yahoo', 'team', optteam) # (db, column, optteam)\n\n url = self._b64decode('aHR0cDovL3Nwb3J0cy55YWhvby5jb20vbWxiL3RlYW1z') + '/%s/calendar/rss.xml' % lookupteam\n\n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except:\n irc.reply(\"Cannot open: %s\" % url)\n return\n \n if \"Schedule for\" not in html:\n irc.reply(\"Cannot find schedule. Broken url?\")\n return\n \n # clean this stuff up\n html = html.replace('<![CDATA[','') #remove cdata\n html = html.replace(']]>','') # end of cdata\n html = html.replace('EDT','') # tidy up times\n html = html.replace('\\xc2\\xa0',' ') # remove some stupid character.\n\n soup = BeautifulSoup(html)\n items = soup.find('channel').findAll('item')\n\n append_list = []\n\n for item in items:\n title = item.find('title').renderContents().strip() # title is good.\n day, date = title.split(',')\n desc = item.find('description') # everything in desc but its messy.\n desctext = desc.findAll(text=True) # get all text, first, but its in a list.\n descappend = (''.join(desctext).strip()) # list transform into a string.\n if not descappend.startswith('@'): # if something is @, it's before, but vs. otherwise.\n descappend = 'vs. ' + descappend\n descappend += \" [\" + date.strip() + \"]\" # can't translate since Yahoo! sucks with the team names here. \n append_list.append(descappend) # put all into a list.\n\n descstring = string.join([item for item in append_list], \" | \")\n output = \"{0} {1}\".format(ircutils.bold(optteam), descstring)\n \n irc.reply(output)",
"def updateOneFeed(self):\n feeds = backend.Feed.query.order_by(\"check_date\").limit(1).all()\n if feeds:\n feed = feeds[0]\n print feed.check_date\n # Only check if it has not been checked in at least 10 minutes\n if (datetime.datetime.now() - feed.check_date).seconds > 600:\n print \"Scheduled update of: \",feed.xmlurl\n fetcher_in.put(['update', feed.xmlurl, feed.etag, feed.check_date])",
"def update_activity():\n pass",
"def news_daily():\n #Fetches data from API and creates global varibles.\n news_handle(news_fetch(config_fetcher('news_region'), config_fetcher('news_key')))\n #Creates a daily breifing using varibles\n news_daily_news = Markup((f\"The top headline for today is entitled: {title_1}, and was \\\nwritten by {author_1}. It was written for {source_1} and can be found here: {url_1_final}. \\\n\\nHere is a second headline, entitled: {title_2}, written by {author_2}. \\\nIt was written for {source_2} and can be found here {url_2_final}.\"))\n return news_daily_news",
"def set_schedule(self, new_schedule):\n #first, set all the others to inactive\n\n new_schedule.deprecated=False\n if new_schedule.started == None or new_schedule.started <= datetime.utcnow():\n new_schedule.started=datetime.utcnow()\n for sched in self.weekly_schedule:\n if not sched.deprecated:\n #sched.deprecated=True\n sched.ended=datetime.utcnow()\n sched.save()\n elif new_schedule.started > datetime.utcnow():\n #if it's in the future, then don't deprecate the future schedule, just procede along and let the system set the dates correctly\n pass\n self.weekly_schedule.append(new_schedule)\n self.save()"
] | [
"0.6767329",
"0.67203206",
"0.66092193",
"0.636598",
"0.6168731",
"0.60896766",
"0.60416806",
"0.59389657",
"0.59258604",
"0.59028083",
"0.5899791",
"0.5885284",
"0.5835838",
"0.5824128",
"0.5811874",
"0.576966",
"0.57586664",
"0.57467675",
"0.5735995",
"0.5734876",
"0.571546",
"0.57066053",
"0.5701186",
"0.56997466",
"0.56811994",
"0.5669418",
"0.5645255",
"0.56271243",
"0.5611421",
"0.55962336"
] | 0.77732843 | 0 |
Initialize the downloader with the specified msgId string | def __init__(self, msgId):
self.msgId = msgId
# The HTTPDownloader
self.downloader = None
# Write the downloaded NZB here temporarily
self.tempFilename = os.path.join(Hellanzb.TEMP_DIR,
tempFilename(self.TEMP_FILENAME_PREFIX) + '.nzb')
# The real NZB filename determined from HTTP headers
self.nzbFilename = None
# Whether or not it appears that this NZB with the msgId does not exist on newzbin
self.nonExistantNZB = False
# DNZB error message
self.errMessage = False
# Number of attempts to download this NZB
self.attempt = 0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, msg_id, msg_data=None, msg_priority=MSG_DEFAULT_PRIORITY, msg_is_waitable=False):\n self._id = int(msg_id)\n self._data = msg_data\n self._processed = False\n self._error = None\n self._condition = threading.Condition() if msg_is_waitable else None\n\n if msg_priority is None:\n self._priority = Message.MSG_DEFAULT_PRIORITY\n else:\n if msg_priority > Message.MSG_LOWEST_PRIORITY:\n self._priority = Message.MSG_LOWEST_PRIORITY\n elif msg_priority < Message.MSG_HIGHEST_PRIORITY:\n self._priority = Message.MSG_HIGHEST_PRIORITY\n else:\n self._priority = int(msg_priority)",
"def _initReceived(self, msg):\r\n if len(msg) != 32:\r\n log.msg('Protocol Error: iInit message has invalid format.')\r\n self.transport.loseConnection()\r\n return\r\n\r\n d = self._endpoint.processInit(self, msg[:16], msg[16:])\r\n d.addCallbacks(self._initSuccessful, self._initFailed)",
"def init_downloader(self) -> None:\n raise NotImplementedError",
"def __init__(self,\n msg_id=0x00,\n payload=b''):\n self.sync = MESSAGE_TX_SYNC\n self._msg_id = msg_id\n\n self.is_extended_message = False\n self.flag_byte = None\n self._extended_data_bytes = bytearray()\n self._payload = bytearray()\n self.set_payload(payload)",
"def __init__(self, message, decode_msg, state):\n self.message = message\n self.state = state\n self.decode_msg = decode_msg\n self.transport = None",
"def __init__(self, msg):\n\n self.msg = msg",
"def __init__(self, msg: str):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def __init__(self, msg):\n self.msg = msg",
"def load_message(message_id):\n pathname = \"messages/{}.json\".format(message_id)\n return _load_message(pathname)",
"def __init__(self,msg) -> None:\n\n super().__init__(self)\n self.msg=msg",
"def __init__(self, downloader=None):\n self._ready = False\n self.set_downloader(downloader)",
"def __init__(self, msg=\"\"):\n self._msg = msg\n super().__init__()",
"def __init__(self, message_id: str, error_code: str):\n super().__init__(f\"Command failed: {error_code}\")\n self.message_id = message_id\n self.error_code = error_code",
"def init(self):\n\t\tsp_addcallback(self.sp_callback)\n\t\tself.downloader.start()",
"def __init__(self, loader, id):\n\n self.loader = loader\n self.id = id",
"def init(self, id_dds, dds):\n self.info('Initialized with dds %r' % id_dds) \n self._dds = dds\n self.id_dds = id_dds",
"def initialize(dump_directory=\".\"):\n global _messages_file\n fb_message_filename = \"html/messages.htm\"\n _messages_file = os.path.join(dump_directory, fb_message_filename)\n if not os.path.isfile(_messages_file):\n print(\"\"\"\n The directory provided did not contain messages.htm,\n the directory should be within the archive\n downloaded from facebook.com\n \"\"\")\n _messages_file = None",
"def __init__(self, stream_id):\n self.stream_id = stream_id\n self._stream = None",
"async def retrieve(self, msg_id: int):\n if not self._session:\n await self._create_session()\n \n raise NotImplementedError"
] | [
"0.58565474",
"0.5774373",
"0.56294054",
"0.56270933",
"0.55998594",
"0.5594729",
"0.5539883",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5494888",
"0.5478392",
"0.53790474",
"0.5343294",
"0.53221077",
"0.52817357",
"0.5278199",
"0.527527",
"0.52541804",
"0.5241599",
"0.5223825",
"0.519162"
] | 0.69161874 | 0 |
The downloader will feed headers via this function | def gotHeaders(self, headers):
super(self.__class__, self).gotHeaders(headers)
if headers.has_key('x-dnzb-name'):
name = headers.get('x-dnzb-name')[0]
# XXX may want to sanitize a little more
cleanName = name.replace('/', '_').replace('\\','_')
self.nzbFilename = '%s_%s.nzb' % (self.msgId, cleanName)
else:
# The failure case will go to the generic error handler atm, so this is most likely unused
if headers.has_key('x-dnzb-rtext'):
self.errMessage = headers.get('x-dnzb-rtext')[0]
else:
self.errMessage = 'DNZB service error'
info('DNZB request failed: %s' % self.errMessage)
self.nzbFilename = None
if headers.has_key('x-dnzb-rcode') and headers.get('x-dnzb-rcode')[0] == '404':
self.nonExistantNZB = True
self.nzbCategory = headers.get('x-dnzb-category')[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __getFile_urllib(self, _src, _dst):\n\n #-------------------- \n # Open the local destination file \n # so that it can start reading in the buffers.\n #-------------------- \n try:\n dstDir = os.path.dirname(_dst) \n if not os.path.exists(dstDir):\n os.makedirs(dstDir)\n dstFile = open(_dst, \"wb\")\n except Exception as e:\n self.__downloadFailed(_src, _dst, dstFile, str(e))\n return\n\n\n\n #-------------------- \n # Construct the request and authentication handler\n #-------------------- \n xnatUrl = Xnat.path.makeXnatUrl(self.host, _src)\n request = urllib.request.Request(xnatUrl)\n request.add_header(\"Authorization\", \n self.authHeader['Authorization'])\n\n\n\n #-------------------- \n # Get the response from the XNAT host.\n #-------------------- \n try:\n response = urllib.request.urlopen(request)\n\n\n\n\n #-------------------- \n # If the urllib.request version fails then use http.client.\n # See get_http.client for more details.\n #-------------------- \n #except urllib.request.HTTPError, e:\n except Exception as e:\n #print(str(e))\n #print(f\"{_src} {_dst}\")\n #print(d)\n self.__downloadFailed(_src, _dst, dstFile, str(e))\n return\n\n\n #-------------------- \n # Get the content size, first by checking log, then by reading \n # header\n #-------------------- \n self.downloadTracker['downloadedSize']['bytes'] = 0 \n self.downloadTracker['totalDownloadSize'] = \\\n self.getFileSize(xnatUrl)\n if not self.downloadTracker['totalDownloadSize']['bytes']:\n # If not in log, read the header\n if response.headers and \"Content-Length\" in response.headers:\n self.downloadTracker['totalDownloadSize']['bytes'] = \\\n int(response.headers[\"Content-Length\"]) \n self.downloadTracker['totalDownloadSize']['MB'] = \\\n Xnat.utils.bytesToMB(\\\n self.downloadTracker['totalDownloadSize']['bytes'])\n\n\n #-------------------- \n # Start the buffer reading cycle by\n # calling on the buffer_read function above.\n #-------------------- \n bytesRead = self.__bufferRead(xnatUrl, dstFile, response)\n dstFile.close()",
"def down(self):\n try:\n self.do_request()\n info = self.response.info()\n self.mime_type = info.gettype()\n if self.mime_type in self.deny_mimes:\n raise DenyMimes('Wrong Mime type: ' + self.mime_type)\n self.header_file_bytes = int(info.getheader(\"Content-Length\", '0').strip())\n self.content_disposition = info.getheader(\"Content-Disposition\", '')\n\n self.auto_get_file_extension()\n if None == self.save_file_ext:\n raise CanNotGuessExtension('Can not guess file extension, the url is: ' + self.url)\n\n local_file_full_path = self.save_dir + os.sep + self.save_file_without_ext + self.save_file_ext\n outfile = open(local_file_full_path, 'wb')\n self.downloaded_bytes = 0\n while True:\n s = self.response.read(self.chunk_read_size)\n read_len = len(s)\n if self.downing_callback:\n self.downing_callback(self) # this is a hook\n if read_len == 0:\n break\n outfile.write(s)\n self.downloaded_bytes += read_len\n\n except urllib2.HTTPError, e:\n raise DownError('urllib2.HTTPError code: %s' % (e.code,))\n except urllib2.URLError, e:\n raise DownError('urllib2.URLError:reason: %s' % (str(e.reason),))\n except socket.timeout, e:\n raise DownError('socket.timeout: %s' % (str(e),))\n except Exception, e:\n raise DownError('unknown exception: %s' % (str(e) + traceback.format_exc(),))\n else:\n pass\n finally:\n pass",
"def download_files(self):",
"def __getFile_httplib(self, _src, _dst):\n\n #-------------------- \n # Pre-download callbacks\n #-------------------- \n self.runEventCallbacks('downloadStarted', _src, -1)\n self.runEventCallbacks('downloading', _src, 0)\n\n\n\n #-------------------- \n # Download\n #-------------------- \n response = self.__httpsRequest('GET', _src)\n data = response.read() \n with open(_dst, 'wb') as f:\n f.write(data) \n\n\n\n #-------------------- \n # Post-download callbacks\n #-------------------- \n self.removeFromDownloadQueue(_src)\n self.runEventCallbacks('downloadFinished', _src)",
"def get(self, *args, **kwargs):\n hdr = CurlHeader()\n buf = strio()\n self._set_defaults()\n cmd = self._mkcurlcmd(*args, **kwargs)\n status = Uprocess().call(cmd, close_fds=True, stderr=Msg.chlderr,\n stdout=Msg.chlderr) # call curl\n hdr.setvalue_from_file(self._files[\"header_file\"])\n hdr.data[\"X-ND-CURLSTATUS\"] = status\n if status:\n err_down = str(FileUtil(self._files[\"error_file\"]).getdata('r'))\n Msg().err(\"Error: in download: %s\", err_down)\n FileUtil(self._files[\"output_file\"]).remove()\n return (hdr, buf)\n status_code = self.get_status_code(hdr.data[\"X-ND-HTTPSTATUS\"])\n if \"header\" in kwargs:\n hdr.data[\"X-ND-HEADERS\"] = kwargs[\"header\"]\n if status_code == 401: # needs authentication\n pass\n elif 300 <= status_code <= 308: # redirect\n pass\n elif \"ofile\" in kwargs:\n if status_code == 206 and \"resume\" in kwargs:\n os.rename(self._files[\"output_file\"], kwargs[\"ofile\"])\n elif status_code == 416:\n if \"resume\" in kwargs:\n kwargs[\"resume\"] = False\n (hdr, buf) = self.get(self._files[\"url\"], **kwargs)\n elif status_code != 200:\n Msg().err(\"Error: in download: \", str(\n hdr.data[\"X-ND-HTTPSTATUS\"]), \": \", str(status))\n FileUtil(self._files[\"output_file\"]).remove()\n else: # OK downloaded\n os.rename(self._files[\"output_file\"], kwargs[\"ofile\"])\n if \"ofile\" not in kwargs:\n try:\n buf = strio(open(self._files[\"output_file\"], 'rb').read())\n except(IOError, OSError):\n Msg().err(\"Error: reading curl output file to buffer\")\n FileUtil(self._files[\"output_file\"]).remove()\n FileUtil(self._files[\"error_file\"]).remove()\n FileUtil(self._files[\"header_file\"]).remove()\n return (hdr, buf)",
"def download(self):\n pass",
"def download(self):\n pass",
"def download_and_prepare(self):\n self._download_and_prepare()",
"def download(self):\r\n \r\n # RAR Files names\r\n if self.debug==0:\r\n rar_files_name = [\"K001.rar\",\"K002.rar\",\"K003.rar\",\"K004.rar\",\"K005.rar\",\"K006.rar\",\r\n \"KA01.rar\", \"KA03.rar\", \"KA04.rar\", \"KA05.rar\", \"KA06.rar\", \"KA07.rar\", \r\n \"KA08.rar\", \"KA09.rar\", \"KA15.rar\", \"KA16.rar\", \"KA22.rar\", \"KA30.rar\", \r\n \"KB23.rar\", \"KB24.rar\", \"KB27.rar\", \r\n \"KI01.rar\", \"KI03.rar\", \"KI04.rar\", \"KI05.rar\", \"KI07.rar\", \"KI08.rar\", \r\n \"KI14.rar\", \"KI16.rar\", \"KI17.rar\", \"KI18.rar\", \"KI21.rar\"]\r\n else:\r\n rar_files_name = [\"K002.rar\", \"KA01.rar\", \"KI01.rar\"]\r\n\r\n url = self.url\r\n \r\n dirname = self.rawfilesdir\r\n dir_rar = \"rar_files\"\r\n if not os.path.isdir(dirname):\r\n os.mkdir(dirname)\r\n if not os.path.isdir(os.path.join(dirname, dir_rar)):\r\n os.mkdir(os.path.join(dirname, dir_rar))\r\n \r\n\r\n print(\"Downloading RAR files:\")\r\n for i in rar_files_name:\r\n file_name = i\r\n if not os.path.exists(os.path.join(dirname, dir_rar, file_name)):\r\n urllib.request.urlretrieve(url+file_name, os.path.join(dirname, dir_rar, file_name))\r\n print(file_name)\r\n \r\n print(\"Extracting files:\")\r\n for i in rar_files_name:\r\n if not os.path.exists(os.path.join(dirname, i[:4])):\r\n file_name = os.path.join(dirname, dir_rar, i)\r\n Archive(file_name).extractall(dirname) \r\n print(i)\r\n\r\n if self.debug==0:\r\n files_path = self.files\r\n else:\r\n files_path = files_debug(self.rawfilesdir)\r\n\r\n print(files_path)\r\n self.files = files_path",
"def get_download_data(self, response_headers):\n data = dict()\n data['time'] = time.time()\n data['data'] = response_headers['Content-Length']\n return data",
"def download(self):\n #the link has some meta data in it that we need to get a hold of so we cant use metaData.getLink()\n data = None\n\n for link in self.metaData.jsonObj['links']:\n if link.get('rel') == \"content\":\n data = link\n\n assert data is not None\n\n response = self._adapter.getRequest(data['href'], self._baseHeader)\n return {\"filename\": data['title'], \"mime\": data['type'], \"binary\": response['Body'] }",
"def get_headers_and_data(self):\n\n if self.config.flag_usecache:\n fpath, fhdr, dirpath = self.get_url_store_paths()\n\n fpath_f = os.path.isfile(fpath)\n fhdr_f = os.path.isfile(fhdr)\n \n if fpath_f and fhdr_f:\n try:\n content = zlib.decompress(open(fpath).read())\n headers = eval(zlib.decompress(open(fhdr).read()))\n\n if self.make_head_request(headers):\n # Update URL from cache\n self.url = self.headers.get('url', self.url)\n \n log.info(self.url, \"==> URL is up-to-date, returning data from cache\")\n\n self.content = content\n self.headers = headers\n\n self.content_type = urlhelper.get_content_type(self.url, self.headers)\n \n eventr = crawlerbase.CrawlerEventRegistry.getInstance() \n # Raise the event for retrieving URL from cache\n eventr.publish(self, 'download_cache',\n message='URL has been retrieved from cache',\n code=304,\n event_key=self.url, \n params=self.__dict__) \n\n return True\n except Exception, e:\n log.error(\"Error in getting URL headers & data for URL\",self.url)\n log.error(\"\\t\",str(e))\n else:\n if not fpath_f:\n log.debug(\"Data file [%s] not present =>\" % fpath, self.url)\n if not fhdr_f:\n log.debug(\"Header file [%s] not present =>\" % fhdr, self.url) \n\n return False",
"def __getFile_requests(self, _src, _dst):\n\n #-------------------- \n # Get the content size from scan json\n #-------------------- \n self.downloadTracker['downloadedSize']['bytes'] = 0 \n self.downloadTracker['totalDownloadSize'] = self.getFileSize(_src)\n\n #-------------------- \n # Pre-download callbacks\n #-------------------- \n size = self.downloadTracker['totalDownloadSize']['bytes'] \\\n if self.downloadTracker['totalDownloadSize']['bytes'] else -1\n self.runEventCallbacks('downloadStarted', _src, size)\n self.runEventCallbacks('downloading', _src, 0)\n\n #-------------------- \n # Open the local destination file \n # so that it can start reading in the buffers.\n #-------------------- \n try:\n dstFile = _dst\n dstDir = os.path.dirname(_dst) \n if not os.path.exists(dstDir):\n os.makedirs(dstDir)\n # print(\"dstFile: {}\".format(dstFile))\n except Exception as e:\n print(e)\n self.__downloadFailed(_src, _dst, dstFile, str(e))\n self.exceptionPopup.setText(str(e))\n return\n\n #-------------------- \n # Construct the request\n #-------------------- \n url = Xnat.path.makeXnatUrl(self.host, _src)\n r = self.__httpsRequest('GET', url, stream=True)\n f = open(dstFile, 'wb')\n\n for chunk in r.iter_content(chunk_size=1024*1024):\n # Check for cancel event\n if not self.inDownloadQueue(_src):\n f.close()\n os.remove(f.name)\n self.runEventCallbacks('downloadCancelled', _src)\n break\n\n f.write(chunk)\n\n self.downloadTracker['downloadedSize']['bytes'] += len(chunk)\n self.runEventCallbacks('downloading', _src, \n self.downloadTracker['downloadedSize']['bytes'])\n\n r.close()\n f.close()\n\n #-------------------- \n # Post-download callbacks\n #-------------------- \n self.removeFromDownloadQueue(_src)\n self.runEventCallbacks('downloadFinished', _src)",
"def onContentDownloadComplete(self, fetcher, connectionResp): #$NON-NLS-1$\r",
"def download(self, download_request):\n raise NotImplementedError",
"def download_archive(self):\n\n def time_convert(structure):\n \"\"\"\n :param structure: tuple representation of time\n :return: GitHub archive time\n \"\"\"\n \n \n join_number_to_zero = lambda number: (\"\" if number > 9 else \"0\") + str(number)\n\n return \"%s-%s-%s-%s\" % (\n structure.tm_year, join_number_to_zero(structure.tm_mon), join_number_to_zero(structure.tm_mday),\n structure.tm_hour)\n\n current_time = self.get_time()\n self.logger.debug(__name__ + \": \" + \"current time: \" + str(gmtime(current_time)))\n\n difference = -25200\n #timezone difference in seconds between GMT and west coast of USA\n\n downloading_time = int(timegm(self.config[\"last_connection_time\"])) + 3600\n self.logger.debug(__name__ + \": \" + \"downloading time: \" + str(gmtime(downloading_time)))\n\n if downloading_time > current_time - 7200:\n self.logger.info(__name__ + \": \" + \"unable to download file (time limiting).\")\n return\n\n downloading_time += difference\n\n json_file_name = self.download_file(time_convert(gmtime(downloading_time)))\n\n self.config[\"last_connection_time\"] = gmtime(downloading_time - difference)\n self.logger.debug(__name__ + \": \" + \"last_connection_time: \" + str(self.config[\"last_connection_time\"]))\n\n return json_file_name",
"def onContentDownloadStart(self, fetcher, contentLength): #$NON-NLS-1$\r",
"def download_with_callback(self, url, path=None, filename=None, headers=None, force=False, func=None):",
"def _download(self):\n self._system.download_file(\"http://curl.haxx.se/download/\" + self._tar_name)",
"def pre_download(self, remote_files):\n pass",
"def initialize_response(self, filename):\n key = 'Content-Disposition'\n self.response = HttpResponse(content_type='text/csv')\n self.response[key] = f'attachment; filename=\"{filename}\"'\n self.writer = UnicodeCsvWriter(self.response)",
"def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()",
"def onContentDownload(self, fetcher, numBytes): #$NON-NLS-1$\r",
"def download(self,**attrs):\n\t\treturn super().download(**attrs)",
"def download_finish(self, cloud_file):",
"def download(self, url_match):\n pass",
"def real_download(self, filename, info_dict):\n raise NotImplementedError('This method must be implemented by subclasses')",
"def header_callback(self,buf):\n self.response_headers.extend(buf) #Optional TODO use chunk or byte-array storage",
"def parse_header(self):",
"def _handle_head_request(self):\n self._header_only = True\n self._handle_get_request()"
] | [
"0.64802045",
"0.63556856",
"0.6344954",
"0.63279104",
"0.6272933",
"0.62594026",
"0.62594026",
"0.6174976",
"0.60916114",
"0.60540736",
"0.59925616",
"0.5934997",
"0.5917263",
"0.59070027",
"0.5896219",
"0.5875113",
"0.58729607",
"0.5870548",
"0.5868026",
"0.58543277",
"0.5839921",
"0.5823234",
"0.5822554",
"0.5816369",
"0.5808778",
"0.5780594",
"0.5771124",
"0.57666034",
"0.5754608",
"0.5740666"
] | 0.64953953 | 0 |
Start the NZB download process | def download(self):
debug(str(self) + ' Downloading from newzbin.com..')
if not NewzbinDownloader.canDownload():
debug(str(self) + ' download: No www.newzbin.com login information')
return
info('Downloading newzbin NZB: %s ' % self.msgId)
self.handleNZBDownloadFromNewzbin() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def start_download(self) -> NoReturn:\n if self.threaded:\n self.threaded_download()\n else:\n self.regular_download()",
"def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))",
"def run(self):\n download(self.attempt)",
"def download():\n env_banner()\n\n download_data = Download()\n download_data()\n click.echo('Download done.')",
"def start_torrent_download(filename):\n return tadapt.start_download(filename)",
"def start(self, filename=None, url=None, basename=None, size=None, text=None):\n if basename:\n self.package = basename\n self.callback('download_start', filename, url, basename, size, text)",
"def download_and_prepare(self):\n self._download_and_prepare()",
"def _download(self):\n self._system.download_file(\"http://curl.haxx.se/download/\" + self._tar_name)",
"def download_start(file_name):\n global g_download_pid\n\n g_download_pid += 1\n\n #\n # There may be a multi-second lapse time from the naming of the file to\n # the actual start of the download so we should err on that side by putting it\n # in the future by some margin\n #\n file_name = '%s/%s-%s.mp3' % (misc.DIR_STREAMS, callsign, TS.ts_to_name(TS.now(offset_sec=misc.PROCESS_DELAY / 2)))\n logging.info('Starting download #%d (%s). Next up in %ds' % (g_download_pid, file_name, cascade_margin))\n\n process = Thread(target=stream_download, name='Download-%d:%s' % (g_download_pid, TS.ts_to_name()), args=(callsign, misc.config['stream'], g_download_pid, file_name))\n process.daemon = True\n process.start()\n return [file_name, process]",
"def start_download(url):\n return _add_download_to_deluge(url)",
"def run(self):\n # sends download range from offset to offset + block_size - 1 (including) in the header\n headers = {'User-Agent': self.user_agent, 'Refferer': '{}://{}/'.format(self.url.protocol, self.url.host), \n 'Range': 'bytes={}-{}'.format(self.offset, self.offset + self.block_size - 1)}\n status = 0 # set status to 0 that means a connection error\n try:\n self.conn.request('GET', self.url.request, headers=headers)\n response = self.conn.getresponse()\n # the server does not support partial downloading - error\n if response.status != 206:\n status = response.status\n raise MirrorError\n part_size = int(response.getheader('Content-Length')) # actual count of bytes sent by the server\n data = b'' # data buffer\n # loop while all data will be received\n while part_size > len(data):\n if self.cancelled.is_set(): # if the thread has been cancelled\n # stop the thread, the TaskError would not be processed\n # because a loop in the main thread already broken\n raise Exception\n data_fragment = response.read(self.FRAGMENT_SIZE)\n data += data_fragment # add data to the buffer\n # put progress information into the queue\n info = TaskProgress(self.url.host, response.status, len(data))\n self.data_queue.put(info)\n # when the downloading loop finished, create TaskData object\n info = TaskData(self.url.host, response.status, self.offset, data)\n response.close()\n except:\n # if an error has occurred - create a TaskError object\n info = TaskError(self.url.host, status, self.offset)\n finally:\n self.data_queue.put(info) # put result TaskInfo object into the queue\n self.ready.set() # mark the thread as comleted",
"def fetch(thread=False):\r\n if thread:\r\n Fetch.start()\r\n else:\r\n urlretrieve(OBSURL,ZFILE)",
"def download(self):\r\n \r\n # RAR Files names\r\n if self.debug==0:\r\n rar_files_name = [\"K001.rar\",\"K002.rar\",\"K003.rar\",\"K004.rar\",\"K005.rar\",\"K006.rar\",\r\n \"KA01.rar\", \"KA03.rar\", \"KA04.rar\", \"KA05.rar\", \"KA06.rar\", \"KA07.rar\", \r\n \"KA08.rar\", \"KA09.rar\", \"KA15.rar\", \"KA16.rar\", \"KA22.rar\", \"KA30.rar\", \r\n \"KB23.rar\", \"KB24.rar\", \"KB27.rar\", \r\n \"KI01.rar\", \"KI03.rar\", \"KI04.rar\", \"KI05.rar\", \"KI07.rar\", \"KI08.rar\", \r\n \"KI14.rar\", \"KI16.rar\", \"KI17.rar\", \"KI18.rar\", \"KI21.rar\"]\r\n else:\r\n rar_files_name = [\"K002.rar\", \"KA01.rar\", \"KI01.rar\"]\r\n\r\n url = self.url\r\n \r\n dirname = self.rawfilesdir\r\n dir_rar = \"rar_files\"\r\n if not os.path.isdir(dirname):\r\n os.mkdir(dirname)\r\n if not os.path.isdir(os.path.join(dirname, dir_rar)):\r\n os.mkdir(os.path.join(dirname, dir_rar))\r\n \r\n\r\n print(\"Downloading RAR files:\")\r\n for i in rar_files_name:\r\n file_name = i\r\n if not os.path.exists(os.path.join(dirname, dir_rar, file_name)):\r\n urllib.request.urlretrieve(url+file_name, os.path.join(dirname, dir_rar, file_name))\r\n print(file_name)\r\n \r\n print(\"Extracting files:\")\r\n for i in rar_files_name:\r\n if not os.path.exists(os.path.join(dirname, i[:4])):\r\n file_name = os.path.join(dirname, dir_rar, i)\r\n Archive(file_name).extractall(dirname) \r\n print(i)\r\n\r\n if self.debug==0:\r\n files_path = self.files\r\n else:\r\n files_path = files_debug(self.rawfilesdir)\r\n\r\n print(files_path)\r\n self.files = files_path",
"def main(acc_number, download, output):\n\n source = source_from_id(acc_number)\n file_urls = file_links(source, acc_number)\n\n # Download section\n if download and file_urls:\n dir_path = make_dir(output, acc_number)\n for file_url in file_urls:\n info = url_info(file_url)\n click.echo(\"Downloading... \" + filename_process(info['filename']))\n if info['scheme'] == 'ftp':\n client.download_ftp_files(\n info['domain'], info['project_dir'], dir_path, info['filename'])\n elif info['scheme'] == 'https' or info['scheme'] == 'http':\n info = url_info(file_url)\n client.download_http_files(\n file_url, filename_process(info['filename']), dir_path)\n else:\n click.echo('--> Scheme is not supported for ' + file_url)\n # DEFAULT: Printing URLS when -d is not given\n else:\n pretty = '\\n'.join(file_urls)\n click.echo(pretty)",
"def start_downloads():\n todownload = jobtracker.query(\"SELECT * FROM files \" \\\n \"WHERE status='retrying' \" \\\n \"ORDER BY created_at ASC\")\n todownload += jobtracker.query(\"SELECT * FROM files \" \\\n \"WHERE status='new' \" \\\n \"ORDER BY created_at ASC\")\n\n for file in todownload:\n if can_download():\n dlm_cout.outs(\"Initiating download of %s\" % \\\n os.path.split(file['filename'])[-1])\n\n # Update file status and insert entry into download_attempts\n queries = []\n queries.append(\"UPDATE files \" \\\n \"SET status='downloading', \" \\\n \"details='Initiated download', \" \\\n \"updated_at='%s' \" \\\n \"WHERE id=%d\" % \\\n (jobtracker.nowstr(), file['id']))\n jobtracker.query(queries)\n queries = []\n queries.append(\"INSERT INTO download_attempts (\" \\\n \"status, \" \\\n \"details, \" \\\n \"updated_at, \" \\\n \"created_at, \" \\\n \"file_id) \" \\\n \"VALUES ('%s', '%s', '%s', '%s', %d)\" % \\\n ('downloading', 'Initiated download', jobtracker.nowstr(), \\\n jobtracker.nowstr(), file['id']))\n insert_id = jobtracker.query(queries, fetchone=True)\n attempt = jobtracker.query(\"SELECT * FROM download_attempts \" \\\n \"WHERE id=%d\" % insert_id, fetchone=True)\n \n # download(attempt)\n DownloadThread(attempt).start()\n else:\n break",
"def run_xp(self, net, run, client, fileToDownload, **kwargs):\n gateway = net.get('gateway')\n # cmd = \"wget http://%s/%s -O /dev/null\" \n cmd = \"curl -so /dev/null -w '%%{time_total}\\n' http://%s/%s\"\n cmd = cmd % (\"7.7.7.7:8000\", fileToDownload)\n # import re\n # elapsed_ms_str = re.sub(\"tcpdump.*\", \"\", client.cmd( % (topo.server_addr, filename)).replace(\"> \", \"\"))\n # elapsed_ms = float(elapsed_ms_str)*1000\n\n # out = client.monitor(timeoutms=2000)\n # log.info(cmd)\n # CLI(net)\n print(\"starting\")\n out = client.cmdPrint(cmd)\n print(out)\n # import re\n # elapsed_ms_str = re.sub(\"tcpdump.*\", \"\", client.cmd(\"curl -so /dev/null -w '%%{time_total}\\n' http://%s/%s\" % (topo.server_addr, filename)).replace(\"> \", \"\"))\n\n elapsed_ms_str = out[2:].rstrip() # replace(\"> \", \"\"))\n # print(\"elapsed_ms_str\", elapsed_ms_str)\n elapsed_ms = float(elapsed_ms_str)*1000\n\n print(\"elapsed_ms\", elapsed_ms)\n\n # write avec ou sans dupack puis la valeur\n return elapsed_ms",
"def _download(self):\n self._system.download(\"http://geant4.web.cern.ch/geant4/support/source/\" + self._tar_name)",
"def download():\n raise NotImplementedError",
"def download(all):\n print(\"Downloading\")",
"def dowload_vt():\n print get_date_time_now() + \" ==> Download VT Samples started!\"\n print get_date_time_now() + \" ==> Nothing downloaded\"",
"def main():\n show_banner()\n args = parse_args(sys.argv[1:])\n urls = get_urls(args.inputfiles)\n if args.only_urls:\n print(\"URL\")\n else:\n print('{:70.70} {}'.format(\"URL\", \"Response\"))\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(download(urls,\n args.concurrency,\n args.only_success,\n args.outputfile,\n args.only_urls))",
"def startDownloadQueue(self):\n\n self.runEventCallbacks('downloadQueueStarted') \n while len(self.downloadQueue):\n if self.downloadQueue[0]['dst'] != None:\n self.getFile(self.downloadQueue[0]['src'], \n self.downloadQueue[0]['dst'])\n self.runEventCallbacks('downloadQueueFinished') \n self.clearDownloadQueue()",
"def download(self, args):\n\n\t\t\"\"\" Default argument for Architecture \"\"\"\n\t\tif len(args) >= 4:\n\t\t\tarch = args[3]\n\t\telse:\n\t\t\tarch = platform.processor()\n\n\t\t\"\"\" Default argument for Version \"\"\"\n\t\tif len(args) >= 3:\n\t\t\tif args[2] == \"latest\":\n\t\t\t\tversion = \"Latest\"\n\t\t\telse:\n\t\t\t\tversion = args[2]\n\t\telse:\n\t\t\tversion = \"Latest\"\n\n\t\t\"\"\" Find package path from package list, based on prev. arguments \"\"\"\n\t\tif len(args) >= 2:\n\t\t\tpackage = args[1]\n\t\t\tfilename = False\n\t\t\t\n\t\t\tversions = self.master.Dump(package)\n\t\t\tfor d in versions:\n\t\t\t\tif d[\"Version\"] == version:\n\t\t\t\t\tif d[\"Version\"] != \"Latest\" and d[\"Architecture\"] == arch:\n\t\t\t\t\t\tfilename = d[\"Filename\"]\n\t\t\t\t\telse:\n\t\t\t\t\t\tfor e in versions:\n\t\t\t\t\t\t\tif e[\"Version\"] == d[\"LatestVersion\"] and e[\"Architecture\"] == arch:\n\t\t\t\t\t\t\t\tfilename = e[\"Filename\"]\n\t\t\t\t\t\t\t\tversion = d[\"LatestVersion\"];\n\t\t\tif not filename:\n\t\t\t\tself.write_line(\"ERROR XXX: Package not found.\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Find chunks to download \"\"\"\n\t\t\tid = 0\n\t\t\tto_download = False\n\t\t\tfor f in self.torrent_info.files():\n\t\t\t\tprint(f.path.replace(\"packages/\", \"\") + \" = \" + filename);\n\t\t\t\tif f.path.replace(\"packages/\", \"\") == filename:\n\t\t\t\t\tto_download = f\n\t\t\t\t\tbreak;\n\t\t\t\tid += 1\n\t\t\tif not to_download:\n\t\t\t\tprint(\"ERROR XXX: dunno\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Set chunks priority to 7? (download max priority) \"\"\"\n\t\t\tpr = self.torrent_info.map_file(id, 0, to_download.size);\n\t\t\tn_pieces = math.ceil(pr.length / self.torrent_info.piece_length() + 1);\n\n\t\t\tfor i in range(self.torrent_info.num_pieces()):\n\t\t\t\tif i in range(pr.piece, pr.piece + n_pieces):\n\t\t\t\t\tself.handler.piece_priority(i, 7)\n\n\n\t\t\t\"\"\" Print download of package status \"\"\"\n\t\t\tself.print_status(id, pr, package, version, filename)\n\t\t\t\t\n\t\t\t\"\"\" Check the server for hash validation \"\"\"\n\t\t\tif self.valid_tpkg_file(to_download.path):\n\t\t\t\tself.write_line(\"DONE {0} {1} {2} {3}\".format(package, version, arch, self.config[\"daemon\"][\"rootdir\"] + \"/\" + to_download.path).replace('//', '/'))\n\t\t\telse:\n\t\t\t\tself.write_line(\"ERROR XXX: Hash verification failed.\")\n\t\telse:\n\t\t\tself.write_line(\"INVALID ARGUMENTS\");",
"def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()",
"def download_start(self, local_path, cloud_file, size):\n\t\telog(\"downloading {1} ({2})\".format(local_path, cloud_file.path, bytes_scaled(size)))",
"def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None",
"def set_download(self):\n print 'Setting download command...'\n wget = 0\n urllib = 0\n # JULIE : Cut proxy stuff...was causing problems (see scalapack installer if you want it back)\n if urllib == 0:\n # if urllib2 is not present checks if wget is present\n # in the PATH and if yes it sets the download command\n # to be wget\n print \"Checking availablility of wget...\",\n path=str(os.getenv('PATH')).split(os.pathsep)\n for i in path:\n if (os.path.isfile(os.path.join(i,'wget'))):\n print \"available\"\n wget = 1\n break\n if wget:\n # test wget\n print \"Testing wget...\",\n comm = 'wget --tries=2 --timeout=5 http://www.netlib.org/lapack/index'\n (output, error, retz) = runShellCommand(comm)\n if(retz != 0):\n print 'not working.'\n wget = -1\n else:\n print \"working\"\n self.downcmd=\"wget\"\n os.remove(\"index\")\n return\n else:\n # wget not available\n print \"not available\"\n wget=0",
"def download_checkpoint(self, exec_count):\n threads = []\n #dirs = range(1,exec_count+1)\n dirs = [exec_count]\n if self.use_active_set == \"True\":\n dirs += ['as']\n for i in range(1,exec_count):\n os.makedirs(\"/tmp/pico_cache/{0}/{1}\".format(self.pico_id, i))\n for i in dirs:\n logger.info(\"Starting thread for \" + \"{0}\".format(i))\n threads.append(self._run_in_new_thread(self.send_cmd, [\"download {0}\".format(i)]))\n logger.info(\"Done starting thread for \" + \"{0}\".format(i))\n # self.send_cmd(\"download {0}\".format(i))\n\n logger.info(\"About to join on threads\")\n map(lambda x: x.join(), threads)\n logger.info(\"Done join on threads\")",
"def download():\n basedir = os.path.dirname(os.path.dirname(__file__))\n print(basedir)\n datadir = os.path.join(basedir,\"data/NeonTreeEvaluation/\")\n print(\"Downloading data files to {}\".format(datadir)) \n eval_url = zenodo_url(concept_rec_id=\"3723356\", datadir=datadir)",
"def main():\n get_obofoundry(force_download=True)"
] | [
"0.6757942",
"0.63902557",
"0.6272311",
"0.6187265",
"0.6104816",
"0.6057119",
"0.60374814",
"0.5990123",
"0.5872523",
"0.58668387",
"0.58480763",
"0.5846919",
"0.580224",
"0.57705647",
"0.5765174",
"0.57354814",
"0.5667718",
"0.56237143",
"0.561297",
"0.56028575",
"0.5600036",
"0.5547252",
"0.5546427",
"0.55358994",
"0.5533395",
"0.5518165",
"0.5511369",
"0.55108714",
"0.55087495",
"0.55052924"
] | 0.7246163 | 0 |
Add the new NZB to the queue | def handleEnqueueNZB(self, page):
if super(self.__class__, self).handleEnqueueNZB(page):
Hellanzb.NZBQueue.writeStateXML()
else:
msg = 'Unable to download newzbin NZB: %s' % self.msgId
if self.errMessage:
                error('%s (%s)' % (msg, self.errMessage))
elif self.nonExistantNZB:
error('%s (This appears to be an invalid msgid)' % msg)
else:
error('%s (Incorrect NEWZBIN_USERNAME/PASSWORD?)' % msg)
# Invalidate the cached cookies
Hellanzb.NZBQueue.writeStateXML() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def queue_append(self, obj, value):\n self.queue.append((obj, value))\n if len(self.queue) > self.queue_size:\n self.dump_queue()",
"def add_nb_queue_to_session_queue(self, session):\n rpc_list = []\n client_id = get_element('cid', session['client'])\n\n if client_id is not None and client_id in RPCS.Northbound_Queue:\n # Check if all commands have been serviced\n if RPCS.Northbound_Queue[client_id]:\n # Get first request in the client queue, in the form:\n # (Client_COMMAND, RESPONSE STREAM)\n # TODO pop might be unresolved\n nb_request = RPCS.Northbound_Queue[client_id].pop(0)\n # Parse and queue request(s)\n client_command = nb_request[0]\n rpc_list.append(client_command)\n # Insert nb commands to the front of queue\n session['queue'] = queued_nb_methods + session['queue']\n # Store stream which expects the client response in the session\n session['nb_response_stream'] = nb_request[1]",
"def _add_state_to_queue(self, new_state):\n self._available = new_state.state != STATE_UNAVAILABLE\n if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE, None):\n return\n\n try:\n if self.is_binary:\n self.states.append(new_state.state)\n else:\n self.states.append(float(new_state.state))\n self.ages.append(new_state.last_updated)\n except ValueError:\n _LOGGER.error(\n \"%s: parsing error, expected number and received %s\",\n self.entity_id,\n new_state.state,\n )\n return\n\n self._unit_of_measurement = self._derive_unit_of_measurement(new_state)",
"def addToBq(self, p):\n self.allJobs.append(p)\n p.submitToQueue()\n if len(self.circQ) < self.maxLength - 1:\n self.circQ.append(p)",
"def addToBq(self, p):\n self.allJobs.append(p)\n p.submitToQueue()\n if len(self.circQ) < self.maxLength - 1:\n self.circQ.append(p)",
"def __add__(self, value):\n self.queue.append(value)",
"def enqueue(self, record):\r\n self.queue.put_nowait(record)",
"def enqueue(self, item):\n self.queue.append(item)",
"def enqueue(self, item):\n self.queue.append(item)",
"def enqueue(self, val):\r\n self.queue.append(val)",
"def _put(self, item, queue):",
"def add_to_queue(self, word):\n self.q.put(word)\n print(\"word \\'{}\\' added in clients queue\".format(word))",
"def put(self, conn):\r\n self.queue.append((conn, time.time()))",
"def runQueueEnqueue(self):\n raise NotImplementedError",
"def add_queue(self, queue):\n\n queue_id = queue[\"ovsdb:queues\"][0][\"queue-id\"]\n self.queue_dict[queue_id] = queue",
"def __post_init__(self) -> None:\n self.gtex += [None]\n self.bm += [None]\n self._q: queue.Queue = queue.Queue(maxsize=self.maxsize)",
"def add_queue(self, queue):\n with self.mutex:\n self.queues.append(queue)",
"def putonqueue(self, nr, *args):\n self.outqueues[10-nr].put_nowait(args)\n self.tickqueue.put_nowait('go')",
"def add_to_queue(self, name, pic_num, crop_num):\n #if the picture is not already in the queue\n #and if it is not already downloaded\n if ((self.communicator.image_store.get_crop(pic_num, crop_num).inqueue == False) & \\\n (self.communicator.image_store.get_crop(pic_num, crop_num).completed == False)):\n #insert in queue\n myiter = self.list_store.append(None)\n #set the data in column 0\n #if the picture is ready for download set color to black\n if (self.communicator.image_store.get_crop(pic_num, crop_num).available == True):\n self.list_store.set_value(myiter, \\\n 0, '<span foreground=\"#000000\"><b>' + name + '</b></span>')\n #otherwise set to gray\n else:\n self.list_store.set_value(myiter, \\\n 0, '<span foreground=\"#A0A0A0\"><b>' + name + '</b></span>')\n #set the data in column 1 and 2\n self.list_store.set_value(myiter, 1, pic_num)\n self.list_store.set_value(myiter, 2, crop_num)\n #let model know picture is inqueue\n self.communicator.image_store.get_crop(pic_num, crop_num).inqueue = True\n #call queue_changed function\n self.queue_changed()\n elif self.communicator.image_store.get_crop(pic_num, crop_num).completed == True:\n print \"image has already been downloaded\"\n else:\n print \"image is currently in the queue\"",
"def enqueue(self, message, qat, nbf):\n dst = self.abspath('%s.tmp' % str(message.id))\n with open(dst, 'wb') as f:\n f.write(nbf.to_bytes(8, 'big'))\n f.write(message.encode())\n f.flush()\n os.fsync(f.fileno())\n\n os.rename(dst, self.abspath('%s.amqp' % str(message.id)))",
"def enqueue(self, item):\n self.__queue.insert(0, item)",
"def push(self, x):\r\n self.queue.append(x)",
"def enqueue(self, element):\n self.the_queue.append(element)",
"def enqueue(self, item):\n self._queue.append(item)",
"def ztest_tokyo_queue(self):\n \n sql_queue = TokyoCabinetQueue()\n \n print(\"Queue size = %d\\n\" %(sql_queue.size()) )\n \n #insertion\n for i in range(10):\n if i % 2 == 0:\n p = 0\n else:\n p = 1\n item = NMSQueueItem(p,\"data %s\" % (i))\n item.set_uuid()\n sql_queue.put(item.dictify())\n #time.sleep(0.5)\n \n size = sql_queue.size()\n \n while size != 0:\n the_dict = sql_queue.pop()\n item = NMSQueueItem.create_from_dict(the_dict)\n print(\"size = %d, item = %s\\n\" % (size, item))\n size = sql_queue.size()\n \n print(\"size = %s\" % size )",
"def add_to_queue(self, msg):\n if not self.queue.full():\n self.queue.put(msg)",
"def _add_to_queue(self, tok):\n if self._genpostfix:\n self._queue.append(tok)",
"def test_the_queue_enqueue(the_queue):\n the_queue.enqueue(2)\n assert the_queue._new_dll.head.data == the_queue._new_dll.tail.data == 2",
"def enqueue(self, item):\n self.list.append(item)",
"def putonqueue(self, nr, *args):\n\n self.outqueues[nr].put_nowait(*args)\n self.tickqueue.put_nowait('go')"
] | [
"0.6199243",
"0.6176773",
"0.6118145",
"0.61039335",
"0.61039335",
"0.60769063",
"0.6053558",
"0.5965219",
"0.5965219",
"0.59588814",
"0.59458435",
"0.58998424",
"0.58986986",
"0.58671236",
"0.58596694",
"0.5859027",
"0.5857519",
"0.58512956",
"0.5840158",
"0.5835103",
"0.5816021",
"0.57947695",
"0.5789675",
"0.578609",
"0.5776642",
"0.5774638",
"0.5772926",
"0.57487637",
"0.570733",
"0.57030624"
] | 0.6672806 | 0 |
Computes and saves the reference sphere | def compute_reference_sphere(self, x, y):
theta_0=np.pi/2.0
pictures=np.zeros(shape=(360, int(y.shape[0]), int(x.shape[0]), 3), dtype=np.uint8)
xx, yy=np.meshgrid(x,y)
rho=np.sqrt(xx**2+yy**2)
c=2.0*np.arctan(rho/2.0)
theta=theta_0-np.arcsin(yy*np.sin(c)/rho)
for phi_0 in range(0, 360):
phi_in_rad=np.deg2rad(phi_0)
phi=phi_in_rad+np.arctan(xx*np.sin(c)/(rho*np.cos(c)))
for k in range(len(x)):
for j in range(len(y)):
pixnum=AST1100SolarSystem.ang2pix(theta[j,k], phi[j,k])
temp=self.celestial_sphere[pixnum]
pictures[phi_0, j, k, :]=[temp[2], temp[3], temp[4]]
print "Done with phi: ", phi_0
np.save("Reference_sphere.npy", pictures) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_sphere_full():\n \n num_voxels = 31\n c = (15.0, 15.0, 15.0)\n\n data_x = []\n data_y = []\n data_z = []\n data_intensity = []\n\n volume = numpy.zeros((num_voxels, num_voxels, num_voxels))\n\n for x in range(num_voxels):\n for y in range(num_voxels):\n for z in range(num_voxels):\n\n if numpy.sqrt((x-c[0])**2 + (y-c[1])**2 + (z-c[2])**2) - 7.5 < 1.5:\n data_x.append(x)\n data_y.append(y)\n data_z.append(z)\n data_intensity.append(200.0)\n\n volume[x,y,z] = 200.0\n\n\n return data_x, data_y, data_z, data_intensity, volume",
"def sphere_example():\n env = holodeck.make(\"MazeWorld-FinishMazeSphere\")\n\n # This command is to constantly rotate to the right\n command = 2\n for i in range(10):\n env.reset()\n for _ in range(1000):\n state, reward, terminal, _ = env.step(command)\n\n # To access specific sensor data:\n pixels = state[\"RGBCamera\"]\n orientation = state[\"OrientationSensor\"]\n\n # For a full list of sensors the sphere robot has, view the README",
"def save(self, _name):\r\n try:\r\n with open(_name, 'w+') as fout:\r\n fout.write(\".cube file generated from prt_esolv.py\\n\")\r\n fout.write(f\"{_name}\\n\")\r\n\r\n fout.write(\r\n f\"{int(self.n_atoms)} {float(self.origin[0])} {float(self.origin[1])} {float(self.origin[2])}\\n\")\r\n\r\n fout.write(f\"{int(self.n_x)} {float(self.x[0])} {float(self.x[1])} {float(self.x[2])}\\n\")\r\n fout.write(f\"{int(self.n_y)} {float(self.y[0])} {float(self.y[1])} {float(self.y[2])}\\n\")\r\n fout.write(f\"{int(self.n_z)} {float(self.z[0])} {float(self.z[1])} {float(self.z[2])}\\n\")\r\n\r\n for atom, xyz in zip(self.atoms, self.atoms_xyz):\r\n fout.write(f\"{atom} 0 {xyz[0]} {xyz[1]} {xyz[2]}\\n\")\r\n\r\n for ix in range(self.n_x):\r\n for iy in range(self.n_y):\r\n for iz in range(self.n_z):\r\n fout.write(f\"{self.data[ix][iy][iz]}\")\r\n if iz % 6 == 5:\r\n fout.write('\\n')\r\n fout.write(\"\\n\")\r\n except IOError:\r\n print(f\"Can't create {_name} file!!!\")\r\n raise\r\n\r\n return None",
"def sph(grlat, elong, ht):\n\n # Initialize Variables\n global cth, sth, clg, slg, dif, radn, gl # common/obs/\n gn = 9.798277692\n ae = 6378140.0\n f = 0.00335281\n rm = 0.00344978\n dr = 0.01745329252\n\n clong = np.cos(elong * dr)\n slong = np.sin(elong * dr)\n # latitude difference\n dvert = f * (1.0 + 0.5 * f) * np.sin(2.0 * grlat * dr) - 0.5 * f * f * np.sin(\n 4.0 * grlat * dr\n )\n gcclat = (3.1415926535898 / 2.0) - (grlat * dr - dvert)\n cthet = np.cos(gcclat)\n sthet = np.sin(gcclat)\n # geocentric radius\n radn = 1 - f * (cthet ** 2) * (1 + 1.5 * f * (sthet ** 2))\n # formulae for g are from jeffreys, 4.022 and 4.023\n g = gn * (\n 1\n + f\n - 1.5 * rm\n + f * (f - (27 / 14) * rm)\n + (2.5 * rm - f - f * (f - (39 / 14) * rm)) * (cthet ** 2)\n - (f / 2) * (7 * f - 15.0 * rm) * ((cthet * sthet) ** 2)\n )\n # free air correction\n g = g - g * (2.0 * ht * (1.0 + f + rm - 2.0 * f * (cthet ** 2)) / ae)\n\n # Conversion Here for Globals\n cth = cthet\n sth = sthet\n clg = clong\n slg = slong\n dif = dvert\n gl = g",
"def odf(self, sphere):\n raise NotImplementedError(\"To be implemented in sub classes\")",
"def sphere_sre(solution):\n a = 0\n bias = 0.2\n x = solution.get_x()\n x1 = x[:10]\n x2 = x[10:]\n value1 = sum([(i-bias)*(i-bias) for i in x1])\n value2 = 1/len(x) * sum([(i-bias)*(i-bias) for i in x2])\n return value1 + value2",
"def sphere2img(lat,lon,latc,lonc,xcen,ycen,rSun,peff,hemi_out=False):\n # Correction of finite distance (1AU)\n sin_asd = 0.004660\n cos_asd = 0.99998914\n\n last_latc = 0.0\n cos_latc = 1.0\n sin_latc = 0.0\n\n if latc != last_latc:\n sin_latc = np.sin(latc)\n cos_latc = np.cos(latc)\n last_latc = latc\n\n sin_lat = np.sin(lat)\n cos_lat = np.cos(lat)\n cos_lat_lon = cos_lat*np.cos(lon-lonc)\n\n cos_cang = sin_lat*sin_latc + cos_latc*cos_lat_lon\n if cos_cang < 0.0:\n hemisphere = 1\n else:\n hemisphere = 0\n\n r = rSun*cos_asd/(1.0 - cos_cang*sin_asd)\n xr = r*cos_lat*np.sin(lon - lonc)\n yr = r*(sin_lat*cos_latc - sin_latc*cos_lat_lon)\n\n cospa = np.cos(peff)\n sinpa = np.sin(peff)\n xi = xr*cospa - yr*sinpa\n eta = xr*sinpa + yr*cospa\n\n xi = xi + xcen\n eta = eta + ycen\n\n if hemi_out == True:\n return xi,eta,hemisphere\n else:\n return xi,eta",
"def sphere(\n network,\n pore_diameter='pore.diameter'\n):\n return 4/3*_pi*(network[pore_diameter]/2)**3",
"def __init__(self, name, a=115, b=111, c=19, mu=10**7, omegavec=[0, 0, 1],\r\n rho=0.5, szscale=2, n=0):\r\n assert(len(omegavec) == 3)\r\n assert(szscale >= 1)\r\n assert(n >= 0)\r\n\r\n # set the name\r\n self.name = name\r\n\r\n # set the rotation axis\r\n self.omegavec = omegavec\r\n\r\n # set the principal axes\r\n self.a = a\r\n self.b = b\r\n self.c = c\r\n\r\n # set the size scale\r\n self.szscale = szscale\r\n\r\n # convert the axes from meters to cm\r\n a *= 100\r\n b *= 100\r\n c *= 100\r\n\r\n # set the maximum allowed size\r\n self.sizecut = szscale*np.max([a, b, c])/2\r\n\r\n # set viscosity, create a Constant to avoid slowdowns\r\n self.mu = Constant(mu)\r\n\r\n # initialize the time, and the number of cycles\r\n self.t = 0\r\n self.ind = 0\r\n\r\n # set dt to 1 temporarily, for use in the solvers\r\n self.dt = Constant(1)\r\n\r\n # set density, create a Constant to avoid slowdowns\r\n self.rho = Constant(rho)\r\n\r\n # set the inital time, for logging\r\n self.start_time = time.time()\r\n\r\n # read in mesh, with n refinements\r\n with pkg_resources.path('SAMUS.meshes', '3ball%s.xml' % (n)) as p:\r\n mesh_path = p\r\n self.mesh = Mesh(str(mesh_path))\r\n\r\n # rescale the mesh to the input ellipsoids\r\n self.mesh.coordinates()[:, 0] *= a/2\r\n self.mesh.coordinates()[:, 1] *= b/2\r\n self.mesh.coordinates()[:, 2] *= c/2\r\n\r\n # use Elements to make a mixed function space\r\n V = VectorElement(\"CG\", self.mesh.ufl_cell(), 2)\r\n Q = FiniteElement(\"CG\", self.mesh.ufl_cell(), 1)\r\n self.Z = FunctionSpace(self.mesh, V*Q)\r\n\r\n # create actual function spaces which compose the mixed\r\n self.V = VectorFunctionSpace(self.mesh, \"CG\", 2)\r\n self.Q = FunctionSpace(self.mesh, \"CG\", 1)\r\n\r\n # create solution functions from the mixed space\r\n self.up = Function(self.Z) # solution function\r\n self.u_p_ = Function(self.Z) # function for previous solutions\r\n\r\n # get trial and test functions from the mixed space\r\n dup = TrialFunction(self.Z)\r\n v, q = TestFunctions(self.Z)\r\n\r\n # create the function of the rotation vector\r\n self.omega = interpolate(Constant(tuple(omegavec)), self.V)\r\n\r\n # split the solution functions\r\n self.u, self.p = split(self.up)\r\n u_, p_ = split(self.u_p_)\r\n\r\n # set solution functions to 0\r\n self.up.assign(Constant((0, 0, 0, 0)))\r\n self.u_p_.assign(Constant((0, 0, 0, 0)))\r\n\r\n # create the functions for storing the forces\r\n self.ftides = Function(self.V) # tides\r\n self.gravity = Function(self.V) # gravity\r\n self.centrifugal = Function(self.V) # centrifugal\r\n self.coriolis = Function(self.V) # coriolis\r\n self.forcing = Function(self.V) # total forces\r\n\r\n # name the functions for storage\r\n self.ftides.rename(\"Tidal Force\", \"Tidal Force\")\r\n self.gravity.rename(\"Self-Gravity\", \"Gravitational Force\")\r\n self.centrifugal.rename(\"Centrifugal\", \"Centrifugal Force\")\r\n self.coriolis.rename(\"Coriolis\", \"Coriolis Force\")\r\n self.forcing.rename(\"Forcing\", \"Total force on the object\")\r\n\r\n # create a constant to ensure solution stability\r\n A = Constant(1e4/max(mu, 1e4))\r\n\r\n # create the solution for the Navier-Stokes equations\r\n F = (\r\n # acceleration term\r\n A*self.rho*inner(((self.u-u_)/(self.dt)), v) * dx +\r\n\r\n # viscosity term\r\n A*self.mu*inner(grad(self.u), grad(v)) * dx +\r\n\r\n # advection term\r\n A*self.rho*inner(dot(self.u, nabla_grad(self.u)), v) * dx -\r\n\r\n # pressure term\r\n A*self.p*div(v) * dx +\r\n\r\n # mass continuity equation\r\n q*div(self.u) * 
dx -\r\n\r\n # force term\r\n A*inner(self.forcing, v) * dx)\r\n\r\n # find the derivative, for speed\r\n J = derivative(F, self.up, dup)\r\n\r\n # set up the Navier-Stokes solver\r\n problem = NonlinearVariationalProblem(F, self.up, J=J)\r\n self.solver = NonlinearVariationalSolver(problem)\r\n self.solver.parameters['newton_solver']['relaxation_parameter'] = 1.\r\n\r\n # split solution functions for access (weird FEniCS quirk)\r\n self.u, self.p = self.up.split()\r\n u_, p_ = self.u_p_.split()\r\n\r\n # name the solution functions\r\n self.u.rename(\"Velocity\", \"Velocity\")\r\n self.p.rename(\"Pressure\", \"Pressure\")\r\n\r\n # COMPUTE FUNCTIONS FOR GRAVITY SOLUTIONS\r\n self.G = Constant(6.674e-8) # sets gravitational constant, in cgs\r\n\r\n # get solution, trial, and test functions\r\n self.gravgs = Function(self.Z)\r\n dgs = TrialFunction(self.Z)\r\n gravh, gravc = TestFunctions(self.Z)\r\n gravg, gravs = split(self.gravgs)\r\n\r\n # set a scale to ensure the stability of the solution. this is undone\r\n # in the solution, but for unknown reasons O(10^-8) is too large for\r\n # the solver to maintain stability\r\n self.gravscale = 1e-3\r\n\r\n # compute the scaling constant for the Gaussian gravity form, which is\r\n # rescaled by self.gravscale. A Constant, for speed\r\n gravA = Constant(4*np.pi*float(self.G)*float(self.rho)*self.gravscale)\r\n\r\n # creates the equation set for Gaussian gravity\r\n gravF = (\r\n # this equation is 0=0, used to mix vector and scalar solutions\r\n gravs*div(gravh) * dx + inner(gravg, gravh) * dx +\r\n # this equation is the Gaussian form, div(g)=-4 pi G rho\r\n gravc*div(gravg) * dx + gravA*gravc * dx)\r\n\r\n # find the derivative, for speed\r\n gravJ = derivative(gravF, self.gravgs, dgs)\r\n\r\n # set up the gravitational solver\r\n gravproblem = NonlinearVariationalProblem(gravF, self.gravgs, J=gravJ)\r\n self.gravsolver = NonlinearVariationalSolver(gravproblem)\r\n self.gravsolver.parameters['newton_solver'\r\n ]['relaxation_parameter'] = 1.",
"def make_soma(self, size, location):\n bpy.ops.mesh.primitive_uv_sphere_add(segments=8, ring_count=8, size=size, location=location)\n # Name object as cell\n bpy.context.object.name = self.id\n # Save referrence\n self.blender_obj = bpy.context.object",
"def compute_phi(self, input_image):\n ref_image=Image.open(input_image)\n imarray=np.array(ref_image)\n ypix=imarray[0,:,0].shape\n ref_image.close()\n\n infile=open('Reference_sphere.npy', 'rb')\n reference_sphere=np.load(infile)\n infile.close()\n print reference_sphere.shape\n\n diff = 1000000000\n j=0\n for k in range(reference_sphere.shape[0]):\n diff_img=(imarray-reference_sphere[k, :, :, :])**2\n least_square=np.sum(diff_img)\n print least_square\n if least_square < diff:\n diff = least_square\n j=k\n img3=Image.fromarray(reference_sphere[j, :, :, :])\n img3.save('Compute_image.png')\n return j, diff",
"def LoadSphere():\n return vtkInterface.PolyData(spherefile)",
"def odf(self, sphere):\n self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)\n if self.gqi_vector is None:\n if self.model.method == 'gqi2':\n H = squared_radial_component\n # print self.gqi_vector.shape\n self.gqi_vector = np.real(H(np.dot(\n self.model.b_vector, sphere.vertices.T) *\n self.model.Lambda))\n if self.model.method == 'standard':\n self.gqi_vector = np.real(np.sinc(np.dot(\n self.model.b_vector, sphere.vertices.T) *\n self.model.Lambda / np.pi))\n self.model.cache_set('gqi_vector', sphere, self.gqi_vector)\n\n return np.dot(self.data, self.gqi_vector)",
"def drawSphere3D(x0,y0,z0, radius, hres, vres):\n dislin.sphe3d(x0,y0,z0, radius, hres, vres)",
"def Sphere_ExactSerendipityLagrangeQuad():\n\n mesh = Sphere_CubeToSerendipityLagrangeQuad(1)\n \n ################\n # Modifications for exact sphere\n ################\n # x=+1 side\n def posXvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posXnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = -1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0 * np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0 * yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0 * zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0 * np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def posXJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = -1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0 * np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0 * yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0 * zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0 * np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[0].vals = posXvals\n mesh.eList[0].normals = posXnormals\n mesh.eList[0].J = posXJ\n \n def posYvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posYnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return 
J.T/magnitude\n def posYJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[1].vals = posYvals\n mesh.eList[1].normals = posYnormals\n mesh.eList[1].J = posYJ\n \n # x=-1 side\n def negXvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negXnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negXJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[2].vals = negXvals\n mesh.eList[2].normals = negXnormals\n mesh.eList[2].J = negXJ\n\n # y=-1 side\n def negYvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negYnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n 
xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2; \n dxdxi1 = -1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*0.5*xb*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5*(-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0*0.5*zb*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5*(-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0) \n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negYJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2; \n dxdxi1 = -1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*0.5*xb*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5*(-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0*0.5*zb*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5*(-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0) \n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[3].vals = negYvals\n mesh.eList[3].normals = negYnormals\n mesh.eList[3].J = negYJ\n \n # z=+1 side\n def posZvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1)\n yb=np.array(-xi2)\n zb=np.ones(xi1.shape)\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posZnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(-xi2);zb=np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = -1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = -1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = -1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def posZJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(-xi2);zb=np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = -1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = -1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = -1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * 
(-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[4].vals = posZvals\n mesh.eList[4].normals = posZnormals\n mesh.eList[4].J = posZJ\n \n # z=-1 side\n def negZvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negZnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negZJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[5].vals = negZvals\n mesh.eList[5].normals = negZnormals\n mesh.eList[5].J = negZJ\n \n for e in mesh.eList:\n e.ExactElement = True\n \n return mesh",
"def sphgen(self, force_rerun=False):\n log.debug('{}: running sphere generation...'.format(self.id))\n\n if not self.dms_path:\n return ValueError('Please run dms_maker')\n\n sph = op.join(self.dock_dir, '{}_receptor.sph'.format(self.id))\n insph = op.join(self.dock_dir, 'INSPH')\n\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=sph):\n with open(insph, \"w\") as f:\n f.write(\"{}\\n\".format(self.dms_path))\n f.write(\"R\\n\")\n f.write(\"X\\n\")\n f.write(\"0.0\\n\")\n f.write(\"4.0\\n\")\n f.write(\"1.4\\n\")\n f.write(\"{}\\n\".format(sph))\n\n os.chdir(self.dock_dir)\n cmd = \"sphgen_cpp\"\n os.system(cmd)\n os.remove(insph)\n\n if ssbio.utils.is_non_zero_file(sph):\n self.sphgen_path = sph\n log.debug('{}: successful sphgen execution'.format(self.sphgen_path))\n else:\n log.critical('{}: sphgen_cpp failed to run on dms file'.format(self.dms_path))",
"def sphere(*args, axis: Union[List[float, float, float], bool]=None, caching: bool=True,\n degree: Union[int, bool]=3, endSweep: Union[float, bool]=2, heightRatio: Union[float,\n bool]=2.0, nodeState: Union[int, bool]=0, pivot: Union[List[float, float, float],\n bool]=None, radius: Union[float, bool]=1.0, sections: Union[int, bool]=8, spans:\n Union[int, bool]=1, startSweep: Union[float, bool]=0, tolerance: Union[float,\n bool]=0.01, useTolerance: bool=False, constructionHistory: bool=True, name:\n AnyStr=\"\", object: bool=True, polygon: int=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[List[AnyStr], Any]:\n pass",
"def Sphere(self,radius=1.0, npoints=10):\n\n # RESET MESH\n self.__reset__()\n\n from math import pi, cos, sin\n from meshpy.tet import MeshInfo, build\n from meshpy.geometry import generate_surface_of_revolution, EXT_OPEN, GeometryBuilder\n\n r = radius\n\n points = npoints\n dphi = pi/points\n\n def truncate(r):\n if abs(r) < 1e-10:\n return 0\n else:\n return r\n\n rz = [(truncate(r*sin(i*dphi)), r*cos(i*dphi)) for i in range(points+1)]\n\n geob = GeometryBuilder()\n geob.add_geometry(*generate_surface_of_revolution(rz,\n closure=EXT_OPEN, radial_subdiv=10))\n\n mesh_info = MeshInfo()\n geob.set(mesh_info)\n\n mesh = build(mesh_info)\n\n self.points = np.asarray(mesh.points)\n self.elements = np.asarray(mesh.elements)\n # self.faces = np.asarray(mesh.faces)\n # self.edges = np.asarray(self.edges)\n self.nelem = self.elements.shape[0]\n self.element_type = \"tet\"\n\n\n # GET EDGES & FACES - NONE ASSIGNMENT IS NECESSARY OTHERWISE IF FACES/EDGES ALREADY EXIST\n # THEY WON'T GET UPDATED\n self.faces = None\n self.edges = None\n self.GetBoundaryFacesTet()\n self.GetBoundaryEdgesTet()\n\n # CHECK MESH\n points = self.points[np.unique(self.faces),:]\n if not np.isclose(np.linalg.norm(points,axis=1),radius).all():\n raise ValueError(\"MeshPy could not construct a valid linear mesh for sphere\")",
"def rdmb_povray_save_q(out_file,\n vs,\n ucs, vcs,\n width=800, height=600,\n rotx=0, roty=0, rotz=0,\n angle=14):\n\n ucmax = 6.0\n ucs = ucs / ucmax\n ucs[ucs > 1.0] = 1.0\n # ucs = ucs / np.max(ucs)\n\n rot1 = [rotx, 0, 0]\n rot2 = [0, roty, 0]\n rot3 = [0, 0, rotz]\n\n camera = Camera('location', [0, 0, -25],\n 'look_at', [0, 0, 0],\n 'angle', angle,\n 'right x*image_width/image_height')\n\n light = LightSource([0, 0, -10],\n 'color', [1.0, 1.0, 1.0], 'parallel', 'shadowless')\n light1 = LightSource([-10, 0, 0],\n 'color', [0.5, 0.5, 0.5], 'parallel', 'shadowless')\n light2 = LightSource([10, 0, 0],\n 'color', [0.5, 0.5, 0.5], 'parallel', 'shadowless')\n light3 = LightSource([0, -10, 0],\n 'color', [0.5, 0.5, 0.5], 'parallel', 'shadowless')\n light4 = LightSource([0, 10, 0],\n 'color', [0.5, 0.5, 0.5], 'parallel', 'shadowless')\n\n background = Background('color', [1, 1, 1, 1])\n\n spheres = [Sphere(v, 0.02,\n Finish('ambient', 1.0),\n Texture(Pigment('color',\n [0.3+uc*0.7, 0.2+uc*0.8, 0.2+uc*0.8])),\n 'rotate', rot1,\n 'rotate', rot2,\n 'rotate', rot3) for v, uc in zip(vs, ucs)]\n\n objects = [light, light1, light2, light3, light4, background] + spheres\n\n scene = Scene(camera, objects=objects)\n scene.render(out_file, width=width, height=height,\n output_alpha=True, antialiasing=0.001,\n tempfile=out_file+\"__temp__.pov\")",
"def add_sphere(self):\n self.scenes[self.current_scene].add_object(Sphere())\n self.redraw()",
"def Project(self, *args):\n return _Bnd.Bnd_Sphere_Project(self, *args)",
"def tf(xp, yp, zp, spheres, inc, dec, pmag=None):\n if xp.shape != yp.shape != zp.shape:\n raise ValueError(\"Input arrays xp, yp, and zp must have same shape!\")\n tf = numpy.zeros_like(xp)\n # Calculate the 3 components of the unit vector in the direction of the\n # regional field\n fx, fy, fz = utils.dircos(inc, dec)\n if pmag is not None:\n if isinstance(pmag, float) or isinstance(pmag, int):\n pintensity = pmag\n pmx, pmy, pmz = fx, fy, fz\n else:\n pintensity = numpy.linalg.norm(pmag)\n pmx, pmy, pmz = numpy.array(pmag) / pintensity\n for sphere in spheres:\n if sphere is None or ('magnetization' not in sphere.props\n and pmag is None):\n continue\n radius = sphere.radius\n # Get the intensity and unit vector from the magnetization\n if pmag is None:\n mag = sphere.props['magnetization']\n if isinstance(mag, float) or isinstance(mag, int):\n intensity = mag\n mx, my, mz = fx, fy, fz\n else:\n intensity = numpy.linalg.norm(mag)\n mx, my, mz = numpy.array(mag) / intensity\n else:\n intensity = pintensity\n mx, my, mz = pmx, pmy, pmz\n # First thing to do is make the computation point P the origin of the\n # coordinate system\n x = sphere.x - xp\n y = sphere.y - yp\n z = sphere.z - zp\n # Calculate the 3 components of B\n dotprod = mx * x + my * y + mz * z\n r_sqr = x ** 2 + y ** 2 + z ** 2\n r5 = r_sqr ** (2.5)\n moment = intensity * (4. * numpy.pi * (radius ** 3) / 3.)\n bx = moment * (3 * dotprod * x - r_sqr * mx) / r5\n by = moment * (3 * dotprod * y - r_sqr * my) / r5\n bz = moment * (3 * dotprod * z - r_sqr * mz) / r5\n tf += (fx * bx + fy * by + fz * bz)\n tf *= CM * T2NT\n return tf",
"def on_sphere():\n vec = np.random.standard_normal(3)\n return vec / np.linalg.norm(vec)",
"def polySphericalProjection(*args, imageCenter: Union[List[float, float], bool]=None,\n imageCenterX: Union[float, bool]=0.5, imageCenterY: Union[float,\n bool]=0.5, imageScale: Union[List[float, float], bool]=None,\n imageScaleU: Union[float, bool]=1.0, imageScaleV: Union[float,\n bool]=1.0, projectionCenter: Union[List[float, float, float],\n bool]=None, projectionCenterX: Union[float, bool]=0.0,\n projectionCenterY: Union[float, bool]=0.0, projectionCenterZ:\n Union[float, bool]=0.0, projectionHorizontalSweep: Union[float,\n bool]=0.0, projectionScale: Union[List[float, float], bool]=None,\n projectionScaleU: Union[float, bool]=180.0, projectionScaleV:\n Union[float, bool]=90.0, radius: Union[float, bool]=0.0, rotate:\n Union[List[float, float, float], bool]=None, rotateX: Union[float,\n bool]=0.0, rotateY: Union[float, bool]=0.0, rotateZ: Union[float,\n bool]=0.0, rotationAngle: Union[float, bool]=10.0, seamCorrect:\n bool=True, caching: bool=True, constructionHistory: bool=True,\n createNewMap: bool=True, insertBeforeDeformers: bool=True,\n keepImageRatio: bool=True, mapDirection: AnyStr=\"\", name: AnyStr=\"\",\n nodeState: Union[int, bool]=0, perInstance: bool=True, smartFit:\n bool=True, worldSpace: bool=True, q=True, query=True, e=True,\n edit=True, **kwargs)->Union[AnyStr, Any]:\n pass",
"def sphere_volume(r):\n return (4/3) * 3.14159 * r**3",
"def two_sphere_system(\n omega: float,\n rot_axis: np.ndarray,\n size: int = 200,\n s1_center_rel: np.ndarray = np.array([0.2, 0.2, 0.2]),\n s1_radius_rel: float = 0.05,\n s2_center_rel: np.ndarray = np.array([-0.2, -0.2, -0.2]),\n s2_radius_rel: float = 0.06,\n) -> np.ndarray:\n # get the rotation object\n rot_axis = rot_axis / np.linalg.norm(rot_axis)\n rotation = R.from_rotvec(-omega * rot_axis)\n # calculate the rotated sphere centers\n # sphere 1\n s1_rel = rotation.apply(s1_center_rel)\n # sphere 2\n s2_rel = rotation.apply(s2_center_rel)\n # get the index grid\n # NOTE: extend the range to make sure the sphere is not rotated out of the volume\n # grid_x, grid_y, grid_z = np.mgrid[0:size, 0:size, 0:size]\n # remapping to compensate for the strange coordinate system in tomopy projector\n grid_y, grid_z, grid_x = np.mgrid[0:size, 0:size, 0:size]\n # rescale to [-0.5, 0.5]\n grid_x = grid_x / (size - 1) - 0.5\n grid_y = -(grid_y / (size - 1) - 0.5)\n grid_z = grid_z / (size - 1) - 0.5\n # init volume\n vol = np.zeros_like(grid_x)\n # mark the voxels of sphere 1 to be 1\n s1_dist_squared = (grid_x - s1_rel[0]) ** 2 + (grid_y - s1_rel[1]) ** 2 + (grid_z - s1_rel[2]) ** 2\n r1_squared = s1_radius_rel**2\n vol[s1_dist_squared < r1_squared] = 1.0\n # mark the voxels of sphere 2 to be 2\n s2_dist_squared = (grid_x - s2_rel[0]) ** 2 + (grid_y - s2_rel[1]) ** 2 + (grid_z - s2_rel[2]) ** 2\n r2_squared = s2_radius_rel**2\n vol[s2_dist_squared < r2_squared] = 1.0\n return vol",
"def sphere(geometry,\n psd_name,psd_shape,psd_loc,psd_scale,\n pore_seed='pore.seed',\n psd_offset=0,\n **kwargs):\n import scipy.stats as spst\n prob_fn = getattr(spst,psd_name)\n P = prob_fn(psd_shape,loc=psd_loc,scale=psd_scale)\n value = P.ppf(geometry[pore_seed])+psd_offset\n return value",
"def sphere(self, path, args):\n note_name = args[0]\n hands = args[1]\n probability = args[2]\n force = args[3]\n \n print \"got sphere : %s '%s', %d, %f, %f\" % (path, \n note_name, \n hands, \n probability,\n force)\n\n bowls = {'c2': (0, 127),\n 'd2': (0, 127)}\n\n speed_min = 0\n speed_max = 1\n\n try:\n bowl_lower = bowls[note_name][0]\n bowl_upper = bowls[note_name][1]\n except KeyError:\n bowl_lower = 0\n bowl_upper = 127\n\n bowl_range = bowl_upper - bowl_lower\n step = float(speed_max) / bowl_range\n\n print step, bowl_range\n \n midi_vel = int(min(math.ceil(bowl_lower + (float(force*force + 0.1) / step) + 5), 127))\n\n print midi_vel\n\n if hands == RIGHT_HAND:\n engine._TheEngine().process(NoteOnEvent(engine.in_ports()[0],\n settings.MIDI_HAMMER_CHANNEL,\n note_number(note_name),\n midi_vel)\n )\n\n elif hands == LEFT_HAND:\n engine._TheEngine().process(NoteOnEvent(engine.in_ports()[0],\n settings.MIDI_REPEAT_CHANNEL,\n note_number(note_name),\n midi_vel)\n )",
"def to_spherical(d, r_grid, theta_grid, phi_grid, items):\n import numpy as np\n nr, nt, nphi = len(r_grid), len(theta_grid), len(phi_grid)\n files = {}\n\n for key in items:\n files.update({key: open(items[key]['filename'], 'w')})\n\n state = query_state()\n\n for i in range(nphi-1):\n phi = 0.5 * (phi_grid[i] + phi_grid[i+1])\n for j in range(nt-1):\n theta = 0.5 * (theta_grid[j] + theta_grid[j+1])\n for k in range(nr-1):\n r = 0.5 * (r_grid[k] + r_grid[k+1])\n rho = r * np.sin(theta)\n z = r * np.cos(theta)\n for key in items:\n val = state.query(d, rho, z, key)\n files[key].write('{0:.6e}\\n'.format(val))\n\n for key in items:\n files[key].close()",
"def delaz(eqlat, eqlon, stlat, stlon, flag):\n\n if flag==0: # convert geographic degrees to geocentric radians\n eqlat, eqlon = coortr(eqlat,eqlon,flag)\n stlat, stlon = coortr(stlat,stlon,flag) \n\n eqcolat = m.pi/2-eqlat\n stcolat = m.pi/2-stlat\n\n cos_eq = m.cos(eqcolat)\n sin_eq = m.sin(eqcolat)\n cos_st = m.cos(stcolat)\n sin_st = m.sin(stcolat)\n cos_eqst = m.cos(stlon-eqlon)\n sin_eqst = m.sin(stlon-eqlon)\n\n cos_delta = cos_eq * cos_st + sin_eq * sin_st * cos_eqst\n sin_delta = m.sqrt(1-cos_delta * cos_delta)\n delta = m.atan2(sin_delta,cos_delta)\n # if sin(delta)=0, set sin(delta)=eps=10**-16\n eps = 3.e-7\n sin_delta = sin_delta + (sin_delta==0)*eps\n\n # index is zero if expression is false, 1 if true; \n # if false, leave unchanged, if true azeqst=pi-azeqst\n # this puts azeqst into the correct quadrant\n azeqst = m.asin(sin_st*sin_eqst/sin_delta)\n index = (sin_eq*cos_st - cos_eq*sin_st*cos_eqst < 0)\n azeqst = azeqst + index*(m.pi-2*azeqst)\n azeqst = azeqst + (azeqst<0)*2*m.pi\n\n azsteq = m.asin(-sin_eq*sin_eqst/sin_delta)\n index = (cos_eq*sin_st - sin_eq*cos_st*cos_eqst < 0)\n azsteq = azsteq + index*(m.pi-2*azsteq)\n azsteq = azsteq + (azsteq<0)*2*m.pi\n\n # convert to degrees\n delta = delta*180/m.pi\n azeqst = azeqst*180/m.pi\n azsteq = azsteq*180/m.pi\n\n return delta, azeqst, azsteq"
] | [
"0.599128",
"0.59544706",
"0.5854337",
"0.58359677",
"0.5774822",
"0.57709026",
"0.57272184",
"0.570104",
"0.56385297",
"0.55973965",
"0.5587924",
"0.5579241",
"0.5560049",
"0.5556892",
"0.5509854",
"0.54964167",
"0.5462277",
"0.5460161",
"0.5453829",
"0.5449165",
"0.5439587",
"0.54326665",
"0.5407802",
"0.5406529",
"0.5400025",
"0.5395192",
"0.5356507",
"0.5339282",
"0.5326056",
"0.5325227"
] | 0.6863035 | 0 |
Perform different collection-dependent operations and validate that we get a CollectionNotAvailable exception for all the ops: 1. Perform scope create/delete from the SDK 2. Perform collection create/delete from the SDK 3. Perform CRUD on the target collection and validate | def test_collections_not_available(self):
# Acquire SDK client for mutations
client = self.sdk_client_pool.get_client_for_bucket(self.bucket)
scope_name = self.bucket_util.get_random_name()
col_name = self.bucket_util.get_random_name()
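        # Creating a collection with the reserved default name (starts with '_') should be rejected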
try:
client.create_collection(CbServer.default_collection)
self.log_failure("Collection with default name created")
except CouchbaseException as e:
if "First character must not be _ or %" not in str(e):
self.log_failure("Create default collection invalid message")
client.create_scope(scope_name)
client.create_collection(col_name, scope=CbServer.default_scope)
client.create_collection(col_name, scope=scope_name)
# Create collection with same name
try:
client.create_collection(col_name, scope=scope_name)
except CollectionExistsException:
pass
        # Create a collection under a non-existent scope
try:
client.create_collection(col_name, scope="scope_unavailable")
except ScopeNotFoundException:
pass
client.drop_collection(CbServer.default_scope, col_name)
client.drop_collection(scope_name, col_name)
# Drop already dropped collection
try:
client.drop_collection(scope_name, col_name)
except CollectionNotFoundException:
pass
self.sleep(10, "Wait for meta kv refresh")
client.select_collection(scope_name, col_name)
result = client.crud("create", "key", "value")
if result["status"] is True:
self.log_failure("Collection create successful")
elif SDKException.AmbiguousTimeoutException \
not in str(result["error"]):
self.log_failure("Invalid exception during doc create")
# Drop scope
client.drop_scope(scope_name)
# Drop scope which was already dropped
try:
client.drop_scope(scope_name)
except ScopeNotFoundException:
pass
# Release the acquired client
client.select_collection(CbServer.default_scope,
CbServer.default_collection)
self.sdk_client_pool.release_client(client)
self.validate_test_failure() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_basic_ops(self):\n load_spec = dict()\n verification_dict = dict()\n\n # Stat validation reference variables\n verification_dict[\"ops_create\"] = 0\n verification_dict[\"ops_update\"] = 0\n verification_dict[\"ops_delete\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"sync_write_committed_count\"] = 0\n\n for _, scope in self.bucket.scopes.items():\n for _, collection in scope.collections.items():\n verification_dict[\"ops_create\"] += collection.num_items\n if self.durability_level in self.supported_d_levels:\n verification_dict[\"sync_write_committed_count\"] \\\n += collection.num_items\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n # load_spec[\"target_vbuckets\"] = []\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \\\n = \"test_collections\"\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=2,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(verification_dict=verification_dict)\n\n # Wait for doc_loading to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed\")\n self.validate_test_failure()\n\n self.log.info(\"Validating doc_count in buckets\")\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n\n # Validate vbucket stats\n self.update_verification_dict_from_collection_task(verification_dict,\n doc_loading_task)\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n self.validate_cruds_from_collection_mutation(doc_loading_task)",
"def test_crud(self):\n c1 = self.hiarc_util.create_collection()\n c = self.hiarc_collections.create_collection(c1)\n gc = self.hiarc_collections.get_collection(c1.key)\n assert c == gc\n\n new_name = \"New Name\"\n new_description = \"New description\"\n ucr = hiarc.UpdateCollectionRequest(\n name=new_name, description=new_description)\n uc = self.hiarc_collections.update_collection(ucr, c.key)\n assert new_name == uc.name\n assert new_description == uc.description\n assert uc.modified_at > uc.created_at\n\n uc = hiarc.UpdateCollectionRequest(key='new key')\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.update_collection, uc, c.key)\n\n self.hiarc_collections.delete_collection(c.key)\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.get_collection, c.key)",
"def test_create_collection(self):\n pass",
"def test_non_overlapping_similar_crud(self):\n\n # Stat validation reference variables\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 0\n verification_dict[\"ops_update\"] = 0\n verification_dict[\"ops_delete\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"sync_write_committed_count\"] = 0\n\n for _, scope in self.bucket.scopes.items():\n for _, collection in scope.collections.items():\n verification_dict[\"ops_create\"] += collection.num_items\n if self.durability_level in self.supported_d_levels:\n verification_dict[\"sync_write_committed_count\"] \\\n += collection.num_items\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n doc_ops = self.input.param(\"doc_ops\", \"create\")\n # Reset initial doc_loading params to NO_OPS\n doc_load_template = \\\n self.bucket_util.get_crud_template_from_package(\"initial_load\")\n doc_load_template[MetaCrudParams.DURABILITY_LEVEL] = \"\"\n doc_load_template[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = 3\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 0\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n\n # Create required doc_generators for CRUD ops\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 25\n if DocLoading.Bucket.DocOps.CREATE in doc_ops:\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n elif DocLoading.Bucket.DocOps.UPDATE in doc_ops:\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 50\n elif DocLoading.Bucket.DocOps.DELETE in doc_ops:\n doc_load_template[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 50\n\n async_write_crud_spec = deepcopy(doc_load_template)\n sync_write_crud_spec = deepcopy(doc_load_template)\n\n sync_write_crud_spec[MetaCrudParams.DURABILITY_LEVEL] = \\\n self.durability_level\n\n async_write_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n async_write_crud_spec,\n mutation_num=1,\n async_load=True)\n sync_write_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n sync_write_crud_spec,\n mutation_num=2,\n async_load=True)\n\n # Wait for all task to complete\n self.task.jython_task_manager.get_task_result(async_write_loading_task)\n self.task.jython_task_manager.get_task_result(sync_write_loading_task)\n\n # Validate CRUD loading results\n self.bucket_util.validate_doc_loading_results(async_write_loading_task)\n self.bucket_util.validate_doc_loading_results(sync_write_loading_task)\n\n if async_write_loading_task.result is False:\n self.log_failure(\"Doc_ops failed in async_write_task\")\n if sync_write_loading_task.result is False:\n self.log_failure(\"Doc_ops failed in sync_write_task\")\n\n # Verify doc count and other stats\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def collection(self, name=\"\", desc=\"\", collection=None, remove=False,\n elements=None, **kwargs):\n\n #in the future, MPO may support updates of values such as name and desc. At that point,\n #specifying a UUID will enable updates of those values. May want to be able to remove element\n #from a collection too.\n #remove option could apply to the entire collection in future api extensions\n\n ##validation of input\n #elements must be a list if present\n if elements:\n if not isinstance(elements,list):\n elements=[elements]\n else:\n elements=[]\n\n if collection: #add to existing collection\n\n if remove:\n if desc!=\"\":\n warnings.warn(\"InvalidArgs in collect/collection. No description used when removing an element.\")\n if name!=\"\":\n warnings.warn(\"InvalidArgs in collect/collection. No name used when removing an element.\")\n assert elements,\"InvalidArgs in collect/collection. Must specify an element to remove.\"\n assert collection!=None,\"InvalidArgs in collect/collection. Must specify the collection from which to remove the element.\"\n\n for element in elements:\n r=self.delete(self.COLLECTION_ELEMENT_RT.format(cid=collection)+'/'+element)\n\n else:\n payload={\"elements\":elements}\n r=self.post(self.COLLECTION_ELEMENT_RT.format(cid=collection), None,\n collection, data=payload, **kwargs)\n\n else: #make new collection\n payload={\"name\":name,\"description\":desc,\"elements\":elements}\n r=self.post(self.COLLECTION_RT, None, None, data=payload, **kwargs)\n\n return r",
"async def createCollection(self, body=\"\"):\n payload = {}\n \n\n # Parameter validation\n schema = CatalogValidator.createCollection()\n schema.dump(schema.load(payload))\n \n # Body validation\n from .models import CreateCollection\n schema = CreateCollection()\n schema.dump(schema.load(body))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"POST\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"post\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/\", ), query_string, headers, body, exclude_headers=exclude_headers), data=body)",
"def test_sub_doc_basic_ops(self):\n load_spec = dict()\n\n # Stat validation reference variables\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 0\n verification_dict[\"ops_update\"] = 0\n verification_dict[\"ops_delete\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"sync_write_committed_count\"] = 0\n\n for _, scope in self.bucket.scopes.items():\n for _, collection in scope.collections.items():\n verification_dict[\"ops_create\"] += collection.num_items\n if self.durability_level in self.supported_d_levels:\n verification_dict[\"sync_write_committed_count\"] \\\n += collection.num_items\n\n # Initial validation\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n if self.target_vbucket and type(self.target_vbucket) is not list:\n self.target_vbucket = [self.target_vbucket]\n\n # load_spec[\"target_vbuckets\"] = []\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"subdoc_crud\"] = dict()\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 100\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 0\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 0\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n load_spec[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = \"all\"\n load_spec[MetaCrudParams.SCOPES_CONSIDERED_FOR_CRUD] = \"all\"\n load_spec[MetaCrudParams.BUCKETS_CONSIDERED_FOR_CRUD] = \"all\"\n load_spec[MetaCrudParams.DURABILITY_LEVEL] = self.durability_level\n\n self.log.info(\"Perform initial 'insert' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n async_load=True)\n\n # Wait for doc_loading to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed\")\n self.validate_test_failure()\n\n # Verify initial doc load count\n self.log.info(\"Validating doc_count in buckets\")\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n\n # Validate vbucket stats\n self.update_verification_dict_from_collection_task(verification_dict,\n doc_loading_task)\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 0\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 30\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 30\n\n self.log.info(\"Perform 'upsert' & 'remove' mutations\")\n doc_loading_task = self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=2,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n 
self.__perform_collection_crud(mutation_num=3,\n verification_dict=verification_dict)\n\n # Wait for doc_loading to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed\")\n self.update_verification_dict_from_collection_task(verification_dict,\n doc_loading_task)\n self.validate_test_failure()\n\n # Verify doc count and other stats\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n\n # Validate verification_dict and validate\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n # MB-39963 - Not failing due to known behavior\n self.log.warning(\"Cbstat vbucket-details verification failed\")\n # self.validate_cruds_from_collection_mutation(doc_loading_task)",
"def test_collection_viewset_permissions(logged_in_apiclient):\n client, _ = logged_in_apiclient\n client.logout()\n collection = CollectionFactory()\n urls = (\n reverse(\"models-api:collection-list\"),\n reverse(\"models-api:collection-detail\", kwargs={\"key\": collection.hexkey}),\n )\n for url in urls:\n assert client.get(url).status_code == status.HTTP_200_OK\n assert client.post(url, {\"owner\": 1}).status_code == status.HTTP_403_FORBIDDEN",
"def create_collections(self):\n\n ''''''",
"def change_collection(self):\n\n # Print out all collections with index numbers\n self.clear_screen()\n print \"**Collections**\\n\"\n for i, collection in enumerate(self.master_collection):\n print \"Collection ID: %d | %s\" % (i, collection.name)\n print \"\"\n\n # User select collection\n selection = raw_input(\n \"Enter Collection ID or 'new' for a new collection. > \")\n\n # Create a new collection and add to master collection list.\n if selection.strip().lower() == 'new':\n collection_name = raw_input(\"Name for new collection. > \")\n self.master_collection.append(\n doto.Collection(collection_name))\n self.current_collection = self.master_collection[-1]\n return\n\n # Validate user selection\n try:\n index = int(selection)\n self.current_collection = self.master_collection[index]\n except (ValueError, IndexError) as e:\n raw_input(\"Invalid selection: %s\\nPress Enter.\" % e)\n return\n\n # With selected collection, offer options\n print \"'r': Rename collection\"\n print \"'x': Delete collection (cannot be undone)\"\n print \"'v': View current collection tasks\"\n selection = ''\n\n # Continue until user selects to view collection\n while selection != 'v':\n selection = raw_input(\"Enter command. > \")\n selection = selection.strip().lower()\n\n # Rename collection and set to current collection\n if selection == 'r':\n new_name = raw_input(\"Enter new collection name. > \")\n self.current_collection.name = new_name\n break\n\n # Delete collection and set current to default\n elif selection == 'x':\n\n # There must be at least one collection\n if len(self.master_collection) <= 1:\n print \"One collection remaining, cannot delete.\",\n print \"Create new collection first.\"\n raw_input(\"Press Enter to continue.\")\n break\n delete = raw_input(\"Delete this collection? y/n > \")\n if delete.strip().lower() in ('y', 'yes'):\n del self.master_collection[index]\n self.current_collection = self.master_collection[0]\n break\n else:\n print \"Invalid command. Try again.\"\n return",
"def test_get_all_collections(self):\n for i in range(self.hiarc_util.LARGE_ENTITY_COUNT):\n self.hiarc_collections.create_collection(\n self.hiarc_util.create_collection())\n all_collections = self.hiarc_collections.get_all_collections()\n assert self.hiarc_util.LARGE_ENTITY_COUNT == len(all_collections)\n\n u1 = self.hiarc_users.create_user(self.hiarc_util.create_user())\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.get_all_collections, x_hiarc_user_key=u1.key)\n\n ut = self.hiarc_token.create_user_token(\n hiarc.CreateUserTokenRequest(key=u1.key))\n lc = hiarc.CollectionApi(hiarc.ApiClient(\n configuration=self.hiarc_util.init_hiarc_client_jwt_token(ut.bearer_token)))\n self.assertRaises(hiarc.rest.ApiException,\n lc.get_all_collections)\n # reset ApiClient to Admin User\n # hiarc.ApiClient(\n # configuration=self.hiarc_util.init_hiarc_config_admin())",
"async def ensure_collection(self, collection):\n if await self.does_collection_exist(collection):\n return\n # Create Solr collection\n try:\n # Collection creation in API v2 doesn't support collection.configName yet.\n # So using old API (/solr/...).\n response = await self.get(\n '/solr/admin/collections',\n params={\n 'action': 'CREATE',\n 'name': collection,\n 'collection.configName': APPSCALE_CONFIG_SET_NAME,\n 'replicationFactor': self._settings.replication_factor,\n 'autoAddReplicas': True,\n 'numShards': self._settings.shards_number,\n 'maxShardsPerNode': self._settings.max_shards_per_node,\n 'waitForFinalState': True,\n }\n )\n logger.info('Successfully created collection {} ({})'\n .format(collection, response.body))\n except SolrError as err:\n if 'collection already exists' in err.error_detail:\n logger.info('Collection {} already exists'.format(collection))\n elif 'Cannot create collection ' in err.error_detail:\n logging.warning('Solr message: {}'.format(err.error_detail))\n logging.warning('Scheduling deletion of collection {}'\n .format(collection))\n ioloop.IOLoop.current().spawn_callback(\n self.delete_collection, collection\n )\n raise\n else:\n logger.warning('Failed to create collection {}'.format(collection))\n raise\n # Update collections cache in background\n ioloop.IOLoop.current().spawn_callback(self.list_collections)",
"def create_new_collections(self, name, doc_type, is_cluster):\n print('selecting collection tab \\n')\n select_collection_page_sitem = self.locator_finder_by_id(self.select_collection_page_id)\n select_collection_page_sitem.click()\n time.sleep(1)\n\n print('Clicking on create new collection box \\n')\n select_create_collection_sitem = self.locator_finder_by_id(self.select_create_collection_id)\n select_create_collection_sitem.click()\n time.sleep(1)\n\n print('Selecting new collection name \\n')\n select_new_collection_name_sitem = self.locator_finder_by_id(self.select_new_collection_name_id)\n select_new_collection_name_sitem.click()\n select_new_collection_name_sitem.send_keys(name)\n time.sleep(1)\n\n print(f'Selecting collection type for {name} \\n') # collection Document type where # '2' = Document, '3' = Edge\n self.locator_finder_by_select(self.select_collection_type_id, doc_type)\n time.sleep(1)\n\n if is_cluster:\n print(f'selecting number of Shards for the {name} \\n')\n shards = 'new-collection-shards'\n shards_sitem = self.locator_finder_by_id(shards)\n shards_sitem.click()\n shards_sitem.clear()\n shards_sitem.send_keys(9)\n time.sleep(2)\n\n print(f'selecting number of replication factor for {name} \\n')\n rf = 'new-replication-factor'\n rf_sitem = self.locator_finder_by_id(rf)\n rf_sitem.click()\n rf_sitem.clear()\n rf_sitem.send_keys(3)\n time.sleep(2)\n\n print(f'Selecting collection advance options for {name} \\n')\n select_advance_option_sitem = self.locator_finder_by_xpath(self.select_advance_option_id)\n select_advance_option_sitem.click()\n time.sleep(1)\n\n # Selecting collection wait type where value # 0 = YES, '1' = NO)\n self.locator_finder_by_select(self.wait_for_sync_id, 0)\n time.sleep(1)\n\n print(f'Selecting create button for {name} \\n')\n create_new_collection_btn_sitem = self.locator_finder_by_id(self.create_new_collection_btn_id)\n create_new_collection_btn_sitem.click()\n time.sleep(3)\n self.webdriver.refresh()",
"def test_delete_collection_o_auth_client_authorization(self):\n pass",
"def test_delete_collections(self):\n pass",
"def test_find_collection(self):\n md = self.hiarc_util.get_test_metadata()\n c1 = self.hiarc_collections.create_collection(\n self.hiarc_util.create_collection(metadata=md))\n md[\"quotaCarrying\"] = False\n self.hiarc_collections.create_collection(\n self.hiarc_util.create_collection(metadata=md))\n self.hiarc_collections.create_collection(\n self.hiarc_util.create_collection())\n\n q = [{\n \"prop\": \"department\",\n \"op\": \"starts with\",\n \"value\": \"sal\"\n },\n {\n \"bool\": \"and\"\n },\n {\n \"parens\": \"(\"\n },\n {\n \"prop\": \"targetRate\",\n \"op\": \">=\",\n \"value\": 4.22\n },\n {\n \"bool\": \"and\"\n },\n {\n \"prop\": \"quotaCarrying\",\n \"op\": \"=\",\n \"value\": True\n },\n {\n \"parens\": \")\"\n }]\n\n qr = hiarc.FindCollectionsRequest(query=q)\n fc = self.hiarc_collections.find_collection(qr)\n assert len(fc) == 1\n assert self.hiarc_util.compare_dict_to_entity(fc[0], c1)",
"def test_create_access_and_update_collection_asuser(self):\n u1 = self.hiarc_users.create_user(self.hiarc_util.create_user())\n c1 = self.hiarc_util.create_collection()\n c = self.hiarc_collections.create_collection(\n c1, x_hiarc_user_key=u1.key)\n gc = self.hiarc_collections.get_collection(\n c1.key, x_hiarc_user_key=u1.key)\n assert c == gc\n\n u2 = self.hiarc_users.create_user(self.hiarc_util.create_user())\n autc = hiarc.AddUserToCollectionRequest(\n u2.key, hiarc.AccessLevel.READ_ONLY)\n self.hiarc_collections.add_user_to_collection(autc, c1.key)\n fc = self.hiarc_collections.get_collection(\n c1.key, x_hiarc_user_key=u2.key)\n assert c == fc\n\n u3 = self.hiarc_users.create_user(self.hiarc_util.create_user())\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.get_collection, c1.key, x_hiarc_user_key=u3.key)\n\n ac = self.hiarc_collections.get_collection(c1.key)\n assert c == ac\n\n new_name = \"New Name\"\n new_description = \"New description\"\n ucr = hiarc.UpdateCollectionRequest(\n name=new_name, description=new_description)\n uc = self.hiarc_collections.update_collection(\n ucr, c.key, x_hiarc_user_key=u1.key)\n assert new_name == uc.name\n assert new_description == uc.description\n assert uc.modified_at > uc.created_at\n\n new_name = \"New Name 2\"\n new_description = \"New description 2\"\n ucr = hiarc.UpdateCollectionRequest(\n name=new_name, description=new_description)\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.update_collection, ucr, c1.key, x_hiarc_user_key=u2.key)\n\n autc = hiarc.AddUserToCollectionRequest(\n u3.key, hiarc.AccessLevel.READ_WRITE)\n self.hiarc_collections.add_user_to_collection(autc, c1.key)\n new_name = \"New Name 3\"\n new_description = \"New description 3\"\n ucr = hiarc.UpdateCollectionRequest(\n name=new_name, description=new_description)\n uc = self.hiarc_collections.update_collection(\n ucr, c.key, x_hiarc_user_key=u3.key)\n assert new_name == uc.name\n assert new_description == uc.description\n assert uc.modified_at > uc.created_at\n\n u4 = self.hiarc_users.create_user(self.hiarc_util.create_user())\n self.assertRaises(hiarc.rest.ApiException,\n self.hiarc_collections.update_collection, ucr, c1.key, x_hiarc_user_key=u4.key)\n\n new_name = \"New Name 4\"\n new_description = \"New description 4\"\n ucr = hiarc.UpdateCollectionRequest(\n name=new_name, description=new_description)\n uc = self.hiarc_collections.update_collection(ucr, c.key)\n assert new_name == uc.name\n assert new_description == uc.description\n assert uc.modified_at > uc.created_at",
"def collection_batch():\n collections = [CollectionFactory() for _ in range(randint(3, 5))]\n for collection in collections:\n ClientFactory.create_batch(randint(0, 3), collection=collection)\n RoleFactory.create_batch(randint(0, 3), collection=collection)\n return collections",
"def test_delete_collection_namespaced_build(self):\n pass",
"def test_delete_collection(self):\n pass",
"def test_get_collection(self):\n pass",
"def test_delete_collection_o_auth_client(self):\n pass",
"def add_document(collection, testMode=True):\n\n print (\"\")\n log_app.debug(\"add_document\")\n log_app.debug(\"add_document / method : %s\", request.method )\n\n req_json = request.get_json()\n # log_app.debug(\"add_document / req_json : \\n%s\", pformat(req_json) )\n\n # role_to_check = request.args.get('role', default='admin', type=str)\n roles_to_check = COLLECTIONS_AUTH_MODIFICATIONS[collection][request.method]\n log_app.debug(\"add_document / roles_to_check : %s\", roles_to_check )\n\n allowedCollections = [\"tabs\", \"endpoints\" , \"routes\" ]\n ### not editable fields\n notAllowedFields = ['_id', 'apiviz_front_uuid', 'app_version', 'is_default']\n\n if request.method == 'POST' and collection in allowedCollections :\n \n log_app.debug(\"config app route / POST\" )\n\n token = req_json.get('token', '')\n auth_mode = req_json.get('auth_mode', None)\n apiviz_uuid = req_json['apiviz_front_uuid']\n is_authorized = checkJWT(token, roles_to_check, uuid=apiviz_uuid, auth_mode=auth_mode)\n\n if is_authorized : \n\n ### trim request doc from not allowed fields + add \n newDocData = req_json['doc_data']\n newDoc = { k: v for k, v in newDocData.items() if k not in notAllowedFields} \n newDoc['apiviz_front_uuid'] = apiviz_uuid\n newDoc['app_version'] = version\n newDoc['is_default'] = testMode\n\n # insert newdoc to collection\n mongoColl = mongoConfigColls[collection]\n addedDoc = mongoColl.insert_one(newDoc)\n\n log_app.debug(\"add_document / newDoc : \\n%s\", pformat(DocOidToString(newDoc)) )\n\n msg = \"you added this document to the collection\"\n respDoc = DocOidToString(newDoc)\n\n return jsonify({\n 'msg' : msg,\n 'request' : req_json,\n 'new_doc' : respDoc\n })\n\n else : \n msg = \"you don't have the authorization level to add a new document\"\n return jsonify({\n 'msg' : msg,\n 'request' : req_json,\n })\n \n else : \n msg = \"you can't add this document to this collection or the method is not allowed...\"\n return jsonify({\n 'msg' : msg,\n 'request' : req_json,\n })",
"def test_get_collections(self):\n pass",
"def CreateResourceInCollectionSample():\n client = CreateClient()\n col = gdata.docs.data.Resource(type='folder', title='My Sample Folder')\n col = client.CreateResource(col)\n print 'Created collection:', col.title.text, col.resource_id.text\n doc = gdata.docs.data.Resource(type='document', title='My Sample Doc')\n doc = client.CreateResource(doc, collection=col)\n print 'Created:', doc.title.text, doc.resource_id.text",
"def test_create_remove_collection_with_node_crash(self):\n def create_collection(client_type, bucket_obj, scope, collection):\n if client_type == \"sdk\":\n client.create_collection(collection, scope)\n self.bucket_util.create_collection_object(bucket_obj, scope,\n {\"name\": collection})\n elif client_type == \"rest\":\n self.bucket_util.create_collection(self.cluster.master,\n bucket_obj,\n scope,\n {\"name\": collection})\n else:\n self.log_failure(\"Invalid client_type provided\")\n\n def remove_collection(client_type, bucket_obj, scope, collection):\n if client_type == \"sdk\":\n client.drop_collection(scope, collection)\n self.bucket_util.mark_collection_as_dropped(bucket_obj, scope,\n collection)\n elif client_type == \"rest\":\n self.bucket_util.drop_collection(self.cluster.master,\n bucket_obj, scope, collection)\n else:\n self.log_failure(\"Invalid client_type provided\")\n\n kv_nodes = self.cluster_util.get_kv_nodes(self.cluster)\n if len(kv_nodes) == 1:\n self.fail(\"Need atleast two KV nodes to run this test\")\n\n client = None\n task = None\n action = self.input.param(\"action\", \"create\")\n crash_during = self.input.param(\"crash_during\", \"pre_action\")\n data_load_option = self.input.param(\"data_load_option\", None)\n crash_type = self.input.param(\"simulate_error\",\n CouchbaseError.KILL_MEMCACHED)\n\n if self.scope_name != CbServer.default_scope:\n self.scope_name = \\\n BucketUtils.get_random_name(\n max_length=CbServer.max_scope_name_len)\n self.bucket_util.create_scope(self.cluster.master, self.bucket,\n {\"name\": self.scope_name})\n if self.collection_name != CbServer.default_collection:\n self.collection_name = \\\n BucketUtils.get_random_name(\n max_length=CbServer.max_collection_name_len)\n\n # Select a KV node other than master node from the cluster\n node_to_crash = kv_nodes[sample(range(1, len(kv_nodes)), 1)[0]]\n\n client = self.sdk_client_pool.get_client_for_bucket(self.bucket)\n use_client = sample([\"sdk\", \"rest\"], 1)[0]\n\n if action == \"remove\" \\\n and self.collection_name != CbServer.default_collection:\n # Create a collection to be removed\n create_collection(use_client, self.bucket,\n self.scope_name, self.collection_name)\n\n # Create a error scenario\n self.log.info(\"Selected scenario for test '%s'\" % crash_type)\n shell = RemoteMachineShellConnection(node_to_crash)\n cb_error = CouchbaseError(self.log, shell)\n cbstat_obj = Cbstats(node_to_crash)\n active_vbs = cbstat_obj.vbucket_list(self.bucket.name,\n vbucket_type=\"active\")\n target_vbuckets = list(\n set(range(0, 1024)).difference(set(active_vbs)))\n doc_gen = doc_generator(self.key, 0, 1000,\n target_vbucket=target_vbuckets)\n\n if crash_during == \"pre_action\":\n cb_error.create(crash_type)\n\n if data_load_option == \"mutate_default_collection\":\n task = self.task.async_load_gen_docs(\n self.cluster, self.bucket, doc_gen,\n DocLoading.Bucket.DocOps.UPDATE,\n exp=self.maxttl,\n batch_size=200, process_concurrency=8,\n compression=self.sdk_compression,\n durability=self.durability_level,\n timeout_secs=self.sdk_timeout)\n\n if action == \"create\":\n create_collection(self.client_type, self.bucket,\n self.scope_name, self.collection_name)\n elif action == \"remove\":\n remove_collection(self.client_type, self.bucket,\n self.scope_name, self.collection_name)\n\n if crash_during == \"post_action\":\n cb_error.create(crash_type)\n\n if data_load_option == \"mutate_default_collection\":\n self.task_manager.get_task_result(task)\n\n self.sleep(60, \"Wait before reverting the error 
scenario\")\n cb_error.revert(crash_type)\n\n # Close SSH and SDK connections\n shell.disconnect()\n if self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n self.validate_test_failure()",
"def test_02_get_geometry_collection_details(self):\n geometry_collection = GeometryCollection(**self.test_data)\n geometry_collection.save()\n response = self.client.get('/api/v1/collection/%s/' % geometry_collection.pk)\n self.assertEqual(response.status_code, status.HTTP_200_OK)",
"def _translate_to_collection(\n self,\n collection,\n recursive=False,\n run_conditions=[],\n resource_conditions=[],\n variety_conditions=[],\n ):\n\n run_list = []\n if recursive:\n run_conditions.extend(\n [\n (\"collection_id =\", collection[\"id\"]),\n ]\n )\n _logger.debug(\"Loading run with conditions: {0}\".format(run_conditions))\n run_list = self.load_runs(\n recursive=recursive,\n run_conditions=run_conditions,\n resource_conditions=resource_conditions,\n variety_conditions=variety_conditions,\n )\n\n res = RunCollection(collection[\"name\"], data=run_list)\n res.set_id(collection[\"id\"])\n\n return res",
"def create_collection(collection_json):\n print(\"collection_checker.create_collection()\")\n title = collection_json['title']\n collection_id = collection_dao.create_collection(title)\n\n list_book_ids = collection_json['book_ids']\n for book_id in list_book_ids:\n if BookDao.contains(book_id):\n continue\n else:\n abort(404, 'Invalid book ID')\n\n for book_id in list_book_ids:\n book = BookDao.get_book_object(book_id)\n collection_dao.append_collection(collection_id, book)\n return collection_dao.get_collection(collection_id)",
"def process_collection(pathname, ctx):\n collection_pth = Path(pathname)\n if ctx.found_collection_name is None:\n ctx.found_collection_name = \".\".join(collection_pth.parts[-2:])\n ctx.enter_collection(ctx.found_collection_name, pathname)\n ctx.add_dependencies()\n get_collection_plugins(pathname, ctx)\n process_collection_roles(str(collection_pth / \"roles\"), ctx)\n process_collection_tests(str(collection_pth / \"tests\"), ctx)\n ctx.exit_collection()"
] | [
"0.6478409",
"0.6240927",
"0.6176871",
"0.6035952",
"0.60354066",
"0.595611",
"0.594751",
"0.58774096",
"0.57835233",
"0.57726324",
"0.57502973",
"0.5734204",
"0.5733641",
"0.5721092",
"0.5701316",
"0.5699766",
"0.5696797",
"0.567209",
"0.5648594",
"0.5633532",
"0.5613327",
"0.56013125",
"0.55738485",
"0.55026525",
"0.5481866",
"0.54728776",
"0.5452189",
"0.54182786",
"0.54014",
"0.5399938"
] | 0.6556466 | 0 |
Test to make sure timeout is handled in durability calls and that no documents are loaded when durability cannot be met, using error simulation on the server node side. This validates failure in a majority of nodes, where durability will surely fail for all CRUDs. 1. Select a node from the cluster to simulate the specified error 2. Perform CRUD on the target bucket with the given timeout 3. Use cbstats to verify that no operations succeed 4. Revert the error scenario from the cluster to resume durability 5. Validate that all mutations succeed after reverting the error condition | def test_timeout_with_crud_failures(self):
# Local methods to validate vb_seqno
def compare_vb_stat(stat_1, stat_2, vb, comparison="!="):
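            # comparison='!=' fails when the two stats differ (the value is expected to stay unchanged);
            # comparison='==' fails when they are still equal (the value is expected to have moved)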
keys_to_check = ["high_seqno", "high_completed_seqno"]
result = True
for key in keys_to_check:
if vb in stat_1.keys():
if stat_1[vb]["uuid"] != stat_2[vb]["uuid"]:
self.log_failure("Mismatch in vb-%s UUID. %s != %s"
% (vb, stat_1[vb]["uuid"],
stat_2[vb]["uuid"]))
if comparison == "!=":
if stat_1[vb][key] != stat_2[vb][key]:
result = False
self.log.warning(
"Mismatch in vb-%s stat %s. %s != %s"
% (vb, key, stat_1[vb][key], stat_2[vb][key]))
elif stat_1[vb][key] == stat_2[vb][key]:
result = False
self.log.warning("Stat not updated for vb-%s stat %s. "
"%s == %s"
% (vb, key,
stat_1[vb][key], stat_2[vb][key]))
return result
def validate_vb_seqno_stats():
"""
            :return retry_validation: Boolean denoting whether to retry validation
"""
retry_validation = False
vb_info["post_timeout"][node.ip] = \
cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)
for tem_vb_num in range(self.cluster.vbuckets):
tem_vb_num = str(tem_vb_num)
if tem_vb_num not in affected_vbs:
if compare_vb_stat(vb_info["init"][node.ip],
vb_info["post_timeout"][node.ip],
tem_vb_num) is False:
self.log_failure("Unaffected vb-%s stat" % tem_vb_num)
elif int(tem_vb_num) in target_nodes_vbuckets["active"]:
if compare_vb_stat(vb_info["init"][node.ip],
vb_info["post_timeout"][node.ip],
tem_vb_num) is False:
self.log.warning("%s - mismatch in %s vb-%s seq_no"
% (node.ip, "active", tem_vb_num))
elif int(tem_vb_num) in target_nodes_vbuckets["replica"]:
if compare_vb_stat(vb_info["init"][node.ip],
vb_info["post_timeout"][node.ip],
tem_vb_num, comparison="==") is False:
retry_validation = True
self.log.warning("%s - mismatch in %s vb-%s seq_no"
% (node.ip, "replica", tem_vb_num))
return retry_validation
shell_conn = dict()
cbstat_obj = dict()
error_sim = dict()
target_nodes_vbuckets = dict()
vb_info = dict()
tasks = dict()
doc_gen = dict()
affected_vbs = list()
target_nodes_vbuckets["active"] = []
target_nodes_vbuckets["replica"] = []
vb_info["init"] = dict()
vb_info["post_timeout"] = dict()
vb_info["afterCrud"] = dict()
# Override crud_batch_size to minimum value for testing
self.crud_batch_size = 5
self.key = "test_collections"
self.sdk_timeout = 3
# Select target vbucket type to load_docs
target_vb_type = "replica"
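        # With MAJORITY_AND_PERSIST_TO_ACTIVE and stopped persistence, durability can only be
        # blocked on vbuckets whose active copy sits on the target nodes, so target 'active' vbuckets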
if self.simulate_error == CouchbaseError.STOP_PERSISTENCE \
and self.durability_level \
== Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE:
target_vb_type = "active"
# Create required scope/collection for successful CRUD operation
if self.scope_name != CbServer.default_scope:
self.scope_name = self.bucket_util.get_random_name()
self.collection_name = self.bucket_util.get_random_name()
self.log.info("Creating scope::collection %s::%s"
% (self.scope_name, self.collection_name))
self.create_scope_collection()
# Load docs into created collection
self.log.info("Loading data into created collection")
load_gen = doc_generator(self.key, 0, self.num_items)
task = self.task.async_load_gen_docs(
self.cluster, self.bucket, load_gen, "create", 0,
scope=self.scope_name,
collection=self.collection_name,
sdk_client_pool=self.sdk_client_pool,
batch_size=200, process_concurrency=8,
timeout_secs=60)
self.task_manager.get_task_result(task)
if self.subdoc_test:
load_gen = sub_doc_generator(self.key, 0, self.num_items/2)
task = self.task.async_load_gen_sub_docs(
self.cluster, self.bucket,
load_gen, Bucket_Op.SubDocOps.INSERT,
timeout_secs=self.sdk_timeout,
compression=self.sdk_compression,
path_create=True,
batch_size=100,
process_concurrency=8,
durability=self.durability_level,
scope=self.scope_name, collection=self.collection_name,
sdk_client_pool=self.sdk_client_pool)
self.task_manager.get_task_result(task)
self.bucket.scopes[self.scope_name].collections[
self.collection_name].num_items = self.num_items
target_nodes = DurabilityHelper.getTargetNodes(self.cluster,
self.nodes_init,
self.num_nodes_affected)
for node in target_nodes:
shell_conn[node.ip] = RemoteMachineShellConnection(node)
cbstat_obj[node.ip] = Cbstats(node)
target_nodes_vbuckets["active"] += \
cbstat_obj[node.ip].vbucket_list(self.bucket.name,
vbucket_type="active")
target_nodes_vbuckets["replica"] += \
cbstat_obj[node.ip].vbucket_list(self.bucket.name,
vbucket_type="replica")
vb_info["init"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(
self.bucket.name)
error_sim[node.ip] = CouchbaseError(self.log, shell_conn[node.ip])
curr_time = int(time.time())
expected_timeout = curr_time + self.sdk_timeout
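        # Sync-write failures should surface only after this point, i.e. once the
        # SDK timeout has actually elapsed (verified after the CRUD loop below)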
if target_vb_type == "active":
target_vbs = list(
set(target_nodes_vbuckets[target_vb_type])
.difference(set(target_nodes_vbuckets["replica"])))
else:
target_vbs = list(
set(target_nodes_vbuckets[target_vb_type])
.difference(set(target_nodes_vbuckets["active"])))
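        # target_vbs now holds vbuckets hosted on the target nodes only as the
        # chosen vbucket type (overlap with the other type removed)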
# Create required doc_generators
doc_gen["create"] = doc_generator(self.key, self.num_items,
self.crud_batch_size,
target_vbucket=target_vbs)
doc_gen["delete"] = doc_generator(self.key, 0,
self.crud_batch_size,
target_vbucket=target_vbs)
doc_gen["read"] = doc_generator(
self.key, int(self.num_items/3),
self.crud_batch_size,
target_vbucket=target_vbs)
doc_gen["update"] = doc_generator(
self.key, int(self.num_items/2),
self.crud_batch_size,
target_vbucket=target_vbs)
# Create required subdoc generators
doc_gen["insert"] = sub_doc_generator(
self.key, int(self.num_items/2), self.crud_batch_size,
target_vbucket=target_vbs)
doc_gen["upsert"] = sub_doc_generator_for_edit(
self.key, 0, self.crud_batch_size,
template_index=1,
target_vbucket=target_vbs)
doc_gen["remove"] = sub_doc_generator(
self.key, 0, self.crud_batch_size,
target_vbucket=target_vbs)
# Perform specified action
for node in target_nodes:
error_sim[node.ip].create(self.simulate_error,
bucket_name=self.bucket.name)
self.sleep(5, "Wait for error_simulation to take effect")
ops_to_perform = [Bucket_Op.DocOps.CREATE, Bucket_Op.DocOps.UPDATE,
Bucket_Op.DocOps.READ, Bucket_Op.DocOps.DELETE]
if self.subdoc_test:
ops_to_perform = [Bucket_Op.SubDocOps.INSERT,
Bucket_Op.SubDocOps.UPSERT,
Bucket_Op.SubDocOps.REMOVE]
for op_type in ops_to_perform:
self.log.info("Starting doc op %s" % op_type)
if op_type in Bucket_Op.DOC_OPS:
tasks[op_type] = self.task.async_load_gen_docs(
self.cluster, self.bucket, doc_gen[op_type], op_type, 0,
scope=self.scope_name,
collection=self.collection_name,
sdk_client_pool=self.sdk_client_pool,
batch_size=1, process_concurrency=8,
durability=self.durability_level,
timeout_secs=self.sdk_timeout,
suppress_error_table=True,
print_ops_rate=False,
skip_read_on_error=True)
else:
tasks[op_type] = self.task.async_load_gen_sub_docs(
self.cluster, self.bucket, doc_gen[op_type], op_type, 0,
scope=self.scope_name,
collection=self.collection_name,
sdk_client_pool=self.sdk_client_pool,
path_create=True,
batch_size=1, process_concurrency=8,
durability=self.durability_level,
timeout_secs=self.sdk_timeout,
print_ops_rate=False)
self.task.jython_task_manager.get_task_result(tasks[op_type])
# Validate task failures
if op_type == Bucket_Op.DocOps.READ:
# Validation for read task
if len(tasks[op_type].fail.keys()) != 0:
self.log_failure("Read failed for few docs: %s"
% tasks[op_type].fail.keys())
else:
# Validation of CRUDs - Update / Create / Delete
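                # Durability cannot be met, so each mutation is expected to fail
                # with DurabilityAmbiguousException; any other exception is a test failure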
for doc_id, crud_result in tasks[op_type].fail.items():
vb_num = self.bucket_util.get_vbucket_num_for_key(
doc_id, self.cluster.vbuckets)
if SDKException.DurabilityAmbiguousException \
not in str(crud_result["error"]):
self.log_failure(
"Invalid exception for doc %s, vb %s: %s"
% (doc_id, vb_num, crud_result))
# Revert the specified error scenario
for node in target_nodes:
error_sim[node.ip].revert(self.simulate_error,
bucket_name=self.bucket.name)
# Check whether the timeout triggered properly
if int(time.time()) < expected_timeout:
self.log_failure("Timed-out before expected time")
for op_type in ops_to_perform:
if op_type == Bucket_Op.DocOps.READ:
continue
while doc_gen[op_type].has_next():
doc_id, _ = doc_gen[op_type].next()
affected_vbs.append(
str(self.bucket_util.get_vbucket_num_for_key(
doc_id,
self.cluster.vbuckets)))
affected_vbs = list(set(affected_vbs))
# Fetch latest stats and validate the seq_nos are not updated
for node in target_nodes:
retry_count = 0
max_retry = 3
while retry_count < max_retry:
self.log.info("Trying to validate vbseq_no stats: %d"
% (retry_count+1))
retry_count += 1
retry_required = validate_vb_seqno_stats()
if not retry_required:
break
self.sleep(5, "Sleep for vbseq_no stats to update")
else:
# This will be exited only if `break` condition is not met
self.log_failure("validate_vb_seqno_stats verification failed")
self.validate_test_failure()
# Get SDK Client from client_pool
sdk_client = self.sdk_client_pool.get_client_for_bucket(
self.bucket,
self.scope_name,
self.collection_name)
# Doc error validation
for op_type in ops_to_perform:
task = tasks[op_type]
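            # On a single-node cluster the induced error covers every vbucket,
            # so all mutations in the batch must have failed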
if self.nodes_init == 1 \
and op_type != Bucket_Op.DocOps.READ \
and len(task.fail.keys()) != (doc_gen[op_type].end
- doc_gen[op_type].start):
self.log_failure("Failed keys %d are less than expected %d"
% (len(task.fail.keys()),
(doc_gen[op_type].end
- doc_gen[op_type].start)))
# Create table objects for display
table_view = TableView(self.log.error)
ambiguous_table_view = TableView(self.log.info)
table_view.set_headers(["Key", "vBucket", "Exception"])
ambiguous_table_view.set_headers(["Key", "vBucket"])
# Iterate failed keys for validation
for doc_key, doc_info in task.fail.items():
vb_for_key = self.bucket_util.get_vbucket_num_for_key(doc_key)
if SDKException.DurabilityAmbiguousException \
not in str(doc_info["error"]):
table_view.add_row([doc_key, vb_for_key,
doc_info["error"]])
ambiguous_table_view.add_row([doc_key, str(vb_for_key)])
if op_type not in Bucket_Op.SUB_DOC_OPS:
retry_success = \
self.durability_helper.retry_for_ambiguous_exception(
sdk_client, op_type, doc_key, doc_info)
if not retry_success:
self.log_failure("%s failed in retry for %s"
% (op_type, doc_key))
# Display the tables (if any errors)
table_view.display("Unexpected exception during %s" % op_type)
ambiguous_table_view.display("D_Ambiguous exception during %s"
% op_type)
# Release the acquired client
self.sdk_client_pool.release_client(sdk_client)
# Verify doc count after expected CRUD failure
self.bucket_util._wait_for_stats_all_buckets(self.cluster,
self.cluster.buckets)
self.bucket_util.validate_docs_per_collections_all_buckets(
self.cluster)
# Fetch latest stats and validate the values are updated
for node in target_nodes:
vb_info["afterCrud"][node.ip] = \
cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)
if vb_info["init"][node.ip] == vb_info["afterCrud"][node.ip]:
self.log_failure("vBucket seq_no stats not updated")
# Disconnect the shell connection
for node in target_nodes:
shell_conn[node.ip].disconnect()
self.validate_test_failure() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_with_persistence_issues(self):\n\n if self.durability_level in [\n Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE,\n Bucket.DurabilityLevel.PERSIST_TO_MAJORITY]:\n self.log.critical(\"Test not valid for persistence durability\")\n return\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n self.log.info(\"Simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs_in_target_nodes += cbstat_obj[node.ip].vbucket_list(\n self.bucket.name,\n \"active\")\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n self.bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n if self.simulate_error \\\n in [DiskError.DISK_FULL, DiskError.DISK_FAILURE]:\n error_sim = DiskError(self.log, self.task_manager,\n self.cluster.master, target_nodes,\n 60, 0, False, 120,\n disk_location=\"/data\")\n error_sim.create(action=self.simulate_error)\n else:\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Perform CRUDs with induced error scenario is active\n load_spec = dict()\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(mutation_num=2)\n\n # Wait for doc_loading to complete and validate the doc ops\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed with persistence issue\")\n\n if self.simulate_error \\\n in [DiskError.DISK_FULL, DiskError.DISK_FAILURE]:\n error_sim.revert(self.simulate_error)\n else:\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Disconnect the shell connection\n shell_conn[node.ip].disconnect()\n self.sleep(10, \"Wait for node recovery to complete\")\n\n # Doc count validation\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n\n # Fetch latest failover stats and validate the values are updated\n 
self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n # Failover validation\n val = \\\n failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n error_msg = \"Failover stats got updated\"\n self.assertTrue(val, msg=error_msg)\n\n # Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n self.validate_test_failure()\n\n # Doc count validation\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_timeout_with_successful_crud(self):\n\n shell_conn = dict()\n cbstat_obj = dict()\n error_sim = dict()\n vb_info = dict()\n vb_info[\"init\"] = dict()\n vb_info[\"afterCrud\"] = dict()\n\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n for node in target_nodes:\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n cbstat_obj[node.ip] = Cbstats(node)\n vb_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n self.bucket.name)\n error_sim[node.ip] = CouchbaseError(self.log, shell_conn[node.ip])\n\n doc_load_spec = dict()\n doc_load_spec[MetaCrudParams.SDK_TIMEOUT] = self.sdk_timeout\n doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = self.durability_level\n doc_load_spec[\"doc_crud\"] = dict()\n doc_load_spec[\"subdoc_crud\"] = dict()\n doc_load_spec[\"doc_crud\"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \\\n \"test_collections\"\n doc_load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 0\n doc_load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 0\n doc_load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 0\n\n doc_load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 0\n doc_load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 0\n doc_load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 0\n\n ops_to_perform = [\"create\", \"update\", \"read\", \"replace\", \"delete\"]\n if self.subdoc_test:\n ops_to_perform = [\"insert\", \"upsert\", \"remove\"]\n\n for op_type in ops_to_perform:\n self.log.info(\"Performing '%s' with timeout=%s\"\n % (op_type, self.sdk_timeout))\n curr_spec = deepcopy(doc_load_spec)\n if op_type == \"create\":\n curr_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] \\\n = 5\n elif op_type == \"update\":\n curr_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] \\\n = 5\n elif op_type == \"delete\":\n curr_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] \\\n = 5\n elif op_type == \"read\":\n curr_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 5\n curr_spec[MetaCrudParams.RETRY_EXCEPTIONS] = [\n SDKException.TimeoutException]\n elif op_type == \"insert\":\n curr_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 5\n elif op_type == \"upsert\":\n curr_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 5\n elif op_type == \"remove\":\n curr_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 5\n\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n curr_spec,\n mutation_num=1,\n async_load=True,\n validate_task=False)\n\n # Perform specified action\n for node in target_nodes:\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=self.bucket.name)\n\n self.sleep(10, \"Wait before reverting the error condition\")\n\n # Revert the specified error scenario\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=self.bucket.name)\n\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.fail(\"Doc_loading for '%s' failed\" % op_type)\n\n # Fetch latest stats and validate the values are updated\n for node in 
target_nodes:\n curr_stat = cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)\n if vb_info[\"init\"][node.ip] == curr_stat:\n self.log_failure(\"vbucket_seqno not updated. %s == %s\"\n % (vb_info[\"init\"][node.ip], curr_stat))\n\n # Disconnect the shell connection\n for node in target_nodes:\n shell_conn[node.ip].disconnect()\n\n # Verify initial doc load count\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n self.validate_test_failure()",
"def test_with_process_crash(self):\n if self.num_replicas < 2:\n self.assertTrue(False, msg=\"Required: num_replicas > 1\")\n\n # Override num_of_nodes affected to 1 (Positive case)\n self.num_nodes_affected = 1\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs_in_target_nodes += cbstat_obj[node.ip].vbucket_list(\n self.bucket.name,\n \"active\")\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n self.bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n # Remove active vbuckets from doc_loading to avoid errors\n load_spec = dict()\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n load_spec[\"target_vbuckets\"] = list(set(range(0, 1024))\n ^ set(active_vbs_in_target_nodes))\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n async_load=True)\n\n self.sleep(5, \"Wait for doc loaders to start loading data\")\n\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud()\n\n # Wait for document_loader tasks to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed with process crash\")\n\n if self.simulate_error \\\n not in [DiskError.DISK_FULL, DiskError.DISK_FAILURE]:\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Disconnect the shell connection\n shell_conn[node.ip].disconnect()\n self.sleep(10, \"Wait for node recovery to complete\")\n\n # In case of error with Ephemeral bucket, need to rebalance\n # to make sure data is redistributed properly\n if self.bucket_type == Bucket.Type.EPHEMERAL:\n retry_num = 0\n result = None\n while retry_num != 2:\n result = self.task.rebalance(\n self.servers[0:self.nodes_init],\n [], [])\n if result:\n break\n retry_num += 1\n self.sleep(10, \"Wait before retrying rebalance\")\n\n self.assertTrue(result, \"Rebalance failed\")\n\n # Fetch latest failover stats and validate the values are 
updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n # Failover stat validation\n if self.simulate_error == CouchbaseError.KILL_MEMCACHED:\n val = failover_info[\"init\"][node.ip] \\\n != failover_info[\"afterCrud\"][node.ip]\n else:\n if self.simulate_error != CouchbaseError.STOP_MEMCACHED \\\n and self.bucket_type == Bucket.Type.EPHEMERAL:\n val = failover_info[\"init\"][node.ip] \\\n != failover_info[\"afterCrud\"][node.ip]\n else:\n val = failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n error_msg = \"Failover stats mismatch after error condition:\" \\\n \" %s != %s\" \\\n % (failover_info[\"init\"][node.ip],\n failover_info[\"afterCrud\"][node.ip])\n self.assertTrue(val, msg=error_msg)\n\n # Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Doc count validation\n self.validate_test_failure()\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_stop_process(self):\n error_to_simulate = self.input.param(\"simulate_error\", None)\n target_node = self.getTargetNode()\n remote = RemoteMachineShellConnection(target_node)\n error_sim = CouchbaseError(self.log, remote)\n target_vbuckets = Cbstats(target_node).vbucket_list(\n self.bucket.name, target_node)\n\n bucket_dict = BucketUtils.get_random_collections(\n self.cluster.buckets,\n req_num=1,\n consider_scopes=\"all\",\n consider_buckets=\"all\")\n\n bucket = BucketUtils.get_bucket_obj(self.cluster.buckets,\n bucket_dict.keys()[0])\n scope_name = bucket_dict[bucket.name][\"scopes\"].keys()[0]\n collection_name = bucket_dict[bucket.name][\n \"scopes\"][scope_name][\"collections\"].keys()[0]\n scope = BucketUtils.get_scope_obj(\n bucket, scope_name)\n collection = BucketUtils.get_collection_obj(scope, collection_name)\n\n if len(target_vbuckets) == 0:\n self.log.error(\"No target vbucket list generated to load data\")\n remote.disconnect()\n return\n\n self.start_doc_loading_tasks(target_vbuckets, scope_name, collection)\n\n # Induce the error condition\n error_sim.create(error_to_simulate)\n\n if self.allowed_hosts:\n self.set_allowed_hosts()\n\n self.sleep(20, \"Wait before reverting the error condition\")\n # Revert the simulated error condition and close the ssh session\n error_sim.revert(error_to_simulate)\n remote.disconnect()\n\n # Wait for doc loading task to complete\n self.task.jython_task_manager.get_task_result(self.doc_loading_task)\n if self.atomicity:\n self.task.jython_task_manager.get_task_result(\n self.transaction_load_task)\n elif self.N1qltxn:\n self.task.jython_task_manager.get_task_result(\n self.N1ql_load_task)\n\n if len(self.doc_loading_task.fail.keys()) != 0:\n if self.target_node == \"active\" or self.num_replicas in [2, 3]:\n self.log_failure(\"Unwanted failures for keys: %s\"\n % self.doc_loading_task.fail.keys())\n\n validate_passed = \\\n self.durability_helper.validate_durability_exception(\n self.doc_loading_task.fail,\n SDKException.DurabilityAmbiguousException)\n if not validate_passed:\n self.log_failure(\"Unwanted exception seen during validation\")\n\n # Get SDK client for CRUD retries\n sdk_client = self.sdk_client_pool.get_client_for_bucket(self.bucket)\n for doc_key, crud_result in self.doc_loading_task.fail.items():\n result = sdk_client.crud(DocLoading.Bucket.DocOps.CREATE,\n doc_key,\n crud_result[\"value\"],\n replicate_to=self.replicate_to,\n persist_to=self.persist_to,\n durability=self.durability_level,\n timeout=self.sdk_timeout)\n if result[\"status\"] is False:\n self.log_failure(\"Retry of doc_key %s failed: %s\"\n % (doc_key, result[\"error\"]))\n # Close the SDK connection\n self.sdk_client_pool.release_client(sdk_client)\n\n self.validate_test_failure()\n\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n # Update self.num_items and validate docs per collection\n if not self.N1qltxn and self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_sub_doc_with_persistence_issues(self):\n\n if self.durability_level.upper() in [\n Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE,\n Bucket.DurabilityLevel.PERSIST_TO_MAJORITY]:\n self.log.critical(\"Test not valid for persistence durability\")\n return\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n def_bucket = self.cluster.buckets[0]\n\n load_spec = dict()\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"subdoc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 50\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 20\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 10\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 10\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n # Create new docs for sub-doc operations to run\n self.load_data_for_sub_doc_ops()\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs = cbstat_obj[node.ip] .vbucket_list(def_bucket.name,\n \"active\")\n active_vbs_in_target_nodes += active_vbs\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n def_bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n for node in target_nodes:\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Perform CRUDs with induced error scenario is active\n self.log.info(\"Perform 'insert', 'upsert', 'remove' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=0,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(mutation_num=1)\n\n # Wait for doc_loading to complete and validate the doc ops\n self.task_manager.get_task_result(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed with persistence issue\")\n\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Fetch latest failover stats and validate the values are updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(def_bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Failover validation\n val = \\\n failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"Failover stats not updated\")\n\n # Seq_no validation (High level)\n val = \\\n 
vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Disconnect the shell connection\n for node in target_nodes:\n shell_conn[node.ip].disconnect()\n\n self.validate_test_failure()\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_sub_doc_with_process_crash(self):\n if self.num_replicas < 2:\n self.assertTrue(False, msg=\"Required: num_replicas > 1\")\n\n # Override num_of_nodes affected to 1\n self.num_nodes_affected = 1\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n def_bucket = self.cluster.buckets[0]\n\n self.load_data_for_sub_doc_ops()\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs = cbstat_obj[node.ip] .vbucket_list(def_bucket.name,\n \"active\")\n active_vbs_in_target_nodes += active_vbs\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n def_bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Remove active vbuckets from doc_loading to avoid errors\n\n load_spec = dict()\n # load_spec[\"target_vbuckets\"] = list(set(target_vbuckets)\n # ^ set(active_vbs_in_target_nodes))\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"subdoc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 10\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 50\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 25\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 25\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n async_load=True)\n\n self.sleep(5, \"Wait for doc loaders to start loading data\")\n\n for node in target_nodes:\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(mutation_num=2)\n\n # Wait for document_loader tasks to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Sub_doc CRUDs failed with process crash\")\n\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Fetch latest failover stats and validate the values are updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(def_bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Failover validation\n val = \\\n failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n error_msg = \"Failover stats not updated after error condition\"\n self.assertTrue(val, msg=error_msg)\n\n # 
Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Disconnect the shell connection\n for node in target_nodes:\n shell_conn[node.ip].disconnect()\n\n self.validate_test_failure()\n # Doc count validation\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_metering_database(self):\n self.with_deletion = self.input.param(\"delete\", False)\n self.db_name = \"%s-testmetering\" % self.db_name\n # validate initial throughput is 5000/3 = 1666\n for bucket in self.cluster.buckets:\n print(bucket.servers)\n self.assertEqual(self.bucket_util.get_throttle_limit(bucket),\n self.bucket_throttling_limit)\n\n # validate create, update, delete stat\n for op_type in [\"create\", \"update\"]:\n if op_type == \"create\":\n self.load_data(create_start=0, create_end=self.num_items, create_perc=100)\n self.update_expected_stat(self.key_size, self.doc_size,\n 0, self.num_items, self.cluster.buckets)\n if op_type == \"update\":\n self.load_data(update_start=0, update_end=self.num_items, update_perc=100, mutated=1)\n self.update_expected_stat(self.key_size, self.doc_size,\n 0, self.num_items, self.cluster.buckets)\n if self.with_deletion:\n self.log.info(\"performing delete operation\")\n self.load_data(delete_start=0, delete_end=self.num_items, delete_perc=100)\n self.update_expected_stat(self.key_size, self.doc_size,\n 0, self.num_items, self.cluster.buckets)",
"def test_concurrent_failover_timer_reset(self):\n\n services_to_fo = self.failover_order[0].split(\":\")\n self.nodes_to_fail = self.get_nodes_to_fail(services_to_fo,\n dynamic_fo_method=True)\n expected_fo_nodes = self.num_nodes_to_be_failover\n self.__update_server_obj()\n rand_node = choice(self.nodes_to_fail.keys())\n self.__update_unaffected_node()\n self.__display_failure_node_status(\"Nodes to be failed\")\n try:\n self.log.info(\"Starting auto-failover procedure\")\n failover_task = ConcurrentFailoverTask(\n task_manager=self.task_manager, master=self.orchestrator,\n servers_to_fail=self.nodes_to_fail,\n expected_fo_nodes=expected_fo_nodes,\n task_type=\"induce_failure\")\n self.task_manager.add_new_task(failover_task)\n self.sleep(int(self.timeout * 0.7),\n \"Wait before bringing back the failed nodes\")\n\n self.log.info(\"Bringing back '%s' for some time\" % rand_node.ip)\n new_timer = None\n shell = RemoteMachineShellConnection(rand_node)\n cb_err = CouchbaseError(self.log, shell)\n if self.nodes_to_fail[rand_node] == CouchbaseError.STOP_MEMCACHED:\n cb_err.revert(CouchbaseError.STOP_MEMCACHED)\n self.sleep(10, \"Wait before creating failure again\")\n cb_err.create(CouchbaseError.STOP_MEMCACHED)\n new_timer = time()\n elif self.nodes_to_fail[rand_node] == \"stop_couchbase\":\n cb_err.revert(CouchbaseError.STOP_SERVER)\n self.sleep(10, \"Wait before creating failure again\")\n cb_err.create(CouchbaseError.STOP_SERVER)\n new_timer = time()\n shell.disconnect()\n\n # Validate the previous auto-failover task failed\n # due to the random_node coming back online\n self.task_manager.get_task_result(failover_task)\n self.assertFalse(failover_task.result,\n \"Nodes failed over though nodes became active\")\n\n # Validate auto_failover_settings\n self.validate_failover_settings(True, self.timeout,\n 0, self.max_count)\n\n # Make sure the new auto-failover timing is honoured\n new_timer = new_timer + self.timeout\n while int(time()) < new_timer:\n settings = self.rest.get_autofailover_settings()\n if settings.count != 0:\n self.fail(\"Nodes failed over before new failover time\")\n\n self.sleep(10, \"Wait for failover rebalance to trigger\")\n self.rest.monitorRebalance()\n\n # Validate auto_failover_settings after actual auto failover\n self.validate_failover_settings(True, self.timeout,\n expected_fo_nodes, self.max_count)\n finally:\n # Recover all nodes from induced failures\n failover_task = ConcurrentFailoverTask(\n task_manager=self.task_manager, master=self.orchestrator,\n servers_to_fail=self.nodes_to_fail,\n expected_fo_nodes=expected_fo_nodes,\n task_type=\"revert_failure\")\n self.task_manager.add_new_task(failover_task)\n self.task_manager.get_task_result(failover_task)\n self.log.info(\"Rebalance out the failed nodes\")\n result = self.cluster_util.rebalance(self.cluster)\n self.assertTrue(result, \"Final rebalance failed\")\n\n # Perform collection crud + doc_ops after rebalance operation\n self.__perform_doc_ops()",
"def test_s3_table_functions_timeouts(started_cluster):\n with PartitionManager() as pm:\n pm.add_network_delay(node, 1200)\n\n with pytest.raises(QueryRuntimeException):\n node.query(\n \"\"\"\n INSERT INTO FUNCTION s3\n (\n nc_s3,\n filename = 'test_file.tsv.gz',\n format = 'TSV',\n structure = 'number UInt64',\n compression_method = 'gz'\n )\n SELECT * FROM numbers(1000000)\n \"\"\",\n settings=settings,\n )",
"def test_crash_process(self):\n def_bucket = self.cluster.buckets[0]\n target_node = self.getTargetNode()\n remote = RemoteMachineShellConnection(target_node)\n target_vbuckets = range(0, self.cluster.vbuckets)\n retry_exceptions = list()\n self.transaction_load_task = None\n self.doc_loading_task = None\n self.N1ql_load_task = None\n\n # If Memcached is killed, we should not perform KV ops on\n # particular node. If not we can target all nodes for KV operation.\n if self.process_name == \"memcached\":\n target_vbuckets = Cbstats(target_node).vbucket_list(\n def_bucket.name, self.target_node)\n if self.target_node == \"active\":\n retry_exceptions = [SDKException.TimeoutException]\n if len(target_vbuckets) == 0:\n self.log.error(\"No target vbucket list generated to load data\")\n remote.disconnect()\n return\n\n bucket_dict = BucketUtils.get_random_collections(\n self.cluster.buckets,\n req_num=1,\n consider_scopes=\"all\",\n consider_buckets=\"all\")\n\n bucket = BucketUtils.get_bucket_obj(self.cluster.buckets,\n bucket_dict.keys()[0])\n scope_name = bucket_dict[bucket.name][\"scopes\"].keys()[0]\n collection_name = bucket_dict[bucket.name][\n \"scopes\"][scope_name][\"collections\"].keys()[0]\n scope = BucketUtils.get_scope_obj(\n bucket, scope_name)\n collection = BucketUtils.get_collection_obj(\n scope, collection_name)\n\n self.start_doc_loading_tasks(target_vbuckets, scope_name, collection)\n\n task_info = dict()\n task_info[self.doc_loading_task] = \\\n self.bucket_util.get_doc_op_info_dict(\n def_bucket, DocLoading.Bucket.DocOps.CREATE, 0,\n replicate_to=self.replicate_to, persist_to=self.persist_to,\n durability=self.durability_level,\n timeout=self.sdk_timeout, time_unit=\"seconds\",\n retry_exceptions=retry_exceptions)\n\n self.sleep(10, \"Wait for doc_ops to start\")\n self.log.info(\"Killing {0}:{1} on node {2}\"\n .format(self.process_name, self.service_name,\n target_node.ip))\n remote.kill_process(self.process_name, self.service_name,\n signum=signum[self.sig_type])\n remote.disconnect()\n # Wait for tasks completion and validate failures\n if self.transaction_load_task:\n self.task.jython_task_manager.get_task_result(\n self.transaction_load_task)\n if self.N1qltxn:\n self.task.jython_task_manager.get_task_result(\n self.N1ql_load_task)\n self.task_manager.get_task_result(self.doc_loading_task)\n self.bucket_util.verify_doc_op_task_exceptions(task_info,\n self.cluster)\n self.bucket_util.log_doc_ops_task_failures(task_info)\n\n # Verification stats\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 2*self.num_items\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"pending_writes\"] = 0\n if self.__is_sync_write_enabled:\n verification_dict[\"sync_write_committed_count\"] = 2*self.num_items\n\n if self.bucket_type == Bucket.Type.EPHEMERAL \\\n and self.process_name == \"memcached\":\n result = self.task.rebalance(self.cluster, [], [])\n self.assertTrue(result, \"Rebalance failed\")\n\n # Validate doc count\n if self.process_name != \"memcached\":\n stats_failed = \\\n self.durability_helper.verify_vbucket_details_stats(\n def_bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if stats_failed:\n self.fail(\"Cbstats verification failed\")\n\n # Doc count validation per collection\n if not self.N1qltxn and self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def test_wait_for_predicate_timeout(self):\n predicate_mock = mock.MagicMock(side_effect=[True, True, True])\n with self.assertRaises(TimeoutError):\n train_utils.wait_for_predicate(predicate_mock, num_retries=3)",
"def assert_timeout(self) -> None:",
"def simulate_node_failure(node_ips, max_duration, tests_completed):\n run = True\n l.info(\"START Cassandra Node Failure Simulation. Entering.\")\n while run:\n # If stress-tests are still running continue with node failure simulation\n if not tests_completed.isSet():\n # Select 'random' node from Cassandra Cluster\n node_ip = select_random_node(node_ips)\n # Determine delay before stopping cassandra node (to simulate failure / node down)\n duration_secs = max_duration*60\n time_next_stop = random.randint(1, duration_secs/4)\n l.debug(\"STOP programmed in %s seconds\" % time_next_stop)\n # Wait\n time.sleep(time_next_stop)\n ssh_fail = False\n # Stop Cassandra Node (simulate failure / stop the service)\n stop_cmd = \"sudo service cassandra stop\"\n l.debug(\"STOP Cassandra Node: %s\"%node_ip)\n try:\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(str(node_ip))\n l.debug(\"[Simulate Cassandra Node Failure] Connected to host: %s\" % node_ip)\n except paramiko.AuthenticationException as e:\n l.error(\"Authentication failed when connecting to %s. ERROR: %s\" % (node_ip, e))\n ssh_fail = True\n except:\n l.error(\"Could not SSH to %s, waiting for it to start\" % node_ip)\n ssh_fail = True\n if not ssh_fail:\n # Send the command to STOP cassandra node\n ssh.exec_command(stop_cmd)\n # Determine delay before starting cassandra node (to simulate rejoin to the cluster)\n time_next_rejoin = random.randint(1, duration_secs/4)\n l.debug(\"START programmed in %s seconds\" % time_next_rejoin)\n time.sleep(time_next_rejoin)\n # Start Cassandra Node (simulate rejoin / start the service)\n start_cmd = \"sudo service cassandra start\"\n l.debug(\"START Cassandra Node: %s\"%node_ip)\n # Send the command (non-blocking)\n ssh.exec_command(start_cmd)\n # Disconnect from the host\n l.debug(\"Closing SSH connection to host: %s\" % node_ip)\n ssh.close()\n run=False\n else:\n # Tests Complete has been signaled\n run=False\n l.info(\"END node failure simulation. Exiting.\")",
"def test_bulk_round_trip_with_timeouts(self):\n self._test_bulk_round_trip(nodes=1, partitioner=\"murmur3\", num_operations=100000,\n configuration_options={'range_request_timeout_in_ms': '200',\n 'write_request_timeout_in_ms': '100'},\n copy_from_options={'MAXINSERTERRORS': -1},\n skip_count_checks=True)",
"def test_MB_51219(self):\n len_of_nodes_to_afo = len(self.failover_order[0].split(\":\"))\n nodes_to_fo = dict()\n nodes_in_cluster = self.rest.get_nodes()\n for node in nodes_in_cluster:\n if len_of_nodes_to_afo <= 0:\n break\n if str(self.cluster.master.ip) == str(node.ip):\n continue\n nodes_to_fo[node] = self.failover_method\n len_of_nodes_to_afo -= 1\n self.cluster_util.update_cluster_nodes_service_list(self.cluster)\n self.nodes_to_fail = nodes_to_fo\n self.__update_server_obj()\n try:\n failover_task = ConcurrentFailoverTask(\n task_manager=self.task_manager, master=self.orchestrator,\n servers_to_fail=self.nodes_to_fail,\n expected_fo_nodes=self.fo_events,\n task_type=\"induce_failure\")\n self.task_manager.add_new_task(failover_task)\n self.task_manager.get_task_result(failover_task)\n dictionary = dict(list(self.nodes_to_fail.items())[:1])\n failover_task = ConcurrentFailoverTask(\n task_manager=self.task_manager, master=self.orchestrator,\n servers_to_fail=dictionary,\n task_type=\"revert_failure\")\n self.task_manager.add_new_task(failover_task)\n self.task_manager.get_task_result(failover_task)\n timeout = int(time()) + 15\n task_id_changed = False\n self.prev_rebalance_status_id = None\n while not task_id_changed and int(time()) < timeout:\n server_task = self.rest.ns_server_tasks(\n task_type=\"rebalance\", task_sub_type=\"failover\")\n if server_task and server_task[\"statusId\"] != \\\n self.prev_rebalance_status_id:\n task_id_changed = True\n self.prev_rebalance_status_id = server_task[\"statusId\"]\n self.log.debug(\"New failover status id: %s\"\n % server_task[\"statusId\"])\n self.assertTrue(task_id_changed,\n \"Fail-over did not happen as expected\")\n self.bucket_util._wait_warmup_completed(self.cluster.buckets[0],\n servers=[\n self.cluster.master],\n wait_time=30)\n finally:\n # reverting failure from all the nodes\n failover_task = ConcurrentFailoverTask(\n task_manager=self.task_manager, master=self.orchestrator,\n servers_to_fail=self.nodes_to_fail,\n task_type=\"revert_failure\")\n self.task_manager.add_new_task(failover_task)\n self.task_manager.get_task_result(failover_task)\n result = self.cluster_util.rebalance(self.cluster)\n self.assertTrue(result, \"Final re-balance failed\")",
"def test_timeout(self):\n start = time.time()\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.03)\n # be a little lenient for slow computers:\n self.assertTrue(abs(time.time() - start) < 0.05)",
"def test_cli_bucket_maxttl_setting(self):\n self.rest.force_eject_node()\n\n shell = RemoteMachineShellConnection(self.master)\n if self.input.param('enable_ipv6', False):\n self.reset_and_enable_ipv6(self.master)\n set_index_storage_type = \" --index-storage-setting=memopt \"\n options = ' --cluster-port=8091 \\\n --cluster-ramsize=300 \\\n --cluster-index-ramsize=300 \\\n --services=data,index,query %s ' \\\n % set_index_storage_type\n o, e = shell.execute_couchbase_cli(cli_command=\"cluster-init\",\n options=options)\n self.assertEqual(o[0], 'SUCCESS: Cluster initialized')\n\n self.log.info(\"Add new user after reset node! \")\n self.add_built_in_server_user(node=self.master)\n bucket_type = self.input.param(\"bucket_type\", \"couchbase\")\n options = ' --bucket=default \\\n --bucket-type={0} \\\n --bucket-ramsize=200 \\\n --max-ttl=400 \\\n --wait '.format(bucket_type)\n o, e = shell.execute_couchbase_cli(cli_command=\"bucket-create\",\n options=options)\n self.assertEqual(o[0], 'SUCCESS: Bucket created')\n\n self.sleep(30, \"Sleep before loading doc using cbdocloader\")\n\n cluster_flag = \"-c\"\n bucket_quota_flag = \"-m\"\n data_set_location_flag = \"-d\"\n shell.execute_command(\n \"{0}cbdocloader -u Administrator -p password \"\n \"{3} {1} -b default {4} 100 {5} {2}travel-sample.zip\"\n .format(self.bin_path, self.master.ip, self.sample_path,\n cluster_flag, bucket_quota_flag,\n data_set_location_flag))\n shell.disconnect()\n\n buckets = RestConnection(self.master).get_buckets()\n for bucket in buckets:\n if bucket.name != \"default\":\n self.fail(\"default bucket did not get created\")\n\n \"\"\" check for load data into travel-sample bucket \"\"\"\n end_time = time.time() + 120\n num_actual = 0\n while time.time() < end_time:\n self.sleep(10)\n num_actual = self.get_item_count(self.master, \"default\")\n if int(num_actual) == self.total_items_travel_sample:\n break\n self.assertTrue(int(num_actual) == self.total_items_travel_sample,\n \"Items number expected %s, actual %s\"\n % (self.total_items_travel_sample, num_actual))\n self.log.info(\"Total items %s \" % num_actual)\n self.sleep(400, \"Waiting for docs to expire as per maxttl\")\n self.expire_pager([self.master])\n self.sleep(20, \"Wait for expiry_purger to run\")\n num_actual = self.get_item_count(self.master, \"default\")\n if int(num_actual) != 0:\n self.fail(\"Item count is not 0 after maxttl has elapsed\")\n else:\n self.log.info(\"SUCCESS: Item count is 0 after maxttl has elapsed\")",
"def test_unavailable_server(cluster):\n node2 = cluster.instances[\"node2\"]\n global uuids\n node2.query(\n \"\"\"\n CREATE TABLE test0 UUID '{}'\n (id Int32) ENGINE = MergeTree() ORDER BY id\n SETTINGS storage_policy = 'web';\n \"\"\".format(\n uuids[0]\n )\n )\n node2.stop_clickhouse()\n try:\n # NOTE: you cannot use separate disk instead, since MergeTree engine will\n # try to lookup parts on all disks (to look unexpected disks with parts)\n # and fail because of unavailable server.\n node2.exec_in_container(\n [\n \"bash\",\n \"-c\",\n \"sed -i 's#http://nginx:80/test1/#http://nginx:8080/test1/#' /etc/clickhouse-server/config.d/storage_conf_web.xml\",\n ]\n )\n with pytest.raises(Exception):\n # HTTP retries with backup can take awhile\n node2.start_clickhouse(start_wait_sec=120, retry_start=False)\n assert node2.contains_in_log(\n \"Caught exception while loading metadata.*Connection refused\"\n )\n assert node2.contains_in_log(\n \"HTTP request to \\`http://nginx:8080/test1/.*\\` failed at try 1/10 with bytes read: 0/unknown. Error: Connection refused.\"\n )\n finally:\n node2.exec_in_container(\n [\n \"bash\",\n \"-c\",\n \"sed -i 's#http://nginx:8080/test1/#http://nginx:80/test1/#' /etc/clickhouse-server/config.d/storage_conf_web.xml\",\n ]\n )\n node2.start_clickhouse()\n node2.query(\"DROP TABLE test0 SYNC\")",
"def check_server_drop():\n try:\n from tvm.rpc import tracker, proxy, base\n from tvm.rpc.base import TrackerCode\n\n @tvm.register_func(\"rpc.test2.addone\")\n def addone(x):\n return x + 1\n\n def _put(tclient, value):\n base.sendjson(tclient._sock, value)\n base.recvjson(tclient._sock)\n\n tserver = tracker.Tracker(\"localhost\", 8888)\n tproxy = proxy.Proxy(\"localhost\", 8881,\n tracker_addr=(\"localhost\", tserver.port))\n tclient = rpc.connect_tracker(\"localhost\", tserver.port)\n\n server0 = rpc.Server(\n \"localhost\", port=9099,\n tracker_addr=(\"localhost\", tserver.port),\n key=\"abc\")\n server1 = rpc.Server(\n \"localhost\", port=9099,\n tracker_addr=(\"localhost\", tserver.port),\n key=\"xyz\")\n server2 = rpc.Server(\n \"localhost\", tproxy.port, is_proxy=True,\n key=\"xyz\")\n server3 = rpc.Server(\n \"localhost\", tproxy.port, is_proxy=True,\n key=\"xyz1\")\n\n # Fault tolerence to un-handled requested value\n _put(tclient, [TrackerCode.REQUEST, \"abc\", \"\", 1])\n _put(tclient, [TrackerCode.REQUEST, \"xyz1\", \"\", 1])\n\n # Fault tolerence to stale worker value\n _put(tclient, [TrackerCode.PUT, \"xyz\", (server1.port, \"abc\")])\n _put(tclient, [TrackerCode.PUT, \"xyz\", (server1.port, \"abcxxx\")])\n _put(tclient, [TrackerCode.PUT, \"xyz\", (tproxy.port, \"abcxxx11\")])\n\n # Fault tolerence server timeout\n def check_timeout(timeout, sleeptime):\n def myfunc(remote):\n time.sleep(sleeptime)\n f1 = remote.get_function(\"rpc.test2.addone\")\n assert f1(10) == 11\n try:\n tclient.request_and_run(\"xyz\", myfunc, session_timeout=timeout)\n except RuntimeError:\n pass\n print(tclient.text_summary())\n try:\n remote = tclient.request(\"xyz\", priority=0, session_timeout=timeout)\n remote2 = tclient.request(\"xyz\", session_timeout=timeout)\n time.sleep(sleeptime)\n f1 = remote.get_function(\"rpc.test2.addone\")\n assert f1(10) == 11\n f1 = remote2.get_function(\"rpc.test2.addone\")\n assert f1(10) == 11\n\n except tvm.TVMError as e:\n pass\n remote3 = tclient.request(\"abc\")\n f1 = remote3.get_function(\"rpc.test2.addone\")\n remote3 = tclient.request(\"xyz1\")\n f1 = remote3.get_function(\"rpc.test2.addone\")\n assert f1(10) == 11\n\n check_timeout(0.01, 0.1)\n check_timeout(2, 0)\n tserver.terminate()\n server0.terminate()\n server1.terminate()\n server2.terminate()\n server3.terminate()\n tproxy.terminate()\n except ImportError:\n print(\"Skip because tornado is not available\")",
"def test_socket_timeout():\n schema = vol.Schema(cv.socket_timeout)\n\n with pytest.raises(vol.Invalid):\n schema(0.0)\n\n with pytest.raises(vol.Invalid):\n schema(-1)\n\n assert schema(None) == _GLOBAL_DEFAULT_TIMEOUT\n\n assert schema(1) == 1.0",
"def testFailure():\n run(\"chariot-me\") #Start management-engine without initial deplflag\n egress()",
"def test_check_cluster1():\n with pytest.raises(AssertionError) as err_info:\n Check_BuoyDC.check_cluster(cluster_fail_1)\n assert str(err_info.value) == 'cluster type input not within range of index'",
"def test_limits_boundary_values(self):\n\n def check_error_msg(status, output, storagelimit=False):\n import json\n if status == False:\n content = json.loads(output)[\"errors\"]\n if storagelimit:\n actual_error = content[\"dataStorageLimit\"]\n expected_error = '\"dataStorageLimit\" must be an integer between -1 and 100000'\n else:\n actual_error = content[\"dataThrottleLimit\"]\n expected_error = '\"dataThrottleLimit\" must be an integer between -1 and 2147483647'\n self.assertEqual(actual_error, expected_error)\n else:\n self.fail(\"expected to fail but passsed\")\n\n bucket = self.cluster.buckets[0]\n server = random.choice(bucket.servers)\n bucket_helper = BucketHelper(server)\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n throttle_limit=-2)\n check_error_msg(status, content)\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n throttle_limit=2147483648)\n check_error_msg(status, content)\n\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n storage_limit=-2)\n check_error_msg(status, content, True)\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n storage_limit=2147483648)\n check_error_msg(status, content, True)\n\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n throttle_limit=-2,\n storage_limit=-2)\n check_error_msg(status, content)\n check_error_msg(status, content, True)\n status, content = bucket_helper.set_throttle_n_storage_limit(bucket.name,\n throttle_limit=2147483648,\n storage_limit=2147483648)\n check_error_msg(status, content)\n check_error_msg(status, content, True)",
"def _check_timeouts(self, chunk_timeout, total_timeout):\n cur_time = time()\n\n if chunk_timeout is not None and cur_time > self._chunk_time + chunk_timeout:\n raise ChunkTimeout('Item timeout expired.')\n elif total_timeout is not None and cur_time > self._total_time + total_timeout:\n raise TotalTimeout('Total timeout expired.')",
"def test_read_cluster_resource_quota_status(self):\n pass",
"def test_wait_timeout_inheritance():\n # confirm subclassed from pypyr root error\n err = WaitTimeOut()\n assert isinstance(err, PypyrAwsError)\n assert isinstance(err, PlugInError)\n assert isinstance(err, PypyrError)",
"def test_create_data(self):\n\n #######################################################################\n # test status_code == 200, timeout_retries = 0\n mock_connection = mock_connection_method('post', status_code=200)\n batch = Batch(mock_connection)\n batch._create_data('references', ReferenceBatchRequest())\n mock_connection.post.assert_called_with(\n path=\"/batch/references\",\n weaviate_object=[],\n )\n self.assertEqual(mock_connection.post.call_count, 1)\n\n #######################################################################\n # timeout_retries = 2, and no exception raised\n mock_connection = mock_connection_method('post', status_code=200)\n batch = Batch(mock_connection)\n batch.timeout_retries = 2\n batch._create_data('references', ReferenceBatchRequest())\n mock_connection.post.assert_called_with(\n path=\"/batch/references\",\n weaviate_object=[],\n )\n self.assertEqual(mock_connection.post.call_count, 1)\n\n #######################################################################\n # test errors\n #######################################################################\n ## error messages\n requests_error_message = 'Batch was not added to weaviate.'\n read_timeout_error_message = lambda data_type: (f\"The '{data_type}' creation was cancelled because it took \"\n \"longer than the configured timeout of 100s. \"\n \"Try reducing the batch size (currently 0) to a lower value. \"\n \"Aim to on average complete batch request within less than 10s\")\n \n unexpected_error_message = lambda data: f\"Create {data} in batch\"\n\n #######################################################################\n ## test RequestsConnectionError\n mock_connection = mock_connection_method('post', side_effect=RequestsConnectionError('Test!'))\n batch = Batch(mock_connection)\n with self.assertRaises(RequestsConnectionError) as error:\n batch._create_data('objects', ObjectsBatchRequest())\n check_error_message(self, error, requests_error_message)\n mock_connection.post.assert_called_with(\n path=\"/batch/objects\",\n weaviate_object={\"fields\": [\"ALL\"], \"objects\": []},\n )\n\n ## test ReadTimeout, timeout_retries = 0\n mock_connection = mock_connection_method('post', side_effect = ReadTimeout('Test!'))\n mock_connection.timeout_config = (2, 100)\n batch = Batch(mock_connection)\n with self.assertRaises(ReadTimeout) as error:\n batch._create_data('references', ReferenceBatchRequest())\n check_startswith_error_message(self, error, read_timeout_error_message('references'))\n mock_connection.post.assert_called_with(\n path=\"/batch/references\",\n weaviate_object=[],\n )\n self.assertEqual(mock_connection.post.call_count, 1)\n\n ## test ReadTimeout, timeout_retries = 3\n mock_connection = mock_connection_method('post', side_effect = ReadTimeout('Test!'))\n mock_connection.timeout_config = (2, 100)\n batch = Batch(mock_connection)\n batch.timeout_retries = 3\n with self.assertRaises(ReadTimeout) as error:\n batch._create_data('objects', ObjectsBatchRequest())\n check_startswith_error_message(self, error, read_timeout_error_message('objects'))\n mock_connection.post.assert_called_with(\n path=\"/batch/objects\",\n weaviate_object={'fields' : ['ALL'], 'objects': []},\n )\n self.assertEqual(mock_connection.post.call_count, 4)\n\n ## test status_code != 200\n mock_connection = mock_connection_method('post', status_code=204)\n batch = Batch(mock_connection)\n with self.assertRaises(UnexpectedStatusCodeException) as error:\n batch._create_data('references', ReferenceBatchRequest())\n 
check_startswith_error_message(self, error, unexpected_error_message('references'))\n mock_connection.post.assert_called_with(\n path=\"/batch/references\",\n weaviate_object=[],\n )",
"def test_basic_ops(self):\n load_spec = dict()\n verification_dict = dict()\n\n # Stat validation reference variables\n verification_dict[\"ops_create\"] = 0\n verification_dict[\"ops_update\"] = 0\n verification_dict[\"ops_delete\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"sync_write_committed_count\"] = 0\n\n for _, scope in self.bucket.scopes.items():\n for _, collection in scope.collections.items():\n verification_dict[\"ops_create\"] += collection.num_items\n if self.durability_level in self.supported_d_levels:\n verification_dict[\"sync_write_committed_count\"] \\\n += collection.num_items\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n # load_spec[\"target_vbuckets\"] = []\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \\\n = \"test_collections\"\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=2,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(verification_dict=verification_dict)\n\n # Wait for doc_loading to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed\")\n self.validate_test_failure()\n\n self.log.info(\"Validating doc_count in buckets\")\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)\n\n # Validate vbucket stats\n self.update_verification_dict_from_collection_task(verification_dict,\n doc_loading_task)\n\n failed = self.durability_helper.verify_vbucket_details_stats(\n self.bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if failed:\n self.fail(\"Cbstat vbucket-details verification failed\")\n\n self.validate_cruds_from_collection_mutation(doc_loading_task)",
"def testTrialErrored2(self):\n stats = self.default_statistics()\n trial_count = stats[str(0)][\"n\"] + stats[str(1)][\"n\"]\n sched, mock_runner = self.schedulerSetup(trial_count)\n trials = sched._state[\"bracket\"].current_trials()\n for t in trials[:-1]:\n mock_runner._launch_trial(t)\n sched.on_trial_result(\n mock_runner, t, result(stats[str(1)][\"r\"], 10))\n\n mock_runner._launch_trial(trials[-1])\n sched.on_trial_error(mock_runner, trials[-1])\n self.assertEqual(len(sched._state[\"bracket\"].current_trials()),\n self.downscale(stats[str(1)][\"n\"], sched))",
"def test_timeout_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time() - 11\n try:\n connection._timeout_exceeded(start)\n except NetmikoTimeoutException as exc:\n assert isinstance(exc, NetmikoTimeoutException)\n return\n\n assert False"
] | [
"0.72220546",
"0.7006495",
"0.66652346",
"0.6464856",
"0.6201903",
"0.6171258",
"0.60649824",
"0.6048298",
"0.60285425",
"0.59659433",
"0.5917591",
"0.5882442",
"0.5838443",
"0.5833825",
"0.57513314",
"0.5749805",
"0.5723556",
"0.5720168",
"0.5686857",
"0.5666851",
"0.5659723",
"0.565627",
"0.5622322",
"0.5619692",
"0.5611754",
"0.56105524",
"0.560133",
"0.559338",
"0.5589031",
"0.558134"
] | 0.7034123 | 1 |
Given a field, convert to a JSON serializable type | def _make_serializable(self, field):
if isinstance(field, datetime):
return str(field)
elif isinstance(field, Decimal):
return float(field)
else:
return field | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def serialize_field(value):\r\n if isinstance(value, basestring):\r\n return value\r\n\r\n return json.dumps(value, cls=EdxJSONEncoder)",
"def _FieldToJsonObject(self, field, value):\n if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:\n return self._MessageToJsonObject(value)\n elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:\n if self.use_integers_for_enums:\n return value\n if field.enum_type.full_name == 'google.protobuf.NullValue':\n return None\n enum_value = field.enum_type.values_by_number.get(value, None)\n if enum_value is not None:\n return enum_value.name\n else:\n if field.file.syntax == 'proto3':\n return value\n raise SerializeToJsonError('Enum field contains an integer value '\n 'which can not mapped to an enum value.')\n elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:\n if field.type == descriptor.FieldDescriptor.TYPE_BYTES:\n # Use base64 Data encoding for bytes\n return base64.b64encode(value).decode('utf-8')\n else:\n return value\n elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:\n return bool(value)\n elif field.cpp_type in _INT64_TYPES:\n return str(value)\n elif field.cpp_type in _FLOAT_TYPES:\n if math.isinf(value):\n if value < 0.0:\n return _NEG_INFINITY\n else:\n return _INFINITY\n if math.isnan(value):\n return _NAN\n if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:\n if self.float_format:\n return float(format(value, self.float_format))\n else:\n return type_checkers.ToShortestFloat(value)\n\n return value",
"def mongo_to_python_type(field, data):\n if isinstance(field, ObjectIdField):\n return str(data)\n elif isinstance(field, DecimalField):\n return data\n elif isinstance(field, BooleanField):\n return data\n else:\n return str(data)",
"def field_to_object(value):\n mapping = {\n str: StringField,\n int: IntField,\n list: ListField,\n dict: DictField,\n datetime.datetime: DateField,\n }\n return mapping.get(type(value), AnyField)(value)",
"def deserialize_field(field, value):\r\n try:\r\n deserialized = json.loads(value)\r\n if deserialized is None:\r\n return deserialized\r\n try:\r\n field.from_json(deserialized)\r\n return deserialized\r\n except (ValueError, TypeError):\r\n # Support older serialized version, which was just a string, not result of json.dumps.\r\n # If the deserialized version cannot be converted to the type (via from_json),\r\n # just return the original value. For example, if a string value of '3.4' was\r\n # stored for a String field (before we started storing the result of json.dumps),\r\n # then it would be deserialized as 3.4, but 3.4 is not supported for a String\r\n # field. Therefore field.from_json(3.4) will throw an Error, and we should\r\n # actually return the original value of '3.4'.\r\n return value\r\n\r\n except (ValueError, TypeError):\r\n # Support older serialized version.\r\n return value",
"def to_python(self, value):\n # Composite types are serialized as JSON blobs. If BaseField.to_python\n # is called with a string, assume it was produced by value_to_string\n # and decode it\n if isinstance(value, str):\n try:\n value = json.loads(value)\n except ValueError as exc:\n raise ValidationError(\n self.error_messages[\"bad_json\"],\n code=\"bad_json\",\n ) from exc\n\n return self.Meta.model(\n **{\n name: field.to_python(value.get(name))\n for name, field in self.Meta.fields\n }\n )\n\n return super().to_python(value)",
"def encode_value(x):\n for serializer in string_serializers:\n if isinstance(x, serializer.type):\n return {\"$type\": serializer.name, \"$value\": serializer.to_json(x)}\n\n raise TypeError(type(x)) # pragma: no cover",
"def serialize_field(field: str) -> Callable[[Dict], None]:\n\n def f(data: Dict):\n \"\"\"\n Serialize specific field of type list\n \"\"\"\n if field in data and isinstance(data[field], List):\n data[field] = \",\".join(data[field])\n\n return f",
"def jsonSerial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n\n if isinstance(obj, enum.Enum):\n return obj.value\n\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def serialize(cls, obj):\n return json.dumps(obj, cls=CustomTypeEncoder)",
"def json_serialize(value):\n if value is None or isinstance(value, (int, long, float, basestring, bool)):\n return value\n elif isinstance(value, (list, tuple, set)):\n return [json_serialize(v) for v in value]\n elif isinstance(value, dict):\n for k, v in value.items():\n value[k] = json_serialize(v)\n return value\n # return date/time in isoformat\n elif isinstance(value, (dt.datetime, dt.date, dt.time)):\n return value.isoformat()\n elif isinstance(value, ActiveRecordMixin):\n return _model_to_dict(value)\n else:\n return unicode(value)",
"def json_serial(obj):\n if isinstance(obj, LegipyModel):\n return obj.to_json()\n elif isinstance(obj, (datetime.date, datetime.datetime)):\n return obj.isoformat()\n raise TypeError(\"Type {0} not serializable\".format(repr(type(obj))))",
"def json_serial2(self, obj):\n if isinstance(obj, (datetime.datetime, datetime.date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def to_field(obj):\r\n\r\n\r\n if isinstance(obj, Field):\r\n field = obj\r\n else:\r\n d = { \"storage_type\": \"unknown\" }\r\n\r\n if isinstance(obj, basestring):\r\n d[\"name\"] = obj\r\n elif type(obj) == tuple or type(obj) == list:\r\n d[\"name\"] = obj[0]\r\n try:\r\n d[\"storage_type\"] = obj[1]\r\n try:\r\n d[\"analytical_type\"] = obj[2]\r\n except:\r\n pass\r\n except:\r\n pass\r\n else: # assume dictionary\r\n d[\"name\"] = obj[\"name\"]\r\n d[\"label\"] = obj.get(\"label\")\r\n d[\"storage_type\"] = obj.get(\"storage_type\")\r\n d[\"analytical_type\"] = obj.get(\"analytical_type\")\r\n d[\"adapter_storage_type\"] = obj.get(\"adapter_storage_type\")\r\n\r\n if \"analytical_type\" not in d:\r\n storage_type = d.get(\"storage_type\")\r\n if storage_type:\r\n deftype = default_analytical_types.get(storage_type)\r\n d[\"analytical_type\"] = deftype or \"typeless\"\r\n else:\r\n d[\"analytical_type\"] = \"typeless\"\r\n\r\n field = Field(**d)\r\n return field",
"def _to_base_type(self, value):\n if value is None:\n return ''\n else:\n return value.to_json()",
"def serialize_to_python(cls, value):\n raise NotImplementedError",
"def json_serial(obj):\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\r\n\r\n\t\tif isinstance(obj,(datetime, date)):\r\n\t\t\treturn obj.isoformat()\r\n\t\traise TypeError (\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return obj.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def serialize_field(self, payload, model, field, mapping):\n try:\n if field in model.fk_field_names():\n payload[field] = self.serialize_related_field(\n model, field, mapping\n )\n else:\n payload[field] = getattr(model, field)\n except SkipField:\n payload.pop(field, None)",
"def test_json_dump(self, force_field):\n import json\n\n json.dumps(force_field._to_smirnoff_data())",
"def json_serial(obj):\n if isinstance(obj, (dt.datetime, dt.date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date)):\n return str(obj) #.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n if isinstance(obj, (datetime.datetime, datetime.date)):\n return obj.isoformat()\n raise TypeError(\"Type %s not serializable\" % type(obj))",
"def json_serial(obj):\n\n if isinstance(obj, (datetime, date,date)):\n return obj.isoformat()\n raise TypeError (\"Type %s not serializable\" % type(obj))"
] | [
"0.7454555",
"0.7386094",
"0.69976497",
"0.6777119",
"0.6751909",
"0.66603667",
"0.64811426",
"0.63531446",
"0.6318845",
"0.6310226",
"0.62857896",
"0.6274474",
"0.6269156",
"0.62448287",
"0.6216889",
"0.61956656",
"0.61125726",
"0.61125726",
"0.6108365",
"0.6091767",
"0.608519",
"0.608519",
"0.608519",
"0.608519",
"0.60828644",
"0.6073148",
"0.60731155",
"0.6071532",
"0.6069946",
"0.6068627"
] | 0.82485443 | 0 |
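A minimal standalone sketch of the field-serialisation helper described by the row above. The module-level name `make_serializable` and the sample row are illustrative assumptions, not taken from the dataset; the logic mirrors the document code (datetime → string, Decimal → float, everything else unchanged).

```python
from datetime import datetime
from decimal import Decimal

def make_serializable(field):
    # datetimes are not JSON serializable, so fall back to their string form
    if isinstance(field, datetime):
        return str(field)
    # Decimals become plain floats so json.dumps can handle them
    if isinstance(field, Decimal):
        return float(field)
    # everything else (str, int, None, ...) passes through unchanged
    return field

row = {"modified": datetime(2020, 1, 1, 12, 0), "price": Decimal("9.99"), "name": "beam"}
print({key: make_serializable(value) for key, value in row.items()})
# {'modified': '2020-01-01 12:00:00', 'price': 9.99, 'name': 'beam'}
```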
Given a dictionary of related entity names, nest the related entities into the given dictionary representation of the original entity. | def _nest_dictionary_include(self, dictionary, include):
related_entity = self.get_related_entity(list(include)[0])
if not isinstance(related_entity, InstrumentedList):
dictionary[
related_entity.__singularfieldname__
] = related_entity.to_nested_dict(include[list(include)[0]])
else:
for entity in related_entity:
if entity.__pluralfieldname__ in dictionary.keys():
dictionary[entity.__pluralfieldname__].append(
entity.to_nested_dict(include[list(include)[0]]),
)
else:
dictionary[entity.__pluralfieldname__] = [
entity.to_nested_dict(include[list(include)[0]]),
] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _nest_string_include(self, dictionary, include):\n related_entity = self.get_related_entity(include)\n if not isinstance(related_entity, InstrumentedList):\n dictionary[related_entity.__singularfieldname__] = related_entity.to_dict()\n else:\n for entity in related_entity:\n if entity.__pluralfieldname__ in dictionary.keys():\n dictionary[entity.__pluralfieldname__].append(entity.to_dict())\n else:\n dictionary[entity.__pluralfieldname__] = [entity.to_dict()]",
"def translate_dict(entity_dict, config):\n\n dump_accepted_entity_dict = OrderedDict()\n\n for key in entity_dict:\n if key in config[\"ent_keys_dump\"]:\n dump_accepted_entity_dict[config[\n \"ent_keys_dump\"][key]] = entity_dict[key]\n\n else:\n dump_accepted_entity_dict[key] = entity_dict[key]\n\n return dump_accepted_entity_dict",
"def encode_with_relatives(entity, **kwargs):\n exclude_models = kwargs.pop('exclude_models', None)\n entities, models = collect_related_instanses(entity, exclude_models)\n parsed_data = serializers.serialize('json', entities, use_natural_keys=True, **kwargs)\n parsed_data = json.loads(parsed_data)\n return create_encoded_entity(parsed_data, entity)",
"def simplify_dict(d: Dict[str, Any]) -> Dict[str, Any]:\n return {\n k: [ast_to_testing_string(n) for n in v] if k == \"children\" else v\n for k, v in d.items()\n }",
"def _process_entity_map(entity_type, entity_map, normalizer):\n item_map = {}\n syn_map = {}\n seen_ids = []\n for item in entity_map.get(\"entities\"):\n cname = item[\"cname\"]\n item_id = item.get(\"id\")\n if cname in item_map:\n msg = \"Canonical name %s specified in %s entity map multiple times\"\n logger.debug(msg, cname, entity_type)\n if item_id:\n if item_id in seen_ids:\n msg = \"Item id {!r} specified in {!r} entity map multiple times\"\n raise ValueError(msg.format(item_id, entity_type))\n seen_ids.append(item_id)\n\n aliases = [cname] + item.pop(\"whitelist\", [])\n items_for_cname = item_map.get(cname, [])\n items_for_cname.append(item)\n item_map[cname] = items_for_cname\n for alias in aliases:\n norm_alias = normalizer(alias)\n if norm_alias in syn_map:\n msg = \"Synonym %s specified in %s entity map multiple times\"\n logger.debug(msg, cname, entity_type)\n cnames_for_syn = syn_map.get(norm_alias, [])\n cnames_for_syn.append(cname)\n syn_map[norm_alias] = list(set(cnames_for_syn))\n\n return {\"items\": item_map, \"synonyms\": syn_map}",
"def _dictRoundTripNormalize(self, treedict):\n for key, value in list(treedict.items()):\n if isinstance(value, dict):\n self._dictRoundTripNormalize(value)\n\n # Expand treedict[(\"group\", \"attr_name\")]\n # to treedict[\"group\"][\"attr_name\"]\n for key, value in list(treedict.items()):\n if not isinstance(key, tuple):\n continue\n # Put the attribute inside the group\n grpname, attr = key\n if not grpname:\n continue\n group = treedict.setdefault(grpname, dict())\n if isinstance(group, dict):\n del treedict[key]\n group[(\"\", attr)] = value",
"def encode_with_full_relatives(entity, **kwargs):\n exclude_models = kwargs.pop('exclude_models', None)\n entities, models = collect_related_instanses(entity, exclude_models)\n fk_excludes = kwargs.pop('fk_excludes', set())\n if fk_excludes:\n fk_excludes = set(fk_excludes)\n for model in models:\n fk_excludes.update(map(attrgetter('field.name'),\n model._meta.get_all_related_objects()))\n kwargs['fk_excludes'] = fk_excludes\n parsed_data = serializers.serialize('json', entities, use_natural_keys=True, **kwargs)\n parsed_data = json.loads(parsed_data)\n return create_encoded_entity(parsed_data, entity)",
"def _flatten_dict(self, d: Mapping[str, Any]) -> Dict[str, Any]:\n nested = {k for k, v in d.items() if isinstance(v, (Mapping, Configuration))}\n if self._lowercase:\n result = {\n k.lower() + \".\" + ki: vi\n for k in nested\n for ki, vi in self._flatten_dict(d[k]).items()\n }\n result.update(\n (k.lower(), v)\n for k, v in d.items()\n if not isinstance(v, (Mapping, Configuration))\n )\n else:\n result = {\n k + \".\" + ki: vi\n for k in nested\n for ki, vi in self._flatten_dict(d[k]).items()\n }\n result.update(\n (k, v)\n for k, v in d.items()\n if not isinstance(v, (Mapping, Configuration))\n )\n return result",
"def flatten(d):\n\n c = {}\n\n def _flatten(parents, items):\n for k, v in items:\n cur = parents + [k]\n if isinstance(v, list):\n _flatten(cur, enumerate(v))\n elif isinstance(v, dict):\n _flatten(cur, v.items())\n else:\n if v is None:\n cur.append('$NULL')\n v = ''\n name = str(cur[0]) + ''.join(['['+str(x)+']' for x in cur[1:]])\n c[name] = v\n \n _flatten([], d.items())\n\n return c",
"def merge_dict_recursive(target, src):\r\n for k in src.keys():\r\n if ((k in target and isinstance(target[k], dict) and\r\n isinstance(src[k], collections.Mapping))):\r\n merge_dict_recursive(target[k], src[k])\r\n else:\r\n target[k] = src[k]",
"def denormalize_entity(in_dict):\n out_dict = in_dict.copy()\n if 'id' in list(in_dict):\n out_dict['pk'] = in_dict['id']\n del out_dict['id']\n if 'start_time' in list(in_dict):\n # if not a datetime object, throw error\n if in_dict['start_time'] and not isinstance(in_dict['start_time'],\n datetime):\n raise IncorrectType()\n out_dict['start_time'] = \\\n in_dict['start_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ') \\\n if in_dict['start_time'] else None\n if 'end_time' in list(in_dict):\n # if not a datetime object, throw error\n if in_dict['end_time'] and not isinstance(in_dict['end_time'],\n datetime):\n raise IncorrectType()\n out_dict['end_time'] = \\\n in_dict['end_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ') \\\n if in_dict['end_time'] else None\n if 'created_at' in list(in_dict):\n # if not a datetime object, throw error\n if not isinstance(in_dict['created_at'], datetime):\n raise IncorrectType()\n out_dict['created_at'] = in_dict['created_at'].strftime(\n '%Y-%m-%dT%H:%M:%S.%fZ')\n if 'updated_at' in list(in_dict):\n # if not a datetime object, throw error\n if not isinstance(in_dict['updated_at'], datetime):\n raise IncorrectType()\n out_dict['updated_at'] = in_dict['updated_at'].strftime(\n '%Y-%m-%dT%H:%M:%S.%fZ')\n return out_dict",
"def deep_normalize(d):\n if 'sudsobject' in str(d.__class__):\n d = deep_normalize(dict(d))\n elif isinstance(d, dict):\n for k,v in d.iteritems():\n if 'sudsobject' in str(v.__class__):\n #print k, v, '%s' % v.__class__\n r = deep_normalize(dict(v))\n d[k] = r\n elif isinstance(v, dict):\n r = deep_normalize(v)\n d[k] = r\n elif isinstance(v, (list, tuple, )):\n d[k] = [deep_normalize(i) for i in v]\n elif isinstance(v, datetime):\n # per problemi di permessi sugli oggetti datetime trasformo\n # in DateTime di Zope\n d[k] = DateTime(v.isoformat())\n elif isinstance(d, (list, tuple, )):\n d = [deep_normalize(i) for i in d]\n\n return d",
"def flatten(d, parent_key='', sep='_'):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def expand_flattened_dict(flattened, separator='.'):\n merged = {}\n for key, value in flattened.items():\n expanded = expand_flattened_path(key, value=value, separator=separator)\n merged = merge_dicts(merged, expanded)\n return merged",
"def _expand_keys(entities):\n keys = list(entities.keys())\n values = list(product(*[entities[k] for k in keys]))\n return [{k: v for k, v in zip(keys, combs)} for combs in values]",
"def preprocess_entities_by_mapping(entities, objects_alias_mapping, predicates_alias_mapping):\n\n for entity in entities:\n\n for object in entity.objects:\n candidate_object = object.names[0].lower()\n\n # Update object name according to the objects_alias_mapping or just save it lower-case\n if candidate_object in objects_alias_mapping:\n object.names[0] = objects_alias_mapping[candidate_object]\n else:\n object.names[0] = candidate_object\n\n for relation in entity.relationships:\n candidate_predicate = relation.predicate.lower()\n\n # Update object name according to the predicates_to_be_used or just save it lower-case\n if candidate_predicate in predicates_alias_mapping:\n relation.predicate = predicates_alias_mapping[candidate_predicate]\n else:\n relation.predicate = candidate_predicate",
"def flatten(d: MutableMapping, sep: str = \".\", parent_key: str = \"\") -> dict:\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def add_relations(specification: Mapping[str, Any]) -> Mapping[str, Any]:\n\n # Class 'Mapping' does not define '__setitem__', so the '[]' operator cannot be used on its instances\n specification[\"relations\"][\"list\"] = []\n specification[\"relations\"][\"list_short\"] = []\n specification[\"relations\"][\"list_long\"] = []\n specification[\"relations\"][\"to_short\"] = {}\n specification[\"relations\"][\"to_long\"] = {}\n\n for relation_name in specification[\"relations\"][\"info\"]:\n\n abbreviated_name = specification[\"relations\"][\"info\"][relation_name][\"abbreviation\"]\n specification[\"relations\"][\"list\"].extend((relation_name, abbreviated_name))\n specification[\"relations\"][\"list_long\"].append(relation_name)\n specification[\"relations\"][\"list_short\"].append(abbreviated_name)\n\n specification[\"relations\"][\"to_short\"][relation_name] = abbreviated_name\n specification[\"relations\"][\"to_short\"][abbreviated_name] = abbreviated_name\n\n specification[\"relations\"][\"to_long\"][abbreviated_name] = relation_name\n specification[\"relations\"][\"to_long\"][relation_name] = relation_name\n\n specification[\"relations\"][\"list\"] = list(set(specification[\"relations\"][\"list\"]))\n\n return specification",
"def _flatten_dictionary(self, params, parent=None):\r\n data = OrderedDict()\r\n for key, val in params.items():\r\n full_key = parent + \"[\" + key + \"]\" if parent else key\r\n if isinstance(val, dict):\r\n data.update(self._flatten_dictionary(val, full_key))\r\n else:\r\n data[full_key] = val\r\n return data",
"def flatten_dict(d, parent_key=\"\", sep=\"_\"):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten_dict(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def flatten(d, parent_key='', sep='_'):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n items = dict(items)\n # remove info like PCA primitive ID\n items_not_strings = {k: v for k, v in items.items() if type(v) != str}\n return dict(items_not_strings)",
"def replace_dots(son):\n for key, value in son.items():\n if '.' in key:\n new_key = key.replace('.', '_')\n if isinstance(value, dict):\n son[new_key] = replace_dots(\n son.pop(key)\n )\n else:\n son[new_key] = son.pop(key)\n elif isinstance(value, dict): # recurse into sub-docs\n son[key] = replace_dots(value)\n return son",
"def expand_objects(record):\n new_record = copy.deepcopy(record)\n for key, value in record.items():\n parts = key.split(\".\")\n if len(parts) > 1:\n parts.reverse()\n current = {parts[0]: value}\n for part in parts[1:]:\n current = {part: current}\n del new_record[key]\n new_record = merge_dicts(new_record, current)\n\n return new_record",
"def expand_entities(context, **kwargs):\n kwargs = kwargs.copy()\n entities = set(context.child_entities)\n for key, maybe_entity in six.iteritems(kwargs):\n if isinstance(maybe_entity, Entity):\n entities.add(maybe_entity)\n kwargs[key] = maybe_entity.handle\n entities = list(entities)\n random.shuffle(entities)\n\n child_descriptions = []\n for entity in entities:\n child_descriptions.append(entity.child_description)\n if not entity.expression_used:\n child_descriptions.append(entity.description)\n\n child_description = ' '.join([s for s in child_descriptions if s])\n return child_description, kwargs",
"def flatten(d: Union[dict, list], parent_key: str = \"\", sep: str = \".\") -> dict:\n items = []\n if isinstance(d, dict):\n for k, v in d.items():\n new_key = f\"{parent_key}{sep}{k}\" if parent_key else str(k)\n items.extend(flatten(v, new_key, sep=sep).items())\n elif isinstance(d, list):\n for i, elem in enumerate(d):\n new_key = f\"{parent_key}{sep}{i}\" if parent_key else str(i)\n items.extend(flatten(elem, new_key, sep).items())\n else:\n items.append((parent_key, d))\n return dict(items)",
"def unflatten(\n d: Dict[str, Any],\n base: Dict[str, Any] = None,\n) -> Dict[str, Any]:\n if base is None:\n base = {}\n\n for key, value in d.items():\n root = base\n\n ###\n # If a dotted path is encountered, create nested dicts for all but\n # the last level, then change root to that last level, and key to\n # the final key in the path. This allows one final setitem at the bottom\n # of the loop.\n if '.' in key:\n *parts, key = key.split('.')\n\n for part in parts:\n root.setdefault(part, {})\n root = root[part]\n\n if isinstance(value, dict):\n value = unflatten(value, root.get(key, {}))\n\n root[key] = value\n\n return base",
"def deepupdate(original, update):\n for key, value in original.iteritems():\n if not key in update:\n update[key] = value\n elif isinstance(value, dict):\n deepupdate(value, update[key])\n return update",
"def make_entity_dict(class_reference, template, partial_dict): \n _data = class_reference.properties()\n for _key in _data:\n _data[_key] = partial_dict.get(_key, template.get(_key, '')) \n return _data",
"def flatten_dict(d, sep=' ', parent_key=''):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten_dict(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def flatten_dict(\n d, parent_key=\"\", sep=\".\", ignore_under_prefixed=True, mark_value=True\n):\n items = {}\n for k in d:\n if ignore_under_prefixed and k.startswith(\"__\"):\n continue\n v = d[k]\n if mark_value and k.startswith(\"_\") and not k.startswith(\"__\"):\n v = MarkValue(repr(v))\n\n new_key = sep.join((parent_key, k)) if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.update(\n flatten_dict(\n v, new_key, sep=sep, ignore_under_prefixed=True, mark_value=True\n )\n )\n else:\n items[new_key] = v\n\n return items"
] | [
"0.66751766",
"0.61724216",
"0.5608691",
"0.5521861",
"0.5493362",
"0.54766047",
"0.54461294",
"0.54113847",
"0.539981",
"0.53932256",
"0.53712",
"0.5352817",
"0.5349516",
"0.53416413",
"0.5324117",
"0.5303318",
"0.5291922",
"0.52735025",
"0.5255791",
"0.52344567",
"0.5196594",
"0.5194013",
"0.5185957",
"0.51043653",
"0.50833654",
"0.5069063",
"0.5067738",
"0.50643605",
"0.5045804",
"0.5042112"
] | 0.7275026 | 0 |
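As a rough illustration of what the dictionary-shaped include in the row above produces, here is a self-contained sketch using invented stand-ins for the ORM entities. `FakeUser`, `FakeInvestigationUser`, and their field values are assumptions; the real method resolves entities via `get_related_entity` and SQLAlchemy's `InstrumentedList` rather than hand-built fakes.

```python
class FakeUser:
    __singularfieldname__ = "user"

    def to_dict(self):
        return {"id": 7, "fullName": "USER 1"}

class FakeInvestigationUser:
    __pluralfieldname__ = "investigationUsers"

    def to_nested_dict(self, includes):
        # A real entity would recurse through its own relationships here;
        # for the sketch we nest the single "user" include by hand.
        nested = {"id": 3, "role": "PI"}
        if includes == "user":
            nested["user"] = FakeUser().to_dict()
        return nested

# Include filter {"investigationUsers": "user"}: nest each related
# investigationUser under the plural field name, and nest its user inside it.
dictionary = {"id": 1, "title": "INVESTIGATION 1"}
for entity in [FakeInvestigationUser(), FakeInvestigationUser()]:
    dictionary.setdefault(entity.__pluralfieldname__, []).append(
        entity.to_nested_dict("user")
    )
print(dictionary)
```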
Given the name of a single related entity, nest the related entity into the given dictionary representation of the original entity. | def _nest_string_include(self, dictionary, include):
related_entity = self.get_related_entity(include)
if not isinstance(related_entity, InstrumentedList):
dictionary[related_entity.__singularfieldname__] = related_entity.to_dict()
else:
for entity in related_entity:
if entity.__pluralfieldname__ in dictionary.keys():
dictionary[entity.__pluralfieldname__].append(entity.to_dict())
else:
dictionary[entity.__pluralfieldname__] = [entity.to_dict()] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _nest_dictionary_include(self, dictionary, include):\n related_entity = self.get_related_entity(list(include)[0])\n if not isinstance(related_entity, InstrumentedList):\n dictionary[\n related_entity.__singularfieldname__\n ] = related_entity.to_nested_dict(include[list(include)[0]])\n else:\n for entity in related_entity:\n if entity.__pluralfieldname__ in dictionary.keys():\n dictionary[entity.__pluralfieldname__].append(\n entity.to_nested_dict(include[list(include)[0]]),\n )\n else:\n dictionary[entity.__pluralfieldname__] = [\n entity.to_nested_dict(include[list(include)[0]]),\n ]",
"def translate_dict(entity_dict, config):\n\n dump_accepted_entity_dict = OrderedDict()\n\n for key in entity_dict:\n if key in config[\"ent_keys_dump\"]:\n dump_accepted_entity_dict[config[\n \"ent_keys_dump\"][key]] = entity_dict[key]\n\n else:\n dump_accepted_entity_dict[key] = entity_dict[key]\n\n return dump_accepted_entity_dict",
"def _packaged_dict_for_entity(rt):\n entity = rt.entity\n return {u'entity_id': entity.id,\\\n u'name': entity.aggregation_paths['_geo'][-1]}",
"def _dictRoundTripNormalize(self, treedict):\n for key, value in list(treedict.items()):\n if isinstance(value, dict):\n self._dictRoundTripNormalize(value)\n\n # Expand treedict[(\"group\", \"attr_name\")]\n # to treedict[\"group\"][\"attr_name\"]\n for key, value in list(treedict.items()):\n if not isinstance(key, tuple):\n continue\n # Put the attribute inside the group\n grpname, attr = key\n if not grpname:\n continue\n group = treedict.setdefault(grpname, dict())\n if isinstance(group, dict):\n del treedict[key]\n group[(\"\", attr)] = value",
"def encode_with_relatives(entity, **kwargs):\n exclude_models = kwargs.pop('exclude_models', None)\n entities, models = collect_related_instanses(entity, exclude_models)\n parsed_data = serializers.serialize('json', entities, use_natural_keys=True, **kwargs)\n parsed_data = json.loads(parsed_data)\n return create_encoded_entity(parsed_data, entity)",
"def flat_to_nested(self, data: dict, original_data, target, method):\n data[target] = method(original_data)\n return data",
"def encode_with_full_relatives(entity, **kwargs):\n exclude_models = kwargs.pop('exclude_models', None)\n entities, models = collect_related_instanses(entity, exclude_models)\n fk_excludes = kwargs.pop('fk_excludes', set())\n if fk_excludes:\n fk_excludes = set(fk_excludes)\n for model in models:\n fk_excludes.update(map(attrgetter('field.name'),\n model._meta.get_all_related_objects()))\n kwargs['fk_excludes'] = fk_excludes\n parsed_data = serializers.serialize('json', entities, use_natural_keys=True, **kwargs)\n parsed_data = json.loads(parsed_data)\n return create_encoded_entity(parsed_data, entity)",
"def related_entity(self, related_entity):\n \n self._related_entity = related_entity",
"def toDictionary(self):\n result = {}\n for key, method in self.__dict__.iteritems():\n if (key == '_Entity__instanced'): continue\n\n value = method()\n if (isinstance(value, Entity)):\n value = value.toDictionary()\n elif (isinstance(value, list)):\n expands = []\n for node in value:\n if (hasattr(node, 'toDictionary')):\n expands.append(node.toDictionary())\n else:\n expands.append(node)\n value = expands\n\n result[_toPropertyName(key)] = value\n return result",
"def convert_dotKeyToNestedDict(self, tree, key, value):\n\n t = tree\n if \".\" in key:\n key, rest = key.split(\".\", 1)\n if key not in tree:\n t[key] = {}\n self.convert_dotKeyToNestedDict(t[key], rest, value)\n else:\n t[key] = value\n\n return t",
"def _flatten_dict(self, current, key, result):\n if isinstance(current, dict):\n for k in current:\n new_key = \"{1}\".format(key, k) if len(key) > 0 else k\n self._flatten_dict(current[k], new_key, result)\n else:\n result[key] = current\n return result",
"def add_entity(self, entity_obj):\n if (\n type(entity_obj) is not dict\n or \"entity_id\" not in entity_obj\n or \"mentions\" not in entity_obj\n ):\n raise ValueError(\n \"The input to update_entity needs to be a dictionary with an entity_id key and mentions key as \"\n \"you are replacing the entity information in bulk.\"\n )\n try:\n ent = EntityObj(\n entity_id=entity_obj[\"entity_id\"],\n mentions=entity_obj[\"mentions\"],\n title=entity_obj.get(\"title\", entity_obj[\"entity_id\"]),\n description=entity_obj.get(\"description\", \"\"),\n types=entity_obj.get(\"types\", {}),\n relations=entity_obj.get(\"relations\", []),\n )\n except ValidationError as e:\n print(e.json())\n raise e\n # We assume this is a new entity\n if self._entity_symbols.qid_exists(ent.entity_id):\n raise ValueError(\n f\"The entity {ent.entity_id} already exists. Please call update_entity instead.\"\n )\n # Add type systems of type_sys -> QID -> list of type names\n for type_sys in ent.types:\n if type_sys not in self._type_systems:\n raise ValueError(\n f\"Error {entity_obj}. When adding a new entity, you must use the same type system. \"\n f\"We don't support new type systems.\"\n )\n # Add kg relations QID -> relation -> list of object QIDs\n parsed_rels = {}\n for rel_pair in ent.relations:\n if \"relation\" not in rel_pair or \"object\" not in rel_pair:\n raise ValueError(\n \"For each value in relations, it must be a JSON with keys relation and object\"\n )\n if (\n self._kg_symbols is not None\n and rel_pair[\"relation\"] not in self._kg_symbols.get_all_relations()\n ):\n raise ValueError(\n f\"Error {entity_obj}. When adding a new entity, you must use the same set of relations. \"\n f\"We don't support new relations.\"\n )\n if rel_pair[\"relation\"] not in parsed_rels:\n parsed_rels[rel_pair[\"relation\"]] = []\n parsed_rels[rel_pair[\"relation\"]].append(rel_pair[\"object\"])\n # Lower case mentions for mention extraction\n mentions = [\n [get_lnrm(men[0], strip=True, lower=True), men[1]] for men in ent.mentions\n ]\n self._entity_symbols.add_entity(\n ent.entity_id, mentions, ent.title, ent.description\n )\n for type_sys in self._type_systems:\n self._type_systems[type_sys].add_entity(\n ent.entity_id, ent.types.get(type_sys, [])\n )\n if self._kg_symbols is not None:\n self._kg_symbols.add_entity(ent.entity_id, parsed_rels)",
"def merge_dict_recursive(target, src):\r\n for k in src.keys():\r\n if ((k in target and isinstance(target[k], dict) and\r\n isinstance(src[k], collections.Mapping))):\r\n merge_dict_recursive(target[k], src[k])\r\n else:\r\n target[k] = src[k]",
"def _flatten_dict(self, d: Mapping[str, Any]) -> Dict[str, Any]:\n nested = {k for k, v in d.items() if isinstance(v, (Mapping, Configuration))}\n if self._lowercase:\n result = {\n k.lower() + \".\" + ki: vi\n for k in nested\n for ki, vi in self._flatten_dict(d[k]).items()\n }\n result.update(\n (k.lower(), v)\n for k, v in d.items()\n if not isinstance(v, (Mapping, Configuration))\n )\n else:\n result = {\n k + \".\" + ki: vi\n for k in nested\n for ki, vi in self._flatten_dict(d[k]).items()\n }\n result.update(\n (k, v)\n for k, v in d.items()\n if not isinstance(v, (Mapping, Configuration))\n )\n return result",
"def post(self, obj):\n\n\t\tmodelobj = (not obj.get('parent_type')) and model.get(obj) or None\n\t\tmodelobj and modelobj.before_post()\n\t\tmodelobj and modelobj.validate()\n\t\t\t\t\n\t\tobj_single, is_vector = self._get_single(obj)\n\t\t# save the parent\n\t\tself.post_single(obj_single)\n\t\tif is_vector:\t\n\t\t\tfor k in obj:\n\t\t\t\td = {\"type\":k, \"parent\":obj[\"name\"], \"parent_type\":obj[\"type\"]}\n\t\t\t\t# dict, one child only\n\t\t\t\tif type(obj[k]) is dict:\n\t\t\t\t\tobj[k].update(d)\n\t\t\t\t\tself.post(obj[k])\n\t\t\t\t\n\t\t\t\t# multiple children\n\t\t\t\tif type(obj[k]) in (list, tuple):\n\t\t\t\t\tidx = 0\n\t\t\t\t\tfor child in obj[k]:\n\t\t\t\t\t\td['idx'] = idx\n\t\t\t\t\t\tidx += 1\n\t\t\t\t\t\t\n\t\t\t\t\t\t# child is a dict\n\t\t\t\t\t\tif type(child) is dict:\n\t\t\t\t\t\t\tchild.update(d)\n\t\t\t\t\t\t\tself.post(child)\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t# child is literal (only names)\n\t\t\t\t\t\telif type(child) in (str, int, float):\n\t\t\t\t\t\t\tc = {\"value\":child}\n\t\t\t\t\t\t\tc.update(d)\n\t\t\t\t\t\t\tself.post_single(c)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\traise Exception, \"child %s must be dict or literal\" % str(child)\t\n\t\tmodelobj and modelobj.after_post()",
"def simplify_dict(d: Dict[str, Any]) -> Dict[str, Any]:\n return {\n k: [ast_to_testing_string(n) for n in v] if k == \"children\" else v\n for k, v in d.items()\n }",
"def denormalize_entity(in_dict):\n out_dict = in_dict.copy()\n if 'id' in list(in_dict):\n out_dict['pk'] = in_dict['id']\n del out_dict['id']\n if 'start_time' in list(in_dict):\n # if not a datetime object, throw error\n if in_dict['start_time'] and not isinstance(in_dict['start_time'],\n datetime):\n raise IncorrectType()\n out_dict['start_time'] = \\\n in_dict['start_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ') \\\n if in_dict['start_time'] else None\n if 'end_time' in list(in_dict):\n # if not a datetime object, throw error\n if in_dict['end_time'] and not isinstance(in_dict['end_time'],\n datetime):\n raise IncorrectType()\n out_dict['end_time'] = \\\n in_dict['end_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ') \\\n if in_dict['end_time'] else None\n if 'created_at' in list(in_dict):\n # if not a datetime object, throw error\n if not isinstance(in_dict['created_at'], datetime):\n raise IncorrectType()\n out_dict['created_at'] = in_dict['created_at'].strftime(\n '%Y-%m-%dT%H:%M:%S.%fZ')\n if 'updated_at' in list(in_dict):\n # if not a datetime object, throw error\n if not isinstance(in_dict['updated_at'], datetime):\n raise IncorrectType()\n out_dict['updated_at'] = in_dict['updated_at'].strftime(\n '%Y-%m-%dT%H:%M:%S.%fZ')\n return out_dict",
"def make_entity_dict(class_reference, template, partial_dict): \n _data = class_reference.properties()\n for _key in _data:\n _data[_key] = partial_dict.get(_key, template.get(_key, '')) \n return _data",
"def _deserialize(self, value, *args, **kwargs):\n if not isinstance(value, dict):\n if len(self.related_keys) != 1:\n keys = [prop.key for prop in self.related_keys]\n raise self.make_error(\"invalid\", value=value, keys=keys)\n value = {self.related_keys[0].key: value}\n if self.transient:\n return self.related_model(**value)\n try:\n result = self._get_existing_instance(self.related_model, value)\n except NoResultFound:\n # The related-object DNE in the DB, but we still want to deserialize it\n # ...perhaps we want to add it to the DB later\n return self.related_model(**value)\n return result",
"def expand_flattened_dict(flattened, separator='.'):\n merged = {}\n for key, value in flattened.items():\n expanded = expand_flattened_path(key, value=value, separator=separator)\n merged = merge_dicts(merged, expanded)\n return merged",
"def Related(self,name):\n # Get related property-entity (property id, property name, entity id, entity name) given entity name\n #\n # Fail to fetch eid would result in empty list\n query = self.entity2id(name)\n # print \"query\" + query\n # if query == \"Not Applicable\": return []\n ans = []\n for i in range(len(query)):\n s_id = query[i][\"s_id\"]\n subject_des = query[i][\"subject_des\"]\n url = \"https://www.wikidata.org/w/api.php?action=wbgetentities&ids=\" + s_id + \"&format=json&languages=en\"\n # print urllib.urlopen(url).read()\n response = json.loads(urllib.urlopen(url).read())\n for p in response[\"entities\"][s_id][\"claims\"]:\n for o in response[\"entities\"][s_id][\"claims\"][p]:\n # Enumerate property & entity (multi-property, multi-entity)\n try:\n # Some properties are not related to entities, thus try & except\n o_id = o[\"mainsnak\"][\"datavalue\"][\"value\"][\"id\"]\n ans.append({\n \"s_id\": s_id,\n \"s_des\": subject_des,\n \"p_id\": p,\n \"property\": self.getp(p),\n \"o_id\": o_id,\n \"object\": self.getc(o_id)\n })\n # ans.append(\"\\\\property\\\\\"+p+\"\\t\"+getp(p)+\"\\t\\\\entity\\\\\"+cid+\"\\t\"+getc(cid))\n # Print in a pid-pname-eid-ename fashion\n except:\n continue\n return ans",
"def _flatten_dictionary(self, params, parent=None):\r\n data = OrderedDict()\r\n for key, val in params.items():\r\n full_key = parent + \"[\" + key + \"]\" if parent else key\r\n if isinstance(val, dict):\r\n data.update(self._flatten_dictionary(val, full_key))\r\n else:\r\n data[full_key] = val\r\n return data",
"def expand_objects(record):\n new_record = copy.deepcopy(record)\n for key, value in record.items():\n parts = key.split(\".\")\n if len(parts) > 1:\n parts.reverse()\n current = {parts[0]: value}\n for part in parts[1:]:\n current = {part: current}\n del new_record[key]\n new_record = merge_dicts(new_record, current)\n\n return new_record",
"def nested_to_flat(self, data: dict, target: str, **kwargs):\n data.update(data.pop(target, {}))\n return data",
"def update(sn, d):\n if isinstance(sn, SN):\n sn = vars(sn)\n d = unwind_nested_dict(decode(d))\n for k, v in d.items():\n if k in sn and isinstance(v, (dict, SN)) and isinstance(sn[k], (dict, SN)):\n update(sn[k], v)\n else:\n sn[k] = encode(v)",
"def deep_normalize(d):\n if 'sudsobject' in str(d.__class__):\n d = deep_normalize(dict(d))\n elif isinstance(d, dict):\n for k,v in d.iteritems():\n if 'sudsobject' in str(v.__class__):\n #print k, v, '%s' % v.__class__\n r = deep_normalize(dict(v))\n d[k] = r\n elif isinstance(v, dict):\n r = deep_normalize(v)\n d[k] = r\n elif isinstance(v, (list, tuple, )):\n d[k] = [deep_normalize(i) for i in v]\n elif isinstance(v, datetime):\n # per problemi di permessi sugli oggetti datetime trasformo\n # in DateTime di Zope\n d[k] = DateTime(v.isoformat())\n elif isinstance(d, (list, tuple, )):\n d = [deep_normalize(i) for i in d]\n\n return d",
"def update_entity(self, entity_obj):\n if (\n type(entity_obj) is not dict\n or \"entity_id\" not in entity_obj\n or \"mentions\" not in entity_obj\n ):\n raise ValueError(\n \"The input to update_entity needs to be a dictionary with an entity_id key and mentions key as \"\n \"you are replacing the entity information in bulk.\"\n )\n if not self._entity_symbols.qid_exists(entity_obj[\"entity_id\"]):\n raise ValueError(f\"The entity {entity_obj['entity_id']} is not in our dump\")\n try:\n ent = EntityObj(\n entity_id=entity_obj[\"entity_id\"],\n mentions=entity_obj[\"mentions\"],\n title=entity_obj.get(\"title\", entity_obj[\"entity_id\"]),\n types=entity_obj.get(\"types\", {}),\n relations=entity_obj.get(\"relations\", []),\n )\n except ValidationError as e:\n print(e.json())\n raise e\n # Update mentions\n for men in self.get_mentions(ent.entity_id):\n self._entity_symbols.remove_alias(ent.entity_id, men)\n for men in ent.mentions:\n # Lower case mentions for mention extraction\n men = [get_lnrm(men[0], strip=True, lower=True), men[1]]\n self._entity_symbols.add_alias(ent.entity_id, men)\n # Update title\n self._entity_symbols.set_title(ent.entity_id, ent.title)\n # Update types\n for type_sys in self._type_systems:\n for typename in self._type_systems[type_sys].get_types(ent.entity_id):\n self._type_systems[type_sys].remove_type(ent.entity_id, typename)\n for type_sys in ent.types:\n for typename in ent.types[type_sys]:\n self._type_systems[type_sys].add_type(ent.entity_id, typename)\n # Update KG\n if self._kg_symbols is not None:\n for rel in self._kg_symbols.get_relations(ent.entity_id):\n for qid2 in self._kg_symbols.get_connections_by_relation(\n ent.entity_id, rel\n ):\n self._kg_symbols.remove_kg(ent.entity_id, rel, qid2)\n for rel_pair in ent.relations:\n self._kg_symbols.add_kg(\n ent.entity_id, rel_pair[\"relation\"], rel_pair[\"object\"]\n )",
"def _encode_dictionary(data, name=\"Second\", sub=False):\n\n if sub:\n root = ET.Element(\"Field\", {\"Name\": f'{name}', \"Type\": \"elsystem.collections.dictionary\"})\n else: \n root = ET.Element(\"elsystem.collections.dictionary\")\n\n items = ET.SubElement(root, 'Field', {'Name': 'Items', 'Type': 'elsystem.collections.vector'})\n\n index = 0\n\n for key, val in data.items():\n\n pair = ET.SubElement(items, 'Field', {'Name': f'E{index}', 'Type': 'elsystem.collections.pair'})\n \n if type(val) == dict:\n ET.SubElement(pair, 'Field', {'Name': 'First', 'Value': _encode_value(key)}) \n sub_dict = _encode_dictionary(data=val, name=\"Second\", sub=True)\n pair.append(sub_dict)\n elif type(val) == list:\n ET.SubElement(pair, 'Field', {'Name': 'First', 'Value': _encode_value(key)}) \n sub_vec = _encode_list(data=val, name=F'E{index}', sub=True)\n pair.append(sub_vec)\n else:\n ET.SubElement(pair, 'Field', {'Name': 'First', 'Value': _encode_value(key)}) \n ET.SubElement(pair, 'Field', {'Name': 'Second', 'Value': _encode_value(val)}) \n\n index += 1\n\n ET.SubElement(items, 'Field', {'Name': 'count', 'Value': _encode_value(index)})\n\n if sub:\n return root \n else:\n return ET.tostring(root)",
"def test_conversion_to_dict():\n model_definition = {\n 'language': {'type': 'fixed', 'default': 'english'},\n 'a': {'type': 'fixed', 'persisted': True},\n 'b.c': {'type': 'fixed', 'persisted': True},\n 'b.d.e': {'type': 'text', 'persisted': True},\n 'b.d.f': {'type': 'numeric', 'persisted': True}\n }\n factory = ProductModelFactory(model_definition)\n raw_product = {\n 'a': 'foo',\n 'b': {\n 'c': 'bar',\n 'd': {\n 'e': 'some nested stuff',\n 'f': 12345\n }\n }\n }\n stemmed = text.parse_text_to_stems('english', raw_product['b']['d']['e'])\n model = factory.build('test_product', raw_product)\n model_dict = model.to_dict()\n nose.tools.eq_(model_dict['a'], raw_product['a'], 'Attribute does not match')\n nose.tools.eq_(model_dict['b']['c'], raw_product['b']['c'], 'Attribute does not match')\n nose.tools.assert_list_equal(model_dict['b']['d']['e'], stemmed, 'Attribute does not match')\n nose.tools.eq_(model_dict['b']['d']['f'], raw_product['b']['d']['f'], 'Attribute does not match')",
"def unedgify(edgy_node):\n if isinstance(edgy_node, dict):\n if 'edges' in edgy_node:\n edgy_node = edgy_node['edges']\n index = 0\n while index < len(edgy_node):\n if isinstance(edgy_node[index], dict) and\\\n 'node' in edgy_node[index]:\n edgy_node[index] = edgy_node[index]['node']\n edgy_node[index] = unedgify(edgy_node[index])\n index = index + 1\n else:\n for key in edgy_node:\n edgy_node[key] = unedgify(edgy_node[key])\n return edgy_node"
] | [
"0.6981927",
"0.5456595",
"0.5351446",
"0.53328013",
"0.52869356",
"0.50926924",
"0.5067823",
"0.50247926",
"0.4971832",
"0.4952855",
"0.4946507",
"0.49437508",
"0.49123368",
"0.48964164",
"0.48820087",
"0.48600113",
"0.48468426",
"0.48367497",
"0.48357335",
"0.48296165",
"0.48295262",
"0.48160505",
"0.47931907",
"0.47763023",
"0.4762864",
"0.4760027",
"0.4745244",
"0.47308138",
"0.4721416",
"0.47065482"
] | 0.6343933 | 1 |
Given a string for the related entity name, return the related entity. This function is used to retrieve a related entity as a result of an include filter. Some related entities have a plural name (ending with an 's'), so this is checked. If `entity` is a singular name, `entity` is uppercased (table names are defined in upper case, as seen in `__tablename__` in the entity classes lower down in this file). Both related entity name types (singular and plural) are used to retrieve the actual related entity. | def get_related_entity(self, entity):
try:
return getattr(self, entity if entity[-1] == "s" else entity.upper())
except AttributeError:
raise FilterError(f"No related entity: {entity}") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_name(name_or_entity):\n if isinstance(name_or_entity, (Subject, SubjectType)):\n return name_or_entity.name\n elif isinstance(name_or_entity, db.Model):\n return name_or_entity.key().name()\n else:\n return name_or_entity",
"def get_entity_by_name(self, entity_name):\n return Artifact.get_by_name(entity_name)",
"def get_entity_name(self, entity_id, type_name):\n return self._symtab[type_name].get_symbol(entity_id)",
"def get_singular_name(cls, entity_name_plural):\n\n if entity_name_plural in Utils.INVARIANT_RESOURCES:\n return entity_name_plural\n\n if entity_name_plural[-3:] == \"ies\":\n return entity_name_plural[:-3] + \"y\"\n\n if entity_name_plural[-1] == \"s\":\n return entity_name_plural[:-1]\n\n return entity_name_plural",
"def getEntity(self):\n\n fid = file(self.filename)\n entityre = re.compile(\"entity (\\w+) is\", re.IGNORECASE)\n\n matches = entityre.search(fid.read())\n self.entityname = matches.groups()[0]\n return self.entityname",
"def get_entity_name() -> str:\n return \"NewsItemEntity\"",
"def _match_entity(hass, name):\n from fuzzywuzzy import process as fuzzyExtract\n entities = {state.entity_id: state.name for state\n in hass.states.async_all()}\n entity_id = fuzzyExtract.extractOne(\n name, entities, score_cutoff=65)[2]\n return hass.states.get(entity_id) if entity_id else None",
"def resolve_model(root: Entity, *args) -> str:\n return 'entities.entity'",
"def get_related_entities(self, entity_name, rel_str=\"similar to\"):\n if rel_str not in self.approved_relations.values():\n print(\"WARN: querying for invalid relations. Only allow: {}\".format(self.approved_relations))\n\n try:\n with closing(self.connection) as con:\n # Auto-commit\n with con:\n with closing(con.cursor()) as cursor:\n # Inner query retrieves IDs of all related entities\n cursor.execute(\"\"\"\n SELECT name\n FROM nodes\n WHERE id IN (\n SELECT dest\n FROM edges JOIN nodes ON source == id\n WHERE name LIKE (?) AND rel == (?)\n );\n \"\"\", (entity_name, rel_str))\n # [(\"Justin Timberlake\",), (\"Shawn Mendes\",)] => [\"Justin Timberlake\", \"Shawn Mendes\"]\n return [x[0] for x in cursor.fetchall()]\n\n except sqlite3.OperationalError as e:\n print(\"ERROR: Could not find entities similar to entity with name '{}': {}\".format(entity_name, str(e)))\n return []",
"def find_entity_by_name(parent_entity, name):\n children = parent_entity.GetChildEntity()\n for item in children:\n if item.GetName() == name:\n return item\n return None",
"async def friendly_name(self, entity_id: str, **kwargs) -> str:\n await self._check_entity(self._get_namespace(**kwargs), entity_id)\n state = await self.get_state(**kwargs)\n if entity_id in state:\n if \"friendly_name\" in state[entity_id][\"attributes\"]:\n return state[entity_id][\"attributes\"][\"friendly_name\"]\n else:\n return entity_id\n return None",
"def entity_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"entity_name\")",
"def entity_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"entity_name\")",
"def entity_name(self):\n return self.__entity_name",
"def _get_full_entity(entity: spacy.tokens.Token) -> str:\n entity_string = SpacyEventExtractor._get_chunk(entity)\n\n word = entity\n while True:\n prep, word = SpacyEventExtractor._get_prep_with_word(word)\n if word is None:\n break\n entity_string += \" \" + prep\n return entity_string",
"def Related(self,name):\n # Get related property-entity (property id, property name, entity id, entity name) given entity name\n #\n # Fail to fetch eid would result in empty list\n query = self.entity2id(name)\n # print \"query\" + query\n # if query == \"Not Applicable\": return []\n ans = []\n for i in range(len(query)):\n s_id = query[i][\"s_id\"]\n subject_des = query[i][\"subject_des\"]\n url = \"https://www.wikidata.org/w/api.php?action=wbgetentities&ids=\" + s_id + \"&format=json&languages=en\"\n # print urllib.urlopen(url).read()\n response = json.loads(urllib.urlopen(url).read())\n for p in response[\"entities\"][s_id][\"claims\"]:\n for o in response[\"entities\"][s_id][\"claims\"][p]:\n # Enumerate property & entity (multi-property, multi-entity)\n try:\n # Some properties are not related to entities, thus try & except\n o_id = o[\"mainsnak\"][\"datavalue\"][\"value\"][\"id\"]\n ans.append({\n \"s_id\": s_id,\n \"s_des\": subject_des,\n \"p_id\": p,\n \"property\": self.getp(p),\n \"o_id\": o_id,\n \"object\": self.getc(o_id)\n })\n # ans.append(\"\\\\property\\\\\"+p+\"\\t\"+getp(p)+\"\\t\\\\entity\\\\\"+cid+\"\\t\"+getc(cid))\n # Print in a pid-pname-eid-ename fashion\n except:\n continue\n return ans",
"def get_entity_name_plural(cls, singular_name):\n\n if singular_name in Utils.INVARIANT_RESOURCES:\n return singular_name\n\n vowels = [\"a\", \"e\", \"i\", \"o\", \"u\", \"y\"]\n if singular_name[-1:] == \"y\" and singular_name[-2] not in vowels:\n return singular_name[:-1] + \"ies\"\n\n if singular_name[-1:] == \"s\":\n return singular_name\n\n return singular_name + \"s\"",
"def get_entity(endpoint):\n _entity, _id = parser_endpoint(endpoint)\n\n return _entity",
"def _get_entity2(verb):\n for child in verb.children:\n if child.dep_ == \"dobj\":\n return child\n return None",
"def related(filterset, filter_name):\n if not filterset.relationship:\n return filter_name\n return LOOKUP_SEP.join([filterset.relationship, filter_name])",
"def get_display_name(entity):\n if isinstance(entity, User):\n if entity.last_name is not None:\n return '{} {}'.format(entity.first_name, entity.last_name)\n return entity.first_name\n\n if isinstance(entity, Chat) or isinstance(entity, Channel):\n return entity.title",
"def get_related_name(self, field=None):\n related_name = self.subrecord.__name__.lower()\n if field:\n related_name = \"{0}__{1}\".format(related_name, field)\n if self.is_patient_subrecord:\n return \"patient__{0}\".format(related_name)\n else:\n return related_name",
"def resolve_entity(root, info, slug: str) -> Optional[Entity]:\n try:\n return Entity.objects.get(slug=slug)\n except ObjectDoesNotExist:\n if slug.isnumeric():\n try:\n return Entity.objects.get(pk=int(slug))\n except GraphQLError:\n pass\n return None",
"def parse_entity_from_bidspath(path,entity,mode='r2l'):\n entity = entity if '-' in entity else entity + '-'\n # Easier to find it from the tail of the bidspath\n if mode == 'r2l':\n entity_position = path.rfind(entity)\n elif mode == 'l2r':\n entity_position = path.find(entity)\n else:\n raise ValueError('Incorrect usage of the mode argument.')\n\n if entity_position == -1:\n return None\n\n little_path = path[entity_position:]\n\n value = re.search('%s(.*?)%s' % ('-', '_'), little_path,).group(1)\n\n return value",
"def fetch_entity(endpoint, values):\n values['entity'] = Entity.objects.get_or_404(name=values['entity'])",
"def get_related_field(cls, model, name=None):\n for field_name, rel_model in cls._meta.relations.iteritems():\n if rel_model == model and name in [None, field_name]:\n return cls.get_field(field_name)",
"def related_entity(self):\n return self._related_entity",
"def _get_raw_entity_kind(cls, entity_kind_or_model_classpath):\n return entity_kind_or_model_classpath",
"def _get_entity1(span):\n for word in span:\n if word.head is word: # main verb\n for child in word.children:\n if child.dep_.endswith(\"nsubj\"):\n return child\n break\n return None",
"def related(self, name=None, reverse_lookup=False):\n\t\tif not name and reverse:\n\t\t\tfrom .models import Related\n\t\t\tmodel = self\n\t\t\tif hasattr(self, 'parent_model'):\n\t\t\t\tmodel = self.parent_model\n\t\t\tct = ContentType.objects.get_for_model(model)\n\t\t\tret = Related.objects.filter(related_content_type=ct.pk, related_object_id=self.pk).order_by('content_type__model', 'object_id')\n\t\t\treturn ret\n\n\t\tif not name:\n\t\t\traise Exception('Need a related item name to lookup!')\n\n\t\t# Convert to list if needed\n\t\tif isinstance(name, str):\n\t\t\tname = [name]\n\n\t\t# Grab this model's content type\n\t\tcontent_type = ContentType.objects.get_for_model(type(self))\n\n\t\t# Grab model paths via aliases and combine with dot-notation model names\n\t\tmodel_paths = [v[1] for v in self.related_overrides.get(self.related_override_key(), self.related_models) if v[0] in name] + [v for v in name if '.' in v]\n\t\t# Grab related content types\n\t\trelated_content_types = [ContentType.objects.get_for_model(apps.get_model(*model_path.split('.'))) for model_path in model_paths]\n\n\t\t# Set to/from fields\n\t\tfields = ['object_id', 'content_type', 'content_object', 'content_type_id']\n\t\t_from = dict(zip(fields, fields))\n\t\t_to = {k: 'related_{}'.format(v) for (k, v) in _from.items()}\n\n\t\t# Switch to/from if reversed\n\t\tif reverse_lookup:\n\t\t\t_from, _to = _to, _from\n\n\t\targs = {\n\t\t\t_from['content_type']: content_type,\n\t\t\t_from['object_id']: self.pk,\n\t\t\t'{}__in'.format(_to['content_type']): related_content_types,\n\t\t}\n\n\t\tif not reverse_lookup:\n\t\t\targs['group__in'] = name\n\n\t\t# Get relations\n\t\tfrom .models import Related\n\t\trelations = Related.objects.filter(**args)\n\n\t\t# For reverse lookup, if there's only one related content type, query those models directly\n\t\tif reverse_lookup and len(related_content_types) == 1:\n\t\t\treturn related_content_types[0].model_class().objects.filter(pk__in=relations.values('object_id')).public()\n\t\t# Otherwise, prefetch in bulk and cache each content type separately\n\t\telse:\n\t\t\tself.prefetch_relations(relations, _to)\n\t\t\treturn [getattr(relation, '_content_object_cache') for relation in relations if hasattr(relation, '_content_object_cache')]"
] | [
"0.59919703",
"0.5937406",
"0.58765435",
"0.57505286",
"0.56437147",
"0.55702424",
"0.55522764",
"0.553932",
"0.55249816",
"0.5521107",
"0.5295941",
"0.5176583",
"0.5176583",
"0.5101699",
"0.5007097",
"0.499974",
"0.4968771",
"0.4964467",
"0.49382612",
"0.49290913",
"0.4915373",
"0.49113485",
"0.49040988",
"0.49040458",
"0.4902337",
"0.48889127",
"0.48583564",
"0.4774757",
"0.4711718",
"0.4700155"
] | 0.74088454 | 0 |
Checks if privileged access is enabled for the server. Throws an exception if it is not. | def __require_privilaged_access(self):
if not self.getLoggedInUser():
raise codechecker_api_shared.ttypes.RequestFailed(
codechecker_api_shared.ttypes.ErrorCode.UNAUTHORIZED,
"The server must be start by using privilaged access to "
"execute this action.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _check_access_priv(required_privilege_level):\n auth_user, prog_name, user, host, uuid = _get_client_info()\n priv_level = _get_priv_level(auth_user)\n if (PRIVILEGE_LEVELS.index(priv_level) <\n PRIVILEGE_LEVELS.index(required_privilege_level)):\n err = CONNECT_DENIED_PRIV_TMPL % (\n priv_level, required_privilege_level,\n user, host, prog_name, uuid)\n #LOG.warning(err)\n # Raise an exception to be sent back to the client.\n raise InvalidUsage(err, status_code=403)\n return True",
"def _check_permissions(server, priv):\n # Check user permissions\n user_pass_host = server.user\n if server.passwd is not None and len(server.passwd) > 0:\n user_pass_host += \":\" + server.passwd\n user_pass_host += \"@\" + server.host\n user = User(server, user_pass_host, False)\n if not user.has_privilege(\"*\", \"*\", priv):\n raise UtilError(\"Not enough permissions. The user must have the \"\n \"%s privilege.\" % priv)",
"def is_accessible(self):\n if self._is_accessible:\n return self._is_accessible\n\n check_host_cmd = '/usr/rift/bin/ssh_root {ip} -q -n -o BatchMode=yes -o StrictHostKeyChecking=no ls > /dev/null'\n rc = subprocess.call(check_host_cmd.format(ip=self._ip), shell=True)\n logger.info(\"Checking if {} is accessible\".format(self._ip))\n\n\n\n if rc != 0:\n return False\n\n self._is_accessible = True\n return self._is_accessible",
"def is_accessible(self):\n if login.current_user.is_authenticated:\n return login.current_user.is_admin()\n return False",
"def require_server_administrator():\n if not test_server_administrator():\n raise cherrypy.HTTPError(403)",
"def cog_check(self, ctx):\n return ctx.author.guild_permissions.administrator",
"def cog_check(self, ctx):\r\n return ctx.author.guild_permissions.administrator",
"def cog_check(self, ctx):\r\n return ctx.author.guild_permissions.administrator",
"def is_accessible(self):\n return current_user.is_authenticated and current_user.role == 'admin'",
"def is_accessible(self):\n return current_user.is_authenticated and current_user.role == 'admin'",
"def hasPrivilege(self, privilege, groupid=None):\n\n if groupid is None:\n # Privilege needs to be a string, for example: \"TMS.Systems.Update\"\n url = \"/hasPrivilege?{}\".format(urllib.urlencode({ \"privilege\" : privilege }))\n else:\n # groupid needs to be of type integer, e.g. \"1\" , check CIC Administration > Groups\n url = \"/hasPrivilege?{}\".format(urllib.urlencode({ \"privilege\" : privilege, \"groupid\" : groupid }))\n try:\n response = self.httpHandler.sendHttpRequest(url)\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n # 403 is a valid return code when the privilege is not available\n c = e.code\n if c == 403:\n\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, e.read()))\n return False\n\n else:\n raise\n else:\n c = response.getcode()\n\n if c == 200:\n return True\n\n else:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise AssertionError(\n \"Got unexpected http return code: {} when calling {}\".format(\n c, url))",
"def is_allowed(self) -> bool:\n return self.effect == ALLOW_ACCESS",
"def no_network_access_check(user):\n return not user.has_property(\"network_access\")",
"def check_privilege(self, username):\n con = self.connect()\n cursor = con.cursor()\n cursor.execute(\"SELECT isAdmin \\\n FROM users WHERE username = %s\", (username,))\n privilege = cursor.fetchone()\n cursor.close()\n con.commit()\n con.close()\n if (privilege[0] is True):\n return True\n else:\n return False",
"def check_permission():\n if IS_ADMIN:\n out_info(\"Running as Root/Admin\")\n else:\n out_warning(\"Running without root/admin privileges\")",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def is_accessible(url: str) -> bool:\n try:\n return requests.get(url).status_code == requests.codes.ok\n except Exception:\n return False",
"def check_config_admin():\n user_email = app_util.get_oauth_id()\n if is_config_admin(user_email):\n logging.info(f\"User {user_email} ALLOWED for config endpoint\")\n return\n logging.info(f\"User {user_email} NOT ALLOWED for config endpoint\")\n raise Forbidden()",
"def check_admin():\n\tif not current_user.is_admin:\n\t\tabort(403)",
"def can_disable_fourth_level(self):\n context = self._get_real_context()\n sm = getSecurityManager()\n return self.is_enabled and sm.checkPermission(\"Portlets: Manage portlets\", context)",
"def check_admin() -> bool:\n return ctypes.windll.shell32.IsUserAnAdmin() == 1",
"def has_access(need_privileged_access=False, job_type=None, fuzzer_name=None):\n result = get_access(\n need_privileged_access=need_privileged_access,\n job_type=job_type,\n fuzzer_name=fuzzer_name)\n\n return result == UserAccess.Allowed",
"def check_admin():\r\n if not current_user.is_admin:\r\n abort(403)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def verify_privileged(self):\n community_text = self.fetch(self.base_url + \"/community\")\n return \"You must be logged in to see this page.\" not in community_text",
"def privilege_check(user, *required_privileges):\n for perm in required_privileges:\n if user.has_property(perm):\n return True\n return False",
"async def protection_enabled(self) -> bool:\n response = await self._request(\"status\")\n return response[\"protection_enabled\"]",
"def has_access_to_admin_console(self):\n return self.is_superuser or self.has_perm('user.can_view_admin_console')"
] | [
"0.7557653",
"0.72404724",
"0.6943704",
"0.6715594",
"0.6620799",
"0.6591731",
"0.6571485",
"0.6571485",
"0.6561439",
"0.6561439",
"0.63770384",
"0.6369124",
"0.6322632",
"0.63182443",
"0.6280057",
"0.6267551",
"0.6267551",
"0.62510467",
"0.6189843",
"0.61657476",
"0.61598116",
"0.61219025",
"0.6112528",
"0.6109397",
"0.6081005",
"0.6081005",
"0.6015169",
"0.6004565",
"0.599693",
"0.59951204"
] | 0.7526212 | 1 |
Checks if the current user has PERMISSION_VIEW rights. Throws an exception if they do not. | def __require_permission_view(self):
permission = codechecker_api_shared.ttypes.Permission.PERMISSION_VIEW
if not self.__has_permission(permission):
raise codechecker_api_shared.ttypes.RequestFailed(
codechecker_api_shared.ttypes.ErrorCode.UNAUTHORIZED,
"You are not authorized to execute this action.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def can_view(self, user):\r\n return True",
"def has_view_permission(self, request, obj=None):\n return True\n opts = self.opts\n codename = get_permission_codename('view', opts)\n return any([\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename)),\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename), obj)])",
"def has_permission(self, request, view):\n return False",
"def has_permission(self, request, view):\n return True",
"def can_view(self, user):\n if self.applicant == user:\n return True\n elif user.has_perm('funding.view_all_applications'):\n # Fundihg commitee\n return True\n elif user.has_perm('funding.make_application_decisions'):\n # Fundihg manager - should have the view permissions, but just in case\n return True\n return False",
"def can_be_viewed_by(self,user):\n\n # check whether everyone is allowed to view this. Anymous user is the only member of group\n # 'everyone' for which permissions can be set\n anonymousUser = get_anonymous_user()\n\n if anonymousUser.has_perm(\"view_ComicSiteModel\",self):\n return True\n else:\n # if not everyone has access, check whether given user has permissions\n return user.has_perm(\"view_ComicSiteModel\",self)",
"def can_edit_or_403(self, user):\n if self.get_permission_level(user) < self.OWNER_PERMISSION:\n raise PermissionDenied\n return True",
"def view(self, user, action, *args):\n if user.is_anonymous or user.is_client:\n return False\n\n if user.is_administrator:\n return True\n\n if user.is_manager:\n return False\n\n # TODO check groups in request maybe ? dunno\n if user.is_advisor:\n return True\n\n return self.admin_permission(user, action, *args)",
"def can_access(self, permission_name: str, view_name: str) -> bool:\n\n user = g.user\n if user.is_anonymous:\n return self.is_item_public(permission_name, view_name)\n return self._has_view_access(user, permission_name, view_name)",
"def has_permission(self, request, view):\n user = request.user\n try:\n user.user_client\n return True\n except Exception:\n return False",
"def has_permission(self, request, view):\n usuario = request.user\n return str(usuario.grupo) == \"Vendedor\"",
"def has_permission(self, request, view):\n if request.user.is_authenticated():\n return True\n return False",
"def can_be_viewed_by(self,user):\n return True",
"def has_permission(self, request, view):\n if settings.ENHANCED_ORG_ADMIN and request.user.admin:\n return True\n\n if not request.user.access:\n return False\n\n if request.method in permissions.SAFE_METHODS:\n rates_read = request.user.access.get(\"cost_model\", {}).get(\"read\", [])\n if rates_read:\n return True\n else:\n rates_write = request.user.access.get(\"cost_model\", {}).get(\"write\", [])\n if \"*\" in rates_write:\n return True\n if self.get_uuid_from_url(request) in rates_write:\n return True\n return False",
"def can_edit_or_403(self, user):\n if user.id != self.game_master.id:\n raise PermissionDenied\n return True",
"def test_func(self):\n member_to_view = self.get_object()\n is_self = self.request.user.rfid == member_to_view.rfid\n view_others = self.request.user.has_permission(\"core.view_member\")\n return view_others or is_self",
"def has_view_permissions(self, obj):\n queryset = self.model.objects.filter(pk=obj.pk)\n if hasattr(queryset, 'has_view_permissions'):\n return queryset.has_view_permissions( PyFormsMiddleware.user() )\n else:\n return True",
"def has_permission(self, request, view):\n return request.user.group == 'admin'",
"def has_permission(self, request, view):\n if request.method in permissions.SAFE_METHODS:\n return True\n return False",
"def process_view(self, request, view, view_args, view_kwargs):\n group = getattr(request, 'group', None)\n if group is not None and getattr(group, 'private', False)\\\n and not request.user.has_perm('view', group):\n raise PermissionDenied",
"def is_visible(cls, request):\n if cls.permission_required:\n return request.user.has_perm(cls.permission_uri)\n else:\n return True",
"def has_permission(self, request, view):\n return request.user.group != 'patient'",
"def has_permission_to_view(page, user):\n if page.permissions.count() == 0:\n return True\n for perm in page.permissions.all():\n perm_label = '%s.%s' % (perm.content_type.app_label, perm.codename)\n if user.has_perm(perm_label):\n return True\n return False",
"def has_object_permission(self, request, view, user):\n return user == request.user or request.user.is_superuser",
"def can_retrieve(self, user):\n return user.has_perm('agenda.can_see')",
"def has_view_permission(self, request, obj=None):\n user = request.user\n if obj and type(obj) is Client:\n return obj.is_user_in_sales_contacts_of_client(user) or obj.is_user_in_support_contacts_of_client(user)\n return True",
"def has_permission(self, request, view):\n # el request nos dara los mismos atributos que nos da el request de las vistas genericas\n # en los customs permissions tenemos que retorna SIEMPRE un booleano (true o false) porque si es verdero procedera con el siguiente permiso o con el controlador final\n print(SAFE_METHODS)\n # SAFE_METHODS son GET, HEAD, OPTIONS\n print(request.user.personalTipo)\n if request.user.personalTipo == 1:\n return True\n else:\n return False\n # if request.method in SAFE_METHODS:\n # return True\n # else:\n # return False",
"def has_object_permission(self, request, view, obj):\n\n # Users can always see and edit their own comments\n if obj.create_user == request.user:\n return True\n\n # And see but not edit those from their others in their own\n # organization\n if obj.create_user.organization == request.user.organization and \\\n request.method in permissions.SAFE_METHODS:\n return True\n\n # Government roles can always view comments\n # and can view or edit privileged comments with correct permission\n if request.user.is_government_user:\n # read\n if request.method in permissions.SAFE_METHODS:\n if obj.privileged_access:\n return request.user.has_perm('DOCUMENTS_VIEW')\n return True\n\n # write\n if request.method not in permissions.SAFE_METHODS:\n if obj.privileged_access:\n return request.user.has_perm('DOCUMENTS_GOVERNMENT_REVIEW')\n return True\n\n # not authorized\n return False",
"def has_permission(self, request, view):\n\n is_authenticated = request.user.is_authenticated()\n safe_request = request.method in permissions.SAFE_METHODS\n return is_authenticated and safe_request",
"def has_permission(self, request, view):\n usuario = request.user\n grupo = usuario.grupo\n return grupo.name in [\"SuperUsuario\", \"Administrador\"]"
] | [
"0.7633026",
"0.74133563",
"0.73833144",
"0.73323774",
"0.73181283",
"0.7290978",
"0.7193194",
"0.7171869",
"0.70900744",
"0.70630527",
"0.7049611",
"0.69974184",
"0.6995054",
"0.6973803",
"0.6966059",
"0.6943227",
"0.69359297",
"0.6922291",
"0.6895127",
"0.6892424",
"0.6867687",
"0.68474174",
"0.6799237",
"0.67930156",
"0.67240304",
"0.67233837",
"0.67216593",
"0.6716635",
"0.6700297",
"0.6680756"
] | 0.7595057 | 1 |
Helper function to unpack the extra_params JSON string to a dict. If a session is specified, it is added to this dict as config_db_session. | def __unpack_extra_params(extra_params, session=None):
if extra_params and extra_params != "":
params = json.loads(extra_params)
else:
params = {}
if session:
params['config_db_session'] = session
return params | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extra_dejson(self):\n obj = {}\n if self.extra:\n try:\n obj = json.loads(self.extra)\n except Exception as e:\n self.log.exception(e)\n self.log.error(\"Failed parsing the json for conn_id %s\", self.conn_id)\n\n return obj",
"def get_event_params(self) -> dict[str, Any]:\n\n return json.loads( # type: ignore[no-any-return]\n json.dumps(self.__dict__, default=lambda o: o.__dict__),\n object_pairs_hook=self._dict_clean,\n )",
"def _prepare_param_dict(self, params_dict):\n return params_dict",
"def _parse_params_for_add_mac_extended(self, **kwargs):\n user_data = {}\n\n user_data['acl_name'] = kwargs['acl_name']\n user_data['seq_id'] = self.mac.parse_seq_id(**kwargs)\n user_data['action'] = self.mac.parse_action(**kwargs)\n user_data['source'] = self.mac.parse_source(**kwargs)\n user_data['dst'] = self.mac.parse_dst(**kwargs)\n user_data['ethertype'] = self.mac.parse_ethertype(**kwargs)\n user_data['vlan'] = self.mac.parse_vlan(**kwargs)\n user_data['pcp'] = self.mac.parse_pcp(**kwargs)\n bool_params = ['log', 'count']\n self.mac.parse_boolean_params(user_data, bool_params, **kwargs)\n\n return user_data",
"def parse_config(config):\n bucket = config[\"bucket\"]\n storage_class = config.get(\"storage_class\", \"STANDARD\")\n\n assert isinstance(bucket, string_types)\n assert storage_class in _VALID_STORAGE_CLASSES\n\n result = {\n \"bucket\": bucket,\n \"extra_args\": {\"StorageClass\": storage_class},\n }\n\n if \"region_name\" in config:\n result[\"region_name\"] = config[\"region_name\"]\n\n if \"endpoint_url\" in config:\n result[\"endpoint_url\"] = config[\"endpoint_url\"]\n\n if \"access_key_id\" in config:\n result[\"access_key_id\"] = config[\"access_key_id\"]\n\n if \"secret_access_key\" in config:\n result[\"secret_access_key\"] = config[\"secret_access_key\"]\n\n if \"sse_customer_key\" in config:\n result[\"extra_args\"][\"SSECustomerKey\"] = config[\"sse_customer_key\"]\n result[\"extra_args\"][\"SSECustomerAlgorithm\"] = config.get(\n \"sse_customer_algo\", \"AES256\"\n )\n\n return result",
"def extra(self) -> Dict[str, Any]:\n extra = self.extras.copy()\n if isinstance(self.author, str):\n extra['Author'] = self.author\n if isinstance(self.email, str):\n extra['Email'] = self.email\n if isinstance(self.description, str):\n extra['Description'] = self.description\n return extra",
"def _get_params(additional_properties: Optional[List[str]], with_vector: bool) -> dict:\n\n params = {}\n if additional_properties:\n if not isinstance(additional_properties, list):\n raise TypeError(\"Additional properties must be of type list \"\n f\"but are {type(additional_properties)}\")\n params['include'] = \",\".join(additional_properties)\n\n if with_vector:\n if 'include' in params:\n params['include'] = params['include'] + ',vector'\n else:\n params['include'] = 'vector'\n return params",
"def get_request_extra_params(self, **kwargs):\n params = self.request_extra_params.copy()\n params.update(kwargs)\n return params",
"def _unpack(self, json_value):\r\n json_d = json.loads(json_value)\r\n if type(json_d) != dict:\r\n json_d = {}\r\n\r\n comment_value = json_d.get('comment', '')\r\n if not isinstance(json_d, basestring):\r\n comment_value = ''\r\n\r\n options_value = json_d.get('options', [])\r\n if not isinstance(options_value, list):\r\n options_value = []\r\n\r\n return {\r\n 'options_value': options_value,\r\n 'comment_value': comment_value\r\n }",
"def _parse_kwargs(self):\n re_kwargs = r'^[\\w_][\\w\\d_]*=.+$'\n kwargs = [a.split('=') for a in self.args if re.findall(re_kwargs, a)]\n self.kwargs = {k: self._load_json(v) for k, v in kwargs}\n self.args = [a for a in self.args if not re.findall(re_kwargs, a)]",
"def _dict_with_extra_specs(inst_type_query):\n inst_type_dict = dict(inst_type_query)\n extra_specs = dict([(x['key'], x['value'])\n for x in inst_type_query['extra_specs']])\n inst_type_dict['extra_specs'] = extra_specs\n return inst_type_dict",
"def _get_kwargs_for_backend(self):\n return dict()",
"def get_connection_params(self, session, **kwargs):\n return {}",
"def get_config_dict(external_config):\n config = {}\n for _, arg_dict in external_config.items():\n for arg, _ in arg_dict.items():\n config[arg] = getattr(FLAGS, arg)\n return config",
"def _get_conn_params(self) -> dict[str, Any]:\n conn = self.get_connection(self.slack_conn_id)\n if not conn.password:\n raise AirflowNotFoundException(\n f\"Connection ID {self.slack_conn_id!r} does not contain password (Slack API Token).\"\n )\n conn_params: dict[str, Any] = {\"token\": conn.password, \"retry_handlers\": self.retry_handlers}\n extra_config = ConnectionExtraConfig(\n conn_type=self.conn_type, conn_id=conn.conn_id, extra=conn.extra_dejson\n )\n # Merge Hook parameters with Connection config\n conn_params.update(\n {\n \"timeout\": self.timeout or extra_config.getint(\"timeout\", default=None),\n \"base_url\": self.base_url or extra_config.get(\"base_url\", default=None),\n \"proxy\": self.proxy or extra_config.get(\"proxy\", default=None),\n }\n )\n # Add additional client args\n conn_params.update(self.extra_client_args)\n return {k: v for k, v in conn_params.items() if v is not None}",
"def _cmd_params_to_dict(params):\n return {t[0]: t[1] for t in params}",
"def prepare_config(config: dict) -> dict:\n config.setdefault('password', None)\n config.setdefault('private_key', None)\n config.setdefault('private_key_pass', None)\n config.setdefault('to', None)\n\n return config",
"def generate_command_args_with_additional_fields(additional_fields):\n command_args: Dict[str, str] = {}\n actual_additional_fields: Dict[str, str] = {}\n for each_field in additional_fields:\n if each_field in DEFAULT_ARGS:\n command_args[each_field] = additional_fields[each_field]\n else:\n actual_additional_fields[each_field] = additional_fields[each_field]\n command_args[\"additional_fields\"] = remove_null_fields_and_convert_additional_fields_in_string(\n actual_additional_fields)\n return command_args",
"def _create_extra_environment(self):\n return {}",
"def parameters_config(self) -> dict:\n if not self.params_optional and not self.params_required:\n return {}\n parameters = dict()\n for parameter, parameter_details in self.parameters.items():\n parameters[parameter_details.name] = parameter_details.parameter_config()\n return parameters",
"def _unpack(self, json_value):\r\n d = json.loads(json_value)\r\n if type(d) != dict:\r\n d = {}\r\n\r\n comment_value = d.get('comment', '')\r\n if not isinstance(comment_value, basestring):\r\n comment_value = ''\r\n\r\n options_value = d.get('options', [])\r\n if not isinstance(options_value, list):\r\n options_value = []\r\n\r\n return {\r\n 'options_value': options_value,\r\n 'has_options_value': len(options_value) > 0, # for convenience\r\n 'comment_value': comment_value,\r\n }",
"def make_extra(self):\n extra = {}\n extra['log_lines'] = self.get_raw_log_lines()\n return extra",
"def config(self) -> dict:\n\n params = self.get_params()\n _config = dict()\n _config['func'] = self.serialize_func(self.func)\n _config['inverse_func'] = self.inverse_func_ser\n _config['kw_args'] = jsonize(self.kw_args)\n _config['inv_kw_args'] = jsonize(self.inv_kw_args)\n\n for k, v in params.items():\n if k not in _config:\n _config.update({k: v})\n\n return _config",
"def _parse_params( self ):\n paramDic={}\n # Parameters are on the 3rd arg passed to the script\n paramStr=sys.argv[2]\n print paramStr\n if len(paramStr)>1:\n paramStr = paramStr.replace('?','')\n \n # Ignore last char if it is a '/'\n if (paramStr[len(paramStr)-1]=='/'):\n paramStr=paramStr[0:len(paramStr)-2]\n \n # Processing each parameter splited on '&' \n for param in paramStr.split(\"&\"):\n try:\n # Spliting couple key/value\n key,value=param.split(\"=\")\n except:\n key=param\n value=\"\"\n \n key = urllib.unquote_plus(key)\n value = urllib.unquote_plus(value)\n \n # Filling dictionnary\n paramDic[key]=value\n print paramDic\n return paramDic",
"def parse_mysql_url(mysql_url):\n\n params = dj_database_url.parse(mysql_url)\n\n conn_kwargs = {}\n conn_kwargs['host'] = params['HOST']\n conn_kwargs['user'] = params['USER']\n conn_kwargs['passwd'] = params['PASSWORD']\n conn_kwargs['db'] = params['NAME']\n conn_kwargs['port'] = params['PORT']\n\n # Remove items with empty values\n conn_kwargs = dict((k, v) for k, v in conn_kwargs.iteritems() if v)\n\n return conn_kwargs",
"def _preprocess_config(self, config: Dict[str, Any]) -> Dict[str, Any]:\n return cast_config_values(\n {k: v for k, v in config.items() if k in self._hyperparameter_keys},\n config_space=self.config_space,\n )",
"def prepare_config(config: dict) -> dict:\n config.setdefault('password', None)\n config.setdefault('private_key', None)\n config.setdefault('private_key_pass', None)\n config.setdefault('host_key', None)\n config.setdefault('dirs', ['.'])\n\n return config",
"def _arg_parse(self, **options) -> Dict[str, Any]:\n extra_options = dict()\n for key, value in options.items():\n private_key = f\"__{key}\"\n if hasattr(self, private_key):\n setattr(self, private_key, value)\n else:\n extra_options[key] = value\n\n return extra_options",
"def parse_mysql_url(mysql_url):\n params = dj_database_url.parse(mysql_url)\n\n connection_kwargs = {}\n connection_kwargs['host'] = params['HOST']\n connection_kwargs['user'] = params['USER']\n connection_kwargs['passwd'] = params['PASSWORD']\n connection_kwargs['database'] = params['NAME']\n # connection_kwargs['port'] = params['PORT']\n\n connection_kwargs = dict(\n (k, v)\n for k, v in connection_kwargs.items()\n if v)\n\n return connection_kwargs",
"def _get_step_config() -> Dict[str, Any]:\n config_str = os.environ['MAHIRU_STEP_CONFIG']\n return json.loads(config_str) # type: ignore"
] | [
"0.6527355",
"0.5482184",
"0.5454081",
"0.541346",
"0.53157496",
"0.5295096",
"0.5290023",
"0.52707696",
"0.5267996",
"0.52658087",
"0.52495235",
"0.52490944",
"0.52272344",
"0.52263105",
"0.5201891",
"0.5200079",
"0.51494527",
"0.51418424",
"0.5115259",
"0.506082",
"0.50599104",
"0.50031686",
"0.49797255",
"0.4977508",
"0.49772334",
"0.49524486",
"0.49488708",
"0.49477753",
"0.49323526",
"0.49280918"
] | 0.8805929 | 0 |
Helper function to transform the permission-specific values from the API into the appropriate Python constructs needed by the permission library. | def __create_permission_args(perm_enum, extra_params_string, session):
perm = permissions.permission_from_api_enum(perm_enum)
params = ThriftAuthHandler.__unpack_extra_params(extra_params_string,
session)
return perm, params | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_extract_permission_docstring(self):\n for permission, expected_dict in [\n (\n PermissionA & PermissionB,\n {\n \"PermissionA\": \"Permission A.\",\n \"PermissionB\": \"Permission B.\",\n },\n ),\n (\n PermissionA | PermissionB,\n {\n \"PermissionA\": \"Permission A.\",\n \"PermissionB\": \"Permission B.\",\n },\n ),\n (\n ~PermissionA,\n {\n \"PermissionA\": \"Permission A.\",\n },\n ),\n (\n PermissionA,\n {\n \"PermissionA\": \"Permission A.\",\n },\n ),\n (\n (PermissionA & PermissionB) | ~PermissionA,\n {\n \"PermissionA\": \"Permission A.\",\n \"PermissionB\": \"Permission B.\",\n },\n ),\n (\n (PermissionA & PermissionB) | ~PermissionC,\n {\n \"PermissionA\": \"Permission A.\",\n \"PermissionB\": \"Permission B.\",\n \"PermissionC\": \"Permission C.\",\n },\n ),\n ]:\n with self.subTest(permission=permission):\n self.assertEqual(\n # mimic `get_permissions` by calling permission\n extract_permission_docstring(permission()),\n expected_dict,\n )",
"def _perms_from_short(value):\n value = value or \"\"\n if len(value) > 2:\n return aslist(value)\n perms = []\n if \"r\" in value:\n perms.append(\"read\")\n if \"w\" in value:\n perms.append(\"write\")\n return perms",
"def _transform(self, resource_from_api):\n for iam_policy_map in resource_from_api:\n iam_policy = iam_policy_map['iam_policy']\n bindings = iam_policy.get('bindings', [])\n for binding in bindings:\n members = binding.get('members', [])\n for member in members:\n member_type, member_name, member_domain = (\n parser.parse_member_info(member))\n role = binding.get('role', '')\n if role.startswith('roles/'):\n role = role.replace('roles/', '')\n yield {\n 'project_number': iam_policy_map['project_number'],\n 'role': role,\n 'member_type': member_type,\n 'member_name': member_name,\n 'member_domain': member_domain}",
"def permissions():\n pass",
"def get_permissions(self, principal_id):",
"def _perms_from_short(value):\n value = value or ''\n perms = []\n if 'r' in value:\n perms.append('read')\n if 'w' in value:\n perms.append('write')\n return perms",
"def manage_getPermissionMapping(self):\n wrapper = getattr(self, '_permissionMapper', None)\n if wrapper is None:\n wrapper = PM()\n\n perms = {}\n for p in self.possible_permissions():\n perms[getPermissionIdentifier(p)] = p\n\n r = []\n a = r.append\n for ac_perms in self.ac_inherited_permissions(1):\n p = perms.get(getPermissionMapping(ac_perms[0], wrapper), '')\n a({'permission_name': ac_perms[0], 'class_permission': p})\n return r",
"def decompress(self, value):\n # is single value passed, convert to tuple, else raise error if not anticipated iterable\n if isinstance(value, str):\n value = (value,)\n elif not isinstance(value, (list, tuple, QuerySet)):\n raise TypeError()\n\n exportable_fields = self.get_all_exportable_fields()\n permission_keys = set()\n\n for val in value:\n permissions = json.loads(val)\n # when val is retrieved from queryset, load needs to be done twice for some reason unknown to me\n if isinstance(permissions, str):\n permissions = json.loads(permissions)\n\n # due to previous bug this case is handled instead of creating migration\n if permissions is None:\n permissions={}\n # if two loads where not enough there gotta be something wrong\n elif not isinstance(permissions, dict):\n raise TypeError()\n\n for exporter_key, permitted_fields in permissions.items():\n for field in permitted_fields:\n permission_keys.add('/'.join([exporter_key, field]))\n\n # set group and exporter fields\n app, model, format = exporter_key.split('.')\n exporter_fields_count = 0\n group_index = -1\n\n for group, group_fields in exportable_fields[app][model][format].items():\n exporter_fields_count += len(group_fields)\n group_index += 1\n group_permitted_count = 0\n\n for field in group_fields:\n if field[0] not in permitted_fields:\n break\n group_permitted_count += 1\n\n # if all fields within a group are permitted add group_key too (to initial values)\n if group_permitted_count == len(group_fields):\n permission_keys.add('/'.join([exporter_key, 'group', str(group_index)]))\n\n # if all fields of exporter are permitted add exporter_key too (to initial values)\n if len(permitted_fields) == exporter_fields_count:\n permission_keys.add(exporter_key)\n\n return permission_keys",
"def permissionsDefinitions(self):\n\n ### for the records:\n ### this method contains lots of generation logic. in fact this\n ### should move over to the WorkflowGenerator.py and reduce here in\n ### just deliver the pure data\n ### the parser should really just parse to be as independent as possible\n\n # permissions_mapping (abbreviations for lazy guys)\n # keys are case insensitive\n\n # STATE_PERMISSION_MAPPING in TaggedValueSupport.py now\n # contains the handy mappings from 'access' to 'Access contents\n # information' and so.\n\n state = self.state\n tagged_values = state.getTaggedValues()\n permission_definitions = []\n\n for tag_name, tag_value in tagged_values.items():\n # list of tagged values that are NOT permissions\n if tag_name in self.non_permissions:\n # short check if its registered, registry complains in log.\n tgvRegistry.isRegistered(tag_name, state.classcategory,\n silent=True)\n continue\n tag_name = tag_name.strip()\n\n # look up abbreviations if any\n permission = STATE_PERMISSION_MAPPING.get(tag_name.lower(),\n tag_name or '')\n\n if not tag_value:\n log.debug(\"Empty tag value, treating it as a reset \"\n \"for acquisition, so acquisition=0.\")\n permission_definitions.append({'permission' : permission,\n 'roles' : [],\n 'acquisition' : 0})\n continue\n\n # split roles-string into list\n raw_roles = tag_value.replace(';', ',')\n roles = [str(r.strip()) for r in raw_roles.split(',') if r.strip()]\n\n # verify if this permission is acquired\n nv = 'acquire'\n acquisition = 0\n if nv in roles:\n acquisition = 1\n roles.remove(nv)\n\n permission = utils.processExpression(permission, asString=False)\n permission_definitions.append(\n {'permission' : permission,\n 'roles' : roles,\n 'acquisition' : acquisition}\n )\n\n # If View was defined but Access was not defined, the Access\n # permission should be generated with the same rights defined\n # for View\n\n has_access = 0\n has_view = 0\n view = {}\n for permission_definition in permission_definitions:\n if (permission_definition.get('permission', None) ==\n STATE_PERMISSION_MAPPING['access']):\n has_access = 1\n if (permission_definition.get('permission', None) ==\n STATE_PERMISSION_MAPPING['view']):\n view = permission_definition\n has_view = 1\n if has_view and not has_access:\n permission = STATE_PERMISSION_MAPPING['access']\n permission_definitions.append({'permission': permission,\n 'roles': view['roles'],\n 'acquisition': view['acquisition']})\n return permission_definitions",
"def test__parse_allow(input_data):\n output = parse_allow(input_data)\n vampytest.assert_instance(output, Permission)\n return output",
"def test_get_permissions(self):\n pass",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def get_permissions_meta_data(board_uuid):\n if not is_uuid(board_uuid):\n raise InvalidUsage(messages=[_(\"Not found\")], status_code=404)\n non_empty_query_required(Permission, board_uuid=str(board_uuid))\n\n data = dict()\n data[\"roles\"] = Permission.Role.dict_choices()\n return data",
"def build_permissions(self):\n\t\tself.build_doctype_map()\n\t\tself.build_perm_map()\n\t\tuser_shared = frappe.share.get_shared_doctypes()\n\t\tno_list_view_link = []\n\t\tactive_modules = get_active_modules() or []\n\t\tfor dt in self.doctype_map:\n\t\t\tdtp = self.doctype_map[dt]\n\n\t\t\tp = self.perm_map.get(dt, {})\n\n\t\t\tif not p.get(\"read\") and (dt in user_shared):\n\t\t\t\tp[\"read\"] = 1\n\n\t\t\tif p.get(\"select\"):\n\t\t\t\tself.can_select.append(dt)\n\n\t\t\tif not dtp.get(\"istable\"):\n\t\t\t\tif p.get(\"create\") and not dtp.get(\"issingle\"):\n\t\t\t\t\tif dtp.get(\"in_create\"):\n\t\t\t\t\t\tself.in_create.append(dt)\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.can_create.append(dt)\n\t\t\t\telif p.get(\"write\"):\n\t\t\t\t\tself.can_write.append(dt)\n\t\t\t\telif p.get(\"read\"):\n\t\t\t\t\tif dtp.get(\"read_only\"):\n\t\t\t\t\t\t# read_only = \"User Cannot Search\"\n\t\t\t\t\t\tself.all_read.append(dt)\n\t\t\t\t\t\tno_list_view_link.append(dt)\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.can_read.append(dt)\n\n\t\t\tif p.get(\"cancel\"):\n\t\t\t\tself.can_cancel.append(dt)\n\n\t\t\tif p.get(\"delete\"):\n\t\t\t\tself.can_delete.append(dt)\n\n\t\t\tif p.get(\"read\") or p.get(\"write\") or p.get(\"create\"):\n\t\t\t\tif p.get(\"report\"):\n\t\t\t\t\tself.can_get_report.append(dt)\n\t\t\t\tfor key in (\"import\", \"export\", \"print\", \"email\"):\n\t\t\t\t\tif p.get(key):\n\t\t\t\t\t\tgetattr(self, \"can_\" + key).append(dt)\n\n\t\t\t\tif not dtp.get(\"istable\"):\n\t\t\t\t\tif not dtp.get(\"issingle\") and not dtp.get(\"read_only\"):\n\t\t\t\t\t\tself.can_search.append(dt)\n\t\t\t\t\tif dtp.get(\"module\") not in self.allow_modules:\n\t\t\t\t\t\tif active_modules and dtp.get(\"module\") not in active_modules:\n\t\t\t\t\t\t\tpass\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tself.allow_modules.append(dtp.get(\"module\"))\n\n\t\tself.can_write += self.can_create\n\t\tself.can_write += self.in_create\n\t\tself.can_read += self.can_write\n\n\t\tself.shared = frappe.get_all(\n\t\t\t\"DocShare\", {\"user\": self.name, \"read\": 1}, distinct=True, pluck=\"share_doctype\"\n\t\t)\n\t\tself.can_read = list(set(self.can_read + self.shared))\n\t\tself.all_read += self.can_read\n\n\t\tfor dt in no_list_view_link:\n\t\t\tif dt in self.can_read:\n\t\t\t\tself.can_read.remove(dt)\n\n\t\tif \"System Manager\" in self.get_roles():\n\t\t\tself.can_import += frappe.get_all(\"DocType\", {\"allow_import\": 1}, pluck=\"name\")\n\t\t\tself.can_import += frappe.get_all(\n\t\t\t\t\"Property Setter\",\n\t\t\t\tpluck=\"doc_type\",\n\t\t\t\tfilters={\"property\": \"allow_import\", \"value\": \"1\"},\n\t\t\t)\n\n\t\tfrappe.cache.hset(\"can_import\", frappe.session.user, self.can_import)",
"def transform_fs_access_output(result):\n\n new_result = {}\n useful_keys = ['acl', 'group', 'owner', 'permissions']\n for key in useful_keys:\n new_result[key] = result[key]\n return new_result",
"def OptionalPermissions(self) -> _n_6_t_0:",
"def from_string(cls, permission):\n p_read = 'r' in permission\n p_add = 'a' in permission\n p_update = 'u' in permission\n p_process = 'p' in permission\n\n parsed = cls(p_read, p_add, p_update, p_process)\n\n return parsed",
"def octopus_permissions_get(self, msg, args):\r\n return self.permissions.get_permissions()",
"def get_permissions_map(self, created):\n company = get_object_or_404(models.Company, pk=self.data['company'])\n admins = company.admins\n accountants = company.accountants\n return {\n 'view_purchase': [admins, accountants],\n 'change_purchase': [admins, accountants],\n 'delete_purchase': [admins, accountants]\n }",
"def _scrub_auth_info(param_info, auth_param_name):\n info = param_info.copy()\n info[auth_param_name] = {key: '*' * len(str(value))\n for key, value in info[auth_param_name].items()}\n\n return info",
"def _get_api_role(pulp_version):\n api_role = {}\n\n # Get \"scheme\"\n api_role['scheme'] = click.prompt(\n \"What scheme should be used when communicating with Pulp's API?\",\n default='https',\n type=click.Choice(('https', 'http')),\n )\n\n # Get \"verify\"\n if (api_role['scheme'] == 'https' and\n click.confirm('Verify HTTPS?', default=True)):\n certificate_path = click.prompt(\n 'SSL certificate path',\n default='',\n type=click.Path(),\n )\n api_role['verify'] = certificate_path if certificate_path else True\n else:\n api_role['verify'] = False\n\n # Get \"port\"\n click.echo(\n \"By default, Pulp Smash will communicate with Pulp's API on the port \"\n \"number implied by the scheme. For example, if Pulp's API is \"\n 'available over HTTPS, then Pulp Smash will communicate on port 443. '\n \"If Pulp's API is available on a non-standard port, like 8000, then \"\n 'Pulp Smash needs to know about that.'\n )\n port = click.prompt('Pulp API port number', default=0, type=click.INT)\n if port:\n api_role['port'] = port\n\n # Get \"service\"\n api_role['service'] = click.prompt(\n \"What web server service backs Pulp's API?\",\n default='httpd' if pulp_version < Version('3') else 'nginx',\n type=click.Choice(('httpd', 'nginx'))\n )\n\n return api_role",
"def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .android_required_password_type import AndroidRequiredPasswordType\n from .device_compliance_policy import DeviceCompliancePolicy\n from .device_threat_protection_level import DeviceThreatProtectionLevel\n\n from .android_required_password_type import AndroidRequiredPasswordType\n from .device_compliance_policy import DeviceCompliancePolicy\n from .device_threat_protection_level import DeviceThreatProtectionLevel\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"deviceThreatProtectionEnabled\": lambda n : setattr(self, 'device_threat_protection_enabled', n.get_bool_value()),\n \"deviceThreatProtectionRequiredSecurityLevel\": lambda n : setattr(self, 'device_threat_protection_required_security_level', n.get_enum_value(DeviceThreatProtectionLevel)),\n \"minAndroidSecurityPatchLevel\": lambda n : setattr(self, 'min_android_security_patch_level', n.get_str_value()),\n \"osMaximumVersion\": lambda n : setattr(self, 'os_maximum_version', n.get_str_value()),\n \"osMinimumVersion\": lambda n : setattr(self, 'os_minimum_version', n.get_str_value()),\n \"passwordExpirationDays\": lambda n : setattr(self, 'password_expiration_days', n.get_int_value()),\n \"passwordMinimumLength\": lambda n : setattr(self, 'password_minimum_length', n.get_int_value()),\n \"passwordMinutesOfInactivityBeforeLock\": lambda n : setattr(self, 'password_minutes_of_inactivity_before_lock', n.get_int_value()),\n \"passwordPreviousPasswordBlockCount\": lambda n : setattr(self, 'password_previous_password_block_count', n.get_int_value()),\n \"passwordRequired\": lambda n : setattr(self, 'password_required', n.get_bool_value()),\n \"passwordRequiredType\": lambda n : setattr(self, 'password_required_type', n.get_enum_value(AndroidRequiredPasswordType)),\n \"securityBlockJailbrokenDevices\": lambda n : setattr(self, 'security_block_jailbroken_devices', n.get_bool_value()),\n \"securityDisableUsbDebugging\": lambda n : setattr(self, 'security_disable_usb_debugging', n.get_bool_value()),\n \"securityPreventInstallAppsFromUnknownSources\": lambda n : setattr(self, 'security_prevent_install_apps_from_unknown_sources', n.get_bool_value()),\n \"securityRequireCompanyPortalAppIntegrity\": lambda n : setattr(self, 'security_require_company_portal_app_integrity', n.get_bool_value()),\n \"securityRequireGooglePlayServices\": lambda n : setattr(self, 'security_require_google_play_services', n.get_bool_value()),\n \"securityRequireSafetyNetAttestationBasicIntegrity\": lambda n : setattr(self, 'security_require_safety_net_attestation_basic_integrity', n.get_bool_value()),\n \"securityRequireSafetyNetAttestationCertifiedDevice\": lambda n : setattr(self, 'security_require_safety_net_attestation_certified_device', n.get_bool_value()),\n \"securityRequireUpToDateSecurityProviders\": lambda n : setattr(self, 'security_require_up_to_date_security_providers', n.get_bool_value()),\n \"securityRequireVerifyApps\": lambda n : setattr(self, 'security_require_verify_apps', n.get_bool_value()),\n \"storageRequireEncryption\": lambda n : setattr(self, 'storage_require_encryption', n.get_bool_value()),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields",
"def RequestedPermissions(self) -> _n_6_t_0:",
"def test_clean_permission(self):\n for permission, expected_string in [\n (\n PermissionA & PermissionB,\n \" **(** PermissionA **AND** PermissionB **)** \",\n ),\n (\n PermissionA | PermissionB,\n \" **(** PermissionA **OR** PermissionB **)** \",\n ),\n (\n ~PermissionA,\n \" **(NOT** PermissionA **)** \",\n ),\n (\n PermissionA,\n \"PermissionA\",\n ),\n (\n (PermissionA & PermissionB) | ~PermissionC,\n (\n \" **(** **(** PermissionA **AND** PermissionB **)** \"\n \"**OR** **(NOT** PermissionC **)** **)** \"\n ),\n ),\n ]:\n with self.subTest(permission=permission):\n self.assertEqual(\n # mimic `get_permissions` by calling permission\n clean_permission(permission()),\n expected_string,\n )",
"def __init__(self, minfo=None):\n\n if minfo is None:\n minfo = {}\n self.verb = minfo.get('verb', 'reg')\n self.whitelist_name = minfo.get('whitelist_name', '')\n self.permissioned_public_keys = minfo.get('permissioned_public_keys',\n NullIdentifier)\n self.permissioned_addrs = minfo.get('permissioned_addrs',\n NullIdentifier)",
"def get_permissions_map(self, created):\n company = get_object_or_404(models.Company, pk=self.data['company'])\n admins = company.admins\n accountants = company.accountants\n return {\n 'view_sale': [admins, accountants],\n 'change_sale': [admins, accountants],\n 'delete_sale': [admins, accountants]\n }",
"def _get_permission(self, obj_type, path, username):\n if obj_type == Collection:\n# XXX - in iRODS < 4.2, CollectionUser.name isn't supported.\n# query = self.session.query(Collection, CollectionAccess).filter(\n# CollectionUser.name == username, Collection.name == path)\n# result = [self.perm_str_mapping[row[CollectionAccess.name]] for row in query\n query = self.session.query(User.id).filter(User.name == username)\n for row in query:\n id = row[User.id]\n query = self.session.query(Collection, CollectionAccess).filter(\n CollectionAccess.user_id == id, Collection.name == path)\n result = [self.perm_str_mapping[row[CollectionAccess.name]] for row in query]\n### XXX - ^^^\n return result\n if obj_type == DataObject:\n conditions = [\n Collection.name == dirname(path),\n DataObject.name == basename(path),\n User.name == username\n ]\n query = self.session.query(DataObject.name, DataAccess.name) \\\n .filter(*conditions).all()\n result = [self.perm_str_mapping[row[DataAccess.name]] for row in query]\n return result\n self._fail(\"Unsupported Object type\")\n return None",
"def get_permissions_map(self, created):\n company = get_object_or_404(models.Company, pk=self.data['company'])\n admins = company.admins\n accountants = company.accountants\n return {\n 'view_media': [admins, accountants],\n 'change_media': [admins, accountants],\n 'delete_media': [admins, accountants]\n }",
"def test_format_permissions_and_docstring(self):\n self.assertEqual(\n format_permissions_and_docstring(\n [\"permission formatted string\"],\n {\"some\": \"docstring\"},\n ),\n (\n \"## Permissions\\n\\n\"\n \"permission formatted string\\n\"\n \"### Permission description\\n\\n\"\n \"- **some** : docstring\"\n ),\n )\n\n self.assertEqual(\n format_permissions_and_docstring(\n [\"permission formatted string\", \"another permission\"],\n {\"some\": \"docstring\", \"another\": \"docstring\"},\n ),\n (\n \"## Permissions\\n\\n\"\n \"- permission formatted string\\n\"\n \"- another permission\\n\"\n \"### Permission description\\n\\n\"\n \"- **some** : docstring\\n\"\n \"- **another** : docstring\"\n ),\n )",
"def get_assign_permission(userid, group):"
] | [
"0.56616086",
"0.5626562",
"0.5614697",
"0.54989165",
"0.54741514",
"0.5468934",
"0.5415927",
"0.5410423",
"0.53547746",
"0.5350134",
"0.53181773",
"0.5297058",
"0.529078",
"0.5254452",
"0.52494055",
"0.523184",
"0.5169583",
"0.51539904",
"0.51349455",
"0.50904536",
"0.5045614",
"0.50451666",
"0.5033335",
"0.5023498",
"0.50228506",
"0.5022232",
"0.50205284",
"0.5009359",
"0.49978614",
"0.4979943"
] | 0.57189345 | 0 |
Returns all the defined permissions in the given permission scope. | def getPermissions(self, scope):
return [permissions.api_enum_for_permission(p)
for p in permissions.get_permissions(scope)] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_permissions(self):\n # type: () -> List[Permission]\n headers = Headers({\"accept\": \"application/json\"})\n return self.connection.api_call(\n \"GET\", [\"resources\", self.id, \"permissions\"], model=Permission, headers=headers,\n )",
"def get_permissions(self):\n permissions = [IsAuthenticated]\n return [permission() for permission in permissions]",
"def permissions(self):\n return [DSSWorkspacePermissionItem(permission) for permission in self.settings['permissions']]",
"async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]",
"def permissions(self) -> 'outputs.PermissionsResponse':\n return pulumi.get(self, \"permissions\")",
"def get_permissions(self):\n if not hasattr(self, '_permissions'):\n self._permissions = self.permissions.all()\n return self._permissions",
"def get_permissions(self):\n return [permission() for permission in self.permission_classes]",
"def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n elif self.action in ['retrieve']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [AllowAny]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def get_permissions(self):\n permissions = [IsAdminUser]\n return [permission() for permission in permissions]",
"def get_permissions(self):\n if self.action == 'destroy' or self.action == 'partial_update':\n permission_classes = [\n permissions.IsOwner,\n IsAuthenticated,\n ]\n else:\n permission_classes = [\n permissions.IsAdminOrReadOnly,\n IsAuthenticated,\n ]\n return [permission() for permission in permission_classes]",
"def getPermissionsForUser(self, scope, extra_params, perm_filter):\n\n if perm_filter is None or not any(perm_filter.__dict__.values()):\n # If no filtering is needed, this function behaves identically\n # to getPermissions().\n return self.getPermissions(scope)\n\n with DBSession(self.__config_db) as session:\n # The database connection must always be passed to the permission\n # handler.\n params = ThriftAuthHandler.__unpack_extra_params(extra_params,\n session)\n\n perms = []\n for perm in permissions.get_permissions(scope):\n should_return = True\n handler = make_handler(perm, params)\n\n if should_return and perm_filter.given:\n should_return = handler.has_permission(self.__auth_session)\n\n if should_return and perm_filter.canManage:\n # If the user has any of the permissions that are\n # authorised to manage the currently iterated permission,\n # the filter passes.\n should_return = require_manager(\n perm, params, self.__auth_session)\n\n if should_return:\n perms.append(perm)\n\n return [permissions.api_enum_for_permission(p)\n for p in perms]",
"def get_permissions(self):\n if self.action in ['signup', 'login', 'verify']:\n permissions = [AllowAny]\n elif self.action in ['retrieve', 'update', 'partial_update', 'destroy', 'u', 'p']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n return [p() for p in permissions]",
"def fusion_api_list_permission_scopes(self, api=None, headers=None, resource_uri='', sessionID=None):\n param = '/association-scopes%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def get_all_permissions(self, obj=None):",
"def permissions(self):\n return list(self._permissions)",
"def get_permissions(self):\n if self.action in ['list', 'retrieve']:\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def get_group_permissions (self):\n return [] # likewise with the other permission defs",
"def get_group_permissions (self):\n return [] # likewise with the other permission defs",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def collect_all_perms(cls):\n permissions = filter(lambda perm: perm.startswith('biom_perm') or perm.startswith('entity_perm'), dir(cls))\n\n result = [{\n 'perm_name': perm,\n 'description': getattr(cls, perm).__doc__,\n 'perm_type': getattr(cls, perm).action_type if hasattr(getattr(cls, perm), 'action_type') else None,\n 'default_value': getattr(cls, perm).default_value if hasattr(getattr(cls, perm), 'default_value') else None,\n\n } for perm in permissions]\n return result",
"def get_permissions(self):\n if self.action in [\"update\", \"partial_update\", \"destroy\"]:\n permission_classes = [IsAdminOrOwner]\n else:\n permission_classes = [IsAuthenticated]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n return [permission() for permission in permissions]",
"def all_perms(self, id, **kwargs):\r\n p = self.db.auth_permission\r\n if self.all_permissions:\r\n ret = self.sql(\r\n (p.record_id == id) & (p.table_name == self.table._tablename) & p.name.belongs(self.all_permissions),\r\n p.name, p.group_id,\r\n orderby=p.group_id)\r\n else:\r\n ret = []\r\n current.response.text = ret\r\n return ret",
"def get_all_permissions(self, obj=None):\n return self.get_group_permissions(obj)",
"def get_all_permissions(self):\n\t\turl = f'{self.root.url}/api/v1/sessions/permissions'\n\t\treturn self.root.r('GET', url, body=None, headers=None, verify=self.root.verify)",
"def get_permissions(self):\n if self.action in ['list', 'create']:\n permission_classes = [IsStaffOrReadOnly]\n else:\n permission_classes = [IsAuthorOrReadOnly, IsStaffOrReadOnly]\n return [permission() for permission in permission_classes]",
"def permissions(self):\n return self.get_permissions()",
"def get_permissions(self):\n # Condition to check the action level and set desired permission_class\n if self.action == 'create':\n permission_classes = [AllowAny]\n else:\n permission_classes = [IsAuthenticated]\n \n # Finally return the all the permissions\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [AdminPermission.__or__(ReviewerPermission)]\n elif self.action == 'retrieve':\n permission_classes = [\n AdminPermission.__or__(\n ReviewerPermission.__or__(UserPermission)\n )\n ]\n elif self.action in ['update', 'partial_update']:\n permission_classes = [AdminPermission.__or__(UserPermission)]\n else:\n permission_classes = [AdminPermission]\n return [permission() for permission in permission_classes]"
] | [
"0.6820318",
"0.6814613",
"0.68080956",
"0.6793074",
"0.6764647",
"0.66984344",
"0.66721714",
"0.6651246",
"0.66414154",
"0.66171235",
"0.6599955",
"0.6577237",
"0.6561327",
"0.6560519",
"0.6550772",
"0.65363073",
"0.65345067",
"0.65273196",
"0.65273196",
"0.65095675",
"0.64869636",
"0.6479609",
"0.6477319",
"0.6471811",
"0.6470482",
"0.6466994",
"0.6464491",
"0.64526504",
"0.6442788",
"0.6440624"
] | 0.8260817 | 0 |
Returns the permissions in the given permission scope and with the given scope-specific extra_params for the current logged-in user, based on the permission filters. Filters in the perm_filter struct are joined in an AND clause. | def getPermissionsForUser(self, scope, extra_params, perm_filter):
if perm_filter is None or not any(perm_filter.__dict__.values()):
# If no filtering is needed, this function behaves identically
# to getPermissions().
return self.getPermissions(scope)
with DBSession(self.__config_db) as session:
# The database connection must always be passed to the permission
# handler.
params = ThriftAuthHandler.__unpack_extra_params(extra_params,
session)
perms = []
for perm in permissions.get_permissions(scope):
should_return = True
handler = make_handler(perm, params)
if should_return and perm_filter.given:
should_return = handler.has_permission(self.__auth_session)
if should_return and perm_filter.canManage:
# If the user has any of the permissions that are
# authorised to manage the currently iterated permission,
# the filter passes.
should_return = require_manager(
perm, params, self.__auth_session)
if should_return:
perms.append(perm)
return [permissions.api_enum_for_permission(p)
for p in perms] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]",
"def _check_conditions_permissions(user, permissions, course_id, **kwargs):\r\n\r\n def test(user, per, operator=\"or\"):\r\n if isinstance(per, basestring):\r\n if per in CONDITIONS:\r\n return _check_condition(user, per, course_id, kwargs)\r\n return cached_has_permission(user, per, course_id=course_id)\r\n elif isinstance(per, list) and operator in [\"and\", \"or\"]:\r\n results = [test(user, x, operator=\"and\") for x in per]\r\n if operator == \"or\":\r\n return True in results\r\n elif operator == \"and\":\r\n return not False in results\r\n return test(user, permissions, operator=\"or\")",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def QueryTestablePermissions(self, request, global_params=None):\n config = self.GetMethodConfig('QueryTestablePermissions')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def __create_permission_args(perm_enum, extra_params_string, session):\n\n perm = permissions.permission_from_api_enum(perm_enum)\n params = ThriftAuthHandler.__unpack_extra_params(extra_params_string,\n session)\n return perm, params",
"def get_available_scopes(self, application=None, request=None, *args, **kwargs):\n app_scopes = list(\n ProtectedCapability.objects.filter(Q(default=True) | Q(application=application)).values_list('slug', flat=True))\n return app_scopes",
"def resource_permissions(self, resource_type, params, username, group):\n # create session for ConfigDB\n session = self.config_models.session()\n\n # query permitted resources\n permissions = self.resource_permission_handler.permissions(\n resource_type, params, username, group, session\n )\n\n # close session\n session.close()\n\n return {\n 'permissions': permissions\n }",
"def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n elif self.action in ['retrieve']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [AllowAny]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['signup', 'login', 'verify']:\n permissions = [AllowAny]\n elif self.action in ['retrieve', 'update', 'partial_update', 'destroy', 'u', 'p']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n return [p() for p in permissions]",
"def get_permitted_projects(self, permissions):\n user_permission_query = Q()\n group_permission_query = Q()\n for permission in permissions:\n user_permission_query = user_permission_query & Q(\n userpermission__permissions__key=permission\n )\n group_permission_query = group_permission_query & Q(\n grouppermission__permissions__key=permission\n )\n\n user_query = Q(userpermission__user=self) & (\n user_permission_query | Q(userpermission__admin=True)\n )\n group_query = Q(grouppermission__group__users=self) & (\n group_permission_query | Q(grouppermission__admin=True)\n )\n organisation_query = Q(\n organisation__userorganisation__user=self,\n organisation__userorganisation__role=OrganisationRole.ADMIN.name,\n )\n\n query = user_query | group_query | organisation_query\n\n return Project.objects.filter(query).distinct()",
"def get_permitted_environments(self, permissions):\n user_permission_query = Q()\n group_permission_query = Q()\n for permission in permissions:\n user_permission_query = user_permission_query & Q(\n userpermission__permissions__key=permission\n )\n group_permission_query = group_permission_query & Q(\n grouppermission__permissions__key=permission\n )\n\n user_query = Q(userpermission__user=self) & (\n user_permission_query | Q(userpermission__admin=True)\n )\n group_query = Q(grouppermission__group__users=self) & (\n group_permission_query | Q(grouppermission__admin=True)\n )\n organisation_query = Q(\n project__organisation__userorganisation__user=self,\n project__organisation__userorganisation__role=OrganisationRole.ADMIN.name,\n )\n project_admin_query = Q(\n project__userpermission__user=self, project__userpermission__admin=True\n ) | Q(\n project__grouppermission__group__users=self,\n project__grouppermission__admin=True,\n )\n\n query = user_query | group_query | organisation_query | project_admin_query\n\n return Environment.objects.filter(query).distinct()",
"def getPermissions(self, scope):\n\n return [permissions.api_enum_for_permission(p)\n for p in permissions.get_permissions(scope)]",
"def intersect_permissions(self, users_qs=None, groups_qs=None):\n eligible_user_pks = itertools.chain([self.user.pk],\n self.users_allowed.values_list(\"pk\", flat=True))\n\n users_qs = users_qs if users_qs is not None else User.objects.all()\n groups_qs = groups_qs if groups_qs is not None else Group.objects.all()\n\n # If the Everyone group has access to this object, then we don't filter.\n if not self.groups_allowed.filter(pk=groups.EVERYONE_PK).exists():\n users_qs = users_qs.filter(pk__in=eligible_user_pks)\n groups_qs = groups_qs.filter(\n pk__in=self.groups_allowed.values_list(\"pk\", flat=True)\n )\n\n return users_qs, groups_qs",
"def get_permissions(self, principal_id):",
"def filter_granted(self, queryset):\n granted_containers = Container.filter_by_user(self.request.user)\n\n return queryset.filter(app__container_id__in=granted_containers)",
"def has_necessary_permissions(perm_json, required_perms, all_required=True):\n\n # Make list if not required_perms is string\n if isinstance(required_perms, str) or isinstance(required_perms, unicode):\n list_perms = [required_perms]\n else:\n list_perms = required_perms\n\n # Loop and check presence\n is_permitted = True\n for perm_key in list_perms:\n is_present = lookup_permission(perm_json, perm_key)\n\n if all_required:\n # All required: AND operation\n is_permitted = is_permitted and is_present\n if not is_permitted:\n break\n else:\n # Atleast one required: OR operation\n is_permitted = is_permitted or is_present\n\n\n return is_permitted",
"def get_permissions(self):\n\n permissions = [\n IsAuthenticated(),\n IsCircleActiveMember(),\n ]\n\n if self.action in ['update', 'partial_update', 'finish']:\n permissions.append(\n IsRideOwner()\n )\n\n if self.action in ['join', 'qualify']:\n permissions.append(\n IsNotRideOwner()\n )\n\n return permissions",
"def oauth2_check_permissions(self, request, required_permissions,\n additional_permissions=None,\n fql_check=True, force_check=True):\n has_permissions = False\n\n req_perms = set(required_permissions.split(','))\n\n if 'oauth2_extended_permissions' in request.session:\n cached_perms = request.session['oauth2_extended_permissions']\n\n # so now, fb_sig_ext_perms seems to contain the right perms (!)\n\n if not force_check and cached_perms and req_perms.issubset(cached_perms):\n # Note that this has the potential to be out of date!\n has_permissions = True\n elif fql_check:\n # TODO allow option to use preload FQL for this?\n perms_query = required_permissions\n \n # Note that we can query additional permissions that we\n # don't require. This can be useful for optional\n # functionality (or simply for better caching)\n if additional_permissions:\n perms_query += ',' + additional_permissions\n \n perms_results = self.fql.query('select %s from permissions where uid=%s'\n % (perms_query, self.uid))[0]\n actual_perms = set()\n for permission, allowed in perms_results.items():\n if allowed == 1:\n actual_perms.add(permission)\n request.session['oauth2_extended_permissions'] = actual_perms\n has_permissions = req_perms.issubset(actual_perms)\n\n return has_permissions",
"def get_permissions(self, exclude=None):\n\n exclude = exclude or []\n exclude.extend(['id', 'name', 'description'])\n\n perms = {}\n groups = self.secondary_groups.all()\n groups.append(self.primary_group)\n for group in groups:\n for c in group.__table__.columns:\n # try if the permission already exists in the dictionary\n # and if the permission is true, set it to True\n try:\n if not perms[c.name] and getattr(group, c.name):\n perms[c.name] = True\n\n # if the permission doesn't exist in the dictionary\n # add it to the dictionary\n except KeyError:\n # if the permission is in the exclude list,\n # skip to the next permission\n if c.name in exclude:\n continue\n perms[c.name] = getattr(group, c.name)\n return perms",
"def aws_permissions(self, perms):\n for perm in perms:\n group = perm.get(\"Group\")\n if group:\n self.allowed_groups.append(group)\n\n user = perm.get(\"UserId\")\n if user:\n self.allowed_users.append(user)",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [AdminPermission.__or__(ReviewerPermission)]\n elif self.action == 'retrieve':\n permission_classes = [\n AdminPermission.__or__(\n ReviewerPermission.__or__(UserPermission)\n )\n ]\n elif self.action in ['update', 'partial_update']:\n permission_classes = [AdminPermission.__or__(UserPermission)]\n else:\n permission_classes = [AdminPermission]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [AdminPermission.__or__(ReviewerPermission)]\n elif self.action == 'retrieve':\n permission_classes = [\n AdminPermission.__or__(\n ReviewerPermission.__or__(UserPermission)\n )\n ]\n elif self.action in ['update', 'partial_update']:\n permission_classes = [AdminPermission.__or__(UserPermission)]\n else:\n permission_classes = [AdminPermission]\n return [permission() for permission in permission_classes]",
"def can_read_list(self, auth_params: List[str]) -> Dict[str, bool]:\n perms = self._get_workspace_permissions(auth_params)\n ret_perms = dict()\n for p in auth_params:\n ret_perms[p] = self._has_read_perm(perms.get(p, WorkspacePermission.NONE))\n return ret_perms",
"def build_permissions(self):\n\t\tself.build_doctype_map()\n\t\tself.build_perm_map()\n\t\tuser_shared = frappe.share.get_shared_doctypes()\n\t\tno_list_view_link = []\n\t\tactive_modules = get_active_modules() or []\n\t\tfor dt in self.doctype_map:\n\t\t\tdtp = self.doctype_map[dt]\n\n\t\t\tp = self.perm_map.get(dt, {})\n\n\t\t\tif not p.get(\"read\") and (dt in user_shared):\n\t\t\t\tp[\"read\"] = 1\n\n\t\t\tif p.get(\"select\"):\n\t\t\t\tself.can_select.append(dt)\n\n\t\t\tif not dtp.get(\"istable\"):\n\t\t\t\tif p.get(\"create\") and not dtp.get(\"issingle\"):\n\t\t\t\t\tif dtp.get(\"in_create\"):\n\t\t\t\t\t\tself.in_create.append(dt)\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.can_create.append(dt)\n\t\t\t\telif p.get(\"write\"):\n\t\t\t\t\tself.can_write.append(dt)\n\t\t\t\telif p.get(\"read\"):\n\t\t\t\t\tif dtp.get(\"read_only\"):\n\t\t\t\t\t\t# read_only = \"User Cannot Search\"\n\t\t\t\t\t\tself.all_read.append(dt)\n\t\t\t\t\t\tno_list_view_link.append(dt)\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.can_read.append(dt)\n\n\t\t\tif p.get(\"cancel\"):\n\t\t\t\tself.can_cancel.append(dt)\n\n\t\t\tif p.get(\"delete\"):\n\t\t\t\tself.can_delete.append(dt)\n\n\t\t\tif p.get(\"read\") or p.get(\"write\") or p.get(\"create\"):\n\t\t\t\tif p.get(\"report\"):\n\t\t\t\t\tself.can_get_report.append(dt)\n\t\t\t\tfor key in (\"import\", \"export\", \"print\", \"email\"):\n\t\t\t\t\tif p.get(key):\n\t\t\t\t\t\tgetattr(self, \"can_\" + key).append(dt)\n\n\t\t\t\tif not dtp.get(\"istable\"):\n\t\t\t\t\tif not dtp.get(\"issingle\") and not dtp.get(\"read_only\"):\n\t\t\t\t\t\tself.can_search.append(dt)\n\t\t\t\t\tif dtp.get(\"module\") not in self.allow_modules:\n\t\t\t\t\t\tif active_modules and dtp.get(\"module\") not in active_modules:\n\t\t\t\t\t\t\tpass\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tself.allow_modules.append(dtp.get(\"module\"))\n\n\t\tself.can_write += self.can_create\n\t\tself.can_write += self.in_create\n\t\tself.can_read += self.can_write\n\n\t\tself.shared = frappe.get_all(\n\t\t\t\"DocShare\", {\"user\": self.name, \"read\": 1}, distinct=True, pluck=\"share_doctype\"\n\t\t)\n\t\tself.can_read = list(set(self.can_read + self.shared))\n\t\tself.all_read += self.can_read\n\n\t\tfor dt in no_list_view_link:\n\t\t\tif dt in self.can_read:\n\t\t\t\tself.can_read.remove(dt)\n\n\t\tif \"System Manager\" in self.get_roles():\n\t\t\tself.can_import += frappe.get_all(\"DocType\", {\"allow_import\": 1}, pluck=\"name\")\n\t\t\tself.can_import += frappe.get_all(\n\t\t\t\t\"Property Setter\",\n\t\t\t\tpluck=\"doc_type\",\n\t\t\t\tfilters={\"property\": \"allow_import\", \"value\": \"1\"},\n\t\t\t)\n\n\t\tfrappe.cache.hset(\"can_import\", frappe.session.user, self.can_import)",
"def filter_granted(self, queryset):\n granted_runs = ContainerRun.filter_by_user(self.request.user)\n\n return queryset.filter(run_id__in=granted_runs)",
"def permissionContexts(\n self,\n tokenData: TokenData,\n roleAccessPermissionData: List[RoleAccessPermissionData],\n resultFrom: int = 0,\n resultSize: int = 100,\n order: List[dict] = None,\n ) -> dict:",
"def filter_granted(self, queryset):\n granted_containers = Container.filter_by_user(self.request.user)\n\n return queryset.filter(container_id__in=granted_containers)",
"def resource_restrictions(self, resource_type, params, username, group):\n # create session for ConfigDB\n session = self.config_models.session()\n\n # query restricted resources\n restrictions = self.resource_permission_handler.restrictions(\n resource_type, params, username, group, session\n )\n\n # close session\n session.close()\n\n return {\n 'restrictions': restrictions\n }",
"def get_permissions(self):\n if self.action in [\"update\", \"partial_update\", \"destroy\"]:\n permission_classes = [IsAdminOrOwner]\n else:\n permission_classes = [IsAuthenticated]\n return [permission() for permission in permission_classes]",
"def my_perms(self, ids, **kwargs):\r\n auth = self.app.auth\r\n # checking all objects\r\n p = self.db.auth_permission\r\n if type(ids) in (list, tuple, set):\r\n _ids = type(ids)((0,)) + ids\r\n else:\r\n _ids = [0, ids]\r\n grouped = self.db(p.record_id.belongs(_ids) & p.group_id.belongs(auth.user_groups.keys()) & (\r\n p.table_name == self.table._tablename)).select(p.name, p.record_id).group_by_value('record_id')\r\n take_names = itemgetter('name')\r\n base_permissions = set(imap(take_names, grouped.get(0, set())))\r\n ret = dict(PERMISSIONS={self.name: [\r\n dict((id, set(imap(take_names, grouped.get(id, []))).union(base_permissions)) for id in map(int, ids))]})\r\n current.response.text = ret\r\n return ret"
] | [
"0.5772158",
"0.56378037",
"0.5527674",
"0.55151045",
"0.5405103",
"0.523535",
"0.5208555",
"0.5207722",
"0.5189785",
"0.5178944",
"0.5175606",
"0.5154353",
"0.51384",
"0.5122797",
"0.5094861",
"0.506578",
"0.5061828",
"0.50566465",
"0.50273377",
"0.5019476",
"0.50016457",
"0.50016457",
"0.49889776",
"0.49872518",
"0.4985775",
"0.4960211",
"0.4958071",
"0.49524623",
"0.4949891",
"0.49447906"
] | 0.72128654 | 0 |
Adds the given permission to the user or group named auth_name. | def addPermission(self, permission, auth_name, is_group, extra_params):
with DBSession(self.__config_db) as session:
perm, params = ThriftAuthHandler.__create_permission_args(
permission, extra_params, session)
if not require_manager(perm, params, self.__auth_session):
raise codechecker_api_shared.ttypes.RequestFailed(
codechecker_api_shared.ttypes.ErrorCode.UNAUTHORIZED,
"You can not manage the permission '{0}'"
.format(perm.name))
handler = make_handler(perm, params)
handler.add_permission(auth_name.strip(),
is_group,
user_name=self.getLoggedInUser())
session.commit()
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def addUserPermission(self, name, _type):\n self._client.addUserPermission(name, _type)",
"def add_permission(self, permission: str):\n setattr(self.scopes, permission, True)\n self.save(update_fields=[\"scopes\"])",
"def add_user_grant(self, permission, user_id):\r\n acl = self.get_acl()\r\n acl.add_user_grant(permission, user_id)\r\n self.set_acl(acl)",
"def add_user_grant(self, permission, user_id):\n acl = self.get_acl()\n acl.add_user_grant(permission, user_id)\n self.set_acl(acl)",
"def add_permission(self, perm):\n if not self.has_permission(perm):\n self.permissions += perm",
"def add_permission(self, permission):\n self._permissions.add(permission)",
"def addPermission(self, permission=None, permName=None, kvDict=None):\n return _modelActionBase(self, instance=permission, instanceName=permName, kvDict=kvDict,\n model=get_model('perm'), db=db, action='add', modelType='permission')",
"def addPermission(self, permission=None, permName=None, kvDict=None):\n return _modelActionBase(self, instance=permission, instanceName=permName, kvDict=kvDict,\n model=get_model('perm'), db=db, action='add', modelType='permission')",
"def add_permisstion(self, perm_name):\n try:\n perm_set = self.permissions[perm_name]\n except KeyError:\n self.permissions[perm_name] = set()\n else:\n raise PermissionError(\"Permission Exists\")",
"def permit_user(self, perm_name, user):\n try:\n perm_set = self.permissions[perm_name]\n except KeyError:\n raise PermissionError(\"Permission does not Exists\")\n else:\n if user.username not in self.authenticator.users:\n raise UsernameNotFoundError\n perm_set.add(user.username)\n if 'add' and 'property' in perm_name:\n user.can_add_property = True",
"def add_user_grant(self, permission, user_id, headers=None,\r\n display_name=None):\r\n policy = self.get_acl()\r\n policy.acl.add_user_grant(permission, user_id,\r\n display_name=display_name)\r\n self.set_acl(policy, headers=headers)",
"def apply_perm(permission_name: Optional[str], entity: UserOrGroup):\n try:\n permission = Permission.from_name(permission_name or \"none\")\n except KeyError:\n raise exceptions.ParseError(f\"Unknown permission: {permission_name}\")\n\n obj.set_permission(permission, entity)",
"async def permissions_add(\n self,\n ctx,\n type_: str.lower,\n name: str,\n *,\n user_or_role: Union[Role, utils.User, str],\n ):\n\n if type_ not in {\"command\", \"level\"}:\n return await ctx.send_help(ctx.command)\n\n command = level = None\n if type_ == \"command\":\n name = name.lower()\n command = self.bot.get_command(name)\n check = command is not None\n else:\n level = self._parse_level(name)\n check = level is not PermissionLevel.INVALID\n\n if not check:\n embed = Embed(\n title=\"Error\",\n color=Color.red(),\n description=f\"The referenced {type_} does not exist: `{name}`.\",\n )\n return await ctx.send(embed=embed)\n\n value = self._verify_user_or_role(user_or_role)\n if type_ == \"command\":\n name = command.qualified_name\n await self.bot.update_perms(name, value)\n else:\n await self.bot.update_perms(level, value)\n name = level.name\n if level > PermissionLevel.REGULAR:\n if value == -1:\n key = self.bot.modmail_guild.default_role\n elif isinstance(user_or_role, Role):\n key = user_or_role\n else:\n key = self.bot.modmail_guild.get_member(value)\n if key is not None:\n logger.info(\"Granting %s access to Modmail category.\", key.name)\n await self.bot.main_category.set_permissions(\n key, read_messages=True\n )\n\n embed = Embed(\n title=\"Success\",\n color=self.bot.main_color,\n description=f\"Permission for `{name}` was successfully updated.\",\n )\n return await ctx.send(embed=embed)",
"def add_group_grant(self, permission, group_id):\r\n acl = self.get_acl()\r\n acl.add_group_grant(permission, group_id)\r\n self.set_acl(acl)",
"def add_permission(self, label, aws_account_id, action_name):\r\n return self.connection.add_permission(self, label, aws_account_id, action_name)",
"def add_permission(self, queue, label, aws_account_id, action_name):\r\n params = {'Label': label,\r\n 'AWSAccountId' : aws_account_id,\r\n 'ActionName' : action_name}\r\n return self.get_status('AddPermission', params, queue.id)",
"def add_permission(self, identity_id, permission):\n # type: (str, str) -> Union[bool, Permission]\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n return self.connection.api_call(\n \"PUT\",\n [\"permissions\", self.id, identity_id, permission],\n model=Permission,\n headers=headers,\n )",
"def __add_permission_to_group(self, group: Group) -> None:\n for permission_codename in main_app_groups[group.name]:\n permission = Permission.objects.get(codename=permission_codename)\n group.permissions.add(permission)",
"def add_user_grant(self, permission, user_id, recursive=False, headers=None):\r\n if permission not in GSPermissions:\r\n raise self.connection.provider.storage_permissions_error(\r\n 'Unknown Permission: %s' % permission)\r\n acl = self.get_acl(headers=headers)\r\n acl.add_user_grant(permission, user_id)\r\n self.set_acl(acl, headers=headers)\r\n if recursive:\r\n for key in self:\r\n key.add_user_grant(permission, user_id, headers=headers)",
"def grant_permission(self, extra_permission=None):\n permission_list = self.get_permission()\n if extra_permission is not None:\n permission_list += (\n extra_permission\n if isinstance(extra_permission, (tuple, list))\n else [extra_permission]\n )\n for perm in self.get_permission():\n obj = self.get_permission_object()\n assign_perm(perm, self.user, obj)",
"def add_permission(cls, perm, user):\n\n # Ensure we do not create duplicate users\n try:\n user_perm = cls.objects.get(user_id=user.id)\n new_user = False\n except cls.DoesNotExist:\n user_perm = cls(user_id=user.id, permission_list=str(perm))\n new_user = True\n\n if new_user:\n user_perm.save(force_insert=True)\n else:\n # Make sure the usr does not already have the given permission\n existing_perms = cls.get_permissions(user)\n if perm not in existing_perms:\n existing_perms.append(perm)\n\n user_perm.permission_list = \",\".join(existing_perms)\n user_perm.save(force_update=True)",
"def grant_permission(self, role, permission):\n return permissions.utils.grant_permission(self, role, permission)",
"def _add(self, name, permissions):\n data = {\"name\": name, \"permissions\": permissions}\n path = self.router.roles\n return self.request(method=\"put\", path=path, json=data)",
"def add_permission(user, model, permission_codename):\n content_type = ContentType.objects.get_for_model(model)\n try:\n permission = Permission.objects.get(codename=permission_codename,\n content_type=content_type)\n except Permission.DoesNotExist:\n permission = Permission.objects.create(codename=permission_codename,\n name=permission_codename,\n content_type=content_type)\n user.user_permissions.add(permission)",
"def add_user_grant(self, permission, user_id, recursive=False,\r\n headers=None, display_name=None):\r\n if permission not in S3Permissions:\r\n raise self.connection.provider.storage_permissions_error(\r\n 'Unknown Permission: %s' % permission)\r\n policy = self.get_acl(headers=headers)\r\n policy.acl.add_user_grant(permission, user_id,\r\n display_name=display_name)\r\n self.set_acl(policy, headers=headers)\r\n if recursive:\r\n for key in self:\r\n key.add_user_grant(permission, user_id, headers=headers,\r\n display_name=display_name)",
"def add_access(self, access_group):\n\n if not self.has_auth_access(access_group):\n self.access_groups.append(access_group)",
"def add_group_email_grant(self, permission, email_address, headers=None):\r\n acl = self.get_acl(headers=headers)\r\n acl.add_group_email_grant(permission, email_address)\r\n self.set_acl(acl, headers=headers)",
"def add_email_grant(self, permission, email_address, headers=None):\r\n policy = self.get_acl(headers=headers)\r\n policy.acl.add_email_grant(permission, email_address)\r\n self.set_acl(policy, headers=headers)",
"def removePermission(self, permission, auth_name, is_group, extra_params):\n\n with DBSession(self.__config_db) as session:\n perm, params = ThriftAuthHandler.__create_permission_args(\n permission, extra_params, session)\n\n if not require_manager(perm, params, self.__auth_session):\n raise codechecker_api_shared.ttypes.RequestFailed(\n codechecker_api_shared.ttypes.ErrorCode.UNAUTHORIZED,\n \"You can not manage the permission '{0}'\"\n .format(perm.name))\n\n handler = make_handler(perm, params)\n handler.remove_permission(auth_name, is_group,\n user_name=self.getLoggedInUser())\n\n session.commit()\n return True",
"def add_email_grant(self, permission, email_address):\r\n acl = self.get_acl()\r\n acl.add_email_grant(permission, email_address)\r\n self.set_acl(acl)"
] | [
"0.6800436",
"0.6654492",
"0.66036886",
"0.65800774",
"0.65765387",
"0.6558139",
"0.6409958",
"0.6409958",
"0.6350122",
"0.6349882",
"0.6331041",
"0.6268157",
"0.6261521",
"0.6219297",
"0.6207565",
"0.6176919",
"0.61668557",
"0.6051228",
"0.6022761",
"0.60026187",
"0.5873656",
"0.57948285",
"0.5784955",
"0.5728844",
"0.5726524",
"0.5687489",
"0.5670841",
"0.5661127",
"0.5660812",
"0.56235284"
] | 0.7970993 | 0 |
Removes the given permission from the user or group auth_name. | def removePermission(self, permission, auth_name, is_group, extra_params):
with DBSession(self.__config_db) as session:
perm, params = ThriftAuthHandler.__create_permission_args(
permission, extra_params, session)
if not require_manager(perm, params, self.__auth_session):
raise codechecker_api_shared.ttypes.RequestFailed(
codechecker_api_shared.ttypes.ErrorCode.UNAUTHORIZED,
"You can not manage the permission '{0}'"
.format(perm.name))
handler = make_handler(perm, params)
handler.remove_permission(auth_name, is_group,
user_name=self.getLoggedInUser())
session.commit()
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_permission(self, permission):\n self._permissions.remove(permission)",
"def delPermission(self,request):\n request.needAuthType(request.ADMIN)\n request.checkArgs(\"admin_username\",\"perm_name\")\n request.getAuthNameObj().canDo(\"CHANGE ADMIN PERMISSIONS\")\n perm_actions.getActionManager().deletePermission(request[\"admin_username\"],request[\"perm_name\"])",
"def remove_permission(self, perm):\n if self.has_permission(perm):\n self.permissions -= perm",
"def remove_permission(self, label):\r\n return self.connection.remove_permission(self, label)",
"def removePermission(self, permission=None, permName=None, kvDict=None):\n return _modelActionBase(self, instance=permission, instanceName=permName, kvDict=kvDict,\n model=get_model('perm'), db=db, action='remove', modelType='permission')",
"def removePermission(self, permission=None, permName=None, kvDict=None):\n return _modelActionBase(self, instance=permission, instanceName=permName, kvDict=kvDict,\n model=get_model('perm'), db=db, action='remove', modelType='permission')",
"def remove_permission(self, role, permission):\n return permissions.utils.remove_permission(self, role, permission)",
"def deleteUserPermission(self, name, _type):\n self._client.deleteUserPermission(name, _type)",
"def remove_permission(self, queue, label):\r\n params = {'Label': label}\r\n return self.get_status('RemovePermission', params, queue.id)",
"def remove_permission(cls, perm, user):\n current_permissions = cls.get_permissions(user)\n\n # Verify the user has the given permission\n if perm in current_permissions:\n current_permissions.remove(perm)\n user_perm = cls.objects.get(user_id=user.id)\n\n user_perm.permission_list = \",\".join(current_permissions)\n user_perm.save(force_update=True)",
"def remove_permissions(apps, schema_editor):\n\n Permission = apps.get_model(\"auth\", \"Permission\")\n Group = apps.get_model(\"auth\", \"Group\")\n\n permission = Permission.objects.get(\n codename=\"can_approve_estimated_completion_date\",\n )\n\n admin_group = Group.objects.get(name=\"Administrator\")\n admin_group.permissions.remove(permission)\n permission.delete()\n\n print(\n 'Permission \"can_approve_estimated_completion_date\" removed from the \"Admin\" group.'\n )",
"def delete_permission(self, identity_id, permission):\n # type: (str, str) -> bool\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n return self.connection.api_call(\n \"DELETE\", [\"permissions\", self.id, identity_id, permission], headers=headers\n )",
"def remove_access(self, access_group):\n\n if self.has_auth_access(access_group):\n self.access_groups.remove(access_group)",
"def remove_perm(self, perm, user, obj, ctype=None):\n if getattr(obj, 'pk', None) is None:\n raise ObjectNotPersisted(\"Object %s needs to be persisted first\" % obj)\n\n if not ctype:\n ctype = ContentType.objects.get_for_model(obj)\n\n (self.filter(permission__codename=perm,\n user=user,\n object_pk=obj.pk,\n content_type=ctype)\n .delete())",
"def delPermissionValue(self,request):\n request.needAuthType(request.ADMIN)\n request.checkArgs(\"admin_username\",\"perm_name\",\"perm_value\")\n request.getAuthNameObj().canDo(\"CHANGE ADMIN PERMISSIONS\")\n perm_actions.getActionManager().deleteFromPermValues(request[\"admin_username\"],request[\"perm_name\"],\n request[\"perm_value\"])",
"def remove_permission(self, topic, label):\r\n params = {'ContentType' : 'JSON',\r\n 'TopicArn' : topic,\r\n 'Label' : label}\r\n response = self.make_request('RemovePermission', params, '/', 'GET')\r\n body = response.read()\r\n if response.status == 200:\r\n return json.loads(body)\r\n else:\r\n boto.log.error('%s %s' % (response.status, response.reason))\r\n boto.log.error('%s' % body)\r\n raise self.ResponseError(response.status, response.reason, body)",
"def remove_permission_from_role(self, role: Role, permission: Permission) -> None:\n if permission in role.permissions:\n try:\n role.permissions.remove(permission)\n self.get_session.merge(role)\n self.get_session.commit()\n log.info(const.LOGMSG_INF_SEC_DEL_PERMROLE.format(permission, role.name))\n except Exception as e:\n log.error(const.LOGMSG_ERR_SEC_DEL_PERMROLE.format(e))\n self.get_session.rollback()",
"async def permissions_remove(\n self,\n ctx,\n type_: str.lower,\n name: str,\n *,\n user_or_role: Union[Role, utils.User, str] = None,\n ):\n if type_ not in {\"command\", \"level\", \"override\"} or (\n type_ != \"override\" and user_or_role is None\n ):\n return await ctx.send_help(ctx.command)\n\n if type_ == \"override\":\n extension = ctx.kwargs[\"user_or_role\"]\n if extension is not None:\n name += f\" {extension}\"\n name = name.lower()\n name = getattr(self.bot.get_command(name), \"qualified_name\", name)\n level = self.bot.config[\"override_command_level\"].get(name)\n if level is None:\n perm = self.bot.command_perm(name)\n embed = Embed(\n title=\"Error\",\n color=Color.red(),\n description=f\"The command permission level was never overridden: `{name}`, \"\n f\"current permission level is {perm.name}.\",\n )\n else:\n logger.info(\"Restored command permission level for `%s`.\", name)\n self.bot.config[\"override_command_level\"].pop(name)\n await self.bot.config.update()\n perm = self.bot.command_perm(name)\n embed = Embed(\n title=\"Success\",\n color=self.bot.main_color,\n description=f\"Command permission level for `{name}` was successfully restored to {perm.name}.\",\n )\n return await ctx.send(embed=embed)\n\n level = None\n if type_ == \"command\":\n name = name.lower()\n name = getattr(self.bot.get_command(name), \"qualified_name\", name)\n else:\n level = self._parse_level(name)\n if level is PermissionLevel.INVALID:\n embed = Embed(\n title=\"Error\",\n color=Color.red(),\n description=f\"The referenced level does not exist: `{name}`.\",\n )\n return await ctx.send(embed=embed)\n name = level.name\n\n value = self._verify_user_or_role(user_or_role)\n await self.bot.update_perms(level or name, value, add=False)\n\n if type_ == \"level\":\n if level > PermissionLevel.REGULAR:\n if value == -1:\n logger.info(\"Denying @everyone access to Modmail category.\")\n await self.bot.main_category.set_permissions(\n self.bot.modmail_guild.default_role, read_messages=False\n )\n elif isinstance(user_or_role, Role):\n logger.info(\n \"Denying %s access to Modmail category.\", user_or_role.name\n )\n await self.bot.main_category.set_permissions(\n user_or_role, overwrite=None\n )\n else:\n member = self.bot.modmail_guild.get_member(value)\n if member is not None and member != self.bot.modmail_guild.me:\n logger.info(\n \"Denying %s access to Modmail category.\", member.name\n )\n await self.bot.main_category.set_permissions(\n member, overwrite=None\n )\n\n embed = Embed(\n title=\"Success\",\n color=self.bot.main_color,\n description=f\"Permission for `{name}` was successfully updated.\",\n )\n return await ctx.send(embed=embed)",
"def deletePermissionContext(\n self, obj: PermissionContext, tokenData: TokenData\n ) -> None:",
"def delete_permission(self, action_name: str, resource_name: str) -> None:\n if not (action_name and resource_name):\n return\n perm = self.get_permission(action_name, resource_name)\n if not perm:\n return\n roles = (\n self.get_session.query(self.role_model).filter(self.role_model.permissions.contains(perm)).first()\n )\n if roles:\n log.warning(const.LOGMSG_WAR_SEC_DEL_PERMVIEW.format(resource_name, action_name, roles))\n return\n try:\n # delete permission on resource\n self.get_session.delete(perm)\n self.get_session.commit()\n # if no more permission on permission view, delete permission\n if not self.get_session.query(self.permission_model).filter_by(action=perm.action).all():\n self.delete_action(perm.action.name)\n log.info(const.LOGMSG_INF_SEC_DEL_PERMVIEW.format(action_name, resource_name))\n except Exception as e:\n log.error(const.LOGMSG_ERR_SEC_DEL_PERMVIEW.format(e))\n self.get_session.rollback()",
"def remove_permission_from_bucket(bucket_name, role_type, member_type):\n\n # initialize client & get bucket\n _, bucket, _ = create_client(bucket_name)\n\n policy = bucket.get_iam_policy(requested_policy_version=3)\n \n # get member type\n member_value = get_member_bucket_level(member_type)\n\n # get role type\n role_value = get_role_bucket_level(role_type)\n\n for binding in policy.bindings:\n # print(binding)\n if binding[\"role\"] == role_value and binding.get(\"condition\") is None:\n # revoke role from member\n binding[\"members\"].discard(member_value)\n\n bucket.set_iam_policy(policy)\n\n print(\"removed {} with role {} from {}\".format(member_value, role_value, bucket_name))",
"def deleteUserAccess(self, name, read, write, catalog='*', repository='*'):\n self._client.deleteUserAccess(name, read, write, catalog, repository)",
"def delete_TestGroupResourcePermission(test_case, # type: AnyMagpieTestCaseType\n resource_info=null, # type: Optional[JSON]\n override_resource_id=null, # type: Optional[int]\n override_permission=null, # type: Optional[AnyPermissionType]\n override_group_name=null, # type: Optional[Str]\n override_headers=null, # type: Optional[HeadersType]\n override_cookies=null, # type: Optional[CookiesType]\n ignore_missing=True, # type: bool\n ): # type: (...) -> JSON\n result = TestSetup.update_TestAnyResourcePermission(\n test_case, \"group\", \"DELETE\", resource_info=resource_info,\n override_resource_id=override_resource_id, override_permission=override_permission,\n override_item_name=override_group_name, override_headers=override_headers, override_cookies=override_cookies\n )\n if not ignore_missing:\n check_val_equal(result[\"code\"], 200)\n return result",
"def octopus_permissions_clear(self, msg, args):\r\n return self.permissions.clear_permissions()",
"def test_permission_remove_one_action_ok(self):\n test_name = sys._getframe().f_code.co_name\n self._execute('permission remove anonymous TICKET_MODIFY')\n rv, output = self._execute('permission list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)",
"def remove_permission_from_blob(bucket_name, blob_name, role_type, member_type):\n\n # initialize client, get bucket, & get blob\n _, _, blob = create_client(bucket_name, blob_name)\n \n # get member type\n member = get_member_blob_level(member_type, blob)\n \n # revoke role from member\n revoke_role_blob_level(role_type, member)\n\n blob.acl.save()\n\n print(\n \"removed permission for {} to {} from blob {} in bucket {}\".format(\n member_type, role_type, blob_name, bucket_name\n )\n )",
"def __check_removed_permissions(self) -> None:\n for permission in Permission.objects.all():\n if not self.__is_permission_allowed_to_delete(permission):\n continue\n\n if self.__is_permission_in_groups(permission.codename):\n raise PermissionInUse(f'Permission {permission.codename} is used in groups. Delete it first.')\n\n permission.delete()\n\n self.stdout.write(f'Removed {permission.codename} permission')",
"def remove_access(acl, list_to_edit):\n post_key = '%s_remove_' % list_to_edit\n removal_keys = [k for k in request.POST.keys() if k.startswith(post_key)]\n for key in removal_keys:\n model_type = models.UserGroup\n if list_to_edit.startswith('user'):\n model_type = models.UserProfile\n key_id = int(key.replace(post_key, ''))\n datastore_object = model_type.get_by_id(key_id)\n acl.__getattribute__(list_to_edit).remove(datastore_object.key())",
"def delete_TestUserResourcePermission(test_case, # type: AnyMagpieTestCaseType\n resource_info=null, # type: Optional[JSON]\n override_resource_id=null, # type: Optional[int]\n override_permission=null, # type: Optional[AnyPermissionType]\n override_user_name=null, # type: Optional[Str]\n override_headers=null, # type: Optional[HeadersType]\n override_cookies=null, # type: Optional[CookiesType]\n ignore_missing=True, # type: bool\n ): # type: (...) -> JSON\n result = TestSetup.update_TestAnyResourcePermission(\n test_case, \"user\", \"DELETE\", resource_info=resource_info,\n override_resource_id=override_resource_id, override_permission=override_permission,\n override_item_name=override_user_name, override_headers=override_headers, override_cookies=override_cookies\n )\n if not ignore_missing:\n check_val_equal(result[\"code\"], 200)\n return result",
"def remove_from_group(user: User, group: Group) -> Result:\n if user.pw_name not in group.gr_mem:\n return Result(State.unchanged)\n command([\"/usr/sbin/deluser\", user.pw_name, group.gr_name])\n group.gr_mem.remove(user.pw_name)\n return Result(State.success)"
] | [
"0.73214144",
"0.7299543",
"0.72618645",
"0.71100914",
"0.71083",
"0.71083",
"0.7041201",
"0.7016033",
"0.67834526",
"0.67520237",
"0.66453856",
"0.6616732",
"0.6514527",
"0.6446896",
"0.63673264",
"0.6328633",
"0.6277535",
"0.6022995",
"0.59801275",
"0.5931496",
"0.5857478",
"0.5828733",
"0.58238965",
"0.5814595",
"0.58106875",
"0.58019125",
"0.57337815",
"0.5716989",
"0.56937987",
"0.56846553"
] | 0.83243895 | 0 |
Generate a new personal access token with the given description. | def newToken(self, description):
self.__require_privilaged_access()
with DBSession(self.__config_db) as session:
token = generate_session_token()
user = self.getLoggedInUser()
groups = ';'.join(self.__auth_session.groups)
session_token = Session(token, user, groups, description, False)
session.add(session_token)
session.commit()
LOG.info("New personal access token '%s...' has been generated "
"by '%s'.", token[:5], self.getLoggedInUser())
return SessionTokenData(token,
description,
str(session_token.last_access)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_token(email):\n access_token = create_access_token(email)\n return access_token",
"def auth_token_generate(identity_param_val, expires_delta=False):\n access_token = ''\n try:\n if expires_delta is not False:\n expires_delta = timedelta(minutes=expires_delta)\n access_token = create_access_token(identity=identity_param_val, expires_delta=expires_delta)\n except Exception as e:\n print(e)\n\n return access_token",
"def generate_new_token(self):\n self.access_token = random_auth_key()",
"def generate_access_token(self):\n return gen_api_key(length=self.token_length)",
"def build_access_token():\n return do_build_access_token(tenant_id='intility_tenant_id')",
"def test_create_o_auth_access_token(self):\n pass",
"def build_access_token_normal_user():\n return do_build_access_token(tenant_id='intility_tenant_id', admin=False)",
"def create_access_token(oauth):\n #create parameters for API authorization\n\tredirect_uri = 'oob'\n\tparams = {'client_secret': oauth.client_secret,\n\t\t\t 'redirect_uri': redirect_uri,\n\t\t\t 'response_type': 'code'}\n\t#store the access code\n\turl = oauth.get_authorize_url(**params)\n\n\t#open a web browser to get access token and then store it via manual input\n\twebbrowser.open(url)\n\tcode = input('Enter code: ')\n\t#create credentials item\n\tstart_time = time.time()\n\t#create dictionary to hold credentials and store beginning time\n\tcredentials = {'token_time': start_time}\n\n\t#NEED TO ADD IN 'REFRESH TOKEN' FUNCTION HERE SOMEWHERE\n\t#\n\t\n\t#create parameters\n\tdata = {'code': code,\n\t\t\t'redirect_uri': redirect_uri,\n\t\t\t'grant_type': 'authorization_code'}\n\t#build the headers\n\theaders = oauth_headers(oauth)\n\t#create the raw access token\n\traw_access = oauth.get_raw_access_token(data=data, headers=headers)\n\t#parse the raw access token and add to credentials variable\n\tcredentials.update(access_parse(raw_access))\n\n\t#parse access token from credentials\n\taccess_token = credentials['access_token']\n\t#return access token\n\treturn access_token",
"def generate_token(self):\n self.__get_auth_token_and_secret()\n return self.get_token()",
"def get_personal_access_token() -> str:\n return getpass.getpass(\"Enter SurveyMonkey API personal access token: \")",
"def create_access_token(identity: Union[str,int], type_token: str, fresh: Optional[bool] = False) -> bytes:\n return AuthJWT.create_token(\n identity=identity,\n type_token=type_token,\n fresh=fresh,\n exp_time=timedelta(minutes=AuthJWT._ACCESS_TOKEN_EXPIRES)\n )",
"def create_access_token(self):\n\t\t# Wraper for also caching invalid results\n #def getMetadataRofs(path):\n #\ttry:\n # \treturn self.client.metadata(path)\n # except Exception, e:\n # log.write('Exception at getMetadataRofs for path '+ path + '\\n')\n # pprint(e, log)\n # return False\n\n\t\ttry:\n\t\t\trequest_token = self.session.obtain_request_token()\n\t\t\turl = self.session.build_authorize_url(request_token)\n\t\t\tprint url\n\t\t\traw_input()\n\t\t\taccess_token = self.session.obtain_access_token(request_token)\n\t\t\tself.client = client.DropboxClient(self.session)\n\t\t\t\n\t\t\t# Build cache for metadata querying\n\n\t\t\t# Wraper for also caching invalid results\n\t\t\tdef getMetadataRofs(path):\n\t\t\t\ttry:\n\t\t\t\t\treturn self.client.metadata(path)\n\t\t\t\texcept Exception, e:\n\t\t\t\t\tlogger.error('Exception at getMetadataRofs for path '+ path + '\\n')\n\t\t logger.debug(sys.exc_info()[0])\n\t\t\t\t\treturn False\n\n\t\t\tself.cache_metadata = Cache(getMetadataRofs)\n\t\t\tself.cache_files = {}\n\n\t\texcept Exception, e:\n\t\t\tlogger.error('Exception %s at create_access_token' % (sys.exc_info()[0]))\n\t\t\tlogger.debug(pformat(sys.exc_info()))",
"def gen_access_token(secrets_dict, auth_code, callback_uri=default_callback_uri):\n if auth_code is None:\n raise ValueError(\"auth_code cannot be None!\")\n \n validate_client_secrets_dict(secrets_dict)\n client_id = secrets_dict[\"client_id\"] \n client_secret = secrets_dict[\"client_secret\"]\n\n data = {'grant_type': 'authorization_code', \n 'code': auth_code, \n 'redirect_uri': callback_uri}\n print(\"requesting access token\")\n #TODO: catch bad requests return values\n try:\n access_token_response = requests.post(token_url, \n data=data, \n verify=False, \n allow_redirects=False, \n auth=(client_id, client_secret))\n except Exception as ex:\n raise ex\n\n # add better error handling here\n tokens = json.loads(access_token_response.text)\n access_token = tokens['access_token']\n refresh_token = tokens['refresh_token']\n expires_in = tokens['expires_in']\n\n expires_at_datetime = calc_token_expiration_datetime(expires_in)\n\n secrets_dict['access_token'] = access_token\n secrets_dict['refresh_token'] = refresh_token\n secrets_dict['expires_at'] = datetime_to_str(expires_at_datetime)\n\n # reminder, dict's are mutable\n dict_to_json_file(secrets_dict, \"secrets.json\")",
"def create_auth_token():\n data = get_request_data(request)\n address = data.get(\"address\")\n expiration = int(data.get(\"expiration\"))\n\n pk = get_provider_private_key(use_universal_key=True)\n token = jwt.encode({\"exp\": expiration, \"address\": address}, pk, algorithm=\"HS256\")\n token = token.decode(\"utf-8\") if isinstance(token, bytes) else token\n\n valid, message = is_token_valid(token, address)\n if not valid:\n if message == \"Token is deleted.\":\n force_restore_token(token)\n else:\n return jsonify(error=message), 400\n\n return jsonify(token=token)",
"def create_token(self, consumer, token_type, timestamp, user=None):\n token, created = self.first_or_create(consumer=consumer, \n token_type=token_type, \n timestamp=timestamp,\n user=user)\n\n if created:\n token.key, token.secret = self.generate_random_codes()\n token.save()\n\n return token",
"def generateAuthToken(self):\n try:\n payload = {\n 'exp': datetime.utcnow() + timedelta(days=0, minutes=30),\n 'iat': datetime.utcnow(),\n 'sub': self.id\n }\n return jwt.encode(payload, current_app.config['SECRET_KEY'], algorithm='HS256').decode()\n except Exception as error:\n print(error)\n return error",
"def create_access_token(\n data: tp.Mapping[str, tp.Any],\n *,\n expires_delta: tp.Optional[timedelta] = None\n) -> str:\n to_encode = data.copy()\n expires_delta = expires_delta or DEFAULT_EXPIRES_DELTA\n expires = datetime.utcnow() + expires_delta\n to_encode.update({\"exp\": expires, \"sub\": ACCESS_TOKEN_SUBJECT})\n return jwt.encode(\n to_encode,\n config.SECRET_KEY,\n algorithm=ALGORITHM,\n json_encoder=JSONEncoderUUID\n )",
"def generate_token(exp=None):\n\n secret_key = getenv('JWT_SECRET_KEY')\n user = {\n 'first_name': fake.name(),\n 'last_name': fake.name(),\n 'email': fake.email(),\n 'is_admin': IsAdmin.yes,\n 'password': fake.password()\n }\n\n payload = {'id': str(User.find_or_create(user, email=user['email']).id)}\n payload.__setitem__('exp', exp) if exp is not None else ''\n token = jwt.encode(payload, secret_key, algorithm='HS256').decode(CHARSET)\n return 'Bearer {0}'.format(token)",
"def create_token(self, consumer, token_type, timestamp, scope,\n user=None, callback=None, callback_confirmed=False):\n token = self.create(consumer=consumer, \n token_type=token_type, \n timestamp=timestamp,\n scope=scope,\n user=user,\n callback=callback,\n callback_confirmed=callback_confirmed,\n key=uuid.uuid4().hex,\n secret=get_random_string(length=SECRET_SIZE))\n\n return token",
"def createAccessTokenReplacement(self):\r\n\r\n url = self._config['OAUTH2ENDPOINT']['huddleAuthServer'] + \"request?response_type=code\" + \\\r\n \"&client_id=\" + self._config['OAUTH2']['clientID'] + \\\r\n \"&redirect_uri=\" + self._config['OAUTH2']['redirectUri']\r\n webbrowser.open_new(url)\r\n code = input('Please enter the code from your web browser:')\r\n\r\n response = self._oauth.obtainAccessTokenBy3LeggedOAuth(code)\r\n responseBody = json.loads(response['Body'])\r\n\r\n try:\r\n oauthToken = Token(responseBody)\r\n except TypeError as e:\r\n print (\"Bad response when requesting a token \" + str(response))\r\n sys.exit()\r\n\r\n return oauthToken",
"def create_token(user, title, expiration=_default_expiration_duration_opt):\n if expiration == _default_expiration_duration_opt:\n duration = _default_expiration_duration()\n expiration = duration + datetime.now() if duration else None\n\n token_code = random_string_generator(TOKEN_NAME_PREFIX_LENGTH + MINIMUM_TOKEN_SUFFIX_LENGTH)()\n token_name = token_code[:TOKEN_NAME_PREFIX_LENGTH]\n token_secret = token_code[TOKEN_NAME_PREFIX_LENGTH:]\n\n assert token_name\n assert token_secret\n\n return AppSpecificAuthToken.create(\n user=user,\n title=title,\n expiration=expiration,\n token_name=token_name,\n token_secret=DecryptedValue(token_secret),\n )",
"def generate_token(dictionary: dict, expiration: datetime.timedelta):\n\n dictionary['expiration'] = (datetime.datetime.utcnow() + expiration).timestamp()\n\n return jwt.encode(dictionary, current_app.config['TOKEN_SECRET_KEY'], algorithm='HS256')",
"def do_build_access_token(tenant_id=None, aud=None, expired=False, evil=False, admin=True):\n\n issued_at = int(time.time())\n expires = issued_at - 1 if expired else issued_at + 3600\n claims = {\n 'aud': aud or 'api://oauth299-9999-9999-abcd-efghijkl1234567890',\n 'iss': 'https://sts.windows.net/intility_tenant_id/',\n 'iat': issued_at,\n 'exp': expires,\n 'nbf': issued_at,\n 'acr': '1',\n 'aio': 'hello',\n 'amr': ['pwd'],\n 'roles': ['AdminUser' if admin else 'NormalUser'],\n 'appid': '11111111-1111-1111-1111-111111111111',\n 'appidacr': '0',\n 'family_name': 'Krüger Svensson',\n 'given_name': 'Jonas',\n 'in_corp': 'true',\n 'ipaddr': '192.168.0.0',\n 'name': 'Jonas Krüger Svensson / Intility AS',\n 'oid': '22222222-2222-2222-2222-222222222222',\n 'onprem_sid': 'S-1-2-34-5678901234-5678901234-456789012-34567',\n 'rh': '0.hellomylittletokenfriendwhatsupwi-thyoutodayheheiho.',\n 'scp': 'user_impersonation',\n 'sub': '5ZGASZqgF1taj9GlxDHOpeIJjWlyZJwD3mnZBoz9XVc',\n 'tid': tenant_id,\n 'unique_name': 'jonas',\n 'upn': 'jonas@cool',\n 'uti': 'abcdefghijkl-mnopqrstu',\n 'ver': '1.0',\n }\n signing_key = signing_key_a if evil else signing_key_b\n return jwt.encode(\n claims,\n signing_key.private_bytes(\n crypto_serialization.Encoding.PEM,\n crypto_serialization.PrivateFormat.PKCS8,\n crypto_serialization.NoEncryption(),\n ),\n algorithm='RS256',\n headers={'kid': 'real thumbprint', 'x5t': 'another thumbprint'},\n )",
"def generate_jwt_token(private_pem: bytes, app_id: int) -> str:\n private_key = jwcrypto.jwk.JWK.from_pem(private_pem)\n payload = {\"iss\": app_id}\n duration = datetime.timedelta(minutes=10)\n return python_jwt.generate_jwt(payload, private_key, \"RS256\", duration)",
"def obtain_access_token(request, consumer_id, consumer_secret, code,\n redirect_uri):\n # NOTE(garcianavalon) right now this method has no use because is a wrapper for a\n # method intented to be use by the client/consumer. For the IdM is much more \n # convenient to simply forward the request, see forward_access_token_request method\n LOG.debug('Exchanging code: {0} by application: {1}'.format(code, consumer_id))\n manager = internal_keystoneclient(request).oauth2.access_tokens\n access_token = manager.create(consumer_id=consumer_id,\n consumer_secret=consumer_secret,\n authorization_code=code,\n redirect_uri=redirect_uri)\n return access_token",
"def generate_token(payload: Any, secret: str | List[str]) -> str:\n return url_encode_full_stops(URLSafeTimedSerializer(secret).dumps(payload, \"token\"))",
"def build_evil_access_token():\n return do_build_access_token(tenant_id='intility_tenant_id', evil=True)",
"def get_access_token(self, minutes: int = 1440) -> str:\n return crypt.encode_token({\n 'uuid': str(self.pk),\n 'space_id': str(self.space_id),\n }, timedelta(minutes=minutes))",
"def create_token(identity: int, type_token: str, exp_time: timedelta, fresh: Optional[bool] = False) -> bytes:\n if type_token not in ['access','refresh']:\n raise ValueError(\"Type token must be between access or refresh\")\n\n payload = {\n \"iat\": AuthJWT.get_int_from_datetime(datetime.now(timezone.utc)),\n \"nbf\": AuthJWT.get_int_from_datetime(datetime.now(timezone.utc)),\n \"jti\": AuthJWT.get_jwt_id(),\n \"exp\": AuthJWT.get_int_from_datetime(datetime.now(timezone.utc) + exp_time),\n \"identity\": identity,\n \"type\": type_token\n }\n\n # for access_token only fresh needed\n if type_token == 'access':\n payload['fresh'] = fresh\n\n return jwt.encode(payload,AuthJWT._SECRET_KEY,algorithm=AuthJWT._ALGORITHM)",
"def _generate_jwt_token(self):\n import jwt\n from datetime import datetime, timedelta\n from django.conf import settings\n\n dt = datetime.now() + timedelta(days=60)\n\n token = jwt.encode({\n 'id': self.pk,\n 'username': self.username,\n 'exp': int(dt.strftime('%s')),\n }, settings.SECRET_KEY, algorithm='HS256')\n # print(token)\n return token"
] | [
"0.6900923",
"0.6899821",
"0.6854177",
"0.684906",
"0.65835893",
"0.6576911",
"0.6561197",
"0.6522176",
"0.6501121",
"0.6464966",
"0.6422411",
"0.64120436",
"0.6355992",
"0.6337109",
"0.62936896",
"0.6243143",
"0.6217495",
"0.61761385",
"0.61607",
"0.61149985",
"0.6105348",
"0.6027264",
"0.60075474",
"0.59776103",
"0.59735465",
"0.59732205",
"0.59659076",
"0.59554404",
"0.5950479",
"0.592479"
] | 0.76991534 | 0 |
Removes the given personal access token of the logged-in user. | def removeToken(self, token):
self.__require_privilaged_access()
with DBSession(self.__config_db) as session:
# Check if the given token is a personal access token so it can be
# removed.
user = self.getLoggedInUser()
num_of_removed = session.query(Session) \
.filter(Session.user_name == user) \
.filter(Session.token == token) \
.filter(Session.can_expire.is_(False)) \
.delete(synchronize_session=False)
session.commit()
if not num_of_removed:
raise codechecker_api_shared.ttypes.RequestFailed(
codechecker_api_shared.ttypes.ErrorCode.DATABASE,
"Personal access token {0} was not found in the "
"database.".format(token))
# Invalidate the local session by token.
self.__manager.invalidate_local_session(token)
LOG.info("Personal access token '%s...' has been removed by '%s'.",
token[:5], self.getLoggedInUser())
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def revoke_token(self, token, token_type_hint, request, *args, **kwargs):\n if token_type_hint:\n tok = self._tokengetter(**{token_type_hint: token})\n else:\n tok = self._tokengetter(access_token=token)\n if not tok:\n tok = self._tokengetter(refresh_token=token)\n\n if tok and tok.client_id == request.client.client_id:\n request.client_id = tok.client_id\n request.user = tok.user\n tok.delete()\n return True\n\n msg = 'Invalid token supplied.'\n log.debug(msg)\n request.error_message = msg\n return False",
"def delete_user_access_token(self, user_id, user_password, user_access_token, give_json=False):\n\n url = Constants.BASE_URL + 'domains/users/accesstokens'\n response = requests.delete(url=url,\n params={'key': self.api_key, 'user_id': user_id, 'user_password': user_password})\n if give_json:\n return response.json()\n else:\n return response.text",
"def delete(self):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user:\n abort(404, message=\"User not Found\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in tokens:\n auth_crud.revoke_token(token['id'], user_id)\n user = user_crud.remove(user_id)\n\n return {'msg': 'User Removed'}",
"def revoke(self):\n if self.access_token is None:\n raise InvalidInvocation('no token available to revoke')\n\n self._authenticator.revoke_token(self.access_token, 'access_token')\n self._clear_access_token()",
"def fusion_api_remove_user(self, name=None, uri=None, api=None, headers=None):\n return self.user.delete(name, uri, api, headers)",
"def logout(request):\n request.user.auth_token.delete()\n return Response({}, status=status.HTTP_200_OK)",
"def revoke_access_token(self):\n response = self._telegraph.method('revokeAccessToken')\n\n self._telegraph.access_token = response.get('access_token')\n\n return response",
"def remove(self, token):\n self.rpc.call(MsfRpcMethod.AuthTokenRemove, [token])",
"def delete_my_account():\n # Remove user ownerships\n for p in current_user.projects:\n p.user_id = None\n p.save()\n # Delete user posts\n [ a.delete() for a in current_user.activities ]\n # Delete user account\n current_user.delete()\n logout_user()\n flash('We are sorry to see you go. Your profile has been deleted.', 'info')\n return redirect(url_for('public.home'))",
"def delete_auth_token():\n data = get_request_data(request)\n address = data.get(\"address\")\n token = data.get(\"token\")\n\n valid, message = is_token_valid(token, address)\n if not valid:\n return jsonify(error=message), 400\n\n force_expire_token(token)\n\n return jsonify(success=\"Token has been deactivated.\")",
"def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200",
"def revoke_token(token):\n token.delete_instance()",
"def logout(self, request, *args, **kwargs):\n token = get_object_or_404(Token, key=request.auth)\n token.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)",
"def post(self, request):\n if 'person_id' in self.request.POST:\n user = User.objects.get(person__id=self.request.POST['person_id'])\n if AccessToken.objects.filter(user=user).exists():\n tokens = AccessToken.objects.filter(user=user)\n for token in tokens:\n token.revoke()\n logout(request)\n return Response({'status': True})\n return Response({'status': False})",
"def revoke(self, only_access=False):\n if only_access or self.refresh_token is None:\n super(Authorizer, self).revoke()\n else:\n self._authenticator.revoke_token(self.refresh_token,\n 'refresh_token')\n self._clear_access_token()\n self.refresh_token = None",
"def delete(self, new_data, user_id):\n print(new_data)\n request_id = get_jwt_identity()\n user = user_crud.get(request_id)\n if not user.is_superuser:\n abort(401,\n message=\"You do not have permission to view this endpoint\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in tokens:\n auth_crud.revoke_token(token['id'], user_id)\n user = user_crud.remove(user_id)\n\n return {'msg': 'User Removed'}",
"def invalidate_existing_tokens(self, client_id, user):\n\n app = Application.objects.get(client_id=client_id)\n tokens = AccessToken.objects.filter(user=user, application=app)\n tokens.delete()",
"def remove(self, user):\r\n url = '{0}/{1}'.format(self.get_url(), user)\r\n\r\n return http.Request('DELETE', url), parsers.parse_empty",
"def delete_access_token_file():\n if os.path.isfile(AccessData.ACCESS_TOKEN_FILE):\n os.remove(AccessData.ACCESS_TOKEN_FILE)\n logger.info('deleted file %s' % (AccessData.ACCESS_TOKEN_FILE))",
"def delete_user():\n token = request.args.get('token')\n data = jwt.decode(token, app.config['SECRET_KEY'])\n\n permit = functions.delete_user(data)\n if permit:\n return make_response(jsonify({'Delete': 'User Deleted Successfully'}), 201)\n else:\n return make_response(jsonify({'Delete Failed': 'Credentials not match or the user not exist'}), 201)",
"def delete(self, request):\n serializer = UserLogoutSerializer(data=request.data)\n if serializer.is_valid(raise_exception=True):\n token = RefreshToken(serializer.validated_data[\"refresh\"])\n token.blacklist()\n return Response(status=status.HTTP_204_NO_CONTENT)",
"def remove_user(self):\n self.currentuser = None\n self.carlocked = False",
"def fb_deauth(self, request):\n signed_request = request.data.get('signed_request')\n if signed_request:\n parsed_signed_request = facebook_controller.parse_signed_request(signed_request)\n facebook_user_id = parsed_signed_request.get('user_id')\n if facebook_user_id:\n facebook_controller.delete_linked_facebook_account(facebook_user_id)\n return Response('OK')",
"def logout_user(session):\n del session['user']",
"def revoke_access_token(cls, jti: str) -> None:\n redis = cls._conn_redis(cls)\n expired_time = int(timedelta(minutes=cls._ACCESS_TOKEN_EXPIRES).total_seconds())\n redis.setex(jti,expired_time,'true')",
"def deauth(request):\n\n if(request.token):\n request.token.delete()\n return JsonResponse({'message': 'Your token is revoked'}) \n else:\n return HttpResponseBadRequest('It does not make sense to revoke a token ' +\n 'if no token are supplied to the request')",
"def del_user_id(user_id):\r\n obj = storage.get(User, user_id)\r\n if obj is None:\r\n abort(404)\r\n obj.delete()\r\n storage.save()\r\n return jsonify({}), 200",
"def remove(self, user_id):\n pass",
"async def revoke_token(self, request: Request, token: str) -> None:\n token_record = ...\n token_record.revoked = True\n token_record.save()",
"def delete_user(self, user):\n self.delete(user)"
] | [
"0.68508357",
"0.6681796",
"0.66742015",
"0.6516821",
"0.6512082",
"0.6453964",
"0.641179",
"0.63502675",
"0.6316928",
"0.626436",
"0.6243326",
"0.61909586",
"0.6139808",
"0.61332875",
"0.61328125",
"0.6090404",
"0.60894763",
"0.6081416",
"0.6063863",
"0.60013586",
"0.59941447",
"0.5977804",
"0.5935405",
"0.5920284",
"0.59160405",
"0.5907984",
"0.5900905",
"0.5900447",
"0.58784056",
"0.58651435"
] | 0.75845927 | 0 |
Show matplotlib plots immediately if using the inline backend. With ipywidgets 6.0, matplotlib plots don't work well with interact when using the inline backend that comes with ipykernel. Basically, the inline backend only shows the plot after the entire cell executes, which does not play well with drawing plots inside of an interact function. See | def show_inline_matplotlib_plots():
if 'matplotlib' not in sys.modules:
# matplotlib hasn't been imported, nothing to do.
return
try:
import matplotlib as mpl
from ipykernel.pylab.backend_inline import flush_figures
except ImportError:
return
if (mpl.get_backend() == 'module://ipykernel.pylab.backend_inline' or
mpl.get_backend() == 'module://matplotlib_inline.backend_inline'):
flush_figures() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def embed_matplotlib(self):",
"def _ipython_display_(self):\n with self._sc:\n self._box._ipython_display_()",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def show():\n setup()\n plt.show()",
"def update_plot():\n pass",
"def ion():\n plt.ion()",
"def plot(data, interactive=False):\n if interactive:\n plt.ion()\n fig = plt.figure()\n fig.canvas.draw()\n image = call_imshow(data)\n else:\n fig = plt.figure()\n image = call_imshow(data)\n plt.show()\n return fig, image",
"def show_plot(figure_id=None):\n if figure_id is None:\n fig = pl.gcf()\n else:\n # do this even if figure_id == 0\n fig = pl.figure(num=figure_id)\n pl.show()\n pl.pause(1e-9)\n fig.canvas.manager.window.activateWindow()\n fig.canvas.manager.window.raise_()",
"def show_plots():\n plt.show()",
"def _UpdatePlot( self ):\n self._BusyDoOp( self._UpdatePlotImpl )",
"def _ipython_display_(self):\n spec, render_type = self._get_spec_info()\n\n id = uuid.uuid4()\n publish_display_data(\n {'text/html': self._generate_html(id)},\n metadata={'jupyter-vega3': '#{0}'.format(id)}\n )\n publish_display_data(\n {'application/javascript':\n self._generate_js(id, spec, render_type)},\n metadata={'jupyter-vega3': '#{0}'.format(id)}\n )",
"def showPlot2():\n raise NotImplementedError",
"def update(self, *args):\n self.kwargs = {}\n if self.manual:\n self.manual_button.disabled = True\n try:\n show_inline_matplotlib_plots()\n with self.out:\n if self.clear_output:\n clear_output(wait=True)\n for widget in self.kwargs_widgets:\n value = widget.get_interact_value()\n self.kwargs[widget._kwarg] = value\n self.result = self.f(**self.kwargs)\n show_inline_matplotlib_plots()\n if self.auto_display and self.result is not None:\n display(self.result)\n except Exception as e:\n ip = get_ipython()\n if ip is None:\n self.log.warn(\"Exception in interact callback: %s\", e, exc_info=True)\n else:\n ip.showtraceback()\n finally:\n if self.manual:\n self.manual_button.disabled = False",
"def show(fig):\n fig.tight_layout()\n if _mpl_is_gui_backend():\n if platform.system() == \"Windows\":\n plt.show(block=True)\n else:\n fig.show()",
"def show():\n\tplt.show()",
"def showPlot1(): \n raise NotImplementedError",
"def plot_refresh():\n figure.canvas.draw()",
"def display(self):\n self.figure, self.axes = self.createFigure()\n\n self.setupLayout()\n self.quitFlag = False\n self.animation = animation.FuncAnimation(self.figure, self.animate, interval=100)\n plt.show()",
"def show_figure(self):\n pylab.show()",
"def show_plot(self):\r\n\t\tself.generate_plot()\r\n\t\tplt.show()",
"def showPlot3():\n raise NotImplementedError",
"def jupyter_inline(url):\n # note: `get_ipython` is available without import since ipython 5.1\n # (and it's fine to fail here, since the next viewer is attempted in that case)\n ipython = get_ipython()\n logger.debug('Running inside ipython: %r', ipython)\n if 'ZMQInteractiveShell' not in type(ipython).__name__:\n raise ValueError('non-gui interactive shell')\n\n # render URL/IFrame inline in jupyter notebook, or fail trying\n # note: since ipython 5.4/6.1 (May 2017) `display` is available without import\n rich_url = RichDisplayURL(url)\n display(rich_url)\n\n # don't block if gui interactive shell is used\n return False",
"def show_fig_and_wait(self):\n\n # window management\n self.fig.canvas.manager.show()\n self.fig.canvas.draw_idle()\n # starting a 'blocking' loop to let the user interact\n self.fig.canvas.start_event_loop(timeout=-1)",
"def plots():\n out = interactive_output(generate_plots, {'gsize':gridSlider, 'ra':RABox, 'ra':RASlider, 'dec':DECBox, 'dec':DECSlider, 'ang':radBox, 'ang':radSlider, 'style':hexDrop})\n return display(widgrid, out)",
"def force_draw(self):\n import matplotlib.pyplot as plt\n\n plt.show()",
"def interactive():\n IPython.start_ipython(argv=[])",
"def show(self):\n plt.show()",
"def plot_finalize():\n global figure\n global axes\n\n plot_refresh()\n plt.ioff()\n plt.show()\n\n figure, axes = None, None"
] | [
"0.67240685",
"0.671005",
"0.6426132",
"0.6426132",
"0.6426132",
"0.64154273",
"0.63586575",
"0.63485914",
"0.63166267",
"0.62941664",
"0.62881005",
"0.6260716",
"0.6200048",
"0.61584425",
"0.6113244",
"0.6111419",
"0.6110425",
"0.6098969",
"0.6076505",
"0.60664433",
"0.60629284",
"0.60009295",
"0.5975697",
"0.5935095",
"0.5912684",
"0.5908649",
"0.58439714",
"0.58226866",
"0.57873297",
"0.578562"
] | 0.72732526 | 0 |
Match a pattern of types in a sequence. | def _matches(o, pattern):
    # A sequence can only match if it has exactly as many items as the pattern.
    if not len(o) == len(pattern):
        return False
    # Pair each object with its expected type and require every pair to match.
    comps = zip(o, pattern)
    return all(isinstance(obj, kind) for obj, kind in comps) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _match(self, *token_types):\n for token in token_types:\n if self._check(token):\n self._advance()\n return True\n\n return False",
"def test_build_sequence_multiple_values(self):\n # Test basic sequence rule\n r = Rule(schema={'type': 'seq', 'sequence': [{'type': 'str'}, {'type': 'int'}]})\n assert r.type == \"seq\"\n assert r.matching == \"any\"\n assert len(r.sequence) == 2\n assert isinstance(r.sequence, list)\n assert all(isinstance(r.sequence[i], Rule) for i in range(len(r.sequence)))\n assert r.sequence[0].type == \"str\"\n assert r.sequence[1].type == \"int\"\n\n # Test sequence without explicit type\n r = Rule(schema={'sequence': [{'type': 'str'}, {'type': 'int'}]})\n assert r.type == \"seq\"\n assert r.matching == \"any\"\n assert len(r.sequence) == 2\n assert isinstance(r.sequence, list)\n assert all(isinstance(r.sequence[i], Rule) for i in range(len(r.sequence)))\n assert r.sequence[0].type == \"str\"\n assert r.sequence[1].type == \"int\"\n\n # Test adding matchin rules",
"def is_seq_of(seq, expected_type, seq_type=None):\n if seq_type is None:\n exp_seq_type = abc.Sequence\n else:\n assert isinstance(seq_type, type)\n exp_seq_type = seq_type\n if not isinstance(seq, exp_seq_type):\n return False\n for item in seq:\n if not isinstance(item, expected_type):\n return False\n return True",
"def type_match(cls, line):\n match = Ftype.itype_re.match(line.strip())\n return match",
"def test_match_types(self):\n f = lws.match_types\n # assert f(str, u'test') is True\n assert f(str, 'test') is True\n assert f(int, 123) is True\n assert f(int, 123.00) is False\n assert f(bool, [1, 2, 3]) is False",
"def target_pattern(lst_tag_types):\n return ''.join([r'\\s{1}\\@(',\n '|'.join(lst_tag_types),\n r')\\(([^\\)]+)\\)'])",
"def _match_array(tipo, array):\n\n return bool(re.match(array, tipo))",
"def contains_tokens(pattern):\n return type(pattern) is list and len(pattern) > 0",
"def gen_matches(self, subseq, startpos):\n \n raise TypeError, \"PatternBase is an abstract base class\"",
"def match(self, seq):\n myseq = seq\n if not type(seq) is Sequence:\n myseq = Sequence(seq, self.alpha)\n mystr = myseq.getString()\n if not Motif.isAlphabet(self, mystr):\n raise RuntimeError(\"Motif alphabet is not valid for sequence \" + myseq.getName())\n for m in re.finditer(self.pattern, mystr):\n yield (m.start(), m.group(), 1.0)",
"def test_multiple_types() -> None:\n soup = generate_case(\"multiple_types\")\n\n tests.html_schema_doc_asserts.assert_types(\n soup, [\"object\", \"string\", \"string or null\", \"integer or number\", \"integer, string, number or null\"]\n )",
"def scan_seq(seq, pattern):\n\n # Look for matches in the sequence\n matches = [str(match.group(1)) for match in re.finditer(pattern, seq)]\n\n # Look for matches in the reverse complementary of the sequence\n revcomp_seq = reverse_complementary(seq)\n matches += [str(match.group(1)) for match in re.finditer(pattern, revcomp_seq)]\n\n return matches",
"def type_match(cls, line):\n match = Ftype_type_decl.__type_decl_re__.match(line.strip())\n # End if\n return match",
"def onNameType(self, match):\n\t\treturn [self.process(match[0]), self.process(match[1])]",
"def _match_entry_type_string(code_entry, string_list):\n entry_type = re.match(r\"<(AST.*):.*\", code_entry.get('type')).group(1)\n return bool(entry_type in string_list)",
"def _match_type_against_type(self, t1, t2, subst):\n t1 = self.maybe_lookup_type_param(t1, subst)\n t2 = self.maybe_lookup_type_param(t2, subst)\n # TODO(b/159058933): Use utils:TypeMatcher to simplify this?\n if isinstance(t2, pytd.AnythingType):\n # We can match anything against AnythingType. (It's like top)\n return booleq.TRUE\n elif isinstance(t1, pytd.AnythingType):\n if self.any_also_is_bottom:\n # We can match AnythingType against everything. (It's like bottom)\n return booleq.TRUE\n else:\n return booleq.FALSE\n elif isinstance(t1, pytd.NothingType):\n # nothing as an actual type matches against everything, since it\n # represents an empty value.\n return booleq.TRUE\n elif isinstance(t2, pytd.NothingType):\n # We can't match anything against nothing as an expected type (except\n # nothing itself, above).\n return booleq.FALSE\n elif isinstance(t1, pytd.UnionType):\n return booleq.And(self.match_type_against_type(u, t2, subst)\n for u in t1.type_list)\n elif isinstance(t2, pytd.UnionType):\n return booleq.Or(self.match_type_against_type(t1, u, subst)\n for u in t2.type_list)\n elif (isinstance(t1, pytd.ClassType) and isinstance(t2, StrictType) or\n isinstance(t1, StrictType) and isinstance(t2, pytd.ClassType)):\n # For strict types, avoid subclasses of the left side.\n return booleq.Eq(self._full_name(t1), self._full_name(t2))\n elif isinstance(t1, pytd.ClassType) and t2.name == \"builtins.object\":\n return booleq.TRUE\n elif (t1.name in (\"builtins.type\", \"typing.Callable\") and\n t2.name in (\"builtins.type\", \"typing.Callable\")):\n return booleq.TRUE\n elif isinstance(t1, pytd.ClassType):\n # ClassTypes are similar to Unions, except they're disjunctions: We can\n # match the type or any of its base classes against the formal parameter.\n return booleq.Or(self.match_type_against_type(t, t2, subst)\n for t in self.expand_superclasses(t1))\n elif isinstance(t2, pytd.ClassType):\n # ClassTypes on the right are exactly like Unions: We can match against\n # this type or any of its subclasses.\n return booleq.Or(self.match_type_against_type(t1, t, subst)\n for t in self.expand_subclasses(t2))\n assert not isinstance(t1, pytd.ClassType)\n assert not isinstance(t2, pytd.ClassType)\n if is_unknown(t1) and isinstance(t2, pytd.GenericType):\n return self.match_Unknown_against_Generic(t1, t2, subst)\n elif isinstance(t1, pytd.GenericType) and is_unknown(t2):\n return self.match_Generic_against_Unknown(t1, t2, subst)\n elif isinstance(t1, pytd.GenericType) and isinstance(t2, pytd.GenericType):\n return self.match_Generic_against_Generic(t1, t2, subst)\n elif isinstance(t1, pytd.GenericType):\n # E.g. list[...] matches against list, or even object.\n return self.match_type_against_type(t1.base_type, t2, subst)\n elif isinstance(t2, pytd.GenericType):\n if self.any_also_is_bottom:\n # E.g. list (a.k.a. 
list[Any]) matches against list[str]\n return self.match_type_against_type(t1, t2.base_type, subst)\n else:\n return booleq.FALSE\n elif is_unknown(t1) and is_unknown(t2):\n return booleq.Eq(t1.name, t2.name)\n elif (isinstance(t1, (pytd.NamedType, StrictType)) and\n isinstance(t2, (pytd.NamedType, StrictType))):\n if is_complete(t1) and is_complete(t2) and t1.name != t2.name:\n # Optimization: If we know these two can never be equal, just return\n # false right away.\n return booleq.FALSE\n else:\n return booleq.Eq(t1.name, t2.name)\n elif isinstance(t1, pytd.NamedType) and isinstance(t2, pytd.Literal):\n return booleq.FALSE\n elif isinstance(t1, pytd.LateType) or isinstance(t2, pytd.LateType):\n # Unresolved types never match against anything.\n return booleq.FALSE\n elif isinstance(t1, pytd.Literal) and isinstance(t2, pytd.Literal):\n return booleq.TRUE if t1.value == t2.value else booleq.FALSE\n else:\n raise AssertionError(f\"Don't know how to match {type(t1)} against \"\n f\"{type(t2)}\")",
"def parse_pattern(pattern):\n return map(lambda x: True if x == '1' else False, pattern)",
"def IsValidInputType(self, list_of_matches):\n for entry in list_of_matches:\n if not entry:\n return False\n\n return True",
"def typeToRegularExpressions(matchingRegex, subPattern, type):\n contextsList = []\n\n if type == 1:\n contextsList = RegexType.TYPE1\n elif type == 2:\n contextsList = RegexType.TYPE2\n elif type == 3:\n contextsList = RegexType.TYPE3\n elif type == 4:\n contextsList = RegexType.TYPE4\n elif type == 5:\n contextsList = RegexType.TYPE5\n elif type == 6:\n contextsList = RegexType.TYPE6\n else:\n raise Exception(\"Unknown type %d\" % type)\n\n typeRegexList = []\n\n # Before parsing text, spaces are converted to double\n # spaces. We need to match that in the regular\n # expressions\n matchingRegex = \\\n RegularExpressionFormula.normalizeSpaces(matchingRegex, True)\n nbGroups = RegexType.getNumGroups(matchingRegex)\n\n # Context is a tuple with the before and after\n # context. A type can have multiple context\n for context in contextsList:\n strMatch = matchingRegex\n # Left context\n if context[0] is not None:\n strMatch = context[0] + strMatch\n subPattern = \"\\g<1>\" + subPattern\n nbGroups += 1\n # Right context\n if context[1] is not None:\n strMatch = strMatch + context[1]\n subPattern = subPattern + \"\\g<%d>\" % (nbGroups + 1)\n\n typeRegexList.append((strMatch, subPattern))\n\n return typeRegexList",
"def one_of(*types):\n\n def one_of_types(string):\n exceptions = []\n for type_constructor in types:\n try:\n return type_constructor(string)\n except (ArgumentTypeError, TypeError, ValueError) as e:\n exceptions.append(f'{type_constructor.__name__}: {e}')\n\n names = ', '.join(t.__name__ for t in types)\n exceptions = ''.join('\\n\\t' + e for e in exceptions)\n\n raise ArgumentTypeError(\n f'Argument {string} does not match any of allowed types: {names}.\\n' +\n f'Following exceptions has been raised: {exceptions}'\n )\n\n return one_of_types",
"def type_matches(val: Any, expected_type: Union[List, TypeVar, None]) -> bool:\n if isinstance(expected_type, list):\n # A list of allowed values is given, not an actual type\n return val in expected_type\n elif expected_type == Any:\n return True\n elif expected_type is None:\n return val is None\n elif hasattr(expected_type, \"__origin__\"):\n # Something from the typing module\n if expected_type.__origin__ == Union:\n for union_member in expected_type.__args__:\n if type_matches(val, union_member):\n return True\n elif is_callable_type(expected_type):\n return callable(val)\n elif expected_type.__origin__ == dict:\n if not isinstance(val, dict):\n return False\n for key in val.keys():\n if not type_matches(key, expected_type.__args__[0]):\n return False\n for value in val.values():\n if not type_matches(value, expected_type.__args__[1]):\n return False\n return True\n elif expected_type.__origin__ == list:\n if not isinstance(val, list):\n return False\n for el in val:\n if not type_matches(el, expected_type.__args__[0]):\n return False\n return True\n elif isinstance(expected_type, TypeVar):\n # too complex to check if TypeVars (List[TypeVar]) are alright... Treat like Any\n return True\n elif isinstance(val, expected_type):\n return True\n return False",
"def test_handle_wildcard(self):\n sequence1 = 'ATCG'\n sequence2 = 'ATNG'\n sequence3 = 'NNCN'\n self.assertEqual(handle_wildcard(sequence1), ['ATCG'])\n self.assertEqual(handle_wildcard(sequence2), [\"%AT_G%\"])\n self.assertEqual(handle_wildcard(sequence3), [\"%__C_%\"])",
"def findType(*args, deep: bool=True, exact: bool=True, forward: bool=True, type: AnyStr=\"\",\n **kwargs)->List[AnyStr]:\n pass",
"def findMatches(sequence, patterns):\n#\n#\n# idGenerator = IdGenerator()\n# root = Edge('', None, idGenerator)\n# i = 0\n# sequence = sequence + '$'\n# print len(sequence)\n# for i in range(len(sequence)):\n# seq = sequence[i:]\n# edge = root\n# while len(seq) > 0:\n# edge = edge.addSequence(seq, i)\n# seq = seq[1:]\n# print i\n # root = buildTrie(generateSequences(sequence))\n matches = [[m.start() for m in re.finditer('(?=' + pattern + ')', sequence)] for pattern in patterns]\n return matches",
"def test(types, _):\n return 'Date' in types and 'Postal Code' in types",
"def any_of(*args:List[str]) -> str:\n return group(\"|\".join(args))",
"def exception_matches(e: Exception, patterns: Tuple[Exception, ...]) -> bool:\n e_type = type(e)\n e_msg = str(e)\n for pattern in patterns:\n if issubclass(e_type, type(pattern)):\n if re.match(str(pattern), e_msg):\n return True\n return False",
"def test_pattern_matching(self):\n\n for hashtype in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:\n self.do_test_pattern_matching(hashtype)",
"def match_list(column, patterns):\n for pattern in patterns:\n if pattern.match(column):\n return True\n return False",
"def has_item(match):\n return IsSequenceContaining(wrap_matcher(match))"
] | [
"0.6306849",
"0.6063895",
"0.6062589",
"0.60322946",
"0.5910432",
"0.5845407",
"0.5746501",
"0.5730287",
"0.5725967",
"0.57160014",
"0.5696791",
"0.5614536",
"0.5550764",
"0.5529471",
"0.5435691",
"0.54298306",
"0.53875875",
"0.53801197",
"0.5365809",
"0.53359324",
"0.5335244",
"0.53267485",
"0.53265554",
"0.5324716",
"0.5287905",
"0.52742976",
"0.5262925",
"0.5262267",
"0.5242449",
"0.52421266"
] | 0.65527374 | 0 |
Call the interact function and update the output widget with the result of the function call. | def update(self, *args):
self.kwargs = {}
if self.manual:
self.manual_button.disabled = True
try:
show_inline_matplotlib_plots()
with self.out:
if self.clear_output:
clear_output(wait=True)
for widget in self.kwargs_widgets:
value = widget.get_interact_value()
self.kwargs[widget._kwarg] = value
self.result = self.f(**self.kwargs)
show_inline_matplotlib_plots()
if self.auto_display and self.result is not None:
display(self.result)
except Exception as e:
ip = get_ipython()
if ip is None:
self.log.warn("Exception in interact callback: %s", e, exc_info=True)
else:
ip.showtraceback()
finally:
if self.manual:
self.manual_button.disabled = False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def interact(self):\r\n pass",
"def interact(self):\n # We use a library_interact instead of a normal interact here,\n # since this is an interact in the library, and a normal\n # \"@interact\" is all mangled.\n\n from sage.interacts.library import library_interact\n from sagenb.notebook.interact import slider, input_box, selector\n\n # self._last holds the last state of all controls. This allows\n # us to deduce which control changed to cause the update, or that\n # nothing changed, in which case we assume the user requested to\n # re-evaluate the input box (for some reason -- currently there is\n # no point in doing so). It is a shortcoming of @interact that\n # we have to do this.\n self._last = None\n\n # two sliders and a box to put in commands with an evaluate button.\n @library_interact\n def dbg(frame = slider(vmin=0, vmax=len(self._stack)-1, step_size=1, default=len(self._stack)-1, label='stack frame'),\n lines = slider(vmin=3, vmax=99, step_size=2, default=11, label='lines of context'),\n command = input_box(\"\", label=\"\", type=str),\n button = selector(['Evaluate'], label='', buttons=True)\n ):\n\n if self._last is None:\n self._last = {'command':command, 'button':button, 'lines':lines, 'frame':frame}\n\n if self._last['lines'] != lines:\n # they dragged the number-of-lines slider, so done\n pass\n elif self._last['command'] != command and command.strip():\n # they changed the command, so evaluate that\n self.evaluate(command)\n elif self._last['frame'] != frame:\n # they dragged the frame slider.\n self._curframe_index = frame\n elif command:\n # must have hit the evaluate button\n self.evaluate(command)\n\n print('<html><hr>{}</html>'.format(self.listing(lines//2)))\n # save control state for next time around\n self._last = {'command':command, 'button':button, 'lines':lines, 'frame':frame}\n\n dbg()",
"def interact(self, antagonist):\n pass",
"def interact(self):\n # Setup the initial value options for the location\n dim = self.default_dimension\n dim2 = set(self.data.dims).difference({dim}).pop()\n options = self.data[dim2].values.tolist()[::self.profile_interval]\n mid = options[len(options)//2]\n\n # Make the slider for choosing the location\n slider_label = widgets.Label(\"at {} value\".format(dim2))\n slider = widgets.SelectionSlider(options=options, value=mid,\n layout=widgets.Layout(width=\"350px\"))\n # Make a menu for choosing the profile direction\n dimension_chooser = widgets.Dropdown(\n options=self.data.dims.keys(), value=dim,\n description=\"Profile along\")\n\n def displayer(location, dimension):\n \"Update and display the plot with given arguments\"\n self.plot(location, dimension)\n display(self.fig)\n\n def handle_dimension_change(change):\n \"Change the location options when dimension changes\"\n dim2 = set(self.data.dims).difference({change.new}).pop()\n slider_label.value = \"at {} value\".format(dim2)\n options = self.data[dim2].values.tolist()[::self.profile_interval]\n slider.options = options\n slider.value = options[len(options)//2]\n\n # Connect the dimension change to the slider\n dimension_chooser.observe(handle_dimension_change, names='value')\n\n # Make the output display and connect it to the callback\n output = widgets.interactive_output(\n displayer, {'location': slider, 'dimension': dimension_chooser})\n\n # Make a title for the widget\n title = widgets.HTML(\n '<strong style=\"font-size: 1.5em;\">Profile selector</strong>')\n\n # Layout the widgets\n layout = widgets.VBox(\n [title,\n widgets.HBox([dimension_chooser, slider_label, slider]),\n output],\n layout=widgets.Layout(align_items=\"center\"))\n\n # For some reason, calling _figure_setup inserts a plot in the output\n # Call clear_output to get rid of it.\n with output:\n clear_output(wait=True)\n display(self.fig)\n\n return layout",
"def interact(self):\n x, mu = self.update_position_direction(self.l_int)\n mu_mean = self.calculate_mean_mu(self.x, x, self.l_int)\n self.update_estimators(self.l_int, mu_mean)\n\n self.is_absorbed = True\n self.is_active = False",
"def execute_ui(function):\n raise NotImplementedError(\"execute_ui() has not been implemented\")",
"def do_interact(self, arg):\n ns = self.curframe.f_globals.copy()\n ns.update(self.curframe_locals)\n code.interact(\"*interactive*\", local=ns)",
"def exec_script_and_interact(self):\r\n self.exec_script(set_focus=True)",
"def interact(self):\n param_min = self.param_vals[0]\n param_max = self.param_vals[-1]\n param_step = self.param_vals[1] - self.param_vals[0]\n\n qbt_indices = [index for (index, subsystem) in self.sweep.hilbertspace.qbt_subsys_list]\n osc_indices = [index for (index, subsystem) in self.sweep.hilbertspace.osc_subsys_list]\n\n param_slider = ipywidgets.FloatSlider(min=param_min, max=param_max, step=param_step,\n description=self.param_name, continuous_update=False)\n photon_slider = ipywidgets.IntSlider(value=1, min=1, max=4, description='photon number')\n initial_slider = ipywidgets.IntSlider(value=0, min=0, max=self.evals_count, description='initial state index')\n final_slider = ipywidgets.IntSlider(value=1, min=1, max=self.evals_count, description='final state index')\n\n qbt_dropdown = ipywidgets.Dropdown(options=qbt_indices, description='qubit subsys')\n osc_dropdown = ipywidgets.Dropdown(options=osc_indices, description='oscillator subsys')\n\n def update_min_final_index(*args):\n final_slider.min = initial_slider.value + 1\n\n initial_slider.observe(update_min_final_index, 'value')\n\n out = ipywidgets.interactive_output(self.plot_explorer_panels,\n {'param_val': param_slider,\n 'photonnumber': photon_slider,\n 'initial_index': initial_slider,\n 'final_index': final_slider,\n 'qbt_index': qbt_dropdown,\n 'osc_index': osc_dropdown\n })\n\n left_box = ipywidgets.VBox([param_slider])\n mid_box = ipywidgets.VBox([initial_slider, final_slider, photon_slider])\n right_box = ipywidgets.VBox([qbt_dropdown, osc_dropdown])\n\n user_interface = ipywidgets.HBox([left_box, mid_box, right_box])\n display(user_interface, out)",
"def __call__(self, __interact_f=None, **kwargs):\n # If kwargs are given, replace self by a new\n # _InteractFactory with the updated kwargs\n if kwargs:\n kw = dict(self.kwargs)\n kw.update(kwargs)\n self = type(self)(self.cls, self.opts, kw)\n\n f = __interact_f\n if f is None:\n # This branch handles the case 3\n # @interact(a=30, b=40)\n # def f(*args, **kwargs):\n # ...\n #\n # Simply return the new factory\n return self\n\n # positional arg support in: https://gist.github.com/8851331\n # Handle the cases 1 and 2\n # 1. interact(f, **kwargs)\n # 2. @interact\n # def f(*args, **kwargs):\n # ...\n w = self.widget(f)\n try:\n f.widget = w\n except AttributeError:\n # some things (instancemethods) can't have attributes attached,\n # so wrap in a lambda\n f = lambda *args, **kwargs: __interact_f(*args, **kwargs)\n f.widget = w\n show_inline_matplotlib_plots()\n display(w)\n return f",
"def start_interaction(self):\n self.__interact()",
"def main():\n user_interaction()",
"def interact(banner=None, readfunc=None, context=None):\n\n console = qb_console(context)\n# if readfunc is not None:\n# console.raw_input = readfunc\n console.interact(banner)",
"def interactive_output(f, controls):\n\n out = Output()\n def observer(change):\n kwargs = {k:v.value for k,v in controls.items()}\n show_inline_matplotlib_plots()\n with out:\n clear_output(wait=True)\n f(**kwargs)\n show_inline_matplotlib_plots()\n for k,w in controls.items():\n w.observe(observer, 'value')\n show_inline_matplotlib_plots()\n observer(None)\n return out",
"def trigger_output(self):\n\n EmptyPromise(self.q(css='div#ready').is_present, \"Click ready\").fulfill()\n self.q(css='div#fixture button').first.click()\n EmptyPromise(self.q(css='div#output').is_present, \"Output available\").fulfill()",
"def update(self):\n self._varText.setText(self._function())",
"def user_output(angles, pulses):\n sg.theme('DarkBrown1') \n layout = [ [sg.Text('Target to Go', size=(20, 2), justification='center')],\n [sg.Text(size=(10, 2), font=('Helvetica', 12), justification='center', key='-OUTPUT-')],\n # [sg.T(' ' * 2), sg.Quit()]]\n [sg.T(' ' * 8), sg.Button('Go !!', focus=True)]] \n window = sg.Window('Output Window', layout) \n \n while True: # Event Loop\n event, values = window.read(timeout=10) # Please try and use as high of a timeout value as you can\n window['-OUTPUT-'].update(f'Angle:{angles} , Pulse:{pulses}')\n if event == 'Go !!' or event == sg.WIN_CLOSED: # if user closed the window using X or clicked Quit button\n break\n window.close()",
"def get_output(self):\n\n num, in_text = self.get_input()\n\n if not in_text:\n self.current_prompt.freeze(in_text=\"\", show_output=False)\n # Makes the prompt un-editable.\n self.current_prompt = self.iogrid.add_prompt(number=-1)\n else:\n output = self.execute_query(num, in_text)\n self.current_prompt.freeze(in_text=in_text, output=output)\n self.current_prompt = self.iogrid.add_prompt()",
"def _interact_with_user(code: str, increase: bool):\n pass",
"def update(self, *_):\n if not self.input_main.edit_modified():\n return\n\n analyze_text = self.create_analysis()\n self.output_main[\"state\"] = tk.NORMAL\n self.output_main.delete(\"1.0\", tk.END)\n self.output_main.insert(\"1.0\", analyze_text)\n self.output_main[\"state\"] = tk.DISABLED\n self.input_main.edit_modified(False)",
"def trigger_output(self):\n self.q(css='div#fixture button').first.click()",
"def trigger_output(self):\n self.q(css='div#fixture button').first.click()",
"def show_result():\n print(\"I win!!\")",
"def interact(self):\n print('Ready to interact on socket connected with {}.'.format(self.remote_addr))\n try:\n # get initial input from user\n print('Enter input or press CTRL-D for no input.')\n data = sys.stdin.readline()\n self.remote_socket.sendall(data.encode())\n while True:\n if data.startswith('exit'):\n print('[*] Closing remote shell.')\n self.close()\n break\n # wait for response from target host\n recv_len = 1\n response = ''\n while recv_len:\n data = self.remote_socket.recv(4096)\n recv_len = len(data)\n response += data.decode()\n if recv_len < 4096:\n break\n print(response)\n # get further input from user\n print('Enter further input or press CTRL-D for no input.')\n data = sys.stdin.readline()\n self.remote_socket.sendall(data.encode())\n except Exception as e:\n print(e)\n print('[*] Closing remote shell.')\n self.close()",
"def execute():\n # print('Wow')\n result = gui.controller.main('execute')\n print(result)\n\n return render_template('results.html', data=json.dumps(result))",
"def interact(parts, method):\n details = ACTIONS[method]\n command = details['command'] % (details[parts['action']], parts['attacker'])\n status, output = commands.getstatusoutput(command)\n activity[parts['attacker']] = (parts['action'], status,)\n \n return command, status, output",
"def display_stdout_and_err_in_curr_cell(self):\n ipy_display(self.output_widget)",
"def __call__(self):\n self.show()",
"def output(self):\r\n self.logic ( )\r\n return self.output",
"def _display_command(self):\n idx = self.current_idx # Local copy to avoid race condition updates\n output = self.outputs[idx]\n if output is None:\n self.screen.addstr('Waiting for command to run...')\n return\n\n # Set row limits\n top_line = self.top_lines[idx]\n top_line = 0 if len(output) < self.max_line else min(max(top_line, 0), len(output)-self.max_line)\n bottom_line = min(top_line+self.max_line, len(output)) # Last page may not be full\n self.top_lines[idx] = top_line\n\n # Crop output to fit screen height & width\n output = [line[:self.max_col-1] for line in output[top_line:bottom_line]]\n self.screen.addstr(b'\\n'.join(output))"
] | [
"0.6873947",
"0.6764477",
"0.6496218",
"0.59996116",
"0.584276",
"0.5782574",
"0.57297826",
"0.5726981",
"0.57025045",
"0.5590474",
"0.55040187",
"0.54436517",
"0.5438048",
"0.54256636",
"0.53991383",
"0.5363977",
"0.52975446",
"0.5290476",
"0.52520204",
"0.52434456",
"0.51663697",
"0.51663697",
"0.5163653",
"0.5157268",
"0.51311564",
"0.51186323",
"0.5092123",
"0.50257874",
"0.49920595",
"0.49872097"
] | 0.72222763 | 0 |
Find the abbreviations for the given function and kwargs. Return (name, abbrev, default) tuples. | def find_abbreviations(self, kwargs):
new_kwargs = []
try:
sig = self.signature()
except (ValueError, TypeError):
# can't inspect, no info from function; only use kwargs
return [ (key, value, value) for key, value in kwargs.items() ]
for param in sig.parameters.values():
for name, value, default in _yield_abbreviations_for_parameter(param, kwargs):
if value is empty:
raise ValueError('cannot find widget or abbreviation for argument: {!r}'.format(name))
new_kwargs.append((name, value, default))
return new_kwargs | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _parse_abbreviation(self, cell_content):\n span = cell_content.find(\"span\")\n full = span.attrs[\"title\"].strip()\n abbrv = span.text.strip()\n return abbrv, full",
"def method_abbreviator(arg):\n regexp = re.compile(arg)\n matches = []\n for method in methods:\n if regexp.match(method.name):\n matches.append(method.name)\n\n return matches[0] if len(matches) == 1 else arg",
"def test_abbreviate_all():\n statement = \"ENDPROC\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"ENDP.\"\n statement = \"POSITION\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"POS.\"",
"def abbrev_help(self):\n pass",
"def abbreviate_binomials(binomials: list[dict], single_expanded_name=True):\n abbrevs = defaultdict(set)\n\n for term in binomials:\n pattern = term[\"pattern\"]\n abbrev = abbreviate(pattern)\n abbrevs[abbrev].add(pattern.split()[0])\n\n if single_expanded_name:\n abbrevs = {k: v.pop().title() for k, v in abbrevs.items() if len(v) == 1}\n\n return abbrevs",
"def parse_abbreviation(text):\n rv = {}\n match = ABBREVIATION.findall(text)\n for m in match:\n line = m[0]\n key = m[1]\n value = m[2]\n # print key + \", \" + value\n text = re.sub(re.escape(line), '', text)\n text = re.sub('(?<=\\s|:|\\()' + re.escape(key) + '(?=\\s|.|\\))',\n '<abbr title=\"' + value + '\" >' + key + '</abbr>', text)\n return text",
"def getStateAbbreviations():\n state_abbrev = {\n \"01\": \"AL\",\n \"02\": \"AK\",\n \"04\": \"AZ\",\n \"05\": \"AR\",\n \"06\": \"CA\",\n \"08\": \"CO\",\n \"09\": \"CT\",\n \"10\": \"DE\",\n \"11\": \"DC\",\n \"12\": \"FL\",\n \"13\": \"GA\",\n \"15\": \"HI\",\n \"16\": \"ID\",\n \"17\": \"IL\",\n \"18\": \"IN\",\n \"19\": \"IA\",\n \"20\": \"KS\",\n \"21\": \"KY\",\n \"22\": \"LA\",\n \"23\": \"ME\",\n \"24\": \"MD\",\n \"25\": \"MA\",\n \"26\": \"MI\",\n \"27\": \"MN\",\n \"28\": \"MS\",\n \"29\": \"MO\",\n \"30\": \"MT\",\n \"31\": \"NE\",\n \"32\": \"NV\",\n \"33\": \"NH\",\n \"34\": \"NJ\",\n \"35\": \"NM\",\n \"36\": \"NY\",\n \"37\": \"NC\",\n \"38\": \"ND\",\n \"39\": \"OH\",\n \"40\": \"OK\",\n \"41\": \"OR\",\n \"42\": \"PA\",\n \"44\": \"RI\",\n \"45\": \"SC\",\n \"46\": \"SD\",\n \"47\": \"TN\",\n \"48\": \"TX\",\n \"49\": \"UT\",\n \"50\": \"VT\",\n \"51\": \"VA\",\n \"53\": \"WA\",\n \"54\": \"WV\",\n \"55\": \"WI\",\n \"56\": \"WY\",\n \"72\": \"PR\"\n }\n return state_abbrev",
"def test_abbreviate_partial():\n statement = \"ENDPROC A\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"ENDP. A\"\n statement = \"POSITION10,5\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"POS.10,5\"",
"def deabbreviate(self, st):\n\t\tabbrs = {'gws': 'greater western sydney giants',\n\t\t\t\t 'gwsg': 'greater western sydney giants',\n\t\t\t\t 'afl': 'australian football league',\n\t\t\t\t 'nrc': 'national rugby championship',\n\t\t\t\t 'nrl': 'national rugby league',\n\t\t\t\t 'syd': 'sydney',\n\t\t\t\t 'mel': 'melbourne',\n\t\t\t\t 'melb': 'melbourne',\n\t\t\t\t 'bris': 'brisbane',\n\t\t\t\t 'brisb': 'brisbane',\n\t\t\t\t 'gc': 'gold coast',\n\t\t\t\t 'adel': 'adelaide',\n\t\t\t\t 'canb': 'canberra',\n\t\t\t\t 'mt': 'mount',\n\t\t\t\t 'utd': 'united',\n\t\t\t\t 'cty': 'city',\n\t\t\t\t 'football club': 'fc',\n\t\t\t\t 'snr': 'senior',\n\t\t\t\t 'jr': 'junion',\n\t\t\t\t 'nsw': 'new south wales' ,\n\t\t\t\t 'vic': 'victoria',\n\t\t\t\t 'tas' : 'tasmania',\n\t\t\t\t 'sa': 'south australia',\n\t\t\t\t 'wa': 'western australia',\n\t\t\t\t 'act': 'australian capital territory',\n\t\t\t\t 'nt': 'northern territory',\n\t\t\t\t 'qld': 'queensland',\n\t\t\t\t 'champs': 'championships', \n\t\t\t\t 'champ': 'championship', \n\t\t\t\t 'soc': 'society',\n\t\t\t\t 'ent': 'entertainment',\n\t\t\t\t 'intl': 'international', \n\t\t\t\t 'int': 'international', \n\t\t\t\t 'aust': 'australian'}\n\n\t\t# first replace full state names by abbreviations;\n\t\tfor ab in abbrs:\n\t\t\tst = re.sub(r'\\b' + ab + r'\\b', abbrs[ab], st)\n\n\t\treturn st",
"def find_abbr(lsbody,abbrs):\n for abbr in abbrs:\n if lsbody.startswith(abbr.abbr):\n return abbr\n return None",
"def get_abbreviation(res_type, abbr):\n\treturn get_settings_resource(res_type, abbr, 'abbreviations')",
"def _yield_abbreviations_for_parameter(param, kwargs):\n name = param.name\n kind = param.kind\n ann = param.annotation\n default = param.default\n not_found = (name, empty, empty)\n if kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY):\n if name in kwargs:\n value = kwargs.pop(name)\n elif ann is not empty:\n warn(\"Using function annotations to implicitly specify interactive controls is deprecated. Use an explicit keyword argument for the parameter instead.\", DeprecationWarning)\n value = ann\n elif default is not empty:\n value = default\n else:\n yield not_found\n yield (name, value, default)\n elif kind == Parameter.VAR_KEYWORD:\n # In this case name=kwargs and we yield the items in kwargs with their keys.\n for k, v in kwargs.copy().items():\n kwargs.pop(k)\n yield k, v, empty",
"def find_abbr(lsbody,abbrs):\n for abbr in abbrs:\n if lsbody.startswith(abbr.abbr):\n return abbr\n #print('find_abbr error. lsbody=',lsbody)\n return None",
"def extract_keywords(func):\n if hasattr(func, 'im_func'):\n func = func.im_func\n\n try:\n return func.func_code.co_varnames[-len(func.func_defaults):]\n except (TypeError, ValueError, IndexError):\n return tuple()",
"def test_abbreviate_miss():\n statement = \"PEEK(1234)\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"PEEK(1234)\"\n statement = \"QUIT\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"QUIT\"\n statement = \"ENDPRO\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"ENDPRO\"\n statement = \"POSITIOM\"\n assert abbreviate(build_match_tree(ABBREVIATIONS), statement) == \"POSITIOM\"",
"def combine_state_names_and_abbreviations():\n return sorted(us_state_abbrev.values())[:10] + sorted(states)[-10:]",
"def abbreviate(line):\n spl = line.split()\n try: \n machloc = spl.index('-m')\n except ValueError: \n spl2 = spl\n else: spl2 = spl[:machloc] + spl[machloc+2:]\n if 'experiments.py' in spl2[0]:\n spl3 = ['experiments.py']+spl2[1:]\n else: spl3 = spl2\n spl4 = []\n i = 1\n while i < len(spl3):\n e = spl3[i]\n if e.startswith('--'): \n e2 = e\n elif e == '-f': \n e2 = None\n elif e[:1] == '-' and e[1:2] in 'agcs' and len(e)>2: \n e2 = e[:2] +' '+e[2:]\n elif e.startswith('-'): \n e2 = ' '.join('-'+ch for ch in e[1:])\n else: \n e2 = e\n if e2 is not None: \n spl4.append(e2)\n i += 1\n return spl3[0]+' ' +' '.join(spl4)",
"def stationabbreviation(station):\n stations = {'Utrecht': 'Ut',\n 'Amsterdam Centraal': 'asd'}\n if station in stations:\n return stations[station]",
"def find_location_abbreviations(question_tokens, question):\n country_name_abbrevations_US = [\n 'USA', 'US', 'United States', 'United States of America'\n ]\n\n country_name_abbrevations_UK = [\n 'UK', 'United Kingdom', 'England'\n ]\n\n location_abbvreviations_US = {\n 'AK': ['Alaska'],\n 'AL': ['Alabama'],\n 'AR': ['Arkansas'],\n 'AZ': ['Arizona'],\n 'CA': ['California'],\n 'CO': ['Colorado'],\n 'CT': ['Connecticut'],\n 'DE': ['Delaware'],\n 'FL': ['Florida'],\n 'GA': ['Georgia'],\n 'HI': ['Hawaii'],\n 'IA': ['Iowa'],\n 'ID': ['Idaho'],\n 'IL': ['Illinois'],\n 'IN': ['Indiana'],\n 'KS': ['Kansas'],\n 'KY': ['Kentucky'],\n 'LA': ['Louisiana', 'Los Angeles'],\n 'MA': ['Massachusetts'],\n 'MD': ['Maryland'],\n 'ME': ['Maine'],\n 'MI': ['Michigan'],\n 'MN': ['Minnesota'],\n 'MO': ['Missouri'],\n 'MS': ['Mississippi'],\n 'MT': ['Montana'],\n 'NC': ['North Carolin'],\n 'ND': ['North Dakota'],\n 'NE': ['Nebraska'],\n 'NH': ['New Hampshire'],\n 'NJ': ['New Jersey'],\n 'NM': ['New Mexico'],\n 'NV': ['Nevada'],\n 'NY': ['New York'],\n 'OH': ['Ohio'],\n 'OK': ['Oklahoma'],\n 'OR': ['Oregon'],\n 'PA': ['Pennsylvania'],\n 'RI': ['Rhode Island'],\n 'SC': ['South Carolin'],\n 'SD': ['South Dakota'],\n 'TN': ['Tennessee'],\n 'TX': ['Texas'],\n 'UT': ['Utah'],\n 'VA': ['Virginia'],\n 'VT': ['Vermont'],\n 'WA': ['Washington'],\n 'WI': ['Wisconsin'],\n 'WV': ['West Virginia'],\n 'WY': ['Wyoming']\n }\n\n location_candidates = []\n\n for key, potential_values in location_abbvreviations_US.items():\n add_me = False\n if key in question_tokens:\n add_me = True\n\n for sub_value in potential_values:\n if sub_value in question_tokens:\n add_me = True\n\n if add_me:\n location_candidates.append(key)\n location_candidates.extend(potential_values)\n\n for abbreviation in country_name_abbrevations_US:\n if abbreviation in question:\n # we don't know how to look for USA - therefore add all options. The database finder should sort them out.\n location_candidates.extend(country_name_abbrevations_US)\n\n for abbreviation in country_name_abbrevations_UK:\n if abbreviation in question:\n # we don't know how to look for United Kingdom - therefore add all options. The database finder should sort them out.\n location_candidates.extend(country_name_abbrevations_UK)\n\n return location_candidates",
"def combine_state_names_and_abbreviations():\n lst=[]\n for k,v in us_state_abbrev.items():\n lst.append(v)\n lst = sorted(lst[:10])\n state = sorted(states)\n print(lst+state[-10:])\n return",
"def suffixDict():\n return {'trpk': 'tpke', 'forges': 'frgs', 'bypas': 'byp', 'mnr': 'mnr', 'viaduct': 'via', 'mnt': 'mt',\n 'lndng': 'lndg', 'vill': 'vlg', 'aly': 'aly', 'mill': 'ml', 'pts': 'pts', 'centers': 'ctrs', 'row': 'row', 'cnter': 'ctr',\n 'hrbor': 'hbr', 'tr': 'trl', 'lndg': 'lndg', 'passage': 'psge', 'walks': 'walk', 'frks': 'frks', 'crest': 'crst', 'meadows': 'mdws',\n 'freewy': 'fwy', 'garden': 'gdn', 'bluffs': 'blfs', 'vlg': 'vlg', 'vly': 'vly', 'fall': 'fall', 'trk': 'trak', 'squares': 'sqs',\n 'trl': 'trl', 'harbor': 'hbr', 'frry': 'fry', 'div': 'dv', 'straven': 'stra', 'cmp': 'cp', 'grdns': 'gdns', 'villg': 'vlg',\n 'meadow': 'mdw', 'trails': 'trl', 'streets': 'sts', 'prairie': 'pr', 'hts': 'hts', 'crescent': 'cres', 'pass': 'pass',\n 'ter': 'ter', 'port': 'prt', 'bluf': 'blf', 'avnue': 'ave', 'lights': 'lgts', 'rpds': 'rpds', 'harbors': 'hbrs',\n 'mews': 'mews', 'lodg': 'ldg', 'plz': 'plz', 'tracks': 'trak', 'path': 'path', 'pkway': 'pkwy', 'gln': 'gln',\n 'bot': 'btm', 'drv': 'dr', 'rdg': 'rdg', 'fwy': 'fwy', 'hbr': 'hbr', 'via': 'via', 'divide': 'dv', 'inlt': 'inlt',\n 'fords': 'frds', 'avenu': 'ave', 'vis': 'vis', 'brk': 'brk', 'rivr': 'riv', 'oval': 'oval', 'gateway': 'gtwy',\n 'stream': 'strm', 'bayoo': 'byu', 'msn': 'msn', 'knoll': 'knl', 'expressway': 'expy', 'sprng': 'spg',\n 'flat': 'flt', 'holw': 'holw', 'grden': 'gdn', 'trail': 'trl', 'jctns': 'jcts', 'rdgs': 'rdgs',\n 'tunnel': 'tunl', 'ml': 'ml', 'fls': 'fls', 'flt': 'flt', 'lks': 'lks', 'mt': 'mt', 'groves': 'grvs',\n 'vally': 'vly', 'ferry': 'fry', 'parkway': 'pkwy', 'radiel': 'radl', 'strvnue': 'stra', 'fld': 'fld',\n 'overpass': 'opas', 'plaza': 'plz', 'estate': 'est', 'mntn': 'mtn', 'lock': 'lck', 'orchrd': 'orch',\n 'strvn': 'stra', 'locks': 'lcks', 'bend': 'bnd', 'kys': 'kys', 'junctions': 'jcts', 'mountin': 'mtn',\n 'burgs': 'bgs', 'pine': 'pne', 'ldge': 'ldg', 'causway': 'cswy', 'spg': 'spg', 'beach': 'bch', 'ft': 'ft',\n 'crse': 'crse', 'motorway': 'mtwy', 'bluff': 'blf', 'court': 'ct', 'grov': 'grv', 'sprngs': 'spgs',\n 'ovl': 'oval', 'villag': 'vlg', 'vdct': 'via', 'neck': 'nck', 'orchard': 'orch', 'light': 'lgt',\n 'sq': 'sq', 'pkwy': 'pkwy', 'shore': 'shr', 'green': 'grn', 'strm': 'strm', 'islnd': 'is',\n 'turnpike': 'tpke', 'stra': 'stra', 'mission': 'msn', 'spngs': 'spgs', 'course': 'crse',\n 'trafficway': 'trfy', 'terrace': 'ter', 'hway': 'hwy', 'avenue': 'ave', 'glen': 'gln',\n 'boul': 'blvd', 'inlet': 'inlt', 'la': 'ln', 'ln': 'ln', 'frst': 'frst', 'clf': 'clf',\n 'cres': 'cres', 'brook': 'brk', 'lk': 'lk', 'byp': 'byp', 'shoar': 'shr', 'bypass': 'byp',\n 'mtin': 'mtn', 'ally': 'aly', 'forest': 'frst', 'junction': 'jct', 'views': 'vws', 'wells': 'wls', 'cen': 'ctr',\n 'exts': 'exts', 'crt': 'ct', 'corners': 'cors', 'trak': 'trak', 'frway': 'fwy', 'prarie': 'pr', 'crossing': 'xing',\n 'extn': 'ext', 'cliffs': 'clfs', 'manors': 'mnrs', 'ports': 'prts', 'gatewy': 'gtwy', 'square': 'sq', 'hls': 'hls',\n 'harb': 'hbr', 'loops': 'loop', 'mdw': 'mdw', 'smt': 'smt', 'rd': 'rd', 'hill': 'hl', 'blf': 'blf',\n 'highway': 'hwy', 'walk': 'walk', 'clfs': 'clfs', 'brooks': 'brks', 'brnch': 'br', 'aven': 'ave',\n 'shores': 'shrs', 'iss': 'iss', 'route': 'rte', 'wls': 'wls', 'place': 'pl', 'sumit': 'smt', 'pines': 'pnes',\n 'trks': 'trak', 'shoal': 'shl', 'strt': 'st', 'frwy': 'fwy', 'heights': 'hts', 'ranches': 'rnch',\n 'boulevard': 'blvd', 'extnsn': 'ext', 'mdws': 'mdws', 'hollows': 'holw', 'vsta': 'vis', 'plains': 'plns',\n 'station': 'sta', 'circl': 'cir', 'mntns': 'mtns', 'prts': 'prts', 'shls': 
'shls', 'villages': 'vlgs',\n 'park': 'park', 'nck': 'nck', 'rst': 'rst', 'haven': 'hvn', 'turnpk': 'tpke', 'expy': 'expy', 'sta': 'sta',\n 'expr': 'expy', 'stn': 'sta', 'expw': 'expy', 'street': 'st', 'str': 'st', 'spurs': 'spur', 'crecent': 'cres',\n 'rad': 'radl', 'ranch': 'rnch', 'well': 'wl', 'shoals': 'shls', 'alley': 'aly', 'plza': 'plz', 'medows': 'mdws',\n 'allee': 'aly', 'knls': 'knls', 'ests': 'ests', 'st': 'st', 'anx': 'anx', 'havn': 'hvn', 'paths': 'path', 'bypa': 'byp',\n 'spgs': 'spgs', 'mills': 'mls', 'parks': 'park', 'byps': 'byp', 'flts': 'flts', 'tunnels': 'tunl', 'club': 'clb', 'sqrs': 'sqs',\n 'hllw': 'holw', 'manor': 'mnr', 'centre': 'ctr', 'track': 'trak', 'hgts': 'hts', 'rnch': 'rnch', 'crcle': 'cir', 'falls': 'fls',\n 'landing': 'lndg', 'plaines': 'plns', 'viadct': 'via', 'gdns': 'gdns', 'gtwy': 'gtwy', 'grove': 'grv', 'camp': 'cp', 'tpk': 'tpke',\n 'drive': 'dr', 'freeway': 'fwy', 'ext': 'ext', 'points': 'pts', 'exp': 'expy', 'ky': 'ky', 'courts': 'cts', 'pky': 'pkwy', 'corner': 'cor',\n 'crssing': 'xing', 'mnrs': 'mnrs', 'unions': 'uns', 'cyn': 'cyn', 'lodge': 'ldg', 'trfy': 'trfy', 'circle': 'cir', 'bridge': 'brg',\n 'dl': 'dl', 'dm': 'dm', 'express': 'expy', 'tunls': 'tunl', 'dv': 'dv', 'dr': 'dr', 'shr': 'shr', 'knolls': 'knls', 'greens': 'grns',\n 'tunel': 'tunl', 'fields': 'flds', 'common': 'cmn', 'orch': 'orch', 'crk': 'crk', 'river': 'riv', 'shl': 'shl', 'view': 'vw',\n 'crsent': 'cres', 'rnchs': 'rnch', 'crscnt': 'cres', 'arc': 'arc', 'btm': 'btm', 'blvd': 'blvd', 'ways': 'ways', 'radl': 'radl',\n 'rdge': 'rdg', 'causeway': 'cswy', 'parkwy': 'pkwy', 'juncton': 'jct', 'statn': 'sta', 'gardn': 'gdn', 'mntain': 'mtn',\n 'crssng': 'xing', 'rapid': 'rpd', 'key': 'ky', 'plns': 'plns', 'wy': 'way', 'cor': 'cor', 'ramp': 'ramp', 'throughway': 'trwy',\n 'estates': 'ests', 'ck': 'crk', 'loaf': 'lf', 'hvn': 'hvn', 'wall': 'wall', 'hollow': 'holw', 'canyon': 'cyn', 'clb': 'clb',\n 'cswy': 'cswy', 'village': 'vlg', 'cr': 'crk', 'trce': 'trce', 'cp': 'cp', 'cv': 'cv', 'ct': 'cts', 'pr': 'pr', 'frg': 'frg',\n 'jction': 'jct', 'pt': 'pt', 'mssn': 'msn', 'frk': 'frk', 'brdge': 'brg', 'cent': 'ctr', 'spur': 'spur', 'frt': 'ft', 'pk': 'park',\n 'fry': 'fry', 'pl': 'pl', 'lanes': 'ln', 'gtway': 'gtwy', 'prk': 'park', 'vws': 'vws', 'stravenue': 'stra', 'lgt': 'lgt',\n 'hiway': 'hwy', 'ctr': 'ctr', 'prt': 'prt', 'ville': 'vl', 'plain': 'pln', 'mount': 'mt', 'mls': 'mls', 'loop': 'loop',\n 'riv': 'riv', 'centr': 'ctr', 'is': 'is', 'prr': 'pr', 'vl': 'vl', 'avn': 'ave', 'vw': 'vw', 'ave': 'ave', 'spng': 'spg',\n 'hiwy': 'hwy', 'dam': 'dm', 'isle': 'isle', 'crcl': 'cir', 'sqre': 'sq', 'jct': 'jct', 'jctn': 'jct', 'mountain': 'mtn',\n 'keys': 'kys', 'parkways': 'pkwy', 'drives': 'drs', 'tunl': 'tunl', 'jcts': 'jcts', 'knl': 'knl', 'center': 'ctr',\n 'driv': 'dr', 'tpke': 'tpke', 'sumitt': 'smt', 'canyn': 'cyn', 'ldg': 'ldg', 'harbr': 'hbr', 'rest': 'rst', 'shoars': 'shrs',\n 'vist': 'vis', 'gdn': 'gdn', 'islnds': 'iss', 'hills': 'hls', 'cresent': 'cres', 'point': 'pt', 'lake': 'lk', 'vlly': 'vly',\n 'strav': 'stra', 'crossroad': 'xrd', 'bnd': 'bnd', 'strave': 'stra', 'stravn': 'stra', 'knol': 'knl', 'vlgs': 'vlgs',\n 'forge': 'frg', 'cntr': 'ctr', 'cape': 'cpe', 'height': 'hts', 'lck': 'lck', 'highwy': 'hwy', 'trnpk': 'tpke', 'rpd': 'rpd',\n 'boulv': 'blvd', 'circles': 'cirs', 'valleys': 'vlys', 'vst': 'vis', 'creek': 'crk', 'mall': 'mall', 'spring': 'spg',\n 'brg': 'brg', 'holws': 'holw', 'lf': 'lf', 'est': 'est', 'xing': 'xing', 'trace': 'trce', 'bottom': 'btm',\n 'streme': 
'strm', 'isles': 'isle', 'circ': 'cir', 'forks': 'frks', 'burg': 'bg', 'run': 'run', 'trls': 'trl',\n 'radial': 'radl', 'lakes': 'lks', 'rue': 'rue', 'vlys': 'vlys', 'br': 'br', 'cors': 'cors', 'pln': 'pln',\n 'pike': 'pike', 'extension': 'ext', 'island': 'is', 'frd': 'frd', 'lcks': 'lcks', 'terr': 'ter',\n 'union': 'un', 'extensions': 'exts', 'pkwys': 'pkwy', 'islands': 'iss', 'road': 'rd', 'shrs': 'shrs',\n 'roads': 'rds', 'glens': 'glns', 'springs': 'spgs', 'missn': 'msn', 'ridge': 'rdg', 'arcade': 'arc',\n 'bayou': 'byu', 'crsnt': 'cres', 'junctn': 'jct', 'way': 'way', 'valley': 'vly', 'fork': 'frk',\n 'mountains': 'mtns', 'bottm': 'btm', 'forg': 'frg', 'ht': 'hts', 'ford': 'frd', 'hl': 'hl',\n 'grdn': 'gdn', 'fort': 'ft', 'traces': 'trce', 'cnyn': 'cyn', 'cir': 'cir', 'un': 'un', 'mtn': 'mtn',\n 'flats': 'flts', 'anex': 'anx', 'gatway': 'gtwy', 'rapids': 'rpds', 'villiage': 'vlg', 'flds': 'flds',\n 'coves': 'cvs', 'rvr': 'riv', 'av': 'ave', 'pikes': 'pike', 'grv': 'grv', 'vista': 'vis', 'pnes': 'pnes',\n 'forests': 'frst', 'field': 'fld', 'branch': 'br', 'grn': 'grn', 'dale': 'dl', 'rds': 'rds', 'annex': 'anx',\n 'sqr': 'sq', 'cove': 'cv', 'squ': 'sq', 'skyway': 'skwy', 'ridges': 'rdgs', 'hwy': 'hwy', 'tunnl': 'tunl',\n 'underpass': 'upas', 'cliff': 'clf', 'lane': 'ln', 'land': 'land', 'bch': 'bch', 'dvd': 'dv', 'curve': 'curv',\n 'cpe': 'cpe', 'summit': 'smt', 'gardens': 'gdns'}",
"def abbr_2_st(state_series, abbr_2_st=True):\n us_state_abbrev = {\n 'Alabama': 'AL',\n 'Alaska': 'AK',\n 'American Samoa': 'AS',\n 'Arizona': 'AZ',\n 'Arkansas': 'AR',\n 'California': 'CA',\n 'Colorado': 'CO',\n 'Connecticut': 'CT',\n 'Delaware': 'DE',\n 'District of Columbia': 'DC',\n 'Florida': 'FL',\n 'Georgia': 'GA',\n 'Guam': 'GU',\n 'Hawaii': 'HI',\n 'Idaho': 'ID',\n 'Illinois': 'IL',\n 'Indiana': 'IN',\n 'Iowa': 'IA',\n 'Kansas': 'KS',\n 'Kentucky': 'KY',\n 'Louisiana': 'LA',\n 'Maine': 'ME',\n 'Maryland': 'MD',\n 'Massachusetts': 'MA',\n 'Michigan': 'MI',\n 'Minnesota': 'MN',\n 'Mississippi': 'MS',\n 'Missouri': 'MO',\n 'Montana': 'MT',\n 'Nebraska': 'NE',\n 'Nevada': 'NV',\n 'New Hampshire': 'NH',\n 'New Jersey': 'NJ',\n 'New Mexico': 'NM',\n 'New York': 'NY',\n 'North Carolina': 'NC',\n 'North Dakota': 'ND',\n 'Northern Mariana Islands':'MP',\n 'Ohio': 'OH',\n 'Oklahoma': 'OK',\n 'Oregon': 'OR',\n 'Pennsylvania': 'PA',\n 'Puerto Rico': 'PR',\n 'Rhode Island': 'RI',\n 'South Carolina': 'SC',\n 'South Dakota': 'SD',\n 'Tennessee': 'TN',\n 'Texas': 'TX',\n 'Utah': 'UT',\n 'Vermont': 'VT',\n 'Virgin Islands': 'VI',\n 'Virginia': 'VA',\n 'Washington': 'WA',\n 'West Virginia': 'WV',\n 'Wisconsin': 'WI',\n 'Wyoming': 'WY'\n}\n if abbr_2_st == True:\n inv_map = {v: k for k, v in us_state_abbrev.items()}\n full_names = []\n for abbv in state_series:\n full_names.append(inv_map[abbv])\n return full_names\n else:\n # Return Abbreviation\n abbvs = []\n for full_name in state_series:\n abbvs.append(us_state_abbrev[full_name])\n return abbvs",
"def convert_abbrev(word):\r\n return abbreviations[word.lower()] if word.lower() in abbreviations.keys() else word",
"def fullNameFor( self, name ):\n if name in self.named: return name\n if name[-3:] == '...':\n best= [ n for n in self.named.keys()\n if n.startswith( name[:-3] ) ]\n if len(best) > 1:\n raise Error(\"Ambiguous abbreviation {!r}, matches {!r}\".format( name, list(sorted(best)) ) )\n elif len(best) == 1: \n return best[0]\n return name",
"def acronym(name):\n return tuple(map(first, filter(capitalized, name.split())))",
"def python_func_kw_matches(self,text):\n\n if \".\" in text: # a parameter cannot be dotted\n return []\n try: regexp = self.__funcParamsRegex\n except AttributeError:\n regexp = self.__funcParamsRegex = re.compile(r'''\n '.*?' | # single quoted strings or\n \".*?\" | # double quoted strings or\n \\w+ | # identifier\n \\S # other characters\n ''', re.VERBOSE | re.DOTALL)\n # 1. find the nearest identifier that comes before an unclosed\n # parenthesis e.g. for \"foo (1+bar(x), pa\", the candidate is \"foo\"\n tokens = regexp.findall(self.get_line_buffer())\n tokens.reverse()\n iterTokens = iter(tokens); openPar = 0\n for token in iterTokens:\n if token == ')':\n openPar -= 1\n elif token == '(':\n openPar += 1\n if openPar > 0:\n # found the last unclosed parenthesis\n break\n else:\n return []\n # 2. Concatenate any dotted names (e.g. \"foo.bar\" for \"foo.bar(x, pa\" )\n ids = []\n isId = re.compile(r'\\w+$').match\n while True:\n try:\n ids.append(iterTokens.next())\n if not isId(ids[-1]):\n ids.pop(); break\n if not iterTokens.next() == '.':\n break\n except StopIteration:\n break\n # lookup the candidate callable matches either using global_matches\n # or attr_matches for dotted names\n if len(ids) == 1:\n callableMatches = self.global_matches(ids[0])\n else:\n callableMatches = self.attr_matches('.'.join(ids[::-1]))\n argMatches = []\n for callableMatch in callableMatches:\n try: namedArgs = self._default_arguments(eval(callableMatch,\n self.namespace))\n except: continue\n for namedArg in namedArgs:\n if namedArg.startswith(text):\n argMatches.append(\"%s=\" %namedArg)\n return argMatches",
"def abbreviator(max_length):\n \n def abbreviate(text):\n if len(text) <= max_length:\n return text\n else:\n return text[: max_length - 3] + \"...\"\n\n return abbreviate",
"def _get_function_defaults(func: FunctionType) -> dict[str, Any]:\n # extracted bit from inspect.signature... ~20x faster\n pos_count = func.__code__.co_argcount\n arg_names = func.__code__.co_varnames\n\n defaults = func.__defaults__ or ()\n\n non_default_count = pos_count - len(defaults)\n positional_args = arg_names[:pos_count]\n\n output = {\n name: defaults[offset]\n for offset, name in enumerate(positional_args[non_default_count:])\n }\n if func.__kwdefaults__:\n output.update(func.__kwdefaults__)\n return output",
"def test_police_abbreviations(self):\n for word in self.report.get_words():\n for uword in self.rules.police_abbreviations:\n if uword[\"word\"] == word.text.lower():\n self.add_error(\n f\"{word.text} är en intern förkortning. \"\n f\"Använd {uword['means']} istället.\",\n word=word,\n )",
"def widgets_from_abbreviations(self, seq):\n result = []\n for name, abbrev, default in seq:\n widget = self.widget_from_abbrev(abbrev, default)\n if not (isinstance(widget, ValueWidget) or isinstance(widget, fixed)):\n if widget is None:\n raise ValueError(\"{!r} cannot be transformed to a widget\".format(abbrev))\n else:\n raise TypeError(\"{!r} is not a ValueWidget\".format(widget))\n if not widget.description:\n widget.description = name\n widget._kwarg = name\n result.append(widget)\n return result"
] | [
"0.55997264",
"0.55988103",
"0.55512005",
"0.55499125",
"0.54829174",
"0.54123336",
"0.5390938",
"0.5324624",
"0.53143686",
"0.5279437",
"0.5245413",
"0.52400696",
"0.52346224",
"0.50955373",
"0.5080197",
"0.5065388",
"0.5045806",
"0.5037079",
"0.503532",
"0.5020661",
"0.49809927",
"0.49787873",
"0.49410373",
"0.49380308",
"0.4894953",
"0.48828125",
"0.48703536",
"0.48602277",
"0.48346704",
"0.48167515"
] | 0.764695 | 0 |
Given a sequence of (name, abbrev, default) tuples, return a sequence of Widgets. | def widgets_from_abbreviations(self, seq):
result = []
for name, abbrev, default in seq:
widget = self.widget_from_abbrev(abbrev, default)
if not (isinstance(widget, ValueWidget) or isinstance(widget, fixed)):
if widget is None:
raise ValueError("{!r} cannot be transformed to a widget".format(abbrev))
else:
raise TypeError("{!r} is not a ValueWidget".format(widget))
if not widget.description:
widget.description = name
widget._kwarg = name
result.append(widget)
return result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def widget_from_abbrev(cls, abbrev, default=empty):\n if isinstance(abbrev, ValueWidget) or isinstance(abbrev, fixed):\n return abbrev\n\n if isinstance(abbrev, tuple):\n widget = cls.widget_from_tuple(abbrev)\n if default is not empty:\n try:\n widget.value = default\n except Exception:\n # ignore failure to set default\n pass\n return widget\n\n # Try single value\n widget = cls.widget_from_single_value(abbrev)\n if widget is not None:\n return widget\n\n # Something iterable (list, dict, generator, ...). Note that str and\n # tuple should be handled before, that is why we check this case last.\n if isinstance(abbrev, Iterable):\n widget = cls.widget_from_iterable(abbrev)\n if default is not empty:\n try:\n widget.value = default\n except Exception:\n # ignore failure to set default\n pass\n return widget\n\n # No idea...\n return None",
"def widgets(parameter: Parameter):\n widgets = []\n for key in parameter.keys():\n textEdit = QTextEdit()\n textEdit.setText(key)\n widgets.append(textEdit)\n if isinstance(parameter[key], Enum):\n comboBox = MyQtEnumComboBox()\n comboBox.fillValues(type(parameter[key]))\n widgets.append(comboBox)\n elif isinstance(parameter[key], bool):\n comboBox = QComboBox()\n comboBox.addItems((\"False\", \"True\"))\n widgets.append(comboBox)\n else:\n textEdit = QTextEdit()\n textEdit.setText(str(parameter[key]))\n widgets.append(textEdit)\n for widget in widgets:\n widget.setFixedHeight(30)\n return widgets",
"def widgets(std_prm: Parameter) -> List[QWidget]:\n widgets = []\n\n # The name widget\n textEdit = QLineEdit()\n textEdit.setText(std_prm[\"name\"])\n widgets.append(textEdit)\n\n # The input widget\n inputWidget = std_prm[\"build method\"](std_prm[\"build method prms\"], std_prm[\"slot\"])\n widgets.append(inputWidget)\n\n # Add the input widget to the parameter\n # this field will be used to identify the parameter\n # in the slot\n std_prm[\"name widget\"] = textEdit\n std_prm[\"widget\"] = inputWidget\n return widgets",
"def subwidgets(self, name, value, attrs=None, choices=()):\n yield SubWidget(self, name, value, attrs, choices)",
"def generate_widgets(self, **kwargs):\n #todo base class?\n\n names = [p for p in self.param if self.param[p].precedence is None or self.param[p].precedence > 1]\n widgets = pn.Param(self.param, show_name=False, show_labels=True, widgets=kwargs)\n\n return {k: v for k, v in zip(names[1:], widgets)}",
"def make_widgets(self):\n self.mode_select = Selector(**MODE_SELECT_SETTINGS)\n self.bind_keys_to_modes()\n self.layer_select = Selector(**LAYER_SELECT_SETTINGS)\n self.check_boxes = CheckBoxArray(**CHECK_ARRAY_SETTINGS)\n self.check_boxes.bind_key(pg.K_v, self.toggle_layer_visibility)\n self.navs = [Button(**NAV_LEFT), Button(**NAV_RIGHT)]\n self.save_button = Button(**SAVE_BUTTON)\n self.load_button = Button(**LOAD_BUTTON)\n self.new_button = Button(**NEW_BUTTON)\n self.widgets = [self.mode_select, self.layer_select, self.check_boxes,\n self.navs[0], self.navs[1],\n self.save_button, self.load_button, self.new_button]",
"def create_widgets(self):\n for name in self.names:\n new_label = Label(text=name, id=name, font_size=50)\n self.root.ids.name_entries.add_widget(new_label)",
"def gen_widgets(df):\n w = {\n \"a\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"alpha\": widgets.FloatSlider(min=0.0, max=1.0, step=0.05),\n \"annot\": widgets.Checkbox(),\n \"aspect\": widgets.FloatText(value=1, step=0.05),\n \"axlabel\": widgets.Text(),\n \"bins\": widgets.IntText(value=10),\n \"bw\": widgets.Dropdown(options=[\"scott\", \"silverman\"]),\n \"capsize\": widgets.FloatText(value=1.0),\n \"cbar\": widgets.Checkbox(),\n \"cbar\": widgets.Checkbox(),\n \"cbar_ax\": widgets.Checkbox(),\n \"center\": widgets.FloatText(value=1.0),\n \"ci\": widgets.FloatSlider(min=0, max=100, value=95, step=0.1),\n \"cmap\": widgets.Text(value=\"viridis\"),\n \"col\": widgets.Dropdown(options=list(df)),\n \"col_wrap\": widgets.IntText(value=10),\n \"color\": widgets.Text(value=\"g\"),\n \"cumulative\": widgets.Checkbox(),\n \"cut\": widgets.FloatText(value=1.0),\n \"data\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"data2\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"diag_kind\": widgets.Dropdown(options=[\"auto\", \"hist\", \"kde\"]),\n \"dropna\": widgets.Checkbox(),\n \"edgecolor\": widgets.Text(value=\"gray\"),\n \"err_style\": widgets.Dropdown(options=[\"band\", \"bars\"]),\n \"errwidth\": widgets.FloatText(value=1.0),\n \"fit_reg\": widgets.Checkbox(),\n \"fliersize\": widgets.FloatText(value=1.0),\n \"fmt\": widgets.Text(value=\".2g\"),\n \"gridsize\": widgets.IntText(value=100),\n \"height\": widgets.FloatText(value=5),\n \"hist\": widgets.Checkbox(),\n \"hue\": widgets.Dropdown(options=list(df)),\n \"inner\": widgets.Dropdown(options=[\"box\", \"quartile\", \"point\", \"stick\"]),\n \"jitter\": widgets.Checkbox(),\n \"join\": widgets.Checkbox(),\n \"k_depth\": widgets.Dropdown(options=[\"proportion\", \"tukey\", \"trustworthy\"]),\n \"kde\": widgets.Checkbox(),\n \"kernel\": widgets.Dropdown(options=['gau', 'cos', 'biw', 'epa', 'tri', 'triw']),\n \"kind_catplot\": widgets.Dropdown(options=[\"point\", \"bar\", \"strip\", \"swarm\", \"box\", \"violin\", \"boxen\"]),\n \"kind_jointplot\": widgets.Dropdown(options=[\"scatter\", \"reg\", \"resid\", \"kde\", \"hex\"]),\n \"kind_pairplot\": widgets.Dropdown(options=[\"scatter\", \"reg\"]),\n \"kind_relplot\": widgets.Dropdown(options=[\"scatter\", \"line\"]),\n \"label\": widgets.Text(),\n \"legend\": widgets.Dropdown(options={\"brief\": \"brief\", \"full\":\"full\", \"False\": False}),\n \"legend_out\": widgets.Checkbox(),\n \"linecolor\": widgets.Text(\"white\"),\n \"linewidth\": widgets.FloatText(value=1.0),\n \"linewidths\": widgets.FloatText(value=0.0, step=0.01),\n \"logistic\": widgets.Checkbox(),\n \"logx\": widgets.Checkbox(),\n \"lowess\": widgets.Checkbox(),\n \"margin_titles\": widgets.Checkbox(),\n \"marker\": widgets.Dropdown(options=['o', 'v', '^', '<', '>', '8', 's', 'p', '*', 'h', 'H', 'D', 'd', 'P', 'X']),\n \"n_boot\": widgets.IntText(value=1000),\n \"norm_hist\": widgets.Checkbox(),\n \"notch\": widgets.Checkbox(),\n \"order_regression\": widgets.IntText(value=1),\n \"orient\": widgets.Dropdown(options=[\"v\", \"h\"]),\n \"outlier_prop\": widgets.FloatSlider(min=0.0, max=1.0, step=0.001, value=0.007),\n \"palette\": widgets.Text(value='viridis'),\n \"ratio\": widgets.IntText(value=5),\n \"robust\": widgets.Checkbox(),\n \"row\": widgets.Dropdown(options=list(df)),\n \"rug\": widgets.Checkbox(),\n \"saturation\": widgets.FloatSlider(min=0.0, max=1.0, step=0.05, value=1.0),\n \"scale_boxenplot\": widgets.Dropdown(options=[\"linear\", 
\"exponential\", \"area\"]),\n \"scale_float\": widgets.FloatText(value=1.0),\n \"scale_hue\": widgets.Checkbox(),\n \"scale_violinplot\": widgets.Dropdown(options=[\"area\", \"count\", \"width\"]),\n \"scatter\": widgets.Checkbox(),\n \"shade\": widgets.Checkbox(),\n \"shade_lowest\": widgets.Checkbox(),\n \"sharex\": widgets.Dropdown(options={\"True\": True, \"col\": \"col\", \"row\": \"row\"}),\n \"sharey\": widgets.Dropdown(options={\"True\": True, \"col\": \"col\", \"row\": \"row\"}),\n \"size_float\": widgets.FloatText(value=1.0),\n \"size_vector\": widgets.Dropdown(options=list(df)),\n \"sort\": widgets.Checkbox(),\n \"space\": widgets.FloatText(value=.2),\n \"split\": widgets.Checkbox(),\n \"square\": widgets.Checkbox(),\n \"style\": widgets.Dropdown(options=list(df)),\n \"truncate\": widgets.Checkbox(),\n \"units\": widgets.Dropdown(options=list(df)),\n \"vertical\": widgets.Checkbox(),\n \"vmax\": widgets.FloatText(value=1.0, step=0.1),\n \"vmin\": widgets.FloatText(value=1.0, step=0.1),\n \"whis\": widgets.FloatText(value=1.0),\n \"width\": widgets.FloatText(value=1.0),\n \"x\": widgets.Dropdown(options=list(df)),\n \"x_bins\": widgets.IntText(value=10),\n \"x_ci\": widgets.IntSlider(min=0, max=100, value=95),\n \"x_jitter\": widgets.FloatText(value=.1),\n \"x_partial\": widgets.Dropdown(options=list(df)),\n \"y\": widgets.Dropdown(options=list(df)),\n \"y_jitter\": widgets.FloatText(value=.1),\n \"y_partial\": widgets.Dropdown(options=list(df)),\n }\n relplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"row\": w[\"row\"],\n \"col\": w[\"col\"],\n \"col_wrap\": w[\"col_wrap\"],\n #\"row_order\":\n #\"col_order\":\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\"\n #\"size_order\":\n #\"size_norm\":\n \"legend\": w[\"legend\"],\n \"kind\": w[\"kind_relplot\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n }\n scatterplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\": w[\"sizes\"],\n #\"size_order\":\n #\"size_norm\":\n #\"markers\":\n #\"style_order\":\n #\"{x,y}_bins\": (non functional)\n #\"units\": (non functional)\n #\"estimator\": (non functional)\n #\"ci\": (non functional)\n #\"n_boot\": (non functional)\n \"alpha\": w[\"alpha\"],\n #\"{x,y}_jitter\": (non functional)\n \"legend\": w[\"legend\"],\n }\n lineplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\",\n #\"dashes\":,\n #\"markers\"\n #\"style_order\":\n \"units\": w[\"units\"],\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"sort\": w[\"sort\"],\n \"err_style\": w[\"err_style\"],\n \"legend\": w[\"legend\"],\n }\n catplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"row\": w[\"row\"],\n \"col\": w[\"col\"],\n \"col_wrap\": w[\"col_wrap\"],\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n #\"order\",\"hue_order\"\n #\"row_order\",\"col_order\"\n \"kind\": w[\"kind_catplot\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"legend\": w[\"legend\"],\n \"legend_out\": w[\"legend_out\"],\n \"sharex\": w[\"sharex\"],\n 
\"sharey\": w[\"sharey\"],\n \"margin_titles\": w[\"margin_titles\"],\n }\n stripplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n \"jitter\": w[\"jitter\"],\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"size\": w[\"size_float\"],\n \"edgecolor\": w[\"edgecolor\"],\n \"linewidth\": w[\"linewidth\"],\n }\n swarmplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"size\": w[\"size_float\"],\n \"edgecolor\": w[\"edgecolor\"],\n \"linewidth\": w[\"linewidth\"],\n }\n boxplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n \"width\": w[\"width\"],\n #\"dodge\"\n \"fliersize\": w[\"fliersize\"],\n \"linewidth\": w[\"linewidth\"],\n \"whis\": w[\"whis\"],\n \"notch\": w[\"notch\"],\n }\n violinplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"bw\": w[\"bw\"],\n \"cut\": w[\"cut\"],\n \"scale\": w[\"scale_violinplot\"],\n \"scale_hue\": w[\"scale_hue\"],\n \"gridsize\": w[\"gridsize\"],\n \"width\": w[\"width\"],\n \"inner\": w[\"inner\"],\n \"split\": w[\"split\"],\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"linewidth\": w[\"linewidth\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n }\n boxenplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n \"width\": w[\"width\"],\n #\"dodge\"\n \"k_depth\": w[\"k_depth\"],\n \"linewidth\": w[\"linewidth\"],\n \"scale\": w[\"scale_boxenplot\"],\n \"outlier_prop\": w[\"outlier_prop\"],\n }\n pointplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n #\"markers\"\n #linestyles\n #\"dodge\"\n \"join\": w[\"join\"],\n \"scale\": w[\"scale_float\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"errwidth\": w[\"errwidth\"],\n \"capsize\": w[\"capsize\"],\n }\n barplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n #\"errcolor\"\n \"errwidth\": w[\"errwidth\"],\n \"capsize\": w[\"capsize\"],\n #\"dodge\"\n }\n countplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n #\"dodge\"\n }\n jointplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"kind\": w[\"kind_jointplot\"],\n #stat_func\n \"color\": w[\"color\"],\n \"height\": w[\"height\"],\n \"ratio\": w[\"ratio\"],\n \"space\": w[\"space\"],\n \"dropna\": w[\"dropna\"],\n #\"xlim\"\n #\"ylim\"\n }\n pairplot = {\n \"hue\": w[\"hue\"],\n #hue_order\n \"palette\": w[\"palette\"],\n #vars\n #x_vars\n #y_vars\n \"kind\": w[\"kind_pairplot\"],\n \"diag_kind\": w[\"diag_kind\"],\n #\"markers\"\n 
\"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n \"dropna\": w[\"dropna\"],\n }\n distplot = {\n \"a\": w[\"a\"],\n \"bins\": w[\"bins\"],\n \"hist\": w[\"hist\"],\n \"kde\": w[\"kde\"],\n \"rug\": w[\"rug\"],\n #\"fit\"\n #{hist, kde, rug, fit}_kws\n \"color\": w[\"color\"],\n \"vertical\": w[\"vertical\"],\n \"norm_hist\": w[\"norm_hist\"],\n \"axlabel\": w[\"axlabel\"],\n \"label\": w[\"label\"],\n }\n kdeplot = {\n \"data\": w[\"data\"],\n \"data2\": w[\"data2\"],\n \"shade\": w[\"shade\"],\n \"vertical\": w[\"vertical\"],\n \"kernel\": w[\"kernel\"],\n \"bw\": w[\"bw\"],\n \"gridsize\": w[\"gridsize\"],\n \"cut\": w[\"cut\"],\n #\"clip\":\n \"legend\": w[\"legend\"],\n \"cumulative\": w[\"cumulative\"],\n \"shade_lowest\": w[\"shade_lowest\"],\n \"cbar\": w[\"cbar\"],\n \"cbar_ax\": w[\"cbar_ax\"],\n }\n lmplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"col\": w[\"col\"],\n \"row\": w[\"row\"],\n \"palette\": w[\"palette\"],\n \"col_wrap\": w[\"col_wrap\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n #\"markers\",\n \"sharex\": w[\"sharex\"],\n \"sharey\": w[\"sharey\"],\n \"legend\": w[\"legend\"],\n \"legend_out\": w[\"legend_out\"],\n #x_estimator\n \"x_bins\": w[\"x_bins\"],\n \"x_ci\": w[\"x_ci\"],\n \"scatter\": w[\"scatter\"],\n \"fit_reg\": w[\"fit_reg\"],\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"order\": w[\"order_regression\"],\n \"logistic\": w[\"logistic\"],\n \"lowess\": w[\"lowess\"],\n \"robust\": w[\"robust\"],\n \"logx\": w[\"logx\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"truncate\": w[\"truncate\"],\n \"x_jitter\": w[\"x_jitter\"],\n \"y_jitter\": w[\"y_jitter\"],\n }\n regplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #x_estimator\n \"x_bins\": w[\"x_bins\"],\n \"x_ci\": w[\"x_ci\"],\n \"scatter\": w[\"scatter\"],\n \"fit_reg\": w[\"fit_reg\"],\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"order\": w[\"order_regression\"],\n \"logistic\": w[\"logistic\"],\n \"lowess\": w[\"lowess\"],\n \"robust\": w[\"robust\"],\n \"logx\": w[\"logx\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"truncate\": w[\"truncate\"],\n \"x_jitter\": w[\"x_jitter\"],\n \"y_jitter\": w[\"y_jitter\"],\n \"label\": w[\"label\"],\n \"color\": w[\"color\"],\n \"marker\": w[\"marker\"],\n }\n residplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"lowess\": w[\"lowess\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"order\": w[\"order_regression\"],\n \"robust\": w[\"robust\"],\n \"dropna\": w[\"dropna\"],\n \"label\": w[\"label\"],\n \"color\": w[\"color\"],\n }\n heatmap = {\n \"vmin\": w[\"vmin\"],\n \"vmax\": w[\"vmax\"],\n \"cmap\": w[\"cmap\"],\n \"center\": w[\"center\"],\n \"robust\": w[\"robust\"],\n \"annot\": w[\"annot\"],\n \"fmt\": w[\"fmt\"],\n \"linewidths\": w[\"linewidths\"],\n \"linecolor\": w[\"linecolor\"],\n \"cbar\": w[\"cbar\"],\n \"square\": w[\"square\"],\n #xticklabels, yticklabels\n #\"mask\"\n }\n #clustermap = {}\n return {\n \"*\": w,\n \"relplot\": relplot,\n \"scatterplot\": scatterplot,\n \"lineplot\": lineplot,\n \"catplot\": catplot,\n \"stripplot\": stripplot,\n \"swarmplot\": swarmplot,\n \"boxplot\": boxplot,\n \"violinplot\": violinplot,\n \"boxenplot\": boxenplot,\n \"pointplot\": pointplot,\n \"barplot\": barplot,\n \"countplot\": countplot,\n \"jointplot\": jointplot,\n \"pairplot\": pairplot,\n \"distplot\": distplot,\n \"kdeplot\": 
kdeplot,\n #rugplot -> not interesting\n \"lmplot\": lmplot,\n \"regplot\": regplot,\n \"residplot\": residplot,\n \"heatmap\": heatmap,\n #\"clustermap\": clustermap,\n }",
"def __getattr__(self, name: str) -> WidgetVar:\n for widget in self._list:\n if name == widget.name:\n return widget\n return object.__getattribute__(self, name) # type: ignore",
"def create_widgets(self):\n for name in self.phonebook:\n temp_button = Label(text=name, id=name)\n self.root.ids.entries_box.add_widget(temp_button)",
"def generate_widgets(self, **kwargs):\n\n names = [p for p in self.param if self.param[p].precedence is None or self.param[p].precedence > 1]\n widgets = pn.Param(self.param, show_name=False, show_labels=True, widgets=kwargs)\n\n return {k: v for k, v in zip(names[1:], widgets)}\n #\n # return {k: v for k, v in zip(list(self.param)[1:],\n # pn.Param(self.param, show_name=False, show_labels=True, widgets=kwargs))}",
"def buildWidgetCollection(self, widgetDom):\n widgetNodes = widgetDom.getElementsByTagName('widgets')[0].childNodes\n for widget in widgetNodes:\n if (widget.nodeType == Node.ELEMENT_NODE):\n #Create a new widget of the type as specified in the xml file \n newWidget = Widget()\n newWidget.type = widget.attributes['type'].value\n parameterList = widget.getElementsByTagName('parameter')\n #Look into the parameter list and build parameter dictionary\n for parameter in parameterList:\n if parameter.attributes != None:\n name = parameter.attributes['name'].value\n value = parameter.attributes['value'].value\n newWidget.parameters[name] = value\n self.widgetCollection.append(newWidget)",
"def widgets(self):\r\n url = '{0}/{1}/'.format(self.get_url(), 'widgets')\r\n\r\n return http.Request('GET', url), parsers.parse_json",
"def widgets_opt(widgets_list, arg):\n append = arg and arg[0] == '+'\n if append:\n arg = arg[1:]\n\n if arg:\n widgets = arg.split(',')\n else:\n widgets = []\n\n if not append:\n del widgets_list[:]\n\n widgets_list.extend(widgets)",
"def add_widgets(self, names, labels):\r\n\r\n for row, txt_name in enumerate(names):\r\n\r\n label_name = \"lbl_\" + txt_name # Label name for label widget\r\n label_text = labels[row] # Get the label text from list of labels\r\n btn_name = \"btn_\" + txt_name # Button name for button widget\r\n\r\n # Set class attribute as QLabel and set text and object name\r\n self.__dict__[label_name] = QLabel(text=label_text, objectName=label_name)\r\n # Set class attribute as QPushButton and set text and object name\r\n self.__dict__[btn_name] = QPushButton(text=\"Browse\", objectName=btn_name)\r\n # Set class attribute as line edit with object name\r\n self.__dict__[txt_name] = QLineEdit(objectName=txt_name)\r\n\r\n txt_widget = self.__getattribute__(txt_name) # Set text widget as object\r\n lbl_widget = self.__getattribute__(label_name) # Set label widget as object\r\n btn_widget = self.__getattribute__(btn_name) # Set button widget as object\r\n\r\n # Add widgets to layout based on position in list\r\n self.layout.addWidget(lbl_widget, row, 0)\r\n self.layout.addWidget(txt_widget, row, 1)\r\n self.layout.addWidget(btn_widget, row, 2)\r\n\r\n # Set size policy of the line edit widgets\r\n txt_widget.setSizePolicy(QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Preferred))",
"def _create_value_widgets(self):\n \n # sort values\n self.values = sorted(self.values)\n self.selection = self.default\n \n for value in self.values:\n widget = self.panel.createWidgetT(\"Button\", \"Button\", \n mygui.IntCoord(15, (len(self.widgets)* 20 + 10), self.width - 20, 20),\n mygui.Align())\n widget.setUserString(\"value\", value)\n widget.setCaption(value)\n self.widgets.append(widget)\n \n if value == self.default:\n widget.setStateCheck(True)\n \n widget.subscribeEventMouseButtonClick(self, '_onTypeClick')\n \n self.panel.setSize(self.width, len(self.widgets) * 20 + 20)",
"def make_ddls( self, parent ):\n # ---- 0\n a_widget = ttk.Combobox( parent,\n width = self.arg_width,\n state = \"normal\",\n textvariable = \"self.arg_2_var\" )\n\n a_widget.bind( \"<<ComboboxSelected>>\", self.sync_ddl_0 )\n\n # AppGlobal.gui_style.style_combobox( a_widget )\n self.ddl_0_widget = a_widget\n\n # ---- 1\n a_widget = ttk.Combobox( parent,\n width = self.arg_width,\n state = \"normal\",\n textvariable = \"self.arg_2_var\" )\n\n self.ddl_1_widget = a_widget\n\n # ---- 2\n a_widget = ttk.Combobox( parent,\n width = self.arg_width + 5,\n state = \"normal\",\n textvariable = \"self.arg_2_var\" )\n\n # AppGlobal.gui_style.style_combobox( a_widget )\n self.ddl_2_widget = a_widget\n\n # self.ddl_widgets = [ self.ddl_0_widget, self.ddl_1_widget, self.ddl_2_widget, ]\n # print( self.ddl_widgets[ 0 ] == self.ddl_widgets[ 1 ])\n\n #self.load_ddl_0( )",
"def listBuilderNames():",
"def listBuilderNames():",
"def create_widgets( self ):",
"def AutoBox(\n widgets: [Gtk.Widget], vspacing: int = 10, hspacing: int = 10,\n orientation: Gtk.Orientation = Gtk.Orientation.VERTICAL) -> Gtk.Box:\n # {{{\n\n box = Gtk.Box.new(\n orientation,\n vspacing if orientation == Gtk.Orientation.VERTICAL else hspacing\n )\n\n sub_orientation = 1 - orientation\n for x in widgets:\n if isinstance(x, list):\n x = AutoBox(x, vspacing, hspacing, sub_orientation)\n\n if len(widgets) == 1:\n return x\n\n if isinstance(x, Gtk.Widget):\n box.pack_start(x, True, True, 0)\n\n if not box.get_children():\n return None\n\n return box\n # }}}",
"def widgets(self):\r\n return resources.Widgets(self)",
"def children(v=()):\n assert all([isinstance(w, Widget) for w in v])\n return tuple(v)",
"def create_widget_set(\n self, validate_cmd, callback, default: float, label_text: str, row: int\n ):\n label = self.create_label(label_text, 0, row)\n var = self.create_entry(default, validate_cmd, callback, 1, row)\n self.create_button(callback, \"ENTER\", 2, row)\n return var, label",
"def get_info(input_widgets, column_names, input_info):\n for question_num, (widgets, state) in enumerate(input_widgets):\n if isinstance(widgets, list):\n if len(widgets) > 1:\n for option_num, widget in enumerate(widgets):\n input_info.append(widget.get())\n column_name = \"Q_\" + str(question_num) + \"_\" + str(option_num + 1)\n column_names.append(column_name)\n \n elif len(widgets) == 1:\n column_name = \"Q_\" + str(question_num)\n column_names.append(column_name)\n else:\n input_info.append(widgets.get())\n column_name = \"Q_\" + str(question_num)\n column_names.append(column_name)",
"def create_widgets(self):",
"def widgets(self):\n raise NotImplementedError(\"This method is not ready to be used yet\")",
"def widget_from_iterable(o):\n # Dropdown expects a dict or list, so we convert an arbitrary\n # iterable to either of those.\n if isinstance(o, (list, dict)):\n return Dropdown(options=o)\n elif isinstance(o, Mapping):\n return Dropdown(options=list(o.items()))\n else:\n return Dropdown(options=list(o))",
"def choose(builders, spec):\n\n candidates = search(builders, spec)\n buildername = get_builder(candidates, spec.values()[0])\n\n if not buildername:\n return None\n\n blder = [b for b in builders if b['name'] == buildername][0]\n\n return blder",
"def create_widgets (self):\r\n # create first button\r\n self.bttn1 = Button (self, text = \"I do nothing!\")\r\n self.bttn1.grid ()\r\n\r\n # create a second button\r\n self.bttn2 = Button (self)\r\n self.bttn2.grid ()\r\n self.bttn2.configure (text = \"Me too\")\r\n\r\n # create a third button\r\n self.bttn3 = Button (self)\r\n self.bttn3.grid ()\r\n self.bttn3 [\"text\"] = \"Same here!\""
] | [
"0.65434057",
"0.62620914",
"0.5907812",
"0.56015867",
"0.53328896",
"0.5304384",
"0.52960455",
"0.52779883",
"0.5182024",
"0.51722246",
"0.51666266",
"0.5141146",
"0.51313585",
"0.51123697",
"0.5105806",
"0.5079483",
"0.5075808",
"0.5056689",
"0.5056689",
"0.5035367",
"0.50302666",
"0.50279933",
"0.5006987",
"0.49752638",
"0.49440768",
"0.48684192",
"0.48650542",
"0.4849187",
"0.48379025",
"0.4807676"
] | 0.7826472 | 0 |
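The rows above and below both revolve around abbreviation-to-widget dispatch: a (name, abbrev, default) triple or a bare Python value is inspected and turned into a matching value widget. A minimal, self-contained sketch of that dispatch pattern follows; the Widget class and helper names here are hypothetical stand-ins for illustration, not the real widget-library classes.

from collections.abc import Iterable
from dataclasses import dataclass
from numbers import Integral, Real
from typing import Any, Optional

@dataclass
class Widget:
    # Hypothetical stand-in for a real value widget (Checkbox, IntSlider, Dropdown, ...).
    kind: str
    value: Any = None
    description: str = ""
    options: Any = None

def widget_from_abbrev(abbrev: Any) -> Optional[Widget]:
    # Dispatch a single abbreviation to a widget; order matters (str before
    # generic iterables, bool before Integral, tuple before Iterable).
    if isinstance(abbrev, Widget):
        return abbrev                                    # already a widget: pass through
    if isinstance(abbrev, str):
        return Widget("Text", value=abbrev)              # string -> text box
    if isinstance(abbrev, bool):
        return Widget("Checkbox", value=abbrev)          # bool -> checkbox
    if isinstance(abbrev, Integral):
        return Widget("IntSlider", value=abbrev)         # int -> integer slider
    if isinstance(abbrev, Real):
        return Widget("FloatSlider", value=abbrev)       # float -> float slider
    if isinstance(abbrev, tuple):
        return Widget("Slider", options=abbrev)          # (min, max[, step]) -> ranged slider
    if isinstance(abbrev, Iterable):
        return Widget("Dropdown", options=list(abbrev))  # list/dict/... -> dropdown
    return None                                          # nothing matched

def widgets_from_abbreviations(seq):
    # Turn (name, abbrev, default) tuples into named widgets.
    result = []
    for name, abbrev, default in seq:
        widget = widget_from_abbrev(abbrev)
        if widget is None:
            raise ValueError(f"{abbrev!r} cannot be transformed to a widget")
        if default is not None:
            widget.value = default                       # best-effort default, as in the records
        if not widget.description:
            widget.description = name                    # fall back to the argument name
        result.append(widget)
    return result

if __name__ == "__main__":
    ws = widgets_from_abbreviations([
        ("count", 10, None),                 # -> IntSlider
        ("enabled", True, None),             # -> Checkbox
        ("mode", ["fast", "slow"], "slow"),  # -> Dropdown with a default applied
    ])
    for w in ws:
        print(w.description, w.kind, w.value, w.options)

In the records themselves the same pattern is split across widget_from_abbrev (a single abbreviation, with extra ValueWidget/fixed and tuple handling) and widgets_from_abbreviations (a named sequence that attaches each argument name to its widget).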
Build a ValueWidget instance given an abbreviation or Widget. | def widget_from_abbrev(cls, abbrev, default=empty):
if isinstance(abbrev, ValueWidget) or isinstance(abbrev, fixed):
return abbrev
if isinstance(abbrev, tuple):
widget = cls.widget_from_tuple(abbrev)
if default is not empty:
try:
widget.value = default
except Exception:
# ignore failure to set default
pass
return widget
# Try single value
widget = cls.widget_from_single_value(abbrev)
if widget is not None:
return widget
# Something iterable (list, dict, generator, ...). Note that str and
# tuple should be handled before, that is why we check this case last.
if isinstance(abbrev, Iterable):
widget = cls.widget_from_iterable(abbrev)
if default is not empty:
try:
widget.value = default
except Exception:
# ignore failure to set default
pass
return widget
# No idea...
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_widget(cls_or_instance: Any) -> ContainerWidget[ValueWidget]:\n values = None if isinstance(cls_or_instance, type) else _get_values(cls_or_instance)\n return _uifields_to_container(get_ui_fields(cls_or_instance), values=values)",
"def create_widget(self, value: T | _Undefined = Undefined) -> ValueWidget[T]:\n from magicgui.type_map import get_widget_class\n\n # TODO: this should be cached in some way\n # Map uifield names to widget kwargs\n # FIXME: this part needs a lot of work.\n # This is the biggest challenge for integrating this new UiField idea\n # (which tries to map nicely to existing schemas like JSON Schema)\n # with the rest of the codebase, which used less \"general\" naming schemes.\n _name_map = {\n \"name\": \"name\",\n \"visible\": \"visible\",\n \"nullable\": \"nullable\",\n \"orientation\": \"orientation\",\n \"type\": \"annotation\",\n \"enum\": \"choices\",\n \"title\": \"label\",\n \"description\": \"tooltip\",\n \"maximum\": \"max\",\n \"minimum\": \"min\",\n # \"title\": \"text\", # PushButton only\n # \"exclusive_maximum\": \"stop\", # RangeEdit only\n # \"minimum\": \"start\", # RangeEdit only\n \"multiple_of\": \"step\",\n \"widget\": \"widget_type\",\n }\n\n d = (\n self.parse_annotated()\n .replace(_native_field=None)\n .asdict(include_unset=False)\n )\n opts = {_name_map[k]: v for k, v in d.items() if k in _name_map}\n if \"disabled\" in d:\n opts[\"enabled\"] = not d[\"disabled\"]\n\n # TODO: very hacky... but we don't have the concept of exclusive min/max\n # for float values.\n if \"exclusive_maximum\" in d:\n m = 1 if d.get(\"type\") is int else 0.00000000001\n opts[\"max\"] = d[\"exclusive_maximum\"] - m\n if \"exclusive_minimum\" in d:\n m = 1 if d.get(\"type\") is int else 0.00000000001\n opts[\"min\"] = d[\"exclusive_minimum\"] + m\n\n value = value if value is not Undefined else self.get_default() # type: ignore\n cls, kwargs = get_widget_class(value=value, annotation=self.type, options=opts)\n return cls(**kwargs) # type: ignore",
"def _create_value_widgets(self):\n \n # sort values\n self.values = sorted(self.values)\n self.selection = self.default\n \n for value in self.values:\n widget = self.panel.createWidgetT(\"Button\", \"Button\", \n mygui.IntCoord(15, (len(self.widgets)* 20 + 10), self.width - 20, 20),\n mygui.Align())\n widget.setUserString(\"value\", value)\n widget.setCaption(value)\n self.widgets.append(widget)\n \n if value == self.default:\n widget.setStateCheck(True)\n \n widget.subscribeEventMouseButtonClick(self, '_onTypeClick')\n \n self.panel.setSize(self.width, len(self.widgets) * 20 + 20)",
"def widget_from_single_value(o):\n if isinstance(o, string_types):\n return Text(value=unicode_type(o))\n elif isinstance(o, bool):\n return Checkbox(value=o)\n elif isinstance(o, Integral):\n min, max, value = _get_min_max_value(None, None, o)\n return IntSlider(value=o, min=min, max=max)\n elif isinstance(o, Real):\n min, max, value = _get_min_max_value(None, None, o)\n return FloatSlider(value=o, min=min, max=max)\n else:\n return None",
"def build_entry(var, win):\n\tif var.type.is_range():\n\t\tentry = build_range_entry(var, win)\n\telif var.type.is_enum():\n\t\tentry = build_enum_entry(var, win)\n\telse:\n\t\tentry = Gtk.Label(var.label)\n\tif var.help != \"\":\n\t\tentry.set_tooltip_text(var.help)\n\treturn entry",
"def widgets_from_abbreviations(self, seq):\n result = []\n for name, abbrev, default in seq:\n widget = self.widget_from_abbrev(abbrev, default)\n if not (isinstance(widget, ValueWidget) or isinstance(widget, fixed)):\n if widget is None:\n raise ValueError(\"{!r} cannot be transformed to a widget\".format(abbrev))\n else:\n raise TypeError(\"{!r} is not a ValueWidget\".format(widget))\n if not widget.description:\n widget.description = name\n widget._kwarg = name\n result.append(widget)\n return result",
"def build(self, builder):\n return self.placeholder()",
"def _create_string_widget(self,frame,name,widget_options):\n widget = T.Entry(frame,textvariable=self._tkvars[name],**widget_options)\n param = self.get_parameter_object(name)\n if not lookup_by_class(self.param_immediately_apply_change,type(param)):\n widget.bind('<Return>', lambda e=None,x=name: self._handle_gui_set(x,force=True))\n widget.bind('<FocusOut>', lambda e=None,x=name: self._handle_gui_set(x,force=True))\n return widget",
"def new_varTypeWidget():\n newWidget = QtGui.QComboBox()\n newWidget.addItems(['=', '+', 'num'])\n return newWidget",
"def widget_from_setting(\n recorder: dict,\n key: str,\n group: str,\n element: str,\n override: Union[float, None] = None,\n indent: bool = False,\n) -> dict:\n\n _ = group[element]\n\n if key not in recorder:\n recorder[key] = {}\n\n if \"description\" in _:\n tooltip = _[\"description\"]\n else:\n tooltip = \"\"\n\n value = _[\"default\"]\n\n if override:\n value = override\n\n if indent:\n c1, c2 = st.columns((1, 8))\n else:\n c2 = st\n\n if _[\"type\"] == \"doublespinbox\":\n recorder[key][element] = c2.slider(\n element,\n min_value=float(_[\"min\"]),\n max_value=float(_[\"max\"]),\n value=float(value),\n help=tooltip,\n )\n elif _[\"type\"] == \"spinbox\":\n recorder[key][element] = c2.slider(\n element, min_value=_[\"min\"], max_value=_[\"max\"], value=value, help=tooltip\n )\n elif _[\"type\"] == \"checkbox\":\n recorder[key][element] = c2.checkbox(element, value=value, help=tooltip)\n elif _[\"type\"] == \"checkgroup\":\n opts = list(_[\"value\"].keys())\n recorder[key][element] = c2.multiselect(\n label=element, options=opts, default=value, help=tooltip\n )\n elif _[\"type\"] == \"combobox\":\n recorder[key][element] = c2.selectbox(\n label=element, options=_[\"value\"], index=_[\"value\"].index(value), help=tooltip\n )\n elif _[\"type\"] == \"string\":\n recorder[key][element] = c2.text_input(label=element, default=value, help=tooltip)\n else:\n st.write(f\"Not understood {_}\")\n\n return recorder",
"def application_command_autocomplete_choice_builder(name, value):\n return {\n 'name': name,\n 'value': value,\n }",
"def _create_widget(self,name,master,widget_options={},on_set=None,on_modify=None):\n # select the appropriate widget-creation method;\n # default is self._create_string_widget...\n widget_creation_fn = self._create_string_widget\n\n param_obj,source_po = self.get_parameter_object(name,with_source=True)\n\n if not (param_is_dynamically_generated(param_obj,source_po) or name in self.allow_dynamic):\n # ...but overwrite that with a more specific one, if possible\n for c in classlist(type(param_obj))[::-1]:\n if self.widget_creators.has_key(c):\n widget_creation_fn = self.widget_creators[c]\n break\n elif name not in self.allow_dynamic:\n self.allow_dynamic.append(name)\n\n if on_set is not None:\n self._tkvars[name]._on_set=on_set\n\n if on_modify is not None:\n self._tkvars[name]._on_modify=on_modify\n\n widget=widget_creation_fn(master,name,widget_options)\n\n # Is widget a button (but not a checkbutton)? If so, no label wanted.\n # CEBALERT 'notNonelabel': change to have a label with no text\n if is_button(widget):\n label = None\n else:\n label = T.Label(master,text=self._pretty_print(name))\n\n # disable widgets for constant params\n if param_obj.constant and isinstance(source_po,Parameterized):\n # (need to be able to set on class, hence check it's PO not POMetaclass\n widget.config(state='disabled')\n\n widget.bind('<<right-click>>',lambda event: self._right_click(event, widget))\n\n return widget,label",
"def gen_widgets(df):\n w = {\n \"a\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"alpha\": widgets.FloatSlider(min=0.0, max=1.0, step=0.05),\n \"annot\": widgets.Checkbox(),\n \"aspect\": widgets.FloatText(value=1, step=0.05),\n \"axlabel\": widgets.Text(),\n \"bins\": widgets.IntText(value=10),\n \"bw\": widgets.Dropdown(options=[\"scott\", \"silverman\"]),\n \"capsize\": widgets.FloatText(value=1.0),\n \"cbar\": widgets.Checkbox(),\n \"cbar\": widgets.Checkbox(),\n \"cbar_ax\": widgets.Checkbox(),\n \"center\": widgets.FloatText(value=1.0),\n \"ci\": widgets.FloatSlider(min=0, max=100, value=95, step=0.1),\n \"cmap\": widgets.Text(value=\"viridis\"),\n \"col\": widgets.Dropdown(options=list(df)),\n \"col_wrap\": widgets.IntText(value=10),\n \"color\": widgets.Text(value=\"g\"),\n \"cumulative\": widgets.Checkbox(),\n \"cut\": widgets.FloatText(value=1.0),\n \"data\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"data2\": widgets.Dropdown(options=[(col, list(df[col])) for col in list(df)]),\n \"diag_kind\": widgets.Dropdown(options=[\"auto\", \"hist\", \"kde\"]),\n \"dropna\": widgets.Checkbox(),\n \"edgecolor\": widgets.Text(value=\"gray\"),\n \"err_style\": widgets.Dropdown(options=[\"band\", \"bars\"]),\n \"errwidth\": widgets.FloatText(value=1.0),\n \"fit_reg\": widgets.Checkbox(),\n \"fliersize\": widgets.FloatText(value=1.0),\n \"fmt\": widgets.Text(value=\".2g\"),\n \"gridsize\": widgets.IntText(value=100),\n \"height\": widgets.FloatText(value=5),\n \"hist\": widgets.Checkbox(),\n \"hue\": widgets.Dropdown(options=list(df)),\n \"inner\": widgets.Dropdown(options=[\"box\", \"quartile\", \"point\", \"stick\"]),\n \"jitter\": widgets.Checkbox(),\n \"join\": widgets.Checkbox(),\n \"k_depth\": widgets.Dropdown(options=[\"proportion\", \"tukey\", \"trustworthy\"]),\n \"kde\": widgets.Checkbox(),\n \"kernel\": widgets.Dropdown(options=['gau', 'cos', 'biw', 'epa', 'tri', 'triw']),\n \"kind_catplot\": widgets.Dropdown(options=[\"point\", \"bar\", \"strip\", \"swarm\", \"box\", \"violin\", \"boxen\"]),\n \"kind_jointplot\": widgets.Dropdown(options=[\"scatter\", \"reg\", \"resid\", \"kde\", \"hex\"]),\n \"kind_pairplot\": widgets.Dropdown(options=[\"scatter\", \"reg\"]),\n \"kind_relplot\": widgets.Dropdown(options=[\"scatter\", \"line\"]),\n \"label\": widgets.Text(),\n \"legend\": widgets.Dropdown(options={\"brief\": \"brief\", \"full\":\"full\", \"False\": False}),\n \"legend_out\": widgets.Checkbox(),\n \"linecolor\": widgets.Text(\"white\"),\n \"linewidth\": widgets.FloatText(value=1.0),\n \"linewidths\": widgets.FloatText(value=0.0, step=0.01),\n \"logistic\": widgets.Checkbox(),\n \"logx\": widgets.Checkbox(),\n \"lowess\": widgets.Checkbox(),\n \"margin_titles\": widgets.Checkbox(),\n \"marker\": widgets.Dropdown(options=['o', 'v', '^', '<', '>', '8', 's', 'p', '*', 'h', 'H', 'D', 'd', 'P', 'X']),\n \"n_boot\": widgets.IntText(value=1000),\n \"norm_hist\": widgets.Checkbox(),\n \"notch\": widgets.Checkbox(),\n \"order_regression\": widgets.IntText(value=1),\n \"orient\": widgets.Dropdown(options=[\"v\", \"h\"]),\n \"outlier_prop\": widgets.FloatSlider(min=0.0, max=1.0, step=0.001, value=0.007),\n \"palette\": widgets.Text(value='viridis'),\n \"ratio\": widgets.IntText(value=5),\n \"robust\": widgets.Checkbox(),\n \"row\": widgets.Dropdown(options=list(df)),\n \"rug\": widgets.Checkbox(),\n \"saturation\": widgets.FloatSlider(min=0.0, max=1.0, step=0.05, value=1.0),\n \"scale_boxenplot\": widgets.Dropdown(options=[\"linear\", 
\"exponential\", \"area\"]),\n \"scale_float\": widgets.FloatText(value=1.0),\n \"scale_hue\": widgets.Checkbox(),\n \"scale_violinplot\": widgets.Dropdown(options=[\"area\", \"count\", \"width\"]),\n \"scatter\": widgets.Checkbox(),\n \"shade\": widgets.Checkbox(),\n \"shade_lowest\": widgets.Checkbox(),\n \"sharex\": widgets.Dropdown(options={\"True\": True, \"col\": \"col\", \"row\": \"row\"}),\n \"sharey\": widgets.Dropdown(options={\"True\": True, \"col\": \"col\", \"row\": \"row\"}),\n \"size_float\": widgets.FloatText(value=1.0),\n \"size_vector\": widgets.Dropdown(options=list(df)),\n \"sort\": widgets.Checkbox(),\n \"space\": widgets.FloatText(value=.2),\n \"split\": widgets.Checkbox(),\n \"square\": widgets.Checkbox(),\n \"style\": widgets.Dropdown(options=list(df)),\n \"truncate\": widgets.Checkbox(),\n \"units\": widgets.Dropdown(options=list(df)),\n \"vertical\": widgets.Checkbox(),\n \"vmax\": widgets.FloatText(value=1.0, step=0.1),\n \"vmin\": widgets.FloatText(value=1.0, step=0.1),\n \"whis\": widgets.FloatText(value=1.0),\n \"width\": widgets.FloatText(value=1.0),\n \"x\": widgets.Dropdown(options=list(df)),\n \"x_bins\": widgets.IntText(value=10),\n \"x_ci\": widgets.IntSlider(min=0, max=100, value=95),\n \"x_jitter\": widgets.FloatText(value=.1),\n \"x_partial\": widgets.Dropdown(options=list(df)),\n \"y\": widgets.Dropdown(options=list(df)),\n \"y_jitter\": widgets.FloatText(value=.1),\n \"y_partial\": widgets.Dropdown(options=list(df)),\n }\n relplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"row\": w[\"row\"],\n \"col\": w[\"col\"],\n \"col_wrap\": w[\"col_wrap\"],\n #\"row_order\":\n #\"col_order\":\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\"\n #\"size_order\":\n #\"size_norm\":\n \"legend\": w[\"legend\"],\n \"kind\": w[\"kind_relplot\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n }\n scatterplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\": w[\"sizes\"],\n #\"size_order\":\n #\"size_norm\":\n #\"markers\":\n #\"style_order\":\n #\"{x,y}_bins\": (non functional)\n #\"units\": (non functional)\n #\"estimator\": (non functional)\n #\"ci\": (non functional)\n #\"n_boot\": (non functional)\n \"alpha\": w[\"alpha\"],\n #\"{x,y}_jitter\": (non functional)\n \"legend\": w[\"legend\"],\n }\n lineplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"size\": w[\"size_vector\"],\n \"style\": w[\"style\"],\n \"palette\": w[\"palette\"],\n #\"hue_order\":\n #\"hue_norm\":\n #\"sizes\",\n #\"dashes\":,\n #\"markers\"\n #\"style_order\":\n \"units\": w[\"units\"],\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"sort\": w[\"sort\"],\n \"err_style\": w[\"err_style\"],\n \"legend\": w[\"legend\"],\n }\n catplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"row\": w[\"row\"],\n \"col\": w[\"col\"],\n \"col_wrap\": w[\"col_wrap\"],\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n #\"order\",\"hue_order\"\n #\"row_order\",\"col_order\"\n \"kind\": w[\"kind_catplot\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"legend\": w[\"legend\"],\n \"legend_out\": w[\"legend_out\"],\n \"sharex\": w[\"sharex\"],\n 
\"sharey\": w[\"sharey\"],\n \"margin_titles\": w[\"margin_titles\"],\n }\n stripplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n \"jitter\": w[\"jitter\"],\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"size\": w[\"size_float\"],\n \"edgecolor\": w[\"edgecolor\"],\n \"linewidth\": w[\"linewidth\"],\n }\n swarmplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"size\": w[\"size_float\"],\n \"edgecolor\": w[\"edgecolor\"],\n \"linewidth\": w[\"linewidth\"],\n }\n boxplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n \"width\": w[\"width\"],\n #\"dodge\"\n \"fliersize\": w[\"fliersize\"],\n \"linewidth\": w[\"linewidth\"],\n \"whis\": w[\"whis\"],\n \"notch\": w[\"notch\"],\n }\n violinplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"bw\": w[\"bw\"],\n \"cut\": w[\"cut\"],\n \"scale\": w[\"scale_violinplot\"],\n \"scale_hue\": w[\"scale_hue\"],\n \"gridsize\": w[\"gridsize\"],\n \"width\": w[\"width\"],\n \"inner\": w[\"inner\"],\n \"split\": w[\"split\"],\n #\"dodge\"\n \"orient\": w[\"orient\"],\n \"linewidth\": w[\"linewidth\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n }\n boxenplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n \"width\": w[\"width\"],\n #\"dodge\"\n \"k_depth\": w[\"k_depth\"],\n \"linewidth\": w[\"linewidth\"],\n \"scale\": w[\"scale_boxenplot\"],\n \"outlier_prop\": w[\"outlier_prop\"],\n }\n pointplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n #\"markers\"\n #linestyles\n #\"dodge\"\n \"join\": w[\"join\"],\n \"scale\": w[\"scale_float\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"errwidth\": w[\"errwidth\"],\n \"capsize\": w[\"capsize\"],\n }\n barplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n #\"estimator\"\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n #\"errcolor\"\n \"errwidth\": w[\"errwidth\"],\n \"capsize\": w[\"capsize\"],\n #\"dodge\"\n }\n countplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n #\"order\",\"hue_order\"\n \"orient\": w[\"orient\"],\n \"color\": w[\"color\"],\n \"palette\": w[\"palette\"],\n \"saturation\": w[\"saturation\"],\n #\"dodge\"\n }\n jointplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"kind\": w[\"kind_jointplot\"],\n #stat_func\n \"color\": w[\"color\"],\n \"height\": w[\"height\"],\n \"ratio\": w[\"ratio\"],\n \"space\": w[\"space\"],\n \"dropna\": w[\"dropna\"],\n #\"xlim\"\n #\"ylim\"\n }\n pairplot = {\n \"hue\": w[\"hue\"],\n #hue_order\n \"palette\": w[\"palette\"],\n #vars\n #x_vars\n #y_vars\n \"kind\": w[\"kind_pairplot\"],\n \"diag_kind\": w[\"diag_kind\"],\n #\"markers\"\n 
\"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n \"dropna\": w[\"dropna\"],\n }\n distplot = {\n \"a\": w[\"a\"],\n \"bins\": w[\"bins\"],\n \"hist\": w[\"hist\"],\n \"kde\": w[\"kde\"],\n \"rug\": w[\"rug\"],\n #\"fit\"\n #{hist, kde, rug, fit}_kws\n \"color\": w[\"color\"],\n \"vertical\": w[\"vertical\"],\n \"norm_hist\": w[\"norm_hist\"],\n \"axlabel\": w[\"axlabel\"],\n \"label\": w[\"label\"],\n }\n kdeplot = {\n \"data\": w[\"data\"],\n \"data2\": w[\"data2\"],\n \"shade\": w[\"shade\"],\n \"vertical\": w[\"vertical\"],\n \"kernel\": w[\"kernel\"],\n \"bw\": w[\"bw\"],\n \"gridsize\": w[\"gridsize\"],\n \"cut\": w[\"cut\"],\n #\"clip\":\n \"legend\": w[\"legend\"],\n \"cumulative\": w[\"cumulative\"],\n \"shade_lowest\": w[\"shade_lowest\"],\n \"cbar\": w[\"cbar\"],\n \"cbar_ax\": w[\"cbar_ax\"],\n }\n lmplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"hue\": w[\"hue\"],\n \"col\": w[\"col\"],\n \"row\": w[\"row\"],\n \"palette\": w[\"palette\"],\n \"col_wrap\": w[\"col_wrap\"],\n \"height\": w[\"height\"],\n \"aspect\": w[\"aspect\"],\n #\"markers\",\n \"sharex\": w[\"sharex\"],\n \"sharey\": w[\"sharey\"],\n \"legend\": w[\"legend\"],\n \"legend_out\": w[\"legend_out\"],\n #x_estimator\n \"x_bins\": w[\"x_bins\"],\n \"x_ci\": w[\"x_ci\"],\n \"scatter\": w[\"scatter\"],\n \"fit_reg\": w[\"fit_reg\"],\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"order\": w[\"order_regression\"],\n \"logistic\": w[\"logistic\"],\n \"lowess\": w[\"lowess\"],\n \"robust\": w[\"robust\"],\n \"logx\": w[\"logx\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"truncate\": w[\"truncate\"],\n \"x_jitter\": w[\"x_jitter\"],\n \"y_jitter\": w[\"y_jitter\"],\n }\n regplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n #x_estimator\n \"x_bins\": w[\"x_bins\"],\n \"x_ci\": w[\"x_ci\"],\n \"scatter\": w[\"scatter\"],\n \"fit_reg\": w[\"fit_reg\"],\n \"ci\": w[\"ci\"],\n \"n_boot\": w[\"n_boot\"],\n \"units\": w[\"units\"],\n \"order\": w[\"order_regression\"],\n \"logistic\": w[\"logistic\"],\n \"lowess\": w[\"lowess\"],\n \"robust\": w[\"robust\"],\n \"logx\": w[\"logx\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"truncate\": w[\"truncate\"],\n \"x_jitter\": w[\"x_jitter\"],\n \"y_jitter\": w[\"y_jitter\"],\n \"label\": w[\"label\"],\n \"color\": w[\"color\"],\n \"marker\": w[\"marker\"],\n }\n residplot = {\n \"x\": w[\"x\"],\n \"y\": w[\"y\"],\n \"lowess\": w[\"lowess\"],\n \"x_partial\": w[\"x_partial\"],\n \"y_partial\": w[\"y_partial\"],\n \"order\": w[\"order_regression\"],\n \"robust\": w[\"robust\"],\n \"dropna\": w[\"dropna\"],\n \"label\": w[\"label\"],\n \"color\": w[\"color\"],\n }\n heatmap = {\n \"vmin\": w[\"vmin\"],\n \"vmax\": w[\"vmax\"],\n \"cmap\": w[\"cmap\"],\n \"center\": w[\"center\"],\n \"robust\": w[\"robust\"],\n \"annot\": w[\"annot\"],\n \"fmt\": w[\"fmt\"],\n \"linewidths\": w[\"linewidths\"],\n \"linecolor\": w[\"linecolor\"],\n \"cbar\": w[\"cbar\"],\n \"square\": w[\"square\"],\n #xticklabels, yticklabels\n #\"mask\"\n }\n #clustermap = {}\n return {\n \"*\": w,\n \"relplot\": relplot,\n \"scatterplot\": scatterplot,\n \"lineplot\": lineplot,\n \"catplot\": catplot,\n \"stripplot\": stripplot,\n \"swarmplot\": swarmplot,\n \"boxplot\": boxplot,\n \"violinplot\": violinplot,\n \"boxenplot\": boxenplot,\n \"pointplot\": pointplot,\n \"barplot\": barplot,\n \"countplot\": countplot,\n \"jointplot\": jointplot,\n \"pairplot\": pairplot,\n \"distplot\": distplot,\n \"kdeplot\": 
kdeplot,\n #rugplot -> not interesting\n \"lmplot\": lmplot,\n \"regplot\": regplot,\n \"residplot\": residplot,\n \"heatmap\": heatmap,\n #\"clustermap\": clustermap,\n }",
"def add_simple_widget(self, name, widget, label=None, value_handler=None, add_indicator=None, location=(None,0)):\n if name in self.params:\n raise KeyError(\"widget {} already exists\".format(name))\n row,col,rowspan,colspan=self._normalize_location(location,default=(None,0,1,None))\n if label is not None:\n wlabel=QtWidgets.QLabel(self)\n wlabel.setObjectName(_fromUtf8(\"{}__label\".format(name)))\n self.formLayout.addWidget(wlabel,row,col,rowspan,1)\n wlabel.setText(_translate(self.name,label,None))\n else:\n wlabel=None\n value_handler=value_handler or values_module.get_default_value_handler(widget)\n if add_indicator is None:\n add_indicator=self.add_indicator\n if add_indicator:\n windicator=QtWidgets.QLabel(self)\n windicator.setObjectName(_fromUtf8(\"{}__indicator\".format(name)))\n self.formLayout.addWidget(windicator,row,col+2,rowspan,1)\n indicator_handler=values_module.WidgetLabelIndicatorHandler(windicator,widget=value_handler)\n else:\n indicator_handler=None\n if wlabel is None:\n self.formLayout.addWidget(widget,row,col,rowspan,colspan or (2 if add_indicator else 3))\n else:\n self.formLayout.addWidget(widget,row,col+1,rowspan,colspan or (1 if add_indicator else 2))\n self._add_widget(name,self.ParamRow(widget,wlabel,value_handler,indicator_handler))\n return value_handler",
"def conform_to_value(self, owner, value):\n self.owner = owner\n if isinstance(value, Node):\n self.node = value\n try:\n # Sometimes the WidgyWidget is wrapped in a\n # RelatedFieldWidgetWrapper\n self.widget.widget.node = value\n except AttributeError:\n self.widget.node = value\n self.queryset = None\n else:\n # remove the empty choice\n choices = [c for c in self.choices if c[0]]\n if len(choices) == 1:\n self._value = choices[0][0]\n self.widget = DisplayWidget(display_name=choices[0][1])\n self.help_text = _('You must save before you can edit this.')\n else:\n self.widget = ContentTypeRadioSelect(\n choices=choices,\n )\n\n try:\n self.widget.widget.site = self.site\n self.widget.widget.owner = owner\n except AttributeError:\n self.widget.site = self.site\n self.widget.owner = owner",
"def get_widget_value(self, widget):\n if isinstance(widget, Entry):\n return widget.get()\n elif isinstance(widget, Spinbox):\n widget.selection_element()\n return widget.get()\n elif isinstance(widget, Listbox):\n return widget.get(0, END)\n elif isinstance(widget, List):\n return [self.get_widget_value(w) for w in widget]",
"def fromwidget(cls, widget, unmap_b=False, **kw):\n unmap_b = kw.pop(\"unmap\", unmap_b)\n obj = cls(None, widget, **kw)\n if unmap_b:\n unmap(widget)\n return obj",
"def _ValueOrPlaceHolder(value_string, description):\n value_element = xml.etree.ElementTree.Element('value')\n value_element.set('xml:lang', _VALUE_LANGUAGE)\n\n if value_string:\n value_element.text = value_string\n else:\n value_element.text = '** INSERT %s **' % description\n\n return value_element",
"def add_custom_widget(self, name, widget, value_handler=None, indicator_handler=None, location=(None,0,1,None)):\n if name in self.params:\n raise KeyError(\"widget {} already exists\".format(name))\n location=self._normalize_location(location,default=(None,0,1,3))\n self.formLayout.addWidget(widget,*location)\n value_handler=value_handler or values_module.get_default_value_handler(widget)\n indicator_handler=indicator_handler or values_module.get_default_indicator_handler(widget)\n self._add_widget(name,self.ParamRow(widget,None,value_handler,indicator_handler))\n return value_handler",
"def build_value_type(validator: Validator) -> Callable[[Any, Any], Any]:\n return functools.partial(ValueTypeDefinition, validator)",
"def subwidgets(self, name, value, attrs=None, choices=()):\n yield SubWidget(self, name, value, attrs, choices)",
"def __init__(self, name, value=None, desc=None, prop=None, style=None, attr=None,\n disabled=False, app=None, css_cls=None, min=None, max=None, start=None, step=None,\n number_format=None, onchange_callback=None):\n Widget.__init__(self, name, desc=desc, prop=prop, style=style, attr=attr,\n css_cls=css_cls)\n self._app = app\n self._onchange_callback = onchange_callback\n self._disabled = disabled\n if value is not None:\n self._value = value\n else:\n self._value = 0\n if min is not None:\n self._min = min\n else:\n self._min = 0\n if max is not None:\n self._max = max\n else:\n self._max = 100\n if start is not None:\n self._start = start\n else:\n self._start = 0\n if step is not None:\n self._step = step\n else:\n self._step = 1\n if number_format is not None:\n self._number_format = number_format\n else:\n self._number_format = \"n\"\n if disabled is not None:\n self._disabled = disabled\n else:\n self._disabled = False",
"def create_widget_set(\n self, validate_cmd, callback, default: float, label_text: str, row: int\n ):\n label = self.create_label(label_text, 0, row)\n var = self.create_entry(default, validate_cmd, callback, 1, row)\n self.create_button(callback, \"ENTER\", 2, row)\n return var, label",
"def create_widget(self):\n pass",
"def __init__(self, text=\"\", widget=None):\n self._label_text = text\n self._widget = widget\n self._widget.on_change = self._update\n super().__init__(text=f\"{text} {widget.value}\")",
"def __init__(self,\n\t label:str=None,\n\t variable_name:str=None,\n\t value:typing.Any=None,\n\t parent:QtWidgets.QWidget=None,\n\t on_change:typing.Callable=None):\n\t\tQtWidgets.QWidget.__init__(self, parent=parent)\n\n\t\tif label is None:\n\t\t\tif variable_name is None:\n\t\t\t\tlabel = \"\"\n\t\t\telse:\n\t\t\t\tlabel = app.translator(variable_name)\n\n\t\tself._make_label_widget(label)\n\t\tself.layout = self._formset()\n\t\tself.setLayout(self.layout)\n\t\tself.label = label\n\n\t\tValueMixin.__init__(self, variable_name=variable_name, on_change=on_change, value=value)",
"def widgets(parameter: Parameter):\n widgets = []\n for key in parameter.keys():\n textEdit = QTextEdit()\n textEdit.setText(key)\n widgets.append(textEdit)\n if isinstance(parameter[key], Enum):\n comboBox = MyQtEnumComboBox()\n comboBox.fillValues(type(parameter[key]))\n widgets.append(comboBox)\n elif isinstance(parameter[key], bool):\n comboBox = QComboBox()\n comboBox.addItems((\"False\", \"True\"))\n widgets.append(comboBox)\n else:\n textEdit = QTextEdit()\n textEdit.setText(str(parameter[key]))\n widgets.append(textEdit)\n for widget in widgets:\n widget.setFixedHeight(30)\n return widgets",
"def _create_number_widget(self,frame,name,widget_options):\n w = TaggedSlider(frame,variable=self._tkvars[name],**widget_options)\n param = self.get_parameter_object(name)\n\n lower_bound,upper_bound = param.get_soft_bounds()\n\n if upper_bound is not None and lower_bound is not None:\n # TaggedSlider needs BOTH bounds (neither can be None)\n w.set_bounds(lower_bound,upper_bound,inclusive_bounds=param.inclusive_bounds)\n\n\n # have to do the lookup because subclass might override default\n if not lookup_by_class(self.param_immediately_apply_change,type(param)):\n w.bind('<<TagReturn>>', lambda e=None,x=name: self._handle_gui_set(x,force=True))\n w.bind('<<TagFocusOut>>', lambda e=None,x=name: self._handle_gui_set(x,force=True))\n w.bind('<<SliderSet>>', lambda e=None,x=name: self._handle_gui_set(x,force=True))\n\n return w",
"def test_widget_init(self):\n widget = Widget('formula', 'amount', '[title]', '[description]', '[footer]')\n\n assert widget._type == 'formula'\n assert widget._title == '[title]'\n assert widget._description == '[description]'\n assert widget._footer == '[footer]'",
"def on_action_widget_open(self, value):\n _log.debug('widget_open %s', value)\n obj: QtWidgets.QWidget = None\n floating = False\n unique_id = None\n args = []\n kwargs = {}\n if isinstance(value, dict):\n floating = bool(value.get('floating', False))\n spec = value['value']\n args = value.get('args', args)\n kwargs = value.get('kwargs', kwargs)\n else:\n spec = value\n if isinstance(spec, str):\n cls_unique_id = get_unique_id(spec)\n if ':' in spec:\n unique_id = spec\n cls_unique_id = unique_id.split(':')[0]\n spec = get_instance(cls_unique_id, default=None)\n if isinstance(spec, type):\n obj = spec(*args, **kwargs)\n else:\n obj = spec\n pubsub_singleton.register(obj, unique_id=unique_id, parent=self)\n unique_id = obj.unique_id\n obj.setObjectName(unique_id)\n obj.dock_widget = DockWidget(obj)\n obj.dock_widget.setObjectName(f'{unique_id}__dock')\n tab_widget = obj.dock_widget.tabWidget()\n tab_widget.setElideMode(QtCore.Qt.TextElideMode.ElideNone)\n self._dock_manager.addDockWidget(QtAds.TopDockWidgetArea, obj.dock_widget)\n pubsub_singleton.publish('registry/style/actions/!render', unique_id)\n if floating:\n dw = obj.dock_widget\n dw.setFloating()\n c = dw.floatingDockContainer()\n c.resize(800, 600)\n if getattr(obj, 'view_skip_undo', False):\n return None\n else:\n return [['registry/view/actions/!widget_close', unique_id],\n ['registry/view/actions/!widget_open', unique_id]]"
] | [
"0.7156323",
"0.6786408",
"0.58533776",
"0.57980424",
"0.57632685",
"0.5763224",
"0.5702193",
"0.55954725",
"0.53923345",
"0.53619194",
"0.5360439",
"0.53555506",
"0.53469926",
"0.53051937",
"0.51514065",
"0.5137003",
"0.51353097",
"0.512663",
"0.5123442",
"0.5101219",
"0.51009643",
"0.5096644",
"0.50798625",
"0.5027526",
"0.4976889",
"0.4943157",
"0.49383628",
"0.4923497",
"0.4874728",
"0.48480377"
] | 0.72978383 | 0 |
Make widgets from an iterable. This should not be done for a string or tuple. | def widget_from_iterable(o):
# Dropdown expects a dict or list, so we convert an arbitrary
# iterable to either of those.
if isinstance(o, (list, dict)):
return Dropdown(options=o)
elif isinstance(o, Mapping):
return Dropdown(options=list(o.items()))
else:
return Dropdown(options=list(o)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _uifields_to_container(\n ui_fields: Iterable[UiField],\n values: Mapping[str, Any] | None = None,\n *,\n container_kwargs: Mapping | None = None,\n) -> ContainerWidget[ValueWidget]:\n from magicgui import widgets\n\n container = widgets.Container(\n widgets=[field.create_widget() for field in ui_fields],\n **(container_kwargs or {}),\n )\n if values is not None:\n container.update(values)\n return container",
"def pack_widget(widget, **kwargs):\n try:\n widget.pack(**kwargs)\n except AttributeError:\n return [pack_widget(w, **kwargs) for w in widget]\n return widget",
"def create_widget_tuple_list (labels: list, values: list) -> list:\n _merged = tuple(zip(labels, values))\n return _merged",
"def fromiter(cls, args, **assumptions):\n return cls(*tuple(args), **assumptions)",
"def create_widgets(self):\n for name in self.phonebook:\n temp_button = Label(text=name, id=name)\n self.root.ids.entries_box.add_widget(temp_button)",
"def from_iterable(cls, iterable):\n\t\tdata = []\n\t\twidth, height = None, 0\n\t\tfor row in iterable:\n\t\t\trow = list(row)\n\t\t\tif width is None:\n\t\t\t\twidth = len(row)\n\t\t\telif len(row) != width:\n\t\t\t\traise ValueError('Not all lines are of equal width ({0})'.format(width))\n\t\t\tdata.extend(row)\n\t\t\theight += 1\n\t\tm = cls(Size(width, height))\n\t\tm.data = data\n\t\treturn m",
"def widget_from_tuple(o):\n if _matches(o, (Real, Real)):\n min, max, value = _get_min_max_value(o[0], o[1])\n if all(isinstance(_, Integral) for _ in o):\n cls = IntSlider\n else:\n cls = FloatSlider\n return cls(value=value, min=min, max=max)\n elif _matches(o, (Real, Real, Real)):\n step = o[2]\n if step <= 0:\n raise ValueError(\"step must be >= 0, not %r\" % step)\n min, max, value = _get_min_max_value(o[0], o[1], step=step)\n if all(isinstance(_, Integral) for _ in o):\n cls = IntSlider\n else:\n cls = FloatSlider\n return cls(value=value, min=min, max=max, step=step)",
"def add_widgets(self, names, labels):\r\n\r\n for row, txt_name in enumerate(names):\r\n\r\n label_name = \"lbl_\" + txt_name # Label name for label widget\r\n label_text = labels[row] # Get the label text from list of labels\r\n btn_name = \"btn_\" + txt_name # Button name for button widget\r\n\r\n # Set class attribute as QLabel and set text and object name\r\n self.__dict__[label_name] = QLabel(text=label_text, objectName=label_name)\r\n # Set class attribute as QPushButton and set text and object name\r\n self.__dict__[btn_name] = QPushButton(text=\"Browse\", objectName=btn_name)\r\n # Set class attribute as line edit with object name\r\n self.__dict__[txt_name] = QLineEdit(objectName=txt_name)\r\n\r\n txt_widget = self.__getattribute__(txt_name) # Set text widget as object\r\n lbl_widget = self.__getattribute__(label_name) # Set label widget as object\r\n btn_widget = self.__getattribute__(btn_name) # Set button widget as object\r\n\r\n # Add widgets to layout based on position in list\r\n self.layout.addWidget(lbl_widget, row, 0)\r\n self.layout.addWidget(txt_widget, row, 1)\r\n self.layout.addWidget(btn_widget, row, 2)\r\n\r\n # Set size policy of the line edit widgets\r\n txt_widget.setSizePolicy(QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Preferred))",
"def from_iterable(self, iterable):\n raise NotImplementedError()",
"def aSizer(sizer,*elements):\r\n for element in elements:\r\n if isinstance(element,tuple):\r\n if element[0] != None:\r\n sizer.Add(*element)\r\n elif element != None:\r\n sizer.Add(element)\r\n return sizer",
"def widgets_from_abbreviations(self, seq):\n result = []\n for name, abbrev, default in seq:\n widget = self.widget_from_abbrev(abbrev, default)\n if not (isinstance(widget, ValueWidget) or isinstance(widget, fixed)):\n if widget is None:\n raise ValueError(\"{!r} cannot be transformed to a widget\".format(abbrev))\n else:\n raise TypeError(\"{!r} is not a ValueWidget\".format(widget))\n if not widget.description:\n widget.description = name\n widget._kwarg = name\n result.append(widget)\n return result",
"def _create_value_widgets(self):\n \n # sort values\n self.values = sorted(self.values)\n self.selection = self.default\n \n for value in self.values:\n widget = self.panel.createWidgetT(\"Button\", \"Button\", \n mygui.IntCoord(15, (len(self.widgets)* 20 + 10), self.width - 20, 20),\n mygui.Align())\n widget.setUserString(\"value\", value)\n widget.setCaption(value)\n self.widgets.append(widget)\n \n if value == self.default:\n widget.setStateCheck(True)\n \n widget.subscribeEventMouseButtonClick(self, '_onTypeClick')\n \n self.panel.setSize(self.width, len(self.widgets) * 20 + 20)",
"def from_iterable(iterable: Iterable) -> ObservableBase:\n from ..operators.observable.fromiterable import from_iterable\n return from_iterable(iterable)",
"def create_widgets( self ):",
"def create_widgets(self):\n for name in self.names:\n new_label = Label(text=name, id=name, font_size=50)\n self.root.ids.name_entries.add_widget(new_label)",
"def widgets(parameter: Parameter):\n widgets = []\n for key in parameter.keys():\n textEdit = QTextEdit()\n textEdit.setText(key)\n widgets.append(textEdit)\n if isinstance(parameter[key], Enum):\n comboBox = MyQtEnumComboBox()\n comboBox.fillValues(type(parameter[key]))\n widgets.append(comboBox)\n elif isinstance(parameter[key], bool):\n comboBox = QComboBox()\n comboBox.addItems((\"False\", \"True\"))\n widgets.append(comboBox)\n else:\n textEdit = QTextEdit()\n textEdit.setText(str(parameter[key]))\n widgets.append(textEdit)\n for widget in widgets:\n widget.setFixedHeight(30)\n return widgets",
"def radio_buttons_from_list(parent, items, variable, command):\n\n for row, item in enumerate(items):\n tk.Radiobutton(\n parent,\n text=item.title(),\n variable=variable,\n value=item,\n anchor=\"w\",\n justify=\"left\",\n command=command,\n ).grid(row=row, column=0, sticky=\"we\")",
"def create_widgets(self):",
"def view_widget(items, **kwargs):\n from . import (\n visualize_images,\n visualize_landmarks_2d,\n visualize_landmarks_3d,\n visualize_shapes_2d,\n visualize_shapes_3d,\n visualize_shape_model_2d,\n visualize_meshes_3d,\n visualize_shape_model_3d,\n )\n\n # We use the first item to select the correct widget\n if not isinstance(items, Sized) or isinstance(items, LandmarkManager):\n template = items\n else:\n template = items[0]\n # note that the ordering of this list is important - a TriMesh isa\n # PointCloud, so we must test for the more specialized case first\n # if we want to do that. Third item is an optional boolean test.\n cls_to_items_widget = [\n (Image, visualize_images, None),\n (TriMesh, visualize_meshes_3d, lambda m: m.n_dims == 3),\n (PointCloud, visualize_shapes_2d, lambda pc: pc.n_dims == 2),\n (PointCloud, visualize_shapes_3d, lambda pc: pc.n_dims == 3),\n (\n LandmarkManager,\n visualize_landmarks_2d,\n lambda lms: list(lms.values())[0].n_dims == 2,\n ),\n (\n LandmarkManager,\n visualize_landmarks_3d,\n lambda lms: list(lms.values())[0].n_dims == 3,\n ),\n (\n PCAModel,\n visualize_shape_model_2d,\n lambda m: isinstance(m.template_instance, PointCloud)\n and m.template_instance.n_dims == 2,\n ),\n (\n PCAModel,\n visualize_shape_model_3d,\n lambda m: isinstance(m.template_instance, PointCloud)\n and m.template_instance.n_dims == 3,\n ),\n ]\n\n for (cls, widget, test) in cls_to_items_widget:\n if isinstance(template, cls) and (test is None or test(template)):\n return widget(items, **kwargs)\n\n raise ValueError(\n \"No suitable list visualization found for type {} - valid types are \"\n \"{} or subclasses thereof\".format(\n type(template), \", \".format([x[0] for x in cls_to_items_widget])\n )\n )",
"def create_layout(layout_list):\n qt_button_layout = QGridLayout()\n\n for row in range(len(layout_list)):\n for column in range(len(layout_list[row])):\n user_input_object = layout_list[row][column]\n qt_button_layout.addWidget(user_input_object, row, column, 1, 1)\n\n return qt_button_layout",
"def to_html_inputs(cls):\n spec = inspect.getfullargspec(cls)\n args = [arg for arg in spec.args if arg != 'self']\n annotations = spec.annotations\n defaults = [cls.marshal_data_for_ui(default) for default in spec.defaults]\n\n divs = []\n states = []\n for i, arg in enumerate(args):\n left = html.Label(arg, style={'display': 'inline-block', 'width': '100px'})\n\n annotation = annotations.get(arg)\n default = defaults[i]\n\n if annotation is bool:\n right = daq.BooleanSwitch(id=arg, on=default)\n state = State(arg, 'value')\n elif 'level' == arg:\n right = dcc.Dropdown(id=arg,\n options=[{'label': item.value, 'value': item.value} for item in IntervalLevel],\n value=default)\n state = State(arg, 'value')\n\n elif 'timestamp' in arg:\n right = dcc.DatePickerSingle(id=arg, date=default)\n state = State(arg, 'date')\n else:\n if 'filters' == arg and default:\n default = json.dumps(default, cls=CustomJsonDecoder)\n\n if 'columns' == arg and default:\n columns = [column.name for column in default]\n default = ','.join(columns)\n\n if isinstance(default, list) or isinstance(default, dict):\n default = json.dumps(default)\n\n right = dcc.Input(id=arg, type='text', value=default)\n state = State(arg, 'value')\n\n right.style = {'display': 'inline-block'}\n divs.append(html.Div([left, right], style={'margin-left': '120px'}))\n states.append(state)\n\n return divs, states",
"def __init__(self,name,value,*args,**kargs):\n self.ndim = len(value)\n if 'fields' in kargs:\n fields = kargs['fields']\n else:\n fields = [ str(i) for i in range(self.ndim) ]\n\n self.input = QtGui.QWidget(*args)\n InputItem.__init__(self,name,*args,**kargs)\n #self.layout().insertWidget(1,self.input)\n\n #layout = QtGui.QHBoxLayout(self)\n #self.input.setLayout(layout)\n layout = self.layout()\n self.fields = []\n for fld,val in zip(fields,value):\n f = InputInteger(fld,val)\n self.fields.append(f)\n layout.addWidget(f)",
"def fromiter(iterable, dtype, count=-1):\n\n return call_origin(numpy.fromiter, iterable, dtype, count)",
"def make_widgets(self):\n self.mode_select = Selector(**MODE_SELECT_SETTINGS)\n self.bind_keys_to_modes()\n self.layer_select = Selector(**LAYER_SELECT_SETTINGS)\n self.check_boxes = CheckBoxArray(**CHECK_ARRAY_SETTINGS)\n self.check_boxes.bind_key(pg.K_v, self.toggle_layer_visibility)\n self.navs = [Button(**NAV_LEFT), Button(**NAV_RIGHT)]\n self.save_button = Button(**SAVE_BUTTON)\n self.load_button = Button(**LOAD_BUTTON)\n self.new_button = Button(**NEW_BUTTON)\n self.widgets = [self.mode_select, self.layer_select, self.check_boxes,\n self.navs[0], self.navs[1],\n self.save_button, self.load_button, self.new_button]",
"def widgets_opt(widgets_list, arg):\n append = arg and arg[0] == '+'\n if append:\n arg = arg[1:]\n\n if arg:\n widgets = arg.split(',')\n else:\n widgets = []\n\n if not append:\n del widgets_list[:]\n\n widgets_list.extend(widgets)",
"def create_widget_set(\n self, validate_cmd, callback, default: float, label_text: str, row: int\n ):\n label = self.create_label(label_text, 0, row)\n var = self.create_entry(default, validate_cmd, callback, 1, row)\n self.create_button(callback, \"ENTER\", 2, row)\n return var, label",
"def customize_fields(self, fields):\n\n for field in fields.values():\n\n field_type = type(field.field)\n\n if field_type is List or field_type is Set:\n field.widgetFactory = CheckBoxFieldWidget\n\n elif field_type is Choice:\n field.widgetFactory = RadioFieldWidget",
"def AutoBox(\n widgets: [Gtk.Widget], vspacing: int = 10, hspacing: int = 10,\n orientation: Gtk.Orientation = Gtk.Orientation.VERTICAL) -> Gtk.Box:\n # {{{\n\n box = Gtk.Box.new(\n orientation,\n vspacing if orientation == Gtk.Orientation.VERTICAL else hspacing\n )\n\n sub_orientation = 1 - orientation\n for x in widgets:\n if isinstance(x, list):\n x = AutoBox(x, vspacing, hspacing, sub_orientation)\n\n if len(widgets) == 1:\n return x\n\n if isinstance(x, Gtk.Widget):\n box.pack_start(x, True, True, 0)\n\n if not box.get_children():\n return None\n\n return box\n # }}}",
"def _build_iterable(self):",
"def build_form(vars, win):\n\tgrid = Gtk.Grid(column_spacing=8, row_spacing=8)\n\ti = 0\n\tfor v in vars:\n\t\tlabel = Gtk.Label(v.label)\n\t\tlabel.set_xalign(1.)\n\t\tgrid.attach(label, 0, i, 1, 1)\n\t\titem = build_entry(v, win)\n\t\tgrid.attach_next_to(item, label, Gtk.PositionType.RIGHT, 1, 1)\n\t\ti = i + 1\n\tgrid.show_all()\n\treturn grid"
] | [
"0.603943",
"0.59534204",
"0.58285683",
"0.5662036",
"0.564148",
"0.5636002",
"0.55858034",
"0.5555193",
"0.55464804",
"0.5515366",
"0.54974586",
"0.5476306",
"0.5476136",
"0.54628336",
"0.5435945",
"0.5406763",
"0.53550434",
"0.52587557",
"0.52172387",
"0.52097",
"0.5205433",
"0.51533324",
"0.50967264",
"0.5063394",
"0.5022544",
"0.5016405",
"0.49736696",
"0.49729505",
"0.4970229",
"0.49621344"
] | 0.6669193 | 0 |
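For context on the record above: the conversion it performs is what lets interact-style abbreviations accept plain lists, dicts, mappings, or other iterables. A minimal usage sketch of the resulting widgets, assuming a standard ipywidgets installation (the concrete option values are made up for illustration):

from ipywidgets import Dropdown

# A list or dict is handed to Dropdown directly.
size = Dropdown(options=["small", "medium", "large"])

# A Mapping becomes a list of (label, value) pairs, so labels and values may differ.
level = Dropdown(options=list({"Low": 0, "High": 1}.items()))

# Any other iterable (a range here) is materialised into a list first.
index = Dropdown(options=list(range(5)))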
Return the value for this widget which should be passed to interactive functions. Custom widgets can change this method to process the raw value ``self.value``. | def get_interact_value(self):
return self.value | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_value(self):\n if self.notebook:\n return self.widget.value\n else:\n if hasattr(self.widget, \"value\"):\n return self.widget.value()\n elif hasattr(self.widget, \"currentText\"):\n return self.widget.currentText()\n elif hasattr(self.widget, \"text\"):\n return self.widget.text()",
"def get_value(self):\n return self.value",
"def get_value(self):\n return self.value",
"def get_value(self):\n return self.value",
"def _get_value(self):\n return self.__value",
"def get_value(self):\n return self._value",
"def get_value(self):\n return self._value",
"def get_value(self):\n return self._value",
"def get_value(self):\n return self._value",
"def get_value(self):\n return self._value",
"def value(self):\n return self.__value",
"def value(self):\n return self.__value",
"def get_value(self):\n return self._value",
"def value(self):\n\n\t\treturn self.__value",
"def value (self) :\n\n return self.__value__",
"def _get_value(self):\n \n return self._value",
"def get_value(self):\n return self._val",
"def get_val(self):\n return self.value",
"def value(self) -> any:\r\n\r\n return self.__value",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")",
"def value(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"value\")"
] | [
"0.82415944",
"0.7548149",
"0.7548149",
"0.7548149",
"0.7469256",
"0.74551094",
"0.74551094",
"0.73919475",
"0.73919475",
"0.73919475",
"0.73580617",
"0.73580617",
"0.7339713",
"0.7287065",
"0.7286482",
"0.7276106",
"0.72296405",
"0.722479",
"0.7199927",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837",
"0.71925837"
] | 0.79434526 | 1 |
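The docstring in the record above notes that custom widgets may override this hook; a minimal sketch of that pattern, assuming ipywidgets is available (the subclass and its behaviour are hypothetical):

from ipywidgets import Text

class StrippedText(Text):
    # Hypothetical custom widget: interactive functions receive the trimmed
    # string rather than the raw self.value.
    def get_interact_value(self):
        return self.value.strip()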
Returns True if the clause is satisfied by the valuation, False otherwise. | def is_clause_satisfied(self, valuation_list, clause):
for literal in clause:
if literal < 0:
v = 1 - valuation_list[-literal - 1]
else:
v = valuation_list[literal - 1]
if v == 1:
return True
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_clause_satisfied(valuation_list, clause):\n for literal in clause:\n if literal < 0:\n v = 1 - valuation_list[-literal - 1]\n else:\n v = valuation_list[literal - 1]\n\n if v == 1:\n return True\n\n return False",
"def one_true(conds):\n\n for c in conds:\n if c:\n return True\n\n return False",
"def is_true(expr: Any) -> bool:\n if expr is None:\n return False\n if isinstance(expr, bool):\n return expr\n return True",
"def is_satisfied(self, item):\n return all(map(\n # we go through every single item in self.args and check if the specks are satisfied.\n lambda spec: spec.is_satisfied(item), self.args\n ))",
"def isTrueOrDoesSentence(self, sentence):\n name = sentence.__name__\n return name == GdlPool.TRUE or name == GdlPool.DOES",
"def is_satisfiable(formula: Formula) -> bool:\r\n # satisfiable - if it gets the value True at least once\r\n # Task 2.5c\r\n all_models_local = all_models(list(formula.variables()))\r\n for bool_val in truth_values(formula, all_models_local):\r\n if bool_val:\r\n return True\r\n return False",
"def is_bool(self):\n return self.op in self.cond_ops",
"def is_bool(self):\n return self.op in self.cond_ops",
"def check_condition(self, query_dict):\n return all(key in self.__data and self.__data[key] == value\n for key, value in query_dict.items())",
"def is_satisfied(self, item: Product) -> bool:\n return self.satisfied(item)",
"def __call__(self, possibility: object) -> bool:\n if {truth(possibility) for truth in self.truths} == {True}:\n return True\n else:\n return False",
"def isTrue(self, hard=False):\n global debug_solver_count\n \n if( self.cond == CT.TRUE ):\n return True\n elif( self.cond == CT.FALSE ):\n return False\n \n if( hard == False ):\n res = self.customSimplify()\n return ( res == CE.TRUE )\n else:\n condZ3 = self.simplifyZ3()\n self.checked = True\n v = Solver()\n v.add(Not(condZ3))\n signal.signal(signal.SIGALRM, TimeOutRaiser)\n #signal.setitimer(signal.ITIMER_REAL, 1) \n try: \n res = str(v.check())\n except Exception as e:\n res = \"sat\"\n debug_solver_count = debug_solver_count + 1\n if( str(res) == \"unsat\" ):\n self.setTrue()\n return True\n else:\n return False",
"def satisfied(self, fields, field, values):\n\n requires = field.get(\"requires\", [])\n\n if isinstance(requires, str):\n requires = [requires]\n\n for require in requires:\n if require not in fields or not fields[require].validate(store=False):\n return False\n\n if \"condition\" in field and self.env.from_string(field[\"condition\"]).render(**values) != \"True\":\n return False\n\n return True",
"def evaluate_clause(clause: str, match: str) -> bool:\n result = compile_regex(clause).fullmatch(match)\n return result is not None",
"def __bool__(self):\n return any(p for p in self)",
"def resolve_to_true(self):\n print(colored(f\"Checking {self}\\n\", attrs=['bold', 'underline']))\n for elem in self.operands:\n if elem.resolve_to_true():\n print(colored(f\"Since {elem} is True then {self} is True\\n\", attrs=[\n 'bold', 'underline']))\n return True\n print(colored(f\"Since no element was True then {self} is False\\n\", attrs=[\n 'bold', 'underline']))\n return False",
"def check_condition(self, element):\n conditional = element.getAttribute(\"conditional\")\n\n # No condition, then we execute this statement.\n #\n if len(conditional) == 0:\n return True\n\n # We have a conditional. See if it begins with a '!', which inverts\n # our test.\n #\n result = True\n oc = conditional\n if conditional[0] == '!':\n result = False\n conditional = conditional[1:]\n\n if self.settings is not None and conditional in self.settings.ids:\n if self.settings.value(conditional) is True:\n return result\n return not result\n return not result",
"def evaluate(self, d):\n return bool(eval(self.expr, facts_globals, d))",
"def check_condition(self):\n\n\t\traw_context = {\n\t\t\t'folk': self.folk\n\t\t}\n\n\t\tstatus, param = self.execute(self.mission_grid, 'condition', self.pending_mission.kingdom, raw_context)\n\t\treturn status",
"def is_satisfied_by(self, val):",
"def is_satisfied(self, item: Product) -> bool:\n return all(spec.is_satisfied(item) for spec in self.specs)",
"def is_satisfied(self, item: Any) -> bool:",
"def isTrue(*args, **kwargs)->None:\n pass",
"def resolve_to_true(self):\n print(colored(f\"Checking {self}\\n\", attrs=['bold', 'underline']))\n for elem in self.operands:\n # print(f\"Checking elem {elem}\")\n if not elem.resolve_to_true():\n print(colored(f\"Since {elem} is False then {self} is False\\n\", attrs=[\n 'bold', 'underline']))\n return False\n print(colored(f\"{self} is True !\\n\", attrs=['bold', 'underline']))\n return True",
"def satisfied(self):\n\n if self.var1.get_value() is None or self.var2.get_value() is None:\n return False\n\n return self.var1.get_value() != self.var2.get_value()",
"def prove(self, query, context):\n return self.askOne(query, context) != None",
"def assert_true(self, expr, msg=None):\r\n assert bool(expr) is True",
"def is_satisfiable(formula: Formula) -> bool:\n # Task 2.5c\n variables = list(sorted(formula.variables()))\n assignment_dict = all_models(list(variables))\n for val in truth_values(formula, assignment_dict):\n if val:\n return True\n return False",
"def can_commit(self):\n for name in self.conditions:\n if name in _conditions:\n condition = _conditions[name]\n # If one condition matches, we're good\n if self.check_condition(condition):\n return True\n return False",
"def is_true(self, state):\n (truth_value, dict) = self.rewrite().is_true(state)\n dict['condition'] = '{rewrite} {rewrite_condition}'.format(\n rewrite=self._condition(state),\n rewrite_condition=dict['condition']\n )\n return (truth_value, dict)"
] | [
"0.6426093",
"0.59659606",
"0.59554565",
"0.59093994",
"0.58817816",
"0.582734",
"0.58092874",
"0.58092874",
"0.5780725",
"0.5756507",
"0.5726416",
"0.5719493",
"0.5701392",
"0.56912065",
"0.56283927",
"0.562499",
"0.55559367",
"0.55359155",
"0.5531413",
"0.55142295",
"0.55128205",
"0.54569066",
"0.5446944",
"0.544528",
"0.5440079",
"0.5426102",
"0.54119384",
"0.5409547",
"0.53869516",
"0.53841096"
] | 0.67531824 | 0 |
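To make the literal convention in the record above concrete (positive integers name variables, negative integers their negations, both 1-indexed into a 0/1 valuation list, DIMACS style), here is a small self-contained sketch of the same check; the standalone helper name is chosen for illustration only:

def clause_satisfied(valuation, clause):
    # valuation[i] is the 0/1 value of variable i + 1
    for literal in clause:
        value = valuation[abs(literal) - 1]
        if (literal > 0 and value == 1) or (literal < 0 and value == 0):
            return True
    return False

# With x1=1, x2=0, x3=1: (x1 OR NOT x3) is satisfied, (NOT x1 OR x2) is not.
assert clause_satisfied([1, 0, 1], [1, -3]) is True
assert clause_satisfied([1, 0, 1], [-1, 2]) is False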
Fitness function with clause weights, used to identify the hard clauses | def fitness(self, valuation):
return sum(map(lambda i: i['w'] * self.is_clause_satisfied(valuation, i['clause']), self.clauses)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fitness(self):\n pass",
"def _calculate_fitness(self):\n pass",
"def fitness_function(neural_net):\r\n fitness = 25\r\n for i in range(1, 6):\r\n for j in range(1, 6):\r\n answer = np.exp(neural_net.calculate([np.log(i), np.log(j)])[0])\r\n result = i*j\r\n fitness -= abs(answer - result)\r\n\r\n return fitness",
"def fitness(self):\n # TO BE DECIDED\n return 1",
"def fitness(self):\n params = np.array([self['p{}'.format(i)] for i in range(n_pars)])\n \n return func(params)",
"def calculate_weighted_results():\n pass",
"def evaluator(self, candidates, args):\r\n fitness = []\r\n if self._use_ants:\r\n for candidate in candidates:\r\n total = 0\r\n for c in candidate:\r\n total += self.weights[c.element[0]][c.element[1]]\r\n last = (candidate[-1].element[1], candidate[0].element[0])\r\n total += self.weights[last[0]][last[1]]\r\n fitness.append(1 / total)\r\n else:\r\n for candidate in candidates:\r\n total = 0\r\n for src, dst in zip(candidate, candidate[1:] + [candidate[0]]):\r\n total += self.weights[src][dst]\r\n fitness.append(1 / total)\r\n return fitness",
"def fitness(ind):\n return kNN.distance(ind),",
"def weight(self):",
"def calculate_fitness(self):\n fitness = (self.matrix * self.weight_matrix).sum()\n self.fitness = fitness\n return fitness",
"def fitness(NN):\n x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])\n expected_y = np.array([[0], [1], [1], [0]])\n y = NN.feed_forward(x)\n error = expected_y - y\n return 1 / (np.square(np.dot(error.T, error)).squeeze() + 0.01)",
"def fitness(self):\r\n history = self.history\r\n return sum(history) / len(history)",
"def calculateWeights(self):\n return self.distances #En lo que encontramos una funcion que represente",
"def fitness_function(items, m):\r\n cost = 0\r\n weight = 0\r\n for key, is_selected in items.items():\r\n if is_selected:\r\n weight += key[0]\r\n cost += key[1]\r\n res = cost if weight <= m else 0\r\n return res",
"def f(n):\n n[0].evaluate(n[1])\n return (n.code,n.fitness,)",
"def _compute_fitness(self, solution, bias=5):\n\n solution_fitness = 0.0\n\n for index in range(len(solution)):\n waypoint1 = solution[index - 1]\n waypoint2 = solution[index]\n solution_fitness += self._waypoint_distances[frozenset([waypoint1, waypoint2])] - bias * abs(\n self._dicAvailability[waypoint2] - self._dicAvailability[waypoint1])\n\n return solution_fitness",
"def _fitness_model__(self, solution=None, minmax=0):\n return self.objective_func(solution) if minmax == 0 else 1.0 / (self.objective_func(solution) + self.EPSILON)",
"def get_weights(self):",
"def evaluator(self, candidates, args):\r\n fitness = []\r\n if self._use_ants:\r\n for candidate in candidates:\r\n total = 0\r\n for c in candidate:\r\n total += c.value\r\n fitness.append(total)\r\n else:\r\n for candidate in candidates:\r\n total_value = 0\r\n total_weight = 0\r\n for c, i in zip(candidate, self.items):\r\n total_weight += c * i[0]\r\n total_value += c * i[1]\r\n if total_weight > self.capacity:\r\n fitness.append(self.capacity - total_weight)\r\n else:\r\n fitness.append(total_value)\r\n return fitness",
"def weights(self) -> List[float]:",
"def fitness(self, solution):\n cur_fit = 0\n for i in range(self.N):\n cur_fit += self.dist(solution[i % self.N], solution[(i + 1) % self.N])\n return cur_fit",
"def _update_samples_weight(self):\n m, n = 0, self.u.shape[0]\n T = self.u.shape[1]\n N = n + T\n d_0 = matrix(self.d_0.reshape(n, 1))\n\n # Linear Inequallity Constraints, Gx <= h\n G = matrix(-1 * np.eye(N))\n h = matrix(np.zeros(shape=(N, 1)))\n\n # Linear Equality Constraints, Ax = b\n A = matrix(np.concatenate((np.ones(shape=(T, 1)), np.zeros(shape=(n, 1))), axis=0).T)\n b = matrix(1.0)\n\n def F(x=None, z=None):\n if x is None: return 0, matrix(0.5, (N, 1))\n w = x[:T, :]\n phi = x[T:, :]\n reg_inv = 1 / self.reg\n\n weighted_u = np.dot(self.u, w) # n x 1\n scores = -1 * reg_inv * (weighted_u + phi) # n x 1\n\n # Numeric correction\n scores -= max(scores)\n\n # Auxilliaries\n weighted_scores_exp = np.multiply(d_0, np.exp(scores))\n sum_weighted_scores_exp = np.sum(weighted_scores_exp)\n sum_weighted_scores_exp_square = sum_weighted_scores_exp ** 2\n squared_weighted_scores_exp = np.square(weighted_scores_exp)\n weighted_scores_exp_mults = np.dot(weighted_scores_exp, weighted_scores_exp.T)\n uw_mult = np.multiply(self.u, weighted_scores_exp)\n uw_mult_sum = np.sum(np.multiply(self.u, weighted_scores_exp), axis=0)\n\n f = self.reg * np.log(sum_weighted_scores_exp) + self.kappa * np.sum(phi) # f(x)\n\n dfdw = -1 * uw_mult_sum.T / sum_weighted_scores_exp\n dfdphi = (-1 * weighted_scores_exp / sum_weighted_scores_exp) + self.kappa\n Df = np.concatenate((dfdw, dfdphi), axis=0) # Gradient\n\n mf = matrix(f)\n mDf = matrix(Df.T)\n if z is None:\n return mf, mDf\n # Assumes d_0 is uniform\n H = np.zeros(shape=(N, N)) # Hessian\n dfdwiwi = np.zeros(shape=(T, 1))\n dfdphiiphij = -1 * reg_inv * (np.tril(weighted_scores_exp_mults)) / sum_weighted_scores_exp_square\n dfdphiiphii = reg_inv * (np.multiply(weighted_scores_exp,\n sum_weighted_scores_exp - weighted_scores_exp) / sum_weighted_scores_exp_square)\n # dfdwiwj, dfwiphij are zeros\n dfdphiiwj = reg_inv * ((\n uw_mult * sum_weighted_scores_exp - weighted_scores_exp * uw_mult_sum) / sum_weighted_scores_exp_square)\n\n H[T:, T:] = dfdphiiphij\n H[T:, :T] = dfdphiiwj\n H_diagonal = np.concatenate((dfdwiwi, dfdphiiphii), axis=0)\n np.fill_diagonal(H, H_diagonal)\n\n mH = matrix(z[0] * H)\n return mf, mDf, mH\n\n prev_w = self.w\n prev_slacks = self.slacks\n try:\n wphi = solvers.cp(F, G=G, h=h, A=A, b=b)['x']\n self.w = wphi[:T, :]\n self.slacks = wphi[T:, :]\n except Exception as e: # Catch rank errors and continue to next iteration\n self.slacks = prev_slacks\n self.w = prev_w\n try:\n self.w = np.concatenate((self.w, [[1 / (len(self.w) + 1)]]), axis=0)\n except:\n self.w = np.concatenate((self.w, [1 / (len(self.w) + 1)]), axis=0)\n self.w /= np.sum(self.w)\n\n scores = ((-1 / self.reg) * np.squeeze(np.asarray(np.dot(self.u, self.w) + self.slacks))) + np.log(\n self.d_0) # Update according to Equation (6)\n return self.softmax(scores)",
"def getReward(self):\n# def evaluateFitness(self):\n fitness = 0.0\n distance = self.env.getDistance()\n speed = self.env.getSpeed()\n theta = self.env.getOrientation()\n\n ## implementation 101\n timeBonus = (self.maxTime - self.t)/self.maxTime\n alpha = 1.0/((1+distance)*(1+fabs(theta))*(speed+1));\n if distance < 0.5*self.env.init_distance :\n if(distance < self.env.vicinity_distance and\n abs(theta) < self.env.vicinity_orientation and\n speed < self.env.vicinity_speed ):\n fitness = 1 + timeBonus; \n else:\n fitness = alpha;\n else: fitness = 0\n self.lastFitness = fitness\n if fitness > self.bestFitness : \n self.bestFitness = fitness \n\n return fitness",
"def c_test_fitness_function(self, function):\r\n return 1",
"def c_test_fitness_function(self, function):\r\n return 1",
"def fitness(self):\n return (len(self.body)**2) * self.age",
"def get_fitness(self):\n hard_conflicts = self.get_conflicts()\n soft_conflicts = self.get_soft_conflicts()\n hard_fitness = 1 / hard_conflicts if hard_conflicts != 0 else math.inf\n soft_fitness = 1 / soft_conflicts if soft_conflicts != 0 else math.inf\n return [hard_fitness, soft_fitness]",
"def evaluate(self, representativeness: float, weight: float) -> float:\n pass",
"def assess_fitness(solution, features, interaction):\n result = 0\n # add the features first\n for i, k in enumerate(solution):\n if k == 1:\n result += features[i]\n # add the interactions second\n for i, k in enumerate(solution):\n for x, y in enumerate(solution):\n if k == 1 and y == 1:\n try:\n result += interaction[i, x]\n except KeyError:\n continue\n else:\n result += interaction[i, x]\n return result",
"def calc_fitness_by_gen(self):\r\n f_sum = 0\r\n # first loop gives us the sum of the fitness\r\n for c, _ in self.temp_hist_by_gen.items():\r\n f_sum += c.fitness()\r\n # now we calc the chances by fitness of each one\r\n for c, _ in self.temp_hist_by_gen.items():\r\n self.temp_hist_by_gen[c] = c.fitness() / f_sum"
] | [
"0.73278224",
"0.71118915",
"0.6852314",
"0.6778631",
"0.6731181",
"0.6659678",
"0.6543733",
"0.6529293",
"0.65142494",
"0.6491511",
"0.64818954",
"0.646624",
"0.6402855",
"0.63975805",
"0.6379013",
"0.6376958",
"0.6361221",
"0.6343551",
"0.6323188",
"0.62790895",
"0.6278208",
"0.62421715",
"0.6234875",
"0.623052",
"0.623052",
"0.6221757",
"0.6164853",
"0.61519396",
"0.61446613",
"0.61401916"
] | 0.71778464 | 1 |
Update clause weights to identify the hard clauses | def update_clauses_weight(self):
for clause in self.clauses:
clause['w'] = clause['w'] + 1 - self.is_clause_satisfied(self.global_best, clause['clause']) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize_clause_weights(self):\n self.clause_weights = np.ones(len(self.clauses), dtype=np.int)",
"def updateWeights(self,weightUpdate):\n\t\n\t\tbranches = self.collectAllBranches()\n\n\t\tfor i in range(self.nBranches):\n\n\t\t\tbranches[i].weight -= weightUpdate[i]",
"def update_weights(self):\n\t\tpass",
"def update(self, state, action, nextState, reward):\n \"\"\"Description:\n Use second equation in slide 71 of MDP\n Adjest weight of active features depend on tranistion \n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n feat = self.featExtractor.getFeatures(state, action)\n\n # if weight is empty, then weight will need to initial to 1 for all features\n # According to which Extractor user choose, weight counter will have equal number of keys.\n if len(self.weight) == 0:\n feat = self.featExtractor.getFeatures(state, action)\n self.weight.incrementAll(feat.keys(), 1)\n \n maxQns = self.getValue(nextState)\n if maxQns == None:\n maxQns = 0\n Qsa = self.getQValue(state, action)\n difference = ( reward + self.discountRate * maxQns ) - Qsa\n \n for key in self.weight.keys():\n self.weight[key] += (self.alpha * difference * feat[key])\n \n \n \"\"\" END CODE \"\"\"",
"def updateWeights(self, message):\n prefWeights = [self.prefWghts1.GetValue(), \n self.prefWghts2.GetValue(), \n self.prefWghts3.GetValue()]\n\n self.model.setWeights(prefWeights, \n self.Prefs.GetValue(), \n self.ExcessCap.GetValue(), \n self.CongPenalty.GetValue(), \n self.DeptFairness.GetValue(), \n self.Back2Back.GetValue())",
"def weighted_clause(cpt, lookup, s):\r\n \r\n # the variable we are working with \r\n var = lookup[cpt[0]] \r\n # cpt[1] is everything after the variable\r\n for i in range(len(cpt[1])):\r\n # are there any preconditions\r\n prereqs = cpt[1][i][0]\r\n # if not then it has no parents so we simply add it\r\n if prereqs[0] == []:\r\n if cpt[1][i][1][1] == '1':\r\n s.add_soft(Not(var), -log(cpt[1][i][-1],2))\r\n else:\r\n s.add_soft(var, -log(cpt[1][i][-1],2))\r\n else:\r\n # create a blank slate to be our clause\r\n others = []\r\n # cpt[1][i][0] is our list of preconditions\r\n # so other is a precondition\r\n for other in cpt[1][i][0]:\r\n # if true\r\n if other[1] == '1':\r\n others.append(Not(lookup[other[0]]))\r\n # if false\r\n else:\r\n others.append(lookup[other[0]])\r\n # append the variable we're working with\r\n if cpt[1][i][1][1] == '0':\r\n others.append(var)\r\n else:\r\n others.append(Not(var))\r\n # OR all of this together\r\n others = Or(others)\r\n # add it as a soft clause with the proper weight\r\n s.add_soft(others, -log(cpt[1][i][-1],2))",
"def update_weights(self):\n self._weights = self._weights + self.update_weights_value",
"def update_edge_weight(self, G):\r\n for node in G.nodes:\r\n n1 = G.nodes[node]['belief_strength'] #gets a node's belief strength\r\n for pre in G.predecessors(node):\r\n n2 = G.nodes[pre]['belief_strength'] #gets the node's predecessors' belief strength\r\n dif = abs(n1-n2)\r\n if n1*n2> 0:\r\n G.edges[pre, node]['weight'] += (dif/2000) #clean\r\n else:\r\n G.edges[pre, node]['weight'] -= (dif/2000)\r\n return G",
"def _update_weights(self, good_da, bad_da, good_tree, bad_tree, good_feats, bad_feats):\n # import ipdb; ipdb.set_trace()\n if self.diffing_trees:\n good_sts, bad_sts = good_tree.diffing_trees(bad_tree, symmetric=True)\n for good_st, bad_st in zip(good_sts, bad_sts):\n good_feats = self._extract_feats(good_st, good_da)\n bad_feats = self._extract_feats(bad_st, bad_da)\n subtree_w = 1\n if self.diffing_trees.endswith('weighted'):\n subtree_w = (len(good_st) + len(bad_st)) / float(len(good_tree) + len(bad_tree))\n self._update_nn(bad_feats, good_feats, subtree_w * self.alpha)\n else:\n self._update_nn(bad_feats, good_feats, self.alpha)",
"def update_weights(self):\n self._weights = self._weights + self.update_weights_value\n self.weights_clipping()",
"def weight_wrtg(self, wrtg):\n # Clear caches because weights are going to change.\n # TODO: it might be possible to not clear the caches\n # if the weight doesn't change, and re-use previous decoding.\n wrtg.ClearCaches()\n for p in wrtg.P:\n rule = p.rhs.rule\n assert isinstance(rule.features, list)\n rule.weight = self.weight_rule(rule)",
"def update_edge_weights(self):\n # set all weights to 0\n for arc in self.arc_info.keys():\n self.arc_info[arc][\"weight\"] = 0\n # iterate through all paths and add weights to arcs\n for (path, weight) in zip(self.paths, self.weights):\n for arc in path:\n # Count this path's flow toward the arc's total\n self.arc_info[arc][\"weight\"] = self.arc_info[arc][\"weight\"] + \\\n weight",
"def update_weights(self):\r\n\r\n inedges=self.in_edges\r\n for edge in inedges:\r\n weight=edge.weight+self.learning_rate*self.delta*(edge.source.activation)\r\n edge.change_weight(weight)",
"def update_weight(self,ctr,new_weight):\n self.sum1 -= self.data_set[ctr].weight\n self.data_set[ctr].weight = new_weight\n self.sum1 += new_weight",
"def weight(self):",
"def _reweight(self):\n self._seed_weights = [self._graph.degree(seed) for seed in self._seeds]\n weight_sum = np.sum(self._seed_weights)\n self._seed_weights = [float(weight)/weight_sum for weight in self._seed_weights]",
"def soft_update_critic(self):\n local_weights = np.array(self.critic_local.model.get_weights())\n target_weights = np.array(self.critic_target.model.get_weights())\n\n assert len(local_weights) == len(\n target_weights), ('Local and target model parameters must have '\n 'the same size')\n\n new_weights = self.tau * local_weights + (1 - self.tau) * target_weights\n self.critic_target.model.set_weights(new_weights)",
"def update_weights_rocchio(isRelevant, result):\n\tglobal global_weights\n\tmax_contributing_weight = max(result.sim_measures, key=result.sim_measures.get)\n\tif isRelevant:\n\t\tglobal_weights[max_contributing_weight] += alpha\n\t\tfor measure in result.sim_measures:\n\t\t\tglobal_weights[measure] -= beta \n\n\telse:\n\t\tglobal_weights[max_contributing_weight] -= alpha\n\t\tfor measure in result.sim_measures:\n\t\t\tglobal_weights[measure] += beta",
"def soft_update(self, other, tau):\n new_weights = {}\n\n own_weights = self.get_weight_copies()\n other_weights = other.get_weight_copies()\n\n for k in own_weights:\n #print(own_weights[k].shape, other_weights[k].shape)\n new_weights[k] = (1 - tau) * own_weights[k] + tau * other_weights[k]\n self.set_weights(new_weights)",
"def setWeights(self, decay):\n for i in range(15):\n self.model_parts[i].weights = decay ** torch.abs(torch.arange(15.0) - i)",
"def _create_weight_update_ops(self):\n with tf.name_scope(\"Weight_Update_Operators\"):\n self.weight_vars_assign_ops = []\n for weight_matrix, grad in zip(self._train_vars, self.step_direction_variables):\n self.weight_vars_assign_ops.append(\n tf.assign_add(weight_matrix, self._step_on_line_plh * -grad / self.norm_of_gradient_var).op)",
"def reassignWeights(self,weights):\n\t\n\t\tbranches = self.collectAllBranches()\n\n\t\tfor i in range(self.nBranches):\n\n\t\t\tbranches[i].weight = weights[i]",
"def update_optimizer(self, context, optimizer, host):\n pass",
"def update_weights(self):\n\n self.weights -= self.loss_grads\n self.loss_grads = np.zeros(self.weights.shape)",
"def _recalculate_opinions(self, idea):\r\n \r\n global INFLUENCE_FACTOR\r\n \r\n last_idea = self.opinions[idea.category]\r\n last_idea.weight = last_idea.weight+(idea.weight*INFLUENCE_FACTOR)\r\n if last_idea.weight >1:\r\n last_idea.weight = 1\r\n elif last_idea.weight <-1:\r\n last_idea.weight = -1",
"def update_weights_negative(self):\n eta = self.config.eta\n self.w_xh -= eta * (self.x.T @ self.h)\n self.w_th -= eta * (self.t.T @ self.h)\n self.w_ho -= eta * (self.h.T @ self.o) \n self.w_hz -= eta * (self.h.T @ self.z)",
"def update_weights_positive(self):\n eta = self.config.eta\n self.w_xh += eta * (self.x.T @ self.h)\n self.w_th += eta * (self.t.T @ self.h)\n self.w_ho += eta * (self.h.T @ self.o)\n self.w_hz += eta * (self.h.T @ self.z)",
"def weight_update_steepest_descent(self, network):\n for l, layer in enumerate(network.layers):\n layer.b -= self.alpha * self.dc_db[l]\n layer.q -= self.alpha * self.dc_dq[l]\n layer.rx_inp -= self.alpha * self.dc_drx_inp[l]\n layer.ry_inp -= self.alpha * self.dc_dry_inp[l]\n layer.rx_pos_out -= self.alpha * self.dc_drx_pos_out[l]\n layer.ry_pos_out -= self.alpha * self.dc_dry_pos_out[l]\n layer.rx_neg_out -= self.alpha * self.dc_drx_neg_out[l]\n layer.ry_neg_out -= self.alpha * self.dc_dry_neg_out[l]",
"def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority",
"def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority"
] | [
"0.68354654",
"0.6344763",
"0.62895304",
"0.6028888",
"0.6028666",
"0.5940405",
"0.58278716",
"0.5811622",
"0.57929915",
"0.5784587",
"0.5732507",
"0.57304317",
"0.5729231",
"0.57145494",
"0.56781715",
"0.5653222",
"0.5609049",
"0.55869013",
"0.5580688",
"0.54819936",
"0.5480642",
"0.5480071",
"0.5472337",
"0.5463373",
"0.5462522",
"0.54607326",
"0.5458895",
"0.54488593",
"0.5408665",
"0.5408665"
] | 0.8432571 | 0 |
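A simplified, self-contained sketch of how the weighted fitness and the clause re-weighting step from the records above interact: clause weights start at 1, and every clause the current global best leaves unsatisfied gets heavier, biasing later fitness evaluations toward the hard clauses. The tiny instance and the "global best" assignment below are made up for illustration:

def sat(valuation, clause):
    # Positive literal l is satisfied when variable l is 1, negative when it is 0.
    return any(valuation[abs(l) - 1] == (1 if l > 0 else 0) for l in clause)

clauses = [{"clause": [1, -2], "w": 1}, {"clause": [2, 3], "w": 1}, {"clause": [-1, -3], "w": 1}]

def weighted_fitness(valuation):
    return sum(c["w"] * sat(valuation, c["clause"]) for c in clauses)

global_best = [1, 0, 0]                          # pretend the swarm found this
print(weighted_fitness(global_best))             # 2 with unit weights
for c in clauses:
    c["w"] += 1 - sat(global_best, c["clause"])  # only the unsatisfied clause [2, 3] gains weight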
Number of satisfied clauses under a given valuation | def num_satisfied_clauses(self, val_list):
num_true_clauses = 0
for c in self.clauses:
num_true_clauses += self.is_clause_satisfied(val_list, c['clause'])
return num_true_clauses | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fitness(self, valuation):\n return sum(map(lambda i: i['w'] * self.is_clause_satisfied(valuation, i['clause']), self.clauses))",
"def solution(i, literals, clauses):\n valuation_list = binary_list(i, literals)\n num_true_clauses = 0\n\n for c in clauses:\n num_true_clauses += is_clause_satisfied(valuation_list, c)\n\n return valuation_list, num_true_clauses",
"def nb_predicates(self) -> int:\n return 0",
"def count_parms(self):\n min_freq = self.get_high_pass_index()\n rejection = self.rejection_at(np.arange(min_freq, self.nf))\n if rejection.ndim < 2:\n return np.sum(rejection)\n else:\n return np.sum(rejection, axis=1)",
"def evaluate_solution(self, chosen):\n self.candidate_counter += 1\n\n # evaluation function in abstract superclass\n \n solved_clauses = np.any(self.truth_clauses & np.array([chosen, ~chosen]), axis=(2, 1)) \n num_solved_clauses = np.sum(solved_clauses)\n # calculate evaluation with weight adaption heuristic\n evaluation = np.sum(solved_clauses * self.clause_weights)\n\n if self.candidate_counter == self.WEIGHT_ADAPTION_DURATION:\n # increase weights for unsatisfied clauses\n self.clause_weights += ~solved_clauses\n self.candidate_counter = 0\n\n return evaluation, num_solved_clauses",
"def count(self,val):\n return sum(1 for e in self.frontierpq if e[0]==val)",
"def number_of_connectives(formula):\n pass\n # ======== YOUR CODE HERE ========",
"def total_present_value_rule(_m):\r\n\r\n return sum(m.DELTA[y] * (m.INV[y] + m.FOM[y] + m.OP[y]) for y in m.Y) + m.EOH",
"def score(self):\n return len([req for req in list(set(self.knowledges)) if req in \n self.key_requirements or req in self.other_requirements])",
"def get_number_of_evaluation(self):\n return self.n_eval",
"def objectiveFunction(self):\n # start at zero\n satisfiedConstraints = 0\n # loop through all of the constraints\n for constraint in self.constraints:\n # if the constraint is satisfied, then increase the count\n if (constraint.satisfied(constraint.tail.value, constraint.head.value)):\n satisfiedConstraints += 1\n # return the count of satisfied constraints\n return satisfiedConstraints",
"def nconflicts(self, var, val, assignment):\r\n\r\n # Subclasses may implement this more efficiently\r\n def conflict(var2):\r\n return var2 in assignment and not self.constraints(var, val, var2, assignment[var2])\r\n\r\n return count(conflict(v) for v in self.neighbors[var])",
"def count(pred, l):\n nl = [i for i in range(0,len(l)) if pred(l[i])]\n\n return len(nl)",
"def main():\n for num in range(372304, 847061):\n if meets_criteria2(num):\n print (num)\n\n meet_criteria = [meets_criteria2(x) for x in range(372304, 847061)]\n print (sum(meet_criteria))",
"def count_terms(equat_orig):\n\tterms = 0\n\tfor pow_group in equat_orig:\n\t\tif pow_group:\n\t\t\tfor _ in pow_group:\n\t\t\t\tterms += 1\n\tprint(f'\\033[1;95mTerms in the polynom: \\033[0m{terms}')",
"def get_ability(hyplo, indivs):\r\n\tcount=0\r\n\tfor indiv in indivs:\r\n\t\tif match(hyplo, indiv):\r\n\t\t\tcount+=1\r\n\treturn count",
"def get_count_of_elements_by_condition(sequence):\n elements_and_indexes = {sequence[i]: i + 1\n for i in range(1, len(sequence) - 1)}\n filtered_values = filter(lambda element:\n 2 ** element[1] < element[0] < math.factorial(element[1]),\n elements_and_indexes.items())\n return len(dict(filtered_values))",
"def count_cond(condition):\n def countDef(n):\n i,count = 1,0\n while i <= n:\n if condition(n, i):\n count += 1;s\n i += 1;\n return count;\n return countDef;",
"def count_cond(condition):\n \"*** YOUR CODE HERE ***\"\n def f(n):\n i, total = 1, 0\n while i <= n:\n if condition(n, i):\n total += 1\n i += 1\n return total\n return f",
"def calc(self,newValue):\n return np.sum(self.values<=newValue)/self.n",
"def number_of_solutions(self):\n return len(self._solutions)",
"def brute_force_search_solution():\n return len(coin_search(TOTAL, COINS))",
"def count(pred, l):\n temp_number = 0\n if l == []:\n return 0\n \n while len(l) > 0:\n if pred(l[0]) == True:\n temp_number = temp_number + 1\n #print(l[0], 'it is true')\n l = l[1:]\n \n \n else:\n #print(l[0], 'it is not true')\n l = l[1:]\n \n output = temp_number\n output = output + count(pred, l)\n return output",
"def give_conds_nb(self):\n return len(self._conds)",
"def give_conds_nb(self):\n return len(self._conds)",
"def __checkNrVars(self):\n variables = set()\n for q in self.__quantifierList:\n for var in q.getVariableNames():\n variables.add(\"%s\" % var)\n for c in self.__clauseList:\n for var in c.getVariableNames():\n variables.add(\"%s\" % var)\n \n return len(variables)",
"def _get_count(text, if_clause):\n return label(text, func.SUM(func.IF(if_clause, 1, 0)))",
"def test_num_evals(self):\n\t\tdetails = self.watcher.describe()\t\t\n\t\tself.assertTrue((details.M * details.rf == details.num_evals).all())",
"def test_num_evals(self):\n\t\tdetails = self.watcher.describe()\t\t\n\t\tself.assertTrue((details.M * details.rf == details.num_evals).all())",
"def test_count_35(self):\n value: int = 35\n result: int = 6\n self.assertEqual(count(value), result, f'Between 0 and {value}, there are {result} lucky numbers.')"
] | [
"0.70631784",
"0.67033887",
"0.65471554",
"0.61733276",
"0.6066021",
"0.6004589",
"0.59886974",
"0.5926692",
"0.5896444",
"0.5818225",
"0.5800353",
"0.5769594",
"0.574551",
"0.5736465",
"0.57226527",
"0.5703242",
"0.56671673",
"0.5644893",
"0.5639909",
"0.5623285",
"0.55975455",
"0.55955094",
"0.559221",
"0.5586224",
"0.5586224",
"0.55643004",
"0.5556311",
"0.55266994",
"0.55266994",
"0.55234617"
] | 0.72403574 | 0 |
Local search that doesn't flip every bit of the particle, only a randomly chosen k % of its bits | def local_search_random_k(self, particle, fitness, k):
improvement = 1
nbrflip = 0
while improvement > 0 and nbrflip < self.max_flip and particle.position not in self.tabuList:
improvement = 0
for i in sample(range(self.num_literals), int(self.num_literals*k)):
fit_before = fitness(particle.position)
#Flip the i-th variable of the particle
particle.position[i] = 1 - particle.position[i]
nbrflip += 1
fit_after = fitness(particle.position)
gain = fit_after - fit_before
if gain >= 0:
#Accept flip
improvement += gain
else:
#There is no improvement
#Undo flip
particle.position[i] = 1 - particle.position[i]
if improvement == 0:
#If no improvement add this solution to tabu list
self.tabuList.append(particle.position)
if len(self.tabuList) > self.maxTabuSize:
del self.tabuList[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def random_walk(n, p):\n random_array = np.random.uniform(0, 1, n)\n left = random_array[random_array > p].size\n right = n - left\n \n return (right-left)",
"def solution3(nums, K):\n modSeen = {0:-1}\n s = 0\n for i in range(len(nums)):\n n = nums[i]\n s += n\n mod = s % K if K != 0 else s\n if mod in modSeen:\n if i - modSeen[mod] > 1:\n return True\n else:\n modSeen[mod] = i\n return False",
"def sample(self):\n L = e ** (-self.lamb)\n k, p = 1, rand()\n while p > L:\n k += 1\n p *= rand()\n return k - 1",
"def fn(k):\n seen = set()\n for i in range(len(s)-k+1): \n val = (prefix[i+k] - prefix[i]*fac[k]) % MOD \n if val in seen: return True # rolling hash (ver. Monte Carlo)\n seen.add(val)\n return False",
"def slow_kp(p, f):\n return sum(1 for n in range(p) if f(n) % p == 0)",
"def fn(i, x):\n if i == goal: return x == n \n ans = 0 \n if x < n: ans += (n-x) * fn(i+1, x+1) # a new song\n if k < x: ans += (x-k) * fn(i+1, x) # an old song\n return ans % 1_000_000_007",
"def f(k):\n return k * k * k * k * pk(k, suppression) * spherical_jn(0, k * r)",
"def sample_from(self, p):\n return np.searchsorted(np.cumsum(p), np.random.rand())",
"def f(k):\n return k * k * k * k * pk(k, suppression) * spherical_jn(2, k * r)",
"def f(k):\n return k * k * k * pk(k, suppression) * spherical_jn(1, k * r)",
"def search(self, L: int, a: int, modulus: int, n: int, nums: List[int]) -> str:\n # compute the hash of string S[:L]\n h = 0\n for i in range(L):\n h = (h * a + nums[i]) % modulus\n \n # already seen hashes of strings of length L\n seen = {h} \n # const value to be used often : a**L % modulus\n aL = pow(a, L, modulus) \n for start in range(1, n - L + 1):\n # compute rolling hash in O(1) time\n h = (h * a - nums[start - 1] * aL + nums[start + L - 1]) % modulus\n if h in seen:\n return start\n seen.add(h)\n return -1",
"def rand_prime(k=10): \n i = random.randint(2**(k-2),2**(k-1))\n i,l=2*i+1,0\n while True:\n j = 3\n l +=1\n while i%j!=0:\n j += 1\n if i == j:\n return i\n #return i,len(bin(i)[2:]),l\n i += 2",
"def f(k):\n return k * k * pk(k, suppression) * spherical_jn(0, k * r)",
"def neighbor(R, L, p):\n Rp = []\n for k in range(0,len(R)):\n epsilon = random.random()\n if (epsilon > p):\n Rp.append(R[k])\n else:\n randpoint = randVacantPoint(L)\n # update L according to the random\n # vacant point previously found.\n L[randpoint[0]][randpoint[1]]=1\n L[R[k][0]][R[k][1]]=0\n\n Rp.append(randpoint)\n \n return Rp",
"def partial_permutations(n, k):\n return int((factorial(n) / factorial(n - k)) % 1000000)",
"def emitter(p=0.03):\n while True:\n v = np.random.rand(1)\n if v > p:\n yield 0.\n else:\n yield np.random.rand(1)",
"def particle_filter(particle_set_t, measurement_t):\n global count\n n_samples, dim = particle_set_t.shape # no of particles and dimension of each particle\n\n pred_state = np.zeros((n_samples, dim), dtype=\"float64\") # store the predicted state \n weights = np.zeros(n_samples, dtype=\"float64\") # corresponding weights for resampling\n\n particle_set_t1 = np.zeros((n_samples, dim), dtype=\"float64\") # next iteration of particles\n\n\n # this loop calculates \\bar{X_t}, i.e. the predicted belief.\n for n in range(n_samples):\n # predicted motion step:\n xn_t1 = sample_motion_model(particle_set_t[n]) # 3x1 vector: hypothetical state\n\n # measurement correction step:\n weight_xn_t1 = state_likelihood(measurement_t, xn_t1) # scalar value\n\n pred_state[n] = xn_t1\n weights[n] = weight_xn_t1\n\n \n # It was observed that if all weights are 0, the resampling step breaks. \n # Thus, adding a uniform distribution. This is obviously a very bad idea \\ \n # as the true state can easily be discarded in the resampling step: TODO!\n if np.sum(weights) > 0.0:\n weights = weights/np.sum(weights) # normalize array only when sum in not 0\n else:\n print(\"possile divergence!\")\n weights[:] = 1 / n_samples # if sum is 0 then assign uniform distribution throughout\n\n\n # the resampling step:\n # indices = monte_carlo.residual_resample(weights)\n indices = monte_carlo.stratified_resample(weights)\n count += 1\n print(count)\n\n # new particle set is particles at index locations\n for i, index in enumerate(indices):\n particle_set_t1[i] = pred_state[index]\n\n return particle_set_t1",
"def premier(p: int) -> bool:\n if p < 2: return False\n k = 2\n while k**2 <= p:\n if p%k == 0:\n return False\n k+=1\n return True",
"def roll(self):\n roll = random.random()\n sum = 0\n for item in self.mask:\n sum += item.prob\n if sum >= roll: return item.elem\n return None",
"def stirling(k, r) :\n\n return sum((-1)**(r-i)*binomial(r, i)*i**k for i in range(r+1)) / math.factorial(r)",
"def random_dist(k):\n res = [random.random() for i in range(k)]\n s = sum(res)\n return [v/s for v in res]",
"def detect():\n\n _pass_done = 0\n _improve = True\n new_mod = modularity()\n cur_mod = -999999999.0\n rl = random.sample(range(0, node_count), node_count)\n while _improve & (_pass_done < max_pass) & (new_mod - cur_mod > min_mod):\n cur_mod = new_mod\n _improve = False\n _pass_done += 1\n for node_tmp in rl:\n n = node_tmp\n nc = bl[n]\n ncomm = neigh_comm(n)\n remove(n, nc, ncomm[nc])\n best_c = nc\n best_l = 0.0\n best_incre = 0.0\n for c in ncomm:\n incre = modularity_gain(n, c, ncomm[c])\n if incre > best_incre:\n best_incre = incre\n best_c = c\n best_l = ncomm[c]\n insert(n, best_c, best_l)\n if best_c != nc:\n _improve = True\n new_mod = modularity()\n print new_mod",
"def simulate_rerolling(p: float, n: int) -> int:\n\n counter = 0\n new_n = n\n while new_n > 0:\n for _ in range(new_n):\n ran = random.random()\n if ran < p:\n new_n -= 1\n counter += 1\n return counter",
"def sample(N_samples, N_particles, drop):\n\n max_dist = (np.maximum(drop.axisA, drop.axisB))[:, None, None]\n particles = (np.random.random(\n size=(N_samples, 90*N_particles, 2))-0.5)*3*max_dist\n w = drop.isInside(particles)\n\n newpart = np.zeros([N_samples, N_particles, 2])\n for i in range(N_samples):\n newpart[i] = particles[i, w[i]][:N_particles]\n return newpart",
"def pseudorandom(n, p, key):\n import numpy as np\n p = list(p)\n cp = np.cumsum([0] + p)\n assert np.allclose(1, cp[-1])\n assert len(p) < 256\n\n x = np.random.RandomState(key).random_sample(n)\n out = np.empty(n, dtype='i1')\n\n for i, (low, high) in enumerate(zip(cp[:-1], cp[1:])):\n out[(x >= low) & (x < high)] = i\n return out",
"def fn(i):\n if i == 0: return 1 # boundary condition \n ans = 0\n for k in range(1, N+1): \n if k not in seen and (k%i == 0 or i%k == 0): \n seen.add(k)\n ans += fn(i-1)\n seen.remove(k)\n return ans",
"def _decimate_k(self):\n # Setting the seed here once so as to get the same set\n # independent of filters.\n random.seed(1)\n k_sorted = sorted(self.kvec.keys())\n k_selected = []\n for knorm in k_sorted:\n nkmax = len(self.kvec[knorm])\n k_selected.append(random.sample(list(range(nkmax)), min(self.nk, nkmax)))\n return k_sorted, k_selected",
"def partition_random(A, p, r):\n n = random.randint(p, r)\n A[n], A[r] = A[r], A[n]\n x = A[r]\n i = p-1\n for j in range(p, r):\n if A[j] <= x:\n i += 1\n A[i], A[j] = A[j], A[i]\n A[i+1], A[r] = A[r], A[i+1]\n return i+1",
"def ball_volume(n, N=10000):\n #get uniform sample\n points = np.random.rand(int(N), 2)\n\n #run samples through function\n points_combo = points[:,0]**2+points[:,1]**2\n\n #use mask to calculate points inside and outside\n mask = points_combo<=1\n return 4*sum(mask)/len(mask)",
"def mutate(w, h, mines, p):\r\n for i in range(w*h):\r\n if uniform(0, 1) <= p:\r\n #mutate:\r\n if i in mines:\r\n mines.remove(i)\r\n else:\r\n mines.append(i)\r\n return mines"
] | [
"0.6029336",
"0.5921499",
"0.591397",
"0.58484614",
"0.58470005",
"0.5806551",
"0.57981557",
"0.57783556",
"0.57715005",
"0.5751681",
"0.575026",
"0.5740897",
"0.5667054",
"0.5616238",
"0.5614245",
"0.55603385",
"0.554581",
"0.5543597",
"0.55319035",
"0.553032",
"0.55213654",
"0.5510235",
"0.5499955",
"0.54993767",
"0.54876244",
"0.5482772",
"0.5470347",
"0.54458386",
"0.5423517",
"0.5416213"
] | 0.732121 | 0 |
Remove node object from list by index | def removeNode(self, index):
del self.nodes[index] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_a_specific_item(self, index):\n\n current = self.head\n previous = None\n for i in range(index):\n previous = current\n current = current.next\n if previous is None: self.head = current.next\n else: previous.next = current.next\n self.size -= 1",
"def delete_at_index(self, index: int) -> T:\n try:\n previous_node = self.__get_node_at_index(index-1)\n except ValueError as e:\n if self.is_empty(): \n raise ValueError(\"List is empty\")\n elif index == 0:\n item = self.head.items\n self.head = self.head.link\n else:\n raise e\n else:\n item = previous_node.link.items\n previous_node.link = previous_node.link.link\n self.length -= 1\n return item",
"def delete(self, index):\n # check validity of index:\n if index < 0 or index > self.n:\n print(\"Index Error; please input valid index\")\n return\n # if head element is to be removed,\n if index == 0:\n _ = self.pop_front()\n return\n # else,\n temp_node = self.head\n for _ in range(index-1):\n temp_node = temp_node.next # traverse the list\n index_node = temp_node.next\n # unlink\n temp_node.next = temp_node.next.next\n index_node = None\n self.n -= 1",
"def remove(self,index=0):\n if index>self.size-1: raise IndexError(\"Index out of range.\")\n elif self.size==1: self.reference=None\n else:\n pointer = self.reference\n for i in range(index): pointer = pointer.next\n pointer.previous.next, pointer.next.previous = pointer.next, pointer.previous\n if index==0: self.reference=self.reference.next\n self.size-=1",
"def deleteAtIndex(self, index: int) -> None:\n node = self.get_node(index)\n\n if node:\n #print(\"called inside node to delete is \" + str(node) )\n prev_node = node.prev\n next_node = node.next\n\n if prev_node:\n prev_node.next = next_node\n else:\n self.head = next_node\n if next_node:\n next_node.prev = prev_node\n\n\n\n\n self.node_count -= 1",
"def deleteAtIndex(self, index):\n cur = self.head\n if cur == None:\n return\n elif index == 0:\n self.head = cur.next\n\n cur, i = self.head, 1\n while cur and i != index:\n cur = cur.next\n i += 1\n if cur.next == None:\n cur = None\n else:\n cur.next = cur.next.next",
"def delete_by_index(self, index):\n if index < 0 or index >= self.get_size():\n raise IndexError('Index out of bounds')\n if index == 0:\n self.head = self.head.next\n return\n i = 0\n temp = self.head\n while temp is not None:\n if i == index-1:\n temp.next = temp.next.next\n break\n temp = temp.next\n i += 1",
"def removeChildAtIndex(self, index):\n self.__initChild()\n return self.__child.pop(index)",
"def remove_from_list(self,list_,index):\r\n try:\r\n return list_.pop(self._index_to_int(index))\r\n except IndexError:\r\n self._index_error(list_,index)",
"def deleteAtIndex(self, index: int) -> None:\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n for _ in range(index):\n curr = curr.next\n curr.next = curr.next.next\n self.size -= 1",
"def remove(self, index):\n raise NotImplementedError()",
"def deleteAtIndex(self, index: int) -> None:\n if index < 0 or index > self.cnt-1:\n return \n tmp = self.dummy\n for _ in range(index):\n tmp = tmp.next\n if index == self.cnt - 1:\n tmp.next = None\n else:\n tmp.next = tmp.next.next\n if tmp.next:\n tmp.next.pre = tmp\n self.cnt -= 1",
"def remove_index(self, index):\n current = self.head\n position = index\n if index > (self.size() - 1):\n return None\n elif index == 0:\n self.head = current.next_node\n else: \n while position >= 1:\n previous = current\n current = current.next_node\n position -= 1 \n previous.next_node = current.next_node\n\n return current",
"def erase(self, index):\n node = self._get_node_at(index) \n if node is None:\n raise IndexError('List index out of range.') \n if node == self.head: \n if node.next_node is None:\n self.tail = None \n else: \n node.next_node.prev_node = None \n self.head = node.next_node\n elif node == self.tail: \n node.prev_node.next_node = None \n self.tail = node.prev_node\n else: \n node.prev_node.next_node = node.next_node\n node.next_node.prev_node = node.prev_node\n return node.value",
"def delete_node_at_index(self, index):\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n if index == 0:\n self.head = curr.next\n else:\n for i in range(index - 1):\n curr = curr.next\n curr.next = curr.next.next\n\n self.size -= 1",
"def deleteAtIndex(self, index: int) -> None:\n if(index == 0):\n self.head = self.head.next\n else:\n prev = None \n cur = self.head \n cnt = 0 \n \n while cur != None:\n if(cnt == index):\n next_node = cur.next\n prev.next = next_node \n return\n else:\n prev = cur \n cur = cur.next\n cnt += 1",
"def __delitem__(self, index):\n # If input is a slice then delete all elements as determined\n # by the slice attributes, using an offset to account for the\n # changing size of the list.\n if isinstance(index, slice):\n offset = 0\n for i in xrange(*index.indices(len(self))):\n if i > -(len(self) + 1) or i < len(self):\n del self[i - offset]\n offset += 1\n return\n\n self.__verify_index(index)\n\n if index < 0:\n index += self.length\n\n index, prev_node, cur_node = self.__find_node_index(index)\n del cur_node.data_list[index]\n self.length -= 1\n\n self.__balance_node(prev_node, cur_node)",
"def delete_element(some_list, index):\n del some_list[index]\n return some_list",
"def remove(self, index):\n self.__validate_index(index)\n value = self.__list[index]\n self.__list = self.__list[:index] + self.__list[index + 1:]\n return value",
"def removeNode(self, node):",
"def delete_by_index(self, index):\n cur = self.head\n length=self.get_length()\n if type(index) is int:\n if self.is_empty():\n return\n else:\n if index > length:\n # The index value is out of range and prompts and exits\n print(\"Index is out of range.\")\n return\n else:\n if index == 0:\n if cur.next == None:\n self.head = None\n else:\n cur.next.prev = None\n self.head = cur.next\n return\n else:\n while (index) > 0:\n cur = cur.next\n index -= 1\n\n # Point the next node of cur to the next node of cur\n cur.prev.next = cur.next\n # Point the prev of the next node of cur to the previous node of cur\n cur.next.prev = cur.prev\n length -= 1\n return\n else:\n print(\"Index value is not int.\")\n return",
"def deleteAtIndex(self, index):\n\n if index < 0:\n return -1\n\n p = self.head\n while index and p: # 0-index before index-th\n p = p.next\n index -= 1\n\n if p == None or p.next == None:\n return\n if p.next.next:\n p.next.next.prev = p\n p.next = p.next.next\n if p.next == None:\n self.tail = p\n # self.printList()",
"def deleteAtIndex(self, index):\n cur = self.head\n prev = None\n# self.display(\"deleteAtIndex, deleting value at index \"+str(index))\n if not index:\n head = head.nxt\n if self.tail == cur:\n self.tail = None\n del cur\n return\n \n i = 0\n while i < index and cur:\n prev = cur\n cur = cur.nxt\n i+=1\n if prev:\n if cur:\n prev.nxt = cur.nxt\n if self.tail == cur:\n self.tail = prev\n del cur",
"def delete(self, index):\n if index == 0 and self.head is not None:\n self.head = self.head.next\n return\n\n current_index = 0\n current = self.head\n previous = None\n\n while current:\n if current_index == index:\n previous.next = current.next\n\n previous = current\n current = current.next\n current_index += 1",
"def remove(self, index):\n self.data.pop(index)",
"def __delitem__(self, index):\n del self.chromosome_list[index]",
"def __delitem__(self, idx):\n self.pop(idx)",
"def _remove_child_element(self, index):\n del self._child_elements[index]",
"def erase(self, index):\r\n if index >= self.length():\r\n print(\"ERROR\")\r\n return None\r\n current_index = 0\r\n current_node = self.head\r\n while True:\r\n last_node = current_node\r\n current_node = current_node.next\r\n if current_index == index:\r\n last_node.next = current_node.next\r\n return\r\n current_index += 1",
"def deleteAtIndex(self, index: int) -> None:\n if self.head == None:\n return -1\n curr = self.head\n if index == 0:\n self.head = curr.next\n return\n if index < 0:\n return -1\n for i in range(index - 1):\n curr = curr.next\n if curr is None:\n break\n if curr is None:\n return -1\n if curr.next is None:\n return -1\n \n next = curr.next.next\n curr.next = None\n curr.next = next"
] | [
"0.76427823",
"0.7413118",
"0.73565614",
"0.73051375",
"0.7282952",
"0.7209629",
"0.71623164",
"0.71563464",
"0.7112378",
"0.7101152",
"0.70595527",
"0.7057477",
"0.70570284",
"0.7045277",
"0.70423543",
"0.70323426",
"0.7025147",
"0.70203966",
"0.69942147",
"0.6972183",
"0.6952488",
"0.6946078",
"0.6934494",
"0.69271624",
"0.69245374",
"0.69117385",
"0.68876076",
"0.6887024",
"0.6885122",
"0.6866272"
] | 0.7771887 | 0 |
Checks whether the object is a sheet | def isSheet(self):
return True if self.leftSon is None and self.rightSon is None else False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_sheet(path, sheet): \n xl = pd.ExcelFile(path)\n if sheet not in xl.sheet_names:\n raise ValueError(\"Invalid sheet name \\'\" + sheet +\"\\'\")",
"def check_presSheetExists(self):\n\n if not self.pres_sheetname:\n self.raise_excelerror(\"Required sheet for preservation files\" +\n \"could not be found in workbook.\")\n\n return True",
"def testAddingPropertySheet(self):\n self.failUnless(hasattr(self.properties.aq_base, PROPERTY_SHEET))",
"def is_pdf(obj):\n return isinstance(obj, PDF)",
"def is_object(space, w_obj):\n return space.wrap(space.is_object(w_obj))",
"def test_is_path_supported_excel_with_sheet_success(self, _, input_path):\n self.assertTrue(\n lpt.is_path_supported_excel_with_sheet(input_path),\n input_path + \" should be a valid excel path with a sheet.\",\n )",
"def __call__(self):\n if len(self.handle.sheet_names) > 1: self.multi_sheet()\n else: self.mono_sheet()",
"def _is_user_class(obj):\n type_dict = type(obj).__dict__\n is_user_class = '_pandas_type' in type_dict\n return is_user_class",
"def test_is_path_supported_excel_with_sheet_failure(self, _, input_path):\n self.assertFalse(\n lpt.is_path_supported_excel_with_sheet(input_path),\n input_path + \" should not be a valid excel path with a sheet.\",\n )",
"def is_subclass_of_landscape(cand_type):\n is_scol = False\n if inspect.isclass(cand_type) and issubclass(cand_type, Landscape):\n is_scol = True\n return is_scol",
"def check_workbook_exists(service, spreadsheet_id, tab_name):\n try:\n spreadsheet = service.spreadsheets().get(\n spreadsheetId=spreadsheet_id).execute()\n sheets = spreadsheet['sheets']\n exists = [True for sheet in sheets if sheet['properties']\n ['title'] == tab_name]\n return True if exists else False\n except Exception as e:\n print(f'Failed to check workbook {tab_name} for spreadsheet '\n f'{spreadsheet_id}')\n raise(e)",
"def _valid_typable_object(ui_object, platform=Platform.ANDROID):\n if platform == Platform.ANDROID:\n return ui_object.obj_type in _TYPABLE_OBJECT_DESC.keys()\n else:\n assert False, 'Wrong Platform'",
"def is_trait(schema_obj):\n\n return isinstance(schema_obj, schema.Trait)",
"def _valid_typable_object_with_name(ui_object, platform=Platform.ANDROID):\n if platform == Platform.ANDROID:\n return (ui_object.obj_type in _TYPABLE_OBJECT_DESC.keys() and\n _valid_object_with_name(ui_object))\n else:\n assert False, 'Wrong Platform'",
"def check_workbook_sheets(workbook):\n sheets_in_file = workbook.sheetnames\n valid_sheets_template = []\n for sheet in sheets_in_file:\n # check if a template with that name exists in templates folder\n if os.path.isfile(os.path.join(cd,'templates', sheet + '.xml')):\n logger.info('{} is a valid template'.format(sheet))\n valid_sheets_template.append(sheet)\n else:\n logger.error('{} is not a valid template. Skipping'.format(sheet))\n return valid_sheets_template",
"def is_wcsaxes(axes):\n return isinstance(axes, wcsaxes.WCSAxes)",
"def _is_instance_of(obj: dict, geojson_type: str) -> bool:\n try:\n schema_name = next(t + '.json' for t in GEOJSON_TYPES\n if t.lower() == geojson_type.lower())\n except StopIteration:\n raise GeoJSONError(f'Specified geojson_type ({geojson_type}) does '\n 'not match a supported GeoJSON type.')\n\n filename = DATA_DIR / schema_name\n with open(filename, 'r') as src:\n schema = json.load(src)\n\n return Draft7Validator(schema).is_valid(obj)",
"def is_valid(tab):\n try:\n dynamic_tab_scrapper = eval(f\"{TabScrapper.get_name(tab)}TabScrapper\")\n except NameError:\n return False\n else:\n return issubclass(dynamic_tab_scrapper, TabScrapper)",
"def is_typespace(schema_obj):\n\n return isinstance(schema_obj, schema.Typespace)",
"def is_object_type(self):\n raise exceptions.NotImplementedError()",
"def sheet_setup(self):\n raise NotImplementedError",
"def validate_workbook(self):\n\n valid = True\n\n #Check for a sheet that should have preservation metadata data\n try:\n self.check_presSheetExists()\n except AMIExcelError as e:\n print(\"Error in workbook sheets: \", e.value)\n valid = False\n\n #Check that preservation sheet contains required headers\n for i in range(0, 3):\n try:\n expected = set([item[i] for item in ami_md_constants.MEDIAINGEST_EXPECTED_HEADERS if item[i]])\n found = self.get_headerRow(self.pres_sheetname, i)\n self.check_headerRow(expected, found)\n except AMIExcelError as e:\n print(\"Error in preservation header row {}: {}\"\n .format(i + 1, e.value))\n valid = False\n\n #Check that preservation sheet headers have the correct heirarchy\n try:\n header_entries = set(self.get_headerEntries(self.pres_sheetname))\n self.check_headerEntries(set(ami_md_constants.MEDIAINGEST_EXPECTED_HEADERS), header_entries)\n except AMIExcelError as e:\n print(\"Error in header entries: \", e.value)\n valid = False\n\n #Check that the preservation sheet does not contain equations\n try:\n self.check_noequations(self.pres_sheetname)\n except AMIExcelError as e:\n print(\"Error in cell values: \", e.value)\n valid = False\n\n return valid",
"def load_sheet(sheet_name):\n workbook_path = get_workbook_path()\n wb = openpyxl.load_workbook(workbook_path)\n sheet_obj = wb[sheet_name]\n return sheet_obj, wb",
"def isinstance_blender_object(self, b_obj):\n # lame and slow, but functional\n return b_obj in Blender.Object.Get()",
"def is_object(self, name: str) -> bool:\r\n return os.path.exists(self._path_for_pickle(name))",
"def getSheet(self, sheet_name):\r\n return self.workbook.Sheets(sheet_name)",
"def checkObjectInNameSpace(objectName):\n if objectName is None or not isinstance(objectName, basestring) or objectName == u\"\": return False\n if objectName in globals(): return True\n return objectName in dir(builtins)",
"def __eq__(self, other):\n if not isinstance(other, Worksheet):\n return False\n\n return self.__dict__ == other.__dict__",
"def is_element(obj):\n return isinstance(obj, etree._Element) # noqa",
"def is_work(self):\n from .works import Work\n return isinstance(self, Work)"
] | [
"0.66219234",
"0.61277586",
"0.60819465",
"0.5930213",
"0.5887744",
"0.5702059",
"0.5631677",
"0.56167376",
"0.55882126",
"0.5522886",
"0.55084896",
"0.5459295",
"0.5446342",
"0.54172677",
"0.5390506",
"0.5380341",
"0.5351818",
"0.53457916",
"0.53396326",
"0.533245",
"0.5325259",
"0.532033",
"0.5309598",
"0.53086716",
"0.5303463",
"0.5289029",
"0.5282697",
"0.52405524",
"0.5227871",
"0.5224453"
] | 0.7400545 | 0 |
Callback for a request to stop or start the turtlebot. | def handle_start_stop(self, req):
# if turtlebot is moving, stop it
if self.is_moving:
self.is_moving = False
r = rospy.Rate(2)
cmd_msg = Twist()
cmd_msg.linear.x = NO_SPEED
cmd_msg.angular.z = NO_SPEED
self.cmd_pub.publish(cmd_msg)
r.sleep()
return TriggerResponse(True, "Robot safely stopped.")
# if turtlebot is not moving, start it
else:
self.is_moving = True
self.state.reinitialize()
return TriggerResponse(True, "Robot safely started.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _stop_bot(_event):\n pass",
"def start():\n\tdata = bottle.request.json\n\t(\"START:\", json.dumps(data))\n\n\tresponse = {\"color\": \"#4F1851\", \"headType\": \"evil\", \"tailType\": \"hook\"}\n\treturn HTTPResponse(\n\t\tstatus=200,\n\t\theaders={\"Content-Type\": \"application/json\"},\n\t\tbody=json.dumps(response),\n\t)",
"def start(self):\n while True:\n #requests.get(\"http://localhost:8080/clear\")\n if use_launch_phrase:\n recognizer, audio = self.speech.listen_for_audio()\n if self.speech.is_call_to_action(recognizer, audio):\n self.__acknowledge_action()\n self.decide_action()\n else:\n self.decide_action()",
"def main():\n\n # Play start sound\n play_wave_file(\"start.wav\")\n\n # Connect to Lego Boost\n hub = connect()\n\n # If hub works, starts the main app flow\n if hub:\n speech(\n \"Olá. Eu sou a Faustina, uma robô assistente do ueivespeisse. Em que posso ajudar?\", hub, {})\n while True:\n try:\n act({\"legoAction\": \"colorGreen\"}, hub)\n\n recorded_file = audio.record()\n\n act({\"legoAction\": \"colorRed\"}, hub)\n\n wit_response = wit_client.get_response(recorded_file)\n\n if wit_response[\"_text\"]:\n print(wit_response)\n answer = get_answer(wit_response)\n\n text = add_information_to_text(\n answer) if answer else \"Desculpa, nao entendi o que voce quis dizer\"\n\n speech(text, hub, answer)\n if answer:\n act(answer, hub)\n else:\n act({\"legoAction\": \"colorYellow\"}, hub)\n print(\"No sound detected\")\n time.sleep(2)\n except Exception as exception:\n print(exception)\n\n time.sleep(2)\n hub.motor_external.stop()",
"def TerminalClientStart(self):\n pass",
"def request() -> None:\n\t_flag.set()",
"def quit():\n #quits from python turtle graphics screen\n bye()",
"def run(self):\n self.initialize()\n\n # run the start callback\n tools.run_callback(\"start\", {'request': self._request})\n\n data = self._request.getData()\n pyhttp = self._request.getHttp()\n config = self._request.getConfiguration()\n\n # allow anyone else to handle the request at this point\n handled = tools.run_callback(\"handle\", \n {'request': self._request},\n mappingfunc=lambda x,y:x,\n donefunc=lambda x:x)\n\n if not handled == 1:\n blosxom_handler(self._request)\n\n # do end callback\n tools.run_callback(\"end\", {'request': self._request})",
"def stop():",
"def stop():",
"def stop():",
"def stop():",
"def stop() -> None:",
"def post_stop(self):",
"def _stop(self):",
"def main():\n args = _argument_parsing()\n _prepare_turtle()\n _if_else_statement(args)\n turtle.mainloop()",
"def done(self):\n turtle.done()",
"def TurbiniaStart(\n self,\n evidence: Dict[str, Any],\n threat_intel_indicators: Optional[List[Optional[str]]] = None,\n yara_rules: Optional[List[str]] = None) -> str:\n request_id = ''\n yara_text = ''\n jobs_denylist = [\n 'StringsJob', 'BinaryExtractorJob', 'BulkExtractorJob',\n 'PhotorecJob', 'PsortJob'\n ]\n if not evidence:\n self.ModuleError('No evidence to process', critical=True)\n\n evidence_name = evidence.get('type')\n if yara_rules:\n yara_text = self.DEFAULT_YARA_MODULES + '\\n'.join(list(yara_rules))\n\n # Build request and request_options objects to send to the API server.\n request_options: Dict[str, Any] = {\n 'filter_pattern': threat_intel_indicators,\n 'jobs_allowlist': [],\n 'jobs_denylist': jobs_denylist,\n 'reason': self.incident_id,\n 'requester': getpass.getuser(),\n 'yara_rules': yara_text\n }\n if self.sketch_id:\n request_options['sketch_id'] = int(self.sketch_id)\n\n if self.turbinia_recipe:\n request_options['recipe_name'] = self.turbinia_recipe\n # Remove incompatible options\n try:\n request_options.pop('filter_pattern')\n request_options.pop('jobs_denylist')\n request_options.pop('jobs_allowlist')\n except KeyError as exception:\n self.logger.debug(f'Key: {exception} not found in request options.')\n\n request = {'evidence': evidence, 'request_options': request_options}\n\n # Send the request to the API server.\n try:\n response = self.requests_api_instance.create_request(request)\n request_id = response.get('request_id')\n self.PublishMessage(\n 'Creating Turbinia request {0!s} with evidence {1!s}'.format(\n request_id, evidence_name))\n self.PublishMessage(\n 'Turbinia request status at {0!s}'.format(self.turbinia_api))\n except turbinia_api_lib.ApiException as exception:\n self.ModuleError(str(exception), critical=True)\n\n if not request_id:\n self.ModuleError('Unable to create Turbinia request', critical=True)\n\n return request_id",
"def force_stop(self):\n #cancel any current request:\n self._cancel_current_request()",
"def setTurtle(t):\r\n t.pu()\r\n t.goto(initialCoordinates())",
"def handle_set_loop(self, req):\n rospy.wait_for_service('/get_trajectory')\n try:\n get_traj = rospy.ServiceProxy('/get_trajectory', srvs.GetTrajectory)\n trajectory = get_traj(True, req.node_id, 0).trajectory\n if trajectory == []:\n msg = ('No trajectory found!!')\n return srvs.SetLoopResponse(False, msg)\n except rospy.ServiceException, e:\n raise NameError(\"Service call failed: %s\" % e)\n self.np_trajectory = to_numpy_trajectory(trajectory)\n self.loop = True\n self.at_dest = False\n msg = (\"Closed loop trajectory of vehicle #%i \" % self.vehicle_id +\n \"successfully set.\")\n return srvs.SetLoopResponse(True, msg)",
"def request_stop(self):\n self._messaged.emit((\"stop\",None,0,None))",
"def StopRequestHook(ref, args, request):\n del ref\n del args\n stop_request = GetMessagesModule().StopNodeRequest()\n request.stopNodeRequest = stop_request\n return request",
"def _start_stop(self):\n if not self.running:\n #get configuration from gui\n self._start_session()\n else:\n self._stop_session()",
"def stop(self):\n self.close_conn()\n self.finished = True\n self.request_handler.stop()\n print \"Finished closing\"",
"def start(self):\n while True:\n requests.get(\"http://localhost:8080/clear\") #clearing the screen on the web browser\n speech=\"Welcome to Smart Mirror !!\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % speech) # calling the text to appear on the browser\n self.speech.synthesize_text(\"hello\"+speech) #synthesizing the text into speech\n speech1=\"Say The launch Phrase .\" #asking the user to say the lauch phrase\n self.speech.synthesize_text(speech1) #speaking of the above line,\n if self.vision.recognize_face(): #checking if\n print \"Face Found\"\t\t\t#the person is infront of camera\n if use_launch_phrase:\t\t\t#checking whether to use the launch phrase or not\n recognizer, audio = self.speech.listen_for_audio()\t\t#initializing\n if self.speech.is_call_to_action(recognizer, audio):\t#checking if the audio is recognized\n self.__acknowledge_action()\t\t\t#if it is recognized take action\n self.decide_action()\t\t\t#deciding which action to be taken\n else:\n self.decide_action()\t\t\t#printing the else part",
"def run(self):\n while self.running:\n self.handle_request()",
"def stop():\n tidyup()\n shutdown_server()\n return \"Parando Servidor\"",
"def stop():\n tidyup()\n shutdown_server()\n return \"Parando Servidor\"",
"def stop():\n tidyup()\n shutdown_server()\n return \"Parando Servidor\""
] | [
"0.5796821",
"0.5615687",
"0.54661393",
"0.5440809",
"0.54155004",
"0.5407269",
"0.5348813",
"0.52929497",
"0.5290731",
"0.5290731",
"0.5290731",
"0.5290731",
"0.52868515",
"0.521621",
"0.5197771",
"0.51816416",
"0.5175744",
"0.51570326",
"0.5147758",
"0.5133411",
"0.5114982",
"0.508264",
"0.5076514",
"0.5049754",
"0.5044871",
"0.50148094",
"0.50126934",
"0.5002446",
"0.5002446",
"0.5002446"
] | 0.7249804 | 0 |