Dataset column schema (type and observed value range per column):

    column                   type      observed values
    body                     string    lengths 26 to 98.2k
    body_hash                int64     -9,222,864,604,528,158,000 to 9,221,803,474B
    docstring                string    lengths 1 to 16.8k
    path                     string    lengths 5 to 230
    name                     string    lengths 1 to 96
    repository_name          string    lengths 7 to 89
    lang                     string    1 value
    body_without_docstring   string    lengths 20 to 98.2k

name: fetch
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -2,003,002,863,357,165,600
body:

def fetch(self):
    """
    Fetch a FeedbackInstance

    :returns: Fetched FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    params = values.of({})
    payload = self._version.fetch('GET', self._uri, params=params)
    return FeedbackInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        call_sid=self._solution['call_sid'],
    )

name: update
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: 1,302,640,320,467,985,700
body:

def update(self, quality_score, issue=values.unset):
    """
    Update the FeedbackInstance

    :param unicode quality_score: An integer from 1 to 5
    :param FeedbackInstance.Issues issue: Issues experienced during the call

    :returns: Updated FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    data = values.of({'QualityScore': quality_score, 'Issue': issue})
    payload = self._version.update('POST', self._uri, data=data)
    return FeedbackInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        call_sid=self._solution['call_sid'],
    )

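For orientation, a minimal usage sketch of the two context methods above. It is an assumption-laden sketch, not from the source repository: it presumes a configured twilio-python Client, placeholder SID and token strings, and that the FeedbackContext is reached through this SDK's usual subresource accessor pattern.

from twilio.rest import Client

# Placeholder credentials and call SID; substitute real values.
client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')
feedback_ctx = client.calls('CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').feedback()

# update() POSTs QualityScore (and optionally Issue), returning a FeedbackInstance.
feedback = feedback_ctx.update(quality_score=5)

# fetch() GETs the current feedback for the call.
feedback = feedback_ctx.fetch()
print(feedback.quality_score)
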
name: __repr__
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -4,387,753,038,428,856,000
body:

def __repr__(self):
    """
    Provide a friendly representation

    :returns: Machine friendly representation
    :rtype: str
    """
    context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
    return '<Twilio.Api.V2010.FeedbackContext {}>'.format(context)

name: __init__
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -801,141,970,854,188,800
body:

def __init__(self, version, payload, account_sid, call_sid):
    """
    Initialize the FeedbackInstance

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    super(FeedbackInstance, self).__init__(version)
    self._properties = {
        'account_sid': payload['account_sid'],
        'date_created': deserialize.rfc2822_datetime(payload['date_created']),
        'date_updated': deserialize.rfc2822_datetime(payload['date_updated']),
        'issues': payload['issues'],
        'quality_score': deserialize.integer(payload['quality_score']),
        'sid': payload['sid'],
    }
    self._context = None
    self._solution = {'account_sid': account_sid, 'call_sid': call_sid}

name: _proxy
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -4,084,114,105,181,760,500
body:

@property
def _proxy(self):
    """
    Generate an instance context for the instance, the context is capable of
    performing various actions. All instance actions are proxied to the context

    :returns: FeedbackContext for this FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    """
    if self._context is None:
        self._context = FeedbackContext(
            self._version,
            account_sid=self._solution['account_sid'],
            call_sid=self._solution['call_sid'],
        )
    return self._context

name: account_sid
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -6,875,424,500,779,715,000
body:

@property
def account_sid(self):
    """
    :returns: The account_sid
    :rtype: unicode
    """
    return self._properties['account_sid']

name: date_created
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -250,917,683,243,838,050
body:

@property
def date_created(self):
    """
    :returns: The date_created
    :rtype: datetime
    """
    return self._properties['date_created']

name: date_updated
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -8,425,627,007,073,683,000
body:

@property
def date_updated(self):
    """
    :returns: The date_updated
    :rtype: datetime
    """
    return self._properties['date_updated']

name: issues
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -594,813,764,122,206,500
body:

@property
def issues(self):
    """
    :returns: The issues
    :rtype: FeedbackInstance.Issues
    """
    return self._properties['issues']

name: quality_score
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -4,891,796,970,587,220,000
body:

@property
def quality_score(self):
    """
    :returns: 1 to 5 quality score
    :rtype: unicode
    """
    return self._properties['quality_score']

name: sid
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -8,253,500,487,129,927,000
body:

@property
def sid(self):
    """
    :returns: The sid
    :rtype: unicode
    """
    return self._properties['sid']

name: create
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -2,144,275,642,321,963,300
body:

def create(self, quality_score, issue=values.unset):
    """
    Create a new FeedbackInstance

    :param unicode quality_score: The quality_score
    :param FeedbackInstance.Issues issue: The issue

    :returns: Newly created FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    return self._proxy.create(quality_score, issue=issue)

name: fetch
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -8,067,892,351,115,993,000
body:

def fetch(self):
    """
    Fetch a FeedbackInstance

    :returns: Fetched FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    return self._proxy.fetch()

name: update
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: 969,629,373,209,911,300
body:

def update(self, quality_score, issue=values.unset):
    """
    Update the FeedbackInstance

    :param unicode quality_score: An integer from 1 to 5
    :param FeedbackInstance.Issues issue: Issues experienced during the call

    :returns: Updated FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    return self._proxy.update(quality_score, issue=issue)

name: __repr__
path: lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
repository_name: Jason-Paprocki/hacknjit
lang: python
body_hash: -429,944,536,459,628,300
body:

def __repr__(self):
    """
    Provide a friendly representation

    :returns: Machine friendly representation
    :rtype: str
    """
    context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
    return '<Twilio.Api.V2010.FeedbackInstance {}>'.format(context)

name: processLayoutText
path: tracking/layout.py
repository_name: chibinz/CS181
lang: python
body_hash: 9,172,213,862,141,142,000
body:

def processLayoutText(self, layoutText):
    """
    Coordinates are flipped from the input format to the (x,y) convention here

    The shape of the maze. Each character
    represents a different type of object.
     % - Wall
     . - Food
     o - Capsule
     G - Ghost
     P - Pacman
    Other characters are ignored.
    """
    maxY = self.height - 1
    for y in range(self.height):
        for x in range(self.width):
            layoutChar = layoutText[maxY - y][x]
            self.processLayoutChar(x, y, layoutChar)
    self.agentPositions.sort()
    self.agentPositions = [(i == 0, pos) for i, pos in self.agentPositions]

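A self-contained sketch of the coordinate flip the docstring describes; the tiny maze string below is made up for illustration. Text row 0 is the top of the maze, so it maps to the largest y.

layout_text = ["%%%%",
               "%P.%",
               "%%%%"]
height = len(layout_text)    # 3
width = len(layout_text[0])  # 4
max_y = height - 1

for y in range(height):
    for x in range(width):
        # Text row 0 (top of the maze) maps to y = max_y.
        char = layout_text[max_y - y][x]
        if char == 'P':
            print('Pacman at (x={}, y={})'.format(x, y))  # (x=1, y=1)
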
name: train
path: ml.py
repository_name: Fennec2000GH/Poly-Finance
lang: python
body_hash: -8,926,492,144,770,881,000
body:

def train(estimator: object, X: Iterable[Union[int, float]], y: Iterable):
    """
    Train custom classifier model.

    Parameters:
        estimator: Unfitted estimator.
        X: Input training data.
        y: Labels for training data.

    Returns:
        Fitted estimator model.
    """
    return estimator.fit(X=X, y=y)

name: classify
path: ml.py
repository_name: Fennec2000GH/Poly-Finance
lang: python
body_hash: 7,964,491,657,674,355,000
body:

def classify(estimator: object, X: Iterable[Union[int, float]]):
    """
    Predict with custom classifier model.

    Parameters:
        estimator: Fitted estimator.
        X: Input test data.

    Returns:
        Predicted labels.
    """
    return estimator.predict(X=X)

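A short end-to-end sketch of train and classify, assuming the two helpers above are importable from ml.py and that scikit-learn is installed; the dataset is synthetic.

from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

from ml import classify, train

X, y = make_classification(n_samples=200, n_features=4, random_state=0)
model = train(LogisticRegression(max_iter=1000), X, y)  # wraps estimator.fit
pred = classify(model, X)                               # wraps estimator.predict
print((pred == y).mean())  # training accuracy
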
name: regress
path: ml.py
repository_name: Fennec2000GH/Poly-Finance
lang: python
body_hash: 1,504,345,972,072,559,400
body:

def regress(estimator: object, X: Iterable[Union[int, float]], y: Iterable):
    """
    Predict with custom regressor model.

    Parameters:
        estimator: Fitted estimator.
        X: Input test data.
        y: Labels for test data.

    Returns:
        Predicted labels.
    """
    pass

name: evaluate
path: ml.py
repository_name: Fennec2000GH/Poly-Finance
lang: python
body_hash: -6,500,435,570,618,804,000
body:

def evaluate(estimator: object, X: Iterable[Union[int, float]], y: Iterable):
    """
    Evaluate a fitted classifier model.

    Parameters:
        estimator: Fitted estimator.
        X: Input test data.
        y: Labels for test data.

    Returns:
        None. Writes a classification report and ROC curve to the Streamlit app.
    """
    pred = estimator.predict(X=X)
    report = classification_report(y_true=y, y_pred=pred)
    st.write('Classification Report')
    st.write(report)
    fpr, tpr, thresholds = roc_curve(y, pred)
    roc_auc = auc(fpr, tpr)
    # RocCurveDisplay is a display object; it must be plotted before its
    # figure can be handed to Streamlit.
    display = RocCurveDisplay(fpr=fpr, tpr=tpr, roc_auc=roc_auc,
                              estimator_name=type(estimator).__name__)
    display.plot()
    st.pyplot(fig=display.figure_)

name: iter_linear_fit
path: tweakwcs/linearfit.py
repository_name: jhunkeler/tweakwcs
lang: python
body_hash: -8,699,323,938,293,037,000
body:

def iter_linear_fit(xy, uv, wxy=None, wuv=None, fitgeom='general',
                    center=None, nclip=3, sigma=(3.0, 'rmse'),
                    clip_accum=False):
    r"""
    Compute linear transformation parameters that "best" (in the sense of
    minimizing residuals) transform ``uv`` source positions to ``xy``
    source positions iteratively using sigma-clipping.

    More precisely, this function attempts to find a ``2x2`` matrix ``F`` and
    a shift vector ``s`` that minimize the residuals between the *transformed*
    reference source coordinates ``uv``

    .. math::
        \mathbf{xy}'_k = \mathbf{F}\cdot(\mathbf{uv}_k-\mathbf{c})+\
        \mathbf{s} + \mathbf{c}
        :label: ilf1

    and the "observed" source positions ``xy``:

    .. math::
        \epsilon^2 = \Sigma_k w_k \|\mathbf{xy}_k-\mathbf{xy}'_k\|^2.
        :label: ilf2

    In the above equations, :math:`\mathbf{F}` is a ``2x2`` matrix while
    :math:`\mathbf{xy}_k` and :math:`\mathbf{uv}_k` are the position
    coordinates of the ``k``-th source (row in input ``xy`` and ``uv``
    arrays).

    One of the two catalogs (``xy`` or ``uv``) contains what we refer to as
    "image" source positions and the other one as "reference" source
    positions. The meaning assigned to the ``xy`` and ``uv`` parameters is
    up to the caller of this function.

    Parameters
    ----------
    xy: numpy.ndarray
        A ``(N, 2)``-shaped array of source positions (one 2-coordinate
        position per line).

    uv: numpy.ndarray
        A ``(N, 2)``-shaped array of source positions (one 2-coordinate
        position per line). This array *must have* the same length (shape)
        as the ``xy`` array.

    wxy: numpy.ndarray, None, optional
        A 1-dimensional array of weights of the same length (``N``)
        as the ``xy`` array indicating how much a given coordinate should be
        weighted in the fit. If not provided or set to `None`, all positions
        will contribute equally to the fit if ``wuv`` is also set to `None`.
        See ``Notes`` section for more details.

    wuv: numpy.ndarray, None, optional
        A 1-dimensional array of weights of the same length (``N``)
        as the ``xy`` array indicating how much a given coordinate should be
        weighted in the fit. If not provided or set to `None`, all positions
        will contribute equally to the fit if ``wxy`` is also set to `None`.
        See ``Notes`` section for more details.

    fitgeom: {'shift', 'rscale', 'general'}, optional
        The fitting geometry to be used in fitting the matched object lists.
        This parameter is used in fitting the shifts (offsets), rotations
        and/or scale changes from the matched object lists. The 'general'
        fit geometry allows for independent scale and rotation for each axis.

    center: tuple, list, numpy.ndarray, None, optional
        A list-like container with two ``X``- and ``Y``-positions of the
        center (origin) of rotations in the ``uv`` and ``xy`` coordinate
        frames. If not provided, ``center`` is estimated as a (weighted)
        mean position in the ``uv`` frame.

    nclip: int, None, optional
        Number (a non-negative integer) of clipping iterations in fit.
        Clipping will be turned off if ``nclip`` is either `None` or 0.

    sigma: float, tuple of the form (float, str), optional
        When a tuple is provided, the first value (a positive number)
        indicates the number of "fit error estimates" to use for clipping.
        The second value (a string) indicates the statistic to be
        used for the "fit error estimate". Currently the following values
        are supported: ``'rmse'``, ``'mae'``, and ``'std'``
        - see ``Notes`` section for more details.

        When ``sigma`` is a single number, it must be a positive number and
        the default error estimate ``'rmse'`` is assumed.

        This parameter is ignored when ``nclip`` is either `None` or 0.

    clip_accum: bool, optional
        Indicates whether or not to reset the list of "bad" (clipped out)
        sources after each clipping iteration. When set to `True` the list
        only grows with each iteration as "bad" positions never re-enter the
        pool of available positions for the fit. By default the list of
        "bad" source positions is purged at each iteration.

    Returns
    -------
    fit: dict
        - ``'shift'``: A ``numpy.ndarray`` with two components of the
          computed shift.
        - ``'shift_ld'``: A ``numpy.ndarray`` with two components of the
          computed shift of type ``numpy.longdouble``.
        - ``'matrix'``: A ``2x2`` ``numpy.ndarray`` with the computed
          generalized rotation matrix.
        - ``'matrix_ld'``: A ``2x2`` ``numpy.ndarray`` with the computed
          generalized rotation matrix of type ``numpy.longdouble``.
        - ``'proper_rot'``: Rotation angle (degree) as if the rotation is
          proper.
        - ``'rot'``: A tuple of ``(rotx, roty)`` - the rotation angles with
          regard to the ``X`` and ``Y`` axes.
        - ``'<rot>'``: *Arithmetic mean* of the angles of rotation around
          the ``X`` and ``Y`` axes.
        - ``'scale'``: A tuple of ``(sx, sy)`` - scale change in the
          direction of the ``X`` and ``Y`` axes.
        - ``'<scale>'``: *Geometric mean* of scales ``sx`` and ``sy``.
        - ``'skew'``: Computed skew.
        - ``'proper'``: a boolean indicating whether the rotation is proper.
        - ``'fitgeom'``: Fit geometry (allowed transformations) used for
          fitting data (to minimize residuals). This is a copy of the input
          argument ``fitgeom``.
        - ``'center'``: Center of rotation
        - ``'center_ld'``: Center of rotation as a ``numpy.longdouble``.
        - ``'fitmask'``: A boolean array indicating which source positions
          were used for fitting (`True`) and which were clipped out
          (`False`). **NOTE** For weighted fits, positions with zero
          weights are automatically excluded from the fits.
        - ``'eff_nclip'``: Effective number of clipping iterations
        - ``'rmse'``: Root-Mean-Square Error
        - ``'mae'``: Mean Absolute Error
        - ``'std'``: Standard Deviation of the residuals
        - ``'resids'``: An array of residuals of the fit.
          **NOTE:** Only the residuals for the "valid" points are reported
          here. Therefore the length of this array may be smaller than the
          length of input arrays of positions.

    Notes
    -----
    **Weights**

    Weights can be provided for both "image" source positions and
    "reference" source positions. When no weights are given, all positions
    are weighted equally. When only one set of positions has weights (i.e.,
    either ``wxy`` or ``wuv`` is not `None`) then weights in :eq:`ilf2` are
    set to be equal to the provided set of weights. When weights for *both*
    "image" source positions and "reference" source positions are provided,
    then the combined weight that is used in :eq:`ilf2` is computed as:

    .. math::
        1/w = 1/w_{xy} + 1/w_{uv}.

    **Statistics for clipping**

    Several statistics are available for clipping iterations and all of them
    are reported in the returned ``fit`` dictionary regardless of the
    setting in ``sigma``:

    .. math::
        \mathrm{RMSE} = \sqrt{\Sigma_k w_k \|\mathbf{r}_k\|^2}

    .. math::
        \mathrm{MAE} = \sqrt{\Sigma_k w_k \|\mathbf{r}_k\|}

    .. math::
        \mathrm{STD} = \sqrt{\Sigma_k w_k \|\mathbf{r}_k - \
        \mathbf{\overline{r}}\|^2}/(1-V_2)

    where :math:`\mathbf{r}_k=\mathbf{xy}_k-\mathbf{xy}'_k`,
    :math:`\Sigma_k w_k = 1`, and :math:`V_2=\Sigma_k w_k^2`.

    """
    if fitgeom == 'general':
        linear_fit = fit_general
    elif fitgeom == 'rscale':
        linear_fit = fit_rscale
    elif fitgeom == 'shift':
        linear_fit = fit_shifts
    else:
        raise ValueError("Unsupported 'fitgeom' value: '{}'".format(fitgeom))

    minobj_per_fitgeom = {'shift': 1, 'rscale': 2, 'general': 3}
    minobj = minobj_per_fitgeom[fitgeom]

    xy = np.array(xy, dtype=np.longdouble)
    uv = np.array(uv, dtype=np.longdouble)

    if len(xy.shape) != 2 or xy.shape[1] != 2 or uv.shape != xy.shape:
        raise ValueError("Input coordinate arrays 'xy' and 'uv' must be of "
                         "shape (N, 2) where N is the number of coordinate "
                         "points.")

    wmask = np.ones(len(xy), dtype=np.bool_)

    if wxy is not None:
        wxy = np.asarray(wxy)
        if len(wxy.shape) != 1 or wxy.shape[0] != xy.shape[0]:
            raise ValueError("Weights 'wxy' must be a 1-dimensional vector "
                             "of length equal to the number of input points.")
        wmask *= wxy > 0.0

    if wuv is not None:
        wuv = np.asarray(wuv)
        if len(wuv.shape) != 1 or wuv.shape[0] != xy.shape[0]:
            raise ValueError("Weights 'wuv' must be a 1-dimensional vector "
                             "of length equal to the number of input points.")
        wmask *= wuv > 0.0

    mask = wmask

    if sigma is None and nclip is not None and nclip > 0:
        raise ValueError("Argument 'sigma' cannot be None when 'nclip' is "
                         "a positive number.")

    if isinstance(sigma, numbers.Number):
        sigstat = 'rmse'
        nsigma = float(sigma)
    elif sigma is not None:
        nsigma = float(sigma[0])
        sigstat = sigma[1]
        if sigstat not in ['rmse', 'mae', 'std']:
            raise ValueError('Unsupported sigma statistics value.')

    if sigma is not None and nsigma <= 0.0:
        raise ValueError('The value of sigma for clipping iterations must '
                         'be positive.')

    if nclip is None:
        nclip = 0
    else:
        if nclip < 0:
            raise ValueError("Argument 'nclip' must be non-negative.")
        nclip = int(nclip)

    if np.count_nonzero(mask) == minobj:
        log.warning("The number of sources for the fit is smaller than the "
                    "minimum number of sources necessary for the requested "
                    "'fitgeom'.")
        log.warning('Resetting number of clipping iterations to 0.')
        nclip = 0

    if center is None:
        center_ld = uv[mask].mean(axis=0, dtype=np.longdouble)
        center = center_ld.astype(np.double)
    else:
        center_ld = np.longdouble(center)

    xy[mask] -= center_ld
    uv[mask] -= center_ld

    log.info("Performing '{:s}' fit".format(fitgeom))

    wmxy = None if wxy is None else wxy[mask]
    wmuv = None if wuv is None else wuv[mask]
    fit = linear_fit(xy[mask], uv[mask], wmxy, wmuv)

    effective_nclip = 0
    for n in range(nclip):
        resids = fit['resids']
        # redefine what pixels will be included in next iteration
        cutoff = nsigma * fit[sigstat]
        nonclipped = np.linalg.norm(resids, axis=1) < cutoff
        if np.count_nonzero(nonclipped) < minobj or nonclipped.all():
            break
        effective_nclip += 1

        prev_mask = mask
        if not clip_accum:
            mask = np.array(wmask)
        mask[prev_mask] *= nonclipped

        wmxy = None if wxy is None else wxy[mask]
        wmuv = None if wuv is None else wuv[mask]
        fit = linear_fit(xy[mask], uv[mask], wmxy, wmuv)

    fit['center'] = center
    fit['center_ld'] = center_ld
    fit['fitmask'] = mask
    fit['eff_nclip'] = effective_nclip
    return fit

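To make the interface concrete, here is a synthetic-data sketch (not from the source repository) that assumes iter_linear_fit is importable from tweakwcs.linearfit; the rotation angle, scale, and shift below are made-up test values.

import numpy as np

from tweakwcs.linearfit import iter_linear_fit

rng = np.random.default_rng(42)
uv = 100.0 * rng.random((50, 2))

# Apply a known 1-degree rotation and a (2.5, -1.0) shift to fabricate xy.
theta = np.deg2rad(1.0)
F = np.array([[np.cos(theta), np.sin(theta)],
              [-np.sin(theta), np.cos(theta)]])
xy = np.dot(uv, F.T) + np.array([2.5, -1.0])

# center=(0, 0) keeps the recovered shift directly comparable to the input.
fit = iter_linear_fit(xy, uv, fitgeom='rscale', center=(0, 0),
                      nclip=3, sigma=(3.0, 'rmse'))
print(fit['shift'])  # approximately [2.5, -1.0]
print(fit['rot'])    # approximately 1 degree about each axis
print(fit['rmse'])   # approximately 0 for this noise-free data
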
name: fit_shifts
path: tweakwcs/linearfit.py
repository_name: jhunkeler/tweakwcs
lang: python
body_hash: -1,491,896,026,331,747,000
body:

def fit_shifts(xy, uv, wxy=None, wuv=None):
    """ Fits (non-iteratively and without sigma-clipping) a displacement
    transformation only between input lists of positions ``xy`` and ``uv``.
    When weights are provided, a weighted fit is performed. Parameter
    descriptions and return values are identical to those in
    `iter_linear_fit`, except the returned ``fit`` dictionary does not
    contain the following keys irrelevant to this function: ``'center'``,
    ``'fitmask'``, and ``'eff_nclip'``.

    """
    if xy.size == 0:
        raise NotEnoughPointsError('At least one point is required to find '
                                   'shifts.')

    diff_pts = np.subtract(xy, uv, dtype=np.longdouble)

    if wxy is None and wuv is None:
        # no weighting
        w = None
        meanx = diff_pts[:, 0].mean(dtype=np.longdouble)
        meany = diff_pts[:, 1].mean(dtype=np.longdouble)
    else:
        if wxy is None:
            w = np.array(wuv, dtype=np.longdouble)
        elif wuv is None:
            w = np.array(wxy, dtype=np.longdouble)
        else:
            # combined weight: 1/w = 1/wxy + 1/wuv
            wuv = np.array(wuv, dtype=np.longdouble)
            wxy = np.array(wxy, dtype=np.longdouble)
            m = np.logical_and(wuv > 0, wxy > 0)
            w = np.zeros_like(wuv)
            w[m] = wxy[m] * wuv[m] / (wxy[m] + wuv[m])

        if np.any(w < 0.0):
            raise ValueError('Invalid weights: weights must be '
                             'non-negative.')

        if not np.sum(w > 0, dtype=int):
            raise ValueError("Not enough valid data for 'shift' fit: "
                             "too many weights are zero!")

        w /= np.sum(w, dtype=np.longdouble)
        meanx = np.dot(w, diff_pts[:, 0])
        meany = np.dot(w, diff_pts[:, 1])

    p = np.array([1.0, 0.0, meanx], dtype=np.longdouble)
    q = np.array([0.0, 1.0, meany], dtype=np.longdouble)

    fit = _build_fit(p, q, 'shift')
    resids = diff_pts - fit['shift']
    fit['resids'] = resids.astype(np.double)
    _compute_stat(fit, residuals=resids, weights=w)
    return fit

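A tiny worked example of fit_shifts, under the same tweakwcs.linearfit import assumption: for the pure-shift geometry the fitted shift is just the (weighted) mean of ``xy - uv`` and the matrix is the identity.

import numpy as np

from tweakwcs.linearfit import fit_shifts

xy = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
uv = xy - np.array([0.5, -0.25])  # displace every point identically
fit = fit_shifts(xy, uv)
print(fit['shift'])   # approximately [0.5, -0.25]
print(fit['matrix'])  # identity; the 'shift' geometry fits no rotation or scale
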
name: fit_rscale
path: tweakwcs/linearfit.py
repository_name: jhunkeler/tweakwcs
lang: python
body_hash: 2,982,510,986,755,505,700
body:

def fit_rscale(xy, uv, wxy=None, wuv=None):
    """ Fits (non-iteratively and without sigma-clipping) displacement,
    rotation, and scale transformations between input lists of positions
    ``xy`` and ``uv``. When weights are provided, a weighted fit is
    performed. Parameter descriptions and return values are identical to
    those in `iter_linear_fit`, except the returned ``fit`` dictionary does
    not contain the following keys irrelevant to this function:
    ``'center'``, ``'fitmask'``, and ``'eff_nclip'``.

    """
    if len(xy) < 2:
        raise NotEnoughPointsError('At least two points are required to '
                                   'find shifts, rotation, and scale.')

    x = np.array(xy[:, 0], dtype=np.longdouble)
    y = np.array(xy[:, 1], dtype=np.longdouble)
    u = np.array(uv[:, 0], dtype=np.longdouble)
    v = np.array(uv[:, 1], dtype=np.longdouble)

    if wxy is None and wuv is None:
        # no weighting
        w = None

        xm = np.mean(x)
        ym = np.mean(y)
        um = np.mean(u)
        vm = np.mean(v)

        x -= xm
        y -= ym
        u -= um
        v -= vm

        su2 = np.dot(u, u)
        sv2 = np.dot(v, v)
        sxv = np.dot(x, v)
        syu = np.dot(y, u)
        sxu = np.dot(x, u)
        syv = np.dot(y, v)
        su2v2 = su2 + sv2
    else:
        if wxy is None:
            w = np.array(wuv, dtype=np.longdouble)
        elif wuv is None:
            w = np.array(wxy, dtype=np.longdouble)
        else:
            # combined weight: 1/w = 1/wxy + 1/wuv
            wuv = np.array(wuv, dtype=np.longdouble)
            wxy = np.array(wxy, dtype=np.longdouble)
            m = np.logical_and(wuv > 0, wxy > 0)
            w = np.zeros_like(wuv)
            w[m] = wxy[m] * wuv[m] / (wxy[m] + wuv[m])

        if np.any(w < 0.0):
            raise ValueError('Invalid weights: weights must be '
                             'non-negative.')

        if np.sum(w > 0) < 2:
            raise ValueError("Not enough valid data for 'rscale' fit: "
                             "too many weights are zero!")

        w /= np.sum(w, dtype=np.longdouble)

        xm = np.dot(w, x)
        ym = np.dot(w, y)
        um = np.dot(w, u)
        vm = np.dot(w, v)

        x -= xm
        y -= ym
        u -= um
        v -= vm

        su2 = np.dot(w, u ** 2)
        sv2 = np.dot(w, v ** 2)
        sxv = np.dot(w, x * v)
        syu = np.dot(w, y * u)
        sxu = np.dot(w, x * u)
        syv = np.dot(w, y * v)
        su2v2 = su2 + sv2

    det = sxu * syv - sxv * syu
    if det < 0:
        rot_num = sxv + syu
        rot_denom = sxu - syv
    else:
        rot_num = sxv - syu
        rot_denom = sxu + syv

    if rot_num == rot_denom:
        theta = 0.0
    else:
        theta = np.rad2deg(np.arctan2(rot_num, rot_denom))
        if theta < 0:
            theta += 360.0

    ctheta = np.cos(np.deg2rad(theta))
    stheta = np.sin(np.deg2rad(theta))
    s_num = rot_denom * ctheta + rot_num * stheta

    if su2v2 > 0.0:
        mag = s_num / su2v2
    else:
        raise SingularMatrixError('Singular matrix: suspected colinear '
                                  'points.')

    if det < 0:
        # improper rotation (axis flip)
        sthetax = -mag * stheta
        cthetay = -mag * ctheta
    else:
        sthetax = mag * stheta
        cthetay = mag * ctheta

    cthetax = mag * ctheta
    sthetay = mag * stheta

    sdet = np.sign(det)
    xshift = xm - um * cthetax - sdet * vm * sthetax
    yshift = ym + sdet * um * sthetay - vm * cthetay

    p = np.array([cthetax, sthetay, xshift], dtype=np.longdouble)
    q = np.array([-sthetax, cthetay, yshift], dtype=np.longdouble)

    fit = _build_fit(p, q, fitgeom='rscale')
    resids = xy - np.dot(uv, fit['matrix_ld'].T) - fit['shift_ld']
    fit['resids'] = resids.astype(np.double)
    _compute_stat(fit, residuals=resids, weights=w)
    return fit

def fit_general(xy, uv, wxy=None, wuv=None): " Fits (non-iteratively and without sigma-clipping) a displacement,\n rotation, scale, and skew transformations (i.e., the full ``2x2``\n transformation matrix) between input lists of positions\n ``xy`` and ``uv``. When weights are provided, a weighted fit is performed.\n Parameter descriptions and return values are identical to those\n in `iter_linear_fit`, except returned ``fit`` dictionary does not contain\n the following keys irrelevant to this function: ``'center'``,\n ``'fitmask'``, and ``'eff_nclip'``.\n\n " if (len(xy) < 3): raise NotEnoughPointsError('At least three points are required to find 6-parameter linear affine transformations.') x = np.array(xy[:, 0], dtype=np.longdouble) y = np.array(xy[:, 1], dtype=np.longdouble) u = np.array(uv[:, 0], dtype=np.longdouble) v = np.array(uv[:, 1], dtype=np.longdouble) if ((wxy is None) and (wuv is None)): w = None sw = float(x.size) sx = x.sum() sy = y.sum() su = u.sum() sv = v.sum() sxu = np.dot(x, u) syu = np.dot(y, u) sxv = np.dot(x, v) syv = np.dot(y, v) suu = np.dot(u, u) svv = np.dot(v, v) suv = np.dot(u, v) else: if (wxy is None): w = np.array(wuv, dtype=np.longdouble) elif (wuv is None): w = np.array(wxy, dtype=np.longdouble) else: wuv = np.array(wuv, dtype=np.longdouble) wxy = np.array(wxy, dtype=np.longdouble) m = np.logical_and((wuv > 0), (wxy > 0)) w = np.zeros_like(wuv) w[m] = ((wxy[m] * wuv[m]) / (wxy[m] + wuv[m])) if np.any((w < 0.0)): raise ValueError('Invalid weights: weights must be non-negative.') if (np.sum((w > 0)) < 3): raise ValueError("Not enough valid data for 'general' fit: too many weights are zero!") sw = np.sum(w, dtype=np.longdouble) sx = np.dot(w, x) sy = np.dot(w, y) su = np.dot(w, u) sv = np.dot(w, v) sxu = np.dot(w, (x * u)) syu = np.dot(w, (y * u)) sxv = np.dot(w, (x * v)) syv = np.dot(w, (y * v)) suu = np.dot(w, (u * u)) svv = np.dot(w, (v * v)) suv = np.dot(w, (u * v)) m = np.array([[su, sv, sw], [suu, suv, su], [suv, svv, sv]], dtype=np.longdouble) a = np.array([sx, sxu, sxv], dtype=np.longdouble) b = np.array([sy, syu, syv], dtype=np.longdouble) try: inv_m = inv(m) except np.linalg.LinAlgError: raise SingularMatrixError('Singular matrix: suspected colinear points.') p = np.dot(inv_m, a) q = np.dot(inv_m, b) if (not (np.all(np.isfinite(p)) and np.all(np.isfinite(q)))): raise SingularMatrixError('Singular matrix: suspected colinear points.') fit = _build_fit(p, q, 'general') resids = ((xy - np.dot(uv, fit['matrix_ld'].T)) - fit['shift_ld']) fit['resids'] = resids.astype(np.double) _compute_stat(fit, residuals=resids, weights=w) return fit
3,775,803,470,868,652,500
Fits (non-iteratively and without sigma-clipping) displacement, rotation, scale, and skew transformations (i.e., the full ``2x2`` transformation matrix) between input lists of positions ``xy`` and ``uv``. When weights are provided, a weighted fit is performed. Parameter descriptions and return values are identical to those in `iter_linear_fit`, except the returned ``fit`` dictionary does not contain the following keys irrelevant to this function: ``'center'``, ``'fitmask'``, and ``'eff_nclip'``.
tweakwcs/linearfit.py
fit_general
jhunkeler/tweakwcs
python
def fit_general(xy, uv, wxy=None, wuv=None): " Fits (non-iteratively and without sigma-clipping) displacement,\n rotation, scale, and skew transformations (i.e., the full ``2x2``\n transformation matrix) between input lists of positions\n ``xy`` and ``uv``. When weights are provided, a weighted fit is performed.\n Parameter descriptions and return values are identical to those\n in `iter_linear_fit`, except the returned ``fit`` dictionary does not contain\n the following keys irrelevant to this function: ``'center'``,\n ``'fitmask'``, and ``'eff_nclip'``.\n\n " if (len(xy) < 3): raise NotEnoughPointsError('At least three points are required to find 6-parameter linear affine transformations.') x = np.array(xy[:, 0], dtype=np.longdouble) y = np.array(xy[:, 1], dtype=np.longdouble) u = np.array(uv[:, 0], dtype=np.longdouble) v = np.array(uv[:, 1], dtype=np.longdouble) if ((wxy is None) and (wuv is None)): w = None sw = float(x.size) sx = x.sum() sy = y.sum() su = u.sum() sv = v.sum() sxu = np.dot(x, u) syu = np.dot(y, u) sxv = np.dot(x, v) syv = np.dot(y, v) suu = np.dot(u, u) svv = np.dot(v, v) suv = np.dot(u, v) else: if (wxy is None): w = np.array(wuv, dtype=np.longdouble) elif (wuv is None): w = np.array(wxy, dtype=np.longdouble) else: wuv = np.array(wuv, dtype=np.longdouble) wxy = np.array(wxy, dtype=np.longdouble) m = np.logical_and((wuv > 0), (wxy > 0)) w = np.zeros_like(wuv) w[m] = ((wxy[m] * wuv[m]) / (wxy[m] + wuv[m])) if np.any((w < 0.0)): raise ValueError('Invalid weights: weights must be non-negative.') if (np.sum((w > 0)) < 3): raise ValueError("Not enough valid data for 'general' fit: too many weights are zero!") sw = np.sum(w, dtype=np.longdouble) sx = np.dot(w, x) sy = np.dot(w, y) su = np.dot(w, u) sv = np.dot(w, v) sxu = np.dot(w, (x * u)) syu = np.dot(w, (y * u)) sxv = np.dot(w, (x * v)) syv = np.dot(w, (y * v)) suu = np.dot(w, (u * u)) svv = np.dot(w, (v * v)) suv = np.dot(w, (u * v)) m = np.array([[su, sv, sw], [suu, suv, su], [suv, svv, sv]], dtype=np.longdouble) a = np.array([sx, sxu, sxv], dtype=np.longdouble) b = np.array([sy, syu, syv], dtype=np.longdouble) try: inv_m = inv(m) except np.linalg.LinAlgError: raise SingularMatrixError('Singular matrix: suspected collinear points.') p = np.dot(inv_m, a) q = np.dot(inv_m, b) if (not (np.all(np.isfinite(p)) and np.all(np.isfinite(q)))): raise SingularMatrixError('Singular matrix: suspected collinear points.') fit = _build_fit(p, q, 'general') resids = ((xy - np.dot(uv, fit['matrix_ld'].T)) - fit['shift_ld']) fit['resids'] = resids.astype(np.double) _compute_stat(fit, residuals=resids, weights=w) return fit
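A sketch of a weighted ``fit_general`` call, under the same import assumption; passing only ``wuv`` exercises the branch above where a single weight array is supplied.

```python
import numpy as np
from tweakwcs.linearfit import fit_general  # assumed import path

# A full affine transform, including a small skew (non-collinear points).
true_m = np.array([[1.02, 0.30], [-0.25, 0.97]])
true_shift = np.array([5.0, -3.0])
uv = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [2.0, 3.0]])
xy = np.dot(uv, true_m.T) + true_shift

fit = fit_general(xy, uv, wuv=np.array([1.0, 1.0, 2.0, 1.0]))
print(fit['matrix_ld'])  # ~ true_m (the fit is exact for noiseless data)
print(fit['shift_ld'])   # ~ true_shift
```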
def build_fit_matrix(rot, scale=1): '\n Create an affine transformation matrix (2x2) from the provided rotation\n angle(s) and scale(s):\n\n .. math::\n\n M = \\begin{bmatrix}\n s_x \\cos(\\theta_x) & s_y \\sin(\\theta_y) \\\\\n -s_x \\sin(\\theta_x) & s_y \\cos(\\theta_y)\n \\end{bmatrix}\n\n Parameters\n ----------\n rot: tuple, float, optional\n Rotation angle in degrees. Two values (one for each axis) can be\n provided as a tuple.\n\n scale: tuple, float, optional\n Scale of the linear transformation. Two values (one for each axis)\n can be provided as a tuple.\n\n Returns\n -------\n matrix: numpy.ndarray\n A 2x2 `numpy.ndarray` containing coefficients of a linear\n transformation.\n\n ' if hasattr(rot, '__iter__'): (rx, ry) = map(np.deg2rad, rot) else: rx = ry = np.deg2rad(float(rot)) if hasattr(scale, '__iter__'): (sx, sy) = scale else: sx = sy = float(scale) matrix = np.array([[(sx * np.cos(rx)), (sy * np.sin(ry))], [((- sx) * np.sin(rx)), (sy * np.cos(ry))]]) return matrix
-4,852,843,185,402,100,000
Create an affine transformation matrix (2x2) from the provided rotation angle(s) and scale(s): .. math:: M = \begin{bmatrix} s_x \cos(\theta_x) & s_y \sin(\theta_y) \\ -s_x \sin(\theta_x) & s_y \cos(\theta_y) \end{bmatrix} Parameters ---------- rot: tuple, float, optional Rotation angle in degrees. Two values (one for each axis) can be provided as a tuple. scale: tuple, float, optional Scale of the linear transformation. Two values (one for each axis) can be provided as a tuple. Returns ------- matrix: numpy.ndarray A 2x2 `numpy.ndarray` containing coefficients of a linear transformation.
tweakwcs/linearfit.py
build_fit_matrix
jhunkeler/tweakwcs
python
def build_fit_matrix(rot, scale=1): '\n Create an affine transformation matrix (2x2) from the provided rotation\n angle(s) and scale(s):\n\n .. math::\n\n M = \\begin{bmatrix}\n s_x \\cos(\\theta_x) & s_y \\sin(\\theta_y) \\\\\n -s_x \\sin(\\theta_x) & s_y \\cos(\\theta_y)\n \\end{bmatrix}\n\n Parameters\n ----------\n rot: tuple, float, optional\n Rotation angle in degrees. Two values (one for each axis) can be\n provided as a tuple.\n\n scale: tuple, float, optional\n Scale of the linear transformation. Two values (one for each axis)\n can be provided as a tuple.\n\n Returns\n -------\n matrix: numpy.ndarray\n A 2x2 `numpy.ndarray` containing coefficients of a linear\n transformation.\n\n ' if hasattr(rot, '__iter__'): (rx, ry) = map(np.deg2rad, rot) else: rx = ry = np.deg2rad(float(rot)) if hasattr(scale, '__iter__'): (sx, sy) = scale else: sx = sy = float(scale) matrix = np.array([[(sx * np.cos(rx)), (sy * np.sin(ry))], [((- sx) * np.sin(rx)), (sy * np.cos(ry))]]) return matrix
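A quick hand-check of the docstring formula, again assuming the ``tweakwcs.linearfit`` import path:

```python
import numpy as np
from tweakwcs.linearfit import build_fit_matrix  # assumed import path

# One angle/scale for both axes: cos(30°) ≈ 0.8660 and sin(30°) = 0.5,
# so the result is approximately [[1.7321, 1.0], [-1.0, 1.7321]].
print(build_fit_matrix(rot=30, scale=2))

# Per-axis rotations and scales are passed as tuples:
print(build_fit_matrix(rot=(30, 45), scale=(1, 2)))
```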
@pytest.fixture async def trio_kivy_app(request, nursery, _app_release_list, _app_release) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that\n explicitly uses trio as the backend for the async library.\n\n pytest-trio and trio must be installed, and ``trio_mode = true`` must be\n set in pytest.ini.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(nursery=nursery, async_lib='trio', **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
-5,318,611,033,535,439,000
Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that explicitly uses trio as the backend for the async library. pytest-trio and trio must be installed, and ``trio_mode = true`` must be set in pytest.ini.
pytest_kivy/plugin.py
trio_kivy_app
matham/pytest-kivy
python
@pytest.fixture async def trio_kivy_app(request, nursery, _app_release_list, _app_release) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that\n explicitly uses trio as the backend for the async library.\n\n pytest-trio and trio must be installed, and ``trio_mode = true`` must be\n set in pytest.ini.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(nursery=nursery, async_lib='trio', **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
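An illustrative test using this fixture might look like the sketch below; ``MyApp`` is hypothetical, and awaiting the yielded app with an app class mirrors the ``await app(app_cls)`` call inside the fixture body. The same pattern applies to the ``asyncio_kivy_app`` and ``async_kivy_app`` fixtures that follow.

```python
# Hypothetical test module; requires pytest-trio and ``trio_mode = true``.
from kivy.app import App
from kivy.uix.label import Label

class MyApp(App):  # hypothetical app under test
    def build(self):
        return Label(text='hello')

async def test_my_app(trio_kivy_app):
    await trio_kivy_app(MyApp)               # install and start the app
    trio_kivy_app.raise_startup_exception()  # surface any startup error
```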
@pytest.fixture async def asyncio_kivy_app(request, event_loop, _app_release_list, _app_release) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that\n explicitly uses asyncio as the backend for the async library.\n\n pytest-asyncio must be installed.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(event_loop=event_loop, async_lib='asyncio', **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
-1,731,140,748,102,840,000
Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that explicitly uses asyncio as the backend for the async library. pytest-asyncio must be installed.
pytest_kivy/plugin.py
asyncio_kivy_app
matham/pytest-kivy
python
@pytest.fixture async def asyncio_kivy_app(request, event_loop, _app_release_list, _app_release) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` that\n explicitly uses asyncio as the backend for the async library.\n\n pytest-asyncio must be installed.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(event_loop=event_loop, async_lib='asyncio', **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
@pytest.fixture async def async_kivy_app(request, _app_release_list, _app_release, _nursery, _event_loop) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` using\n trio or asyncio as the backend for the async library, depending on\n KIVY_EVENTLOOP.\n\n If using trio, pytest-trio and trio must be installed, and\n ``trio_mode = true`` must be set in pytest.ini. If using asyncio,\n pytest-asyncio must be installed.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(nursery=_nursery, event_loop=_event_loop, async_lib=_async_lib, **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
-737,944,087,339,065,900
Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` using trio or asyncio as the backend for the async library, depending on KIVY_EVENTLOOP. If using trio, pytest-trio and trio must be installed, and ``trio_mode = true`` must be set in pytest.ini. If using asyncio, pytest-asyncio must be installed.
pytest_kivy/plugin.py
async_kivy_app
matham/pytest-kivy
python
@pytest.fixture async def async_kivy_app(request, _app_release_list, _app_release, _nursery, _event_loop) -> AsyncUnitApp: 'Fixture yielding a :class:`~pytest_kivy.app.AsyncUnitApp` using\n trio or asyncio as the backend for the async library, depending on\n KIVY_EVENTLOOP.\n\n If using trio, pytest-trio and trio must be installed, and\n ``trio_mode = true`` must be set in pytest.ini. If using asyncio,\n pytest-asyncio must be installed.\n ' (cls, kwargs, app_cls, app_list) = _get_request_config(request, _app_release_list, _app_release) async with cls(nursery=_nursery, event_loop=_event_loop, async_lib=_async_lib, **kwargs) as app: if (app_list is not None): app_list.append((weakref.ref(app), weakref.ref(request))) if (app_cls is not None): (await app(app_cls)) app.raise_startup_exception() (yield app) (await app.wait_stop_app())
def test_purge_old_states(hass, hass_recorder): 'Test deleting old states.' hass = hass_recorder() _add_test_states(hass) with session_scope(hass=hass) as session: states = session.query(States) assert (states.count() == 6) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (states.count() == 4) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (states.count() == 2) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert finished assert (states.count() == 2)
3,161,758,145,078,438,000
Test deleting old states.
tests/components/recorder/test_purge.py
test_purge_old_states
AdmiralStipe/core
python
def test_purge_old_states(hass, hass_recorder): hass = hass_recorder() _add_test_states(hass) with session_scope(hass=hass) as session: states = session.query(States) assert (states.count() == 6) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (states.count() == 4) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (states.count() == 2) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert finished assert (states.count() == 2)
def test_purge_old_events(hass, hass_recorder): 'Test deleting old events.' hass = hass_recorder() _add_test_events(hass) with session_scope(hass=hass) as session: events = session.query(Events).filter(Events.event_type.like('EVENT_TEST%')) assert (events.count() == 6) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (events.count() == 4) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (events.count() == 2) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert finished assert (events.count() == 2)
3,234,367,589,813,852,700
Test deleting old events.
tests/components/recorder/test_purge.py
test_purge_old_events
AdmiralStipe/core
python
def test_purge_old_events(hass, hass_recorder): hass = hass_recorder() _add_test_events(hass) with session_scope(hass=hass) as session: events = session.query(Events).filter(Events.event_type.like('EVENT_TEST%')) assert (events.count() == 6) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (events.count() == 4) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert (not finished) assert (events.count() == 2) finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False) assert finished assert (events.count() == 2)
def test_purge_old_recorder_runs(hass, hass_recorder): 'Test deleting old recorder runs keeps current run.' hass = hass_recorder() _add_test_recorder_runs(hass) with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) assert (recorder_runs.count() == 7) finished = purge_old_data(hass.data[DATA_INSTANCE], 0, repack=False) assert finished assert (recorder_runs.count() == 1)
-3,228,106,912,315,216,000
Test deleting old recorder runs keeps current run.
tests/components/recorder/test_purge.py
test_purge_old_recorder_runs
AdmiralStipe/core
python
def test_purge_old_recorder_runs(hass, hass_recorder): hass = hass_recorder() _add_test_recorder_runs(hass) with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) assert (recorder_runs.count() == 7) finished = purge_old_data(hass.data[DATA_INSTANCE], 0, repack=False) assert finished assert (recorder_runs.count() == 1)
def test_purge_method(hass, hass_recorder): 'Test purge method.' hass = hass_recorder() service_data = {'keep_days': 4} _add_test_events(hass) _add_test_states(hass) _add_test_recorder_runs(hass) with session_scope(hass=hass) as session: states = session.query(States) assert (states.count() == 6) events = session.query(Events).filter(Events.event_type.like('EVENT_TEST%')) assert (events.count() == 6) recorder_runs = session.query(RecorderRuns) assert (recorder_runs.count() == 7) hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) hass.services.call('recorder', 'purge') hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (states.count() == 4) assert (events.count() == 4) hass.services.call('recorder', 'purge', service_data=service_data) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (states.count() == 2) assert (events.count() == 2) assert (recorder_runs.count() == 3) assert (not ('EVENT_TEST_PURGE' in (event.event_type for event in events.all()))) with patch('homeassistant.components.recorder.purge._LOGGER') as mock_logger: service_data['repack'] = True hass.services.call('recorder', 'purge', service_data=service_data) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (mock_logger.debug.mock_calls[5][1][0] == 'Vacuuming SQL DB to free space')
7,501,323,893,733,130,000
Test purge method.
tests/components/recorder/test_purge.py
test_purge_method
AdmiralStipe/core
python
def test_purge_method(hass, hass_recorder): hass = hass_recorder() service_data = {'keep_days': 4} _add_test_events(hass) _add_test_states(hass) _add_test_recorder_runs(hass) with session_scope(hass=hass) as session: states = session.query(States) assert (states.count() == 6) events = session.query(Events).filter(Events.event_type.like('EVENT_TEST%')) assert (events.count() == 6) recorder_runs = session.query(RecorderRuns) assert (recorder_runs.count() == 7) hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) hass.services.call('recorder', 'purge') hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (states.count() == 4) assert (events.count() == 4) hass.services.call('recorder', 'purge', service_data=service_data) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (states.count() == 2) assert (events.count() == 2) assert (recorder_runs.count() == 3) assert (not ('EVENT_TEST_PURGE' in (event.event_type for event in events.all()))) with patch('homeassistant.components.recorder.purge._LOGGER') as mock_logger: service_data['repack'] = True hass.services.call('recorder', 'purge', service_data=service_data) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) assert (mock_logger.debug.mock_calls[5][1][0] == 'Vacuuming SQL DB to free space')
def _add_test_states(hass): 'Add multiple states to the db for testing.' now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) attributes = {'test_attr': 5, 'test_attr_10': 'nice'} hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for event_id in range(6): if (event_id < 2): timestamp = eleven_days_ago state = 'autopurgeme' elif (event_id < 4): timestamp = five_days_ago state = 'purgeme' else: timestamp = now state = 'dontpurgeme' session.add(States(entity_id='test.recorder2', domain='sensor', state=state, attributes=json.dumps(attributes), last_changed=timestamp, last_updated=timestamp, created=timestamp, event_id=(event_id + 1000)))
6,691,055,191,001,606,000
Add multiple states to the db for testing.
tests/components/recorder/test_purge.py
_add_test_states
AdmiralStipe/core
python
def _add_test_states(hass): now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) attributes = {'test_attr': 5, 'test_attr_10': 'nice'} hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for event_id in range(6): if (event_id < 2): timestamp = eleven_days_ago state = 'autopurgeme' elif (event_id < 4): timestamp = five_days_ago state = 'purgeme' else: timestamp = now state = 'dontpurgeme' session.add(States(entity_id='test.recorder2', domain='sensor', state=state, attributes=json.dumps(attributes), last_changed=timestamp, last_updated=timestamp, created=timestamp, event_id=(event_id + 1000)))
def _add_test_events(hass): 'Add a few events for testing.' now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) event_data = {'test_attr': 5, 'test_attr_10': 'nice'} hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for event_id in range(6): if (event_id < 2): timestamp = eleven_days_ago event_type = 'EVENT_TEST_AUTOPURGE' elif (event_id < 4): timestamp = five_days_ago event_type = 'EVENT_TEST_PURGE' else: timestamp = now event_type = 'EVENT_TEST' session.add(Events(event_type=event_type, event_data=json.dumps(event_data), origin='LOCAL', created=timestamp, time_fired=timestamp))
-3,786,387,227,581,622,300
Add a few events for testing.
tests/components/recorder/test_purge.py
_add_test_events
AdmiralStipe/core
python
def _add_test_events(hass): now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) event_data = {'test_attr': 5, 'test_attr_10': 'nice'} hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for event_id in range(6): if (event_id < 2): timestamp = eleven_days_ago event_type = 'EVENT_TEST_AUTOPURGE' elif (event_id < 4): timestamp = five_days_ago event_type = 'EVENT_TEST_PURGE' else: timestamp = now event_type = 'EVENT_TEST' session.add(Events(event_type=event_type, event_data=json.dumps(event_data), origin='LOCAL', created=timestamp, time_fired=timestamp))
def _add_test_recorder_runs(hass): 'Add a few recorder_runs for testing.' now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for rec_id in range(6): if (rec_id < 2): timestamp = eleven_days_ago elif (rec_id < 4): timestamp = five_days_ago else: timestamp = now session.add(RecorderRuns(start=timestamp, created=dt_util.utcnow(), end=(timestamp + timedelta(days=1))))
-535,643,562,059,519,400
Add a few recorder_runs for testing.
tests/components/recorder/test_purge.py
_add_test_recorder_runs
AdmiralStipe/core
python
def _add_test_recorder_runs(hass): now = datetime.now() five_days_ago = (now - timedelta(days=5)) eleven_days_ago = (now - timedelta(days=11)) hass.block_till_done() hass.data[DATA_INSTANCE].block_till_done() wait_recording_done(hass) with recorder.session_scope(hass=hass) as session: for rec_id in range(6): if (rec_id < 2): timestamp = eleven_days_ago elif (rec_id < 4): timestamp = five_days_ago else: timestamp = now session.add(RecorderRuns(start=timestamp, created=dt_util.utcnow(), end=(timestamp + timedelta(days=1))))
@callback @bind_hass def entity_sources(hass: HomeAssistant) -> Dict[(str, Dict[(str, str)])]: 'Get the entity sources.' return hass.data.get(DATA_ENTITY_SOURCE, {})
-6,209,629,769,332,989,000
Get the entity sources.
homeassistant/helpers/entity.py
entity_sources
Leviosa-Shades/core
python
@callback @bind_hass def entity_sources(hass: HomeAssistant) -> Dict[(str, Dict[(str, str)])]: return hass.data.get(DATA_ENTITY_SOURCE, {})
def generate_entity_id(entity_id_format: str, name: Optional[str], current_ids: Optional[List[str]]=None, hass: Optional[HomeAssistant]=None) -> str: 'Generate a unique entity ID based on given entity IDs or used IDs.' return async_generate_entity_id(entity_id_format, name, current_ids, hass)
3,168,408,939,775,173,600
Generate a unique entity ID based on given entity IDs or used IDs.
homeassistant/helpers/entity.py
generate_entity_id
Leviosa-Shades/core
python
def generate_entity_id(entity_id_format: str, name: Optional[str], current_ids: Optional[List[str]]=None, hass: Optional[HomeAssistant]=None) -> str: return async_generate_entity_id(entity_id_format, name, current_ids, hass)
@callback def async_generate_entity_id(entity_id_format: str, name: Optional[str], current_ids: Optional[Iterable[str]]=None, hass: Optional[HomeAssistant]=None) -> str: 'Generate a unique entity ID based on given entity IDs or used IDs.' name = (name or DEVICE_DEFAULT_NAME).lower() preferred_string = entity_id_format.format(slugify(name)) if (current_ids is not None): return ensure_unique_string(preferred_string, current_ids) if (hass is None): raise ValueError('Missing required parameter current_ids or hass') test_string = preferred_string tries = 1 while (not hass.states.async_available(test_string)): tries += 1 test_string = f'{preferred_string}_{tries}' return test_string
5,117,351,041,053,576,000
Generate a unique entity ID based on given entity IDs or used IDs.
homeassistant/helpers/entity.py
async_generate_entity_id
Leviosa-Shades/core
python
@callback def async_generate_entity_id(entity_id_format: str, name: Optional[str], current_ids: Optional[Iterable[str]]=None, hass: Optional[HomeAssistant]=None) -> str: name = (name or DEVICE_DEFAULT_NAME).lower() preferred_string = entity_id_format.format(slugify(name)) if (current_ids is not None): return ensure_unique_string(preferred_string, current_ids) if (hass is None): raise ValueError('Missing required parameter current_ids or hass') test_string = preferred_string tries = 1 while (not hass.states.async_available(test_string)): tries += 1 test_string = f'{preferred_string}_{tries}' return test_string
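A worked example of the two code paths above: with ``current_ids`` the preferred ID is de-duplicated via ``ensure_unique_string`` (which, by Home Assistant convention, appends ``_2``, ``_3``, ...); with ``hass`` the state machine's availability check is used instead.

```python
from homeassistant.helpers.entity import async_generate_entity_id

# With an explicit list of taken IDs (no hass instance needed):
entity_id = async_generate_entity_id(
    'sensor.{}', 'Living Room', current_ids=['sensor.living_room'])
# The name is slugified to 'living_room', which collides with an existing ID,
# so the result is 'sensor.living_room_2'.
```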
@property def should_poll(self) -> bool: 'Return True if entity has to be polled for state.\n\n False if entity pushes its state to HA.\n ' return True
8,328,211,760,097,568,000
Return True if entity has to be polled for state. False if entity pushes its state to HA.
homeassistant/helpers/entity.py
should_poll
Leviosa-Shades/core
python
@property def should_poll(self) -> bool: 'Return True if entity has to be polled for state.\n\n False if entity pushes its state to HA.\n ' return True
@property def unique_id(self) -> Optional[str]: 'Return a unique ID.' return None
2,538,883,729,920,680,000
Return a unique ID.
homeassistant/helpers/entity.py
unique_id
Leviosa-Shades/core
python
@property def unique_id(self) -> Optional[str]: return None
@property def name(self) -> Optional[str]: 'Return the name of the entity.' return None
5,905,829,013,751,394,000
Return the name of the entity.
homeassistant/helpers/entity.py
name
Leviosa-Shades/core
python
@property def name(self) -> Optional[str]: return None
@property def state(self) -> StateType: 'Return the state of the entity.' return STATE_UNKNOWN
1,356,358,700,227,505,700
Return the state of the entity.
homeassistant/helpers/entity.py
state
Leviosa-Shades/core
python
@property def state(self) -> StateType: return STATE_UNKNOWN
@property def capability_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return the capability attributes.\n\n Attributes that explain the capabilities of an entity.\n\n Implemented by component base class. Convention for attribute names\n is lowercase snake_case.\n ' return None
4,158,045,754,724,678,700
Return the capability attributes. Attributes that explain the capabilities of an entity. Implemented by component base class. Convention for attribute names is lowercase snake_case.
homeassistant/helpers/entity.py
capability_attributes
Leviosa-Shades/core
python
@property def capability_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return the capability attributes.\n\n Attributes that explain the capabilities of an entity.\n\n Implemented by component base class. Convention for attribute names\n is lowercase snake_case.\n ' return None
@property def state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return the state attributes.\n\n Implemented by component base class, should not be extended by integrations.\n Convention for attribute names is lowercase snake_case.\n ' return None
7,571,243,939,475,639,000
Return the state attributes. Implemented by component base class, should not be extended by integrations. Convention for attribute names is lowercase snake_case.
homeassistant/helpers/entity.py
state_attributes
Leviosa-Shades/core
python
@property def state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return the state attributes.\n\n Implemented by component base class, should not be extended by integrations.\n Convention for attribute names is lowercase snake_case.\n ' return None
@property def device_state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return entity specific state attributes.\n\n This method is deprecated, platform classes should implement\n extra_state_attributes instead.\n ' return None
-3,700,846,581,224,470,000
Return entity specific state attributes. This method is deprecated, platform classes should implement extra_state_attributes instead.
homeassistant/helpers/entity.py
device_state_attributes
Leviosa-Shades/core
python
@property def device_state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return entity specific state attributes.\n\n This method is deprecated, platform classes should implement\n extra_state_attributes instead.\n ' return None
@property def extra_state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return entity specific state attributes.\n\n Implemented by platform classes. Convention for attribute names\n is lowercase snake_case.\n ' return None
4,480,478,286,833,381,000
Return entity specific state attributes. Implemented by platform classes. Convention for attribute names is lowercase snake_case.
homeassistant/helpers/entity.py
extra_state_attributes
Leviosa-Shades/core
python
@property def extra_state_attributes(self) -> Optional[Dict[(str, Any)]]: 'Return entity specific state attributes.\n\n Implemented by platform classes. Convention for attribute names\n is lowercase snake_case.\n ' return None
@property def device_info(self) -> Optional[Dict[(str, Any)]]: 'Return device specific attributes.\n\n Implemented by platform classes.\n ' return None
7,117,282,649,591,217,000
Return device specific attributes. Implemented by platform classes.
homeassistant/helpers/entity.py
device_info
Leviosa-Shades/core
python
@property def device_info(self) -> Optional[Dict[(str, Any)]]: 'Return device specific attributes.\n\n Implemented by platform classes.\n ' return None
@property def device_class(self) -> Optional[str]: 'Return the class of this device, from component DEVICE_CLASSES.' return None
-3,089,613,303,586,104,000
Return the class of this device, from component DEVICE_CLASSES.
homeassistant/helpers/entity.py
device_class
Leviosa-Shades/core
python
@property def device_class(self) -> Optional[str]: return None
@property def unit_of_measurement(self) -> Optional[str]: 'Return the unit of measurement of this entity, if any.' return None
4,082,615,635,480,496,000
Return the unit of measurement of this entity, if any.
homeassistant/helpers/entity.py
unit_of_measurement
Leviosa-Shades/core
python
@property def unit_of_measurement(self) -> Optional[str]: return None
@property def icon(self) -> Optional[str]: 'Return the icon to use in the frontend, if any.' return None
-5,121,040,458,521,612,000
Return the icon to use in the frontend, if any.
homeassistant/helpers/entity.py
icon
Leviosa-Shades/core
python
@property def icon(self) -> Optional[str]: return None
@property def entity_picture(self) -> Optional[str]: 'Return the entity picture to use in the frontend, if any.' return None
1,998,631,624,207,249,400
Return the entity picture to use in the frontend, if any.
homeassistant/helpers/entity.py
entity_picture
Leviosa-Shades/core
python
@property def entity_picture(self) -> Optional[str]: return None
@property def available(self) -> bool: 'Return True if entity is available.' return True
-3,548,180,598,612,628,500
Return True if entity is available.
homeassistant/helpers/entity.py
available
Leviosa-Shades/core
python
@property def available(self) -> bool: return True
@property def assumed_state(self) -> bool: 'Return True if unable to access real state of the entity.' return False
1,269,755,479,481,557,800
Return True if unable to access real state of the entity.
homeassistant/helpers/entity.py
assumed_state
Leviosa-Shades/core
python
@property def assumed_state(self) -> bool: return False
@property def force_update(self) -> bool: 'Return True if state updates should be forced.\n\n If True, a state change will be triggered anytime the state property is\n updated, not just when the value changes.\n ' return False
-1,228,016,239,530,733,600
Return True if state updates should be forced. If True, a state change will be triggered anytime the state property is updated, not just when the value changes.
homeassistant/helpers/entity.py
force_update
Leviosa-Shades/core
python
@property def force_update(self) -> bool: 'Return True if state updates should be forced.\n\n If True, a state change will be triggered anytime the state property is\n updated, not just when the value changes.\n ' return False
@property def supported_features(self) -> Optional[int]: 'Flag supported features.' return None
-6,673,123,116,929,117,000
Flag supported features.
homeassistant/helpers/entity.py
supported_features
Leviosa-Shades/core
python
@property def supported_features(self) -> Optional[int]: return None
@property def context_recent_time(self) -> timedelta: 'Time that a context is considered recent.' return timedelta(seconds=5)
4,102,250,335,939,575,300
Time that a context is considered recent.
homeassistant/helpers/entity.py
context_recent_time
Leviosa-Shades/core
python
@property def context_recent_time(self) -> timedelta: return timedelta(seconds=5)
@property def entity_registry_enabled_default(self) -> bool: 'Return if the entity should be enabled when first added to the entity registry.' return True
-6,825,246,018,270,339,000
Return if the entity should be enabled when first added to the entity registry.
homeassistant/helpers/entity.py
entity_registry_enabled_default
Leviosa-Shades/core
python
@property def entity_registry_enabled_default(self) -> bool: return True
@property def enabled(self) -> bool: 'Return if the entity is enabled in the entity registry.\n\n If an entity is not part of the registry, it cannot be disabled\n and will therefore always be enabled.\n ' return ((self.registry_entry is None) or (not self.registry_entry.disabled))
-4,212,866,319,016,292,000
Return if the entity is enabled in the entity registry. If an entity is not part of the registry, it cannot be disabled and will therefore always be enabled.
homeassistant/helpers/entity.py
enabled
Leviosa-Shades/core
python
@property def enabled(self) -> bool: 'Return if the entity is enabled in the entity registry.\n\n If an entity is not part of the registry, it cannot be disabled\n and will therefore always be enabled.\n ' return ((self.registry_entry is None) or (not self.registry_entry.disabled))
@callback def async_set_context(self, context: Context) -> None: 'Set the context the entity currently operates under.' self._context = context self._context_set = dt_util.utcnow()
7,498,294,261,140,403,000
Set the context the entity currently operates under.
homeassistant/helpers/entity.py
async_set_context
Leviosa-Shades/core
python
@callback def async_set_context(self, context: Context) -> None: self._context = context self._context_set = dt_util.utcnow()
async def async_update_ha_state(self, force_refresh: bool=False) -> None: 'Update Home Assistant with the current state of the entity.\n\n If force_refresh == True, the entity will be updated before the state is set.\n\n This method must be run in the event loop.\n ' if (self.hass is None): raise RuntimeError(f'Attribute hass is None for {self}') if (self.entity_id is None): raise NoEntitySpecifiedError(f'No entity id specified for entity {self.name}') if force_refresh: try: (await self.async_device_update()) except Exception: _LOGGER.exception('Update for %s fails', self.entity_id) return self._async_write_ha_state()
-2,629,200,371,007,674,000
Update Home Assistant with the current state of the entity. If force_refresh == True, the entity will be updated before the state is set. This method must be run in the event loop.
homeassistant/helpers/entity.py
async_update_ha_state
Leviosa-Shades/core
python
async def async_update_ha_state(self, force_refresh: bool=False) -> None: 'Update Home Assistant with the current state of the entity.\n\n If force_refresh == True, the entity will be updated before the state is set.\n\n This method must be run in the event loop.\n ' if (self.hass is None): raise RuntimeError(f'Attribute hass is None for {self}') if (self.entity_id is None): raise NoEntitySpecifiedError(f'No entity id specified for entity {self.name}') if force_refresh: try: (await self.async_device_update()) except Exception: _LOGGER.exception('Update for %s fails', self.entity_id) return self._async_write_ha_state()
@callback def async_write_ha_state(self) -> None: 'Write the state to the state machine.' if (self.hass is None): raise RuntimeError(f'Attribute hass is None for {self}') if (self.entity_id is None): raise NoEntitySpecifiedError(f'No entity id specified for entity {self.name}') self._async_write_ha_state()
-7,814,871,863,252,089,000
Write the state to the state machine.
homeassistant/helpers/entity.py
async_write_ha_state
Leviosa-Shades/core
python
@callback def async_write_ha_state(self) -> None: if (self.hass is None): raise RuntimeError(f'Attribute hass is None for {self}') if (self.entity_id is None): raise NoEntitySpecifiedError(f'No entity id specified for entity {self.name}') self._async_write_ha_state()
@callback def _async_write_ha_state(self) -> None: 'Write the state to the state machine.' if (self.registry_entry and self.registry_entry.disabled_by): if (not self._disabled_reported): self._disabled_reported = True assert (self.platform is not None) _LOGGER.warning('Entity %s is incorrectly being triggered for updates while it is disabled. This is a bug in the %s integration', self.entity_id, self.platform.platform_name) return start = timer() attr = self.capability_attributes attr = (dict(attr) if attr else {}) if (not self.available): state = STATE_UNAVAILABLE else: sstate = self.state state = (STATE_UNKNOWN if (sstate is None) else str(sstate)) attr.update((self.state_attributes or {})) extra_state_attributes = self.extra_state_attributes if (extra_state_attributes is None): extra_state_attributes = self.device_state_attributes attr.update((extra_state_attributes or {})) unit_of_measurement = self.unit_of_measurement if (unit_of_measurement is not None): attr[ATTR_UNIT_OF_MEASUREMENT] = unit_of_measurement entry = self.registry_entry name = ((entry and entry.name) or self.name) if (name is not None): attr[ATTR_FRIENDLY_NAME] = name icon = ((entry and entry.icon) or self.icon) if (icon is not None): attr[ATTR_ICON] = icon entity_picture = self.entity_picture if (entity_picture is not None): attr[ATTR_ENTITY_PICTURE] = entity_picture assumed_state = self.assumed_state if assumed_state: attr[ATTR_ASSUMED_STATE] = assumed_state supported_features = self.supported_features if (supported_features is not None): attr[ATTR_SUPPORTED_FEATURES] = supported_features device_class = self.device_class if (device_class is not None): attr[ATTR_DEVICE_CLASS] = str(device_class) end = timer() if (((end - start) > 0.4) and (not self._slow_reported)): self._slow_reported = True extra = '' if ('custom_components' in type(self).__module__): extra = 'Please report it to the custom component author.' else: extra = 'Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue' if self.platform: extra += f'+label%3A%22integration%3A+{self.platform.platform_name}%22' _LOGGER.warning('Updating state for %s (%s) took %.3f seconds. %s', self.entity_id, type(self), (end - start), extra) if (DATA_CUSTOMIZE in self.hass.data): attr.update(self.hass.data[DATA_CUSTOMIZE].get(self.entity_id)) try: unit_of_measure = attr.get(ATTR_UNIT_OF_MEASUREMENT) units = self.hass.config.units if ((unit_of_measure in (TEMP_CELSIUS, TEMP_FAHRENHEIT)) and (unit_of_measure != units.temperature_unit)): prec = (((len(state) - state.index('.')) - 1) if ('.' in state) else 0) temp = units.temperature(float(state), unit_of_measure) state = str((round(temp) if (prec == 0) else round(temp, prec))) attr[ATTR_UNIT_OF_MEASUREMENT] = units.temperature_unit except ValueError: pass if ((self._context_set is not None) and ((dt_util.utcnow() - self._context_set) > self.context_recent_time)): self._context = None self._context_set = None self.hass.states.async_set(self.entity_id, state, attr, self.force_update, self._context)
5,703,493,031,829,491,000
Write the state to the state machine.
homeassistant/helpers/entity.py
_async_write_ha_state
Leviosa-Shades/core
python
@callback def _async_write_ha_state(self) -> None: if (self.registry_entry and self.registry_entry.disabled_by): if (not self._disabled_reported): self._disabled_reported = True assert (self.platform is not None) _LOGGER.warning('Entity %s is incorrectly being triggered for updates while it is disabled. This is a bug in the %s integration', self.entity_id, self.platform.platform_name) return start = timer() attr = self.capability_attributes attr = (dict(attr) if attr else {}) if (not self.available): state = STATE_UNAVAILABLE else: sstate = self.state state = (STATE_UNKNOWN if (sstate is None) else str(sstate)) attr.update((self.state_attributes or {})) extra_state_attributes = self.extra_state_attributes if (extra_state_attributes is None): extra_state_attributes = self.device_state_attributes attr.update((extra_state_attributes or {})) unit_of_measurement = self.unit_of_measurement if (unit_of_measurement is not None): attr[ATTR_UNIT_OF_MEASUREMENT] = unit_of_measurement entry = self.registry_entry name = ((entry and entry.name) or self.name) if (name is not None): attr[ATTR_FRIENDLY_NAME] = name icon = ((entry and entry.icon) or self.icon) if (icon is not None): attr[ATTR_ICON] = icon entity_picture = self.entity_picture if (entity_picture is not None): attr[ATTR_ENTITY_PICTURE] = entity_picture assumed_state = self.assumed_state if assumed_state: attr[ATTR_ASSUMED_STATE] = assumed_state supported_features = self.supported_features if (supported_features is not None): attr[ATTR_SUPPORTED_FEATURES] = supported_features device_class = self.device_class if (device_class is not None): attr[ATTR_DEVICE_CLASS] = str(device_class) end = timer() if (((end - start) > 0.4) and (not self._slow_reported)): self._slow_reported = True extra = '' if ('custom_components' in type(self).__module__): extra = 'Please report it to the custom component author.' else: extra = 'Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue' if self.platform: extra += f'+label%3A%22integration%3A+{self.platform.platform_name}%22' _LOGGER.warning('Updating state for %s (%s) took %.3f seconds. %s', self.entity_id, type(self), (end - start), extra) if (DATA_CUSTOMIZE in self.hass.data): attr.update(self.hass.data[DATA_CUSTOMIZE].get(self.entity_id)) try: unit_of_measure = attr.get(ATTR_UNIT_OF_MEASUREMENT) units = self.hass.config.units if ((unit_of_measure in (TEMP_CELSIUS, TEMP_FAHRENHEIT)) and (unit_of_measure != units.temperature_unit)): prec = (((len(state) - state.index('.')) - 1) if ('.' in state) else 0) temp = units.temperature(float(state), unit_of_measure) state = str((round(temp) if (prec == 0) else round(temp, prec))) attr[ATTR_UNIT_OF_MEASUREMENT] = units.temperature_unit except ValueError: pass if ((self._context_set is not None) and ((dt_util.utcnow() - self._context_set) > self.context_recent_time)): self._context = None self._context_set = None self.hass.states.async_set(self.entity_id, state, attr, self.force_update, self._context)
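The temperature branch above preserves the number of decimal places in the original state string. A self-contained sketch of just that precision logic, with a hard-coded F-to-C formula standing in for ``units.temperature``:

```python
# The state string '72.5' has one decimal place, so the converted value
# keeps one decimal place as well.
state = '72.5'  # degrees Fahrenheit
prec = ((len(state) - state.index('.')) - 1) if ('.' in state) else 0
temp = (float(state) - 32.0) * 5.0 / 9.0  # stand-in for units.temperature(...)
state = str(round(temp) if (prec == 0) else round(temp, prec))
print(state)  # '22.5'
```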
def schedule_update_ha_state(self, force_refresh: bool=False) -> None: 'Schedule an update ha state change task.\n\n Scheduling the update avoids executor deadlocks.\n\n Entity state and attributes are read when the update ha state change\n task is executed.\n If state is changed more than once before the ha state change task has\n been executed, the intermediate state transitions will be missed.\n ' self.hass.add_job(self.async_update_ha_state(force_refresh))
4,930,532,841,608,269,000
Schedule an update ha state change task. Scheduling the update avoids executor deadlocks. Entity state and attributes are read when the update ha state change task is executed. If state is changed more than once before the ha state change task has been executed, the intermediate state transitions will be missed.
homeassistant/helpers/entity.py
schedule_update_ha_state
Leviosa-Shades/core
python
def schedule_update_ha_state(self, force_refresh: bool=False) -> None: 'Schedule an update ha state change task.\n\n Scheduling the update avoids executor deadlocks.\n\n Entity state and attributes are read when the update ha state change\n task is executed.\n If state is changed more than once before the ha state change task has\n been executed, the intermediate state transitions will be missed.\n ' self.hass.add_job(self.async_update_ha_state(force_refresh))
@callback def async_schedule_update_ha_state(self, force_refresh: bool=False) -> None: 'Schedule an update ha state change task.\n\n This method must be run in the event loop.\n Scheduling the update avoids executor deadlocks.\n\n Entity state and attributes are read when the update ha state change\n task is executed.\n If state is changed more than once before the ha state change task has\n been executed, the intermediate state transitions will be missed.\n ' if force_refresh: self.hass.async_create_task(self.async_update_ha_state(force_refresh)) else: self.async_write_ha_state()
221,238,759,961,931,260
Schedule an update ha state change task. This method must be run in the event loop. Scheduling the update avoids executor deadlocks. Entity state and attributes are read when the update ha state change task is executed. If state is changed more than once before the ha state change task has been executed, the intermediate state transitions will be missed.
homeassistant/helpers/entity.py
async_schedule_update_ha_state
Leviosa-Shades/core
python
@callback def async_schedule_update_ha_state(self, force_refresh: bool=False) -> None: 'Schedule an update ha state change task.\n\n This method must be run in the event loop.\n Scheduling the update avoids executor deadlocks.\n\n Entity state and attributes are read when the update ha state change\n task is executed.\n If state is changed more than once before the ha state change task has\n been executed, the intermediate state transitions will be missed.\n ' if force_refresh: self.hass.async_create_task(self.async_update_ha_state(force_refresh)) else: self.async_write_ha_state()
async def async_device_update(self, warning: bool=True) -> None: "Process 'update' or 'async_update' from entity.\n\n This method is a coroutine.\n " if self._update_staged: return self._update_staged = True if self.parallel_updates: (await self.parallel_updates.acquire()) try: if hasattr(self, 'async_update'): task = self.hass.async_create_task(self.async_update()) elif hasattr(self, 'update'): task = self.hass.async_add_executor_job(self.update) else: return if (not warning): (await task) return (finished, _) = (await asyncio.wait([task], timeout=SLOW_UPDATE_WARNING)) for done in finished: exc = done.exception() if exc: raise exc return _LOGGER.warning('Update of %s is taking over %s seconds', self.entity_id, SLOW_UPDATE_WARNING) (await task) finally: self._update_staged = False if self.parallel_updates: self.parallel_updates.release()
-3,065,611,561,549,544,400
Process 'update' or 'async_update' from entity. This method is a coroutine.
homeassistant/helpers/entity.py
async_device_update
Leviosa-Shades/core
python
async def async_device_update(self, warning: bool=True) -> None: "Process 'update' or 'async_update' from entity.\n\n This method is a coroutine.\n " if self._update_staged: return self._update_staged = True if self.parallel_updates: (await self.parallel_updates.acquire()) try: if hasattr(self, 'async_update'): task = self.hass.async_create_task(self.async_update()) elif hasattr(self, 'update'): task = self.hass.async_add_executor_job(self.update) else: return if (not warning): (await task) return (finished, _) = (await asyncio.wait([task], timeout=SLOW_UPDATE_WARNING)) for done in finished: exc = done.exception() if exc: raise exc return _LOGGER.warning('Update of %s is taking over %s seconds', self.entity_id, SLOW_UPDATE_WARNING) (await task) finally: self._update_staged = False if self.parallel_updates: self.parallel_updates.release()
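The slow-update warning in ``async_device_update`` is an instance of a general pattern: wait on a task with a timeout, warn if it is still running, then keep awaiting it. A standalone sketch of that pattern (not Home Assistant API):

```python
import asyncio

SLOW_UPDATE_WARNING = 10  # seconds, mirroring the constant used above

async def await_with_slow_warning(coro, name='task'):
    task = asyncio.ensure_future(coro)
    # Wait up to the warning threshold; `finished` is non-empty if done.
    finished, _ = await asyncio.wait([task], timeout=SLOW_UPDATE_WARNING)
    if finished:
        return task.result()  # re-raises the task's exception, if any
    print(f'{name} is taking over {SLOW_UPDATE_WARNING} seconds')
    return await task  # keep waiting after the warning
```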
@callback def async_on_remove(self, func: CALLBACK_TYPE) -> None: 'Add a function to call when entity removed.' if (self._on_remove is None): self._on_remove = [] self._on_remove.append(func)
2,298,680,603,316,999,700
Add a function to call when entity removed.
homeassistant/helpers/entity.py
async_on_remove
Leviosa-Shades/core
python
@callback def async_on_remove(self, func: CALLBACK_TYPE) -> None: if (self._on_remove is None): self._on_remove = [] self._on_remove.append(func)
async def async_removed_from_registry(self) -> None: 'Run when entity has been removed from entity registry.\n\n To be extended by integrations.\n '
-4,028,870,512,133,826,600
Run when entity has been removed from entity registry. To be extended by integrations.
homeassistant/helpers/entity.py
async_removed_from_registry
Leviosa-Shades/core
python
async def async_removed_from_registry(self) -> None: 'Run when entity has been removed from entity registry.\n\n To be extended by integrations.\n '
@callback def add_to_platform_start(self, hass: HomeAssistant, platform: EntityPlatform, parallel_updates: Optional[asyncio.Semaphore]) -> None: 'Start adding an entity to a platform.' if self._added: raise HomeAssistantError(f'Entity {self.entity_id} cannot be added a second time to an entity platform') self.hass = hass self.platform = platform self.parallel_updates = parallel_updates self._added = True
-1,761,300,229,261,884,400
Start adding an entity to a platform.
homeassistant/helpers/entity.py
add_to_platform_start
Leviosa-Shades/core
python
@callback def add_to_platform_start(self, hass: HomeAssistant, platform: EntityPlatform, parallel_updates: Optional[asyncio.Semaphore]) -> None: if self._added: raise HomeAssistantError(f'Entity {self.entity_id} cannot be added a second time to an entity platform') self.hass = hass self.platform = platform self.parallel_updates = parallel_updates self._added = True
@callback def add_to_platform_abort(self) -> None: 'Abort adding an entity to a platform.' self.hass = None self.platform = None self.parallel_updates = None self._added = False
-2,633,042,765,756,663,300
Abort adding an entity to a platform.
homeassistant/helpers/entity.py
add_to_platform_abort
Leviosa-Shades/core
python
@callback def add_to_platform_abort(self) -> None: self.hass = None self.platform = None self.parallel_updates = None self._added = False
async def add_to_platform_finish(self) -> None: 'Finish adding an entity to a platform.' (await self.async_internal_added_to_hass()) (await self.async_added_to_hass()) self.async_write_ha_state()
-2,320,288,375,294,386,000
Finish adding an entity to a platform.
homeassistant/helpers/entity.py
add_to_platform_finish
Leviosa-Shades/core
python
async def add_to_platform_finish(self) -> None: (await self.async_internal_added_to_hass()) (await self.async_added_to_hass()) self.async_write_ha_state()
async def async_remove(self, *, force_remove: bool=False) -> None: "Remove entity from Home Assistant.\n\n If the entity has a non-disabled entry in the entity registry,\n the entity's state will be set to unavailable, in the same way\n as when the entity registry is loaded.\n\n If the entity doesn't have a non-disabled entry in the entity registry,\n or if force_remove=True, its state will be removed.\n " if (self.platform and (not self._added)): raise HomeAssistantError(f'Entity {self.entity_id} async_remove called twice') self._added = False if (self._on_remove is not None): while self._on_remove: self._on_remove.pop()() (await self.async_internal_will_remove_from_hass()) (await self.async_will_remove_from_hass()) if ((not force_remove) and self.registry_entry and (not self.registry_entry.disabled)): self.registry_entry.write_unavailable_state(self.hass) else: self.hass.states.async_remove(self.entity_id, context=self._context)
149,070,520,219,460,640
Remove entity from Home Assistant. If the entity has a non-disabled entry in the entity registry, the entity's state will be set to unavailable, in the same way as when the entity registry is loaded. If the entity doesn't have a non-disabled entry in the entity registry, or if force_remove=True, its state will be removed.
homeassistant/helpers/entity.py
async_remove
Leviosa-Shades/core
python
async def async_remove(self, *, force_remove: bool=False) -> None: "Remove entity from Home Assistant.\n\n If the entity has a non-disabled entry in the entity registry,\n the entity's state will be set to unavailable, in the same way\n as when the entity registry is loaded.\n\n If the entity doesn't have a non-disabled entry in the entity registry,\n or if force_remove=True, its state will be removed.\n " if (self.platform and (not self._added)): raise HomeAssistantError(f'Entity {self.entity_id} async_remove called twice') self._added = False if (self._on_remove is not None): while self._on_remove: self._on_remove.pop()() (await self.async_internal_will_remove_from_hass()) (await self.async_will_remove_from_hass()) if ((not force_remove) and self.registry_entry and (not self.registry_entry.disabled)): self.registry_entry.write_unavailable_state(self.hass) else: self.hass.states.async_remove(self.entity_id, context=self._context)
async def async_added_to_hass(self) -> None: 'Run when entity is about to be added to hass.\n\n To be extended by integrations.\n '
-5,585,018,881,033,491,000
Run when entity is about to be added to hass. To be extended by integrations.
homeassistant/helpers/entity.py
async_added_to_hass
Leviosa-Shades/core
python
async def async_added_to_hass(self) -> None: 'Run when entity is about to be added to hass.\n\n To be extended by integrations.\n '
async def async_will_remove_from_hass(self) -> None: 'Run when entity will be removed from hass.\n\n To be extended by integrations.\n '
-8,817,618,303,532,068,000
Run when entity will be removed from hass. To be extended by integrations.
homeassistant/helpers/entity.py
async_will_remove_from_hass
Leviosa-Shades/core
python
async def async_will_remove_from_hass(self) -> None: 'Run when entity will be removed from hass.\n\n To be extended by integrations.\n '
async def async_internal_added_to_hass(self) -> None: 'Run when entity is about to be added to hass.\n\n Not to be extended by integrations.\n ' if self.platform: info = {'domain': self.platform.platform_name} if self.platform.config_entry: info['source'] = SOURCE_CONFIG_ENTRY info['config_entry'] = self.platform.config_entry.entry_id else: info['source'] = SOURCE_PLATFORM_CONFIG self.hass.data.setdefault(DATA_ENTITY_SOURCE, {})[self.entity_id] = info if (self.registry_entry is not None): assert (not self.registry_entry.disabled_by), f"Entity {self.entity_id} is being added while it's disabled" self.async_on_remove(async_track_entity_registry_updated_event(self.hass, self.entity_id, self._async_registry_updated))
7,854,938,287,459,399,000
Run when entity is about to be added to hass. Not to be extended by integrations.
homeassistant/helpers/entity.py
async_internal_added_to_hass
Leviosa-Shades/core
python
async def async_internal_added_to_hass(self) -> None: 'Run when entity is about to be added to hass.\n\n Not to be extended by integrations.\n ' if self.platform: info = {'domain': self.platform.platform_name} if self.platform.config_entry: info['source'] = SOURCE_CONFIG_ENTRY info['config_entry'] = self.platform.config_entry.entry_id else: info['source'] = SOURCE_PLATFORM_CONFIG self.hass.data.setdefault(DATA_ENTITY_SOURCE, {})[self.entity_id] = info if (self.registry_entry is not None): assert (not self.registry_entry.disabled_by), f"Entity {self.entity_id} is being added while it's disabled" self.async_on_remove(async_track_entity_registry_updated_event(self.hass, self.entity_id, self._async_registry_updated))
async def async_internal_will_remove_from_hass(self) -> None: 'Run when entity will be removed from hass.\n\n Not to be extended by integrations.\n ' if self.platform: self.hass.data[DATA_ENTITY_SOURCE].pop(self.entity_id)
1,536,591,258,824,101,000
Run when entity will be removed from hass. Not to be extended by integrations.
homeassistant/helpers/entity.py
async_internal_will_remove_from_hass
Leviosa-Shades/core
python
async def async_internal_will_remove_from_hass(self) -> None: 'Run when entity will be removed from hass.\n\n Not to be extended by integrations.\n ' if self.platform: self.hass.data[DATA_ENTITY_SOURCE].pop(self.entity_id)
async def _async_registry_updated(self, event: Event) -> None: 'Handle entity registry update.' data = event.data if (data['action'] == 'remove'): (await self.async_removed_from_registry()) self.registry_entry = None (await self.async_remove()) if (data['action'] != 'update'): return ent_reg = (await self.hass.helpers.entity_registry.async_get_registry()) old = self.registry_entry self.registry_entry = ent_reg.async_get(data['entity_id']) assert (self.registry_entry is not None) if self.registry_entry.disabled: (await self.async_remove()) return assert (old is not None) if (self.registry_entry.entity_id == old.entity_id): self.async_write_ha_state() return (await self.async_remove(force_remove=True)) assert (self.platform is not None) self.entity_id = self.registry_entry.entity_id (await self.platform.async_add_entities([self]))
8,834,064,360,167,315,000
Handle entity registry update.
homeassistant/helpers/entity.py
_async_registry_updated
Leviosa-Shades/core
python
async def _async_registry_updated(self, event: Event) -> None: data = event.data if (data['action'] == 'remove'): (await self.async_removed_from_registry()) self.registry_entry = None (await self.async_remove()) if (data['action'] != 'update'): return ent_reg = (await self.hass.helpers.entity_registry.async_get_registry()) old = self.registry_entry self.registry_entry = ent_reg.async_get(data['entity_id']) assert (self.registry_entry is not None) if self.registry_entry.disabled: (await self.async_remove()) return assert (old is not None) if (self.registry_entry.entity_id == old.entity_id): self.async_write_ha_state() return (await self.async_remove(force_remove=True)) assert (self.platform is not None) self.entity_id = self.registry_entry.entity_id (await self.platform.async_add_entities([self]))
def __eq__(self, other: Any) -> bool: 'Return the comparison.' if (not isinstance(other, self.__class__)): return False if ((self.unique_id is None) or (other.unique_id is None)): return False if ((self.platform is not None) or (other.platform is not None)): if ((self.platform is None) or (other.platform is None)): return False if (self.platform.platform != other.platform.platform): return False return (self.unique_id == other.unique_id)
-4,800,598,507,128,546,000
Return the comparison.
homeassistant/helpers/entity.py
__eq__
Leviosa-Shades/core
python
def __eq__(self, other: Any) -> bool: if (not isinstance(other, self.__class__)): return False if ((self.unique_id is None) or (other.unique_id is None)): return False if ((self.platform is not None) or (other.platform is not None)): if ((self.platform is None) or (other.platform is None)): return False if (self.platform.platform != other.platform.platform): return False return (self.unique_id == other.unique_id)
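The comparison rules above (no equality without a unique_id; platforms, when present on either side, must both be present and match) can be exercised without Home Assistant itself. Below is a minimal sketch that re-implements the same logic on a stand-in class; StubEntity and its attributes are illustrative assumptions, not part of the source:

# Hypothetical stand-in mirroring Entity.__eq__'s rules; not the real class.
class StubEntity:
    def __init__(self, unique_id, platform=None):
        self.unique_id = unique_id
        self.platform = platform  # assumed to expose a .platform string, as in the source

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        if self.unique_id is None or other.unique_id is None:
            return False  # entities without unique IDs never compare equal
        if self.platform is not None or other.platform is not None:
            if self.platform is None or other.platform is None:
                return False
            if self.platform.platform != other.platform.platform:
                return False
        return self.unique_id == other.unique_id

assert StubEntity('abc123') == StubEntity('abc123')
assert StubEntity(None) != StubEntity(None)  # no unique_id on either side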
def __repr__(self) -> str: 'Return the representation.' return f'<Entity {self.name}: {self.state}>'
5,428,986,823,689,221,000
Return the representation.
homeassistant/helpers/entity.py
__repr__
Leviosa-Shades/core
python
def __repr__(self) -> str: return f'<Entity {self.name}: {self.state}>'
async def async_request_call(self, coro: Awaitable) -> None: 'Process request batched.' if self.parallel_updates: (await self.parallel_updates.acquire()) try: (await coro) finally: if self.parallel_updates: self.parallel_updates.release()
3,011,644,997,447,190,500
Process request batched.
homeassistant/helpers/entity.py
async_request_call
Leviosa-Shades/core
python
async def async_request_call(self, coro: Awaitable) -> None: if self.parallel_updates: (await self.parallel_updates.acquire()) try: (await coro) finally: if self.parallel_updates: self.parallel_updates.release()
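async_request_call simply gates a coroutine behind an optional semaphore so that at most N update calls run concurrently. A self-contained sketch of the same pattern in plain asyncio; the limit of 2 and the function names are assumptions for illustration:

import asyncio

parallel_updates = asyncio.Semaphore(2)  # assumed limit; HA derives this per platform

async def request_call(coro):
    # Mirrors async_request_call: acquire before running, always release after.
    await parallel_updates.acquire()
    try:
        await coro
    finally:
        parallel_updates.release()

async def main():
    async def work(n):
        await asyncio.sleep(0.1)
        print('done', n)
    # Only two work() coroutines execute at any one time.
    await asyncio.gather(*(request_call(work(i)) for i in range(5)))

asyncio.run(main())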
@property def state(self) -> str: 'Return the state.' return (STATE_ON if self.is_on else STATE_OFF)
-3,434,155,257,435,733,000
Return the state.
homeassistant/helpers/entity.py
state
Leviosa-Shades/core
python
@property def state(self) -> str: return (STATE_ON if self.is_on else STATE_OFF)
@property def is_on(self) -> bool: 'Return True if entity is on.' raise NotImplementedError()
8,799,682,652,869,394,000
Return True if entity is on.
homeassistant/helpers/entity.py
is_on
Leviosa-Shades/core
python
@property def is_on(self) -> bool: raise NotImplementedError()
def turn_on(self, **kwargs: Any) -> None: 'Turn the entity on.' raise NotImplementedError()
2,322,256,188,001,028,600
Turn the entity on.
homeassistant/helpers/entity.py
turn_on
Leviosa-Shades/core
python
def turn_on(self, **kwargs: Any) -> None: raise NotImplementedError()
async def async_turn_on(self, **kwargs: Any) -> None: 'Turn the entity on.' (await self.hass.async_add_executor_job(ft.partial(self.turn_on, **kwargs)))
-8,243,134,644,201,260,000
Turn the entity on.
homeassistant/helpers/entity.py
async_turn_on
Leviosa-Shades/core
python
async def async_turn_on(self, **kwargs: Any) -> None: (await self.hass.async_add_executor_job(ft.partial(self.turn_on, **kwargs)))
def turn_off(self, **kwargs: Any) -> None: 'Turn the entity off.' raise NotImplementedError()
-3,799,708,282,002,590,700
Turn the entity off.
homeassistant/helpers/entity.py
turn_off
Leviosa-Shades/core
python
def turn_off(self, **kwargs: Any) -> None: raise NotImplementedError()
async def async_turn_off(self, **kwargs: Any) -> None: 'Turn the entity off.' (await self.hass.async_add_executor_job(ft.partial(self.turn_off, **kwargs)))
-6,121,593,494,274,528,000
Turn the entity off.
homeassistant/helpers/entity.py
async_turn_off
Leviosa-Shades/core
python
async def async_turn_off(self, **kwargs: Any) -> None: (await self.hass.async_add_executor_job(ft.partial(self.turn_off, **kwargs)))
def toggle(self, **kwargs: Any) -> None: 'Toggle the entity.' if self.is_on: self.turn_off(**kwargs) else: self.turn_on(**kwargs)
-6,398,084,867,639,560,000
Toggle the entity.
homeassistant/helpers/entity.py
toggle
Leviosa-Shades/core
python
def toggle(self, **kwargs: Any) -> None: if self.is_on: self.turn_off(**kwargs) else: self.turn_on(**kwargs)
async def async_toggle(self, **kwargs: Any) -> None: 'Toggle the entity.' if self.is_on: (await self.async_turn_off(**kwargs)) else: (await self.async_turn_on(**kwargs))
6,141,564,757,956,466,000
Toggle the entity.
homeassistant/helpers/entity.py
async_toggle
Leviosa-Shades/core
python
async def async_toggle(self, **kwargs: Any) -> None: if self.is_on: (await self.async_turn_off(**kwargs)) else: (await self.async_turn_on(**kwargs))
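Taken together, the state/is_on/turn_on/turn_off/toggle records spell out the ToggleEntity contract: a subclass supplies is_on plus the two turn methods, and state, toggle, and the async_* executor bridges come for free. A minimal sketch of a conforming subclass; DemoSwitch is a hypothetical name, and running this requires a Home Assistant environment:

from homeassistant.helpers.entity import ToggleEntity

class DemoSwitch(ToggleEntity):
    # Hypothetical minimal implementation; real integrations set more attributes.
    def __init__(self):
        self._on = False

    @property
    def is_on(self) -> bool:
        return self._on

    def turn_on(self, **kwargs) -> None:
        self._on = True

    def turn_off(self, **kwargs) -> None:
        self._on = False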
def get_image_size(fname): 'Determine the image type of fhandle and return its size.\n from draco' with open(fname, 'rb') as fhandle: head = fhandle.read(24) if (len(head) != 24): return if (imghdr.what(fname) == 'png'): check = struct.unpack('>i', head[4:8])[0] if (check != 218765834): return (width, height) = struct.unpack('>ii', head[16:24]) elif (imghdr.what(fname) == 'gif'): (width, height) = struct.unpack('<HH', head[6:10]) elif (imghdr.what(fname) == 'jpeg'): try: fhandle.seek(0) size = 2 ftype = 0 while (not (192 <= ftype <= 207)): fhandle.seek(size, 1) byte = fhandle.read(1) while (ord(byte) == 255): byte = fhandle.read(1) ftype = ord(byte) size = (struct.unpack('>H', fhandle.read(2))[0] - 2) fhandle.seek(1, 1) (height, width) = struct.unpack('>HH', fhandle.read(4)) except Exception: return else: return return (width, height)
3,716,537,104,232,310,000
Determine the image type of fhandle and return its size. from draco
mdpdfbook/mdpdf/image.py
get_image_size
geraked/mdpdfbook
python
def get_image_size(fname): 'Determine the image type of fhandle and return its size.\n from draco' with open(fname, 'rb') as fhandle: head = fhandle.read(24) if (len(head) != 24): return if (imghdr.what(fname) == 'png'): check = struct.unpack('>i', head[4:8])[0] if (check != 218765834): return (width, height) = struct.unpack('>ii', head[16:24]) elif (imghdr.what(fname) == 'gif'): (width, height) = struct.unpack('<HH', head[6:10]) elif (imghdr.what(fname) == 'jpeg'): try: fhandle.seek(0) size = 2 ftype = 0 while (not (192 <= ftype <= 207)): fhandle.seek(size, 1) byte = fhandle.read(1) while (ord(byte) == 255): byte = fhandle.read(1) ftype = ord(byte) size = (struct.unpack('>H', fhandle.read(2))[0] - 2) fhandle.seek(1, 1) (height, width) = struct.unpack('>HH', fhandle.read(4)) except Exception: return else: return return (width, height)
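A short usage sketch for get_image_size, assuming the function above (with its struct/imghdr imports) is in scope; 'photo.png' is a placeholder path, and note the function returns None instead of raising on unrecognized or truncated headers:

size = get_image_size('photo.png')  # placeholder file name; PNG, GIF, or JPEG
if size is None:
    print('unrecognized or truncated image header')
else:
    width, height = size
    print(f'{width}x{height}')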
@property def path_url(self): 'Build the path URL to use.' url = [] p = urlsplit(self.url) path = p.path if (not path): path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url)
1,763,220,040,231,475,700
Build the path URL to use.
src/oci/_vendor/requests/models.py
path_url
LaudateCorpus1/oci-python-sdk
python
@property def path_url(self): url = [] p = urlsplit(self.url) path = p.path if (not path): path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url)
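The same path-plus-query reconstruction can be reproduced with the standard library alone; a sketch with an illustrative URL:

from urllib.parse import urlsplit

p = urlsplit('https://example.com/a/b?x=1#frag')
path_url = (p.path or '/') + (('?' + p.query) if p.query else '')
print(path_url)  # -> /a/b?x=1 (authority and fragment dropped, path defaults to '/')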
@staticmethod def _encode_params(data): 'Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n ' if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for (k, vs) in to_key_val_list(data): if (isinstance(vs, basestring) or (not hasattr(vs, '__iter__'))): vs = [vs] for v in vs: if (v is not None): result.append(((k.encode('utf-8') if isinstance(k, str) else k), (v.encode('utf-8') if isinstance(v, str) else v))) return urlencode(result, doseq=True) else: return data
-8,298,462,985,702,277,000
Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict.
src/oci/_vendor/requests/models.py
_encode_params
LaudateCorpus1/oci-python-sdk
python
@staticmethod def _encode_params(data): 'Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n ' if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for (k, vs) in to_key_val_list(data): if (isinstance(vs, basestring) or (not hasattr(vs, '__iter__'))): vs = [vs] for v in vs: if (v is not None): result.append(((k.encode('utf-8') if isinstance(k, str) else k), (v.encode('utf-8') if isinstance(v, str) else v))) return urlencode(result, doseq=True) else: return data
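A sketch of the ordering behavior the docstring describes. _encode_params is a private static helper, so calling it directly is purely illustrative; the values are assumptions:

from requests.models import PreparedRequest  # the vendored oci copy behaves the same

print(PreparedRequest._encode_params([('b', '2'), ('a', '1')]))  # -> b=2&a=1 (order kept)
print(PreparedRequest._encode_params({'tag': ['x', 'y']}))       # -> tag=x&tag=y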
@staticmethod def _encode_files(files, data): 'Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n ' if (not files): raise ValueError('Files must be provided.') elif isinstance(data, basestring): raise ValueError('Data must not be a string.') new_fields = [] fields = to_key_val_list((data or {})) files = to_key_val_list((files or {})) for (field, val) in fields: if (isinstance(val, basestring) or (not hasattr(val, '__iter__'))): val = [val] for v in val: if (v is not None): if (not isinstance(v, bytes)): v = str(v) new_fields.append(((field.decode('utf-8') if isinstance(field, bytes) else field), (v.encode('utf-8') if isinstance(v, str) else v))) for (k, v) in files: ft = None fh = None if isinstance(v, (tuple, list)): if (len(v) == 2): (fn, fp) = v elif (len(v) == 3): (fn, fp, ft) = v else: (fn, fp, ft, fh) = v else: fn = (guess_filename(v) or k) fp = v if isinstance(fp, (str, bytes, bytearray)): fdata = fp elif hasattr(fp, 'read'): fdata = fp.read() elif (fp is None): continue else: fdata = fp rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) (body, content_type) = encode_multipart_formdata(new_fields) return (body, content_type)
-5,009,398,987,837,660,000
Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers).
src/oci/_vendor/requests/models.py
_encode_files
LaudateCorpus1/oci-python-sdk
python
@staticmethod def _encode_files(files, data): 'Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n ' if (not files): raise ValueError('Files must be provided.') elif isinstance(data, basestring): raise ValueError('Data must not be a string.') new_fields = [] fields = to_key_val_list((data or {})) files = to_key_val_list((files or {})) for (field, val) in fields: if (isinstance(val, basestring) or (not hasattr(val, '__iter__'))): val = [val] for v in val: if (v is not None): if (not isinstance(v, bytes)): v = str(v) new_fields.append(((field.decode('utf-8') if isinstance(field, bytes) else field), (v.encode('utf-8') if isinstance(v, str) else v))) for (k, v) in files: ft = None fh = None if isinstance(v, (tuple, list)): if (len(v) == 2): (fn, fp) = v elif (len(v) == 3): (fn, fp, ft) = v else: (fn, fp, ft, fh) = v else: fn = (guess_filename(v) or k) fp = v if isinstance(fp, (str, bytes, bytearray)): fdata = fp elif hasattr(fp, 'read'): fdata = fp.read() elif (fp is None): continue else: fdata = fp rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) (body, content_type) = encode_multipart_formdata(new_fields) return (body, content_type)
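The 2-/3-/4-tuple forms are easiest to see from the public requests entry point, which routes through this helper; the endpoint and file contents below are placeholders:

import requests

files = {
    'simple': ('report.txt', 'plain text body'),                 # (filename, data)
    'typed': ('report.csv', 'a,b\n1,2\n', 'text/csv'),           # adds a content type
    'full': ('blob.bin', b'\x00\x01', 'application/octet-stream',
             {'X-Meta': 'demo'}),                                # adds custom part headers
}
resp = requests.post('https://httpbin.org/post', files=files)  # placeholder endpoint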
def register_hook(self, event, hook): 'Properly register a hook.' if (event not in self.hooks): raise ValueError(('Unsupported event specified, with event name "%s"' % event)) if isinstance(hook, Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend((h for h in hook if isinstance(h, Callable)))
-4,681,226,212,105,443,000
Properly register a hook.
src/oci/_vendor/requests/models.py
register_hook
LaudateCorpus1/oci-python-sdk
python
def register_hook(self, event, hook): if (event not in self.hooks): raise ValueError(('Unsupported event specified, with event name "%s"' % event)) if isinstance(hook, Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend((h for h in hook if isinstance(h, Callable)))
def deregister_hook(self, event, hook): 'Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n ' try: self.hooks[event].remove(hook) return True except ValueError: return False
7,969,727,305,059,734,000
Deregister a previously registered hook. Returns True if the hook existed, False if not.
src/oci/_vendor/requests/models.py
deregister_hook
LaudateCorpus1/oci-python-sdk
python
def deregister_hook(self, event, hook): 'Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n ' try: self.hooks[event].remove(hook) return True except ValueError: return False
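register_hook and deregister_hook back the hooks= argument on requests calls; 'response' is the only event requests dispatches by default. A sketch with a placeholder URL:

import requests

def log_url(response, *args, **kwargs):
    # A response hook receives the Response; returning None leaves it unchanged.
    print('fetched', response.url)

resp = requests.get('https://httpbin.org/get', hooks={'response': [log_url]})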
def prepare(self): 'Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.' p = PreparedRequest() p.prepare(method=self.method, url=self.url, headers=self.headers, files=self.files, data=self.data, json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, hooks=self.hooks) return p
-3,505,980,801,294,424,600
Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.
src/oci/_vendor/requests/models.py
prepare
LaudateCorpus1/oci-python-sdk
python
def prepare(self): p = PreparedRequest() p.prepare(method=self.method, url=self.url, headers=self.headers, files=self.files, data=self.data, json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, hooks=self.hooks) return p
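This Request-to-PreparedRequest hand-off is the documented way to inspect or tweak a request before sending it; a sketch with a placeholder endpoint:

import requests

req = requests.Request('GET', 'https://httpbin.org/get', params={'q': 'demo'})
prepped = req.prepare()   # builds method, URL, headers, and body
print(prepped.url)        # the params are already merged into the query string

with requests.Session() as s:
    resp = s.send(prepped)  # a prepared request is sent as-is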
def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): 'Prepares the entire request with the given parameters.' self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) self.prepare_hooks(hooks)
-8,748,719,649,105,786,000
Prepares the entire request with the given parameters.
src/oci/_vendor/requests/models.py
prepare
LaudateCorpus1/oci-python-sdk
python
def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) self.prepare_hooks(hooks)
def prepare_method(self, method): 'Prepares the given HTTP method.' self.method = method if (self.method is not None): self.method = to_native_string(self.method.upper())
-8,811,068,185,766,678,000
Prepares the given HTTP method.
src/oci/_vendor/requests/models.py
prepare_method
LaudateCorpus1/oci-python-sdk
python
def prepare_method(self, method): self.method = method if (self.method is not None): self.method = to_native_string(self.method.upper())
def prepare_url(self, url, params): 'Prepares the given HTTP URL.' if isinstance(url, bytes): url = url.decode('utf8') else: url = (unicode(url) if is_py2 else str(url)) url = url.lstrip() if ((':' in url) and (not url.lower().startswith('http'))): self.url = url return try: (scheme, auth, host, port, path, query, fragment) = parse_url(url) except LocationParseError as e: raise InvalidURL(*e.args) if (not scheme): error = 'Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?' error = error.format(to_native_string(url, 'utf8')) raise MissingSchema(error) if (not host): raise InvalidURL(('Invalid URL %r: No host supplied' % url)) if (not unicode_is_ascii(host)): try: host = self._get_idna_encoded_host(host) except UnicodeError: raise InvalidURL('URL has an invalid label.') elif host.startswith(u'*'): raise InvalidURL('URL has an invalid label.') netloc = (auth or '') if netloc: netloc += '@' netloc += host if port: netloc += (':' + str(port)) if (not path): path = '/' if is_py2: if isinstance(scheme, str): scheme = scheme.encode('utf-8') if isinstance(netloc, str): netloc = netloc.encode('utf-8') if isinstance(path, str): path = path.encode('utf-8') if isinstance(query, str): query = query.encode('utf-8') if isinstance(fragment, str): fragment = fragment.encode('utf-8') if isinstance(params, (str, bytes)): params = to_native_string(params) enc_params = self._encode_params(params) if enc_params: if query: query = ('%s&%s' % (query, enc_params)) else: query = enc_params url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) self.url = url
-5,598,319,010,347,869,000
Prepares the given HTTP URL.
src/oci/_vendor/requests/models.py
prepare_url
LaudateCorpus1/oci-python-sdk
python
def prepare_url(self, url, params): if isinstance(url, bytes): url = url.decode('utf8') else: url = (unicode(url) if is_py2 else str(url)) url = url.lstrip() if ((':' in url) and (not url.lower().startswith('http'))): self.url = url return try: (scheme, auth, host, port, path, query, fragment) = parse_url(url) except LocationParseError as e: raise InvalidURL(*e.args) if (not scheme): error = 'Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?' error = error.format(to_native_string(url, 'utf8')) raise MissingSchema(error) if (not host): raise InvalidURL(('Invalid URL %r: No host supplied' % url)) if (not unicode_is_ascii(host)): try: host = self._get_idna_encoded_host(host) except UnicodeError: raise InvalidURL('URL has an invalid label.') elif host.startswith(u'*'): raise InvalidURL('URL has an invalid label.') netloc = (auth or '') if netloc: netloc += '@' netloc += host if port: netloc += (':' + str(port)) if (not path): path = '/' if is_py2: if isinstance(scheme, str): scheme = scheme.encode('utf-8') if isinstance(netloc, str): netloc = netloc.encode('utf-8') if isinstance(path, str): path = path.encode('utf-8') if isinstance(query, str): query = query.encode('utf-8') if isinstance(fragment, str): fragment = fragment.encode('utf-8') if isinstance(params, (str, bytes)): params = to_native_string(params) enc_params = self._encode_params(params) if enc_params: if query: query = ('%s&%s' % (query, enc_params)) else: query = enc_params url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) self.url = url
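prepare_url can be exercised in isolation to watch the param merging and percent-encoding; the inputs are illustrative, and the vendored oci copy behaves like upstream requests:

from requests.models import PreparedRequest

p = PreparedRequest()
p.prepare_url('https://example.com/search?page=1', {'q': 'café'})
print(p.url)  # existing query kept, params appended and UTF-8 percent-encoded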
def prepare_headers(self, headers): 'Prepares the given HTTP headers.' self.headers = CaseInsensitiveDict() if headers: for header in headers.items(): check_header_validity(header) (name, value) = header self.headers[to_native_string(name)] = value
5,526,637,669,010,412,000
Prepares the given HTTP headers.
src/oci/_vendor/requests/models.py
prepare_headers
LaudateCorpus1/oci-python-sdk
python
def prepare_headers(self, headers): self.headers = CaseInsensitiveDict() if headers: for header in headers.items(): check_header_validity(header) (name, value) = header self.headers[to_native_string(name)] = value
def prepare_body(self, data, files, json=None): 'Prepares the given HTTP body data.' body = None content_type = None if ((not data) and (json is not None)): content_type = 'application/json' body = complexjson.dumps(json) if (not isinstance(body, bytes)): body = body.encode('utf-8') is_stream = all([hasattr(data, '__iter__'), (not isinstance(data, (basestring, list, tuple, Mapping)))]) if is_stream: try: length = super_len(data) except (TypeError, AttributeError, UnsupportedOperation): length = None body = data if (getattr(body, 'tell', None) is not None): try: self._body_position = body.tell() except (IOError, OSError): self._body_position = object() if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' else: if files: (body, content_type) = self._encode_files(files, data) elif data: body = self._encode_params(data) if (isinstance(data, basestring) or hasattr(data, 'read')): content_type = None else: content_type = 'application/x-www-form-urlencoded' self.prepare_content_length(body) if (content_type and ('content-type' not in self.headers)): self.headers['Content-Type'] = content_type self.body = body
2,758,324,703,423,574,000
Prepares the given HTTP body data.
src/oci/_vendor/requests/models.py
prepare_body
LaudateCorpus1/oci-python-sdk
python
def prepare_body(self, data, files, json=None): body = None content_type = None if ((not data) and (json is not None)): content_type = 'application/json' body = complexjson.dumps(json) if (not isinstance(body, bytes)): body = body.encode('utf-8') is_stream = all([hasattr(data, '__iter__'), (not isinstance(data, (basestring, list, tuple, Mapping)))]) if is_stream: try: length = super_len(data) except (TypeError, AttributeError, UnsupportedOperation): length = None body = data if (getattr(body, 'tell', None) is not None): try: self._body_position = body.tell() except (IOError, OSError): self._body_position = object() if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' else: if files: (body, content_type) = self._encode_files(files, data) elif data: body = self._encode_params(data) if (isinstance(data, basestring) or hasattr(data, 'read')): content_type = None else: content_type = 'application/x-www-form-urlencoded' self.prepare_content_length(body) if (content_type and ('content-type' not in self.headers)): self.headers['Content-Type'] = content_type self.body = body
def prepare_content_length(self, body): 'Prepare Content-Length header based on request method and body' if (body is not None): length = super_len(body) if length: self.headers['Content-Length'] = builtin_str(length) elif ((self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None)): self.headers['Content-Length'] = '0'
-1,581,070,182,644,072,400
Prepare Content-Length header based on request method and body
src/oci/_vendor/requests/models.py
prepare_content_length
LaudateCorpus1/oci-python-sdk
python
def prepare_content_length(self, body): if (body is not None): length = super_len(body) if length: self.headers['Content-Length'] = builtin_str(length) elif ((self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None)): self.headers['Content-Length'] = '0'
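prepare_body and prepare_content_length cooperate: JSON input yields a serialized byte body with matching Content-Type and Content-Length headers. A sketch that drives the prepare_* steps directly, which is illustrative rather than the normal entry point:

from requests.models import PreparedRequest

p = PreparedRequest()
p.prepare_method('POST')
p.prepare_headers(None)                 # start from an empty header dict
p.prepare_body(data=None, files=None, json={'a': 1})
print(p.headers['Content-Type'])        # -> application/json
print(p.headers['Content-Length'])      # -> byte length of the serialized JSON body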