feature            type            detail
repo               stringlengths   7 to 55
path               stringlengths   4 to 127
func_name          stringlengths   1 to 88
original_string    stringlengths   75 to 19.8k
language           stringclasses   1 value
code               stringlengths   75 to 19.8k
code_tokens        sequence
docstring          stringlengths   3 to 17.3k
docstring_tokens   sequence
sha                stringlengths   40 to 40
url                stringlengths   87 to 242
partition          stringclasses   1 value
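Each row below carries these twelve fields. As a rough sketch only (the on-disk layout and the file name records.jsonl are assumptions for illustration, not something this dump states), rows of this shape could be iterated and paired up as docstring/code examples like so:

import json

def iter_records(path="records.jsonl"):
    # Assumes one JSON object per line, using the field names listed above.
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            yield json.loads(line)

for record in iter_records():
    # Each record pairs a Python function (code) with its natural-language
    # docstring, plus provenance metadata (repo, path, sha, url, partition).
    print(record["repo"], record["func_name"])
    print(record["docstring"])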
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.getSkeletalSummaryData
def getSkeletalSummaryData(self, action): """Reads summary information about the current pose of the skeleton associated with the given action.""" fn = self.function_table.getSkeletalSummaryData pSkeletalSummaryData = VRSkeletalSummaryData_t() result = fn(action, byref(pSkeletalSummaryData)) return result, pSkeletalSummaryData
python
def getSkeletalSummaryData(self, action):
    """Reads summary information about the current pose of the skeleton associated with the given action."""
    fn = self.function_table.getSkeletalSummaryData
    pSkeletalSummaryData = VRSkeletalSummaryData_t()
    result = fn(action, byref(pSkeletalSummaryData))
    return result, pSkeletalSummaryData
[ "def", "getSkeletalSummaryData", "(", "self", ",", "action", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getSkeletalSummaryData", "pSkeletalSummaryData", "=", "VRSkeletalSummaryData_t", "(", ")", "result", "=", "fn", "(", "action", ",", "byref", "(", "pSkeletalSummaryData", ")", ")", "return", "result", ",", "pSkeletalSummaryData" ]
Reads summary information about the current pose of the skeleton associated with the given action.
[ "Reads", "summary", "information", "about", "the", "current", "pose", "of", "the", "skeleton", "associated", "with", "the", "given", "action", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6075-L6081
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.decompressSkeletalBoneData
def decompressSkeletalBoneData(self, pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, unTransformArrayCount): """Turns a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array.""" fn = self.function_table.decompressSkeletalBoneData pTransformArray = VRBoneTransform_t() result = fn(pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, byref(pTransformArray), unTransformArrayCount) return result, pTransformArray
python
def decompressSkeletalBoneData(self, pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, unTransformArrayCount):
    """Turns a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array."""
    fn = self.function_table.decompressSkeletalBoneData
    pTransformArray = VRBoneTransform_t()
    result = fn(pvCompressedBuffer, unCompressedBufferSize, eTransformSpace, byref(pTransformArray), unTransformArrayCount)
    return result, pTransformArray
[ "def", "decompressSkeletalBoneData", "(", "self", ",", "pvCompressedBuffer", ",", "unCompressedBufferSize", ",", "eTransformSpace", ",", "unTransformArrayCount", ")", ":", "fn", "=", "self", ".", "function_table", ".", "decompressSkeletalBoneData", "pTransformArray", "=", "VRBoneTransform_t", "(", ")", "result", "=", "fn", "(", "pvCompressedBuffer", ",", "unCompressedBufferSize", ",", "eTransformSpace", ",", "byref", "(", "pTransformArray", ")", ",", "unTransformArrayCount", ")", "return", "result", ",", "pTransformArray" ]
Turns a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array.
[ "Turns", "a", "compressed", "buffer", "from", "GetSkeletalBoneDataCompressed", "and", "turns", "it", "back", "into", "a", "bone", "transform", "array", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6095-L6101
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.triggerHapticVibrationAction
def triggerHapticVibrationAction(self, action, fStartSecondsFromNow, fDurationSeconds, fFrequency, fAmplitude, ulRestrictToDevice): """Triggers a haptic event as described by the specified action""" fn = self.function_table.triggerHapticVibrationAction result = fn(action, fStartSecondsFromNow, fDurationSeconds, fFrequency, fAmplitude, ulRestrictToDevice) return result
python
def triggerHapticVibrationAction(self, action, fStartSecondsFromNow, fDurationSeconds, fFrequency, fAmplitude, ulRestrictToDevice):
    """Triggers a haptic event as described by the specified action"""
    fn = self.function_table.triggerHapticVibrationAction
    result = fn(action, fStartSecondsFromNow, fDurationSeconds, fFrequency, fAmplitude, ulRestrictToDevice)
    return result
[ "def", "triggerHapticVibrationAction", "(", "self", ",", "action", ",", "fStartSecondsFromNow", ",", "fDurationSeconds", ",", "fFrequency", ",", "fAmplitude", ",", "ulRestrictToDevice", ")", ":", "fn", "=", "self", ".", "function_table", ".", "triggerHapticVibrationAction", "result", "=", "fn", "(", "action", ",", "fStartSecondsFromNow", ",", "fDurationSeconds", ",", "fFrequency", ",", "fAmplitude", ",", "ulRestrictToDevice", ")", "return", "result" ]
Triggers a haptic event as described by the specified action
[ "Triggers", "a", "haptic", "event", "as", "described", "by", "the", "specified", "action" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6103-L6108
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.getActionOrigins
def getActionOrigins(self, actionSetHandle, digitalActionHandle, originOutCount): """Retrieve origin handles for an action""" fn = self.function_table.getActionOrigins originsOut = VRInputValueHandle_t() result = fn(actionSetHandle, digitalActionHandle, byref(originsOut), originOutCount) return result, originsOut
python
def getActionOrigins(self, actionSetHandle, digitalActionHandle, originOutCount):
    """Retrieve origin handles for an action"""
    fn = self.function_table.getActionOrigins
    originsOut = VRInputValueHandle_t()
    result = fn(actionSetHandle, digitalActionHandle, byref(originsOut), originOutCount)
    return result, originsOut
[ "def", "getActionOrigins", "(", "self", ",", "actionSetHandle", ",", "digitalActionHandle", ",", "originOutCount", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getActionOrigins", "originsOut", "=", "VRInputValueHandle_t", "(", ")", "result", "=", "fn", "(", "actionSetHandle", ",", "digitalActionHandle", ",", "byref", "(", "originsOut", ")", ",", "originOutCount", ")", "return", "result", ",", "originsOut" ]
Retrieve origin handles for an action
[ "Retrieve", "origin", "handles", "for", "an", "action" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6110-L6116
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.getOriginLocalizedName
def getOriginLocalizedName(self, origin, pchNameArray, unNameArraySize, unStringSectionsToInclude): """ Retrieves the name of the origin in the current language. unStringSectionsToInclude is a bitfield of values in EVRInputStringBits that allows the application to specify which parts of the origin's information it wants a string for. """ fn = self.function_table.getOriginLocalizedName result = fn(origin, pchNameArray, unNameArraySize, unStringSectionsToInclude) return result
python
def getOriginLocalizedName(self, origin, pchNameArray, unNameArraySize, unStringSectionsToInclude):
    """
    Retrieves the name of the origin in the current language.
    unStringSectionsToInclude is a bitfield of values in EVRInputStringBits that allows the application
    to specify which parts of the origin's information it wants a string for.
    """
    fn = self.function_table.getOriginLocalizedName
    result = fn(origin, pchNameArray, unNameArraySize, unStringSectionsToInclude)
    return result
[ "def", "getOriginLocalizedName", "(", "self", ",", "origin", ",", "pchNameArray", ",", "unNameArraySize", ",", "unStringSectionsToInclude", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOriginLocalizedName", "result", "=", "fn", "(", "origin", ",", "pchNameArray", ",", "unNameArraySize", ",", "unStringSectionsToInclude", ")", "return", "result" ]
Retrieves the name of the origin in the current language. unStringSectionsToInclude is a bitfield of values in EVRInputStringBits that allows the application to specify which parts of the origin's information it wants a string for.
[ "Retrieves", "the", "name", "of", "the", "origin", "in", "the", "current", "language", ".", "unStringSectionsToInclude", "is", "a", "bitfield", "of", "values", "in", "EVRInputStringBits", "that", "allows", "the", "application", "to", "specify", "which", "parts", "of", "the", "origin", "s", "information", "it", "wants", "a", "string", "for", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6118-L6126
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.getOriginTrackedDeviceInfo
def getOriginTrackedDeviceInfo(self, origin, unOriginInfoSize): """Retrieves useful information for the origin of this action""" fn = self.function_table.getOriginTrackedDeviceInfo pOriginInfo = InputOriginInfo_t() result = fn(origin, byref(pOriginInfo), unOriginInfoSize) return result, pOriginInfo
python
def getOriginTrackedDeviceInfo(self, origin, unOriginInfoSize):
    """Retrieves useful information for the origin of this action"""
    fn = self.function_table.getOriginTrackedDeviceInfo
    pOriginInfo = InputOriginInfo_t()
    result = fn(origin, byref(pOriginInfo), unOriginInfoSize)
    return result, pOriginInfo
[ "def", "getOriginTrackedDeviceInfo", "(", "self", ",", "origin", ",", "unOriginInfoSize", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOriginTrackedDeviceInfo", "pOriginInfo", "=", "InputOriginInfo_t", "(", ")", "result", "=", "fn", "(", "origin", ",", "byref", "(", "pOriginInfo", ")", ",", "unOriginInfoSize", ")", "return", "result", ",", "pOriginInfo" ]
Retrieves useful information for the origin of this action
[ "Retrieves", "useful", "information", "for", "the", "origin", "of", "this", "action" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6128-L6134
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.showActionOrigins
def showActionOrigins(self, actionSetHandle, ulActionHandle): """Shows the current binding for the action in-headset""" fn = self.function_table.showActionOrigins result = fn(actionSetHandle, ulActionHandle) return result
python
def showActionOrigins(self, actionSetHandle, ulActionHandle):
    """Shows the current binding for the action in-headset"""
    fn = self.function_table.showActionOrigins
    result = fn(actionSetHandle, ulActionHandle)
    return result
[ "def", "showActionOrigins", "(", "self", ",", "actionSetHandle", ",", "ulActionHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "showActionOrigins", "result", "=", "fn", "(", "actionSetHandle", ",", "ulActionHandle", ")", "return", "result" ]
Shows the current binding for the action in-headset
[ "Shows", "the", "current", "binding", "for", "the", "action", "in", "-", "headset" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6136-L6141
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRInput.showBindingsForActionSet
def showBindingsForActionSet(self, unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight): """Shows the current binding all the actions in the specified action sets""" fn = self.function_table.showBindingsForActionSet pSets = VRActiveActionSet_t() result = fn(byref(pSets), unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight) return result, pSets
python
def showBindingsForActionSet(self, unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight):
    """Shows the current binding all the actions in the specified action sets"""
    fn = self.function_table.showBindingsForActionSet
    pSets = VRActiveActionSet_t()
    result = fn(byref(pSets), unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight)
    return result, pSets
[ "def", "showBindingsForActionSet", "(", "self", ",", "unSizeOfVRSelectedActionSet_t", ",", "unSetCount", ",", "originToHighlight", ")", ":", "fn", "=", "self", ".", "function_table", ".", "showBindingsForActionSet", "pSets", "=", "VRActiveActionSet_t", "(", ")", "result", "=", "fn", "(", "byref", "(", "pSets", ")", ",", "unSizeOfVRSelectedActionSet_t", ",", "unSetCount", ",", "originToHighlight", ")", "return", "result", ",", "pSets" ]
Shows the current binding all the actions in the specified action sets
[ "Shows", "the", "current", "binding", "all", "the", "actions", "in", "the", "specified", "action", "sets" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6143-L6149
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRIOBuffer.open
def open(self, pchPath, mode, unElementSize, unElements): """opens an existing or creates a new IOBuffer of unSize bytes""" fn = self.function_table.open pulBuffer = IOBufferHandle_t() result = fn(pchPath, mode, unElementSize, unElements, byref(pulBuffer)) return result, pulBuffer
python
def open(self, pchPath, mode, unElementSize, unElements):
    """opens an existing or creates a new IOBuffer of unSize bytes"""
    fn = self.function_table.open
    pulBuffer = IOBufferHandle_t()
    result = fn(pchPath, mode, unElementSize, unElements, byref(pulBuffer))
    return result, pulBuffer
[ "def", "open", "(", "self", ",", "pchPath", ",", "mode", ",", "unElementSize", ",", "unElements", ")", ":", "fn", "=", "self", ".", "function_table", ".", "open", "pulBuffer", "=", "IOBufferHandle_t", "(", ")", "result", "=", "fn", "(", "pchPath", ",", "mode", ",", "unElementSize", ",", "unElements", ",", "byref", "(", "pulBuffer", ")", ")", "return", "result", ",", "pulBuffer" ]
opens an existing or creates a new IOBuffer of unSize bytes
[ "opens", "an", "existing", "or", "creates", "a", "new", "IOBuffer", "of", "unSize", "bytes" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6190-L6196
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRIOBuffer.close
def close(self, ulBuffer): """closes a previously opened or created buffer""" fn = self.function_table.close result = fn(ulBuffer) return result
python
def close(self, ulBuffer):
    """closes a previously opened or created buffer"""
    fn = self.function_table.close
    result = fn(ulBuffer)
    return result
[ "def", "close", "(", "self", ",", "ulBuffer", ")", ":", "fn", "=", "self", ".", "function_table", ".", "close", "result", "=", "fn", "(", "ulBuffer", ")", "return", "result" ]
closes a previously opened or created buffer
[ "closes", "a", "previously", "opened", "or", "created", "buffer" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6198-L6203
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRIOBuffer.propertyContainer
def propertyContainer(self, ulBuffer): """retrieves the property container of an buffer.""" fn = self.function_table.propertyContainer result = fn(ulBuffer) return result
python
def propertyContainer(self, ulBuffer):
    """retrieves the property container of an buffer."""
    fn = self.function_table.propertyContainer
    result = fn(ulBuffer)
    return result
[ "def", "propertyContainer", "(", "self", ",", "ulBuffer", ")", ":", "fn", "=", "self", ".", "function_table", ".", "propertyContainer", "result", "=", "fn", "(", "ulBuffer", ")", "return", "result" ]
retrieves the property container of an buffer.
[ "retrieves", "the", "property", "container", "of", "an", "buffer", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6220-L6225
train
cmbruns/pyopenvr
src/openvr/__init__.py
IVRIOBuffer.hasReaders
def hasReaders(self, ulBuffer): """inexpensively checks for readers to allow writers to fast-fail potentially expensive copies and writes.""" fn = self.function_table.hasReaders result = fn(ulBuffer) return result
python
def hasReaders(self, ulBuffer):
    """inexpensively checks for readers to allow writers to fast-fail potentially expensive copies and writes."""
    fn = self.function_table.hasReaders
    result = fn(ulBuffer)
    return result
[ "def", "hasReaders", "(", "self", ",", "ulBuffer", ")", ":", "fn", "=", "self", ".", "function_table", ".", "hasReaders", "result", "=", "fn", "(", "ulBuffer", ")", "return", "result" ]
inexpensively checks for readers to allow writers to fast-fail potentially expensive copies and writes.
[ "inexpensively", "checks", "for", "readers", "to", "allow", "writers", "to", "fast", "-", "fail", "potentially", "expensive", "copies", "and", "writes", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6227-L6232
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.execute
async def execute(self, sql, *params): """Executes the given operation substituting any markers with the given parameters. :param sql: the SQL statement to execute with optional ? parameter markers. Note that pyodbc never modifies the SQL statement. :param params: optional parameters for the markers in the SQL. They can be passed in a single sequence as defined by the DB API. For convenience, however, they can also be passed individually """ if self._echo: logger.info(sql) logger.info("%r", sql) await self._run_operation(self._impl.execute, sql, *params) return self
python
async def execute(self, sql, *params):
    """Executes the given operation substituting any markers with the given parameters.

    :param sql: the SQL statement to execute with optional ? parameter markers.
        Note that pyodbc never modifies the SQL statement.
    :param params: optional parameters for the markers in the SQL. They can be
        passed in a single sequence as defined by the DB API. For convenience,
        however, they can also be passed individually
    """
    if self._echo:
        logger.info(sql)
        logger.info("%r", sql)
    await self._run_operation(self._impl.execute, sql, *params)
    return self
[ "async", "def", "execute", "(", "self", ",", "sql", ",", "*", "params", ")", ":", "if", "self", ".", "_echo", ":", "logger", ".", "info", "(", "sql", ")", "logger", ".", "info", "(", "\"%r\"", ",", "sql", ")", "await", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "execute", ",", "sql", ",", "*", "params", ")", "return", "self" ]
Executes the given operation substituting any markers with the given parameters. :param sql: the SQL statement to execute with optional ? parameter markers. Note that pyodbc never modifies the SQL statement. :param params: optional parameters for the markers in the SQL. They can be passed in a single sequence as defined by the DB API. For convenience, however, they can also be passed individually
[ "Executes", "the", "given", "operation", "substituting", "any", "markers", "with", "the", "given", "parameters", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L108-L122
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.executemany
def executemany(self, sql, *params): """Prepare a database query or command and then execute it against all parameter sequences found in the sequence seq_of_params. :param sql: the SQL statement to execute with optional ? parameters :param params: sequence parameters for the markers in the SQL. """ fut = self._run_operation(self._impl.executemany, sql, *params) return fut
python
def executemany(self, sql, *params):
    """Prepare a database query or command and then execute it against all
    parameter sequences found in the sequence seq_of_params.

    :param sql: the SQL statement to execute with optional ? parameters
    :param params: sequence parameters for the markers in the SQL.
    """
    fut = self._run_operation(self._impl.executemany, sql, *params)
    return fut
[ "def", "executemany", "(", "self", ",", "sql", ",", "*", "params", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "executemany", ",", "sql", ",", "*", "params", ")", "return", "fut" ]
Prepare a database query or command and then execute it against all parameter sequences found in the sequence seq_of_params. :param sql: the SQL statement to execute with optional ? parameters :param params: sequence parameters for the markers in the SQL.
[ "Prepare", "a", "database", "query", "or", "command", "and", "then", "execute", "it", "against", "all", "parameter", "sequences", "found", "in", "the", "sequence", "seq_of_params", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L124-L132
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.fetchmany
def fetchmany(self, size): """Returns a list of remaining rows, containing no more than size rows, used to process results in chunks. The list will be empty when there are no more rows. The default for cursor.arraysize is 1 which is no different than calling fetchone(). A ProgrammingError exception is raised if no SQL has been executed or if it did not return a result set (e.g. was not a SELECT statement). :param size: int, max number of rows to return """ fut = self._run_operation(self._impl.fetchmany, size) return fut
python
def fetchmany(self, size):
    """Returns a list of remaining rows, containing no more than size rows,
    used to process results in chunks. The list will be empty when there
    are no more rows.

    The default for cursor.arraysize is 1 which is no different than calling fetchone().

    A ProgrammingError exception is raised if no SQL has been executed or if
    it did not return a result set (e.g. was not a SELECT statement).

    :param size: int, max number of rows to return
    """
    fut = self._run_operation(self._impl.fetchmany, size)
    return fut
[ "def", "fetchmany", "(", "self", ",", "size", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "fetchmany", ",", "size", ")", "return", "fut" ]
Returns a list of remaining rows, containing no more than size rows, used to process results in chunks. The list will be empty when there are no more rows. The default for cursor.arraysize is 1 which is no different than calling fetchone(). A ProgrammingError exception is raised if no SQL has been executed or if it did not return a result set (e.g. was not a SELECT statement). :param size: int, max number of rows to return
[ "Returns", "a", "list", "of", "remaining", "rows", "containing", "no", "more", "than", "size", "rows", "used", "to", "process", "results", "in", "chunks", ".", "The", "list", "will", "be", "empty", "when", "there", "are", "no", "more", "rows", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L169-L184
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.tables
def tables(self, **kw): """Creates a result set of tables in the database that match the given criteria. :param table: the table tname :param catalog: the catalog name :param schema: the schmea name :param tableType: one of TABLE, VIEW, SYSTEM TABLE ... """ fut = self._run_operation(self._impl.tables, **kw) return fut
python
def tables(self, **kw):
    """Creates a result set of tables in the database that match the given criteria.

    :param table: the table tname
    :param catalog: the catalog name
    :param schema: the schmea name
    :param tableType: one of TABLE, VIEW, SYSTEM TABLE ...
    """
    fut = self._run_operation(self._impl.tables, **kw)
    return fut
[ "def", "tables", "(", "self", ",", "*", "*", "kw", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "tables", ",", "*", "*", "kw", ")", "return", "fut" ]
Creates a result set of tables in the database that match the given criteria. :param table: the table tname :param catalog: the catalog name :param schema: the schmea name :param tableType: one of TABLE, VIEW, SYSTEM TABLE ...
[ "Creates", "a", "result", "set", "of", "tables", "in", "the", "database", "that", "match", "the", "given", "criteria", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L200-L210
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.columns
def columns(self, **kw): """Creates a results set of column names in specified tables by executing the ODBC SQLColumns function. Each row fetched has the following columns. :param table: the table tname :param catalog: the catalog name :param schema: the schmea name :param column: string search pattern for column names. """ fut = self._run_operation(self._impl.columns, **kw) return fut
python
def columns(self, **kw):
    """Creates a results set of column names in specified tables by executing
    the ODBC SQLColumns function. Each row fetched has the following columns.

    :param table: the table tname
    :param catalog: the catalog name
    :param schema: the schmea name
    :param column: string search pattern for column names.
    """
    fut = self._run_operation(self._impl.columns, **kw)
    return fut
[ "def", "columns", "(", "self", ",", "*", "*", "kw", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "columns", ",", "*", "*", "kw", ")", "return", "fut" ]
Creates a results set of column names in specified tables by executing the ODBC SQLColumns function. Each row fetched has the following columns. :param table: the table tname :param catalog: the catalog name :param schema: the schmea name :param column: string search pattern for column names.
[ "Creates", "a", "results", "set", "of", "column", "names", "in", "specified", "tables", "by", "executing", "the", "ODBC", "SQLColumns", "function", ".", "Each", "row", "fetched", "has", "the", "following", "columns", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L212-L223
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.statistics
def statistics(self, catalog=None, schema=None, unique=False, quick=True): """Creates a results set of statistics about a single table and the indexes associated with the table by executing SQLStatistics. :param catalog: the catalog name :param schema: the schmea name :param unique: if True, only unique indexes are retured. Otherwise all indexes are returned. :param quick: if True, CARDINALITY and PAGES are returned only if they are readily available from the server """ fut = self._run_operation(self._impl.statistics, catalog=catalog, schema=schema, unique=unique, quick=quick) return fut
python
def statistics(self, catalog=None, schema=None, unique=False, quick=True):
    """Creates a results set of statistics about a single table and the
    indexes associated with the table by executing SQLStatistics.

    :param catalog: the catalog name
    :param schema: the schmea name
    :param unique: if True, only unique indexes are retured. Otherwise all
        indexes are returned.
    :param quick: if True, CARDINALITY and PAGES are returned only if they
        are readily available from the server
    """
    fut = self._run_operation(self._impl.statistics, catalog=catalog,
                              schema=schema, unique=unique, quick=quick)
    return fut
[ "def", "statistics", "(", "self", ",", "catalog", "=", "None", ",", "schema", "=", "None", ",", "unique", "=", "False", ",", "quick", "=", "True", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "statistics", ",", "catalog", "=", "catalog", ",", "schema", "=", "schema", ",", "unique", "=", "unique", ",", "quick", "=", "quick", ")", "return", "fut" ]
Creates a results set of statistics about a single table and the indexes associated with the table by executing SQLStatistics. :param catalog: the catalog name :param schema: the schmea name :param unique: if True, only unique indexes are retured. Otherwise all indexes are returned. :param quick: if True, CARDINALITY and PAGES are returned only if they are readily available from the server
[ "Creates", "a", "results", "set", "of", "statistics", "about", "a", "single", "table", "and", "the", "indexes", "associated", "with", "the", "table", "by", "executing", "SQLStatistics", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L225-L238
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.rowIdColumns
def rowIdColumns(self, table, catalog=None, schema=None, # nopep8 nullable=True): """Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a result set of columns that uniquely identify a row """ fut = self._run_operation(self._impl.rowIdColumns, table, catalog=catalog, schema=schema, nullable=nullable) return fut
python
def rowIdColumns(self, table, catalog=None, schema=None,  # nopep8
                 nullable=True):
    """Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a result
    set of columns that uniquely identify a row
    """
    fut = self._run_operation(self._impl.rowIdColumns, table,
                              catalog=catalog, schema=schema,
                              nullable=nullable)
    return fut
[ "def", "rowIdColumns", "(", "self", ",", "table", ",", "catalog", "=", "None", ",", "schema", "=", "None", ",", "# nopep8", "nullable", "=", "True", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "rowIdColumns", ",", "table", ",", "catalog", "=", "catalog", ",", "schema", "=", "schema", ",", "nullable", "=", "nullable", ")", "return", "fut" ]
Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a result set of columns that uniquely identify a row
[ "Executes", "SQLSpecialColumns", "with", "SQL_BEST_ROWID", "which", "creates", "a", "result", "set", "of", "columns", "that", "uniquely", "identify", "a", "row" ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L240-L248
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.primaryKeys
def primaryKeys(self, table, catalog=None, schema=None): # nopep8 """Creates a result set of column names that make up the primary key for a table by executing the SQLPrimaryKeys function.""" fut = self._run_operation(self._impl.primaryKeys, table, catalog=catalog, schema=schema) return fut
python
def primaryKeys(self, table, catalog=None, schema=None):  # nopep8
    """Creates a result set of column names that make up the primary key for
    a table by executing the SQLPrimaryKeys function."""
    fut = self._run_operation(self._impl.primaryKeys, table,
                              catalog=catalog, schema=schema)
    return fut
[ "def", "primaryKeys", "(", "self", ",", "table", ",", "catalog", "=", "None", ",", "schema", "=", "None", ")", ":", "# nopep8", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "primaryKeys", ",", "table", ",", "catalog", "=", "catalog", ",", "schema", "=", "schema", ")", "return", "fut" ]
Creates a result set of column names that make up the primary key for a table by executing the SQLPrimaryKeys function.
[ "Creates", "a", "result", "set", "of", "column", "names", "that", "make", "up", "the", "primary", "key", "for", "a", "table", "by", "executing", "the", "SQLPrimaryKeys", "function", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L261-L266
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.getTypeInfo
def getTypeInfo(self, sql_type): # nopep8 """Executes SQLGetTypeInfo a creates a result set with information about the specified data type or all data types supported by the ODBC driver if not specified. """ fut = self._run_operation(self._impl.getTypeInfo, sql_type) return fut
python
def getTypeInfo(self, sql_type):  # nopep8
    """Executes SQLGetTypeInfo a creates a result set with information about
    the specified data type or all data types supported by the ODBC driver
    if not specified.
    """
    fut = self._run_operation(self._impl.getTypeInfo, sql_type)
    return fut
[ "def", "getTypeInfo", "(", "self", ",", "sql_type", ")", ":", "# nopep8", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "getTypeInfo", ",", "sql_type", ")", "return", "fut" ]
Executes SQLGetTypeInfo a creates a result set with information about the specified data type or all data types supported by the ODBC driver if not specified.
[ "Executes", "SQLGetTypeInfo", "a", "creates", "a", "result", "set", "with", "information", "about", "the", "specified", "data", "type", "or", "all", "data", "types", "supported", "by", "the", "ODBC", "driver", "if", "not", "specified", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L278-L284
train
aio-libs/aioodbc
aioodbc/cursor.py
Cursor.procedures
def procedures(self, *a, **kw): """Executes SQLProcedures and creates a result set of information about the procedures in the data source. """ fut = self._run_operation(self._impl.procedures, *a, **kw) return fut
python
def procedures(self, *a, **kw):
    """Executes SQLProcedures and creates a result set of information about
    the procedures in the data source.
    """
    fut = self._run_operation(self._impl.procedures, *a, **kw)
    return fut
[ "def", "procedures", "(", "self", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "fut", "=", "self", ".", "_run_operation", "(", "self", ".", "_impl", ".", "procedures", ",", "*", "a", ",", "*", "*", "kw", ")", "return", "fut" ]
Executes SQLProcedures and creates a result set of information about the procedures in the data source.
[ "Executes", "SQLProcedures", "and", "creates", "a", "result", "set", "of", "information", "about", "the", "procedures", "in", "the", "data", "source", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/cursor.py#L286-L291
train
aio-libs/aioodbc
aioodbc/__init__.py
dataSources
async def dataSources(loop=None, executor=None): """Returns a dictionary mapping available DSNs to their descriptions. :param loop: asyncio compatible event loop :param executor: instance of custom ThreadPoolExecutor, if not supplied default executor will be used :return dict: mapping of dsn to driver description """ loop = loop or asyncio.get_event_loop() sources = await loop.run_in_executor(executor, _dataSources) return sources
python
async def dataSources(loop=None, executor=None):
    """Returns a dictionary mapping available DSNs to their descriptions.

    :param loop: asyncio compatible event loop
    :param executor: instance of custom ThreadPoolExecutor, if not supplied
        default executor will be used
    :return dict: mapping of dsn to driver description
    """
    loop = loop or asyncio.get_event_loop()
    sources = await loop.run_in_executor(executor, _dataSources)
    return sources
[ "async", "def", "dataSources", "(", "loop", "=", "None", ",", "executor", "=", "None", ")", ":", "loop", "=", "loop", "or", "asyncio", ".", "get_event_loop", "(", ")", "sources", "=", "await", "loop", ".", "run_in_executor", "(", "executor", ",", "_dataSources", ")", "return", "sources" ]
Returns a dictionary mapping available DSNs to their descriptions. :param loop: asyncio compatible event loop :param executor: instance of custom ThreadPoolExecutor, if not supplied default executor will be used :return dict: mapping of dsn to driver description
[ "Returns", "a", "dictionary", "mapping", "available", "DSNs", "to", "their", "descriptions", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/__init__.py#L14-L24
train
aio-libs/aioodbc
aioodbc/connection.py
connect
def connect(*, dsn, autocommit=False, ansi=False, timeout=0, loop=None, executor=None, echo=False, after_created=None, **kwargs): """Accepts an ODBC connection string and returns a new Connection object. The connection string can be passed as the string `str`, as a list of keywords,or a combination of the two. Any keywords except autocommit, ansi, and timeout are simply added to the connection string. :param autocommit bool: False or zero, the default, if True or non-zero, the connection is put into ODBC autocommit mode and statements are committed automatically. :param ansi bool: By default, pyodbc first attempts to connect using the Unicode version of SQLDriverConnectW. If the driver returns IM001 indicating it does not support the Unicode version, the ANSI version is tried. :param timeout int: An integer login timeout in seconds, used to set the SQL_ATTR_LOGIN_TIMEOUT attribute of the connection. The default is 0 which means the database's default timeout, if any, is use :param after_created callable: support customize configuration after connection is connected. Must be an async unary function, or leave it as None. """ return _ContextManager(_connect(dsn=dsn, autocommit=autocommit, ansi=ansi, timeout=timeout, loop=loop, executor=executor, echo=echo, after_created=after_created, **kwargs))
python
def connect(*, dsn, autocommit=False, ansi=False, timeout=0, loop=None,
            executor=None, echo=False, after_created=None, **kwargs):
    """Accepts an ODBC connection string and returns a new Connection object.

    The connection string can be passed as the string `str`, as a list of
    keywords,or a combination of the two. Any keywords except autocommit,
    ansi, and timeout are simply added to the connection string.

    :param autocommit bool: False or zero, the default, if True or non-zero,
        the connection is put into ODBC autocommit mode and statements are
        committed automatically.
    :param ansi bool: By default, pyodbc first attempts to connect using
        the Unicode version of SQLDriverConnectW. If the driver returns IM001
        indicating it does not support the Unicode version, the ANSI version
        is tried.
    :param timeout int: An integer login timeout in seconds, used to set
        the SQL_ATTR_LOGIN_TIMEOUT attribute of the connection. The default
        is 0 which means the database's default timeout, if any, is use
    :param after_created callable: support customize configuration after
        connection is connected. Must be an async unary function, or leave
        it as None.
    """
    return _ContextManager(_connect(dsn=dsn, autocommit=autocommit, ansi=ansi,
                                    timeout=timeout, loop=loop,
                                    executor=executor, echo=echo,
                                    after_created=after_created, **kwargs))
[ "def", "connect", "(", "*", ",", "dsn", ",", "autocommit", "=", "False", ",", "ansi", "=", "False", ",", "timeout", "=", "0", ",", "loop", "=", "None", ",", "executor", "=", "None", ",", "echo", "=", "False", ",", "after_created", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "_ContextManager", "(", "_connect", "(", "dsn", "=", "dsn", ",", "autocommit", "=", "autocommit", ",", "ansi", "=", "ansi", ",", "timeout", "=", "timeout", ",", "loop", "=", "loop", ",", "executor", "=", "executor", ",", "echo", "=", "echo", ",", "after_created", "=", "after_created", ",", "*", "*", "kwargs", ")", ")" ]
Accepts an ODBC connection string and returns a new Connection object. The connection string can be passed as the string `str`, as a list of keywords,or a combination of the two. Any keywords except autocommit, ansi, and timeout are simply added to the connection string. :param autocommit bool: False or zero, the default, if True or non-zero, the connection is put into ODBC autocommit mode and statements are committed automatically. :param ansi bool: By default, pyodbc first attempts to connect using the Unicode version of SQLDriverConnectW. If the driver returns IM001 indicating it does not support the Unicode version, the ANSI version is tried. :param timeout int: An integer login timeout in seconds, used to set the SQL_ATTR_LOGIN_TIMEOUT attribute of the connection. The default is 0 which means the database's default timeout, if any, is use :param after_created callable: support customize configuration after connection is connected. Must be an async unary function, or leave it as None.
[ "Accepts", "an", "ODBC", "connection", "string", "and", "returns", "a", "new", "Connection", "object", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L15-L40
train
aio-libs/aioodbc
aioodbc/connection.py
Connection.close
async def close(self): """Close pyodbc connection""" if not self._conn: return c = await self._execute(self._conn.close) self._conn = None return c
python
async def close(self):
    """Close pyodbc connection"""
    if not self._conn:
        return
    c = await self._execute(self._conn.close)
    self._conn = None
    return c
[ "async", "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_conn", ":", "return", "c", "=", "await", "self", ".", "_execute", "(", "self", ".", "_conn", ".", "close", ")", "self", ".", "_conn", "=", "None", "return", "c" ]
Close pyodbc connection
[ "Close", "pyodbc", "connection" ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L133-L139
train
aio-libs/aioodbc
aioodbc/connection.py
Connection.execute
async def execute(self, sql, *args): """Create a new Cursor object, call its execute method, and return it. See Cursor.execute for more details.This is a convenience method that is not part of the DB API. Since a new Cursor is allocated by each call, this should not be used if more than one SQL statement needs to be executed. :param sql: str, formated sql statement :param args: tuple, arguments for construction of sql statement """ _cursor = await self._execute(self._conn.execute, sql, *args) connection = self cursor = Cursor(_cursor, connection, echo=self._echo) return cursor
python
async def execute(self, sql, *args):
    """Create a new Cursor object, call its execute method, and return it.

    See Cursor.execute for more details.This is a convenience method that
    is not part of the DB API. Since a new Cursor is allocated by each call,
    this should not be used if more than one SQL statement needs to be
    executed.

    :param sql: str, formated sql statement
    :param args: tuple, arguments for construction of sql statement
    """
    _cursor = await self._execute(self._conn.execute, sql, *args)
    connection = self
    cursor = Cursor(_cursor, connection, echo=self._echo)
    return cursor
[ "async", "def", "execute", "(", "self", ",", "sql", ",", "*", "args", ")", ":", "_cursor", "=", "await", "self", ".", "_execute", "(", "self", ".", "_conn", ".", "execute", ",", "sql", ",", "*", "args", ")", "connection", "=", "self", "cursor", "=", "Cursor", "(", "_cursor", ",", "connection", ",", "echo", "=", "self", ".", "_echo", ")", "return", "cursor" ]
Create a new Cursor object, call its execute method, and return it. See Cursor.execute for more details.This is a convenience method that is not part of the DB API. Since a new Cursor is allocated by each call, this should not be used if more than one SQL statement needs to be executed. :param sql: str, formated sql statement :param args: tuple, arguments for construction of sql statement
[ "Create", "a", "new", "Cursor", "object", "call", "its", "execute", "method", "and", "return", "it", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L153-L167
train
aio-libs/aioodbc
aioodbc/connection.py
Connection.getinfo
def getinfo(self, type_): """Returns general information about the driver and data source associated with a connection by calling SQLGetInfo and returning its results. See Microsoft's SQLGetInfo documentation for the types of information available. :param type_: int, pyodbc.SQL_* constant """ fut = self._execute(self._conn.getinfo, type_) return fut
python
def getinfo(self, type_):
    """Returns general information about the driver and data source
    associated with a connection by calling SQLGetInfo and returning its
    results. See Microsoft's SQLGetInfo documentation for the types of
    information available.

    :param type_: int, pyodbc.SQL_* constant
    """
    fut = self._execute(self._conn.getinfo, type_)
    return fut
[ "def", "getinfo", "(", "self", ",", "type_", ")", ":", "fut", "=", "self", ".", "_execute", "(", "self", ".", "_conn", ".", "getinfo", ",", "type_", ")", "return", "fut" ]
Returns general information about the driver and data source associated with a connection by calling SQLGetInfo and returning its results. See Microsoft's SQLGetInfo documentation for the types of information available. :param type_: int, pyodbc.SQL_* constant
[ "Returns", "general", "information", "about", "the", "driver", "and", "data", "source", "associated", "with", "a", "connection", "by", "calling", "SQLGetInfo", "and", "returning", "its", "results", ".", "See", "Microsoft", "s", "SQLGetInfo", "documentation", "for", "the", "types", "of", "information", "available", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L169-L178
train
aio-libs/aioodbc
aioodbc/connection.py
Connection.add_output_converter
def add_output_converter(self, sqltype, func): """Register an output converter function that will be called whenever a value with the given SQL type is read from the database. :param sqltype: the integer SQL type value to convert, which can be one of the defined standard constants (pyodbc.SQL_VARCHAR) or a database-specific value (e.g. -151 for the SQL Server 2008 geometry data type). :param func: the converter function which will be called with a single parameter, the value, and should return the converted value. If the value is NULL, the parameter will be None. Otherwise it will be a Python string. """ fut = self._execute(self._conn.add_output_converter, sqltype, func) return fut
python
def add_output_converter(self, sqltype, func):
    """Register an output converter function that will be called whenever a
    value with the given SQL type is read from the database.

    :param sqltype: the integer SQL type value to convert, which can be one
        of the defined standard constants (pyodbc.SQL_VARCHAR) or a
        database-specific value (e.g. -151 for the SQL Server 2008 geometry
        data type).
    :param func: the converter function which will be called with a single
        parameter, the value, and should return the converted value. If the
        value is NULL, the parameter will be None. Otherwise it will be a
        Python string.
    """
    fut = self._execute(self._conn.add_output_converter, sqltype, func)
    return fut
[ "def", "add_output_converter", "(", "self", ",", "sqltype", ",", "func", ")", ":", "fut", "=", "self", ".", "_execute", "(", "self", ".", "_conn", ".", "add_output_converter", ",", "sqltype", ",", "func", ")", "return", "fut" ]
Register an output converter function that will be called whenever a value with the given SQL type is read from the database. :param sqltype: the integer SQL type value to convert, which can be one of the defined standard constants (pyodbc.SQL_VARCHAR) or a database-specific value (e.g. -151 for the SQL Server 2008 geometry data type). :param func: the converter function which will be called with a single parameter, the value, and should return the converted value. If the value is NULL, the parameter will be None. Otherwise it will be a Python string.
[ "Register", "an", "output", "converter", "function", "that", "will", "be", "called", "whenever", "a", "value", "with", "the", "given", "SQL", "type", "is", "read", "from", "the", "database", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L180-L194
train
aio-libs/aioodbc
aioodbc/connection.py
Connection.set_attr
def set_attr(self, attr_id, value): """Calls SQLSetConnectAttr with the given values. :param attr_id: the attribute ID (integer) to set. These are ODBC or driver constants. :parm value: the connection attribute value to set. At this time only integer values are supported. """ fut = self._execute(self._conn.set_attr, attr_id, value) return fut
python
def set_attr(self, attr_id, value):
    """Calls SQLSetConnectAttr with the given values.

    :param attr_id: the attribute ID (integer) to set. These are ODBC or
        driver constants.
    :parm value: the connection attribute value to set. At this time only
        integer values are supported.
    """
    fut = self._execute(self._conn.set_attr, attr_id, value)
    return fut
[ "def", "set_attr", "(", "self", ",", "attr_id", ",", "value", ")", ":", "fut", "=", "self", ".", "_execute", "(", "self", ".", "_conn", ".", "set_attr", ",", "attr_id", ",", "value", ")", "return", "fut" ]
Calls SQLSetConnectAttr with the given values. :param attr_id: the attribute ID (integer) to set. These are ODBC or driver constants. :parm value: the connection attribute value to set. At this time only integer values are supported.
[ "Calls", "SQLSetConnectAttr", "with", "the", "given", "values", "." ]
01245560828d4adce0d7d16930fa566102322a0a
https://github.com/aio-libs/aioodbc/blob/01245560828d4adce0d7d16930fa566102322a0a/aioodbc/connection.py#L203-L212
train
tsifrer/python-twitch-client
twitch/api/base.py
TwitchAPI._request_get
def _request_get(self, path, params=None, json=True, url=BASE_URL): """Perform a HTTP GET request.""" url = urljoin(url, path) headers = self._get_request_headers() response = requests.get(url, params=params, headers=headers) if response.status_code >= 500: backoff = self._initial_backoff for _ in range(self._max_retries): time.sleep(backoff) backoff_response = requests.get( url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT) if backoff_response.status_code < 500: response = backoff_response break backoff *= 2 response.raise_for_status() if json: return response.json() else: return response
python
def _request_get(self, path, params=None, json=True, url=BASE_URL):
    """Perform a HTTP GET request."""
    url = urljoin(url, path)
    headers = self._get_request_headers()
    response = requests.get(url, params=params, headers=headers)

    if response.status_code >= 500:
        backoff = self._initial_backoff
        for _ in range(self._max_retries):
            time.sleep(backoff)
            backoff_response = requests.get(
                url, params=params, headers=headers,
                timeout=DEFAULT_TIMEOUT)
            if backoff_response.status_code < 500:
                response = backoff_response
                break
            backoff *= 2

    response.raise_for_status()
    if json:
        return response.json()
    else:
        return response
[ "def", "_request_get", "(", "self", ",", "path", ",", "params", "=", "None", ",", "json", "=", "True", ",", "url", "=", "BASE_URL", ")", ":", "url", "=", "urljoin", "(", "url", ",", "path", ")", "headers", "=", "self", ".", "_get_request_headers", "(", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", ">=", "500", ":", "backoff", "=", "self", ".", "_initial_backoff", "for", "_", "in", "range", "(", "self", ".", "_max_retries", ")", ":", "time", ".", "sleep", "(", "backoff", ")", "backoff_response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", "if", "backoff_response", ".", "status_code", "<", "500", ":", "response", "=", "backoff_response", "break", "backoff", "*=", "2", "response", ".", "raise_for_status", "(", ")", "if", "json", ":", "return", "response", ".", "json", "(", ")", "else", ":", "return", "response" ]
Perform a HTTP GET request.
[ "Perform", "a", "HTTP", "GET", "request", "." ]
d8eda09acddabe1a9fd9eb76b3f454fa827b5074
https://github.com/tsifrer/python-twitch-client/blob/d8eda09acddabe1a9fd9eb76b3f454fa827b5074/twitch/api/base.py#L34-L57
train
tsifrer/python-twitch-client
twitch/api/base.py
TwitchAPI._request_post
def _request_post(self, path, data=None, params=None, url=BASE_URL): """Perform a HTTP POST request..""" url = urljoin(url, path) headers = self._get_request_headers() response = requests.post( url, json=data, params=params, headers=headers, timeout=DEFAULT_TIMEOUT) response.raise_for_status() if response.status_code == 200: return response.json()
python
def _request_post(self, path, data=None, params=None, url=BASE_URL):
    """Perform a HTTP POST request.."""
    url = urljoin(url, path)
    headers = self._get_request_headers()
    response = requests.post(
        url, json=data, params=params, headers=headers,
        timeout=DEFAULT_TIMEOUT)
    response.raise_for_status()
    if response.status_code == 200:
        return response.json()
[ "def", "_request_post", "(", "self", ",", "path", ",", "data", "=", "None", ",", "params", "=", "None", ",", "url", "=", "BASE_URL", ")", ":", "url", "=", "urljoin", "(", "url", ",", "path", ")", "headers", "=", "self", ".", "_get_request_headers", "(", ")", "response", "=", "requests", ".", "post", "(", "url", ",", "json", "=", "data", ",", "params", "=", "params", ",", "headers", "=", "headers", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", "response", ".", "raise_for_status", "(", ")", "if", "response", ".", "status_code", "==", "200", ":", "return", "response", ".", "json", "(", ")" ]
Perform a HTTP POST request..
[ "Perform", "a", "HTTP", "POST", "request", ".." ]
d8eda09acddabe1a9fd9eb76b3f454fa827b5074
https://github.com/tsifrer/python-twitch-client/blob/d8eda09acddabe1a9fd9eb76b3f454fa827b5074/twitch/api/base.py#L59-L70
train
tsifrer/python-twitch-client
twitch/api/base.py
TwitchAPI._request_delete
def _request_delete(self, path, params=None, url=BASE_URL): """Perform a HTTP DELETE request.""" url = urljoin(url, path) headers = self._get_request_headers() response = requests.delete( url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT) response.raise_for_status() if response.status_code == 200: return response.json()
python
def _request_delete(self, path, params=None, url=BASE_URL):
    """Perform a HTTP DELETE request."""
    url = urljoin(url, path)
    headers = self._get_request_headers()
    response = requests.delete(
        url, params=params, headers=headers, timeout=DEFAULT_TIMEOUT)
    response.raise_for_status()
    if response.status_code == 200:
        return response.json()
[ "def", "_request_delete", "(", "self", ",", "path", ",", "params", "=", "None", ",", "url", "=", "BASE_URL", ")", ":", "url", "=", "urljoin", "(", "url", ",", "path", ")", "headers", "=", "self", ".", "_get_request_headers", "(", ")", "response", "=", "requests", ".", "delete", "(", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", "response", ".", "raise_for_status", "(", ")", "if", "response", ".", "status_code", "==", "200", ":", "return", "response", ".", "json", "(", ")" ]
Perform a HTTP DELETE request.
[ "Perform", "a", "HTTP", "DELETE", "request", "." ]
d8eda09acddabe1a9fd9eb76b3f454fa827b5074
https://github.com/tsifrer/python-twitch-client/blob/d8eda09acddabe1a9fd9eb76b3f454fa827b5074/twitch/api/base.py#L84-L94
train
jgorset/facepy
facepy/signed_request.py
SignedRequest.parse
def parse(cls, signed_request, application_secret_key): """Parse a signed request, returning a dictionary describing its payload.""" def decode(encoded): padding = '=' * (len(encoded) % 4) return base64.urlsafe_b64decode(encoded + padding) try: encoded_signature, encoded_payload = (str(string) for string in signed_request.split('.', 2)) signature = decode(encoded_signature) signed_request_data = json.loads(decode(encoded_payload).decode('utf-8')) except (TypeError, ValueError): raise SignedRequestError("Signed request had a corrupt payload") if signed_request_data.get('algorithm', '').upper() != 'HMAC-SHA256': raise SignedRequestError("Signed request is using an unknown algorithm") expected_signature = hmac.new(application_secret_key.encode('utf-8'), msg=encoded_payload.encode('utf-8'), digestmod=hashlib.sha256).digest() if signature != expected_signature: raise SignedRequestError("Signed request signature mismatch") return signed_request_data
python
def parse(cls, signed_request, application_secret_key):
    """Parse a signed request, returning a dictionary describing its payload."""
    def decode(encoded):
        padding = '=' * (len(encoded) % 4)
        return base64.urlsafe_b64decode(encoded + padding)

    try:
        encoded_signature, encoded_payload = (str(string) for string in signed_request.split('.', 2))
        signature = decode(encoded_signature)
        signed_request_data = json.loads(decode(encoded_payload).decode('utf-8'))
    except (TypeError, ValueError):
        raise SignedRequestError("Signed request had a corrupt payload")

    if signed_request_data.get('algorithm', '').upper() != 'HMAC-SHA256':
        raise SignedRequestError("Signed request is using an unknown algorithm")

    expected_signature = hmac.new(application_secret_key.encode('utf-8'),
                                  msg=encoded_payload.encode('utf-8'),
                                  digestmod=hashlib.sha256).digest()

    if signature != expected_signature:
        raise SignedRequestError("Signed request signature mismatch")

    return signed_request_data
[ "def", "parse", "(", "cls", ",", "signed_request", ",", "application_secret_key", ")", ":", "def", "decode", "(", "encoded", ")", ":", "padding", "=", "'='", "*", "(", "len", "(", "encoded", ")", "%", "4", ")", "return", "base64", ".", "urlsafe_b64decode", "(", "encoded", "+", "padding", ")", "try", ":", "encoded_signature", ",", "encoded_payload", "=", "(", "str", "(", "string", ")", "for", "string", "in", "signed_request", ".", "split", "(", "'.'", ",", "2", ")", ")", "signature", "=", "decode", "(", "encoded_signature", ")", "signed_request_data", "=", "json", ".", "loads", "(", "decode", "(", "encoded_payload", ")", ".", "decode", "(", "'utf-8'", ")", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "SignedRequestError", "(", "\"Signed request had a corrupt payload\"", ")", "if", "signed_request_data", ".", "get", "(", "'algorithm'", ",", "''", ")", ".", "upper", "(", ")", "!=", "'HMAC-SHA256'", ":", "raise", "SignedRequestError", "(", "\"Signed request is using an unknown algorithm\"", ")", "expected_signature", "=", "hmac", ".", "new", "(", "application_secret_key", ".", "encode", "(", "'utf-8'", ")", ",", "msg", "=", "encoded_payload", ".", "encode", "(", "'utf-8'", ")", ",", "digestmod", "=", "hashlib", ".", "sha256", ")", ".", "digest", "(", ")", "if", "signature", "!=", "expected_signature", ":", "raise", "SignedRequestError", "(", "\"Signed request signature mismatch\"", ")", "return", "signed_request_data" ]
Parse a signed request, returning a dictionary describing its payload.
[ "Parse", "a", "signed", "request", "returning", "a", "dictionary", "describing", "its", "payload", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/signed_request.py#L92-L113
train
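As a brief, hedged illustration of the record above (not part of the dataset): the sketch below assumes `SignedRequest` is importable from the top-level `facepy` package and uses placeholder values for the signed request string and the application secret key.
# Hypothetical usage sketch for SignedRequest.parse; import path and values are assumptions.
from facepy import SignedRequest

# In a real canvas/page-tab app this string would come from the
# `signed_request` POST parameter that Facebook sends to the app.
raw_signed_request = 'encoded-signature.encoded-payload'  # placeholder

# Returns a dict such as {'algorithm': 'HMAC-SHA256', 'user_id': ..., ...}
# or raises SignedRequestError on a corrupt payload, unknown algorithm,
# or signature mismatch, as shown in the record above.
payload = SignedRequest.parse(raw_signed_request, 'application-secret-key')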
jgorset/facepy
facepy/signed_request.py
SignedRequest.generate
def generate(self):
        """Generate a signed request from this instance."""
        payload = {
            'algorithm': 'HMAC-SHA256'
        }

        if self.data:
            payload['app_data'] = self.data

        if self.page:
            payload['page'] = {}

            if self.page.id:
                payload['page']['id'] = self.page.id

            if self.page.is_liked:
                payload['page']['liked'] = self.page.is_liked

            if self.page.is_admin:
                payload['page']['admin'] = self.page.is_admin

        if self.user:
            payload['user'] = {}

            if self.user.country:
                payload['user']['country'] = self.user.country

            if self.user.locale:
                payload['user']['locale'] = self.user.locale

            if self.user.age:
                payload['user']['age'] = {
                    'min': self.user.age[0],
                    'max': self.user.age[-1]
                }

            if self.user.oauth_token:
                if self.user.oauth_token.token:
                    payload['oauth_token'] = self.user.oauth_token.token

                if self.user.oauth_token.expires_at is None:
                    payload['expires_in'] = 0
                else:
                    payload['expires_in'] = int(time.mktime(self.user.oauth_token.expires_at.timetuple()))

                if self.user.oauth_token.issued_at:
                    payload['issued_at'] = int(time.mktime(self.user.oauth_token.issued_at.timetuple()))

            if self.user.id:
                payload['user_id'] = self.user.id

        encoded_payload = base64.urlsafe_b64encode(
            json.dumps(payload, separators=(',', ':')).encode('utf-8')
        )

        encoded_signature = base64.urlsafe_b64encode(hmac.new(
            self.application_secret_key.encode('utf-8'), encoded_payload, hashlib.sha256
        ).digest())

        return '%(signature)s.%(payload)s' % {
            'signature': encoded_signature,
            'payload': encoded_payload
        }
python
def generate(self):
        """Generate a signed request from this instance."""
        payload = {
            'algorithm': 'HMAC-SHA256'
        }

        if self.data:
            payload['app_data'] = self.data

        if self.page:
            payload['page'] = {}

            if self.page.id:
                payload['page']['id'] = self.page.id

            if self.page.is_liked:
                payload['page']['liked'] = self.page.is_liked

            if self.page.is_admin:
                payload['page']['admin'] = self.page.is_admin

        if self.user:
            payload['user'] = {}

            if self.user.country:
                payload['user']['country'] = self.user.country

            if self.user.locale:
                payload['user']['locale'] = self.user.locale

            if self.user.age:
                payload['user']['age'] = {
                    'min': self.user.age[0],
                    'max': self.user.age[-1]
                }

            if self.user.oauth_token:
                if self.user.oauth_token.token:
                    payload['oauth_token'] = self.user.oauth_token.token

                if self.user.oauth_token.expires_at is None:
                    payload['expires_in'] = 0
                else:
                    payload['expires_in'] = int(time.mktime(self.user.oauth_token.expires_at.timetuple()))

                if self.user.oauth_token.issued_at:
                    payload['issued_at'] = int(time.mktime(self.user.oauth_token.issued_at.timetuple()))

            if self.user.id:
                payload['user_id'] = self.user.id

        encoded_payload = base64.urlsafe_b64encode(
            json.dumps(payload, separators=(',', ':')).encode('utf-8')
        )

        encoded_signature = base64.urlsafe_b64encode(hmac.new(
            self.application_secret_key.encode('utf-8'), encoded_payload, hashlib.sha256
        ).digest())

        return '%(signature)s.%(payload)s' % {
            'signature': encoded_signature,
            'payload': encoded_payload
        }
[ "def", "generate", "(", "self", ")", ":", "payload", "=", "{", "'algorithm'", ":", "'HMAC-SHA256'", "}", "if", "self", ".", "data", ":", "payload", "[", "'app_data'", "]", "=", "self", ".", "data", "if", "self", ".", "page", ":", "payload", "[", "'page'", "]", "=", "{", "}", "if", "self", ".", "page", ".", "id", ":", "payload", "[", "'page'", "]", "[", "'id'", "]", "=", "self", ".", "page", ".", "id", "if", "self", ".", "page", ".", "is_liked", ":", "payload", "[", "'page'", "]", "[", "'liked'", "]", "=", "self", ".", "page", ".", "is_liked", "if", "self", ".", "page", ".", "is_admin", ":", "payload", "[", "'page'", "]", "[", "'admin'", "]", "=", "self", ".", "page", ".", "is_admin", "if", "self", ".", "user", ":", "payload", "[", "'user'", "]", "=", "{", "}", "if", "self", ".", "user", ".", "country", ":", "payload", "[", "'user'", "]", "[", "'country'", "]", "=", "self", ".", "user", ".", "country", "if", "self", ".", "user", ".", "locale", ":", "payload", "[", "'user'", "]", "[", "'locale'", "]", "=", "self", ".", "user", ".", "locale", "if", "self", ".", "user", ".", "age", ":", "payload", "[", "'user'", "]", "[", "'age'", "]", "=", "{", "'min'", ":", "self", ".", "user", ".", "age", "[", "0", "]", ",", "'max'", ":", "self", ".", "user", ".", "age", "[", "-", "1", "]", "}", "if", "self", ".", "user", ".", "oauth_token", ":", "if", "self", ".", "user", ".", "oauth_token", ".", "token", ":", "payload", "[", "'oauth_token'", "]", "=", "self", ".", "user", ".", "oauth_token", ".", "token", "if", "self", ".", "user", ".", "oauth_token", ".", "expires_at", "is", "None", ":", "payload", "[", "'expires_in'", "]", "=", "0", "else", ":", "payload", "[", "'expires_in'", "]", "=", "int", "(", "time", ".", "mktime", "(", "self", ".", "user", ".", "oauth_token", ".", "expires_at", ".", "timetuple", "(", ")", ")", ")", "if", "self", ".", "user", ".", "oauth_token", ".", "issued_at", ":", "payload", "[", "'issued_at'", "]", "=", "int", "(", "time", ".", "mktime", "(", "self", ".", "user", ".", "oauth_token", ".", "issued_at", ".", "timetuple", "(", ")", ")", ")", "if", "self", ".", "user", ".", "id", ":", "payload", "[", "'user_id'", "]", "=", "self", ".", "user", ".", "id", "encoded_payload", "=", "base64", ".", "urlsafe_b64encode", "(", "json", ".", "dumps", "(", "payload", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", ".", "encode", "(", "'utf-8'", ")", ")", "encoded_signature", "=", "base64", ".", "urlsafe_b64encode", "(", "hmac", ".", "new", "(", "self", ".", "application_secret_key", ".", "encode", "(", "'utf-8'", ")", ",", "encoded_payload", ",", "hashlib", ".", "sha256", ")", ".", "digest", "(", ")", ")", "return", "'%(signature)s.%(payload)s'", "%", "{", "'signature'", ":", "encoded_signature", ",", "'payload'", ":", "encoded_payload", "}" ]
Generate a signed request from this instance.
[ "Generate", "a", "signed", "request", "from", "this", "instance", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/signed_request.py#L117-L182
train
jgorset/facepy
facepy/graph_api.py
GraphAPI.for_application
def for_application(self, id, secret_key, api_version=None):
        """
        Initialize GraphAPI with an OAuth access token for an application.

        :param id: An integer describing a Facebook application.
        :param secret_key: A String describing the Facebook application's secret key.
        """
        from facepy.utils import get_application_access_token

        access_token = get_application_access_token(id, secret_key, api_version=api_version)

        return GraphAPI(access_token, version=api_version)
python
def for_application(self, id, secret_key, api_version=None):
        """
        Initialize GraphAPI with an OAuth access token for an application.

        :param id: An integer describing a Facebook application.
        :param secret_key: A String describing the Facebook application's secret key.
        """
        from facepy.utils import get_application_access_token

        access_token = get_application_access_token(id, secret_key, api_version=api_version)

        return GraphAPI(access_token, version=api_version)
[ "def", "for_application", "(", "self", ",", "id", ",", "secret_key", ",", "api_version", "=", "None", ")", ":", "from", "facepy", ".", "utils", "import", "get_application_access_token", "access_token", "=", "get_application_access_token", "(", "id", ",", "secret_key", ",", "api_version", "=", "api_version", ")", "return", "GraphAPI", "(", "access_token", ",", "version", "=", "api_version", ")" ]
Initialize GraphAPI with an OAuth access token for an application. :param id: An integer describing a Facebook application. :param secret_key: A String describing the Facebook application's secret key.
[ "Initialize", "GraphAPI", "with", "an", "OAuth", "access", "token", "for", "an", "application", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L43-L53
train
jgorset/facepy
facepy/graph_api.py
GraphAPI.get
def get(self, path='', page=False, retry=3, **options):
        """
        Get an item from the Graph API.

        :param path: A string describing the path to the item.
        :param page: A boolean describing whether to return a generator that
                     iterates over each page of results.
        :param retry: An integer describing how many times the request may be retried.
        :param options: Graph API parameters such as 'limit', 'offset' or 'since'.

        Floating-point numbers will be returned as :class:`decimal.Decimal` instances.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of parameters.
        """
        response = self._query(
            method='GET',
            path=path,
            data=options,
            page=page,
            retry=retry
        )

        if response is False:
            raise FacebookError('Could not get "%s".' % path)

        return response
python
def get(self, path='', page=False, retry=3, **options):
        """
        Get an item from the Graph API.

        :param path: A string describing the path to the item.
        :param page: A boolean describing whether to return a generator that
                     iterates over each page of results.
        :param retry: An integer describing how many times the request may be retried.
        :param options: Graph API parameters such as 'limit', 'offset' or 'since'.

        Floating-point numbers will be returned as :class:`decimal.Decimal` instances.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of parameters.
        """
        response = self._query(
            method='GET',
            path=path,
            data=options,
            page=page,
            retry=retry
        )

        if response is False:
            raise FacebookError('Could not get "%s".' % path)

        return response
[ "def", "get", "(", "self", ",", "path", "=", "''", ",", "page", "=", "False", ",", "retry", "=", "3", ",", "*", "*", "options", ")", ":", "response", "=", "self", ".", "_query", "(", "method", "=", "'GET'", ",", "path", "=", "path", ",", "data", "=", "options", ",", "page", "=", "page", ",", "retry", "=", "retry", ")", "if", "response", "is", "False", ":", "raise", "FacebookError", "(", "'Could not get \"%s\".'", "%", "path", ")", "return", "response" ]
Get an item from the Graph API. :param path: A string describing the path to the item. :param page: A boolean describing whether to return a generator that iterates over each page of results. :param retry: An integer describing how many times the request may be retried. :param options: Graph API parameters such as 'limit', 'offset' or 'since'. Floating-point numbers will be returned as :class:`decimal.Decimal` instances. See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_ for an exhaustive list of parameters.
[ "Get", "an", "item", "from", "the", "Graph", "API", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L55-L82
train
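As a brief, hedged illustration of the record above (not part of the dataset): the sketch below assumes `GraphAPI` is importable from the top-level `facepy` package and uses a placeholder access token.
# Hypothetical usage sketch for GraphAPI.get; import path and token are assumptions.
from facepy import GraphAPI

graph = GraphAPI('user-or-app-access-token')  # placeholder token

me = graph.get('me')                       # single item, parsed into a dict
posts = graph.get('me/posts', page=True)   # generator over pages, per the `page` flag above
for page_of_posts in posts:
    print(page_of_posts)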
jgorset/facepy
facepy/graph_api.py
GraphAPI.post
def post(self, path='', retry=0, **data):
        """
        Post an item to the Graph API.

        :param path: A string describing the path to the item.
        :param retry: An integer describing how many times the request may be retried.
        :param data: Graph API parameters such as 'message' or 'source'.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of options.
        """
        response = self._query(
            method='POST',
            path=path,
            data=data,
            retry=retry
        )

        if response is False:
            raise FacebookError('Could not post to "%s"' % path)

        return response
python
def post(self, path='', retry=0, **data):
        """
        Post an item to the Graph API.

        :param path: A string describing the path to the item.
        :param retry: An integer describing how many times the request may be retried.
        :param data: Graph API parameters such as 'message' or 'source'.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of options.
        """
        response = self._query(
            method='POST',
            path=path,
            data=data,
            retry=retry
        )

        if response is False:
            raise FacebookError('Could not post to "%s"' % path)

        return response
[ "def", "post", "(", "self", ",", "path", "=", "''", ",", "retry", "=", "0", ",", "*", "*", "data", ")", ":", "response", "=", "self", ".", "_query", "(", "method", "=", "'POST'", ",", "path", "=", "path", ",", "data", "=", "data", ",", "retry", "=", "retry", ")", "if", "response", "is", "False", ":", "raise", "FacebookError", "(", "'Could not post to \"%s\"'", "%", "path", ")", "return", "response" ]
Post an item to the Graph API. :param path: A string describing the path to the item. :param retry: An integer describing how many times the request may be retried. :param data: Graph API parameters such as 'message' or 'source'. See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_ for an exhaustive list of options.
[ "Post", "an", "item", "to", "the", "Graph", "API", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L84-L105
train
jgorset/facepy
facepy/graph_api.py
GraphAPI.search
def search(self, term, type='place', page=False, retry=3, **options):
        """
        Search for an item in the Graph API.

        :param term: A string describing the search term.
        :param type: A string describing the type of items to search for.
        :param page: A boolean describing whether to return a generator that
                     iterates over each page of results.
        :param retry: An integer describing how many times the request may be retried.
        :param options: Graph API parameters, such as 'center' and 'distance'.

        Supported types are only ``place`` since Graph API 2.0.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of options.
        """
        if type != 'place':
            raise ValueError('Unsupported type "%s". The only supported type is "place" since Graph API 2.0.' % type)

        options = dict({
            'q': term,
            'type': type,
        }, **options)

        response = self._query('GET', 'search', options, page, retry)

        return response
python
def search(self, term, type='place', page=False, retry=3, **options):
        """
        Search for an item in the Graph API.

        :param term: A string describing the search term.
        :param type: A string describing the type of items to search for.
        :param page: A boolean describing whether to return a generator that
                     iterates over each page of results.
        :param retry: An integer describing how many times the request may be retried.
        :param options: Graph API parameters, such as 'center' and 'distance'.

        Supported types are only ``place`` since Graph API 2.0.

        See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
        for an exhaustive list of options.
        """
        if type != 'place':
            raise ValueError('Unsupported type "%s". The only supported type is "place" since Graph API 2.0.' % type)

        options = dict({
            'q': term,
            'type': type,
        }, **options)

        response = self._query('GET', 'search', options, page, retry)

        return response
[ "def", "search", "(", "self", ",", "term", ",", "type", "=", "'place'", ",", "page", "=", "False", ",", "retry", "=", "3", ",", "*", "*", "options", ")", ":", "if", "type", "!=", "'place'", ":", "raise", "ValueError", "(", "'Unsupported type \"%s\". The only supported type is \"place\" since Graph API 2.0.'", "%", "type", ")", "options", "=", "dict", "(", "{", "'q'", ":", "term", ",", "'type'", ":", "type", ",", "}", ",", "*", "*", "options", ")", "response", "=", "self", ".", "_query", "(", "'GET'", ",", "'search'", ",", "options", ",", "page", ",", "retry", ")", "return", "response" ]
Search for an item in the Graph API. :param term: A string describing the search term. :param type: A string describing the type of items to search for. :param page: A boolean describing whether to return a generator that iterates over each page of results. :param retry: An integer describing how many times the request may be retried. :param options: Graph API parameters, such as 'center' and 'distance'. Supported types are only ``place`` since Graph API 2.0. See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_ for an exhaustive list of options.
[ "Search", "for", "an", "item", "in", "the", "Graph", "API", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L130-L157
train
jgorset/facepy
facepy/graph_api.py
GraphAPI.batch
def batch(self, requests):
        """
        Make a batch request.

        :param requests: A list of dictionaries with keys 'method', 'relative_url' and optionally 'body'.

        Yields a list of responses and/or exceptions.
        """
        for request in requests:
            if 'body' in request:
                request['body'] = urlencode(request['body'])

        def _grouper(complete_list, n=1):
            """
            Batches a list into constant size chunks.

            :param complete_list: A input list (not a generator).
            :param n: The size of the chunk.

            Adapted from <http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python>
            """
            for i in range(0, len(complete_list), n):
                yield complete_list[i:i + n]

        responses = []

        # Maximum batch size for Facebook is 50 so split up requests
        # https://developers.facebook.com/docs/graph-api/making-multiple-requests/#limits
        for group in _grouper(requests, 50):
            responses += self.post(
                batch=json.dumps(group)
            )

        for response, request in zip(responses, requests):
            # Facilitate for empty Graph API responses.
            #
            # https://github.com/jgorset/facepy/pull/30
            if not response:
                yield None
                continue

            try:
                yield self._parse(response['body'])
            except FacepyError as exception:
                exception.request = request
                yield exception
python
def batch(self, requests):
        """
        Make a batch request.

        :param requests: A list of dictionaries with keys 'method', 'relative_url' and optionally 'body'.

        Yields a list of responses and/or exceptions.
        """
        for request in requests:
            if 'body' in request:
                request['body'] = urlencode(request['body'])

        def _grouper(complete_list, n=1):
            """
            Batches a list into constant size chunks.

            :param complete_list: A input list (not a generator).
            :param n: The size of the chunk.

            Adapted from <http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python>
            """
            for i in range(0, len(complete_list), n):
                yield complete_list[i:i + n]

        responses = []

        # Maximum batch size for Facebook is 50 so split up requests
        # https://developers.facebook.com/docs/graph-api/making-multiple-requests/#limits
        for group in _grouper(requests, 50):
            responses += self.post(
                batch=json.dumps(group)
            )

        for response, request in zip(responses, requests):
            # Facilitate for empty Graph API responses.
            #
            # https://github.com/jgorset/facepy/pull/30
            if not response:
                yield None
                continue

            try:
                yield self._parse(response['body'])
            except FacepyError as exception:
                exception.request = request
                yield exception
[ "def", "batch", "(", "self", ",", "requests", ")", ":", "for", "request", "in", "requests", ":", "if", "'body'", "in", "request", ":", "request", "[", "'body'", "]", "=", "urlencode", "(", "request", "[", "'body'", "]", ")", "def", "_grouper", "(", "complete_list", ",", "n", "=", "1", ")", ":", "\"\"\"\n Batches a list into constant size chunks.\n\n :param complete_list: A input list (not a generator).\n :param n: The size of the chunk.\n\n Adapted from <http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python>\n \"\"\"", "for", "i", "in", "range", "(", "0", ",", "len", "(", "complete_list", ")", ",", "n", ")", ":", "yield", "complete_list", "[", "i", ":", "i", "+", "n", "]", "responses", "=", "[", "]", "# Maximum batch size for Facebook is 50 so split up requests", "# https://developers.facebook.com/docs/graph-api/making-multiple-requests/#limits", "for", "group", "in", "_grouper", "(", "requests", ",", "50", ")", ":", "responses", "+=", "self", ".", "post", "(", "batch", "=", "json", ".", "dumps", "(", "group", ")", ")", "for", "response", ",", "request", "in", "zip", "(", "responses", ",", "requests", ")", ":", "# Facilitate for empty Graph API responses.", "#", "# https://github.com/jgorset/facepy/pull/30", "if", "not", "response", ":", "yield", "None", "continue", "try", ":", "yield", "self", ".", "_parse", "(", "response", "[", "'body'", "]", ")", "except", "FacepyError", "as", "exception", ":", "exception", ".", "request", "=", "request", "yield", "exception" ]
Make a batch request. :param requests: A list of dictionaries with keys 'method', 'relative_url' and optionally 'body'. Yields a list of responses and/or exceptions.
[ "Make", "a", "batch", "request", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L159-L207
train
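As a brief, hedged illustration of the record above (not part of the dataset): the sketch below assumes a `GraphAPI` instance as in the earlier sketch; the request paths and message are placeholders.
# Hypothetical usage sketch for GraphAPI.batch; import path and values are assumptions.
from facepy import GraphAPI

graph = GraphAPI('user-or-app-access-token')  # placeholder token

batch_requests = [
    {'method': 'GET', 'relative_url': 'me'},
    {'method': 'POST', 'relative_url': 'me/feed', 'body': {'message': 'Hello'}},
]

# `batch` yields parsed responses, `None` for empty responses, or exception
# instances (with `.request` attached) for requests that failed.
for result in graph.batch(batch_requests):
    if isinstance(result, Exception):
        print('failed:', result.request)
    else:
        print(result)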
jgorset/facepy
facepy/graph_api.py
GraphAPI._parse
def _parse(self, data):
        """
        Parse the response from Facebook's Graph API.

        :param data: A string describing the Graph API's response.
        """
        if type(data) == type(bytes()):
            try:
                data = data.decode('utf-8')
            except UnicodeDecodeError:
                return data

        try:
            data = json.loads(data, parse_float=Decimal)
        except ValueError:
            return data

        # Facebook's Graph API sometimes responds with 'true' or 'false'. Facebook offers no documentation
        # as to the prerequisites for this type of response, though it seems that it responds with 'true'
        # when objects are successfully deleted and 'false' upon attempting to delete or access an item that
        # one does not have access to.
        #
        # For example, the API would respond with 'false' upon attempting to query a feed item without having
        # the 'read_stream' extended permission. If you were to query the entire feed, however, it would respond
        # with an empty list instead.
        #
        # Genius.
        #
        # We'll handle this discrepancy as gracefully as we can by implementing logic to deal with this behavior
        # in the high-level access functions (get, post, delete etc.).
        if type(data) is dict:
            if 'error' in data:
                error = data['error']

                if error.get('type') == "OAuthException":
                    exception = OAuthError
                else:
                    exception = FacebookError

                raise exception(**self._get_error_params(data))

            # Facebook occasionally reports errors in its legacy error format.
            if 'error_msg' in data:
                raise FacebookError(**self._get_error_params(data))

        return data
python
def _parse(self, data):
        """
        Parse the response from Facebook's Graph API.

        :param data: A string describing the Graph API's response.
        """
        if type(data) == type(bytes()):
            try:
                data = data.decode('utf-8')
            except UnicodeDecodeError:
                return data

        try:
            data = json.loads(data, parse_float=Decimal)
        except ValueError:
            return data

        # Facebook's Graph API sometimes responds with 'true' or 'false'. Facebook offers no documentation
        # as to the prerequisites for this type of response, though it seems that it responds with 'true'
        # when objects are successfully deleted and 'false' upon attempting to delete or access an item that
        # one does not have access to.
        #
        # For example, the API would respond with 'false' upon attempting to query a feed item without having
        # the 'read_stream' extended permission. If you were to query the entire feed, however, it would respond
        # with an empty list instead.
        #
        # Genius.
        #
        # We'll handle this discrepancy as gracefully as we can by implementing logic to deal with this behavior
        # in the high-level access functions (get, post, delete etc.).
        if type(data) is dict:
            if 'error' in data:
                error = data['error']

                if error.get('type') == "OAuthException":
                    exception = OAuthError
                else:
                    exception = FacebookError

                raise exception(**self._get_error_params(data))

            # Facebook occasionally reports errors in its legacy error format.
            if 'error_msg' in data:
                raise FacebookError(**self._get_error_params(data))

        return data
[ "def", "_parse", "(", "self", ",", "data", ")", ":", "if", "type", "(", "data", ")", "==", "type", "(", "bytes", "(", ")", ")", ":", "try", ":", "data", "=", "data", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "return", "data", "try", ":", "data", "=", "json", ".", "loads", "(", "data", ",", "parse_float", "=", "Decimal", ")", "except", "ValueError", ":", "return", "data", "# Facebook's Graph API sometimes responds with 'true' or 'false'. Facebook offers no documentation", "# as to the prerequisites for this type of response, though it seems that it responds with 'true'", "# when objects are successfully deleted and 'false' upon attempting to delete or access an item that", "# one does not have access to.", "#", "# For example, the API would respond with 'false' upon attempting to query a feed item without having", "# the 'read_stream' extended permission. If you were to query the entire feed, however, it would respond", "# with an empty list instead.", "#", "# Genius.", "#", "# We'll handle this discrepancy as gracefully as we can by implementing logic to deal with this behavior", "# in the high-level access functions (get, post, delete etc.).", "if", "type", "(", "data", ")", "is", "dict", ":", "if", "'error'", "in", "data", ":", "error", "=", "data", "[", "'error'", "]", "if", "error", ".", "get", "(", "'type'", ")", "==", "\"OAuthException\"", ":", "exception", "=", "OAuthError", "else", ":", "exception", "=", "FacebookError", "raise", "exception", "(", "*", "*", "self", ".", "_get_error_params", "(", "data", ")", ")", "# Facebook occasionally reports errors in its legacy error format.", "if", "'error_msg'", "in", "data", ":", "raise", "FacebookError", "(", "*", "*", "self", ".", "_get_error_params", "(", "data", ")", ")", "return", "data" ]
Parse the response from Facebook's Graph API. :param data: A string describing the Graph API's response.
[ "Parse", "the", "response", "from", "Facebook", "s", "Graph", "API", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/graph_api.py#L372-L417
train
jgorset/facepy
facepy/utils.py
get_extended_access_token
def get_extended_access_token(access_token, application_id, application_secret_key, api_version=None):
    """
    Get an extended OAuth access token.

    :param access_token: A string describing an OAuth access token.
    :param application_id: An integer describing the Facebook application's ID.
    :param application_secret_key: A string describing the Facebook application's secret key.

    Returns a tuple with a string describing the extended access token and a datetime instance
    describing when it expires.
    """
    graph = GraphAPI(version=api_version)

    response = graph.get(
        path='oauth/access_token',
        client_id=application_id,
        client_secret=application_secret_key,
        grant_type='fb_exchange_token',
        fb_exchange_token=access_token
    )

    try:
        components = parse_qs(response)
    except AttributeError:  # api_version >= 2.3 returns a dict
        return response['access_token'], None

    token = components['access_token'][0]

    try:
        expires_at = datetime.now() + timedelta(seconds=int(components['expires'][0]))
    except KeyError:  # there is no expiration
        expires_at = None

    return token, expires_at
python
def get_extended_access_token(access_token, application_id, application_secret_key, api_version=None):
    """
    Get an extended OAuth access token.

    :param access_token: A string describing an OAuth access token.
    :param application_id: An integer describing the Facebook application's ID.
    :param application_secret_key: A string describing the Facebook application's secret key.

    Returns a tuple with a string describing the extended access token and a datetime instance
    describing when it expires.
    """
    graph = GraphAPI(version=api_version)

    response = graph.get(
        path='oauth/access_token',
        client_id=application_id,
        client_secret=application_secret_key,
        grant_type='fb_exchange_token',
        fb_exchange_token=access_token
    )

    try:
        components = parse_qs(response)
    except AttributeError:  # api_version >= 2.3 returns a dict
        return response['access_token'], None

    token = components['access_token'][0]

    try:
        expires_at = datetime.now() + timedelta(seconds=int(components['expires'][0]))
    except KeyError:  # there is no expiration
        expires_at = None

    return token, expires_at
[ "def", "get_extended_access_token", "(", "access_token", ",", "application_id", ",", "application_secret_key", ",", "api_version", "=", "None", ")", ":", "graph", "=", "GraphAPI", "(", "version", "=", "api_version", ")", "response", "=", "graph", ".", "get", "(", "path", "=", "'oauth/access_token'", ",", "client_id", "=", "application_id", ",", "client_secret", "=", "application_secret_key", ",", "grant_type", "=", "'fb_exchange_token'", ",", "fb_exchange_token", "=", "access_token", ")", "try", ":", "components", "=", "parse_qs", "(", "response", ")", "except", "AttributeError", ":", "# api_version >= 2.3 returns a dict", "return", "response", "[", "'access_token'", "]", ",", "None", "token", "=", "components", "[", "'access_token'", "]", "[", "0", "]", "try", ":", "expires_at", "=", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "int", "(", "components", "[", "'expires'", "]", "[", "0", "]", ")", ")", "except", "KeyError", ":", "# there is no expiration", "expires_at", "=", "None", "return", "token", ",", "expires_at" ]
Get an extended OAuth access token. :param access_token: A string describing an OAuth access token. :param application_id: An integer describing the Facebook application's ID. :param application_secret_key: A string describing the Facebook application's secret key. Returns a tuple with a string describing the extended access token and a datetime instance describing when it expires.
[ "Get", "an", "extended", "OAuth", "access", "token", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/utils.py#L11-L44
train
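As a brief, hedged illustration of the record above (not part of the dataset): the module path `facepy.utils` is taken from the record itself; the token, application id and secret are placeholders.
# Hypothetical usage sketch for get_extended_access_token; values are assumptions.
from facepy.utils import get_extended_access_token

long_lived_token, expires_at = get_extended_access_token(
    'short-lived-user-token',   # placeholder OAuth token
    '1234567890',               # placeholder application id
    'application-secret-key',   # placeholder secret key
)
# `expires_at` is a datetime, or None when no expiration is reported
# (or when api_version >= 2.3 makes the Graph API return a dict).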
jgorset/facepy
facepy/utils.py
get_application_access_token
def get_application_access_token(application_id, application_secret_key, api_version=None):
    """
    Get an OAuth access token for the given application.

    :param application_id: An integer describing a Facebook application's ID.
    :param application_secret_key: A string describing a Facebook application's secret key.
    """
    graph = GraphAPI(version=api_version)

    response = graph.get(
        path='oauth/access_token',
        client_id=application_id,
        client_secret=application_secret_key,
        grant_type='client_credentials'
    )

    try:
        data = parse_qs(response)
        try:
            return data['access_token'][0]
        except KeyError:
            raise GraphAPI.FacebookError('No access token given')
    except AttributeError:  # api_version >= 2.3 returns a dict
        return response['access_token'], None
python
def get_application_access_token(application_id, application_secret_key, api_version=None):
    """
    Get an OAuth access token for the given application.

    :param application_id: An integer describing a Facebook application's ID.
    :param application_secret_key: A string describing a Facebook application's secret key.
    """
    graph = GraphAPI(version=api_version)

    response = graph.get(
        path='oauth/access_token',
        client_id=application_id,
        client_secret=application_secret_key,
        grant_type='client_credentials'
    )

    try:
        data = parse_qs(response)
        try:
            return data['access_token'][0]
        except KeyError:
            raise GraphAPI.FacebookError('No access token given')
    except AttributeError:  # api_version >= 2.3 returns a dict
        return response['access_token'], None
[ "def", "get_application_access_token", "(", "application_id", ",", "application_secret_key", ",", "api_version", "=", "None", ")", ":", "graph", "=", "GraphAPI", "(", "version", "=", "api_version", ")", "response", "=", "graph", ".", "get", "(", "path", "=", "'oauth/access_token'", ",", "client_id", "=", "application_id", ",", "client_secret", "=", "application_secret_key", ",", "grant_type", "=", "'client_credentials'", ")", "try", ":", "data", "=", "parse_qs", "(", "response", ")", "try", ":", "return", "data", "[", "'access_token'", "]", "[", "0", "]", "except", "KeyError", ":", "raise", "GraphAPI", ".", "FacebookError", "(", "'No access token given'", ")", "except", "AttributeError", ":", "# api_version >= 2.3 returns a dict", "return", "response", "[", "'access_token'", "]", ",", "None" ]
Get an OAuth access token for the given application. :param application_id: An integer describing a Facebook application's ID. :param application_secret_key: A string describing a Facebook application's secret key.
[ "Get", "an", "OAuth", "access", "token", "for", "the", "given", "application", "." ]
1be3ee21389fb2db543927a2f4ffa949faec4242
https://github.com/jgorset/facepy/blob/1be3ee21389fb2db543927a2f4ffa949faec4242/facepy/utils.py#L47-L71
train
ionelmc/python-redis-lock
src/redis_lock/django_cache.py
RedisCache.locked_get_or_set
def locked_get_or_set(self, key, value_creator, version=None, expire=None, id=None,
                          lock_key=None, timeout=DEFAULT_TIMEOUT):
        """
        Fetch a given key from the cache. If the key does not exist, the key is added and
        set to the value returned when calling `value_creator`. The creator function
        is invoked inside of a lock.
        """
        if lock_key is None:
            lock_key = 'get_or_set:' + key

        val = self.get(key, version=version)
        if val is not None:
            return val

        with self.lock(lock_key, expire=expire, id=id):
            # Was the value set while we were trying to acquire the lock?
            val = self.get(key, version=version)
            if val is not None:
                return val

            # Nope, create value now.
            val = value_creator()

            if val is None:
                raise ValueError('`value_creator` must return a value')

            self.set(key, val, timeout=timeout, version=version)

            return val
python
def locked_get_or_set(self, key, value_creator, version=None, expire=None, id=None,
                          lock_key=None, timeout=DEFAULT_TIMEOUT):
        """
        Fetch a given key from the cache. If the key does not exist, the key is added and
        set to the value returned when calling `value_creator`. The creator function
        is invoked inside of a lock.
        """
        if lock_key is None:
            lock_key = 'get_or_set:' + key

        val = self.get(key, version=version)
        if val is not None:
            return val

        with self.lock(lock_key, expire=expire, id=id):
            # Was the value set while we were trying to acquire the lock?
            val = self.get(key, version=version)
            if val is not None:
                return val

            # Nope, create value now.
            val = value_creator()

            if val is None:
                raise ValueError('`value_creator` must return a value')

            self.set(key, val, timeout=timeout, version=version)

            return val
[ "def", "locked_get_or_set", "(", "self", ",", "key", ",", "value_creator", ",", "version", "=", "None", ",", "expire", "=", "None", ",", "id", "=", "None", ",", "lock_key", "=", "None", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "if", "lock_key", "is", "None", ":", "lock_key", "=", "'get_or_set:'", "+", "key", "val", "=", "self", ".", "get", "(", "key", ",", "version", "=", "version", ")", "if", "val", "is", "not", "None", ":", "return", "val", "with", "self", ".", "lock", "(", "lock_key", ",", "expire", "=", "expire", ",", "id", "=", "id", ")", ":", "# Was the value set while we were trying to acquire the lock?", "val", "=", "self", ".", "get", "(", "key", ",", "version", "=", "version", ")", "if", "val", "is", "not", "None", ":", "return", "val", "# Nope, create value now.", "val", "=", "value_creator", "(", ")", "if", "val", "is", "None", ":", "raise", "ValueError", "(", "'`value_creator` must return a value'", ")", "self", ".", "set", "(", "key", ",", "val", ",", "timeout", "=", "timeout", ",", "version", "=", "version", ")", "return", "val" ]
Fetch a given key from the cache. If the key does not exist, the key is added and set to the value returned when calling `value_creator`. The creator function is invoked inside of a lock.
[ "Fetch", "a", "given", "key", "from", "the", "cache", ".", "If", "the", "key", "does", "not", "exist", "the", "key", "is", "added", "and", "set", "to", "the", "value", "returned", "when", "calling", "value_creator", ".", "The", "creator", "function", "is", "invoked", "inside", "of", "a", "lock", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/django_cache.py#L23-L51
train
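As a brief, hedged illustration of the record above (not part of the dataset): the sketch assumes a Django project whose 'default' cache alias is configured to use the `redis_lock.django_cache.RedisCache` backend documented in this record; the key name and creator function are made up.
# Hypothetical usage sketch for RedisCache.locked_get_or_set; configuration is assumed.
from django.core.cache import caches

cache = caches['default']  # assumed to be a redis_lock.django_cache.RedisCache instance

def build_report():
    # Placeholder for an expensive computation; must not return None.
    return {'rows': 42}

report = cache.locked_get_or_set(
    'reports:daily',   # cache key (made-up name)
    build_report,      # invoked only by the process that holds the lock
    timeout=300,       # how long the cached value lives
    expire=60,         # how long the lock itself may be held
)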
ionelmc/python-redis-lock
src/redis_lock/__init__.py
_eval_script
def _eval_script(redis, script_id, *keys, **kwargs):
    """Tries to call ``EVALSHA`` with the `hash`
    and then, if it fails, calls regular ``EVAL`` with the `script`.
    """
    args = kwargs.pop('args', ())
    if kwargs:
        raise TypeError("Unexpected keyword arguments %s" % kwargs.keys())
    try:
        return redis.evalsha(SCRIPTS[script_id], len(keys), *keys + args)
    except NoScriptError:
        logger.info("%s not cached.", SCRIPTS[script_id + 2])
        return redis.eval(SCRIPTS[script_id + 1], len(keys), *keys + args)
python
def _eval_script(redis, script_id, *keys, **kwargs):
    """Tries to call ``EVALSHA`` with the `hash`
    and then, if it fails, calls regular ``EVAL`` with the `script`.
    """
    args = kwargs.pop('args', ())
    if kwargs:
        raise TypeError("Unexpected keyword arguments %s" % kwargs.keys())
    try:
        return redis.evalsha(SCRIPTS[script_id], len(keys), *keys + args)
    except NoScriptError:
        logger.info("%s not cached.", SCRIPTS[script_id + 2])
        return redis.eval(SCRIPTS[script_id + 1], len(keys), *keys + args)
[ "def", "_eval_script", "(", "redis", ",", "script_id", ",", "*", "keys", ",", "*", "*", "kwargs", ")", ":", "args", "=", "kwargs", ".", "pop", "(", "'args'", ",", "(", ")", ")", "if", "kwargs", ":", "raise", "TypeError", "(", "\"Unexpected keyword arguments %s\"", "%", "kwargs", ".", "keys", "(", ")", ")", "try", ":", "return", "redis", ".", "evalsha", "(", "SCRIPTS", "[", "script_id", "]", ",", "len", "(", "keys", ")", ",", "*", "keys", "+", "args", ")", "except", "NoScriptError", ":", "logger", ".", "info", "(", "\"%s not cached.\"", ",", "SCRIPTS", "[", "script_id", "+", "2", "]", ")", "return", "redis", ".", "eval", "(", "SCRIPTS", "[", "script_id", "+", "1", "]", ",", "len", "(", "keys", ")", ",", "*", "keys", "+", "args", ")" ]
Tries to call ``EVALSHA`` with the `hash` and then, if it fails, calls regular ``EVAL`` with the `script`.
[ "Tries", "to", "call", "EVALSHA", "with", "the", "hash", "and", "then", "if", "it", "fails", "calls", "regular", "EVAL", "with", "the", "script", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L129-L140
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock.reset
def reset(self):
        """
        Forcibly deletes the lock. Use this with care.
        """
        _eval_script(self._client, RESET, self._name, self._signal)
        self._delete_signal()
python
def reset(self):
        """
        Forcibly deletes the lock. Use this with care.
        """
        _eval_script(self._client, RESET, self._name, self._signal)
        self._delete_signal()
[ "def", "reset", "(", "self", ")", ":", "_eval_script", "(", "self", ".", "_client", ",", "RESET", ",", "self", ".", "_name", ",", "self", ".", "_signal", ")", "self", ".", "_delete_signal", "(", ")" ]
Forcibly deletes the lock. Use this with care.
[ "Forcibly", "deletes", "the", "lock", ".", "Use", "this", "with", "care", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L205-L210
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock.extend
def extend(self, expire=None):
        """Extends expiration time of the lock.

        :param expire:
            New expiration time. If ``None`` - `expire` provided during
            lock initialization will be taken.
        """
        if expire is None:
            if self._expire is not None:
                expire = self._expire
            else:
                raise TypeError(
                    "To extend a lock 'expire' must be provided as an "
                    "argument to extend() method or at initialization time."
                )

        error = _eval_script(self._client, EXTEND, self._name, args=(expire, self._id))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error == 2:
            raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
python
def extend(self, expire=None):
        """Extends expiration time of the lock.

        :param expire:
            New expiration time. If ``None`` - `expire` provided during
            lock initialization will be taken.
        """
        if expire is None:
            if self._expire is not None:
                expire = self._expire
            else:
                raise TypeError(
                    "To extend a lock 'expire' must be provided as an "
                    "argument to extend() method or at initialization time."
                )

        error = _eval_script(self._client, EXTEND, self._name, args=(expire, self._id))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error == 2:
            raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
[ "def", "extend", "(", "self", ",", "expire", "=", "None", ")", ":", "if", "expire", "is", "None", ":", "if", "self", ".", "_expire", "is", "not", "None", ":", "expire", "=", "self", ".", "_expire", "else", ":", "raise", "TypeError", "(", "\"To extend a lock 'expire' must be provided as an \"", "\"argument to extend() method or at initialization time.\"", ")", "error", "=", "_eval_script", "(", "self", ".", "_client", ",", "EXTEND", ",", "self", ".", "_name", ",", "args", "=", "(", "expire", ",", "self", ".", "_id", ")", ")", "if", "error", "==", "1", ":", "raise", "NotAcquired", "(", "\"Lock %s is not acquired or it already expired.\"", "%", "self", ".", "_name", ")", "elif", "error", "==", "2", ":", "raise", "NotExpirable", "(", "\"Lock %s has no assigned expiration time\"", "%", "self", ".", "_name", ")", "elif", "error", ":", "raise", "RuntimeError", "(", "\"Unsupported error code %s from EXTEND script\"", "%", "error", ")" ]
Extends expiration time of the lock. :param expire: New expiration time. If ``None`` - `expire` provided during lock initialization will be taken.
[ "Extends", "expiration", "time", "of", "the", "lock", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L263-L285
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock._lock_renewer
def _lock_renewer(lockref, interval, stop):
        """
        Renew the lock key in redis every `interval` seconds for as long as
        `self._lock_renewal_thread.should_exit` is False.
        """
        log = getLogger("%s.lock_refresher" % __name__)
        while not stop.wait(timeout=interval):
            log.debug("Refreshing lock")
            lock = lockref()
            if lock is None:
                log.debug("The lock no longer exists, "
                          "stopping lock refreshing")
                break
            lock.extend(expire=lock._expire)
            del lock
        log.debug("Exit requested, stopping lock refreshing")
python
def _lock_renewer(lockref, interval, stop):
        """
        Renew the lock key in redis every `interval` seconds for as long as
        `self._lock_renewal_thread.should_exit` is False.
        """
        log = getLogger("%s.lock_refresher" % __name__)
        while not stop.wait(timeout=interval):
            log.debug("Refreshing lock")
            lock = lockref()
            if lock is None:
                log.debug("The lock no longer exists, "
                          "stopping lock refreshing")
                break
            lock.extend(expire=lock._expire)
            del lock
        log.debug("Exit requested, stopping lock refreshing")
[ "def", "_lock_renewer", "(", "lockref", ",", "interval", ",", "stop", ")", ":", "log", "=", "getLogger", "(", "\"%s.lock_refresher\"", "%", "__name__", ")", "while", "not", "stop", ".", "wait", "(", "timeout", "=", "interval", ")", ":", "log", ".", "debug", "(", "\"Refreshing lock\"", ")", "lock", "=", "lockref", "(", ")", "if", "lock", "is", "None", ":", "log", ".", "debug", "(", "\"The lock no longer exists, \"", "\"stopping lock refreshing\"", ")", "break", "lock", ".", "extend", "(", "expire", "=", "lock", ".", "_expire", ")", "del", "lock", "log", ".", "debug", "(", "\"Exit requested, stopping lock refreshing\"", ")" ]
Renew the lock key in redis every `interval` seconds for as long as `self._lock_renewal_thread.should_exit` is False.
[ "Renew", "the", "lock", "key", "in", "redis", "every", "interval", "seconds", "for", "as", "long", "as", "self", ".", "_lock_renewal_thread", ".", "should_exit", "is", "False", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L288-L303
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock._start_lock_renewer
def _start_lock_renewer(self):
        """
        Starts the lock refresher thread.
        """
        if self._lock_renewal_thread is not None:
            raise AlreadyStarted("Lock refresh thread already started")

        logger.debug(
            "Starting thread to refresh lock every %s seconds",
            self._lock_renewal_interval
        )
        self._lock_renewal_stop = threading.Event()
        self._lock_renewal_thread = threading.Thread(
            group=None,
            target=self._lock_renewer,
            kwargs={'lockref': weakref.ref(self),
                    'interval': self._lock_renewal_interval,
                    'stop': self._lock_renewal_stop}
        )
        self._lock_renewal_thread.setDaemon(True)
        self._lock_renewal_thread.start()
python
def _start_lock_renewer(self):
        """
        Starts the lock refresher thread.
        """
        if self._lock_renewal_thread is not None:
            raise AlreadyStarted("Lock refresh thread already started")

        logger.debug(
            "Starting thread to refresh lock every %s seconds",
            self._lock_renewal_interval
        )
        self._lock_renewal_stop = threading.Event()
        self._lock_renewal_thread = threading.Thread(
            group=None,
            target=self._lock_renewer,
            kwargs={'lockref': weakref.ref(self),
                    'interval': self._lock_renewal_interval,
                    'stop': self._lock_renewal_stop}
        )
        self._lock_renewal_thread.setDaemon(True)
        self._lock_renewal_thread.start()
[ "def", "_start_lock_renewer", "(", "self", ")", ":", "if", "self", ".", "_lock_renewal_thread", "is", "not", "None", ":", "raise", "AlreadyStarted", "(", "\"Lock refresh thread already started\"", ")", "logger", ".", "debug", "(", "\"Starting thread to refresh lock every %s seconds\"", ",", "self", ".", "_lock_renewal_interval", ")", "self", ".", "_lock_renewal_stop", "=", "threading", ".", "Event", "(", ")", "self", ".", "_lock_renewal_thread", "=", "threading", ".", "Thread", "(", "group", "=", "None", ",", "target", "=", "self", ".", "_lock_renewer", ",", "kwargs", "=", "{", "'lockref'", ":", "weakref", ".", "ref", "(", "self", ")", ",", "'interval'", ":", "self", ".", "_lock_renewal_interval", ",", "'stop'", ":", "self", ".", "_lock_renewal_stop", "}", ")", "self", ".", "_lock_renewal_thread", ".", "setDaemon", "(", "True", ")", "self", ".", "_lock_renewal_thread", ".", "start", "(", ")" ]
Starts the lock refresher thread.
[ "Starts", "the", "lock", "refresher", "thread", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L305-L325
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock._stop_lock_renewer
def _stop_lock_renewer(self):
        """
        Stop the lock renewer.

        This signals the renewal thread and waits for its exit.
        """
        if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
            return
        logger.debug("Signalling the lock refresher to stop")
        self._lock_renewal_stop.set()
        self._lock_renewal_thread.join()
        self._lock_renewal_thread = None
        logger.debug("Lock refresher has stopped")
python
def _stop_lock_renewer(self):
        """
        Stop the lock renewer.

        This signals the renewal thread and waits for its exit.
        """
        if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
            return
        logger.debug("Signalling the lock refresher to stop")
        self._lock_renewal_stop.set()
        self._lock_renewal_thread.join()
        self._lock_renewal_thread = None
        logger.debug("Lock refresher has stopped")
[ "def", "_stop_lock_renewer", "(", "self", ")", ":", "if", "self", ".", "_lock_renewal_thread", "is", "None", "or", "not", "self", ".", "_lock_renewal_thread", ".", "is_alive", "(", ")", ":", "return", "logger", ".", "debug", "(", "\"Signalling the lock refresher to stop\"", ")", "self", ".", "_lock_renewal_stop", ".", "set", "(", ")", "self", ".", "_lock_renewal_thread", ".", "join", "(", ")", "self", ".", "_lock_renewal_thread", "=", "None", "logger", ".", "debug", "(", "\"Lock refresher has stopped\"", ")" ]
Stop the lock renewer. This signals the renewal thread and waits for its exit.
[ "Stop", "the", "lock", "renewer", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L327-L339
train
ionelmc/python-redis-lock
src/redis_lock/__init__.py
Lock.release
def release(self):
        """Releases the lock, that was acquired with the same object.

        .. note::

            If you want to release a lock that you acquired in a different place you have two choices:

            * Use ``Lock("name", id=id_from_other_place).release()``
            * Use ``Lock("name").reset()``
        """
        if self._lock_renewal_thread is not None:
            self._stop_lock_renewer()
        logger.debug("Releasing %r.", self._name)
        error = _eval_script(self._client, UNLOCK, self._name, self._signal, args=(self._id,))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script." % error)
        else:
            self._delete_signal()
python
def release(self):
        """Releases the lock, that was acquired with the same object.

        .. note::

            If you want to release a lock that you acquired in a different place you have two choices:

            * Use ``Lock("name", id=id_from_other_place).release()``
            * Use ``Lock("name").reset()``
        """
        if self._lock_renewal_thread is not None:
            self._stop_lock_renewer()
        logger.debug("Releasing %r.", self._name)
        error = _eval_script(self._client, UNLOCK, self._name, self._signal, args=(self._id,))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script." % error)
        else:
            self._delete_signal()
[ "def", "release", "(", "self", ")", ":", "if", "self", ".", "_lock_renewal_thread", "is", "not", "None", ":", "self", ".", "_stop_lock_renewer", "(", ")", "logger", ".", "debug", "(", "\"Releasing %r.\"", ",", "self", ".", "_name", ")", "error", "=", "_eval_script", "(", "self", ".", "_client", ",", "UNLOCK", ",", "self", ".", "_name", ",", "self", ".", "_signal", ",", "args", "=", "(", "self", ".", "_id", ",", ")", ")", "if", "error", "==", "1", ":", "raise", "NotAcquired", "(", "\"Lock %s is not acquired or it already expired.\"", "%", "self", ".", "_name", ")", "elif", "error", ":", "raise", "RuntimeError", "(", "\"Unsupported error code %s from EXTEND script.\"", "%", "error", ")", "else", ":", "self", ".", "_delete_signal", "(", ")" ]
Releases the lock, that was acquired with the same object. .. note:: If you want to release a lock that you acquired in a different place you have two choices: * Use ``Lock("name", id=id_from_other_place).release()`` * Use ``Lock("name").reset()``
[ "Releases", "the", "lock", "that", "was", "acquired", "with", "the", "same", "object", "." ]
5481cd88b64d86d318e389c79b0575a73464b1f5
https://github.com/ionelmc/python-redis-lock/blob/5481cd88b64d86d318e389c79b0575a73464b1f5/src/redis_lock/__init__.py#L349-L368
train
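As a brief, hedged illustration of the extend and release records above (not part of the dataset): the sketch assumes `Lock` is importable from the top-level `redis_lock` package and that a local Redis server is available; the lock name and expiry values are placeholders.
# Hypothetical usage sketch for redis_lock.Lock; connection and values are assumptions.
import redis
from redis_lock import Lock

conn = redis.StrictRedis()
lock = Lock(conn, "name-of-the-lock", expire=60)  # illustrative name and expiry

if lock.acquire(blocking=True):
    try:
        # ... critical section ...
        lock.extend(expire=60)   # push the expiration further out while still working
    finally:
        lock.release()           # raises NotAcquired if the lock has already expired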
mitodl/edx-api-client
edx_api/course_detail/__init__.py
CourseDetails.get_detail
def get_detail(self, course_id):
        """
        Fetches course details.

        Args:
            course_id (str): An edx course id.

        Returns:
            CourseDetail
        """
        # the request is done in behalf of the current logged in user
        resp = self._requester.get(
            urljoin(
                self._base_url,
                '/api/courses/v1/courses/{course_key}/'.format(course_key=course_id)
            )
        )

        resp.raise_for_status()

        return CourseDetail(resp.json())
python
def get_detail(self, course_id):
        """
        Fetches course details.

        Args:
            course_id (str): An edx course id.

        Returns:
            CourseDetail
        """
        # the request is done in behalf of the current logged in user
        resp = self._requester.get(
            urljoin(
                self._base_url,
                '/api/courses/v1/courses/{course_key}/'.format(course_key=course_id)
            )
        )

        resp.raise_for_status()

        return CourseDetail(resp.json())
[ "def", "get_detail", "(", "self", ",", "course_id", ")", ":", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "_requester", ".", "get", "(", "urljoin", "(", "self", ".", "_base_url", ",", "'/api/courses/v1/courses/{course_key}/'", ".", "format", "(", "course_key", "=", "course_id", ")", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "CourseDetail", "(", "resp", ".", "json", "(", ")", ")" ]
Fetches course details. Args: course_id (str): An edx course id. Returns: CourseDetail
[ "Fetches", "course", "details", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/course_detail/__init__.py#L16-L36
train
mitodl/edx-api-client
edx_api/user_info/__init__.py
UserInfo.get_user_info
def get_user_info(self):
        """
        Returns a UserInfo object for the logged in user.

        Returns:
            UserInfo: object representing the student current grades
        """
        # the request is done in behalf of the current logged in user
        resp = self.requester.get(
            urljoin(
                self.base_url,
                '/api/mobile/v0.5/my_user_info'
            )
        )

        resp.raise_for_status()

        return Info(resp.json())
python
def get_user_info(self):
        """
        Returns a UserInfo object for the logged in user.

        Returns:
            UserInfo: object representing the student current grades
        """
        # the request is done in behalf of the current logged in user
        resp = self.requester.get(
            urljoin(
                self.base_url,
                '/api/mobile/v0.5/my_user_info'
            )
        )

        resp.raise_for_status()

        return Info(resp.json())
[ "def", "get_user_info", "(", "self", ")", ":", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "'/api/mobile/v0.5/my_user_info'", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "Info", "(", "resp", ".", "json", "(", ")", ")" ]
Returns a UserInfo object for the logged in user. Returns: UserInfo: object representing the student current grades
[ "Returns", "a", "UserInfo", "object", "for", "the", "logged", "in", "user", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/user_info/__init__.py#L21-L38
train
mitodl/edx-api-client
edx_api/course_structure/__init__.py
CourseStructure.course_blocks
def course_blocks(self, course_id, username):
        """
        Fetches course blocks.

        Args:
            course_id (str): An edx course id.
            username (str): username of the user to query for (can reveal hidden modules)

        Returns:
            Structure
        """
        resp = self.requester.get(
            urljoin(self.base_url, '/api/courses/v1/blocks/'),
            params={
                "depth": "all",
                "username": username,
                "course_id": course_id,
                "requested_fields": "children,display_name,id,type,visible_to_staff_only",
            })

        resp.raise_for_status()

        return Structure(resp.json())
python
def course_blocks(self, course_id, username):
        """
        Fetches course blocks.

        Args:
            course_id (str): An edx course id.
            username (str): username of the user to query for (can reveal hidden modules)

        Returns:
            Structure
        """
        resp = self.requester.get(
            urljoin(self.base_url, '/api/courses/v1/blocks/'),
            params={
                "depth": "all",
                "username": username,
                "course_id": course_id,
                "requested_fields": "children,display_name,id,type,visible_to_staff_only",
            })

        resp.raise_for_status()

        return Structure(resp.json())
[ "def", "course_blocks", "(", "self", ",", "course_id", ",", "username", ")", ":", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "'/api/courses/v1/blocks/'", ")", ",", "params", "=", "{", "\"depth\"", ":", "\"all\"", ",", "\"username\"", ":", "username", ",", "\"course_id\"", ":", "course_id", ",", "\"requested_fields\"", ":", "\"children,display_name,id,type,visible_to_staff_only\"", ",", "}", ")", "resp", ".", "raise_for_status", "(", ")", "return", "Structure", "(", "resp", ".", "json", "(", ")", ")" ]
Fetches course blocks. Args: course_id (str): An edx course id. username (str): username of the user to query for (can reveal hidden modules) Returns: Structure
[ "Fetches", "course", "blocks", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/course_structure/__init__.py#L15-L38
train
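A sketch that builds the structure client directly from a `requests` session. The `CourseStructure(requester, base_url)` constructor is inferred from the `self.requester` / `self.base_url` attributes in the snippet, so treat it as an assumption; the token, URL and ids are placeholders:

    import requests
    from edx_api.course_structure import CourseStructure

    session = requests.Session()
    session.headers.update({'Authorization': 'Bearer <access_token>'})  # placeholder

    blocks_client = CourseStructure(session, 'https://courses.example.edu')
    structure = blocks_client.course_blocks('course-v1:edX+DemoX+Demo_Course', 'staff_user')
    # `structure` wraps the JSON returned by /api/courses/v1/blocks/ with depth=all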
mitodl/edx-api-client
edx_api/grades/__init__.py
UserCurrentGrades.get_student_current_grade
def get_student_current_grade(self, username, course_id): """ Returns an CurrentGrade object for the user in a course Args: username (str): an edx user's username course_id (str): an edX course id. Returns: CurrentGrade: object representing the student current grade for a course """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin( self.base_url, '/api/grades/v1/courses/{course_key}/?username={username}'.format( username=username, course_key=course_id ) ) ) resp.raise_for_status() return CurrentGrade(resp.json()[0])
python
def get_student_current_grade(self, username, course_id): """ Returns an CurrentGrade object for the user in a course Args: username (str): an edx user's username course_id (str): an edX course id. Returns: CurrentGrade: object representing the student current grade for a course """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin( self.base_url, '/api/grades/v1/courses/{course_key}/?username={username}'.format( username=username, course_key=course_id ) ) ) resp.raise_for_status() return CurrentGrade(resp.json()[0])
[ "def", "get_student_current_grade", "(", "self", ",", "username", ",", "course_id", ")", ":", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "'/api/grades/v1/courses/{course_key}/?username={username}'", ".", "format", "(", "username", "=", "username", ",", "course_key", "=", "course_id", ")", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "CurrentGrade", "(", "resp", ".", "json", "(", ")", "[", "0", "]", ")" ]
Returns a CurrentGrade object for the user in a course Args: username (str): an edx user's username course_id (str): an edX course id. Returns: CurrentGrade: object representing the student current grade for a course
[ "Returns", "a", "CurrentGrade", "object", "for", "the", "user", "in", "a", "course" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/grades/__init__.py#L25-L49
train
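A sketch for the single-course grade lookup; the `UserCurrentGrades(requester, base_url)` constructor is assumed from the same pattern as above, and the username and course id are placeholders:

    import requests
    from edx_api.grades import UserCurrentGrades

    session = requests.Session()
    session.headers.update({'Authorization': 'Bearer <access_token>'})  # placeholder

    grades_client = UserCurrentGrades(session, 'https://courses.example.edu')
    grade = grades_client.get_student_current_grade('some_username', 'course-v1:edX+DemoX+Demo_Course')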
mitodl/edx-api-client
edx_api/grades/__init__.py
UserCurrentGrades.get_student_current_grades
def get_student_current_grades(self, username, course_ids=None): """ Returns a CurrentGradesByUser object with the user current grades. Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: CurrentGradesByUser: object representing the student current grades """ # if no course ids are provided, let's get the user enrollments if course_ids is None: enrollments_client = CourseEnrollments(self.requester, self.base_url) enrollments = enrollments_client.get_student_enrollments() course_ids = list(enrollments.get_enrolled_course_ids()) all_current_grades = [] for course_id in course_ids: try: all_current_grades.append(self.get_student_current_grade(username, course_id)) except HTTPError as error: if error.response.status_code >= 500: raise return CurrentGradesByUser(all_current_grades)
python
def get_student_current_grades(self, username, course_ids=None): """ Returns a CurrentGradesByUser object with the user current grades. Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: CurrentGradesByUser: object representing the student current grades """ # if no course ids are provided, let's get the user enrollments if course_ids is None: enrollments_client = CourseEnrollments(self.requester, self.base_url) enrollments = enrollments_client.get_student_enrollments() course_ids = list(enrollments.get_enrolled_course_ids()) all_current_grades = [] for course_id in course_ids: try: all_current_grades.append(self.get_student_current_grade(username, course_id)) except HTTPError as error: if error.response.status_code >= 500: raise return CurrentGradesByUser(all_current_grades)
[ "def", "get_student_current_grades", "(", "self", ",", "username", ",", "course_ids", "=", "None", ")", ":", "# if no course ids are provided, let's get the user enrollments", "if", "course_ids", "is", "None", ":", "enrollments_client", "=", "CourseEnrollments", "(", "self", ".", "requester", ",", "self", ".", "base_url", ")", "enrollments", "=", "enrollments_client", ".", "get_student_enrollments", "(", ")", "course_ids", "=", "list", "(", "enrollments", ".", "get_enrolled_course_ids", "(", ")", ")", "all_current_grades", "=", "[", "]", "for", "course_id", "in", "course_ids", ":", "try", ":", "all_current_grades", ".", "append", "(", "self", ".", "get_student_current_grade", "(", "username", ",", "course_id", ")", ")", "except", "HTTPError", "as", "error", ":", "if", "error", ".", "response", ".", "status_code", ">=", "500", ":", "raise", "return", "CurrentGradesByUser", "(", "all_current_grades", ")" ]
Returns a CurrentGradesByUser object with the user current grades. Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: CurrentGradesByUser: object representing the student current grades
[ "Returns", "a", "CurrentGradesByUser", "object", "with", "the", "user", "current", "grades", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/grades/__init__.py#L51-L76
train
mitodl/edx-api-client
edx_api/grades/__init__.py
UserCurrentGrades.get_course_current_grades
def get_course_current_grades(self, course_id): """ Returns a CurrentGradesByCourse object for all users in the specified course. Args: course_id (str): an edX course ids. Returns: CurrentGradesByCourse: object representing the student current grades Authorization: The authenticated user must have staff permissions to see grades for all users in a course. """ resp = self.requester.get( urljoin( self.base_url, '/api/grades/v1/courses/{course_key}/'.format(course_key=course_id) ) ) resp.raise_for_status() resp_json = resp.json() if 'results' in resp_json: grade_entries = [CurrentGrade(entry) for entry in resp_json["results"]] while resp_json['next'] is not None: resp = self.requester.get(resp_json['next']) resp.raise_for_status() resp_json = resp.json() grade_entries.extend((CurrentGrade(entry) for entry in resp_json["results"])) else: grade_entries = [CurrentGrade(entry) for entry in resp_json] return CurrentGradesByCourse(grade_entries)
python
def get_course_current_grades(self, course_id): """ Returns a CurrentGradesByCourse object for all users in the specified course. Args: course_id (str): an edX course ids. Returns: CurrentGradesByCourse: object representing the student current grades Authorization: The authenticated user must have staff permissions to see grades for all users in a course. """ resp = self.requester.get( urljoin( self.base_url, '/api/grades/v1/courses/{course_key}/'.format(course_key=course_id) ) ) resp.raise_for_status() resp_json = resp.json() if 'results' in resp_json: grade_entries = [CurrentGrade(entry) for entry in resp_json["results"]] while resp_json['next'] is not None: resp = self.requester.get(resp_json['next']) resp.raise_for_status() resp_json = resp.json() grade_entries.extend((CurrentGrade(entry) for entry in resp_json["results"])) else: grade_entries = [CurrentGrade(entry) for entry in resp_json] return CurrentGradesByCourse(grade_entries)
[ "def", "get_course_current_grades", "(", "self", ",", "course_id", ")", ":", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "'/api/grades/v1/courses/{course_key}/'", ".", "format", "(", "course_key", "=", "course_id", ")", ")", ")", "resp", ".", "raise_for_status", "(", ")", "resp_json", "=", "resp", ".", "json", "(", ")", "if", "'results'", "in", "resp_json", ":", "grade_entries", "=", "[", "CurrentGrade", "(", "entry", ")", "for", "entry", "in", "resp_json", "[", "\"results\"", "]", "]", "while", "resp_json", "[", "'next'", "]", "is", "not", "None", ":", "resp", "=", "self", ".", "requester", ".", "get", "(", "resp_json", "[", "'next'", "]", ")", "resp", ".", "raise_for_status", "(", ")", "resp_json", "=", "resp", ".", "json", "(", ")", "grade_entries", ".", "extend", "(", "(", "CurrentGrade", "(", "entry", ")", "for", "entry", "in", "resp_json", "[", "\"results\"", "]", ")", ")", "else", ":", "grade_entries", "=", "[", "CurrentGrade", "(", "entry", ")", "for", "entry", "in", "resp_json", "]", "return", "CurrentGradesByCourse", "(", "grade_entries", ")" ]
Returns a CurrentGradesByCourse object for all users in the specified course. Args: course_id (str): an edX course id. Returns: CurrentGradesByCourse: object representing the student current grades Authorization: The authenticated user must have staff permissions to see grades for all users in a course.
[ "Returns", "a", "CurrentGradesByCourse", "object", "for", "all", "users", "in", "the", "specified", "course", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/grades/__init__.py#L78-L110
train
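A sketch for the course-wide variant above, which needs a staff token and transparently follows the paginated `next` links; `grades` as an attribute on the EdxApi client is an assumption (only `enrollments` is documented later in this file):

    from edx_api.client import EdxApi

    api = EdxApi({'access_token': 'staff_token'}, 'https://courses.example.edu')
    # Aggregates every page of /api/grades/v1/courses/<course_key>/ into one object.
    course_grades = api.grades.get_course_current_grades('course-v1:edX+DemoX+Demo_Course')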
mitodl/edx-api-client
edx_api/client.py
EdxApi.get_requester
def get_requester(self): """ Returns an object to make authenticated requests. See python `requests` for the API. """ # TODO(abrahms): Perhaps pull this out into a factory function for # generating an EdxApi instance with the proper requester & credentials. session = requests.session() session.headers.update({ 'Authorization': 'Bearer {}'.format(self.credentials['access_token']) }) old_request = session.request def patched_request(*args, **kwargs): """ adds timeout param to session.request """ return old_request(*args, timeout=self.timeout, **kwargs) session.request = patched_request return session
python
def get_requester(self): """ Returns an object to make authenticated requests. See python `requests` for the API. """ # TODO(abrahms): Perhaps pull this out into a factory function for # generating an EdxApi instance with the proper requester & credentials. session = requests.session() session.headers.update({ 'Authorization': 'Bearer {}'.format(self.credentials['access_token']) }) old_request = session.request def patched_request(*args, **kwargs): """ adds timeout param to session.request """ return old_request(*args, timeout=self.timeout, **kwargs) session.request = patched_request return session
[ "def", "get_requester", "(", "self", ")", ":", "# TODO(abrahms): Perhaps pull this out into a factory function for", "# generating an EdxApi instance with the proper requester & credentials.", "session", "=", "requests", ".", "session", "(", ")", "session", ".", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'Bearer {}'", ".", "format", "(", "self", ".", "credentials", "[", "'access_token'", "]", ")", "}", ")", "old_request", "=", "session", ".", "request", "def", "patched_request", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n adds timeout param to session.request\n \"\"\"", "return", "old_request", "(", "*", "args", ",", "timeout", "=", "self", ".", "timeout", ",", "*", "*", "kwargs", ")", "session", ".", "request", "=", "patched_request", "return", "session" ]
Returns an object to make authenticated requests. See python `requests` for the API.
[ "Returns", "an", "object", "to", "make", "authenticated", "requests", ".", "See", "python", "requests", "for", "the", "API", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/client.py#L30-L50
train
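The construction below follows the usage shown in the enrollments docstring later in this file; the point of `get_requester()` is that the returned session already carries the Bearer header and the client's default timeout on every call:

    from edx_api.client import EdxApi

    api = EdxApi({'access_token': 'token'}, 'http://base_url')
    session = api.get_requester()
    # Every request through `session` sends `Authorization: Bearer token` and
    # passes the client's timeout, because session.request is patched above.
    resp = session.get('http://base_url/api/mobile/v0.5/my_user_info')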
mitodl/edx-api-client
edx_api/ccx.py
CCX.create
def create(self, master_course_id, coach_email, max_students_allowed, title, modules=None): """ Creates a CCX Args: master_course_id (str): edx course id of the master course coach_email (str): email of the user to make a coach. This user must exist on edx. max_students_allowed (int): Maximum number of students to allow in this ccx. title (str): Title of the CCX to be created modules (optional list): A list of locator_ids (str) for the modules to enable. Returns: ccx_id (str): The ID of the ccx. """ payload = { 'master_course_id': master_course_id, 'coach_email': coach_email, 'max_students_allowed': max_students_allowed, 'display_name': title, } if modules is not None: payload['course_modules'] = modules resp = self.requester.post( parse.urljoin(self.base_url, '/api/ccx/v0/ccx/'), json=payload ) try: resp.raise_for_status() except: log.error(resp.json()) raise return resp.json()['ccx_course_id']
python
def create(self, master_course_id, coach_email, max_students_allowed, title, modules=None): """ Creates a CCX Args: master_course_id (str): edx course id of the master course coach_email (str): email of the user to make a coach. This user must exist on edx. max_students_allowed (int): Maximum number of students to allow in this ccx. title (str): Title of the CCX to be created modules (optional list): A list of locator_ids (str) for the modules to enable. Returns: ccx_id (str): The ID of the ccx. """ payload = { 'master_course_id': master_course_id, 'coach_email': coach_email, 'max_students_allowed': max_students_allowed, 'display_name': title, } if modules is not None: payload['course_modules'] = modules resp = self.requester.post( parse.urljoin(self.base_url, '/api/ccx/v0/ccx/'), json=payload ) try: resp.raise_for_status() except: log.error(resp.json()) raise return resp.json()['ccx_course_id']
[ "def", "create", "(", "self", ",", "master_course_id", ",", "coach_email", ",", "max_students_allowed", ",", "title", ",", "modules", "=", "None", ")", ":", "payload", "=", "{", "'master_course_id'", ":", "master_course_id", ",", "'coach_email'", ":", "coach_email", ",", "'max_students_allowed'", ":", "max_students_allowed", ",", "'display_name'", ":", "title", ",", "}", "if", "modules", "is", "not", "None", ":", "payload", "[", "'course_modules'", "]", "=", "modules", "resp", "=", "self", ".", "requester", ".", "post", "(", "parse", ".", "urljoin", "(", "self", ".", "base_url", ",", "'/api/ccx/v0/ccx/'", ")", ",", "json", "=", "payload", ")", "try", ":", "resp", ".", "raise_for_status", "(", ")", "except", ":", "log", ".", "error", "(", "resp", ".", "json", "(", ")", ")", "raise", "return", "resp", ".", "json", "(", ")", "[", "'ccx_course_id'", "]" ]
Creates a CCX Args: master_course_id (str): edx course id of the master course coach_email (str): email of the user to make a coach. This user must exist on edx. max_students_allowed (int): Maximum number of students to allow in this ccx. title (str): Title of the CCX to be created modules (optional list): A list of locator_ids (str) for the modules to enable. Returns: ccx_id (str): The ID of the ccx.
[ "Creates", "a", "CCX" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/ccx.py#L20-L55
train
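A sketch for creating a CCX; the `ccx` attribute name on the client is an assumption, the coach must already exist on the edX instance, and all ids and emails are placeholders:

    from edx_api.client import EdxApi

    api = EdxApi({'access_token': 'token'}, 'https://courses.example.edu')
    ccx_id = api.ccx.create(
        master_course_id='course-v1:edX+DemoX+Demo_Course',
        coach_email='coach@example.com',   # must be an existing edX user
        max_students_allowed=100,
        title='Demo CCX',
        modules=None,                      # or a list of module locator id strings
    )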
mitodl/edx-api-client
edx_api/enrollments/__init__.py
CourseEnrollments._get_enrollments_list_page
def _get_enrollments_list_page(self, params=None): """ Submit request to retrieve enrollments list. Args: params (dict): Query parameters to use in the request. Valid parameters are: * course_id: Filters the result to course enrollments for the course corresponding to the given course ID. The value must be URL encoded. Optional. * username: username: List of comma-separated usernames. Filters the result to the course enrollments of the given users. Optional. """ req_url = urljoin(self.base_url, self.enrollment_list_url) resp = self.requester.get(req_url, params=params) resp.raise_for_status() resp_json = resp.json() results = resp_json['results'] next_url_str = resp_json.get('next') cursor = None qstr_cursor = None if next_url_str: next_url = urlparse(next_url_str) qstr = parse_qs(next_url.query) qstr_cursor = qstr.get('cursor') if qstr_cursor and isinstance(qstr_cursor, list): cursor = qstr_cursor[0] return results, cursor
python
def _get_enrollments_list_page(self, params=None): """ Submit request to retrieve enrollments list. Args: params (dict): Query parameters to use in the request. Valid parameters are: * course_id: Filters the result to course enrollments for the course corresponding to the given course ID. The value must be URL encoded. Optional. * username: username: List of comma-separated usernames. Filters the result to the course enrollments of the given users. Optional. """ req_url = urljoin(self.base_url, self.enrollment_list_url) resp = self.requester.get(req_url, params=params) resp.raise_for_status() resp_json = resp.json() results = resp_json['results'] next_url_str = resp_json.get('next') cursor = None qstr_cursor = None if next_url_str: next_url = urlparse(next_url_str) qstr = parse_qs(next_url.query) qstr_cursor = qstr.get('cursor') if qstr_cursor and isinstance(qstr_cursor, list): cursor = qstr_cursor[0] return results, cursor
[ "def", "_get_enrollments_list_page", "(", "self", ",", "params", "=", "None", ")", ":", "req_url", "=", "urljoin", "(", "self", ".", "base_url", ",", "self", ".", "enrollment_list_url", ")", "resp", "=", "self", ".", "requester", ".", "get", "(", "req_url", ",", "params", "=", "params", ")", "resp", ".", "raise_for_status", "(", ")", "resp_json", "=", "resp", ".", "json", "(", ")", "results", "=", "resp_json", "[", "'results'", "]", "next_url_str", "=", "resp_json", ".", "get", "(", "'next'", ")", "cursor", "=", "None", "qstr_cursor", "=", "None", "if", "next_url_str", ":", "next_url", "=", "urlparse", "(", "next_url_str", ")", "qstr", "=", "parse_qs", "(", "next_url", ".", "query", ")", "qstr_cursor", "=", "qstr", ".", "get", "(", "'cursor'", ")", "if", "qstr_cursor", "and", "isinstance", "(", "qstr_cursor", ",", "list", ")", ":", "cursor", "=", "qstr_cursor", "[", "0", "]", "return", "results", ",", "cursor" ]
Submit request to retrieve enrollments list. Args: params (dict): Query parameters to use in the request. Valid parameters are: * course_id: Filters the result to course enrollments for the course corresponding to the given course ID. The value must be URL encoded. Optional. * username: List of comma-separated usernames. Filters the result to the course enrollments of the given users. Optional.
[ "Submit", "request", "to", "retrieve", "enrollments", "list", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L31-L59
train
mitodl/edx-api-client
edx_api/enrollments/__init__.py
CourseEnrollments.get_enrollments
def get_enrollments(self, course_id=None, usernames=None): """ List all course enrollments. Args: course_id (str, optional): If used enrollments will be filtered to the specified course id. usernames (list, optional): List of usernames to filter enrollments. Notes: - This method returns an iterator to avoid going through the entire pagination at once. - The :class:`Enrollments` instance returned for each generated item will not have any course details. Examples: Get all enrollments for a specific course id >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(course_id='course_id') >>> for enrollment in enrollments: do_something(enrollment) Get all enrollments for a set of usernames >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2']) >>> for enrollment in enrollments: do_something(enrollment) Returns: Generator with an instance of :class:`Enrollments` for each item. """ params = {} if course_id is not None: params['course_id'] = course_id if usernames is not None and isinstance(usernames, list): params['username'] = ','.join(usernames) done = False while not done: enrollments, next_cursor = self._get_enrollments_list_page(params) for enrollment in enrollments: yield Enrollment(enrollment) if next_cursor: params['cursor'] = next_cursor else: done = True
python
def get_enrollments(self, course_id=None, usernames=None): """ List all course enrollments. Args: course_id (str, optional): If used enrollments will be filtered to the specified course id. usernames (list, optional): List of usernames to filter enrollments. Notes: - This method returns an iterator to avoid going through the entire pagination at once. - The :class:`Enrollments` instance returned for each generated item will not have any course details. Examples: Get all enrollments for a specific course id >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(course_id='course_id') >>> for enrollment in enrollments: do_something(enrollment) Get all enrollments for a set of usernames >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2']) >>> for enrollment in enrollments: do_something(enrollment) Returns: Generator with an instance of :class:`Enrollments` for each item. """ params = {} if course_id is not None: params['course_id'] = course_id if usernames is not None and isinstance(usernames, list): params['username'] = ','.join(usernames) done = False while not done: enrollments, next_cursor = self._get_enrollments_list_page(params) for enrollment in enrollments: yield Enrollment(enrollment) if next_cursor: params['cursor'] = next_cursor else: done = True
[ "def", "get_enrollments", "(", "self", ",", "course_id", "=", "None", ",", "usernames", "=", "None", ")", ":", "params", "=", "{", "}", "if", "course_id", "is", "not", "None", ":", "params", "[", "'course_id'", "]", "=", "course_id", "if", "usernames", "is", "not", "None", "and", "isinstance", "(", "usernames", ",", "list", ")", ":", "params", "[", "'username'", "]", "=", "','", ".", "join", "(", "usernames", ")", "done", "=", "False", "while", "not", "done", ":", "enrollments", ",", "next_cursor", "=", "self", ".", "_get_enrollments_list_page", "(", "params", ")", "for", "enrollment", "in", "enrollments", ":", "yield", "Enrollment", "(", "enrollment", ")", "if", "next_cursor", ":", "params", "[", "'cursor'", "]", "=", "next_cursor", "else", ":", "done", "=", "True" ]
List all course enrollments. Args: course_id (str, optional): If used enrollments will be filtered to the specified course id. usernames (list, optional): List of usernames to filter enrollments. Notes: - This method returns an iterator to avoid going through the entire pagination at once. - The :class:`Enrollments` instance returned for each generated item will not have any course details. Examples: Get all enrollments for a specific course id >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(course_id='course_id') >>> for enrollment in enrollments: do_something(enrollment) Get all enrollments for a set of usernames >>> api = EdxApi({'access_token': 'token'}, 'http://base_url') >>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2']) >>> for enrollment in enrollments: do_something(enrollment) Returns: Generator with an instance of :class:`Enrollments` for each item.
[ "List", "all", "course", "enrollments", "." ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L61-L106
train
mitodl/edx-api-client
edx_api/enrollments/__init__.py
CourseEnrollments.get_student_enrollments
def get_student_enrollments(self): """ Returns an Enrollments object with the user enrollments Returns: Enrollments: object representing the student enrollments """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin(self.base_url, self.enrollment_url)) resp.raise_for_status() return Enrollments(resp.json())
python
def get_student_enrollments(self): """ Returns an Enrollments object with the user enrollments Returns: Enrollments: object representing the student enrollments """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin(self.base_url, self.enrollment_url)) resp.raise_for_status() return Enrollments(resp.json())
[ "def", "get_student_enrollments", "(", "self", ")", ":", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "self", ".", "enrollment_url", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "Enrollments", "(", "resp", ".", "json", "(", ")", ")" ]
Returns an Enrollments object with the user enrollments Returns: Enrollments: object representing the student enrollments
[ "Returns", "an", "Enrollments", "object", "with", "the", "user", "enrollments" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L108-L119
train
mitodl/edx-api-client
edx_api/enrollments/__init__.py
CourseEnrollments.create_audit_student_enrollment
def create_audit_student_enrollment(self, course_id): """ Creates an audit enrollment for the user in a given course Args: course_id (str): an edX course id Returns: Enrollment: object representing the student enrollment in the provided course """ audit_enrollment = { "mode": "audit", "course_details": {"course_id": course_id} } # the request is done in behalf of the current logged in user resp = self.requester.post( urljoin(self.base_url, self.enrollment_url), json=audit_enrollment ) resp.raise_for_status() return Enrollment(resp.json())
python
def create_audit_student_enrollment(self, course_id): """ Creates an audit enrollment for the user in a given course Args: course_id (str): an edX course id Returns: Enrollment: object representing the student enrollment in the provided course """ audit_enrollment = { "mode": "audit", "course_details": {"course_id": course_id} } # the request is done in behalf of the current logged in user resp = self.requester.post( urljoin(self.base_url, self.enrollment_url), json=audit_enrollment ) resp.raise_for_status() return Enrollment(resp.json())
[ "def", "create_audit_student_enrollment", "(", "self", ",", "course_id", ")", ":", "audit_enrollment", "=", "{", "\"mode\"", ":", "\"audit\"", ",", "\"course_details\"", ":", "{", "\"course_id\"", ":", "course_id", "}", "}", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "requester", ".", "post", "(", "urljoin", "(", "self", ".", "base_url", ",", "self", ".", "enrollment_url", ")", ",", "json", "=", "audit_enrollment", ")", "resp", ".", "raise_for_status", "(", ")", "return", "Enrollment", "(", "resp", ".", "json", "(", ")", ")" ]
Creates an audit enrollment for the user in a given course Args: course_id (str): an edX course id Returns: Enrollment: object representing the student enrollment in the provided course
[ "Creates", "an", "audit", "enrollment", "for", "the", "user", "in", "a", "given", "course" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L121-L141
train
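A sketch for the audit self-enrollment above. `api.enrollments` is taken from the usage examples in this module's own docstring; the enrollment applies to whichever user owns the access token, and the course id is a placeholder:

    from edx_api.client import EdxApi

    api = EdxApi({'access_token': 'token'}, 'https://courses.example.edu')
    enrollment = api.enrollments.create_audit_student_enrollment('course-v1:edX+DemoX+Demo_Course')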
mitodl/edx-api-client
edx_api/certificates/__init__.py
UserCertificates.get_student_certificate
def get_student_certificate(self, username, course_id): """ Returns an Certificate object with the user certificates Args: username (str): an edx user's username course_id (str): an edX course id. Returns: Certificate: object representing the student certificate for a course """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin( self.base_url, '/api/certificates/v0/certificates/{username}/courses/{course_key}/'.format( username=username, course_key=course_id ) ) ) resp.raise_for_status() return Certificate(resp.json())
python
def get_student_certificate(self, username, course_id): """ Returns an Certificate object with the user certificates Args: username (str): an edx user's username course_id (str): an edX course id. Returns: Certificate: object representing the student certificate for a course """ # the request is done in behalf of the current logged in user resp = self.requester.get( urljoin( self.base_url, '/api/certificates/v0/certificates/{username}/courses/{course_key}/'.format( username=username, course_key=course_id ) ) ) resp.raise_for_status() return Certificate(resp.json())
[ "def", "get_student_certificate", "(", "self", ",", "username", ",", "course_id", ")", ":", "# the request is done in behalf of the current logged in user", "resp", "=", "self", ".", "requester", ".", "get", "(", "urljoin", "(", "self", ".", "base_url", ",", "'/api/certificates/v0/certificates/{username}/courses/{course_key}/'", ".", "format", "(", "username", "=", "username", ",", "course_key", "=", "course_id", ")", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "Certificate", "(", "resp", ".", "json", "(", ")", ")" ]
Returns a Certificate object with the user certificates Args: username (str): an edx user's username course_id (str): an edX course id. Returns: Certificate: object representing the student certificate for a course
[ "Returns", "a", "Certificate", "object", "with", "the", "user", "certificates" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/certificates/__init__.py#L25-L49
train
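A sketch for the single-certificate lookup; `certificates` as a client attribute is an assumption, and the HTTPError handling mirrors how the bulk helper below tolerates missing certificates:

    from requests import HTTPError
    from edx_api.client import EdxApi

    api = EdxApi({'access_token': 'token'}, 'https://courses.example.edu')
    try:
        cert = api.certificates.get_student_certificate('some_username', 'course-v1:edX+DemoX+Demo_Course')
    except HTTPError:
        cert = None  # e.g. 404 when no certificate has been issued yet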
mitodl/edx-api-client
edx_api/certificates/__init__.py
UserCertificates.get_student_certificates
def get_student_certificates(self, username, course_ids=None): """ Returns an Certificates object with the user certificates Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: Certificates: object representing the student certificates for a course """ # if no course ids are provided, let's get the user enrollments if course_ids is None: enrollments_client = CourseEnrollments(self.requester, self.base_url) enrollments = enrollments_client.get_student_enrollments() course_ids = list(enrollments.get_enrolled_course_ids()) all_certificates = [] for course_id in course_ids: try: all_certificates.append(self.get_student_certificate(username, course_id)) except HTTPError as error: if error.response.status_code >= 500: raise return Certificates(all_certificates)
python
def get_student_certificates(self, username, course_ids=None): """ Returns an Certificates object with the user certificates Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: Certificates: object representing the student certificates for a course """ # if no course ids are provided, let's get the user enrollments if course_ids is None: enrollments_client = CourseEnrollments(self.requester, self.base_url) enrollments = enrollments_client.get_student_enrollments() course_ids = list(enrollments.get_enrolled_course_ids()) all_certificates = [] for course_id in course_ids: try: all_certificates.append(self.get_student_certificate(username, course_id)) except HTTPError as error: if error.response.status_code >= 500: raise return Certificates(all_certificates)
[ "def", "get_student_certificates", "(", "self", ",", "username", ",", "course_ids", "=", "None", ")", ":", "# if no course ids are provided, let's get the user enrollments", "if", "course_ids", "is", "None", ":", "enrollments_client", "=", "CourseEnrollments", "(", "self", ".", "requester", ",", "self", ".", "base_url", ")", "enrollments", "=", "enrollments_client", ".", "get_student_enrollments", "(", ")", "course_ids", "=", "list", "(", "enrollments", ".", "get_enrolled_course_ids", "(", ")", ")", "all_certificates", "=", "[", "]", "for", "course_id", "in", "course_ids", ":", "try", ":", "all_certificates", ".", "append", "(", "self", ".", "get_student_certificate", "(", "username", ",", "course_id", ")", ")", "except", "HTTPError", "as", "error", ":", "if", "error", ".", "response", ".", "status_code", ">=", "500", ":", "raise", "return", "Certificates", "(", "all_certificates", ")" ]
Returns a Certificates object with the user certificates Args: username (str): an edx user's username course_ids (list): a list of edX course ids. Returns: Certificates: object representing the student certificates for a course
[ "Returns", "a", "Certificates", "object", "with", "the", "user", "certificates" ]
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/certificates/__init__.py#L51-L76
train
metakirby5/colorz
colorz.py
get_colors
def get_colors(img): """ Returns a list of all the image's colors. """ w, h = img.size return [color[:3] for count, color in img.convert('RGB').getcolors(w * h)]
python
def get_colors(img): """ Returns a list of all the image's colors. """ w, h = img.size return [color[:3] for count, color in img.convert('RGB').getcolors(w * h)]
[ "def", "get_colors", "(", "img", ")", ":", "w", ",", "h", "=", "img", ".", "size", "return", "[", "color", "[", ":", "3", "]", "for", "count", ",", "color", "in", "img", ".", "convert", "(", "'RGB'", ")", ".", "getcolors", "(", "w", "*", "h", ")", "]" ]
Returns a list of all the image's colors.
[ "Returns", "a", "list", "of", "all", "the", "image", "s", "colors", "." ]
11fd47a28d7a4af5b91d29978524335c8fef8cc9
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L56-L61
train
metakirby5/colorz
colorz.py
clamp
def clamp(color, min_v, max_v): """ Clamps a color such that the value is between min_v and max_v. """ h, s, v = rgb_to_hsv(*map(down_scale, color)) min_v, max_v = map(down_scale, (min_v, max_v)) v = min(max(min_v, v), max_v) return tuple(map(up_scale, hsv_to_rgb(h, s, v)))
python
def clamp(color, min_v, max_v): """ Clamps a color such that the value is between min_v and max_v. """ h, s, v = rgb_to_hsv(*map(down_scale, color)) min_v, max_v = map(down_scale, (min_v, max_v)) v = min(max(min_v, v), max_v) return tuple(map(up_scale, hsv_to_rgb(h, s, v)))
[ "def", "clamp", "(", "color", ",", "min_v", ",", "max_v", ")", ":", "h", ",", "s", ",", "v", "=", "rgb_to_hsv", "(", "*", "map", "(", "down_scale", ",", "color", ")", ")", "min_v", ",", "max_v", "=", "map", "(", "down_scale", ",", "(", "min_v", ",", "max_v", ")", ")", "v", "=", "min", "(", "max", "(", "min_v", ",", "v", ")", ",", "max_v", ")", "return", "tuple", "(", "map", "(", "up_scale", ",", "hsv_to_rgb", "(", "h", ",", "s", ",", "v", ")", ")", ")" ]
Clamps a color such that the value is between min_v and max_v.
[ "Clamps", "a", "color", "such", "that", "the", "value", "is", "between", "min_v", "and", "max_v", "." ]
11fd47a28d7a4af5b91d29978524335c8fef8cc9
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L64-L71
train
metakirby5/colorz
colorz.py
order_by_hue
def order_by_hue(colors): """ Orders colors by hue. """ hsvs = [rgb_to_hsv(*map(down_scale, color)) for color in colors] hsvs.sort(key=lambda t: t[0]) return [tuple(map(up_scale, hsv_to_rgb(*hsv))) for hsv in hsvs]
python
def order_by_hue(colors): """ Orders colors by hue. """ hsvs = [rgb_to_hsv(*map(down_scale, color)) for color in colors] hsvs.sort(key=lambda t: t[0]) return [tuple(map(up_scale, hsv_to_rgb(*hsv))) for hsv in hsvs]
[ "def", "order_by_hue", "(", "colors", ")", ":", "hsvs", "=", "[", "rgb_to_hsv", "(", "*", "map", "(", "down_scale", ",", "color", ")", ")", "for", "color", "in", "colors", "]", "hsvs", ".", "sort", "(", "key", "=", "lambda", "t", ":", "t", "[", "0", "]", ")", "return", "[", "tuple", "(", "map", "(", "up_scale", ",", "hsv_to_rgb", "(", "*", "hsv", ")", ")", ")", "for", "hsv", "in", "hsvs", "]" ]
Orders colors by hue.
[ "Orders", "colors", "by", "hue", "." ]
11fd47a28d7a4af5b91d29978524335c8fef8cc9
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L74-L80
train
metakirby5/colorz
colorz.py
brighten
def brighten(color, brightness): """ Adds or subtracts value to a color. """ h, s, v = rgb_to_hsv(*map(down_scale, color)) return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))
python
def brighten(color, brightness): """ Adds or subtracts value to a color. """ h, s, v = rgb_to_hsv(*map(down_scale, color)) return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))
[ "def", "brighten", "(", "color", ",", "brightness", ")", ":", "h", ",", "s", ",", "v", "=", "rgb_to_hsv", "(", "*", "map", "(", "down_scale", ",", "color", ")", ")", "return", "tuple", "(", "map", "(", "up_scale", ",", "hsv_to_rgb", "(", "h", ",", "s", ",", "v", "+", "down_scale", "(", "brightness", ")", ")", ")", ")" ]
Adds or subtracts value to a color.
[ "Adds", "or", "subtracts", "value", "to", "a", "color", "." ]
11fd47a28d7a4af5b91d29978524335c8fef8cc9
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L83-L88
train
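A small sketch exercising the three HSV helpers above (clamp, order_by_hue, brighten), assuming the colorz module is importable and that the value limits are on the same 0-255 scale as the library defaults:

    from colorz import clamp, brighten, order_by_hue

    color = (10, 200, 30)
    dimmed = clamp(color, 32, 224)            # keep HSV value within [32, 224]
    bold = brighten(dimmed, 50)               # a brighter "bold" variant
    palette = order_by_hue([bold, dimmed, (200, 10, 30)])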
metakirby5/colorz
colorz.py
colorz
def colorz(fd, n=DEFAULT_NUM_COLORS, min_v=DEFAULT_MINV, max_v=DEFAULT_MAXV, bold_add=DEFAULT_BOLD_ADD, order_colors=True): """ Get the n most dominant colors of an image. Clamps value to between min_v and max_v. Creates bold colors using bold_add. Total number of colors returned is 2*n, optionally ordered by hue. Returns as a list of pairs of RGB triples. For terminal colors, the hue order is: red, yellow, green, cyan, blue, magenta """ img = Image.open(fd) img.thumbnail(THUMB_SIZE) obs = get_colors(img) clamped = [clamp(color, min_v, max_v) for color in obs] clusters, _ = kmeans(array(clamped).astype(float), n) colors = order_by_hue(clusters) if order_colors else clusters return list(zip(colors, [brighten(c, bold_add) for c in colors]))
python
def colorz(fd, n=DEFAULT_NUM_COLORS, min_v=DEFAULT_MINV, max_v=DEFAULT_MAXV, bold_add=DEFAULT_BOLD_ADD, order_colors=True): """ Get the n most dominant colors of an image. Clamps value to between min_v and max_v. Creates bold colors using bold_add. Total number of colors returned is 2*n, optionally ordered by hue. Returns as a list of pairs of RGB triples. For terminal colors, the hue order is: red, yellow, green, cyan, blue, magenta """ img = Image.open(fd) img.thumbnail(THUMB_SIZE) obs = get_colors(img) clamped = [clamp(color, min_v, max_v) for color in obs] clusters, _ = kmeans(array(clamped).astype(float), n) colors = order_by_hue(clusters) if order_colors else clusters return list(zip(colors, [brighten(c, bold_add) for c in colors]))
[ "def", "colorz", "(", "fd", ",", "n", "=", "DEFAULT_NUM_COLORS", ",", "min_v", "=", "DEFAULT_MINV", ",", "max_v", "=", "DEFAULT_MAXV", ",", "bold_add", "=", "DEFAULT_BOLD_ADD", ",", "order_colors", "=", "True", ")", ":", "img", "=", "Image", ".", "open", "(", "fd", ")", "img", ".", "thumbnail", "(", "THUMB_SIZE", ")", "obs", "=", "get_colors", "(", "img", ")", "clamped", "=", "[", "clamp", "(", "color", ",", "min_v", ",", "max_v", ")", "for", "color", "in", "obs", "]", "clusters", ",", "_", "=", "kmeans", "(", "array", "(", "clamped", ")", ".", "astype", "(", "float", ")", ",", "n", ")", "colors", "=", "order_by_hue", "(", "clusters", ")", "if", "order_colors", "else", "clusters", "return", "list", "(", "zip", "(", "colors", ",", "[", "brighten", "(", "c", ",", "bold_add", ")", "for", "c", "in", "colors", "]", ")", ")" ]
Get the n most dominant colors of an image. Clamps value to between min_v and max_v. Creates bold colors using bold_add. Total number of colors returned is 2*n, optionally ordered by hue. Returns as a list of pairs of RGB triples. For terminal colors, the hue order is: red, yellow, green, cyan, blue, magenta
[ "Get", "the", "n", "most", "dominant", "colors", "of", "an", "image", ".", "Clamps", "value", "to", "between", "min_v", "and", "max_v", "." ]
11fd47a28d7a4af5b91d29978524335c8fef8cc9
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L91-L111
train
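A sketch of end-to-end use of colorz(), assuming Pillow and scipy are installed and that 'wallpaper.jpg' is a real image file on disk:

    from colorz import colorz

    with open('wallpaper.jpg', 'rb') as fd:   # anything Pillow can open also works
        scheme = colorz(fd, n=6)              # six hue-ordered (normal, bold) pairs
    for normal, bold in scheme:
        print(normal, bold)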
ansible/pytest-ansible
setup.py
long_description
def long_description(*paths): '''Returns a RST formated string. ''' result = '' # attempt to import pandoc try: import pypandoc except (ImportError, OSError) as e: print("Unable to import pypandoc - %s" % e) return result # attempt md -> rst conversion try: for path in paths: result += '\n' + pypandoc.convert( path, 'rst', format='markdown' ) except (OSError, IOError) as e: print("Failed to convert with pypandoc - %s" % e) return result return result
python
def long_description(*paths): '''Returns a RST formated string. ''' result = '' # attempt to import pandoc try: import pypandoc except (ImportError, OSError) as e: print("Unable to import pypandoc - %s" % e) return result # attempt md -> rst conversion try: for path in paths: result += '\n' + pypandoc.convert( path, 'rst', format='markdown' ) except (OSError, IOError) as e: print("Failed to convert with pypandoc - %s" % e) return result return result
[ "def", "long_description", "(", "*", "paths", ")", ":", "result", "=", "''", "# attempt to import pandoc", "try", ":", "import", "pypandoc", "except", "(", "ImportError", ",", "OSError", ")", "as", "e", ":", "print", "(", "\"Unable to import pypandoc - %s\"", "%", "e", ")", "return", "result", "# attempt md -> rst conversion", "try", ":", "for", "path", "in", "paths", ":", "result", "+=", "'\\n'", "+", "pypandoc", ".", "convert", "(", "path", ",", "'rst'", ",", "format", "=", "'markdown'", ")", "except", "(", "OSError", ",", "IOError", ")", "as", "e", ":", "print", "(", "\"Failed to convert with pypandoc - %s\"", "%", "e", ")", "return", "result", "return", "result" ]
Returns an RST formatted string.
[ "Returns", "an", "RST", "formatted", "string", "." ]
0f7eea80887715fb290a425b8ea4a1c1cfad1ecf
https://github.com/ansible/pytest-ansible/blob/0f7eea80887715fb290a425b8ea4a1c1cfad1ecf/setup.py#L78-L100
train
wmayner/pyphi
pyphi/cache.py
memory_full
def memory_full(): """Check if the memory is too full for further caching.""" current_process = psutil.Process(os.getpid()) return (current_process.memory_percent() > config.MAXIMUM_CACHE_MEMORY_PERCENTAGE)
python
def memory_full(): """Check if the memory is too full for further caching.""" current_process = psutil.Process(os.getpid()) return (current_process.memory_percent() > config.MAXIMUM_CACHE_MEMORY_PERCENTAGE)
[ "def", "memory_full", "(", ")", ":", "current_process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "return", "(", "current_process", ".", "memory_percent", "(", ")", ">", "config", ".", "MAXIMUM_CACHE_MEMORY_PERCENTAGE", ")" ]
Check if the memory is too full for further caching.
[ "Check", "if", "the", "memory", "is", "too", "full", "for", "further", "caching", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L26-L30
train
wmayner/pyphi
pyphi/cache.py
cache
def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE, typed=False): """Memory-limited cache decorator. ``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum percentage of physical memory that the cache can use. If ``typed`` is ``True``, arguments of different types will be cached separately. For example, f(3.0) and f(3) will be treated as distinct calls with distinct results. Arguments to the cached function must be hashable. View the cache statistics named tuple (hits, misses, currsize) with f.cache_info(). Clear the cache and statistics with f.cache_clear(). Access the underlying function with f.__wrapped__. """ # Constants shared by all lru cache instances: # Unique object used to signal cache misses. sentinel = object() # Build a key from the function arguments. make_key = _make_key def decorating_function(user_function, hits=0, misses=0): full = False # Bound method to look up a key or return None. cache_get = cache.get if not maxmem: def wrapper(*args, **kwds): # Simple caching without memory limit. nonlocal hits, misses key = make_key(args, kwds, typed) result = cache_get(key, sentinel) if result is not sentinel: hits += 1 return result result = user_function(*args, **kwds) cache[key] = result misses += 1 return result else: def wrapper(*args, **kwds): # Memory-limited caching. nonlocal hits, misses, full key = make_key(args, kwds, typed) result = cache_get(key) if result is not None: hits += 1 return result result = user_function(*args, **kwds) if not full: cache[key] = result # Cache is full if the total recursive usage is greater # than the maximum allowed percentage. current_process = psutil.Process(os.getpid()) full = current_process.memory_percent() > maxmem misses += 1 return result def cache_info(): """Report cache statistics.""" return _CacheInfo(hits, misses, len(cache)) def cache_clear(): """Clear the cache and cache statistics.""" nonlocal hits, misses, full cache.clear() hits = misses = 0 full = False wrapper.cache_info = cache_info wrapper.cache_clear = cache_clear return update_wrapper(wrapper, user_function) return decorating_function
python
def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE, typed=False): """Memory-limited cache decorator. ``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum percentage of physical memory that the cache can use. If ``typed`` is ``True``, arguments of different types will be cached separately. For example, f(3.0) and f(3) will be treated as distinct calls with distinct results. Arguments to the cached function must be hashable. View the cache statistics named tuple (hits, misses, currsize) with f.cache_info(). Clear the cache and statistics with f.cache_clear(). Access the underlying function with f.__wrapped__. """ # Constants shared by all lru cache instances: # Unique object used to signal cache misses. sentinel = object() # Build a key from the function arguments. make_key = _make_key def decorating_function(user_function, hits=0, misses=0): full = False # Bound method to look up a key or return None. cache_get = cache.get if not maxmem: def wrapper(*args, **kwds): # Simple caching without memory limit. nonlocal hits, misses key = make_key(args, kwds, typed) result = cache_get(key, sentinel) if result is not sentinel: hits += 1 return result result = user_function(*args, **kwds) cache[key] = result misses += 1 return result else: def wrapper(*args, **kwds): # Memory-limited caching. nonlocal hits, misses, full key = make_key(args, kwds, typed) result = cache_get(key) if result is not None: hits += 1 return result result = user_function(*args, **kwds) if not full: cache[key] = result # Cache is full if the total recursive usage is greater # than the maximum allowed percentage. current_process = psutil.Process(os.getpid()) full = current_process.memory_percent() > maxmem misses += 1 return result def cache_info(): """Report cache statistics.""" return _CacheInfo(hits, misses, len(cache)) def cache_clear(): """Clear the cache and cache statistics.""" nonlocal hits, misses, full cache.clear() hits = misses = 0 full = False wrapper.cache_info = cache_info wrapper.cache_clear = cache_clear return update_wrapper(wrapper, user_function) return decorating_function
[ "def", "cache", "(", "cache", "=", "{", "}", ",", "maxmem", "=", "config", ".", "MAXIMUM_CACHE_MEMORY_PERCENTAGE", ",", "typed", "=", "False", ")", ":", "# Constants shared by all lru cache instances:", "# Unique object used to signal cache misses.", "sentinel", "=", "object", "(", ")", "# Build a key from the function arguments.", "make_key", "=", "_make_key", "def", "decorating_function", "(", "user_function", ",", "hits", "=", "0", ",", "misses", "=", "0", ")", ":", "full", "=", "False", "# Bound method to look up a key or return None.", "cache_get", "=", "cache", ".", "get", "if", "not", "maxmem", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "# Simple caching without memory limit.", "nonlocal", "hits", ",", "misses", "key", "=", "make_key", "(", "args", ",", "kwds", ",", "typed", ")", "result", "=", "cache_get", "(", "key", ",", "sentinel", ")", "if", "result", "is", "not", "sentinel", ":", "hits", "+=", "1", "return", "result", "result", "=", "user_function", "(", "*", "args", ",", "*", "*", "kwds", ")", "cache", "[", "key", "]", "=", "result", "misses", "+=", "1", "return", "result", "else", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "# Memory-limited caching.", "nonlocal", "hits", ",", "misses", ",", "full", "key", "=", "make_key", "(", "args", ",", "kwds", ",", "typed", ")", "result", "=", "cache_get", "(", "key", ")", "if", "result", "is", "not", "None", ":", "hits", "+=", "1", "return", "result", "result", "=", "user_function", "(", "*", "args", ",", "*", "*", "kwds", ")", "if", "not", "full", ":", "cache", "[", "key", "]", "=", "result", "# Cache is full if the total recursive usage is greater", "# than the maximum allowed percentage.", "current_process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "full", "=", "current_process", ".", "memory_percent", "(", ")", ">", "maxmem", "misses", "+=", "1", "return", "result", "def", "cache_info", "(", ")", ":", "\"\"\"Report cache statistics.\"\"\"", "return", "_CacheInfo", "(", "hits", ",", "misses", ",", "len", "(", "cache", ")", ")", "def", "cache_clear", "(", ")", ":", "\"\"\"Clear the cache and cache statistics.\"\"\"", "nonlocal", "hits", ",", "misses", ",", "full", "cache", ".", "clear", "(", ")", "hits", "=", "misses", "=", "0", "full", "=", "False", "wrapper", ".", "cache_info", "=", "cache_info", "wrapper", ".", "cache_clear", "=", "cache_clear", "return", "update_wrapper", "(", "wrapper", ",", "user_function", ")", "return", "decorating_function" ]
Memory-limited cache decorator. ``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum percentage of physical memory that the cache can use. If ``typed`` is ``True``, arguments of different types will be cached separately. For example, f(3.0) and f(3) will be treated as distinct calls with distinct results. Arguments to the cached function must be hashable. View the cache statistics named tuple (hits, misses, currsize) with f.cache_info(). Clear the cache and statistics with f.cache_clear(). Access the underlying function with f.__wrapped__.
[ "Memory", "-", "limited", "cache", "decorator", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L78-L156
train
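A sketch of the decorator factory above applied to a plain function. Passing a falsy maxmem selects the simple, unlimited branch; this usage is inferred from the snippet itself rather than from pyphi's documentation:

    from pyphi.cache import cache

    @cache(cache={}, maxmem=0)   # falsy maxmem -> simple unlimited memoization
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)
    print(fib.cache_info())      # _CacheInfo(hits=..., misses=..., currsize=...)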
wmayner/pyphi
pyphi/cache.py
MICECache
def MICECache(subsystem, parent_cache=None): """Construct a |MICE| cache. Uses either a Redis-backed cache or a local dict cache on the object. Args: subsystem (Subsystem): The subsystem that this is a cache for. Kwargs: parent_cache (MICECache): The cache generated by the uncut version of ``subsystem``. Any cached |MICE| which are unaffected by the cut are reused in this cache. If None, the cache is initialized empty. """ if config.REDIS_CACHE: cls = RedisMICECache else: cls = DictMICECache return cls(subsystem, parent_cache=parent_cache)
python
def MICECache(subsystem, parent_cache=None): """Construct a |MICE| cache. Uses either a Redis-backed cache or a local dict cache on the object. Args: subsystem (Subsystem): The subsystem that this is a cache for. Kwargs: parent_cache (MICECache): The cache generated by the uncut version of ``subsystem``. Any cached |MICE| which are unaffected by the cut are reused in this cache. If None, the cache is initialized empty. """ if config.REDIS_CACHE: cls = RedisMICECache else: cls = DictMICECache return cls(subsystem, parent_cache=parent_cache)
[ "def", "MICECache", "(", "subsystem", ",", "parent_cache", "=", "None", ")", ":", "if", "config", ".", "REDIS_CACHE", ":", "cls", "=", "RedisMICECache", "else", ":", "cls", "=", "DictMICECache", "return", "cls", "(", "subsystem", ",", "parent_cache", "=", "parent_cache", ")" ]
Construct a |MICE| cache. Uses either a Redis-backed cache or a local dict cache on the object. Args: subsystem (Subsystem): The subsystem that this is a cache for. Kwargs: parent_cache (MICECache): The cache generated by the uncut version of ``subsystem``. Any cached |MICE| which are unaffected by the cut are reused in this cache. If None, the cache is initialized empty.
[ "Construct", "a", "|MICE|", "cache", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L396-L414
train
wmayner/pyphi
pyphi/cache.py
method
def method(cache_name, key_prefix=None): """Caching decorator for object-level method caches. Cache key generation is delegated to the cache. Args: cache_name (str): The name of the (already-instantiated) cache on the decorated object which should be used to store results of this method. *key_prefix: A constant to use as part of the cache key in addition to the method arguments. """ def decorator(func): if (func.__name__ in ['cause_repertoire', 'effect_repertoire'] and not config.CACHE_REPERTOIRES): return func @wraps(func) def wrapper(obj, *args, **kwargs): cache = getattr(obj, cache_name) # Delegate key generation key = cache.key(*args, _prefix=key_prefix, **kwargs) # Get cached value, or compute value = cache.get(key) if value is None: # miss value = func(obj, *args, **kwargs) cache.set(key, value) return value return wrapper return decorator
python
def method(cache_name, key_prefix=None): """Caching decorator for object-level method caches. Cache key generation is delegated to the cache. Args: cache_name (str): The name of the (already-instantiated) cache on the decorated object which should be used to store results of this method. *key_prefix: A constant to use as part of the cache key in addition to the method arguments. """ def decorator(func): if (func.__name__ in ['cause_repertoire', 'effect_repertoire'] and not config.CACHE_REPERTOIRES): return func @wraps(func) def wrapper(obj, *args, **kwargs): cache = getattr(obj, cache_name) # Delegate key generation key = cache.key(*args, _prefix=key_prefix, **kwargs) # Get cached value, or compute value = cache.get(key) if value is None: # miss value = func(obj, *args, **kwargs) cache.set(key, value) return value return wrapper return decorator
[ "def", "method", "(", "cache_name", ",", "key_prefix", "=", "None", ")", ":", "def", "decorator", "(", "func", ")", ":", "if", "(", "func", ".", "__name__", "in", "[", "'cause_repertoire'", ",", "'effect_repertoire'", "]", "and", "not", "config", ".", "CACHE_REPERTOIRES", ")", ":", "return", "func", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "cache", "=", "getattr", "(", "obj", ",", "cache_name", ")", "# Delegate key generation", "key", "=", "cache", ".", "key", "(", "*", "args", ",", "_prefix", "=", "key_prefix", ",", "*", "*", "kwargs", ")", "# Get cached value, or compute", "value", "=", "cache", ".", "get", "(", "key", ")", "if", "value", "is", "None", ":", "# miss", "value", "=", "func", "(", "obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", "cache", ".", "set", "(", "key", ",", "value", ")", "return", "value", "return", "wrapper", "return", "decorator" ]
Caching decorator for object-level method caches. Cache key generation is delegated to the cache. Args: cache_name (str): The name of the (already-instantiated) cache on the decorated object which should be used to store results of this method. *key_prefix: A constant to use as part of the cache key in addition to the method arguments.
[ "Caching", "decorator", "for", "object", "-", "level", "method", "caches", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L426-L458
train
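A sketch of the object-level pattern the `method` decorator expects: the instance owns a cache exposing key/get/set, and the decorator is given that attribute's name. A tiny stand-in cache is used here so nothing about DictCache's constructor has to be assumed:

    from pyphi.cache import method

    class SimpleCache:
        """Stand-in exposing the key/get/set interface the decorator relies on."""
        def __init__(self):
            self._store = {}

        def key(self, *args, _prefix=None, **kwargs):
            return (_prefix,) + args

        def get(self, key):
            return self._store.get(key)

        def set(self, key, value):
            self._store[key] = value

    class Squares:
        def __init__(self):
            self._square_cache = SimpleCache()

        @method('_square_cache', key_prefix='square')
        def square(self, n):
            print('computing', n)
            return n * n

    s = Squares()
    s.square(4)   # computed and stored
    s.square(4)   # served from s._square_cache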
wmayner/pyphi
pyphi/cache.py
DictCache.get
def get(self, key): """Get a value out of the cache. Returns None if the key is not in the cache. Updates cache statistics. """ if key in self.cache: self.hits += 1 return self.cache[key] self.misses += 1 return None
python
def get(self, key): """Get a value out of the cache. Returns None if the key is not in the cache. Updates cache statistics. """ if key in self.cache: self.hits += 1 return self.cache[key] self.misses += 1 return None
[ "def", "get", "(", "self", ",", "key", ")", ":", "if", "key", "in", "self", ".", "cache", ":", "self", ".", "hits", "+=", "1", "return", "self", ".", "cache", "[", "key", "]", "self", ".", "misses", "+=", "1", "return", "None" ]
Get a value out of the cache. Returns None if the key is not in the cache. Updates cache statistics.
[ "Get", "a", "value", "out", "of", "the", "cache", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L183-L193
train
wmayner/pyphi
pyphi/cache.py
DictCache.key
def key(self, *args, _prefix=None, **kwargs):
    """Get the cache key for the given function args.

    Kwargs:
        prefix: A constant to prefix to the key.
    """
    if kwargs:
        raise NotImplementedError(
            'kwarg cache keys not implemented')
    return (_prefix,) + tuple(args)
python
def key(self, *args, _prefix=None, **kwargs):
    """Get the cache key for the given function args.

    Kwargs:
        prefix: A constant to prefix to the key.
    """
    if kwargs:
        raise NotImplementedError(
            'kwarg cache keys not implemented')
    return (_prefix,) + tuple(args)
[ "def", "key", "(", "self", ",", "*", "args", ",", "_prefix", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ":", "raise", "NotImplementedError", "(", "'kwarg cache keys not implemented'", ")", "return", "(", "_prefix", ",", ")", "+", "tuple", "(", "args", ")" ]
Get the cache key for the given function args. Kwargs: prefix: A constant to prefix to the key.
[ "Get", "the", "cache", "key", "for", "the", "given", "function", "args", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L201-L210
train
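A short sketch of how DictCache.key and DictCache.get cooperate; the set call is assumed to exist on DictCache, since the method decorator above relies on it.

from pyphi.cache import DictCache

c = DictCache()
k = c.key(0, 1, _prefix='repertoire')  # -> ('repertoire', 0, 1)
c.get(k)                               # None; counted as a miss
c.set(k, 'value')                      # assumed: DictCache.set stores the value under k
c.get(k)                               # 'value'; counted as a hit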
wmayner/pyphi
pyphi/cache.py
RedisCache.info
def info(self):
    """Return cache information.

    .. note:: This is not the cache info for the entire Redis key space.
    """
    info = redis_conn.info()
    return _CacheInfo(info['keyspace_hits'],
                      info['keyspace_misses'],
                      self.size())
python
def info(self):
    """Return cache information.

    .. note:: This is not the cache info for the entire Redis key space.
    """
    info = redis_conn.info()
    return _CacheInfo(info['keyspace_hits'],
                      info['keyspace_misses'],
                      self.size())
[ "def", "info", "(", "self", ")", ":", "info", "=", "redis_conn", ".", "info", "(", ")", "return", "_CacheInfo", "(", "info", "[", "'keyspace_hits'", "]", ",", "info", "[", "'keyspace_misses'", "]", ",", "self", ".", "size", "(", ")", ")" ]
Return cache information. .. note:: This is not the cache info for the entire Redis key space.
[ "Return", "cache", "information", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L251-L259
train
wmayner/pyphi
pyphi/cache.py
RedisMICECache.set
def set(self, key, value):
    """Only need to set if the subsystem is uncut.

    Caches are only inherited from uncut subsystems.
    """
    if not self.subsystem.is_cut:
        super().set(key, value)
python
def set(self, key, value):
    """Only need to set if the subsystem is uncut.

    Caches are only inherited from uncut subsystems.
    """
    if not self.subsystem.is_cut:
        super().set(key, value)
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "if", "not", "self", ".", "subsystem", ".", "is_cut", ":", "super", "(", ")", ".", "set", "(", "key", ",", "value", ")" ]
Only need to set if the subsystem is uncut. Caches are only inherited from uncut subsystems.
[ "Only", "need", "to", "set", "if", "the", "subsystem", "is", "uncut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L334-L340
train
wmayner/pyphi
pyphi/cache.py
DictMICECache._build
def _build(self, parent_cache):
    """Build the initial cache from the parent.

    Only include the |MICE| which are unaffected by the subsystem cut.
    A |MICE| is affected if either the cut splits the mechanism or splits
    the connections between the purview and mechanism
    """
    for key, mice in parent_cache.cache.items():
        if not mice.damaged_by_cut(self.subsystem):
            self.cache[key] = mice
python
def _build(self, parent_cache):
    """Build the initial cache from the parent.

    Only include the |MICE| which are unaffected by the subsystem cut.
    A |MICE| is affected if either the cut splits the mechanism or splits
    the connections between the purview and mechanism
    """
    for key, mice in parent_cache.cache.items():
        if not mice.damaged_by_cut(self.subsystem):
            self.cache[key] = mice
[ "def", "_build", "(", "self", ",", "parent_cache", ")", ":", "for", "key", ",", "mice", "in", "parent_cache", ".", "cache", ".", "items", "(", ")", ":", "if", "not", "mice", ".", "damaged_by_cut", "(", "self", ".", "subsystem", ")", ":", "self", ".", "cache", "[", "key", "]", "=", "mice" ]
Build the initial cache from the parent. Only include the |MICE| which are unaffected by the subsystem cut. A |MICE| is affected if either the cut splits the mechanism or splits the connections between the purview and mechanism
[ "Build", "the", "initial", "cache", "from", "the", "parent", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L362-L371
train
wmayner/pyphi
pyphi/cache.py
PurviewCache.set
def set(self, key, value):
    """Only set if purview caching is enabled"""
    if config.CACHE_POTENTIAL_PURVIEWS:
        self.cache[key] = value
python
def set(self, key, value):
    """Only set if purview caching is enabled"""
    if config.CACHE_POTENTIAL_PURVIEWS:
        self.cache[key] = value
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "if", "config", ".", "CACHE_POTENTIAL_PURVIEWS", ":", "self", ".", "cache", "[", "key", "]", "=", "value" ]
Only set if purview caching is enabled
[ "Only", "set", "if", "purview", "caching", "is", "enabled" ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L420-L423
train
wmayner/pyphi
pyphi/connectivity.py
apply_boundary_conditions_to_cm
def apply_boundary_conditions_to_cm(external_indices, cm):
    """Remove connections to or from external nodes."""
    cm = cm.copy()
    cm[external_indices, :] = 0  # Zero-out row
    cm[:, external_indices] = 0  # Zero-out column
    return cm
python
def apply_boundary_conditions_to_cm(external_indices, cm):
    """Remove connections to or from external nodes."""
    cm = cm.copy()
    cm[external_indices, :] = 0  # Zero-out row
    cm[:, external_indices] = 0  # Zero-out column
    return cm
[ "def", "apply_boundary_conditions_to_cm", "(", "external_indices", ",", "cm", ")", ":", "cm", "=", "cm", ".", "copy", "(", ")", "cm", "[", "external_indices", ",", ":", "]", "=", "0", "# Zero-out row", "cm", "[", ":", ",", "external_indices", "]", "=", "0", "# Zero-out columnt", "return", "cm" ]
Remove connections to or from external nodes.
[ "Remove", "connections", "to", "or", "from", "external", "nodes", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L13-L18
train
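A small self-contained illustration of the boundary-condition helper; only the import path (taken from the record's path field) is assumed.

import numpy as np
from pyphi.connectivity import apply_boundary_conditions_to_cm

cm = np.array([[0, 1, 1],
               [1, 0, 1],
               [1, 1, 0]])
# Treating node 2 as external zeroes out its row and column.
apply_boundary_conditions_to_cm((2,), cm)
# array([[0, 1, 0],
#        [1, 0, 0],
#        [0, 0, 0]])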
wmayner/pyphi
pyphi/connectivity.py
get_inputs_from_cm
def get_inputs_from_cm(index, cm):
    """Return indices of inputs to the node with the given index."""
    return tuple(i for i in range(cm.shape[0]) if cm[i][index])
python
def get_inputs_from_cm(index, cm):
    """Return indices of inputs to the node with the given index."""
    return tuple(i for i in range(cm.shape[0]) if cm[i][index])
[ "def", "get_inputs_from_cm", "(", "index", ",", "cm", ")", ":", "return", "tuple", "(", "i", "for", "i", "in", "range", "(", "cm", ".", "shape", "[", "0", "]", ")", "if", "cm", "[", "i", "]", "[", "index", "]", ")" ]
Return indices of inputs to the node with the given index.
[ "Return", "indices", "of", "inputs", "to", "the", "node", "with", "the", "given", "index", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L21-L23
train
wmayner/pyphi
pyphi/connectivity.py
get_outputs_from_cm
def get_outputs_from_cm(index, cm):
    """Return indices of the outputs of node with the given index."""
    return tuple(i for i in range(cm.shape[0]) if cm[index][i])
python
def get_outputs_from_cm(index, cm):
    """Return indices of the outputs of node with the given index."""
    return tuple(i for i in range(cm.shape[0]) if cm[index][i])
[ "def", "get_outputs_from_cm", "(", "index", ",", "cm", ")", ":", "return", "tuple", "(", "i", "for", "i", "in", "range", "(", "cm", ".", "shape", "[", "0", "]", ")", "if", "cm", "[", "index", "]", "[", "i", "]", ")" ]
Return indices of the outputs of node with the given index.
[ "Return", "indices", "of", "the", "outputs", "of", "node", "with", "the", "given", "index", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L26-L28
train
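A quick example covering both of the connectivity lookups above, using the convention that cm[i][j] == 1 means node i sends a connection to node j; only the import path is assumed.

import numpy as np
from pyphi.connectivity import get_inputs_from_cm, get_outputs_from_cm

cm = np.array([[0, 1, 0],
               [0, 0, 1],
               [1, 0, 0]])
get_inputs_from_cm(0, cm)   # (2,): only node 2 connects to node 0
get_outputs_from_cm(0, cm)  # (1,): node 0 connects only to node 1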
wmayner/pyphi
pyphi/connectivity.py
causally_significant_nodes
def causally_significant_nodes(cm):
    """Return indices of nodes that have both inputs and outputs."""
    inputs = cm.sum(0)
    outputs = cm.sum(1)
    nodes_with_inputs_and_outputs = np.logical_and(inputs > 0, outputs > 0)
    return tuple(np.where(nodes_with_inputs_and_outputs)[0])
python
def causally_significant_nodes(cm):
    """Return indices of nodes that have both inputs and outputs."""
    inputs = cm.sum(0)
    outputs = cm.sum(1)
    nodes_with_inputs_and_outputs = np.logical_and(inputs > 0, outputs > 0)
    return tuple(np.where(nodes_with_inputs_and_outputs)[0])
[ "def", "causally_significant_nodes", "(", "cm", ")", ":", "inputs", "=", "cm", ".", "sum", "(", "0", ")", "outputs", "=", "cm", ".", "sum", "(", "1", ")", "nodes_with_inputs_and_outputs", "=", "np", ".", "logical_and", "(", "inputs", ">", "0", ",", "outputs", ">", "0", ")", "return", "tuple", "(", "np", ".", "where", "(", "nodes_with_inputs_and_outputs", ")", "[", "0", "]", ")" ]
Return indices of nodes that have both inputs and outputs.
[ "Return", "indices", "of", "nodes", "that", "have", "both", "inputs", "and", "outputs", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L31-L36
train
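A sketch of the column/row-sum test above on a three-node chain; only the import path is assumed.

import numpy as np
from pyphi.connectivity import causally_significant_nodes

# Node 0 has outputs but no inputs; node 2 has inputs but no outputs.
cm = np.array([[0, 1, 0],
               [0, 0, 1],
               [0, 0, 0]])
causally_significant_nodes(cm)  # (1,): only node 1 has both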
wmayner/pyphi
pyphi/connectivity.py
relevant_connections
def relevant_connections(n, _from, to):
    """Construct a connectivity matrix.

    Args:
        n (int): The dimensions of the matrix
        _from (tuple[int]): Nodes with outgoing connections to ``to``
        to (tuple[int]): Nodes with incoming connections from ``_from``

    Returns:
        np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry
        is ``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0
        otherwise.
    """
    cm = np.zeros((n, n))

    # Don't try and index with empty arrays. Older versions of NumPy
    # (at least up to 1.9.3) break with empty array indices.
    if not _from or not to:
        return cm

    cm[np.ix_(_from, to)] = 1
    return cm
python
def relevant_connections(n, _from, to):
    """Construct a connectivity matrix.

    Args:
        n (int): The dimensions of the matrix
        _from (tuple[int]): Nodes with outgoing connections to ``to``
        to (tuple[int]): Nodes with incoming connections from ``_from``

    Returns:
        np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry
        is ``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0
        otherwise.
    """
    cm = np.zeros((n, n))

    # Don't try and index with empty arrays. Older versions of NumPy
    # (at least up to 1.9.3) break with empty array indices.
    if not _from or not to:
        return cm

    cm[np.ix_(_from, to)] = 1
    return cm
[ "def", "relevant_connections", "(", "n", ",", "_from", ",", "to", ")", ":", "cm", "=", "np", ".", "zeros", "(", "(", "n", ",", "n", ")", ")", "# Don't try and index with empty arrays. Older versions of NumPy", "# (at least up to 1.9.3) break with empty array indices.", "if", "not", "_from", "or", "not", "to", ":", "return", "cm", "cm", "[", "np", ".", "ix_", "(", "_from", ",", "to", ")", "]", "=", "1", "return", "cm" ]
Construct a connectivity matrix. Args: n (int): The dimensions of the matrix _from (tuple[int]): Nodes with outgoing connections to ``to`` to (tuple[int]): Nodes with incoming connections from ``_from`` Returns: np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry is ``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0 otherwise.
[ "Construct", "a", "connectivity", "matrix", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L40-L60
train
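A small worked example of relevant_connections; only the import path is assumed.

import numpy as np
from pyphi.connectivity import relevant_connections

# Connections from nodes 0 and 1 to node 2 in a 3-node network.
relevant_connections(3, (0, 1), (2,))
# array([[0., 0., 1.],
#        [0., 0., 1.],
#        [0., 0., 0.]])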
wmayner/pyphi
pyphi/connectivity.py
block_cm
def block_cm(cm):
    """Return whether ``cm`` can be arranged as a block connectivity matrix.

    If so, the corresponding mechanism/purview is trivially reducible.
    Technically, only square matrices are "block diagonal", but the notion
    of connectivity carries over.

    We test for block connectivity by trying to grow a block of nodes such
    that:

    - 'source' nodes only input to nodes in the block
    - 'sink' nodes only receive inputs from source nodes in the block

    For example, the following connectivity matrix represents connections
    from ``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
    generality, note that ``nodes1`` and ``nodes2`` may share elements)::

         D  E  F  G
      A [1, 1, 0, 0]
      B [1, 1, 0, 0]
      C [0, 0, 1, 1]

    Since nodes |AB| only connect to nodes |DE|, and node |C| only connects
    to nodes |FG|, the subgraph is reducible, because the cut ::

      A,B    C
      ─── ✕ ───
      D,E   F,G

    does not change the structure of the graph.
    """
    if np.any(cm.sum(1) == 0):
        return True
    if np.all(cm.sum(1) == 1):
        return True

    outputs = list(range(cm.shape[1]))

    # CM helpers:
    def outputs_of(nodes):
        """Return all nodes that `nodes` connect to (output to)."""
        return np.where(cm[nodes, :].sum(0))[0]

    def inputs_to(nodes):
        """Return all nodes which connect to (input to) `nodes`."""
        return np.where(cm[:, nodes].sum(1))[0]

    # Start: source node with most outputs
    sources = [np.argmax(cm.sum(1))]
    sinks = outputs_of(sources)
    sink_inputs = inputs_to(sinks)

    while True:
        if np.array_equal(sink_inputs, sources):
            # sources exclusively connect to sinks.
            # There are no other nodes which connect sink nodes,
            # hence set(sources) + set(sinks) form a component
            # which is not connected to the rest of the graph
            return True

        # Recompute sources, sinks, and sink_inputs
        sources = sink_inputs
        sinks = outputs_of(sources)
        sink_inputs = inputs_to(sinks)

        # Considering all output nodes?
        if np.array_equal(sinks, outputs):
            return False
python
def block_cm(cm):
    """Return whether ``cm`` can be arranged as a block connectivity matrix.

    If so, the corresponding mechanism/purview is trivially reducible.
    Technically, only square matrices are "block diagonal", but the notion
    of connectivity carries over.

    We test for block connectivity by trying to grow a block of nodes such
    that:

    - 'source' nodes only input to nodes in the block
    - 'sink' nodes only receive inputs from source nodes in the block

    For example, the following connectivity matrix represents connections
    from ``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
    generality, note that ``nodes1`` and ``nodes2`` may share elements)::

         D  E  F  G
      A [1, 1, 0, 0]
      B [1, 1, 0, 0]
      C [0, 0, 1, 1]

    Since nodes |AB| only connect to nodes |DE|, and node |C| only connects
    to nodes |FG|, the subgraph is reducible, because the cut ::

      A,B    C
      ─── ✕ ───
      D,E   F,G

    does not change the structure of the graph.
    """
    if np.any(cm.sum(1) == 0):
        return True
    if np.all(cm.sum(1) == 1):
        return True

    outputs = list(range(cm.shape[1]))

    # CM helpers:
    def outputs_of(nodes):
        """Return all nodes that `nodes` connect to (output to)."""
        return np.where(cm[nodes, :].sum(0))[0]

    def inputs_to(nodes):
        """Return all nodes which connect to (input to) `nodes`."""
        return np.where(cm[:, nodes].sum(1))[0]

    # Start: source node with most outputs
    sources = [np.argmax(cm.sum(1))]
    sinks = outputs_of(sources)
    sink_inputs = inputs_to(sinks)

    while True:
        if np.array_equal(sink_inputs, sources):
            # sources exclusively connect to sinks.
            # There are no other nodes which connect sink nodes,
            # hence set(sources) + set(sinks) form a component
            # which is not connected to the rest of the graph
            return True

        # Recompute sources, sinks, and sink_inputs
        sources = sink_inputs
        sinks = outputs_of(sources)
        sink_inputs = inputs_to(sinks)

        # Considering all output nodes?
        if np.array_equal(sinks, outputs):
            return False
[ "def", "block_cm", "(", "cm", ")", ":", "if", "np", ".", "any", "(", "cm", ".", "sum", "(", "1", ")", "==", "0", ")", ":", "return", "True", "if", "np", ".", "all", "(", "cm", ".", "sum", "(", "1", ")", "==", "1", ")", ":", "return", "True", "outputs", "=", "list", "(", "range", "(", "cm", ".", "shape", "[", "1", "]", ")", ")", "# CM helpers:", "def", "outputs_of", "(", "nodes", ")", ":", "\"\"\"Return all nodes that `nodes` connect to (output to).\"\"\"", "return", "np", ".", "where", "(", "cm", "[", "nodes", ",", ":", "]", ".", "sum", "(", "0", ")", ")", "[", "0", "]", "def", "inputs_to", "(", "nodes", ")", ":", "\"\"\"Return all nodes which connect to (input to) `nodes`.\"\"\"", "return", "np", ".", "where", "(", "cm", "[", ":", ",", "nodes", "]", ".", "sum", "(", "1", ")", ")", "[", "0", "]", "# Start: source node with most outputs", "sources", "=", "[", "np", ".", "argmax", "(", "cm", ".", "sum", "(", "1", ")", ")", "]", "sinks", "=", "outputs_of", "(", "sources", ")", "sink_inputs", "=", "inputs_to", "(", "sinks", ")", "while", "True", ":", "if", "np", ".", "array_equal", "(", "sink_inputs", ",", "sources", ")", ":", "# sources exclusively connect to sinks.", "# There are no other nodes which connect sink nodes,", "# hence set(sources) + set(sinks) form a component", "# which is not connected to the rest of the graph", "return", "True", "# Recompute sources, sinks, and sink_inputs", "sources", "=", "sink_inputs", "sinks", "=", "outputs_of", "(", "sources", ")", "sink_inputs", "=", "inputs_to", "(", "sinks", ")", "# Considering all output nodes?", "if", "np", ".", "array_equal", "(", "sinks", ",", "outputs", ")", ":", "return", "False" ]
Return whether ``cm`` can be arranged as a block connectivity matrix.

If so, the corresponding mechanism/purview is trivially reducible.
Technically, only square matrices are "block diagonal", but the notion of
connectivity carries over.

We test for block connectivity by trying to grow a block of nodes such
that:

- 'source' nodes only input to nodes in the block
- 'sink' nodes only receive inputs from source nodes in the block

For example, the following connectivity matrix represents connections from
``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
generality, note that ``nodes1`` and ``nodes2`` may share elements)::

     D  E  F  G
  A [1, 1, 0, 0]
  B [1, 1, 0, 0]
  C [0, 0, 1, 1]

Since nodes |AB| only connect to nodes |DE|, and node |C| only connects to
nodes |FG|, the subgraph is reducible, because the cut ::

  A,B    C
  ─── ✕ ───
  D,E   F,G

does not change the structure of the graph.
[ "Return", "whether", "cm", "can", "be", "arranged", "as", "a", "block", "connectivity", "matrix", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L63-L130
train
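The docstring's A,B,C -> D,E,F,G example, reproduced as a runnable check, plus a fully connected counterexample; only the import path is assumed.

import numpy as np
from pyphi.connectivity import block_cm

# A and B feed only D and E, while C feeds only F and G, so the matrix is
# block-structured.
cm = np.array([[1, 1, 0, 0],
               [1, 1, 0, 0],
               [0, 0, 1, 1]])
block_cm(cm)  # True

# A fully connected matrix has no such block structure.
block_cm(np.ones((2, 2)))  # False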
wmayner/pyphi
pyphi/connectivity.py
block_reducible
def block_reducible(cm, nodes1, nodes2):
    """Return whether connections from ``nodes1`` to ``nodes2`` are reducible.

    Args:
        cm (np.ndarray): The network's connectivity matrix.
        nodes1 (tuple[int]): Source nodes
        nodes2 (tuple[int]): Sink nodes
    """
    # Trivial case
    if not nodes1 or not nodes2:
        return True

    cm = cm[np.ix_(nodes1, nodes2)]

    # Validate the connectivity matrix.
    if not cm.sum(0).all() or not cm.sum(1).all():
        return True
    if len(nodes1) > 1 and len(nodes2) > 1:
        return block_cm(cm)
    return False
python
def block_reducible(cm, nodes1, nodes2):
    """Return whether connections from ``nodes1`` to ``nodes2`` are reducible.

    Args:
        cm (np.ndarray): The network's connectivity matrix.
        nodes1 (tuple[int]): Source nodes
        nodes2 (tuple[int]): Sink nodes
    """
    # Trivial case
    if not nodes1 or not nodes2:
        return True

    cm = cm[np.ix_(nodes1, nodes2)]

    # Validate the connectivity matrix.
    if not cm.sum(0).all() or not cm.sum(1).all():
        return True
    if len(nodes1) > 1 and len(nodes2) > 1:
        return block_cm(cm)
    return False
[ "def", "block_reducible", "(", "cm", ",", "nodes1", ",", "nodes2", ")", ":", "# Trivial case", "if", "not", "nodes1", "or", "not", "nodes2", ":", "return", "True", "cm", "=", "cm", "[", "np", ".", "ix_", "(", "nodes1", ",", "nodes2", ")", "]", "# Validate the connectivity matrix.", "if", "not", "cm", ".", "sum", "(", "0", ")", ".", "all", "(", ")", "or", "not", "cm", ".", "sum", "(", "1", ")", ".", "all", "(", ")", ":", "return", "True", "if", "len", "(", "nodes1", ")", ">", "1", "and", "len", "(", "nodes2", ")", ">", "1", ":", "return", "block_cm", "(", "cm", ")", "return", "False" ]
Return whether connections from ``nodes1`` to ``nodes2`` are reducible. Args: cm (np.ndarray): The network's connectivity matrix. nodes1 (tuple[int]): Source nodes nodes2 (tuple[int]): Sink nodes
[ "Return", "whether", "connections", "from", "nodes1", "to", "nodes2", "are", "reducible", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L135-L154
train
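A sketch contrasting a reducible and an irreducible node pairing under block_reducible; only the import path is assumed.

import numpy as np
from pyphi.connectivity import block_reducible

cm = np.array([[1, 1, 0, 0],
               [1, 1, 0, 0],
               [0, 0, 1, 1],
               [0, 0, 1, 1]])
block_reducible(cm, (0, 1, 2, 3), (0, 1, 2, 3))  # True: splits into {0, 1} and {2, 3}
block_reducible(cm, (0, 1), (0, 1))              # False: this block is fully connected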
wmayner/pyphi
pyphi/connectivity.py
_connected
def _connected(cm, nodes, connection):
    """Test connectivity for the connectivity matrix."""
    if nodes is not None:
        cm = cm[np.ix_(nodes, nodes)]

    num_components, _ = connected_components(cm, connection=connection)
    return num_components < 2
python
def _connected(cm, nodes, connection):
    """Test connectivity for the connectivity matrix."""
    if nodes is not None:
        cm = cm[np.ix_(nodes, nodes)]

    num_components, _ = connected_components(cm, connection=connection)
    return num_components < 2
[ "def", "_connected", "(", "cm", ",", "nodes", ",", "connection", ")", ":", "if", "nodes", "is", "not", "None", ":", "cm", "=", "cm", "[", "np", ".", "ix_", "(", "nodes", ",", "nodes", ")", "]", "num_components", ",", "_", "=", "connected_components", "(", "cm", ",", "connection", "=", "connection", ")", "return", "num_components", "<", "2" ]
Test connectivity for the connectivity matrix.
[ "Test", "connectivity", "for", "the", "connectivity", "matrix", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L157-L163
train
wmayner/pyphi
pyphi/connectivity.py
is_full
def is_full(cm, nodes1, nodes2):
    """Test connectivity of one set of nodes to another.

    Args:
        cm (``np.ndarray``): The connectivity matrix
        nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
            tested.
        nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will
            be tested.

    Returns:
        bool: ``True`` if all elements in ``nodes1`` output to some
        element in ``nodes2`` and all elements in ``nodes2`` have an
        input from some element in ``nodes1``, or if either set of nodes
        is empty; ``False`` otherwise.
    """
    if not nodes1 or not nodes2:
        return True

    cm = cm[np.ix_(nodes1, nodes2)]

    # Do all nodes have at least one connection?
    return cm.sum(0).all() and cm.sum(1).all()
python
def is_full(cm, nodes1, nodes2):
    """Test connectivity of one set of nodes to another.

    Args:
        cm (``np.ndarray``): The connectivity matrix
        nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
            tested.
        nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will
            be tested.

    Returns:
        bool: ``True`` if all elements in ``nodes1`` output to some
        element in ``nodes2`` and all elements in ``nodes2`` have an
        input from some element in ``nodes1``, or if either set of nodes
        is empty; ``False`` otherwise.
    """
    if not nodes1 or not nodes2:
        return True

    cm = cm[np.ix_(nodes1, nodes2)]

    # Do all nodes have at least one connection?
    return cm.sum(0).all() and cm.sum(1).all()
[ "def", "is_full", "(", "cm", ",", "nodes1", ",", "nodes2", ")", ":", "if", "not", "nodes1", "or", "not", "nodes2", ":", "return", "True", "cm", "=", "cm", "[", "np", ".", "ix_", "(", "nodes1", ",", "nodes2", ")", "]", "# Do all nodes have at least one connection?", "return", "cm", ".", "sum", "(", "0", ")", ".", "all", "(", ")", "and", "cm", ".", "sum", "(", "1", ")", ".", "all", "(", ")" ]
Test connectivity of one set of nodes to another.

Args:
    cm (``np.ndarray``): The connectivity matrix
    nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
        tested.
    nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will be
        tested.

Returns:
    bool: ``True`` if all elements in ``nodes1`` output to some element in
    ``nodes2`` and all elements in ``nodes2`` have an input from some
    element in ``nodes1``, or if either set of nodes is empty; ``False``
    otherwise.
[ "Test", "connectivity", "of", "one", "set", "of", "nodes", "to", "another", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L192-L214
train
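A quick illustration of is_full on a small feed-forward matrix; only the import path is assumed.

import numpy as np
from pyphi.connectivity import is_full

cm = np.array([[0, 1, 1],
               [0, 0, 1],
               [0, 0, 0]])
is_full(cm, (0, 1), (2,))  # True: nodes 0 and 1 both output to node 2
is_full(cm, (1,), (0, 2))  # False: node 0 receives nothing from node 1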
wmayner/pyphi
pyphi/models/cuts.py
_CutBase.apply_cut
def apply_cut(self, cm):
    """Return a modified connectivity matrix with all connections that
    are severed by this cut removed.

    Args:
        cm (np.ndarray): A connectivity matrix.
    """
    # Invert the cut matrix, creating a matrix of preserved connections
    inverse = np.logical_not(self.cut_matrix(cm.shape[0])).astype(int)
    return cm * inverse
python
def apply_cut(self, cm):
    """Return a modified connectivity matrix with all connections that
    are severed by this cut removed.

    Args:
        cm (np.ndarray): A connectivity matrix.
    """
    # Invert the cut matrix, creating a matrix of preserved connections
    inverse = np.logical_not(self.cut_matrix(cm.shape[0])).astype(int)
    return cm * inverse
[ "def", "apply_cut", "(", "self", ",", "cm", ")", ":", "# Invert the cut matrix, creating a matrix of preserved connections", "inverse", "=", "np", ".", "logical_not", "(", "self", ".", "cut_matrix", "(", "cm", ".", "shape", "[", "0", "]", ")", ")", ".", "astype", "(", "int", ")", "return", "cm", "*", "inverse" ]
Return a modified connectivity matrix with all connections that are severed by this cut removed. Args: cm (np.ndarray): A connectivity matrix.
[ "Return", "a", "modified", "connectivity", "matrix", "with", "all", "connections", "that", "are", "severed", "by", "this", "cut", "removed", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L48-L57
train
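A sketch of apply_cut using the Cut class defined later in this file; the import path pyphi.models.cuts is taken from the record's path field.

import numpy as np
from pyphi.models.cuts import Cut

cm = np.ones((3, 3), dtype=int)
cut = Cut((0,), (1, 2))  # sever the connections from node 0 to nodes 1 and 2
cut.apply_cut(cm)
# array([[1, 0, 0],
#        [1, 1, 1],
#        [1, 1, 1]])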
wmayner/pyphi
pyphi/models/cuts.py
_CutBase.cuts_connections
def cuts_connections(self, a, b):
    """Check if this cut severs any connections from ``a`` to ``b``.

    Args:
        a (tuple[int]): A set of nodes.
        b (tuple[int]): A set of nodes.
    """
    n = max(self.indices) + 1
    return self.cut_matrix(n)[np.ix_(a, b)].any()
python
def cuts_connections(self, a, b):
    """Check if this cut severs any connections from ``a`` to ``b``.

    Args:
        a (tuple[int]): A set of nodes.
        b (tuple[int]): A set of nodes.
    """
    n = max(self.indices) + 1
    return self.cut_matrix(n)[np.ix_(a, b)].any()
[ "def", "cuts_connections", "(", "self", ",", "a", ",", "b", ")", ":", "n", "=", "max", "(", "self", ".", "indices", ")", "+", "1", "return", "self", ".", "cut_matrix", "(", "n", ")", "[", "np", ".", "ix_", "(", "a", ",", "b", ")", "]", ".", "any", "(", ")" ]
Check if this cut severs any connections from ``a`` to ``b``. Args: a (tuple[int]): A set of nodes. b (tuple[int]): A set of nodes.
[ "Check", "if", "this", "cut", "severs", "any", "connections", "from", "a", "to", "b", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L59-L67
train
wmayner/pyphi
pyphi/models/cuts.py
_CutBase.all_cut_mechanisms
def all_cut_mechanisms(self):
    """Return all mechanisms with elements on both sides of this cut.

    Yields:
        tuple[int]: The next cut mechanism.
    """
    for mechanism in utils.powerset(self.indices, nonempty=True):
        if self.splits_mechanism(mechanism):
            yield mechanism
python
def all_cut_mechanisms(self):
    """Return all mechanisms with elements on both sides of this cut.

    Yields:
        tuple[int]: The next cut mechanism.
    """
    for mechanism in utils.powerset(self.indices, nonempty=True):
        if self.splits_mechanism(mechanism):
            yield mechanism
[ "def", "all_cut_mechanisms", "(", "self", ")", ":", "for", "mechanism", "in", "utils", ".", "powerset", "(", "self", ".", "indices", ",", "nonempty", "=", "True", ")", ":", "if", "self", ".", "splits_mechanism", "(", "mechanism", ")", ":", "yield", "mechanism" ]
Return all mechanisms with elements on both sides of this cut. Yields: tuple[int]: The next cut mechanism.
[ "Return", "all", "mechanisms", "with", "elements", "on", "both", "sides", "of", "this", "cut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L81-L89
train
wmayner/pyphi
pyphi/models/cuts.py
Cut.cut_matrix
def cut_matrix(self, n):
    """Compute the cut matrix for this cut.

    The cut matrix is a square matrix which represents connections severed
    by the cut.

    Args:
        n (int): The size of the network.

    Example:
        >>> cut = Cut((1,), (2,))
        >>> cut.cut_matrix(3)
        array([[0., 0., 0.],
               [0., 0., 1.],
               [0., 0., 0.]])
    """
    return connectivity.relevant_connections(n, self.from_nodes,
                                             self.to_nodes)
python
def cut_matrix(self, n):
    """Compute the cut matrix for this cut.

    The cut matrix is a square matrix which represents connections severed
    by the cut.

    Args:
        n (int): The size of the network.

    Example:
        >>> cut = Cut((1,), (2,))
        >>> cut.cut_matrix(3)
        array([[0., 0., 0.],
               [0., 0., 1.],
               [0., 0., 0.]])
    """
    return connectivity.relevant_connections(n, self.from_nodes,
                                             self.to_nodes)
[ "def", "cut_matrix", "(", "self", ",", "n", ")", ":", "return", "connectivity", ".", "relevant_connections", "(", "n", ",", "self", ".", "from_nodes", ",", "self", ".", "to_nodes", ")" ]
Compute the cut matrix for this cut.

The cut matrix is a square matrix which represents connections severed by
the cut.

Args:
    n (int): The size of the network.

Example:
    >>> cut = Cut((1,), (2,))
    >>> cut.cut_matrix(3)
    array([[0., 0., 0.],
           [0., 0., 1.],
           [0., 0., 0.]])
[ "Compute", "the", "cut", "matrix", "for", "this", "cut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L153-L170
train
wmayner/pyphi
pyphi/models/cuts.py
KCut.cut_matrix
def cut_matrix(self, n):
    """The matrix of connections that are severed by this cut."""
    cm = np.zeros((n, n))

    for part in self.partition:
        from_, to = self.direction.order(part.mechanism, part.purview)
        # All indices external to this part
        external = tuple(set(self.indices) - set(to))
        cm[np.ix_(from_, external)] = 1

    return cm
python
def cut_matrix(self, n):
    """The matrix of connections that are severed by this cut."""
    cm = np.zeros((n, n))

    for part in self.partition:
        from_, to = self.direction.order(part.mechanism, part.purview)
        # All indices external to this part
        external = tuple(set(self.indices) - set(to))
        cm[np.ix_(from_, external)] = 1

    return cm
[ "def", "cut_matrix", "(", "self", ",", "n", ")", ":", "cm", "=", "np", ".", "zeros", "(", "(", "n", ",", "n", ")", ")", "for", "part", "in", "self", ".", "partition", ":", "from_", ",", "to", "=", "self", ".", "direction", ".", "order", "(", "part", ".", "mechanism", ",", "part", ".", "purview", ")", "# All indices external to this part", "external", "=", "tuple", "(", "set", "(", "self", ".", "indices", ")", "-", "set", "(", "to", ")", ")", "cm", "[", "np", ".", "ix_", "(", "from_", ",", "external", ")", "]", "=", "1", "return", "cm" ]
The matrix of connections that are severed by this cut.
[ "The", "matrix", "of", "connections", "that", "are", "severed", "by", "this", "cut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L204-L214
train
wmayner/pyphi
pyphi/compute/distance.py
concept_distance
def concept_distance(c1, c2):
    """Return the distance between two concepts in concept space.

    Args:
        c1 (Concept): The first concept.
        c2 (Concept): The second concept.

    Returns:
        float: The distance between the two concepts in concept space.
    """
    # Calculate the sum of the cause and effect EMDs, expanding the repertoires
    # to the combined purview of the two concepts, so that the EMD signatures
    # are the same size.
    cause_purview = tuple(set(c1.cause.purview + c2.cause.purview))
    effect_purview = tuple(set(c1.effect.purview + c2.effect.purview))
    # Take the sum
    return (repertoire_distance(c1.expand_cause_repertoire(cause_purview),
                                c2.expand_cause_repertoire(cause_purview)) +
            repertoire_distance(c1.expand_effect_repertoire(effect_purview),
                                c2.expand_effect_repertoire(effect_purview)))
python
def concept_distance(c1, c2):
    """Return the distance between two concepts in concept space.

    Args:
        c1 (Concept): The first concept.
        c2 (Concept): The second concept.

    Returns:
        float: The distance between the two concepts in concept space.
    """
    # Calculate the sum of the cause and effect EMDs, expanding the repertoires
    # to the combined purview of the two concepts, so that the EMD signatures
    # are the same size.
    cause_purview = tuple(set(c1.cause.purview + c2.cause.purview))
    effect_purview = tuple(set(c1.effect.purview + c2.effect.purview))
    # Take the sum
    return (repertoire_distance(c1.expand_cause_repertoire(cause_purview),
                                c2.expand_cause_repertoire(cause_purview)) +
            repertoire_distance(c1.expand_effect_repertoire(effect_purview),
                                c2.expand_effect_repertoire(effect_purview)))
[ "def", "concept_distance", "(", "c1", ",", "c2", ")", ":", "# Calculate the sum of the cause and effect EMDs, expanding the repertoires", "# to the combined purview of the two concepts, so that the EMD signatures", "# are the same size.", "cause_purview", "=", "tuple", "(", "set", "(", "c1", ".", "cause", ".", "purview", "+", "c2", ".", "cause", ".", "purview", ")", ")", "effect_purview", "=", "tuple", "(", "set", "(", "c1", ".", "effect", ".", "purview", "+", "c2", ".", "effect", ".", "purview", ")", ")", "# Take the sum", "return", "(", "repertoire_distance", "(", "c1", ".", "expand_cause_repertoire", "(", "cause_purview", ")", ",", "c2", ".", "expand_cause_repertoire", "(", "cause_purview", ")", ")", "+", "repertoire_distance", "(", "c1", ".", "expand_effect_repertoire", "(", "effect_purview", ")", ",", "c2", ".", "expand_effect_repertoire", "(", "effect_purview", ")", ")", ")" ]
Return the distance between two concepts in concept space. Args: c1 (Concept): The first concept. c2 (Concept): The second concept. Returns: float: The distance between the two concepts in concept space.
[ "Return", "the", "distance", "between", "two", "concepts", "in", "concept", "space", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/distance.py#L16-L35
train
wmayner/pyphi
pyphi/compute/distance.py
small_phi_ces_distance
def small_phi_ces_distance(C1, C2):
    """Return the difference in |small_phi| between |CauseEffectStructure|."""
    return sum(c.phi for c in C1) - sum(c.phi for c in C2)
python
def small_phi_ces_distance(C1, C2):
    """Return the difference in |small_phi| between |CauseEffectStructure|."""
    return sum(c.phi for c in C1) - sum(c.phi for c in C2)
[ "def", "small_phi_ces_distance", "(", "C1", ",", "C2", ")", ":", "return", "sum", "(", "c", ".", "phi", "for", "c", "in", "C1", ")", "-", "sum", "(", "c", ".", "phi", "for", "c", "in", "C2", ")" ]
Return the difference in |small_phi| between |CauseEffectStructure|.
[ "Return", "the", "difference", "in", "|small_phi|", "between", "|CauseEffectStructure|", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/distance.py#L149-L151
train
wmayner/pyphi
pyphi/node.py
generate_nodes
def generate_nodes(tpm, cm, network_state, indices, node_labels=None):
    """Generate |Node| objects for a subsystem.

    Args:
        tpm (np.ndarray): The system's TPM
        cm (np.ndarray): The corresponding CM.
        network_state (tuple): The state of the network.
        indices (tuple[int]): Indices to generate nodes for.

    Keyword Args:
        node_labels (|NodeLabels|): Textual labels for each node.

    Returns:
        tuple[Node]: The nodes of the system.
    """
    if node_labels is None:
        node_labels = NodeLabels(None, indices)

    node_state = utils.state_of(indices, network_state)

    return tuple(Node(tpm, cm, index, state, node_labels)
                 for index, state in zip(indices, node_state))
python
def generate_nodes(tpm, cm, network_state, indices, node_labels=None):
    """Generate |Node| objects for a subsystem.

    Args:
        tpm (np.ndarray): The system's TPM
        cm (np.ndarray): The corresponding CM.
        network_state (tuple): The state of the network.
        indices (tuple[int]): Indices to generate nodes for.

    Keyword Args:
        node_labels (|NodeLabels|): Textual labels for each node.

    Returns:
        tuple[Node]: The nodes of the system.
    """
    if node_labels is None:
        node_labels = NodeLabels(None, indices)

    node_state = utils.state_of(indices, network_state)

    return tuple(Node(tpm, cm, index, state, node_labels)
                 for index, state in zip(indices, node_state))
[ "def", "generate_nodes", "(", "tpm", ",", "cm", ",", "network_state", ",", "indices", ",", "node_labels", "=", "None", ")", ":", "if", "node_labels", "is", "None", ":", "node_labels", "=", "NodeLabels", "(", "None", ",", "indices", ")", "node_state", "=", "utils", ".", "state_of", "(", "indices", ",", "network_state", ")", "return", "tuple", "(", "Node", "(", "tpm", ",", "cm", ",", "index", ",", "state", ",", "node_labels", ")", "for", "index", ",", "state", "in", "zip", "(", "indices", ",", "node_state", ")", ")" ]
Generate |Node| objects for a subsystem. Args: tpm (np.ndarray): The system's TPM cm (np.ndarray): The corresponding CM. network_state (tuple): The state of the network. indices (tuple[int]): Indices to generate nodes for. Keyword Args: node_labels (|NodeLabels|): Textual labels for each node. Returns: tuple[Node]: The nodes of the system.
[ "Generate", "|Node|", "objects", "for", "a", "subsystem", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/node.py#L156-L177
train
wmayner/pyphi
pyphi/node.py
expand_node_tpm
def expand_node_tpm(tpm):
    """Broadcast a node TPM over the full network.

    This is different from broadcasting the TPM of a full system since the
    last dimension (containing the state of the node) contains only the
    probability of *this* node being on, rather than the probabilities for
    each node.
    """
    uc = np.ones([2 for node in tpm.shape])
    return uc * tpm
python
def expand_node_tpm(tpm):
    """Broadcast a node TPM over the full network.

    This is different from broadcasting the TPM of a full system since the
    last dimension (containing the state of the node) contains only the
    probability of *this* node being on, rather than the probabilities for
    each node.
    """
    uc = np.ones([2 for node in tpm.shape])
    return uc * tpm
[ "def", "expand_node_tpm", "(", "tpm", ")", ":", "uc", "=", "np", ".", "ones", "(", "[", "2", "for", "node", "in", "tpm", ".", "shape", "]", ")", "return", "uc", "*", "tpm" ]
Broadcast a node TPM over the full network. This is different from broadcasting the TPM of a full system since the last dimension (containing the state of the node) contains only the probability of *this* node being on, rather than the probabilities for each node.
[ "Broadcast", "a", "node", "TPM", "over", "the", "full", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/node.py#L180-L188
train
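A toy illustration of the broadcasting performed by expand_node_tpm; the shape is chosen only to show the mechanics, and the import path is taken from the record's path field.

import numpy as np
from pyphi.node import expand_node_tpm

# A node TPM whose ON-probability depends only on the first of two nodes;
# the second dimension is a singleton.
tpm = np.array([[0.0],
                [1.0]])  # shape (2, 1)
expand_node_tpm(tpm)
# array([[0., 0.],
#        [1., 1.]])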
wmayner/pyphi
pyphi/tpm.py
condition_tpm
def condition_tpm(tpm, fixed_nodes, state):
    """Return a TPM conditioned on the given fixed node indices, whose
    states are fixed according to the given state-tuple.

    The dimensions of the new TPM that correspond to the fixed nodes are
    collapsed onto their state, making those dimensions singletons suitable
    for broadcasting. The number of dimensions of the conditioned TPM will
    be the same as the unconditioned TPM.
    """
    conditioning_indices = [[slice(None)]] * len(state)
    for i in fixed_nodes:
        # Preserve singleton dimensions with `np.newaxis`
        conditioning_indices[i] = [state[i], np.newaxis]
    # Flatten the indices.
    conditioning_indices = list(chain.from_iterable(conditioning_indices))
    # Obtain the actual conditioned TPM by indexing with the conditioning
    # indices.
    return tpm[tuple(conditioning_indices)]
python
def condition_tpm(tpm, fixed_nodes, state):
    """Return a TPM conditioned on the given fixed node indices, whose
    states are fixed according to the given state-tuple.

    The dimensions of the new TPM that correspond to the fixed nodes are
    collapsed onto their state, making those dimensions singletons suitable
    for broadcasting. The number of dimensions of the conditioned TPM will
    be the same as the unconditioned TPM.
    """
    conditioning_indices = [[slice(None)]] * len(state)
    for i in fixed_nodes:
        # Preserve singleton dimensions with `np.newaxis`
        conditioning_indices[i] = [state[i], np.newaxis]
    # Flatten the indices.
    conditioning_indices = list(chain.from_iterable(conditioning_indices))
    # Obtain the actual conditioned TPM by indexing with the conditioning
    # indices.
    return tpm[tuple(conditioning_indices)]
[ "def", "condition_tpm", "(", "tpm", ",", "fixed_nodes", ",", "state", ")", ":", "conditioning_indices", "=", "[", "[", "slice", "(", "None", ")", "]", "]", "*", "len", "(", "state", ")", "for", "i", "in", "fixed_nodes", ":", "# Preserve singleton dimensions with `np.newaxis`", "conditioning_indices", "[", "i", "]", "=", "[", "state", "[", "i", "]", ",", "np", ".", "newaxis", "]", "# Flatten the indices.", "conditioning_indices", "=", "list", "(", "chain", ".", "from_iterable", "(", "conditioning_indices", ")", ")", "# Obtain the actual conditioned TPM by indexing with the conditioning", "# indices.", "return", "tpm", "[", "tuple", "(", "conditioning_indices", ")", "]" ]
Return a TPM conditioned on the given fixed node indices, whose states are fixed according to the given state-tuple. The dimensions of the new TPM that correspond to the fixed nodes are collapsed onto their state, making those dimensions singletons suitable for broadcasting. The number of dimensions of the conditioned TPM will be the same as the unconditioned TPM.
[ "Return", "a", "TPM", "conditioned", "on", "the", "given", "fixed", "node", "indices", "whose", "states", "are", "fixed", "according", "to", "the", "given", "state", "-", "tuple", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L29-L46
train
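A minimal sketch of conditioning a small multidimensional TPM; the example values are illustrative, and only the import path is assumed.

import numpy as np
from pyphi.tpm import condition_tpm

# A 2-node state-by-node TPM in multidimensional form: tpm[s0, s1] holds
# the next-state ON-probabilities of (node 0, node 1).
tpm = np.array([[[0.0, 0.0], [1.0, 0.0]],
                [[0.0, 1.0], [1.0, 1.0]]])
# Fix node 1 in its current state (1); that dimension becomes a singleton.
condition_tpm(tpm, (1,), (0, 1)).shape  # (2, 1, 2)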
wmayner/pyphi
pyphi/tpm.py
expand_tpm
def expand_tpm(tpm):
    """Broadcast a state-by-node TPM so that singleton dimensions are
    expanded over the full network.
    """
    unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]])
    return tpm * unconstrained
python
def expand_tpm(tpm):
    """Broadcast a state-by-node TPM so that singleton dimensions are
    expanded over the full network.
    """
    unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]])
    return tpm * unconstrained
[ "def", "expand_tpm", "(", "tpm", ")", ":", "unconstrained", "=", "np", ".", "ones", "(", "[", "2", "]", "*", "(", "tpm", ".", "ndim", "-", "1", ")", "+", "[", "tpm", ".", "shape", "[", "-", "1", "]", "]", ")", "return", "tpm", "*", "unconstrained" ]
Broadcast a state-by-node TPM so that singleton dimensions are expanded over the full network.
[ "Broadcast", "a", "state", "-", "by", "-", "node", "TPM", "so", "that", "singleton", "dimensions", "are", "expanded", "over", "the", "full", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L49-L54
train
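A companion sketch to the condition_tpm example above, showing the inverse broadcasting step; only the import path is assumed.

import numpy as np
from pyphi.tpm import expand_tpm

# Node 0's next state ignores node 1, so the singleton second dimension
# can be broadcast back over both of node 1's states.
tpm = np.array([[[0.0, 0.5]],
                [[1.0, 0.5]]])  # shape (2, 1, 2)
expand_tpm(tpm).shape           # (2, 2, 2)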