Dataset schema (column name, type, observed range):

- body: string, length 26 to 98.2k
- body_hash: int64, -9,222,864,604,528,158,000 to 9,221,803,474B
- docstring: string, length 1 to 16.8k
- path: string, length 5 to 230
- name: string, length 1 to 96
- repository_name: string, length 7 to 89
- lang: string, 1 distinct value
- body_without_docstring: string, length 20 to 98.2k
def assign_only_product_as_production(db):
    """Assign only product as reference product.

    Skips datasets that already have a reference product or no production exchanges.
    Production exchanges must have a ``name`` and an amount.

    Will replace the following activity fields, if not already specified:

    * 'name' - name of reference product
    * 'unit' - unit of reference product
    * 'production amount' - amount of reference product

    """
    for ds in db:
        if ds.get('reference product'):
            continue
        products = [x for x in ds.get('exchanges', []) if x.get('type') == 'production']
        if len(products) == 1:
            product = products[0]
            assert product['name']
            ds['reference product'] = product['name']
            ds['production amount'] = product['amount']
            ds['name'] = ds.get('name') or product['name']
            ds['unit'] = ds.get('unit') or product.get('unit') or 'Unknown'
    return db
-1,332,683,319,952,914,000
Assign only product as reference product. Skips datasets that already have a reference product or no production exchanges. Production exchanges must have a ``name`` and an amount. Will replace the following activity fields, if not already specified: * 'name' - name of reference product * 'unit' - unit of reference product * 'production amount' - amount of reference product
bw2io/strategies/generic.py
assign_only_product_as_production
pjamesjoyce/brightway2-io
python
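A minimal usage sketch for the strategy above; the toy dataset is invented for illustration (the function operates on plain lists of dicts):

db = [{
    'exchanges': [
        {'type': 'production', 'name': 'steel', 'amount': 1.0, 'unit': 'kilogram'},
        {'type': 'technosphere', 'name': 'electricity', 'amount': 0.5},
    ],
}]
db = assign_only_product_as_production(db)

# The single production exchange becomes the reference product and fills
# in the missing activity fields.
assert db[0]['reference product'] == 'steel'
assert db[0]['production amount'] == 1.0
assert db[0]['name'] == 'steel'
assert db[0]['unit'] == 'kilogram'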
def link_technosphere_by_activity_hash(db, external_db_name=None, fields=None):
    """Link technosphere exchanges using ``activity_hash`` function.

    If ``external_db_name``, link against a different database; otherwise link internally.

    If ``fields``, link using only certain fields."""
    TECHNOSPHERE_TYPES = {'technosphere', 'substitution', 'production'}
    if external_db_name is not None:
        if external_db_name not in databases:
            raise StrategyError("Can't find external database {}".format(external_db_name))
        other = (
            obj for obj in Database(external_db_name)
            if obj.get('type', 'process') == 'process'
        )
        internal = False
    else:
        other = None
        internal = True
    return link_iterable_by_fields(db, other, internal=internal, kind=TECHNOSPHERE_TYPES, fields=fields)
4,406,278,707,064,293,400
Link technosphere exchanges using ``activity_hash`` function. If ``external_db_name``, link against a different database; otherwise link internally. If ``fields``, link using only certain fields.
bw2io/strategies/generic.py
link_technosphere_by_activity_hash
pjamesjoyce/brightway2-io
python
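A hedged usage sketch; 'ecoinvent' is a placeholder database name, and the calls assume a Brightway2 project where that database is registered (otherwise ``StrategyError`` is raised):

# Link unlinked technosphere exchanges against an external database,
# matching only on the given fields.
db = link_technosphere_by_activity_hash(
    db,
    external_db_name='ecoinvent',            # placeholder name
    fields=('name', 'unit', 'location'),
)

# Or link internally within the imported data itself.
db = link_technosphere_by_activity_hash(db, fields=('name', 'unit'))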
def set_code_by_activity_hash(db, overwrite=False):
    """Use ``activity_hash`` to set dataset code.

    By default, won't overwrite existing codes, but will if ``overwrite`` is ``True``."""
    for ds in db:
        if 'code' not in ds or overwrite:
            ds['code'] = activity_hash(ds)
    return db
8,632,796,420,951,601,000
Use ``activity_hash`` to set dataset code. By default, won't overwrite existing codes, but will if ``overwrite`` is ``True``.
bw2io/strategies/generic.py
set_code_by_activity_hash
pjamesjoyce/brightway2-io
python
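A small sketch of the code-setting behaviour; ``activity_hash`` (from bw2io's utilities) returns a deterministic hash string for a dataset's identifying fields:

db = [{'name': 'steel production', 'unit': 'kilogram', 'location': 'GLO'}]
db = set_code_by_activity_hash(db)
code = db[0]['code']

# Without overwrite=True an existing code is left untouched.
db = set_code_by_activity_hash(db)
assert db[0]['code'] == code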
def drop_unlinked(db):
    """This is the nuclear option - use at your own risk!"""
    for ds in db:
        ds['exchanges'] = [obj for obj in ds['exchanges'] if obj.get('input')]
    return db
127,509,395,245,177,260
This is the nuclear option - use at your own risk!
bw2io/strategies/generic.py
drop_unlinked
pjamesjoyce/brightway2-io
python
def normalize_units(db):
    """Normalize units in datasets and their exchanges"""
    for ds in db:
        if 'unit' in ds:
            ds['unit'] = normalize_units_function(ds['unit'])
        for exc in ds.get('exchanges', []):
            if 'unit' in exc:
                exc['unit'] = normalize_units_function(exc['unit'])
        for param in ds.get('parameters', {}).values():
            if 'unit' in param:
                param['unit'] = normalize_units_function(param['unit'])
    return db
8,029,459,837,467,838
Normalize units in datasets and their exchanges
bw2io/strategies/generic.py
normalize_units
pjamesjoyce/brightway2-io
python
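A usage sketch; ``normalize_units_function`` is bw2io's unit-normalization lookup, which maps common abbreviations to canonical names (exact mappings depend on the installed bw2io version):

db = [{
    'unit': 'kg',
    'exchanges': [{'unit': 'kWh', 'amount': 2}],
    'parameters': {'eff': {'unit': 'kg', 'amount': 0.9}},
}]
db = normalize_units(db)
# e.g. 'kg' -> 'kilogram' per bw2io's unit table; units are normalized in
# the dataset itself, its exchanges, and its parameters.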
def add_database_name(db, name):
    """Add database name to datasets"""
    for ds in db:
        ds['database'] = name
    return db
3,011,635,110,219,210,000
Add database name to datasets
bw2io/strategies/generic.py
add_database_name
pjamesjoyce/brightway2-io
python
def convert_uncertainty_types_to_integers(db):
    """Generic number conversion functions convert to floats; convert uncertainty types back to integers."""
    for ds in db:
        for exc in ds['exchanges']:
            try:
                exc['uncertainty type'] = int(exc['uncertainty type'])
            except (KeyError, TypeError, ValueError):
                # Missing or non-numeric uncertainty type: leave the exchange as-is.
                pass
    return db
7,904,900,524,443,326,000
Generic number conversion functions convert to floats; convert uncertainty types back to integers.
bw2io/strategies/generic.py
convert_uncertainty_types_to_integers
pjamesjoyce/brightway2-io
python
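A quick illustration: exchanges whose uncertainty type arrived as a float are coerced back to ``int``; exchanges without the field are skipped:

db = [{'exchanges': [{'uncertainty type': 2.0}, {'amount': 1.0}]}]
db = convert_uncertainty_types_to_integers(db)
assert db[0]['exchanges'][0]['uncertainty type'] == 2
assert isinstance(db[0]['exchanges'][0]['uncertainty type'], int)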
def drop_falsey_uncertainty_fields_but_keep_zeros(db):
    """Drop fields like '' but keep zero and NaN.

    Note that this doesn't strip `False`, which behaves *exactly* like 0.

    """
    uncertainty_fields = ['minimum', 'maximum', 'scale', 'shape', 'loc']

    def drop_if_appropriate(exc):
        for field in uncertainty_fields:
            if field not in exc or exc[field] == 0:
                continue
            elif isinstance(exc[field], numbers.Number) and np.isnan(exc[field]):
                continue
            elif not exc[field]:
                del exc[field]

    for ds in db:
        for exc in ds['exchanges']:
            drop_if_appropriate(exc)
    return db
-8,108,741,207,312,171,000
Drop fields like '' but keep zero and NaN. Note that this doesn't strip `False`, which behaves *exactly* like 0.
bw2io/strategies/generic.py
drop_falsey_uncertainty_fields_but_keep_zeros
pjamesjoyce/brightway2-io
python
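The falsey-versus-zero distinction is easy to get wrong, so a worked example (this assumes the module-level ``numbers`` and ``numpy`` imports the function relies on):

exc = {'minimum': '', 'maximum': 0, 'scale': float('nan'), 'loc': None}
db = [{'exchanges': [exc]}]
db = drop_falsey_uncertainty_fields_but_keep_zeros(db)

assert 'minimum' not in exc   # '' is falsey -> dropped
assert exc['maximum'] == 0    # zero is kept
assert 'scale' in exc         # NaN is a number -> kept
assert 'loc' not in exc       # None is falsey -> dropped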
def convert_activity_parameters_to_list(data):
    """Convert activity parameters from dictionary to list of dictionaries"""
    def _(key, value):
        dct = deepcopy(value)
        dct['name'] = key
        return dct

    for ds in data:
        if 'parameters' in ds:
            ds['parameters'] = [_(x, y) for x, y in ds['parameters'].items()]
    return data
-8,496,173,251,939,245,000
Convert activity parameters from dictionary to list of dictionaries
bw2io/strategies/generic.py
convert_activity_parameters_to_list
pjamesjoyce/brightway2-io
python
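A short round-trip example of the dict-to-list conversion:

ds = {'parameters': {'efficiency': {'amount': 0.9, 'unit': 'fraction'}}}
data = convert_activity_parameters_to_list([ds])
assert data[0]['parameters'] == [
    {'amount': 0.9, 'unit': 'fraction', 'name': 'efficiency'}
]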
@property
def Ospfv3PseudoInterface(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv3pseudointerface_bbc932877888c8c8400661ec299754a8.Ospfv3PseudoInterface): An instance of the Ospfv3PseudoInterface class

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv3pseudointerface_bbc932877888c8c8400661ec299754a8 import Ospfv3PseudoInterface
    return Ospfv3PseudoInterface(self)
-7,806,945,575,050,585,000
Returns ------- - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv3pseudointerface_bbc932877888c8c8400661ec299754a8.Ospfv3PseudoInterface): An instance of the Ospfv3PseudoInterface class Raises ------ - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Ospfv3PseudoInterface
rfrye-github/ixnetwork_restpy
python
@property
def Count(self):
    """
    Returns
    -------
    - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
    """
    return self._get_attribute(self._SDM_ATT_MAP['Count'])
9,202,294,428,103,448,000
Returns ------- - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Count
rfrye-github/ixnetwork_restpy
python
@property
def DescriptiveName(self):
    """
    Returns
    -------
    - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
6,335,322,004,352,822,000
Returns ------- - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
DescriptiveName
rfrye-github/ixnetwork_restpy
python
@property
def EnableIp(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable IPv6
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableIp']))
1,591,239,963,546,353,000
Returns ------- - obj(uhd_restpy.multivalue.Multivalue): Enable IPv6
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
EnableIp
rfrye-github/ixnetwork_restpy
python
@property
def FromIP(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): 128 Bits IPv6 address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FromIP']))
619,688,722,874,091,400
Returns ------- - obj(uhd_restpy.multivalue.Multivalue): 128 Bits IPv6 address.
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
FromIP
rfrye-github/ixnetwork_restpy
python
@property
def Name(self):
    """
    Returns
    -------
    - str: Name of NGPF element, guaranteed to be unique in Scenario
    """
    return self._get_attribute(self._SDM_ATT_MAP['Name'])
-1,824,082,867,023,513,900
Returns ------- - str: Name of NGPF element, guaranteed to be unique in Scenario
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Name
rfrye-github/ixnetwork_restpy
python
@property
def SubnetPrefixLength(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Subnet Prefix Length
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SubnetPrefixLength']))
-8,855,745,252,700,621,000
Returns ------- - obj(uhd_restpy.multivalue.Multivalue): Subnet Prefix Length
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
SubnetPrefixLength
rfrye-github/ixnetwork_restpy
python
@property
def ToIP(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): 128 Bits IPv6 address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ToIP']))
-1,024,568,113,818,389,900
Returns ------- - obj(uhd_restpy.multivalue.Multivalue): 128 Bits IPv6 address.
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
ToIP
rfrye-github/ixnetwork_restpy
python
def update(self, Name=None):
    """Updates simInterfaceIPv6Config resource on the server.

    This method has some named parameters with a type: obj (Multivalue).
    The Multivalue class has documentation that details the possible values for those named parameters.

    Args
    ----
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
8,011,544,818,928,487,000
Updates simInterfaceIPv6Config resource on the server. This method has some named parameters with a type: obj (Multivalue). The Multivalue class has documentation that details the possible values for those named parameters. Args ---- - Name (str): Name of NGPF element, guaranteed to be unique in Scenario Raises ------ - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
update
rfrye-github/ixnetwork_restpy
python
def find(self, Count=None, DescriptiveName=None, Name=None):
    """Finds and retrieves simInterfaceIPv6Config resources from the server.

    All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve simInterfaceIPv6Config resources from the server.
    To retrieve an exact match ensure the parameter value starts with ^ and ends with $
    By default the find method takes no parameters and will retrieve all simInterfaceIPv6Config resources from the server.

    Args
    ----
    - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
    - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario

    Returns
    -------
    - self: This instance with matching simInterfaceIPv6Config resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
-1,710,958,394,887,526,400
Finds and retrieves simInterfaceIPv6Config resources from the server. All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve simInterfaceIPv6Config resources from the server. To retrieve an exact match ensure the parameter value starts with ^ and ends with $ By default the find method takes no parameters and will retrieve all simInterfaceIPv6Config resources from the server. Args ---- - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group. - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context. - Name (str): Name of NGPF element, guaranteed to be unique in Scenario Returns ------- - self: This instance with matching simInterfaceIPv6Config resources retrieved from the server available through an iterator or index Raises ------ - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
find
rfrye-github/ixnetwork_restpy
python
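A hedged usage sketch of the ``find`` semantics documented above; ``sim_cfg`` stands for a simInterfaceIPv6Config container obtained from an uhd_restpy session (the variable and names are illustrative):

# Server-side regex matching: anchor with ^...$ for an exact match.
exact = sim_cfg.find(Name='^Interface 1$')

# Unanchored patterns match substrings; no arguments retrieves everything.
edges = sim_cfg.find(Name='Edge')
all_cfgs = sim_cfg.find()

for cfg in all_cfgs:
    print(cfg.Name, cfg.Count)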
def read(self, href):
    """Retrieves a single instance of simInterfaceIPv6Config data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the simInterfaceIPv6Config resources from the server available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._read(href)
-3,705,495,493,069,741,000
Retrieves a single instance of simInterfaceIPv6Config data from the server. Args ---- - href (str): An href to the instance to be retrieved Returns ------- - self: This instance with the simInterfaceIPv6Config resources from the server available through an iterator or index Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
read
rfrye-github/ixnetwork_restpy
python
def get_device_ids(self, PortNames=None, EnableIp=None, FromIP=None, SubnetPrefixLength=None, ToIP=None):
    """Base class infrastructure that gets a list of simInterfaceIPv6Config device ids encapsulated by this object.

    Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.

    Args
    ----
    - PortNames (str): optional regex of port names
    - EnableIp (str): optional regex of enableIp
    - FromIP (str): optional regex of fromIP
    - SubnetPrefixLength (str): optional regex of subnetPrefixLength
    - ToIP (str): optional regex of toIP

    Returns
    -------
    - list(int): A list of device ids that meets the regex criteria provided in the method parameters

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._get_ngpf_device_ids(locals())
-2,168,971,069,518,069,800
Base class infrastructure that gets a list of simInterfaceIPv6Config device ids encapsulated by this object. Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object. Args ---- - PortNames (str): optional regex of port names - EnableIp (str): optional regex of enableIp - FromIP (str): optional regex of fromIP - SubnetPrefixLength (str): optional regex of subnetPrefixLength - ToIP (str): optional regex of toIP Returns ------- - list(int): A list of device ids that meets the regex criteria provided in the method parameters Raises ------ - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
get_device_ids
rfrye-github/ixnetwork_restpy
python
def Abort(self):
    """Executes the abort operation on the server.

    Abort CPF control plane (equals to demote to kUnconfigured state).

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    return self._execute('abort', payload=payload, response_object=None)
2,809,284,878,408,633,000
Executes the abort operation on the server. Abort CPF control plane (equals to demote to kUnconfigured state). Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Abort
rfrye-github/ixnetwork_restpy
python
def Start(self):
    """Executes the start operation on the server.

    Start CPF control plane (equals to promote to negotiated state).

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    return self._execute('start', payload=payload, response_object=None)
6,849,918,185,653,348,000
Executes the start operation on the server. Start CPF control plane (equals to promote to negotiated state). Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Start
rfrye-github/ixnetwork_restpy
python
def Stop(self):
    """Executes the stop operation on the server.

    Stop CPF control plane (equals to demote to PreValidated-DoDDone state).

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    return self._execute('stop', payload=payload, response_object=None)
328,731,283,898,110,100
Executes the stop operation on the server. Stop CPF control plane (equals to demote to PreValidated-DoDDone state). Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition
uhd_restpy/testplatform/sessions/ixnetwork/topology/siminterfaceipv6config_189f3bfbc365f2b105e35cd8b9d542d6.py
Stop
rfrye-github/ixnetwork_restpy
python
@logargs
@ValidateAndDefault({
    'account_name': (OptionalValueType(StrType), None),
    'account_id': (OptionalValueType(SolidFireIDType), None),
    'by_id': (BoolType, False),
    'mvip': (IPv4AddressType, sfdefaults.mvip),
    'username': (StrType, sfdefaults.username),
    'password': (StrType, sfdefaults.password),
    'output_format': (OptionalValueType(SelectionType(sfdefaults.all_output_formats)), None),
})
def AccountListVolumes(account_name, account_id, by_id, mvip, username, password, output_format):
    """
    Show the list of volumes for an account

    Args:
        account_name: the name of the account
        account_id: the ID of the account
        by_id: show volume IDs instead of names
        mvip: the management IP of the cluster
        username: the admin user of the cluster
        password: the admin password of the cluster
        output_format: the format to display the information
    """
    log = GetLogger()
    NameOrID(account_name, account_id, 'account')

    log.info('Searching for accounts')
    try:
        account = SFCluster(mvip, username, password).FindAccount(accountName=account_name, accountID=account_id)
    except UnknownObjectError:
        log.error('Account does not exist')
        return False
    except SolidFireError as e:
        log.error('Could not search for accounts: {}'.format(e))
        return False

    log.info('Searching for volumes')
    try:
        all_volumes = SFCluster(mvip, username, password).ListActiveVolumes()
        all_volumes += SFCluster(mvip, username, password).ListDeletedVolumes()
    except SolidFireError as e:
        log.error('Could not search for volumes: {}'.format(e))
        return False
    all_volumes = {vol['volumeID']: vol for vol in all_volumes}

    attr = 'volumeID' if by_id else 'name'
    account_volumes = [all_volumes[vid][attr] for vid in account.volumes]

    if output_format == 'bash':
        sys.stdout.write(' '.join(str(item) for item in account_volumes) + '\n')
        sys.stdout.flush()
    elif output_format == 'json':
        sys.stdout.write(json.dumps({'volumes': account_volumes}) + '\n')
        sys.stdout.flush()
    else:
        log.info('{} volumes in account {}'.format(len(account.volumes), account.username))
        if account.volumes:
            log.info('  {}'.format(', '.join(str(item) for item in account_volumes)))

    return True
-4,768,445,726,859,416,000
Show the list of volumes for an account Args: account_name: the name of the account account_id: the ID of the account by_id: show volume IDs instead of names mvip: the management IP of the cluster username: the admin user of the cluster password: the admin password of the cluster output_format: the format to display the information
account_list_volumes.py
AccountListVolumes
cseelye/sfauto
python
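An illustrative invocation (the cluster address and credentials below are placeholders; defaults come from ``sfdefaults`` via the decorator when omitted):

ok = AccountListVolumes(
    account_name='tenant01',   # or account_id=1234 with account_name=None
    account_id=None,
    by_id=False,
    mvip='10.0.0.1',
    username='admin',
    password='secret',
    output_format='json',      # 'json', 'bash', or None for log output
)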
@cached_property
def openapi_types():
    """
    This must be a method because a model may have properties that are
    of type self, this must run after the class is loaded

    Returns
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
    """
    lazy_import()
    return {
        'class_id': (str,),
        'object_type': (str,),
        'array_controller': (StorageNetAppNodeRelationship,),
    }
4,470,210,284,486,104,600
This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type.
intersight/model/storage_net_app_sensor_all_of.py
openapi_types
CiscoDevNet/intersight-python
python
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
    """StorageNetAppSensorAllOf - a model defined in OpenAPI

    Args:

    Keyword Args:
        class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "storage.NetAppSensor", must be one of ["storage.NetAppSensor", ]  # noqa: E501
        object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "storage.NetAppSensor", must be one of ["storage.NetAppSensor", ]  # noqa: E501
        _check_type (bool): if True, values for parameters in openapi_types
            will be type checked and a TypeError will be
            raised if the wrong type is input.
            Defaults to True
        _path_to_item (tuple/list): This is a list of keys or values to
            drill down to the model in received_data
            when deserializing a response
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _configuration (Configuration): the instance to use when
            deserializing a file_type parameter.
            If passed, type conversion is attempted
            If omitted no type conversion is done.
        _visited_composed_classes (tuple): This stores a tuple of
            classes that we have traveled through so that
            if we see that class again we will not use its
            discriminator again.
            When traveling through a discriminator, the
            composed schema that is
            is traveled through is added to this set.
            For example if Animal has a discriminator
            petType and we pass in "Dog", and the class Dog
            allOf includes Animal, we move through Animal
            once using the discriminator, and pick Dog.
            Then in Dog, we will make an instance of the
            Animal class but this time we won't travel
            through its discriminator because we passed in
            _visited_composed_classes = (Animal,)
        array_controller (StorageNetAppNodeRelationship): [optional]  # noqa: E501
    """
    class_id = kwargs.get('class_id', 'storage.NetAppSensor')
    object_type = kwargs.get('object_type', 'storage.NetAppSensor')
    _check_type = kwargs.pop('_check_type', True)
    _spec_property_naming = kwargs.pop('_spec_property_naming', False)
    _path_to_item = kwargs.pop('_path_to_item', ())
    _configuration = kwargs.pop('_configuration', None)
    _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

    if args:
        raise ApiTypeError(
            'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__),
            path_to_item=_path_to_item,
            valid_classes=(self.__class__,),
        )

    self._data_store = {}
    self._check_type = _check_type
    self._spec_property_naming = _spec_property_naming
    self._path_to_item = _path_to_item
    self._configuration = _configuration
    self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

    self.class_id = class_id
    self.object_type = object_type
    for var_name, var_value in kwargs.items():
        if (var_name not in self.attribute_map and
                self._configuration is not None and
                self._configuration.discard_unknown_keys and
                self.additional_properties_type is None):
            continue
        setattr(self, var_name, var_value)
-5,255,135,685,495,371,000
StorageNetAppSensorAllOf - a model defined in OpenAPI Args: Keyword Args: class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "storage.NetAppSensor", must be one of ["storage.NetAppSensor", ] # noqa: E501 object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "storage.NetAppSensor", must be one of ["storage.NetAppSensor", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) array_controller (StorageNetAppNodeRelationship): [optional] # noqa: E501
intersight/model/storage_net_app_sensor_all_of.py
__init__
CiscoDevNet/intersight-python
python
@export
def dbcluster_events(
    data: Any,
    cluster_columns: List[Any] = None,
    verbose: bool = False,
    normalize: bool = True,
    time_column: str = 'TimeCreatedUtc',
    max_cluster_distance: float = 0.01,
    min_cluster_samples: int = 2,
    **kwargs,
) -> Tuple[pd.DataFrame, DBSCAN, np.ndarray]:
    """
    Cluster data set according to cluster_columns features.

    Parameters
    ----------
    data : Any
        Input data as a pandas DataFrame or numpy array
    cluster_columns : List[Any], optional
        List of columns to use for features
        - for DataFrame this is a list of column names
        - for numpy array this is a list of column indexes
    verbose : bool, optional
        Print additional information about clustering results (the default is False)
    normalize : bool, optional
        Normalize the input data (should probably always be True)
    time_column : str, optional
        If there is a time column the output data will be ordered by this
        (the default is 'TimeCreatedUtc')
    max_cluster_distance : float, optional
        DBSCAN eps (max cluster member distance) (the default is 0.01)
    min_cluster_samples : int, optional
        DBSCAN min_samples (the minimum cluster size) (the default is 2)

    Other Parameters
    ----------------
    kwargs: Other arguments are passed to DBSCAN constructor

    Returns
    -------
    Tuple[pd.DataFrame, DBSCAN, np.ndarray]
        Output dataframe with clustered rows
        DBSCAN model
        Normalized data set

    """
    allowed_types = [np.ndarray, pd.DataFrame]
    x_input = None
    if isinstance(data, pd.DataFrame):
        if cluster_columns is None:
            x_input = data.values
        else:
            x_input = data[cluster_columns].values
    elif isinstance(data, np.ndarray):
        # Plain ndarray column selection (an ndarray has no .values attribute).
        x_input = data if cluster_columns is None else data[:, cluster_columns]
    if x_input is None:
        mssg = 'Input data not in expected format.\n{} is not one of allowed types {}'
        type_list = ', '.join(str(t) for t in allowed_types)
        mssg = mssg.format(str(type(data)), type_list)
        raise ValueError(mssg)

    db_cluster = DBSCAN(eps=max_cluster_distance, min_samples=min_cluster_samples, **kwargs)
    x_norm = Normalizer().fit_transform(x_input) if normalize else x_input
    db_cluster.fit(x_norm)
    labels = db_cluster.labels_
    cluster_set, counts = np.unique(labels, return_counts=True)

    if verbose:
        print('Clustering for set size ', len(x_norm), ' - ', len(cluster_set), ' clusters')
        print('Individual cluster sizes: ', ', '.join(str(c) for c in counts))

    clustered_events = _merge_clustered_items(cluster_set, labels, data, time_column, counts)

    if verbose:
        print('Cluster output rows: ', len(clustered_events))
    return clustered_events, db_cluster, x_norm
-3,809,816,684,162,627,600
Cluster data set according to cluster_columns features. Parameters ---------- data : Any Input data as a pandas DataFrame or numpy array cluster_columns : List[Any], optional List of columns to use for features - for DataFrame this is a list of column names - for numpy array this is a list of column indexes verbose : bool, optional Print additional information about clustering results (the default is False) normalize : bool, optional Normalize the input data (should probably always be True) time_column : str, optional If there is a time column the output data will be ordered by this (the default is 'TimeCreatedUtc') max_cluster_distance : float, optional DBSCAN eps (max cluster member distance) (the default is 0.01) min_cluster_samples : int, optional DBSCAN min_samples (the minimum cluster size) (the default is 2) Other Parameters ---------------- kwargs: Other arguments are passed to DBSCAN constructor Returns ------- Tuple[pd.DataFrame, DBSCAN, np.ndarray] Output dataframe with clustered rows DBSCAN model Normalized data set
msticpy/analysis/eventcluster.py
dbcluster_events
2xyo/msticpy
python
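A self-contained sketch with synthetic data, assuming the module's imports (pandas, numpy, scikit-learn) are available. The frame includes the default 'TimeCreatedUtc' column because the merge step reads it; all feature values are made up:

import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
df = pd.DataFrame({
    'TimeCreatedUtc': pd.date_range('2024-01-01', periods=41, freq='min', tz='UTC'),
    # Two tight groups of 20 points each, plus one outlier.
    'feat1': np.concatenate([rng.normal(1.0, 0.001, 20), rng.normal(5.0, 0.001, 20), [50.0]]),
    'feat2': np.concatenate([rng.normal(2.0, 0.001, 20), rng.normal(1.0, 0.001, 20), [0.1]]),
})

clustered, model, x_norm = dbcluster_events(df, cluster_columns=['feat1', 'feat2'])
print(clustered[['ClusterId', 'ClusterSize', 'Clustered']])
print('labels:', set(model.labels_))   # -1 marks outliers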
def _merge_clustered_items(
    cluster_set: np.array,
    labels: np.array,
    data: Union[pd.DataFrame, np.array],
    time_column: str,
    counts: np.array,
) -> pd.DataFrame:
    """
    Merge outliers and core clusters into single DataFrame.

    Parameters
    ----------
    cluster_set : np.array
        The set of clusters
    labels : np.array
        The cluster labels
    data : Union[pd.DataFrame, np.array]
        The source data
    time_column : str
        Name of the Time column
    counts : np.array
        The counts of members in each cluster

    Returns
    -------
    pd.DataFrame
        Merged dataframe

    """
    tz_aware = data.iloc[0][time_column].tz
    ts_type = 'datetime64[ns, UTC]' if tz_aware is not None else 'datetime64[ns]'

    cluster_list = []
    for idx in range(len(cluster_set)):
        cluster_id = cluster_set[idx]
        class_members = labels == cluster_id
        if isinstance(data, pd.DataFrame):
            time_ordered = data[class_members].sort_values(time_column, ascending=True)
            first_event_time = time_ordered[0:][time_column].iat[0]
            last_event_time = time_ordered[-1:][time_column].iat[0]
        else:
            first_event_time = None
            last_event_time = None

        if cluster_id == -1:
            cluster_list.append(
                data[class_members]
                .assign(
                    Clustered=False,
                    ClusterId=cluster_id,
                    ClusterSize=1,
                    TimeGenerated=first_event_time,
                    FirstEventTime=first_event_time,
                    LastEventTime=last_event_time,
                )
                .astype(dtype={
                    'TimeGenerated': ts_type,
                    'FirstEventTime': ts_type,
                    'LastEventTime': ts_type,
                })
            )
        else:
            cluster_list.append(
                data[class_members]
                .assign(
                    Clustered=True,
                    ClusterId=cluster_id,
                    ClusterSize=counts[idx],
                    TimeGenerated=first_event_time,
                    FirstEventTime=first_event_time,
                    LastEventTime=last_event_time,
                )[0:1]
                .astype(dtype={
                    'TimeGenerated': ts_type,
                    'FirstEventTime': ts_type,
                    'LastEventTime': ts_type,
                })
            )
    return pd.concat(cluster_list)
413,818,286,560,348,900
Merge outliers and core clusters into single DataFrame. Parameters ---------- cluster_set : np.array The set of clusters labels : np.array The cluster labels data : Union[pd.DataFrame, np.array] The source data time_column : str Name of the Time column counts : np.array The counts of members in each cluster Returns ------- pd.DataFrame Merged dataframe
msticpy/analysis/eventcluster.py
_merge_clustered_items
2xyo/msticpy
python
@export
def add_process_features(
    input_frame: pd.DataFrame, path_separator: str = None, force: bool = False
) -> pd.DataFrame:
    """
    Add numerical features based on patterns of command line and process name.

    Parameters
    ----------
    input_frame : pd.DataFrame
        The input dataframe
    path_separator : str, optional
        Path separator. If not supplied, try to determine
        from 'NewProcessName' column of first 10 rows
        (the default is None)
    force : bool, optional
        Forces re-calculation of feature columns even if they
        already exist (the default is False)

    Returns
    -------
    pd.DataFrame
        Copy of the dataframe with the additional numeric features

    Notes
    -----
    Features added:

    - processNameLen: length of process file name (inc path)
    - processNameTokens: the number of elements in the path
    - processName: the process file name (minus path)
    - commandlineTokens: number of space-separated tokens in the command line
    - commandlineLen: length of the command line
    - commandlineLogLen: log10 length of commandline
    - isSystemSession: 1 if session Id is 0x3e7 for Windows or -1 for Linux
    - commandlineTokensFull: counts number of token separators in commandline
      [\\\\s\\-\\\\/\\.,"\\'\\|&:;%$()]
    - pathScore: sum of ord() value of characters in path
    - pathLogScore: log10 of pathScore
    - commandlineScore: sum of ord() value of characters in commandline
    - commandlineLogScore: log10 of commandlineScore

    """
    output_df = input_frame.copy()

    if 'NewProcessName' in output_df and 'CommandLine' in output_df:
        output_df[['NewProcessName', 'CommandLine']] = output_df[
            ['NewProcessName', 'CommandLine']
        ].fillna(value='')

    if path_separator is None:
        sample_df = output_df.head(10)
        lx_path = len(sample_df[sample_df['NewProcessName'].str.contains('/')])
        path_separator = '/' if lx_path else '\\'
    if 'NewProcessName' in output_df:
        _add_processname_features(output_df, force, path_separator)
    if 'CommandLine' in output_df:
        _add_commandline_features(output_df, force)
    if 'SubjectLogonId' in output_df and ('isSystemSession' not in output_df or force):
        output_df['isSystemSession'] = output_df['SubjectLogonId'].isin(['0x3e7', '-1'])
    return output_df
-5,266,310,471,347,284,000
Add numerical features based on patterns of command line and process name. Parameters ---------- input_frame : pd.DataFrame The input dataframe path_separator : str, optional Path separator. If not supplied, try to determine from 'NewProcessName' column of first 10 rows (the default is None) force : bool, optional Forces re-calculation of feature columns even if they already exist (the default is False) Returns ------- pd.DataFrame Copy of the dataframe with the additional numeric features Notes ----- Features added: - processNameLen: length of process file name (inc path) - processNameTokens: the number of elements in the path - processName: the process file name (minus path) - commandlineTokens: number of space-separated tokens in the command line - commandlineLen: length of the command line - commandlineLogLen: log10 length of commandline - isSystemSession: 1 if session Id is 0x3e7 for Windows or -1 for Linux - commandlineTokensFull: counts number of token separators in commandline [\\s\-\\/\.,"\'\|&:;%$()] - pathScore: sum of ord() value of characters in path - pathLogScore: log10 of pathScore - commandlineScore: sum of ord() value of characters in commandline - commandlineLogScore: log10 of commandlineScore
msticpy/analysis/eventcluster.py
add_process_features
2xyo/msticpy
python
@export def add_process_features(input_frame: pd.DataFrame, path_separator: str=None, force: bool=False) -> pd.DataFrame: '\n Add numerical features based on patterns of command line and process name.\n\n Parameters\n ----------\n input_frame : pd.DataFrame\n The input dataframe\n path_separator : str, optional\n Path separator. If not supplied, try to determine\n from \'NewProcessName\' column of first 10 rows\n (the default is None)\n force : bool, optional\n Forces re-calculation of feature columns even if they\n already exist (the default is False)\n\n Returns\n -------\n pd.DataFrame\n Copy of the dataframe with the additional numeric features\n\n Notes\n -----\n Features added:\n\n - processNameLen: length of process file name (inc path)\n - processNameTokens: the number of elements in the path\n - processName: the process file name (minus path)\n - commandlineTokens: number of space-separated tokens in the command line\n - commandlineLen: length of the command line\n - commandlineLogLen: log10 length of commandline\n - isSystemSession: 1 if session Id is 0x3e7 for Windows or -1 for Linux\n - commandlineTokensFull: counts number of token separators in commandline\n [\\\\s\\-\\\\/\\.,"\\\'\\|&:;%$()]\n - pathScore: sum of ord() value of characters in path\n - pathLogScore: log10 of pathScore\n - commandlineScore: sum of ord() value of characters in commandline\n - commandlineLogScore: log10 of commandlineScore\n\n ' output_df = input_frame.copy() if (('NewProcessName' in output_df) and ('CommandLine' in output_df)): output_df[['NewProcessName', 'CommandLine']] = output_df[['NewProcessName', 'CommandLine']].fillna(value='') if (path_separator is None): sample_df = output_df.head(10) lx_path = len(sample_df[sample_df['NewProcessName'].str.contains('/')]) path_separator = ('/' if lx_path else '\\') if ('NewProcessName' in output_df): _add_processname_features(output_df, force, path_separator) if ('CommandLine' in output_df): _add_commandline_features(output_df, force) if (('SubjectLogonId' in output_df) and (('isSystemSession' not in output_df) or force)): output_df['isSystemSession'] = output_df['SubjectLogonId'].isin(['0x3e7', '-1']) return output_df
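A minimal usage sketch for add_process_features (not part of the dataset record above). It assumes msticpy is installed and importable from the path shown in the record; the sample event rows are hypothetical:

import pandas as pd
from msticpy.analysis.eventcluster import add_process_features

# Hypothetical process-creation events (Windows 4688-style columns)
events = pd.DataFrame({
    'NewProcessName': ['C:\\Windows\\System32\\cmd.exe', 'C:\\Temp\\odd.exe'],
    'CommandLine': ['cmd.exe /c "whoami"', 'odd.exe --exfil'],
    'SubjectLogonId': ['0x3e7', '0x5f2d1'],
})
featured = add_process_features(events)  # returns a copy with the numeric feature columns
print(featured[['processName', 'pathScore', 'commandlineLen', 'isSystemSession']])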
def _add_processname_features(output_df: pd.DataFrame, force: bool, path_separator: str): '\n Add process name default features.\n\n Parameters\n ----------\n output_df : pd.DataFrame\n The dataframe to add features to\n force : bool\n If True overwrite existing feature columns\n path_separator : str\n Path separator for OS\n\n ' if (('processName' not in output_df) or force): output_df['processName'] = output_df.apply((lambda x: x.NewProcessName.split(path_separator)[(- 1)]), axis=1) if (('pathScore' not in output_df) or force): output_df['pathScore'] = output_df.apply((lambda x: char_ord_score(x.NewProcessName)), axis=1) if (('pathLogScore' not in output_df) or force): output_df['pathLogScore'] = output_df.apply((lambda x: (log10(x.pathScore) if x.pathScore else 0)), axis=1) if (('pathHash' not in output_df) or force): output_df['pathHash'] = output_df.apply((lambda x: crc32_hash(x.NewProcessName)), axis=1)
-2,476,954,572,613,136,000
Add process name default features. Parameters ---------- output_df : pd.DataFrame The dataframe to add features to force : bool If True overwrite existing feature columns path_separator : str Path separator for OS
msticpy/analysis/eventcluster.py
_add_processname_features
2xyo/msticpy
python
def _add_processname_features(output_df: pd.DataFrame, force: bool, path_separator: str): '\n Add process name default features.\n\n Parameters\n ----------\n output_df : pd.DataFrame\n The dataframe to add features to\n force : bool\n If True overwrite existing feature columns\n path_separator : str\n Path separator for OS\n\n ' if (('processName' not in output_df) or force): output_df['processName'] = output_df.apply((lambda x: x.NewProcessName.split(path_separator)[(- 1)]), axis=1) if (('pathScore' not in output_df) or force): output_df['pathScore'] = output_df.apply((lambda x: char_ord_score(x.NewProcessName)), axis=1) if (('pathLogScore' not in output_df) or force): output_df['pathLogScore'] = output_df.apply((lambda x: (log10(x.pathScore) if x.pathScore else 0)), axis=1) if (('pathHash' not in output_df) or force): output_df['pathHash'] = output_df.apply((lambda x: crc32_hash(x.NewProcessName)), axis=1)
def _add_commandline_features(output_df: pd.DataFrame, force: bool): '\n Add commandline default features.\n\n Parameters\n ----------\n output_df : pd.DataFrame\n The dataframe to add features to\n force : bool\n If True overwrite existing feature columns\n\n ' if (('commandlineLen' not in output_df) or force): output_df['commandlineLen'] = output_df.apply((lambda x: len(x.CommandLine)), axis=1) if (('commandlineLogLen' not in output_df) or force): output_df['commandlineLogLen'] = output_df.apply((lambda x: (log10(x.commandlineLen) if x.commandlineLen else 0)), axis=1) if (('commandlineTokensFull' not in output_df) or force): output_df['commandlineTokensFull'] = output_df[['CommandLine']].apply((lambda x: delim_count(x.CommandLine)), axis=1) if (('commandlineScore' not in output_df) or force): output_df['commandlineScore'] = output_df.apply((lambda x: char_ord_score(x.CommandLine)), axis=1) if (('commandlineTokensHash' not in output_df) or force): output_df['commandlineTokensHash'] = output_df.apply((lambda x: delim_hash(x.CommandLine)), axis=1)
9,055,919,210,541,722,000
Add commandline default features. Parameters ---------- output_df : pd.DataFrame The dataframe to add features to force : bool If True overwrite existing feature columns
msticpy/analysis/eventcluster.py
_add_commandline_features
2xyo/msticpy
python
def _add_commandline_features(output_df: pd.DataFrame, force: bool): '\n Add commandline default features.\n\n Parameters\n ----------\n output_df : pd.DataFrame\n The dataframe to add features to\n force : bool\n If True overwrite existing feature columns\n\n ' if (('commandlineLen' not in output_df) or force): output_df['commandlineLen'] = output_df.apply((lambda x: len(x.CommandLine)), axis=1) if (('commandlineLogLen' not in output_df) or force): output_df['commandlineLogLen'] = output_df.apply((lambda x: (log10(x.commandlineLen) if x.commandlineLen else 0)), axis=1) if (('commandlineTokensFull' not in output_df) or force): output_df['commandlineTokensFull'] = output_df[['CommandLine']].apply((lambda x: delim_count(x.CommandLine)), axis=1) if (('commandlineScore' not in output_df) or force): output_df['commandlineScore'] = output_df.apply((lambda x: char_ord_score(x.CommandLine)), axis=1) if (('commandlineTokensHash' not in output_df) or force): output_df['commandlineTokensHash'] = output_df.apply((lambda x: delim_hash(x.CommandLine)), axis=1)
@export @lru_cache(maxsize=1024) def delim_count(value: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> int: '\n Count the delimiters in input column.\n\n Parameters\n ----------\n value : str\n Data to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n int\n Count of delimiters in the string.\n\n ' return len(re.findall(delim_list, value))
-5,264,490,467,521,285,000
Count the delimiters in input column.

Parameters
----------
value : str
Data to process
delim_list : str, optional
delimiters to use. (the default is r'[\s\-\\/\.,"\'|&:;%$()]')

Returns
-------
int
Count of delimiters in the string.
msticpy/analysis/eventcluster.py
delim_count
2xyo/msticpy
python
@export @lru_cache(maxsize=1024) def delim_count(value: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> int: '\n Count the delimiters in input column.\n\n Parameters\n ----------\n value : str\n Data to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n int\n Count of delimiters in the string.\n\n ' return len(re.findall(delim_list, value))
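A quick check of delim_count on a hypothetical command line; the default delimiter class matches whitespace, '-', '\\', '/', '.', ',', quotes, '|', '&', ':', ';', '%', '$' and parentheses:

from msticpy.analysis.eventcluster import delim_count

print(delim_count('powershell.exe -enc QQBCAEMA'))
# 4 -> matches '.', ' ', '-', ' ' in order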
@export @lru_cache(maxsize=1024) def delim_hash(value: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> int: '\n Return a hash (CRC32) of the delimiters from input column.\n\n Parameters\n ----------\n value : str\n Data to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n int\n Hash of delimiter set in the string.\n\n ' return crc32(bytes(''.join(re.findall(delim_list, value)), 'utf-8'))
-8,977,949,746,261,767,000
Return a hash (CRC32) of the delimiters from input column.

Parameters
----------
value : str
Data to process
delim_list : str, optional
delimiters to use. (the default is r'[\s\-\\/\.,"\'|&:;%$()]')

Returns
-------
int
Hash of delimiter set in the string.
msticpy/analysis/eventcluster.py
delim_hash
2xyo/msticpy
python
@export @lru_cache(maxsize=1024) def delim_hash(value: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> int: '\n Return a hash (CRC32) of the delimiters from input column.\n\n Parameters\n ----------\n value : str\n Data to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n int\n Hash of delimiter set in the string.\n\n ' return crc32(bytes(''.join(re.findall(delim_list, value)), 'utf-8'))
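Because delim_hash hashes only the delimiter characters, two command lines with the same "shape" but different alphanumeric content collide deliberately. A hypothetical illustration:

from msticpy.analysis.eventcluster import delim_hash

# Same delimiter sequence (' ', ' ', ' ', '/') in both strings -> same CRC32
print(delim_hash('net user bob /add') == delim_hash('net user eve /del'))  # True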
@export @lru_cache(maxsize=1024) def char_ord_score(value: str, scale: int=1) -> int: '\n Return sum of ord values of characters in string.\n\n Parameters\n ----------\n value : str\n Data to process\n scale : int, optional\n reduce the scale of the feature (reducing the\n influence of variations this feature on the clustering\n algorithm (the default is 1)\n\n Returns\n -------\n int\n [description]\n\n Notes\n -----\n This function sums the ordinal value of each character in the\n input string. Two strings with minor differences will result in\n a similar score. However, for strings with highly variable content\n (e.g. command lines or http requests containing GUIDs) this may result\n in too much variance to be useful when you are trying to detect\n similar patterns. You can use the scale parameter to reduce the\n influence of features using this function on clustering and anomaly\n algorithms.\n\n ' return floor((sum((ord(x) for x in value)) / scale))
-2,230,151,292,892,541,200
Return sum of ord values of characters in string.

Parameters
----------
value : str
Data to process
scale : int, optional
Reduce the scale of the feature (reducing the
influence of variations in this feature on the clustering
algorithm) (the default is 1)

Returns
-------
int
Sum of the ord() values of the characters in `value`,
divided by `scale` and floored.

Notes
-----
This function sums the ordinal value of each character in the
input string. Two strings with minor differences will result in
a similar score. However, for strings with highly variable content
(e.g. command lines or http requests containing GUIDs) this may result
in too much variance to be useful when you are trying to detect
similar patterns. You can use the scale parameter to reduce the
influence of features using this function on clustering and anomaly
algorithms.
msticpy/analysis/eventcluster.py
char_ord_score
2xyo/msticpy
python
@export @lru_cache(maxsize=1024) def char_ord_score(value: str, scale: int=1) -> int: '\n Return sum of ord values of characters in string.\n\n Parameters\n ----------\n value : str\n Data to process\n scale : int, optional\n reduce the scale of the feature (reducing the\n influence of variations this feature on the clustering\n algorithm (the default is 1)\n\n Returns\n -------\n int\n [description]\n\n Notes\n -----\n This function sums the ordinal value of each character in the\n input string. Two strings with minor differences will result in\n a similar score. However, for strings with highly variable content\n (e.g. command lines or http requests containing GUIDs) this may result\n in too much variance to be useful when you are trying to detect\n similar patterns. You can use the scale parameter to reduce the\n influence of features using this function on clustering and anomaly\n algorithms.\n\n ' return floor((sum((ord(x) for x in value)) / scale))
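A small worked example of char_ord_score and its scale parameter; the values below follow directly from summing ord() over the characters of the (hypothetical) string:

from msticpy.analysis.eventcluster import char_ord_score

print(char_ord_score('cmd.exe'))            # 676 = 99+109+100+46+101+120+101
print(char_ord_score('cmd.exe', scale=10))  # 67  = floor(676 / 10)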
@export @lru_cache(maxsize=1024) def token_count(value: str, delimiter: str=' ') -> int: "\n Return count of delimiter-separated tokens pd.Series column.\n\n Parameters\n ----------\n value : str\n Data to process\n delimiter : str, optional\n Delimiter used to split the column string.\n (the default is ' ')\n\n Returns\n -------\n int\n count of tokens\n\n " return len(value.split(delimiter))
-6,788,009,994,740,111,000
Return count of delimiter-separated tokens in a string.

Parameters
----------
value : str
Data to process
delimiter : str, optional
Delimiter used to split the string.
(the default is ' ')

Returns
-------
int
count of tokens
msticpy/analysis/eventcluster.py
token_count
2xyo/msticpy
python
@export @lru_cache(maxsize=1024) def token_count(value: str, delimiter: str=' ') -> int: "\n Return count of delimiter-separated tokens pd.Series column.\n\n Parameters\n ----------\n value : str\n Data to process\n delimiter : str, optional\n Delimiter used to split the column string.\n (the default is ' ')\n\n Returns\n -------\n int\n count of tokens\n\n " return len(value.split(delimiter))
def _string_score(input_str): 'Sum the ord(c) for characters in a string.' return sum((ord(x) for x in input_str))
2,873,044,902,948,387,000
Sum the ord(c) for characters in a string.
msticpy/analysis/eventcluster.py
_string_score
2xyo/msticpy
python
def _string_score(input_str): return sum((ord(x) for x in input_str))
@export @lru_cache(maxsize=1024) def crc32_hash(value: str) -> int: '\n Return the CRC32 hash of the input column.\n\n Parameters\n ----------\n value : str\n Data to process\n\n Returns\n -------\n int\n CRC32 hash\n\n ' return crc32(bytes(value.encode('utf-8')))
4,065,869,139,596,183,000
Return the CRC32 hash of the input column. Parameters ---------- value : str Data to process Returns ------- int CRC32 hash
msticpy/analysis/eventcluster.py
crc32_hash
2xyo/msticpy
python
@export @lru_cache(maxsize=1024) def crc32_hash(value: str) -> int: '\n Return the CRC32 hash of the input column.\n\n Parameters\n ----------\n value : str\n Data to process\n\n Returns\n -------\n int\n CRC32 hash\n\n ' return crc32(bytes(value.encode('utf-8')))
@export def delim_count_df(data: pd.DataFrame, column: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> pd.Series: '\n Count the delimiters in input column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n The name of the column to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n pd.Series\n Count of delimiters in the string in `column`.\n\n ' return data[column].str.count(delim_list)
1,464,786,969,080,396,500
Count the delimiters in input column.

Parameters
----------
data : pd.DataFrame
The DataFrame to process
column : str
The name of the column to process
delim_list : str, optional
delimiters to use. (the default is r'[\s\-\\/\.,"\'|&:;%$()]')

Returns
-------
pd.Series
Count of delimiters in the string in `column`.
msticpy/analysis/eventcluster.py
delim_count_df
2xyo/msticpy
python
@export def delim_count_df(data: pd.DataFrame, column: str, delim_list: str='[\\s\\-\\\\/\\.,"\\\'|&:;%$()]') -> pd.Series: '\n Count the delimiters in input column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n The name of the column to process\n delim_list : str, optional\n delimiters to use. (the default is r\'[\\s\\-\\\\/\\.,"\\\'|&:;%$()]\')\n\n Returns\n -------\n pd.Series\n Count of delimiters in the string in `column`.\n\n ' return data[column].str.count(delim_list)
@export def char_ord_score_df(data: pd.DataFrame, column: str, scale: int=1) -> pd.Series: '\n Return sum of ord values of characters in string.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n scale : int, optional\n reduce the scale of the feature (reducing the\n influence of variations this feature on the clustering\n algorithm (the default is 1)\n\n Returns\n -------\n pd.Series\n The sum of the ordinal values of the characters\n in `column`.\n\n Notes\n -----\n This function sums the ordinal value of each character in the\n input string. Two strings with minor differences will result in\n a similar score. However, for strings with highly variable content\n (e.g. command lines or http requests containing GUIDs) this may result\n in too much variance to be useful when you are trying to detect\n similar patterns. You can use the scale parameter to reduce the\n influence of features using this function on clustering and anomaly\n algorithms.\n\n ' return data.apply((lambda x: (sum((ord(char) for char in x[column])) / scale)), axis=1)
8,791,648,072,038,837,000
Return sum of ord values of characters in string.

Parameters
----------
data : pd.DataFrame
The DataFrame to process
column : str
Column name to process
scale : int, optional
Reduce the scale of the feature (reducing the
influence of variations in this feature on the clustering
algorithm) (the default is 1)

Returns
-------
pd.Series
The sum of the ordinal values of the characters
in `column`.

Notes
-----
This function sums the ordinal value of each character in the
input string. Two strings with minor differences will result in
a similar score. However, for strings with highly variable content
(e.g. command lines or http requests containing GUIDs) this may result
in too much variance to be useful when you are trying to detect
similar patterns. You can use the scale parameter to reduce the
influence of features using this function on clustering and anomaly
algorithms.
msticpy/analysis/eventcluster.py
char_ord_score_df
2xyo/msticpy
python
@export def char_ord_score_df(data: pd.DataFrame, column: str, scale: int=1) -> pd.Series: '\n Return sum of ord values of characters in string.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n scale : int, optional\n reduce the scale of the feature (reducing the\n influence of variations this feature on the clustering\n algorithm (the default is 1)\n\n Returns\n -------\n pd.Series\n The sum of the ordinal values of the characters\n in `column`.\n\n Notes\n -----\n This function sums the ordinal value of each character in the\n input string. Two strings with minor differences will result in\n a similar score. However, for strings with highly variable content\n (e.g. command lines or http requests containing GUIDs) this may result\n in too much variance to be useful when you are trying to detect\n similar patterns. You can use the scale parameter to reduce the\n influence of features using this function on clustering and anomaly\n algorithms.\n\n ' return data.apply((lambda x: (sum((ord(char) for char in x[column])) / scale)), axis=1)
@export def token_count_df(data: pd.DataFrame, column: str, delimiter: str=' ') -> pd.Series: "\n Return count of delimiter-separated tokens pd.Series column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n delimiter : str, optional\n Delimiter used to split the column string.\n (the default is ' ')\n\n Returns\n -------\n pd.Series\n count of tokens in strings in `column`\n\n " return data.apply((lambda x: len(x[column].split(delimiter))), axis=1)
6,190,990,618,236,187,000
Return count of delimiter-separated tokens in a pd.Series column.

Parameters
----------
data : pd.DataFrame
The DataFrame to process
column : str
Column name to process
delimiter : str, optional
Delimiter used to split the column string.
(the default is ' ')

Returns
-------
pd.Series
count of tokens in strings in `column`
msticpy/analysis/eventcluster.py
token_count_df
2xyo/msticpy
python
@export def token_count_df(data: pd.DataFrame, column: str, delimiter: str=' ') -> pd.Series: "\n Return count of delimiter-separated tokens pd.Series column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n delimiter : str, optional\n Delimiter used to split the column string.\n (the default is ' ')\n\n Returns\n -------\n pd.Series\n count of tokens in strings in `column`\n\n " return data.apply((lambda x: len(x[column].split(delimiter))), axis=1)
@export def crc32_hash_df(data: pd.DataFrame, column: str) -> pd.Series: '\n Return the CRC32 hash of the input column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n\n Returns\n -------\n pd.Series\n CRC32 hash of input column\n\n ' return data.apply((lambda x: crc32(bytes(x[column].encode('utf-8')))), axis=1)
3,132,224,339,412,920,300
Return the CRC32 hash of the input column. Parameters ---------- data : pd.DataFrame The DataFrame to process column : str Column name to process Returns ------- pd.Series CRC32 hash of input column
msticpy/analysis/eventcluster.py
crc32_hash_df
2xyo/msticpy
python
@export def crc32_hash_df(data: pd.DataFrame, column: str) -> pd.Series: '\n Return the CRC32 hash of the input column.\n\n Parameters\n ----------\n data : pd.DataFrame\n The DataFrame to process\n column : str\n Column name to process\n\n Returns\n -------\n pd.Series\n CRC32 hash of input column\n\n ' return data.apply((lambda x: crc32(bytes(x[column].encode('utf-8')))), axis=1)
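The *_df variants above compute the same features over a whole column at once. A hedged sketch combining them on a hypothetical frame:

import pandas as pd
from msticpy.analysis.eventcluster import (
    delim_count_df, char_ord_score_df, token_count_df, crc32_hash_df,
)

df = pd.DataFrame({'CommandLine': ['net user admin /add', 'ls -la /tmp']})
df['delims'] = delim_count_df(df, 'CommandLine')    # vectorized str.count
df['score'] = char_ord_score_df(df, 'CommandLine')  # row-wise ord() sum
df['tokens'] = token_count_df(df, 'CommandLine')
df['cl_hash'] = crc32_hash_df(df, 'CommandLine')
print(df)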
@export def plot_cluster(db_cluster: DBSCAN, data: pd.DataFrame, x_predict: np.ndarray, plot_label: str=None, plot_features: Tuple[(int, int)]=(0, 1), verbose: bool=False, cut_off: int=3, xlabel: str=None, ylabel: str=None): '\n Plot clustered data as scatter chart.\n\n Parameters\n ----------\n db_cluster : DBSCAN\n DBScan Cluster (from SkLearn DBSCAN).\n data : pd.DataFrame\n Dataframe containing original data.\n x_predict : np.ndarray\n The DBSCAN predict numpy array\n plot_label : str, optional\n If set the column to use to label data points\n (the default is None)\n plot_features : Tuple[int, int], optional\n Which two features in x_predict to plot (the default is (0, 1))\n verbose : bool, optional\n Verbose execution with some extra info\n (the default is False)\n cut_off : int, optional\n The cluster size below which items are considered outliers\n (the default is 3)\n xlabel : str, optional\n x-axis label (the default is None)\n ylabel : str, optional\n y-axis label (the default is None)\n\n ' max_idx = (x_predict.shape[1] - 1) if (plot_features[0] >= x_predict.shape[1]): raise ValueError('plot_features[0] index must be a value from 0 to {}.'.format(max_idx)) if (plot_features[1] >= x_predict.shape[1]): raise ValueError('plot_features[1] index must be a value from 0 to {}.'.format(max_idx)) if (plot_features[0] == plot_features[1]): mssg = 'plot_features indexes must be 2 different values in range 0 to' raise ValueError((mssg + f' {max_idx}.')) labels = db_cluster.labels_ core_samples_mask = np.zeros_like(labels, dtype=bool) core_samples_mask[db_cluster.core_sample_indices_] = True unique_labels = set(labels) colors = [cm.Spectral(each) for each in np.linspace(0, 1, len(unique_labels))] n_clusters_ = (len(set(labels)) - (1 if ((- 1) in labels) else 0)) n_noise_ = list(labels).count((- 1)) (_, counts) = np.unique(labels, return_counts=True) if verbose: print(('Estimated number of clusters: %d' % n_clusters_)) print(('Estimated number of noise points: %d' % n_noise_)) if ((not isinstance(data, pd.DataFrame)) or ((plot_label is not None) and (plot_label not in data))): plot_label = None p_label = None for (cluster_id, color) in zip(unique_labels, colors): if (cluster_id == (- 1)): color = [0, 0, 0, 1] class_member_mask = (labels == cluster_id) cluster_size = counts[cluster_id] marker_size = cluster_size marker = 'o' font_size = 'small' alpha = 0.4 if (cluster_size < cut_off): marker = '+' marker_size = 10 font_size = 'large' alpha = 1.0 xy_pos = x_predict[(class_member_mask & core_samples_mask)] plt.plot(xy_pos[:, plot_features[0]], xy_pos[:, plot_features[1]], marker, markerfacecolor=tuple(color), markersize=marker_size) if plot_label: first_row = data[class_member_mask].iloc[0] if ((not first_row.empty) and (plot_label in first_row)): p_label = first_row[plot_label] try: plt.annotate(p_label, xy=(xy_pos[(0, plot_features[0])], xy_pos[(0, plot_features[1])]), fontsize=font_size, alpha=alpha) except IndexError: pass plt.xlabel(xlabel) plt.ylabel(ylabel) plt.title(('Estimated number of clusters: %d' % n_clusters_)) plt.show() return plt
463,275,847,204,298,500
Plot clustered data as scatter chart. Parameters ---------- db_cluster : DBSCAN DBScan Cluster (from SkLearn DBSCAN). data : pd.DataFrame Dataframe containing original data. x_predict : np.ndarray The DBSCAN predict numpy array plot_label : str, optional If set the column to use to label data points (the default is None) plot_features : Tuple[int, int], optional Which two features in x_predict to plot (the default is (0, 1)) verbose : bool, optional Verbose execution with some extra info (the default is False) cut_off : int, optional The cluster size below which items are considered outliers (the default is 3) xlabel : str, optional x-axis label (the default is None) ylabel : str, optional y-axis label (the default is None)
msticpy/analysis/eventcluster.py
plot_cluster
2xyo/msticpy
python
@export def plot_cluster(db_cluster: DBSCAN, data: pd.DataFrame, x_predict: np.ndarray, plot_label: str=None, plot_features: Tuple[(int, int)]=(0, 1), verbose: bool=False, cut_off: int=3, xlabel: str=None, ylabel: str=None): '\n Plot clustered data as scatter chart.\n\n Parameters\n ----------\n db_cluster : DBSCAN\n DBScan Cluster (from SkLearn DBSCAN).\n data : pd.DataFrame\n Dataframe containing original data.\n x_predict : np.ndarray\n The DBSCAN predict numpy array\n plot_label : str, optional\n If set the column to use to label data points\n (the default is None)\n plot_features : Tuple[int, int], optional\n Which two features in x_predict to plot (the default is (0, 1))\n verbose : bool, optional\n Verbose execution with some extra info\n (the default is False)\n cut_off : int, optional\n The cluster size below which items are considered outliers\n (the default is 3)\n xlabel : str, optional\n x-axis label (the default is None)\n ylabel : str, optional\n y-axis label (the default is None)\n\n ' max_idx = (x_predict.shape[1] - 1) if (plot_features[0] >= x_predict.shape[1]): raise ValueError('plot_features[0] index must be a value from 0 to {}.'.format(max_idx)) if (plot_features[1] >= x_predict.shape[1]): raise ValueError('plot_features[1] index must be a value from 0 to {}.'.format(max_idx)) if (plot_features[0] == plot_features[1]): mssg = 'plot_features indexes must be 2 different values in range 0 to' raise ValueError((mssg + f' {max_idx}.')) labels = db_cluster.labels_ core_samples_mask = np.zeros_like(labels, dtype=bool) core_samples_mask[db_cluster.core_sample_indices_] = True unique_labels = set(labels) colors = [cm.Spectral(each) for each in np.linspace(0, 1, len(unique_labels))] n_clusters_ = (len(set(labels)) - (1 if ((- 1) in labels) else 0)) n_noise_ = list(labels).count((- 1)) (_, counts) = np.unique(labels, return_counts=True) if verbose: print(('Estimated number of clusters: %d' % n_clusters_)) print(('Estimated number of noise points: %d' % n_noise_)) if ((not isinstance(data, pd.DataFrame)) or ((plot_label is not None) and (plot_label not in data))): plot_label = None p_label = None for (cluster_id, color) in zip(unique_labels, colors): if (cluster_id == (- 1)): color = [0, 0, 0, 1] class_member_mask = (labels == cluster_id) cluster_size = counts[cluster_id] marker_size = cluster_size marker = 'o' font_size = 'small' alpha = 0.4 if (cluster_size < cut_off): marker = '+' marker_size = 10 font_size = 'large' alpha = 1.0 xy_pos = x_predict[(class_member_mask & core_samples_mask)] plt.plot(xy_pos[:, plot_features[0]], xy_pos[:, plot_features[1]], marker, markerfacecolor=tuple(color), markersize=marker_size) if plot_label: first_row = data[class_member_mask].iloc[0] if ((not first_row.empty) and (plot_label in first_row)): p_label = first_row[plot_label] try: plt.annotate(p_label, xy=(xy_pos[(0, plot_features[0])], xy_pos[(0, plot_features[1])]), fontsize=font_size, alpha=alpha) except IndexError: pass plt.xlabel(xlabel) plt.ylabel(ylabel) plt.title(('Estimated number of clusters: %d' % n_clusters_)) plt.show() return plt
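An end-to-end sketch: derive features, cluster them with scikit-learn's DBSCAN, then visualize with plot_cluster. The feature pair and eps below are illustrative, not tuned values:

import pandas as pd
from sklearn.cluster import DBSCAN
from msticpy.analysis.eventcluster import add_process_features, plot_cluster

events = pd.DataFrame({
    'NewProcessName': ['C:\\Windows\\System32\\cmd.exe'] * 5 + ['C:\\Temp\\odd.exe'],
    'CommandLine': ['cmd /c dir'] * 5 + ['odd.exe --exfil'],
})
feats = add_process_features(events)
x = feats[['pathScore', 'commandlineLen']].to_numpy(dtype=float)
db = DBSCAN(eps=10.0, min_samples=3).fit(x)  # five identical rows cluster; the odd row is noise
plot_cluster(db, feats, x, plot_label='processName', verbose=True)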
def get_store(factory=AuthStore, key=_store_registry_key, app=None): "Returns an instance of :class:`AuthStore` from the app registry.\n\n It'll try to get it from the current app registry, and if it is not\n registered it'll be instantiated and registered. A second call to this\n function will return the same instance.\n\n :param factory:\n The callable used to build and register the instance if it is not yet\n registered. The default is the class :class:`AuthStore` itself.\n :param key:\n The key used to store the instance in the registry. A default is used\n if it is not set.\n :param app:\n A :class:`webapp2.WSGIApplication` instance used to store the instance.\n The active app is used if it is not set.\n " app = (app or webapp2.get_app()) store = app.registry.get(key) if (not store): store = app.registry[key] = factory(app) return store
6,624,406,828,909,451,000
Returns an instance of :class:`AuthStore` from the app registry. It'll try to get it from the current app registry, and if it is not registered it'll be instantiated and registered. A second call to this function will return the same instance. :param factory: The callable used to build and register the instance if it is not yet registered. The default is the class :class:`AuthStore` itself. :param key: The key used to store the instance in the registry. A default is used if it is not set. :param app: A :class:`webapp2.WSGIApplication` instance used to store the instance. The active app is used if it is not set.
Webapp2_samplesite/webapp2_extras/auth.py
get_store
Rockfish/PythonCourse
python
def get_store(factory=AuthStore, key=_store_registry_key, app=None): "Returns an instance of :class:`AuthStore` from the app registry.\n\n It'll try to get it from the current app registry, and if it is not\n registered it'll be instantiated and registered. A second call to this\n function will return the same instance.\n\n :param factory:\n The callable used to build and register the instance if it is not yet\n registered. The default is the class :class:`AuthStore` itself.\n :param key:\n The key used to store the instance in the registry. A default is used\n if it is not set.\n :param app:\n A :class:`webapp2.WSGIApplication` instance used to store the instance.\n The active app is used if it is not set.\n " app = (app or webapp2.get_app()) store = app.registry.get(key) if (not store): store = app.registry[key] = factory(app) return store
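A hedged sketch of wiring the store into a webapp2 app. The config key name and the dotted user-model path are assumptions following webapp2_extras conventions, not values confirmed by this excerpt:

import webapp2
from webapp2_extras import auth

config = {
    'webapp2_extras.auth': {          # assumed config key for this module
        'user_model': 'models.User',  # hypothetical dotted path to your user model
        'cookie_name': 'auth',
    }
}
app = webapp2.WSGIApplication(routes=[], config=config)
store = auth.get_store(app=app)          # built once, then cached in app.registry
assert auth.get_store(app=app) is store  # a second call returns the same instance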
def set_store(store, key=_store_registry_key, app=None): 'Sets an instance of :class:`AuthStore` in the app registry.\n\n :param store:\n An instance of :class:`AuthStore`.\n :param key:\n The key used to retrieve the instance from the registry. A default\n is used if it is not set.\n :param request:\n A :class:`webapp2.WSGIApplication` instance used to retrieve the\n instance. The active app is used if it is not set.\n ' app = (app or webapp2.get_app()) app.registry[key] = store
2,798,940,574,467,385,300
Sets an instance of :class:`AuthStore` in the app registry.

:param store:
An instance of :class:`AuthStore`.
:param key:
The key used to store the instance in the registry. A default
is used if it is not set.
:param app:
A :class:`webapp2.WSGIApplication` instance used to store the
instance. The active app is used if it is not set.
Webapp2_samplesite/webapp2_extras/auth.py
set_store
Rockfish/PythonCourse
python
def set_store(store, key=_store_registry_key, app=None): 'Sets an instance of :class:`AuthStore` in the app registry.\n\n :param store:\n An instance of :class:`AuthStore`.\n :param key:\n The key used to retrieve the instance from the registry. A default\n is used if it is not set.\n :param request:\n A :class:`webapp2.WSGIApplication` instance used to retrieve the\n instance. The active app is used if it is not set.\n ' app = (app or webapp2.get_app()) app.registry[key] = store
def get_auth(factory=Auth, key=_auth_registry_key, request=None): "Returns an instance of :class:`Auth` from the request registry.\n\n It'll try to get it from the current request registry, and if it is not\n registered it'll be instantiated and registered. A second call to this\n function will return the same instance.\n\n :param factory:\n The callable used to build and register the instance if it is not yet\n registered. The default is the class :class:`Auth` itself.\n :param key:\n The key used to store the instance in the registry. A default is used\n if it is not set.\n :param request:\n A :class:`webapp2.Request` instance used to store the instance. The\n active request is used if it is not set.\n " request = (request or webapp2.get_request()) auth = request.registry.get(key) if (not auth): auth = request.registry[key] = factory(request) return auth
2,269,344,620,877,514,200
Returns an instance of :class:`Auth` from the request registry. It'll try to get it from the current request registry, and if it is not registered it'll be instantiated and registered. A second call to this function will return the same instance. :param factory: The callable used to build and register the instance if it is not yet registered. The default is the class :class:`Auth` itself. :param key: The key used to store the instance in the registry. A default is used if it is not set. :param request: A :class:`webapp2.Request` instance used to store the instance. The active request is used if it is not set.
Webapp2_samplesite/webapp2_extras/auth.py
get_auth
Rockfish/PythonCourse
python
def get_auth(factory=Auth, key=_auth_registry_key, request=None): "Returns an instance of :class:`Auth` from the request registry.\n\n It'll try to get it from the current request registry, and if it is not\n registered it'll be instantiated and registered. A second call to this\n function will return the same instance.\n\n :param factory:\n The callable used to build and register the instance if it is not yet\n registered. The default is the class :class:`Auth` itself.\n :param key:\n The key used to store the instance in the registry. A default is used\n if it is not set.\n :param request:\n A :class:`webapp2.Request` instance used to store the instance. The\n active request is used if it is not set.\n " request = (request or webapp2.get_request()) auth = request.registry.get(key) if (not auth): auth = request.registry[key] = factory(request) return auth
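A hedged handler sketch using get_auth per request; the route and response text are hypothetical:

import webapp2
from webapp2_extras import auth

class SecureHandler(webapp2.RequestHandler):
    def get(self):
        a = auth.get_auth(request=self.request)  # cached in request.registry
        user = a.get_user_by_session()
        if user is None:
            self.redirect('/login')              # hypothetical login route
        else:
            self.response.write('hello %s' % user['user_id'])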
def set_auth(auth, key=_auth_registry_key, request=None): 'Sets an instance of :class:`Auth` in the request registry.\n\n :param auth:\n An instance of :class:`Auth`.\n :param key:\n The key used to retrieve the instance from the registry. A default\n is used if it is not set.\n :param request:\n A :class:`webapp2.Request` instance used to retrieve the instance. The\n active request is used if it is not set.\n ' request = (request or webapp2.get_request()) request.registry[key] = auth
-940,054,351,290,193,800
Sets an instance of :class:`Auth` in the request registry. :param auth: An instance of :class:`Auth`. :param key: The key used to retrieve the instance from the registry. A default is used if it is not set. :param request: A :class:`webapp2.Request` instance used to retrieve the instance. The active request is used if it is not set.
Webapp2_samplesite/webapp2_extras/auth.py
set_auth
Rockfish/PythonCourse
python
def set_auth(auth, key=_auth_registry_key, request=None): 'Sets an instance of :class:`Auth` in the request registry.\n\n :param auth:\n An instance of :class:`Auth`.\n :param key:\n The key used to retrieve the instance from the registry. A default\n is used if it is not set.\n :param request:\n A :class:`webapp2.Request` instance used to retrieve the instance. The\n active request is used if it is not set.\n ' request = (request or webapp2.get_request()) request.registry[key] = auth
def __init__(self, app, config=None): 'Initializes the session store.\n\n :param app:\n A :class:`webapp2.WSGIApplication` instance.\n :param config:\n A dictionary of configuration values to be overridden. See\n the available keys in :data:`default_config`.\n ' self.app = app self.config = app.config.load_config(self.config_key, default_values=default_config, user_values=config)
-9,101,138,287,151,809,000
Initializes the session store. :param app: A :class:`webapp2.WSGIApplication` instance. :param config: A dictionary of configuration values to be overridden. See the available keys in :data:`default_config`.
Webapp2_samplesite/webapp2_extras/auth.py
__init__
Rockfish/PythonCourse
python
def __init__(self, app, config=None): 'Initializes the session store.\n\n :param app:\n A :class:`webapp2.WSGIApplication` instance.\n :param config:\n A dictionary of configuration values to be overridden. See\n the available keys in :data:`default_config`.\n ' self.app = app self.config = app.config.load_config(self.config_key, default_values=default_config, user_values=config)
@webapp2.cached_property def session_attributes(self): 'The list of attributes stored in a session.\n\n This must be an ordered list of unique elements.\n ' seen = set() attrs = (self._session_attributes + self.user_attributes) return [a for a in attrs if ((a not in seen) and (not seen.add(a)))]
-3,796,023,426,019,284,000
The list of attributes stored in a session. This must be an ordered list of unique elements.
Webapp2_samplesite/webapp2_extras/auth.py
session_attributes
Rockfish/PythonCourse
python
@webapp2.cached_property def session_attributes(self): 'The list of attributes stored in a session.\n\n This must be an ordered list of unique elements.\n ' seen = set() attrs = (self._session_attributes + self.user_attributes) return [a for a in attrs if ((a not in seen) and (not seen.add(a)))]
@webapp2.cached_property def user_attributes(self): 'The list of attributes retrieved from the user model.\n\n This must be an ordered list of unique elements.\n ' seen = set() attrs = self.config['user_attributes'] return [a for a in attrs if ((a not in seen) and (not seen.add(a)))]
-1,450,585,386,066,368,500
The list of attributes retrieved from the user model. This must be an ordered list of unique elements.
Webapp2_samplesite/webapp2_extras/auth.py
user_attributes
Rockfish/PythonCourse
python
@webapp2.cached_property def user_attributes(self): 'The list of attributes retrieved from the user model.\n\n This must be an ordered list of unique elements.\n ' seen = set() attrs = self.config['user_attributes'] return [a for a in attrs if ((a not in seen) and (not seen.add(a)))]
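Both properties above rely on the same order-preserving de-duplication idiom: since set.add returns None, "not seen.add(a)" is always True and only serves to record the element. A standalone illustration:

seen = set()
attrs = ['user_id', 'token', 'user_id', 'email', 'token']
unique_ordered = [a for a in attrs if a not in seen and not seen.add(a)]
print(unique_ordered)  # ['user_id', 'token', 'email']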
@webapp2.cached_property def user_model(self): 'Configured user model.' cls = self.config['user_model'] if isinstance(cls, str): cls = self.config['user_model'] = webapp2.import_string(cls) return cls
-9,142,937,729,856,513,000
Configured user model.
Webapp2_samplesite/webapp2_extras/auth.py
user_model
Rockfish/PythonCourse
python
@webapp2.cached_property def user_model(self): cls = self.config['user_model'] if isinstance(cls, str): cls = self.config['user_model'] = webapp2.import_string(cls) return cls
def user_to_dict(self, user): "Returns a dictionary based on a user object.\n\n Extra attributes to be retrieved must be set in this module's\n configuration.\n\n :param user:\n User object: an instance the custom user model.\n :returns:\n A dictionary with user data.\n " if (not user): return None user_dict = dict(((a, getattr(user, a)) for a in self.user_attributes)) user_dict['user_id'] = user.key.id() return user_dict
395,166,271,577,502,900
Returns a dictionary based on a user object.

Extra attributes to be retrieved must be set in this module's
configuration.

:param user:
User object: an instance of the custom user model.
:returns:
A dictionary with user data.
Webapp2_samplesite/webapp2_extras/auth.py
user_to_dict
Rockfish/PythonCourse
python
def user_to_dict(self, user): "Returns a dictionary based on a user object.\n\n Extra attributes to be retrieved must be set in this module's\n configuration.\n\n :param user:\n User object: an instance the custom user model.\n :returns:\n A dictionary with user data.\n " if (not user): return None user_dict = dict(((a, getattr(user, a)) for a in self.user_attributes)) user_dict['user_id'] = user.key.id() return user_dict
def get_user_by_auth_password(self, auth_id, password, silent=False): 'Returns a user dict based on auth_id and password.\n\n :param auth_id:\n Authentication id.\n :param password:\n User password.\n :param silent:\n If True, raises an exception if auth_id or password are invalid.\n :returns:\n A dictionary with user data.\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' try: user = self.user_model.get_by_auth_password(auth_id, password) return self.user_to_dict(user) except (InvalidAuthIdError, InvalidPasswordError): if (not silent): raise return None
3,215,861,248,836,423,000
Returns a user dict based on auth_id and password.

:param auth_id:
Authentication id.
:param password:
User password.
:param silent:
If True, suppresses the exception raised when auth_id or
password are invalid, returning None instead.
:returns:
A dictionary with user data.
:raises:
``InvalidAuthIdError`` or ``InvalidPasswordError``.
Webapp2_samplesite/webapp2_extras/auth.py
get_user_by_auth_password
Rockfish/PythonCourse
python
def get_user_by_auth_password(self, auth_id, password, silent=False): 'Returns a user dict based on auth_id and password.\n\n :param auth_id:\n Authentication id.\n :param password:\n User password.\n :param silent:\n If True, raises an exception if auth_id or password are invalid.\n :returns:\n A dictionary with user data.\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' try: user = self.user_model.get_by_auth_password(auth_id, password) return self.user_to_dict(user) except (InvalidAuthIdError, InvalidPasswordError): if (not silent): raise return None
def get_user_by_auth_token(self, user_id, token): 'Returns a user dict based on user_id and auth token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n :returns:\n A tuple ``(user_dict, token_timestamp)``. Both values can be None.\n The token timestamp will be None if the user is invalid or it\n is valid but the token requires renewal.\n ' (user, ts) = self.user_model.get_by_auth_token(user_id, token) return (self.user_to_dict(user), ts)
-490,687,262,392,920,260
Returns a user dict based on user_id and auth token. :param user_id: User id. :param token: Authentication token. :returns: A tuple ``(user_dict, token_timestamp)``. Both values can be None. The token timestamp will be None if the user is invalid or it is valid but the token requires renewal.
Webapp2_samplesite/webapp2_extras/auth.py
get_user_by_auth_token
Rockfish/PythonCourse
python
def get_user_by_auth_token(self, user_id, token): 'Returns a user dict based on user_id and auth token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n :returns:\n A tuple ``(user_dict, token_timestamp)``. Both values can be None.\n The token timestamp will be None if the user is invalid or it\n is valid but the token requires renewal.\n ' (user, ts) = self.user_model.get_by_auth_token(user_id, token) return (self.user_to_dict(user), ts)
def create_auth_token(self, user_id): 'Creates a new authentication token.\n\n :param user_id:\n Authentication id.\n :returns:\n A new authentication token.\n ' return self.user_model.create_auth_token(user_id)
8,363,434,732,694,904,000
Creates a new authentication token. :param user_id: Authentication id. :returns: A new authentication token.
Webapp2_samplesite/webapp2_extras/auth.py
create_auth_token
Rockfish/PythonCourse
python
def create_auth_token(self, user_id): 'Creates a new authentication token.\n\n :param user_id:\n Authentication id.\n :returns:\n A new authentication token.\n ' return self.user_model.create_auth_token(user_id)
def delete_auth_token(self, user_id, token): 'Deletes an authentication token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n ' return self.user_model.delete_auth_token(user_id, token)
7,380,778,098,418,366,000
Deletes an authentication token. :param user_id: User id. :param token: Authentication token.
Webapp2_samplesite/webapp2_extras/auth.py
delete_auth_token
Rockfish/PythonCourse
python
def delete_auth_token(self, user_id, token): 'Deletes an authentication token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n ' return self.user_model.delete_auth_token(user_id, token)
def get_session(self, request): 'Returns an auth session.\n\n :param request:\n A :class:`webapp2.Request` instance.\n :returns:\n A session dict.\n ' store = sessions.get_store(request=request) return store.get_session(self.config['cookie_name'], backend=self.config['session_backend'])
-5,181,140,631,074,181,000
Returns an auth session. :param request: A :class:`webapp2.Request` instance. :returns: A session dict.
Webapp2_samplesite/webapp2_extras/auth.py
get_session
Rockfish/PythonCourse
python
def get_session(self, request): 'Returns an auth session.\n\n :param request:\n A :class:`webapp2.Request` instance.\n :returns:\n A session dict.\n ' store = sessions.get_store(request=request) return store.get_session(self.config['cookie_name'], backend=self.config['session_backend'])
def serialize_session(self, data): 'Serializes values for a session.\n\n :param data:\n A dict with session data.\n :returns:\n A list with session data.\n ' assert (len(data) == len(self.session_attributes)) return [data.get(k) for k in self.session_attributes]
4,561,535,077,663,560,000
Serializes values for a session. :param data: A dict with session data. :returns: A list with session data.
Webapp2_samplesite/webapp2_extras/auth.py
serialize_session
Rockfish/PythonCourse
python
def serialize_session(self, data): 'Serializes values for a session.\n\n :param data:\n A dict with session data.\n :returns:\n A list with session data.\n ' assert (len(data) == len(self.session_attributes)) return [data.get(k) for k in self.session_attributes]
def deserialize_session(self, data): 'Deserializes values for a session.\n\n :param data:\n A list with session data.\n :returns:\n A dict with session data.\n ' assert (len(data) == len(self.session_attributes)) return dict(list(zip(self.session_attributes, data)))
6,139,114,933,462,788,000
Deserializes values for a session. :param data: A list with session data. :returns: A dict with session data.
Webapp2_samplesite/webapp2_extras/auth.py
deserialize_session
Rockfish/PythonCourse
python
def deserialize_session(self, data): 'Deserializes values for a session.\n\n :param data:\n A list with session data.\n :returns:\n A dict with session data.\n ' assert (len(data) == len(self.session_attributes)) return dict(list(zip(self.session_attributes, data)))
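serialize_session and deserialize_session are inverses over the ordered session_attributes list. A round-trip sketch with a hypothetical attribute order:

session_attributes = ['user_id', 'remember', 'token', 'token_ts', 'cache_ts']  # hypothetical order
data = {'user_id': 1, 'remember': 1, 'token': 'abc', 'token_ts': 100, 'cache_ts': 100}
as_list = [data.get(k) for k in session_attributes]  # what serialize_session stores
round_trip = dict(zip(session_attributes, as_list))  # what deserialize_session rebuilds
assert round_trip == data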
def set_password_validator(self, func): 'Sets the function used to perform password validation.\n\n :param func:\n A function that receives ``(store, auth_id, password)``\n and returns a user dict or None.\n ' self.validate_password = func.__get__(self, self.__class__)
-5,540,569,157,499,997,000
Sets the function used to perform password validation. :param func: A function that receives ``(store, auth_id, password)`` and returns a user dict or None.
Webapp2_samplesite/webapp2_extras/auth.py
set_password_validator
Rockfish/PythonCourse
python
def set_password_validator(self, func): 'Sets the function used to perform password validation.\n\n :param func:\n A function that receives ``(store, auth_id, password)``\n and returns a user dict or None.\n ' self.validate_password = func.__get__(self, self.__class__)
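A sketch of plugging in a custom validator; func.__get__(self, cls) binds the plain function as a method, so the function's first argument is the store itself. The normalization rule here is hypothetical:

def case_insensitive_validator(store, auth_id, password, silent=False):
    # Hypothetical policy: lower-case the auth_id before the default lookup
    return store.get_user_by_auth_password(auth_id.lower(), password, silent=silent)

# store is an AuthStore instance, e.g. obtained from get_store(app=app)
store.set_password_validator(case_insensitive_validator)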
def set_token_validator(self, func): 'Sets the function used to perform token validation.\n\n :param func:\n A function that receives ``(store, user_id, token, token_ts)``\n and returns a tuple ``(user_dict, token)``.\n ' self.validate_token = func.__get__(self, self.__class__)
-8,153,688,014,519,468,000
Sets the function used to perform token validation. :param func: A function that receives ``(store, user_id, token, token_ts)`` and returns a tuple ``(user_dict, token)``.
Webapp2_samplesite/webapp2_extras/auth.py
set_token_validator
Rockfish/PythonCourse
python
def set_token_validator(self, func): 'Sets the function used to perform token validation.\n\n :param func:\n A function that receives ``(store, user_id, token, token_ts)``\n and returns a tuple ``(user_dict, token)``.\n ' self.validate_token = func.__get__(self, self.__class__)
def default_password_validator(self, auth_id, password, silent=False): 'Validates a password.\n\n Passwords are used to log-in using forms or to request auth tokens\n from services.\n\n :param auth_id:\n Authentication id.\n :param password:\n Password to be checked.\n :param silent:\n If True, raises an exception if auth_id or password are invalid.\n :returns:\n user or None\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' return self.get_user_by_auth_password(auth_id, password, silent=silent)
-5,114,930,920,428,402,000
Validates a password.

Passwords are used to log-in using forms or to request auth tokens
from services.

:param auth_id:
Authentication id.
:param password:
Password to be checked.
:param silent:
If True, suppresses the exception raised when auth_id or
password are invalid, returning None instead.
:returns:
user or None
:raises:
``InvalidAuthIdError`` or ``InvalidPasswordError``.
Webapp2_samplesite/webapp2_extras/auth.py
default_password_validator
Rockfish/PythonCourse
python
def default_password_validator(self, auth_id, password, silent=False): 'Validates a password.\n\n Passwords are used to log-in using forms or to request auth tokens\n from services.\n\n :param auth_id:\n Authentication id.\n :param password:\n Password to be checked.\n :param silent:\n If True, raises an exception if auth_id or password are invalid.\n :returns:\n user or None\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' return self.get_user_by_auth_password(auth_id, password, silent=silent)
def default_token_validator(self, user_id, token, token_ts=None): 'Validates a token.\n\n Tokens are random strings used to authenticate temporarily. They are\n used to validate sessions or service requests.\n\n :param user_id:\n User id.\n :param token:\n Token to be checked.\n :param token_ts:\n Optional token timestamp used to pre-validate the token age.\n :returns:\n A tuple ``(user_dict, token)``.\n ' now = int(time.time()) delete = (token_ts and ((now - token_ts) > self.config['token_max_age'])) create = False if (not delete): (user, ts) = self.get_user_by_auth_token(user_id, token) if user: delete = ((now - ts) > self.config['token_max_age']) create = ((now - ts) > self.config['token_new_age']) if (delete or create or (not user)): if (delete or create): self.delete_auth_token(user_id, token) if delete: user = None token = None return (user, token)
-311,591,043,904,925,200
Validates a token. Tokens are random strings used to authenticate temporarily. They are used to validate sessions or service requests. :param user_id: User id. :param token: Token to be checked. :param token_ts: Optional token timestamp used to pre-validate the token age. :returns: A tuple ``(user_dict, token)``.
Webapp2_samplesite/webapp2_extras/auth.py
default_token_validator
Rockfish/PythonCourse
python
def default_token_validator(self, user_id, token, token_ts=None): 'Validates a token.\n\n Tokens are random strings used to authenticate temporarily. They are\n used to validate sessions or service requests.\n\n :param user_id:\n User id.\n :param token:\n Token to be checked.\n :param token_ts:\n Optional token timestamp used to pre-validate the token age.\n :returns:\n A tuple ``(user_dict, token)``.\n ' now = int(time.time()) delete = (token_ts and ((now - token_ts) > self.config['token_max_age'])) create = False if (not delete): (user, ts) = self.get_user_by_auth_token(user_id, token) if user: delete = ((now - ts) > self.config['token_max_age']) create = ((now - ts) > self.config['token_new_age']) if (delete or create or (not user)): if (delete or create): self.delete_auth_token(user_id, token) if delete: user = None token = None return (user, token)
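The age checks above decide between keeping, rotating, and deleting a token. A worked sketch with hypothetical config values:

import time

token_max_age = 86400 * 7     # hypothetical: sessions expire after a week
token_new_age = 86400         # hypothetical: tokens are rotated after a day
now = int(time.time())
token_ts = now - (86400 * 2)  # token issued two days ago

delete = (now - token_ts) > token_max_age  # False: within max age, user stays logged in
create = (now - token_ts) > token_new_age  # True: past the renewal age, issue a fresh token
print(delete, create)                      # False True -> old token deleted, new one created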
def __init__(self, request): 'Initializes the auth provider for a request.\n\n :param request:\n A :class:`webapp2.Request` instance.\n ' self.request = request self.store = get_store(app=request.app)
5,832,942,312,021,142,000
Initializes the auth provider for a request. :param request: A :class:`webapp2.Request` instance.
Webapp2_samplesite/webapp2_extras/auth.py
__init__
Rockfish/PythonCourse
python
def __init__(self, request): 'Initializes the auth provider for a request.\n\n :param request:\n A :class:`webapp2.Request` instance.\n ' self.request = request self.store = get_store(app=request.app)
def get_user_by_session(self, save_session=True): 'Returns a user based on the current session.\n\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :returns:\n A user dict or None.\n ' if (self._user is None): data = self.get_session_data(pop=True) if (not data): self._user = _anon else: self._user = self.get_user_by_token(user_id=data['user_id'], token=data['token'], token_ts=data['token_ts'], cache=data, cache_ts=data['cache_ts'], remember=data['remember'], save_session=save_session) return self._user_or_none()
3,667,674,903,363,847,000
Returns a user based on the current session. :param save_session: If True, saves the user in the session if authentication succeeds. :returns: A user dict or None.
Webapp2_samplesite/webapp2_extras/auth.py
get_user_by_session
Rockfish/PythonCourse
python
def get_user_by_session(self, save_session=True): 'Returns a user based on the current session.\n\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :returns:\n A user dict or None.\n ' if (self._user is None): data = self.get_session_data(pop=True) if (not data): self._user = _anon else: self._user = self.get_user_by_token(user_id=data['user_id'], token=data['token'], token_ts=data['token_ts'], cache=data, cache_ts=data['cache_ts'], remember=data['remember'], save_session=save_session) return self._user_or_none()
def get_user_by_token(self, user_id, token, token_ts=None, cache=None, cache_ts=None, remember=False, save_session=True): 'Returns a user based on an authentication token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n :param token_ts:\n Token timestamp, used to perform pre-validation.\n :param cache:\n Cached user data (from the session).\n :param cache_ts:\n Cache timestamp.\n :param remember:\n If True, saves permanent sessions.\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :returns:\n A user dict or None.\n ' if (self._user is not None): assert ((self._user is not _anon) and (self._user['user_id'] == user_id) and (self._user['token'] == token)) return self._user_or_none() if (cache and cache_ts): now = int(time.time()) valid = ((now - cache_ts) < self.store.config['token_cache_age']) if (valid and token_ts): valid2 = ((now - token_ts) < self.store.config['token_max_age']) valid3 = ((now - token_ts) < self.store.config['token_new_age']) valid = (valid2 and valid3) if valid: self._user = cache else: cache_ts = None if (self._user is None): (self._user, token) = self.store.validate_token(user_id, token, token_ts=token_ts) if (self._user is None): self._user = _anon elif save_session: if (not token): token_ts = None self.set_session(self._user, token=token, token_ts=token_ts, cache_ts=cache_ts, remember=remember) return self._user_or_none()
-5,872,988,609,794,356,000
Returns a user based on an authentication token. :param user_id: User id. :param token: Authentication token. :param token_ts: Token timestamp, used to perform pre-validation. :param cache: Cached user data (from the session). :param cache_ts: Cache timestamp. :param remember: If True, saves permanent sessions. :param save_session: If True, saves the user in the session if authentication succeeds. :returns: A user dict or None.
Webapp2_samplesite/webapp2_extras/auth.py
get_user_by_token
Rockfish/PythonCourse
python
def get_user_by_token(self, user_id, token, token_ts=None, cache=None, cache_ts=None, remember=False, save_session=True): 'Returns a user based on an authentication token.\n\n :param user_id:\n User id.\n :param token:\n Authentication token.\n :param token_ts:\n Token timestamp, used to perform pre-validation.\n :param cache:\n Cached user data (from the session).\n :param cache_ts:\n Cache timestamp.\n :param remember:\n If True, saves permanent sessions.\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :returns:\n A user dict or None.\n ' if (self._user is not None): assert ((self._user is not _anon) and (self._user['user_id'] == user_id) and (self._user['token'] == token)) return self._user_or_none() if (cache and cache_ts): now = int(time.time()) valid = ((now - cache_ts) < self.store.config['token_cache_age']) if (valid and token_ts): valid2 = ((now - token_ts) < self.store.config['token_max_age']) valid3 = ((now - token_ts) < self.store.config['token_new_age']) valid = (valid2 and valid3) if valid: self._user = cache else: cache_ts = None if (self._user is None): (self._user, token) = self.store.validate_token(user_id, token, token_ts=token_ts) if (self._user is None): self._user = _anon elif save_session: if (not token): token_ts = None self.set_session(self._user, token=token, token_ts=token_ts, cache_ts=cache_ts, remember=remember) return self._user_or_none()
def get_user_by_password(self, auth_id, password, remember=False, save_session=True, silent=False): 'Returns a user based on password credentials.\n\n :param auth_id:\n Authentication id.\n :param password:\n User password.\n :param remember:\n If True, saves permanent sessions.\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :param silent:\n If True, does not raise an exception if auth_id or password are invalid.\n :returns:\n A user dict or None.\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' if save_session: self.unset_session() self._user = self.store.validate_password(auth_id, password, silent=silent) if (not self._user): self._user = _anon elif save_session: self.set_session(self._user, remember=remember) return self._user_or_none()
9,061,094,405,721,935,000
Returns a user based on password credentials. :param auth_id: Authentication id. :param password: User password. :param remember: If True, saves permanent sessions. :param save_session: If True, saves the user in the session if authentication succeeds. :param silent: If True, does not raise an exception if auth_id or password are invalid. :returns: A user dict or None. :raises: ``InvalidAuthIdError`` or ``InvalidPasswordError``.
Webapp2_samplesite/webapp2_extras/auth.py
get_user_by_password
Rockfish/PythonCourse
python
def get_user_by_password(self, auth_id, password, remember=False, save_session=True, silent=False): 'Returns a user based on password credentials.\n\n :param auth_id:\n Authentication id.\n :param password:\n User password.\n :param remember:\n If True, saves permanent sessions.\n :param save_session:\n If True, saves the user in the session if authentication succeeds.\n :param silent:\n If True, does not raise an exception if auth_id or password are invalid.\n :returns:\n A user dict or None.\n :raises:\n ``InvalidAuthIdError`` or ``InvalidPasswordError``.\n ' if save_session: self.unset_session() self._user = self.store.validate_password(auth_id, password, silent=silent) if (not self._user): self._user = _anon elif save_session: self.set_session(self._user, remember=remember) return self._user_or_none()
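For context, the usual call site for get_user_by_password is a login handler. The sketch below is not code from this repository: the handler, routes, and form field names are invented, while get_auth, InvalidAuthIdError, and InvalidPasswordError are part of webapp2_extras.auth.

import webapp2
from webapp2_extras import auth

class LoginHandler(webapp2.RequestHandler):
    def post(self):
        a = auth.get_auth(request=self.request)
        try:
            # remember=True asks set_session for a persistent session.
            a.get_user_by_password(self.request.get('auth_id'),
                                   self.request.get('password'),
                                   remember=True)
            self.redirect('/home')
        except (auth.InvalidAuthIdError, auth.InvalidPasswordError):
            self.redirect('/login?failed=1')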
@webapp2.cached_property def session(self): 'Auth session.' return self.store.get_session(self.request)
5,353,006,842,283,151,000
Auth session.
Webapp2_samplesite/webapp2_extras/auth.py
session
Rockfish/PythonCourse
python
@webapp2.cached_property def session(self): return self.store.get_session(self.request)
def set_session(self, user, token=None, token_ts=None, cache_ts=None, remember=False, **session_args): 'Saves a user in the session.\n\n :param user:\n A dictionary with user data.\n :param token:\n A unique token to be persisted. If None, a new one is created.\n :param token_ts:\n Token timestamp. If None, a new one is created.\n :param cache_ts:\n Token cache timestamp. If None, a new one is created.\n :param remember:\n If True, session is set to be persisted.\n :param session_args:\n Keyword arguments to set the session arguments.\n ' now = int(time.time()) token = (token or self.store.create_auth_token(user['user_id'])) token_ts = (token_ts or now) cache_ts = (cache_ts or now) if remember: max_age = self.store.config['token_max_age'] else: max_age = None session_args.setdefault('max_age', max_age) user.update({'token': token, 'token_ts': token_ts, 'cache_ts': cache_ts, 'remember': int(remember)}) self.set_session_data(user, **session_args) self._user = user
-2,310,134,152,950,392,300
Saves a user in the session. :param user: A dictionary with user data. :param token: A unique token to be persisted. If None, a new one is created. :param token_ts: Token timestamp. If None, a new one is created. :param cache_ts: Token cache timestamp. If None, a new one is created. :param remember: If True, session is set to be persisted. :param session_args: Keyword arguments to set the session arguments.
Webapp2_samplesite/webapp2_extras/auth.py
set_session
Rockfish/PythonCourse
python
def set_session(self, user, token=None, token_ts=None, cache_ts=None, remember=False, **session_args): 'Saves a user in the session.\n\n :param user:\n A dictionary with user data.\n :param token:\n A unique token to be persisted. If None, a new one is created.\n :param token_ts:\n Token timestamp. If None, a new one is created.\n :param cache_ts:\n Token cache timestamp. If None, a new one is created.\n :param remember:\n If True, session is set to be persisted.\n :param session_args:\n Keyword arguments to set the session arguments.\n ' now = int(time.time()) token = (token or self.store.create_auth_token(user['user_id'])) token_ts = (token_ts or now) cache_ts = (cache_ts or now) if remember: max_age = self.store.config['token_max_age'] else: max_age = None session_args.setdefault('max_age', max_age) user.update({'token': token, 'token_ts': token_ts, 'cache_ts': cache_ts, 'remember': int(remember)}) self.set_session_data(user, **session_args) self._user = user
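The net effect of set_session is easiest to see on the user dict itself: before storing it, the method stamps in the token bookkeeping that get_user_by_session later reads back. A sketch with invented values (token_max_age stands in for self.store.config['token_max_age']):

import time

user = {'user_id': 42}        # invented user data
now = int(time.time())
remember = True
token_max_age = 86400 * 7     # stand-in for config['token_max_age']

# The bookkeeping set_session adds before calling set_session_data:
user.update({'token': 'fresh-token', 'token_ts': now,
             'cache_ts': now, 'remember': int(remember)})
# The session itself is made persistent only when remember is set:
session_max_age = token_max_age if remember else None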
def unset_session(self): 'Removes a user from the session and invalidates the auth token.' self._user = None data = self.get_session_data(pop=True) if data: self.store.delete_auth_token(data['user_id'], data['token'])
4,615,528,405,671,208,000
Removes a user from the session and invalidates the auth token.
Webapp2_samplesite/webapp2_extras/auth.py
unset_session
Rockfish/PythonCourse
python
def unset_session(self): self._user = None data = self.get_session_data(pop=True) if data: self.store.delete_auth_token(data['user_id'], data['token'])
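Its natural home is a logout handler; again a hedged sketch with an invented handler and routes:

import webapp2
from webapp2_extras import auth

class LogoutHandler(webapp2.RequestHandler):
    def get(self):
        # Clears the '_user' session entry and deletes the server-side
        # auth token, so a replayed cookie can no longer authenticate.
        auth.get_auth(request=self.request).unset_session()
        self.redirect('/login')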
def get_session_data(self, pop=False): 'Returns the session data as a dictionary.\n\n :param pop:\n If True, removes the session.\n :returns:\n A deserialized session, or None.\n ' func = (self.session.pop if pop else self.session.get) rv = func('_user', None) if rv: return self.store.deserialize_session(rv)
3,557,656,058,955,620,000
Returns the session data as a dictionary. :param pop: If True, removes the session. :returns: A deserialized session, or None.
Webapp2_samplesite/webapp2_extras/auth.py
get_session_data
Rockfish/PythonCourse
python
def get_session_data(self, pop=False): 'Returns the session data as a dictionary.\n\n :param pop:\n If True, removes the session.\n :returns:\n A deserialized session, or None.\n ' func = (self.session.pop if pop else self.session.get) rv = func('_user', None) if rv: return self.store.deserialize_session(rv)
def set_session_data(self, data, **session_args): 'Sets the session data as a list.\n\n :param data:\n Deserialized session data.\n :param session_args:\n Extra arguments for the session.\n ' self.session['_user'] = self.store.serialize_session(data) self.session.container.session_args.update(session_args)
7,625,603,847,048,672,000
Sets the session data as a list. :param data: Deserialized session data. :param session_args: Extra arguments for the session.
Webapp2_samplesite/webapp2_extras/auth.py
set_session_data
Rockfish/PythonCourse
python
def set_session_data(self, data, **session_args): 'Sets the session data as a list.\n\n :param data:\n Deserialized session data.\n :param session_args:\n Extra arguments for the session.\n ' self.session['_user'] = self.store.serialize_session(data) self.session.container.session_args.update(session_args)
@pytest.fixture(scope='module') def setup(duthosts, ptfhost, rand_selected_dut, rand_unselected_dut, tbinfo, ptfadapter): 'Gather all required test information from DUT and tbinfo.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n tbinfo: A fixture to gather information about the testbed.\n\n Yields:\n A Dictionary with required test information.\n\n ' mg_facts = rand_selected_dut.get_extended_minigraph_facts(tbinfo) topo = tbinfo['topo']['type'] vlan_ports = [] vlan_mac = None if (topo == 't0'): vlan_ports = [mg_facts['minigraph_ptf_indices'][ifname] for ifname in mg_facts['minigraph_vlans'].values()[0]['members']] config_facts = rand_selected_dut.get_running_config_facts() vlan_table = config_facts['VLAN'] vlan_name = list(vlan_table.keys())[0] if ('mac' in vlan_table[vlan_name]): vlan_mac = vlan_table[vlan_name]['mac'] downstream_ports = defaultdict(list) upstream_ports = defaultdict(list) downstream_port_ids = [] upstream_port_ids = [] upstream_port_id_to_router_mac_map = {} downstream_port_id_to_router_mac_map = {} downlink_dst_mac = (vlan_mac if (vlan_mac is not None) else rand_selected_dut.facts['router_mac']) for (interface, neighbor) in mg_facts['minigraph_neighbors'].items(): port_id = mg_facts['minigraph_ptf_indices'][interface] if (((topo == 't1') and ('T0' in neighbor['name'])) or ((topo == 't0') and ('Server' in neighbor['name']))): downstream_ports[neighbor['namespace']].append(interface) downstream_port_ids.append(port_id) downstream_port_id_to_router_mac_map[port_id] = downlink_dst_mac elif (((topo == 't1') and ('T2' in neighbor['name'])) or ((topo == 't0') and ('T1' in neighbor['name']))): upstream_ports[neighbor['namespace']].append(interface) upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_selected_dut.facts['router_mac'] if ('dualtor' not in tbinfo['topo']['name']): logging.info('Stopping GARP service on single tor') ptfhost.shell('supervisorctl stop garp_service', module_ignore_errors=True) if (('dualtor' in tbinfo['topo']['name']) and (rand_unselected_dut is not None)): peer_mg_facts = rand_unselected_dut.get_extended_minigraph_facts(tbinfo) for (interface, neighbor) in peer_mg_facts['minigraph_neighbors'].items(): if (((topo == 't1') and ('T2' in neighbor['name'])) or ((topo == 't0') and ('T1' in neighbor['name']))): port_id = peer_mg_facts['minigraph_ptf_indices'][interface] upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_unselected_dut.facts['router_mac'] port_channels = mg_facts['minigraph_portchannels'] acl_table_ports = defaultdict(list) if ((topo == 't0') or (tbinfo['topo']['name'] in ('t1', 't1-lag'))): for (namespace, port) in downstream_ports.iteritems(): acl_table_ports[namespace] += port if namespace: acl_table_ports[''] += port if ((topo == 't0') or (tbinfo['topo']['name'] in ('t1-lag', 't1-64-lag', 't1-64-lag-clet'))): for (k, v) in port_channels.iteritems(): acl_table_ports[v['namespace']].append(k) if v['namespace']: acl_table_ports[''].append(k) else: for (namespace, port) in upstream_ports.iteritems(): acl_table_ports[namespace] += port if namespace: acl_table_ports[''] += port dest_mac_mapping = {'downlink->uplink': downstream_port_id_to_router_mac_map, 'uplink->downlink': upstream_port_id_to_router_mac_map} setup_information = {'destination_mac': dest_mac_mapping, 'downstream_port_ids': downstream_port_ids, 'upstream_port_ids': upstream_port_ids, 'acl_table_ports': acl_table_ports, 'vlan_ports': 
vlan_ports, 'topo': topo, 'vlan_mac': vlan_mac} logger.info('Gathered variables for ACL test:\n{}'.format(pprint.pformat(setup_information))) logger.info('Creating temporary folder "{}" for ACL test'.format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command('mkdir -p {}'.format(DUT_TMP_DIR)) (yield setup_information) logger.info('Removing temporary directory "{}"'.format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command('rm -rf {}'.format(DUT_TMP_DIR))
2,646,736,912,230,296,000
Gather all required test information from DUT and tbinfo. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. tbinfo: A fixture to gather information about the testbed. Yields: A Dictionary with required test information.
tests/acl/test_acl.py
setup
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(scope='module') def setup(duthosts, ptfhost, rand_selected_dut, rand_unselected_dut, tbinfo, ptfadapter): 'Gather all required test information from DUT and tbinfo.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n tbinfo: A fixture to gather information about the testbed.\n\n Yields:\n A Dictionary with required test information.\n\n ' mg_facts = rand_selected_dut.get_extended_minigraph_facts(tbinfo) topo = tbinfo['topo']['type'] vlan_ports = [] vlan_mac = None if (topo == 't0'): vlan_ports = [mg_facts['minigraph_ptf_indices'][ifname] for ifname in mg_facts['minigraph_vlans'].values()[0]['members']] config_facts = rand_selected_dut.get_running_config_facts() vlan_table = config_facts['VLAN'] vlan_name = list(vlan_table.keys())[0] if ('mac' in vlan_table[vlan_name]): vlan_mac = vlan_table[vlan_name]['mac'] downstream_ports = defaultdict(list) upstream_ports = defaultdict(list) downstream_port_ids = [] upstream_port_ids = [] upstream_port_id_to_router_mac_map = {} downstream_port_id_to_router_mac_map = {} downlink_dst_mac = (vlan_mac if (vlan_mac is not None) else rand_selected_dut.facts['router_mac']) for (interface, neighbor) in mg_facts['minigraph_neighbors'].items(): port_id = mg_facts['minigraph_ptf_indices'][interface] if (((topo == 't1') and ('T0' in neighbor['name'])) or ((topo == 't0') and ('Server' in neighbor['name']))): downstream_ports[neighbor['namespace']].append(interface) downstream_port_ids.append(port_id) downstream_port_id_to_router_mac_map[port_id] = downlink_dst_mac elif (((topo == 't1') and ('T2' in neighbor['name'])) or ((topo == 't0') and ('T1' in neighbor['name']))): upstream_ports[neighbor['namespace']].append(interface) upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_selected_dut.facts['router_mac'] if ('dualtor' not in tbinfo['topo']['name']): logging.info('Stopping GARP service on single tor') ptfhost.shell('supervisorctl stop garp_service', module_ignore_errors=True) if (('dualtor' in tbinfo['topo']['name']) and (rand_unselected_dut is not None)): peer_mg_facts = rand_unselected_dut.get_extended_minigraph_facts(tbinfo) for (interface, neighbor) in peer_mg_facts['minigraph_neighbors'].items(): if (((topo == 't1') and ('T2' in neighbor['name'])) or ((topo == 't0') and ('T1' in neighbor['name']))): port_id = peer_mg_facts['minigraph_ptf_indices'][interface] upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_unselected_dut.facts['router_mac'] port_channels = mg_facts['minigraph_portchannels'] acl_table_ports = defaultdict(list) if ((topo == 't0') or (tbinfo['topo']['name'] in ('t1', 't1-lag'))): for (namespace, port) in downstream_ports.iteritems(): acl_table_ports[namespace] += port if namespace: acl_table_ports[''] += port if ((topo == 't0') or (tbinfo['topo']['name'] in ('t1-lag', 't1-64-lag', 't1-64-lag-clet'))): for (k, v) in port_channels.iteritems(): acl_table_ports[v['namespace']].append(k) if v['namespace']: acl_table_ports[''].append(k) else: for (namespace, port) in upstream_ports.iteritems(): acl_table_ports[namespace] += port if namespace: acl_table_ports[''] += port dest_mac_mapping = {'downlink->uplink': downstream_port_id_to_router_mac_map, 'uplink->downlink': upstream_port_id_to_router_mac_map} setup_information = {'destination_mac': dest_mac_mapping, 'downstream_port_ids': downstream_port_ids, 'upstream_port_ids': upstream_port_ids, 'acl_table_ports': acl_table_ports, 'vlan_ports': 
vlan_ports, 'topo': topo, 'vlan_mac': vlan_mac} logger.info('Gathered variables for ACL test:\n{}'.format(pprint.pformat(setup_information))) logger.info('Creating temporary folder "{}" for ACL test'.format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command('mkdir -p {}'.format(DUT_TMP_DIR)) (yield setup_information) logger.info('Removing temporary directory "{}"'.format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command('rm -rf {}'.format(DUT_TMP_DIR))
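For orientation, the dictionary this fixture yields has roughly the shape below. Only the keys come from the fixture; every concrete value is invented for a small t0 testbed.

setup_information = {
    'destination_mac': {                  # per direction: {src PTF port: dst MAC}
        'downlink->uplink': {1: '00:aa:bb:cc:dd:ee', 2: '00:aa:bb:cc:dd:ee'},
        'uplink->downlink': {28: '52:54:00:12:34:56'},
    },
    'downstream_port_ids': [1, 2],
    'upstream_port_ids': [28, 29],
    'acl_table_ports': {'': ['PortChannel0001', 'PortChannel0002']},
    'vlan_ports': [1, 2],
    'topo': 't0',
    'vlan_mac': '00:aa:bb:cc:dd:ee',
}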
@pytest.fixture(scope='module') def populate_vlan_arp_entries(setup, ptfhost, duthosts, rand_one_dut_hostname, ip_version): 'Set up the ARP responder utility in the PTF container.' duthost = duthosts[rand_one_dut_hostname] if (setup['topo'] != 't0'): def noop(): pass (yield noop) return addr_list = [DOWNSTREAM_DST_IP[ip_version], DOWNSTREAM_IP_TO_ALLOW[ip_version], DOWNSTREAM_IP_TO_BLOCK[ip_version]] vlan_host_map = defaultdict(dict) for i in range(len(addr_list)): mac = VLAN_BASE_MAC_PATTERN.format(i) port = random.choice(setup['vlan_ports']) addr = addr_list[i] vlan_host_map[port][str(addr)] = mac DOWNSTREAM_IP_PORT_MAP[addr] = port arp_responder_conf = {} for port in vlan_host_map: arp_responder_conf['eth{}'.format(port)] = vlan_host_map[port] with open('/tmp/from_t1.json', 'w') as ar_config: json.dump(arp_responder_conf, ar_config) ptfhost.copy(src='/tmp/from_t1.json', dest='/tmp/from_t1.json') ptfhost.host.options['variable_manager'].extra_vars.update({'arp_responder_args': '-e'}) ptfhost.template(src='templates/arp_responder.conf.j2', dest='/etc/supervisor/conf.d/arp_responder.conf') ptfhost.shell('supervisorctl reread && supervisorctl update') ptfhost.shell('supervisorctl restart arp_responder') def populate_arp_table(): for dut in duthosts: dut.command('sonic-clear fdb all') dut.command('sonic-clear arp') time.sleep(20) for addr in addr_list: dut.command('ping {} -c 3'.format(addr), module_ignore_errors=True) populate_arp_table() (yield populate_arp_table) logging.info('Stopping ARP responder') ptfhost.shell('supervisorctl stop arp_responder') duthost.command('sonic-clear fdb all') duthost.command('sonic-clear arp')
-6,976,798,631,610,011,000
Set up the ARP responder utility in the PTF container.
tests/acl/test_acl.py
populate_vlan_arp_entries
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(scope='module') def populate_vlan_arp_entries(setup, ptfhost, duthosts, rand_one_dut_hostname, ip_version): duthost = duthosts[rand_one_dut_hostname] if (setup['topo'] != 't0'): def noop(): pass (yield noop) return addr_list = [DOWNSTREAM_DST_IP[ip_version], DOWNSTREAM_IP_TO_ALLOW[ip_version], DOWNSTREAM_IP_TO_BLOCK[ip_version]] vlan_host_map = defaultdict(dict) for i in range(len(addr_list)): mac = VLAN_BASE_MAC_PATTERN.format(i) port = random.choice(setup['vlan_ports']) addr = addr_list[i] vlan_host_map[port][str(addr)] = mac DOWNSTREAM_IP_PORT_MAP[addr] = port arp_responder_conf = {} for port in vlan_host_map: arp_responder_conf['eth{}'.format(port)] = vlan_host_map[port] with open('/tmp/from_t1.json', 'w') as ar_config: json.dump(arp_responder_conf, ar_config) ptfhost.copy(src='/tmp/from_t1.json', dest='/tmp/from_t1.json') ptfhost.host.options['variable_manager'].extra_vars.update({'arp_responder_args': '-e'}) ptfhost.template(src='templates/arp_responder.conf.j2', dest='/etc/supervisor/conf.d/arp_responder.conf') ptfhost.shell('supervisorctl reread && supervisorctl update') ptfhost.shell('supervisorctl restart arp_responder') def populate_arp_table(): for dut in duthosts: dut.command('sonic-clear fdb all') dut.command('sonic-clear arp') time.sleep(20) for addr in addr_list: dut.command('ping {} -c 3'.format(addr), module_ignore_errors=True) populate_arp_table() (yield populate_arp_table) logging.info('Stopping ARP responder') ptfhost.shell('supervisorctl stop arp_responder') duthost.command('sonic-clear fdb all') duthost.command('sonic-clear arp')
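The /tmp/from_t1.json handed to arp_responder maps each PTF interface to the {IP: MAC} pairs it should answer for. A hedged example of its contents; the addresses and MACs are invented (the real MACs come from VLAN_BASE_MAC_PATTERN, defined elsewhere in the module):

arp_responder_conf = {
    'eth2': {'192.168.0.253': '00:62:45:71:85:00'},
    'eth5': {'192.168.0.4': '00:62:45:71:85:01',
             '192.168.0.8': '00:62:45:71:85:02'},
}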
@pytest.fixture(scope='module', params=['ingress', 'egress']) def stage(request, duthosts, rand_one_dut_hostname): 'Parametrize tests for Ingress/Egress stage testing.\n\n Args:\n request: A fixture to interact with Pytest data.\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n\n Returns:\n str: The ACL stage to be tested.\n\n ' duthost = duthosts[rand_one_dut_hostname] pytest_require(((request.param == 'ingress') or (duthost.facts['asic_type'] not in 'broadcom')), 'Egress ACLs are not currently supported on "{}" ASICs'.format(duthost.facts['asic_type'])) return request.param
-784,252,154,036,044,000
Parametrize tests for Ingress/Egress stage testing. Args: request: A fixture to interact with Pytest data. duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. Returns: str: The ACL stage to be tested.
tests/acl/test_acl.py
stage
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(scope='module', params=['ingress', 'egress']) def stage(request, duthosts, rand_one_dut_hostname): 'Parametrize tests for Ingress/Egress stage testing.\n\n Args:\n request: A fixture to interact with Pytest data.\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n\n Returns:\n str: The ACL stage to be tested.\n\n ' duthost = duthosts[rand_one_dut_hostname] pytest_require(((request.param == 'ingress') or (duthost.facts['asic_type'] not in 'broadcom')), 'Egress ACLs are not currently supported on "{}" ASICs'.format(duthost.facts['asic_type'])) return request.param
@pytest.fixture(scope='module') def acl_table(duthosts, rand_one_dut_hostname, setup, stage, ip_version): 'Apply ACL table configuration and remove after tests.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n setup: Parameters for the ACL tests.\n stage: The ACL stage under test.\n ip_version: The IP version under test\n\n Yields:\n The ACL table configuration.\n\n ' table_name = 'DATA_{}_{}_TEST'.format(stage.upper(), ip_version.upper()) acl_table_config = {'table_name': table_name, 'table_ports': ','.join(setup['acl_table_ports']['']), 'table_stage': stage, 'table_type': ('L3' if (ip_version == 'ipv4') else 'L3V6')} logger.info('Generated ACL table configuration:\n{}'.format(pprint.pformat(acl_table_config))) dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix='acl') loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_CREATE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, 'add') except LogAnalyzerError as err: logger.error('ACL table creation failed, attempting to clean-up...') create_or_remove_acl_table(duthost, acl_table_config, setup, 'remove') raise err try: (yield acl_table_config) finally: for (duthost, loganalyzer) in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_REMOVE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, 'remove')
-3,063,090,470,390,482,000
Apply ACL table configuration and remove after tests. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. setup: Parameters for the ACL tests. stage: The ACL stage under test. ip_version: The IP version under test Yields: The ACL table configuration.
tests/acl/test_acl.py
acl_table
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(scope='module') def acl_table(duthosts, rand_one_dut_hostname, setup, stage, ip_version): 'Apply ACL table configuration and remove after tests.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n setup: Parameters for the ACL tests.\n stage: The ACL stage under test.\n ip_version: The IP version under test\n\n Yields:\n The ACL table configuration.\n\n ' table_name = 'DATA_{}_{}_TEST'.format(stage.upper(), ip_version.upper()) acl_table_config = {'table_name': table_name, 'table_ports': ','.join(setup['acl_table_ports']['']), 'table_stage': stage, 'table_type': ('L3' if (ip_version == 'ipv4') else 'L3V6')} logger.info('Generated ACL table configuration:\n{}'.format(pprint.pformat(acl_table_config))) dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix='acl') loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_CREATE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, 'add') except LogAnalyzerError as err: logger.error('ACL table creation failed, attempting to clean-up...') create_or_remove_acl_table(duthost, acl_table_config, setup, 'remove') raise err try: (yield acl_table_config) finally: for (duthost, loganalyzer) in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_REMOVE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, 'remove')
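Both this fixture and acl_rules below lean on the same LogAnalyzer guard: set expect_regex, run the configuration step inside the context manager, and let the syslog scan on exit fail the run if the expected line never appeared. A stripped-down sketch of the pattern; the import path and the regex are assumptions, not taken from these records:

from tests.common.plugins.loganalyzer.loganalyzer import LogAnalyzer  # assumed path

def guarded_apply(duthost, expect_regex, action):
    # Run `action` and fail unless syslog shows a line matching expect_regex.
    analyzer = LogAnalyzer(ansible_host=duthost, marker_prefix='acl')
    analyzer.load_common_config()
    analyzer.expect_regex = [expect_regex]
    with analyzer:     # on exit, syslog since the marker is scanned
        action()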
@abstractmethod def setup_rules(self, dut, acl_table, ip_version): 'Set up ACL rules for testing.\n\n Args:\n dut: The DUT having ACLs applied.\n acl_table: Configuration info for the ACL table.\n\n ' pass
-7,690,963,436,585,185,000
Set up ACL rules for testing. Args: dut: The DUT having ACLs applied. acl_table: Configuration info for the ACL table.
tests/acl/test_acl.py
setup_rules
KostiantynYarovyiBf/sonic-mgmt
python
@abstractmethod def setup_rules(self, dut, acl_table, ip_version): 'Set up ACL rules for testing.\n\n Args:\n dut: The DUT having ACLs applied.\n acl_table: Configuration info for the ACL table.\n\n ' pass
def post_setup_hook(self, dut, localhost, populate_vlan_arp_entries, tbinfo): 'Perform actions after rules have been applied.\n\n Args:\n dut: The DUT having ACLs applied.\n localhost: The host from which tests are run.\n populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.\n\n ' pass
5,227,020,109,855,199,000
Perform actions after rules have been applied. Args: dut: The DUT having ACLs applied. localhost: The host from which tests are run. populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.
tests/acl/test_acl.py
post_setup_hook
KostiantynYarovyiBf/sonic-mgmt
python
def post_setup_hook(self, dut, localhost, populate_vlan_arp_entries, tbinfo): 'Perform actions after rules have been applied.\n\n Args:\n dut: The DUT having ACLs applied.\n localhost: The host from which tests are run.\n populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.\n\n ' pass
def teardown_rules(self, dut): 'Tear down ACL rules once the tests have completed.\n\n Args:\n dut: The DUT having ACLs applied.\n\n ' logger.info('Finished with tests, removing all ACL rules...') dut.copy(src=os.path.join(FILES_DIR, ACL_REMOVE_RULES_FILE), dest=DUT_TMP_DIR) remove_rules_dut_path = os.path.join(DUT_TMP_DIR, ACL_REMOVE_RULES_FILE) logger.info('Applying "{}"'.format(remove_rules_dut_path)) dut.command('config acl update full {}'.format(remove_rules_dut_path))
-7,491,526,164,275,526,000
Tear down ACL rules once the tests have completed. Args: dut: The DUT having ACLs applied.
tests/acl/test_acl.py
teardown_rules
KostiantynYarovyiBf/sonic-mgmt
python
def teardown_rules(self, dut): 'Tear down ACL rules once the tests have completed.\n\n Args:\n dut: The DUT having ACLs applied.\n\n ' logger.info('Finished with tests, removing all ACL rules...') dut.copy(src=os.path.join(FILES_DIR, ACL_REMOVE_RULES_FILE), dest=DUT_TMP_DIR) remove_rules_dut_path = os.path.join(DUT_TMP_DIR, ACL_REMOVE_RULES_FILE) logger.info('Applying "{}"'.format(remove_rules_dut_path)) dut.command('config acl update full {}'.format(remove_rules_dut_path))
@pytest.fixture(scope='class', autouse=True) def acl_rules(self, duthosts, localhost, setup, acl_table, populate_vlan_arp_entries, tbinfo, ip_version): 'Setup/teardown ACL rules for the current set of tests.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n localhost: The host from which tests are run.\n setup: Parameters for the ACL tests.\n acl_table: Configuration info for the ACL table.\n populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.\n\n ' dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix='acl_rules') loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_CREATE_RE] with loganalyzer: self.setup_rules(duthost, acl_table, ip_version) self.post_setup_hook(duthost, localhost, populate_vlan_arp_entries, tbinfo) assert self.check_rule_counters(duthost), 'Rule counters should be ready!' except LogAnalyzerError as err: logger.error('ACL rule application failed, attempting to clean-up...') self.teardown_rules(duthost) raise err try: (yield) finally: for (duthost, loganalyzer) in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_REMOVE_RE] with loganalyzer: logger.info('Removing ACL rules') self.teardown_rules(duthost)
-2,370,236,786,902,345,000
Setup/teardown ACL rules for the current set of tests. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. localhost: The host from which tests are run. setup: Parameters for the ACL tests. acl_table: Configuration info for the ACL table. populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.
tests/acl/test_acl.py
acl_rules
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(scope='class', autouse=True) def acl_rules(self, duthosts, localhost, setup, acl_table, populate_vlan_arp_entries, tbinfo, ip_version): 'Setup/teardown ACL rules for the current set of tests.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n localhost: The host from which tests are run.\n setup: Parameters for the ACL tests.\n acl_table: Configuration info for the ACL table.\n populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces.\n\n ' dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix='acl_rules') loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_CREATE_RE] with loganalyzer: self.setup_rules(duthost, acl_table, ip_version) self.post_setup_hook(duthost, localhost, populate_vlan_arp_entries, tbinfo) assert self.check_rule_counters(duthost), 'Rule counters should be ready!' except LogAnalyzerError as err: logger.error('ACL rule application failed, attempting to clean-up...') self.teardown_rules(duthost) raise err try: (yield) finally: for (duthost, loganalyzer) in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_REMOVE_RE] with loganalyzer: logger.info('Removing ACL rules') self.teardown_rules(duthost)
@pytest.yield_fixture(scope='class', autouse=True) def counters_sanity_check(self, duthosts, acl_rules, acl_table): 'Validate that the counters for each rule in the rules list increased as expected.\n\n This fixture yields a list of rule IDs. The test case should add on to this list if\n it is required to check the rule for increased counters.\n\n After the test cases pass, the fixture will wait for the ACL counters to update and then\n check if the counters for each rule in the list were increased.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n acl_rules: Fixture that sets up the ACL rules.\n acl_table: Fixture that sets up the ACL table.\n\n ' acl_facts = defaultdict(dict) table_name = acl_table['table_name'] for duthost in duthosts: acl_facts[duthost]['before'] = duthost.acl_facts()['ansible_facts']['ansible_acl_facts'][table_name]['rules'] rule_list = [] (yield rule_list) if (not rule_list): return time.sleep(self.ACL_COUNTERS_UPDATE_INTERVAL_SECS) for duthost in duthosts: acl_facts[duthost]['after'] = duthost.acl_facts()['ansible_facts']['ansible_acl_facts'][table_name]['rules'] for duthost in duthosts: assert (len(acl_facts[duthost]['before']) == len(acl_facts[duthost]['after'])) for rule in rule_list: rule = 'RULE_{}'.format(rule) counters_before = {PACKETS_COUNT: 0, BYTES_COUNT: 0} for duthost in duthosts: counters_before[PACKETS_COUNT] += acl_facts[duthost]['before'][rule][PACKETS_COUNT] counters_before[BYTES_COUNT] += acl_facts[duthost]['before'][rule][BYTES_COUNT] logger.info('Counters for ACL rule "{}" before traffic:\n{}'.format(rule, pprint.pformat(counters_before))) counters_after = {PACKETS_COUNT: 0, BYTES_COUNT: 0} for duthost in duthosts: counters_after[PACKETS_COUNT] += acl_facts[duthost]['after'][rule][PACKETS_COUNT] counters_after[BYTES_COUNT] += acl_facts[duthost]['after'][rule][BYTES_COUNT] logger.info('Counters for ACL rule "{}" after traffic:\n{}'.format(rule, pprint.pformat(counters_after))) assert (counters_after[PACKETS_COUNT] > counters_before[PACKETS_COUNT]) assert (counters_after[BYTES_COUNT] > counters_before[BYTES_COUNT])
3,925,598,669,342,153,700
Validate that the counters for each rule in the rules list increased as expected. This fixture yields a list of rule IDs. The test case should add on to this list if it is required to check the rule for increased counters. After the test cases pass, the fixture will wait for the ACL counters to update and then check if the counters for each rule in the list were increased. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. acl_rules: Fixture that sets up the ACL rules. acl_table: Fixture that sets up the ACL table.
tests/acl/test_acl.py
counters_sanity_check
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.yield_fixture(scope='class', autouse=True) def counters_sanity_check(self, duthosts, acl_rules, acl_table): 'Validate that the counters for each rule in the rules list increased as expected.\n\n This fixture yields a list of rule IDs. The test case should add on to this list if\n it is required to check the rule for increased counters.\n\n After the test cases pass, the fixture will wait for the ACL counters to update and then\n check if the counters for each rule in the list were increased.\n\n Args:\n duthosts: All DUTs belong to the testbed.\n rand_one_dut_hostname: hostname of a random chosen dut to run test.\n acl_rules: Fixture that sets up the ACL rules.\n acl_table: Fixture that sets up the ACL table.\n\n ' acl_facts = defaultdict(dict) table_name = acl_table['table_name'] for duthost in duthosts: acl_facts[duthost]['before'] = duthost.acl_facts()['ansible_facts']['ansible_acl_facts'][table_name]['rules'] rule_list = [] (yield rule_list) if (not rule_list): return time.sleep(self.ACL_COUNTERS_UPDATE_INTERVAL_SECS) for duthost in duthosts: acl_facts[duthost]['after'] = duthost.acl_facts()['ansible_facts']['ansible_acl_facts'][table_name]['rules'] for duthost in duthosts: assert (len(acl_facts[duthost]['before']) == len(acl_facts[duthost]['after'])) for rule in rule_list: rule = 'RULE_{}'.format(rule) counters_before = {PACKETS_COUNT: 0, BYTES_COUNT: 0} for duthost in duthosts: counters_before[PACKETS_COUNT] += acl_facts[duthost]['before'][rule][PACKETS_COUNT] counters_before[BYTES_COUNT] += acl_facts[duthost]['before'][rule][BYTES_COUNT] logger.info('Counters for ACL rule "{}" before traffic:\n{}'.format(rule, pprint.pformat(counters_before))) counters_after = {PACKETS_COUNT: 0, BYTES_COUNT: 0} for duthost in duthosts: counters_after[PACKETS_COUNT] += acl_facts[duthost]['after'][rule][PACKETS_COUNT] counters_after[BYTES_COUNT] += acl_facts[duthost]['after'][rule][BYTES_COUNT] logger.info('Counters for ACL rule "{}" after traffic:\n{}'.format(rule, pprint.pformat(counters_after))) assert (counters_after[PACKETS_COUNT] > counters_before[PACKETS_COUNT]) assert (counters_after[BYTES_COUNT] > counters_before[BYTES_COUNT])
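The yield-a-list idiom above deserves a callout: the fixture hands the test an empty list, the test appends the IDs of the rules it exercised, and the post-yield half of the fixture does the counter assertions. A self-contained sketch of the idiom; rule_counter_increased is a placeholder for the real before/after acl_facts comparison:

import pytest

def rule_counter_increased(rule_id):
    # Placeholder for the real check (compare acl_facts before/after traffic).
    return True

@pytest.fixture
def touched_rules():
    rules = []
    yield rules                  # test body runs here and appends rule IDs
    for rule_id in rules:        # post-yield half: assert on what was recorded
        assert rule_counter_increased(rule_id)

def test_example(touched_rules):
    # ... send traffic that should hit rule 14 ...
    touched_rules.append(14)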
@pytest.fixture(params=['downlink->uplink', 'uplink->downlink']) def direction(self, request): 'Parametrize test based on direction of traffic.' return request.param
3,840,651,992,981,872,000
Parametrize test based on direction of traffic.
tests/acl/test_acl.py
direction
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(params=['downlink->uplink', 'uplink->downlink']) def direction(self, request): return request.param
@pytest.fixture(autouse=True) def get_src_port(self, setup, direction): 'Get a source port for the current test.' src_ports = (setup['downstream_port_ids'] if (direction == 'downlink->uplink') else setup['upstream_port_ids']) src_port = random.choice(src_ports) logger.info('Selected source port {}'.format(src_port)) self.src_port = src_port
-910,017,347,846,818,700
Get a source port for the current test.
tests/acl/test_acl.py
get_src_port
KostiantynYarovyiBf/sonic-mgmt
python
@pytest.fixture(autouse=True) def get_src_port(self, setup, direction): src_ports = (setup['downstream_port_ids'] if (direction == 'downlink->uplink') else setup['upstream_port_ids']) src_port = random.choice(src_ports) logger.info('Selected source port {}'.format(src_port)) self.src_port = src_port
def get_dst_ports(self, setup, direction): 'Get the set of possible destination ports for the current test.' return (setup['upstream_port_ids'] if (direction == 'downlink->uplink') else setup['downstream_port_ids'])
-7,723,670,512,636,729,000
Get the set of possible destination ports for the current test.
tests/acl/test_acl.py
get_dst_ports
KostiantynYarovyiBf/sonic-mgmt
python
def get_dst_ports(self, setup, direction): return (setup['upstream_port_ids'] if (direction == 'downlink->uplink') else setup['downstream_port_ids'])
def get_dst_ip(self, direction, ip_version): 'Get the default destination IP for the current test.' return (UPSTREAM_DST_IP[ip_version] if (direction == 'downlink->uplink') else DOWNSTREAM_DST_IP[ip_version])
7,964,747,422,421,615,000
Get the default destination IP for the current test.
tests/acl/test_acl.py
get_dst_ip
KostiantynYarovyiBf/sonic-mgmt
python
def get_dst_ip(self, direction, ip_version): return (UPSTREAM_DST_IP[ip_version] if (direction == 'downlink->uplink') else DOWNSTREAM_DST_IP[ip_version])
def tcp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, proto=None, sport=17185, dport=81, flags=None): 'Generate a TCP packet for testing.' src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): pkt = testutils.simple_tcp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, tcp_sport=sport, tcp_dport=dport, ip_ttl=64) if proto: pkt['IP'].proto = proto else: pkt = testutils.simple_tcpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, tcp_sport=sport, tcp_dport=dport, ipv6_hlim=64) if proto: pkt['IPv6'].nh = proto if flags: pkt['TCP'].flags = flags return pkt
7,023,518,864,380,091,000
Generate a TCP packet for testing.
tests/acl/test_acl.py
tcp_packet
KostiantynYarovyiBf/sonic-mgmt
python
def tcp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, proto=None, sport=17185, dport=81, flags=None): src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): pkt = testutils.simple_tcp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, tcp_sport=sport, tcp_dport=dport, ip_ttl=64) if proto: pkt['IP'].proto = proto else: pkt = testutils.simple_tcpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, tcp_sport=sport, tcp_dport=dport, ipv6_hlim=64) if proto: pkt['IPv6'].nh = proto if flags: pkt['TCP'].flags = flags return pkt
def udp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, sport=1234, dport=80): 'Generate a UDP packet for testing.' src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): return testutils.simple_udp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, udp_sport=sport, udp_dport=dport, ip_ttl=64) else: return testutils.simple_udpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, udp_sport=sport, udp_dport=dport, ipv6_hlim=64)
7,555,449,266,937,661,000
Generate a UDP packet for testing.
tests/acl/test_acl.py
udp_packet
KostiantynYarovyiBf/sonic-mgmt
python
def udp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, sport=1234, dport=80): src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): return testutils.simple_udp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, udp_sport=sport, udp_dport=dport, ip_ttl=64) else: return testutils.simple_udpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, udp_sport=sport, udp_dport=dport, ipv6_hlim=64)
def icmp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, icmp_type=8, icmp_code=0): 'Generate an ICMP packet for testing.' src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): return testutils.simple_icmp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ip_ttl=64) else: return testutils.simple_icmpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ipv6_hlim=64)
-4,280,413,460,718,121,000
Generate an ICMP packet for testing.
tests/acl/test_acl.py
icmp_packet
KostiantynYarovyiBf/sonic-mgmt
python
def icmp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, icmp_type=8, icmp_code=0): src_ip = (src_ip or DEFAULT_SRC_IP[ip_version]) dst_ip = (dst_ip or self.get_dst_ip(direction, ip_version)) if (ip_version == 'ipv4'): return testutils.simple_icmp_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ip_ttl=64) else: return testutils.simple_icmpv6_packet(eth_dst=setup['destination_mac'][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ipv6_hlim=64)
def expected_mask_routed_packet(self, pkt, ip_version): 'Generate the expected mask for a routed packet.' exp_pkt = pkt.copy() exp_pkt = mask.Mask(exp_pkt) exp_pkt.set_do_not_care_scapy(packet.Ether, 'dst') exp_pkt.set_do_not_care_scapy(packet.Ether, 'src') if (ip_version == 'ipv4'): exp_pkt.set_do_not_care_scapy(packet.IP, 'chksum') exp_pkt.set_do_not_care_scapy(packet.IP, 'ttl') else: exp_pkt.set_do_not_care_scapy(packet.IPv6, 'hlim') return exp_pkt
-3,256,258,984,992,967,000
Generate the expected mask for a routed packet.
tests/acl/test_acl.py
expected_mask_routed_packet
KostiantynYarovyiBf/sonic-mgmt
python
def expected_mask_routed_packet(self, pkt, ip_version): exp_pkt = pkt.copy() exp_pkt = mask.Mask(exp_pkt) exp_pkt.set_do_not_care_scapy(packet.Ether, 'dst') exp_pkt.set_do_not_care_scapy(packet.Ether, 'src') if (ip_version == 'ipv4'): exp_pkt.set_do_not_care_scapy(packet.IP, 'chksum') exp_pkt.set_do_not_care_scapy(packet.IP, 'ttl') else: exp_pkt.set_do_not_care_scapy(packet.IPv6, 'hlim') return exp_pkt
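The packet builders and the mask meet in the verification step. The tests below call _verify_acl_traffic, which is not among these records; with plain ptf.testutils its forwarding branch presumably looks something like this method sketch (send_packet and verify_packet_any_port are real testutils calls):

import ptf.testutils as testutils

def verify_forwarded(self, setup, direction, ptfadapter, ip_version):
    # Sketch of a forwarding check on the test class.
    pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version)
    exp_pkt = self.expected_mask_routed_packet(pkt, ip_version)
    testutils.send_packet(ptfadapter, self.src_port, pkt)
    testutils.verify_packet_any_port(ptfadapter, exp_pkt,
                                     ports=self.get_dst_ports(setup, direction))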
def test_ingress_unmatched_blocked(self, setup, direction, ptfadapter, ip_version, stage): 'Verify that unmatched packets are dropped for ingress.' if (stage == 'egress'): pytest.skip('Only run for ingress') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version)
2,178,950,070,650,520,800
Verify that unmatched packets are dropped for ingress.
tests/acl/test_acl.py
test_ingress_unmatched_blocked
KostiantynYarovyiBf/sonic-mgmt
python
def test_ingress_unmatched_blocked(self, setup, direction, ptfadapter, ip_version, stage): if (stage == 'egress'): pytest.skip('Only run for ingress') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version)
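For the dropped cases the complementary check asserts absence; with plain ptf.testutils that is verify_no_packet_any (again a sketch of what _verify_acl_traffic presumably does, not code from the repository):

import ptf.testutils as testutils

def verify_dropped(self, setup, direction, ptfadapter, pkt, ip_version):
    # Sketch of a drop check on the test class.
    exp_pkt = self.expected_mask_routed_packet(pkt, ip_version)
    testutils.send_packet(ptfadapter, self.src_port, pkt)
    # Fails if the (masked) packet shows up on any candidate egress port.
    testutils.verify_no_packet_any(ptfadapter, exp_pkt,
                                   self.get_dst_ports(setup, direction))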
def test_egress_unmatched_forwarded(self, setup, direction, ptfadapter, ip_version, stage): 'Verify that the default egress rule allows all traffic.' if (stage == 'ingress'): pytest.skip('Only run for egress') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version)
5,269,488,178,567,495,000
Verify that the default egress rule allows all traffic.
tests/acl/test_acl.py
test_egress_unmatched_forwarded
KostiantynYarovyiBf/sonic-mgmt
python
def test_egress_unmatched_forwarded(self, setup, direction, ptfadapter, ip_version, stage): if (stage == 'ingress'): pytest.skip('Only run for egress') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version)
def test_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and forward a packet on source IP.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(1)
5,713,231,115,088,048,000
Verify that we can match and forward a packet on source IP.
tests/acl/test_acl.py
test_source_ip_match_forwarded
KostiantynYarovyiBf/sonic-mgmt
python
def test_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(1)
def test_rules_priority_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we respect rule priorites in the forwarding case.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(20)
-8,105,410,999,960,000,000
Verify that we respect rule priorities in the forwarding case.
tests/acl/test_acl.py
test_rules_priority_forwarded
KostiantynYarovyiBf/sonic-mgmt
python
def test_rules_priority_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(20)
def test_rules_priority_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we respect rule priorites in the drop case.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(7)
959,052,328,267,444,600
Verify that we respect rule priorities in the drop case.
tests/acl/test_acl.py
test_rules_priority_dropped
KostiantynYarovyiBf/sonic-mgmt
python
def test_rules_priority_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(7)
def test_dest_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and forward a packet on destination IP.' dst_ip = (DOWNSTREAM_IP_TO_ALLOW[ip_version] if (direction == 'uplink->downlink') else UPSTREAM_IP_TO_ALLOW[ip_version]) pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append((2 if (direction == 'uplink->downlink') else 3))
7,955,340,746,633,166,000
Verify that we can match and forward a packet on destination IP.
tests/acl/test_acl.py
test_dest_ip_match_forwarded
KostiantynYarovyiBf/sonic-mgmt
python
def test_dest_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): dst_ip = (DOWNSTREAM_IP_TO_ALLOW[ip_version] if (direction == 'uplink->downlink') else UPSTREAM_IP_TO_ALLOW[ip_version]) pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append((2 if (direction == 'uplink->downlink') else 3))
def test_dest_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and drop a packet on destination IP.' dst_ip = (DOWNSTREAM_IP_TO_BLOCK[ip_version] if (direction == 'uplink->downlink') else UPSTREAM_IP_TO_BLOCK[ip_version]) pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append((15 if (direction == 'uplink->downlink') else 16))
-6,354,044,041,571,471,000
Verify that we can match and drop a packet on destination IP.
tests/acl/test_acl.py
test_dest_ip_match_dropped
KostiantynYarovyiBf/sonic-mgmt
python
def test_dest_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): dst_ip = (DOWNSTREAM_IP_TO_BLOCK[ip_version] if (direction == 'uplink->downlink') else UPSTREAM_IP_TO_BLOCK[ip_version]) pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append((15 if (direction == 'uplink->downlink') else 16))
def test_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and drop a packet on source IP.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(14)
5,289,088,510,342,193,000
Verify that we can match and drop a packet on source IP.
tests/acl/test_acl.py
test_source_ip_match_dropped
KostiantynYarovyiBf/sonic-mgmt
python
def test_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(14)
def test_udp_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and forward a UDP packet on source IP.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(13)
-7,716,854,753,495,035,000
Verify that we can match and forward a UDP packet on source IP.
tests/acl/test_acl.py
test_udp_source_ip_match_forwarded
KostiantynYarovyiBf/sonic-mgmt
python
def test_udp_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(13)
def test_udp_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and drop a UDP packet on source IP.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(26)
3,425,280,445,662,462,500
Verify that we can match and drop a UDP packet on source IP.
tests/acl/test_acl.py
test_udp_source_ip_match_dropped
KostiantynYarovyiBf/sonic-mgmt
python
def test_udp_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(26)
def test_icmp_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): 'Verify that we can match and drop an ICMP packet on source IP.' src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.icmp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(25)
-7,805,632,420,765,763,000
Verify that we can match and drop an ICMP packet on source IP.
tests/acl/test_acl.py
test_icmp_source_ip_match_dropped
KostiantynYarovyiBf/sonic-mgmt
python
def test_icmp_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): src_ip = ('0.0.0.0' if (ip_version == 'ipv4') else '0000:0000:0000:0000:0000:0000:0000:0000') pkt = self.icmp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(25)