id
stringlengths 1
8
| text
stringlengths 6
1.05M
| dataset_id
stringclasses 1
value |
---|---|---|
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/response/AlipayInsSceneProductAgreementQueryResponse.py
|
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayInsSceneProductAgreementQueryResponse(AlipayResponse):
    """Response model for the insurance scene product agreement query API.

    Each gateway field is exposed as a read/write property backed by a
    private ``_``-prefixed attribute, and ``parse_response_content``
    copies any field present in the decoded response onto this object.
    """

    # JSON keys returned by the gateway.  Note: 'agreeement_sign_type'
    # (triple 'e') matches the upstream API contract -- do not "fix" it.
    _FIELD_NAMES = (
        'agreeement_sign_type',
        'alipay_user_id',
        'effect_end_time',
        'effect_start_time',
        'item_id',
        'product_sign_no',
        'sign_user_id',
        'sign_user_type',
        'status',
    )

    def __init__(self):
        super(AlipayInsSceneProductAgreementQueryResponse, self).__init__()
        # Initialize every backing attribute to None.
        for field_name in self._FIELD_NAMES:
            setattr(self, '_' + field_name, None)

    @property
    def agreeement_sign_type(self):
        return self._agreeement_sign_type

    @agreeement_sign_type.setter
    def agreeement_sign_type(self, value):
        self._agreeement_sign_type = value

    @property
    def alipay_user_id(self):
        return self._alipay_user_id

    @alipay_user_id.setter
    def alipay_user_id(self, value):
        self._alipay_user_id = value

    @property
    def effect_end_time(self):
        return self._effect_end_time

    @effect_end_time.setter
    def effect_end_time(self, value):
        self._effect_end_time = value

    @property
    def effect_start_time(self):
        return self._effect_start_time

    @effect_start_time.setter
    def effect_start_time(self, value):
        self._effect_start_time = value

    @property
    def item_id(self):
        return self._item_id

    @item_id.setter
    def item_id(self, value):
        self._item_id = value

    @property
    def product_sign_no(self):
        return self._product_sign_no

    @product_sign_no.setter
    def product_sign_no(self, value):
        self._product_sign_no = value

    @property
    def sign_user_id(self):
        return self._sign_user_id

    @sign_user_id.setter
    def sign_user_id(self, value):
        self._sign_user_id = value

    @property
    def sign_user_type(self):
        return self._sign_user_type

    @sign_user_type.setter
    def sign_user_type(self, value):
        self._sign_user_type = value

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        self._status = value

    def parse_response_content(self, response_content):
        """Populate fields from the decoded gateway response."""
        response = super(AlipayInsSceneProductAgreementQueryResponse, self).parse_response_content(response_content)
        # Assign through the property setters, exactly as the unrolled
        # per-field `if name in response:` checks did.
        for field_name in self._FIELD_NAMES:
            if field_name in response:
                setattr(self, field_name, response[field_name])
|
PypiClean
|
/eve_utils-0.9.44-py3-none-any.whl/eve_utils/addins/serverless.py
|
import os
import sys
import sys
import click
from subprocess import Popen, PIPE
import eve_utils
def warning():
    """Print the experimental-feature warning and ask the user to confirm.

    Aborts the command (click raises ``Abort`` via ``abort=True``) when
    the user answers no.
    """
    # NOTE: the block below is user-facing output -- keep the text verbatim.
    print('''
NOTE: this feature is still under development - use at your own risk!
*** DO NOT USE THIS UNLESS YOU KNOW WHAT YOU ARE DOING ***
This script will
- check for node/npm
- install serverless globally
- npm init the api folder
- install serverless plugins
- add dnspython==2.1.0 to requirements.txt
You can then run the API with
sls wsgi serve --config serverless-XXX.yml -p 2112
Before you deploy
- configure your credentials
(e.g. sls config credentials --provider aws --key XXXX --secret YYYY -o)
- ensure your logging.yml makes no reference to the file system
(e.g. copy logging_no-files.yml to logging.yml)
- modify as required the serverless-*.yml files (esp. connection to MongoDB!)
- test with serverless
- sls wsgi serve --config serverless-XXX.yml -p 2112
- when you are ready to deploy:
- sls deploy --config serverless-XXX.yml
- if you only use one cloud provider, copy that serverless-XXX.yml
to serverless.yml, then you can leave off the --config...
''')
    click.confirm('Do you want to continue?', abort=True)
def run_process(cmd):
    """Run *cmd* in a shell and capture its output.

    Parameters
    ----------
    cmd : str
        Command line to execute.  NOTE: executed with ``shell=True`` --
        only ever pass trusted, constant command strings, never user input.

    Returns
    -------
    tuple[int, str, str]
        ``(exit_code, stdout, stderr)``, both streams decoded as UTF-8.
    """
    process = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, encoding='utf-8')
    out, err = process.communicate()
    # communicate() already waits for process termination, so returncode is
    # set here; the previous extra wait() call was redundant.
    return process.returncode, out, err
def is_node_installed():
    """Check that node.js (major version >= 10) and npm are on PATH.

    Returns
    -------
    bool
        True when both tools are available and node is new enough;
        otherwise prints a diagnostic and returns False.
    """
    exit_code, out, err = run_process('node -v')
    try:
        # `node -v` prints e.g. "v14.17.0"; strip the leading 'v'.
        major_version = int(out[1:].split('.')[0])
    except (ValueError, IndexError):
        major_version = 0
    if exit_code:
        print('node.js is not installed.\nPlease install and try again.')
        return False
    elif major_version < 10:
        # BUG FIX: the f prefix was missing, so the literal "{out}" was printed.
        print(f'node.js is installed, version must be greater than v10 (yours is {out}).\nPlease upgrade and try again.')
        return False
    # TODO: is any of this even required given a proper installation of node.js?
    exit_code, out, err = run_process('npm -v')
    try:
        major_version = int(out.split('.')[0])
    except (ValueError, IndexError):
        major_version = 0
    if exit_code:
        print('npm is not installed.\nPlease install and try again.')
        return False
    elif major_version < 0:
        # UNREACHABLE: is there a minimum npm version required by serverless?
        # BUG FIX: f prefix was missing here as well.
        print(f'npm is installed, version must be greater than XX (yours is {out}).\nPlease upgrade and try again.')
        return False
    return True
def ensure_serverless_is_installed():
    """Install the serverless framework globally via npm if it is missing.

    Returns
    -------
    bool
        True when serverless is already present or was installed
        successfully; False when the npm install failed.
    """
    exit_code, out, err = run_process('sls -v')
    if not exit_code:  # TODO: serverless is installed, but should we check version?
        return True
    print('installing serverless framework')
    exit_code, out, err = run_process('npm install -g serverless')
    if exit_code:
        print('Something went wrong installing serverless.')
        return False
    # BUG FIX: the success path previously fell off the end and returned
    # None (falsy), so callers (`if not ensure_serverless_is_installed()`)
    # treated a successful install as a failure and exited with code 603.
    return True
def ensure_node_initialized():
    """Ensure the current folder has a package.json, running ``npm init -f`` if needed."""
    # Already initialized -- nothing to do.
    if os.path.exists('./package.json'):
        return True
    print('running npm init')
    exit_code, _out, _err = run_process('npm init -f')
    if not exit_code:
        return True
    print('Something went wrong running npm init.')
    return False
def ensure_serverless_plugins_installed():
    """Install the serverless plugins this project relies on; True on success."""
    print('Installing serverless plugins')
    exit_code, _out, _err = run_process(
        'npm install --save-dev serverless-wsgi serverless-python-requirements serverless-domain-manager'
    )
    if not exit_code:
        return True
    print('Something went wrong installing serverless plugins.')
    return False
def add():
    """Add serverless support to an eve_service API project.

    Workflow: warn and confirm, locate the API src folder, copy the
    serverless skeleton, verify node/serverless tooling, then initialize
    npm and install the serverless plugins inside the project folder.

    Exits the process with a distinct code per failure:
      1   - not inside an eve_service API folder structure
      601 - serverless has already been added
      602 - node.js missing or too old
      603 - serverless framework could not be installed
      604 - npm init failed
      605 - serverless plugin install failed
    """
    warning()
    try:
        settings = eve_utils.jump_to_api_folder('src')
    except RuntimeError:
        print('This command must be run in an eve_service API folder structure')
        sys.exit(1)
    # A serverless.py in the src folder marks a previous run of this command.
    if os.path.exists('./serverless.py'):
        print('serverless has already been added')
        sys.exit(601)
    eve_utils.copy_skel(settings['project_name'], 'serverless', '.')
    eve_utils.replace_project_name(settings['project_name'], '.')
    if not is_node_installed():
        sys.exit(602)
    if not ensure_serverless_is_installed():
        sys.exit(603)
    # Work inside the generated project folder from here on.
    os.chdir(f"./{settings['project_name']}")
    eve_utils.install_packages(['dnspython'], 'add_serverless')
    if not ensure_node_initialized():
        sys.exit(604)
    if not ensure_serverless_plugins_installed():
        sys.exit(605)
|
PypiClean
|
/rws-0.6.tar.gz/rws-0.6/cmsranking/User.py
|
# Contest Management System - http://cms-dev.github.io/
# Copyright © 2011-2013 Luca Wehrstedt <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import six
from cmsranking.Entity import Entity, InvalidData
from cmsranking.Store import Store
from cmsranking.Submission import store as submission_store
class User(Entity):
    """The entity representing a user.

    It consists of the following properties:
    - f_name (unicode): the first name of the user
    - l_name (unicode): the last name of the user
    - team (unicode): the id of the team the user belongs to
    """
    def __init__(self):
        """Set the properties to some default values."""
        Entity.__init__(self)
        self.f_name = None
        self.l_name = None
        self.team = None

    @staticmethod
    def validate(data):
        """Validate the given dictionary.

        See if it contains a valid representation of this entity.

        Raises InvalidData when a field is missing or has the wrong type.
        """
        try:
            assert isinstance(data, dict), \
                "Not a dictionary"
            assert isinstance(data['f_name'], six.text_type), \
                "Field 'f_name' isn't a string"
            assert isinstance(data['l_name'], six.text_type), \
                "Field 'l_name' isn't a string"
            assert data['team'] is None or \
                isinstance(data['team'], six.text_type), \
                "Field 'team' isn't a string (or null)"
        except KeyError as exc:
            # BUG FIX: exceptions have no `.message` attribute on Python 3
            # (removed after Python 2.x), so the old code raised
            # AttributeError instead of InvalidData; use args[0]/str(exc).
            raise InvalidData("Field %s is missing" % exc.args[0])
        except AssertionError as exc:
            raise InvalidData(str(exc))

    def set(self, data):
        """Overwrite this entity's fields from a validated dict."""
        self.validate(data)
        self.f_name = data['f_name']
        self.l_name = data['l_name']
        self.team = data['team']

    def get(self):
        """Return this entity's data as a dict, minus the storage key."""
        result = self.__dict__.copy()
        del result['key']
        return result

    def consistent(self):
        """True when the referenced team (if any) exists in the team store."""
        # Imported here to avoid a circular import with cmsranking.Team.
        from cmsranking.Team import store as team_store
        return self.team is None or self.team in team_store
# Module-level singleton store of User entities (exposed under 'users').
# NOTE(review): submission_store is passed as a dependent store -- presumably
# so submissions referencing a removed user are cleaned up; confirm in Store.
store = Store(User, 'users', [submission_store])
|
PypiClean
|
/msgraph-sdk-1.0.0a3.tar.gz/msgraph-sdk-1.0.0a3/msgraph/generated/groups/item/sites/item/term_store/groups/item/sets/item/children/item/children/item/set/set_request_builder.py
|
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, Union
from ...............models.o_data_errors import o_data_error
from ...............models.term_store import set
class SetRequestBuilder():
    """
    Provides operations to manage the set property of the microsoft.graph.termStore.term entity.

    Generated (kiota) request builder: builds and sends GET requests for the
    deeply nested .../termStore/.../children/{term-id1}/set navigation path.
    """
    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new SetRequestBuilder and sets the default values.
        Args:
            pathParameters: The raw url or the Url template parameters for the request.
            requestAdapter: The request adapter to use to execute the requests.
        """
        if path_parameters is None:
            raise Exception("path_parameters cannot be undefined")
        if request_adapter is None:
            raise Exception("request_adapter cannot be undefined")
        # Url template to use to build the URL for the current request builder.
        # Parameter names are percent-encoded (%2D = '-', %24 = '$') per the
        # RFC 6570 template kiota generates; do not hand-edit.
        self.url_template: str = "{+baseurl}/groups/{group%2Did}/sites/{site%2Did}/termStore/groups/{group%2Did1}/sets/{set%2Did}/children/{term%2Did}/children/{term%2Did1}/set{?%24select,%24expand}"

        url_tpl_params = get_path_parameters(path_parameters)
        # Path parameters (or raw URL) used to fill the template above.
        self.path_parameters = url_tpl_params
        # The request adapter that executes the requests.
        self.request_adapter = request_adapter

    def create_get_request_information(self,request_configuration: Optional[SetRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        The [set] in which the term is created.
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.GET
        request_info.headers["Accept"] = "application/json"
        if request_configuration:
            # Caller-supplied headers/query params/options are layered on top
            # of the defaults set above.
            request_info.add_request_headers(request_configuration.headers)
            request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request_info.add_request_options(request_configuration.options)
        return request_info

    async def get(self,request_configuration: Optional[SetRequestBuilderGetRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> Optional[set.Set]:
        """
        The [set] in which the term is created.
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
            responseHandler: Response handler to use in place of the default response handling provided by the core service
        Returns: Optional[set.Set]
        """
        request_info = self.create_get_request_information(
            request_configuration
        )
        # 4XX/5XX responses are deserialized into ODataError and raised.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        return await self.request_adapter.send_async(request_info, set.Set, response_handler, error_mapping)

    @dataclass
    class SetRequestBuilderGetQueryParameters():
        """
        The [set] in which the term is created.
        """
        # Expand related entities
        expand: Optional[List[str]] = None
        # Select properties to be returned
        select: Optional[List[str]] = None

        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                originalName: The original query parameter name in the class.
            Returns: str
            """
            if original_name is None:
                raise Exception("original_name cannot be undefined")
            # %24 is the percent-encoded '$' of OData system query options.
            if original_name == "expand":
                return "%24expand"
            if original_name == "select":
                return "%24select"
            return original_name

    @dataclass
    class SetRequestBuilderGetRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, str]] = None
        # Request options
        options: Optional[List[RequestOption]] = None
        # Request query parameters (annotation is lazy via
        # `from __future__ import annotations`, so the qualified forward
        # reference to the outer class is valid here)
        query_parameters: Optional[SetRequestBuilder.SetRequestBuilderGetQueryParameters] = None
|
PypiClean
|
/compute_api_client-0.0.3-py3-none-any.whl/compute_api_client/api/pipeline_controller/pipeline_controller_update_by_id.py
|
from http import HTTPStatus
from typing import Any, Dict, Optional
import httpx
from ... import errors
from ...client import Client
from ...models.pipeline_partial import PipelinePartial
from ...types import Response
def _get_kwargs(
    id: str,
    *,
    client: Client,
    json_body: PipelinePartial,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for a PATCH of one pipeline.

    The returned dict is passed straight to ``httpx.request`` /
    ``AsyncClient.request`` by the callers.
    """
    return {
        "method": "patch",
        "url": "{}/compute/pipelines/{id}".format(client.base_url, id=id),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": json_body.to_dict(),
    }
def _parse_response(*, client: Client, response: httpx.Response) -> Optional[Any]:
    """Map a raw httpx response to a parsed value (always None here).

    204 No Content is the only documented status for this endpoint; any
    other status raises when the client is configured strictly, and is
    silently ignored otherwise.
    """
    unexpected = response.status_code != HTTPStatus.NO_CONTENT
    if unexpected and client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}")
    return None
def _build_response(*, client: Client, response: httpx.Response) -> Response[Any]:
    """Wrap a raw httpx response in the generated ``Response`` container.

    Status, body, and headers are captured verbatim; ``parsed`` comes from
    :func:`_parse_response` (always ``None`` for this 204-only endpoint).
    """
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )
def sync_detailed(
    id: str,
    *,
    client: Client,
    json_body: PipelinePartial,
) -> Response[Any]:
    """Synchronously PATCH ``/compute/pipelines/{id}`` with a partial update.

    Args:
        id (str):
        json_body (PipelinePartial): (tsType: Partial<Pipeline>, schemaOptions: { partial: true })

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        id=id,
        client=client,
        json_body=json_body,
    )
    # verify_ssl is passed separately because it configures the transport,
    # not the individual request.
    response = httpx.request(
        verify=client.verify_ssl,
        **kwargs,
    )
    return _build_response(client=client, response=response)
async def asyncio_detailed(
    id: str,
    *,
    client: Client,
    json_body: PipelinePartial,
) -> Response[Any]:
    """Asynchronously PATCH ``/compute/pipelines/{id}`` with a partial update.

    Args:
        id (str):
        json_body (PipelinePartial): (tsType: Partial<Pipeline>, schemaOptions: { partial: true })

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        id=id,
        client=client,
        json_body=json_body,
    )
    # A fresh AsyncClient per call; closed by the context manager even on error.
    async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
        response = await _client.request(**kwargs)
    return _build_response(client=client, response=response)
|
PypiClean
|
/nurses_2-0.18.8.tar.gz/nurses_2-0.18.8/nurses_2/widgets/scroll_view/scroll_view.py
|
from ...clamp import clamp
from ...io import KeyEvent, MouseEventType, MouseEvent
from ..behaviors.grabbable_behavior import GrabbableBehavior
from ..widget import Widget, subscribable, Size
from .scrollbars import _HorizontalBar, _VerticalBar
class ScrollView(GrabbableBehavior, Widget):
"""
A scrollable view widget.
The view can be set with the :attr:`view` property, e.g., ``my_scrollview.view = some_widget``.
Parameters
----------
allow_vertical_scroll : bool, default: True
Allow vertical scrolling.
allow_horizontal_scroll : bool, default: True
Allow horizontal scrolling.
show_vertical_bar : bool, default: True
Show the vertical scrollbar.
show_horizontal_bar : bool, default: True
Show the horizontal scrollbar.
is_grabbable : bool, default: True
Allow moving scroll view by dragging mouse.
scrollwheel_enabled : bool, default: True
Allow vertical scrolling with scrollwheel.
arrow_keys_enabled : bool, default: True
Allow scrolling with arrow keys.
vertical_proportion : float, default: 0.0
Vertical scroll position as a proportion of total.
horizontal_proportion : float, default: 0.0
Horizontal scroll position as a proportion of total.
is_grabbable : bool, default: True
If False, grabbable behavior is disabled.
disable_ptf : bool, default: False
If True, widget will not be pulled to front when grabbed.
size : Size, default: Size(10, 10)
Size of widget.
pos : Point, default: Point(0, 0)
Position of upper-left corner in parent.
size_hint : SizeHint, default: SizeHint(None, None)
Proportion of parent's height and width. Non-None values will have
precedent over :attr:`size`.
min_height : int | None, default: None
Minimum height set due to size_hint. Ignored if corresponding size
hint is None.
max_height : int | None, default: None
Maximum height set due to size_hint. Ignored if corresponding size
hint is None.
min_width : int | None, default: None
Minimum width set due to size_hint. Ignored if corresponding size
hint is None.
max_width : int | None, default: None
Maximum width set due to size_hint. Ignored if corresponding size
hint is None.
pos_hint : PosHint, default: PosHint(None, None)
Position as a proportion of parent's height and width. Non-None values
will have precedent over :attr:`pos`.
anchor : Anchor, default: Anchor.TOP_LEFT
The point of the widget attached to :attr:`pos_hint`.
is_transparent : bool, default: False
If true, background_char and background_color_pair won't be painted.
is_visible : bool, default: True
If false, widget won't be painted, but still dispatched.
is_enabled : bool, default: True
If false, widget won't be painted or dispatched.
background_char : str | None, default: None
The background character of the widget if not `None` and if the widget
is not transparent.
background_color_pair : ColorPair | None, default: None
The background color pair of the widget if not `None` and if the
widget is not transparent.
Attributes
----------
view : Widget | None
The scrolled widget.
allow_vertical_scroll : bool
Allow vertical scrolling.
allow_horizontal_scroll : bool
Allow horizontal scrolling.
show_vertical_bar : bool
Show the vertical scrollbar.
show_horizontal_bar : bool
Show the horizontal scrollbar.
is_grabbable : bool
Allow moving scroll view by dragging mouse.
scrollwheel_enabled : bool
Allow vertical scrolling with scrollwheel.
arrow_keys_enabled : bool
Allow scrolling with arrow keys.
vertical_proportion : float
Vertical scroll position as a proportion of total.
horizontal_proportion : float
Horizontal scroll position as a proportion of total.
view : Widget | None
The scroll view's child.
is_grabbable : bool
If False, grabbable behavior is disabled.
disable_ptf : bool
If True, widget will not be pulled to front when grabbed.
is_grabbed : bool
True if widget is grabbed.
mouse_dyx : Point
Last change in mouse position.
mouse_dy : int
Last vertical change in mouse position.
mouse_dx : int
Last horizontal change in mouse position.
size : Size
Size of widget.
height : int
Height of widget.
rows : int
Alias for :attr:`height`.
width : int
Width of widget.
columns : int
Alias for :attr:`width`.
pos : Point
Position relative to parent.
top : int
Y-coordinate of position.
y : int
Y-coordinate of position.
left : int
X-coordinate of position.
x : int
X-coordinate of position.
bottom : int
:attr:`top` + :attr:`height`.
right : int
:attr:`left` + :attr:`width`.
absolute_pos : Point
Absolute position on screen.
center : Point
Center of widget in local coordinates.
size_hint : SizeHint
Size as a proportion of parent's size.
height_hint : float | None
Height as a proportion of parent's height.
width_hint : float | None
Width as a proportion of parent's width.
min_height : int
Minimum height allowed when using :attr:`size_hint`.
max_height : int
Maximum height allowed when using :attr:`size_hint`.
min_width : int
Minimum width allowed when using :attr:`size_hint`.
max_width : int
Maximum width allowed when using :attr:`size_hint`.
pos_hint : PosHint
Position as a proportion of parent's size.
y_hint : float | None
Vertical position as a proportion of parent's size.
x_hint : float | None
Horizontal position as a proportion of parent's size.
anchor : Anchor
Determines which point is attached to :attr:`pos_hint`.
background_char : str | None
Background character.
background_color_pair : ColorPair | None
Background color pair.
parent : Widget | None
Parent widget.
children : list[Widget]
Children widgets.
is_transparent : bool
True if widget is transparent.
is_visible : bool
True if widget is visible.
is_enabled : bool
True if widget is enabled.
root : Widget | None
If widget is in widget tree, return the root widget.
app : App
The running app.
Methods
-------
grab:
Grab the widget.
ungrab:
Ungrab the widget.
grab_update:
Update widget with incoming mouse events while grabbed.
on_size:
Called when widget is resized.
apply_hints:
Apply size and pos hints.
to_local:
Convert point in absolute coordinates to local coordinates.
collides_point:
True if point is within widget's bounding box.
collides_widget:
True if other is within widget's bounding box.
add_widget:
Add a child widget.
add_widgets:
Add multiple child widgets.
remove_widget:
Remove a child widget.
pull_to_front:
Move to end of widget stack so widget is drawn last.
walk_from_root:
Yield all descendents of root widget.
walk:
Yield all descendents (or ancestors if `reverse` is True).
subscribe:
Subscribe to a widget property.
unsubscribe:
Unsubscribe to a widget property.
on_key:
Handle key press event.
on_mouse:
Handle mouse event.
on_paste:
Handle paste event.
tween:
Sequentially update a widget property over time.
on_add:
Called after a widget is added to widget tree.
on_remove:
Called before widget is removed from widget tree.
prolicide:
Recursively remove all children.
destroy:
Destroy this widget and all descendents.
"""
def __init__(
self,
allow_vertical_scroll=True,
allow_horizontal_scroll=True,
show_vertical_bar=True,
show_horizontal_bar=True,
is_grabbable=True,
scrollwheel_enabled=True,
arrow_keys_enabled=True,
vertical_proportion=0.0,
horizontal_proportion=0.0,
**kwargs,
):
super().__init__(**kwargs)
self.allow_vertical_scroll = allow_vertical_scroll
self.allow_horizontal_scroll = allow_horizontal_scroll
self.is_grabbable = is_grabbable
self.scrollwheel_enabled = scrollwheel_enabled
self.arrow_keys_enabled = arrow_keys_enabled
self._vertical_proportion = clamp(vertical_proportion, 0, 1)
self._horizontal_proportion = clamp(horizontal_proportion, 0, 1)
self._view = None
self._vertical_bar = _VerticalBar(is_enabled=show_vertical_bar)
self._horizontal_bar = _HorizontalBar(is_enabled=show_horizontal_bar)
self.add_widgets(self._vertical_bar, self._horizontal_bar)
@property
def show_vertical_bar(self) -> bool:
return self._vertical_bar.is_enabled
@show_vertical_bar.setter
@subscribable
def show_vertical_bar(self, show: bool):
self._vertical_bar.is_enabled = show
@property
def show_horizontal_bar(self) -> bool:
return self._horizontal_bar.is_enabled
@show_horizontal_bar.setter
@subscribable
def show_horizontal_bar(self, show: bool):
self._horizontal_bar.is_enabled = show
@property
def view(self) -> Widget | None:
return self._view
@property
def vertical_proportion(self):
return self._vertical_proportion
@vertical_proportion.setter
@subscribable
def vertical_proportion(self, value):
if self.allow_vertical_scroll:
if self._view is None or self.total_vertical_distance <= 0:
self._vertical_proportion = 0
else:
self._vertical_proportion = clamp(value, 0, 1)
self._set_view_top()
@property
def horizontal_proportion(self):
return self._horizontal_proportion
@horizontal_proportion.setter
@subscribable
def horizontal_proportion(self, value):
if self.allow_horizontal_scroll:
if self._view is None or self.total_horizontal_distance <= 0:
self._horizontal_proportion = 0
else:
self._horizontal_proportion = clamp(value, 0, 1)
self._set_view_left()
@property
def port_height(self) -> int:
return self.height - self.show_horizontal_bar
@property
def port_width(self) -> int:
return self.width - self.show_vertical_bar * 2
@property
def total_vertical_distance(self) -> int:
"""
Return difference between child height and scrollview height.
"""
if self._view is None:
return 0
return max(0, self._view.height - self.port_height)
@property
def total_horizontal_distance(self) -> int:
"""
Return difference between child width and scrollview width.
"""
if self._view is None:
return 0
return max(0, self._view.width - self.port_width)
def _set_view_top(self):
"""
Set the top-coordinate of the view.
"""
self._view.top = -round(self.vertical_proportion * self.total_vertical_distance)
def _set_view_left(self):
"""
Set the left-coordinate of the view.
"""
self._view.left = -round(self.horizontal_proportion * self.total_horizontal_distance)
def _set_view_pos(self):
"""
Set position of the view.
"""
self._set_view_top()
self._set_view_left()
@property
def view(self) -> Widget | None:
return self._view
@view.setter
def view(self, view: Widget | None):
if self._view is not None:
self.remove_widget(self._view)
self._view = view
if view is not None:
self.add_widget(view)
self.children.insert(0, self.children.pop()) # Move view to top of view stack.
h_ind = self._horizontal_bar.indicator
v_ind = self._vertical_bar.indicator
self.subscribe(view, "size", self._set_view_pos)
h_ind.subscribe(view, "size", h_ind.update_size_pos)
v_ind.subscribe(view, "size", v_ind.update_size_pos)
self._set_view_pos()
def remove_widget(self, widget: Widget):
if widget is self._view:
self._view = None
self.unsubscribe(widget, "size")
self._horizontal_bar.indicator.unsubscribe(widget, "size")
self._vertical_bar.indicator.unsubscribe(widget, "size")
super().remove_widget(widget)
def on_size(self):
if self._view is not None:
self._set_view_pos()
def on_key(self, key_event: KeyEvent):
if not self.arrow_keys_enabled:
return False
match key_event.key:
case "up":
self._scroll_up()
case "down":
self._scroll_down()
case "left":
self._scroll_left()
case "right":
self._scroll_right()
case _:
return super().on_key(key_event)
return True
def grab_update(self, mouse_event: MouseEvent):
self._scroll_up(self.mouse_dy)
self._scroll_left(self.mouse_dx)
def _scroll_left(self, n=1):
if self._view is not None:
if self.total_horizontal_distance == 0:
self.horizontal_proportion = 0
else:
self.horizontal_proportion = clamp((-self.view.left - n) / self.total_horizontal_distance, 0, 1)
def _scroll_right(self, n=1):
self._scroll_left(-n)
def _scroll_up(self, n=1):
if self._view is not None:
if self.total_vertical_distance == 0:
self.vertical_proportion = 0
else:
self.vertical_proportion = clamp((-self.view.top - n) / self.total_vertical_distance, 0, 1)
def _scroll_down(self, n=1):
self._scroll_up(-n)
def on_mouse(self, mouse_event: MouseEvent):
if (
self.scrollwheel_enabled
and self.collides_point(mouse_event.position)
):
match mouse_event.event_type:
case MouseEventType.SCROLL_UP:
self._scroll_up()
return True
case MouseEventType.SCROLL_DOWN:
self._scroll_down()
return True
return super().on_mouse(mouse_event)
|
PypiClean
|
/nni_daily-1.5.2005180104-py3-none-manylinux1_x86_64.whl/nni_daily-1.5.2005180104.data/data/nni/node_modules/moment/dist/locale/gd.js
|
//! moment.js locale configuration: Scottish Gaelic [gd]
import moment from '../moment';

// Full month names, January through December.
var months = [
        'Am Faoilleach',
        'An Gearran',
        'Am Màrt',
        'An Giblean',
        'An Cèitean',
        'An t-Ògmhios',
        'An t-Iuchar',
        'An Lùnastal',
        'An t-Sultain',
        'An Dàmhair',
        'An t-Samhain',
        'An Dùbhlachd',
    ],
    // Abbreviated month names, same order as `months`.
    monthsShort = [
        'Faoi',
        'Gear',
        'Màrt',
        'Gibl',
        'Cèit',
        'Ògmh',
        'Iuch',
        'Lùn',
        'Sult',
        'Dàmh',
        'Samh',
        'Dùbh',
    ],
    // Weekday names, Sunday through Saturday.
    weekdays = [
        'Didòmhnaich',
        'Diluain',
        'Dimàirt',
        'Diciadain',
        'Diardaoin',
        'Dihaoine',
        'Disathairne',
    ],
    weekdaysShort = ['Did', 'Dil', 'Dim', 'Dic', 'Dia', 'Dih', 'Dis'],
    weekdaysMin = ['Dò', 'Lu', 'Mà', 'Ci', 'Ar', 'Ha', 'Sa'];

export default moment.defineLocale('gd', {
    months: months,
    monthsShort: monthsShort,
    monthsParseExact: true,
    weekdays: weekdays,
    weekdaysShort: weekdaysShort,
    weekdaysMin: weekdaysMin,
    longDateFormat: {
        LT: 'HH:mm',
        LTS: 'HH:mm:ss',
        L: 'DD/MM/YYYY',
        LL: 'D MMMM YYYY',
        LLL: 'D MMMM YYYY HH:mm',
        LLLL: 'dddd, D MMMM YYYY HH:mm',
    },
    calendar: {
        sameDay: '[An-diugh aig] LT',
        nextDay: '[A-màireach aig] LT',
        nextWeek: 'dddd [aig] LT',
        lastDay: '[An-dè aig] LT',
        lastWeek: 'dddd [seo chaidh] [aig] LT',
        sameElse: 'L',
    },
    relativeTime: {
        future: 'ann an %s',
        past: 'bho chionn %s',
        s: 'beagan diogan',
        ss: '%d diogan',
        m: 'mionaid',
        mm: '%d mionaidean',
        h: 'uair',
        hh: '%d uairean',
        d: 'latha',
        dd: '%d latha',
        M: 'mìos',
        MM: '%d mìosan',
        y: 'bliadhna',
        yy: '%d bliadhna',
    },
    // Ordinal suffixes: 1d, 2na (and 12na via %10), otherwise Nmh.
    dayOfMonthOrdinalParse: /\d{1,2}(d|na|mh)/,
    ordinal: function (number) {
        var output = number === 1 ? 'd' : number % 10 === 2 ? 'na' : 'mh';
        return number + output;
    },
    week: {
        dow: 1, // Monday is the first day of the week.
        doy: 4, // The week that contains Jan 4th is the first week of the year.
    },
});
|
PypiClean
|
/conduktor_public_api_client-0.0.1.tar.gz/conduktor_public_api_client-0.0.1/conduktor_public_api_client/models/kafka_connect_with_id.py
|
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union
from attrs import define, field
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.kafka_connect_basic_auth import KafkaConnectBasicAuth
from ..models.kafka_connect_bearer_token import KafkaConnectBearerToken
from ..models.kafka_connect_no_security import KafkaConnectNoSecurity
from ..models.kafka_connect_ssl_auth import KafkaConnectSSLAuth
T = TypeVar("T", bound="KafkaConnectWithId")
@define
class KafkaConnectWithId:
"""
Attributes:
id (str):
slug (str):
url (str):
name (str):
security (Union['KafkaConnectBasicAuth', 'KafkaConnectBearerToken', 'KafkaConnectNoSecurity',
'KafkaConnectSSLAuth']):
ignore_untrusted_certificate (bool):
headers (Union[Unset, str]):
"""
id: str
slug: str
url: str
name: str
security: Union[
"KafkaConnectBasicAuth",
"KafkaConnectBearerToken",
"KafkaConnectNoSecurity",
"KafkaConnectSSLAuth",
]
ignore_untrusted_certificate: bool
headers: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = field(init=False, factory=dict)
    def to_dict(self) -> Dict[str, Any]:
        """Serialize this connect definition to a JSON-ready dict.

        ``security`` is serialized via whichever of the union variants it
        is; the ``else`` branch covers KafkaConnectSSLAuth (the only
        variant not isinstance-checked).  ``headers`` is emitted only when
        set.  Named fields are written after ``additional_properties`` so
        they win on key collisions.
        """
        from ..models.kafka_connect_basic_auth import KafkaConnectBasicAuth
        from ..models.kafka_connect_bearer_token import KafkaConnectBearerToken
        from ..models.kafka_connect_no_security import KafkaConnectNoSecurity

        # NOTE: `id` shadows the builtin; kept for generated-code symmetry.
        id = self.id
        slug = self.slug
        url = self.url
        name = self.name

        security: Dict[str, Any]
        if isinstance(self.security, KafkaConnectBasicAuth):
            security = self.security.to_dict()
        elif isinstance(self.security, KafkaConnectBearerToken):
            security = self.security.to_dict()
        elif isinstance(self.security, KafkaConnectNoSecurity):
            security = self.security.to_dict()
        else:
            # Remaining union member: KafkaConnectSSLAuth.
            security = self.security.to_dict()

        ignore_untrusted_certificate = self.ignore_untrusted_certificate
        headers = self.headers

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "slug": slug,
                "url": url,
                "name": name,
                "security": security,
                "ignoreUntrustedCertificate": ignore_untrusted_certificate,
            }
        )
        # Optional field: omitted entirely when UNSET.
        if headers is not UNSET:
            field_dict["headers"] = headers

        return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.kafka_connect_basic_auth import KafkaConnectBasicAuth
from ..models.kafka_connect_bearer_token import KafkaConnectBearerToken
from ..models.kafka_connect_no_security import KafkaConnectNoSecurity
from ..models.kafka_connect_ssl_auth import KafkaConnectSSLAuth
d = src_dict.copy()
id = d.pop("id")
slug = d.pop("slug")
url = d.pop("url")
name = d.pop("name")
def _parse_security(
data: object,
) -> Union[
"KafkaConnectBasicAuth",
"KafkaConnectBearerToken",
"KafkaConnectNoSecurity",
"KafkaConnectSSLAuth",
]:
try:
if not isinstance(data, dict):
raise TypeError()
componentsschemas_kafka_connect_security_type_0 = (
KafkaConnectBasicAuth.from_dict(data)
)
return componentsschemas_kafka_connect_security_type_0
except: # noqa: E722
pass
try:
if not isinstance(data, dict):
raise TypeError()
componentsschemas_kafka_connect_security_type_1 = (
KafkaConnectBearerToken.from_dict(data)
)
return componentsschemas_kafka_connect_security_type_1
except: # noqa: E722
pass
try:
if not isinstance(data, dict):
raise TypeError()
componentsschemas_kafka_connect_security_type_2 = (
KafkaConnectNoSecurity.from_dict(data)
)
return componentsschemas_kafka_connect_security_type_2
except: # noqa: E722
pass
if not isinstance(data, dict):
raise TypeError()
componentsschemas_kafka_connect_security_type_3 = (
KafkaConnectSSLAuth.from_dict(data)
)
return componentsschemas_kafka_connect_security_type_3
security = _parse_security(d.pop("security"))
ignore_untrusted_certificate = d.pop("ignoreUntrustedCertificate")
headers = d.pop("headers", UNSET)
kafka_connect_with_id = cls(
id=id,
slug=slug,
url=url,
name=name,
security=security,
ignore_untrusted_certificate=ignore_untrusted_certificate,
headers=headers,
)
kafka_connect_with_id.additional_properties = d
return kafka_connect_with_id
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
|
PypiClean
|
/aiida_quantumespresso-4.4.0.tar.gz/aiida_quantumespresso-4.4.0/src/aiida_quantumespresso/utils/convert.py
|
"""Utilties to convert between python and fortran data types and formats."""
import numbers
def conv_to_fortran(val, quote_strings=True):
    """Convert a python value to a format suited for fortran input.

    :param val: the value to be read and converted to a Fortran-friendly string.
    :param quote_strings: whether string values are wrapped in single quotes.
    :return: the Fortran string representation of ``val``.
    :raises ValueError: if ``val`` is not a bool, int, float or string.
    """
    import numpy

    # Booleans must be tested before integers, since a Python bool is also
    # an instance of numbers.Integral.
    if isinstance(val, (bool, numpy.bool_)):
        return '.true.' if val else '.false.'

    if isinstance(val, numbers.Integral):
        return f'{val:d}'

    if isinstance(val, numbers.Real):
        # Fortran uses `d` as the exponent marker for double precision.
        return f'{val:18.10e}'.replace('e', 'd')

    if isinstance(val, str):
        return f"'{val!s}'" if quote_strings else f'{val!s}'

    raise ValueError(
        f"Invalid value '{val}' of type '{type(val)}' passed, accepts only bools, ints, floats and strings"
    )
def conv_to_fortran_withlists(val, quote_strings=True):
    """Convert a python value to a format suited for fortran input, recursively for lists.

    Lists and tuples are rendered as a comma-separated sequence of their converted
    elements; scalars follow the same rules as :func:`conv_to_fortran`.

    :param val: the value to be read and converted to a Fortran-friendly string.
    :param quote_strings: whether string values are wrapped in single quotes.
    :return: the Fortran string representation of ``val``.
    :raises ValueError: if ``val`` (or a list element) has an unsupported type.
    """
    # pylint: disable=too-many-return-statements
    import numpy

    if isinstance(val, (list, tuple)):
        return ', '.join(conv_to_fortran(thing, quote_strings=quote_strings) for thing in val)

    # Note that bool should come before integer, because a boolean matches also isinstance(..., int).
    # The abstract `numbers` types (and `numpy.bool_`) are used for consistency with
    # `conv_to_fortran`, so numpy scalar types are handled identically by both functions.
    if isinstance(val, (bool, numpy.bool_)):
        return '.true.' if val else '.false.'

    if isinstance(val, numbers.Integral):
        return f'{val:d}'

    if isinstance(val, numbers.Real):
        return f'{val:18.10e}'.replace('e', 'd')

    if isinstance(val, str):
        return f"'{val!s}'" if quote_strings else f'{val!s}'

    raise ValueError('Invalid value passed, accepts only bools, ints, floats and strings')
def convert_input_to_namelist_entry(key, val, mapping=None):
    """Convert a key and a value, from an input parameters dictionary for a namelist calculation.

    Map it to the appropriate string format for the namelist input file. For single values it will return a single
    string, but for values that are a dictionary, list or tuple, the returned string may be multiline.

    :param key: the namelist keyword name
    :param val: the namelist keyword value

        The value can be either a single value, list/tuple, a double nested list or a dictionary.
        Depending on the type of the value the resulting string differs vastly

        * single list:
          A list of keywords will be generated, where the index of the value in the list will be
          used as the index in the keyword and the value itself will be converted using conv_to_fortran.
          For example::

              'efield': [4, 5, 6]

          will result in::

              efield(1) = 4
              efield(2) = 5
              efield(3) = 6

        * double nested list:
          This format can be used for keywords that require one or more indices that do not necessarily
          follow a sequential number, but take specific values that need to be defined by the user.
          For example::

              'starting_ns_eigenvalue': [
                  [1, 1, 3, 3.5],
                  [2, 1, 1, 2.8]
              ]

          will be formatted as::

              starting_ns_eigenvalue(1,1,3) = 3.5
              starting_ns_eigenvalue(2,1,1) = 2.8

          Note that if the mapping argument is provided in the input, any value in sub lists that matches
          a key in the mapping dictionary (that is to say it is a string that matches one of the kinds), it
          will be replaced with the index of the corresponding atomic species. For example::

              hubbard_j: [
                  [2, 'Ni', 3.5],
                  [2, 'Fe', 7.4],
              ]

          would be formatted as::

              hubbard_j(2,1) = 3.5
              hubbard_j(2,3) = 7.4

          Assuming the mapping dictionary contained the kinds 'Ni' and 'Fe', with the indices 1 and 3, respectively

        * dictionary:
          The keys of this dictionary should correspond to keys in the mapping input argument and will be replaced
          with the corresponding value. This can be used for keywords that take a single index that needs to conform
          to the index of the atomic species to which the keyword value should apply. For example::

              hubbard_u: {
                  'Co': 3.5,
                  'O': 7.4,
              }

          will be formatted as::

              hubbard_u(1) = 3.5
              hubbard_u(3) = 7.4

          assuming that the kinds 'Co' and 'O' would have atomic species indices 1 and 3, respectively.
          This mapping from kind name to atomic species index should be defined by the `mapping` argument.

    :param mapping: optional parameter, that must be provided if val is a dictionary or a double nested list
        where the sub lists contain string values. The keys of the mapping dictionary should be the atomic species
        names that will be encountered in the value, and the corresponding value should be the index of that
        atomic species. Example::

            mapping = {
                'Fe': 1,
                'O': 2,
            }

        This will map every occurrence of 'Fe' and 'O' in the values to the corresponding integer.
    """
    # pylint: disable=too-many-branches,too-many-nested-blocks,no-else-return
    # I don't try to do iterator=iter(val) and catch TypeError because it would also match strings
    # I check first the dictionary, because it would also match hasattr(__iter__)
    if isinstance(val, dict):
        if mapping is None:
            raise ValueError("If 'val' is a dictionary, you must provide also the 'mapping' parameter")
        # At difference with the case of a list, at the beginning list_of_strings
        # is a list of 2-tuples where the first element is the idx, and the
        # second is the actual line. This is used at the end to resort everything.
        list_of_strings = []
        for elemk, itemval in val.items():
            try:
                idx = mapping[elemk]
            except KeyError as exception:
                raise ValueError(f"Unable to find the key '{elemk}' in the mapping dictionary") from exception
            list_of_strings.append((idx, f' {key}({idx}) = {conv_to_fortran(itemval)}\n'))
        # I first have to resort, then to remove the index from the first column, finally to join the strings
        list_of_strings = list(zip(*sorted(list_of_strings)))[1]
        return ''.join(list_of_strings)
    # A list/tuple of values
    elif isinstance(val, (list, tuple)):
        list_of_strings = []
        for idx, itemval in enumerate(val):
            if isinstance(itemval, (list, tuple)):
                # Double nested list: all entries except the last one form the index tuple
                # of the keyword, and the last entry is the keyword's value.
                values = []
                for value in itemval[:-1]:
                    if not isinstance(value, (int, str)):
                        raise ValueError('values of double nested lists should be either integers or strings')
                    if isinstance(value, str):
                        if mapping is None:
                            raise ValueError('cannot map the string value because no mapping was defined')
                        if value not in mapping:
                            raise ValueError(
                                f'the nested list contained string {value} but this is not a key in the mapping'
                            )
                        else:
                            values.append(str(mapping[value]))
                    else:
                        values.append(str(value))
                idx_string = ','.join(values)
                # Read the last entry without `pop()`: popping would mutate the caller's
                # input in place and would fail outright for tuples.
                itemval = itemval[-1]
            else:
                idx_string = f'{idx + 1}'
            list_of_strings.append(f' {key}({idx_string}) = {conv_to_fortran(itemval)}\n')
        return ''.join(list_of_strings)
    # Single value
    else:
        return f' {key} = {conv_to_fortran(val)}\n'
|
PypiClean
|
/snowflake_snowpark_python3-1.0.0-py3-none-any.whl/snowflake/snowpark/udtf.py
|
"""User-defined table functions (UDTFs) in Snowpark. Refer to :class:`~snowflake.snowpark.udtf.UDTFRegistration` for details and sample code."""
import collections.abc
import sys
from types import ModuleType
from typing import (
Callable,
Dict,
Iterable,
List,
Optional,
Tuple,
Type,
Union,
get_args,
get_origin,
get_type_hints,
)
import snowflake.snowpark
from snowflake.connector import ProgrammingError
from snowflake.snowpark._internal import type_utils
from snowflake.snowpark._internal.error_message import SnowparkClientExceptionMessages
from snowflake.snowpark._internal.type_utils import (
ColumnOrName,
python_type_str_to_object,
retrieve_func_type_hints_from_source,
)
from snowflake.snowpark._internal.udf_utils import (
TABLE_FUNCTION_PROCESS_METHOD,
UDFColumn,
check_register_args,
cleanup_failed_permanent_registration,
create_python_udf_or_sp,
process_file_path,
process_registration_inputs,
resolve_imports_and_packages,
)
from snowflake.snowpark._internal.utils import TempObjectType, validate_object_name
from snowflake.snowpark.table_function import TableFunctionCall
from snowflake.snowpark.types import DataType, StructField, StructType
class UserDefinedTableFunction:
    """
    Encapsulates a user defined table function that is returned by
    :func:`~snowflake.snowpark.functions.udtf`, :meth:`UDTFRegistration.register` or
    :meth:`UDTFRegistration.register_from_file`. The constructor of this class is not supposed
    to be called directly.

    Calling an instance of :class:`UserDefinedTableFunction` produces a
    :class:`~snowflake.snowpark.table_function.TableFunctionCall` instance. Each input can be
    a column name given as a :class:`str`, or a :class:`~snowflake.snowpark.Column` object.

    See Also:
        - :class:`UDTFRegistration`
        - :func:`~snowflake.snowpark.functions.udtf`
    """

    def __init__(
        self,
        handler: Union[Callable, Tuple[str, str]],
        output_schema: StructType,
        input_types: List[DataType],
        name: str,
    ) -> None:
        #: The UDTF name.
        self.name: str = name
        #: The Python class or a tuple containing the Python file path and the function name.
        self.handler: Union[Callable, Tuple[str, str]] = handler
        # Schema and input types are kept for registration bookkeeping only.
        self._output_schema = output_schema
        self._input_types = input_types

    def __call__(
        self,
        *arguments: Union[ColumnOrName, Iterable[ColumnOrName]],
        **named_arguments,
    ) -> TableFunctionCall:
        # Build the lazy table-function call and tag it with its API origin
        # so telemetry can attribute the query to this entry point.
        call = TableFunctionCall(self.name, *arguments, **named_arguments)
        call._set_api_call_source("UserDefinedTableFunction.__call__")
        return call
class UDTFRegistration:
"""
Provides methods to register classes as UDTFs in the Snowflake database.
For more information about Snowflake Python UDTFs, see `Python UDTFs <https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-tabular-functions.html>`__.
:attr:`session.udtf <snowflake.snowpark.Session.udtf>` returns an object of this class.
You can use this object to register UDTFs that you plan to use in the current session or
permanently. The methods that register a UDTF return a :class:`UserDefinedTableFunction` object,
which you can also use to call the UDTF.
Registering a UDTF is like registering a scalar UDF, you can use :meth:`register` or :func:`snowflake.snowpark.functions.udtf`
to explicitly register it. You can also use the decorator `@udtf`. They all use ``cloudpickle`` to transfer the code from the client to the server.
Another way is to use :meth:`register_from_file`. Refer to module :class:`snowflake.snowpark.udtf.UDTFRegistration` for when to use them.
To query a registered UDTF is the same as to query other table functions.
Refer to :meth:`~snowflake.snowpark.Session.table_function` and :meth:`~snowflake.snowpark.DataFrame.join_table_function`.
If you want to query a UDTF right after it's created, you can call the created :class:`UserDefinedTableFunction` instance like in Example 1 below.
Example 1
Create a temporary UDTF and call it:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> class GeneratorUDTF:
... def process(self, n):
... for i in range(n):
... yield (i, )
>>> generator_udtf = udtf(GeneratorUDTF, output_schema=StructType([StructField("number", IntegerType())]), input_types=[IntegerType()])
>>> session.table_function(generator_udtf(lit(3))).collect() # Query it by calling it
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
>>> session.table_function(generator_udtf.name, lit(3)).collect() # Query it by using the name
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
>>> # Or you can lateral-join a UDTF like any other table functions
>>> df = session.create_dataframe([2, 3], schema=["c"])
>>> df.join_table_function(generator_udtf(df["c"])).sort("c", "number").show()
------------------
|"C" |"NUMBER" |
------------------
|2 |0 |
|2 |1 |
|3 |0 |
|3 |1 |
|3 |2 |
------------------
<BLANKLINE>
Example 2
Create a UDTF with type hints and ``@udtf`` decorator and query it:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> @udtf(output_schema=["number"])
... class generator_udtf:
... def process(self, n: int) -> Iterable[Tuple[int]]:
... for i in range(n):
... yield (i, )
>>> session.table_function(generator_udtf(lit(3))).collect() # Query it by calling it
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
>>> session.table_function(generator_udtf.name, lit(3)).collect() # Query it by using the name
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
Example 3
Create a permanent UDTF with a name and call it in SQL:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> _ = session.sql("create or replace temp stage mystage").collect()
>>> class GeneratorUDTF:
... def process(self, n):
... for i in range(n):
... yield (i, )
>>> generator_udtf = udtf(
... GeneratorUDTF, output_schema=StructType([StructField("number", IntegerType())]), input_types=[IntegerType()],
... is_permanent=True, name="generator_udtf", replace=True, stage_location="@mystage"
... )
>>> session.sql("select * from table(generator_udtf(3))").collect()
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
Example 4
Create a UDTF with type hints:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> @udtf(output_schema=["n1", "n2"])
... class generator_udtf:
... def process(self, n: int) -> Iterable[Tuple[int, int]]:
... for i in range(n):
... yield (i, i+1)
>>> session.table_function(generator_udtf(lit(3))).collect()
[Row(N1=0, N2=1), Row(N1=1, N2=2), Row(N1=2, N2=3)]
Example 5
Create a UDTF with type hints by using ``...`` for multiple columns of the same type:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> @udtf(output_schema=["n1", "n2"])
... class generator_udtf:
... def process(self, n: int) -> Iterable[Tuple[int, ...]]:
... for i in range(n):
... yield (i, i+1)
>>> session.table_function(generator_udtf(lit(3))).collect()
[Row(N1=0, N2=1), Row(N1=1, N2=2), Row(N1=2, N2=3)]
Example 6
Create a UDTF with UDF-level imports and type hints:
>>> from resources.test_udf_dir.test_udf_file import mod5
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> @udtf(output_schema=["number"], imports=[("tests/resources/test_udf_dir/test_udf_file.py", "resources.test_udf_dir.test_udf_file")])
... class generator_udtf:
... def process(self, n: int) -> Iterable[Tuple[int]]:
... for i in range(n):
... yield (mod5(i), )
>>> session.table_function(generator_udtf(lit(6))).collect()
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2), Row(NUMBER=3), Row(NUMBER=4), Row(NUMBER=0)]
Example 7
Create a UDTF with UDF-level packages and type hints:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> import numpy as np
>>> @udtf(output_schema=["number"], packages=["numpy"])
... class generator_udtf:
... def process(self, n: int) -> Iterable[Tuple[int]]:
... for i in np.arange(n):
... yield (i, )
>>> session.table_function(generator_udtf(lit(3))).collect()
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
Example 8
Creating a UDTF with the constructor and ``end_partition`` method.
>>> from collections import Counter
>>> from typing import Iterable, Tuple
>>> from snowflake.snowpark.functions import lit
>>> class MyWordCount:
... def __init__(self) -> None:
... self._total_per_partition = 0
...
... def process(self, s1: str) -> Iterable[Tuple[str, int]]:
... words = s1.split()
... self._total_per_partition = len(words)
... counter = Counter(words)
... yield from counter.items()
...
... def end_partition(self):
... yield ("partition_total", self._total_per_partition)
>>> udtf_name = "word_count_udtf"
>>> word_count_udtf = session.udtf.register(
... MyWordCount, ["word", "count"], name=udtf_name, is_permanent=False, replace=True
... )
>>> # Call it by its name
>>> df1 = session.table_function(udtf_name, lit("w1 w2 w2 w3 w3 w3"))
>>> df1.show()
-----------------------------
|"WORD" |"COUNT" |
-----------------------------
|w1 |1 |
|w2 |2 |
|w3 |3 |
|partition_total |6 |
-----------------------------
<BLANKLINE>
>>> # Call it by the returned callable instance
>>> df2 = session.table_function(word_count_udtf(lit("w1 w2 w2 w3 w3 w3")))
>>> df2.show()
-----------------------------
|"WORD" |"COUNT" |
-----------------------------
|w1 |1 |
|w2 |2 |
|w3 |3 |
|partition_total |6 |
-----------------------------
<BLANKLINE>
Example 9
Creating a UDTF from a local Python file:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> generator_udtf = session.udtf.register_from_file(
... file_path="tests/resources/test_udtf_dir/test_udtf_file.py",
... handler_name="GeneratorUDTF",
... output_schema=StructType([StructField("number", IntegerType())]),
... input_types=[IntegerType()]
... )
>>> session.table_function(generator_udtf(lit(3))).collect()
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
Example 10
Creating a UDTF from a Python file on an internal stage:
>>> from snowflake.snowpark.types import IntegerType, StructField, StructType
>>> from snowflake.snowpark.functions import udtf, lit
>>> _ = session.sql("create or replace temp stage mystage").collect()
>>> _ = session.file.put("tests/resources/test_udtf_dir/test_udtf_file.py", "@mystage", auto_compress=False)
>>> generator_udtf = session.udtf.register_from_file(
... file_path="@mystage/test_udtf_file.py",
... handler_name="GeneratorUDTF",
... output_schema=StructType([StructField("number", IntegerType())]),
... input_types=[IntegerType()]
... )
>>> session.table_function(generator_udtf(lit(3))).collect()
[Row(NUMBER=0), Row(NUMBER=1), Row(NUMBER=2)]
See Also:
- :func:`~snowflake.snowpark.functions.udtf`
- :meth:`register`
- :meth:`register_from_file`
- :meth:`~snowflake.snowpark.Session.add_import`
- :meth:`~snowflake.snowpark.Session.add_packages`
- :meth:`~snowflake.snowpark.Session.table_function`
- :meth:`~snowflake.snowpark.DataFrame.join_table_function`
"""
    def __init__(self, session: "snowflake.snowpark.Session") -> None:
        """Create a registration helper bound to *session*.

        :param session: the session through which registration statements are executed.
        """
        self._session = session
    def register(
        self,
        handler: Type,
        output_schema: Union[StructType, Iterable[str]],
        input_types: Optional[List[DataType]] = None,
        name: Optional[Union[str, Iterable[str]]] = None,
        is_permanent: bool = False,
        stage_location: Optional[str] = None,
        imports: Optional[List[Union[str, Tuple[str, str]]]] = None,
        packages: Optional[List[Union[str, ModuleType]]] = None,
        replace: bool = False,
        parallel: int = 4,
        strict: bool = False,
        secure: bool = False,
        *,
        statement_params: Optional[Dict[str, str]] = None,
    ) -> UserDefinedTableFunction:
        """
        Registers a Python class as a Snowflake Python UDTF and returns the UDTF.
        The usage, input arguments, and return value of this method are the same as
        they are for :func:`~snowflake.snowpark.functions.udtf`, but :meth:`register`
        cannot be used as a decorator. See examples in
        :class:`~snowflake.snowpark.udtf.UDTFRegistration`.
        Args:
            handler: A Python class used for creating the UDTF.
            output_schema: A list of column names, or a :class:`~snowflake.snowpark.types.StructType` instance that represents the table function's columns.
                If a list of column names is provided, the ``process`` method of the handler class must have a return type hint to indicate the output schema data types.
            input_types: A list of :class:`~snowflake.snowpark.types.DataType`
                representing the input data types of the UDTF. Optional if
                type hints are provided.
            name: A string or list of strings that specify the name or fully-qualified
                object identifier (database name, schema name, and function name) for
                the UDTF in Snowflake.
                If it is not provided, a name will be automatically generated for the UDTF.
                A name must be specified when ``is_permanent`` is ``True``.
            is_permanent: Whether to create a permanent UDTF. The default is ``False``.
                If it is ``True``, a valid ``stage_location`` must be provided.
            stage_location: The stage location where the Python file for the UDTF
                and its dependencies should be uploaded. The stage location must be specified
                when ``is_permanent`` is ``True``, and it will be ignored when
                ``is_permanent`` is ``False``. It can be any stage other than temporary
                stages and external stages.
            imports: A list of imports that only apply to this UDTF. You can use a string to
                represent a file path (similar to the ``path`` argument in
                :meth:`~snowflake.snowpark.Session.add_import`) in this list, or a tuple of two
                strings to represent a file path and an import path (similar to the ``import_path``
                argument in :meth:`~snowflake.snowpark.Session.add_import`). These UDTF-level imports
                will override the session-level imports added by
                :meth:`~snowflake.snowpark.Session.add_import`.
            packages: A list of packages that only apply to this UDTF. These UDTF-level packages
                will override the session-level packages added by
                :meth:`~snowflake.snowpark.Session.add_packages` and
                :meth:`~snowflake.snowpark.Session.add_requirements`.
            replace: Whether to replace a UDTF that already was registered. The default is ``False``.
                If it is ``False``, attempting to register a UDTF with a name that already exists
                results in a ``SnowparkSQLException`` exception being thrown. If it is ``True``,
                an existing UDTF with the same name is overwritten.
            parallel: The number of threads to use for uploading UDTF files with the
                `PUT <https://docs.snowflake.com/en/sql-reference/sql/put.html#put>`_
                command. The default value is 4 and supported values are from 1 to 99.
                Increasing the number of threads can improve performance when uploading
                large UDTF files.
            strict: Whether the created UDTF is strict. A strict UDTF will not invoke the UDTF if any input is
                null. Instead, a null value will always be returned for that row. Note that the UDTF might
                still return null for non-null inputs.
            secure: Whether the created UDTF is secure. For more information about secure functions,
                see `Secure UDFs <https://docs.snowflake.com/en/sql-reference/udf-secure.html>`_.
            statement_params: Dictionary of statement level parameters to be set while executing this action.
        See Also:
            - :func:`~snowflake.snowpark.functions.udtf`
            - :meth:`register_from_file`
        """
        # Fail fast with a clear error before touching the server.
        if not callable(handler):
            raise TypeError(
                "Invalid function: not a function or callable "
                f"(__call__ is not defined): {type(handler)}"
            )
        # Validates name/is_permanent/stage_location/parallel combinations.
        check_register_args(
            TempObjectType.TABLE_FUNCTION, name, is_permanent, stage_location, parallel
        )
        # register udtf
        return self._do_register_udtf(
            handler,
            output_schema,
            input_types,
            name,
            stage_location,
            imports,
            packages,
            replace,
            parallel,
            strict,
            secure,
            statement_params=statement_params,
            api_call_source="UDTFRegistration.register",
        )
    def register_from_file(
        self,
        file_path: str,
        handler_name: str,
        output_schema: Union[StructType, Iterable[str]],
        input_types: Optional[List[DataType]] = None,
        name: Optional[Union[str, Iterable[str]]] = None,
        is_permanent: bool = False,
        stage_location: Optional[str] = None,
        imports: Optional[List[Union[str, Tuple[str, str]]]] = None,
        packages: Optional[List[Union[str, ModuleType]]] = None,
        replace: bool = False,
        parallel: int = 4,
        strict: bool = False,
        secure: bool = False,
        *,
        statement_params: Optional[Dict[str, str]] = None,
    ) -> UserDefinedTableFunction:
        """
        Registers a Python class as a Snowflake Python UDTF from a Python or zip file,
        and returns the UDTF. Apart from ``file_path`` and ``handler_name``, the input arguments
        of this method are the same as :meth:`register`. See examples in
        :class:`~snowflake.snowpark.udtf.UDTFRegistration`.
        Args:
            file_path: The path of a local file or a remote file in the stage. See
                more details on ``path`` argument of
                :meth:`session.add_import() <snowflake.snowpark.Session.add_import>`.
                Note that unlike ``path`` argument of
                :meth:`session.add_import() <snowflake.snowpark.Session.add_import>`,
                here the file can only be a Python file or a compressed file
                (e.g., .zip file) containing Python modules.
            handler_name: The Python class name in the file that the UDTF will use as the handler.
            output_schema: A list of column names, or a :class:`~snowflake.snowpark.types.StructType` instance that represents the table function's columns.
            input_types: A list of :class:`~snowflake.snowpark.types.DataType`
                representing the input data types of the UDTF. Optional if
                type hints are provided.
            name: A string or list of strings that specify the name or fully-qualified
                object identifier (database name, schema name, and function name) for
                the UDTF in Snowflake, which allows you to call this UDTF in a SQL
                command or via :func:`~snowflake.snowpark.functions.call_udtf`.
                If it is not provided, a name will be automatically generated for the UDTF.
                A name must be specified when ``is_permanent`` is ``True``.
            is_permanent: Whether to create a permanent UDTF. The default is ``False``.
                If it is ``True``, a valid ``stage_location`` must be provided.
            stage_location: The stage location where the Python file for the UDTF
                and its dependencies should be uploaded. The stage location must be specified
                when ``is_permanent`` is ``True``, and it will be ignored when
                ``is_permanent`` is ``False``. It can be any stage other than temporary
                stages and external stages.
            imports: A list of imports that only apply to this UDTF. You can use a string to
                represent a file path (similar to the ``path`` argument in
                :meth:`~snowflake.snowpark.Session.add_import`) in this list, or a tuple of two
                strings to represent a file path and an import path (similar to the ``import_path``
                argument in :meth:`~snowflake.snowpark.Session.add_import`). These UDTF-level imports
                will override the session-level imports added by
                :meth:`~snowflake.snowpark.Session.add_import`.
            packages: A list of packages that only apply to this UDTF. These UDTF-level packages
                will override the session-level packages added by
                :meth:`~snowflake.snowpark.Session.add_packages` and
                :meth:`~snowflake.snowpark.Session.add_requirements`.
            replace: Whether to replace a UDTF that already was registered. The default is ``False``.
                If it is ``False``, attempting to register a UDTF with a name that already exists
                results in a ``SnowparkSQLException`` exception being thrown. If it is ``True``,
                an existing UDTF with the same name is overwritten.
            parallel: The number of threads to use for uploading UDTF files with the
                `PUT <https://docs.snowflake.com/en/sql-reference/sql/put.html#put>`_
                command. The default value is 4 and supported values are from 1 to 99.
                Increasing the number of threads can improve performance when uploading
                large UDTF files.
            strict: Whether the created UDTF is strict. A strict UDTF will not invoke the UDTF if any input is
                null. Instead, a null value will always be returned for that row. Note that the UDTF might
                still return null for non-null inputs.
            secure: Whether the created UDTF is secure. For more information about secure functions,
                see `Secure UDFs <https://docs.snowflake.com/en/sql-reference/udf-secure.html>`_.
            statement_params: Dictionary of statement level parameters to be set while executing this action.
        Note::
            The type hints can still be extracted from the source Python file if they
            are provided, but currently are not working for a zip file. Therefore,
            you have to provide ``output_schema`` and ``input_types`` when ``path``
            points to a zip file.
        See Also:
            - :func:`~snowflake.snowpark.functions.udtf`
            - :meth:`register`
        """
        # Normalize/validate the local or stage file path.
        file_path = process_file_path(file_path)
        # Validates name/is_permanent/stage_location/parallel combinations.
        check_register_args(
            TempObjectType.TABLE_FUNCTION, name, is_permanent, stage_location, parallel
        )
        # register udtf
        return self._do_register_udtf(
            (file_path, handler_name),
            output_schema,
            input_types,
            name,
            stage_location,
            imports,
            packages,
            replace,
            parallel,
            strict,
            secure,
            statement_params=statement_params,
            api_call_source="UDTFRegistration.register_from_file",
        )
def _do_register_udtf(
    self,
    handler: Union[Callable, Tuple[str, str]],
    output_schema: Union[StructType, Iterable[str]],
    input_types: Optional[List[DataType]],
    name: Optional[str],
    stage_location: Optional[str] = None,
    imports: Optional[List[Union[str, Tuple[str, str]]]] = None,
    packages: Optional[List[Union[str, ModuleType]]] = None,
    replace: bool = False,
    parallel: int = 4,
    strict: bool = False,
    secure: bool = False,
    *,
    statement_params: Optional[Dict[str, str]] = None,
    api_call_source: str,
) -> UserDefinedTableFunction:
    """Shared backend for ``register`` and ``register_from_file``.

    ``handler`` is either the handler class itself (``register``) or a
    ``(file_path, handler_class_name)`` tuple (``register_from_file``).
    When ``output_schema`` is given as a list of column names, the column
    types are inferred from the return type hint of the handler's
    ``process`` method.

    Raises:
        ValueError: if ``output_schema`` is malformed, or column types
            cannot be inferred from the ``process`` type hints.
    """
    if not isinstance(output_schema, (Iterable, StructType)):
        raise ValueError(
            f"'output_schema' must be a list of column names or StructType instance to create a UDTF. Got {type(output_schema)}."
        )
    if isinstance(output_schema, StructType):
        _validate_output_schema_names(output_schema.names)
    elif isinstance(
        output_schema, Iterable
    ):  # with column names instead of StructType. Read type hints to infer column types.
        output_schema = tuple(output_schema)
        _validate_output_schema_names(output_schema)
        # A typical type hint for method process is like Iterable[Tuple[int, str, datetime]],
        # or Iterable[Tuple[str, ...]]. The inner Tuple is a single row of the result.
        if isinstance(handler, Callable):
            handler_display_name = handler.__name__
            type_hints = get_type_hints(
                getattr(handler, TABLE_FUNCTION_PROCESS_METHOD)
            )
            return_type_hint = type_hints.get("return")
        else:
            # (file_path, class_name): parse the type hints out of the source file.
            # handler has no __name__ here, so keep a printable name for errors.
            handler_display_name = handler[1]
            type_hints = retrieve_func_type_hints_from_source(
                handler[0],
                func_name=TABLE_FUNCTION_PROCESS_METHOD,
                class_name=handler[1],
            )
            return_type_hint = python_type_str_to_object(type_hints.get("return"))
        if not return_type_hint:
            raise ValueError(
                "The return type hint is not set but 'output_schema' has only column names. You can either use a StructType instance for 'output_schema', or use "
                "a combination of a return type hint for method 'process' and column names for 'output_schema'."
            )
        if get_origin(return_type_hint) not in (
            list,
            tuple,
            collections.abc.Iterable,
            collections.abc.Iterator,
        ):
            raise ValueError(
                f"The return type hint for a UDTF handler must be a collection type. {return_type_hint} is used."
            )
        row_type_hint = get_args(return_type_hint)[0]  # The inner Tuple
        if get_origin(row_type_hint) != tuple:
            raise ValueError(
                f"The return type hint of method '{handler_display_name}.process' must be a collection of tuples, for instance, Iterable[Tuple[str, int]], if you specify return type hint."
            )
        column_type_hints = get_args(row_type_hint)
        if len(column_type_hints) > 1 and column_type_hints[1] == Ellipsis:
            # Tuple[T, ...]: every output column shares the single type T.
            output_schema = StructType(
                [
                    StructField(
                        name,
                        type_utils.python_type_to_snow_type(column_type_hints[0])[0],
                    )
                    for name in output_schema
                ]
            )
        else:
            if len(column_type_hints) != len(output_schema):
                raise ValueError(
                    f"'output_schema' has {len(output_schema)} names while type hints Tuple has only {len(column_type_hints)}."
                )
            output_schema = StructType(
                [
                    StructField(
                        name,
                        type_utils.python_type_to_snow_type(column_type)[0],
                    )
                    for name, column_type in zip(output_schema, column_type_hints)
                ]
            )

    # Get the udtf name and input types.
    (udtf_name, _, _, _, input_types,) = process_registration_inputs(
        self._session,
        TempObjectType.TABLE_FUNCTION,
        handler,
        output_schema,
        input_types,
        name,
    )

    arg_names = [f"arg{i + 1}" for i in range(len(input_types))]
    input_args = [
        UDFColumn(dt, arg_name) for dt, arg_name in zip(input_types, arg_names)
    ]

    (
        handler_name,
        code,
        all_imports,
        all_packages,
        upload_file_stage_location,
    ) = resolve_imports_and_packages(
        self._session,
        TempObjectType.TABLE_FUNCTION,
        handler,
        arg_names,
        udtf_name,
        stage_location,
        imports,
        packages,
        parallel,
        False,
        False,
        statement_params=statement_params,
    )

    raised = False
    try:
        create_python_udf_or_sp(
            session=self._session,
            return_type=output_schema,
            input_args=input_args,
            handler=handler_name,
            object_type=TempObjectType.FUNCTION,
            object_name=udtf_name,
            all_imports=all_imports,
            all_packages=all_packages,
            is_temporary=stage_location is None,
            replace=replace,
            inline_python_code=code,
            api_call_source=api_call_source,
            strict=strict,
            secure=secure,
        )
    # An exception might happen during registering a udtf
    # (e.g., a dependency might not be found on the stage),
    # then for a permanent udtf, we should delete the uploaded
    # python file and raise the exception.
    except ProgrammingError as pe:
        raised = True
        tb = sys.exc_info()[2]
        ne = SnowparkClientExceptionMessages.SQL_EXCEPTION_FROM_PROGRAMMING_ERROR(
            pe
        )
        raise ne.with_traceback(tb) from None
    except BaseException:
        raised = True
        raise
    finally:
        # Only a failed permanent registration leaves a stray file on the stage.
        if raised:
            cleanup_failed_permanent_registration(
                self._session, upload_file_stage_location, stage_location
            )

    return UserDefinedTableFunction(handler, output_schema, input_types, udtf_name)
def _validate_output_schema_names(names: Iterable[str]) -> None:
    """Raise if any proposed UDTF output column name is not a valid identifier."""
    for column_name in names:
        validate_object_name(column_name)
|
PypiClean
|
/drf-auth-service-2.0.tar.gz/drf-auth-service-2.0/drf_auth_service/authentication/views.py
|
from urllib.parse import urlparse
from django.utils import timezone
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema, no_body
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework_simplejwt.settings import api_settings
from rest_framework_simplejwt.views import TokenViewBase
from drf_auth_service.authentication.serializers import CustomTokenRefreshSerializer, \
ReturnRegisterSerializer, ReturnAccessTokenSerializer, TokenByCookieSerializer
from drf_auth_service.common.helpers import set_cookies
from drf_auth_service.common.managers import BaseManager
from drf_auth_service.common.mixins import GenericEBSViewSet
from drf_auth_service.common.register_backends import RegisterManager
from drf_auth_service.settings import settings, User
# Reusable swagger response schemas for the login and token-refresh endpoints below.
jwt_response = openapi.Response('Respond with jwt access&refresh token', ReturnRegisterSerializer)
refresh_response = openapi.Response('Respond with jwt access token, ', ReturnAccessTokenSerializer)
class AuthenticationViewSet(GenericEBSViewSet):
    """Registration, password-reset and logout endpoints.

    Serializers and permissions are resolved through package settings so that
    host projects can swap in their own implementations.
    """

    serializer_create_class = settings.SERIALIZERS.REGISTER_SERIALIZER
    serializer_class = settings.SERIALIZERS.REGISTER_RETURN_SERIALIZER
    permission_classes_by_action = settings.PERMISSIONS.AUTHENTICATION_PERMISSIONS

    @action(detail=False, methods=['POST'])
    def register(self, request, *args, **kwargs):
        """Create a user through the backend named by ``register_type``."""
        # JWTs are signed with the calling service's secret, not a global key.
        api_settings.SIGNING_KEY = request.service.secret_token
        serializer = self.get_serializer_create(data=request.data)
        serializer.is_valid(raise_exception=True)
        register_manager = RegisterManager(register_type=serializer.validated_data['register_type'], request=request)
        response = register_manager.register()
        return Response(response)

    @action(detail=False, methods=['POST'], url_path='restore-request', url_name='reset-password',
            serializer_class=settings.SERIALIZERS.RETURN_SUCCESS_SERIALIZER,
            serializer_create_class=settings.SERIALIZERS.SEND_RESET_PASSWORD_SERIALIZER)
    def send_reset_password(self, request, *args, **kwargs):
        """Dispatch a reset-password message via the manager for this user."""
        serializer = self.get_serializer_create(data=request.data)
        serializer.is_valid(raise_exception=True)
        manager = BaseManager.load_manager(serializer.validated_data['user'], configs=None, request=request)
        manager.send_reset_password(serializer.validated_data['user'])
        return Response(self.get_serializer(dict(message='Reset password was sent successfully')).data)

    @action(detail=False, methods=['POST'], url_path='restore-verify',
            serializer_class=settings.SERIALIZERS.RETURN_SUCCESS_SERIALIZER,
            serializer_create_class=settings.SERIALIZERS.RESET_PASSWORD_VERIFY_SERIALIZER)
    def reset_password_verify(self, request, *args, **kwargs):
        """Report whether a reset-password token is valid; always responds 200."""
        serializer = self.get_serializer_create(data=request.data)
        message = 'Valid Token' if serializer.is_valid() else 'Invalid Token'
        return Response(self.get_serializer(
            dict(success=serializer.is_valid(), message=message)
        ).data)

    @action(detail=False, methods=['POST'], url_path='restore-confirm',
            serializer_class=settings.SERIALIZERS.RETURN_SUCCESS_SERIALIZER,
            serializer_create_class=settings.SERIALIZERS.RESET_PASSWORD_CONFIRMATION_SERIALIZER)
    def reset_password_confirm(self, request, *args, **kwargs):
        """Set the new password and consume the reset token."""
        serializer = self.get_serializer_create(data=request.data)
        serializer.is_valid(raise_exception=True)
        password = serializer.validated_data['password']
        reset_password_token = serializer.validated_data['token']
        reset_password_token.user.set_password(password)
        reset_password_token.user.save()
        # One-shot token: delete it so it cannot be replayed.
        reset_password_token.delete()
        return Response(self.get_serializer(dict(message='Password was reset successfully')).data)

    @swagger_auto_schema(method='GET', request_body=no_body, responses={})
    @action(detail=False, methods=['GET'], url_name='logout')
    def logout(self, request, *args, **kwargs):
        """Blank out the refresh-token cookie."""
        response = Response(status=status.HTTP_200_OK)
        # NOTE(review): the cookie value is blanked but max_age keeps the full
        # refresh-token lifetime; max_age=0 would expire it outright -- confirm
        # the intended deletion behaviour.
        response.set_cookie(
            settings.COOKIE_KEY,
            "",  # Clean the token
            max_age=settings.REFRESH_TOKEN_LIFETIME.total_seconds(),
            domain=urlparse(settings.DOMAIN_ADDRESS).netloc if settings.DOMAIN_ADDRESS else None,
            httponly=True,
            secure=True
        )
        # SameSite=None is required for the cookie to be sent cross-site.
        response.cookies[settings.COOKIE_KEY]['samesite'] = 'None'
        return response
class LoginViewSet(TokenViewBase):
    """Issue a JWT pair for valid credentials, signed with the service secret."""

    serializer_class = settings.SERIALIZERS.LOGIN_SERIALIZER
    permission_classes = (settings.PERMISSIONS.SERVICE_TOKEN_PERMISSION,)

    @swagger_auto_schema(request_body=settings.SERIALIZERS.LOGIN_SERIALIZER, responses={200: jwt_response})
    def post(self, request, *args, **kwargs):
        # Each service signs its own tokens with its own secret.
        api_settings.SIGNING_KEY = request.service.secret_token
        # Record the login time before the token pair is issued.
        username = request.data.get('username')
        User.objects.filter(username=username).update(last_login=timezone.now())
        token_response = super().post(request, *args, **kwargs)
        return set_cookies(token_response)
class EBSTokenRefreshView(TokenViewBase):
    """Exchange a valid refresh token for a new access token."""

    serializer_class = CustomTokenRefreshSerializer
    permission_classes = (settings.PERMISSIONS.SERVICE_TOKEN_PERMISSION,)

    @swagger_auto_schema(request_body=CustomTokenRefreshSerializer, responses={200: refresh_response})
    def post(self, request, *args, **kwargs):
        # Tokens are verified/signed with the calling service's secret key.
        api_settings.SIGNING_KEY = request.service.secret_token
        return super().post(request, *args, **kwargs)
class TokenByCookieView(GenericAPIView):
    """Return new token data based on the refresh token stored in the cookie."""

    permission_classes = ()
    serializer_class = TokenByCookieSerializer

    def get(self, request):
        cookie_token = request.COOKIES.get(settings.COOKIE_KEY)
        serializer = TokenByCookieSerializer(data={'refresh': cookie_token})
        if serializer.is_valid():
            payload = {'refresh': cookie_token}
            payload.update(serializer.validated_data)
            return Response(payload, status=status.HTTP_200_OK)
        return Response({settings.COOKIE_KEY: ["Cookie token not valid"]}, status=status.HTTP_403_FORBIDDEN)
|
PypiClean
|
/letigre-moto-0.0.1.tar.gz/letigre-moto-0.0.1/moto/dynamodb2/models.py
|
from __future__ import unicode_literals
from collections import defaultdict
import copy
import datetime
import decimal
import json
import re
import uuid
import boto3
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from moto.core.utils import unix_time
from moto.core.exceptions import JsonRESTError
from .comparisons import get_comparison_func, get_filter_expression, Op
from .exceptions import InvalidIndexNameError
class DynamoJsonEncoder(json.JSONEncoder):
    """JSON encoder that serializes model objects via their ``to_json`` method."""

    def default(self, obj):
        if hasattr(obj, 'to_json'):
            return obj.to_json()
        # Delegate to the base class so objects without a to_json raise
        # TypeError instead of being silently encoded as null (the previous
        # implicit `return None` swallowed serialization errors).
        return super(DynamoJsonEncoder, self).default(obj)
def dynamo_json_dump(dynamo_object):
    # Serialize with DynamoJsonEncoder so nested model objects (DynamoType,
    # Item, ...) are rendered through their to_json() representations.
    return json.dumps(dynamo_object, cls=DynamoJsonEncoder)
class DynamoType(object):
    """
    http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes
    """

    def __init__(self, type_as_dict):
        # The wire format is a one-entry dict mapping the type code to the
        # raw value, e.g. {"N": "12"} or {"SS": ["a", "b"]}.
        self.type, self.value = next(iter(type_as_dict.items()))

    def __hash__(self):
        return hash((self.type, self.value))

    def __eq__(self, other):
        return self.type == other.type and self.value == other.value

    def __lt__(self, other):
        return self.value < other.value

    def __le__(self, other):
        return self.value <= other.value

    def __gt__(self, other):
        return self.value > other.value

    def __ge__(self, other):
        return self.value >= other.value

    def __repr__(self):
        return "DynamoType: {0}".format(self.to_json())

    @property
    def cast_value(self):
        """The raw value coerced into a native Python type."""
        if self.is_number():
            # DynamoDB numbers arrive as strings; prefer int, fall back to float.
            try:
                return int(self.value)
            except ValueError:
                return float(self.value)
        if self.is_set():
            return set(self.value)
        return self.value

    def to_json(self):
        """Render back into DynamoDB wire format."""
        return {self.type: self.value}

    def compare(self, range_comparison, range_objs):
        """
        Compares this type against comparison filters
        """
        operands = [obj.cast_value for obj in range_objs]
        comparator = get_comparison_func(range_comparison)
        return comparator(self.cast_value, *operands)

    def is_number(self):
        return self.type == 'N'

    def is_set(self):
        return self.type in ('SS', 'NS', 'BS')

    def same_type(self, other):
        return self.type == other.type
class Item(BaseModel):
    """A single table item: its key values plus all attributes as DynamoTypes."""

    def __init__(self, hash_key, hash_key_type, range_key, range_key_type, attrs):
        self.hash_key = hash_key
        self.hash_key_type = hash_key_type
        self.range_key = range_key
        self.range_key_type = range_key_type

        self.attrs = {}
        for key, value in attrs.items():
            self.attrs[key] = DynamoType(value)

    def __repr__(self):
        return "Item: {0}".format(self.to_json())

    def to_json(self):
        """Render all attributes in DynamoDB wire format under 'Attributes'."""
        attributes = {}
        for attribute_key, attribute in self.attrs.items():
            attributes[attribute_key] = {
                attribute.type: attribute.value
            }
        return {
            "Attributes": attributes
        }

    def describe_attrs(self, attributes):
        """Return the item under 'Item', restricted to *attributes* when given."""
        if attributes:
            included = {}
            for key, value in self.attrs.items():
                if key in attributes:
                    included[key] = value
        else:
            included = self.attrs
        return {
            "Item": included
        }

    def update(self, update_expression, expression_attribute_names, expression_attribute_values):
        """Apply an UpdateExpression (SET/REMOVE/ADD/DELETE) to this item."""
        # Update subexpressions are identifiable by the operator keyword, so split on that and
        # get rid of the empty leading string.
        parts = [p for p in re.split(r'\b(SET|REMOVE|ADD|DELETE)\b', update_expression, flags=re.I) if p]
        # make sure that we correctly found only operator/value pairs
        assert len(parts) % 2 == 0, "Mismatched operators and values in update expression: '{}'".format(update_expression)
        for action, valstr in zip(parts[:-1:2], parts[1::2]):
            action = action.upper()

            # "Should" retain arguments in side (...)
            values = re.split(r',(?![^(]*\))', valstr)
            for value in values:
                # A Real value
                value = value.lstrip(":").rstrip(",").strip()
                # Substitute expression attribute name placeholders (#name -> name).
                for k, v in expression_attribute_names.items():
                    value = re.sub(r'{0}\b'.format(k), v, value)

                if action == "REMOVE":
                    self.attrs.pop(value, None)
                elif action == 'SET':
                    key, value = value.split("=", 1)
                    key = key.strip()
                    value = value.strip()

                    # If not exists, changes value to a default if needed, else its the same as it was
                    if value.startswith('if_not_exists'):
                        # Function signature
                        match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value)
                        if not match:
                            raise TypeError

                        path, value = match.groups()

                        # If it already exists, get its value so we dont overwrite it
                        if path in self.attrs:
                            value = self.attrs[path]

                    if type(value) != DynamoType:
                        if value in expression_attribute_values:
                            value = DynamoType(expression_attribute_values[value])
                        else:
                            value = DynamoType({"S": value})

                    if '.' not in key:
                        self.attrs[key] = value
                    else:
                        # Handle nested dict updates
                        key_parts = key.split('.')
                        attr = key_parts.pop(0)
                        if attr not in self.attrs:
                            raise ValueError

                        last_val = self.attrs[attr].value
                        for key_part in key_parts:
                            # Hack but it'll do, traverses into a dict
                            last_val_type = list(last_val.keys())
                            if last_val_type and last_val_type[0] == 'M':
                                last_val = last_val['M']

                            if key_part not in last_val:
                                last_val[key_part] = {'M': {}}

                            last_val = last_val[key_part]

                        # We have reference to a nested object but we cant just assign to it
                        current_type = list(last_val.keys())[0]
                        if current_type == value.type:
                            last_val[current_type] = value.value
                        else:
                            last_val[value.type] = value.value
                            del last_val[current_type]
                elif action == 'ADD':
                    key, value = value.split(" ", 1)
                    key = key.strip()
                    value_str = value.strip()
                    if value_str in expression_attribute_values:
                        # Index with the same stripped token used for the membership
                        # test (the raw token could KeyError on stray whitespace).
                        dyn_value = DynamoType(expression_attribute_values[value_str])
                    else:
                        raise TypeError

                    # Handle adding numbers - value gets added to existing value,
                    # or added to 0 if it doesn't exist yet
                    if dyn_value.is_number():
                        existing = self.attrs.get(key, DynamoType({"N": '0'}))
                        if not existing.same_type(dyn_value):
                            raise TypeError()
                        self.attrs[key] = DynamoType({"N": str(
                            decimal.Decimal(existing.value) +
                            decimal.Decimal(dyn_value.value)
                        )})

                    # Handle adding sets - value is added to the set, or set is
                    # created with only this value if it doesn't exist yet
                    # New value must be of same set type as previous value
                    elif dyn_value.is_set():
                        existing = self.attrs.get(key, DynamoType({dyn_value.type: {}}))
                        if not existing.same_type(dyn_value):
                            raise TypeError()
                        new_set = set(existing.value).union(dyn_value.value)
                        self.attrs[key] = DynamoType({existing.type: list(new_set)})
                    else:  # Number and Sets are the only supported types for ADD
                        raise TypeError

                elif action == 'DELETE':
                    key, value = value.split(" ", 1)
                    key = key.strip()
                    value_str = value.strip()
                    if value_str in expression_attribute_values:
                        # See the ADD branch: index with the stripped token.
                        dyn_value = DynamoType(expression_attribute_values[value_str])
                    else:
                        raise TypeError

                    if not dyn_value.is_set():
                        raise TypeError
                    existing = self.attrs.get(key, None)
                    if existing:
                        if not existing.same_type(dyn_value):
                            raise TypeError
                        new_set = set(existing.value).difference(dyn_value.value)
                        self.attrs[key] = DynamoType({existing.type: list(new_set)})
                else:
                    raise NotImplementedError('{} update action not yet supported'.format(action))

    def update_with_attribute_updates(self, attribute_updates):
        """Apply legacy AttributeUpdates (PUT/ADD/DELETE actions) to this item."""
        for attribute_name, update_action in attribute_updates.items():
            action = update_action['Action']
            # DELETE without a Value removes the whole attribute.
            if action == 'DELETE' and 'Value' not in update_action:
                if attribute_name in self.attrs:
                    del self.attrs[attribute_name]
                continue
            new_value = list(update_action['Value'].values())[0]
            if action == 'PUT':
                # TODO deal with other types
                if isinstance(new_value, list) or isinstance(new_value, set):
                    self.attrs[attribute_name] = DynamoType({"SS": new_value})
                elif isinstance(new_value, dict):
                    self.attrs[attribute_name] = DynamoType({"M": new_value})
                elif set(update_action['Value'].keys()) == set(['N']):
                    self.attrs[attribute_name] = DynamoType({"N": new_value})
                elif set(update_action['Value'].keys()) == set(['NULL']):
                    if attribute_name in self.attrs:
                        del self.attrs[attribute_name]
                else:
                    self.attrs[attribute_name] = DynamoType({"S": new_value})
            elif action == 'ADD':
                if set(update_action['Value'].keys()) == set(['N']):
                    existing = self.attrs.get(
                        attribute_name, DynamoType({"N": '0'}))
                    self.attrs[attribute_name] = DynamoType({"N": str(
                        decimal.Decimal(existing.value) +
                        decimal.Decimal(new_value)
                    )})
                elif set(update_action['Value'].keys()) == set(['SS']):
                    existing = self.attrs.get(attribute_name, DynamoType({"SS": {}}))
                    new_set = set(existing.value).union(set(new_value))
                    self.attrs[attribute_name] = DynamoType({
                        "SS": list(new_set)
                    })
                else:
                    # TODO: implement other data types
                    raise NotImplementedError(
                        'ADD not supported for %s' % ', '.join(update_action['Value'].keys()))
            elif action == 'DELETE':
                if set(update_action['Value'].keys()) == set(['SS']):
                    existing = self.attrs.get(attribute_name, DynamoType({"SS": {}}))
                    new_set = set(existing.value).difference(set(new_value))
                    self.attrs[attribute_name] = DynamoType({
                        "SS": list(new_set)
                    })
                else:
                    # Fixed copy-paste: this branch reports DELETE, not ADD.
                    raise NotImplementedError(
                        'DELETE not supported for %s' % ', '.join(update_action['Value'].keys()))
            else:
                raise NotImplementedError(
                    '%s action not support for update_with_attribute_updates' % action)
class StreamRecord(BaseModel):
    """One DynamoDB Streams event, built from an item's before/after images."""

    def __init__(self, table, stream_type, event_name, old, new, seq):
        old_image = old.to_json()['Attributes'] if old is not None else {}
        new_image = new.to_json()['Attributes'] if new is not None else {}

        # Either image carries the key attributes; prefer the old one.
        source_item = old if old is not None else new
        keys = {table.hash_key_attr: source_item.hash_key.to_json()}
        if table.range_key_attr is not None:
            keys[table.range_key_attr] = source_item.range_key.to_json()

        dynamodb = {
            'StreamViewType': stream_type,
            'ApproximateCreationDateTime': datetime.datetime.utcnow().isoformat(),
            'SequenceNumber': seq,
            'SizeBytes': 1,
            'Keys': keys,
        }
        self.record = {
            'eventID': uuid.uuid4().hex,
            'eventName': event_name,
            'eventSource': 'aws:dynamodb',
            'eventVersion': '1.0',
            'awsRegion': 'us-east-1',
            'dynamodb': dynamodb,
        }

        if stream_type in ('NEW_IMAGE', 'NEW_AND_OLD_IMAGES'):
            dynamodb['NewImage'] = new_image
        if stream_type in ('OLD_IMAGE', 'NEW_AND_OLD_IMAGES'):
            dynamodb['OldImage'] = old_image

        # This is a substantial overestimate but it's the easiest to do now
        dynamodb['SizeBytes'] = len(json.dumps(dynamodb))

    def to_json(self):
        """Return the raw stream-record dict."""
        return self.record
class StreamShard(BaseModel):
    """The single, fixed shard moto exposes for a table's stream."""

    def __init__(self, table):
        self.table = table
        self.id = 'shardId-00000001541626099285-f35f62ef'
        self.starting_sequence_number = 1100000000017454423009
        self.items = []
        self.created_on = datetime.datetime.utcnow()

    def to_json(self):
        """Describe the shard in DescribeStream format."""
        return {
            'ShardId': self.id,
            'SequenceNumberRange': {
                'StartingSequenceNumber': str(self.starting_sequence_number)
            }
        }

    def add(self, old, new):
        """Append a record for an item transition (insert/modify/delete)."""
        view_type = self.table.stream_specification['StreamViewType']
        if old is None:
            event_name = 'INSERT'
        elif new is None:
            event_name = 'DELETE'
        else:
            event_name = 'MODIFY'
        seq = self.starting_sequence_number + len(self.items)
        self.items.append(
            StreamRecord(self.table, view_type, event_name, old, new, seq))

    def get(self, start, quantity):
        """Return up to *quantity* records beginning at sequence number *start*."""
        offset = start - self.starting_sequence_number
        assert offset >= 0
        return [record.to_json() for record in self.items[offset:offset + quantity]]
class Table(BaseModel):
    """In-memory model of one DynamoDB table: key schema, items, secondary
    indexes, throughput, TTL state and (optionally) a stream."""

    def __init__(self, table_name, schema=None, attr=None, throughput=None, indexes=None, global_indexes=None, streams=None):
        self.name = table_name
        self.attr = attr
        self.schema = schema
        self.range_key_attr = None
        self.hash_key_attr = None
        self.range_key_type = None
        self.hash_key_type = None
        # Split the key schema into its hash and range components.
        for elem in schema:
            if elem["KeyType"] == "HASH":
                self.hash_key_attr = elem["AttributeName"]
                # NOTE(review): this stores the KeyType string ("HASH"/"RANGE"),
                # not the attribute's data type -- confirm this is intentional.
                self.hash_key_type = elem["KeyType"]
            else:
                self.range_key_attr = elem["AttributeName"]
                self.range_key_type = elem["KeyType"]
        if throughput is None:
            self.throughput = {
                'WriteCapacityUnits': 10, 'ReadCapacityUnits': 10}
        else:
            self.throughput = throughput
        self.throughput["NumberOfDecreasesToday"] = 0
        self.indexes = indexes
        self.global_indexes = global_indexes if global_indexes else []
        self.created_at = datetime.datetime.utcnow()
        # hash_key -> Item, or hash_key -> {range_key -> Item} when a range key exists.
        self.items = defaultdict(dict)
        self.table_arn = self._generate_arn(table_name)
        self.tags = []
        self.ttl = {
            'TimeToLiveStatus': 'DISABLED'  # One of 'ENABLING'|'DISABLING'|'ENABLED'|'DISABLED',
            # 'AttributeName': 'string'  # Can contain this
        }
        self.set_stream_specification(streams)

    def _generate_arn(self, name):
        # Fixed account id and region, matching moto's other dynamodb defaults.
        return 'arn:aws:dynamodb:us-east-1:123456789011:table/' + name

    def set_stream_specification(self, streams):
        """Enable or disable the stream and (re)create its single shard."""
        self.stream_specification = streams
        if streams and (streams.get('StreamEnabled') or streams.get('StreamViewType')):
            self.stream_specification['StreamEnabled'] = True
            self.latest_stream_label = datetime.datetime.utcnow().isoformat()
            self.stream_shard = StreamShard(self)
        else:
            self.stream_specification = {'StreamEnabled': False}
            self.latest_stream_label = None
            self.stream_shard = None

    def describe(self, base_key='TableDescription'):
        """Return the table description in DescribeTable/CreateTable format."""
        results = {
            base_key: {
                'AttributeDefinitions': self.attr,
                'ProvisionedThroughput': self.throughput,
                'TableSizeBytes': 0,
                'TableName': self.name,
                'TableStatus': 'ACTIVE',
                'TableArn': self.table_arn,
                'KeySchema': self.schema,
                'ItemCount': len(self),
                'CreationDateTime': unix_time(self.created_at),
                'GlobalSecondaryIndexes': [index for index in self.global_indexes],
                'LocalSecondaryIndexes': [index for index in self.indexes],
            }
        }
        if self.stream_specification and self.stream_specification['StreamEnabled']:
            results[base_key]['StreamSpecification'] = self.stream_specification
            if self.latest_stream_label:
                results[base_key]['LatestStreamLabel'] = self.latest_stream_label
                results[base_key]['LatestStreamArn'] = self.table_arn + '/stream/' + self.latest_stream_label
        return results

    def __len__(self):
        # Total item count across all hash buckets.
        count = 0
        for key, value in self.items.items():
            if self.has_range_key:
                count += len(value)
            else:
                count += 1
        return count

    @property
    def hash_key_names(self):
        """Hash-key attribute names of the table and every global index."""
        keys = [self.hash_key_attr]
        for index in self.global_indexes:
            hash_key = None
            for key in index['KeySchema']:
                if key['KeyType'] == 'HASH':
                    hash_key = key['AttributeName']
            keys.append(hash_key)
        return keys

    @property
    def range_key_names(self):
        """Range-key attribute names of the table and every global index."""
        keys = [self.range_key_attr]
        for index in self.global_indexes:
            range_key = None
            for key in index['KeySchema']:
                if key['KeyType'] == 'RANGE':
                    # NOTE(review): list.append returns None, so range_key stays
                    # None and the keys.append(range_key) below adds a spurious
                    # None after the name -- likely a bug; compare hash_key_names.
                    range_key = keys.append(key['AttributeName'])
            keys.append(range_key)
        return keys

    def put_item(self, item_attrs, expected=None, overwrite=False):
        """Store an item, enforcing legacy Expected conditions unless overwriting."""
        hash_value = DynamoType(item_attrs.get(self.hash_key_attr))
        if self.has_range_key:
            range_value = DynamoType(item_attrs.get(self.range_key_attr))
        else:
            range_value = None

        if expected is None:
            expected = {}
            lookup_range_value = range_value
        else:
            # An expected range-key value overrides the item's own range key
            # when looking up the current item.
            expected_range_value = expected.get(
                self.range_key_attr, {}).get("Value")
            if(expected_range_value is None):
                lookup_range_value = range_value
            else:
                lookup_range_value = DynamoType(expected_range_value)

        current = self.get_item(hash_value, lookup_range_value)

        item = Item(hash_value, self.hash_key_type, range_value,
                    self.range_key_type, item_attrs)

        if not overwrite:
            if current is None:
                current_attr = {}
            elif hasattr(current, 'attrs'):
                current_attr = current.attrs
            else:
                current_attr = current

            # Evaluate each Expected condition against the current attributes.
            for key, val in expected.items():
                if 'Exists' in val and val['Exists'] is False \
                        or 'ComparisonOperator' in val and val['ComparisonOperator'] == 'NULL':
                    if key in current_attr:
                        raise ValueError("The conditional request failed")
                elif key not in current_attr:
                    raise ValueError("The conditional request failed")
                elif 'Value' in val and DynamoType(val['Value']).value != current_attr[key].value:
                    raise ValueError("The conditional request failed")
                elif 'ComparisonOperator' in val:
                    dynamo_types = [
                        DynamoType(ele) for ele in
                        val.get("AttributeValueList", [])
                    ]
                    if not current_attr[key].compare(val['ComparisonOperator'], dynamo_types):
                        raise ValueError('The conditional request failed')
        if range_value:
            self.items[hash_value][range_value] = item
        else:
            self.items[hash_value] = item

        # Publish the transition to the stream, if one is enabled.
        if self.stream_shard is not None:
            self.stream_shard.add(current, item)

        return item

    def __nonzero__(self):
        # An empty Table is still truthy (a defaultdict bucket should not
        # make the table read as absent).
        return True

    def __bool__(self):
        return self.__nonzero__()

    @property
    def has_range_key(self):
        return self.range_key_attr is not None

    def get_item(self, hash_key, range_key=None):
        """Fetch a single item by key, or None if it does not exist."""
        if self.has_range_key and not range_key:
            raise ValueError(
                "Table has a range key, but no range key was passed into get_item")
        try:
            if range_key:
                return self.items[hash_key][range_key]

            if hash_key in self.items:
                return self.items[hash_key]

            raise KeyError
        except KeyError:
            return None

    def delete_item(self, hash_key, range_key):
        """Remove and return an item by key, or None if it does not exist."""
        try:
            if range_key:
                item = self.items[hash_key].pop(range_key)
            else:
                item = self.items.pop(hash_key)

            if self.stream_shard is not None:
                self.stream_shard.add(item, None)

            return item
        except KeyError:
            return None

    def query(self, hash_key, range_comparison, range_objs, limit,
              exclusive_start_key, scan_index_forward, projection_expression,
              index_name=None, filter_expression=None, **filter_kwargs):
        """Run a Query against the table or one of its indexes.

        Returns (results, scanned_count, last_evaluated_key).
        """
        results = []

        if index_name:
            all_indexes = self.all_indexes()
            indexes_by_name = dict((i['IndexName'], i) for i in all_indexes)
            if index_name not in indexes_by_name:
                raise ValueError('Invalid index: %s for table: %s. Available indexes are: %s' % (
                    index_name, self.name, ', '.join(indexes_by_name.keys())
                ))

            index = indexes_by_name[index_name]
            try:
                index_hash_key = [key for key in index[
                    'KeySchema'] if key['KeyType'] == 'HASH'][0]
            except IndexError:
                raise ValueError('Missing Hash Key. KeySchema: %s' %
                                 index['KeySchema'])

            try:
                index_range_key = [key for key in index[
                    'KeySchema'] if key['KeyType'] == 'RANGE'][0]
            except IndexError:
                index_range_key = None

            # Candidate items are those whose attributes cover the index keys.
            possible_results = []
            for item in self.all_items():
                if not isinstance(item, Item):
                    continue
                item_hash_key = item.attrs.get(index_hash_key['AttributeName'])
                if index_range_key is None:
                    if item_hash_key and item_hash_key == hash_key:
                        possible_results.append(item)
                else:
                    item_range_key = item.attrs.get(index_range_key['AttributeName'])
                    if item_hash_key and item_hash_key == hash_key and item_range_key:
                        possible_results.append(item)
        else:
            possible_results = [item for item in list(self.all_items()) if isinstance(
                item, Item) and item.hash_key == hash_key]

        if range_comparison:
            if index_name and not index_range_key:
                raise ValueError(
                    'Range Key comparison but no range key found for index: %s' % index_name)

            elif index_name:
                for result in possible_results:
                    if result.attrs.get(index_range_key['AttributeName']).compare(range_comparison, range_objs):
                        results.append(result)
            else:
                for result in possible_results:
                    if result.range_key.compare(range_comparison, range_objs):
                        results.append(result)

        if filter_kwargs:
            # Legacy QueryFilter conditions on non-key attributes.
            for result in possible_results:
                for field, value in filter_kwargs.items():
                    dynamo_types = [DynamoType(ele) for ele in value[
                        "AttributeValueList"]]
                    if result.attrs.get(field).compare(value['ComparisonOperator'], dynamo_types):
                        results.append(result)

        if not range_comparison and not filter_kwargs:
            # If we're not filtering on range key or on an index return all
            # values
            results = possible_results

        if index_name:
            if index_range_key:
                results.sort(key=lambda item: item.attrs[index_range_key['AttributeName']].value
                             if item.attrs.get(index_range_key['AttributeName']) else None)
        else:
            results.sort(key=lambda item: item.range_key)

        if scan_index_forward is False:
            results.reverse()

        scanned_count = len(list(self.all_items()))

        if filter_expression is not None:
            results = [item for item in results if filter_expression.expr(item)]

        if projection_expression:
            # Deep-copy so trimming attributes does not mutate stored items.
            expressions = [x.strip() for x in projection_expression.split(',')]
            results = copy.deepcopy(results)
            for result in results:
                for attr in list(result.attrs):
                    if attr not in expressions:
                        result.attrs.pop(attr)

        results, last_evaluated_key = self._trim_results(results, limit,
                                                         exclusive_start_key)
        return results, scanned_count, last_evaluated_key

    def all_items(self):
        """Yield every stored Item, flattening range-key buckets."""
        for hash_set in self.items.values():
            if self.range_key_attr:
                for item in hash_set.values():
                    yield item
            else:
                yield hash_set

    def all_indexes(self):
        return (self.global_indexes or []) + (self.indexes or [])

    def has_idx_items(self, index_name):
        """Yield items whose attributes cover all key attributes of the index."""
        all_indexes = self.all_indexes()
        indexes_by_name = dict((i['IndexName'], i) for i in all_indexes)
        idx = indexes_by_name[index_name]
        idx_col_set = set([i['AttributeName'] for i in idx['KeySchema']])

        for hash_set in self.items.values():
            if self.range_key_attr:
                for item in hash_set.values():
                    if idx_col_set.issubset(set(item.attrs)):
                        yield item
            else:
                if idx_col_set.issubset(set(hash_set.attrs)):
                    yield hash_set

    def scan(self, filters, limit, exclusive_start_key, filter_expression=None, index_name=None):
        """Run a Scan (optionally via an index) with filters and paging.

        Returns (results, scanned_count, last_evaluated_key).
        """
        results = []
        scanned_count = 0
        all_indexes = self.all_indexes()
        indexes_by_name = dict((i['IndexName'], i) for i in all_indexes)

        if index_name:
            if index_name not in indexes_by_name:
                raise InvalidIndexNameError('The table does not have the specified index: %s' % index_name)
            items = self.has_idx_items(index_name)
        else:
            items = self.all_items()

        for item in items:
            scanned_count += 1
            passes_all_conditions = True
            for attribute_name, (comparison_operator, comparison_objs) in filters.items():
                attribute = item.attrs.get(attribute_name)

                if attribute:
                    # Attribute found
                    if not attribute.compare(comparison_operator, comparison_objs):
                        passes_all_conditions = False
                        break
                elif comparison_operator == 'NULL':
                    # Comparison is NULL and we don't have the attribute
                    continue
                else:
                    # No attribute found and comparison is no NULL. This item
                    # fails
                    passes_all_conditions = False
                    break

            if filter_expression is not None:
                passes_all_conditions &= filter_expression.expr(item)

            if passes_all_conditions:
                results.append(item)

        results, last_evaluated_key = self._trim_results(results, limit,
                                                         exclusive_start_key, index_name)
        return results, scanned_count, last_evaluated_key

    def _trim_results(self, results, limit, exclusive_start_key, scaned_index=None):
        """Apply ExclusiveStartKey/Limit paging and compute LastEvaluatedKey."""
        if exclusive_start_key is not None:
            hash_key = DynamoType(exclusive_start_key.get(self.hash_key_attr))
            range_key = exclusive_start_key.get(self.range_key_attr)
            if range_key is not None:
                range_key = DynamoType(range_key)
            for i in range(len(results)):
                if results[i].hash_key == hash_key and results[i].range_key == range_key:
                    # Resume immediately after the start key.
                    results = results[i + 1:]
                    break

        last_evaluated_key = None
        if limit and len(results) > limit:
            results = results[:limit]
            last_evaluated_key = {
                self.hash_key_attr: results[-1].hash_key
            }
            if results[-1].range_key is not None:
                last_evaluated_key[self.range_key_attr] = results[-1].range_key

            if scaned_index:
                # Index scans also include the index key attributes in the
                # LastEvaluatedKey.
                all_indexes = self.all_indexes()
                indexes_by_name = dict((i['IndexName'], i) for i in all_indexes)
                idx = indexes_by_name[scaned_index]
                idx_col_list = [i['AttributeName'] for i in idx['KeySchema']]
                for col in idx_col_list:
                    last_evaluated_key[col] = results[-1].attrs[col]

        return results, last_evaluated_key

    def lookup(self, *args, **kwargs):
        """Positional-args convenience wrapper around get_item."""
        if not self.schema:
            self.describe()
        for x, arg in enumerate(args):
            # NOTE(review): schema entries are treated as dicts elsewhere in
            # this class (elem["KeyType"]) but attribute access is used here --
            # confirm which schema representation this path actually receives.
            kwargs[self.schema[x].name] = arg
        ret = self.get_item(**kwargs)
        if not ret.keys():
            return None
        return ret
class DynamoDBBackend(BaseBackend):
    """In-memory mock of the DynamoDB service for a single region.

    Tables live in an OrderedDict keyed by table name; each value is a
    Table instance that implements the actual item storage and querying.
    """
    def __init__(self, region_name=None):
        self.region_name = region_name
        # name -> Table; OrderedDict preserves creation order for listings.
        self.tables = OrderedDict()
    def reset(self):
        """Drop all state and re-initialise, preserving the region name."""
        region_name = self.region_name
        # Wipe every attribute, then rebuild via __init__ so the backend
        # looks freshly constructed.
        self.__dict__ = {}
        self.__init__(region_name)
    def create_table(self, name, **params):
        """Create a table; returns None (no-op) if the name already exists."""
        if name in self.tables:
            return None
        table = Table(name, **params)
        self.tables[name] = table
        return table
    def delete_table(self, name):
        """Remove and return the named table, or None if it does not exist."""
        return self.tables.pop(name, None)
    def tag_resource(self, table_arn, tags):
        """Append *tags* to the table whose ARN matches *table_arn*."""
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                self.tables[table].tags.extend(tags)
    def untag_resource(self, table_arn, tag_keys):
        """Drop tags whose 'Key' is listed in *tag_keys* from the matching table."""
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                self.tables[table].tags = [tag for tag in self.tables[table].tags if tag['Key'] not in tag_keys]
    def list_tags_of_resource(self, table_arn):
        """Return the tag list of the table with the given ARN.

        NOTE(review): if no table matches the ARN this raises
        AttributeError on ``None.tags`` rather than a service error --
        confirm callers guard against unknown ARNs.
        """
        required_table = None
        for table in self.tables:
            if self.tables[table].table_arn == table_arn:
                required_table = self.tables[table]
        return required_table.tags
    def update_table_throughput(self, name, throughput):
        """Replace the provisioned-throughput document of the named table."""
        table = self.tables[name]
        table.throughput = throughput
        return table
    def update_table_streams(self, name, stream_specification):
        """Set the table's stream specification.

        Raises ValueError when enabling a stream while one is already active.
        """
        table = self.tables[name]
        if (stream_specification.get('StreamEnabled') or stream_specification.get('StreamViewType')) and table.latest_stream_label:
            raise ValueError('Table already has stream enabled')
        table.set_stream_specification(stream_specification)
        return table
    def update_table_global_indexes(self, name, global_index_updates):
        """Apply a list of GSI Create/Update/Delete actions to the table.

        Raises ValueError for updates/deletes of unknown indexes and for
        creating an index name that already exists.
        """
        table = self.tables[name]
        gsis_by_name = dict((i['IndexName'], i) for i in table.global_indexes)
        for gsi_update in global_index_updates:
            gsi_to_create = gsi_update.get('Create')
            gsi_to_update = gsi_update.get('Update')
            gsi_to_delete = gsi_update.get('Delete')
            if gsi_to_delete:
                index_name = gsi_to_delete['IndexName']
                if index_name not in gsis_by_name:
                    raise ValueError('Global Secondary Index does not exist, but tried to delete: %s' %
                                     gsi_to_delete['IndexName'])
                del gsis_by_name[index_name]
            if gsi_to_update:
                index_name = gsi_to_update['IndexName']
                if index_name not in gsis_by_name:
                    raise ValueError('Global Secondary Index does not exist, but tried to update: %s' %
                                     gsi_to_update['IndexName'])
                gsis_by_name[index_name].update(gsi_to_update)
            if gsi_to_create:
                if gsi_to_create['IndexName'] in gsis_by_name:
                    raise ValueError(
                        'Global Secondary Index already exists: %s' % gsi_to_create['IndexName'])
                gsis_by_name[gsi_to_create['IndexName']] = gsi_to_create
        # in python 3.6, dict.values() returns a dict_values object, but we expect it to be a list in other
        # parts of the codebase
        table.global_indexes = list(gsis_by_name.values())
        return table
    def put_item(self, table_name, item_attrs, expected=None, overwrite=False):
        """Put an item into the named table; returns None if the table is missing."""
        table = self.tables.get(table_name)
        if not table:
            return None
        return table.put_item(item_attrs, expected, overwrite)
    def get_table_keys_name(self, table_name, keys):
        """
        Given a set of keys, extracts the key and range key

        Returns (hash_key_name, range_key_name); either may be None when
        the table is unknown or the names cannot be matched.
        """
        table = self.tables.get(table_name)
        if not table:
            return None, None
        else:
            if len(keys) == 1:
                for key in keys:
                    if key in table.hash_key_names:
                        return key, None
            # for potential_hash, potential_range in zip(table.hash_key_names, table.range_key_names):
            #     if set([potential_hash, potential_range]) == set(keys):
            #         return potential_hash, potential_range
            potential_hash, potential_range = None, None
            for key in set(keys):
                if key in table.hash_key_names:
                    potential_hash = key
                elif key in table.range_key_names:
                    potential_range = key
            return potential_hash, potential_range
    def get_keys_value(self, table, keys):
        """Extract (hash_key, range_key) DynamoType values from a key dict.

        Raises ValueError when a required key attribute is absent.
        """
        if table.hash_key_attr not in keys or (table.has_range_key and table.range_key_attr not in keys):
            raise ValueError(
                "Table has a range key, but no range key was passed into get_item")
        hash_key = DynamoType(keys[table.hash_key_attr])
        range_key = DynamoType(
            keys[table.range_key_attr]) if table.has_range_key else None
        return hash_key, range_key
    def get_table(self, table_name):
        """Return the named Table or None."""
        return self.tables.get(table_name)
    def get_item(self, table_name, keys):
        """Fetch a single item; raises ValueError when the table is unknown."""
        table = self.get_table(table_name)
        if not table:
            raise ValueError("No table found")
        hash_key, range_key = self.get_keys_value(table, keys)
        return table.get_item(hash_key, range_key)
    def query(self, table_name, hash_key_dict, range_comparison, range_value_dicts,
              limit, exclusive_start_key, scan_index_forward, projection_expression, index_name=None,
              expr_names=None, expr_values=None, filter_expression=None,
              **filter_kwargs):
        """Delegate a Query to the table, wrapping raw key values in DynamoType.

        Returns (None, None) when the table does not exist.
        """
        table = self.tables.get(table_name)
        if not table:
            return None, None
        hash_key = DynamoType(hash_key_dict)
        range_values = [DynamoType(range_value)
                        for range_value in range_value_dicts]
        if filter_expression is not None:
            filter_expression = get_filter_expression(filter_expression, expr_names, expr_values)
        else:
            filter_expression = Op(None, None)  # Will always eval to true
        return table.query(hash_key, range_comparison, range_values, limit,
                           exclusive_start_key, scan_index_forward, projection_expression, index_name, filter_expression, **filter_kwargs)
    def scan(self, table_name, filters, limit, exclusive_start_key, filter_expression, expr_names, expr_values, index_name):
        """Delegate a Scan, converting legacy filter values to DynamoType.

        Returns (None, None, None) when the table does not exist.
        """
        table = self.tables.get(table_name)
        if not table:
            return None, None, None
        scan_filters = {}
        for key, (comparison_operator, comparison_values) in filters.items():
            dynamo_types = [DynamoType(value) for value in comparison_values]
            scan_filters[key] = (comparison_operator, dynamo_types)
        if filter_expression is not None:
            filter_expression = get_filter_expression(filter_expression, expr_names, expr_values)
        else:
            filter_expression = Op(None, None)  # Will always eval to true
        return table.scan(scan_filters, limit, exclusive_start_key, filter_expression, index_name)
    def update_item(self, table_name, key, update_expression, attribute_updates, expression_attribute_names,
                    expression_attribute_values, expected=None):
        """Update (or upsert) an item after checking any 'expected' conditions.

        Raises ValueError("The conditional request failed") when an
        Expected/ComparisonOperator condition is not met.
        """
        table = self.get_table(table_name)
        if all([table.hash_key_attr in key, table.range_key_attr in key]):
            # Covers cases where table has hash and range keys, ``key`` param
            # will be a dict
            hash_value = DynamoType(key[table.hash_key_attr])
            range_value = DynamoType(key[table.range_key_attr])
        elif table.hash_key_attr in key:
            # Covers tables that have a range key where ``key`` param is a dict
            hash_value = DynamoType(key[table.hash_key_attr])
            range_value = None
        else:
            # Covers other cases
            hash_value = DynamoType(key)
            range_value = None
        item = table.get_item(hash_value, range_value)
        if item is None:
            item_attr = {}
        elif hasattr(item, 'attrs'):
            item_attr = item.attrs
        else:
            item_attr = item
        if not expected:
            expected = {}
        # NOTE: ``key`` is rebound below, shadowing the key-dict parameter.
        for key, val in expected.items():
            if 'Exists' in val and val['Exists'] is False \
                    or 'ComparisonOperator' in val and val['ComparisonOperator'] == 'NULL':
                if key in item_attr:
                    raise ValueError("The conditional request failed")
            elif key not in item_attr:
                raise ValueError("The conditional request failed")
            elif 'Value' in val and DynamoType(val['Value']).value != item_attr[key].value:
                raise ValueError("The conditional request failed")
            elif 'ComparisonOperator' in val:
                dynamo_types = [
                    DynamoType(ele) for ele in
                    val.get("AttributeValueList", [])
                ]
                if not item_attr[key].compare(val['ComparisonOperator'], dynamo_types):
                    raise ValueError('The conditional request failed')
        # Update does not fail on new items, so create one
        if item is None:
            data = {
                table.hash_key_attr: {
                    hash_value.type: hash_value.value,
                },
            }
            if range_value:
                data.update({
                    table.range_key_attr: {
                        range_value.type: range_value.value,
                    }
                })
            table.put_item(data)
            item = table.get_item(hash_value, range_value)
        if update_expression:
            item.update(update_expression, expression_attribute_names,
                        expression_attribute_values)
        else:
            item.update_with_attribute_updates(attribute_updates)
        return item
    def delete_item(self, table_name, keys):
        """Delete an item by key; returns None when the table is unknown."""
        table = self.get_table(table_name)
        if not table:
            return None
        hash_key, range_key = self.get_keys_value(table, keys)
        return table.delete_item(hash_key, range_key)
    def update_ttl(self, table_name, ttl_spec):
        """Enable/disable TTL on the table from a TimeToLiveSpecification dict.

        Raises JsonRESTError for an unknown table or an incomplete spec.
        """
        table = self.tables.get(table_name)
        if table is None:
            raise JsonRESTError('ResourceNotFound', 'Table not found')
        if 'Enabled' not in ttl_spec or 'AttributeName' not in ttl_spec:
            raise JsonRESTError('InvalidParameterValue',
                                'TimeToLiveSpecification does not contain Enabled and AttributeName')
        if ttl_spec['Enabled']:
            table.ttl['TimeToLiveStatus'] = 'ENABLED'
        else:
            table.ttl['TimeToLiveStatus'] = 'DISABLED'
        table.ttl['AttributeName'] = ttl_spec['AttributeName']
    def describe_ttl(self, table_name):
        """Return the table's TTL description dict.

        Raises JsonRESTError for an unknown table.
        """
        table = self.tables.get(table_name)
        if table is None:
            raise JsonRESTError('ResourceNotFound', 'Table not found')
        return table.ttl
# One backend instance per region DynamoDB is available in; requests are
# dispatched to the backend matching the request's region.
available_regions = boto3.session.Session().get_available_regions("dynamodb")
dynamodb_backends = {region: DynamoDBBackend(region_name=region) for region in available_regions}
|
PypiClean
|
/dngsmgmt-0.0.1a3.tar.gz/dngsmgmt-0.0.1a3/README.md
|
dngsmgmt
-----------
dngsmgmt is a Django app to manage NGS projects
Quick start
-----------
1. Add "dngsmgmt" to your INSTALLED_APPS setting like this::
INSTALLED_APPS = [
...
'dngsmgmt',
]
2. Include the dngsmgmt URLconf in your project urls.py like this::
url(r'^dngsmgmt/', include((dngsmgmt.urls, 'dngsmgmt'), namespace='dngsmgmt')),
3. Run `python manage.py makemigrations dngsmgmt` to create the dngsmgmt migrations.
4. Run `python manage.py migrate` to create the dngsmgmt tables.
|
PypiClean
|
/pyVME-1.0.1.tar.gz/pyVME-1.0.1/README.md
|
# pyVME
This package uses exposed cpp functions using [```pybind11```](https://pybind11.readthedocs.io/en/stable/index.html) to use the VMEbus (Versa Module Eurocard-bus) to interact with FPGAs.
The shared object used for this was compiled on a 64bit Linux machine and supports no other platforms.
It is intended for the use at ELSA (University of Bonn) and can be used for many elegant implementations of different tools.
The main functionality comes from a shared object (.so) that ships as a binary blob within this package.
# Table of Contents
1. [Requirements](#Requirements)
2. [Installation](#Installation)
3. [Building from source](#Building-from-source)
## Requirements:
This package needs to be run on a Linux 64bit machine with python >= 3.9 installed
## Installation
This package is available via [```pypi```](https://pypi.org) and can be simply installed by running:
pip install pyVME
## Features
After installation (e.g. pip install pyVME) you can import this package into your projects via ```import pyVME```.
This gives you access to the three classes that come within this package:
1. ```pyVME.fpga(baseaddr)```: Allows you to instantiate one FPGA that is directly connected to the CPU your python program is running on
2. ```pyVME.server(int server_port)```: Allows you to run a server on a remote machine that is connected with one or more FPGAs. It will instantiate for each FPGA defined by the client a new instance.
3. ```pyVME.remoteFPGA(int baseaddr, string server_ip, int server_port)```: Allows you to connect to a running server and call functions of the remote FPGA instances.
Every class has the same set of functions that act differently in the background without the user having to change anything.
### Functions
The following functions are supported:
- ```readRegisters(int register)```, returns ```str```
- ```writeRegisters(int register, int value)```, returns ```str```
- ```load_fpga(string file_path)```, returns ```bool```
- ```load_fpga_xml(string file_path)```, returns ```bool```
- ```load_fpga_if_new(string file_path)```, returns ```bool```
- ```load_fpga_if_new_xml(string file_path)```, returns ```bool```
- ```load_fpga_from_flash(string file_path)```, returns ```bool```
- ```async_load_fpga_from_flash(string file_path)```, returns ```str```
- ```swap_bits(int inputbits)```, returns ```str```
- ```getBaseaddress()```, returns ```int```
- ```getModulePCIBaseAdress()```, returns ```int```
- ```getBoardID()```, returns ```int```
- ```getBoardType()```, returns ```int```
- ```getFirmwareVersion()```, returns ```int```
- ```getFirmwareType()```, returns ```int```
- ```getInterfaceVersion()```, returns ```int```
- ```getMezzanineType()```, returns ```int```
- ```getFPGADone()```, returns ```int```
- ```wait_on_fpga_done()```, returns ```str```
### Examples
#### Server:
```
import pyVME as vme
server = vme.server(port=5555)
server.run()
```
#### remote FPGA:
```
import pyVME as vme
fpga = vme.remoteFPGA(baseaddr=0xAB000000, server_ip='remote_ip/domain', server_port=5555)
```
#### local FPGA:
```
import pyVME as vme
fpga = vme.fpga(baseaddr=0xAB000000)
```
### Source Code
This project is only partially open source because it comes with a binary blob in form of a shared object (the source code for the ```.so``` is accessable for members of the HISKP only on the [HISKP Gitlab](https://agthoma.hiskp.uni-bonn.de/gitlab/CB/daq/daq-tr/-/tree/master/utilities/pyVME)).
The source code can be found at [pyVME](https://github.com/dschuechter/pyVME).
## Building from source
To build this package you need to have the build package installed:
```
pip install build
```
No other packages are required. You can simply build this package by running:
```
python3 -m build
```
in the root directory of this repository.
It will automatically generate a ```dist```folder with the contents ```pyVME-X.X.X-py3-none-any.whl``` and ```pyVME-X.X.X.tar.gz```.
You can install the build package by running
```
pip install ./dist/pyVME-X.X.X-py3-none-any.whl
```
or
```
pip install ./dist/pyVME-X.X.X.tar.gz
```
|
PypiClean
|
/GSAS-II-WONDER_linux-1.0.1.tar.gz/GSAS-II-WONDER_linux-1.0.1/GSAS-II-WONDER/FormFactors.py
|
#
# The atomic scattering factor is calculated using the method developed by D. Waasmaier & A. Kirfel
#
# New Analytical Scattering-Factor Functions for Free Atoms
# and Ions
# D. Waasmaier & A. Kirfel
# Acta Cryst. (1995). A51, 416-431
#
# f0, the non-dispersive part of the atomic scattering factor, is a
# function of the selected element and of sin(theta)/lambda, where
# lambda is the photon wavelength and theta is the incident angle.
# This function can be approximated by a function:
#
# f0[k] = c + [SUM a_i*EXP(-b_i*(k^2)) ]
# i=1,5
#
# where k = sin(theta) / lambda and c, a_i and b_i
# are the coefficients tabulated in this file (in columns:
# a1 a2 a3 a4 a5 c b1 b2 b3 b4 b5
#
FFac5term = {
'H' :[ 0.413048, 0.294953, 0.187491, 0.080701, 0.023736, 0.000049, 15.569946, 32.398468, 5.711404, 61.889874, 1.334118],
'He':[ 0.732354, 0.753896, 0.283819, 0.190003, 0.039139, 0.000487, 11.553918, 4.595831, 1.546299, 26.463964, 0.377523],
'Li':[ 0.974637, 0.158472, 0.811855, 0.262416, 0.790108, 0.002542, 4.334946, 0.342451, 97.102966, 201.363831, 1.409234],
'Be':[ 1.533712, 0.638283, 0.601052, 0.106139, 1.118414, 0.002511, 42.662079, 0.595420, 99.106499, 0.151340, 1.843093],
'B' :[ 2.085185, 1.064580, 1.062788, 0.140515, 0.641784, 0.003823, 23.494068, 1.137894, 61.238976, 0.114886, 0.399036],
'C' :[ 2.657506, 1.078079, 1.490909, -4.241070, 0.713791, 4.297983, 14.780758, 0.776775, 42.086842, -0.000294, 0.239535],
'N' :[ 11.893780, 3.277479, 1.858092, 0.858927, 0.912985, -11.80490, 0.000158, 10.232723, 30.344690, 0.656065, 0.217287],
'O' :[ 2.960427, 2.508818, 0.637853, 0.722838, 1.142756, 0.027014, 14.182259, 5.936858, 0.112726, 34.958481, 0.390240],
'F' :[ 3.511943, 2.772244, 0.678385, 0.915159, 1.089261, 0.032557, 10.687859, 4.380466, 0.093982, 27.255203, 0.313066],
'Ne':[ 4.183749, 2.905726, 0.520513, 1.135641, 1.228065, 0.025576, 8.175457, 3.252536, 0.063295, 21.813910, 0.224952],
'Na':[ 4.910127, 3.081783, 1.262067, 1.098938, 0.560991, 0.079712, 3.281434, 9.119178, 0.102763, 132.013947, 0.405878],
'Mg':[ 4.708971, 1.194814, 1.558157, 1.170413, 3.239403, 0.126842, 4.875207,108.506081, 0.111516, 48.292408, 1.928171],
'Al':[ 4.730796, 2.313951, 1.541980, 1.117564, 3.154754, 0.139509, 3.628931, 43.051167, 0.095960, 108.932388, 1.555918],
'Si':[ 5.275329, 3.191038, 1.511514, 1.356849, 2.519114, 0.145073, 2.631338, 33.730728, 0.081119, 86.288643, 1.170087],
'P' :[ 1.950541, 4.146930, 1.494560, 1.522042, 5.729711, 0.155233, 0.908139, 27.044952, 0.071280, 67.520187, 1.981173],
'S' :[ 6.372157, 5.154568, 1.473732, 1.635073, 1.209372, 0.154722, 1.514347, 22.092527, 0.061373, 55.445175, 0.646925],
'Cl':[ 1.446071, 6.870609, 6.151801, 1.750347, 0.634168, 0.146773, 0.052357, 1.193165, 18.343416, 46.398396, 0.401005],
'Ar':[ 7.188004, 6.638454, 0.454180, 1.929593, 1.523654, 0.265954, 0.956221, 15.339877, 15.339862, 39.043823, 0.062409],
'K' :[ 8.163991, 7.146945, 1.070140, 0.877316, 1.486434, 0.253614, 12.816323, 0.808945,210.327011, 39.597652, 0.052821],
'Ca':[ 8.593655, 1.477324, 1.436254, 1.182839, 7.113258, 0.196255, 10.460644, 0.041891, 81.390381, 169.847839, 0.688098],
'Sc':[ 1.476566, 1.487278, 1.600187, 9.177463, 7.099750, 0.157765, 53.131023, 0.035325,137.319489, 9.098031, 0.602102],
'Ti':[ 9.818524, 1.522646, 1.703101, 1.768774, 7.082555, 0.102473, 8.001879, 0.029763, 39.885422, 120.157997, 0.532405],
'V' :[ 10.473575, 1.547881, 1.986381, 1.865616, 7.056250, 0.067744, 7.081940, 0.026040, 31.909672, 108.022842, 0.474882],
'Cr':[ 11.007069, 1.555477, 2.985293, 1.347855, 7.034779, 0.065510, 6.366281, 0.023987, 23.244839, 105.774498, 0.429369],
'Mn':[ 11.709542, 1.733414, 2.673141, 2.023368, 7.003180, -0.147293, 5.597120, 0.017800, 21.788420, 89.517914, 0.383054],
'Fe':[ 12.311098, 1.876623, 3.066177, 2.070451, 6.975185, -0.304931, 5.009415, 0.014461, 18.743040, 82.767876, 0.346506],
'Co':[ 12.914510, 2.481908, 3.466894, 2.106351, 6.960892, -0.936572, 4.507138, 0.009126, 16.438129, 76.987320, 0.314418],
'Ni':[ 13.521865, 6.947285, 3.866028, 2.135900, 4.284731, -2.762697, 4.077277, 0.286763, 14.622634, 71.966080, 0.004437],
'Cu':[ 14.014192, 4.784577, 5.056806, 1.457971, 6.932996, -3.254477, 3.738280, 0.003744, 13.034982, 72.554794, 0.265666],
'Zn':[ 14.741002, 6.907748, 4.642337, 2.191766, 38.424042, -36.915829, 3.388232, 0.243315, 11.903689, 63.312130, 0.000397],
'Ga':[ 15.758946, 6.841123, 4.121016, 2.714681, 2.395246, -0.847395, 3.121754, 0.226057, 12.482196, 66.203621, 0.007238],
'Ge':[ 16.540613, 1.567900, 3.727829, 3.345098, 6.785079, 0.018726, 2.866618, 0.012198, 13.432163, 58.866047, 0.210974],
'As':[ 17.025642, 4.503441, 3.715904, 3.937200, 6.790175, -2.984117, 2.597739, 0.003012, 14.272119, 50.437996, 0.193015],
'Se':[ 17.354071, 4.653248, 4.259489, 4.136455, 6.749163, -3.160982, 2.349787, 0.002550, 15.579460, 45.181202, 0.177432],
'Br':[ 17.550570, 5.411882, 3.937180, 3.880645, 6.707793, -2.492088, 2.119226, 16.557184, 0.002481, 42.164009, 0.162121],
'Kr':[ 17.655279, 6.848105, 4.171004, 3.446760, 6.685200, -2.810592, 1.908231, 16.606236, 0.001598, 39.917473, 0.146896],
'Rb':[ 8.123134, 2.138042, 6.761702, 1.156051, 17.679546, 1.139548, 15.142385, 33.542667, 0.129372, 224.132507, 1.713368],
'Sr':[ 17.730219, 9.795867, 6.099763, 2.620025, 0.600053, 1.140251, 1.563060, 14.310868, 0.120574, 135.771317, 0.120574],
'Y' :[ 17.792040, 10.253252, 5.714949, 3.170516, 0.918251, 1.131787, 1.429691, 13.132816, 0.112173, 108.197029, 0.112173],
'Zr':[ 17.859772, 10.911038, 5.821115, 3.512513, 0.746965, 1.124859, 1.310692, 12.319285, 0.104353, 91.777542, 0.104353],
'Nb':[ 17.958399, 12.063054, 5.007015, 3.287667, 1.531019, 1.123452, 1.211590, 12.246687, 0.098615, 75.011948, 0.098615],
'Mo':[ 6.236218, 17.987711, 12.973127, 3.451426, 0.210899, 1.108770, 0.090780, 1.108310, 11.468720, 66.684151, 0.090780],
'Tc':[ 17.840963, 3.428236, 1.373012, 12.947364, 6.335469, 1.074784, 1.005729, 41.901382,119.320541, 9.781542, 0.083391],
'Ru':[ 6.271624, 17.906738, 14.123269, 3.746008, 0.908235, 1.043992, 0.077040, 0.928222, 9.555345, 35.860680, 123.552246],
'Rh':[ 6.216648, 17.919739, 3.854252, 0.840326, 15.173498, 0.995452, 0.070789, 0.856121, 33.889484, 121.686691, 9.029517],
'Pd':[ 6.121511, 4.784063, 16.631683, 4.318258, 13.246773, 0.883099, 0.062549, 0.784031, 8.751391, 34.489983, 0.784031],
'Ag':[ 6.073874, 17.155437, 4.173344, 0.852238, 17.988686, 0.756603, 0.055333, 7.896512, 28.443739, 110.376106, 0.716809],
'Cd':[ 6.080986, 18.019468, 4.018197, 1.303510, 17.974669, 0.603504, 0.048990, 7.273646, 29.119284, 95.831207, 0.661231],
'In':[ 6.196477, 18.816183, 4.050479, 1.638929, 17.962912, 0.333097, 0.042072, 6.695665, 31.009790, 103.284348, 0.610714],
'Sn':[ 19.325171, 6.281571, 4.498866, 1.856934, 17.917318, 0.119024, 6.118104, 0.036915, 32.529045, 95.037186, 0.565651],
'Sb':[ 5.394956, 6.549570, 19.650681, 1.827820, 17.867832, -0.290506, 33.326523, 0.030974, 5.564929, 87.130966, 0.523992],
'Te':[ 6.660302, 6.940756, 19.847015, 1.557175, 17.802427, -0.806668, 33.031654, 0.025750, 5.065547, 84.101616, 0.487660],
'I' :[ 19.884502, 6.736593, 8.110516, 1.170953, 17.548716, -0.448811, 4.628591, 0.027754, 31.849096, 84.406387, 0.463550],
'Xe':[ 19.978920, 11.774945, 9.332182, 1.244749, 17.737501, -6.065902, 4.143356, 0.010142, 28.796200, 75.280685, 0.413616],
'Cs':[ 17.418674, 8.314444, 10.323193, 1.383834, 19.876251, -2.322802, 0.399828, 0.016872, 25.605827, 233.339676, 3.826915],
'Ba':[ 19.747343, 17.368477, 10.465718, 2.592602, 11.003653, -5.183497, 3.481823, 0.371224, 21.226641, 173.834274, 0.010719],
'La':[ 19.966019, 27.329655, 11.018425, 3.086696, 17.335455, -21.745489, 3.197408, 0.003446, 19.955492, 141.381973, 0.341817],
'Ce':[ 17.355122, 43.988499, 20.546650, 3.130670, 11.353665, -38.386017, 0.328369, 0.002047, 3.088196, 134.907654, 18.832960],
'Pr':[ 21.551311, 17.161730, 11.903859, 2.679103, 9.564197, -3.871068, 2.995675, 0.312491, 17.716705, 152.192825, 0.010468],
'Nd':[ 17.331244, 62.783924, 12.160097, 2.663483, 22.239950, -57.189842, 0.300269, 0.001320, 17.026001, 148.748993, 2.910268],
'Pm':[ 17.286388, 51.560162, 12.478557, 2.675515, 22.960947, -45.973682, 0.286620, 0.001550, 16.223755, 143.984512, 2.796480],
'Sm':[ 23.700363, 23.072214, 12.777782, 2.684217, 17.204367, -17.452166, 2.689539, 0.003491, 15.495437, 139.862473, 0.274536],
'Eu':[ 17.186195, 37.156837, 13.103387, 2.707246, 24.419271, -31.586687, 0.261678, 0.001995, 14.787360, 134.816299, 2.581883],
'Gd':[ 24.898117, 17.104952, 13.222581, 3.266152, 48.995213, -43.505684, 2.435028, 0.246961, 13.996325, 110.863091, 0.001383],
'Tb':[ 25.910013, 32.344139, 13.765117, 2.751404, 17.064405, -26.851971, 2.373912, 0.002034, 13.481969, 125.836510, 0.236916],
'Dy':[ 26.671785, 88.687576, 14.065445, 2.768497, 17.067781, -83.279831, 2.282593, 0.000665, 12.920230, 121.937187, 0.225531],
'Ho':[ 27.150190, 16.999819, 14.059334, 3.386979, 46.546471, -41.165253, 2.169660, 0.215414, 12.213148, 100.506783, 0.001211],
'Er':[ 28.174887, 82.493271, 14.624002, 2.802756, 17.018515, -77.135223, 2.120995, 0.000640, 11.915256, 114.529938, 0.207519],
'Tm':[ 28.925894, 76.173798, 14.904704, 2.814812, 16.998117, -70.839813, 2.046203, 0.000656, 11.465375, 111.411980, 0.199376],
'Yb':[ 29.676760, 65.624069, 15.160854, 2.830288, 16.997850, -60.313812, 1.977630, 0.000720, 11.044622, 108.139153, 0.192110],
'Lu':[ 30.122866, 15.099346, 56.314899, 3.540980, 16.943729, -51.049416, 1.883090, 10.342764, 0.000780, 89.559250, 0.183849],
'Hf':[ 30.617033, 15.145351, 54.933548, 4.096253, 16.896156, -49.719837, 1.795613, 9.934469, 0.000739, 76.189705, 0.175914],
'Ta':[ 31.066359, 15.341823, 49.278297, 4.577665, 16.828321, -44.119026, 1.708732, 9.618455, 0.000760, 66.346199, 0.168002],
'W' :[ 31.507900, 15.682498, 37.960129, 4.885509, 16.792112, -32.864574, 1.629485, 9.446448, 0.000898, 59.980675, 0.160798],
'Re':[ 31.888456, 16.117104, 42.390297, 5.211669, 16.767591, -37.412682, 1.549238, 9.233474, 0.000689, 54.516373, 0.152815],
'Os':[ 32.210297, 16.678440, 48.559906, 5.455839, 16.735533, -43.677956, 1.473531, 9.049695, 0.000519, 50.210201, 0.145771],
'Ir':[ 32.004436, 1.975454, 17.070105, 15.939454, 5.990003, 4.018893, 1.353767, 81.014175, 0.128093, 7.661196, 26.659403],
'Pt':[ 31.273891, 18.445440, 17.063745, 5.555933, 1.575270, 4.050394, 1.316992, 8.797154, 0.124741, 40.177994, 1.316997],
'Au':[ 16.777390, 19.317156, 32.979683, 5.595453, 10.576854, -6.279078, 0.122737, 8.621570, 1.256902, 38.008820, 0.000601],
'Hg':[ 16.839890, 20.023823, 28.428564, 5.881564, 4.714706, 4.076478, 0.115905, 8.256927, 1.195250, 39.247227, 1.195250],
'Tl':[ 16.630795, 19.386616, 32.808571, 1.747191, 6.356862, 4.066939, 0.110704, 7.181401, 1.119730, 90.660263, 26.014978],
'Pb':[ 16.419567, 32.738590, 6.530247, 2.342742, 19.916475, 4.049824, 0.105499, 1.055049, 25.025890, 80.906593, 6.664449],
'Bi':[ 16.282274, 32.725136, 6.678302, 2.694750, 20.576559, 4.040914, 0.101180, 1.002287, 25.714146, 77.057549, 6.291882],
'Po':[ 16.289164, 32.807171, 21.095163, 2.505901, 7.254589, 4.046556, 0.098121, 0.966265, 6.046622, 76.598068, 28.096128],
'At':[ 16.011461, 32.615547, 8.113899, 2.884082, 21.377867, 3.995684, 0.092639, 0.904416, 26.543257, 68.372963, 5.499512],
'Rn':[ 16.070229, 32.641106, 21.489658, 2.299218, 9.480184, 4.020977, 0.090437, 0.876409, 5.239687, 69.188477, 27.632641],
'Fr':[ 16.007385, 32.663830, 21.594351, 1.598497, 11.121192, 4.003472, 0.087031, 0.840187, 4.954467, 199.805801, 26.905106],
'Ra':[ 32.563690, 21.396671, 11.298093, 2.834688, 15.914965, 3.981773, 0.801980, 4.590666, 22.758972, 160.404388, 0.083544],
'Ac':[ 15.914053, 32.535042, 21.553976, 11.433394, 3.612409, 3.939212, 0.080511, 0.770669, 4.352206, 21.381622, 130.500748],
'Th':[ 15.784024, 32.454899, 21.849222, 4.239077, 11.736191, 3.922533, 0.077067, 0.735137, 4.097976, 109.464111, 20.512138],
'Pa':[ 32.740208, 21.973675, 12.957398, 3.683832, 15.744058, 3.886066, 0.709545, 4.050881, 19.231543, 117.255005, 0.074040],
'U' :[ 15.679275, 32.824306, 13.660459, 3.687261, 22.279434, 3.854444, 0.071206, 0.681177, 18.236156, 112.500038, 3.930325],
'Np':[ 32.999901, 22.638077, 14.219973, 3.672950, 15.683245, 3.769391, 0.657086, 3.854918, 17.435474, 109.464485, 0.068033],
'Pu':[ 33.281178, 23.148544, 15.153755, 3.031492, 15.704215, 3.664200, 0.634999, 3.856168, 16.849735, 121.292038, 0.064857],
'Am':[ 33.435162, 23.657259, 15.576339, 3.027023, 15.746100, 3.541160, 0.612785, 3.792942, 16.195778, 117.757004, 0.061755],
'Cm':[ 15.804837, 33.480801, 24.150198, 3.655563, 15.499866, 3.390840, 0.058619, 0.590160, 3.674720, 100.736191, 15.408296],
'Bk':[ 15.889072, 33.625286, 24.710381, 3.707139, 15.839268, 3.213169, 0.055503, 0.569571, 3.615472, 97.694786, 14.754303],
'Cf':[ 33.794075, 25.467693, 16.048487, 3.657525, 16.008982, 3.005326, 0.550447, 3.581973, 14.357388, 96.064972, 0.052450],
'H1- ':[ 0.702260, 0.763666, 0.248678, 0.261323, 0.023017, 0.000425, 23.945604, 74.897919, 6.773289, 233.583450, 1.337],
'Li1+':[ 0.432724, 0.549257, 0.376575, -0.336481, 0.976060, 0.001764, 0.260367, 1.042836, 7.885294, 0.260368, 3.042],
'Be2+':[ 3.055430, -2.372617, 1.044914, 0.544233, 0.381737, -0.653773, 0.001226, 0.001227, 1.542106, 0.456279, 4.047],
'Cval':[ 1.258489, 0.728215, 1.119856, 2.168133, 0.705239, 0.019722, 10.683769, 0.208177, 0.836097, 24.603704, 58.954],
'O1- ':[ 3.106934, 3.235142, 1.148886, 0.783981, 0.676953, 0.046136, 19.868080, 6.960252, 0.170043, 65.693512, 0.630],
'O2- ':[ 3.990247, 2.300563, 0.607200, 1.907882, 1.167080, 0.025429, 16.639956, 5.636819, 0.108493, 47.299709, 0.379],
'F1- ':[ 0.457649, 3.841561, 1.432771, 0.801876, 3.395041, 0.069525, 0.917243, 5.507803, 0.164955, 51.076206, 15.821],
'Na1+':[ 3.148690, 4.073989, 0.767888, 0.995612, 0.968249, 0.045300, 2.594987, 6.046925, 0.070139, 14.122657, 0.217],
'Mg2+':[ 3.062918, 4.135106, 0.853742, 1.036792, 0.852520, 0.058851, 2.015803, 4.417941, 0.065307, 9.669710, 0.187],
'Al3+':[ 4.132015, 0.912049, 1.102425, 0.614876, 3.219136, 0.019397, 3.528641, 7.378344, 0.133708, 0.039065, 1.644],
'Siva':[ 2.879033, 3.072960, 1.515981, 1.390030, 4.995051, 0.146030, 1.239713, 38.706276, 0.081481, 93.616333, 2.770],
'Si4+':[ 3.676722, 3.828496, 1.258033, 0.419024, 0.720421, 0.097266, 1.446851, 3.013144, 0.064397, 0.206254, 5.970],
'Cl1-':[ 1.061802, 7.139886, 6.524271, 2.355626, 35.829403, -34.916603, 0.144727, 1.171795, 19.467655, 60.320301, 0.000],
'K1+ ':[ -17.609339, 1.494873, 7.150305, 10.899569, 15.808228, 0.257164, 18.840979, 0.053453, 0.812940, 22.264105, 14.351],
'Ca2+':[ 8.501441, 12.880483, 9.765095, 7.156669, 0.711160, -21.013187, 10.525848, -0.004033, 0.010692, 0.684443, 27.231],
'Sc3+':[ 7.104348, 1.511488,-53.669773, 38.404816, 24.532240, 0.118642, 0.601957, 0.033386, 12.572138, 10.859736, 14.125],
'Ti2+':[ 7.040119, 1.496285, 9.657304, 0.006534, 1.649561, 0.150362, 0.537072, 0.031914, 8.009958, 201.800293, 24.039],
'Ti3+':[ 36.587933, 7.230255, -9.086077, 2.084594, 17.294008, -35.111282, 0.000681, 0.522262, 5.262317, 15.881716, 6.149],
'Ti4+':[ 45.355537, 7.092900, 7.483858,-43.498817, 1.678915, -0.110628, 9.252186, 0.523046, 13.082852, 10.193876, 0.023],
'V2+ ':[ 7.754356, 2.064100, 2.576998, 2.011404, 7.126177, -0.533379, 7.066315, 0.014993, 7.066308, 22.055786, 0.467],
'V3+ ':[ 9.958480, 1.596350, 1.483442,-10.846044, 17.332867, 0.474921, 6.763041, 0.056895, 17.750029, 0.328826, 0.388],
'V5+ ':[ 15.575018, 8.448095, 1.612040, -9.721855, 1.534029, 0.552676, 0.682708, 5.566640, 10.527077, 0.907961, 0.066],
'Cr2+':[ 10.598877, 1.565858, 2.728280, 0.098064, 6.959321, 0.049870, 6.151846, 0.023519, 17.432816, 54.002388, 0.426],
'Cr3+':[ 7.989310, 1.765079, 2.627125, 1.829380, 6.980908, -0.192123, 6.068867, 0.018342, 6.068887, 16.309284, 0.420],
'Mn2+':[ 11.287712, 26.042414, 3.058096, 0.090258, 7.088306, -24.566132, 5.506225, 0.000774, 16.158575, 54.766354, 0.375],
'Mn3+':[ 6.926972, 2.081342, 11.128379, 2.375107, -0.419287, -0.093713, 0.378315, 0.015054, 5.379957, 14.429586, 0.004],
'Mn4+':[ 12.409131, 7.466993, 1.809947,-12.138477, 10.780248, 0.672146, 0.300400, 0.112814, 12.520756, 0.168653, 5.173],
'Fe2+':[ 11.776765, 11.165097, 3.533495, 0.165345, 7.036932, -9.676919, 4.912232, 0.001748, 14.166556, 42.381958, 0.341],
'Fe3+':[ 9.721638, 63.403847, 2.141347, 2.629274, 7.033846, -61.930725, 4.869297, 0.000293, 4.867602, 13.539076, 0.338],
'Co2+':[ 6.993840, 26.285812, 12.254289, 0.246114, 4.017407, -24.796852, 0.310779, 0.000684, 4.400528, 35.741447, 12.536],
'Co3+':[ 6.861739, 2.678570, 12.281889, 3.501741, -0.179384, -1.147345, 0.309794, 0.008142, 4.331703, 11.914167, 11.914],
'Ni2+':[ 12.519017, 37.832058, 4.387257, 0.661552, 6.949072, -36.344471, 3.933053, 0.000442, 10.449184, 23.860998, 0.283],
'Ni3+':[ 13.579366, 1.902844, 12.859268, 3.811005, -6.838595, -0.317618, 0.313140, 0.012621, 3.906407, 10.894311, 0.344],
'Cu1+':[ 12.960763, 16.342150, 1.110102, 5.520682, 6.915452, -14.849320, 3.576010, 0.000975, 29.523218, 10.114283, 0.261],
'Cu2+':[ 11.895569, 16.344978, 5.799817, 1.048804, 6.789088, -14.878383, 3.378519, 0.000924, 8.133653, 20.526524, 0.254],
'Zn2+':[ 13.340772, 10.428857, 5.544489, 0.762295, 6.869172, -8.945248, 3.215913, 0.001413, 8.542680, 21.891756, 0.239],
'Ga3+':[ 13.123875, 35.288189, 6.126979, 0.611551, 6.724807, -33.875122, 2.809960, 0.000323, 6.831534, 16.784311, 0.212],
'Ge4+':[ 6.876636, 6.779091, 9.969591, 3.135857, 0.152389, 1.08654 , 2.025174, 0.176650, 3.573822, 7.685848, 16.677],
'Br1-':[ 17.714310, 6.466926, 6.947385, 4.402674, -0.697279, 1.152674, 2.122554, 19.050768, 0.152708, 58.690361, 58.690],
'Rb1+':[ 17.684320, 7.761588, 6.680874, 2.668883, 0.070974, 1.133263, 1.710209, 14.919863, 0.128542, 31.654478, 0.128],
'Sr2+':[ 17.694973, 1.275762, 6.154252, 9.234786, 0.515995, 1.125309, 1.550888, 30.133041, 0.118774, 13.821799, 0.118],
'Y3+ ':[ 46.660366, 10.369686, 4.623042,-62.170834, 17.471146, 19.023842, -0.019971, 13.180257, 0.176398, -0.016727, 1.467],
'Zr4+':[ 6.802956, 17.699253, 10.650647, -0.248108, 0.250338, 0.827902, 0.096228, 1.296127, 11.240715, -0.219259, -0.219],
'Nb3+':[ 17.714323, 1.675213, 7.483963, 8.322464, 11.143573, -8.339573, 1.172419, 30.102791, 0.080255, -0.002983, 10.456],
'Nb5+':[ 17.580206, 7.633277, 10.793497, 0.180884, 67.837921, -68.024780, 1.165852, 0.078558, 9.507652, 31.621656, -0.000],
'Mo3+':[ 7.447050, 17.778122, 11.886068, 1.997905, 1.789626, -1.898764, 0.072000, 1.073145, 9.834720, 28.221746, -0.011],
'Mo5+':[ 7.929879, 17.667669, 11.515987, 0.500402, 77.444084, -78.056595, 0.068856, 1.068064, 9.046229, 26.558945, -0.000],
'Mo6+':[ 34.757683, 9.653037, 6.584769,-18.628115, 2.490594, 1.141916, 1.301770, 7.123843, 0.094097, 1.617443, 12.335],
'Ru3+':[ 17.894758, 13.579529, 10.729251, 2.474095, 48.227997, -51.905243, 0.902827, 8.740579, 0.045125, 24.764954, -0.001],
'Ru4+':[ 17.845776, 13.455084, 10.229087, 1.653524, 14.059795, -17.241762, 0.901070, 8.482392, 0.045972, 23.015272, -0.004],
'Rh3+':[ 17.758621, 14.569813, 5.298320, 2.533579, 0.879753, 0.960843, 0.841779, 8.319533, 0.069050, 23.709131, 0.069],
'Rh4+':[ 17.716188, 14.446654, 5.185801, 1.703448, 0.989992, 0.959941, 0.840572, 8.100647, 0.068995, 22.357307, 0.068],
'Pd2+':[ 6.122282, 15.651012, 3.513508, 9.060790, 8.771199, 0.879336, 0.062424, 8.018296, 24.784275, 0.776457, 0.776],
'Pd4+':[ 6.152421,-96.069023,-31.622141,-81.578255, -17.801403, -0.915874, -0.063951,-11.090354,-13.466152, -9.758302, -0.783],
'Ag1+':[ 6.091192, 4.019526, 16.948174, 4.258638, 13.889437, 0.785127, 0.056305, 0.719340, 7.758938, 27.368349, 0.719],
'Ag2+':[ 6.401808, 48.699802, 4.799859,-32.332523, 16.356710, 1.068247, 0.068167, 0.942270, 20.639496, 1.100365, 6.883],
'Cd2+':[ 6.093711, 43.909691, 17.041306,-39.675117, 17.958918, 0.664795, 0.050624, 8.654143, 15.621396, 11.082067, 0.667],
'In3+':[ 6.206277, 18.497746, 3.078131, 10.524613, 7.401234, 0.293677, 0.041357, 6.605563, 18.792250, 0.608082, 0.608],
'Sn2+':[ 6.353672, 4.770377, 14.672025, 4.235959, 18.002131, -0.042519, 0.034720, 6.167891, 6.167879, 29.006456, 0.561],
'Sn4+':[ 15.445732, 6.420892, 4.562980, 1.713385, 18.033537, -0.172219, 6.280898, 0.033144, 6.280899, 17.983601, 0.557],
'Sb3+':[ 10.189171, 57.461918, 19.356573, 4.862206, -45.394096, 1.516108, 0.089485, 0.375256, 5.357987, 22.153736, 0.297],
'Sb5+':[ 17.920622, 6.647932, 12.724075, 1.555545, 7.600591, -0.445371, 0.522315, 0.029487, 5.718210, 16.433775, 5.718],
'I1- ':[ 20.010330, 17.835524, 8.104130, 2.231118, 9.158548, -3.341004, 4.565931, 0.444266, 32.430672, 95.149040, 0.014],
'Cs1+':[ 19.939056, 24.967621, 10.375884, 0.454243, 17.660248, -19.394306, 3.770511, 0.004040, 25.311275, 76.537766, 0.384],
'Ba2+':[ 19.750200, 17.513683, 10.884892, 0.321585, 65.149834, -59.618172, 3.430748, 0.361590, 21.358307, 70.309402, 0.001],
'La3+':[ 19.688887, 17.345703, 11.356296, 0.099418, 82.358124, -76.846909, 3.146211, 0.339586, 18.753832, 90.345459, 0.001],
'Ce3+':[ 26.593231, 85.866432, -6.677695, 12.111847, 17.401903, -80.313423, 3.280381, 0.001012, 4.313575, 17.868504, 0.326],
'Ce4+':[ 17.457533, 25.659941, 11.691037, 19.695251, -16.994749, -3.515096, 0.311812, -0.003793, 16.568687, 2.886395, -0.008],
'Pr3+':[ 20.879841, 36.035797, 12.135341, 0.283103, 17.167803, -30.500784, 2.870897, 0.002364, 16.615236, 53.909359, 0.306],
'Pr4+':[ 17.496082, 21.538509, 20.403114, 12.062211, -7.492043, -9.016722, 0.294457, -0.002742, 2.772886, 15.804613, -0.013],
'Nd3+':[ 17.120077, 56.038139, 21.468307, 10.000671, 2.905866, -50.541992, 0.291295, 0.001421, 2.743681, 14.581367, 22.485],
'Pm3+':[ 22.221066, 17.068142, 12.805423, 0.435687, 52.238770, -46.767181, 2.635767, 0.277039, 14.927315, 45.768017, 0.001],
'Sm3+':[ 15.618565, 19.538092, 13.398946, -4.358811, 24.490461, -9.714854, 0.006001, 0.306379, 14.979594, 0.748825, 2.454],
'Eu2+':[ 23.899035, 31.657497, 12.955752, 1.700576, 16.992199, -26.204315, 2.467332, 0.002230, 13.625002, 35.089481, 0.253],
'Eu3+':[ 17.758327, 33.498665, 24.067188, 13.436883, -9.019134, -19.768026, 0.244474, -0.003901, 2.487526, 14.568011, -0.015],
'Gd3+':[ 24.344999, 16.945311, 13.866931, 0.481674, 93.506378, -88.147179, 2.333971, 0.239215, 12.982995, 43.876347, 0.000],
'Tb3+':[ 24.878252, 16.856016, 13.663937, 1.279671, 39.271294, -33.950317, 2.223301, 0.227290, 11.812528, 29.910065, 0.001],
'Dy3+':[ 16.864344, 90.383461, 13.675473, 1.687078, 25.540651, -85.150650, 0.216275, 0.000593, 11.121207, 26.250975, 2.135],
'Ho3+':[ 16.837524, 63.221336, 13.703766, 2.061602, 26.202621, -58.026505, 0.206873, 0.000796, 10.500283, 24.031883, 2.055],
'Er3+':[ 16.810127, 22.681061, 13.864114, 2.294506, 26.864477, -17.513460, 0.198293, 0.002126, 9.973341, 22.836388, 1.979],
'Tm3+':[ 16.787500, 15.350905, 14.182357, 2.299111, 27.573771, -10.192087, 0.190852, 0.003036, 9.602934, 22.526880, 1.912],
'Yb2+':[ 28.443794, 16.849527, 14.165081, 3.445311, 28.308853, -23.214935, 1.863896, 0.183811, 9.225469, 23.691355, 0.001],
'Yb3+':[ 28.191629, 16.828087, 14.167848, 2.744962, 23.171774, -18.103676, 1.842889, 0.182788, 9.045957, 20.799847, 0.001],
'Lu3+':[ 28.828693, 16.823227, 14.247617, 3.079559, 25.647667, -20.626528, 1.776641, 0.175560, 8.575531, 19.693701, 0.001],
'Hf4+':[ 29.267378, 16.792543, 14.785310, 2.184128, 23.791996, -18.820383, 1.697911, 0.168313, 8.190025, 18.277578, 0.001],
'Ta5+':[ 29.539469, 16.741854, 15.182070, 1.642916, 16.437447, -11.542459, 1.612934, 0.160460, 7.654408, 17.070732, 0.001],
'W6+ ':[ 29.729357, 17.247808, 15.184488, 1.154652, 0.739335, 3.945157, 1.501648, 0.140803, 6.880573, 14.299601, 14.299],
'Os4+':[ 17.113485, 15.792370, 23.342392, 4.090271, 7.671292, 3.988390, 0.131850, 7.288542, 1.389307, 19.629425, 1.389],
'Ir3+':[ 31.537575, 16.363338, 15.597141, 5.051404, 1.436935, 4.009459, 1.334144, 7.451918, 0.127514, 21.705648, 0.127],
'Ir4+':[ 30.391249, 16.146996, 17.019068, 4.458904, 0.975372, 4.006865, 1.328519, 7.181766, 0.127337, 19.060146, 1.328],
'Pt2+':[ 31.986849, 17.249048, 15.269374, 5.760234, 1.694079, 4.032512, 1.281143, 7.625512, 0.123571, 24.190826, 0.123],
'Pt4+':[ 41.932713, 16.339224, 17.653894, 6.012420, -12.036877, 4.094551, 1.111409, 6.466086, 0.128917, 16.954155, 0.778],
'Au1+':[ 32.124306, 16.716476, 16.814100, 7.311565, 0.993064, 4.040792, 1.216073, 7.165378, 0.118715, 20.442486, 53.095],
'Au3+':[ 31.704271, 17.545767, 16.819551, 5.522640, 0.361725, 4.042679, 1.215561, 7.220506, 0.118812, 20.050970, 1.215],
'Hg1+':[ 28.866837, 19.277540, 16.776051, 6.281459, 3.710289, 4.068430, 1.173967, 7.583842, 0.115351, 29.055994, 1.173],
'Hg2+':[ 32.411079, 18.690371, 16.711773, 9.974835, -3.847611, 4.052869, 1.162980, 7.329806, 0.114518, 22.009489, 22.009],
'Tl1+':[ 32.295044, 16.570049, 17.991013, 1.535355, 7.554591, 4.054030, 1.101544, 0.110020, 6.528559, 52.495068, 20.338],
'Tl3+':[ 32.525639, 19.139185, 17.100321, 5.891115, 12.599463, -9.256075, 1.094966, 6.900992, 0.103667, 18.489614, -0.001],
'Pb2+':[ 27.392647, 16.496822, 19.984501, 6.813923, 5.233910, 4.065623, 1.058874, 0.106305, 6.708123, 24.395554, 1.058],
'Pb4+':[ 32.505657, 20.014240, 14.645661, 5.029499, 1.760138, 4.044678, 1.047035, 6.670321, 0.105279, 16.525040, 0.105],
'Bi3+':[ 32.461437, 19.438683, 16.302486, 7.322662, 0.431704, 4.043703, 0.997930, 6.038867, 0.101338, 18.371586, 46.361],
'Bi5+':[ 16.734028, 20.580494, 9.452623, 61.155834, -34.041023, 4.113663, 0.105076, 4.773282, 11.762162, 1.211775, 1.619],
'Ra2+':[ 4.986228, 32.474945, 21.947443, 11.800013, 10.807292, 3.956572, 0.082597, 0.791468, 4.608034, 24.792431, 0.082],
'Ac3+':[ 15.584983, 32.022125, 21.456327, 0.757593, 12.341252, 3.838984, 0.077438, 0.739963, 4.040735, 47.525002, 19.406],
'Th4+':[ 15.515445, 32.090691, 13.996399, 12.918157, 7.635514, 3.831122, 0.074499, 0.711663, 3.871044, 18.596891, 3.871],
'U3+ ':[ 15.360309, 32.395657, 21.961290, 1.325894, 14.251453, 3.706622, 0.067815, 0.654643, 3.643409, 39.604965, 16.330],
'U4+ ':[ 15.355091, 32.235306, 0.557745, 14.396367, 21.751173, 3.705863, 0.067789, 0.652613, 42.354237, 15.908239, 3.553],
'U6+ ':[ 15.333844, 31.770849, 21.274414, 13.872636, 0.048519, 3.700591, 0.067644, 0.646384, 3.317894, 14.650250, 75.339],
'Np3+':[ 15.378152, 32.572132, 22.206125, 1.413295, 14.828381, 3.603370, 0.064613, 0.631420, 3.561936, 37.875511, 15.546],
'Np4+':[ 15.373926, 32.423019, 21.969994, 0.662078, 14.969350, 3.603039, 0.064597, 0.629658, 3.476389, 39.438942, 15.135],
'Np6+':[ 15.359986, 31.992825, 21.412458, 0.066574, 14.568174, 3.600942, 0.064528, 0.624505, 3.253441, 67.658318, 13.980],
'Pu3+':[ 15.356004, 32.769127, 22.680210, 1.351055, 15.416232, 3.428895, 0.060590, 0.604663, 3.491509, 37.260635, 14.981],
'Pu4+':[ 15.416219, 32.610569, 22.256662, 0.719495, 15.518152, 3.480408, 0.061456, 0.607938, 3.411848, 37.628792, 14.464],
'Pu6+':[ 15.436506, 32.289719, 14.726737, 15.012391, 7.024677, 3.502325, 0.061815, 0.606541, 3.245363, 13.616438, 3.245],
}
# Parametrization of incoherently scattered X-ray intensities vs sin(theta)/lambda.
#
# This file contains the tabulated coefficients for the calculation
# of the Compton cross section as a function of sin(theta)/lambda, where
# theta = scattering angle and lambda = photon wavelength.
# REFERENCE:
# Analytic Approximations to Incoherently Scattered X-Ray Intensities
# by H.H.M. Balyuzi (Acta Cryst. (1975). A31, 600)
#
# This is the abstract from the mentioned paper:
# The theoretically calculated incoherently scattered X-ray intensities of
# Cromer & Mann [J. Chem. Phys. (1967). 47, 1892-1893] and Cromer [J. Chem.
# Phys. (1969). 50, 4857-4859] for the elements 2 to 95 and 102 have been
# fitted to an analytic function. The coefficients of the fit are tabulated
# and the accuracy of the fitting is discussed. The accuracy is very good
# for sin(theta)/lambda <= 1.4 A^(-1)
#
# The analytical function is:
# Compton[s] = Z-[SUM a_i*EXP(-b_i*(s^2)) ]
# i=1,5
#
# where s = sin(theta)/lambda, and a_i and b_i are the tabulated coefficients.
# Coefficients for elements Cm-Cf simply copied from those for Am
# Column description: Z a1 a2 a3 a4 a5 b1 b2 b3 b4 b5
Compton = {
'H' :[ 1.0, 0.2623, 0.5094, 0.2034, 0.0249, 0.0000, 32.3717, 14.7084, 6.6884, 2.4843, 0.0000],
'He':[ 2.0, 0.5246, 1.0188, 0.4068, 0.0498, 0.0000, 32.3717, 14.7084, 6.6884, 2.4843, 0.0000],
'Li':[ 3.0, 0.0518, 0.9578, 0.7348, 1.0817, 0.1740, 533.922, 245.845, 10.1831, 4.4364, 1.5031],
'Be':[ 4.0, 0.4634, 1.5592, 0.7685, 1.0623, 0.1470, 185.856, 104.601, 4.8589, 2.1932, 0.7641],
'B' :[ 5.0, 0.9046, 1.9822, 0.2279, 1.4873, 0.3979, 104.961, 46.0191, 8.9873, 1.9674, 0.6778],
'C' :[ 6.0, 0.7568, 2.5511, 0.7051, 1.4605, 0.5263, 82.2385, 31.7282, 11.9471, 1.4637, 0.5150],
'N' :[ 7.0, 0.9070, 2.8972, 1.1659, 1.5526, 0.4769, 64.1555, 20.8507, 7.7576, 1.0335, 0.3516],
'O' :[ 8.0, 0.8847, 3.2189, 1.7990, 1.5538, 0.5434, 52.0063, 16.4487, 6.5958, 0.8143, 0.2815],
'F' :[ 9.0, 0.9756, 3.5101, 2.3561, 1.5896, 0.5883, 41.7194, 12.7747, 5.2945, 0.6470, 0.2254],
'Ne':[ 10.0, 1.1544, 3.8033, 2.8085, 1.6647, 0.5687, 34.2567, 9.76720, 4.0375, 0.5256, 0.1800],
'Na':[ 11.0, 1.0243, 2.0704, 5.3197, 1.5214, 1.0637, 293.411, 15.2373, 4.4697, 0.6246, 0.1922],
'Mg':[ 12.0, 2.0049, 1.9449, 5.4291, 1.6315, 0.9888, 178.983, 11.2433, 3.4272, 0.4907, 0.1542],
'Al':[ 13.0, 2.6790, 1.2336, 5.9358, 1.6622, 1.4835, 121.363, 16.0728, 3.2632, 0.6616, 0.1586],
'Si':[ 14.0, 2.1006, 2.2025, 5.3063, 2.5162, 1.8716, 116.957, 34.7760, 3.3215, 0.9933, 0.1553],
'P' :[ 15.0, 1.8936, 3.3782, 5.3752, 2.4942, 1.8564, 108.033, 26.6586, 2.6865, 0.7973, 0.1333],
'S' :[ 16.0, 2.0717, 4.2023, 5.6883, 2.2623, 1.7735, 86.7211, 21.4574, 2.1255, 0.5882, 0.1128],
'Cl':[ 17.0, 2.1221, 5.0905, 5.6885, 2.2809, 1.8164, 73.8395, 17.9756, 1.8037, 0.5162, 0.1008],
'Ar':[ 18.0, 2.1778, 5.9791, 5.6104, 2.3442, 1.8873, 65.6187, 14.4380, 1.5611, 0.4716, 0.0914],
'K' :[ 19.0, 1.0863, 2.8562, 5.6616, 6.7809, 2.6145, 425.474, 35.7249, 9.3261, 1.0206, 0.1036],
'Ca':[ 20.0, 2.0554, 2.7251, 5.9104, 6.7171, 2.5916, 289.862, 28.7190, 7.4788, 0.8644, 0.0920],
'Sc':[ 21.0, 2.1608, 2.9545, 6.5125, 6.7745, 2.5969, 251.387, 26.8528, 6.4238, 0.7616, 0.0831],
'Ti':[ 22.0, 2.1983, 3.2282, 7.0649, 6.8832, 2.6243, 226.968, 24.9306, 5.6213, 0.6865, 0.0758],
'V' :[ 23.0, 2.2399, 3.4348, 7.6412, 7.0270, 2.6558, 206.399, 22.9026, 4.9366, 0.6256, 0.0697],
'Cr':[ 24.0, 1.5893, 3.6692, 8.7249, 7.2913, 2.7226, 196.584, 25.2663, 4.5848, 0.5876, 0.0650],
'Mn':[ 25.0, 2.3156, 3.7420, 8.8241, 7.3811, 2.7355, 174.656, 19.5879, 3.8896, 0.5326, 0.0597],
'Fe':[ 26.0, 2.3141, 3.9729, 9.2624, 7.5083, 2.9404, 164.104, 18.2898, 3.5861, 0.5155, 0.0597],
'Co':[ 27.0, 2.3636, 4.4188, 9.7350, 7.6872, 2.7927, 151.337, 16.0770, 3.1445, 0.4553, 0.0514],
'Ni':[ 28.0, 2.4174, 4.7076, 10.2289, 7.8357, 2.8071, 139.963, 14.5797, 2.8142, 0.4203, 0.0476],
'Cu':[ 29.0, 1.7532, 4.8167, 11.2836, 8.2460, 2.8943, 127.251, 16.9194, 2.7458, 0.4076, 0.0454],
'Zn':[ 30.0, 2.4474, 5.1080, 11.4407, 8.1791, 2.8211, 125.083, 12.8443, 2.3122, 0.3605, 0.0410],
'Ga':[ 31.0, 3.0360, 5.1760, 11.6692, 8.2940, 2.8148, 104.577, 11.2803, 2.0663, 0.3314, 0.0380],
'Ge':[ 32.0, 3.4795, 5.2306, 11.9324, 8.5063, 2.8388, 87.5304, 10.8491, 1.9060, 0.3107, 0.0356],
'As':[ 33.0, 3.3385, 5.6229, 12.2449, 8.8668, 2.9138, 82.4879, 12.0723, 1.8317, 0.2996, 0.0341],
'Se':[ 34.0, 3.5333, 5.9644, 12.4403, 9.1212, 2.9289, 73.4400, 12.2369, 1.7151, 0.2824, 0.0318],
'Br':[ 35.0, 3.4898, 6.5641, 12.5318, 9.3877, 3.0170, 68.5438, 12.5940, 1.6235, 0.2712, 0.0308],
'Kr':[ 36.0, 3.0598, 7.6671, 12.5852, 9.6168, 3.0647, 70.8068, 12.6449, 1.5230, 0.2577, 0.0294],
'Rb':[ 37.0, 2.3770, 8.6605, 12.5729, 10.1507, 3.2083, 178.867, 13.9200, 1.5224, 0.2567, 0.0289],
'Sr':[ 38.0, 3.0740, 8.8344, 12.5145, 10.3129, 3.2433, 210.803, 12.3784, 1.4148, 0.2424, 0.0275],
'Y' :[ 39.0, 3.5086, 9.2574, 12.4297, 10.4962, 3.2840, 177.423, 11.0509, 1.3242, 0.2300, 0.0263],
'Zr':[ 40.0, 3.8436, 9.6980, 12.3352, 10.7459, 3.3493, 152.274, 10.0161, 1.2610, 0.2206, 0.0254],
'Nb':[ 41.0, 3.7519, 10.5738, 12.2251, 10.9745, 3.4425, 117.448, 8.9975, 1.2044, 0.2127, 0.0248],
'Mo':[ 42.0, 3.9567, 11.1228, 12.1674, 11.2328, 3.4857, 105.180, 8.0754, 1.1551, 0.2038, 0.0237],
'Tc':[ 43.0, 4.3317, 10.7065, 12.1316, 12.0687, 3.7296, 115.939, 7.9151, 1.2769, 0.2105, 0.0240],
'Ru':[ 44.0, 4.7308, 12.0616, 11.9367, 11.6021, 3.6225, 77.6118, 6.4667, 1.0431, 0.1877, 0.0223],
'Rh':[ 45.0, 4.8745, 12.2804, 12.0133, 12.0245, 3.7592, 71.5772, 6.1701, 1.0589, 0.1853, 0.0220],
'Pd':[ 46.0, 5.4847, 13.7150, 11.6608, 11.4709, 3.6545, 53.2508, 4.9329, 0.8575, 0.1655, 0.0206],
'Ag':[ 47.0, 5.3490, 13.4127, 11.9402, 12.3201, 3.9205, 58.9663, 5.0548, 0.9684, 0.1715, 0.0210],
'Cd':[ 48.0, 4.5224, 9.6282, 14.9509, 14.2066, 4.6563, 87.3897, 7.7119, 1.5820, 0.2036, 0.0234],
'In':[ 49.0, 4.7031, 8.2361, 16.4206, 14.6608, 4.9493, 89.2096, 8.9965, 1.7229, 0.2062, 0.0238],
'Sn':[ 50.0, 4.9677, 7.4873, 17.4268, 14.9229, 5.1678, 83.2133, 10.1130, 1.7605, 0.2042, 0.0237],
'Sb':[ 51.0, 4.6001, 7.4399, 18.3475, 15.1777, 5.4096, 62.5070, 12.5902, 1.7906, 0.2029, 0.0238],
'Te':[ 52.0, 4.5671, 7.7178, 18.8338, 15.2844, 5.5769, 79.7245, 13.8028, 1.7411, 0.1976, 0.0235],
'I' :[ 53.0, 4.3147, 8.3728, 19.1961, 15.3645, 5.7365, 78.6996, 14.9421, 1.6795, 0.1921, 0.0232],
'Xe':[ 54.0, 3.4467, 19.4724, 9.7473, 15.4210, 5.9034, 88.3050, 1.6131, 16.1669, 0.1869, 0.0229],
'Cs':[ 55.0, 2.5751, 19.7181, 11.0138, 15.5312, 6.1374, 224.598, 1.5722, 17.7908, 0.1845, 0.0229],
'Ba':[ 56.0, 3.2477, 19.7824, 11.2142, 15.4790, 6.2602, 266.593, 1.4758, 16.2710, 0.1775, 0.0225],
'La':[ 57.0, 3.6683, 11.7149, 19.8077, 15.4015, 6.3860, 224.726, 14.7472, 1.3823, 0.1708, 0.0222],
'Ce':[ 58.0, 3.7218, 12.0076, 20.1925, 15.4540, 6.5996, 212.566, 14.0417, 1.3272, 0.1681, 0.0221],
'Pr':[ 59.0, 3.6287, 11.9193, 20.8823, 15.6425, 6.8971, 208.103, 13.8486, 1.3025, 0.1690, 0.0222],
'Nd':[ 60.0, 3.7229, 12.0836, 21.2664, 15.7405, 7.1521, 194.998, 13.2282, 1.2573, 0.1677, 0.0222],
'Pm':[ 61.0, 3.8009, 12.2546, 21.6281, 15.8659, 7.4126, 184.340, 12.6793, 1.2139, 0.1669, 0.0222],
'Sm':[ 62.0, 3.8533, 12.3520, 22.0136, 16.0323, 7.7082, 176.392, 12.2878, 1.1810, 0.1672, 0.0223],
'Eu':[ 63.0, 3.9234, 12.4961, 22.3133, 16.2152, 8.0085, 167.816, 11.8294, 1.1433, 0.1676, 0.0224],
'Gd':[ 64.0, 4.1100, 13.0008, 22.2962, 16.2937, 8.2594, 160.859, 11.0492, 1.0914, 0.1659, 0.0223],
'Tb':[ 65.0, 4.1589, 13.2197, 22.4627, 16.5329, 8.5842, 155.186, 10.6407, 1.0667, 0.1674, 0.0224],
'Dy':[ 66.0, 4.0415, 13.1038, 22.8132, 17.0066, 8.9849, 150.058, 20.6511, 1.0708, 0.1723, 0.0227],
'Ho':[ 67.0, 4.0620, 13.2132, 22.9351, 17.3999, 9.3388, 145.976, 10.3972, 1.0569, 0.1752, 0.0228],
'Er':[ 68.0, 4.0815, 13.3282, 22.9865, 17.8548, 9.6967, 142.072, 10.1525, 1.0438, 0.1785, 0.0229],
'Tm':[ 69.0, 4.7416, 13.2875, 23.1429, 17.9215, 9.8935, 100.170, 9.0676, 0.9833, 0.1753, 0.0227],
'Yb':[ 70.0, 4.1014, 13.4586, 22.9675, 18.9998, 10.4188, 135.833, 9.8125, 1.0290, 0.1860, 0.0231],
'Lu':[ 71.0, 4.3261, 13.8638, 22.6992, 19.3649, 10.6986, 133.360, 9.1664, 0.9951, 0.1861, 0.0230],
'Hf':[ 72.0, 4.5759, 14.3292, 22.3741, 19.7027, 10.9720, 124.501, 8.5195, 0.9599, 0.1857, 0.0229],
'Ta':[ 73.0, 4.7416, 14.9284, 22.0214, 20.0290, 11.2347, 117.648, 7.9070, 0.9250, 0.1850, 0.0227],
'W' :[ 74.0, 4.8423, 15.6421, 21.6701, 20.3207, 11.4817, 112.694, 7.3551, 0.8892, 0.1837, 0.0226],
'Re':[ 75.0, 5.0015, 16.3874, 21.3263, 20.5247, 11.7163, 106.001, 6.7592, 0.8491, 0.1817, 0.0224],
'Os':[ 76.0, 5.3630, 17.5066, 23.3197, 20.9425, 8.8158, 93.6155, 6.0569, 0.7156, 0.1373, 0.0127],
'Ir':[ 77.0, 5.0953, 15.4321, 15.5924, 26.5617, 14.2784, 98.2594, 6.9463, 1.2550, 0.2600, 0.0242],
'Pt':[ 78.0, 5.6451, 18.8015, 20.6881, 20.5266, 12.2868, 69.2677, 5.3127, 0.7107, 0.1702, 0.0216],
'Au':[ 79.0, 5.8170, 19.5635, 20.3716, 20.6837, 12.5114, 65.4079, 4.9468, 0.6809, 0.1680, 0.0214],
'Hg':[ 80.0, 5.9389, 19.4946, 19.0547, 22.4442, 13.0069, 74.0107, 4.9118, 0.7375, 0.1779, 0.0216],
'Tl':[ 81.0, 6.5803, 19.6010, 18.3955, 23.0433, 13.3134, 69.9997, 4.6032, 0.7312, 0.1787, 0.0216],
'Pb':[ 82.0, 7.3754, 19.7974, 18.0002, 23.2605, 13.4943, 62.4634, 4.2561, 0.7021, 0.1757, 0.0213],
'Bi':[ 83.0, 8.2324, 20.0183, 17.6603, 23.3246, 13.6777, 53.0479, 3.9249, 0.6675, 0.1722, 0.0210],
'Po':[ 84.0, 9.0662, 20.1395, 23.6273, 17.1872, 13.8913, 48.3272, 3.6624, 0.1701, 0.6471, 0.0208],
'At':[ 85.0, 9.9223, 20.2544, 23.9015, 16.7271, 14.1028, 43.7514, 3.4229, 0.1680, 0.6271, 0.0206],
'Rn':[ 86.0,10.7789, 20.3926, 16.3720, 24.0637, 14.2879, 38.6121, 3.2050, 0.6037, 0.1650, 0.0203],
'Fr':[ 87.0, 2.5332, 11.5319, 24.7459, 31.9247, 16.2475, 269.878, 21.9610, 2.0716, 0.2243, 0.0223],
'Ra':[ 88.0, 3.2249, 11.6694, 24.8687, 31.8957, 16.3306, 321.663, 20.3846, 1.9730, 0.2166, 0.0219],
'Ac':[ 89.0, 3.6207, 12.1269, 24.9363, 31.8798, 16.4227, 270.774, 18.9025, 1.8811, 0.2095, 0.0215],
'Th':[ 90.0, 4.0181, 12.6119, 24.9621, 31.8646, 16.5249, 232.371, 17.3825, 1.7950, 0.2030, 0.0211],
'Pa':[ 91.0, 3.7926, 12.6626, 25.8795, 31.9403, 16.7045, 236.803, 17.5908, 1.7607, 0.1984, 0.0209],
'U' :[ 92.0, 3.9051, 12.8415, 26.4298, 31.9514, 16.8475, 221.178, 16.8737, 1.7008, 0.1932, 0.0205],
'Np':[ 93.0, 4.0333, 12.9014, 26.7571, 31.4732, 17.8065, 207.727, 16.3175, 1.6677, 0.1954, 0.0220],
'Pu':[ 94.0, 4.2695, 12.5662, 28.0362, 31.9382, 17.1487, 185.955, 15.5936, 1.5914, 0.1834, 0.0200],
'Am':[ 95.0, 4.3981, 12.6808, 28.6781, 31.9085, 17.2883, 174.359, 14.9676, 1.5304, 0.1786, 0.0197],
'Cm':[ 96.0, 4.3981, 12.6808, 28.6781, 31.9085, 17.2883, 174.359, 14.9676, 1.5304, 0.1786, 0.0197],
'Bk':[ 97.0, 4.3981, 12.6808, 28.6781, 31.9085, 17.2883, 174.359, 14.9676, 1.5304, 0.1786, 0.0197],
'Cf':[ 98.0, 4.3981, 12.6808, 28.6781, 31.9085, 17.2883, 174.359, 14.9676, 1.5304, 0.1786, 0.0197]
}
|
PypiClean
|
/refind_btrfs-0.6.0-py3-none-any.whl/refind_btrfs/boot/migrations/icon_migration_strategies.py
|
# endregion
from abc import ABC, abstractmethod
from pathlib import Path
from refind_btrfs.common import BtrfsLogo, Icon
from refind_btrfs.common.abc.commands import IconCommand
from refind_btrfs.common.enums import BootStanzaIconGenerationMode
class BaseIconMigrationStrategy(ABC):
def __init__(
self, icon_command: IconCommand, refind_config_path: Path, source_icon: str
) -> None:
self._icon_command = icon_command
self._refind_config_path = refind_config_path
self._source_icon_path = Path(source_icon)
@abstractmethod
def migrate(self) -> str:
pass
class DefaultMigrationStrategy(BaseIconMigrationStrategy):
def migrate(self) -> str:
return str(self._source_icon_path)
class CustomMigrationStrategy(BaseIconMigrationStrategy):
def __init__(
self,
icon_command: IconCommand,
refind_config_path: Path,
source_icon: str,
custom_icon_path: Path,
) -> None:
super().__init__(icon_command, refind_config_path, source_icon)
self._custom_icon_path = custom_icon_path
def migrate(self) -> str:
icon_command = self._icon_command
refind_config_path = self._refind_config_path
source_icon_path = self._source_icon_path
custom_icon_path = self._custom_icon_path
destination_icon_relative_path = icon_command.validate_custom_icon(
refind_config_path, source_icon_path, custom_icon_path
)
return str(destination_icon_relative_path)
class EmbedBtrfsLogoStrategy(BaseIconMigrationStrategy):
def __init__(
self,
icon_command: IconCommand,
refind_config_path: Path,
source_icon: str,
btrfs_logo: BtrfsLogo,
) -> None:
super().__init__(icon_command, refind_config_path, source_icon)
self._btrfs_logo = btrfs_logo
def migrate(self) -> str:
icon_command = self._icon_command
refind_config_path = self._refind_config_path
source_icon_path = self._source_icon_path
btrfs_logo = self._btrfs_logo
destination_icon_relative_path = icon_command.embed_btrfs_logo_into_source_icon(
refind_config_path, source_icon_path, btrfs_logo
)
return str(destination_icon_relative_path)
class IconMigrationFactory:
@staticmethod
def migration_strategy(
icon_command: IconCommand,
refind_config_path: Path,
source_icon: str,
icon: Icon,
) -> BaseIconMigrationStrategy:
mode = icon.mode
if mode == BootStanzaIconGenerationMode.DEFAULT:
return DefaultMigrationStrategy(
icon_command, refind_config_path, source_icon
)
if mode == BootStanzaIconGenerationMode.CUSTOM:
custom_icon_path = icon.path
return CustomMigrationStrategy(
icon_command, refind_config_path, source_icon, custom_icon_path
)
if mode == BootStanzaIconGenerationMode.EMBED_BTRFS_LOGO:
btrfs_logo = icon.btrfs_logo
return EmbedBtrfsLogoStrategy(
icon_command, refind_config_path, source_icon, btrfs_logo
)
raise ValueError(
"The 'icon' parameter's 'mode' property contains an unexpected value!"
)
|
PypiClean
|
/armory_testbed-0.18.1-py3-none-any.whl/armory/art_experimental/attacks/carla_obj_det_patch.py
|
import math
import os
import random
from typing import Dict, List, Optional
from art.attacks.evasion import RobustDPatch
import cv2
import numpy as np
from tqdm.auto import trange
from armory.art_experimental.attacks.carla_obj_det_utils import (
linear_depth_to_rgb,
linear_to_log,
log_to_linear,
rgb_depth_to_linear,
)
from armory.logs import log
from armory.utils.external_repo import ExternalRepoImport
with ExternalRepoImport(
repo="colour-science/[email protected]",
experiment="carla_obj_det_dpatch_undefended.json",
):
import colour
def calculate_ccm(im_np, gt_np, gamma=2.2, Vandermonde=True, degree=1):
"""
Calculates the color transform matrix.
attributes::
im_np
np array of color patch values from image
gt_np
np array of known color patch values
gamma
default is 2.2, this is most common
Vandermonde
boolean indicating whether to use basic ccm method or
Vandermonde method
degree
default is 1, this is only used with Vandermonde method
returns::
color correction matrix
"""
# normalize both arrays
im_np = im_np / 255
gt_np = gt_np / 255
# linearize values by decoding gamma from RGBs
im_lin = np.power(im_np, gamma)
gt_lin = np.power(gt_np, gamma)
# calculate matrix
if Vandermonde:
ccm = colour.characterisation.matrix_colour_correction_Vandermonde(
gt_lin, im_lin
)
else:
ccm = np.linalg.pinv(np.asmatrix(gt_lin)).dot(np.asmatrix(im_lin))
return ccm
def apply_ccm(patch, ccm, gamma=2.2, Vandermonde=True, degree=1):
"""
Applies transform to patch.
attributes::
patch
np array of patch to be inserted
ccm
color correction matrix
gamma
default is still 2.2
Vandermonde
boolean indicating whether basic or Vandermonde method is
being used for calculations
degree
default is 1, should only be used when Vandermonde is True
returns::
color transformed patch
"""
# normalize image
patch = patch / 255
# linearize image
patch_lin = np.power(patch, gamma)
# get shape of patch
rows, cols, ch = patch_lin.shape
if Vandermonde:
# reshape for matrix multiplication
patch_reshape = np.reshape(patch_lin, (-1, 3))
# expand patch for transform
patch_expand = colour.characterisation.polynomial_expansion_Vandermonde(
patch_reshape, degree
)
# multiply and reshape
corrected_RGB = np.reshape(
np.transpose(np.dot(ccm, np.transpose(patch_expand))), (rows, cols, ch)
)
else:
# reshape for matrix multiplication
patch_reshape = np.reshape(patch_lin, (rows * cols, ch))
# multiply
corrected_RGB = np.matmul(np.asmatrix(patch_reshape), ccm)
# reshape back to normal
corrected_RGB = np.reshape(np.array(corrected_RGB), (rows, cols, ch))
# clip where necessary
corrected_RGB = np.array(corrected_RGB)
corrected_RGB[corrected_RGB > 1.0] = 1.0
corrected_RGB[corrected_RGB < 0.0] = 0.0
# reapply gamma
corrected_RGB = np.power(corrected_RGB, (1 / gamma))
# compensate for saturated pixels
corrected_RGB[patch_lin == 1.0] = 1.0
return corrected_RGB
def insert_transformed_patch(
patch, image, gs_shape, rgb, patch_coords=[], image_coords=[]
):
"""
Insert patch to image based on given or selected coordinates
attributes::
patch
patch as numpy array
image
image as numpy array
gs_shape
green screen shape
rgb
true if gs_im is RGB, false otherwise
patch_coords
patch coordinates to map to image [numpy array]
image_coords
image coordinates patch coordinates will be mapped to [numpy array]
going in clockwise direction, starting with upper left corner
returns::
image with patch inserted
"""
# if no patch coords are given, just use whole image
if patch_coords == []:
h, w, c = patch.shape
patch_coords = np.array([[0, 0], [w - 1, 0], [w - 1, h - 1], [0, h - 1]])
# calculate homography
h, status = cv2.findHomography(patch_coords, image_coords)
# mask to aid with insertion
mask = np.ones((patch.shape[0], patch.shape[1], 3))
mask_out = cv2.warpPerspective(
mask, h, (image.shape[1], image.shape[0]), cv2.INTER_CUBIC
)
# mask patch and warp it to destination coordinates
patch[mask == 0] = 0
im_out = cv2.warpPerspective(
patch, h, (image.shape[1], image.shape[0]), cv2.INTER_CUBIC
)
# save image before adding shadows
im_cp_one = np.copy(image)
im_cp_one[mask_out != 0] = 0
im_out_cp = np.copy(im_out)
before = im_cp_one.astype("float32") + im_out_cp.astype("float32")
if rgb:
v_avg = 0.5647 # V value (in HSV) for the green screen, which is #00903a
# mask image for patch insert
image_cp = np.copy(image)
image_cp[mask_out == 0] = 0
# convert to HSV space for shadow estimation
target_hsv = cv2.cvtColor(image_cp, cv2.COLOR_BGR2HSV)
target_hsv = target_hsv.astype("float32")
target_hsv /= 255.0
# apply shadows to patch
ratios = target_hsv[:, :, 2] / v_avg
im_out = im_out.astype("float32")
im_out[:, :, 0] = im_out[:, :, 0] * ratios
im_out[:, :, 1] = im_out[:, :, 1] * ratios
im_out[:, :, 2] = im_out[:, :, 2] * ratios
im_out[im_out > 255.0] = 255.0
im_cp_two = np.copy(image)
im_cp_two[mask_out != 0] = 0
final = im_cp_two.astype("float32") + im_out.astype("float32")
else:
final = before
return before, final
def insert_patch(
gs_coords, gs_im, patch, gs_shape, cc_gt, cc_scene, apply_realistic_effects, rgb
):
"""
:param gs_coords: green screen coordinates in [(x0,y0),(x1,y1),...] format. Type ndarray.
:param gs_im: clean image with green screen. Type ndarray.
:param patch: adversarial patch. Type ndarray
:param gs_shape: green screen shape. Type str.
:param cc_gt: colorchecker ground truth values. Type ndarray.
:param cc_scene: colorchecker values in the scene. Type ndarray.
:param apply_realistic_effects: apply effects such as color correction, blurring, and shadowing. Type bool.
:param rgb: true if gs_im is RGB, false otherwise
"""
if apply_realistic_effects:
# calculate color matrix
ccm = calculate_ccm(cc_scene, cc_gt, Vandermonde=True)
# apply color matrix to patch
patch = apply_ccm(patch.astype("float32"), ccm)
# resize patch
scale = (np.amax(gs_coords[:, 1]) - np.amin(gs_coords[:, 1])) / patch.shape[0]
patch = cv2.resize(
patch,
(int(patch.shape[1] * scale), int(patch.shape[0] * scale)),
interpolation=cv2.INTER_CUBIC,
)
# datatype correction
patch = patch * 255
patch = patch.astype("uint8")
# convert for use with cv2
if rgb:
patch = cv2.cvtColor(patch, cv2.COLOR_RGB2BGR)
enlarged_coords = np.copy(gs_coords)
pad_amt_x = int(0.03 * (enlarged_coords[2, 0] - enlarged_coords[0, 0]))
pad_amt_y = int(0.03 * (gs_coords[2, 1] - gs_coords[0, 1]))
enlarged_coords[0, 0] -= pad_amt_x
enlarged_coords[0, 1] -= pad_amt_y
enlarged_coords[1, 0] += pad_amt_x
enlarged_coords[1, 1] -= pad_amt_y
enlarged_coords[2, 0] += pad_amt_x
enlarged_coords[2, 1] += pad_amt_y
enlarged_coords[3, 0] -= pad_amt_x
enlarged_coords[3, 1] += pad_amt_y
# insert transformed patch
image_digital, image_physical = insert_transformed_patch(
patch, gs_im, gs_shape, image_coords=enlarged_coords, rgb=rgb
)
if apply_realistic_effects:
return image_physical
else:
return image_digital
class CARLADapricotPatch(RobustDPatch):
def __init__(self, estimator, **kwargs):
# Maximum depth perturbation from a flat patch
self.depth_delta_meters = kwargs.pop("depth_delta_meters", 3)
self.learning_rate_depth = kwargs.pop("learning_rate_depth", 0.0001)
self.depth_perturbation = None
self.max_depth = None
self.min_depth = None
self.patch_base_image = kwargs.pop("patch_base_image", None)
# HSV bounds are user-defined to limit perturbation regions
self.hsv_lower_bound = np.array(
kwargs.pop("hsv_lower_bound", [0, 0, 0])
) # [0, 0, 0] means unbounded below
self.hsv_upper_bound = np.array(
kwargs.pop("hsv_upper_bound", [255, 255, 255])
) # [255, 255, 255] means unbounded above
super().__init__(estimator=estimator, **kwargs)
def create_initial_image(self, size, hsv_lower_bound, hsv_upper_bound):
"""
Create initial patch based on a user-defined image
"""
module_path = globals()["__file__"]
# user-defined image is assumed to reside in the same location as the attack module
patch_base_image_path = os.path.abspath(
os.path.join(os.path.join(module_path, "../"), self.patch_base_image)
)
im = cv2.imread(patch_base_image_path)
im = cv2.resize(im, size)
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
hsv = cv2.cvtColor(im, cv2.COLOR_RGB2HSV)
# find the colors within the boundaries
mask = cv2.inRange(hsv, hsv_lower_bound, hsv_upper_bound)
mask = np.expand_dims(mask, 2)
# cv2.imwrite(
# "mask.png", mask
# ) # visualize perturbable regions. Comment out if not needed.
patch_base = im / 255.0
mask = mask / 255.0
return patch_base, mask
def inner_generate(  # type: ignore
    self, x: np.ndarray, y: Optional[List[Dict[str, np.ndarray]]] = None, **kwargs
) -> np.ndarray:
    """
    Generate RobustDPatch.

    :param x: Sample images, shape (B,H,W,3) or (B,H,W,6) for multimodal input.
    :param y: Target labels for object detector (required iff the attack is targeted).
    :return: Adversarial patch (RGB only; depth perturbation is stored on ``self``).
    :raises ValueError: on targeted/label mismatch, non-image input, or a patch/crop
        geometry that does not fit the images.
    """
    if y is None and self.targeted:
        raise ValueError(
            "The targeted version of RobustDPatch attack requires target labels provided to `y`."
        )
    if y is not None and not self.targeted:
        raise ValueError("The RobustDPatch attack does not use target labels.")
    if x.ndim != 4:  # pragma: no cover
        raise ValueError("The adversarial patch can only be applied to images.")

    # Check whether patch fits into the cropped images:
    if self.estimator.channels_first:
        image_height, image_width = x.shape[2:4]
    else:
        image_height, image_width = x.shape[1:3]

    if not self.estimator.native_label_is_pytorch_format and y is not None:
        from art.estimators.object_detection.utils import convert_tf_to_pt

        y = convert_tf_to_pt(y=y, height=x.shape[1], width=x.shape[2])

    if y is not None:
        # Target boxes must not intersect the crop margin, otherwise gradients
        # would be computed against truncated targets.
        for i_image in range(x.shape[0]):
            y_i = y[i_image]["boxes"]
            for i_box in range(y_i.shape[0]):
                x_1, y_1, x_2, y_2 = y_i[i_box]
                if (  # pragma: no cover
                    x_1 < self.crop_range[1]
                    or y_1 < self.crop_range[0]
                    or x_2 > image_width - self.crop_range[1] + 1
                    or y_2 > image_height - self.crop_range[0] + 1
                ):
                    raise ValueError(
                        "Cropping is intersecting with at least one box, reduce `crop_range`."
                    )

    if (  # pragma: no cover
        self.patch_location[0] + self.patch_shape[0]
        > image_height - self.crop_range[0]
        or self.patch_location[1] + self.patch_shape[1]
        > image_width - self.crop_range[1]
    ):
        raise ValueError("The patch (partially) lies outside the cropped image.")

    for i_step in trange(
        self.max_iter, desc="RobustDPatch iteration", disable=not self.verbose
    ):
        num_batches = math.ceil(x.shape[0] / self.batch_size)
        patch_gradients = np.zeros_like(self._patch)
        depth_gradients = None

        # Expectation over transformation: accumulate gradients over
        # `sample_size` randomized augmentations of every batch.
        for e_step in range(self.sample_size):
            for i_batch in range(num_batches):
                i_batch_start = i_batch * self.batch_size
                i_batch_end = min((i_batch + 1) * self.batch_size, x.shape[0])

                if y is None:
                    y_batch = y
                else:
                    y_batch = y[i_batch_start:i_batch_end]

                (
                    patched_images,
                    patch_target,
                    transforms,
                ) = self._augment_images_with_patch(
                    x[i_batch_start:i_batch_end],
                    y_batch,
                    self._patch,
                    channels_first=self.estimator.channels_first,
                )

                gradients = self.estimator.loss_gradient(
                    x=patched_images,
                    y=patch_target,
                    standardise_output=True,
                )  # (B,H,W,C)

                # First 3 channels are RGB; for 6-channel (multimodal) input the
                # remaining channels carry the depth encoding.
                gradients_rgb = gradients[:, :, :, :3]
                if gradients.shape[-1] == 6:
                    if depth_gradients is None:
                        depth_gradients = gradients[:, :, :, 3:]
                    else:
                        depth_gradients = np.concatenate(
                            (depth_gradients, gradients[:, :, :, 3:]), axis=0
                        )

                gradients_rgb = self._untransform_gradients(
                    gradients_rgb,
                    transforms,
                    channels_first=self.estimator.channels_first,
                )
                patch_gradients = patch_gradients + np.sum(gradients_rgb, axis=0)

        # Write summary
        if self.summary_writer is not None:  # pragma: no cover
            x_patched, y_patched, _ = self._augment_images_with_patch(
                x, y, self._patch, channels_first=self.estimator.channels_first
            )
            self.summary_writer.update(
                batch_id=0,
                global_step=i_step,
                grad=np.expand_dims(patch_gradients, axis=0),
                patch=self._patch,
                estimator=self.estimator,
                x=x_patched,
                y=y_patched,
                targeted=self.targeted,
            )

        # Signed-gradient step; sign flips for targeted attacks (descend on the
        # target loss) vs. untargeted (ascend). Restricted to the perturbable mask.
        self._patch = (
            self._patch
            + np.sign(patch_gradients)
            * (1 - 2 * int(self.targeted))
            * self.learning_rate
            * self.patch_mask
        )

        # Update depth perturbation
        if gradients.shape[-1] == 6:
            images_depth = patched_images[:, :, :, 3:]
            if self.depth_type == "log":
                depth_log = (
                    self.depth_perturbation
                    + np.sign(depth_gradients)
                    * (1 - 2 * int(self.targeted))
                    * self.learning_rate_depth
                )
                perturbed_images = np.clip(
                    images_depth + depth_log, self.min_depth, self.max_depth
                )
                # BUG FIX: was assigned to the misspelled attribute
                # `self.depth_pertubation`, so the log-depth update was never
                # applied to the perturbation that is actually read elsewhere.
                self.depth_perturbation = perturbed_images - images_depth
            else:
                # Linear depth: step and clip in linear (metric) space, then
                # re-encode as RGB.
                grads_linear = rgb_depth_to_linear(
                    depth_gradients[:, :, :, 0],
                    depth_gradients[:, :, :, 1],
                    depth_gradients[:, :, :, 2],
                ).astype("float32")
                depth_linear = rgb_depth_to_linear(
                    self.depth_perturbation[:, :, :, 0],
                    self.depth_perturbation[:, :, :, 1],
                    self.depth_perturbation[:, :, :, 2],
                ).astype("float32")
                depth_linear = (
                    depth_linear
                    + np.sign(grads_linear)
                    * (1 - 2 * int(self.targeted))
                    * self.learning_rate_depth
                )
                images_depth_linear = rgb_depth_to_linear(
                    images_depth[:, :, :, 0],
                    images_depth[:, :, :, 1],
                    images_depth[:, :, :, 2],
                ).astype("float32")
                depth_linear = np.clip(
                    images_depth_linear + depth_linear,
                    self.min_depth,
                    self.max_depth,
                )
                depth_r, depth_g, depth_b = linear_depth_to_rgb(depth_linear)
                perturbed_images = np.stack(
                    [depth_r, depth_g, depth_b], axis=-1
                ).astype("float32")
                self.depth_perturbation = perturbed_images - images_depth

        if self.estimator.clip_values is not None:
            self._patch = np.clip(
                self._patch,
                self.estimator.clip_values[0],
                self.estimator.clip_values[1],
            )

    if self.summary_writer is not None:
        self.summary_writer.reset()

    return self._patch
def _augment_images_with_patch(self, x, y, patch, channels_first):
    """
    Augment images with patch using perspective transform
    instead of inserting patch based on patch_location

    :param x: Sample images, shape (B,H,W,3) or (B,H,W,6) — TODO confirm layout.
    :param y: Target labels (None in the untargeted case; predictions are used instead).
    :param patch: The patch to be applied.
    :param channels_first: Set channels first or last.
    :return: tuple (x_patch, patch_target, transformations) where
        ``transformations`` records the random brightness factor applied.
    """
    if x.shape[0] != len(self.gs_coords):
        raise ValueError(
            "Number of images should be equal to the number of arrays of green screen coordinates"
        )
    if y is not None and (x.shape[0] != len(y)):
        raise ValueError(
            "Number of images should be equal to the number of targets"
        )

    transformations = dict()
    x_copy = x.copy()
    patch_copy = patch.copy()

    # Apply patch:
    x_patch = []
    for i in range(len(x_copy)):
        xi = x_copy[i]
        gs_coords = self.gs_coords[i]
        if xi.shape[-1] == 3:
            rgb_img = xi.astype("float32")
        else:
            # Multimodal input: split RGB and depth channels.
            rgb_img = xi[:, :, :3].astype("float32")  # (H,W,3)
            depth_img = xi[:, :, 3:].astype("float32")  # (H,W,3)
            depth_perturbation = self.depth_perturbation[i]  # (H,W,3)
            foreground = self.foreground[i]  # (H,W,1)

        # apply patch using DAPRICOT transform to RGB channels only
        # insert_patch() uses BGR color ordering for input and output
        rgb_img_with_patch = (
            insert_patch(
                gs_coords,
                rgb_img[:, :, ::-1] * 255.0,  # input image needs to be BGR
                patch_copy * 255.0,
                self.patch_geometric_shape,
                cc_gt=self.cc_gt,
                cc_scene=self.cc_scene,
                apply_realistic_effects=self.apply_realistic_effects,
                rgb=True,
            )
            / 255.0
        )

        # embed patch into background: wherever the binarized mask is zero
        # (occluded/background pixels) keep the original image content.
        rgb_img_with_patch[
            np.all(self.binarized_patch_mask == 0, axis=-1)
        ] = rgb_img[np.all(self.binarized_patch_mask == 0, axis=-1)][
            :, ::-1
        ]  # (H,W,3)

        if xi.shape[-1] == 3:
            img_with_patch = rgb_img_with_patch.copy()
        else:
            # apply depth perturbation to depth channels (restricted to foreground)
            depth_img = depth_img + depth_perturbation * foreground  # (H,W,3)
            depth_img = np.clip(
                depth_img,
                self.estimator.clip_values[0],
                self.estimator.clip_values[1],
            )
            # NOTE(review): channel reversals here combine with the final
            # [:, :, ::-1] below to restore channel order — confirm the 6-channel
            # ordering expected by the estimator before changing anything.
            img_with_patch = np.concatenate(
                (depth_img[:, :, ::-1], rgb_img_with_patch), axis=-1
            )
        x_patch.append(img_with_patch[:, :, ::-1])  # convert back to RGB
    x_patch = np.asarray(x_patch)  # (B,H,W,3) or (B,H,W,6)

    # 1) crop images: not used.
    if self.crop_range[0] != 0 and self.crop_range[1] != 0:
        log.warning("crop_range argument not used.")

    # 2) rotate images
    if sum(self.rotation_weights[1:]) > 0:
        raise ValueError("Non-zero rotations not correctly supported at this time.")

    # 3) adjust brightness: same random factor for clean and patched images,
    # applied to the RGB channels only.
    brightness = random.uniform(*self.brightness_range)
    x_copy[..., :3] = brightness * x_copy[..., :3]
    x_patch[..., :3] = brightness * x_patch[..., :3]

    transformations.update({"brightness": brightness})

    patch_target = list()
    if not self.targeted:
        # Untargeted: use the detector's own predictions on the clean images
        # as the labels to move away from.
        y = self.estimator.predict(
            x=x_copy.astype("float32"), standardise_output=True
        )

    for i_image in range(x_copy.shape[0]):
        target_dict = dict()
        target_dict["boxes"] = y[i_image]["boxes"]
        target_dict["labels"] = y[i_image]["labels"]
        target_dict["scores"] = y[i_image]["scores"]
        patch_target.append(target_dict)

    return x_patch, patch_target, transformations
def _untransform_gradients(
    self,
    gradients,
    transforms,
    channels_first,
):
    """
    Revert transformation on gradients using perspective transform

    :param gradients: The gradients to be reverse transformed, shape (B,H,W,3).
    :param transforms: The transformations in forward direction (brightness factor).
    :param channels_first: Set channels first or last.
    :return: Gradients warped back into patch coordinates,
        shape (B, patch_height, patch_width, 3).
    """
    if gradients.shape[0] != len(self.gs_coords):
        raise ValueError(
            "Number of gradient arrays should be equal to the number of arrays of green screen coordinates"
        )

    # Account for brightness adjustment:
    gradients = transforms["brightness"] * gradients

    # Undo perspective transform for gradients: map each image's green-screen
    # quadrilateral back onto the axis-aligned patch rectangle.
    patch_height, patch_width, _ = self.patch_shape
    patch_coords = np.array(
        [
            [0, 0],
            [patch_width - 1, 0],
            [patch_width - 1, patch_height - 1],
            [0, patch_height - 1],
        ]
    )
    gradients_tmp = []
    for grads, gs_coords in zip(gradients, self.gs_coords):
        homography, _ = cv2.findHomography(gs_coords, patch_coords)
        # BUG FIX: cv2.INTER_CUBIC was previously passed positionally where
        # cv2.warpPerspective expects the optional `dst` output array; the
        # interpolation mode must be supplied via the `flags` keyword.
        grads_tmp = cv2.warpPerspective(
            grads,
            homography,
            (self.patch_shape[1], self.patch_shape[0]),
            flags=cv2.INTER_CUBIC,
        )
        gradients_tmp.append(grads_tmp)
    return np.asarray(gradients_tmp)
def generate(self, x, y=None, y_patch_metadata=None):
    """
    Run the per-example patch attack and return the attacked images.

    param x: Sample images. For single-modality, shape=(NHW3). For multimodality, shape=(NHW6)
    param y: [Optional] Sample labels. List of dictionaries,
        ith dictionary contains bounding boxes, class labels, and class scores
    param y_patch_metadata: Patch metadata. List of N dictionaries, ith dictionary contains patch metadata for x[i]
    :return: Array of attacked images, same shape as ``x``.
    """
    if x.shape[0] > 1:
        log.info("To perform per-example patch attack, batch size must be 1")
    assert x.shape[-1] in [3, 6], "x must have either 3 or 6 color channels"
    if x.shape[-1] == 6 and self.sample_size != 1:
        raise ValueError(
            "Sample size must be 1 for multimodality input because expectation over transformation cannot be applied to depth perturbation"
        )

    num_imgs = x.shape[0]
    attacked_images = []

    for i in range(num_imgs):
        # Split modalities: first 3 channels are RGB, remaining 3 encode depth.
        if x.shape[-1] == 3:
            rgb_img = x[i].astype("float32")
        else:
            rgb_img = x[i, :, :, :3].astype("float32")  # (H,W,3)
            depth_img = x[i, :, :, 3:].astype("float32")  # (H,W,3)

        # Green-screen corners define where (and how large) the patch is.
        gs_coords = y_patch_metadata[i]["gs_coords"]  # (4,2)
        patch_width = np.max(gs_coords[:, 0]) - np.min(gs_coords[:, 0])
        patch_height = np.max(gs_coords[:, 1]) - np.min(gs_coords[:, 1])
        self.patch_shape = (
            patch_height,
            patch_width,
            3,
        )

        patch_geometric_shape = y_patch_metadata[i].get("shape", "rect")
        self.patch_geometric_shape = str(patch_geometric_shape)

        # this masked to embed patch into the background in the event of occlusion
        self.binarized_patch_mask = y_patch_metadata[i]["mask"]  # (H,W,3)

        # Eval7 contains a mixture of patch locations.
        # Patches that lie flat on the sidewalk or street are constrained to 0.03m depth perturbation, and they are best used to create disappearance errors.
        # Patches located elsewhere (i.e., that do not impede pedestrian/vehicle motion) are constrained to 3m depth perturbation, and they are best used to create hallucinations.
        # Therefore, the depth perturbation bound for each patch is input-dependent.
        if x.shape[-1] == 6:
            if "max_depth_perturb_meters" in y_patch_metadata[i].keys():
                self.depth_delta_meters = y_patch_metadata[i][
                    "max_depth_perturb_meters"
                ]
                log.info(
                    'This dataset contains input-dependent depth perturbation bounds, and the user-defined "depth_delta_meters" has been reset to {} meters'.format(
                        y_patch_metadata[i]["max_depth_perturb_meters"]
                    )
                )

        # get colorchecker information from ground truth and scene
        self.cc_gt = y_patch_metadata[i].get("cc_ground_truth", None)
        self.cc_scene = y_patch_metadata[i].get("cc_scene", None)

        # Prior to Eval 5 (dev set version 2.0.0), this was set to True. It's now being
        # set False to be more aligned with AdversarialPatch attack. This line is essentially
        # 'if dev set version >= 2.0.0', since these variables are None as of 2.0.0 / Eval 5
        if self.cc_gt is None or self.cc_scene is None:
            self.apply_realistic_effects = False
        else:
            self.apply_realistic_effects = True

        # self._patch needs to be re-initialized with the correct shape
        if self.patch_base_image is not None:
            self._patch, self.patch_mask = self.create_initial_image(
                (patch_width, patch_height),
                self.hsv_lower_bound,
                self.hsv_upper_bound,
            )
        else:
            # Random initialization scaled into the estimator's value range.
            self._patch = (
                np.random.randint(0, 255, size=self.patch_shape)
                / 255
                * (self.estimator.clip_values[1] - self.estimator.clip_values[0])
                + self.estimator.clip_values[0]
            )
            self.patch_mask = np.ones_like(self._patch)
        self._patch = np.clip(
            self._patch,
            self.estimator.clip_values[0],
            self.estimator.clip_values[1],
        )
        self.gs_coords = [gs_coords]

        # initialize depth variables
        if x.shape[-1] == 6:
            # Identical depth channels indicate a log-encoded depth map;
            # otherwise the three channels encode linear depth.
            if np.all(x[i, :, :, 3] == x[i, :, :, 4]) and np.all(
                x[i, :, :, 3] == x[i, :, :, 5]
            ):
                self.depth_type = "log"
                depth_linear = log_to_linear(x[i, :, :, 3:])
                self.max_depth = np.minimum(
                    1.0, linear_to_log(depth_linear + self.depth_delta_meters)
                )
                self.min_depth = np.maximum(
                    0.0, linear_to_log(depth_linear - self.depth_delta_meters)
                )
            else:
                self.depth_type = "linear"
                depth_linear = rgb_depth_to_linear(
                    x[i, :, :, 3], x[i, :, :, 4], x[i, :, :, 5]
                )
                self.max_depth = np.minimum(
                    1000.0, depth_linear + self.depth_delta_meters
                )
                self.min_depth = np.maximum(
                    0.0, depth_linear - self.depth_delta_meters
                )
            self.depth_perturbation = np.zeros(
                (1, *x.shape[1:3], 3), dtype=np.float32
            )  # (1,H,W,3)

            self.foreground = np.all(self.binarized_patch_mask == 255, axis=-1)
            self.foreground = np.expand_dims(self.foreground, (-1, 0))  # (1,H,W,1)

            # ensure area perturbed in depth is consistent with area perturbed in RGB
            h, _ = cv2.findHomography(
                np.array(
                    [
                        [0, 0],
                        [patch_width - 1, 0],
                        [patch_width - 1, patch_height - 1],
                        [0, patch_height - 1],
                    ]
                ),
                gs_coords,
            )
            rgb_mask = np.ones((patch_height, patch_width, 3), dtype=np.float32)
            # BUG FIX: cv2.INTER_CUBIC was previously passed positionally where
            # cv2.warpPerspective expects the optional `dst` output array; the
            # interpolation mode must be supplied via the `flags` keyword.
            rgb_mask = cv2.warpPerspective(
                rgb_mask, h, (x.shape[2], x.shape[1]), flags=cv2.INTER_CUBIC
            )
            self.foreground = self.foreground * rgb_mask[:, :, 0:1]

        if y is None:
            patch = self.inner_generate(
                np.expand_dims(x[i], axis=0)
            )  # untargeted attack
        else:
            patch = self.inner_generate(
                np.expand_dims(x[i], axis=0), y=[y[i]]
            )  # targeted attack

        # Render the final optimized patch into the image.
        rgb_img_with_patch = (
            insert_patch(
                self.gs_coords[0],
                rgb_img[:, :, ::-1] * 255.0,
                patch * 255.0,
                self.patch_geometric_shape,
                cc_gt=self.cc_gt,
                cc_scene=self.cc_scene,
                apply_realistic_effects=self.apply_realistic_effects,
                rgb=True,
            )
            / 255.0
        )

        # embed patch into background
        rgb_img_with_patch[
            np.all(self.binarized_patch_mask == 0, axis=-1)
        ] = rgb_img[np.all(self.binarized_patch_mask == 0, axis=-1)][
            :, ::-1
        ]  # (H,W,3)

        if x.shape[-1] == 3:
            img_with_patch = rgb_img_with_patch.copy()
        else:
            # Apply the optimized depth perturbation to the foreground region.
            depth_img = (
                depth_img + self.depth_perturbation[0] * self.foreground[0]
            )  # (H,W,3)
            depth_img = np.clip(
                depth_img,
                self.estimator.clip_values[0],
                self.estimator.clip_values[1],
            )
            img_with_patch = np.concatenate(
                (depth_img[:, :, ::-1], rgb_img_with_patch), axis=-1
            )
        attacked_images.append(img_with_patch[:, :, ::-1])

    return np.array(attacked_images)  # (B,H,W,3) or (B,H,W,6)
|
PypiClean
|
/ka-lite-static-0.17.6b2.tar.gz/ka-lite-static-0.17.6b2/kalite/packages/dist/requests/cookies.py
|
import copy
import time
import calendar
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    `cookielib.CookieJar` expects this interface so that it can correctly
    manage cookie policies, i.e., determine whether a cookie can be set,
    given the domains of the request and the cookie.

    The original request object is read-only. The client is responsible for
    collecting the new headers via `get_new_headers()` and interpreting them
    appropriately. You probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # A user-supplied Host header overrides the URL's own host; without
        # one, the response's URL is authoritative.
        host = self._r.headers.get('Host')
        if not host:
            return self._r.url
        # Reconstruct the URL around the overridden host.
        parsed = urlparse(self._r.url)
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return any(name in headers for headers in (self._r.headers, self._new_headers))

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        return self._headers

    def getheaders(self, name):
        # Bug fix: the header list was previously computed but never
        # returned, so callers always received None.
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Without the wrapped httplib.HTTPResponse there are no raw headers to
    # read cookies from, so there is nothing to do.
    if not getattr(response, '_original_response', None):
        return
    req = MockRequest(request)
    # The wrapped response's .msg is the HTTPMessage carrying the headers;
    # hand it to cookielib via the mock adapter.
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    mock_request = MockRequest(request)
    jar.add_cookie_header(mock_request)
    return mock_request.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: CookieJar.clear() mutates the jar, so we must
    # not call it while iterating.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]
    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
    """Raised when more than one cookie in the jar matches the given criteria.

    Use .get and .set and include domain and path args in order to be more
    specific.
    """
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """
    # NOTE(review): ``collections.MutableMapping`` was removed from the
    # ``collections`` namespace in Python 3.10 (it lives in
    # ``collections.abc``); this vendored module targets older interpreters —
    # confirm the supported Python range before upgrading.

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (path is None
                                                                or cookie.path == path):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A conflict still means "the name exists", so answer True.
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip literal surrounding double quotes (and embedded escaped
        # quotes) that some servers wrap cookie values in before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        # NOTE(review): truthiness test means a cookie whose value is an empty
        # string is reported as missing (KeyError) — confirm this is intended.
        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    Keyword arguments override any of the defaults below; unknown keywords
    raise TypeError.
    """
    result = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }

    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))

    result.update(kwargs)
    # Derive the *_specified flags cookielib.Cookie requires from the
    # (possibly overridden) values.
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])

    return cookielib.Cookie(**result)
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    max_age = morsel['max-age']
    if max_age:
        # Max-Age takes precedence over Expires and must be an integer
        # number of seconds from now.
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % max_age)
    elif morsel['expires']:
        # Parse the fixed HTTP cookie date format into a POSIX timestamp.
        expires = calendar.timegm(
            time.strptime(morsel['expires'], '%a, %d-%b-%Y %H:%M:%S GMT')
        )
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        # Snapshot existing names so overwrite=False can skip duplicates.
        existing_names = {cookie.name for cookie in cookiejar}
        for name in cookie_dict:
            if overwrite or name not in existing_names:
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        # Existing cookies win over dict entries.
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
|
PypiClean
|
/pgsql-1.2.tar.gz/pgsql-1.2/README
|
Pgsql
=====
PostgreSQL client library for Python 3. Simple, fast and lightweight.
Installation
------------
::
$ pip install pgsql
Example
-------
.. code-block:: python
import pgsql
# address defaults to ("localhost", 5432); if given as a string, it must be the path to a Unix socket
# user defaults to "postgres"
# password defaults to None
# database equals user by default
db = pgsql.Connection(user = "antti", database = "postgres")
print(db("CREATE TABLE people (name TEXT, age INT)"))
print(db("INSERT INTO people (name, age) VALUES ($1, $2)", "Veronica", 18))
print(db("SELECT * FROM people"))
db.close()
# for convenience, connection objects support the with statement
with pgsql.Connection(user = "antti", database = "postgres") as db:
# you can use .begin(), .commit(), .rollback() manually, or use the with statement
with db.transaction():
with db.prepare("INSERT INTO people (name, age) VALUES ($1, $2)") as ps:
for person in ("Wallace", 18), ("Keith", 45), ("Lianne", 44):
ps(*person)
# iterate through and print all the rows represented as tuples
people = db.prepare("SELECT * FROM people")
for person in people():
print(person)
# sometimes instead of an iterator, you want the rows as a list
# you may also want to call columns by their name
people_over = db.prepare("SELECT * FROM people WHERE age > $1").all
for person in people_over(21):
print(person.name, "is", person.age - 21, "years over the age of 21")
# when the expected result is only one row, it's convenient to call .first
person_named = db.prepare("SELECT * FROM people WHERE name = $1 LIMIT 1").first
print(person_named("Veronica"))
print(person_named("Backup"))
prints
::
[]
[]
[('Veronica', 18)]
('Veronica', 18)
('Wallace', 18)
('Keith', 45)
('Lianne', 44)
Keith is 24 years over the age of 21
Lianne is 23 years over the age of 21
('Veronica', 18)
None
Changes
-------
1.2 (2019-10-27)
******************
- Update package description
1.1 (2014-03-26)
****************
- Make it possible to execute one-time statements by calling the ``Connection`` object
|
PypiClean
|
/ntnx-files-py-client-4.0.1a2.tar.gz/ntnx-files-py-client-4.0.1a2/ntnx_files_py_client/Ntnx/files/v4/infra/NotifyPlatform.py
|
import pprint
import json
import ast
import re # noqa: F401
import six
from ntnx_files_py_client.Ntnx.files.v4.infra.PcConfig import PcConfig # noqa: F401,E501
from ntnx_files_py_client.Ntnx.files.v4.infra.PeConfig import PeConfig # noqa: F401,E501
from ntnx_files_py_client.Ntnx.files.v4.infra.PreUpgradeNvm import PreUpgradeNvm # noqa: F401,E501
from ntnx_files_py_client.Ntnx.files.v4.infra.PulseConfig import PulseConfig # noqa: F401,E501
from ntnx_files_py_client.Ntnx.files.v4.infra.TlsCertificateConfig import TlsCertificateConfig # noqa: F401,E501
"""
IGNORE:
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
IGNORE
"""
class NotifyPlatform(object):
    """NotifyPlatform - a model defined in Swagger"""
    """
    IGNORE:
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    IGNORE
    """ # noqa: E501
    # Python attribute name -> declared Swagger type; consumed by to_dict()
    # to drive serialization of this model.
    swagger_types = {
        'pc_config': 'files.v4.infra.PcConfig',
        'pre_upgrade_nvm': 'files.v4.infra.PreUpgradeNvm',
        'pulse_config': 'files.v4.infra.PulseConfig',
        'tls_certificate_config': 'files.v4.infra.TlsCertificateConfig',
        'pe_config': 'files.v4.infra.PeConfig',
        '_reserved': 'dict(str, object)',
        '_object_type': 'str',
        '_unknown_fields': 'dict(str, object)',
    }
    # Python attribute name -> JSON key used on the wire (the hidden fields
    # use '$'-prefixed keys).
    attribute_map = {
        'pc_config': 'pcConfig',
        'pre_upgrade_nvm': 'preUpgradeNvm',
        'pulse_config': 'pulseConfig',
        'tls_certificate_config': 'tlsCertificateConfig',
        'pe_config': 'peConfig',
        '_reserved': '$reserved',
        '_object_type': '$objectType',
        '_unknown_fields': '$unknownFields',
    }
    def __init__(self, pc_config=None, pre_upgrade_nvm=None, pulse_config=None, tls_certificate_config=None, pe_config=None, *args, **kwargs):  # noqa: E501
        """NotifyPlatform - a model defined in Swagger
        \nPlatform update notification request model.
        """
        # Private (name-mangled) storage for each declared model field.
        self.__pc_config = None
        self.__pre_upgrade_nvm = None
        self.__pulse_config = None
        self.__tls_certificate_config = None
        self.__pe_config = None
        self.discriminator = None
        # Only overwrite the None defaults for arguments actually supplied.
        if pc_config is not None:
            self.__pc_config = pc_config
        if pre_upgrade_nvm is not None:
            self.__pre_upgrade_nvm = pre_upgrade_nvm
        if pulse_config is not None:
            self.__pulse_config = pulse_config
        if tls_certificate_config is not None:
            self.__tls_certificate_config = tls_certificate_config
        if pe_config is not None:
            self.__pe_config = pe_config
        # populate hidden vars if not empty
        self._populate_hidden_vars(kwargs)
    def _initialize_object_type(self):
        # Default value of the '$objectType' discriminator for this model.
        return 'files.v4.infra.NotifyPlatform'
    def _initialize_fq_object_type(self):
        # Fully-qualified object type including the API release qualifier.
        return 'files.v4.r0.a2.infra.NotifyPlatform'
    def _populate_hidden_vars(self, kwargs):
        # Each hidden field may arrive under its python name ('_reserved')
        # or under its wire name from attribute_map ('$reserved'); when
        # neither is present, fall back to a generated default.
        if "_reserved" in kwargs and kwargs["_reserved"] is not None:
            self.__dollar_reserved = kwargs["_reserved"]
        elif "_reserved" in self.attribute_map and self.attribute_map["_reserved"] in kwargs and kwargs[self.attribute_map["_reserved"]] is not None:
            self.__dollar_reserved = kwargs[self.attribute_map["_reserved"]]
        else :
            self.__dollar_reserved = {"$fqObjectType": self._initialize_fq_object_type()}
        if "_unknown_fields" in kwargs and kwargs["_unknown_fields"] is not None:
            self.__dollar_unknown_fields = kwargs["_unknown_fields"]
        elif "_unknown_fields" in self.attribute_map and self.attribute_map["_unknown_fields"] in kwargs and kwargs[self.attribute_map["_unknown_fields"]] is not None:
            self.__dollar_unknown_fields = kwargs[self.attribute_map["_unknown_fields"]]
        else :
            self.__dollar_unknown_fields = {}
        if "_object_type" in kwargs and kwargs["_object_type"] is not None:
            self.__dollar_object_type = kwargs["_object_type"]
        elif "_object_type" in self.attribute_map and self.attribute_map["_object_type"] in kwargs and kwargs[self.attribute_map["_object_type"]] is not None:
            self.__dollar_object_type = kwargs[self.attribute_map["_object_type"]]
        else:
            self.__dollar_object_type = self._initialize_object_type()
    def get_object_type(self):
        # Accessor for the '$objectType' discriminator value.
        return self.__dollar_object_type
    def get_reserved(self):
        # Accessor for the '$reserved' metadata dict.
        return self.__dollar_reserved
    def get_unknown_fields(self):
        # Accessor for wire fields this model version does not recognise.
        return self.__dollar_unknown_fields
    @property
    def pc_config(self):
        """`{ files.v4.infra.PcConfig }`
        """ # noqa: E501
        return self.__pc_config
    @pc_config.setter
    def pc_config(self, pc_config):
        self.__pc_config = pc_config
    @property
    def pre_upgrade_nvm(self):
        """`{ files.v4.infra.PreUpgradeNvm }`
        """ # noqa: E501
        return self.__pre_upgrade_nvm
    @pre_upgrade_nvm.setter
    def pre_upgrade_nvm(self, pre_upgrade_nvm):
        self.__pre_upgrade_nvm = pre_upgrade_nvm
    @property
    def pulse_config(self):
        """`{ files.v4.infra.PulseConfig }`
        """ # noqa: E501
        return self.__pulse_config
    @pulse_config.setter
    def pulse_config(self, pulse_config):
        self.__pulse_config = pulse_config
    @property
    def tls_certificate_config(self):
        """`{ files.v4.infra.TlsCertificateConfig }`
        """ # noqa: E501
        return self.__tls_certificate_config
    @tls_certificate_config.setter
    def tls_certificate_config(self, tls_certificate_config):
        self.__tls_certificate_config = tls_certificate_config
    @property
    def pe_config(self):
        """`{ files.v4.infra.PeConfig }`
        """ # noqa: E501
        return self.__pe_config
    @pe_config.setter
    def pe_config(self, pe_config):
        self.__pe_config = pe_config
    @property
    def _reserved(self):
        """`{ dict(str, object) }`
        """ # noqa: E501
        return self.__dollar_reserved
    @property
    def _object_type(self):
        """`{ str }`
        """ # noqa: E501
        return self.__dollar_object_type
    @property
    def _unknown_fields(self):
        """`{ dict(str, object) }`
        """ # noqa: E501
        return self.__dollar_unknown_fields
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, attr_type in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialise any nested models held in lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialise nested models held as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(NotifyPlatform, dict):
            # Only reachable if the generated model subclasses dict.
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, NotifyPlatform):
            return False
        # Compares all instance state, including the hidden '$' fields.
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/apimanagement/latest/gateway_hostname_configuration.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['GatewayHostnameConfiguration']
warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-nextgen:apimanagement:GatewayHostnameConfiguration'.""", DeprecationWarning)
class GatewayHostnameConfiguration(pulumi.CustomResource):
    # Emitted once at class-definition (import) time to flag the whole
    # 'latest' module as deprecated.
    warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-nextgen:apimanagement:GatewayHostnameConfiguration'.""", DeprecationWarning)
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 certificate_id: Optional[pulumi.Input[str]] = None,
                 gateway_id: Optional[pulumi.Input[str]] = None,
                 hc_id: Optional[pulumi.Input[str]] = None,
                 hostname: Optional[pulumi.Input[str]] = None,
                 negotiate_client_certificate: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 service_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Gateway hostname configuration details.
        Latest API Version: 2019-12-01.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] certificate_id: Identifier of Certificate entity that will be used for TLS connection establishment
        :param pulumi.Input[str] gateway_id: Gateway entity identifier. Must be unique in the current API Management service instance. Must not have value 'managed'
        :param pulumi.Input[str] hc_id: Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
        :param pulumi.Input[str] hostname: Hostname value. Supports valid domain name, partial or full wildcard
        :param pulumi.Input[bool] negotiate_client_certificate: Determines whether gateway requests client certificate
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] service_name: The name of the API Management service.
        """
        pulumi.log.warn("GatewayHostnameConfiguration is deprecated: The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-nextgen:apimanagement:GatewayHostnameConfiguration'.")
        # Legacy __name__/__opts__ keyword arguments are still honoured for
        # backwards compatibility, each with its own deprecation warning.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set -> lookup of an existing resource; __props__ is only
        # valid in that mode. Otherwise build the input property bag.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            __props__['certificate_id'] = certificate_id
            # gateway_id / hc_id / resource_group_name / service_name are
            # required unless resolving by URN.
            if gateway_id is None and not opts.urn:
                raise TypeError("Missing required property 'gateway_id'")
            __props__['gateway_id'] = gateway_id
            if hc_id is None and not opts.urn:
                raise TypeError("Missing required property 'hc_id'")
            __props__['hc_id'] = hc_id
            __props__['hostname'] = hostname
            __props__['negotiate_client_certificate'] = negotiate_client_certificate
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if service_name is None and not opts.urn:
                raise TypeError("Missing required property 'service_name'")
            __props__['service_name'] = service_name
            # Output-only properties start as None and are filled by the engine.
            __props__['name'] = None
            __props__['type'] = None
        # Register aliases so state created under the versioned modules
        # still resolves to this resource type.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement:GatewayHostnameConfiguration"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:GatewayHostnameConfiguration"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:GatewayHostnameConfiguration"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:GatewayHostnameConfiguration")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(GatewayHostnameConfiguration, __self__).__init__(
            'azure-nextgen:apimanagement/latest:GatewayHostnameConfiguration',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'GatewayHostnameConfiguration':
        """
        Get an existing GatewayHostnameConfiguration resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Setting opts.id switches __init__ into lookup mode (see above).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        return GatewayHostnameConfiguration(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> pulumi.Output[Optional[str]]:
        """
        Identifier of Certificate entity that will be used for TLS connection establishment
        """
        return pulumi.get(self, "certificate_id")
    @property
    @pulumi.getter
    def hostname(self) -> pulumi.Output[Optional[str]]:
        """
        Hostname value. Supports valid domain name, partial or full wildcard
        """
        return pulumi.get(self, "hostname")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="negotiateClientCertificate")
    def negotiate_client_certificate(self) -> pulumi.Output[Optional[bool]]:
        """
        Determines whether gateway requests client certificate
        """
        return pulumi.get(self, "negotiate_client_certificate")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type for API Management resource.
        """
        return pulumi.get(self, "type")
    def translate_output_property(self, prop):
        # Provider outputs are camelCase; translate to python snake_case.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Python inputs are snake_case; translate to provider camelCase.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
PypiClean
|
/luminesce_sdk_preview-1.13.409-py3-none-any.whl/luminesce/rest.py
|
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
import urllib3
from luminesce.exceptions import ApiException, ApiValueError
from fbnsdkutilities.tcp.tcp_keep_alive_probes import TCPKeepAlivePoolManager, TCPKeepAliveProxyManager
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
    """File-like adapter around a urllib3 response object.

    Copies the commonly-read fields (``status``, ``reason``, ``data``)
    onto the wrapper and delegates header access to the wrapped response.
    """

    def __init__(self, resp):
        """Wrap *resp* and mirror its status, reason and body."""
        self.urllib3_response = resp
        # Surface the frequently-used fields as plain attributes so
        # callers never have to reach into the wrapped object.
        self.status, self.reason, self.data = resp.status, resp.reason, resp.data

    def getheaders(self):
        """Return all response headers as a dictionary-like object."""
        return self.urllib3_response.getheaders()

    def getheader(self, name, default=None):
        """Return the header *name*, or *default* when it is absent."""
        return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
    """urllib3-backed HTTP client used by the generated API classes.

    Builds an appropriate pool manager (plain / proxy / TCP-keep-alive)
    from the supplied configuration and exposes one method per HTTP verb,
    all funnelling through :meth:`request`.
    """
    def __init__(self, configuration, pools_size=4, maxsize=None):
        # urllib3.PoolManager will pass all kw parameters to connectionpool
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
        # maxsize is the number of requests to host that are allowed in parallel # noqa: E501
        # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
        # cert_reqs
        if configuration.verify_ssl:
            cert_reqs = ssl.CERT_REQUIRED
        else:
            cert_reqs = ssl.CERT_NONE
        # ca_certs
        if configuration.ssl_ca_cert:
            ca_certs = configuration.ssl_ca_cert
        else:
            # if not set certificate file, use Mozilla's root certificates.
            ca_certs = certifi.where()
        # Optional pool arguments only forwarded when configured.
        addition_pool_args = {}
        if configuration.assert_hostname is not None:
            addition_pool_args['assert_hostname'] = configuration.assert_hostname  # noqa: E501
        if configuration.retries is not None:
            addition_pool_args['retries'] = configuration.retries
        if maxsize is None:
            if configuration.connection_pool_maxsize is not None:
                maxsize = configuration.connection_pool_maxsize
            else:
                maxsize = 4
        # tcp_keep_alive cannot be put in additional_pool_args without errors in the base pool manager
        # Choose among the four manager variants: (proxy?) x (keep-alive?).
        if configuration.proxy:
            if configuration.tcp_keep_alive:
                self.pool_manager = TCPKeepAliveProxyManager(
                    num_pools=pools_size,
                    maxsize=maxsize,
                    cert_reqs=cert_reqs,
                    ca_certs=ca_certs,
                    cert_file=configuration.cert_file,
                    key_file=configuration.key_file,
                    proxy_url=configuration.proxy,
                    proxy_headers=configuration.proxy_headers,
                    **addition_pool_args
                )
            else:
                self.pool_manager = urllib3.ProxyManager(
                    num_pools=pools_size,
                    maxsize=maxsize,
                    cert_reqs=cert_reqs,
                    ca_certs=ca_certs,
                    cert_file=configuration.cert_file,
                    key_file=configuration.key_file,
                    proxy_url=configuration.proxy,
                    proxy_headers=configuration.proxy_headers,
                    **addition_pool_args
                )
        else:
            if configuration.tcp_keep_alive:
                self.pool_manager = TCPKeepAlivePoolManager(
                    num_pools=pools_size,
                    maxsize=maxsize,
                    cert_reqs=cert_reqs,
                    ca_certs=ca_certs,
                    cert_file=configuration.cert_file,
                    key_file=configuration.key_file,
                    **addition_pool_args
                )
            else:
                self.pool_manager = urllib3.PoolManager(
                    num_pools=pools_size,
                    maxsize=maxsize,
                    cert_reqs=cert_reqs,
                    ca_certs=ca_certs,
                    cert_file=configuration.cert_file,
                    key_file=configuration.key_file,
                    **addition_pool_args
                )
    def request(self, method, url, query_params=None, headers=None,
                body=None, post_params=None, _preload_content=True,
                _request_timeout=None):
        """Perform requests.
        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        # NOTE(review): assert is stripped under `python -O`; this guard is
        # best-effort validation only.
        assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                          'PATCH', 'OPTIONS']
        # `body` and `post_params` are mutually exclusive encodings.
        if post_params and body:
            raise ApiValueError(
                "body parameter cannot be used with post_params parameter."
            )
        post_params = post_params or {}
        headers = headers or {}
        timeout = None
        if _request_timeout:
            if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)):  # noqa: E501,F821
                timeout = urllib3.Timeout(total=_request_timeout)
            elif (isinstance(_request_timeout, tuple) and
                  len(_request_timeout) == 2):
                # (connect, read) pair.
                timeout = urllib3.Timeout(
                    connect=_request_timeout[0], read=_request_timeout[1])
        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'
        try:
            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
            if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                if query_params:
                    url += '?' + urlencode(query_params)
                # Dispatch on Content-Type: JSON, form-encoded, multipart,
                # or a pre-serialized string/bytes body.
                if re.search('json', headers['Content-Type'], re.IGNORECASE):
                    request_body = None
                    if body is not None:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=False,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must del headers['Content-Type'], or the correct
                    # Content-Type which generated by urllib3 will be
                    # overwritten.
                    del headers['Content-Type']
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=True,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                # Pass a `string` parameter directly in the body to support
                # other content types than Json when `body` argument is
                # provided in serialized form
                elif isinstance(body, str) or isinstance(body, bytes):
                    request_body = body
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided
                             arguments. Please check that your arguments match
                             declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method, url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except urllib3.exceptions.SSLError as e:
            # Re-raise SSL failures in the API's own exception type.
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)
        if _preload_content:
            r = RESTResponse(r)
            # log response body
            logger.debug("response body: %s", r.data)
        # Any non-2xx status is surfaced as an ApiException.
        if not 200 <= r.status <= 299:
            raise ApiException(http_resp=r)
        return r
    # The per-verb helpers below are thin wrappers delegating to request().
    def GET(self, url, headers=None, query_params=None, _preload_content=True,
            _request_timeout=None):
        return self.request("GET", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)
    def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
             _request_timeout=None):
        return self.request("HEAD", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)
    def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
                body=None, _preload_content=True, _request_timeout=None):
        return self.request("OPTIONS", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def DELETE(self, url, headers=None, query_params=None, body=None,
               _preload_content=True, _request_timeout=None):
        return self.request("DELETE", url,
                            headers=headers,
                            query_params=query_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def POST(self, url, headers=None, query_params=None, post_params=None,
             body=None, _preload_content=True, _request_timeout=None):
        return self.request("POST", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def PUT(self, url, headers=None, query_params=None, post_params=None,
            body=None, _preload_content=True, _request_timeout=None):
        return self.request("PUT", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def PATCH(self, url, headers=None, query_params=None, post_params=None,
              body=None, _preload_content=True, _request_timeout=None):
        return self.request("PATCH", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
|
PypiClean
|
/tyba_cvxpy-1.4.4-cp311-cp311-macosx_10_9_universal2.whl/cvxpy/atoms/elementwise/huber.py
|
from typing import Tuple
import numpy as np
import scipy.special
from cvxpy.atoms.elementwise.elementwise import Elementwise
# TODO(akshayka): DGP support.
class huber(Elementwise):
    """Elementwise Huber penalty.

    .. math::
        \\operatorname{Huber}(x, M) =
            \\begin{cases}
              2M|x|-M^2 & \\text{for } |x| \\geq |M| \\\\
              |x|^2 & \\text{for } |x| \\leq |M|.
            \\end{cases}

    Quadratic for small residuals and linear for large ones, with the
    crossover at :math:`M` (default 1).

    Parameters
    ----------
    x : Expression
        The expression to which the huber function will be applied.
    M : Constant
        A scalar constant.
    """
    def __init__(self, x, M: int = 1) -> None:
        self.M = self.cast_to_const(M)
        super(huber, self).__init__(x)
    @Elementwise.numpy_numeric
    def numeric(self, values) -> float:
        """Apply the Huber function elementwise to the argument.
        """
        # scipy's huber(M, r) is M|r| - M^2/2 (resp. r^2/2); doubling it
        # recovers the convention documented on the class.
        return 2 * scipy.special.huber(self.M.value, values[0])
    def sign_from_args(self) -> Tuple[bool, bool]:
        """Sign of the expression as (is positive, is negative).
        """
        # The Huber penalty is nonnegative everywhere.
        return (True, False)
    def is_atom_convex(self) -> bool:
        """The Huber atom is convex.
        """
        return True
    def is_atom_concave(self) -> bool:
        """The Huber atom is not concave.
        """
        return False
    def is_incr(self, idx) -> bool:
        """Non-decreasing in argument idx wherever that argument is nonnegative.
        """
        return self.args[idx].is_nonneg()
    def is_decr(self, idx) -> bool:
        """Non-increasing in argument idx wherever that argument is nonpositive.
        """
        return self.args[idx].is_nonpos()
    def is_quadratic(self) -> bool:
        """Quadratic exactly when the argument is affine.
        """
        return self.args[0].is_affine()
    def has_quadratic_term(self) -> bool:
        """A quadratic term is always generated.
        """
        return True
    def get_data(self):
        """Return [M], the atom's single parameter.
        """
        return [self.M]
    def validate_arguments(self) -> None:
        """Check that M is a non-negative scalar constant.
        """
        M = self.M
        if not (M.is_nonneg() and M.is_scalar() and M.is_constant()):
            raise ValueError("M must be a non-negative scalar constant.")
        super(huber, self).validate_arguments()
    def _grad(self, values):
        """(Sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.
        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        n_rows = self.args[0].size
        n_cols = self.size
        arg_val = values[0]
        # Slope is 2x in the quadratic region and saturates at +/- 2M.
        clipped = np.minimum(np.abs(arg_val), self.M.value)
        grad_entries = 2 * np.multiply(np.sign(arg_val), clipped)
        return [huber.elemwise_grad_to_diag(grad_entries, n_rows, n_cols)]
|
PypiClean
|
/django-oscar-datacash-0.8.3.tar.gz/django-oscar-datacash-0.8.3/datacash/migrations/0003_auto__del_unique_fraudresponse_t3m_id.py
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Relax FraudResponse.t3m_id: drop its unique constraint, keep a plain index."""
    def forwards(self, orm):
        # Removing unique constraint on 'FraudResponse', fields ['t3m_id']
        db.delete_unique('datacash_fraudresponse', ['t3m_id'])
        # Adding index on 'FraudResponse', fields ['t3m_id']
        db.create_index('datacash_fraudresponse', ['t3m_id'])
    def backwards(self, orm):
        # Inverse of forwards(): restore uniqueness, drop the plain index.
        # Removing index on 'FraudResponse', fields ['t3m_id']
        db.delete_index('datacash_fraudresponse', ['t3m_id'])
        # Adding unique constraint on 'FraudResponse', fields ['t3m_id']
        db.create_unique('datacash_fraudresponse', ['t3m_id'])
    # Frozen ORM snapshot used by South to rebuild model state at migration
    # time; generated — do not edit by hand.
    models = {
        'datacash.fraudresponse': {
            'Meta': {'ordering': "('-date_created',)", 'object_name': 'FraudResponse'},
            'aggregator_identifier': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'merchant_identifier': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
            'merchant_order_ref': ('django.db.models.fields.CharField', [], {'max_length': '250', 'db_index': 'True'}),
            'message_digest': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'raw_response': ('django.db.models.fields.TextField', [], {}),
            'recommendation': ('django.db.models.fields.IntegerField', [], {}),
            'score': ('django.db.models.fields.IntegerField', [], {}),
            't3m_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
        },
        'datacash.ordertransaction': {
            'Meta': {'ordering': "('-date_created',)", 'object_name': 'OrderTransaction'},
            'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'auth_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'datacash_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'merchant_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'method': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
            'order_number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'reason': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'request_xml': ('django.db.models.fields.TextField', [], {}),
            'response_xml': ('django.db.models.fields.TextField', [], {}),
            'status': ('django.db.models.fields.PositiveIntegerField', [], {})
        }
    }
    complete_apps = ['datacash']
|
PypiClean
|
/Muntjac-1.1.2.tar.gz/Muntjac-1.1.2/muntjac/ui/form.py
|
from warnings import warn
from muntjac.data.item import IEditor, IItem
from muntjac.data.buffered import IBuffered, SourceException
from muntjac.data.validatable import IValidatable
from muntjac.data.validator import InvalidValueException
from muntjac.data.property import IValueChangeListener
from muntjac.event.action_manager import ActionManager
from muntjac.ui.abstract_field import AbstractField
from muntjac.ui.default_field_factory import DefaultFieldFactory
from muntjac.ui.abstract_component import AbstractComponent
from muntjac.ui.field import IField
from muntjac.ui.custom_layout import CustomLayout
from muntjac.ui.component_container import IComponentContainer
from muntjac.ui.grid_layout import GridLayout
from muntjac.ui.field_factory import IFieldFactory
from muntjac.ui.form_layout import FormLayout
from muntjac.ui.horizontal_layout import HorizontalLayout
from muntjac.ui.select import Select
from muntjac.event.action import INotifier
from muntjac.terminal.composite_error_message import CompositeErrorMessage
class Form(AbstractField, IEditor, IBuffered, IItem, IValidatable, INotifier):
"""Form component provides easy way of creating and managing sets fields.
C{Form} is a container for fields implementing L{IField}
interface. It provides support for any layouts and provides buffering
interface for easy connection of commit and discard buttons. All the form
fields can be customized by adding validators, setting captions and icons,
setting immediateness, etc. Also direct mechanism for replacing existing
fields with selections is given.
C{Form} provides customizable editor for classes implementing
L{IItem} interface. Also the form itself implements
this interface for easier connectivity to other items. To use the form as
editor for an item, just connect the item to form with
L{Form.setItemDataSource}. If only a part of the item needs to
be edited, L{Form.setItemDataSource} can be used
instead. After the item has been connected to the form, the automatically
created fields can be customized and new fields can be added. If you need
to connect a class that does not implement L{IItem}
interface, most properties of any class following bean pattern, can be
accessed trough C{muntjac.data.util.BeanItem}.
@author: Vaadin Ltd.
@author: Richard Lincoln
@version: 1.1.2
"""
CLIENT_WIDGET = None #ClientWidget(VForm)
def __init__(self, formLayout=None, fieldFactory=None):
"""Constructs a new form with given L{Layout} and
L{FormFieldFactory}.
By default the form uses L{FormLayout}.
@param formLayout:
the layout of the form.
@param fieldFactory:
the IFieldFactory of the form.
"""
self._propertyValue = None
#: Layout of the form.
self._layout = None
#: IItem connected to this form as datasource.
self._itemDatasource = None
#: Ordered list of property ids in this editor.
self._propertyIds = list()
#: Current buffered source exception.
self._currentBufferedSourceException = None
#: Is the form in write trough mode.
self._writeThrough = True
#: Is the form in read trough mode.
self._readThrough = True
#: Mapping from propertyName to corresponding field.
self._fields = dict()
#: Form may act as an IItem, its own properties are stored here.
self._ownProperties = dict()
#: IField factory for this form.
self._fieldFactory = None
#: Visible item properties.
self._visibleItemProperties = None
#: Form needs to repaint itself if child fields value changes due
# possible change in form validity.
#
# TODO introduce ValidityChangeEvent (#6239) and start using it instead.
# See e.g. DateField#notifyFormOfValidityChange().
self._fieldValueChangeListener = FieldValueChangeListener(self)
self._formFooter = None
#: If this is true, commit implicitly calls setValidationVisible(true).
self._validationVisibleOnCommit = True
# special handling for gridlayout; remember initial cursor pos
self._gridlayoutCursorX = -1
self._gridlayoutCursorY = -1
#: Keeps track of the Actions added to this component, and manages the
# painting and handling as well. Note that the extended AbstractField
# is a L{ShortcutNotifier} and has a actionManager that delegates
# actions to the containing window. This one does not delegate.
self._ownActionManager = ActionManager(self)
if fieldFactory is None:
fieldFactory = DefaultFieldFactory.get()
super(Form, self).__init__()
self.setLayout(formLayout)
self.setFormFieldFactory(fieldFactory)
self.setValidationVisible(False)
self.setWidth(100, self.UNITS_PERCENTAGE)
def paintContent(self, target):
super(Form, self).paintContent(target)
self._layout.paint(target)
if self._formFooter is not None:
self._formFooter.paint(target)
if self._ownActionManager is not None:
self._ownActionManager.paintActions(None, target)
def changeVariables(self, source, variables):
super(Form, self).changeVariables(source, variables)
# Actions
if self._ownActionManager is not None:
self._ownActionManager.handleActions(variables, self)
def getErrorMessage(self):
    """The error message of a Form is the error of the first field with
    a non-empty error.

    Empty error messages of the contained fields are skipped, because an
    empty error indicator would be confusing to the user, especially if
    there are errors that have something to display. This is also the
    reason why the calculation of the error message is separate from
    validation, because validation fails also on empty errors.

    @return: the combined error message, or C{None} when there is no
            component error, no field validation error and no buffered
            source exception.
    """
    # Reimplement the checking of validation error by using
    # getErrorMessage() recursively instead of validate().
    validationError = None
    if self.isValidationVisible():
        for i in self._propertyIds:
            f = self._fields.get(i)
            if isinstance(f, AbstractComponent):
                validationError = f.getErrorMessage()
                if validationError is not None:
                    # Show caption as error for fields with empty errors
                    if '' == str(validationError):
                        validationError = InvalidValueException(
                                f.getCaption())
                    break
            elif isinstance(f, IField) and not f.isValid():
                # Something is wrong with the field, but no proper
                # error is given. Generate one.
                # NOTE: fixed to use the current field ``f``; the old
                # code referenced ``field``, a stale (possibly unbound)
                # variable from an earlier loop iteration.
                validationError = InvalidValueException(f.getCaption())
                break
    # Return if there are no errors at all
    if (self.getComponentError() is None and validationError is None
            and self._currentBufferedSourceException is None):
        return None
    # Throw combination of the error types
    return CompositeErrorMessage([self.getComponentError(),
            validationError, self._currentBufferedSourceException])
def setValidationVisibleOnCommit(self, makeVisible):
    """Controls making validation visible implicitly on commit.

    Having commit() call setValidationVisible(True) implicitly is the
    default behaviour: you usually want to start with the form free of
    errors and only display them after the user clicks Ok.

    @param makeVisible:
                If true (default), validation is made visible when
                commit() is called. If false, the visibility is left
                as it is.
    """
    self._validationVisibleOnCommit = makeVisible
def isValidationVisibleOnCommit(self):
    """Is validation made automatically visible on commit?

    See L{setValidationVisibleOnCommit}.

    @return: true if validation is made automatically visible on commit.
    """
    return self._validationVisibleOnCommit
def commit(self):
    """Commit changes from all non-readonly fields to the data source.

    If invalid values are not allowed and the form is invalid,
    validation is (optionally) made visible and the first validation
    error is raised. Per-field commit failures are collected and
    re-raised as one combined L{SourceException}.

    @raise SourceException: if committing one or more fields failed.
    """
    problems = None
    # Only commit on valid state if so requested
    if not self.isInvalidCommitted() and not self.isValid():
        # The values are not ok and we are told not to commit invalid
        # values
        if self._validationVisibleOnCommit:
            self.setValidationVisible(True)
        # Find the first invalid value and throw the exception
        self.validate()
    # Try to commit all
    for i in self._propertyIds:
        try:
            f = self._fields.get(i)
            # Commit only non-readonly fields.
            if not f.isReadOnly():
                f.commit()
        except SourceException as e:
            # NOTE: fixed the Python-2-only ``except X, e`` syntax
            # (a SyntaxError on Python 3; ``as`` works on 2.6+ too).
            if problems is None:
                problems = list()
            problems.append(e)
    # No problems occurred
    if problems is None:
        if self._currentBufferedSourceException is not None:
            self._currentBufferedSourceException = None
            self.requestRepaint()
        return
    # Commit problems: combine all causes into a single exception
    # (replaces the former manual index-copy loop).
    e = SourceException(self, list(problems))
    self._currentBufferedSourceException = e
    self.requestRepaint()
    raise e
def discard(self):
    """Discard local changes in all fields, refreshing their values
    from the data source.

    Per-field discard failures are collected and re-raised as one
    combined L{SourceException}.

    @raise SourceException: if discarding one or more fields failed.
    """
    problems = None
    # Try to discard all changes
    for i in self._propertyIds:
        try:
            self._fields.get(i).discard()
        except SourceException as e:
            # NOTE: fixed the Python-2-only ``except X, e`` syntax
            # (a SyntaxError on Python 3; ``as`` works on 2.6+ too).
            if problems is None:
                problems = list()
            problems.append(e)
    # No problems occurred
    if problems is None:
        if self._currentBufferedSourceException is not None:
            self._currentBufferedSourceException = None
            self.requestRepaint()
        return
    # Discard problems occurred: combine all causes into a single
    # exception (replaces the former manual index-copy loop).
    e = SourceException(self, list(problems))
    self._currentBufferedSourceException = e
    self.requestRepaint()
    raise e
def isModified(self):
    """Return C{True} when any registered field has uncommitted
    (modified) changes."""
    return any(f is not None and f.isModified()
               for f in (self._fields.get(i) for i in self._propertyIds))
def isReadThrough(self):
    """Is the editor in read-through mode?"""
    return self._readThrough
def isWriteThrough(self):
    """Is the editor in write-through mode?"""
    return self._writeThrough
def setReadThrough(self, readThrough):
    """Set the editor's read-through mode and propagate the new mode
    to every registered field (no-op when unchanged)."""
    if readThrough == self._readThrough:
        return
    self._readThrough = readThrough
    for propertyId in self._propertyIds:
        self._fields.get(propertyId).setReadThrough(readThrough)
def setWriteThrough(self, writeThrough):
    """Set the editor's write-through mode and propagate the new mode
    to every registered field (no-op when unchanged)."""
    if writeThrough == self._writeThrough:
        return
    self._writeThrough = writeThrough
    for propertyId in self._propertyIds:
        self._fields.get(propertyId).setWriteThrough(writeThrough)
def addItemProperty(self, idd, prop):
    """Adds a new property to the form and creates the corresponding
    field.

    @param idd: the property id; must not already be in the form.
    @param prop: the property to add.
    @return: C{True} if the property was added; C{False} when the id
            was already reserved or no field could be created.
    @raise ValueError: if either argument is C{None}.
    @see: L{IItem.addItemProperty}
    """
    # Checks inputs
    if (idd is None) or (prop is None):
        raise ValueError('Id and property must be non-null')
    # Checks that the property id is not reserved.
    # NOTE: fixed the former Java-style ``contains``/``add`` calls,
    # which do not exist on Python lists (``_propertyIds`` is a list,
    # see registerField's use of ``append``/``in``).
    if idd in self._propertyIds:
        return False
    self._propertyIds.append(idd)
    self._ownProperties[idd] = prop
    # Gets suitable field
    field = self._fieldFactory.createField(self, idd, self)
    if field is None:
        return False
    # Configures the field
    field.setPropertyDataSource(prop)
    # Register and attach the created field
    self.addField(idd, field)
    return True
def addField(self, propertyId, field):
    """Register the field with the form and add it to the form layout.

    The property id must not already be used in the form. The field is
    placed into the layout via L{attachField}.

    @param propertyId: the Property id of the field.
    @param field: the field which should be added to the form.
    """
    self.registerField(propertyId, field)
    self.attachField(propertyId, field)
    self.requestRepaint()
def registerField(self, propertyId, field):
    """Register the field with the form.

    All registered fields are validated when the form is validated and
    committed when the form is committed. The property id must not be
    already used in the form.

    @param propertyId: the Property id of the field.
    @param field: the IField that should be registered.
    """
    if propertyId is None or field is None:
        return
    self._fields[propertyId] = field
    field.addListener(self._fieldValueChangeListener,
            IValueChangeListener)
    if propertyId not in self._propertyIds:
        # adding a field directly
        self._propertyIds.append(propertyId)
    # Keep the new field's buffering modes in sync with the form.
    # Should this also include invalidCommitted (#3993)?
    field.setReadThrough(self._readThrough)
    field.setWriteThrough(self._writeThrough)
    # An immediate form makes its (component) fields immediate too.
    if self.isImmediate() and isinstance(field, AbstractComponent):
        field.setImmediate(True)
def attachField(self, propertyId, field):
    """Add the field to the form layout.

    The field is added in the layout's default position; for a
    L{CustomLayout} it is added to the location named by the string
    form of the property id. Override to control field placement.
    """
    if propertyId is None or field is None:
        return
    layout = self._layout
    if isinstance(layout, CustomLayout):
        layout.addComponent(field, str(propertyId))
    else:
        layout.addComponent(field)
def getItemProperty(self, idd):
    """The property identified by the property id.

    Returns the property data source of the field registered for the
    id; a field without a data source is returned as-is; with no field
    at all, the form's own property (if any) is returned.

    @see: L{IItem.getItemProperty}
    """
    field = self._fields.get(idd)
    if field is None:
        # No field (yet) created for this property id.
        return self._ownProperties.get(idd)
    prop = field.getPropertyDataSource()
    return prop if prop is not None else field
def getField(self, propertyId):
    """Return the field registered for *propertyId*, or C{None}.

    @param propertyId: the id of the property.
    """
    return self._fields.get(propertyId)
def getItemPropertyIds(self):
    """Return a defensive copy of the form's property ids."""
    return list(self._propertyIds)
def removeItemProperty(self, idd):
    """Remove the property and its corresponding field from the form.

    @return: C{True} when a field existed for the id and was removed.
    @see: L{IItem.removeItemProperty}
    """
    # Drop the form's own property for this id, if any.
    self._ownProperties.pop(idd, None)
    field = self._fields.get(idd)
    if field is None:
        return False
    self._propertyIds.remove(idd)
    del self._fields[idd]
    self.detachField(field)
    field.removeListener(self._fieldValueChangeListener,
            IValueChangeListener)
    return True
def detachField(self, field):
    """Called when a form field is detached from a Form, typically when
    a new IItem is assigned via L{setItemDataSource}.

    Override this method to control how fields are removed from the
    layout.

    @param field: the field to be detached from the form's layout.
    """
    parent = field.getParent()
    if isinstance(parent, IComponentContainer):
        parent.removeComponent(field)
def removeAllProperties(self):
    """Remove all properties and fields from the form.

    @return: C{True} if (and only if) every removal succeeded.
    """
    success = True
    # Iterate over a copy: removeItemProperty mutates _propertyIds.
    for propertyId in list(self._propertyIds):
        if not self.removeItemProperty(propertyId):
            success = False
    return success
def getItemDataSource(self):
    """Return the item datasource this form is bound to (may be None)."""
    return self._itemDatasource
def setItemDataSource(self, newDataSource, propertyIds=None):
    """Set the item datasource for the form, but limit the form contents to
    specified properties of the item.

    Setting item datasource clears any fields, the form might contain and
    adds the specified the properties as fields to the form, in the
    specified order.

    @param newDataSource: the new item to bind to; C{None} unbinds.
    @param propertyIds: ids of the item properties to show, in order;
            when C{None}, all of the item's properties are used.
    @see: L{Viewer.setItemDataSource}
    """
    if propertyIds is None:
        # One-argument form: recurse with either all of the item's
        # property ids or (for a null datasource) an empty list.
        if newDataSource is not None:
            self.setItemDataSource(newDataSource,
                    newDataSource.getItemPropertyIds())
        else:
            self.setItemDataSource(newDataSource, [])
    else:
        if isinstance(self._layout, GridLayout):
            # Special handling for GridLayout: remember the cursor
            # position of the first bind so later rebinds refill the
            # grid starting from the same cell.
            gl = self._layout
            if self._gridlayoutCursorX == -1:
                # first setItemDataSource, remember initial cursor
                self._gridlayoutCursorX = gl.getCursorX()
                self._gridlayoutCursorY = gl.getCursorY()
            else:
                # restore initial cursor
                gl.setCursorX(self._gridlayoutCursorX)
                gl.setCursorY(self._gridlayoutCursorY)
        # Removes all fields first from the form
        self.removeAllProperties()
        # Sets the datasource
        self._itemDatasource = newDataSource
        # If the new datasource is null, just set null datasource
        if self._itemDatasource is None:
            self.requestRepaint()
            return
        # Adds all the properties to this form; ids without a matching
        # item property are silently skipped, as are ids for which the
        # factory produces no field.
        for idd in propertyIds:
            prop = self._itemDatasource.getItemProperty(idd)
            if idd is not None and prop is not None:
                f = self._fieldFactory.createField(self._itemDatasource,
                        idd, self)
                if f is not None:
                    f.setPropertyDataSource(prop)
                    self.addField(idd, f)
def getLayout(self):
    """Return the layout of the form (an C{OrderedLayout} with
    C{form}-style by default)."""
    return self._layout
def setLayout(self, newLayout):
    """Sets the layout of the form.

    By default form uses C{OrderedLayout} with C{form}-style. All fields
    currently in the form are moved into the new layout.

    @param newLayout: the new Layout of the form; C{None} falls back to
            a fresh L{FormLayout}.
    """
    # Use orderedlayout by default
    if newLayout is None:
        newLayout = FormLayout()
    # reset cursor memory (used by setItemDataSource's GridLayout
    # handling)
    self._gridlayoutCursorX = -1
    self._gridlayoutCursorY = -1
    # Move fields from previous layout
    if self._layout is not None:
        properties = list(self._propertyIds)
        for i in range(len(properties)):
            f = self.getField(properties[i])
            # Detach from the old layout before re-adding to the new
            # one.
            self.detachField(f)
            if isinstance(newLayout, CustomLayout):
                # CustomLayout slots are addressed by the property-id
                # string.
                newLayout.addComponent(f, str(properties[i]))
            else:
                newLayout.addComponent(f)
        self._layout.setParent(None)
    # Replace the previous layout
    newLayout.setParent(self)
    self._layout = newLayout
def replaceWithSelect(self, propertyId, values, descriptions):
    """Replace the form field for *propertyId* with a Select offering a
    static list of choices.

    The value list must contain the current value of the field and the
    two lists must be of equal length. Null values are not supported.

    @param propertyId: the id of the property.
    @param values: the selectable values.
    @param descriptions: one caption per value.
    @return: the generated Select field.
    @raise ValueError: on null/mismatched input, when no field exists
            for the id, or when the current value is not among
            C{values}.
    """
    # Checks the parameters
    if propertyId is None or values is None or descriptions is None:
        raise ValueError('All parameters must be non-null')
    if len(values) != len(descriptions):
        raise ValueError('Value and description list are of different size')
    # Gets the old field
    oldField = self._fields.get(propertyId)
    if oldField is None:
        raise ValueError('IField with given propertyid \''
                + str(propertyId) + '\' can not be found.')
    if oldField.getPropertyDataSource() is None:
        value = oldField.getValue()
    else:
        value = oldField.getPropertyDataSource().getValue()
    # Checks that the value exists and check if the select should
    # be forced in multiselect mode
    found = any(v == value or (value is not None and value == v)
                for v in values)
    isMultiselect = False
    if value is not None and not found:
        if isinstance(value, (list, set)):
            # Collection value: every element must be a valid choice.
            for val in value:
                if not any(v == val or (val is not None and val == v)
                           for v in values):
                    # NOTE: fixed to str() the value; concatenating a
                    # non-string raised TypeError instead of the
                    # intended ValueError.
                    raise ValueError('Currently selected value \''
                            + str(val) + '\' of property \''
                            + str(propertyId) + '\' was not found')
            isMultiselect = True
        else:
            raise ValueError('Current value \'' + str(value)
                    + '\' of property \'' + str(propertyId)
                    + '\' was not found')
    # Creates the new field matching to old field parameters
    newField = Select()
    if isMultiselect:
        newField.setMultiSelect(True)
    newField.setCaption(oldField.getCaption())
    newField.setReadOnly(oldField.isReadOnly())
    newField.setReadThrough(oldField.isReadThrough())
    newField.setWriteThrough(oldField.isWriteThrough())
    # Creates the options list
    newField.addContainerProperty('desc', str, '')
    newField.setItemCaptionPropertyId('desc')
    # NOTE: fixed to pair each value with its own description; the old
    # code indexed ``descriptions`` with a stale counter left over from
    # the search loop above.
    for index, idd in enumerate(values):
        if idd is None:
            idd = newField.addItem()
            item = newField.getItem(idd)
            newField.setNullSelectionItemId(idd)
        else:
            item = newField.addItem(idd)
        if item is not None:
            item.getItemProperty('desc').setValue(str(descriptions[index]))
    # Sets the property data source
    prop = oldField.getPropertyDataSource()
    oldField.setPropertyDataSource(None)
    newField.setPropertyDataSource(prop)
    # Replaces the old field with new one
    self._layout.replaceComponent(oldField, newField)
    self._fields[propertyId] = newField
    # NOTE: fixed to pass the module-level IValueChangeListener
    # interface (as registerField does); ``prop.IValueChangeListener``
    # would raise AttributeError on ordinary properties.
    newField.addListener(self._fieldValueChangeListener,
            IValueChangeListener)
    oldField.removeListener(self._fieldValueChangeListener,
            IValueChangeListener)
    return newField
def attach(self):
    """Notifies the component that it is connected to an application.

    @see: L{IComponent.attach}
    """
    super(Form, self).attach()
    self._layout.attach()
    footer = self._formFooter
    if footer is not None:
        footer.attach()
def detach(self):
    """Notifies the component that it is detached from the application.

    @see: L{IComponent.detach}
    """
    super(Form, self).detach()
    self._layout.detach()
    footer = self._formFooter
    if footer is not None:
        footer.detach()
def isValid(self):
    """Test the current value of the object against all registered
    validators.

    @see: L{IValidatable.isValid}
    """
    # Evaluate every field eagerly (no short-circuit), matching the
    # original accumulate-then-combine behaviour.
    results = [self._fields[i].isValid() for i in self._propertyIds]
    return all(results) and super(Form, self).isValid()
def validate(self):
    """Check the validity of the validatable; the first failing field
    propagates its validation error.

    @see: L{IValidatable.validate}
    """
    super(Form, self).validate()
    for propertyId in self._propertyIds:
        self._fields[propertyId].validate()
def isInvalidAllowed(self):
    """Whether this validatable accepts invalid values — always C{True}
    for forms.

    @see: L{IValidatable.isInvalidAllowed}
    """
    return True
def setInvalidAllowed(self, invalidValueAllowed):
    """Changing whether the form accepts invalid values is unsupported.

    @raise NotImplementedError: always.
    @see: L{IValidatable.setInvalidAllowed}
    """
    raise NotImplementedError
def setReadOnly(self, readOnly):
    """Set the component's read-only mode and propagate it to every
    registered field.

    @see: L{IComponent.setReadOnly}
    """
    super(Form, self).setReadOnly(readOnly)
    for propertyId in self._propertyIds:
        self._fields[propertyId].setReadOnly(readOnly)
def setFieldFactory(self, fieldFactory):
    """Set the field factory of the Form.

    C{IFieldFactory} is used to create fields for form properties.

    @param fieldFactory: the new factory used to create the fields.
    @see: L{IField}
    @see: L{FormFieldFactory}
    @deprecated: use L{setFormFieldFactory} instead
    """
    warn('use setFormFieldFactory() instead', DeprecationWarning)
    self._fieldFactory = fieldFactory
def setFormFieldFactory(self, fieldFactory):
    """Set the field factory used by this Form to generate Fields for
    properties (L{DefaultFieldFactory} by default).

    @param fieldFactory: the new factory used to create the fields.
    @see: L{IField}
    @see: L{FormFieldFactory}
    """
    self._fieldFactory = fieldFactory
def getFormFieldFactory(self):
    """Return the FormFieldFactory used to create this form's fields."""
    return self._fieldFactory
def getFieldFactory(self):
    """Return the form's factory when it is an IFieldFactory, else
    C{None}.

    @deprecated: Use L{getFormFieldFactory} instead. Set the
            FormFieldFactory using L{setFormFieldFactory}.
    """
    warn('Use getFormFieldFactory() instead', DeprecationWarning)
    factory = self._fieldFactory
    return factory if isinstance(factory, IFieldFactory) else None
def getType(self):
    """Return the field type: the bound data source's type when one is
    set, otherwise C{object}.

    @see: L{AbstractField.getType}
    """
    dataSource = self.getPropertyDataSource()
    return object if dataSource is None else dataSource.getType()
def setInternalValue(self, newValue):
    """Set the internal value; relevant when the Form is used as an
    IField.

    @see: L{AbstractField.setInternalValue}
    """
    previous = self._propertyValue
    super(Form, self).setInternalValue(newValue)
    self._propertyValue = newValue
    # Rebind the form only if the bound data object actually changed.
    if previous != newValue:
        self.setFormDataSource(newValue, self.getVisibleItemProperties())
def getFirstFocusableField(self):
    """Gets the first focusable field in form.

    If there are enabled, non-read-only fields, the first one of them
    is returned. Otherwise, the field for the first property (or None
    if none) is returned.

    @return: the IField, or C{None} when the form has no properties.
    """
    propertyIds = self.getItemPropertyIds()
    # NOTE: also treat an *empty* id collection as "no fields"; the
    # old code only checked for None and then crashed with
    # StopIteration in the fallback below.
    if not propertyIds:
        return None
    for idd in propertyIds:
        if idd is not None:
            field = self.getField(idd)
            # Guard against a missing field for a registered id.
            if (field is not None and field.isEnabled()
                    and not field.isReadOnly()):
                return field
    # Fallback: first field if none of the fields is enabled and
    # writable. ``next`` with a default replaces the Python-2-only
    # ``iter(...).next()`` call.
    idd = next(iter(propertyIds), None)
    if idd is not None:
        return self.getField(idd)
    return None
def setFormDataSource(self, data, properties):
    """Update the internal form datasource from *data*.

    @param data: an IItem (or C{None}) to bind the form to; other
            non-None objects are rejected (BeanItem wrapping is not
            implemented in this port).
    @param properties: optional collection of property ids to show.
    """
    # If data is an item use it.
    if isinstance(data, IItem):
        item = data
    elif data is not None:
        # Wrapping arbitrary objects (BeanItem) is not supported.
        raise NotImplementedError
    else:
        item = None
    # Sets the datasource to form
    if item is not None and properties is not None:
        # Shows only given properties
        self.setItemDataSource(item, properties)
    else:
        # Shows all properties
        self.setItemDataSource(item)
def getVisibleItemProperties(self):
    """Return the collection of visible IItem properties."""
    return self._visibleItemProperties
def setVisibleItemProperties(self, visibleProperties):
    """Set the visible item properties and rebind the form data source
    accordingly.

    @param visibleProperties: the visibleProperties to set.
    """
    self._visibleItemProperties = visibleProperties
    value = self.getValue()
    if value is None:
        # Fall back to the current item datasource.
        value = self._itemDatasource
    self.setFormDataSource(value, self.getVisibleItemProperties())
def focus(self):
    """Focus the first focusable field in the form, if there is one.

    @see: L{IFocusable.focus}
    """
    field = self.getFirstFocusableField()
    if field is not None:
        field.focus()
def setTabIndex(self, tabIndex):
    """Set the Tabulator index of this Focusable component and of all
    the form's fields.

    @see: L{IFocusable.setTabIndex}
    """
    super(Form, self).setTabIndex(tabIndex)
    for idd in self.getItemPropertyIds():
        # NOTE: fixed to set the index on the *field*; the previous
        # code called setTabIndex on the property id itself, which is
        # not a component.
        self.getField(idd).setTabIndex(tabIndex)
def setImmediate(self, immediate):
    """Setting the form to be immediate also sets all the fields of the
    form to the same state."""
    super(Form, self).setImmediate(immediate)
    for field in self._fields.values():
        if isinstance(field, AbstractComponent):
            field.setImmediate(immediate)
def isEmpty(self):
    """A form is empty iff all of its (AbstractField) fields are empty."""
    for field in self._fields.values():
        if isinstance(field, AbstractField) and not field.isEmpty():
            return False
    return True
def addValidator(self, validator):
    """Adding validators directly to a form is not supported; add them
    to the form's fields instead.

    @raise NotImplementedError: always.
    """
    raise NotImplementedError
def getFooter(self):
    """Return the layout rendered below normal form contents, creating
    it lazily on first access.

    This area can be used for example to include buttons related to
    form contents.

    @return: layout rendered below normal form contents.
    """
    if self._formFooter is None:
        footer = HorizontalLayout()
        footer.setParent(self)
        self._formFooter = footer
    return self._formFooter
def setFooter(self, newFormFooter):
    """Set the layout that is rendered below normal form contents,
    detaching any previous footer first.

    @param newFormFooter: the new footer Layout.
    """
    old = self._formFooter
    if old is not None:
        old.setParent(None)
    self._formFooter = newFormFooter
    newFormFooter.setParent(self)
def setEnabled(self, enabled):
    """Enable/disable the form; the layout repaint is skipped while any
    ancestor is still disabled."""
    super(Form, self).setEnabled(enabled)
    parent = self.getParent()
    if parent is not None and not parent.isEnabled():
        # some ancestor still disabled, don't update children
        return
    self.getLayout().requestRepaintAll()
def getOwnActionManager(self):
    """Return (lazily creating) the L{ActionManager} handling Actions
    added to this Form specifically.

    Note that Form has another ActionManager inherited from
    L{AbstractField}; that one delegates to the containing Window
    (global actions), while this one handles this form's own actions.
    """
    manager = self._ownActionManager
    if manager is None:
        manager = ActionManager(self)
        self._ownActionManager = manager
    return manager
def addActionHandler(self, actionHandler):
    """Register *actionHandler* with this form's own action manager."""
    self.getOwnActionManager().addActionHandler(actionHandler)
def removeActionHandler(self, actionHandler):
    """Remove *actionHandler* from the form's own action manager, if
    one has been created."""
    manager = self._ownActionManager
    if manager is not None:
        manager.removeActionHandler(actionHandler)
def removeAllActionHandlers(self):
    """Remove all action handlers from the form's own action manager,
    if one has been created."""
    manager = self._ownActionManager
    if manager is not None:
        manager.removeAllActionHandlers()
def addAction(self, action):
    """Add *action* to this form's own action manager."""
    self.getOwnActionManager().addAction(action)
def removeAction(self, action):
    """Remove *action* from the form's own action manager, if one has
    been created."""
    manager = self._ownActionManager
    if manager is not None:
        manager.removeAction(action)
class FieldValueChangeListener(IValueChangeListener):
    """Listens to value changes on a form's fields and repaints the
    form, since a field value change may alter overall form validity.
    """

    def __init__(self, form):
        # The owning form, repainted on every field value change.
        self._form = form

    def valueChange(self, event):
        """React to a field value change by repainting the owning form."""
        self._form.requestRepaint()
|
PypiClean
|
/django_webpack_custom_loader-0.7.0-py3-none-any.whl/webpack_loader/loader.py
|
import json
import time
from io import open
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from .exceptions import (
WebpackError,
WebpackLoaderBadStatsError,
WebpackLoaderTimeoutError,
WebpackBundleLookupError
)
class WebpackLoader(object):
    """Resolves webpack bundle names to chunk lists using the stats file
    produced by webpack-bundle-tracker.
    """

    # Process-wide cache of parsed stats files, keyed by loader name.
    # NOTE: this is a class attribute, so it is shared by all instances.
    _assets = {}

    def __init__(self, name, config):
        # name: the WEBPACK_LOADER configuration key this loader serves.
        self.name = name
        # config: the configuration dict (STATS_FILE, CACHE, TIMEOUT, ...).
        self.config = config

    def load_assets(self):
        """Read and parse the stats JSON file from disk.

        Raises:
            IOError: when the stats file cannot be read.
        """
        try:
            with open(self.config['STATS_FILE'], encoding="utf-8") as f:
                return json.load(f)
        except IOError:
            raise IOError(
                'Error reading {0}. Are you sure webpack has generated '
                'the file and the path is correct?'.format(
                    self.config['STATS_FILE']))

    def get_assets(self):
        """Return the parsed stats data, using the class-level cache when
        the CACHE config flag is enabled."""
        if self.config['CACHE']:
            if self.name not in self._assets:
                self._assets[self.name] = self.load_assets()
            return self._assets[self.name]
        return self.load_assets()

    def filter_chunks(self, chunks):
        """Yield the chunks whose names match none of the configured
        ignore patterns, annotating each kept chunk with its URL."""
        for chunk in chunks:
            # NOTE(review): assumes config['ignores'] holds pre-compiled
            # regex objects — confirm the config loader populates it.
            ignore = any(regex.match(chunk['name'])
                         for regex in self.config['ignores'])
            if not ignore:
                chunk['url'] = self.get_chunk_url(chunk)
                yield chunk

    def get_chunk_url(self, chunk):
        """Return the URL for *chunk*: its webpack-provided publicPath if
        present, otherwise a staticfiles URL under BUNDLE_DIR_NAME."""
        public_path = chunk.get('publicPath')
        if public_path:
            return public_path
        relpath = '{0}{1}'.format(
            self.config['BUNDLE_DIR_NAME'], chunk['name']
        )
        return staticfiles_storage.url(relpath)

    def get_bundle(self, bundle_name):
        """Return the filtered chunks of *bundle_name*.

        With Django DEBUG on, polls the stats file until webpack finishes
        compiling or the configured TIMEOUT elapses.

        Raises:
            WebpackLoaderTimeoutError: when compilation exceeds TIMEOUT.
            WebpackBundleLookupError: when the bundle name is unknown.
            WebpackError: when webpack reported a compile error.
            WebpackLoaderBadStatsError: when the stats data is invalid.
        """
        assets = self.get_assets()
        # poll when debugging and block request until bundle is compiled
        # or the build times out
        if settings.DEBUG:
            timeout = self.config['TIMEOUT'] or 0
            timed_out = False
            start = time.time()
            while assets['status'] == 'compiling' and not timed_out:
                time.sleep(self.config['POLL_INTERVAL'])
                if timeout and (time.time() - timeout > start):
                    timed_out = True
                # Re-read the stats file to observe status changes.
                assets = self.get_assets()
            if timed_out:
                raise WebpackLoaderTimeoutError(
                    "Timed Out. Bundle `{0}` took more than {1} seconds "
                    "to compile.".format(bundle_name, timeout)
                )
        if assets.get('status') == 'done':
            chunks = assets['chunks'].get(bundle_name, None)
            if chunks is None:
                raise WebpackBundleLookupError('Cannot resolve bundle {0}.'.format(bundle_name))
            return self.filter_chunks(chunks)
        elif assets.get('status') == 'error':
            # Backfill missing error fields so the format() call below
            # cannot raise a KeyError of its own.
            if 'file' not in assets:
                assets['file'] = ''
            if 'error' not in assets:
                assets['error'] = 'Unknown Error'
            if 'message' not in assets:
                assets['message'] = ''
            error = u"""
            {error} in {file}
            {message}
            """.format(**assets)
            raise WebpackError(error)
        raise WebpackLoaderBadStatsError(
            "The stats file does not contain valid data. Make sure "
            "webpack-bundle-tracker plugin is enabled and try to run "
            "webpack again.")
|
PypiClean
|
/bcdup-2.3.2.tar.gz/bcdup-2.3.2/src/bc_acc_dup/feature_handler.py
|
from typing import Any, Union
from . import (
helper,
user_input_handler,
config_manager,
)
from .edits import basic, cats, gamototo, levels, other, save_management
def fix_elsewhere_old(save_stats: dict[str, Any]) -> dict[str, Any]:
    """Fix the elsewhere error using 2 save files.

    Prompts the user for a second, clean save file and copies its
    inquiry code and token into *save_stats* (mutated in place and
    returned).
    """
    old_token = save_stats["token"]
    old_iq = save_stats["inquiry_code"]
    input(
        "Select a save file that is currently loaded in-game that doesn't have the elsehere error and is not banned\nPress enter to continue:"
    )
    clean_path = helper.select_file(
        "Select a clean save file",
        helper.get_save_file_filetype(),
        helper.get_save_path(),
    )
    if not clean_path:
        print("Please select a save file")
        return save_stats
    clean_stats = helper.load_save_file(clean_path)["save_stats"]
    new_token = clean_stats["token"]
    new_iq = clean_stats["inquiry_code"]
    save_stats["token"] = new_token
    save_stats["inquiry_code"] = new_iq
    helper.colored_text(f"Replaced inquiry code: &{old_iq}& with &{new_iq}&")
    helper.colored_text(f"Replaced token: &{old_token}& with &{new_token}&")
    return save_stats
# Top-level menu tree of the save editor: maps category name -> feature
# name -> either a callable taking save_stats, or a nested sub-menu dict.
# Traversed by get_feature()/show_options() below.
FEATURES: dict[str, Any] = {
    "Save Management": {
        "Save Save": save_management.save.save_save,
        "Save changes and upload to game servers (get transfer and confirmation codes)": save_management.server_upload.save_and_upload,
        "Save changes to file": save_management.save.save,
        "Save changes and push save data to the game with adb (don't re-open game)": save_management.save.save_and_push,
        "Save changes and push save data to the game with adb (re-open game)": save_management.save.save_and_push_rerun,
        "Export save data as json": save_management.other.export,
        "Clear save data with adb (used to generate a new account without re-installing the game)": save_management.other.clear_data,
        "Upload tracked bannable items (This is done automatically when saving or exiting)": save_management.server_upload.upload_metadata,
        "Load save data": save_management.load.select,
        "Convert save data to to a different version": save_management.convert.convert_save,
        # "Manage Presets": preset_handler.preset_manager,
    },
    "Items": {
        "Cat Food": basic.basic_items.edit_cat_food,
        "XP": basic.basic_items.edit_xp,
        "Tickets": {
            "Normal Tickets": basic.basic_items.edit_normal_tickets,
            "Rare Tickets": basic.basic_items.edit_rare_tickets,
            "Platinum Tickets": basic.basic_items.edit_platinum_tickets,
            "Platinum Shards": basic.basic_items.edit_platinum_shards,
            "Legend Tickets": basic.basic_items.edit_legend_tickets,
        },
        "NP": basic.basic_items.edit_np,
        "Leadership": basic.basic_items.edit_leadership,
        "Battle Items": basic.basic_items.edit_battle_items,
        "Catseyes": basic.catseyes.edit_catseyes,
        "Cat Fruit / Behemoth Stones": basic.catfruit.edit_catfruit,
        "Talent Orbs": basic.talent_orbs_new.edit_talent_orbs,
        "Catamins": basic.basic_items.edit_catamins,
        "Item Schemes (Allows you to get unbannable items)": other.scheme_item.edit_scheme_data,
    },
    "Gamatoto / Ototo": {
        "Ototo Engineers": basic.basic_items.edit_engineers,
        "Base materials": basic.ototo_base_mats.edit_base_mats,
        "Catamins": basic.basic_items.edit_catamins,
        "Gamatoto XP / Level": gamototo.gamatoto_xp.edit_gamatoto_xp,
        "Ototo Cat Cannon": gamototo.ototo_cat_cannon.edit_cat_cannon,
        "Gamatoto Helpers": gamototo.helpers.edit_helpers,
        "Fix gamatoto from crashing the game": gamototo.fix_gamatoto.fix_gamatoto,
    },
    "Cats / Special Skills": {
        "Get / Remove Cats": {
            "Get Cats": cats.get_remove_cats.get_cat,
            "Remove Cats": cats.get_remove_cats.remove_cats,
        },
        "Upgrade Cats": cats.upgrade_cats.upgrade_cats,
        "True Form Cats": {
            "Get Cat True Forms": cats.evolve_cats.get_evolve,
            "Remove Cat True Forms": cats.evolve_cats.remove_evolve,
            "Force True Form Cats (will lead to blank cats for cats without a true form)": cats.evolve_cats.get_evolve_forced,
        },
        "Talents": {
            "Set talents for each selected cat individually": cats.talents.edit_talents_individual,
            "Max / Remove all selected cat talents": cats.talents.max_all_talents,
        },
        "Collect / Remove Cat Guide": {
            "Set Cat Guide Entries (does not give cf)": cats.clear_cat_guide.collect_cat_guide,
            "Unclaim Cat Guide Entries": cats.clear_cat_guide.remove_cat_guide,
        },
        'Get stage unit drops - removes the "Clear this stage to get special cat" dialog': cats.chara_drop.get_character_drops,
        "Upgrade special skills / abilities": cats.upgrade_blue.upgrade_blue,
    },
    "Levels / Treasures": {
        "Main Story Chapters Clear / Unclear": {
            "Clear each stage in every chapter for all selected chapters": levels.main_story.clear_all,
            "Clear each stage in every chapter for each selected chapter": levels.main_story.clear_each,
        },
        "Treasures": {
            "Treasure Groups (e.g energy drink, aqua crystal, etc)": levels.treasures.treasure_groups,
            "Specific stages and specific chapters individually": levels.treasures.specific_stages,
            "Specific stages and chapters all at once": levels.treasures.specific_stages_all_chapters,
        },
        "Zombie Stages / Outbreaks": levels.outbreaks.edit_outbreaks,
        "Event Stages": levels.event_stages.event_stages,
        "Stories of Legend": levels.event_stages.stories_of_legend,
        "Uncanny Legends": levels.uncanny.edit_uncanny,
        "Aku Realm/Gates Clearing": levels.aku.edit_aku,
        "Unlock the Aku Realm/Gates": levels.unlock_aku_realm.unlock_aku_realm,
        "Gauntlets": levels.gauntlet.edit_gauntlet,
        "Collab Gauntlets": levels.gauntlet.edit_collab_gauntlet,
        "Towers": levels.towers.edit_tower,
        "Behemoth Culling": levels.behemoth_culling.edit_behemoth_culling,
        "Into the Future Timed Scores": levels.itf_timed_scores.timed_scores,
        "Challenge Battle Score": basic.basic_items.edit_challenge_battle,
        "Clear Tutorial": levels.clear_tutorial.clear_tutorial,
        "Catclaw Dojo Score (Hall of Initiates)": basic.basic_items.edit_dojo_score,
        "Add Enigma Stages": levels.enigma_stages.edit_enigma_stages,
        "Allow the filibuster stage to be recleared": levels.allow_filibuster_clearing.allow_filibuster_clearing,
        "Legend Quest": levels.legend_quest.edit_legend_quest,
    },
    "Inquiry Code / Token / Account": {
        "Inquiry Code": basic.basic_items.edit_inquiry_code,
        "Token": basic.basic_items.edit_token,
        "Fix elsewhere error / Unban account": other.fix_elsewhere.fix_elsewhere,
        "Old Fix elsewhere error / Unban account (needs 2 save files)": fix_elsewhere_old,
        "Generate a new inquiry code and token": other.create_new_account.create_new_account,
    },
    "Other": {
        "Rare Gacha Seed": basic.basic_items.edit_rare_gacha_seed,
        "Unlocked Equip Slots": basic.basic_items.edit_unlocked_slots,
        "Get Restart Pack / Returner Mode": basic.basic_items.edit_restart_pack,
        "Meow Medals": other.meow_medals.medals,
        "Play Time": other.play_time.edit_play_time,
        "Unlock / Remove Enemy Guide Entries": other.unlock_enemy_guide.enemy_guide,
        "Catnip Challenges / Missions": other.missions.edit_missions,
        "Normal Ticket Max Trade Progress (allows for unbannable rare tickets)": other.trade_progress.set_trade_progress,
        "Get / Remove Gold Pass": other.get_gold_pass.get_gold_pass,
        "Claim / Remove all user rank rewards (does not give any items)": other.claim_user_rank_rewards.edit_rewards,
        "Cat Shrine Level / XP": other.cat_shrine.edit_shrine_xp,
    },
    # Some fix entries intentionally duplicate features from other
    # categories so users can find them under "Fixes" as well.
    "Fixes": {
        "Fix time errors": other.fix_time_issues.fix_time_issues,
        "Unlock the Equip Menu": other.unlock_equip_menu.unlock_equip,
        "Clear Tutorial": levels.clear_tutorial.clear_tutorial,
        "Fix elsewhere error / Unban account": other.fix_elsewhere.fix_elsewhere,
        "Old Fix elsewhere error / Unban account (needs 2 save files)": fix_elsewhere_old,
        "Fix gamatoto from crashing the game": gamototo.fix_gamatoto.fix_gamatoto,
    },
    "Edit Config": {
        "Edit LOCALIZATION": config_manager.edit_locale,
        "Edit DEFAULT_COUNTRY_CODE": config_manager.edit_default_gv,
        "Edit DEFAULT_SAVE_PATH": config_manager.edit_default_save_file_path,
        "Edit FIXED_SAVE_PATH": config_manager.edit_fixed_save_path,
        "Edit EDITOR settings": config_manager.edit_editor_settings,
        "Edit START_UP settings": config_manager.edit_start_up_settings,
        "Edit SAVE_CHANGES settings": config_manager.edit_save_changes_settings,
        "Edit SERVER settings": config_manager.edit_server_settings,
        "Edit config path": config_manager.edit_config_path,
    },
    "Exit": helper.exit_check_changes,
}
def get_feature(
    selected_features: Any, search_string: str, results: dict[str, Any]
) -> dict[str, Any]:
    """Collect every feature whose name contains ``search_string``.

    Matching is case-insensitive and ignores spaces. Nested category
    dictionaries are searched recursively; all matches are accumulated
    into ``results``, which is mutated in place and also returned. An
    empty search string therefore matches every feature.
    """
    needle = search_string.lower().replace(" ", "")
    for name, value in selected_features.items():
        # Descend into sub-categories first so nested matches are collected.
        if isinstance(value, dict):
            get_feature(value, search_string, results)
        # A category name itself can also match, storing the whole sub-dict.
        if needle in name.lower().replace(" ", ""):
            results[name] = value
    return results
def show_options(
    save_stats: dict[str, Any], features_to_use: dict[str, Any]
) -> dict[str, Any]:
    """Allow the user to either enter a feature number or a feature name, and get the features that match

    Prompts the user, then either runs the single matching feature on
    ``save_stats`` or recurses with the narrowed-down feature dict.
    Returns the (possibly modified) save_stats.
    """
    # Special case: with categories hidden at the top level, skip the prompt
    # and fall through with an empty search (which matches every feature).
    if (
        not config_manager.get_config_value_category("EDITOR", "SHOW_CATEGORIES")
        and FEATURES == features_to_use
    ):
        user_input = ""
    else:
        prompt = (
            "What do you want to edit (some options contain other features within them)"
        )
        if config_manager.get_config_value_category(
            "EDITOR", "SHOW_FEATURE_SELECT_EXPLANATION"
        ):
            prompt += "\nYou can enter a number to run a feature or a word to search for that feature (e.g entering catfood will run the Cat Food feature, and entering tickets will show you all the features that edit tickets)\nYou can press enter to see a list of all of the features"
        user_input = user_input_handler.colored_input(f"{prompt}:\n")
    # None means the input was not an integer -> treat it as a search term.
    user_int = helper.check_int(user_input)
    results = []
    if user_int is None:
        results = get_feature(features_to_use, user_input, {})
    else:
        # +1 accounts for the extra "Go Back" entry shown in sub-menus.
        if user_int < 1 or user_int > len(features_to_use) + 1:
            helper.colored_text("Value out of range", helper.RED)
            return show_options(save_stats, features_to_use)
        if FEATURES != features_to_use:
            # In a sub-menu option 1 is "Go Back", so feature indices are
            # shifted by 2; entering 1 returns to the main menu.
            if user_int - 2 < 0:
                return menu(save_stats)
            results = features_to_use[list(features_to_use)[user_int - 2]]
        else:
            # Top level has no "Go Back" entry, so only shift by 1.
            results = features_to_use[list(features_to_use)[user_int - 1]]
    # A non-dict result is a feature callable: run it on the save data.
    if not isinstance(results, dict):
        save_stats_return = results(save_stats)
        # Features that return None leave save_stats unchanged.
        if save_stats_return is None:
            return save_stats
        return save_stats_return
    if len(results) == 0:
        helper.colored_text("No feature found with that name.", helper.RED)
        return menu(save_stats)
    # A single matching category: unwrap it before deciding what to do.
    if len(results) == 1 and isinstance(list(results.values())[0], dict):
        results = results[list(results)[0]]
    # Exactly one matching feature: run it directly without re-prompting.
    if len(results) == 1:
        save_stats_return = results[list(results)[0]](save_stats)
        if save_stats_return is None:
            return save_stats
        return save_stats_return
    # Multiple matches: list them (with "Go Back" first) and recurse.
    helper.colored_list(["Go Back"] + list(results))
    return show_options(save_stats, results)
def menu(
    save_stats: dict[str, Any], path_save: Union[str, None] = None
) -> dict[str, Any]:
    """Show the top-level feature menu and let the user pick a feature.

    If ``path_save`` is given, it is registered as the current save path
    before anything else. When the SHOW_CATEGORIES editor setting is on,
    the top-level feature groups are listed before prompting. Returns the
    (possibly modified) save_stats from the selected feature.
    """
    if path_save:
        helper.set_save_path(path_save)
    show_categories = config_manager.get_config_value_category(
        "EDITOR", "SHOW_CATEGORIES"
    )
    if show_categories:
        helper.colored_list(list(FEATURES))
    return show_options(save_stats, FEATURES)
|
PypiClean
|
/volatility261-2.6.2.tar.gz/volatility261-2.6.2/volatility/plugins/overlays/windows/win7_sp1_x64_632B36E0_vtypes.py
|
ntkrnlmp_types = {
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
'_KBUGCHECK_ACTIVE_STATE' : [ 0x4, {
'BugCheckState' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'RecursionCount' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'BugCheckOwner' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['long']],
} ],
'_PF_KERNEL_GLOBALS' : [ 0x60, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0x10, ['_KEVENT']],
'AccessBufferMax' : [ 0x28, ['unsigned long']],
'AccessBufferList' : [ 0x40, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x50, ['long']],
'Flags' : [ 0x54, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x58, ['long']],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
'_POP_SYSTEM_IDLE' : [ 0x38, {
'AverageIdleness' : [ 0x0, ['long']],
'LowestIdleness' : [ 0x4, ['long']],
'Time' : [ 0x8, ['unsigned long']],
'Timeout' : [ 0xc, ['unsigned long']],
'LastUserInput' : [ 0x10, ['unsigned long']],
'Action' : [ 0x14, ['POWER_ACTION_POLICY']],
'MinState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SystemRequired' : [ 0x24, ['unsigned char']],
'IdleWorker' : [ 0x25, ['unsigned char']],
'Sampling' : [ 0x26, ['unsigned char']],
'LastTick' : [ 0x28, ['unsigned long long']],
'LastSystemRequiredTime' : [ 0x30, ['unsigned long']],
} ],
'_VF_TARGET_ALL_SHARED_EXPORT_THUNKS' : [ 0x18, {
'SharedExportThunks' : [ 0x0, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'PoolSharedExportThunks' : [ 0x8, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'OrderDependentSharedExportThunks' : [ 0x10, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x28, {
'SourceProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'SourceHandle' : [ 0x8, ['pointer64', ['void']]],
'Object' : [ 0x10, ['pointer64', ['void']]],
'TargetAccess' : [ 0x18, ['unsigned long']],
'ObjectInfo' : [ 0x1c, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x20, ['unsigned long']],
} ],
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EFI_FIRMWARE_INFORMATION' : [ 0x18, {
'FirmwareVersion' : [ 0x0, ['unsigned long']],
'VirtualEfiRuntimeServices' : [ 0x8, ['pointer64', ['_VIRTUAL_EFI_RUNTIME_SERVICES']]],
'SetVirtualAddressMapStatus' : [ 0x10, ['long']],
'MissedMappingsCount' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_202c' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_202e' : [ 0x10, {
'Level' : [ 0x0, ['unsigned short']],
'Group' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2030' : [ 0x10, {
'Group' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2032' : [ 0x10, {
'Raw' : [ 0x0, ['__unnamed_2030']],
'Translated' : [ 0x0, ['__unnamed_202e']],
} ],
'__unnamed_2034' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2036' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2038' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_203a' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_203c' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_203e' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2040' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_202c']],
'Port' : [ 0x0, ['__unnamed_202c']],
'Interrupt' : [ 0x0, ['__unnamed_202e']],
'MessageInterrupt' : [ 0x0, ['__unnamed_2032']],
'Memory' : [ 0x0, ['__unnamed_202c']],
'Dma' : [ 0x0, ['__unnamed_2034']],
'DevicePrivate' : [ 0x0, ['__unnamed_1eff']],
'BusNumber' : [ 0x0, ['__unnamed_2036']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_2038']],
'Memory40' : [ 0x0, ['__unnamed_203a']],
'Memory48' : [ 0x0, ['__unnamed_203c']],
'Memory64' : [ 0x0, ['__unnamed_203e']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_2040']],
} ],
'__unnamed_2045' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_2045']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_204f' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_204f']],
} ],
'_CONFIGURATION_COMPONENT_DATA' : [ 0x48, {
'Parent' : [ 0x0, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Child' : [ 0x8, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Sibling' : [ 0x10, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ComponentEntry' : [ 0x18, ['_CONFIGURATION_COMPONENT']],
'ConfigurationData' : [ 0x40, ['pointer64', ['void']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2059' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'_MMSUBSECTION_NODE' : [ 0x28, {
'u' : [ 0x0, ['__unnamed_1fb7']],
'StartingSector' : [ 0x4, ['unsigned long']],
'NumberOfFullSectors' : [ 0x8, ['unsigned long']],
'u1' : [ 0x10, ['__unnamed_2059']],
'LeftChild' : [ 0x18, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x20, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'_VF_AVL_TREE_NODE' : [ 0x10, {
'p' : [ 0x0, ['pointer64', ['void']]],
'RangeSize' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2061' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2063' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_2061']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x58, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'BusyReference' : [ 0x8, ['unsigned long']],
'TotalBusyCount' : [ 0xc, ['unsigned long']],
'ConservationIdleTime' : [ 0x10, ['unsigned long']],
'PerformanceIdleTime' : [ 0x14, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x20, ['_LIST_ENTRY']],
'IdleType' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceIdleNormal', 1: 'DeviceIdleDisk'})]],
'IdleState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'Volume' : [ 0x40, ['_LIST_ENTRY']],
'Specific' : [ 0x50, ['__unnamed_2063']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KENLISTMENT' : [ 0x1e0, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x8, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x30, ['_GUID']],
'Mutex' : [ 0x40, ['_KMUTANT']],
'NextSameTx' : [ 0x78, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x88, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x98, ['pointer64', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0xa0, ['pointer64', ['_KTRANSACTION']]],
'State' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0xac, ['unsigned long']],
'NotificationMask' : [ 0xb0, ['unsigned long']],
'Key' : [ 0xb8, ['pointer64', ['void']]],
'KeyRefCount' : [ 0xc0, ['unsigned long']],
'RecoveryInformation' : [ 0xc8, ['pointer64', ['void']]],
'RecoveryInformationLength' : [ 0xd0, ['unsigned long']],
'DynamicNameInformation' : [ 0xd8, ['pointer64', ['void']]],
'DynamicNameInformationLength' : [ 0xe0, ['unsigned long']],
'FinalNotification' : [ 0xe8, ['pointer64', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0xf8, ['pointer64', ['void']]],
'SubordinateTxHandle' : [ 0x100, ['pointer64', ['void']]],
'CrmEnlistmentEnId' : [ 0x108, ['_GUID']],
'CrmEnlistmentTmId' : [ 0x118, ['_GUID']],
'CrmEnlistmentRmId' : [ 0x128, ['_GUID']],
'NextHistory' : [ 0x138, ['unsigned long']],
'History' : [ 0x13c, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x28, ['unsigned char']],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_IA64_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x300, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x80, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x88, ['unsigned long']],
'LastCallbackId' : [ 0x8c, ['unsigned long']],
'PostCount' : [ 0x100, ['unsigned long']],
'ReturnCount' : [ 0x180, ['unsigned long']],
'LogSequenceNumber' : [ 0x200, ['unsigned long']],
'UserLock' : [ 0x280, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x288, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_ETW_WMITRACE_WORK' : [ 0xf0, {
'LoggerId' : [ 0x0, ['unsigned long']],
'LoggerName' : [ 0x8, ['array', 65, ['unsigned char']]],
'FileName' : [ 0x49, ['array', 129, ['unsigned char']]],
'MaximumFileSize' : [ 0xcc, ['unsigned long']],
'MinBuffers' : [ 0xd0, ['unsigned long']],
'MaxBuffers' : [ 0xd4, ['unsigned long']],
'BufferSize' : [ 0xd8, ['unsigned long']],
'Mode' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'MatchAny' : [ 0x8, ['unsigned long long']],
'MatchAll' : [ 0x10, ['unsigned long long']],
'EnableProperty' : [ 0x18, ['unsigned long']],
'Guid' : [ 0x1c, ['_GUID']],
'Level' : [ 0x2c, ['unsigned char']],
'Status' : [ 0xe8, ['long']],
} ],
'_DEVICE_MAP' : [ 0x40, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'DosDevicesDirectoryHandle' : [ 0x10, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'DriveMap' : [ 0x1c, ['unsigned long']],
'DriveType' : [ 0x20, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_DEBUGGING_INFORMATION' : [ 0x30, {
'InterceptorFunction' : [ 0x0, ['pointer64', ['void']]],
'InterceptorValue' : [ 0x8, ['unsigned short']],
'ExtendedOptions' : [ 0xc, ['unsigned long']],
'StackTraceDepth' : [ 0x10, ['unsigned long']],
'MinTotalBlockSize' : [ 0x18, ['unsigned long long']],
'MaxTotalBlockSize' : [ 0x20, ['unsigned long long']],
'HeapLeakEnumerationRoutine' : [ 0x28, ['pointer64', ['void']]],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x38, {
'BasePhysicalPage' : [ 0x0, ['unsigned long long']],
'BasedPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BankSize' : [ 0x10, ['unsigned long']],
'BankShift' : [ 0x14, ['unsigned long']],
'BankedRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'CurrentMappedPte' : [ 0x28, ['pointer64', ['_MMPTE']]],
'BankTemplate' : [ 0x30, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_XSAVE_AREA_HEADER' : [ 0x40, {
'Mask' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['array', 7, ['unsigned long long']]],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x68, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'Context' : [ 0x18, ['pointer64', ['void']]],
'CompletionState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'IrpPended' : [ 0x24, ['unsigned long']],
'Status' : [ 0x28, ['long']],
'Information' : [ 0x30, ['pointer64', ['void']]],
'WorkItem' : [ 0x38, ['_WORK_QUEUE_ITEM']],
'FailingDriver' : [ 0x58, ['pointer64', ['_DRIVER_OBJECT']]],
'ReferenceCount' : [ 0x60, ['long']],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'_EVENT_FILTER_HEADER' : [ 0x18, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['array', 5, ['unsigned char']]],
'InstanceId' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'NextOffset' : [ 0x14, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_SECTION_OBJECT' : [ 0x30, {
'StartingVa' : [ 0x0, ['pointer64', ['void']]],
'EndingVa' : [ 0x8, ['pointer64', ['void']]],
'Parent' : [ 0x10, ['pointer64', ['void']]],
'LeftChild' : [ 0x18, ['pointer64', ['void']]],
'RightChild' : [ 0x20, ['pointer64', ['void']]],
'Segment' : [ 0x28, ['pointer64', ['_SEGMENT_OBJECT']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RefCount' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['wchar']]],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x60, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENTRY' : [ 0x18, {
'Linkage' : [ 0x0, ['_LIST_ENTRY']],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_20e2' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_PERF_STATES' : [ 0xb0, {
'Count' : [ 0x0, ['unsigned long']],
'MaxFrequency' : [ 0x4, ['unsigned long']],
'PStateCap' : [ 0x8, ['unsigned long']],
'TStateCap' : [ 0xc, ['unsigned long']],
'MaxPerfState' : [ 0x10, ['unsigned long']],
'MinPerfState' : [ 0x14, ['unsigned long']],
'LowestPState' : [ 0x18, ['unsigned long']],
'IncreaseTime' : [ 0x1c, ['unsigned long']],
'DecreaseTime' : [ 0x20, ['unsigned long']],
'BusyAdjThreshold' : [ 0x24, ['unsigned char']],
'Reserved' : [ 0x25, ['unsigned char']],
'ThrottleStatesOnly' : [ 0x26, ['unsigned char']],
'PolicyType' : [ 0x27, ['unsigned char']],
'TimerInterval' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['__unnamed_20e2']],
'TargetProcessors' : [ 0x30, ['_KAFFINITY_EX']],
'PStateHandler' : [ 0x58, ['pointer64', ['void']]],
'PStateContext' : [ 0x60, ['unsigned long long']],
'TStateHandler' : [ 0x68, ['pointer64', ['void']]],
'TStateContext' : [ 0x70, ['unsigned long long']],
'FeedbackHandler' : [ 0x78, ['pointer64', ['void']]],
'GetFFHThrottleState' : [ 0x80, ['pointer64', ['void']]],
'State' : [ 0x88, ['array', 1, ['_PPM_PERF_STATE']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_WMI_TRACE_PACKET' : [ 0x4, {
'Size' : [ 0x0, ['unsigned short']],
'HookId' : [ 0x2, ['unsigned short']],
'Type' : [ 0x2, ['unsigned char']],
'Group' : [ 0x3, ['unsigned char']],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Processor' : [ 0x38, ['unsigned long']],
'Period' : [ 0x3c, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE' : [ 0x70, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x8, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x30, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x60, ['unsigned long']],
'Buckets' : [ 0x68, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_POP_POWER_ACTION' : [ 0xc0, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x38, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x40, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x48, ['unsigned long long']],
'SleepTime' : [ 0x50, ['unsigned long long']],
'ProgrammedRTCTime' : [ 0x58, ['unsigned long long']],
'WakeOnRTC' : [ 0x60, ['unsigned char']],
'WakeTimerInfo' : [ 0x68, ['pointer64', ['_DIAGNOSTIC_BUFFER']]],
'FilteredCapabilities' : [ 0x70, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x68, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'PowerChildren' : [ 0x10, ['_LIST_ENTRY']],
'PowerParents' : [ 0x20, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x38, ['unsigned char']],
'DeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x48, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x50, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x58, ['unsigned long']],
'ActiveChild' : [ 0x5c, ['unsigned long']],
'ParentCount' : [ 0x60, ['unsigned long']],
'ActiveParent' : [ 0x64, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_PROC_IDLE_STATE_ACCOUNTING' : [ 0x228, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'IdleTransitions' : [ 0x8, ['unsigned long']],
'FailedTransitions' : [ 0xc, ['unsigned long']],
'InvalidBucketIndex' : [ 0x10, ['unsigned long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'MaxTime' : [ 0x20, ['unsigned long long']],
'IdleTimeBuckets' : [ 0x28, ['array', 16, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x8, {
'PageHashes' : [ 0x0, ['pointer64', ['void']]],
'Value' : [ 0x0, ['unsigned long long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2124' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2126' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0x10, ['__unnamed_2124']],
'Button' : [ 0x10, ['__unnamed_2126']],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_LOADER_PARAMETER_EXTENSION' : [ 0x148, {
'Size' : [ 0x0, ['unsigned long']],
'Profile' : [ 0x4, ['_PROFILE_PARAMETER_BLOCK']],
'EmInfFileImage' : [ 0x18, ['pointer64', ['void']]],
'EmInfFileSize' : [ 0x20, ['unsigned long']],
'TriageDumpBlock' : [ 0x28, ['pointer64', ['void']]],
'LoaderPagesSpanned' : [ 0x30, ['unsigned long long']],
'HeadlessLoaderBlock' : [ 0x38, ['pointer64', ['_HEADLESS_LOADER_BLOCK']]],
'SMBiosEPSHeader' : [ 0x40, ['pointer64', ['_SMBIOS_TABLE_HEADER']]],
'DrvDBImage' : [ 0x48, ['pointer64', ['void']]],
'DrvDBSize' : [ 0x50, ['unsigned long']],
'NetworkLoaderBlock' : [ 0x58, ['pointer64', ['_NETWORK_LOADER_BLOCK']]],
'FirmwareDescriptorListHead' : [ 0x60, ['_LIST_ENTRY']],
'AcpiTable' : [ 0x70, ['pointer64', ['void']]],
'AcpiTableSize' : [ 0x78, ['unsigned long']],
'LastBootSucceeded' : [ 0x7c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'LastBootShutdown' : [ 0x7c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IoPortAccessSupported' : [ 0x7c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x7c, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'LoaderPerformanceData' : [ 0x80, ['pointer64', ['_LOADER_PERFORMANCE_DATA']]],
'BootApplicationPersistentData' : [ 0x88, ['_LIST_ENTRY']],
'WmdTestResult' : [ 0x98, ['pointer64', ['void']]],
'BootIdentifier' : [ 0xa0, ['_GUID']],
'ResumePages' : [ 0xb0, ['unsigned long']],
'DumpHeader' : [ 0xb8, ['pointer64', ['void']]],
'BgContext' : [ 0xc0, ['pointer64', ['void']]],
'NumaLocalityInfo' : [ 0xc8, ['pointer64', ['void']]],
'NumaGroupAssignment' : [ 0xd0, ['pointer64', ['void']]],
'AttachedHives' : [ 0xd8, ['_LIST_ENTRY']],
'MemoryCachingRequirementsCount' : [ 0xe8, ['unsigned long']],
'MemoryCachingRequirements' : [ 0xf0, ['pointer64', ['void']]],
'TpmBootEntropyResult' : [ 0xf8, ['_TPM_BOOT_ENTROPY_LDR_RESULT']],
'ProcessorCounterFrequency' : [ 0x140, ['unsigned long long']],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x20, ['pointer64', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'_KUMS_CONTEXT_HEADER' : [ 0x70, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'StackTop' : [ 0x20, ['pointer64', ['void']]],
'StackSize' : [ 0x28, ['unsigned long long']],
'RspOffset' : [ 0x30, ['unsigned long long']],
'Rip' : [ 0x38, ['unsigned long long']],
'FltSave' : [ 0x40, ['pointer64', ['_XSAVE_FORMAT']]],
'Volatile' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x48, ['BitField', dict(start_bit = 1, end_bit = 64, native_type='unsigned long long')]],
'Flags' : [ 0x48, ['unsigned long long']],
'TrapFrame' : [ 0x50, ['pointer64', ['_KTRAP_FRAME']]],
'ExceptionFrame' : [ 0x58, ['pointer64', ['_KEXCEPTION_FRAME']]],
'SourceThread' : [ 0x60, ['pointer64', ['_KTHREAD']]],
'Return' : [ 0x68, ['unsigned long long']],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x400, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x3f0, ['unsigned long long']],
'EnvironmentVersion' : [ 0x3f8, ['unsigned long long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_SRWLOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_ALPC_MESSAGE_ZONE' : [ 0x30, {
'Mdl' : [ 0x0, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x8, ['pointer64', ['void']]],
'UserLimit' : [ 0x10, ['pointer64', ['void']]],
'SystemVa' : [ 0x18, ['pointer64', ['void']]],
'SystemLimit' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x28, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x20, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PROC_PERF_LOAD' : [ 0x2, {
'BusyPercentage' : [ 0x0, ['unsigned char']],
'FrequencyPercentage' : [ 0x1, ['unsigned char']],
} ],
'_PROC_HISTORY_ENTRY' : [ 0x4, {
'Utility' : [ 0x0, ['unsigned short']],
'Frequency' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_KSPECIAL_REGISTERS' : [ 0xd8, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POOL_HEADER' : [ 0x10, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'BlockSize' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'PoolType' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'ProcessBilled' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'AllocatorBackTraceIndex' : [ 0x8, ['unsigned short']],
'PoolTagHash' : [ 0xa, ['unsigned short']],
} ],
'_ETW_PROVIDER_TABLE_ENTRY' : [ 0x18, {
'RefCount' : [ 0x0, ['long']],
'State' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'EtwProviderStateFree', 1: 'EtwProviderStateTransition', 2: 'EtwProviderStateActive', 3: 'EtwProviderStateMax'})]],
'RegEntry' : [ 0x8, ['pointer64', ['_ETW_REG_ENTRY']]],
'Caller' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PEB64' : [ 0x380, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'IFEOKey' : [ 0x48, ['unsigned long long']],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'UserSharedInfoPtr' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'HotpatchInformation' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['unsigned long long']],
'WerShipAssertPtr' : [ 0x360, ['unsigned long long']],
'pContextData' : [ 0x368, ['unsigned long long']],
'pImageHeaderHash' : [ 0x370, ['unsigned long long']],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x80, {
'Address' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x18, ['array', 13, ['pointer64', ['void']]]],
} ],
'__unnamed_21ca' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1f80, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_21ca']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x20, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x28, ['unsigned long long']],
'NonPagablePages' : [ 0x30, ['unsigned long long']],
'CommittedPages' : [ 0x38, ['unsigned long long']],
'PagedPoolStart' : [ 0x40, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x48, ['pointer64', ['void']]],
'SessionObject' : [ 0x50, ['pointer64', ['void']]],
'SessionObjectHandle' : [ 0x58, ['pointer64', ['void']]],
'ResidentProcessCount' : [ 0x60, ['long']],
'SessionPoolAllocationFailures' : [ 0x64, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x78, ['_LIST_ENTRY']],
'LocaleId' : [ 0x88, ['unsigned long']],
'AttachCount' : [ 0x8c, ['unsigned long']],
'AttachGate' : [ 0x90, ['_KGATE']],
'WsListEntry' : [ 0xa8, ['_LIST_ENTRY']],
'Lookaside' : [ 0xc0, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb40, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xb98, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xc00, ['_MMSUPPORT']],
'Wsle' : [ 0xc88, ['pointer64', ['_MMWSLE']]],
'DriverUnload' : [ 0xc90, ['pointer64', ['void']]],
'PagedPool' : [ 0xcc0, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1e00, ['_MMPTE']],
'SessionVaLock' : [ 0x1e08, ['_KGUARDED_MUTEX']],
'DynamicVaBitMap' : [ 0x1e40, ['_RTL_BITMAP']],
'DynamicVaHint' : [ 0x1e50, ['unsigned long']],
'SpecialPool' : [ 0x1e58, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1ea0, ['_KGUARDED_MUTEX']],
'PoolBigEntriesInUse' : [ 0x1ed8, ['long']],
'PagedPoolPdeCount' : [ 0x1edc, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1ee0, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1ee4, ['unsigned long']],
'SystemPteInfo' : [ 0x1ee8, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1f30, ['pointer64', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1f38, ['unsigned long long']],
'PoolTrackBigPages' : [ 0x1f40, ['pointer64', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1f48, ['unsigned long long']],
'IoState' : [ 0x1f50, ['Enumeration', dict(target = 'long', choices = {1: 'IoSessionStateCreated', 2: 'IoSessionStateInitialized', 3: 'IoSessionStateConnected', 4: 'IoSessionStateDisconnected', 5: 'IoSessionStateDisconnectedLoggedOn', 6: 'IoSessionStateLoggedOn', 7: 'IoSessionStateLoggedOff', 8: 'IoSessionStateTerminated', 9: 'IoSessionStateMax'})]],
'IoStateSequence' : [ 0x1f54, ['unsigned long']],
'IoNotificationEvent' : [ 0x1f58, ['_KEVENT']],
'CreateTime' : [ 0x1f70, ['unsigned long long']],
'CpuQuotaBlock' : [ 0x1f78, ['pointer64', ['_PS_CPU_QUOTA_BLOCK']]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_WHEA_MEMORY_ERROR_SECTION' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_SECTION_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_KWAIT_STATUS_REGISTER' : [ 0x1, {
'Flags' : [ 0x0, ['unsigned char']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'Affinity' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Apc' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'UserApc' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Alert' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockInStackQueuedSpinLock', 7: 'VfDeadlockUnusedSpinLock', 8: 'VfDeadlockEresource', 9: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'AttemptingDelete' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x68, {
'Mutex' : [ 0x0, ['_KGUARDED_MUTEX']],
'PagedPoolAllocationMap' : [ 0x38, ['_RTL_BITMAP']],
'FirstPteForPagedPool' : [ 0x48, ['pointer64', ['_MMPTE']]],
'PagedPoolHint' : [ 0x50, ['unsigned long']],
'PagedPoolCommit' : [ 0x58, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x60, ['unsigned long long']],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'_PROC_PERF_DOMAIN' : [ 0xb8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Master' : [ 0x10, ['pointer64', ['_KPRCB']]],
'Members' : [ 0x18, ['_KAFFINITY_EX']],
'FeedbackHandler' : [ 0x40, ['pointer64', ['void']]],
'GetFFHThrottleState' : [ 0x48, ['pointer64', ['void']]],
'BoostPolicyHandler' : [ 0x50, ['pointer64', ['void']]],
'PerfSelectionHandler' : [ 0x58, ['pointer64', ['void']]],
'PerfHandler' : [ 0x60, ['pointer64', ['void']]],
'Processors' : [ 0x68, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'PerfChangeTime' : [ 0x70, ['unsigned long long']],
'ProcessorCount' : [ 0x78, ['unsigned long']],
'PreviousFrequencyMhz' : [ 0x7c, ['unsigned long']],
'CurrentFrequencyMhz' : [ 0x80, ['unsigned long']],
'PreviousFrequency' : [ 0x84, ['unsigned long']],
'CurrentFrequency' : [ 0x88, ['unsigned long']],
'CurrentPerfContext' : [ 0x8c, ['unsigned long']],
'DesiredFrequency' : [ 0x90, ['unsigned long']],
'MaxFrequency' : [ 0x94, ['unsigned long']],
'MinPerfPercent' : [ 0x98, ['unsigned long']],
'MinThrottlePercent' : [ 0x9c, ['unsigned long']],
'MaxPercent' : [ 0xa0, ['unsigned long']],
'MinPercent' : [ 0xa4, ['unsigned long']],
'ConstrainedMaxPercent' : [ 0xa8, ['unsigned long']],
'ConstrainedMinPercent' : [ 0xac, ['unsigned long']],
'Coordination' : [ 0xb0, ['unsigned char']],
'PerfChangeIntervalCount' : [ 0xb4, ['long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_DUMMY_FILE_OBJECT' : [ 0x110, {
'ObjectHeader' : [ 0x0, ['_OBJECT_HEADER']],
'FileObjectBody' : [ 0x38, ['array', 216, ['unsigned char']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_RELATION_LIST' : [ 0x18, {
'Count' : [ 0x0, ['unsigned long']],
'TagCount' : [ 0x4, ['unsigned long']],
'FirstLevel' : [ 0x8, ['unsigned long']],
'MaxLevel' : [ 0xc, ['unsigned long']],
'Entries' : [ 0x10, ['array', 1, ['pointer64', ['_RELATION_LIST_ENTRY']]]],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x48, {
'PteBase' : [ 0x0, ['pointer64', ['_MMPTE']]],
'Lock' : [ 0x8, ['unsigned long long']],
'Paged' : [ 0x10, ['_MI_SPECIAL_POOL_PTE_LIST']],
'NonPaged' : [ 0x20, ['_MI_SPECIAL_POOL_PTE_LIST']],
'PagesInUse' : [ 0x30, ['long long']],
'SpecialPoolPdes' : [ 0x38, ['_RTL_BITMAP']],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'__unnamed_2240' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_2244' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_2240']],
'Bits' : [ 0x4, ['__unnamed_2244']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_KGUARDED_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x20, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x10, ['_PO_IRP_QUEUE']],
} ],
'_PPM_PERF_STATE' : [ 0x28, {
'Frequency' : [ 0x0, ['unsigned long']],
'Power' : [ 0x4, ['unsigned long']],
'PercentFrequency' : [ 0x8, ['unsigned char']],
'IncreaseLevel' : [ 0x9, ['unsigned char']],
'DecreaseLevel' : [ 0xa, ['unsigned char']],
'Type' : [ 0xb, ['unsigned char']],
'Control' : [ 0x10, ['unsigned long long']],
'Status' : [ 0x18, ['unsigned long long']],
'TotalHitCount' : [ 0x20, ['unsigned long']],
'DesiredCount' : [ 0x24, ['unsigned long']],
} ],
'_PPM_FFH_THROTTLE_STATE_INFO' : [ 0x20, {
'EnableLogging' : [ 0x0, ['unsigned char']],
'MismatchCount' : [ 0x4, ['unsigned long']],
'Initialized' : [ 0x8, ['unsigned char']],
'LastValue' : [ 0x10, ['unsigned long long']],
'LastLogTickCount' : [ 0x18, ['_LARGE_INTEGER']],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x20, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2260' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_2262' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_2260']],
'Merged' : [ 0x10, ['__unnamed_2262']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0x18, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x8, ['pointer64', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'Lookaside' : [ 0x10, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'__unnamed_226a' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_226a']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_MSUBSECTION' : [ 0x70, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'NextMappedSubsection' : [ 0x10, ['pointer64', ['_MSUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_1fb7']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
'u1' : [ 0x38, ['__unnamed_2059']],
'LeftChild' : [ 0x40, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x48, ['pointer64', ['_MMSUBSECTION_NODE']]],
'DereferenceList' : [ 0x50, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x60, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x68, ['unsigned long long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_VIRTUAL_EFI_RUNTIME_SERVICES' : [ 0x70, {
'GetTime' : [ 0x0, ['unsigned long long']],
'SetTime' : [ 0x8, ['unsigned long long']],
'GetWakeupTime' : [ 0x10, ['unsigned long long']],
'SetWakeupTime' : [ 0x18, ['unsigned long long']],
'SetVirtualAddressMap' : [ 0x20, ['unsigned long long']],
'ConvertPointer' : [ 0x28, ['unsigned long long']],
'GetVariable' : [ 0x30, ['unsigned long long']],
'GetNextVariableName' : [ 0x38, ['unsigned long long']],
'SetVariable' : [ 0x40, ['unsigned long long']],
'GetNextHighMonotonicCount' : [ 0x48, ['unsigned long long']],
'ResetSystem' : [ 0x50, ['unsigned long long']],
'UpdateCapsule' : [ 0x58, ['unsigned long long']],
'QueryCapsuleCapabilities' : [ 0x60, ['unsigned long long']],
'QueryVariableInfo' : [ 0x68, ['unsigned long long']],
} ],
'_MI_SPECIAL_POOL_PTE_LIST' : [ 0x10, {
'FreePteHead' : [ 0x0, ['_MMPTE']],
'FreePteTail' : [ 0x8, ['_MMPTE']],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'spare2' : [ 0x12, ['array', 3, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_2280' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_2284' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x50, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'u1' : [ 0x30, ['__unnamed_2280']],
'u2' : [ 0x38, ['__unnamed_2284']],
'PrototypePte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'ThePtes' : [ 0x48, ['array', 1, ['_MMPTE']]],
} ],
'_DIAGNOSTIC_CONTEXT' : [ 0x20, {
'CallerType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'Process' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'ServiceTag' : [ 0x10, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'ReasonSize' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_228d' : [ 0x4, {
'MissedEtwRegistration' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_228f' : [ 0x4, {
'Flags' : [ 0x0, ['__unnamed_228d']],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VF_TARGET_VERIFIED_DRIVER_DATA' : [ 0x100, {
'SuspectDriverEntry' : [ 0x0, ['pointer64', ['_VF_SUSPECT_DRIVER_ENTRY']]],
'WMICallback' : [ 0x8, ['pointer64', ['void']]],
'EtwHandlesListHead' : [ 0x10, ['_LIST_ENTRY']],
'u1' : [ 0x20, ['__unnamed_228f']],
'Signature' : [ 0x28, ['unsigned long long']],
'PoolPageHeaders' : [ 0x30, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x40, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x50, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x54, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x58, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x5c, ['unsigned long']],
'PagedBytes' : [ 0x60, ['unsigned long long']],
'NonPagedBytes' : [ 0x68, ['unsigned long long']],
'PeakPagedBytes' : [ 0x70, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x78, ['unsigned long long']],
'RaiseIrqls' : [ 0x80, ['unsigned long']],
'AcquireSpinLocks' : [ 0x84, ['unsigned long']],
'SynchronizeExecutions' : [ 0x88, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x8c, ['unsigned long']],
'AllocationsFailed' : [ 0x90, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x94, ['unsigned long']],
'LockedBytes' : [ 0x98, ['unsigned long long']],
'PeakLockedBytes' : [ 0xa0, ['unsigned long long']],
'MappedLockedBytes' : [ 0xa8, ['unsigned long long']],
'PeakMappedLockedBytes' : [ 0xb0, ['unsigned long long']],
'MappedIoSpaceBytes' : [ 0xb8, ['unsigned long long']],
'PeakMappedIoSpaceBytes' : [ 0xc0, ['unsigned long long']],
'PagesForMdlBytes' : [ 0xc8, ['unsigned long long']],
'PeakPagesForMdlBytes' : [ 0xd0, ['unsigned long long']],
'ContiguousMemoryBytes' : [ 0xd8, ['unsigned long long']],
'PeakContiguousMemoryBytes' : [ 0xe0, ['unsigned long long']],
'ContiguousMemoryListHead' : [ 0xe8, ['_LIST_ENTRY']],
} ],
'_PCAT_FIRMWARE_INFORMATION' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x68, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'SequentialReadCount' : [ 0x30, ['unsigned long']],
'ReadAheadLength' : [ 0x34, ['unsigned long']],
'ReadAheadOffset' : [ 0x38, ['_LARGE_INTEGER']],
'ReadAheadBeyondLastByte' : [ 0x40, ['_LARGE_INTEGER']],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long long']],
'PrivateLinks' : [ 0x50, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x60, ['pointer64', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_TPM_BOOT_ENTROPY_LDR_RESULT' : [ 0x48, {
'Policy' : [ 0x0, ['unsigned long long']],
'ResultCode' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'TpmBootEntropyStructureUninitialized', 1: 'TpmBootEntropyDisabledByPolicy', 2: 'TpmBootEntropyNoTpmFound', 3: 'TpmBootEntropyTpmError', 4: 'TpmBootEntropySuccess'})]],
'ResultStatus' : [ 0xc, ['long']],
'Time' : [ 0x10, ['unsigned long long']],
'EntropyLength' : [ 0x18, ['unsigned long']],
'EntropyData' : [ 0x1c, ['array', 40, ['unsigned char']]],
} ],
'_RTL_HANDLE_TABLE' : [ 0x30, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x18, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x20, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x28, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_PTE_TRACKER' : [ 0x58, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x10, ['pointer64', ['_MDL']]],
'Count' : [ 0x18, ['unsigned long long']],
'SystemVa' : [ 0x20, ['pointer64', ['void']]],
'StartVa' : [ 0x28, ['pointer64', ['void']]],
'Offset' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
'Page' : [ 0x38, ['unsigned long long']],
'IoMapping' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x40, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x40, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Spare' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'CallingAddress' : [ 0x48, ['pointer64', ['void']]],
'CallersCaller' : [ 0x50, ['pointer64', ['void']]],
} ],
'_KTHREAD_COUNTERS' : [ 0x1a8, {
'WaitReasonBitMap' : [ 0x0, ['unsigned long long']],
'UserData' : [ 0x8, ['pointer64', ['_THREAD_PERFORMANCE_DATA']]],
'Flags' : [ 0x10, ['unsigned long']],
'ContextSwitches' : [ 0x14, ['unsigned long']],
'CycleTimeBias' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'HwCounter' : [ 0x28, ['array', 16, ['_COUNTER_READING']]],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0x18, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x20, {
'BlockAddress' : [ 0x0, ['unsigned long long']],
'BinAddress' : [ 0x8, ['unsigned long long']],
'CmView' : [ 0x10, ['pointer64', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0x18, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x18, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned char']],
'NameLength' : [ 0xf, ['unsigned char']],
'Name' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x8, ['pointer64', ['void']]],
} ],
'_LOADER_PERFORMANCE_DATA' : [ 0x10, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
} ],
'_PNP_DEVICE_ACTION_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'AssignResources', 1: 'ClearDeviceProblem', 2: 'ClearProblem', 3: 'ClearEjectProblem', 4: 'HaltDevice', 5: 'QueryPowerRelations', 6: 'Rebalance', 7: 'ReenumerateBootDevices', 8: 'ReenumerateDeviceOnly', 9: 'ReenumerateDeviceTree', 10: 'ReenumerateRootDevices', 11: 'RequeryDeviceState', 12: 'ResetDevice', 13: 'ResourceRequirementsChanged', 14: 'RestartEnumeration', 15: 'SetDeviceProblem', 16: 'StartDevice', 17: 'StartSystemDevicesPass0', 18: 'StartSystemDevicesPass1'})]],
'ReorderingBarrier' : [ 0x1c, ['unsigned char']],
'RequestArgument' : [ 0x20, ['unsigned long long']],
'CompletionEvent' : [ 0x28, ['pointer64', ['_KEVENT']]],
'CompletionStatus' : [ 0x30, ['pointer64', ['long']]],
} ],
'_COUNTER_READING' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PMCCounter', 1: 'MaxHardwareCounterType'})]],
'Index' : [ 0x4, ['unsigned long']],
'Start' : [ 0x8, ['unsigned long long']],
'Total' : [ 0x10, ['unsigned long long']],
} ],
'_MMSESSION' : [ 0x58, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x38, ['pointer64', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewTable' : [ 0x40, ['pointer64', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x48, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x4c, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x50, ['unsigned long']],
'BitmapFailures' : [ 0x54, ['unsigned long']],
} ],
'_ETW_REG_ENTRY' : [ 0x70, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GroupRegList' : [ 0x10, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x20, ['pointer64', ['_ETW_GUID_ENTRY']]],
'GroupEntry' : [ 0x28, ['pointer64', ['_ETW_GUID_ENTRY']]],
'Index' : [ 0x30, ['unsigned short']],
'Flags' : [ 0x32, ['unsigned short']],
'EnableMask' : [ 0x34, ['unsigned char']],
'GroupEnableMask' : [ 0x35, ['unsigned char']],
'UseDescriptorType' : [ 0x36, ['unsigned char']],
'SessionId' : [ 0x38, ['unsigned long']],
'ReplyQueue' : [ 0x38, ['pointer64', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x38, ['array', 4, ['pointer64', ['_ETW_REG_ENTRY']]]],
'Process' : [ 0x58, ['pointer64', ['_EPROCESS']]],
'Callback' : [ 0x58, ['pointer64', ['void']]],
'CallbackContext' : [ 0x60, ['pointer64', ['void']]],
'Traits' : [ 0x68, ['pointer64', ['_ETW_PROVIDER_TRAITS']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_ETW_PROVIDER_TRAITS' : [ 0x20, {
'Node' : [ 0x0, ['_RTL_BALANCED_NODE']],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'Traits' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x2f8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'AbortEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'ReadySemaphore' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x28, ['pointer64', ['_KSEMAPHORE']]],
'GetNewDeviceList' : [ 0x30, ['unsigned char']],
'Order' : [ 0x38, ['_PO_DEVICE_NOTIFY_ORDER']],
'Pending' : [ 0x2d0, ['_LIST_ENTRY']],
'Status' : [ 0x2e0, ['long']],
'FailedDevice' : [ 0x2e8, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x2f0, ['unsigned char']],
'Cancelled' : [ 0x2f1, ['unsigned char']],
'IgnoreErrors' : [ 0x2f2, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x2f3, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x2f4, ['unsigned char']],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WatchProto' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DefaultProtectionMask' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 22, native_type='unsigned long')]],
'Binary32' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'ContainsDebug' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_KE_CRITICAL_REGION_TRACE' : [ 0x40, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_DIAGNOSTIC_BUFFER' : [ 0x28, {
'Size' : [ 0x0, ['unsigned long long']],
'CallerType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'ProcessImageNameOffset' : [ 0x10, ['unsigned long long']],
'ProcessId' : [ 0x18, ['unsigned long']],
'ServiceTag' : [ 0x1c, ['unsigned long']],
'DeviceDescriptionOffset' : [ 0x10, ['unsigned long long']],
'DevicePathOffset' : [ 0x18, ['unsigned long long']],
'ReasonOffset' : [ 0x20, ['unsigned long long']],
} ],
'_EX_WORK_QUEUE' : [ 0x58, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x40, ['unsigned long']],
'WorkItemsProcessed' : [ 0x44, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x48, ['unsigned long']],
'QueueDepthLastPass' : [ 0x4c, ['unsigned long']],
'Info' : [ 0x50, ['EX_QUEUE_WORKER_INFO']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_TEB32' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'EtwLocalData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['unsigned long']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
'ThreadUsesEresources' : [ 0x30, ['unsigned char']],
} ],
'_PPM_IDLE_STATE' : [ 0x60, {
'DomainMembers' : [ 0x0, ['_KAFFINITY_EX']],
'IdleCheck' : [ 0x28, ['pointer64', ['void']]],
'IdleHandler' : [ 0x30, ['pointer64', ['void']]],
'HvConfig' : [ 0x38, ['unsigned long long']],
'Context' : [ 0x40, ['pointer64', ['void']]],
'Latency' : [ 0x48, ['unsigned long']],
'Power' : [ 0x4c, ['unsigned long']],
'TimeCheck' : [ 0x50, ['unsigned long']],
'StateFlags' : [ 0x54, ['unsigned long']],
'PromotePercent' : [ 0x58, ['unsigned char']],
'DemotePercent' : [ 0x59, ['unsigned char']],
'PromotePercentBase' : [ 0x5a, ['unsigned char']],
'DemotePercentBase' : [ 0x5b, ['unsigned char']],
'StateType' : [ 0x5c, ['unsigned char']],
} ],
'_KRESOURCEMANAGER' : [ 0x250, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'State' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x20, ['unsigned long']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x88, ['_GUID']],
'NotificationQueue' : [ 0x98, ['_KQUEUE']],
'NotificationMutex' : [ 0xd8, ['_KMUTANT']],
'EnlistmentHead' : [ 0x110, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x120, ['unsigned long']],
'NotificationRoutine' : [ 0x128, ['pointer64', ['void']]],
'Key' : [ 0x130, ['pointer64', ['void']]],
'ProtocolListHead' : [ 0x138, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0x148, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0x158, ['_LIST_ENTRY']],
'Tm' : [ 0x168, ['pointer64', ['_KTM']]],
'Description' : [ 0x170, ['_UNICODE_STRING']],
'Enlistments' : [ 0x180, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x228, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_2318' : [ 0x4, {
'NodeSize' : [ 0x0, ['unsigned long']],
'UseLookaside' : [ 0x0, ['unsigned long']],
} ],
'_VF_AVL_TREE' : [ 0x40, {
'Lock' : [ 0x0, ['long']],
'NodeToFree' : [ 0x8, ['pointer64', ['void']]],
'NodeRangeSize' : [ 0x10, ['unsigned long long']],
'NodeCount' : [ 0x18, ['unsigned long long']],
'Tables' : [ 0x20, ['pointer64', ['_VF_AVL_TABLE']]],
'TablesNo' : [ 0x28, ['unsigned long']],
'u1' : [ 0x2c, ['__unnamed_2318']],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_WHEA_MEMORY_ERROR_SECTION_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_RELATION_LIST_ENTRY' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'MaxCount' : [ 0x4, ['unsigned long']],
'Devices' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x8168, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer64', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x18, ['unsigned long long']],
'ResourceAddressRange' : [ 0x20, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x4010, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x4018, ['unsigned long long']],
'ThreadAddressRange' : [ 0x4020, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x8010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x8014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x8018, ['unsigned long']],
'NodesSearched' : [ 0x801c, ['unsigned long']],
'MaxNodesSearched' : [ 0x8020, ['unsigned long']],
'SequenceNumber' : [ 0x8024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x8028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x802c, ['unsigned long']],
'DepthLimitHits' : [ 0x8030, ['unsigned long']],
'SearchLimitHits' : [ 0x8034, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x8038, ['unsigned long']],
'OutOfOrderReleases' : [ 0x803c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x8040, ['unsigned long']],
'TotalReleases' : [ 0x8044, ['unsigned long']],
'RootNodesDeleted' : [ 0x8048, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x804c, ['unsigned long']],
'Instigator' : [ 0x8050, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0x8058, ['unsigned long']],
'Participant' : [ 0x8060, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x8160, ['long']],
} ],
'_KTM' : [ 0x3c0, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x8, ['_KMUTANT']],
'State' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x48, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x70, ['_GUID']],
'Flags' : [ 0x80, ['unsigned long']],
'VolatileFlags' : [ 0x84, ['unsigned long']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x98, ['pointer64', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0xa0, ['pointer64', ['void']]],
'LogManagementContext' : [ 0xa8, ['pointer64', ['void']]],
'Transactions' : [ 0xb0, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0x158, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x200, ['_KMUTANT']],
'LsnOrderedList' : [ 0x238, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x248, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x250, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x288, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x290, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x298, ['_CLS_LSN']],
'TmRmHandle' : [ 0x2a0, ['pointer64', ['void']]],
'TmRm' : [ 0x2a8, ['pointer64', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x2b0, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x2c8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x2e8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x2f0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x310, ['_ERESOURCE']],
'LogFlags' : [ 0x378, ['unsigned long']],
'LogFullStatus' : [ 0x37c, ['long']],
'RecoveryStatus' : [ 0x380, ['long']],
'LastCheckBaseLsn' : [ 0x388, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x390, ['_LIST_ENTRY']],
'OfflineWorkItem' : [ 0x3a0, ['_WORK_QUEUE_ITEM']],
} ],
'_CONFIGURATION_COMPONENT' : [ 0x28, {
'Class' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SystemClass', 1: 'ProcessorClass', 2: 'CacheClass', 3: 'AdapterClass', 4: 'ControllerClass', 5: 'PeripheralClass', 6: 'MemoryClass', 7: 'MaximumClass'})]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ArcSystem', 1: 'CentralProcessor', 2: 'FloatingPointProcessor', 3: 'PrimaryIcache', 4: 'PrimaryDcache', 5: 'SecondaryIcache', 6: 'SecondaryDcache', 7: 'SecondaryCache', 8: 'EisaAdapter', 9: 'TcAdapter', 10: 'ScsiAdapter', 11: 'DtiAdapter', 12: 'MultiFunctionAdapter', 13: 'DiskController', 14: 'TapeController', 15: 'CdromController', 16: 'WormController', 17: 'SerialController', 18: 'NetworkController', 19: 'DisplayController', 20: 'ParallelController', 21: 'PointerController', 22: 'KeyboardController', 23: 'AudioController', 24: 'OtherController', 25: 'DiskPeripheral', 26: 'FloppyDiskPeripheral', 27: 'TapePeripheral', 28: 'ModemPeripheral', 29: 'MonitorPeripheral', 30: 'PrinterPeripheral', 31: 'PointerPeripheral', 32: 'KeyboardPeripheral', 33: 'TerminalPeripheral', 34: 'OtherPeripheral', 35: 'LinePeripheral', 36: 'NetworkPeripheral', 37: 'SystemMemory', 38: 'DockingInformation', 39: 'RealModeIrqRoutingTable', 40: 'RealModePCIEnumeration', 41: 'MaximumType'})]],
'Flags' : [ 0x8, ['_DEVICE_FLAGS']],
'Version' : [ 0xc, ['unsigned short']],
'Revision' : [ 0xe, ['unsigned short']],
'Key' : [ 0x10, ['unsigned long']],
'AffinityMask' : [ 0x14, ['unsigned long']],
'Group' : [ 0x14, ['unsigned short']],
'GroupIndex' : [ 0x16, ['unsigned short']],
'ConfigurationDataLength' : [ 0x18, ['unsigned long']],
'IdentifierLength' : [ 0x1c, ['unsigned long']],
'Identifier' : [ 0x20, ['pointer64', ['unsigned char']]],
} ],
'_KTRANSACTION' : [ 0x2d8, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x20, ['_KMUTANT']],
'TreeTx' : [ 0x58, ['pointer64', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x88, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0xb0, ['_GUID']],
'State' : [ 0xc0, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0xc4, ['unsigned long']],
'EnlistmentHead' : [ 0xc8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xd8, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0xdc, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0xe0, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0xe4, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0xe8, ['unsigned long']],
'PendingResponses' : [ 0xec, ['unsigned long']],
'SuperiorEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'LastLsn' : [ 0xf8, ['_CLS_LSN']],
'PromotedEntry' : [ 0x100, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0x110, ['pointer64', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0x118, ['pointer64', ['void']]],
'IsolationLevel' : [ 0x120, ['unsigned long']],
'IsolationFlags' : [ 0x124, ['unsigned long']],
'Timeout' : [ 0x128, ['_LARGE_INTEGER']],
'Description' : [ 0x130, ['_UNICODE_STRING']],
'RollbackThread' : [ 0x140, ['pointer64', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0x148, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0x168, ['_KDPC']],
'RollbackTimer' : [ 0x1a8, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x1e8, ['_LIST_ENTRY']],
'Outcome' : [ 0x1f8, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x200, ['pointer64', ['_KTM']]],
'CommitReservation' : [ 0x208, ['long long']],
'TransactionHistory' : [ 0x210, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x260, ['unsigned long']],
'DTCPrivateInformation' : [ 0x268, ['pointer64', ['void']]],
'DTCPrivateInformationLength' : [ 0x270, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x278, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x2b0, ['pointer64', ['void']]],
'PendingPromotionCount' : [ 0x2b8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x2c0, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x60, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x10, ['pointer64', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0x18, ['pointer64', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x38, ['pointer64', ['_CM_TRANS']]],
'UoWState' : [ 0x40, ['unsigned long']],
'ActionType' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x50, ['unsigned long']],
'OldValueCell' : [ 0x50, ['unsigned long']],
'NewValueCell' : [ 0x54, ['unsigned long']],
'UserFlags' : [ 0x50, ['unsigned long']],
'LastWriteTime' : [ 0x50, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x50, ['unsigned long']],
'OldChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x58, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x58, ['unsigned long']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_VF_WATCHDOG_IRP' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'DueTickCount' : [ 0x18, ['unsigned long']],
'Inserted' : [ 0x1c, ['unsigned char']],
'TrackedStackLocation' : [ 0x1d, ['unsigned char']],
'CancelTimeoutTicks' : [ 0x1e, ['unsigned short']],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GroupAssigned' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupCommitted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'GroupAssignmentFixed' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_2367' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2369' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_2367']],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_2369']],
} ],
'_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA' : [ 0x8, {
'CapturedCpuShareWeight' : [ 0x0, ['unsigned long']],
'CapturedTotalWeight' : [ 0x4, ['unsigned long']],
'CombinedData' : [ 0x0, ['long long']],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['wchar']]],
} ],
'_PROC_IDLE_STATE_BUCKET' : [ 0x20, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'MinTime' : [ 0x8, ['unsigned long long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'Count' : [ 0x18, ['unsigned long']],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 12, native_type='unsigned long')]],
} ],
'_PO_IRP_QUEUE' : [ 0x10, {
'CurrentIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'PendingIrpList' : [ 0x8, ['pointer64', ['_IRP']]],
} ],
'__unnamed_237c' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x48, ['__unnamed_237c']],
'ChildrenCount' : [ 0x4c, ['long']],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0xa8, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x68, ['_KMUTANT']],
'LinksOffset' : [ 0xa0, ['unsigned short']],
'GuidOffset' : [ 0xa2, ['unsigned short']],
'Expired' : [ 0xa4, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_VF_ADDRESS_RANGE' : [ 0x10, {
'Start' : [ 0x0, ['pointer64', ['unsigned char']]],
'End' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x20, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'DosDeviceDriveIndex' : [ 0x18, ['unsigned long']],
'Flags' : [ 0x1c, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x28, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x10, ['pointer64', ['void']]],
'Key' : [ 0x18, ['unsigned long long']],
'BindingProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x40, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x30, ['array', 3, ['unsigned long']]],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x48, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x40, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x20, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer64', ['_HEAP_SUBSEGMENT']]],
'Reserved' : [ 0x8, ['pointer64', ['void']]],
'SizeIndex' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_STACK_TABLE' : [ 0x8088, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x8, ['array', 16, ['pointer64', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x88, ['array', 16381, ['unsigned short']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_DEFERRED_WRITE' : [ 0x48, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'GpValue' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'ImageFlags' : [ 0x33, ['unsigned char']],
'ComPlusNativeReady' : [ 0x33, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x33, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x33, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x33, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x33, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'CheckSum' : [ 0x3c, ['unsigned long']],
} ],
'_VF_AVL_TABLE' : [ 0x70, {
'RtlTable' : [ 0x0, ['_RTL_AVL_TABLE']],
'ReservedNode' : [ 0x68, ['pointer64', ['_VF_AVL_TREE_NODE']]],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1b, {
'PerUserPolicy' : [ 0x0, ['array', 27, ['unsigned char']]],
} ],
'__unnamed_23d4' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_23d6' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_23da' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_23de' : [ 0x10, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x8, ['unsigned char']],
} ],
'__unnamed_23e0' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_23d4']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_23d6']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_23da']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_23de']],
'Others' : [ 0x0, ['__unnamed_23e0']],
} ],
'_PROFILE_PARAMETER_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'DockingState' : [ 0x4, ['unsigned short']],
'Capabilities' : [ 0x6, ['unsigned short']],
'DockID' : [ 0x8, ['unsigned long']],
'SerialNumber' : [ 0xc, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0x110, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'Reset' : [ 0x3, ['unsigned char']],
'HiberFlags' : [ 0x4, ['unsigned char']],
'WroteHiberFile' : [ 0x5, ['unsigned char']],
'MapFrozen' : [ 0x6, ['unsigned char']],
'MemoryMap' : [ 0x8, ['_RTL_BITMAP']],
'DiscardedMemoryPages' : [ 0x18, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x28, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x38, ['unsigned long']],
'NextCloneRange' : [ 0x40, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x48, ['unsigned long long']],
'LoaderMdl' : [ 0x50, ['pointer64', ['_MDL']]],
'AllocatedMdl' : [ 0x58, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x60, ['unsigned long long']],
'IoPages' : [ 0x68, ['pointer64', ['void']]],
'IoPagesCount' : [ 0x70, ['unsigned long']],
'CurrentMcb' : [ 0x78, ['pointer64', ['void']]],
'DumpStack' : [ 0x80, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x88, ['pointer64', ['_KPROCESSOR_STATE']]],
'PreferredIoWriteSize' : [ 0x90, ['unsigned long']],
'IoProgress' : [ 0x94, ['unsigned long']],
'HiberVa' : [ 0x98, ['unsigned long long']],
'HiberPte' : [ 0xa0, ['_LARGE_INTEGER']],
'Status' : [ 0xa8, ['long']],
'MemoryImage' : [ 0xb0, ['pointer64', ['PO_MEMORY_IMAGE']]],
'CompressionWorkspace' : [ 0xb8, ['pointer64', ['void']]],
'CompressedWriteBuffer' : [ 0xc0, ['pointer64', ['unsigned char']]],
'CompressedWriteBufferSize' : [ 0xc8, ['unsigned long']],
'MaxCompressedOutputSize' : [ 0xcc, ['unsigned long']],
'PerformanceStats' : [ 0xd0, ['pointer64', ['unsigned long']]],
'CompressionBlock' : [ 0xd8, ['pointer64', ['void']]],
'DmaIO' : [ 0xe0, ['pointer64', ['void']]],
'TemporaryHeap' : [ 0xe8, ['pointer64', ['void']]],
'BootLoaderLogMdl' : [ 0xf0, ['pointer64', ['_MDL']]],
'FirmwareRuntimeInformationMdl' : [ 0xf8, ['pointer64', ['_MDL']]],
'ResumeContext' : [ 0x100, ['pointer64', ['void']]],
'ResumeContextPages' : [ 0x108, ['unsigned long']],
} ],
'_OBJECT_REF_TRACE' : [ 0x80, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer64', ['void']]]],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PCW_COUNTER_INFORMATION' : [ 0x10, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x110, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0xa0, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0xa8, ['pointer64', ['void']]],
'PointersLength' : [ 0xb0, ['unsigned long']],
'ModulePrefix' : [ 0xb8, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0xc0, ['_LIST_ENTRY']],
'InitMsg' : [ 0xd0, ['_STRING']],
'ProgMsg' : [ 0xe0, ['_STRING']],
'DoneMsg' : [ 0xf0, ['_STRING']],
'FileObject' : [ 0x100, ['pointer64', ['void']]],
'UsageType' : [ 0x108, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x40, {
'ThreadHandle' : [ 0x0, ['pointer64', ['void']]],
'ThreadId' : [ 0x8, ['pointer64', ['void']]],
'ProcessId' : [ 0x10, ['pointer64', ['void']]],
'Code' : [ 0x18, ['unsigned long']],
'Parameter1' : [ 0x20, ['unsigned long long']],
'Parameter2' : [ 0x28, ['unsigned long long']],
'Parameter3' : [ 0x30, ['unsigned long long']],
'Parameter4' : [ 0x38, ['unsigned long long']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x8, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'SizeOfImage' : [ 0x4, ['unsigned long']],
} ],
'_PCW_MASK_INFORMATION' : [ 0x28, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'InstanceId' : [ 0x10, ['unsigned long']],
'CollectMultiple' : [ 0x14, ['unsigned char']],
'Buffer' : [ 0x18, ['pointer64', ['_PCW_BUFFER']]],
'CancelEvent' : [ 0x20, ['pointer64', ['_KEVENT']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'__unnamed_2406' : [ 0x20, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_2406']],
} ],
'__unnamed_240a' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_240a']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0x128, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'HiberPte' : [ 0x48, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x50, ['unsigned long']],
'FreeMapCheck' : [ 0x54, ['unsigned long']],
'WakeCheck' : [ 0x58, ['unsigned long']],
'FirstTablePage' : [ 0x60, ['unsigned long long']],
'PerfInfo' : [ 0x68, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0xc0, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0xc8, ['array', 1, ['unsigned long long']]],
'NoBootLoaderLogPages' : [ 0xd0, ['unsigned long']],
'BootLoaderLogPages' : [ 0xd8, ['array', 8, ['unsigned long long']]],
'NotUsed' : [ 0x118, ['unsigned long']],
'ResumeContextCheck' : [ 0x11c, ['unsigned long']],
'ResumeContextPages' : [ 0x120, ['unsigned long']],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x58, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'ElapsedTicks' : [ 0x18, ['unsigned long long']],
'CompressTicks' : [ 0x20, ['unsigned long long']],
'ResumeAppTime' : [ 0x28, ['unsigned long long']],
'HiberFileResumeTime' : [ 0x30, ['unsigned long long']],
'BytesCopied' : [ 0x38, ['unsigned long long']],
'PagesProcessed' : [ 0x40, ['unsigned long long']],
'PagesWritten' : [ 0x48, ['unsigned long']],
'DumpCount' : [ 0x4c, ['unsigned long']],
'FileRuns' : [ 0x50, ['unsigned long']],
} ],
'_DEVICE_FLAGS' : [ 0x4, {
'Failed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Removable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ConsoleIn' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConsoleOut' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Input' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Output' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
} ],
'_RTL_BALANCED_LINKS' : [ 0x20, {
'Parent' : [ 0x0, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x8, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x10, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0x18, ['unsigned char']],
'Reserved' : [ 0x19, ['array', 3, ['unsigned char']]],
} ],
'_MMVIEW' : [ 0x30, {
'Entry' : [ 0x0, ['unsigned long long']],
'Writable' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'ControlArea' : [ 0x8, ['pointer64', ['_CONTROL_AREA']]],
'ViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'SessionViewVa' : [ 0x20, ['pointer64', ['void']]],
'SessionId' : [ 0x28, ['unsigned long']],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_HEADLESS_LOADER_BLOCK' : [ 0x40, {
'UsedBiosSettings' : [ 0x0, ['unsigned char']],
'DataBits' : [ 0x1, ['unsigned char']],
'StopBits' : [ 0x2, ['unsigned char']],
'Parity' : [ 0x3, ['unsigned char']],
'BaudRate' : [ 0x4, ['unsigned long']],
'PortNumber' : [ 0x8, ['unsigned long']],
'PortAddress' : [ 0x10, ['pointer64', ['unsigned char']]],
'PciDeviceId' : [ 0x18, ['unsigned short']],
'PciVendorId' : [ 0x1a, ['unsigned short']],
'PciBusNumber' : [ 0x1c, ['unsigned char']],
'PciBusSegment' : [ 0x1e, ['unsigned short']],
'PciSlotNumber' : [ 0x20, ['unsigned char']],
'PciFunctionNumber' : [ 0x21, ['unsigned char']],
'PciFlags' : [ 0x24, ['unsigned long']],
'SystemGUID' : [ 0x28, ['_GUID']],
'IsMMIODevice' : [ 0x38, ['unsigned char']],
'TerminalType' : [ 0x39, ['unsigned char']],
} ],
'__unnamed_2434' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2436' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_2438' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_2434']],
'Gpt' : [ 0x0, ['__unnamed_2436']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0xa0, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_2438']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x48, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP']],
'Flags' : [ 0x10, ['unsigned long']],
'Hint' : [ 0x14, ['unsigned long']],
'BasePte' : [ 0x18, ['pointer64', ['_MMPTE']]],
'FailureCount' : [ 0x20, ['pointer64', ['unsigned long']]],
'Vm' : [ 0x28, ['pointer64', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x30, ['long']],
'TotalFreeSystemPtes' : [ 0x34, ['long']],
'CachedPteCount' : [ 0x38, ['long']],
'PteFailures' : [ 0x3c, ['unsigned long']],
'SpinLock' : [ 0x40, ['unsigned long long']],
'GlobalMutex' : [ 0x40, ['pointer64', ['_KGUARDED_MUTEX']]],
} ],
'_NETWORK_LOADER_BLOCK' : [ 0x20, {
'DHCPServerACK' : [ 0x0, ['pointer64', ['unsigned char']]],
'DHCPServerACKLength' : [ 0x8, ['unsigned long']],
'BootServerReplyPacket' : [ 0x10, ['pointer64', ['unsigned char']]],
'BootServerReplyPacketLength' : [ 0x18, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x298, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 9, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x48, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x18, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x28, ['_LIST_ENTRY']],
'WaitS0' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_THREAD_PERFORMANCE_DATA' : [ 0x1c0, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'ProcessorNumber' : [ 0x4, ['_PROCESSOR_NUMBER']],
'ContextSwitches' : [ 0x8, ['unsigned long']],
'HwCountersCount' : [ 0xc, ['unsigned long']],
'UpdateCount' : [ 0x10, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'CycleTime' : [ 0x28, ['_COUNTER_READING']],
'HwCounters' : [ 0x40, ['array', 16, ['_COUNTER_READING']]],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_ETW_REPLY_QUEUE' : [ 0x48, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x40, ['long']],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x68, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x20, ['pointer64', ['void']]],
'WhichOrderedElement' : [ 0x28, ['unsigned long']],
'NumberGenericTableElements' : [ 0x2c, ['unsigned long']],
'DepthOfTree' : [ 0x30, ['unsigned long']],
'RestartKey' : [ 0x38, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x40, ['unsigned long']],
'CompareRoutine' : [ 0x48, ['pointer64', ['void']]],
'AllocateRoutine' : [ 0x50, ['pointer64', ['void']]],
'FreeRoutine' : [ 0x58, ['pointer64', ['void']]],
'TableContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_KUSER_SHARED_DATA' : [ 0x5f0, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'AltArchitecturePad' : [ 0x2c4, ['array', 1, ['unsigned long']]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TscQpcData' : [ 0x2ed, ['unsigned char']],
'TscQpcEnabled' : [ 0x2ed, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TscQpcSpareFlag' : [ 0x2ed, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'TscQpcShift' : [ 0x2ed, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'TscQpcPad' : [ 0x2ee, ['array', 2, ['unsigned char']]],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgSystemDllRelocated' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgSEHValidationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'DataFlagsPad' : [ 0x2f4, ['array', 1, ['unsigned long']]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'ReservedTickCountOverlay' : [ 0x320, ['array', 3, ['unsigned long']]],
'TickCountPad' : [ 0x32c, ['array', 1, ['unsigned long']]],
'Cookie' : [ 0x330, ['unsigned long']],
'CookiePad' : [ 0x334, ['array', 1, ['unsigned long']]],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'DEPRECATED_Wow64SharedInformation' : [ 0x340, ['array', 16, ['unsigned long']]],
'UserModeGlobalLogger' : [ 0x380, ['array', 16, ['unsigned short']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'Reserved5' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
'TscQpcBias' : [ 0x3b8, ['unsigned long long']],
'ActiveProcessorCount' : [ 0x3c0, ['unsigned long']],
'ActiveGroupCount' : [ 0x3c4, ['unsigned short']],
'Reserved4' : [ 0x3c6, ['unsigned short']],
'AitSamplingValue' : [ 0x3c8, ['unsigned long']],
'AppCompatFlag' : [ 0x3cc, ['unsigned long']],
'DEPRECATED_SystemDllNativeRelocation' : [ 0x3d0, ['unsigned long long']],
'DEPRECATED_SystemDllWowRelocation' : [ 0x3d8, ['unsigned long']],
'XStatePad' : [ 0x3dc, ['array', 1, ['unsigned long']]],
'XState' : [ 0x3e0, ['_XSTATE_CONFIGURATION']],
} ],
# Anonymous helper record (0x8 bytes): a 64-bit quantity split into
# unsigned low/high 32-bit halves.  Record format used throughout this
# table: 'FieldName' : [ byte_offset, type_descriptor ].
'__unnamed_1043' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
# _ULARGE_INTEGER (0x8 bytes).  LowPart, 'u' and QuadPart all start at
# offset 0x0 -- overlapping offsets are how this table models a C union:
# three views (two dwords / struct / one unsigned qword) of the same value.
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1043']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
# Anonymous helper record (0x8 bytes): signed variant of the split-qword
# view -- HighPart is a signed 'long', so the combined value is signed.
'__unnamed_1047' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
# _LARGE_INTEGER (0x8 bytes).  Signed counterpart of _ULARGE_INTEGER:
# the overlapping offset-0 members (LowPart / 'u' / QuadPart) model a C
# union over a single signed 64-bit value.
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1047']],
'QuadPart' : [ 0x0, ['long long']],
} ],
# Anonymous 4-byte bit-field record: three flags packed into one
# unsigned long at offset 0 (bit 0, bit 1, and bits 2-31 respectively).
'__unnamed_105f' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Persistent' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
# Anonymous 4-byte union: the same dword readable either as a raw
# 'Flags' value or through the bit-field view 's' (__unnamed_105f).
'__unnamed_1061' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_105f']],
} ],
# _TP_CALLBACK_ENVIRON_V3 (0x48 bytes): mostly 64-bit pointers at
# 8-byte-aligned offsets, plus a 4-value priority enumeration stored as
# a signed long at 0x3c.  NOTE(review): field names match the Windows
# user-mode thread-pool callback environment (winnt.h) -- confirm
# against the PDB this table was generated from.
'_TP_CALLBACK_ENVIRON_V3' : [ 0x48, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x8, ['pointer64', ['_TP_POOL']]],
'CleanupGroup' : [ 0x10, ['pointer64', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0x18, ['pointer64', ['void']]],
'RaceDll' : [ 0x20, ['pointer64', ['void']]],
'ActivationContext' : [ 0x28, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x30, ['pointer64', ['void']]],
'u' : [ 0x38, ['__unnamed_1061']],
'CallbackPriority' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'TP_CALLBACK_PRIORITY_HIGH', 1: 'TP_CALLBACK_PRIORITY_NORMAL', 2: 'TP_CALLBACK_PRIORITY_LOW', 3: 'TP_CALLBACK_PRIORITY_INVALID'})]],
'Size' : [ 0x40, ['unsigned long']],
} ],
# _TP_TASK (0x20 bytes): callback-table pointer, NUMA/processor hints,
# and an embedded _LIST_ENTRY for queue linkage at 0x10.
'_TP_TASK' : [ 0x20, {
'Callbacks' : [ 0x0, ['pointer64', ['_TP_TASK_CALLBACKS']]],
'NumaNode' : [ 0x8, ['unsigned long']],
'IdealProcessor' : [ 0xc, ['unsigned char']],
'ListEntry' : [ 0x10, ['_LIST_ENTRY']],
} ],
# _TP_TASK_CALLBACKS (0x10 bytes): two raw function/context pointers.
'_TP_TASK_CALLBACKS' : [ 0x10, {
'ExecuteCallback' : [ 0x0, ['pointer64', ['void']]],
'Unposted' : [ 0x8, ['pointer64', ['void']]],
} ],
# _TP_DIRECT (0x10 bytes): same leading layout as _TP_TASK (callback
# pointer, NUMA node, ideal processor) but without the list linkage.
'_TP_DIRECT' : [ 0x10, {
'Callback' : [ 0x0, ['pointer64', ['void']]],
'NumaNode' : [ 0x8, ['unsigned long']],
'IdealProcessor' : [ 0xc, ['unsigned char']],
} ],
'_TEB' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['pointer64', ['void']]]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['pointer64', ['void']]],
'EtwLocalData' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['pointer64', ['void']]],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['pointer64', ['void']]],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'PreferredLanguages' : [ 0x17d0, ['pointer64', ['void']]],
'UserPrefLanguages' : [ 0x17d8, ['pointer64', ['void']]],
'MergedPrefLanguages' : [ 0x17e0, ['pointer64', ['void']]],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['pointer64', ['void']]],
'TxnScopeExitCallback' : [ 0x17f8, ['pointer64', ['void']]],
'TxnScopeContext' : [ 0x1800, ['pointer64', ['void']]],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['pointer64', ['void']]],
} ],
# _LIST_ENTRY (0x10 bytes): forward/backward 64-bit pointers to the same
# structure type -- the canonical doubly-linked list node embedded all
# over this table (e.g. _TEB.TlsLinks, _KPRCB.WaitListHead).
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
# _SINGLE_LIST_ENTRY (0x8 bytes): singly-linked list node -- one 64-bit
# 'Next' pointer to the same structure type.
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
# _RTL_DYNAMIC_HASH_TABLE_CONTEXT (0x18 bytes): two _LIST_ENTRY pointers
# (chain head and previous linkage) plus a 64-bit signature value.
'_RTL_DYNAMIC_HASH_TABLE_CONTEXT' : [ 0x18, {
'ChainHead' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'PrevLinkage' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
# _RTL_DYNAMIC_HASH_TABLE_ENUMERATOR (0x28 bytes): an embedded hash
# entry, the current chain head, and the bucket index of the cursor.
'_RTL_DYNAMIC_HASH_TABLE_ENUMERATOR' : [ 0x28, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ChainHead' : [ 0x18, ['pointer64', ['_LIST_ENTRY']]],
'BucketIndex' : [ 0x20, ['unsigned long']],
} ],
# _RTL_DYNAMIC_HASH_TABLE (0x28 bytes): eight packed dwords of table
# bookkeeping followed by a 64-bit 'Directory' pointer at 0x20.
'_RTL_DYNAMIC_HASH_TABLE' : [ 0x28, {
'Flags' : [ 0x0, ['unsigned long']],
'Shift' : [ 0x4, ['unsigned long']],
'TableSize' : [ 0x8, ['unsigned long']],
'Pivot' : [ 0xc, ['unsigned long']],
'DivisorMask' : [ 0x10, ['unsigned long']],
'NumEntries' : [ 0x14, ['unsigned long']],
'NonEmptyBuckets' : [ 0x18, ['unsigned long']],
'NumEnumerators' : [ 0x1c, ['unsigned long']],
'Directory' : [ 0x20, ['pointer64', ['void']]],
} ],
# _UNICODE_STRING (0x10 bytes): counted string -- two 16-bit lengths,
# 4 bytes of implicit alignment padding (0x4-0x7), then a 64-bit pointer
# to an array of 16-bit units.  NOTE(review): per Windows convention the
# lengths are byte counts and the buffer is UTF-16 -- confirm upstream.
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
# _STRING (0x10 bytes): same layout as _UNICODE_STRING but the buffer
# points to 8-bit characters (ANSI counted string).
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
# _RTL_BITMAP (0x10 bytes): bit count plus a 64-bit pointer (at 0x8,
# after alignment padding) to the dword array backing the bitmap.
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
# _LUID (0x8 bytes): locally unique identifier -- unsigned low dword and
# signed high dword.
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
# _IMAGE_NT_HEADERS64 (0x108 bytes): PE signature dword, then the COFF
# file header at 0x4 and the 64-bit optional header at 0x18.
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
# _IMAGE_DOS_HEADER (0x40 bytes): the legacy MS-DOS executable header
# that leads every PE file.  e_magic at 0x0 is the 'MZ' signature word;
# e_lfanew (signed long at 0x3c) is the file offset of the NT headers
# per the PE/COFF specification.
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_KPCR' : [ 0x4e80, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'UserRsp' : [ 0x10, ['unsigned long long']],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_KPRCB' : [ 0x4d00, {
'MxCsr' : [ 0x0, ['unsigned long']],
'LegacyNumber' : [ 0x4, ['unsigned char']],
'ReservedMustBeZero' : [ 0x5, ['unsigned char']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'NestingLevel' : [ 0x20, ['unsigned char']],
'PrcbPad00' : [ 0x21, ['array', 3, ['unsigned char']]],
'Number' : [ 0x24, ['unsigned long']],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'PrcbPad01' : [ 0x38, ['unsigned long long']],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'CpuStepping' : [ 0x5f2, ['unsigned char']],
'CpuModel' : [ 0x5f3, ['unsigned char']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'CoresPerPhysicalProcessor' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x63f, ['unsigned char']],
'ApicMask' : [ 0x640, ['unsigned long']],
'CFlushSize' : [ 0x644, ['unsigned long']],
'AcpiReserved' : [ 0x648, ['pointer64', ['void']]],
'InitialApicId' : [ 0x650, ['unsigned long']],
'Stride' : [ 0x654, ['unsigned long']],
'Group' : [ 0x658, ['unsigned short']],
'GroupSetMember' : [ 0x660, ['unsigned long long']],
'GroupIndex' : [ 0x668, ['unsigned char']],
'LockQueue' : [ 0x670, ['array', 17, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x780, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x880, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0x1480, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PacketBarrier' : [ 0x2080, ['long']],
'DeferredReadyListHead' : [ 0x2088, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0x2090, ['long']],
'MmCopyOnWriteCount' : [ 0x2094, ['long']],
'MmTransitionCount' : [ 0x2098, ['long']],
'MmDemandZeroCount' : [ 0x209c, ['long']],
'MmPageReadCount' : [ 0x20a0, ['long']],
'MmPageReadIoCount' : [ 0x20a4, ['long']],
'MmDirtyPagesWriteCount' : [ 0x20a8, ['long']],
'MmDirtyWriteIoCount' : [ 0x20ac, ['long']],
'MmMappedPagesWriteCount' : [ 0x20b0, ['long']],
'MmMappedWriteIoCount' : [ 0x20b4, ['long']],
'KeSystemCalls' : [ 0x20b8, ['unsigned long']],
'KeContextSwitches' : [ 0x20bc, ['unsigned long']],
'CcFastReadNoWait' : [ 0x20c0, ['unsigned long']],
'CcFastReadWait' : [ 0x20c4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x20c8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x20cc, ['unsigned long']],
'CcCopyReadWait' : [ 0x20d0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x20d4, ['unsigned long']],
'LookasideIrpFloat' : [ 0x20d8, ['long']],
'IoReadOperationCount' : [ 0x20dc, ['long']],
'IoWriteOperationCount' : [ 0x20e0, ['long']],
'IoOtherOperationCount' : [ 0x20e4, ['long']],
'IoReadTransferCount' : [ 0x20e8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x20f0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x20f8, ['_LARGE_INTEGER']],
'TargetCount' : [ 0x2100, ['long']],
'IpiFrozen' : [ 0x2104, ['unsigned long']],
'DpcData' : [ 0x2180, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x21c0, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x21c8, ['long']],
'DpcRequestRate' : [ 0x21cc, ['unsigned long']],
'MinimumDpcRate' : [ 0x21d0, ['unsigned long']],
'DpcLastCount' : [ 0x21d4, ['unsigned long']],
'ThreadDpcEnable' : [ 0x21d8, ['unsigned char']],
'QuantumEnd' : [ 0x21d9, ['unsigned char']],
'DpcRoutineActive' : [ 0x21da, ['unsigned char']],
'IdleSchedule' : [ 0x21db, ['unsigned char']],
'DpcRequestSummary' : [ 0x21dc, ['long']],
'DpcRequestSlot' : [ 0x21dc, ['array', 2, ['short']]],
'NormalDpcState' : [ 0x21dc, ['short']],
'DpcThreadActive' : [ 0x21de, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ThreadDpcState' : [ 0x21de, ['short']],
'TimerHand' : [ 0x21e0, ['unsigned long']],
'MasterOffset' : [ 0x21e4, ['long']],
'LastTick' : [ 0x21e8, ['unsigned long']],
'UnusedPad' : [ 0x21ec, ['unsigned long']],
'PrcbPad50' : [ 0x21f0, ['array', 2, ['unsigned long long']]],
'TimerTable' : [ 0x2200, ['_KTIMER_TABLE']],
'DpcGate' : [ 0x4400, ['_KGATE']],
'PrcbPad52' : [ 0x4418, ['pointer64', ['void']]],
'CallDpc' : [ 0x4420, ['_KDPC']],
'ClockKeepAlive' : [ 0x4460, ['long']],
'ClockCheckSlot' : [ 0x4464, ['unsigned char']],
'ClockPollCycle' : [ 0x4465, ['unsigned char']],
'NmiActive' : [ 0x4466, ['unsigned short']],
'DpcWatchdogPeriod' : [ 0x4468, ['long']],
'DpcWatchdogCount' : [ 0x446c, ['long']],
'TickOffset' : [ 0x4470, ['unsigned long long']],
'KeSpinLockOrdering' : [ 0x4478, ['long']],
'PrcbPad70' : [ 0x447c, ['unsigned long']],
'WaitListHead' : [ 0x4480, ['_LIST_ENTRY']],
'WaitLock' : [ 0x4490, ['unsigned long long']],
'ReadySummary' : [ 0x4498, ['unsigned long']],
'QueueIndex' : [ 0x449c, ['unsigned long']],
'TimerExpirationDpc' : [ 0x44a0, ['_KDPC']],
'PrcbPad72' : [ 0x44e0, ['array', 4, ['unsigned long long']]],
'DispatcherReadyListHead' : [ 0x4500, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x4700, ['unsigned long']],
'KernelTime' : [ 0x4704, ['unsigned long']],
'UserTime' : [ 0x4708, ['unsigned long']],
'DpcTime' : [ 0x470c, ['unsigned long']],
'InterruptTime' : [ 0x4710, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x4714, ['unsigned long']],
'DebuggerSavedIRQL' : [ 0x4718, ['unsigned char']],
'PrcbPad80' : [ 0x4719, ['array', 7, ['unsigned char']]],
'DpcTimeCount' : [ 0x4720, ['unsigned long']],
'DpcTimeLimit' : [ 0x4724, ['unsigned long']],
'PeriodicCount' : [ 0x4728, ['unsigned long']],
'PeriodicBias' : [ 0x472c, ['unsigned long']],
'AvailableTime' : [ 0x4730, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x4734, ['unsigned long']],
'ParentNode' : [ 0x4738, ['pointer64', ['_KNODE']]],
'StartCycles' : [ 0x4740, ['unsigned long long']],
'PrcbPad82' : [ 0x4748, ['array', 3, ['unsigned long long']]],
'MmSpinLockOrdering' : [ 0x4760, ['long']],
'PageColor' : [ 0x4764, ['unsigned long']],
'NodeColor' : [ 0x4768, ['unsigned long']],
'NodeShiftedColor' : [ 0x476c, ['unsigned long']],
'SecondaryColorMask' : [ 0x4770, ['unsigned long']],
'PrcbPad83' : [ 0x4774, ['unsigned long']],
'CycleTime' : [ 0x4778, ['unsigned long long']],
'CcFastMdlReadNoWait' : [ 0x4780, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x4784, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x4788, ['unsigned long']],
'CcMapDataNoWait' : [ 0x478c, ['unsigned long']],
'CcMapDataWait' : [ 0x4790, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x4794, ['unsigned long']],
'CcPinReadNoWait' : [ 0x4798, ['unsigned long']],
'CcPinReadWait' : [ 0x479c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x47a0, ['unsigned long']],
'CcMdlReadWait' : [ 0x47a4, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x47a8, ['unsigned long']],
'CcLazyWriteIos' : [ 0x47ac, ['unsigned long']],
'CcLazyWritePages' : [ 0x47b0, ['unsigned long']],
'CcDataFlushes' : [ 0x47b4, ['unsigned long']],
'CcDataPages' : [ 0x47b8, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x47bc, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x47c0, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x47c4, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x47c8, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x47cc, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x47d0, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x47d4, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x47d8, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x47dc, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x47e0, ['unsigned long']],
'CcReadAheadIos' : [ 0x47e4, ['unsigned long']],
'MmCacheTransitionCount' : [ 0x47e8, ['long']],
'MmCacheReadCount' : [ 0x47ec, ['long']],
'MmCacheIoCount' : [ 0x47f0, ['long']],
'PrcbPad91' : [ 0x47f4, ['array', 1, ['unsigned long']]],
'RuntimeAccumulation' : [ 0x47f8, ['unsigned long long']],
'PowerState' : [ 0x4800, ['_PROCESSOR_POWER_STATE']],
'PrcbPad92' : [ 0x4900, ['array', 16, ['unsigned char']]],
'KeAlignmentFixupCount' : [ 0x4910, ['unsigned long']],
'DpcWatchdogDpc' : [ 0x4918, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x4958, ['_KTIMER']],
'Cache' : [ 0x4998, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x49d4, ['unsigned long']],
'CachedCommit' : [ 0x49d8, ['unsigned long']],
'CachedResidentAvailable' : [ 0x49dc, ['unsigned long']],
'HyperPte' : [ 0x49e0, ['pointer64', ['void']]],
'WheaInfo' : [ 0x49e8, ['pointer64', ['void']]],
'EtwSupport' : [ 0x49f0, ['pointer64', ['void']]],
'InterruptObjectPool' : [ 0x4a00, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x4a10, ['_SLIST_HEADER']],
'HypercallPageVirtual' : [ 0x4a20, ['pointer64', ['void']]],
'VirtualApicAssist' : [ 0x4a28, ['pointer64', ['void']]],
'StatisticsPage' : [ 0x4a30, ['pointer64', ['unsigned long long']]],
'RateControl' : [ 0x4a38, ['pointer64', ['void']]],
'CacheProcessorMask' : [ 0x4a40, ['array', 5, ['unsigned long long']]],
'PackageProcessorSet' : [ 0x4a68, ['_KAFFINITY_EX']],
'CoreProcessorSet' : [ 0x4a90, ['unsigned long long']],
'PebsIndexAddress' : [ 0x4a98, ['pointer64', ['void']]],
'PrcbPad93' : [ 0x4aa0, ['array', 12, ['unsigned long long']]],
'SpinLockAcquireCount' : [ 0x4b00, ['unsigned long']],
'SpinLockContentionCount' : [ 0x4b04, ['unsigned long']],
'SpinLockSpinCount' : [ 0x4b08, ['unsigned long']],
'IpiSendRequestBroadcastCount' : [ 0x4b0c, ['unsigned long']],
'IpiSendRequestRoutineCount' : [ 0x4b10, ['unsigned long']],
'IpiSendSoftwareInterruptCount' : [ 0x4b14, ['unsigned long']],
'ExInitializeResourceCount' : [ 0x4b18, ['unsigned long']],
'ExReInitializeResourceCount' : [ 0x4b1c, ['unsigned long']],
'ExDeleteResourceCount' : [ 0x4b20, ['unsigned long']],
'ExecutiveResourceAcquiresCount' : [ 0x4b24, ['unsigned long']],
'ExecutiveResourceContentionsCount' : [ 0x4b28, ['unsigned long']],
'ExecutiveResourceReleaseExclusiveCount' : [ 0x4b2c, ['unsigned long']],
'ExecutiveResourceReleaseSharedCount' : [ 0x4b30, ['unsigned long']],
'ExecutiveResourceConvertsCount' : [ 0x4b34, ['unsigned long']],
'ExAcqResExclusiveAttempts' : [ 0x4b38, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusive' : [ 0x4b3c, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusiveRecursive' : [ 0x4b40, ['unsigned long']],
'ExAcqResExclusiveWaits' : [ 0x4b44, ['unsigned long']],
'ExAcqResExclusiveNotAcquires' : [ 0x4b48, ['unsigned long']],
'ExAcqResSharedAttempts' : [ 0x4b4c, ['unsigned long']],
'ExAcqResSharedAcquiresExclusive' : [ 0x4b50, ['unsigned long']],
'ExAcqResSharedAcquiresShared' : [ 0x4b54, ['unsigned long']],
'ExAcqResSharedAcquiresSharedRecursive' : [ 0x4b58, ['unsigned long']],
'ExAcqResSharedWaits' : [ 0x4b5c, ['unsigned long']],
'ExAcqResSharedNotAcquires' : [ 0x4b60, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAttempts' : [ 0x4b64, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresExclusive' : [ 0x4b68, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresShared' : [ 0x4b6c, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresSharedRecursive' : [ 0x4b70, ['unsigned long']],
'ExAcqResSharedStarveExclusiveWaits' : [ 0x4b74, ['unsigned long']],
'ExAcqResSharedStarveExclusiveNotAcquires' : [ 0x4b78, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAttempts' : [ 0x4b7c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresExclusive' : [ 0x4b80, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresShared' : [ 0x4b84, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive' : [ 0x4b88, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveWaits' : [ 0x4b8c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveNotAcquires' : [ 0x4b90, ['unsigned long']],
'ExSetResOwnerPointerExclusive' : [ 0x4b94, ['unsigned long']],
'ExSetResOwnerPointerSharedNew' : [ 0x4b98, ['unsigned long']],
'ExSetResOwnerPointerSharedOld' : [ 0x4b9c, ['unsigned long']],
'ExTryToAcqExclusiveAttempts' : [ 0x4ba0, ['unsigned long']],
'ExTryToAcqExclusiveAcquires' : [ 0x4ba4, ['unsigned long']],
'ExBoostExclusiveOwner' : [ 0x4ba8, ['unsigned long']],
'ExBoostSharedOwners' : [ 0x4bac, ['unsigned long']],
'ExEtwSynchTrackingNotificationsCount' : [ 0x4bb0, ['unsigned long']],
'ExEtwSynchTrackingNotificationsAccountedCount' : [ 0x4bb4, ['unsigned long']],
'VendorString' : [ 0x4bb8, ['array', 13, ['unsigned char']]],
'PrcbPad10' : [ 0x4bc5, ['array', 3, ['unsigned char']]],
'FeatureBits' : [ 0x4bc8, ['unsigned long']],
'UpdateSignature' : [ 0x4bd0, ['_LARGE_INTEGER']],
'Context' : [ 0x4bd8, ['pointer64', ['_CONTEXT']]],
'ContextFlags' : [ 0x4be0, ['unsigned long']],
'ExtendedState' : [ 0x4be8, ['pointer64', ['_XSAVE_AREA']]],
'Mailbox' : [ 0x4c00, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestMailbox' : [ 0x4c80, ['array', 1, ['_REQUEST_MAILBOX']]],
} ],
# _SINGLE_LIST_ENTRY32 (0x4 bytes): 32-bit flavor of the singly-linked
# node -- 'Next' is stored as a raw unsigned long, not a 64-bit pointer.
'_SINGLE_LIST_ENTRY32' : [ 0x4, {
'Next' : [ 0x0, ['unsigned long']],
} ],
'_KTHREAD' : [ 0x368, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'CycleTime' : [ 0x18, ['unsigned long long']],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'KernelStack' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'WaitRegister' : [ 0x48, ['_KWAIT_STATUS_REGISTER']],
'Running' : [ 0x49, ['unsigned char']],
'Alerted' : [ 0x4a, ['array', 2, ['unsigned char']]],
'KernelStackResident' : [ 0x4c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x4c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessReadyQueue' : [ 0x4c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WaitNext' : [ 0x4c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x4c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Alertable' : [ 0x4c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GdiFlushActive' : [ 0x4c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x4c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ApcInterruptRequest' : [ 0x4c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ForceDeferSchedule' : [ 0x4c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'QuantumEndMigrate' : [ 0x4c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'UmsDirectedSwitchEnable' : [ 0x4c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'TimerActive' : [ 0x4c, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'SystemThread' : [ 0x4c, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Reserved' : [ 0x4c, ['BitField', dict(start_bit = 14, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x4c, ['long']],
'ApcState' : [ 0x50, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x50, ['array', 43, ['unsigned char']]],
'Priority' : [ 0x7b, ['unsigned char']],
'NextProcessor' : [ 0x7c, ['unsigned long']],
'DeferredProcessor' : [ 0x80, ['unsigned long']],
'ApcQueueLock' : [ 0x88, ['unsigned long long']],
'WaitStatus' : [ 0x90, ['long long']],
'WaitBlockList' : [ 0x98, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0xa0, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0xa0, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xb0, ['pointer64', ['_KQUEUE']]],
'Teb' : [ 0xb8, ['pointer64', ['void']]],
'Timer' : [ 0xc0, ['_KTIMER']],
'AutoAlignment' : [ 0x100, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0x100, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'EtwStackTraceApc1Inserted' : [ 0x100, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EtwStackTraceApc2Inserted' : [ 0x100, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CalloutActive' : [ 0x100, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ApcQueueable' : [ 0x100, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'EnableStackSwap' : [ 0x100, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'GuiThread' : [ 0x100, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'UmsPerformingSyscall' : [ 0x100, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'VdmSafe' : [ 0x100, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'UmsDispatched' : [ 0x100, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ReservedFlags' : [ 0x100, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0x100, ['long']],
'Spare0' : [ 0x104, ['unsigned long']],
'WaitBlock' : [ 0x108, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill4' : [ 0x108, ['array', 44, ['unsigned char']]],
'ContextSwitches' : [ 0x134, ['unsigned long']],
'WaitBlockFill5' : [ 0x108, ['array', 92, ['unsigned char']]],
'State' : [ 0x164, ['unsigned char']],
'NpxState' : [ 0x165, ['unsigned char']],
'WaitIrql' : [ 0x166, ['unsigned char']],
'WaitMode' : [ 0x167, ['unsigned char']],
'WaitBlockFill6' : [ 0x108, ['array', 140, ['unsigned char']]],
'WaitTime' : [ 0x194, ['unsigned long']],
'WaitBlockFill7' : [ 0x108, ['array', 168, ['unsigned char']]],
'TebMappedLowVa' : [ 0x1b0, ['pointer64', ['void']]],
'Ucb' : [ 0x1b8, ['pointer64', ['_UMS_CONTROL_BLOCK']]],
'WaitBlockFill8' : [ 0x108, ['array', 188, ['unsigned char']]],
'KernelApcDisable' : [ 0x1c4, ['short']],
'SpecialApcDisable' : [ 0x1c6, ['short']],
'CombinedApcDisable' : [ 0x1c4, ['unsigned long']],
'QueueListEntry' : [ 0x1c8, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x1d8, ['pointer64', ['_KTRAP_FRAME']]],
'FirstArgument' : [ 0x1e0, ['pointer64', ['void']]],
'CallbackStack' : [ 0x1e8, ['pointer64', ['void']]],
'CallbackDepth' : [ 0x1e8, ['unsigned long long']],
'ApcStateIndex' : [ 0x1f0, ['unsigned char']],
'BasePriority' : [ 0x1f1, ['unsigned char']],
'PriorityDecrement' : [ 0x1f2, ['unsigned char']],
'ForegroundBoost' : [ 0x1f2, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'UnusualBoost' : [ 0x1f2, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Preempted' : [ 0x1f3, ['unsigned char']],
'AdjustReason' : [ 0x1f4, ['unsigned char']],
'AdjustIncrement' : [ 0x1f5, ['unsigned char']],
'PreviousMode' : [ 0x1f6, ['unsigned char']],
'Saturation' : [ 0x1f7, ['unsigned char']],
'SystemCallNumber' : [ 0x1f8, ['unsigned long']],
'FreezeCount' : [ 0x1fc, ['unsigned long']],
'UserAffinity' : [ 0x200, ['_GROUP_AFFINITY']],
'Process' : [ 0x210, ['pointer64', ['_KPROCESS']]],
'Affinity' : [ 0x218, ['_GROUP_AFFINITY']],
'IdealProcessor' : [ 0x228, ['unsigned long']],
'UserIdealProcessor' : [ 0x22c, ['unsigned long']],
'ApcStatePointer' : [ 0x230, ['array', 2, ['pointer64', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x240, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x240, ['array', 43, ['unsigned char']]],
'WaitReason' : [ 0x26b, ['unsigned char']],
'SuspendCount' : [ 0x26c, ['unsigned char']],
'Spare1' : [ 0x26d, ['unsigned char']],
'CodePatchInProgress' : [ 0x26e, ['unsigned char']],
'Win32Thread' : [ 0x270, ['pointer64', ['void']]],
'StackBase' : [ 0x278, ['pointer64', ['void']]],
'SuspendApc' : [ 0x280, ['_KAPC']],
'SuspendApcFill0' : [ 0x280, ['array', 1, ['unsigned char']]],
'ResourceIndex' : [ 0x281, ['unsigned char']],
'SuspendApcFill1' : [ 0x280, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x283, ['unsigned char']],
'SuspendApcFill2' : [ 0x280, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x284, ['unsigned long']],
'SuspendApcFill3' : [ 0x280, ['array', 64, ['unsigned char']]],
'WaitPrcb' : [ 0x2c0, ['pointer64', ['_KPRCB']]],
'SuspendApcFill4' : [ 0x280, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x2c8, ['pointer64', ['void']]],
'SuspendApcFill5' : [ 0x280, ['array', 83, ['unsigned char']]],
'LargeStack' : [ 0x2d3, ['unsigned char']],
'UserTime' : [ 0x2d4, ['unsigned long']],
'SuspendSemaphore' : [ 0x2d8, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x2d8, ['array', 28, ['unsigned char']]],
'SListFaultCount' : [ 0x2f4, ['unsigned long']],
'ThreadListEntry' : [ 0x2f8, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x308, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x318, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x320, ['long long']],
'WriteOperationCount' : [ 0x328, ['long long']],
'OtherOperationCount' : [ 0x330, ['long long']],
'ReadTransferCount' : [ 0x338, ['long long']],
'WriteTransferCount' : [ 0x340, ['long long']],
'OtherTransferCount' : [ 0x348, ['long long']],
'ThreadCounters' : [ 0x350, ['pointer64', ['_KTHREAD_COUNTERS']]],
'StateSaveArea' : [ 0x358, ['pointer64', ['_XSAVE_FORMAT']]],
'XStateSave' : [ 0x360, ['pointer64', ['_XSTATE_SAVE']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x50, {
'Current' : [ 0x0, ['_KERNEL_STACK_SEGMENT']],
'Previous' : [ 0x28, ['_KERNEL_STACK_SEGMENT']],
} ],
'_UMS_CONTROL_BLOCK' : [ 0x98, {
'UmsContext' : [ 0x0, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'CompletionListEntry' : [ 0x8, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'CompletionListEvent' : [ 0x10, ['pointer64', ['_KEVENT']]],
'ServiceSequenceNumber' : [ 0x18, ['unsigned long']],
'UmsQueue' : [ 0x20, ['_KQUEUE']],
'QueueEntry' : [ 0x60, ['_LIST_ENTRY']],
'YieldingUmsContext' : [ 0x70, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'YieldingParam' : [ 0x78, ['pointer64', ['void']]],
'UmsTeb' : [ 0x80, ['pointer64', ['void']]],
'PrimaryFlags' : [ 0x88, ['unsigned long']],
'UmsContextHeaderReady' : [ 0x88, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'UmsAssociatedQueue' : [ 0x20, ['pointer64', ['_KQUEUE']]],
'UmsQueueListEntry' : [ 0x28, ['pointer64', ['_LIST_ENTRY']]],
'UmsContextHeader' : [ 0x30, ['pointer64', ['_KUMS_CONTEXT_HEADER']]],
'UmsWaitGate' : [ 0x38, ['_KGATE']],
'StagingArea' : [ 0x50, ['pointer64', ['void']]],
'Flags' : [ 0x58, ['long']],
'UmsForceQueueTermination' : [ 0x58, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'UmsAssociatedQueueUsed' : [ 0x58, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'UmsThreadParked' : [ 0x58, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'UmsPrimaryDeliveredContext' : [ 0x58, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'UmsPerformingSingleStep' : [ 0x58, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'TebSelector' : [ 0x90, ['unsigned short']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Event' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'__unnamed_11c8' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 25, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 61, native_type='unsigned long long')]],
'Region' : [ 0x8, ['BitField', dict(start_bit = 61, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_11cd' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_11d0' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
'Header8' : [ 0x0, ['__unnamed_11c8']],
'Header16' : [ 0x0, ['__unnamed_11cd']],
'HeaderX64' : [ 0x0, ['__unnamed_11d0']],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x60, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_IO_STATUS_BLOCK32' : [ 0x8, {
'Status' : [ 0x0, ['long']],
'Information' : [ 0x4, ['unsigned long']],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_PROCESSOR_NUMBER' : [ 0x4, {
'Group' : [ 0x0, ['unsigned short']],
'Number' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x100, {
'Locks' : [ 0x0, ['array', 32, ['pointer64', ['_EX_PUSH_LOCK']]]],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x40, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x18, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x20, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x28, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x30, ['long']],
'Flags' : [ 0x34, ['long']],
} ],
'_ETHREAD' : [ 0x4a8, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x368, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x370, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x370, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x380, ['long']],
'PostBlockList' : [ 0x388, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x388, ['pointer64', ['void']]],
'StartAddress' : [ 0x390, ['pointer64', ['void']]],
'TerminationPort' : [ 0x398, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x398, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x398, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x3a0, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x3a8, ['_LIST_ENTRY']],
'Cid' : [ 0x3b8, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x3c8, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x3c8, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x3e8, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x3f0, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x400, ['unsigned long long']],
'DeviceToVerify' : [ 0x408, ['pointer64', ['_DEVICE_OBJECT']]],
'CpuQuotaApc' : [ 0x410, ['pointer64', ['_PSP_CPU_QUOTA_APC']]],
'Win32StartAddress' : [ 0x418, ['pointer64', ['void']]],
'LegacyPowerObject' : [ 0x420, ['pointer64', ['void']]],
'ThreadListEntry' : [ 0x428, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x438, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x440, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x448, ['unsigned long']],
'MmLockOrdering' : [ 0x44c, ['long']],
'CrossThreadFlags' : [ 0x450, ['unsigned long']],
'Terminated' : [ 0x450, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x450, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x450, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x450, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x450, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x450, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x450, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x450, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x450, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x450, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x450, ['BitField', dict(start_bit = 10, end_bit = 13, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x450, ['BitField', dict(start_bit = 13, end_bit = 16, native_type='unsigned long')]],
'RundownFail' : [ 0x450, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NeedsWorkingSetAging' : [ 0x450, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x454, ['unsigned long']],
'ActiveExWorker' : [ 0x454, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x454, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x454, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x454, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x454, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RateApcState' : [ 0x454, ['BitField', dict(start_bit = 5, end_bit = 7, native_type='unsigned long')]],
'SelfTerminate' : [ 0x454, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x458, ['unsigned long']],
'Spare' : [ 0x458, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x458, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EtwPageFaultCalloutActive' : [ 0x458, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x458, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x458, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetExclusive' : [ 0x458, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetShared' : [ 0x458, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x458, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x459, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceExclusive' : [ 0x459, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x459, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x459, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Prefetching' : [ 0x459, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsDynamicMemoryShared' : [ 0x459, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsChangeControlAreaExclusive' : [ 0x459, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsChangeControlAreaShared' : [ 0x459, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetExclusive' : [ 0x45a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetShared' : [ 0x45a, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetExclusive' : [ 0x45a, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetShared' : [ 0x45a, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimTrigger' : [ 0x45a, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned char')]],
'Spare1' : [ 0x45a, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'PriorityRegionActive' : [ 0x45b, ['unsigned char']],
'CacheManagerActive' : [ 0x45c, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x45d, ['unsigned char']],
'ActiveFaultCount' : [ 0x45e, ['unsigned char']],
'LockOrderState' : [ 0x45f, ['unsigned char']],
'AlpcMessageId' : [ 0x460, ['unsigned long long']],
'AlpcMessage' : [ 0x468, ['pointer64', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x468, ['unsigned long']],
'AlpcWaitListEntry' : [ 0x470, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x480, ['unsigned long']],
'IoBoostCount' : [ 0x484, ['unsigned long']],
'IrpListLock' : [ 0x488, ['unsigned long long']],
'ReservedForSynchTracking' : [ 0x490, ['pointer64', ['void']]],
'CmCallbackListHead' : [ 0x498, ['_SINGLE_LIST_ENTRY']],
'KernelStackReference' : [ 0x4a0, ['unsigned long']],
} ],
'_EPROCESS' : [ 0x4e8, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x160, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0x168, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x170, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0x178, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0x180, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0x188, ['_LIST_ENTRY']],
'ProcessQuotaUsage' : [ 0x198, ['array', 2, ['unsigned long long']]],
'ProcessQuotaPeak' : [ 0x1a8, ['array', 2, ['unsigned long long']]],
'CommitCharge' : [ 0x1b8, ['unsigned long long']],
'QuotaBlock' : [ 0x1c0, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'CpuQuotaBlock' : [ 0x1c8, ['pointer64', ['_PS_CPU_QUOTA_BLOCK']]],
'PeakVirtualSize' : [ 0x1d0, ['unsigned long long']],
'VirtualSize' : [ 0x1d8, ['unsigned long long']],
'SessionProcessLinks' : [ 0x1e0, ['_LIST_ENTRY']],
'DebugPort' : [ 0x1f0, ['pointer64', ['void']]],
'ExceptionPortData' : [ 0x1f8, ['pointer64', ['void']]],
'ExceptionPortValue' : [ 0x1f8, ['unsigned long long']],
'ExceptionPortState' : [ 0x1f8, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long long')]],
'ObjectTable' : [ 0x200, ['pointer64', ['_HANDLE_TABLE']]],
'Token' : [ 0x208, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x210, ['unsigned long long']],
'AddressCreationLock' : [ 0x218, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x220, ['pointer64', ['_ETHREAD']]],
'ForkInProgress' : [ 0x228, ['pointer64', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x230, ['unsigned long long']],
'PhysicalVadRoot' : [ 0x238, ['pointer64', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x240, ['pointer64', ['void']]],
'NumberOfPrivatePages' : [ 0x248, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x250, ['unsigned long long']],
'Win32Process' : [ 0x258, ['pointer64', ['void']]],
'Job' : [ 0x260, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x268, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x270, ['pointer64', ['void']]],
'Cookie' : [ 0x278, ['unsigned long']],
'UmsScheduledThreads' : [ 0x27c, ['unsigned long']],
'WorkingSetWatch' : [ 0x280, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x288, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x290, ['pointer64', ['void']]],
'LdtInformation' : [ 0x298, ['pointer64', ['void']]],
'Spare' : [ 0x2a0, ['pointer64', ['void']]],
'ConsoleHostProcess' : [ 0x2a8, ['unsigned long long']],
'DeviceMap' : [ 0x2b0, ['pointer64', ['void']]],
'EtwDataSource' : [ 0x2b8, ['pointer64', ['void']]],
'FreeTebHint' : [ 0x2c0, ['pointer64', ['void']]],
'FreeUmsTebHint' : [ 0x2c8, ['pointer64', ['void']]],
'PageDirectoryPte' : [ 0x2d0, ['_HARDWARE_PTE']],
'Filler' : [ 0x2d0, ['unsigned long long']],
'Session' : [ 0x2d8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x2e0, ['array', 15, ['unsigned char']]],
'PriorityClass' : [ 0x2ef, ['unsigned char']],
'JobLinks' : [ 0x2f0, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x300, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x308, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x318, ['pointer64', ['void']]],
'Wow64Process' : [ 0x320, ['pointer64', ['void']]],
'ActiveThreads' : [ 0x328, ['unsigned long']],
'ImagePathHash' : [ 0x32c, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x330, ['unsigned long']],
'LastThreadExitStatus' : [ 0x334, ['long']],
'Peb' : [ 0x338, ['pointer64', ['_PEB']]],
'PrefetchTrace' : [ 0x340, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x348, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x350, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x358, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x360, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x368, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x370, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x378, ['unsigned long long']],
'CommitChargePeak' : [ 0x380, ['unsigned long long']],
'AweInfo' : [ 0x388, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x390, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x398, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x420, ['_LIST_ENTRY']],
'HighestUserAddress' : [ 0x430, ['pointer64', ['void']]],
'ModifiedPageCount' : [ 0x438, ['unsigned long']],
'Flags2' : [ 0x43c, ['unsigned long']],
'JobNotReallyActive' : [ 0x43c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x43c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x43c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x43c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x43c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x43c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ReportPhysicalPageChanges' : [ 0x43c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'HandleTableRundown' : [ 0x43c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x43c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x43c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'NumaAware' : [ 0x43c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtectedProcess' : [ 0x43c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x43c, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x43c, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x43c, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x43c, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x43c, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x43c, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PropagateNode' : [ 0x43c, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'ExplicitAffinity' : [ 0x43c, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Spare1' : [ 0x43c, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'ForceRelocateImages' : [ 0x43c, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'DisallowStrippedImages' : [ 0x43c, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'LowVaAccessible' : [ 0x43c, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'Flags' : [ 0x440, ['unsigned long']],
'CreateReported' : [ 0x440, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x440, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x440, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x440, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x440, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x440, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x440, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x440, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x440, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x440, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x440, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x440, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x440, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x440, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x440, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x440, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x440, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x440, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x440, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x440, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x440, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x440, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x440, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x440, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x440, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x440, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x440, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x440, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SetTimerResolutionLink' : [ 0x440, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x444, ['long']],
'VadRoot' : [ 0x448, ['_MM_AVL_TABLE']],
'AlpcContext' : [ 0x488, ['_ALPC_PROCESS_CONTEXT']],
'TimerResolutionLink' : [ 0x4a8, ['_LIST_ENTRY']],
'RequestedTimerResolution' : [ 0x4b8, ['unsigned long']],
'ActiveThreadsHighWatermark' : [ 0x4bc, ['unsigned long']],
'SmallestTimerResolution' : [ 0x4c0, ['unsigned long']],
'TimerResolutionStackRecord' : [ 0x4c8, ['pointer64', ['_PO_DIAG_STACK_RECORD']]],
'SequenceNumber' : [ 0x4d0, ['unsigned long long']],
'CreateInterruptTime' : [ 0x4d8, ['unsigned long long']],
'CreateUnbiasedInterruptTime' : [ 0x4e0, ['unsigned long long']],
} ],
'_KPROCESS' : [ 0x160, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['unsigned long long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x40, ['unsigned long long']],
'Affinity' : [ 0x48, ['_KAFFINITY_EX']],
'ReadyListHead' : [ 0x70, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x80, ['_SINGLE_LIST_ENTRY']],
'ActiveProcessors' : [ 0x88, ['_KAFFINITY_EX']],
'AutoAlignment' : [ 0xb0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0xb0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0xb0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ActiveGroupsMask' : [ 0xb0, ['BitField', dict(start_bit = 3, end_bit = 7, native_type='unsigned long')]],
'ReservedFlags' : [ 0xb0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0xb0, ['long']],
'BasePriority' : [ 0xb4, ['unsigned char']],
'QuantumReset' : [ 0xb5, ['unsigned char']],
'Visited' : [ 0xb6, ['unsigned char']],
'Unused3' : [ 0xb7, ['unsigned char']],
'ThreadSeed' : [ 0xb8, ['array', 4, ['unsigned long']]],
'IdealNode' : [ 0xc8, ['array', 4, ['unsigned short']]],
'IdealGlobalNode' : [ 0xd0, ['unsigned short']],
'Flags' : [ 0xd2, ['_KEXECUTE_OPTIONS']],
'Unused1' : [ 0xd3, ['unsigned char']],
'Unused2' : [ 0xd4, ['unsigned long']],
'Unused4' : [ 0xd8, ['unsigned long']],
'StackCount' : [ 0xdc, ['_KSTACK_COUNT']],
'ProcessListEntry' : [ 0xe0, ['_LIST_ENTRY']],
'CycleTime' : [ 0xf0, ['unsigned long long']],
'KernelTime' : [ 0xf8, ['unsigned long']],
'UserTime' : [ 0xfc, ['unsigned long']],
'InstrumentationCallback' : [ 0x100, ['pointer64', ['void']]],
'LdtSystemDescriptor' : [ 0x108, ['_KGDTENTRY64']],
'LdtBaseAddress' : [ 0x118, ['pointer64', ['void']]],
'LdtProcessLock' : [ 0x120, ['_KGUARDED_MUTEX']],
'LdtFreeSelectorHint' : [ 0x158, ['unsigned short']],
'LdtTableLength' : [ 0x15a, ['unsigned short']],
} ],
'__unnamed_12d4' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_12d4']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'_AUX_ACCESS_DATA' : [ 0xd8, {
'PrivilegesUsed' : [ 0x0, ['pointer64', ['_PRIVILEGE_SET']]],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'AccessesToAudit' : [ 0x18, ['unsigned long']],
'MaximumAuditMask' : [ 0x1c, ['unsigned long']],
'TransactionId' : [ 0x20, ['_GUID']],
'NewSecurityDescriptor' : [ 0x30, ['pointer64', ['void']]],
'ExistingSecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'ParentSecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'DeRefSecurityDescriptor' : [ 0x48, ['pointer64', ['void']]],
'SDLock' : [ 0x50, ['pointer64', ['void']]],
'AccessReasons' : [ 0x58, ['_ACCESS_REASONS']],
} ],
'__unnamed_12e3' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_12e8' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_12ea' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_12e8']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_12f5' : [ 0x50, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_12f7' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_12f5']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_12e3']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_12ea']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_12f7']],
} ],
'__unnamed_12fe' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1302' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_1306' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_1308' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_130c' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_130e' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1310' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
} ],
'__unnamed_1312' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1314' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1316' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_131a' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsSectorSizeInformation', 12: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_131c' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_131e' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1320' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1322' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1324' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1328' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_132c' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1330' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1334' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'})]],
} ],
'__unnamed_133a' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_133e' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1342' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1344' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1346' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_134a' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'})]],
} ],
'__unnamed_134e' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1352' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_1356' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_135a' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1362' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_1366' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_1368' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_136a' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_136c' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_12fe']],
'CreatePipe' : [ 0x0, ['__unnamed_1302']],
'CreateMailslot' : [ 0x0, ['__unnamed_1306']],
'Read' : [ 0x0, ['__unnamed_1308']],
'Write' : [ 0x0, ['__unnamed_1308']],
'QueryDirectory' : [ 0x0, ['__unnamed_130c']],
'NotifyDirectory' : [ 0x0, ['__unnamed_130e']],
'QueryFile' : [ 0x0, ['__unnamed_1310']],
'SetFile' : [ 0x0, ['__unnamed_1312']],
'QueryEa' : [ 0x0, ['__unnamed_1314']],
'SetEa' : [ 0x0, ['__unnamed_1316']],
'QueryVolume' : [ 0x0, ['__unnamed_131a']],
'SetVolume' : [ 0x0, ['__unnamed_131a']],
'FileSystemControl' : [ 0x0, ['__unnamed_131c']],
'LockControl' : [ 0x0, ['__unnamed_131e']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1320']],
'QuerySecurity' : [ 0x0, ['__unnamed_1322']],
'SetSecurity' : [ 0x0, ['__unnamed_1324']],
'MountVolume' : [ 0x0, ['__unnamed_1328']],
'VerifyVolume' : [ 0x0, ['__unnamed_1328']],
'Scsi' : [ 0x0, ['__unnamed_132c']],
'QueryQuota' : [ 0x0, ['__unnamed_1330']],
'SetQuota' : [ 0x0, ['__unnamed_1316']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1334']],
'QueryInterface' : [ 0x0, ['__unnamed_133a']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_133e']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1342']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1344']],
'SetLock' : [ 0x0, ['__unnamed_1346']],
'QueryId' : [ 0x0, ['__unnamed_134a']],
'QueryDeviceText' : [ 0x0, ['__unnamed_134e']],
'UsageNotification' : [ 0x0, ['__unnamed_1352']],
'WaitWake' : [ 0x0, ['__unnamed_1356']],
'PowerSequence' : [ 0x0, ['__unnamed_135a']],
'Power' : [ 0x0, ['__unnamed_1362']],
'StartDevice' : [ 0x0, ['__unnamed_1366']],
'WMI' : [ 0x0, ['__unnamed_1368']],
'Others' : [ 0x0, ['__unnamed_136a']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_136c']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_1382' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_1382']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'_KDPC' : [ 0x40, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x8, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x20, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x8, ['pointer64', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x10, ['pointer64', ['void']]],
'TxnParameters' : [ 0x18, ['pointer64', ['_TXN_PARAMETER_BLOCK']]],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'Type' : [ 0xc, ['unsigned char']],
'Reserved1' : [ 0xd, ['unsigned char']],
'Reserved2' : [ 0xe, ['unsigned short']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0xd8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0xb8, ['unsigned long long']],
'IrpList' : [ 0xc0, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0xd0, ['pointer64', ['void']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x48, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0xc, ['unsigned long']],
'CurrentFileIndex' : [ 0xc, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
'FirstFileEntry' : [ 0x30, ['pointer64', ['unsigned long long']]],
'Process' : [ 0x38, ['pointer64', ['_EPROCESS']]],
'SessionId' : [ 0x40, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer64', ['unsigned long long']]],
'LastPageFrameEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
} ],
'_WHEA_ERROR_PACKET_V2' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['_WHEA_ERROR_PACKET_FLAGS']],
'ErrorType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ErrorSourceId' : [ 0x18, ['unsigned long']],
'ErrorSourceType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'NotifyType' : [ 0x20, ['_GUID']],
'Context' : [ 0x30, ['unsigned long long']],
'DataFormat' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'WheaDataFormatIPFSalRecord', 1: 'WheaDataFormatXPFMCA', 2: 'WheaDataFormatMemory', 3: 'WheaDataFormatPCIExpress', 4: 'WheaDataFormatNMIPort', 5: 'WheaDataFormatPCIXBus', 6: 'WheaDataFormatPCIXDevice', 7: 'WheaDataFormatGeneric', 8: 'WheaDataFormatMax'})]],
'Reserved1' : [ 0x3c, ['unsigned long']],
'DataOffset' : [ 0x40, ['unsigned long']],
'DataLength' : [ 0x44, ['unsigned long']],
'PshedDataOffset' : [ 0x48, ['unsigned long']],
'PshedDataLength' : [ 0x4c, ['unsigned long']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_FSRTL_ADVANCED_FCB_HEADER' : [ 0x58, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned char']],
'IsFastIoPossible' : [ 0x5, ['unsigned char']],
'Flags2' : [ 0x6, ['unsigned char']],
'Reserved' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'Version' : [ 0x7, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Resource' : [ 0x8, ['pointer64', ['_ERESOURCE']]],
'PagingIoResource' : [ 0x10, ['pointer64', ['_ERESOURCE']]],
'AllocationSize' : [ 0x18, ['_LARGE_INTEGER']],
'FileSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'FastMutex' : [ 0x30, ['pointer64', ['_FAST_MUTEX']]],
'FilterContexts' : [ 0x38, ['_LIST_ENTRY']],
'PushLock' : [ 0x48, ['_EX_PUSH_LOCK']],
'FileContextSupportPointer' : [ 0x50, ['pointer64', ['pointer64', ['void']]]],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'__unnamed_14ee' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'VolatileLong' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_14ee']],
} ],
'__unnamed_14ff' : [ 0x10, {
'I386' : [ 0x0, ['_I386_LOADER_BLOCK']],
'Ia64' : [ 0x0, ['_IA64_LOADER_BLOCK']],
} ],
'_LOADER_PARAMETER_BLOCK' : [ 0xf0, {
'OsMajorVersion' : [ 0x0, ['unsigned long']],
'OsMinorVersion' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'LoadOrderListHead' : [ 0x10, ['_LIST_ENTRY']],
'MemoryDescriptorListHead' : [ 0x20, ['_LIST_ENTRY']],
'BootDriverListHead' : [ 0x30, ['_LIST_ENTRY']],
'KernelStack' : [ 0x40, ['unsigned long long']],
'Prcb' : [ 0x48, ['unsigned long long']],
'Process' : [ 0x50, ['unsigned long long']],
'Thread' : [ 0x58, ['unsigned long long']],
'RegistryLength' : [ 0x60, ['unsigned long']],
'RegistryBase' : [ 0x68, ['pointer64', ['void']]],
'ConfigurationRoot' : [ 0x70, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ArcBootDeviceName' : [ 0x78, ['pointer64', ['unsigned char']]],
'ArcHalDeviceName' : [ 0x80, ['pointer64', ['unsigned char']]],
'NtBootPathName' : [ 0x88, ['pointer64', ['unsigned char']]],
'NtHalPathName' : [ 0x90, ['pointer64', ['unsigned char']]],
'LoadOptions' : [ 0x98, ['pointer64', ['unsigned char']]],
'NlsData' : [ 0xa0, ['pointer64', ['_NLS_DATA_BLOCK']]],
'ArcDiskInformation' : [ 0xa8, ['pointer64', ['_ARC_DISK_INFORMATION']]],
'OemFontFile' : [ 0xb0, ['pointer64', ['void']]],
'Extension' : [ 0xb8, ['pointer64', ['_LOADER_PARAMETER_EXTENSION']]],
'u' : [ 0xc0, ['__unnamed_14ff']],
'FirmwareInformation' : [ 0xd0, ['_FIRMWARE_INFORMATION_LOADER_BLOCK']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_MMPFNLIST' : [ 0x28, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
'Lock' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_152e' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer64', ['void']]],
'VolatileNext' : [ 0x0, ['pointer64', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_1530' : [ 0x8, {
'Blink' : [ 0x0, ['unsigned long long']],
'ImageProtoPte' : [ 0x0, ['pointer64', ['_MMPTE']]],
'ShareCount' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1533' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'VolatileReferenceCount' : [ 0x0, ['short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_1535' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_1533']],
} ],
'__unnamed_153d' : [ 0x8, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 52, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'PfnImageVerified' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 57, native_type='unsigned long long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMPFN' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_152e']],
'u2' : [ 0x8, ['__unnamed_1530']],
'PteAddress' : [ 0x10, ['pointer64', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x10, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['long']],
'PteLong' : [ 0x10, ['unsigned long long']],
'u3' : [ 0x18, ['__unnamed_1535']],
'UsedPageTableEntries' : [ 0x1c, ['unsigned short']],
'VaType' : [ 0x1e, ['unsigned char']],
'ViewCount' : [ 0x1f, ['unsigned char']],
'OriginalPte' : [ 0x20, ['_MMPTE']],
'AweReferenceCount' : [ 0x20, ['long']],
'u4' : [ 0x28, ['__unnamed_153d']],
} ],
'_MI_COLOR_BASE' : [ 0x10, {
'ColorPointer' : [ 0x0, ['pointer64', ['unsigned short']]],
'ColorMask' : [ 0x8, ['unsigned short']],
'ColorNode' : [ 0xa, ['unsigned short']],
} ],
'_MMSUPPORT' : [ 0x88, {
'WorkingSetMutex' : [ 0x0, ['_EX_PUSH_LOCK']],
'ExitGate' : [ 0x8, ['pointer64', ['_KGATE']]],
'AccessLog' : [ 0x10, ['pointer64', ['void']]],
'WorkingSetExpansionLinks' : [ 0x18, ['_LIST_ENTRY']],
'AgeDistribution' : [ 0x28, ['array', 7, ['unsigned long']]],
'MinimumWorkingSetSize' : [ 0x44, ['unsigned long']],
'WorkingSetSize' : [ 0x48, ['unsigned long']],
'WorkingSetPrivateSize' : [ 0x4c, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x50, ['unsigned long']],
'ChargedWslePages' : [ 0x54, ['unsigned long']],
'ActualWslePages' : [ 0x58, ['unsigned long']],
'WorkingSetSizeOverhead' : [ 0x5c, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x60, ['unsigned long']],
'HardFaultCount' : [ 0x64, ['unsigned long']],
'VmWorkingSetList' : [ 0x68, ['pointer64', ['_MMWSL']]],
'NextPageColor' : [ 0x70, ['unsigned short']],
'LastTrimStamp' : [ 0x72, ['unsigned short']],
'PageFaultCount' : [ 0x74, ['unsigned long']],
'RepurposeCount' : [ 0x78, ['unsigned long']],
'Spare' : [ 0x7c, ['array', 2, ['unsigned long']]],
'Flags' : [ 0x84, ['_MMSUPPORT_FLAGS']],
} ],
'_MMWSL' : [ 0x488, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer64', ['_MMWSLE']]],
'LowestPagableAddress' : [ 0x18, ['pointer64', ['void']]],
'LastInitializedWsle' : [ 0x20, ['unsigned long']],
'NextAgingSlot' : [ 0x24, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x28, ['unsigned long']],
'VadBitMapHint' : [ 0x2c, ['unsigned long']],
'NonDirectCount' : [ 0x30, ['unsigned long']],
'LastVadBit' : [ 0x34, ['unsigned long']],
'MaximumLastVadBit' : [ 0x38, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x3c, ['unsigned long']],
'LastAllocationSize' : [ 0x40, ['unsigned long']],
'NonDirectHash' : [ 0x48, ['pointer64', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x50, ['pointer64', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x58, ['pointer64', ['_MMWSLE_HASH']]],
'MaximumUserPageTablePages' : [ 0x60, ['unsigned long']],
'MaximumUserPageDirectoryPages' : [ 0x64, ['unsigned long']],
'CommittedPageTables' : [ 0x68, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectories' : [ 0x70, ['unsigned long']],
'CommittedPageDirectories' : [ 0x78, ['array', 128, ['unsigned long long']]],
'NumberOfCommittedPageDirectoryParents' : [ 0x478, ['unsigned long']],
'CommittedPageDirectoryParents' : [ 0x480, ['array', 1, ['unsigned long long']]],
} ],
'__unnamed_156b' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_156b']],
} ],
'__unnamed_1577' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_1581' : [ 0x10, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 30, native_type='unsigned long')]],
'BitMap64' : [ 0x4, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubsectionRoot' : [ 0x8, ['pointer64', ['_MM_SUBSECTION_AVL_TABLE']]],
'SeImageStub' : [ 0x8, ['pointer64', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_1583' : [ 0x10, {
'e2' : [ 0x0, ['__unnamed_1581']],
} ],
'_CONTROL_AREA' : [ 0x80, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x20, ['unsigned long long']],
'NumberOfMappedViews' : [ 0x28, ['unsigned long long']],
'NumberOfUserReferences' : [ 0x30, ['unsigned long long']],
'u' : [ 0x38, ['__unnamed_1577']],
'FlushInProgressCount' : [ 0x3c, ['unsigned long']],
'FilePointer' : [ 0x40, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x48, ['long']],
'ModifiedWriteCount' : [ 0x4c, ['unsigned long']],
'StartingFrame' : [ 0x4c, ['unsigned long']],
'WaitList' : [ 0x50, ['pointer64', ['_MI_CONTROL_AREA_WAIT_BLOCK']]],
'u2' : [ 0x58, ['__unnamed_1583']],
'LockedPages' : [ 0x68, ['unsigned long long']],
'ViewList' : [ 0x70, ['_LIST_ENTRY']],
} ],
'_MM_STORE_KEY' : [ 0x8, {
'KeyLow' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 60, native_type='unsigned long long')]],
'KeyHigh' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 64, native_type='unsigned long long')]],
'EntireKey' : [ 0x0, ['unsigned long long']],
} ],
'_MMPAGING_FILE' : [ 0x90, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'PeakUsage' : [ 0x20, ['unsigned long long']],
'HighestPage' : [ 0x28, ['unsigned long long']],
'File' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x38, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x48, ['_UNICODE_STRING']],
'Bitmap' : [ 0x58, ['pointer64', ['_RTL_BITMAP']]],
'EvictStoreBitmap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'BitmapHint' : [ 0x68, ['unsigned long']],
'LastAllocationSize' : [ 0x6c, ['unsigned long']],
'ToBeEvictedCount' : [ 0x70, ['unsigned long']],
'PageFileNumber' : [ 0x74, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'BootPartition' : [ 0x74, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Spare0' : [ 0x74, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AdriftMdls' : [ 0x76, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare1' : [ 0x76, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'FileHandle' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['unsigned long long']],
'LockOwner' : [ 0x88, ['pointer64', ['_ETHREAD']]],
} ],
'_MM_AVL_TABLE' : [ 0x40, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
'NodeFreeHint' : [ 0x38, ['pointer64', ['void']]],
} ],
'__unnamed_15bf' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMVAD']]],
} ],
'__unnamed_15c2' : [ 0x8, {
'LongFlags' : [ 0x0, ['unsigned long long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_15c5' : [ 0x8, {
'LongFlags3' : [ 0x0, ['unsigned long long']],
'VadFlags3' : [ 0x0, ['_MMVAD_FLAGS3']],
} ],
'_MMVAD_SHORT' : [ 0x40, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
} ],
'__unnamed_15cd' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_15cd']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_15d2' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x78, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
'u2' : [ 0x40, ['__unnamed_15d2']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'MappedSubsection' : [ 0x48, ['pointer64', ['_MSUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ViewLinks' : [ 0x60, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x70, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_15dd' : [ 0x38, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x30, ['array', 1, ['unsigned long long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x68, {
'Status' : [ 0x0, ['long']],
'Priority' : [ 0x4, ['unsigned char']],
'IrpPriority' : [ 0x5, ['unsigned char']],
'CurrentTime' : [ 0x8, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x10, ['unsigned long long']],
'ModifiedPagesTotal' : [ 0x18, ['unsigned long long']],
'ModifiedPagefilePages' : [ 0x20, ['unsigned long long']],
'ModifiedNoWritePages' : [ 0x28, ['unsigned long long']],
'MdlHack' : [ 0x30, ['__unnamed_15dd']],
} ],
'__unnamed_15e3' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'__unnamed_15e5' : [ 0x8, {
'KeepForever' : [ 0x0, ['unsigned long long']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x10, ['__unnamed_15e3']],
'Irp' : [ 0x20, ['pointer64', ['_IRP']]],
'u1' : [ 0x28, ['__unnamed_15e5']],
'PagingFile' : [ 0x30, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x40, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0x48, ['pointer64', ['_ERESOURCE']]],
'WriteOffset' : [ 0x50, ['_LARGE_INTEGER']],
'IssueTime' : [ 0x58, ['_LARGE_INTEGER']],
'PointerMdl' : [ 0x60, ['pointer64', ['_MDL']]],
'Mdl' : [ 0x68, ['_MDL']],
'Page' : [ 0x98, ['array', 1, ['unsigned long long']]],
} ],
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
'_HHIVE' : [ 0x598, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileSetSize' : [ 0x28, ['pointer64', ['void']]],
'FileWrite' : [ 0x30, ['pointer64', ['void']]],
'FileRead' : [ 0x38, ['pointer64', ['void']]],
'FileFlush' : [ 0x40, ['pointer64', ['void']]],
'HiveLoadFailure' : [ 0x48, ['pointer64', ['void']]],
'BaseBlock' : [ 0x50, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x58, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x68, ['unsigned long']],
'DirtyAlloc' : [ 0x6c, ['unsigned long']],
'BaseBlockAlloc' : [ 0x70, ['unsigned long']],
'Cluster' : [ 0x74, ['unsigned long']],
'Flat' : [ 0x78, ['unsigned char']],
'ReadOnly' : [ 0x79, ['unsigned char']],
'DirtyFlag' : [ 0x7a, ['unsigned char']],
'HvBinHeadersUse' : [ 0x7c, ['unsigned long']],
'HvFreeCellsUse' : [ 0x80, ['unsigned long']],
'HvUsedCellsUse' : [ 0x84, ['unsigned long']],
'CmUsedCellsUse' : [ 0x88, ['unsigned long']],
'HiveFlags' : [ 0x8c, ['unsigned long']],
'CurrentLog' : [ 0x90, ['unsigned long']],
'LogSize' : [ 0x94, ['array', 2, ['unsigned long']]],
'RefreshCount' : [ 0x9c, ['unsigned long']],
'StorageTypeCount' : [ 0xa0, ['unsigned long']],
'Version' : [ 0xa4, ['unsigned long']],
'Storage' : [ 0xa8, ['array', 2, ['_DUAL']]],
} ],
'_CM_VIEW_OF_FILE' : [ 0x58, {
'MappedViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'PinnedViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'FlushedViewLinks' : [ 0x20, ['_LIST_ENTRY']],
'CmHive' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'Bcb' : [ 0x38, ['pointer64', ['void']]],
'ViewAddress' : [ 0x40, ['pointer64', ['void']]],
'FileOffset' : [ 0x48, ['unsigned long']],
'Size' : [ 0x4c, ['unsigned long']],
'UseCount' : [ 0x50, ['unsigned long']],
} ],
'_CMHIVE' : [ 0xbe8, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x598, ['array', 6, ['pointer64', ['void']]]],
'NotifyList' : [ 0x5c8, ['_LIST_ENTRY']],
'HiveList' : [ 0x5d8, ['_LIST_ENTRY']],
'PreloadedHiveList' : [ 0x5e8, ['_LIST_ENTRY']],
'HiveRundown' : [ 0x5f8, ['_EX_RUNDOWN_REF']],
'ParseCacheEntries' : [ 0x600, ['_LIST_ENTRY']],
'KcbCacheTable' : [ 0x610, ['pointer64', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'KcbCacheTableSize' : [ 0x618, ['unsigned long']],
'Identity' : [ 0x61c, ['unsigned long']],
'HiveLock' : [ 0x620, ['pointer64', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x628, ['_EX_PUSH_LOCK']],
'ViewLockOwner' : [ 0x630, ['pointer64', ['_KTHREAD']]],
'ViewLockLast' : [ 0x638, ['unsigned long']],
'ViewUnLockLast' : [ 0x63c, ['unsigned long']],
'WriterLock' : [ 0x640, ['pointer64', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0x648, ['pointer64', ['_ERESOURCE']]],
'FlushDirtyVector' : [ 0x650, ['_RTL_BITMAP']],
'FlushOffsetArray' : [ 0x660, ['pointer64', ['CMP_OFFSET_ARRAY']]],
'FlushOffsetArrayCount' : [ 0x668, ['unsigned long']],
'FlushHiveTruncated' : [ 0x66c, ['unsigned long']],
'FlushLock2' : [ 0x670, ['pointer64', ['_FAST_MUTEX']]],
'SecurityLock' : [ 0x678, ['_EX_PUSH_LOCK']],
'MappedViewList' : [ 0x680, ['_LIST_ENTRY']],
'PinnedViewList' : [ 0x690, ['_LIST_ENTRY']],
'FlushedViewList' : [ 0x6a0, ['_LIST_ENTRY']],
'MappedViewCount' : [ 0x6b0, ['unsigned short']],
'PinnedViewCount' : [ 0x6b2, ['unsigned short']],
'UseCount' : [ 0x6b4, ['unsigned long']],
'ViewsPerHive' : [ 0x6b8, ['unsigned long']],
'FileObject' : [ 0x6c0, ['pointer64', ['_FILE_OBJECT']]],
'LastShrinkHiveSize' : [ 0x6c8, ['unsigned long']],
'ActualFileSize' : [ 0x6d0, ['_LARGE_INTEGER']],
'FileFullPath' : [ 0x6d8, ['_UNICODE_STRING']],
'FileUserName' : [ 0x6e8, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0x6f8, ['_UNICODE_STRING']],
'SecurityCount' : [ 0x708, ['unsigned long']],
'SecurityCacheSize' : [ 0x70c, ['unsigned long']],
'SecurityHitHint' : [ 0x710, ['long']],
'SecurityCache' : [ 0x718, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x720, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0xb20, ['unsigned long']],
'UnloadEventArray' : [ 0xb28, ['pointer64', ['pointer64', ['_KEVENT']]]],
'RootKcb' : [ 0xb30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0xb38, ['unsigned char']],
'UnloadWorkItem' : [ 0xb40, ['pointer64', ['_CM_WORKITEM']]],
'UnloadWorkItemHolder' : [ 0xb48, ['_CM_WORKITEM']],
'GrowOnlyMode' : [ 0xb70, ['unsigned char']],
'GrowOffset' : [ 0xb74, ['unsigned long']],
'KcbConvertListHead' : [ 0xb78, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0xb88, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0xb98, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0xba0, ['unsigned long']],
'TrustClassEntry' : [ 0xba8, ['_LIST_ENTRY']],
'FlushCount' : [ 0xbb8, ['unsigned long']],
'CmRm' : [ 0xbc0, ['pointer64', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0xbc8, ['unsigned long']],
'CmRmInitFailStatus' : [ 0xbcc, ['long']],
'CreatorOwner' : [ 0xbd0, ['pointer64', ['_KTHREAD']]],
'RundownThread' : [ 0xbd8, ['pointer64', ['_KTHREAD']]],
'LastWriteTime' : [ 0xbe0, ['_LARGE_INTEGER']],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0x128, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HiveUnloaded' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Decommissioned' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'LockTablePresent' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 21, end_bit = 31, native_type='unsigned long')]],
'DelayedDeref' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DelayedClose' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Parking' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyHash' : [ 0x10, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x10, ['unsigned long']],
'NextHash' : [ 0x18, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x20, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x28, ['unsigned long']],
'KcbPushlock' : [ 0x30, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x38, ['pointer64', ['_KTHREAD']]],
'SharedCount' : [ 0x38, ['long']],
'SlotHint' : [ 0x40, ['unsigned long']],
'ParentKcb' : [ 0x48, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x50, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x58, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x60, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x70, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x70, ['unsigned long']],
'SubKeyCount' : [ 0x70, ['unsigned long']],
'KeyBodyListHead' : [ 0x78, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x78, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x88, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'KcbLastWriteTime' : [ 0xa8, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0xb0, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0xb2, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0xb4, ['unsigned long']],
'KcbUserFlags' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0xb8, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0xb8, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'RealKeyName' : [ 0xc0, ['pointer64', ['unsigned char']]],
'KCBUoWListHead' : [ 0xc8, ['_LIST_ENTRY']],
'DelayQueueEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Stolen' : [ 0xd8, ['pointer64', ['unsigned char']]],
'TransKCBOwner' : [ 0xe8, ['pointer64', ['_CM_TRANS']]],
'KCBLock' : [ 0xf0, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0x100, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0x110, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0x118, ['pointer64', ['_CM_TRANS']]],
'FullKCBName' : [ 0x120, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_CM_KEY_HASH_TABLE_ENTRY' : [ 0x18, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Entry' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
} ],
'__unnamed_1669' : [ 0xc, {
'Failure' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: '_None', 1: '_CmInitializeHive', 2: '_HvInitializeHive', 3: '_HvpBuildMap', 4: '_HvpBuildMapAndCopy', 5: '_HvpInitMap', 6: '_HvLoadHive', 7: '_HvpReadFileImageAndBuildMap', 8: '_HvpRecoverData', 9: '_HvpRecoverWholeHive', 10: '_HvpMapFileImageAndBuildMap', 11: '_CmpValidateHiveSecurityDescriptors', 12: '_HvpEnlistBinInMap', 13: '_CmCheckRegistry', 14: '_CmRegistryIO', 15: '_CmCheckRegistry2', 16: '_CmpCheckKey', 17: '_CmpCheckValueList', 18: '_HvCheckHive', 19: '_HvCheckBin'})]],
'Status' : [ 0x4, ['long']],
'Point' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_166c' : [ 0x18, {
'Action' : [ 0x0, ['unsigned long']],
'Handle' : [ 0x8, ['pointer64', ['void']]],
'Status' : [ 0x10, ['long']],
} ],
'__unnamed_166e' : [ 0x8, {
'CheckStack' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1670' : [ 0x20, {
'Cell' : [ 0x0, ['unsigned long']],
'CellPoint' : [ 0x8, ['pointer64', ['_CELL_DATA']]],
'RootPoint' : [ 0x10, ['pointer64', ['void']]],
'Index' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1672' : [ 0x18, {
'List' : [ 0x0, ['pointer64', ['_CELL_DATA']]],
'Index' : [ 0x8, ['unsigned long']],
'Cell' : [ 0xc, ['unsigned long']],
'CellPoint' : [ 0x10, ['pointer64', ['_CELL_DATA']]],
} ],
'__unnamed_1676' : [ 0x10, {
'Space' : [ 0x0, ['unsigned long']],
'MapPoint' : [ 0x4, ['unsigned long']],
'BinPoint' : [ 0x8, ['pointer64', ['_HBIN']]],
} ],
'__unnamed_167a' : [ 0x10, {
'Bin' : [ 0x0, ['pointer64', ['_HBIN']]],
'CellPoint' : [ 0x8, ['pointer64', ['_HCELL']]],
} ],
'__unnamed_167c' : [ 0x4, {
'FileOffset' : [ 0x0, ['unsigned long']],
} ],
'_HIVE_LOAD_FAILURE' : [ 0x160, {
'Hive' : [ 0x0, ['pointer64', ['_HHIVE']]],
'Index' : [ 0x8, ['unsigned long']],
'RecoverableIndex' : [ 0xc, ['unsigned long']],
'Locations' : [ 0x10, ['array', 8, ['__unnamed_1669']]],
'RecoverableLocations' : [ 0x70, ['array', 8, ['__unnamed_1669']]],
'RegistryIO' : [ 0xd0, ['__unnamed_166c']],
'CheckRegistry2' : [ 0xe8, ['__unnamed_166e']],
'CheckKey' : [ 0xf0, ['__unnamed_1670']],
'CheckValueList' : [ 0x110, ['__unnamed_1672']],
'CheckHive' : [ 0x128, ['__unnamed_1676']],
'CheckHive1' : [ 0x138, ['__unnamed_1676']],
'CheckBin' : [ 0x148, ['__unnamed_167a']],
'RecoverData' : [ 0x158, ['__unnamed_167c']],
} ],
'_PCW_COUNTER_DESCRIPTOR' : [ 0x8, {
'Id' : [ 0x0, ['unsigned short']],
'StructIndex' : [ 0x2, ['unsigned short']],
'Offset' : [ 0x4, ['unsigned short']],
'Size' : [ 0x6, ['unsigned short']],
} ],
'_PCW_REGISTRATION_INFORMATION' : [ 0x30, {
'Version' : [ 0x0, ['unsigned long']],
'Name' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'CounterCount' : [ 0x10, ['unsigned long']],
'Counters' : [ 0x18, ['pointer64', ['_PCW_COUNTER_DESCRIPTOR']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'CallbackContext' : [ 0x28, ['pointer64', ['void']]],
} ],
'_PCW_PROCESSOR_INFO' : [ 0x80, {
'IdleTime' : [ 0x0, ['unsigned long long']],
'AvailableTime' : [ 0x8, ['unsigned long long']],
'UserTime' : [ 0x10, ['unsigned long long']],
'KernelTime' : [ 0x18, ['unsigned long long']],
'Interrupts' : [ 0x20, ['unsigned long']],
'DpcTime' : [ 0x28, ['unsigned long long']],
'InterruptTime' : [ 0x30, ['unsigned long long']],
'DpcCount' : [ 0x38, ['unsigned long']],
'DpcRate' : [ 0x3c, ['unsigned long']],
'C1Time' : [ 0x40, ['unsigned long long']],
'C2Time' : [ 0x48, ['unsigned long long']],
'C3Time' : [ 0x50, ['unsigned long long']],
'C1Transitions' : [ 0x58, ['unsigned long long']],
'C2Transitions' : [ 0x60, ['unsigned long long']],
'C3Transitions' : [ 0x68, ['unsigned long long']],
'ParkingStatus' : [ 0x70, ['unsigned long']],
'CurrentFrequency' : [ 0x74, ['unsigned long']],
'PercentMaxFrequency' : [ 0x78, ['unsigned long']],
'StateFlags' : [ 0x7c, ['unsigned long']],
} ],
'_PCW_DATA' : [ 0x10, {
'Data' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_ETW_PERF_COUNTERS' : [ 0x18, {
'TotalActiveSessions' : [ 0x0, ['long']],
'TotalBufferMemoryNonPagedPool' : [ 0x4, ['long']],
'TotalBufferMemoryPagedPool' : [ 0x8, ['long']],
'TotalGuidsEnabled' : [ 0xc, ['long']],
'TotalGuidsNotEnabled' : [ 0x10, ['long']],
'TotalGuidsPreEnabled' : [ 0x14, ['long']],
} ],
'_ETW_SESSION_PERF_COUNTERS' : [ 0x18, {
'BufferMemoryPagedPool' : [ 0x0, ['long']],
'BufferMemoryNonPagedPool' : [ 0x4, ['long']],
'EventsLoggedCount' : [ 0x8, ['unsigned long long']],
'EventsLost' : [ 0x10, ['long']],
'NumConsumers' : [ 0x14, ['long']],
} ],
'_CONTEXT32_UPDATE' : [ 0x4, {
'NumberEntries' : [ 0x0, ['unsigned long']],
} ],
'_KTIMER_TABLE' : [ 0x2200, {
'TimerExpiry' : [ 0x0, ['array', 64, ['pointer64', ['_KTIMER']]]],
'TimerEntries' : [ 0x200, ['array', 256, ['_KTIMER_TABLE_ENTRY']]],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x20, {
'Lock' : [ 0x0, ['unsigned long long']],
'Entry' : [ 0x8, ['_LIST_ENTRY']],
'Time' : [ 0x18, ['_ULARGE_INTEGER']],
} ],
'_KAFFINITY_EX' : [ 0x28, {
'Count' : [ 0x0, ['unsigned short']],
'Size' : [ 0x2, ['unsigned short']],
'Reserved' : [ 0x4, ['unsigned long']],
'Bitmap' : [ 0x8, ['array', 4, ['unsigned long long']]],
} ],
'_KAFFINITY_ENUMERATION_CONTEXT' : [ 0x18, {
'Affinity' : [ 0x0, ['pointer64', ['_KAFFINITY_EX']]],
'CurrentMask' : [ 0x8, ['unsigned long long']],
'CurrentIndex' : [ 0x10, ['unsigned short']],
} ],
'_GROUP_AFFINITY' : [ 0x10, {
'Mask' : [ 0x0, ['unsigned long long']],
'Group' : [ 0x8, ['unsigned short']],
'Reserved' : [ 0xa, ['array', 3, ['unsigned short']]],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStampCKCL' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'LastBranchControl' : [ 0x108, ['unsigned long long']],
'LastBranchMSR' : [ 0x110, ['unsigned long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'TimeStampKlog' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill0' : [ 0x172, ['unsigned char']],
'Logging' : [ 0x173, ['unsigned char']],
'Fill1' : [ 0x174, ['array', 2, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['unsigned short']],
'CodePatchCycle' : [ 0x18c, ['long']],
} ],
'_XSTATE_SAVE' : [ 0x38, {
'Prev' : [ 0x0, ['pointer64', ['_XSTATE_SAVE']]],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Level' : [ 0x10, ['unsigned char']],
'XStateContext' : [ 0x18, ['_XSTATE_CONTEXT']],
} ],
'_XSAVE_AREA' : [ 0x240, {
'LegacyState' : [ 0x0, ['_XSAVE_FORMAT']],
'Header' : [ 0x200, ['_XSAVE_AREA_HEADER']],
} ],
'_KEXCEPTION_FRAME' : [ 0x140, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['unsigned long long']],
'Xmm6' : [ 0x30, ['_M128A']],
'Xmm7' : [ 0x40, ['_M128A']],
'Xmm8' : [ 0x50, ['_M128A']],
'Xmm9' : [ 0x60, ['_M128A']],
'Xmm10' : [ 0x70, ['_M128A']],
'Xmm11' : [ 0x80, ['_M128A']],
'Xmm12' : [ 0x90, ['_M128A']],
'Xmm13' : [ 0xa0, ['_M128A']],
'Xmm14' : [ 0xb0, ['_M128A']],
'Xmm15' : [ 0xc0, ['_M128A']],
'TrapFrame' : [ 0xd0, ['unsigned long long']],
'CallbackStack' : [ 0xd8, ['unsigned long long']],
'OutputBuffer' : [ 0xe0, ['unsigned long long']],
'OutputLength' : [ 0xe8, ['unsigned long long']],
'MxCsr' : [ 0xf0, ['unsigned long long']],
'Rbp' : [ 0xf8, ['unsigned long long']],
'Rbx' : [ 0x100, ['unsigned long long']],
'Rdi' : [ 0x108, ['unsigned long long']],
'Rsi' : [ 0x110, ['unsigned long long']],
'R12' : [ 0x118, ['unsigned long long']],
'R13' : [ 0x120, ['unsigned long long']],
'R14' : [ 0x128, ['unsigned long long']],
'R15' : [ 0x130, ['unsigned long long']],
'Return' : [ 0x138, ['unsigned long long']],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x50, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
'DispatchedCount' : [ 0x10, ['unsigned long']],
'CompletedList' : [ 0x18, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x28, ['_KSEMAPHORE']],
'SpinLock' : [ 0x48, ['unsigned long long']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
'DependentList' : [ 0x50, ['_LIST_ENTRY']],
'ProviderList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'__unnamed_1763' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1765' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_1769' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x268, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x28, ['_UNICODE_STRING']],
'ServiceName' : [ 0x38, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'Level' : [ 0x50, ['unsigned long']],
'Notify' : [ 0x58, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0xc0, ['_PO_IRP_MANAGER']],
'State' : [ 0xe0, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0xe4, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0xe8, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0x138, ['unsigned long']],
'CompletionStatus' : [ 0x13c, ['long']],
'Flags' : [ 0x140, ['unsigned long']],
'UserFlags' : [ 0x144, ['unsigned long']],
'Problem' : [ 0x148, ['unsigned long']],
'ResourceList' : [ 0x150, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0x158, ['pointer64', ['_CM_RESOURCE_LIST']]],
'DuplicatePDO' : [ 0x160, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0x168, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0x170, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x174, ['unsigned long']],
'ChildInterfaceType' : [ 0x178, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x17c, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x180, ['unsigned short']],
'RemovalPolicy' : [ 0x182, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x183, ['unsigned char']],
'TargetDeviceNotify' : [ 0x188, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x198, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x1a8, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x1b8, ['unsigned short']],
'QueryTranslatorMask' : [ 0x1ba, ['unsigned short']],
'NoArbiterMask' : [ 0x1bc, ['unsigned short']],
'QueryArbiterMask' : [ 0x1be, ['unsigned short']],
'OverUsed1' : [ 0x1c0, ['__unnamed_1763']],
'OverUsed2' : [ 0x1c8, ['__unnamed_1765']],
'BootResources' : [ 0x1d0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x1d8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x1e0, ['unsigned long']],
'DockInfo' : [ 0x1e8, ['__unnamed_1769']],
'DisableableDepends' : [ 0x208, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x210, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x220, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x230, ['unsigned long']],
'PreviousParent' : [ 0x238, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x240, ['unsigned long']],
'NumaNodeIndex' : [ 0x244, ['unsigned long']],
'ContainerID' : [ 0x248, ['_GUID']],
'OverrideFlags' : [ 0x258, ['unsigned char']],
'RequiresUnloadedDriver' : [ 0x259, ['unsigned char']],
'PendingEjectRelations' : [ 0x260, ['pointer64', ['_PENDING_RELATIONS_LIST_ENTRY']]],
} ],
'_KNODE' : [ 0xc0, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x10, ['array', 3, ['_SLIST_HEADER']]],
'Affinity' : [ 0x40, ['_GROUP_AFFINITY']],
'ProximityId' : [ 0x50, ['unsigned long']],
'NodeNumber' : [ 0x54, ['unsigned short']],
'PrimaryNodeNumber' : [ 0x56, ['unsigned short']],
'MaximumProcessors' : [ 0x58, ['unsigned char']],
'Color' : [ 0x59, ['unsigned char']],
'Flags' : [ 0x5a, ['_flags']],
'NodePad0' : [ 0x5b, ['unsigned char']],
'Seed' : [ 0x5c, ['unsigned long']],
'MmShiftedColor' : [ 0x60, ['unsigned long']],
'FreeCount' : [ 0x68, ['array', 2, ['unsigned long long']]],
'Right' : [ 0x78, ['unsigned long']],
'Left' : [ 0x7c, ['unsigned long']],
'CachedKernelStacks' : [ 0x80, ['_CACHED_KSTACK_LIST']],
'ParkLock' : [ 0xa0, ['long']],
'NodePad1' : [ 0xa4, ['unsigned long']],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0x10, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x40, {
'PhysicalDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'AllocationType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0x10, ['unsigned long']],
'Position' : [ 0x14, ['unsigned long']],
'ResourceRequirements' : [ 0x18, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x20, ['pointer64', ['void']]],
'ResourceAssignment' : [ 0x28, ['pointer64', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x30, ['pointer64', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x38, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1811' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_1811']],
} ],
'__unnamed_1818' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1818']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_CPU_INFO' : [ 0x10, {
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_VOLUME_CACHE_MAP' : [ 0x38, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0x10, ['_LIST_ENTRY']],
'Flags' : [ 0x20, ['unsigned long']],
'DirtyPages' : [ 0x28, ['unsigned long long']],
'PagesQueuedToDisk' : [ 0x30, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x1f8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObjectFastRef' : [ 0x60, ['_EX_FAST_REF']],
'VacbLock' : [ 0x68, ['_EX_PUSH_LOCK']],
'DirtyPages' : [ 0x70, ['unsigned long']],
'LoggedStreamLinks' : [ 0x78, ['_LIST_ENTRY']],
'SharedCacheMapLinks' : [ 0x88, ['_LIST_ENTRY']],
'Flags' : [ 0x98, ['unsigned long']],
'Status' : [ 0x9c, ['long']],
'Mbcb' : [ 0xa0, ['pointer64', ['_MBCB']]],
'Section' : [ 0xa8, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb0, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc0, ['unsigned long']],
'BeyondLastFlush' : [ 0xc8, ['long long']],
'Callbacks' : [ 0xd0, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xd8, ['pointer64', ['void']]],
'PrivateList' : [ 0xe0, ['_LIST_ENTRY']],
'LogHandle' : [ 0xf0, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0xf8, ['pointer64', ['void']]],
'DirtyPageThreshold' : [ 0x100, ['unsigned long']],
'LazyWritePassCount' : [ 0x104, ['unsigned long']],
'UninitializeEvent' : [ 0x108, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'BcbLock' : [ 0x110, ['_KGUARDED_MUTEX']],
'LastUnmapBehindOffset' : [ 0x148, ['_LARGE_INTEGER']],
'Event' : [ 0x150, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0x168, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0x170, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x1d8, ['pointer64', ['void']]],
'VolumeCacheMap' : [ 0x1e0, ['pointer64', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x1e8, ['unsigned long']],
'WritesInProgress' : [ 0x1ec, ['unsigned long']],
'PipelinedReadAheadSize' : [ 0x1f0, ['unsigned long']],
} ],
'__unnamed_188a' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x30, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_188a']],
'Links' : [ 0x18, ['_LIST_ENTRY']],
'ArrayHead' : [ 0x28, ['pointer64', ['_VACB_ARRAY_HEADER']]],
} ],
'_KGUARDED_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KGATE']],
'KernelApcDisable' : [ 0x30, ['short']],
'SpecialApcDisable' : [ 0x32, ['short']],
'CombinedApcDisable' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_18a8' : [ 0x8, {
'FileObject' : [ 0x0, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_18aa' : [ 0x8, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_18ac' : [ 0x8, {
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
} ],
'__unnamed_18ae' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_18b0' : [ 0x8, {
'Read' : [ 0x0, ['__unnamed_18a8']],
'Write' : [ 0x0, ['__unnamed_18aa']],
'Event' : [ 0x0, ['__unnamed_18ac']],
'Notification' : [ 0x0, ['__unnamed_18ae']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x20, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'Parameters' : [ 0x10, ['__unnamed_18b0']],
'Function' : [ 0x18, ['unsigned char']],
} ],
'VACB_LEVEL_ALLOCATION_LIST' : [ 0x20, {
'VacbLevelList' : [ 0x0, ['_LIST_ENTRY']],
'VacbLevelWithBcbListHeads' : [ 0x10, ['pointer64', ['void']]],
'VacbLevelsAllocated' : [ 0x18, ['unsigned long']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x38, {
'ExtendedLookup' : [ 0x0, ['pointer64', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x8, ['unsigned long']],
'ExtraItem' : [ 0xc, ['unsigned long']],
'ItemCount' : [ 0x10, ['unsigned long']],
'OutOfRangeItems' : [ 0x14, ['unsigned long']],
'BaseIndex' : [ 0x18, ['unsigned long']],
'ListHead' : [ 0x20, ['pointer64', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x28, ['pointer64', ['unsigned long']]],
'ListHints' : [ 0x30, ['pointer64', ['pointer64', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x208, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
'Flags' : [ 0x70, ['unsigned long']],
'ForceFlags' : [ 0x74, ['unsigned long']],
'CompatibilityFlags' : [ 0x78, ['unsigned long']],
'EncodeFlagMask' : [ 0x7c, ['unsigned long']],
'Encoding' : [ 0x80, ['_HEAP_ENTRY']],
'PointerKey' : [ 0x90, ['unsigned long long']],
'Interceptor' : [ 0x98, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x9c, ['unsigned long']],
'Signature' : [ 0xa0, ['unsigned long']],
'SegmentReserve' : [ 0xa8, ['unsigned long long']],
'SegmentCommit' : [ 0xb0, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0xb8, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0xc0, ['unsigned long long']],
'TotalFreeSize' : [ 0xc8, ['unsigned long long']],
'MaximumAllocationSize' : [ 0xd0, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0xd8, ['unsigned short']],
'HeaderValidateLength' : [ 0xda, ['unsigned short']],
'HeaderValidateCopy' : [ 0xe0, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0xe8, ['unsigned short']],
'MaximumTagIndex' : [ 0xea, ['unsigned short']],
'TagEntries' : [ 0xf0, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0xf8, ['_LIST_ENTRY']],
'AlignRound' : [ 0x108, ['unsigned long long']],
'AlignMask' : [ 0x110, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x118, ['_LIST_ENTRY']],
'SegmentList' : [ 0x128, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0x138, ['unsigned short']],
'NonDedicatedListLength' : [ 0x13c, ['unsigned long']],
'BlocksIndex' : [ 0x140, ['pointer64', ['void']]],
'UCRIndex' : [ 0x148, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x150, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x158, ['_LIST_ENTRY']],
'LockVariable' : [ 0x168, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0x170, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0x178, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0x180, ['unsigned short']],
'FrontEndHeapType' : [ 0x182, ['unsigned char']],
'Counters' : [ 0x188, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x1f8, ['_HEAP_TUNING_PARAMETERS']],
} ],
'__unnamed_1901' : [ 0x28, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
} ],
'_HEAP_LOCK' : [ 0x28, {
'Lock' : [ 0x0, ['__unnamed_1901']],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x70, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_PEB' : [ 0x380, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'IFEOKey' : [ 0x48, ['pointer64', ['void']]],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'UserSharedInfoPtr' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['pointer64', ['void']]],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'HotpatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['pointer64', ['void']]],
'WerShipAssertPtr' : [ 0x360, ['pointer64', ['void']]],
'pContextData' : [ 0x368, ['pointer64', ['void']]],
'pImageHeaderHash' : [ 0x370, ['pointer64', ['void']]],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_PEB_LDR_DATA' : [ 0x58, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
'ShutdownInProgress' : [ 0x48, ['unsigned char']],
'ShutdownThreadId' : [ 0x50, ['pointer64', ['void']]],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0xe0, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'LoadedImports' : [ 0x80, ['pointer64', ['void']]],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ForwarderLinks' : [ 0x98, ['_LIST_ENTRY']],
'ServiceTagLinks' : [ 0xa8, ['_LIST_ENTRY']],
'StaticLinks' : [ 0xb8, ['_LIST_ENTRY']],
'ContextInformation' : [ 0xc8, ['pointer64', ['void']]],
'OriginalBase' : [ 0xd0, ['unsigned long long']],
'LoadTime' : [ 0xd8, ['_LARGE_INTEGER']],
} ],
'_HEAP_SUBSEGMENT' : [ 0x30, {
'LocalInfo' : [ 0x0, ['pointer64', ['_HEAP_LOCAL_SEGMENT_INFO']]],
'UserBlocks' : [ 0x8, ['pointer64', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x10, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x18, ['unsigned short']],
'Flags' : [ 0x1a, ['unsigned short']],
'BlockCount' : [ 0x1c, ['unsigned short']],
'SizeIndex' : [ 0x1e, ['unsigned char']],
'AffinityIndex' : [ 0x1f, ['unsigned char']],
'Alignment' : [ 0x18, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x28, ['unsigned long']],
} ],
'__unnamed_197f' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_1981' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_197f']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1983' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1985' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1983']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_1981']],
'u2' : [ 0x4, ['__unnamed_1985']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
} ],
'_BLOB_TYPE' : [ 0x38, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'CreatedObjects' : [ 0xc, ['unsigned long']],
'DeletedObjects' : [ 0x10, ['unsigned long']],
'DeleteProcedure' : [ 0x18, ['pointer64', ['void']]],
'DestroyProcedure' : [ 0x20, ['pointer64', ['void']]],
'UsualSize' : [ 0x28, ['unsigned long long']],
'LookasideIndex' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_199e' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_19a0' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_199e']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x20, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_19a0']],
'ResourceId' : [ 0x11, ['unsigned char']],
'CachedReferences' : [ 0x12, ['short']],
'ReferenceCount' : [ 0x14, ['long']],
'Lock' : [ 0x18, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_19b3' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_19b5' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19b3']],
} ],
'_KALPC_SECTION' : [ 0x48, {
'SectionObject' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'HandleTable' : [ 0x10, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0x18, ['pointer64', ['void']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x28, ['pointer64', ['_ALPC_PORT']]],
'u1' : [ 0x30, ['__unnamed_19b5']],
'NumberOfRegions' : [ 0x34, ['unsigned long']],
'RegionListHead' : [ 0x38, ['_LIST_ENTRY']],
} ],
'__unnamed_19bb' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_19bd' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19bb']],
} ],
'_KALPC_REGION' : [ 0x58, {
'RegionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Section' : [ 0x10, ['pointer64', ['_KALPC_SECTION']]],
'Offset' : [ 0x18, ['unsigned long long']],
'Size' : [ 0x20, ['unsigned long long']],
'ViewSize' : [ 0x28, ['unsigned long long']],
'u1' : [ 0x30, ['__unnamed_19bd']],
'NumberOfViews' : [ 0x34, ['unsigned long']],
'ViewListHead' : [ 0x38, ['_LIST_ENTRY']],
'ReadOnlyView' : [ 0x48, ['pointer64', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x50, ['pointer64', ['_KALPC_VIEW']]],
} ],
'__unnamed_19c3' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_19c5' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19c3']],
} ],
'_KALPC_VIEW' : [ 0x60, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Region' : [ 0x10, ['pointer64', ['_KALPC_REGION']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'Address' : [ 0x28, ['pointer64', ['void']]],
'Size' : [ 0x30, ['unsigned long long']],
'SecureViewHandle' : [ 0x38, ['pointer64', ['void']]],
'WriteAccessHandle' : [ 0x40, ['pointer64', ['void']]],
'u1' : [ 0x48, ['__unnamed_19c5']],
'NumberOfOwnerMessages' : [ 0x4c, ['unsigned long']],
'ProcessViewListEntry' : [ 0x50, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x40, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x8, ['pointer64', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x10, ['pointer64', ['_ALPC_PORT']]],
'CommunicationList' : [ 0x18, ['_LIST_ENTRY']],
'HandleTable' : [ 0x28, ['_ALPC_HANDLE_TABLE']],
} ],
'__unnamed_19e1' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'EnableCompletionList' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_19e3' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19e1']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0x1a0, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'CompletionPort' : [ 0x20, ['pointer64', ['void']]],
'CompletionKey' : [ 0x28, ['pointer64', ['void']]],
'CompletionPacketLookaside' : [ 0x30, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x38, ['pointer64', ['void']]],
'StaticSecurity' : [ 0x40, ['_SECURITY_CLIENT_CONTEXT']],
'MainQueue' : [ 0x88, ['_LIST_ENTRY']],
'PendingQueue' : [ 0x98, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0xa8, ['_LIST_ENTRY']],
'WaitQueue' : [ 0xb8, ['_LIST_ENTRY']],
'Semaphore' : [ 0xc8, ['pointer64', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0xc8, ['pointer64', ['_KEVENT']]],
'PortAttributes' : [ 0xd0, ['_ALPC_PORT_ATTRIBUTES']],
'Lock' : [ 0x118, ['_EX_PUSH_LOCK']],
'ResourceListLock' : [ 0x120, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0x128, ['_LIST_ENTRY']],
'CompletionList' : [ 0x138, ['pointer64', ['_ALPC_COMPLETION_LIST']]],
'MessageZone' : [ 0x140, ['pointer64', ['_ALPC_MESSAGE_ZONE']]],
'CallbackObject' : [ 0x148, ['pointer64', ['_CALLBACK_OBJECT']]],
'CallbackContext' : [ 0x150, ['pointer64', ['void']]],
'CanceledQueue' : [ 0x158, ['_LIST_ENTRY']],
'SequenceNo' : [ 0x168, ['long']],
'u1' : [ 0x16c, ['__unnamed_19e3']],
'TargetQueuePort' : [ 0x170, ['pointer64', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0x178, ['pointer64', ['_ALPC_PORT']]],
'CachedMessage' : [ 0x180, ['pointer64', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0x188, ['unsigned long']],
'PendingQueueLength' : [ 0x18c, ['unsigned long']],
'LargeMessageQueueLength' : [ 0x190, ['unsigned long']],
'CanceledQueueLength' : [ 0x194, ['unsigned long']],
'WaitQueueLength' : [ 0x198, ['unsigned long']],
} ],
'_OBJECT_TYPE' : [ 0xd0, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x10, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x20, ['pointer64', ['void']]],
'Index' : [ 0x28, ['unsigned char']],
'TotalNumberOfObjects' : [ 0x2c, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x30, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x34, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x38, ['unsigned long']],
'TypeInfo' : [ 0x40, ['_OBJECT_TYPE_INITIALIZER']],
'TypeLock' : [ 0xb0, ['_EX_PUSH_LOCK']],
'Key' : [ 0xb8, ['unsigned long']],
'CallbackList' : [ 0xc0, ['_LIST_ENTRY']],
} ],
'_PORT_MESSAGE32' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_1981']],
'u2' : [ 0x4, ['__unnamed_1985']],
'ClientId' : [ 0x8, ['_CLIENT_ID32']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1a02' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ViewAttributeRetrieved' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'InDispatch' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
} ],
'__unnamed_1a04' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a02']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x100, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtensionBuffer' : [ 0x10, ['pointer64', ['void']]],
'ExtensionBufferSize' : [ 0x18, ['unsigned long long']],
'QuotaProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'QuotaBlock' : [ 0x20, ['pointer64', ['void']]],
'SequenceNo' : [ 0x28, ['long']],
'u1' : [ 0x2c, ['__unnamed_1a04']],
'CancelSequencePort' : [ 0x30, ['pointer64', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x38, ['pointer64', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x40, ['long']],
'CancelListEntry' : [ 0x48, ['_LIST_ENTRY']],
'WaitingThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'Reserve' : [ 0x60, ['pointer64', ['_KALPC_RESERVE']]],
'PortQueue' : [ 0x68, ['pointer64', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x70, ['pointer64', ['_ALPC_PORT']]],
'MessageAttributes' : [ 0x78, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0xb0, ['pointer64', ['void']]],
'DataSystemVa' : [ 0xb8, ['pointer64', ['void']]],
'CommunicationInfo' : [ 0xc0, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0xc8, ['pointer64', ['_ALPC_PORT']]],
'ServerThread' : [ 0xd0, ['pointer64', ['_ETHREAD']]],
'PortMessage' : [ 0xd8, ['_PORT_MESSAGE']],
} ],
'_REMOTE_PORT_VIEW' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x8, ['unsigned long long']],
'ViewBase' : [ 0x10, ['pointer64', ['void']]],
} ],
'_KALPC_RESERVE' : [ 0x28, {
'OwnerPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'HandleTable' : [ 0x8, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Message' : [ 0x18, ['pointer64', ['_KALPC_MESSAGE']]],
'Active' : [ 0x20, ['long']],
} ],
'_KALPC_HANDLE_DATA' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['pointer64', ['_OB_DUPLICATE_OBJECT_STATE']]],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x38, {
'ClientContext' : [ 0x0, ['pointer64', ['void']]],
'ServerContext' : [ 0x8, ['pointer64', ['void']]],
'PortContext' : [ 0x10, ['pointer64', ['void']]],
'CancelPortContext' : [ 0x18, ['pointer64', ['void']]],
'SecurityData' : [ 0x20, ['pointer64', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x28, ['pointer64', ['_KALPC_VIEW']]],
'HandleData' : [ 0x30, ['pointer64', ['_KALPC_HANDLE_DATA']]],
} ],
'__unnamed_1a42' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1a44' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a42']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x70, {
'HandleTable' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x8, ['pointer64', ['void']]],
'OwningProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x68, ['__unnamed_1a44']],
} ],
'_IO_MINI_COMPLETION_PACKET_USER' : [ 0x50, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'PacketType' : [ 0x10, ['unsigned long']],
'KeyContext' : [ 0x18, ['pointer64', ['void']]],
'ApcContext' : [ 0x20, ['pointer64', ['void']]],
'IoStatus' : [ 0x28, ['long']],
'IoStatusInformation' : [ 0x30, ['unsigned long long']],
'MiniPacketCallback' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
'Allocated' : [ 0x48, ['unsigned char']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x38, {
'PortObject' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'Message' : [ 0x8, ['pointer64', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'TargetThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'TargetPort' : [ 0x20, ['pointer64', ['_ALPC_PORT']]],
'Flags' : [ 0x28, ['unsigned long']],
'TotalLength' : [ 0x2c, ['unsigned short']],
'Type' : [ 0x2e, ['unsigned short']],
'DataInfoOffset' : [ 0x30, ['unsigned short']],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_FILE_SEGMENT_ELEMENT' : [ 0x8, {
'Buffer' : [ 0x0, ['pointer64', ['void']]],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_RELATIVE_SYMLINK_INFO' : [ 0x20, {
'ExposedNamespaceLength' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'DeviceNameLength' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'InteriorMountPoint' : [ 0x8, ['pointer64', ['_RELATIVE_SYMLINK_INFO']]],
'OpenedName' : [ 0x10, ['_UNICODE_STRING']],
} ],
'_ECP_LIST' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'EcpList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_IOP_FILE_OBJECT_EXTENSION' : [ 0x48, {
'FoExtFlags' : [ 0x0, ['unsigned long']],
'FoExtPerTypeExtension' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
'FoIoPriorityHint' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'IopIoPriorityNotSet', 1: 'IopIoPriorityVeryLow', 2: 'IopIoPriorityLow', 3: 'IopIoPriorityNormal', 4: 'IopIoPriorityHigh', 5: 'IopIoPriorityCritical', 6: 'MaxIopIoPriorityTypes'})]],
} ],
'_OPEN_PACKET' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FinalStatus' : [ 0x10, ['long']],
'Information' : [ 0x18, ['unsigned long long']],
'ParseCheck' : [ 0x20, ['unsigned long']],
'RelatedFileObject' : [ 0x28, ['pointer64', ['_FILE_OBJECT']]],
'OriginalAttributes' : [ 0x30, ['pointer64', ['_OBJECT_ATTRIBUTES']]],
'AllocationSize' : [ 0x38, ['_LARGE_INTEGER']],
'CreateOptions' : [ 0x40, ['unsigned long']],
'FileAttributes' : [ 0x44, ['unsigned short']],
'ShareAccess' : [ 0x46, ['unsigned short']],
'EaBuffer' : [ 0x48, ['pointer64', ['void']]],
'EaLength' : [ 0x50, ['unsigned long']],
'Options' : [ 0x54, ['unsigned long']],
'Disposition' : [ 0x58, ['unsigned long']],
'BasicInformation' : [ 0x60, ['pointer64', ['_FILE_BASIC_INFORMATION']]],
'NetworkInformation' : [ 0x68, ['pointer64', ['_FILE_NETWORK_OPEN_INFORMATION']]],
'CreateFileType' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'CreateFileTypeNone', 1: 'CreateFileTypeNamedPipe', 2: 'CreateFileTypeMailslot'})]],
'MailslotOrPipeParameters' : [ 0x78, ['pointer64', ['void']]],
'Override' : [ 0x80, ['unsigned char']],
'QueryOnly' : [ 0x81, ['unsigned char']],
'DeleteOnly' : [ 0x82, ['unsigned char']],
'FullAttributes' : [ 0x83, ['unsigned char']],
'LocalFileObject' : [ 0x88, ['pointer64', ['_DUMMY_FILE_OBJECT']]],
'InternalFlags' : [ 0x90, ['unsigned long']],
'DriverCreateContext' : [ 0x98, ['_IO_DRIVER_CREATE_CONTEXT']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'_RTL_RB_TREE' : [ 0x10, {
'Root' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Min' : [ 0x8, ['pointer64', ['_RTL_BALANCED_NODE']]],
} ],
'_RTL_BALANCED_NODE' : [ 0x18, {
'Children' : [ 0x0, ['array', 2, ['pointer64', ['_RTL_BALANCED_NODE']]]],
'Left' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Right' : [ 0x8, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Red' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Balance' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'ParentValue' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_AVL_TREE' : [ 0x8, {
'Root' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x340, {
'LoggerId' : [ 0x0, ['unsigned long']],
'BufferSize' : [ 0x4, ['unsigned long']],
'MaximumEventSize' : [ 0x8, ['unsigned long']],
'CollectionOn' : [ 0xc, ['long']],
'LoggerMode' : [ 0x10, ['unsigned long']],
'AcceptNewEvents' : [ 0x14, ['long']],
'GetCpuClock' : [ 0x18, ['pointer64', ['void']]],
'StartTime' : [ 0x20, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x28, ['pointer64', ['void']]],
'LoggerThread' : [ 0x30, ['pointer64', ['_ETHREAD']]],
'LoggerStatus' : [ 0x38, ['long']],
'NBQHead' : [ 0x40, ['pointer64', ['void']]],
'OverflowNBQHead' : [ 0x48, ['pointer64', ['void']]],
'QueueBlockFreeList' : [ 0x50, ['_SLIST_HEADER']],
'GlobalList' : [ 0x60, ['_LIST_ENTRY']],
'BatchedBufferList' : [ 0x70, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'CurrentBuffer' : [ 0x70, ['_EX_FAST_REF']],
'LoggerName' : [ 0x78, ['_UNICODE_STRING']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x98, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0xa8, ['_UNICODE_STRING']],
'ClockType' : [ 0xb8, ['unsigned long']],
'MaximumFileSize' : [ 0xbc, ['unsigned long']],
'LastFlushedBuffer' : [ 0xc0, ['unsigned long']],
'FlushTimer' : [ 0xc4, ['unsigned long']],
'FlushThreshold' : [ 0xc8, ['unsigned long']],
'ByteOffset' : [ 0xd0, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0xd8, ['unsigned long']],
'BuffersAvailable' : [ 0xdc, ['long']],
'NumberOfBuffers' : [ 0xe0, ['long']],
'MaximumBuffers' : [ 0xe4, ['unsigned long']],
'EventsLost' : [ 0xe8, ['unsigned long']],
'BuffersWritten' : [ 0xec, ['unsigned long']],
'LogBuffersLost' : [ 0xf0, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0xf4, ['unsigned long']],
'RealTimeBuffersLost' : [ 0xf8, ['unsigned long']],
'SequencePtr' : [ 0x100, ['pointer64', ['long']]],
'LocalSequence' : [ 0x108, ['unsigned long']],
'InstanceGuid' : [ 0x10c, ['_GUID']],
'FileCounter' : [ 0x11c, ['long']],
'BufferCallback' : [ 0x120, ['pointer64', ['void']]],
'PoolType' : [ 0x128, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0x130, ['_ETW_REF_CLOCK']],
'Consumers' : [ 0x140, ['_LIST_ENTRY']],
'NumConsumers' : [ 0x150, ['unsigned long']],
'TransitionConsumer' : [ 0x158, ['pointer64', ['_ETW_REALTIME_CONSUMER']]],
'RealtimeLogfileHandle' : [ 0x160, ['pointer64', ['void']]],
'RealtimeLogfileName' : [ 0x168, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x178, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x180, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x188, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x190, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x198, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x1a0, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x1a8, ['_ETW_REF_CLOCK']],
'NewRTEventsLost' : [ 0x1b8, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x1c0, ['_KEVENT']],
'FlushEvent' : [ 0x1d8, ['_KEVENT']],
'FlushTimeOutTimer' : [ 0x1f0, ['_KTIMER']],
'FlushDpc' : [ 0x230, ['_KDPC']],
'LoggerMutex' : [ 0x270, ['_KMUTANT']],
'LoggerLock' : [ 0x2a8, ['_EX_PUSH_LOCK']],
'BufferListSpinLock' : [ 0x2b0, ['unsigned long long']],
'BufferListPushLock' : [ 0x2b0, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x2b8, ['_SECURITY_CLIENT_CONTEXT']],
'TokenAccessInformation' : [ 0x300, ['pointer64', ['_TOKEN_ACCESS_INFORMATION']]],
'SecurityDescriptor' : [ 0x308, ['_EX_FAST_REF']],
'BufferSequenceNumber' : [ 0x310, ['long long']],
'Flags' : [ 0x318, ['unsigned long']],
'Persistent' : [ 0x318, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x318, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x318, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x318, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x318, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x318, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x318, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'StackTracing' : [ 0x318, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ErrorLogged' : [ 0x318, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RealtimeLoggerContextFreed' : [ 0x318, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'RequestFlag' : [ 0x31c, ['unsigned long']],
'RequestNewFie' : [ 0x31c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RequestUpdateFile' : [ 0x31c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'RequestFlush' : [ 0x31c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RequestDisableRealtime' : [ 0x31c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequestDisconnectConsumer' : [ 0x31c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RequestConnectConsumer' : [ 0x31c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'HookIdMap' : [ 0x320, ['_RTL_BITMAP']],
'DisallowedGuids' : [ 0x330, ['_DISALLOWED_GUIDS']],
} ],
'_ETW_LOGGER_HANDLE' : [ 0x1, {
'DereferenceAndLeave' : [ 0x0, ['unsigned char']],
} ],
'_ETW_BUFFER_HANDLE' : [ 0x10, {
'TraceBuffer' : [ 0x0, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'BufferFastRef' : [ 0x8, ['pointer64', ['_EX_FAST_REF']]],
} ],
'_SYSTEM_TRACE_HEADER' : [ 0x20, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'SystemTime' : [ 0x10, ['_LARGE_INTEGER']],
'KernelTime' : [ 0x18, ['unsigned long']],
'UserTime' : [ 0x1c, ['unsigned long']],
} ],
'_PERFINFO_TRACE_HEADER' : [ 0x18, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'TS' : [ 0x8, ['unsigned long long']],
'SystemTime' : [ 0x8, ['_LARGE_INTEGER']],
'Data' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_NBQUEUE_BLOCK' : [ 0x20, {
'SListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'Next' : [ 0x10, ['unsigned long long']],
'Data' : [ 0x18, ['unsigned long long']],
} ],
'_TlgProvider_t' : [ 0x40, {
'LevelPlus1' : [ 0x0, ['unsigned long']],
'ProviderMetadataPtr' : [ 0x8, ['pointer64', ['unsigned short']]],
'KeywordAny' : [ 0x10, ['unsigned long long']],
'KeywordAll' : [ 0x18, ['unsigned long long']],
'RegHandle' : [ 0x20, ['unsigned long long']],
'EnableCallback' : [ 0x28, ['pointer64', ['void']]],
'CallbackContext' : [ 0x30, ['pointer64', ['void']]],
'AnnotationFunc' : [ 0x38, ['pointer64', ['void']]],
} ],
'_EVENT_FILTER_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
} ],
'_TlgProviderMetadata_t' : [ 0x13, {
'Type' : [ 0x0, ['unsigned char']],
'ProviderId' : [ 0x1, ['_GUID']],
'RemainingSize' : [ 0x11, ['unsigned short']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_TRACE_ENABLE_CONTEXT' : [ 0x8, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
} ],
'_TRACE_ENABLE_CONTEXT_EX' : [ 0x10, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
'EnableFlagsHigh' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_ETW_GUID_ENTRY' : [ 0x1b0, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x10, ['long']],
'Guid' : [ 0x14, ['_GUID']],
'RegListHead' : [ 0x28, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'LastEnable' : [ 0x40, ['_ETW_LAST_ENABLE_INFO']],
'MatchId' : [ 0x40, ['unsigned long long']],
'ProviderEnableInfo' : [ 0x50, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x70, ['array', 8, ['_TRACE_ENABLE_INFO']]],
'FilterData' : [ 0x170, ['array', 8, ['pointer64', ['_EVENT_FILTER_HEADER']]]],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x310, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'ModifiedId' : [ 0x38, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x74, ['unsigned long']],
'UserAndGroupCount' : [ 0x78, ['unsigned long']],
'RestrictedSidCount' : [ 0x7c, ['unsigned long']],
'VariableLength' : [ 0x80, ['unsigned long']],
'DynamicCharged' : [ 0x84, ['unsigned long']],
'DynamicAvailable' : [ 0x88, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x8c, ['unsigned long']],
'UserAndGroups' : [ 0x90, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x98, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0xa0, ['pointer64', ['void']]],
'DynamicPart' : [ 0xa8, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0xb0, ['pointer64', ['_ACL']]],
'TokenType' : [ 0xb8, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xbc, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xc0, ['unsigned long']],
'TokenInUse' : [ 0xc4, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xc8, ['unsigned long']],
'MandatoryPolicy' : [ 0xcc, ['unsigned long']],
'LogonSession' : [ 0xd0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xd8, ['_LUID']],
'SidHash' : [ 0xe0, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x1f0, ['_SID_AND_ATTRIBUTES_HASH']],
'pSecurityAttributes' : [ 0x300, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'VariablePart' : [ 0x308, ['unsigned long long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x50, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'BuddyLogonId' : [ 0x10, ['_LUID']],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'Flags' : [ 0x1c, ['unsigned long']],
'pDeviceMap' : [ 0x20, ['pointer64', ['_DEVICE_MAP']]],
'Token' : [ 0x28, ['pointer64', ['void']]],
'AccountName' : [ 0x30, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x40, ['_UNICODE_STRING']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'TypeIndex' : [ 0x18, ['unsigned char']],
'TraceFlags' : [ 0x19, ['unsigned char']],
'InfoMask' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'SecurityDescriptorQuotaBlock' : [ 0x10, ['pointer64', ['void']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_PROCESS_INFO' : [ 0x10, {
'ExclusiveProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'ReferenceCount' : [ 0x18, ['long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
'HashIndex' : [ 0x14, ['unsigned short']],
'DirectoryLocked' : [ 0x16, ['unsigned char']],
'LockedExclusive' : [ 0x17, ['unsigned char']],
'LockStateSignature' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x150, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'SessionId' : [ 0x138, ['unsigned long']],
'NamespaceEntry' : [ 0x140, ['pointer64', ['void']]],
'Flags' : [ 0x148, ['unsigned long']],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x8, {
'ImpersonationData' : [ 0x0, ['unsigned long long']],
'ImpersonationToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
'PpcControlSet' : [ 0x0, ['_PPC_DBGKD_CONTROL_SET']],
} ],
'_MMVAD_FLAGS3' : [ 0x8, {
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned long long')]],
'Teb' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'SequentialAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'LastSequentialTrim' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 24, native_type='unsigned long long')]],
'Spare2' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long long')]],
'LargePageCreating' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 33, native_type='unsigned long long')]],
'Spare3' : [ 0x0, ['BitField', dict(start_bit = 33, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'Reserved1' : [ 0xa8, ['array', 85, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'SharedWaiters' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerEntry' : [ 0x30, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x40, ['unsigned long']],
'ContentionCount' : [ 0x44, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x48, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x4c, ['unsigned long']],
'Reserved2' : [ 0x50, ['pointer64', ['void']]],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x260, ['unsigned long']],
'FreeBins' : [ 0x268, ['_LIST_ENTRY']],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x48, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long long']],
'MemoryBandwidth' : [ 0x18, ['unsigned long long']],
'MaxPoolUsage' : [ 0x20, ['unsigned long long']],
'MaxSectionSize' : [ 0x28, ['unsigned long long']],
'MaxViewSize' : [ 0x30, ['unsigned long long']],
'MaxTotalSectionSize' : [ 0x38, ['unsigned long long']],
'DupObjectTypes' : [ 0x40, ['unsigned long']],
'Reserved' : [ 0x44, ['unsigned long']],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'_KSTACK_COUNT' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'StackCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Type' : [ 0x0, ['unsigned char']],
'TimerControlFlags' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Coalescable' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KeepShifting' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'EncodedTolerableDelay' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Abandoned' : [ 0x1, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'ThreadControlFlags' : [ 0x2, ['unsigned char']],
'CpuThrottled' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CycleProfiling' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'CounterProfiling' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Hand' : [ 0x2, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'TimerMiscFlags' : [ 0x3, ['unsigned char']],
'Index' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned char')]],
'Inserted' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Expired' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DebugActive' : [ 0x3, ['unsigned char']],
'ActiveDR7' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Instrumented' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved2' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned char')]],
'UmsScheduled' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'UmsPrimary' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DpcActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'PointerProtoPte' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MI_CONTROL_AREA_WAIT_BLOCK' : [ 0x28, {
'Next' : [ 0x0, ['pointer64', ['_MI_CONTROL_AREA_WAIT_BLOCK']]],
'WaitReason' : [ 0x8, ['unsigned long']],
'WaitResponse' : [ 0xc, ['unsigned long']],
'Gate' : [ 0x10, ['_KGATE']],
} ],
'_TraceLoggingMetadata_t' : [ 0x10, {
'Signature' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned short']],
'Version' : [ 0x6, ['unsigned char']],
'Flags' : [ 0x7, ['unsigned char']],
'Magic' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_COUNTERS' : [ 0x70, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long long']],
'TotalMemoryCommitted' : [ 0x8, ['unsigned long long']],
'TotalMemoryLargeUCR' : [ 0x10, ['unsigned long long']],
'TotalSizeInVirtualBlocks' : [ 0x18, ['unsigned long long']],
'TotalSegments' : [ 0x20, ['unsigned long']],
'TotalUCRs' : [ 0x24, ['unsigned long']],
'CommittOps' : [ 0x28, ['unsigned long']],
'DeCommitOps' : [ 0x2c, ['unsigned long']],
'LockAcquires' : [ 0x30, ['unsigned long']],
'LockCollisions' : [ 0x34, ['unsigned long']],
'CommitRate' : [ 0x38, ['unsigned long']],
'DecommittRate' : [ 0x3c, ['unsigned long']],
'CommitFailures' : [ 0x40, ['unsigned long']],
'InBlockCommitFailures' : [ 0x44, ['unsigned long']],
'CompactHeapCalls' : [ 0x48, ['unsigned long']],
'CompactedUCRs' : [ 0x4c, ['unsigned long']],
'AllocAndFreeOps' : [ 0x50, ['unsigned long']],
'InBlockDeccommits' : [ 0x54, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x58, ['unsigned long long']],
'HighWatermarkSize' : [ 0x60, ['unsigned long long']],
'LastPolledSize' : [ 0x68, ['unsigned long long']],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'_SYSPTES_HEADER' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x10, ['unsigned long long']],
'NumberOfEntries' : [ 0x18, ['unsigned long long']],
'NumberOfEntriesPeak' : [ 0x20, ['unsigned long long']],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_PENDING_RELATIONS_LIST_ENTRY' : [ 0x68, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'WorkItem' : [ 0x10, ['_WORK_QUEUE_ITEM']],
'DeviceEvent' : [ 0x30, ['pointer64', ['_PNP_DEVICE_EVENT_ENTRY']]],
'DeviceObject' : [ 0x38, ['pointer64', ['_DEVICE_OBJECT']]],
'RelationsList' : [ 0x40, ['pointer64', ['_RELATION_LIST']]],
'EjectIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'Lock' : [ 0x50, ['Enumeration', dict(target = 'long', choices = {0: 'IRPLOCK_CANCELABLE', 1: 'IRPLOCK_CANCEL_STARTED', 2: 'IRPLOCK_CANCEL_COMPLETE', 3: 'IRPLOCK_COMPLETED'})]],
'Problem' : [ 0x54, ['unsigned long']],
'ProfileChangingEject' : [ 0x58, ['unsigned char']],
'DisplaySafeRemovalDialog' : [ 0x59, ['unsigned char']],
'LightestSleepState' : [ 0x5c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DockInterface' : [ 0x60, ['pointer64', ['DOCK_INTERFACE']]],
} ],
'_I386_LOADER_BLOCK' : [ 0x10, {
'CommonDataArea' : [ 0x0, ['pointer64', ['void']]],
'MachineType' : [ 0x8, ['unsigned long']],
'VirtualBias' : [ 0xc, ['unsigned long']],
} ],
'_TOKEN_ACCESS_INFORMATION' : [ 0x30, {
'SidHash' : [ 0x0, ['pointer64', ['_SID_AND_ATTRIBUTES_HASH']]],
'RestrictedSidHash' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES_HASH']]],
'Privileges' : [ 0x10, ['pointer64', ['_TOKEN_PRIVILEGES']]],
'AuthenticationId' : [ 0x18, ['_LUID']],
'TokenType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'MandatoryPolicy' : [ 0x28, ['_TOKEN_MANDATORY_POLICY']],
'Flags' : [ 0x2c, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_ARC_DISK_INFORMATION' : [ 0x10, {
'DiskSignatures' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x10, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x8, ['unsigned long long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_HANDLE_TABLE' : [ 0x68, {
'TableCode' : [ 0x0, ['unsigned long long']],
'QuotaProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x10, ['pointer64', ['void']]],
'HandleLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'HandleTableList' : [ 0x20, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x30, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x38, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x40, ['long']],
'Flags' : [ 0x44, ['unsigned long']],
'StrictFIFO' : [ 0x44, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FirstFreeHandle' : [ 0x48, ['unsigned long']],
'LastFreeHandleEntry' : [ 0x50, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x58, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x5c, ['unsigned long']],
'HandleCountHighWatermark' : [ 0x60, ['unsigned long']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['unsigned long']],
'PoolType' : [ 0xc, ['unsigned long']],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_ACCESS_REASONS' : [ 0x80, {
'Data' : [ 0x0, ['array', 32, ['unsigned long']]],
} ],
'_CM_KEY_BODY' : [ 0x58, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
'Flags' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'HandleTags' : [ 0x30, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KtmTrans' : [ 0x38, ['pointer64', ['void']]],
'KtmUow' : [ 0x40, ['pointer64', ['_GUID']]],
'ContextListHead' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Object' : [ 0x18, ['pointer64', ['void']]],
'NextWaitBlock' : [ 0x20, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x28, ['unsigned short']],
'WaitType' : [ 0x2a, ['unsigned char']],
'BlockState' : [ 0x2b, ['unsigned char']],
'SpareLong' : [ 0x2c, ['long']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x78, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['_KAFFINITY_EX']],
'SamplingPeriod' : [ 0x38, ['unsigned long']],
'CurrentTemperature' : [ 0x3c, ['unsigned long']],
'PassiveTripPoint' : [ 0x40, ['unsigned long']],
'CriticalTripPoint' : [ 0x44, ['unsigned long']],
'ActiveTripPointCount' : [ 0x48, ['unsigned char']],
'ActiveTripPoint' : [ 0x4c, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x74, ['unsigned long']],
} ],
'__unnamed_1cdf' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_1ce1' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_1cdf']],
'Private' : [ 0x0, ['__unnamed_1ce1']],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x20, {
'IssueType' : [ 0x0, ['unsigned long long']],
'Address' : [ 0x8, ['pointer64', ['void']]],
'Parameters' : [ 0x10, ['array', 2, ['unsigned long long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DirtyPages' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'Spare' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'_OBJECT_REF_INFO' : [ 0x28, {
'ObjectHeader' : [ 0x0, ['pointer64', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x10, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x20, ['unsigned short']],
'MaxStacks' : [ 0x22, ['unsigned short']],
'StackInfo' : [ 0x24, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_HBIN' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'FileOffset' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['array', 2, ['unsigned long']]],
'TimeStamp' : [ 0x14, ['_LARGE_INTEGER']],
'Spare' : [ 0x1c, ['unsigned long']],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0x18, {
'SecurityContext' : [ 0x0, ['_IMAGE_SECURITY_CONTEXT']],
'DynamicRelocations' : [ 0x8, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x10, ['long']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'__unnamed_1d02' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_1d08' : [ 0x8, {
'Banked' : [ 0x0, ['pointer64', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x90, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
'u2' : [ 0x40, ['__unnamed_15d2']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ViewLinks' : [ 0x60, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x70, ['pointer64', ['_EPROCESS']]],
'u3' : [ 0x78, ['__unnamed_1d02']],
'u4' : [ 0x88, ['__unnamed_1d08']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_EJOB' : [ 0x1c8, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xb8, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0xc0, ['unsigned long']],
'TotalProcesses' : [ 0xc4, ['unsigned long']],
'ActiveProcesses' : [ 0xc8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xcc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xd0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xd8, ['_LARGE_INTEGER']],
'MinimumWorkingSetSize' : [ 0xe0, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xe8, ['unsigned long long']],
'LimitFlags' : [ 0xf0, ['unsigned long']],
'ActiveProcessLimit' : [ 0xf4, ['unsigned long']],
'Affinity' : [ 0xf8, ['_KAFFINITY_EX']],
'PriorityClass' : [ 0x120, ['unsigned char']],
'AccessState' : [ 0x128, ['pointer64', ['_JOB_ACCESS_STATE']]],
'UIRestrictionsClass' : [ 0x130, ['unsigned long']],
'EndOfJobTimeAction' : [ 0x134, ['unsigned long']],
'CompletionPort' : [ 0x138, ['pointer64', ['void']]],
'CompletionKey' : [ 0x140, ['pointer64', ['void']]],
'SessionId' : [ 0x148, ['unsigned long']],
'SchedulingClass' : [ 0x14c, ['unsigned long']],
'ReadOperationCount' : [ 0x150, ['unsigned long long']],
'WriteOperationCount' : [ 0x158, ['unsigned long long']],
'OtherOperationCount' : [ 0x160, ['unsigned long long']],
'ReadTransferCount' : [ 0x168, ['unsigned long long']],
'WriteTransferCount' : [ 0x170, ['unsigned long long']],
'OtherTransferCount' : [ 0x178, ['unsigned long long']],
'ProcessMemoryLimit' : [ 0x180, ['unsigned long long']],
'JobMemoryLimit' : [ 0x188, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x190, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x198, ['unsigned long long']],
'CurrentJobMemoryUsed' : [ 0x1a0, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x1a8, ['_EX_PUSH_LOCK']],
'JobSetLinks' : [ 0x1b0, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x1c0, ['unsigned long']],
'JobFlags' : [ 0x1c4, ['unsigned long']],
} ],
'__unnamed_1d1c' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HvMaxCState' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_IDLE_STATES' : [ 0xa0, {
'Count' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['__unnamed_1d1c']],
'TargetState' : [ 0x8, ['unsigned long']],
'ActualState' : [ 0xc, ['unsigned long']],
'OldState' : [ 0x10, ['unsigned long']],
'TargetProcessors' : [ 0x18, ['_KAFFINITY_EX']],
'State' : [ 0x40, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'__unnamed_1d25' : [ 0x18, {
'EfiInformation' : [ 0x0, ['_EFI_FIRMWARE_INFORMATION']],
'PcatInformation' : [ 0x0, ['_PCAT_FIRMWARE_INFORMATION']],
} ],
'_FIRMWARE_INFORMATION_LOADER_BLOCK' : [ 0x20, {
'FirmwareTypeEfi' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x8, ['__unnamed_1d25']],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x10, ['_LIST_ENTRY']],
'Address' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_ETW_REALTIME_CONSUMER' : [ 0x88, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'ProcessHandle' : [ 0x10, ['pointer64', ['void']]],
'ProcessObject' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'NextNotDelivered' : [ 0x20, ['pointer64', ['void']]],
'RealtimeConnectContext' : [ 0x28, ['pointer64', ['void']]],
'DisconnectEvent' : [ 0x30, ['pointer64', ['_KEVENT']]],
'DataAvailableEvent' : [ 0x38, ['pointer64', ['_KEVENT']]],
'UserBufferCount' : [ 0x40, ['pointer64', ['unsigned long']]],
'UserBufferListHead' : [ 0x48, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'BuffersLost' : [ 0x50, ['unsigned long']],
'EmptyBuffersCount' : [ 0x54, ['unsigned long']],
'LoggerId' : [ 0x58, ['unsigned long']],
'ShutDownRequested' : [ 0x5c, ['unsigned char']],
'NewBuffersLost' : [ 0x5d, ['unsigned char']],
'Disconnected' : [ 0x5e, ['unsigned char']],
'ReservedBufferSpaceBitMap' : [ 0x60, ['_RTL_BITMAP']],
'ReservedBufferSpace' : [ 0x70, ['pointer64', ['unsigned char']]],
'ReservedBufferSpaceSize' : [ 0x78, ['unsigned long']],
'UserPagesAllocated' : [ 0x7c, ['unsigned long']],
'UserPagesReused' : [ 0x80, ['unsigned long']],
'Wow' : [ 0x84, ['unsigned char']],
} ],
'_POOL_DESCRIPTOR' : [ 0x1140, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PagedLock' : [ 0x8, ['_KGUARDED_MUTEX']],
'NonPagedLock' : [ 0x8, ['unsigned long long']],
'RunningAllocs' : [ 0x40, ['long']],
'RunningDeAllocs' : [ 0x44, ['long']],
'TotalBigPages' : [ 0x48, ['long']],
'ThreadsProcessingDeferrals' : [ 0x4c, ['long']],
'TotalBytes' : [ 0x50, ['unsigned long long']],
'PoolIndex' : [ 0x80, ['unsigned long']],
'TotalPages' : [ 0xc0, ['long']],
'PendingFrees' : [ 0x100, ['pointer64', ['pointer64', ['void']]]],
'PendingFreeDepth' : [ 0x108, ['long']],
'ListHeads' : [ 0x140, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_TOKEN_MANDATORY_POLICY' : [ 0x4, {
'Policy' : [ 0x0, ['unsigned long']],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x20, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x8, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0x18, ['unsigned long long']],
} ],
'_DRIVER_EXTENSION' : [ 0x38, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_KINTERRUPT' : [ 0xa0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'MessageServiceRoutine' : [ 0x20, ['pointer64', ['void']]],
'MessageIndex' : [ 0x28, ['unsigned long']],
'ServiceContext' : [ 0x30, ['pointer64', ['void']]],
'SpinLock' : [ 0x38, ['unsigned long long']],
'TickCount' : [ 0x40, ['unsigned long']],
'ActualLock' : [ 0x48, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x50, ['pointer64', ['void']]],
'Vector' : [ 0x58, ['unsigned long']],
'Irql' : [ 0x5c, ['unsigned char']],
'SynchronizeIrql' : [ 0x5d, ['unsigned char']],
'FloatingSave' : [ 0x5e, ['unsigned char']],
'Connected' : [ 0x5f, ['unsigned char']],
'Number' : [ 0x60, ['unsigned long']],
'ShareVector' : [ 0x64, ['unsigned char']],
'Pad' : [ 0x65, ['array', 3, ['unsigned char']]],
'Mode' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x6c, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptActiveHigh', 2: 'InterruptActiveLow'})]],
'ServiceCount' : [ 0x70, ['unsigned long']],
'DispatchCount' : [ 0x74, ['unsigned long']],
'Rsvd1' : [ 0x78, ['unsigned long long']],
'TrapFrame' : [ 0x80, ['pointer64', ['_KTRAP_FRAME']]],
'Reserved' : [ 0x88, ['pointer64', ['void']]],
'DispatchCode' : [ 0x90, ['array', 4, ['unsigned long']]],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long long']],
'GrantedAccess' : [ 0x8, ['unsigned long']],
'GrantedAccessIndex' : [ 0x8, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xa, ['unsigned short']],
'NextFreeTableEntry' : [ 0x8, ['unsigned long']],
} ],
'_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION' : [ 0x30, {
'SecurityAttributeCount' : [ 0x0, ['unsigned long']],
'SecurityAttributesList' : [ 0x8, ['_LIST_ENTRY']],
'WorkingSecurityAttributeCount' : [ 0x18, ['unsigned long']],
'WorkingSecurityAttributesList' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x88, {
'FileName' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'RegRootName' : [ 0x10, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x18, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x20, ['unsigned long']],
'CmHiveFlags' : [ 0x24, ['unsigned long']],
'CmKcbCacheSize' : [ 0x28, ['unsigned long']],
'CmHive2' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'HiveMounted' : [ 0x38, ['unsigned char']],
'ThreadFinished' : [ 0x39, ['unsigned char']],
'ThreadStarted' : [ 0x3a, ['unsigned char']],
'Allocate' : [ 0x3b, ['unsigned char']],
'WinPERequired' : [ 0x3c, ['unsigned char']],
'StartEvent' : [ 0x40, ['_KEVENT']],
'FinishedEvent' : [ 0x58, ['_KEVENT']],
'MountLock' : [ 0x70, ['_KEVENT']],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XSAVE_FORMAT']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x18, {
'Handles' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x200, {
'Irp' : [ 0x0, ['pointer64', ['_IRP']]],
'Thread' : [ 0x8, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x10, ['array', 62, ['pointer64', ['void']]]],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0x98, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Mdl' : [ 0x18, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x20, ['pointer64', ['void']]],
'UserLimit' : [ 0x28, ['pointer64', ['void']]],
'DataUserVa' : [ 0x30, ['pointer64', ['void']]],
'SystemVa' : [ 0x38, ['pointer64', ['void']]],
'TotalSize' : [ 0x40, ['unsigned long long']],
'Header' : [ 0x48, ['pointer64', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x50, ['pointer64', ['void']]],
'ListSize' : [ 0x58, ['unsigned long long']],
'Bitmap' : [ 0x60, ['pointer64', ['void']]],
'BitmapSize' : [ 0x68, ['unsigned long long']],
'Data' : [ 0x70, ['pointer64', ['void']]],
'DataSize' : [ 0x78, ['unsigned long long']],
'BitmapLimit' : [ 0x80, ['unsigned long']],
'BitmapNextHint' : [ 0x84, ['unsigned long']],
'ConcurrencyCount' : [ 0x88, ['unsigned long']],
'AttributeFlags' : [ 0x8c, ['unsigned long']],
'AttributeSize' : [ 0x90, ['unsigned long']],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_LAZY_WRITER' : [ 0x88, {
'ScanDpc' : [ 0x0, ['_KDPC']],
'ScanTimer' : [ 0x40, ['_KTIMER']],
'ScanActive' : [ 0x80, ['unsigned char']],
'OtherWork' : [ 0x81, ['unsigned char']],
'PendingTeardownScan' : [ 0x82, ['unsigned char']],
'PendingPeriodicScan' : [ 0x83, ['unsigned char']],
'PendingLowMemoryScan' : [ 0x84, ['unsigned char']],
'PendingPowerScan' : [ 0x85, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_IO_WORKITEM' : [ 0x40, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Routine' : [ 0x20, ['pointer64', ['void']]],
'IoObject' : [ 0x28, ['pointer64', ['void']]],
'Context' : [ 0x30, ['pointer64', ['void']]],
'Type' : [ 0x38, ['unsigned long']],
} ],
'_CM_RM' : [ 0x88, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x10, ['_LIST_ENTRY']],
'TmHandle' : [ 0x20, ['pointer64', ['void']]],
'Tm' : [ 0x28, ['pointer64', ['void']]],
'RmHandle' : [ 0x30, ['pointer64', ['void']]],
'KtmRm' : [ 0x38, ['pointer64', ['void']]],
'RefCount' : [ 0x40, ['unsigned long']],
'ContainerNum' : [ 0x44, ['unsigned long']],
'ContainerSize' : [ 0x48, ['unsigned long long']],
'CmHive' : [ 0x50, ['pointer64', ['_CMHIVE']]],
'LogFileObject' : [ 0x58, ['pointer64', ['void']]],
'MarshallingContext' : [ 0x60, ['pointer64', ['void']]],
'RmFlags' : [ 0x68, ['unsigned long']],
'LogStartStatus1' : [ 0x6c, ['long']],
'LogStartStatus2' : [ 0x70, ['long']],
'BaseLsn' : [ 0x78, ['unsigned long long']],
'RmLock' : [ 0x80, ['pointer64', ['_ERESOURCE']]],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_MMVAD_FLAGS' : [ 0x8, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 51, native_type='unsigned long long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 61, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 61, end_bit = 63, native_type='unsigned long long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMWSLE_HASH' : [ 0x4, {
'Index' : [ 0x0, ['unsigned long']],
} ],
'_UNEXPECTED_INTERRUPT' : [ 0x10, {
'PushImmOp' : [ 0x0, ['unsigned char']],
'PushImm' : [ 0x1, ['unsigned long']],
'PushRbp' : [ 0x5, ['unsigned char']],
'JmpOp' : [ 0x6, ['unsigned char']],
'JmpOffset' : [ 0x7, ['long']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_VF_TRACKER_STAMP' : [ 0x10, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'Flags' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x9, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0xa, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'OldIrql' : [ 0x8, ['unsigned char']],
'NewIrql' : [ 0x9, ['unsigned char']],
'Processor' : [ 0xa, ['unsigned short']],
'TickCount' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 5, ['pointer64', ['void']]]],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x90, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'Data' : [ 0x40, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_NLS_DATA_BLOCK' : [ 0x18, {
'AnsiCodePageData' : [ 0x0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0x8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0x10, ['pointer64', ['void']]],
} ],
'_ALIGNED_AFFINITY_SUMMARY' : [ 0x80, {
'CpuSet' : [ 0x0, ['_KAFFINITY_EX']],
'SMTSet' : [ 0x28, ['_KAFFINITY_EX']],
} ],
'_XSTATE_CONFIGURATION' : [ 0x210, {
'EnabledFeatures' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'OptimizedSave' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Features' : [ 0x10, ['array', 64, ['_XSTATE_FEATURE']]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x38, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'RealRefCount' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type='unsigned long long')]],
'InStore' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_RTL_UMS_CONTEXT' : [ 0x540, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Context' : [ 0x10, ['_CONTEXT']],
'Teb' : [ 0x4e0, ['pointer64', ['void']]],
'UserContext' : [ 0x4e8, ['pointer64', ['void']]],
'ScheduledThread' : [ 0x4f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'HasQuantumReq' : [ 0x4f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HasAffinityReq' : [ 0x4f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'HasPriorityReq' : [ 0x4f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Suspended' : [ 0x4f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VolatileContext' : [ 0x4f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Terminated' : [ 0x4f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DebugActive' : [ 0x4f0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DenyRunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReservedFlags' : [ 0x4f0, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
'Flags' : [ 0x4f0, ['long']],
'KernelUpdateLock' : [ 0x4f8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x4f8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PrimaryClientID' : [ 0x4f8, ['BitField', dict(start_bit = 2, end_bit = 64, native_type='unsigned long long')]],
'ContextLock' : [ 0x4f8, ['unsigned long long']],
'QuantumValue' : [ 0x500, ['unsigned long long']],
'AffinityMask' : [ 0x508, ['_GROUP_AFFINITY']],
'Priority' : [ 0x518, ['long']],
'PrimaryUmsContext' : [ 0x520, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'SwitchCount' : [ 0x528, ['unsigned long']],
'KernelYieldCount' : [ 0x52c, ['unsigned long']],
'MixedYieldCount' : [ 0x530, ['unsigned long']],
'YieldCount' : [ 0x534, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_TOKEN_PRIVILEGES' : [ 0x10, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Privileges' : [ 0x4, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['long']],
'NonPagedFrees' : [ 0x8, ['long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_MM_SUBSECTION_AVL_TABLE' : [ 0x38, {
'BalancedRoot' : [ 0x0, ['_MMSUBSECTION_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'Padding0' : [ 0x20, ['array', 2, ['unsigned long']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'ReferenceTime' : [ 0x38, ['_ETW_REF_CLOCK']],
'GlobalEntry' : [ 0x38, ['_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer64', ['void']]],
'Pointer1' : [ 0x40, ['pointer64', ['void']]],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x100, {
'IdleStates' : [ 0x0, ['pointer64', ['_PPM_IDLE_STATES']]],
'IdleTimeLast' : [ 0x8, ['unsigned long long']],
'IdleTimeTotal' : [ 0x10, ['unsigned long long']],
'IdleTimeEntry' : [ 0x18, ['unsigned long long']],
'IdleAccounting' : [ 0x20, ['pointer64', ['_PROC_IDLE_ACCOUNTING']]],
'Hypervisor' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ProcHypervisorNone', 1: 'ProcHypervisorPresent', 2: 'ProcHypervisorPower'})]],
'PerfHistoryTotal' : [ 0x2c, ['unsigned long']],
'ThermalConstraint' : [ 0x30, ['unsigned char']],
'PerfHistoryCount' : [ 0x31, ['unsigned char']],
'PerfHistorySlot' : [ 0x32, ['unsigned char']],
'Reserved' : [ 0x33, ['unsigned char']],
'LastSysTime' : [ 0x34, ['unsigned long']],
'WmiDispatchPtr' : [ 0x38, ['unsigned long long']],
'WmiInterfaceEnabled' : [ 0x40, ['long']],
'FFHThrottleStateInfo' : [ 0x48, ['_PPM_FFH_THROTTLE_STATE_INFO']],
'PerfActionDpc' : [ 0x68, ['_KDPC']],
'PerfActionMask' : [ 0xa8, ['long']],
'IdleCheck' : [ 0xb0, ['_PROC_IDLE_SNAP']],
'PerfCheck' : [ 0xc0, ['_PROC_IDLE_SNAP']],
'Domain' : [ 0xd0, ['pointer64', ['_PROC_PERF_DOMAIN']]],
'PerfConstraint' : [ 0xd8, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'Load' : [ 0xe0, ['pointer64', ['_PROC_PERF_LOAD']]],
'PerfHistory' : [ 0xe8, ['pointer64', ['_PROC_HISTORY_ENTRY']]],
'Utility' : [ 0xf0, ['unsigned long']],
'OverUtilizedHistory' : [ 0xf4, ['unsigned long']],
'AffinityCount' : [ 0xf8, ['unsigned long']],
'AffinityHistory' : [ 0xfc, ['unsigned long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0xc, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
'Tag' : [ 0x8, ['unsigned long']],
} ],
'_PPC_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Rom' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'KernelStack' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_SEGMENT_OBJECT' : [ 0x40, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x18, ['unsigned long']],
'ImageCommitment' : [ 0x1c, ['unsigned long']],
'ControlArea' : [ 0x20, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
'MmSectionFlags' : [ 0x30, ['pointer64', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x38, ['pointer64', ['_MMSUBSECTION_FLAGS']]],
} ],
'_PCW_CALLBACK_INFORMATION' : [ 0x28, {
'AddCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'RemoveCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'EnumerateInstances' : [ 0x0, ['_PCW_MASK_INFORMATION']],
'CollectData' : [ 0x0, ['_PCW_MASK_INFORMATION']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'DOCK_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ProfileDepartureSetMode' : [ 0x20, ['pointer64', ['void']]],
'ProfileDepartureUpdate' : [ 0x28, ['pointer64', ['void']]],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'WorkingSetType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'ModwriterAttached' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SessionMaster' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TrimmerState' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Available' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x50, {
'Lock' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'ActiveCount' : [ 0xc, ['unsigned long']],
'PendingNullCount' : [ 0x10, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x14, ['unsigned long']],
'PendingDelete' : [ 0x18, ['unsigned long']],
'FreeListHead' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x28, ['pointer64', ['void']]],
'CompletionKey' : [ 0x30, ['pointer64', ['void']]],
'Entry' : [ 0x38, ['array', 1, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderReserve', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderMaximum'})]],
'BasePage' : [ 0x18, ['unsigned long long']],
'PageCount' : [ 0x20, ['unsigned long long']],
} ],
'_CM_INTENT_LOCK' : [ 0x10, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x8, ['pointer64', ['pointer64', ['_CM_KCB_UOW']]]],
} ],
'_PROC_IDLE_ACCOUNTING' : [ 0x2c0, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'BucketLimits' : [ 0x18, ['array', 16, ['unsigned long long']]],
'State' : [ 0x98, ['array', 1, ['_PROC_IDLE_STATE_ACCOUNTING']]],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
} ],
'_TEB64' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'EtwLocalData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['unsigned long long']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0xa0, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'OptionChanges' : [ 0x78, ['unsigned long']],
'VerifyMode' : [ 0x7c, ['unsigned long']],
'PreviousBucketName' : [ 0x80, ['_UNICODE_STRING']],
'ActivityCounter' : [ 0x90, ['unsigned long']],
'PreviousActivityCounter' : [ 0x94, ['unsigned long']],
'WorkerTrimRequests' : [ 0x98, ['unsigned long']],
} ],
'_VI_FAULT_TRACE' : [ 0x48, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'IoPriorityBoosted' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OwnerReferenced' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OwnerCount' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_ETIMER' : [ 0x110, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x40, ['_KAPC']],
'TimerDpc' : [ 0x98, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Lock' : [ 0xe8, ['unsigned long long']],
'Period' : [ 0xf0, ['long']],
'ApcAssociated' : [ 0xf4, ['unsigned char']],
'WakeReason' : [ 0xf8, ['pointer64', ['_DIAGNOSTIC_CONTEXT']]],
'WakeTimerListEntry' : [ 0x100, ['_LIST_ENTRY']],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x20, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x10, ['_LIST_ENTRY']],
} ],
'__unnamed_1e83' : [ 0x8, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'StartVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'u1' : [ 0x0, ['__unnamed_1e83']],
'EndVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_XSTATE_FEATURE' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ARBITER_INSTANCE' : [ 0x698, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'OrderingName' : [ 0x18, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x20, ['long']],
'Allocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x30, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x38, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x48, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x58, ['long']],
'Interface' : [ 0x60, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x68, ['unsigned long']],
'AllocationStack' : [ 0x70, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x78, ['pointer64', ['void']]],
'PackResource' : [ 0x80, ['pointer64', ['void']]],
'UnpackResource' : [ 0x88, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x90, ['pointer64', ['void']]],
'TestAllocation' : [ 0x98, ['pointer64', ['void']]],
'RetestAllocation' : [ 0xa0, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa8, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xb0, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb8, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xc0, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc8, ['pointer64', ['void']]],
'AddReserved' : [ 0xd0, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd8, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xe0, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe8, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xf0, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf8, ['pointer64', ['void']]],
'AddAllocation' : [ 0x100, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x108, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x110, ['pointer64', ['void']]],
'InitializeRangeList' : [ 0x118, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x120, ['unsigned char']],
'TransactionEvent' : [ 0x128, ['pointer64', ['_KEVENT']]],
'Extension' : [ 0x130, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x138, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x140, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x148, ['pointer64', ['void']]],
'PdoDescriptionString' : [ 0x150, ['array', 336, ['wchar']]],
'PdoSymbolicNameString' : [ 0x3f0, ['array', 672, ['unsigned char']]],
'PdoAddressString' : [ 0x690, ['array', 1, ['wchar']]],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'__unnamed_1edc' : [ 0x4, {
'UserData' : [ 0x0, ['unsigned long']],
'Next' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1ede' : [ 0x8, {
'Last' : [ 0x0, ['unsigned long']],
'u' : [ 0x4, ['__unnamed_1edc']],
} ],
'__unnamed_1ee0' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_1edc']],
} ],
'__unnamed_1ee2' : [ 0x8, {
'OldCell' : [ 0x0, ['__unnamed_1ede']],
'NewCell' : [ 0x0, ['__unnamed_1ee0']],
} ],
'_HCELL' : [ 0xc, {
'Size' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_1ee2']],
} ],
'_HMAP_TABLE' : [ 0x4000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_PROC_PERF_CONSTRAINT' : [ 0x30, {
'Prcb' : [ 0x0, ['pointer64', ['_KPRCB']]],
'PerfContext' : [ 0x8, ['unsigned long long']],
'PercentageCap' : [ 0x10, ['unsigned long']],
'ThermalCap' : [ 0x14, ['unsigned long']],
'TargetFrequency' : [ 0x18, ['unsigned long']],
'AcumulatedFullFrequency' : [ 0x1c, ['unsigned long']],
'AcumulatedZeroFrequency' : [ 0x20, ['unsigned long']],
'FrequencyHistoryTotal' : [ 0x24, ['unsigned long']],
'AverageFrequency' : [ 0x28, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved1' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x20, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x10, ['long']],
'Misses' : [ 0x14, ['unsigned long']],
'MissesLast' : [ 0x18, ['unsigned long']],
'Pad0' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_1ef7' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1efb' : [ 0x18, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['unsigned short']],
'Group' : [ 0xa, ['unsigned short']],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_1efd' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1eff' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1f01' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1f03' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f05' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1f07' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1f09' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1f0b' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1ef7']],
'Memory' : [ 0x0, ['__unnamed_1ef7']],
'Interrupt' : [ 0x0, ['__unnamed_1efb']],
'Dma' : [ 0x0, ['__unnamed_1efd']],
'Generic' : [ 0x0, ['__unnamed_1ef7']],
'DevicePrivate' : [ 0x0, ['__unnamed_1eff']],
'BusNumber' : [ 0x0, ['__unnamed_1f01']],
'ConfigData' : [ 0x0, ['__unnamed_1f03']],
'Memory40' : [ 0x0, ['__unnamed_1f05']],
'Memory48' : [ 0x0, ['__unnamed_1f07']],
'Memory64' : [ 0x0, ['__unnamed_1f09']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1f0b']],
} ],
'_POP_THERMAL_ZONE' : [ 0x1e8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x10, ['unsigned char']],
'Flags' : [ 0x11, ['unsigned char']],
'Mode' : [ 0x12, ['unsigned char']],
'PendingMode' : [ 0x13, ['unsigned char']],
'ActivePoint' : [ 0x14, ['unsigned char']],
'PendingActivePoint' : [ 0x15, ['unsigned char']],
'Throttle' : [ 0x18, ['long']],
'LastTime' : [ 0x20, ['unsigned long long']],
'SampleRate' : [ 0x28, ['unsigned long']],
'LastTemp' : [ 0x2c, ['unsigned long']],
'PassiveTimer' : [ 0x30, ['_KTIMER']],
'PassiveDpc' : [ 0x70, ['_KDPC']],
'OverThrottled' : [ 0xb0, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0xc8, ['pointer64', ['_IRP']]],
'Info' : [ 0xd0, ['_THERMAL_INFORMATION_EX']],
'InfoLastUpdateTime' : [ 0x148, ['_LARGE_INTEGER']],
'Metrics' : [ 0x150, ['_POP_THERMAL_ZONE_METRICS']],
} ],
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_CM_WORKITEM' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Private' : [ 0x10, ['unsigned long']],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Parameter' : [ 0x20, ['pointer64', ['void']]],
} ],
'_POP_THERMAL_ZONE_METRICS' : [ 0x98, {
'MetricsResource' : [ 0x0, ['_ERESOURCE']],
'ActiveCount' : [ 0x68, ['unsigned long']],
'PassiveCount' : [ 0x6c, ['unsigned long']],
'LastActiveStartTick' : [ 0x70, ['_LARGE_INTEGER']],
'AverageActiveTime' : [ 0x78, ['_LARGE_INTEGER']],
'LastPassiveStartTick' : [ 0x80, ['_LARGE_INTEGER']],
'AveragePassiveTime' : [ 0x88, ['_LARGE_INTEGER']],
'StartTickSinceLastReset' : [ 0x90, ['_LARGE_INTEGER']],
} ],
'_CM_TRANS' : [ 0xa8, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x10, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x30, ['pointer64', ['void']]],
'CmRm' : [ 0x38, ['pointer64', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x40, ['pointer64', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x48, ['pointer64', ['void']]],
'KtmUow' : [ 0x50, ['_GUID']],
'StartLsn' : [ 0x60, ['unsigned long long']],
'TransState' : [ 0x68, ['unsigned long']],
'HiveCount' : [ 0x6c, ['unsigned long']],
'HiveArray' : [ 0x70, ['array', 7, ['pointer64', ['_CMHIVE']]]],
} ],
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x40, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ProbeMode' : [ 0x10, ['unsigned char']],
'PagedPoolCharge' : [ 0x14, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x18, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x1c, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQos' : [ 0x28, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Flags' : [ 0x28, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x2c, ['unsigned short']],
'SpareUSHORT' : [ 0x2e, ['unsigned short']],
} ],
'_POOL_HACKER' : [ 0x30, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x10, ['array', 8, ['unsigned long']]],
} ],
'_DISALLOWED_GUIDS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Guids' : [ 0x8, ['pointer64', ['_GUID']]],
} ],
'_PO_DIAG_STACK_RECORD' : [ 0x10, {
'StackDepth' : [ 0x0, ['unsigned long']],
'Stack' : [ 0x8, ['array', 1, ['pointer64', ['void']]]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1c, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1b, ['unsigned char']],
} ],
'__unnamed_1f48' : [ 0x4, {
'SnapSharedExportsFailed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1f4a' : [ 0x18, {
'AllSharedExportThunks' : [ 0x0, ['_VF_TARGET_ALL_SHARED_EXPORT_THUNKS']],
'Flags' : [ 0x0, ['__unnamed_1f48']],
} ],
'_VF_TARGET_DRIVER' : [ 0x30, {
'TreeNode' : [ 0x0, ['_VF_AVL_TREE_NODE']],
'u1' : [ 0x10, ['__unnamed_1f4a']],
'VerifiedData' : [ 0x28, ['pointer64', ['_VF_TARGET_VERIFIED_DRIVER_DATA']]],
} ],
'__unnamed_1f52' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f54' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f56' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f58' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceIds' : [ 0x8, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f5a' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1f5c' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1f5e' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f60' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1f62' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1f64' : [ 0x20, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'Flags' : [ 0x10, ['unsigned long']],
'SessionId' : [ 0x14, ['unsigned long']],
'DataLength' : [ 0x18, ['unsigned long']],
'Data' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_1f66' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_1f52']],
'TargetDevice' : [ 0x0, ['__unnamed_1f54']],
'InstallDevice' : [ 0x0, ['__unnamed_1f56']],
'CustomNotification' : [ 0x0, ['__unnamed_1f58']],
'ProfileNotification' : [ 0x0, ['__unnamed_1f5a']],
'PowerNotification' : [ 0x0, ['__unnamed_1f5c']],
'VetoNotification' : [ 0x0, ['__unnamed_1f5e']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1f60']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1f62']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_1f64']],
'PropertyChangeNotification' : [ 0x0, ['__unnamed_1f56']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x50, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'VetoEvent', 7: 'BlockedDriverEvent', 8: 'InvalidIDEvent', 9: 'DevicePropertyChangeEvent', 10: 'DeviceInstanceRemovalEvent', 11: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_1f66']],
} ],
'_VF_SUSPECT_DRIVER_ENTRY' : [ 0x28, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
} ],
'_MMPTE_TIMESTAMP' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x110, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x10, ['array', 32, ['unsigned long long']]],
} ],
'_XSTATE_CONTEXT' : [ 0x20, {
'Mask' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Area' : [ 0x10, ['pointer64', ['_XSAVE_AREA']]],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'_XSAVE_FORMAT' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'_MBCB' : [ 0xc0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'MostRecentlyDirtiedPage' : [ 0x28, ['long long']],
'BitmapRange1' : [ 0x30, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x60, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x90, ['_BITMAP_RANGE']],
} ],
'_PS_CPU_QUOTA_BLOCK' : [ 0x4080, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SessionId' : [ 0x10, ['unsigned long']],
'CpuShareWeight' : [ 0x14, ['unsigned long']],
'CapturedWeightData' : [ 0x18, ['_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA']],
'DuplicateInputMarker' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x20, ['long']],
'BlockCurrentGenerationLock' : [ 0x0, ['unsigned long long']],
'CyclesAccumulated' : [ 0x8, ['unsigned long long']],
'CycleCredit' : [ 0x40, ['unsigned long long']],
'BlockCurrentGeneration' : [ 0x48, ['unsigned long']],
'CpuCyclePercent' : [ 0x4c, ['unsigned long']],
'CyclesFinishedForCurrentGeneration' : [ 0x50, ['unsigned char']],
'Cpu' : [ 0x80, ['array', 256, ['_PS_PER_CPU_QUOTA_CACHE_AWARE']]],
} ],
'__unnamed_1f82' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_1f82']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x70, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'CacheAligned' : [ 0x2, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
} ],
'__unnamed_1fb7' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_1fb7']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
} ],
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PS_PER_CPU_QUOTA_CACHE_AWARE' : [ 0x40, {
'SortedListEntry' : [ 0x0, ['_LIST_ENTRY']],
'IdleOnlyListHead' : [ 0x10, ['_LIST_ENTRY']],
'CycleBaseAllowance' : [ 0x20, ['unsigned long long']],
'CyclesRemaining' : [ 0x28, ['long long']],
'CurrentGeneration' : [ 0x30, ['unsigned long']],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'ProcessorIndex' : [ 0x0, ['unsigned short']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_PROC_IDLE_SNAP' : [ 0x10, {
'Time' : [ 0x0, ['unsigned long long']],
'Idle' : [ 0x8, ['unsigned long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x28, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
'ActualLimit' : [ 0x20, ['unsigned long long']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ExecuteOptions' : [ 0x0, ['unsigned char']],
} ],
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x10, {
'VacbArrayIndex' : [ 0x0, ['unsigned long']],
'MappingCount' : [ 0x4, ['unsigned long']],
'HighestMappedIndex' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'Next' : [ 0x0, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestSummary' : [ 0x8, ['long long']],
'RequestPacket' : [ 0x10, ['_KREQUEST_PACKET']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Precise' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_PEB32' : [ 0x248, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'IFEOKey' : [ 0x24, ['unsigned long']],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'UserSharedInfoPtr' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'ApiSetMap' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'HotpatchInformation' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['unsigned long']],
'WerShipAssertPtr' : [ 0x234, ['unsigned long']],
'pContextData' : [ 0x238, ['unsigned long']],
'pImageHeaderHash' : [ 0x23c, ['unsigned long']],
'TracingFlags' : [ 0x240, ['unsigned long']],
'HeapTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x240, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['wchar']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
}
|
PypiClean
|
/ndn_storage-0.2.3-py3-none-any.whl/ndn_storage-0.2.3.dist-info/LICENSE.rst
|
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
|
PypiClean
|
/easy_boto3-0.1.7.tar.gz/easy_boto3-0.1.7/easy_boto3/ec2/config_parser.py
|
import yaml
from easy_boto3.ec2.script import read_startup_script, inject_aws_creds, add_ssh_forwarding, add_github_host
from easy_boto3.profile.validation import check_credentials
from easy_boto3.profile.active import set_active_profile, check_active_profile
def parse(base_config):
    """Parse an easy_boto3 base-config YAML file into EC2 launch details.

    Args:
        base_config: Path to a YAML file containing the top-level keys
            ``aws_profile`` and ``ec2_instance`` (with ``instance_details``,
            ``ssh_details`` and ``script_details`` sub-sections) and an
            optional ``alarm_details`` section.

    Returns:
        Tuple ``(profile_name, ec2_instance_details, alarm_instance_details,
        ssh_instance_details)``.  ``alarm_instance_details`` is ``None`` when
        no ``alarm_details`` section is present in the config.

    Raises:
        yaml.YAMLError: If the config file is not valid YAML.
        KeyError: If a required configuration key is missing.
    """
    # Read in the base config.  Let YAML errors propagate: the previous
    # behavior printed the error and continued with `base_config` still being
    # the file path string, which crashed later with a confusing TypeError.
    with open(base_config, 'r') as stream:
        config = yaml.safe_load(stream)

    # parse config sections after first key 'instance_details'
    profile_name = config['aws_profile']
    ec2_instance_config = config['ec2_instance']
    instance_details = ec2_instance_config['instance_details']
    ssh_instance_details = ec2_instance_config['ssh_details']
    script_details = ec2_instance_config['script_details']
    alarm_instance_details = None

    # make sure the requested profile is the currently active one
    if check_active_profile() != profile_name:
        set_active_profile(profile_name)

    # re-convert ForwardAgent back to the 'yes'/'no' strings ssh-config
    # expects (YAML parses yes/no into Python booleans); any other value is
    # passed through unchanged
    forward_agent = ssh_instance_details['Config']['ForwardAgent']
    if forward_agent is True:
        ssh_instance_details['Config']['ForwardAgent'] = 'yes'
    elif forward_agent is False:
        ssh_instance_details['Config']['ForwardAgent'] = 'no'

    # set up alarm_details if present in the config
    if 'alarm_details' in config:
        alarm_details = config['alarm_details']
        alarm_keys = ('ComparisonOperator', 'EvaluationPeriods', 'MetricName',
                      'Namespace', 'Period', 'Statistic', 'Threshold')
        alarm_instance_details = {key: alarm_details[key] for key in alarm_keys}

    # read in the startup script and apply the optional transformations;
    # .get() makes the boolean flags optional instead of raising KeyError
    UserData = read_startup_script(script_details['filepath'])
    if script_details.get('inject_aws_creds') is True:
        aws_creds = check_credentials(profile_name)
        UserData = inject_aws_creds(UserData, aws_creds)
    if script_details.get('ssh_forwarding') is True:
        UserData = add_ssh_forwarding(UserData)
    if script_details.get('github_host') is True:
        UserData = add_github_host(UserData)

    # assemble the boto3-ready dictionary of instance details
    block_device = instance_details['BlockDeviceMappings']
    ec2_instance_details = {
        'InstanceName': instance_details['InstanceName'],
        'InstanceType': instance_details['InstanceType'],
        'ImageId': instance_details['ImageId'],
        'Groups': instance_details['Groups'],
        'BlockDeviceMappings': [{
            'DeviceName': block_device['DeviceName'],
            'Ebs': {
                'VolumeSize': block_device['Ebs']['VolumeSize'],
                'VolumeType': block_device['Ebs']['VolumeType'],
                'DeleteOnTermination': block_device['Ebs']['DeleteOnTermination']
            }
        }],
        'UserData': UserData,
        # KeyName is the IdentityFile's base filename without its extension
        'KeyName': ssh_instance_details['Config']['IdentityFile'].split('/')[-1].split('.')[0],
    }
    return profile_name, ec2_instance_details, alarm_instance_details, ssh_instance_details
|
PypiClean
|
/pulumi_oci-1.9.0a1693465256.tar.gz/pulumi_oci-1.9.0a1693465256/pulumi_oci/mediaservices/media_workflow_configuration.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['MediaWorkflowConfigurationArgs', 'MediaWorkflowConfiguration']
@pulumi.input_type
class MediaWorkflowConfigurationArgs:
    def __init__(__self__, *,
                 compartment_id: pulumi.Input[str],
                 display_name: pulumi.Input[str],
                 parameters: pulumi.Input[str],
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        The set of arguments for constructing a MediaWorkflowConfiguration resource.

        NOTE: ``@pulumi.input_type`` introspects the property getters/setters
        below, so their names and ``pulumi.getter(name=...)`` mappings must
        stay in sync with the provider schema.

        :param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier.
        :param pulumi.Input[str] display_name: (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        :param pulumi.Input[str] parameters: (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

               ** IMPORTANT **
               Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        # Required arguments are set unconditionally; optional ones only when
        # supplied, so pulumi can distinguish "unset" from "explicitly None".
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "parameters", parameters)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) Compartment Identifier.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def parameters(self) -> pulumi.Input[str]:
        """
        (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

        ** IMPORTANT **
        Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        """
        return pulumi.get(self, "parameters")

    @parameters.setter
    def parameters(self, value: pulumi.Input[str]):
        pulumi.set(self, "parameters", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)
@pulumi.input_type
class _MediaWorkflowConfigurationState:
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 lifecyle_details: Optional[pulumi.Input[str]] = None,
                 parameters: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering MediaWorkflowConfiguration resources.

        NOTE: ``lifecyle_details`` (sic) keeps the misspelling used by the
        upstream provider schema; do not "fix" the name.

        :param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] display_name: (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] lifecyle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input[str] parameters: (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

               ** IMPORTANT **
               Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        :param pulumi.Input[str] state: The current state of the MediaWorkflowConfiguration.
        :param pulumi.Input[Mapping[str, Any]] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
        :param pulumi.Input[str] time_created: The time when the MediaWorkflowConfiguration was created. An RFC3339 formatted datetime string.
        :param pulumi.Input[str] time_updated: The time when the MediaWorkflowConfiguration was updated. An RFC3339 formatted datetime string.
        """
        # Every field is optional: state objects may be partially populated
        # when looking up / filtering existing resources.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if lifecyle_details is not None:
            pulumi.set(__self__, "lifecyle_details", lifecyle_details)
        if parameters is not None:
            pulumi.set(__self__, "parameters", parameters)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if system_tags is not None:
            pulumi.set(__self__, "system_tags", system_tags)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)
        if time_updated is not None:
            pulumi.set(__self__, "time_updated", time_updated)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Compartment Identifier.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)

    @property
    @pulumi.getter(name="lifecyleDetails")
    def lifecyle_details(self) -> Optional[pulumi.Input[str]]:
        """
        A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        """
        return pulumi.get(self, "lifecyle_details")

    @lifecyle_details.setter
    def lifecyle_details(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "lifecyle_details", value)

    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

        ** IMPORTANT **
        Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        """
        return pulumi.get(self, "parameters")

    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parameters", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The current state of the MediaWorkflowConfiguration.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="systemTags")
    def system_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
        """
        return pulumi.get(self, "system_tags")

    @system_tags.setter
    def system_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "system_tags", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The time when the MediaWorkflowConfiguration was created. An RFC3339 formatted datetime string.
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """
        The time when the MediaWorkflowConfiguration was updated. An RFC3339 formatted datetime string.
        """
        return pulumi.get(self, "time_updated")

    @time_updated.setter
    def time_updated(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_updated", value)
class MediaWorkflowConfiguration(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 parameters: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        This resource provides the Media Workflow Configuration resource in Oracle Cloud Infrastructure Media Services service.

        Creates a new MediaWorkflowConfiguration.

        Overload: keyword-argument form; the actual implementation lives in
        ``_internal_init``.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_media_workflow_configuration = oci.media_services.MediaWorkflowConfiguration("testMediaWorkflowConfiguration",
            compartment_id=var["compartment_id"],
            display_name=var["media_workflow_configuration_display_name"],
            parameters=var["media_workflow_configuration_parameters"],
            defined_tags={
                "foo-namespace.bar-key": "value",
            },
            freeform_tags={
                "bar-key": "value",
            })
        ```

        ## Import

        MediaWorkflowConfigurations can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:MediaServices/mediaWorkflowConfiguration:MediaWorkflowConfiguration test_media_workflow_configuration "id"
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] display_name: (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] parameters: (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

               ** IMPORTANT **
               Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MediaWorkflowConfigurationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource provides the Media Workflow Configuration resource in Oracle Cloud Infrastructure Media Services service.

        Creates a new MediaWorkflowConfiguration.

        Overload: args-object form; the actual implementation lives in
        ``_internal_init``.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_media_workflow_configuration = oci.media_services.MediaWorkflowConfiguration("testMediaWorkflowConfiguration",
            compartment_id=var["compartment_id"],
            display_name=var["media_workflow_configuration_display_name"],
            parameters=var["media_workflow_configuration_parameters"],
            defined_tags={
                "foo-namespace.bar-key": "value",
            },
            freeform_tags={
                "bar-key": "value",
            })
        ```

        ## Import

        MediaWorkflowConfigurations can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:MediaServices/mediaWorkflowConfiguration:MediaWorkflowConfiguration test_media_workflow_configuration "id"
        ```

        :param str resource_name: The name of the resource.
        :param MediaWorkflowConfigurationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher for the two overloads above: if the caller passed a
        # MediaWorkflowConfigurationArgs object, unpack its fields into the
        # keyword form expected by _internal_init; otherwise forward the
        # keyword arguments as-is.
        resource_args, opts = _utilities.get_resource_args_opts(MediaWorkflowConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 parameters: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, builds the props bag, and registers the resource with the
        # pulumi engine.
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            # Creating a new resource: required inputs must be present unless
            # the resource is being looked up by URN.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MediaWorkflowConfigurationArgs.__new__(MediaWorkflowConfigurationArgs)

            if compartment_id is None and not opts.urn:
                raise TypeError("Missing required property 'compartment_id'")
            __props__.__dict__["compartment_id"] = compartment_id
            __props__.__dict__["defined_tags"] = defined_tags
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["freeform_tags"] = freeform_tags
            if parameters is None and not opts.urn:
                raise TypeError("Missing required property 'parameters'")
            __props__.__dict__["parameters"] = parameters
            # Output-only properties start as None; the provider fills them in.
            __props__.__dict__["lifecyle_details"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["system_tags"] = None
            __props__.__dict__["time_created"] = None
            __props__.__dict__["time_updated"] = None
        super(MediaWorkflowConfiguration, __self__).__init__(
            'oci:MediaServices/mediaWorkflowConfiguration:MediaWorkflowConfiguration',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            lifecyle_details: Optional[pulumi.Input[str]] = None,
            parameters: Optional[pulumi.Input[str]] = None,
            state: Optional[pulumi.Input[str]] = None,
            system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            time_created: Optional[pulumi.Input[str]] = None,
            time_updated: Optional[pulumi.Input[str]] = None) -> 'MediaWorkflowConfiguration':
        """
        Get an existing MediaWorkflowConfiguration resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] display_name: (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] lifecyle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input[str] parameters: (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue

               ** IMPORTANT **
               Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
        :param pulumi.Input[str] state: The current state of the MediaWorkflowConfiguration.
        :param pulumi.Input[Mapping[str, Any]] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
        :param pulumi.Input[str] time_created: The time when the MediaWorkflowConfiguration was created. An RFC3339 formatted datetime string.
        :param pulumi.Input[str] time_updated: The time when the MediaWorkflowConfiguration was updated. An RFC3339 formatted datetime string.
        """
        # Attach the provider id to the options, then build a state bag from
        # whatever qualifying properties the caller supplied.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _MediaWorkflowConfigurationState.__new__(_MediaWorkflowConfigurationState)

        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["defined_tags"] = defined_tags
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["freeform_tags"] = freeform_tags
        __props__.__dict__["lifecyle_details"] = lifecyle_details
        __props__.__dict__["parameters"] = parameters
        __props__.__dict__["state"] = state
        __props__.__dict__["system_tags"] = system_tags
        __props__.__dict__["time_created"] = time_created
        __props__.__dict__["time_updated"] = time_updated
        return MediaWorkflowConfiguration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
    """
    (Updatable) Compartment Identifier.
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
    """
    (Updatable) MediaWorkflowConfiguration identifier. Avoid entering confidential information.
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="lifecyleDetails")
def lifecyle_details(self) -> pulumi.Output[str]:
    """
    A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
    """
    # NOTE: "lifecyle" (sic) mirrors the provider schema's property name;
    # renaming it would break callers and the wire mapping.
    return pulumi.get(self, "lifecyle_details")
@property
@pulumi.getter
def parameters(self) -> pulumi.Output[str]:
    """
    (Updatable) Reusable parameter values encoded as a JSON; the top and second level JSON elements are objects. Each key of the top level object refers to a task key that is unique to the workflow, each of the second level objects' keys refer to the name of a parameter that is unique to the task. taskKey > parameterName > parameterValue
    ** IMPORTANT **
    Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "parameters")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
    """
    The current state of the MediaWorkflowConfiguration.
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "state")
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "system_tags")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
    """
    The time when the MediaWorkflowConfiguration was created. An RFC3339 formatted datetime string.
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
    """
    The time when the MediaWorkflowConfiguration was updated. An RFC3339 formatted datetime string.
    """
    # Thin getter: value is resolved from the resource's Pulumi state.
    return pulumi.get(self, "time_updated")
|
PypiClean
|
/thickly-1.2.3.tar.gz/thickly-1.2.3/locust/user/task.py
|
import logging
import random
import sys
import traceback
from time import time
import gevent
from gevent import GreenletExit
from locust.exception import InterruptTaskSet, RescheduleTask, RescheduleTaskImmediately, \
StopUser, MissingWaitTimeError
# Module-level logger for task-scheduling diagnostics.
logger = logging.getLogger(__name__)
# The three states a user greenlet cycles through: wait() toggles between
# running/waiting, and _check_stop_condition() raises StopUser on "stopping".
LOCUST_STATE_RUNNING, LOCUST_STATE_WAITING, LOCUST_STATE_STOPPING = ["running", "waiting", "stopping"]
def task(weight=1):
    """
    Convenience decorator for declaring tasks on a User or a TaskSet inline
    in the class body. The decorated callable is marked with a
    ``locust_task_weight`` attribute that the metaclass later expands into
    the ``tasks`` list. Example::

        class ForumPage(TaskSet):
            @task(100)
            def read_thread(self):
                pass

            @task(7)
            def create_thread(self):
                pass
    """
    def decorator_func(func):
        func.locust_task_weight = weight
        return func

    # Support bare usage without parentheses::
    #
    #     @task
    #     def my_task(self):
    #         pass
    #
    # In that case ``weight`` is actually the decorated function itself.
    if callable(weight):
        func, weight = weight, 1
        return decorator_func(func)
    return decorator_func
def tag(*tags):
    """
    Decorator for tagging tasks and TaskSets with the given tag name(s). The
    test run can then be limited to tasks carrying any of the tags supplied
    via the --tags command-line argument. Example::

        class ForumPage(TaskSet):
            @tag('thread')
            @task(100)
            def read_thread(self):
                pass

            @tag('thread')
            @tag('post')
            @task(7)
            def create_thread(self):
                pass

            @tag('post')
            @task(11)
            def comment(self):
                pass
    """
    # Guard against bare usage (``@tag`` without a tag name).
    if not tags or callable(tags[0]):
        raise ValueError('No tag name was supplied')

    def decorator_func(decorated):
        if hasattr(decorated, 'tasks'):
            # A TaskSet-like holder: propagate the tags to every child task.
            decorated.tasks = [tag(*tags)(child) for child in decorated.tasks]
        else:
            # Only look at the object's own __dict__ so an inherited tag set
            # is not mutated in place.
            if 'locust_tag_set' not in decorated.__dict__:
                decorated.locust_tag_set = set()
            decorated.locust_tag_set |= set(tags)
        return decorated

    return decorator_func
def get_tasks_from_base_classes(bases, class_dict):
    """
    Collect every declared task for a TaskSet/User class body.

    Shared by both TaskSetMeta and UserMeta. Tasks come from three sources,
    appended in this order: tasks inherited from base classes, tasks listed
    in the ``tasks`` attribute (list / weighted-tuple list / dict), and
    callables marked by the ``@task`` decorator. Weighted entries are
    expanded by repetition so a plain random.choice picks proportionally.
    """
    collected = []
    # 1) Inherited tasks from every base class that declares any.
    for base in bases:
        if getattr(base, "tasks", None):
            collected.extend(base.tasks)
    # 2) Tasks declared via the `tasks` class attribute.
    declared = class_dict.get("tasks")
    if declared is not None:
        if isinstance(declared, dict):
            declared = declared.items()
        for entry in declared:
            if isinstance(entry, tuple):
                fn, weight = entry
                collected.extend([fn] * weight)
            else:
                collected.append(entry)
    # 3) Callables marked with @task (carry a locust_task_weight attribute).
    for member in class_dict.values():
        if "locust_task_weight" in dir(member):
            collected.extend([member] * member.locust_task_weight)
    return collected
def filter_tasks_by_tags(task_holder, tags=None, exclude_tags=None, checked=None):
    """
    Recursively prune tasks/TaskSets from a TaskSet/User that should not run
    under the given --tags / --exclude-tags options.

    ``checked`` memoizes the verdict per task object so shared tasks are only
    evaluated once across the recursion.
    """
    if checked is None:
        checked = {}
    kept = []
    for candidate in task_holder.tasks:
        # Reuse a previous verdict for tasks seen elsewhere in the tree.
        if candidate in checked:
            if checked[candidate]:
                kept.append(candidate)
            continue
        if hasattr(candidate, 'tasks'):
            # Nested TaskSet: filter its children first; keep it only if any survive.
            filter_tasks_by_tags(candidate, tags, exclude_tags, checked)
            passing = len(candidate.tasks) > 0
        else:
            passing = True
            tag_set = getattr(candidate, 'locust_tag_set', None)
            if tags is not None:
                passing = tag_set is not None and bool(tag_set & tags)
            if exclude_tags is not None:
                passing = passing and (tag_set is None or not (tag_set & exclude_tags))
        if passing:
            kept.append(candidate)
        checked[candidate] = passing
    task_holder.tasks = kept
class TaskSetMeta(type):
    """
    Metaclass for TaskSet classes. Before the class object is created it
    flattens every task declaration — the ``tasks`` list/dict and any
    ``@task``-decorated methods, including those inherited from bases — into
    a single weighted ``tasks`` list on the new class.
    """

    def __new__(mcs, classname, bases, class_dict):
        class_dict["tasks"] = get_tasks_from_base_classes(bases, class_dict)
        return super().__new__(mcs, classname, bases, class_dict)
class TaskSet(object, metaclass=TaskSetMeta):
    """
    Class defining a set of tasks that a User will execute.
    When a TaskSet starts running, it will pick a task from the *tasks* attribute,
    execute it, and then sleep for the number of seconds returned by its *wait_time*
    function. If no wait_time method has been declared on the TaskSet, it'll call the
    wait_time function on the User by default. It will then schedule another task
    for execution and so on.
    TaskSets can be nested, which means that a TaskSet's *tasks* attribute can contain
    another TaskSet. If the nested TaskSet is scheduled to be executed, it will be
    instantiated and called from the currently executing TaskSet. Execution in the
    currently running TaskSet will then be handed over to the nested TaskSet which will
    continue to run until it throws an InterruptTaskSet exception, which is done when
    :py:meth:`TaskSet.interrupt() <locust.TaskSet.interrupt>` is called. (execution
    will then continue in the first TaskSet).
    """

    tasks = []
    """
    Collection of python callables and/or TaskSet classes that the User(s) will run.
    If tasks is a list, the task to be performed will be picked randomly.
    If tasks is a *(callable,int)* list of two-tuples, or a {callable:int} dict,
    the task to be performed will be picked randomly, but each task will be weighted
    according to its corresponding int value. So in the following case, *ThreadPage* will
    be fifteen times more likely to be picked than *write_post*::
        class ForumPage(TaskSet):
            tasks = {ThreadPage:15, write_post:1}
    """

    min_wait = None
    """
    Deprecated: Use wait_time instead.
    Minimum waiting time between the execution of user tasks. Can be used to override
    the min_wait defined in the root User class, which will be used if not set on the
    TaskSet.
    """

    max_wait = None
    """
    Deprecated: Use wait_time instead.
    Maximum waiting time between the execution of user tasks. Can be used to override
    the max_wait defined in the root User class, which will be used if not set on the
    TaskSet.
    """

    wait_function = None
    """
    Deprecated: Use wait_time instead.
    Function used to calculate waiting time between the execution of user tasks in milliseconds.
    Can be used to override the wait_function defined in the root User class, which will be used
    if not set on the TaskSet.
    """

    _user = None
    _parent = None

    def __init__(self, parent):
        self._task_queue = []
        # Creation timestamp; not consumed within this class.
        self._time_start = time()
        # A nested TaskSet inherits the owning User from its parent TaskSet;
        # a root TaskSet is constructed with the User itself as parent.
        if isinstance(parent, TaskSet):
            self._user = parent.user
        else:
            self._user = parent
        self._parent = parent
        # if this class doesn't have a min_wait, max_wait or wait_function defined, copy it from Locust
        if not self.min_wait:
            self.min_wait = self.user.min_wait
        if not self.max_wait:
            self.max_wait = self.user.max_wait
        if not self.wait_function:
            self.wait_function = self.user.wait_function

    @property
    def user(self):
        """:py:class:`User <locust.User>` instance that this TaskSet was created by"""
        return self._user

    @property
    def parent(self):
        """Parent TaskSet instance of this TaskSet (or :py:class:`User <locust.User>` if this is not a nested TaskSet)"""
        return self._parent

    def on_start(self):
        """
        Called when a User starts executing this TaskSet
        """
        pass

    def on_stop(self):
        """
        Called when a User stops executing this TaskSet. E.g. when TaskSet.interrupt() is called
        or when the User is killed
        """
        pass

    def run(self):
        """
        Main scheduling loop: pick a task, execute it, wait, repeat.

        Control flow is exception driven: InterruptTaskSet bubbles execution
        back to the parent TaskSet (translated into RescheduleTask /
        RescheduleTaskImmediately), StopUser/GreenletExit terminate the user,
        and any other exception is reported to the environment's user_error
        event and either swallowed (catch_exceptions) or re-raised.
        """
        try:
            self.on_start()
        except InterruptTaskSet as e:
            # on_start() may already interrupt; translate for the parent's
            # run loop while preserving the original traceback.
            if e.reschedule:
                raise RescheduleTaskImmediately(e.reschedule).with_traceback(sys.exc_info()[2])
            else:
                raise RescheduleTask(e.reschedule).with_traceback(sys.exc_info()[2])
        while (True):
            try:
                if not self._task_queue:
                    self.schedule_task(self.get_next_task())
                try:
                    self._check_stop_condition()
                    self.execute_next_task()
                except RescheduleTaskImmediately:
                    # Skip the wait and run the next task right away.
                    pass
                except RescheduleTask:
                    self.wait()
                else:
                    self.wait()
            except InterruptTaskSet as e:
                # Nested TaskSet asked to hand control back to the parent.
                self.on_stop()
                if e.reschedule:
                    raise RescheduleTaskImmediately(e.reschedule) from e
                else:
                    raise RescheduleTask(e.reschedule) from e
            except (StopUser, GreenletExit):
                self.on_stop()
                raise
            except Exception as e:
                self.user.environment.events.user_error.fire(user_instance=self, exception=e, tb=sys.exc_info()[2])
                if self.user.environment.catch_exceptions:
                    logger.error("%s\n%s", e, traceback.format_exc())
                    self.wait()
                else:
                    raise

    def execute_next_task(self):
        # FIFO: run the oldest scheduled task first.
        self.execute_task(self._task_queue.pop(0))

    def execute_task(self, task):
        # check if the function is a method bound to the current locust, and if so, don't pass self as first argument
        if hasattr(task, "__self__") and task.__self__ == self:
            # task is a bound method on self
            task()
        elif hasattr(task, "tasks") and issubclass(task, TaskSet):
            # task is another (nested) TaskSet class
            task(self).run()
        else:
            # task is a function
            task(self)

    def schedule_task(self, task_callable, first=False):
        """
        Add a task to the User's task execution queue.
        :param task_callable: User task to schedule.
        :param first: Optional keyword argument. If True, the task will be put first in the queue.
        """
        if first:
            self._task_queue.insert(0, task_callable)
        else:
            self._task_queue.append(task_callable)

    def get_next_task(self):
        if not self.tasks:
            raise Exception("No tasks defined. use the @task decorator or set the tasks property of the TaskSet")
        # Weighted selection works because weighted tasks appear multiple times.
        return random.choice(self.tasks)

    def wait_time(self):
        """
        Method that returns the time (in seconds) between the execution of tasks.
        Example::
            from locust import TaskSet, between
            class Tasks(TaskSet):
                wait_time = between(3, 25)
        """
        if self.user.wait_time:
            return self.user.wait_time()
        elif self.min_wait is not None and self.max_wait is not None:
            # Legacy min_wait/max_wait are expressed in milliseconds.
            return random.randint(self.min_wait, self.max_wait) / 1000.0
        else:
            raise MissingWaitTimeError("You must define a wait_time method on either the %s or %s class" % (
                type(self.user).__name__,
                type(self).__name__,
            ))

    def wait(self):
        """
        Make the running user sleep for a duration defined by the Locust.wait_time
        function (or TaskSet.wait_time function if it's been defined).
        The user can also be killed gracefully while it's sleeping, so calling this
        method within a task makes it possible for a user to be killed mid-task, even if you've
        set a stop_timeout. If this behaviour is not desired you should make the user wait using
        gevent.sleep() instead.
        """
        # Check both before and after sleeping so a stop requested during the
        # sleep takes effect immediately.
        self._check_stop_condition()
        self.user._state = LOCUST_STATE_WAITING
        self._sleep(self.wait_time())
        self._check_stop_condition()
        self.user._state = LOCUST_STATE_RUNNING

    def _sleep(self, seconds):
        # Cooperative sleep; lets other greenlets run.
        gevent.sleep(seconds)

    def _check_stop_condition(self):
        if self.user._state == LOCUST_STATE_STOPPING:
            raise StopUser()

    def interrupt(self, reschedule=True):
        """
        Interrupt the TaskSet and hand over execution control back to the parent TaskSet.
        If *reschedule* is True (default), the parent User will immediately re-schedule,
        and execute, a new task.
        """
        raise InterruptTaskSet(reschedule)

    @property
    def client(self):
        """
        Shortcut to the client :py:attr:`client <locust.User.client>` attribute of this TaskSet's :py:class:`User <locust.User>`
        """
        return self.user.client
class DefaultTaskSet(TaskSet):
    """
    Default root TaskSet that runs the tasks declared directly on a User.
    Unlike a regular TaskSet, tasks receive the user instance itself as
    their argument.
    """

    def get_next_task(self):
        declared = self.user.tasks
        if not declared:
            raise Exception("No tasks defined. use the @task decorator or set the tasks property of the User")
        return random.choice(declared)

    def execute_task(self, task):
        is_nested_taskset = hasattr(task, "tasks") and issubclass(task, TaskSet)
        if is_nested_taskset:
            # Instantiate the nested TaskSet against the user and hand over control.
            task(self.user).run()
        else:
            # Plain function task, called with the user instance.
            task(self.user)
|
PypiClean
|
/qmpy-tri-2022.7.21.tar.gz/qmpy-tri-2022.7.21/qmpy/web/static/js/jsmol/j2s/J/jvxl/readers/VolumeDataReader.js
|
// Machine-generated Java2Script (J2S/Clazz) transpilation of the Jmol class
// J.jvxl.readers.VolumeDataReader — a SurfaceReader subclass that owns a
// volumetric voxel grid (origin, step vectors, counts) and fills it with data.
// NOTE(review): generated artifact; prefer fixing the original Java source
// over hand-editing this file.
Clazz.declarePackage ("J.jvxl.readers");
Clazz.load (["J.jvxl.readers.SurfaceReader"], "J.jvxl.readers.VolumeDataReader", ["J.jvxl.data.JvxlCoder", "J.util.ArrayUtil", "$.Logger", "$.SB"], function () {
// Instance fields; defaults are re-applied per construction by Clazz.instantialize.
c$ = Clazz.decorateAsClass (function () {
this.dataType = 0;
this.precalculateVoxelData = false;
this.allowMapData = false;
this.point = null;
this.ptsPerAngstrom = 0;
this.maxGrid = 0;
this.atomDataServer = null;
this.useOriginStepsPoints = false;
Clazz.instantialize (this, arguments);
}, J.jvxl.readers, "VolumeDataReader", J.jvxl.readers.SurfaceReader);
Clazz.makeConstructor (c$, 
function () {
Clazz.superConstructor (this, J.jvxl.readers.VolumeDataReader, []);
});
// initVDR: post-construction initialization from the SurfaceGenerator's params.
$_M(c$, "initVDR", 
function (sg) {
this.initSR (sg);
this.useOriginStepsPoints = (this.params.origin != null && this.params.points != null && this.params.steps != null);
this.dataType = this.params.dataType;
this.precalculateVoxelData = true;
this.allowMapData = true;
}, "J.jvxl.readers.SurfaceGenerator");
// setup: writes the minimal JVXL file header (no title/atom records).
$_M(c$, "setup", 
function (isMapData) {
this.jvxlFileHeaderBuffer =  new J.util.SB ().append ("volume data read from file\n\n");
J.jvxl.data.JvxlCoder.jvxlCreateHeaderWithoutTitleOrAtoms (this.volumeData, this.jvxlFileHeaderBuffer);
}, "~B");
Clazz.overrideMethod (c$, "readVolumeParameters", 
function (isMapData) {
this.setup (isMapData);
this.initializeVolumetricData ();
return true;
}, "~B");
// readVolumeData: delegates to readSurfaceData; any exception is reported and
// converted into a false return.
Clazz.overrideMethod (c$, "readVolumeData", 
function (isMapData) {
try {
this.readSurfaceData (isMapData);
} catch (e) {
if (Clazz.exceptionOf (e, Exception)) {
System.out.println (e.toString ());
return false;
} else {
throw e;
}
}
return true;
}, "~B");
// readVoxelDataIndividually: fills the voxel cube one point at a time via
// getValue(x, y, z, ptyz); skipped entirely when mapping is disallowed.
$_M(c$, "readVoxelDataIndividually", 
function (isMapData) {
if (isMapData && !this.allowMapData) return;
if (!isMapData || this.volumeData.sr != null) {
this.volumeData.setVoxelDataAsArray (this.voxelData = null);
return;
}this.newVoxelDataCube ();
for (var x = 0; x < this.nPointsX; ++x) {
var plane = J.util.ArrayUtil.newFloat2 (this.nPointsY);
this.voxelData[x] = plane;
var ptyz = 0;
for (var y = 0; y < this.nPointsY; ++y) {
var strip = plane[y] = Clazz.newFloatArray (this.nPointsZ, 0);
for (var z = 0; z < this.nPointsZ; ++z, ++ptyz) {
strip[z] = this.getValue (x, y, z, ptyz);
}
}
}
}, "~B");
$_M(c$, "setVolumeData", 
function () {
});
// setVolumeDataParams: configures the grid either from an explicit volumeData
// object or from origin/steps/points parameters; false when neither applies.
$_M(c$, "setVolumeDataParams", 
function () {
if (this.params.volumeData != null) {
this.setVolumeDataV (this.params.volumeData);
return true;
}if (!this.useOriginStepsPoints) {
return false;
}this.volumetricOrigin.setT (this.params.origin);
this.volumetricVectors[0].set (this.params.steps.x, 0, 0);
this.volumetricVectors[1].set (0, this.params.steps.y, 0);
this.volumetricVectors[2].set (0, 0, this.params.steps.z);
this.voxelCounts[0] = Clazz.floatToInt (this.params.points.x);
this.voxelCounts[1] = Clazz.floatToInt (this.params.points.y);
this.voxelCounts[2] = Clazz.floatToInt (this.params.points.z);
if (this.voxelCounts[0] < 1 || this.voxelCounts[1] < 1 || this.voxelCounts[2] < 1) return false;
this.showGridInfo ();
return true;
});
$_M(c$, "showGridInfo", 
function () {
J.util.Logger.info ("grid origin  = " + this.params.origin);
J.util.Logger.info ("grid steps   = " + this.params.steps);
J.util.Logger.info ("grid points  = " + this.params.points);
this.ptTemp.x = this.params.steps.x * this.params.points.x;
this.ptTemp.y = this.params.steps.y * this.params.points.y;
this.ptTemp.z = this.params.steps.z * this.params.points.z;
J.util.Logger.info ("grid lengths = " + this.ptTemp);
this.ptTemp.add (this.params.origin);
J.util.Logger.info ("grid max xyz = " + this.ptTemp);
});
// setVoxelRange: derives grid resolution for one axis from [min, max] and a
// desired points-per-angstrom, clamped to gridMax / minPointsPerAngstrom.
// 3.4028235E38 is Float.MAX_VALUE, used as the "unset" sentinel.
$_M(c$, "setVoxelRange", 
function (index, min, max, ptsPerAngstrom, gridMax, minPointsPerAngstrom) {
var nGrid;
var d;
if (min >= max) {
min = -10;
max = 10;
}var range = max - min;
var resolution = this.params.resolution;
if (resolution != 3.4028235E38) ptsPerAngstrom = resolution;
nGrid = Clazz.doubleToInt (Math.floor (range * ptsPerAngstrom)) + 1;
if (nGrid > gridMax) {
if ((this.dataType & 256) > 0) {
if (resolution == 3.4028235E38) {
if (!this.isQuiet) J.util.Logger.info ("Maximum number of voxels for index=" + index + " exceeded (" + nGrid + ") -- set to " + gridMax);
nGrid = gridMax;
} else {
if (!this.isQuiet) J.util.Logger.info ("Warning -- high number of grid points: " + nGrid);
}} else if (resolution == 3.4028235E38) {
nGrid = gridMax;
}}ptsPerAngstrom = (nGrid - 1) / range;
if (ptsPerAngstrom < minPointsPerAngstrom) {
ptsPerAngstrom = minPointsPerAngstrom;
nGrid = Clazz.doubleToInt (Math.floor (ptsPerAngstrom * range + 1));
ptsPerAngstrom = (nGrid - 1) / range;
}d = this.volumeData.volumetricVectorLengths[index] = 1 / ptsPerAngstrom;
this.voxelCounts[index] = nGrid;
if (!this.isQuiet) J.util.Logger.info ("isosurface resolution for axis " + (index + 1) + " set to " + ptsPerAngstrom + " points/Angstrom; " + this.voxelCounts[index] + " voxels");
switch (index) {
case 0:
this.volumetricVectors[0].set (d, 0, 0);
this.volumetricOrigin.x = min;
break;
case 1:
this.volumetricVectors[1].set (0, d, 0);
this.volumetricOrigin.y = min;
break;
case 2:
this.volumetricVectors[2].set (0, 0, d);
this.volumetricOrigin.z = min;
if (this.isEccentric) this.eccentricityMatrix.transform (this.volumetricOrigin);
if (this.center != null && this.center.x != 3.4028235E38) this.volumetricOrigin.add (this.center);
}
if (this.isEccentric) this.eccentricityMatrix.transform (this.volumetricVectors[index]);
return this.voxelCounts[index];
}, "~N,~N,~N,~N,~N,~N");
Clazz.overrideMethod (c$, "readSurfaceData", 
function (isMapData) {
this.readSurfaceDataVDR (isMapData);
}, "~B");
// readSurfaceDataVDR: progressive readers only record counts; otherwise read
// the cube either all at once (generateCube) or point by point.
$_M(c$, "readSurfaceDataVDR", 
function (isMapData) {
if (this.isProgressive && !isMapData) {
this.nDataPoints = this.volumeData.setVoxelCounts (this.nPointsX, this.nPointsY, this.nPointsZ);
this.voxelData = null;
return;
}if (this.precalculateVoxelData) this.generateCube ();
else this.readVoxelDataIndividually (isMapData);
}, "~B");
$_M(c$, "generateCube", 
function () {
J.util.Logger.info ("data type: user volumeData");
J.util.Logger.info ("voxel grid origin:" + this.volumetricOrigin);
for (var i = 0; i < 3; ++i) J.util.Logger.info ("voxel grid vector:" + this.volumetricVectors[i]);
J.util.Logger.info ("Read " + this.nPointsX + " x " + this.nPointsY + " x " + this.nPointsZ + " data points");
});
Clazz.overrideMethod (c$, "closeReader", 
function () {
});
});
|
PypiClean
|
/android_strings_converter-0.1.3-py3-none-any.whl/android_strings_converter/__main__.py
|
import argparse
from pathlib import Path
import converter as conv
from console_style import ConsoleStyle
def main():
    """CLI entry point: convert an Android strings XML file into another
    format (CSV/XLSX/JSON/ODS/YAML/HTML) and/or upload it to Google Sheets.
    """
    args = _build_parser().parse_args()

    # Google Sheets export needs both the sheet name and the credentials file.
    if args.google_sheets and not args.credentials:
        print(
            f"{ConsoleStyle.RED}Error: You need to pass the path of the "
            f"`service_account.json` file to generate a Sheet.{ConsoleStyle.END}"
        )
        return
    if not args.google_sheets and args.credentials:
        print(
            f"{ConsoleStyle.RED}Error: You need to pass the name of the Sheet to be "
            f"generated.{ConsoleStyle.END}"
        )
        return
    if args.google_sheets and args.credentials:
        conv.to_google_sheets(
            Path(args.input_filepath), args.google_sheets, Path(args.credentials)
        )

    if args.output_filepath:
        input_path = Path(args.input_filepath)
        output_path = Path(args.output_filepath)
        # Dispatch on the output extension instead of an if/elif chain.
        writers = {
            ".csv": conv.to_csv,
            ".xlsx": conv.to_xlsx,
            ".json": conv.to_json,
            ".ods": conv.to_ods,
            ".yaml": conv.to_yaml,
            ".html": conv.to_html,
        }
        writer = writers.get(output_path.suffix)
        if writer is None:
            print(
                f"{ConsoleStyle.YELLOW}File type not supported. Feel free to create "
                f"an issue here (https://github.com/HenestrosaConH/android-strings"
                f"-converter/issues) if you want the file type to be supported by the "
                f"package.{ConsoleStyle.END}"
            )
        else:
            writer(input_path, output_path)
            print(
                f"{ConsoleStyle.GREEN}Data successfully written to {output_path}"
                f"{ConsoleStyle.END}"
            )


def _build_parser():
    """Assemble the argparse parser for the converter CLI."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input_filepath",
        type=str,
        help="Input XML filepath with the Android strings.",
    )
    parser.add_argument(
        "-o",
        "--output-filepath",
        required=False,
        type=str,
        help="Output filepath with the strings properly arranged. It can be a JSON, "
        "CSV, YAML, HTML, XLS, XLSX, Google Sheet and ODS.",
    )
    parser.add_argument(
        "-gs",
        "--google-sheets",
        required=False,
        type=str,
        help="Creates a spreadsheet in Google Sheets with the name passed as argument.",
    )
    parser.add_argument(
        "-c",
        "--credentials",
        required=False,
        type=str,
        help="`service_account.json` filepath. Mandatory if you want to generate a "
        "spreadsheet in your Google account. You can learn how to generate "
        "it in the README.",
    )
    return parser
# Allow running as `python -m android_strings_converter` or as a script.
if __name__ == "__main__":
    main()
|
PypiClean
|
/aiida_wien2k-0.1.1.tar.gz/aiida_wien2k-0.1.1/aiida_wien2k/workflows/scf123_workchain.py
|
from aiida.engine import ToContext, WorkChain
from aiida.orm import Code, Dict, Int, StructureData
from aiida.plugins.factories import CalculationFactory
Wien2kRun123Lapw = CalculationFactory("wien2k-run123_lapw") # plugin entry point
class Wien2kScf123WorkChain(WorkChain):
    """WorkChain that submits a single WIEN2k ``run123_lapw`` SCF calculation
    and inspects its outcome, exposing the parsed SCF results and the output
    structure."""

    @classmethod
    def define(cls, spec):
        """Specify inputs, outputs, and the workchain outline."""
        super().define(spec)
        # input parameters
        spec.input("aiida_structure", valid_type=StructureData, required=True)
        spec.input("code", valid_type=Code, required=True)  # run123_lapw
        spec.input("inpdict", valid_type=Dict, required=True)  # run123_lapw [param]
        spec.input(
            "options", valid_type=Dict, required=True
        )  # parallel options for slurm scheduler
        # calculation steps
        spec.outline(
            cls.run123_lapw, cls.inspect_run123_lapw, cls.result, cls.inspect_warn_all_steps
        )
        # output parameters
        spec.output("workchain_result", valid_type=Dict)
        spec.output(
            "aiida_structure_out",
            valid_type=StructureData,
            required=True,
            help="AiiDA output structure",
        )
        # exit codes
        spec.exit_code(300, "WARNING", "There were warning messages during calculation steps")
        spec.exit_code(400, "ERROR", "There was a terminal error in one of calculation steps")

    def run123_lapw(self):
        """Run SCF calculation."""
        # Submit the calculation and park the node in the context for the
        # following inspection steps.
        result = self.submit(
            Wien2kRun123Lapw,
            aiida_structure=self.inputs.aiida_structure,
            parameters=self.inputs.inpdict,
            code=self.inputs.code,
            metadata={"options": self.inputs.options.get_dict()},
        )
        return ToContext(node=result)

    def inspect_run123_lapw(self):
        """Inspect results of run123_lapw"""
        # is_excepted means the process raised, as opposed to finishing with
        # a nonzero exit status (handled later in inspect_warn_all_steps).
        if self.ctx.node.is_excepted:
            return self.exit_codes.ERROR  # error during calc. steps
        return

    def result(self):
        """Parse the result."""
        # Declaring the output
        self.out("workchain_result", self.ctx.node.outputs.scf_grep)
        self.out("aiida_structure_out", self.ctx.node.outputs.aiida_structure_out)
        return

    def inspect_warn_all_steps(self):
        """Check warnings in all calculations and set the exit code accordingly"""
        # Single-step loop kept for symmetry with multi-step workchains.
        for step in [self.ctx.node]:
            if not step.is_finished_ok:
                if step.exit_status == 305:
                    return self.exit_codes.WARNING  # warnings during calc. steps
                elif step.exit_status >= 400:
                    return self.exit_codes.ERROR  # error during calc. steps
        return
|
PypiClean
|
/efficientteacher-1.0.0.tar.gz/efficientteacher-1.0.0/utils/loggers/wandb/wandb_utils.py
|
import logging
import os
import sys
from contextlib import contextmanager
from pathlib import Path
import pkg_resources as pkg
import yaml
from tqdm import tqdm
FILE = Path(__file__).resolve()
ROOT = FILE.parents[3] # YOLOv5 root directory
if str(ROOT) not in sys.path:
sys.path.append(str(ROOT)) # add ROOT to PATH
from utils.datasets import LoadImagesAndLabels
from utils.datasets import img2label_paths
from utils.general import check_dataset, check_file
try:
import wandb
assert hasattr(wandb, '__version__') # verify package import not local dir
except (ImportError, AssertionError):
wandb = None
# Distributed (DDP) process rank taken from the environment; -1 when unset.
RANK = int(os.getenv('RANK', -1))
# URI scheme marking datasets/models hosted as W&B artifacts.
WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'
def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX):
    """Cut *prefix*'s length off the front of *from_string*.

    NOTE(review): no check that the string actually starts with the prefix —
    callers are expected to have verified that; preserved from original.
    """
    cut = len(prefix)
    return from_string[cut:]
def check_wandb_config_file(data_config_file):
    """Return a `_wandb.`-suffixed sibling of the data config when one exists
    on disk, otherwise the original path unchanged."""
    # e.g. "data.yaml" -> "data_wandb.yaml" (no-op when there is no extension)
    candidate = '_wandb.'.join(data_config_file.rsplit('.', 1))
    return candidate if Path(candidate).is_file() else data_config_file
def check_wandb_dataset(data_file):
    """Load the dataset yaml and return it as-is when train or val points at a
    W&B artifact; otherwise defer to the regular check_dataset()."""

    def _is_artifact(entry):
        return isinstance(entry, str) and entry.startswith(WANDB_ARTIFACT_PREFIX)

    if check_file(data_file) and data_file.endswith('.yaml'):
        with open(data_file, errors='ignore') as f:
            data_dict = yaml.safe_load(f)
        if _is_artifact(data_dict['train']) or _is_artifact(data_dict['val']):
            # Artifact-backed dataset: skip the local-path validation.
            return data_dict
    return check_dataset(data_file)
def get_run_info(run_path):
    """Split a ``wandb-artifact://entity/project/run_id`` path into
    (entity, project, run_id, model_artifact_name)."""
    path = Path(remove_prefix(run_path, WANDB_ARTIFACT_PREFIX))
    run_id = path.stem
    project = path.parent.stem
    entity = path.parent.parent.stem
    return entity, project, run_id, f'run_{run_id}_model'
def check_wandb_resume(opt):
    """Return True when opt.resume points at a W&B model artifact (downloading
    the checkpoint on non-zero DDP ranks), None otherwise."""
    if RANK not in [-1, 0]:
        process_wandb_config_ddp_mode(opt)
    # Guard clauses: only artifact-style resume strings are handled here.
    if not isinstance(opt.resume, str):
        return None
    if not opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
        return None
    if RANK not in [-1, 0]:  # For resuming DDP runs
        entity, project, run_id, model_artifact_name = get_run_info(opt.resume)
        api = wandb.Api()
        artifact = api.artifact(entity + '/' + project + '/' + model_artifact_name + ':latest')
        modeldir = artifact.download()
        opt.weights = str(Path(modeldir) / "last.pt")
    return True
def process_wandb_config_ddp_mode(opt):
    """For non-zero DDP ranks: download any W&B-artifact train/val datasets
    locally, rewrite their paths, and point opt.data at a local yaml copy."""
    with open(check_file(opt.data), errors='ignore') as f:
        data_dict = yaml.safe_load(f)  # data dict
    train_dir = val_dir = None

    def _pull(entry):
        # Download the artifact and return (local dir, local images path).
        api = wandb.Api()
        artifact = api.artifact(remove_prefix(entry) + ':' + opt.artifact_alias)
        local = artifact.download()
        return local, str(Path(local) / 'data/images/')

    if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX):
        train_dir, data_dict['train'] = _pull(data_dict['train'])
    if isinstance(data_dict['val'], str) and data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX):
        val_dir, data_dict['val'] = _pull(data_dict['val'])
    if train_dir or val_dir:
        # NOTE(review): preserved from original — if only `train` is an
        # artifact, Path(val_dir) is Path(None) and raises; confirm upstream.
        ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml')
        with open(ddp_data_path, 'w') as f:
            yaml.safe_dump(data_dict, f)
        opt.data = ddp_data_path
class WandbLogger():
"""Log training runs, datasets, models, and predictions to Weights & Biases.
This logger sends information to W&B at wandb.ai. By default, this information
includes hyperparameters, system configuration and metrics, model metrics,
and basic data metrics and analyses.
By providing additional command line arguments to train.py, datasets,
models and predictions can also be logged.
For more on how this logger is used, see the Weights & Biases documentation:
https://docs.wandb.com/guides/integrations/yolov5
"""
def __init__(self, opt, run_id=None, job_type='Training'):
"""
- Initialize WandbLogger instance
- Upload dataset if opt.upload_dataset is True
- Setup trainig processes if job_type is 'Training'
arguments:
opt (namespace) -- Commandline arguments for this run
run_id (str) -- Run ID of W&B run to be resumed
job_type (str) -- To set the job_type for this run
"""
# Pre-training routine --
self.job_type = job_type
self.wandb, self.wandb_run = wandb, None if not wandb else wandb.run
self.val_artifact, self.train_artifact = None, None
self.train_artifact_path, self.val_artifact_path = None, None
self.result_artifact = None
self.val_table, self.result_table = None, None
self.bbox_media_panel_images = []
self.val_table_path_map = None
self.max_imgs_to_log = 16
self.wandb_artifact_data_dict = None
self.data_dict = None
# It's more elegant to stick to 1 wandb.init call, but useful config data is overwritten in the WandbLogger's wandb.init call
if isinstance(opt.resume, str): # checks resume from artifact
if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
entity, project, run_id, model_artifact_name = get_run_info(opt.resume)
model_artifact_name = WANDB_ARTIFACT_PREFIX + model_artifact_name
assert wandb, 'install wandb to resume wandb runs'
# Resume wandb-artifact:// runs here| workaround for not overwriting wandb.config
self.wandb_run = wandb.init(id=run_id,
project=project,
entity=entity,
resume='allow',
allow_val_change=True)
opt.resume = model_artifact_name
elif self.wandb:
self.wandb_run = wandb.init(config=opt,
resume="allow",
project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem,
entity=opt.entity,
name=opt.name if opt.name != 'exp' else None,
job_type=job_type,
id=run_id,
allow_val_change=True) if not wandb.run else wandb.run
if self.wandb_run:
if self.job_type == 'Training':
if opt.upload_dataset:
if not opt.resume:
self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt)
if opt.resume:
# resume from artifact
if isinstance(opt.resume, str) and opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
self.data_dict = dict(self.wandb_run.config.data_dict)
else: # local resume
self.data_dict = check_wandb_dataset(opt.Dataset.data_name)
else:
self.data_dict = {}
self.data_dict['train'] = opt.Dataset.train
self.data_dict['val'] = opt.Dataset.val
self.data_dict['nc'] = opt.Dataset.nc
self.data_dict['names'] = opt.Dataset.names
# self.data_dict = check_wandb_dataset(opt.Dataset.data_name)
self.wandb_artifact_data_dict = self.wandb_artifact_data_dict or self.data_dict
# write data_dict to config. useful for resuming from artifacts. Do this only when not resuming.
self.wandb_run.config.update({'data_dict': self.wandb_artifact_data_dict},
allow_val_change=True)
self.setup_training(opt)
if self.job_type == 'Dataset Creation':
self.data_dict = self.check_and_upload_dataset(opt)
def check_and_upload_dataset(self, opt):
"""
Check if the dataset format is compatible and upload it as W&B artifact
arguments:
opt (namespace)-- Commandline arguments for current run
returns:
Updated dataset info dictionary where local dataset paths are replaced by WAND_ARFACT_PREFIX links.
"""
assert wandb, 'Install wandb to upload dataset'
config_path = self.log_dataset_artifact(opt.data,
opt.single_cls,
'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem)
print("Created dataset config file ", config_path)
with open(config_path, errors='ignore') as f:
wandb_data_dict = yaml.safe_load(f)
return wandb_data_dict
    def setup_training(self, opt):
        """
        Setup the necessary processes for training YOLO models:
          - Attempt to download model checkpoint and dataset artifacts if opt.resume stats with WANDB_ARTIFACT_PREFIX
          - Update data_dict, to contain info of previous run if resumed and the paths of dataset artifact if downloaded
          - Setup log_dict, initialize bbox_interval
        arguments:
        opt (namespace) -- commandline arguments for this run
        """
        self.log_dict, self.current_epoch = {}, 0
        self.bbox_interval = opt.bbox_interval
        if isinstance(opt.resume, str):
            # Resuming from an artifact: fetch the checkpoint and restore the
            # training options that were stored in the resumed run's config
            modeldir, _ = self.download_model_artifact(opt)
            if modeldir:
                self.weights = Path(modeldir) / "last.pt"
                config = self.wandb_run.config
                opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp = str(
                    self.weights), config.save_period, config.batch_size, config.bbox_interval, config.epochs, \
                    config.hyp
        data_dict = self.data_dict
        if self.val_artifact is None:  # If --upload_dataset is set, use the existing artifact, don't download
            self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'),
                                                                                           opt.artifact_alias)
            self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'),
                                                                                       opt.artifact_alias)
        # Point the split paths at the locally-downloaded artifact contents
        if self.train_artifact_path is not None:
            train_path = Path(self.train_artifact_path) / 'data/images/'
            data_dict['train'] = str(train_path)
        if self.val_artifact_path is not None:
            val_path = Path(self.val_artifact_path) / 'data/images/'
            data_dict['val'] = str(val_path)
        if self.val_artifact is not None:
            # Prepare the per-epoch evaluation artifact and prediction table
            self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")
            self.result_table = wandb.Table(["epoch", "id", "ground truth", "prediction", "avg_confidence"])
            self.val_table = self.val_artifact.get("val")
            if self.val_table_path_map is None:
                self.map_val_table_path()
        if opt.bbox_interval == -1:
            # Default: log bbox debug images ~10 times over the run (every epoch for short runs)
            # self.bbox_interval = opt.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1
            self.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1
        train_from_artifact = self.train_artifact_path is not None and self.val_artifact_path is not None
        # Update the the data_dict to point to local artifacts dir
        if train_from_artifact:
            self.data_dict = data_dict
def download_dataset_artifact(self, path, alias):
"""
download the model checkpoint artifact if the path starts with WANDB_ARTIFACT_PREFIX
arguments:
path -- path of the dataset to be used for training
alias (str)-- alias of the artifact to be download/used for training
returns:
(str, wandb.Artifact) -- path of the downladed dataset and it's corresponding artifact object if dataset
is found otherwise returns (None, None)
"""
if isinstance(path, str) and path.startswith(WANDB_ARTIFACT_PREFIX):
artifact_path = Path(remove_prefix(path, WANDB_ARTIFACT_PREFIX) + ":" + alias)
dataset_artifact = wandb.use_artifact(artifact_path.as_posix().replace("\\", "/"))
assert dataset_artifact is not None, "'Error: W&B dataset artifact doesn\'t exist'"
datadir = dataset_artifact.download()
return datadir, dataset_artifact
return None, None
    def download_model_artifact(self, opt):
        """
        download the model checkpoint artifact if the resume path starts with WANDB_ARTIFACT_PREFIX
        arguments:
        opt (namespace) -- Commandline arguments for this run
        returns:
        (str, wandb.Artifact) -- local download dir and the artifact object, or
        (None, None) when opt.resume is not an artifact path
        """
        if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
            # Always resume from the most recent checkpoint of the run
            model_artifact = wandb.use_artifact(remove_prefix(opt.resume, WANDB_ARTIFACT_PREFIX) + ":latest")
            assert model_artifact is not None, 'Error: W&B model artifact doesn\'t exist'
            modeldir = model_artifact.download()
            # NOTE(review): epochs_trained is fetched but never used below --
            # confirm whether it was meant to feed the finished-run check
            epochs_trained = model_artifact.metadata.get('epochs_trained')
            total_epochs = model_artifact.metadata.get('total_epochs')
            # A missing 'total_epochs' in the metadata marks the run as finished
            is_finished = total_epochs is None
            assert not is_finished, 'training is finished, can only resume incomplete runs.'
            return modeldir, model_artifact
        return None, None
def log_model(self, path, opt, epoch, fitness_score, best_model=False):
"""
Log the model checkpoint as W&B artifact
arguments:
path (Path) -- Path of directory containing the checkpoints
opt (namespace) -- Command line arguments for this run
epoch (int) -- Current epoch number
fitness_score (float) -- fitness score for current epoch
best_model (boolean) -- Boolean representing if the current checkpoint is the best yet.
"""
model_artifact = wandb.Artifact('run_' + wandb.run.id + '_model', type='model', metadata={
'original_url': str(path),
'epochs_trained': epoch + 1,
'save period': opt.save_period,
'project': opt.project,
'total_epochs': opt.epochs,
'fitness_score': fitness_score
})
model_artifact.add_file(str(path / 'last.pt'), name='last.pt')
wandb.log_artifact(model_artifact,
aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else ''])
print("Saving model artifact on epoch ", epoch + 1)
    def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False):
        """
        Log the dataset as W&B artifact and return the new data file with W&B links
        arguments:
        data_file (str) -- the .yaml file with information about the dataset like - path, classes etc.
        single_cls (boolean) -- train multi-class data as single-class
        project (str) -- project name. Used to construct the artifact path
        overwrite_config (boolean) -- overwrites the data.yaml file if set to true otherwise creates a new
        file with _wandb postfix. Eg -> data_wandb.yaml
        returns:
        the new .yaml file with artifact links. it can be used to start training directly from artifacts
        """
        self.data_dict = check_dataset(data_file)  # parse and check
        data = dict(self.data_dict)
        # NOTE(review): nc is computed but unused in this method
        nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names'])
        names = {k: v for k, v in enumerate(names)}  # to index dictionary
        # Build one artifact per split; splits absent from the yaml are skipped
        self.train_artifact = self.create_dataset_table(LoadImagesAndLabels(
            data['train'], rect=True, batch_size=1), names, name='train') if data.get('train') else None
        self.val_artifact = self.create_dataset_table(LoadImagesAndLabels(
            data['val'], rect=True, batch_size=1), names, name='val') if data.get('val') else None
        # Rewrite local split paths as artifact references for the emitted yaml
        if data.get('train'):
            data['train'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'train')
        if data.get('val'):
            data['val'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'val')
        path = Path(data_file).stem
        path = (path if overwrite_config else path + '_wandb') + '.yaml'  # updated data.yaml path
        data.pop('download', None)
        data.pop('path', None)
        with open(path, 'w') as f:
            yaml.safe_dump(data, f)
        if self.job_type == 'Training':  # builds correct artifact pipeline graph
            self.wandb_run.use_artifact(self.val_artifact)
            self.wandb_run.use_artifact(self.train_artifact)
            # wait() blocks until the artifact upload has committed, so the
            # table can be fetched back immediately below
            self.val_artifact.wait()
            self.val_table = self.val_artifact.get('val')
            self.map_val_table_path()
        else:
            self.wandb_run.log_artifact(self.train_artifact)
            self.wandb_run.log_artifact(self.val_artifact)
        return path
def map_val_table_path(self):
"""
Map the validation dataset Table like name of file -> it's id in the W&B Table.
Useful for - referencing artifacts for evaluation.
"""
self.val_table_path_map = {}
print("Mapping dataset")
for i, data in enumerate(tqdm(self.val_table.data)):
self.val_table_path_map[data[3]] = data[0]
    def create_dataset_table(self, dataset, class_to_id, name='dataset'):
        """
        Create and return W&B artifact containing W&B Table of the dataset.
        arguments:
        dataset (LoadImagesAndLabels) -- instance of LoadImagesAndLabels class used to iterate over the data to build Table
        class_to_id (dict(int, str)) -- hash map that maps class ids to labels
        name (str) -- name of the artifact
        returns:
        dataset artifact to be logged or used
        """
        # TODO: Explore multiprocessing to slpit this loop parallely| This is essential for speeding up the the logging
        artifact = wandb.Artifact(name=name, type="dataset")
        # When the dataset root is a single directory, add it wholesale;
        # otherwise fall back to adding the listed image files one by one
        img_files = tqdm([dataset.path]) if isinstance(dataset.path, str) and Path(dataset.path).is_dir() else None
        img_files = tqdm(dataset.img_files) if not img_files else img_files
        for img_file in img_files:
            if Path(img_file).is_dir():
                artifact.add_dir(img_file, name='data/images')
                # Mirror YOLO's images -> labels directory naming convention
                labels_path = 'labels'.join(dataset.path.rsplit('images', 1))
                artifact.add_dir(labels_path, name='data/labels')
            else:
                artifact.add_file(img_file, name='data/images/' + Path(img_file).name)
                label_file = Path(img2label_paths([img_file])[0])
                artifact.add_file(str(label_file),
                                  name='data/labels/' + label_file.name) if label_file.exists() else None
        table = wandb.Table(columns=["id", "train_image", "Classes", "name"])
        # NOTE(review): this comprehension rebinds 'name' (the artifact-name
        # parameter) to the last class label, which artifact.add(table, name)
        # below then uses -- confirm that is intended
        class_set = wandb.Classes([{'id': id, 'name': name} for id, name in class_to_id.items()])
        # One table row per image: index, annotated image, class list, file name
        for si, (img, labels, paths, shapes) in enumerate(tqdm(dataset)):
            box_data, img_classes = [], {}
            for cls, *xywh in labels[:, 1:].tolist():
                cls = int(cls)
                box_data.append({"position": {"middle": [xywh[0], xywh[1]], "width": xywh[2], "height": xywh[3]},
                                 "class_id": cls,
                                 "box_caption": "%s" % (class_to_id[cls])})
                img_classes[cls] = class_to_id[cls]
            boxes = {"ground_truth": {"box_data": box_data, "class_labels": class_to_id}}  # inference-space
            table.add_data(si, wandb.Image(paths, classes=class_set, boxes=boxes), list(img_classes.values()),
                           Path(paths).name)
        artifact.add(table, name)
        return artifact
def log_training_progress(self, predn, path, names):
"""
Build evaluation Table. Uses reference from validation dataset table.
arguments:
predn (list): list of predictions in the native space in the format - [xmin, ymin, xmax, ymax, confidence, class]
path (str): local path of the current evaluation image
names (dict(int, str)): hash map that maps class ids to labels
"""
class_set = wandb.Classes([{'id': id, 'name': name} for id, name in names.items()])
box_data = []
total_conf = 0
for *xyxy, conf, cls in predn.tolist():
if conf >= 0.25:
box_data.append(
{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
"class_id": int(cls),
"box_caption": "%s %.3f" % (names[cls], conf),
"scores": {"class_score": conf},
"domain": "pixel"})
total_conf = total_conf + conf
boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space
id = self.val_table_path_map[Path(path).name]
self.result_table.add_data(self.current_epoch,
id,
self.val_table.data[id][1],
wandb.Image(self.val_table.data[id][1], boxes=boxes, classes=class_set),
total_conf / max(1, len(box_data))
)
    def val_one_image(self, pred, predn, path, names, im):
        """
        Log validation data for one image. updates the result Table if validation dataset is uploaded and log bbox media panel
        arguments:
        pred (list): list of scaled predictions in the format - [xmin, ymin, xmax, ymax, confidence, class]
        predn (list): list of predictions in the native space - [xmin, ymin, xmax, ymax, confidence, class]
        path (str): local path of the current evaluation image
        names (dict(int, str)): hash map that maps class ids to labels
        im: the image being evaluated (passed to wandb.Image for the media panel)
        """
        if self.val_table and self.result_table:  # Log Table if Val dataset is uploaded as artifact
            self.log_training_progress(predn, path, names)
        # Collect bbox debug images for the media panel, rate-limited by
        # max_imgs_to_log per epoch and bbox_interval across epochs
        if len(self.bbox_media_panel_images) < self.max_imgs_to_log and self.current_epoch > 0:
            if self.current_epoch % self.bbox_interval == 0:
                box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
                             "class_id": int(cls),
                             "box_caption": "%s %.3f" % (names[cls], conf),
                             "scores": {"class_score": conf},
                             "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()]
                boxes = {"predictions": {"box_data": box_data, "class_labels": names}}  # inference-space
                self.bbox_media_panel_images.append(wandb.Image(im, boxes=boxes, caption=path.name))
def log(self, log_dict):
"""
save the metrics to the logging dictionary
arguments:
log_dict (Dict) -- metrics/media to be logged in current step
"""
if self.wandb_run:
for key, value in log_dict.items():
self.log_dict[key] = value
    def end_epoch(self, best_result=False):
        """
        commit the log_dict, model artifacts and Tables to W&B and flush the log_dict.
        arguments:
        best_result (boolean): Boolean representing if the result of this evaluation is best or not
        """
        if self.wandb_run:
            # Suppress logging noise emitted by wandb while committing the step
            with all_logging_disabled():
                if self.bbox_media_panel_images:
                    self.log_dict["Bounding Box Debugger/Images"] = self.bbox_media_panel_images
                wandb.log(self.log_dict)
                # Reset the per-epoch accumulators after the commit
                self.log_dict = {}
                self.bbox_media_panel_images = []
            if self.result_artifact:
                self.result_artifact.add(self.result_table, 'result')
                wandb.log_artifact(self.result_artifact, aliases=['latest', 'last', 'epoch ' + str(self.current_epoch),
                                                                  ('best' if best_result else '')])
                wandb.log({"evaluation": self.result_table})
                # Start a fresh table/artifact for the next epoch's evaluation
                self.result_table = wandb.Table(["epoch", "id", "ground truth", "prediction", "avg_confidence"])
                self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")
def finish_run(self):
"""
Log metrics if any and finish the current W&B run
"""
if self.wandb_run:
if self.log_dict:
with all_logging_disabled():
wandb.log(self.log_dict)
wandb.run.finish()
@contextmanager
def all_logging_disabled(highest_level=logging.CRITICAL):
    """Temporarily suppress all logging records at or below *highest_level*.

    Adapted from https://gist.github.com/simon-weber/7853144. The previous
    global disable threshold is restored on exit, even if the body raises.

    :param highest_level: the maximum logging level in use; only needs to be
        changed if a custom level greater than CRITICAL is defined.
    """
    saved_threshold = logging.root.manager.disable
    logging.disable(highest_level)
    try:
        yield
    finally:
        # restore whatever disable threshold was active before entering
        logging.disable(saved_threshold)
|
PypiClean
|
/novartis-pisces-0.1.5.4.tar.gz/novartis-pisces-0.1.5.4/pisces/submit.py
|
from signal import signal, SIGINT, SIGTERM
from subprocess import Popen, PIPE, call
from tqdm import tqdm
import logging
import sys
import os
import stat
import pickle
import pandas as pd
import time
from pkg_resources import get_distribution
# Resolve the installed package version once at import time (used in log output).
__version__ = get_distribution("novartis_pisces").version
def _submit_drmaa(args, unknown_args):
    """Submit multiple 'pisces run' jobs locally or to the cluster using libdrmaa.

    arguments:
        args -- parsed command line namespace (metadata, config, max_memory,
                runtime, local, dry_run, debug, workdir)
        unknown_args -- extra arguments passed through to 'pisces run'
    """
    # Pass the trailing options by keyword: submit_local declares
    # (dry_run, working_dir, debug) while submit_drmaa declares
    # (dry_run, debug, working_dir), so the previous positional call
    # silently swapped 'debug' and the working directory for local runs.
    submit = submit_local if args.local else submit_drmaa
    submit(args.metadata, args.config, args.max_memory, args.runtime, unknown_args,
           dry_run=args.dry_run, debug=args.debug, working_dir=args.workdir)
def create_job_scripts(sample_metadata, config, max_memory, max_runtime, metadata_file, unknown_args, working_dir, summarize=True):
    """Write one executable 'pisces run' shell script per sample and yield
    (script_path, parsed_run_args, unknown_run_args) for each sample.

    arguments:
        sample_metadata (DataFrame) -- metadata rows; requires SampleID and
            Directory columns plus Fastq1/Fastq2 or SRA
        config -- path of the pisces config file
        max_memory, max_runtime -- resource limits (not used here; consumed by
            the submitters that size the jobs)
        metadata_file (str) -- path of the metadata csv, embedded in the
            summary script and used in error messages
        unknown_args (list) -- extra arguments appended to each run command
        working_dir -- directory holding the '.pisces' lock dir (cwd if None)
        summarize (bool) -- additionally write pisces_summarize.sh
    """
    import pisces.cli
    logging.info('Creating job templates.')
    if working_dir:
        lock_file = os.path.join(working_dir, '.pisces')
    else:
        lock_file = os.path.join(os.getcwd(), '.pisces')
    for index, row in sample_metadata.iterrows():
        sample_id = row['SampleID']
        fq1 = None
        fq2 = None
        sra = None
        # Fastq1 and SRA are alternative input columns; a missing column
        # raises KeyError on the row Series (narrowed from a bare except)
        try:
            fq1 = row['Fastq1']
        except KeyError:
            sra = row['SRA']
        try:
            fq2 = row['Fastq2']
        except KeyError:
            fq2 = None
        # Bug fix: the original formatted these messages with the undefined
        # name 'metadata.filename' (the parameter is metadata_file) and with
        # 'row + 1' (row is a Series; the 1-based row number is index + 1),
        # which raised NameError/TypeError instead of the validation error.
        if fq2 and not fq1:
            raise RuntimeError("Fastq1 column is mandatory when Fastq2 is populated at row {row} in {csv}.".format(
                row=str(index + 1), csv=metadata_file))
        elif sra and (fq1 or fq2):
            raise RuntimeError("SRA column cannot be populated when Fastq1 or Fastq2 are used at row {row} in {csv}.".format(
                row=str(index + 1), csv=metadata_file))
        output_dir = row['Directory']
        # Assemble the 'pisces run' command line for this sample
        cmd = ['--config', config, 'run']
        if sra:
            cmd = cmd + ['-sra']
            cmd.extend(sra.split(';'))
        if fq1:
            cmd = cmd + ['-fq1']
            cmd.extend(fq1.split(';'))
        if fq2:
            cmd = cmd + ['-fq2']
            cmd.extend(fq2.split(';'))
        cmd = cmd + ['-o', output_dir]
        cmd = cmd + ['-n', sample_id] + unknown_args
        logging.info('command: %s', cmd)
        logging.info("Created job template for %s", sample_id)
        run_parser = pisces.cli.create_parser()
        run_args, run_unknown_args = run_parser.parse_known_args(cmd)
        submit_script_path = os.path.join(
            lock_file, "pisces_" + row['SampleID'] + '.sh')
        with open(submit_script_path, 'w') as submit_script:
            submit_script.write("#!/bin/sh\n")
            submit_script.write("cd %s\n" % os.getcwd())
            submit_script.write("source %s/env.sh\n" % lock_file)
            submit_script.write("/usr/bin/env python " + pisces.cli.__file__ + ' ' + ' '.join(cmd))
        # Mark the script executable for the scheduler
        st = os.stat(submit_script_path)
        os.chmod(submit_script_path, st.st_mode | stat.S_IEXEC)
        logging.info("Created script for %s", sample_id)
        yield (submit_script_path, run_args, run_unknown_args)
    if summarize:
        # Summary script runs summarize-expression followed by summarize-qc
        submit_script_path = os.path.join(lock_file, "pisces_summarize.sh")
        with open(submit_script_path, 'w') as submit_script:
            submit_script.write("#!/bin/sh\n")
            submit_script.write("cd %s\n" % os.getcwd())
            submit_script.write("source %s/env.sh\n" % lock_file)
            cmd = ['--config', config, 'summarize-expression', '-m', metadata_file]
            submit_script.write("/usr/bin/env python " + pisces.cli.__file__ + ' ' + ' '.join(cmd) + '\n')
            cmd = ['--config', config, 'summarize-qc', '-m', metadata_file]
            submit_script.write("/usr/bin/env python " + pisces.cli.__file__ + ' ' + ' '.join(cmd) + '\n')
        st = os.stat(submit_script_path)
        os.chmod(submit_script_path, st.st_mode | stat.S_IEXEC)
        logging.info("Created script for experiment summary")
def submit_local(metadata, config, max_memory, max_runtime, unknown_args, dry_run=False, working_dir=None, debug=False):
    """Run each generated 'pisces run' job script sequentially on the local host.

    arguments:
        metadata -- path (or open handle) of the metadata csv
        config -- path of the pisces config file
        max_memory, max_runtime -- resource limits forwarded to script creation
        unknown_args -- extra args forwarded to 'pisces run'
        dry_run, debug -- accepted for interface parity with submit_drmaa;
            not used by the local runner
        working_dir -- directory holding the '.pisces' lock dir (cwd if None)
    """
    sample_metadata = pd.read_csv(metadata)
    scripts = [s[0] for s in create_job_scripts(sample_metadata, config, max_memory, max_runtime, metadata, unknown_args, working_dir)]
    with tqdm(total=len(scripts), leave=False, unit='jobs run', unit_scale=True, position=0) as jobs_run:
        for script in scripts:
            # Bug fix: Popen(['/bin/sh', script], shell=True) ran
            # "sh -c '/bin/sh' <script>", making the script path argv[0] of an
            # interactive shell so the job script itself never executed.
            # Passing the argument list without shell=True runs it directly.
            p = Popen(['/bin/sh', script])
            p.wait()
            jobs_run.update(1)
def submit_drmaa(metadata, config, max_memory, max_runtime, unknown_args, dry_run=False, debug=False, working_dir=None, blocking=True, summarize=True):
    """Submit one 'pisces run' job per sample to a DRMAA-compatible scheduler.

    args:
        metadata - open filehandle or bytes buffer of the metadata csv
        config - dictionary of config values from pisces.cli
        max_memory - total memory (GB) per job; divided by the job's threads
        max_runtime - h_rt runtime limit passed to the scheduler
        unknown_args - "pisces run arguments as a String"
        dry_run - only print the equivalent qsub commands, then exit
        debug - enable DEBUG-level logging
        working_dir - directory holding the '.pisces' lock dir (cwd if None)
        blocking - install a SIGINT handler and poll jobs until completion
        summarize - also submit a summary job held on all sample jobs
    """
    import drmaa
    # jobid -> last observed drmaa.JobState; pickled into .pisces/jobs so an
    # interrupted 'pisces submit' can be resumed from the same directory
    job_tracker = {}
    if not working_dir:
        lock_file = os.path.join(os.getcwd(), '.pisces')
    else:
        lock_file = os.path.join(working_dir, '.pisces')
    def delete_jobs(signal=None, frame=None):
        # SIGINT handler: interactively offer to terminate all tracked jobs
        delete = ''
        while delete.upper() not in ('Y', 'N'):
            delete = input("Delete jobs? (y/n)")
        if delete.upper() == 'Y':
            logging.info("Deleting jobs...")
            try:
                for jobid in job_tracker.keys():
                    s.control(jobid, drmaa.JobControlAction.TERMINATE)
                # NOTE(review): 'jobids' is undefined in this scope, so this
                # logging call raises NameError even after successful
                # termination and falls into the handler below -- presumably
                # len(job_tracker) was intended
                logging.info("Deleted %s jobs.", len(jobids))
            except NameError:
                logging.info("Jobs could not be deleted.")
        elif delete.upper() == 'N':
            logging.info(
                "You may check on the status of your jobs by running 'pisces submit' in this directory."
            )
            # Persist the tracker so a later invocation can resume monitoring
            with open(os.path.join(lock_file, 'jobs'), 'wb') as jobs:
                pickle.dump(job_tracker, jobs)
        sys.exit()
    def submit_jobs(s, sample_metadata, summarize):
        # Create one job template per sample script and dispatch it via DRMAA
        for submit_script_path, run_args, run_unknown_args in create_job_scripts(sample_metadata, config, max_memory, max_runtime, metadata.name, unknown_args, working_dir):
            if dry_run:
                # Print the equivalent qsub command instead of submitting
                cmd = "qsub -o {out} -cwd -j y -l h_rt={runtime},m_mem_free={memory}G -pe smp {threads} -binding linear:{threads} {script}".format(
                    out=os.path.join(lock_file, "logs"),
                    runtime=str(max_runtime),
                    memory=str(max_memory // run_args.threads),
                    threads=str(run_args.threads),
                    script=submit_script_path)
                logging.info(cmd)
            else:
                jt = s.createJobTemplate()
                remote_cmd = os.path.join(submit_script_path)
                jt.remoteCommand = remote_cmd
                # Per-thread memory: the scheduler multiplies m_mem_free by the
                # smp slot count, so the total stays at max_memory
                native_spec = ' -l h_rt={runtime},m_mem_free={memory}G -pe smp {threads} -binding linear:{threads}'
                native_spec = native_spec.format(
                    runtime=str(max_runtime),
                    memory=str(max_memory // run_args.threads),
                    threads=str(run_args.threads))
                # NOTE(review): the second .format() call is a no-op on the
                # already-formatted string -- confirm it is intentional
                jt.nativeSpecification = native_spec.format()
                jt.workingDirectory = os.getcwd()
                jt.joinFiles = True
                jt.outputPath = os.path.join(":" + lock_file, "logs", run_args.name + ".log")
                jobid = s.runJob(jt)
                s.deleteJobTemplate(jt)
                job_tracker[jobid] = drmaa.JobState.UNDETERMINED
        if summarize:
            # Submit the summary job, held (-hold_jid) until all sample jobs
            # finish. NOTE(review): run_args here is the loop variable from the
            # final iteration above -- confirm sizing the summary job from the
            # last sample's thread count is intended.
            submit_script_path = os.path.join(lock_file, "pisces_summarize.sh")
            jt = s.createJobTemplate()
            remote_cmd = os.path.join(submit_script_path)
            jt.remoteCommand = remote_cmd
            native_spec = ' -l h_rt={runtime},m_mem_free={memory}G -pe smp {threads} -binding linear:{threads} -hold_jid {hold_jobs}'
            native_spec = native_spec.format(
                runtime=str(max_runtime),
                memory=str(max_memory // run_args.threads),
                threads=str(run_args.threads),
                hold_jobs=','.join(job_tracker.keys()))
            jt.nativeSpecification = native_spec.format()
            jt.workingDirectory = working_dir
            jt.joinFiles = True
            jt.outputPath = os.path.join(":" + lock_file, "logs", "summarize.log")
            jobid = s.runJob(jt)
            s.deleteJobTemplate(jt)
            job_tracker[jobid] = drmaa.JobState.UNDETERMINED
        if dry_run:
            sys.exit()
    def track_job_progress(job_tracker, s):
        # Poll the scheduler every 10 seconds, updating two progress bars until
        # all jobs reach a terminal state; exits the process with a status code
        with tqdm(
                total=len(job_tracker),
                leave=False,
                unit='jobs run',
                unit_scale=True,
                position=0) as jobs_run:
            with tqdm(
                    total=len(job_tracker),
                    leave=False,
                    unit='jobs finished',
                    unit_scale=True,
                    position=1) as jobs_done:
                run_status = set((drmaa.JobState.DONE, drmaa.JobState.RUNNING,
                                  drmaa.JobState.FAILED))
                done_status = set((drmaa.JobState.DONE, drmaa.JobState.FAILED))
                while True:
                    # Advance the bars by the delta since the last poll
                    jobs_run.update(
                        sum([
                            status in run_status
                            for status in job_tracker.values()
                        ]) - jobs_run.n)
                    jobs_done.update(
                        sum([
                            status in done_status
                            for status in job_tracker.values()
                        ]) - jobs_done.n)
                    for jobid, status in job_tracker.items():
                        if status in done_status:
                            continue
                        elif s.jobStatus(jobid) == drmaa.JobState.DONE:
                            job_tracker[jobid] = drmaa.JobState.DONE
                        elif s.jobStatus(
                                jobid) == drmaa.JobState.QUEUED_ACTIVE:
                            job_tracker[jobid] = drmaa.JobState.QUEUED_ACTIVE
                        elif s.jobStatus(jobid) == drmaa.JobState.RUNNING:
                            job_tracker[jobid] = drmaa.JobState.RUNNING
                        elif s.jobStatus(jobid) == drmaa.JobState.FAILED:
                            job_tracker[jobid] = drmaa.JobState.FAILED
                            logging.error("Job with job id %s failed",
                                          str(jobid))
                        else:
                            job_tracker[jobid] = 'undetermined'
                    if all(status in done_status
                           for status in job_tracker.values()):
                        logging.info("All jobs have finished.")
                        logging.info("Log files are at %s" % os.path.join(
                            lock_file, "logs"))
                        logging.info("To resubmit more jobs you must delete %s"
                                     % lock_file)
                        # Exit code reflects the aggregate outcome
                        if all(status == drmaa.JobState.DONE
                               for status in job_tracker.values()):
                            sys.exit(0)
                        elif any(status == 'undetermined'
                                 for status in job_tracker.values()):
                            sys.exit(
                                "Some jobs may have failed. Please check output files."
                            )
                        else:
                            sys.exit("Some jobs have failed.")
                    time.sleep(10)
    if debug:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(
        level=level,
        format='%(asctime)s %(name)-8s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M')
    logging.info("PISCES version %s", __version__)
    if blocking:
        # Allow Ctrl-C to offer interactive deletion of submitted jobs
        signal(SIGINT, delete_jobs)
    with drmaa.Session() as s:
        if os.path.exists(lock_file):
            # A previous submission exists: reload its job tracker and prune
            # jobs the scheduler no longer knows about
            logging.info("loading existing jobs file from %s" % lock_file)
            with open(os.path.join(lock_file, 'jobs'), 'rb') as jobs:
                job_tracker = pickle.load(jobs)
            # remove non-existant jobs
            jobids = tuple(job_tracker.keys())
            for jobid in jobids:
                try:
                    s.jobStatus(jobid)
                except:
                    job_tracker.pop(jobid)
            #raise RuntimeError(".pisces lock file exists. If you are not running jobs you may safely delete the file and try again.")
        else:
            logging.info("creating .pisces lock file at %s" % lock_file)
            os.makedirs(lock_file)
            # Snapshot the caller's environment so job scripts can recreate it
            call('export -p > %s/env.sh' % lock_file, shell=True)
            sample_metadata = pd.read_csv(metadata)
            # Validate the required metadata columns before creating any jobs
            if not all(
                    col in sample_metadata.columns
                    for col in ('SampleID', 'Directory')):
                raise IOError(
                    "('SampleID', 'Directory') columns are all required in metadata.csv"
                )
            elif not any(
                    col in sample_metadata.columns
                    for col in ('SRA', 'Fastq1', 'Fastq2')):
                raise IOError(
                    "One of ('SRA', 'Fastq1, 'Fastq2') columns are required in metadata.csv"
                )
            submit_jobs(s, sample_metadata, summarize=summarize)
            with open(os.path.join(lock_file, 'jobs'), 'wb') as jobs:
                pickle.dump(job_tracker, jobs)
        if blocking:
            track_job_progress(job_tracker, s)
|
PypiClean
|
/KeralaPyApiV2-2.0.2020.tar.gz/KeralaPyApiV2-2.0.2020/compiler/api/compiler.py
|
import os
import re
import shutil
# Source schema location and output package root for the generated API
HOME = "compiler/api"
DESTINATION = "pyrogram/api"
NOTICE_PATH = "NOTICE"
# "---types---" / "---functions---" section markers in the TL schema
SECTION_RE = re.compile(r"---(\w+)---")
# "// LAYER <n>" comment carrying the schema layer number
LAYER_RE = re.compile(r"//\sLAYER\s(\d+)")
# One combinator line: "name#crc32hex args = ReturnType; // Docs: ..."
COMBINATOR_RE = re.compile(r"^([\w.]+)#([0-9a-f]+)\s(?:.*)=\s([\w<>.]+);(?: // Docs: (.+))?$", re.MULTILINE)
# "name:type" argument pairs on a combinator line
ARGS_RE = re.compile("[^{](\w+):([\w?!.<>#]+)")
# "flags.N?" prefix marking an optional argument; FLAGS_RE_2 also captures the type
FLAGS_RE = re.compile(r"flags\.(\d+)\?")
FLAGS_RE_2 = re.compile(r"flags\.(\d+)\?([\w<>.]+)")
# Presence of a "flags:#" field in a combinator
FLAGS_RE_3 = re.compile(r"flags:#")
# Sized integer types such as int128 / int256
INT_RE = re.compile(r"int(\d+)")
# TL bare/core types that map directly onto Python primitives
core_types = ["int", "long", "int128", "int256", "double", "bytes", "string", "Bool"]
# Lookup tables filled in while compiling: boxed type -> constructors/functions,
# and constructor -> functions that can return it
types_to_constructors = {}
types_to_functions = {}
constructors_to_functions = {}
def get_docstring_arg_type(t: str, is_list: bool = False, is_pyrogram_type: bool = False):
    # Render TL type *t* as the reST type description used in generated docstrings.
    # is_list: True while recursing inside a Vector<...> (affects "either"/"Either" casing)
    # is_pyrogram_type: prefix the type with "pyrogram." before the constructor lookup
    if t in core_types:
        if t == "long":
            return "``int`` ``64-bit``"
        elif "int" in t:
            # int128 / int256 carry their width; a bare "int" maps to 32-bit
            size = INT_RE.match(t)
            return "``int`` ``{}-bit``".format(size.group(1)) if size else "``int`` ``32-bit``"
        elif t == "double":
            return "``float`` ``64-bit``"
        elif t == "string":
            return "``str``"
        else:
            # remaining core types: bytes / Bool
            return "``{}``".format(t.lower())
    elif t == "true":
        return "``bool``"
    elif t == "TLObject" or t == "X":
        return "Any object from :obj:`~pyrogram.api.types`"
    elif t == "!X":
        return "Any method from :obj:`~pyrogram.api.functions`"
    elif t.startswith("Vector"):
        # Recurse on the element type, e.g. Vector<int> -> "List of ``int`` ..."
        return "List of " + get_docstring_arg_type(t.split("<", 1)[1][:-1], True, is_pyrogram_type)
    else:
        if is_pyrogram_type:
            t = "pyrogram." + t
        # A boxed type may have several constructors; link each of them
        t = types_to_constructors.get(t, [t])
        n = len(t) - 1
        t = (("e" if is_list else "E") + "ither " if n else "") + ", ".join(
            ":obj:`{1} <{0}.{1}>`".format(
                "pyrogram.types" if is_pyrogram_type else "pyrogram.api.types",
                i.replace("pyrogram.", "")
            )
            for i in t
        )
        if n:
            # Use "A, B or C" phrasing for the final element
            t = t.split(", ")
            t = ", ".join(t[:-1]) + " or " + t[-1]
        return t
def get_references(t: str):
    """Return a reST-formatted list of the functions that can return
    constructor *t*, phrased as "A, B and C", or None when no function
    returns it."""
    funcs = constructors_to_functions.get(t)
    if not funcs:
        return funcs
    refs = [
        ":obj:`{0} <pyrogram.api.functions.{0}>`".format(func_name)
        for func_name in funcs
    ]
    if len(refs) > 1:
        return ", ".join(refs[:-1]) + " and " + refs[-1]
    return refs[0]
def get_argument_type(arg):
    """Build the annotated parameter string for *arg* (a (name, type) pair),
    appending a None default when the TL type is flag-conditional."""
    name, tl_type = arg
    optional = FLAGS_RE.match(tl_type)
    if optional:
        # strip the "flags.N?" prefix, keep the underlying type
        tl_type = tl_type.split("?")[1]
    if tl_type in core_types:
        if tl_type == "long" or "int" in tl_type:
            annotation = ": int"
        elif tl_type == "double":
            annotation = ": float"
        elif tl_type == "string":
            annotation = ": str"
        else:
            annotation = ": {}".format(tl_type.lower())
    elif tl_type == "true":
        annotation = ": bool"
    elif tl_type.startswith("Vector"):
        annotation = ": list"
    else:
        # non-core TL types get no annotation at all
        return name + ("=None" if optional else "")
    return name + annotation + (" = None" if optional else "")
class Combinator:
    """Parsed representation of a single TL-schema combinator line."""

    def __init__(self,
                 section: str,
                 namespace: str,
                 name: str,
                 id: str,
                 args: list,
                 has_flags: bool,
                 return_type: str,
                 docs: str):
        self.section = section  # "types" or "functions"
        self.namespace = namespace  # dotted namespace prefix ("" when absent)
        self.name = name  # capitalized combinator name
        self.id = id  # TL constructor id as a "0x"-prefixed hex string
        self.args = args  # list of (name, type) tuples
        self.has_flags = has_flags  # True when the combinator declares a "flags:#" field
        self.return_type = return_type  # (possibly dotted) return type, capitalized
        self.docs = docs  # raw docs fragment from the schema line, or None
def snek(s: str):
    """Convert a CamelCase identifier to snake_case.

    Two regex passes (the classic Stack Overflow camelCase-to-snake_case
    recipe): first split an uppercase-led word from whatever precedes it,
    then split lower/digit-to-upper boundaries, finally lowercase.
    """
    partially_split = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", s)
    fully_split = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", partially_split)
    return fully_split.lower()
def capit(s: str):
    """Convert a snake_case identifier to CapitalizedCamelCase, preserving
    any interior capitalization of each word."""
    pieces = []
    for word in s.split("_"):
        pieces.append(word[0].upper() + word[1:])
    return "".join(pieces)
def sort_args(args):
    """Return a copy of *args* with flag-conditional arguments moved to the
    end, preserving the relative order within both groups."""
    non_flags = []
    flags = []
    for arg in args:
        # stable partition on whether the type carries a "flags.N?" prefix
        (flags if FLAGS_RE.match(arg[1]) else non_flags).append(arg)
    return non_flags + flags
def start():
shutil.rmtree("{}/types".format(DESTINATION), ignore_errors=True)
shutil.rmtree("{}/functions".format(DESTINATION), ignore_errors=True)
with open("{}/source/auth_key.tl".format(HOME), encoding="utf-8") as auth, \
open("{}/source/sys_msgs.tl".format(HOME), encoding="utf-8") as system, \
open("{}/source/main_api.tl".format(HOME), encoding="utf-8") as api:
schema = (auth.read() + system.read() + api.read()).splitlines()
with open("{}/template/mtproto.txt".format(HOME), encoding="utf-8") as f:
mtproto_template = f.read()
with open("{}/template/pyrogram.txt".format(HOME), encoding="utf-8") as f:
pyrogram_template = f.read()
with open(NOTICE_PATH, encoding="utf-8") as f:
notice = []
for line in f.readlines():
notice.append("# {}".format(line).strip())
notice = "\n".join(notice)
section = None
layer = None
namespaces = {"types": set(), "functions": set()}
combinators = []
for line in schema:
# Check for section changer lines
s = SECTION_RE.match(line)
if s:
section = s.group(1)
continue
# Save the layer version
l = LAYER_RE.match(line)
if l:
layer = l.group(1)
continue
combinator = COMBINATOR_RE.match(line)
if combinator:
name, id, return_type, docs = combinator.groups()
namespace, name = name.split(".") if "." in name else ("", name)
args = ARGS_RE.findall(line.split(" //")[0])
# Pingu!
has_flags = not not FLAGS_RE_3.findall(line)
# Fix file and folder name collision
if name == "updates":
name = "update"
# Fix arg name being "self" (reserved keyword)
for i, item in enumerate(args):
if item[0] == "self":
args[i] = ("is_self", item[1])
if namespace:
namespaces[section].add(namespace)
combinators.append(
Combinator(
section,
namespace,
capit(name),
"0x{}".format(id.zfill(8)),
args,
has_flags,
".".join(
return_type.split(".")[:-1]
+ [capit(return_type.split(".")[-1])]
),
docs
)
)
for c in combinators:
return_type = c.return_type
if return_type.startswith("Vector"):
return_type = return_type.split("<")[1][:-1]
d = types_to_constructors if c.section == "types" else types_to_functions
if return_type not in d:
d[return_type] = []
d[return_type].append(".".join(filter(None, [c.namespace, c.name])))
for k, v in types_to_constructors.items():
for i in v:
try:
constructors_to_functions[i] = types_to_functions[k]
except KeyError:
pass
total = len(combinators)
current = 0
for c in combinators: # type: Combinator
print("Compiling APIs... [{}%] {}".format(
str(round(current * 100 / total)).rjust(3),
".".join(filter(None, [c.section, c.namespace, c.name]))
), end=" \r", flush=True)
current += 1
path = "{}/{}/{}".format(DESTINATION, c.section, c.namespace)
os.makedirs(path, exist_ok=True)
init = "{}/__init__.py".format(path)
if not os.path.exists(init):
with open(init, "w", encoding="utf-8") as f:
f.write(notice + "\n\n")
with open(init, "a", encoding="utf-8") as f:
f.write("from .{} import {}\n".format(snek(c.name), capit(c.name)))
sorted_args = sort_args(c.args)
arguments = (
", "
+ ("*, " if c.args else "")
+ (", ".join([get_argument_type(i) for i in sorted_args if i != ("flags", "#")]) if c.args else "")
)
fields = "\n ".join(
["self.{0} = {0} # {1}".format(i[0], i[1]) for i in c.args if i != ("flags", "#")]
) if c.args else "pass"
docstring_args = []
docs = c.docs.split("|")[1:] if c.docs else None
for i, arg in enumerate(sorted_args):
if arg == ("flags", "#"):
continue
arg_name, arg_type = arg
is_optional = FLAGS_RE.match(arg_type)
flag_number = is_optional.group(1) if is_optional else -1
arg_type = arg_type.split("?")[-1]
if docs:
docstring_args.append(
"{} ({}{}):\n {}\n".format(
arg_name,
get_docstring_arg_type(arg_type, is_pyrogram_type=True),
", optional" if "Optional" in docs[i] else "",
re.sub("Optional\. ", "", docs[i].split("§")[1].rstrip(".") + ".")
)
)
else:
docstring_args.append(
"{}{}: {}".format(
arg_name,
" (optional)".format(flag_number) if is_optional else "",
get_docstring_arg_type(arg_type, is_pyrogram_type=c.namespace == "pyrogram")
)
)
if docstring_args:
docstring_args = "Parameters:\n " + "\n ".join(docstring_args)
else:
docstring_args = "No parameters required."
docstring_args = "Attributes:\n ID: ``{}``\n\n ".format(c.id) + docstring_args
docstring_args = "Attributes:\n LAYER: ``{}``\n\n ".format(layer) + docstring_args
if c.section == "functions":
docstring_args += "\n\n Returns:\n " + get_docstring_arg_type(c.return_type)
else:
references = get_references(".".join(filter(None, [c.namespace, c.name])))
if references:
docstring_args += "\n\n See Also:\n This object can be returned by " + references + "."
write_types = read_types = "" if c.has_flags else "# No flags\n "
for arg_name, arg_type in c.args:
flag = FLAGS_RE_2.findall(arg_type)
if arg_name == "flags" and arg_type == "#":
write_flags = []
for i in c.args:
flag = FLAGS_RE.match(i[1])
if flag:
write_flags.append(
"flags |= (1 << {}) if self.{} is not None else 0".format(flag.group(1), i[0]))
write_flags = "\n ".join([
"flags = 0",
"\n ".join(write_flags),
"b.write(Int(flags))\n "
])
write_types += write_flags
read_types += "flags = Int.read(b)\n "
continue
if flag:
index, flag_type = flag[0]
if flag_type == "true":
read_types += "\n "
read_types += "{} = True if flags & (1 << {}) else False".format(arg_name, index)
elif flag_type in core_types:
write_types += "\n "
write_types += "if self.{} is not None:\n ".format(arg_name)
write_types += "b.write({}(self.{}))\n ".format(flag_type.title(), arg_name)
read_types += "\n "
read_types += "{} = {}.read(b) if flags & (1 << {}) else None".format(
arg_name, flag_type.title(), index
)
elif "vector" in flag_type.lower():
sub_type = arg_type.split("<")[1][:-1]
write_types += "\n "
write_types += "if self.{} is not None:\n ".format(arg_name)
write_types += "b.write(Vector(self.{}{}))\n ".format(
arg_name, ", {}".format(sub_type.title()) if sub_type in core_types else ""
)
read_types += "\n "
read_types += "{} = TLObject.read(b{}) if flags & (1 << {}) else []\n ".format(
arg_name, ", {}".format(sub_type.title()) if sub_type in core_types else "", index
)
else:
write_types += "\n "
write_types += "if self.{} is not None:\n ".format(arg_name)
write_types += "b.write(self.{}.write())\n ".format(arg_name)
read_types += "\n "
read_types += "{} = TLObject.read(b) if flags & (1 << {}) else None\n ".format(
arg_name, index
)
else:
if arg_type in core_types:
write_types += "\n "
write_types += "b.write({}(self.{}))\n ".format(arg_type.title(), arg_name)
read_types += "\n "
read_types += "{} = {}.read(b)\n ".format(arg_name, arg_type.title())
elif "vector" in arg_type.lower():
sub_type = arg_type.split("<")[1][:-1]
write_types += "\n "
write_types += "b.write(Vector(self.{}{}))\n ".format(
arg_name, ", {}".format(sub_type.title()) if sub_type in core_types else ""
)
read_types += "\n "
read_types += "{} = TLObject.read(b{})\n ".format(
arg_name, ", {}".format(sub_type.title()) if sub_type in core_types else ""
)
else:
write_types += "\n "
write_types += "b.write(self.{}.write())\n ".format(arg_name)
read_types += "\n "
read_types += "{} = TLObject.read(b)\n ".format(arg_name)
if c.docs:
description = c.docs.split("|")[0].split("§")[1]
docstring_args = description + "\n\n " + docstring_args
with open("{}/{}.py".format(path, snek(c.name)), "w", encoding="utf-8") as f:
if c.docs:
f.write(
pyrogram_template.format(
notice=notice,
class_name=capit(c.name),
docstring_args=docstring_args,
object_id=c.id,
arguments=arguments,
fields=fields
)
)
else:
f.write(
mtproto_template.format(
notice=notice,
class_name=capit(c.name),
docstring_args=docstring_args,
object_id=c.id,
arguments=arguments,
fields=fields,
read_types=read_types,
write_types=write_types,
return_arguments=", ".join(
["{0}={0}".format(i[0]) for i in sorted_args if i != ("flags", "#")]
),
slots=", ".join(['"{}"'.format(i[0]) for i in sorted_args if i != ("flags", "#")]),
qualname="{}.{}{}".format(c.section, "{}.".format(c.namespace) if c.namespace else "", c.name)
)
)
with open("{}/all.py".format(DESTINATION), "w", encoding="utf-8") as f:
f.write(notice + "\n\n")
f.write("layer = {}\n\n".format(layer))
f.write("objects = {")
for c in combinators:
path = ".".join(filter(None, [c.section, c.namespace, capit(c.name)]))
f.write("\n {}: \"pyrogram.api.{}\",".format(c.id, path))
f.write("\n 0xbc799737: \"pyrogram.api.core.BoolFalse\",")
f.write("\n 0x997275b5: \"pyrogram.api.core.BoolTrue\",")
f.write("\n 0x1cb5c415: \"pyrogram.api.core.Vector\",")
f.write("\n 0x73f1f8dc: \"pyrogram.api.core.MsgContainer\",")
f.write("\n 0xae500895: \"pyrogram.api.core.FutureSalts\",")
f.write("\n 0x0949d9dc: \"pyrogram.api.core.FutureSalt\",")
f.write("\n 0x3072cfa1: \"pyrogram.api.core.GzipPacked\",")
f.write("\n 0x5bb8e511: \"pyrogram.api.core.Message\",")
f.write("\n}\n")
for k, v in namespaces.items():
with open("{}/{}/__init__.py".format(DESTINATION, k), "a", encoding="utf-8") as f:
f.write("from . import {}\n".format(", ".join([i for i in v])) if v else "")
if __name__ == "__main__":
    # Module-level globals consumed by start() and the code writers it drives.
    HOME = "."
    NOTICE_PATH = "../../NOTICE"
    DESTINATION = "../../pyrogram/api"
    start()
|
PypiClean
|
/discord-api-wrapper-1.0.0.tar.gz/discord-api-wrapper-1.0.0/discord/components.py
|
from __future__ import annotations
from typing import ClassVar, List, Literal, Optional, TYPE_CHECKING, Tuple, Union, overload
from .enums import try_enum, ComponentType, ButtonStyle, TextStyle, ChannelType
from .utils import get_slots, MISSING
from .partial_emoji import PartialEmoji, _EmojiTag
if TYPE_CHECKING:
from typing_extensions import Self
from .types.components import (
Component as ComponentPayload,
ButtonComponent as ButtonComponentPayload,
SelectMenu as SelectMenuPayload,
SelectOption as SelectOptionPayload,
ActionRow as ActionRowPayload,
TextInput as TextInputPayload,
ActionRowChildComponent as ActionRowChildComponentPayload,
)
from .emoji import Emoji
ActionRowChildComponentType = Union['Button', 'SelectMenu', 'TextInput']
__all__ = (
'Component',
'ActionRow',
'Button',
'SelectMenu',
'SelectOption',
'TextInput',
)
class Component:
    """Abstract base for Discord Bot UI Kit components.

    The concrete components currently supported by Discord are
    :class:`ActionRow`, :class:`Button`, :class:`SelectMenu` and
    :class:`TextInput`. This class is abstract and cannot be instantiated.

    .. versionadded:: 2.0
    """

    __slots__: Tuple[str, ...] = ()

    # Attribute names rendered by __repr__; each subclass supplies its own.
    __repr_info__: ClassVar[Tuple[str, ...]]

    def __repr__(self) -> str:
        described = ' '.join(f'{name}={getattr(self, name)!r}' for name in self.__repr_info__)
        return f'<{type(self).__name__} {described}>'

    @property
    def type(self) -> ComponentType:
        """:class:`ComponentType`: The type of component."""
        raise NotImplementedError

    @classmethod
    def _raw_construct(cls, **kwargs) -> Self:
        # Create an instance without running __init__, assigning only the
        # slots that were explicitly supplied.
        instance = cls.__new__(cls)
        for slot in get_slots(cls):
            if slot in kwargs:
                setattr(instance, slot, kwargs[slot])
        return instance

    def to_dict(self) -> ComponentPayload:
        raise NotImplementedError
class ActionRow(Component):
    """A Discord Bot UI Kit Action Row: a container component that holds up
    to five child components in a row.

    This inherits from :class:`Component`.

    .. versionadded:: 2.0

    Attributes
    ------------
    children: List[Union[:class:`Button`, :class:`SelectMenu`, :class:`TextInput`]]
        The children components that this holds, if any.
    """

    __slots__: Tuple[str, ...] = ('children',)
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: ActionRowPayload, /) -> None:
        # Parse each raw child payload; the factory returns None for unknown
        # component types, which are silently dropped.
        parsed = (_component_factory(raw) for raw in data.get('components', []))
        self.children: List[ActionRowChildComponentType] = [c for c in parsed if c is not None]

    @property
    def type(self) -> Literal[ComponentType.action_row]:
        """:class:`ComponentType`: The type of component."""
        return ComponentType.action_row

    def to_dict(self) -> ActionRowPayload:
        return {
            'type': self.type.value,
            'components': [child.to_dict() for child in self.children],
        }
class Button(Component):
    """A button from the Discord Bot UI Kit.

    This inherits from :class:`Component`.

    .. note::

        The user constructible and usable type to create a button is
        :class:`discord.ui.Button`, not this one.

    .. versionadded:: 2.0

    Attributes
    -----------
    style: :class:`.ButtonStyle`
        The style of the button.
    custom_id: Optional[:class:`str`]
        The ID of the button that gets received during an interaction.
        URL buttons do not carry a custom ID.
    url: Optional[:class:`str`]
        The URL this button sends you to.
    disabled: :class:`bool`
        Whether the button is disabled or not.
    label: Optional[:class:`str`]
        The label of the button, if any.
    emoji: Optional[:class:`PartialEmoji`]
        The emoji of the button, if available.
    """

    __slots__: Tuple[str, ...] = (
        'style',
        'custom_id',
        'url',
        'disabled',
        'label',
        'emoji',
    )
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: ButtonComponentPayload, /) -> None:
        self.style: ButtonStyle = try_enum(ButtonStyle, data['style'])
        self.custom_id: Optional[str] = data.get('custom_id')
        self.url: Optional[str] = data.get('url')
        self.disabled: bool = data.get('disabled', False)
        self.label: Optional[str] = data.get('label')
        # The emoji key is absent entirely when the button carries no emoji.
        self.emoji: Optional[PartialEmoji]
        if 'emoji' in data:
            self.emoji = PartialEmoji.from_dict(data['emoji'])
        else:
            self.emoji = None

    @property
    def type(self) -> Literal[ComponentType.button]:
        """:class:`ComponentType`: The type of component."""
        return ComponentType.button

    def to_dict(self) -> ButtonComponentPayload:
        payload: ButtonComponentPayload = {
            'type': 2,
            'style': self.style.value,
            'disabled': self.disabled,
        }
        # Optional string fields are emitted only when truthy.
        for key, value in (
            ('label', self.label),
            ('custom_id', self.custom_id),
            ('url', self.url),
        ):
            if value:
                payload[key] = value
        if self.emoji:
            payload['emoji'] = self.emoji.to_dict()
        return payload
class SelectMenu(Component):
    """A select menu from the Discord Bot UI Kit.

    Functionally the same as a dropdown, though it renders slightly
    differently on mobile.

    .. note::

        The user constructible and usable type to create a select menu is
        :class:`discord.ui.Select`, not this one.

    .. versionadded:: 2.0

    Attributes
    ------------
    type: :class:`ComponentType`
        The type of component.
    custom_id: Optional[:class:`str`]
        The ID of the select menu that gets received during an interaction.
    placeholder: Optional[:class:`str`]
        The placeholder text that is shown if nothing is selected, if any.
    min_values: :class:`int`
        The minimum number of items that must be chosen for this select menu.
        Defaults to 1 and must be between 0 and 25.
    max_values: :class:`int`
        The maximum number of items that must be chosen for this select menu.
        Defaults to 1 and must be between 1 and 25.
    options: List[:class:`SelectOption`]
        A list of options that can be selected in this menu.
    disabled: :class:`bool`
        Whether the select is disabled or not.
    channel_types: List[:class:`.ChannelType`]
        A list of channel types that are allowed to be chosen in this select menu.
    """

    __slots__: Tuple[str, ...] = (
        'type',
        'custom_id',
        'placeholder',
        'min_values',
        'max_values',
        'options',
        'disabled',
        'channel_types',
    )
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: SelectMenuPayload, /) -> None:
        self.type: ComponentType = try_enum(ComponentType, data['type'])
        self.custom_id: str = data['custom_id']
        self.placeholder: Optional[str] = data.get('placeholder')
        self.min_values: int = data.get('min_values', 1)
        self.max_values: int = data.get('max_values', 1)
        self.options: List[SelectOption] = [SelectOption.from_dict(raw) for raw in data.get('options', [])]
        self.disabled: bool = data.get('disabled', False)
        self.channel_types: List[ChannelType] = [try_enum(ChannelType, raw) for raw in data.get('channel_types', [])]

    def to_dict(self) -> SelectMenuPayload:
        payload: SelectMenuPayload = {
            'type': self.type.value,
            'custom_id': self.custom_id,
            'min_values': self.min_values,
            'max_values': self.max_values,
            'disabled': self.disabled,
        }
        # Optional keys are emitted only when their value is non-empty.
        if self.placeholder:
            payload['placeholder'] = self.placeholder
        if self.options:
            payload['options'] = [option.to_dict() for option in self.options]
        if self.channel_types:
            payload['channel_types'] = [channel_type.value for channel_type in self.channel_types]
        return payload
class SelectOption:
    """A single option of a select menu. These can be created by users.

    .. versionadded:: 2.0

    Parameters
    -----------
    label: :class:`str`
        The label of the option, displayed to users.
        Can only be up to 100 characters.
    value: :class:`str`
        The value of the option, not displayed to users. Defaults to the
        label when not provided. Can only be up to 100 characters.
    description: Optional[:class:`str`]
        An additional description of the option, if any.
        Can only be up to 100 characters.
    emoji: Optional[Union[:class:`str`, :class:`Emoji`, :class:`PartialEmoji`]]
        The emoji of the option, if available.
    default: :class:`bool`
        Whether this option is selected by default.

    Attributes
    -----------
    label: :class:`str`
        The label of the option, displayed to users.
        Can only be up to 100 characters.
    value: :class:`str`
        The value of the option, not displayed to users. Defaults to the
        label when not provided. Can only be up to 100 characters.
    description: Optional[:class:`str`]
        An additional description of the option, if any.
        Can only be up to 100 characters.
    default: :class:`bool`
        Whether this option is selected by default.
    """

    __slots__: Tuple[str, ...] = (
        'label',
        'value',
        'description',
        '_emoji',
        'default',
    )

    def __init__(
        self,
        *,
        label: str,
        value: str = MISSING,
        description: Optional[str] = None,
        emoji: Optional[Union[str, Emoji, PartialEmoji]] = None,
        default: bool = False,
    ) -> None:
        self.label: str = label
        # MISSING is a sentinel: the value falls back to the label.
        self.value: str = value if value is not MISSING else label
        self.description: Optional[str] = description
        self.emoji = emoji  # routed through the property setter below
        self.default: bool = default

    def __repr__(self) -> str:
        return (
            f'<SelectOption label={self.label!r} value={self.value!r} description={self.description!r} '
            f'emoji={self.emoji!r} default={self.default!r}>'
        )

    def __str__(self) -> str:
        base = f'{self.emoji} {self.label}' if self.emoji else self.label
        return f'{base}\n{self.description}' if self.description else base

    @property
    def emoji(self) -> Optional[PartialEmoji]:
        """Optional[:class:`.PartialEmoji`]: The emoji of the option, if available."""
        return self._emoji

    @emoji.setter
    def emoji(self, value: Optional[Union[str, Emoji, PartialEmoji]]) -> None:
        # Normalize every accepted input form to a PartialEmoji (or None).
        if value is None:
            self._emoji = None
        elif isinstance(value, str):
            self._emoji = PartialEmoji.from_str(value)
        elif isinstance(value, _EmojiTag):
            self._emoji = value._to_partial()
        else:
            raise TypeError(f'expected str, Emoji, or PartialEmoji, received {value.__class__.__name__} instead')

    @classmethod
    def from_dict(cls, data: SelectOptionPayload) -> SelectOption:
        emoji = PartialEmoji.from_dict(data['emoji']) if 'emoji' in data else None
        return cls(
            label=data['label'],
            value=data['value'],
            description=data.get('description'),
            emoji=emoji,
            default=data.get('default', False),
        )

    def to_dict(self) -> SelectOptionPayload:
        payload: SelectOptionPayload = {
            'label': self.label,
            'value': self.value,
            'default': self.default,
        }
        if self.emoji:
            payload['emoji'] = self.emoji.to_dict()
        if self.description:
            payload['description'] = self.description
        return payload
class TextInput(Component):
    """A text input from the Discord Bot UI Kit.

    .. note::

        The user constructible and usable type to create a text input is
        :class:`discord.ui.TextInput`, not this one.

    .. versionadded:: 2.0

    Attributes
    ------------
    custom_id: Optional[:class:`str`]
        The ID of the text input that gets received during an interaction.
    label: :class:`str`
        The label to display above the text input.
    style: :class:`TextStyle`
        The style of the text input.
    placeholder: Optional[:class:`str`]
        The placeholder text to display when the text input is empty.
    value: Optional[:class:`str`]
        The default value of the text input.
    required: :class:`bool`
        Whether the text input is required.
    min_length: Optional[:class:`int`]
        The minimum length of the text input.
    max_length: Optional[:class:`int`]
        The maximum length of the text input.
    """

    __slots__: Tuple[str, ...] = (
        'style',
        'label',
        'custom_id',
        'placeholder',
        'value',
        'required',
        'min_length',
        'max_length',
    )
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: TextInputPayload, /) -> None:
        self.style: TextStyle = try_enum(TextStyle, data['style'])
        self.label: str = data['label']
        self.custom_id: str = data['custom_id']
        self.placeholder: Optional[str] = data.get('placeholder')
        self.value: Optional[str] = data.get('value')
        self.required: bool = data.get('required', True)
        self.min_length: Optional[int] = data.get('min_length')
        self.max_length: Optional[int] = data.get('max_length')

    @property
    def type(self) -> Literal[ComponentType.text_input]:
        """:class:`ComponentType`: The type of component."""
        return ComponentType.text_input

    def to_dict(self) -> TextInputPayload:
        payload: TextInputPayload = {
            'type': self.type.value,
            'style': self.style.value,
            'label': self.label,
            'custom_id': self.custom_id,
            'required': self.required,
        }
        # Optional keys are only emitted when their value is truthy.
        for key, value in (
            ('placeholder', self.placeholder),
            ('value', self.value),
            ('min_length', self.min_length),
            ('max_length', self.max_length),
        ):
            if value:
                payload[key] = value
        return payload

    @property
    def default(self) -> Optional[str]:
        """Optional[:class:`str`]: The default value of the text input.

        This is an alias to :attr:`value`.
        """
        return self.value
@overload
def _component_factory(data: ActionRowChildComponentPayload) -> Optional[ActionRowChildComponentType]:
    ...


@overload
def _component_factory(data: ComponentPayload) -> Optional[Union[ActionRow, ActionRowChildComponentType]]:
    ...


def _component_factory(data: ComponentPayload) -> Optional[Union[ActionRow, ActionRowChildComponentType]]:
    """Construct the matching component class for a raw payload.

    Returns ``None`` for component types this library does not recognize.
    """
    component_cls = {
        1: ActionRow,
        2: Button,
        3: SelectMenu,
        4: TextInput,
    }.get(data['type'])
    return component_cls(data) if component_cls is not None else None
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/AddressInfoDTO.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class AddressInfoDTO(object):
    """Address record exchanged with the Alipay open API.

    Every attribute defaults to None. Only truthy attributes are emitted
    by :meth:`to_alipay_dict`, matching the SDK-wide convention.
    """

    # Public attribute names, in the order they appear in the gateway
    # payload; drives both dict-conversion helpers below.
    _KEYS = (
        'account_id', 'address', 'address_id', 'city_code', 'city_name',
        'community', 'enterprise_id', 'latitude', 'longitude', 'mark',
        'poi_id',
    )

    def __init__(self):
        self._account_id = None
        self._address = None
        self._address_id = None
        self._city_code = None
        self._city_name = None
        self._community = None
        self._enterprise_id = None
        self._latitude = None
        self._longitude = None
        self._mark = None
        self._poi_id = None

    @property
    def account_id(self):
        return self._account_id

    @account_id.setter
    def account_id(self, value):
        self._account_id = value

    @property
    def address(self):
        return self._address

    @address.setter
    def address(self, value):
        self._address = value

    @property
    def address_id(self):
        return self._address_id

    @address_id.setter
    def address_id(self, value):
        self._address_id = value

    @property
    def city_code(self):
        return self._city_code

    @city_code.setter
    def city_code(self, value):
        self._city_code = value

    @property
    def city_name(self):
        return self._city_name

    @city_name.setter
    def city_name(self, value):
        self._city_name = value

    @property
    def community(self):
        return self._community

    @community.setter
    def community(self, value):
        self._community = value

    @property
    def enterprise_id(self):
        return self._enterprise_id

    @enterprise_id.setter
    def enterprise_id(self, value):
        self._enterprise_id = value

    @property
    def latitude(self):
        return self._latitude

    @latitude.setter
    def latitude(self, value):
        self._latitude = value

    @property
    def longitude(self):
        return self._longitude

    @longitude.setter
    def longitude(self, value):
        self._longitude = value

    @property
    def mark(self):
        return self._mark

    @mark.setter
    def mark(self, value):
        self._mark = value

    @property
    def poi_id(self):
        return self._poi_id

    @poi_id.setter
    def poi_id(self, value):
        self._poi_id = value

    def to_alipay_dict(self):
        """Serialize truthy attributes into a plain dict for the gateway.

        Values exposing ``to_alipay_dict`` are serialized recursively.
        """
        params = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if not value:
                # Falsy values (None, 0, '') are omitted, per SDK convention.
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[key] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an AddressInfoDTO from a response dict.

        Returns None when *d* is empty or None.
        """
        if not d:
            return None
        o = AddressInfoDTO()
        for key in AddressInfoDTO._KEYS:
            if key in d:
                setattr(o, key, d[key])
        return o
|
PypiClean
|
/lingjixian-x3-2022.9.29.0.tar.gz/lingjixian-x3-2022.9.29.0/LingjixianX3/js/plugins/search.js
|
// Namespace for the reader's remote full-text-search support.
EPUBJS.reader.search = {};

// Search Server -- https://github.com/futurepress/epubjs-search
EPUBJS.reader.search.SERVER = "https://pacific-cliffs-3579.herokuapp.com";
// Query the remote epubjs-search server for `q` and pass the parsed JSON
// results to `callback`. Failures are logged to the console and the
// callback is not invoked.
EPUBJS.reader.search.request = function(q, callback) {
    var endpoint = EPUBJS.reader.search.SERVER + "/search?q=" + encodeURIComponent(q);
    var fetch = $.ajax({
        dataType: "json",
        url: endpoint
    });

    fetch.fail(function(err) {
        console.error(err);
    });

    fetch.done(function(results) {
        callback(results);
    });
};
// Sidebar search plugin: wires the search box to the remote search service
// and highlights matching terms inside the rendered chapter iframe.
EPUBJS.reader.plugins.SearchController = function(Book) {
    var reader = this;

    var $searchBox = $("#searchBox"),
        $searchResults = $("#searchResults"),
        $searchView = $("#searchView"),
        iframeDoc;  // document of the chapter iframe currently highlighted

    var searchShown = false;

    // Run the current query and reveal the search panel.
    var onShow = function() {
        query();
        searchShown = true;
        $searchView.addClass("shown");
    };

    // Hide the search panel (highlights are left in place).
    var onHide = function() {
        searchShown = false;
        $searchView.removeClass("shown");
    };

    // Send the search-box contents to the server and render the result list.
    var query = function() {
        var q = $searchBox.val();

        if(q == '') {
            return;
        }

        $searchResults.empty();
        $searchResults.append("<li><p>Searching...</p></li>");

        EPUBJS.reader.search.request(q, function(data) {
            var results = data.results;

            $searchResults.empty();

            // Clear highlights left over from a previous query.
            if(iframeDoc) {
                $(iframeDoc).find('body').unhighlight();
            }

            if(results.length == 0) {
                $searchResults.append("<li><p>No Results Found</p></li>");
                return;
            }

            // Highlight matches in the currently displayed chapter.
            iframeDoc = $("#viewer iframe")[0].contentDocument;
            $(iframeDoc).find('body').highlight(q, { element: 'span' });

            results.forEach(function(result) {
                var $li = $("<li></li>");
                var $item = $("<a href='"+result.href+"' data-cfi='"+result.cfi+"'><span>"+result.title+"</span><p>"+result.highlight+"</p></a>");

                $item.on("click", function(e) {
                    var $this = $(this),
                        cfi = $this.data("cfi");

                    e.preventDefault();

                    // Jump to the match, then re-highlight once the target
                    // chapter has been rendered into the iframe.
                    Book.gotoCfi(cfi+"/1:0");
                    Book.on("renderer:chapterDisplayed", function() {
                        iframeDoc = $("#viewer iframe")[0].contentDocument;
                        $(iframeDoc).find('body').highlight(q, { element: 'span' });
                    })
                });
                $li.append($item);
                $searchResults.append($li);
            });
        });
    };

    // Native "search" event fires on Enter and when the box is cleared.
    $searchBox.on("search", function(e) {
        var q = $searchBox.val();

        //-- SearchBox is empty or cleared
        if(q == '') {
            $searchResults.empty();
            if(reader.SidebarController.getActivePanel() == "Search") {
                reader.SidebarController.changePanelTo("Toc");
            }
            $(iframeDoc).find('body').unhighlight();
            iframeDoc = false;
            return;
        }

        reader.SidebarController.changePanelTo("Search");

        e.preventDefault();
    });

    return {
        "show" : onShow,
        "hide" : onHide
    };
};
|
PypiClean
|
/dnpak.py-2.0.0-py3-none-any.whl/dnpak/etfilesystem.py
|
import binascii
import os
import struct
import zlib
from glob import glob
from typing import Final, List
from .etfile import EtFile
from . import utils
class EtFileSystem:
    """Reader/writer for Eyedentity Games ``.pak`` archives.

    Open an archive with the :meth:`read` or :meth:`write` classmethods and
    finish with :meth:`close_file_system`.

    NOTE(review): ``__file``, ``__files``, ``FILE_COUNT`` and ``_FILE_OFFSET``
    are class-level attributes mutated by the classmethods, so this state is
    shared across all instances; reading a second archive before closing the
    first would mix their entry lists. Confirm whether multiple concurrent
    instances are meant to be supported.
    """

    __type = None            # "read" or "write"; selects close-time behavior
    __current_file = None    # path of the archive backing this instance
    _HEADER_MAGIC: Final[str] = "EyedentityGames Packing File 0.1"
    _HEADER_VERSION: Final[int] = 0xB
    _FILE_OFFSET: int = 0    # absolute offset of the file-information table
    FILE_COUNT: int = 0      # number of entries stored in the archive
    __files: List[EtFile] = []   # parsed/added entries (class-level, shared)

    def __init__(self, file_name: str):
        """Validate the archive name; use read()/write() to open a file."""
        self.__current_file = file_name
        if not file_name or file_name[-4:] != ".pak":
            raise NameError("Invalid file name")

    def __repr__(self):
        # Diagnostic view of the archive bookkeeping state.
        return str({
            "current_file": self.__current_file,
            "file_count": self.FILE_COUNT,
            "file_offset": self._FILE_OFFSET,
        })

    @classmethod
    def write(cls, file_name: str):
        """
        Write the specified PAK in binary mode

        :param file_name: PAK file name to write
        :type file_name: str
        """
        cls.__type = "write"
        if os.path.exists(file_name):
            raise FileExistsError("File already exists. Did you mean to read?")
        cls.__file = open(file_name, "wb")
        # Header is written immediately with placeholder count/offset; it is
        # rewritten with real values in close_file_system().
        cls.write_header()
        return cls(file_name)

    @classmethod
    def read(cls, file_name: str):
        """
        Read (and write) the specified PAK in binary mode

        :param file_name: PAK file name to read
        :type file_name: str
        """
        cls.__type = "read"
        cls.__file = open(file_name, "rb+")
        # File count lives at offset 260, table offset at 264 (both UINT32 LE).
        cls.__file.seek(260)
        cls.FILE_COUNT = struct.unpack(
            "<I",
            cls.__file.read(4))[0]  # [0] because the return type is a tuple
        cls.__file.seek(264)
        cls._FILE_OFFSET = struct.unpack("<I", cls.__file.read(4))[0]
        offset_now = 0
        for _ in range(cls.FILE_COUNT):
            cls.__file.seek(cls._FILE_OFFSET + offset_now)
            # Sanitize the file name: 256-byte NUL-padded field.
            location = (cls.__file.read(256).decode("utf-8",
                                                    "ignore").split("\x00",
                                                                    1)[0])
            # NOTE(review): `location in "._-"` is a substring test on the
            # whole name, not a per-character check — it only matches when
            # location is "", ".", "_", "-", "._", etc. Presumably a
            # per-character filter was intended; confirm before changing.
            if not location.isalnum() or location in "._-":
                location = "".join(x for x in location
                                   if (x.isalnum() or x in "/\\._- "))
            file = EtFile(location=location)
            file_info = {
                "filesizecomp": struct.unpack("<I", cls.__file.read(4))[0],
                "filesize": struct.unpack("<I", cls.__file.read(4))[0],
                "alloc_size": struct.unpack("<I", cls.__file.read(4))[0],
                "offset": struct.unpack("<I", cls.__file.read(4))[0],
            }
            # seek to offset, and read till allocSize
            cls.__file.seek(file_info["offset"])
            file_info["filedatacomp"] = cls.__file.read(
                file_info["alloc_size"])
            file.set_file_info(**file_info)
            cls.__files.append(file)
            # Each file-information record is 316 bytes (256 name + 4 UINT32s
            # + padding).
            offset_now += 316
        return cls(file_name)

    def extract(self, mode=None, directory=None):
        """
        Extract compressed data inside PAK

        :param mode: Use 'strict' mode to prevent extracting 0 byte files
        :type mode: str
        :param directory: Specified directory name for the extracted files
        :type directory: str
        """
        # :-4 to remove ".pak"
        folder_name = (directory if directory is not None else str(
            self.__current_file)[:-4])
        for file in self.__files:
            if (mode == "strict" and file.get_file_size() == 0
                    and file.get_compressed_file_size() == 0):
                pass
            else:
                file_path = f"{folder_name}{utils.to_unix_path(file.get_location())}"
                os.makedirs(
                    os.path.dirname(file_path),
                    exist_ok=True,
                )
                # Best-effort write: files we cannot write are skipped.
                try:
                    with open(file_path, "wb") as f:
                        f.write(file.get_decompressed_data())
                except PermissionError:
                    pass

    def get_files(self) -> List[EtFile]:
        """
        A getter for files inside pak

        :return: List of EtFile objects
        :rtype: List[EtFile]
        """
        return self.__files

    def find_file(self, location: str) -> EtFile:
        """
        :param location: Location of the file in pak
        :type location: str
        :return: EtFile object that match the location
        :rtype: EtFile
        """
        # NOTE(review): next() is called without a default (the trailing
        # comma adds no argument), so a missing location raises
        # StopIteration rather than returning None — confirm intent.
        filtered_file = next(
            filter(lambda file: file.get_location() == location,
                   self.__files), )
        return filtered_file

    def add_file(self, file_name, location):
        """
        Add the specified file to the pak

        :param file_name: Path of the specified file
        :type file_name: str
        :param location: Location of the file that
        will be put in the pak.
        :type location: str
        """
        self.__type = "write"
        if not os.path.exists(file_name):
            raise FileNotFoundError("File doesn't exist")
        # In-pak locations are stored Windows-style with a leading backslash.
        if location[0] != "\\":
            location = f"\\{utils.to_windows_path(location)}"
        self.__files.append(EtFile(file_name, location))

    def add_files(self, folder: str):
        """
        Add the all files inside specified folder to the pak

        :param folder: Path of the folder
        :type folder: str
        """
        self.__type = "write"
        if not os.path.exists(folder):
            raise FileNotFoundError("Folder doesn't exist")
        files = glob(f"{folder}/**/*.*", recursive=True)
        for file in files:
            path = utils.to_windows_path(os.path.relpath(file, folder))
            self.__files.append(EtFile(file, f"\\{path}"))

    def edit_file(self, file: EtFile, filedata: bytes):
        """
        Edit the specified EtFile object data

        :param file: Object of EtFile that will be edited
        :type file: EtFile
        :param filedata: File data that will be written
        :type filedata: bytes
        :return:
        """
        self.__type = "write"
        try:
            # Compression level 1 matches the format's expected encoding.
            filedatacomp = zlib.compress(filedata, 1)
            filesize = len(filedata)
            # hexlify length // 2 == byte length of the compressed payload.
            filesizecomp = len(binascii.hexlify(filedatacomp)) // 2
            file_info = {
                "filesize": filesize,
                "filedatacomp": filedatacomp,
                "filesizecomp": filesizecomp
            }
            file_index = self.__files.index(file)
            self.__files[file_index].set_file_info(**file_info)
        except zlib.error as err:
            raise err

    def close_file_system(self):
        """
        Required every time you read or write PAK
        Write header, compressed data, and file information to PAK

        In "read" mode no data is flushed; the handle is closed and the
        shared file list is cleared either way.
        """
        if self.__type == "write":
            # Payload data starts right after the fixed 1024-byte header.
            self.__file.seek(1024)
            self.__write_data()
            self.__write_footer()
        self.__files.clear()
        self.__file.close()

    @classmethod
    def write_header(cls):
        """
        Write header with dummy file count and offset

        **Signature**: PAK file magic number/identifier string - FBSTR[256] \n
        **Version**: ? Always 11 0x0B - INT32 \n
        **File Count**: Number of files in this PAK - UINT32 \n
        **File Information Offset**: Pointer to the location of
        file information list - UINT32 \n
        **Padding**: Padding bytes to make the PAK header
        1024 bytes in total - UINT8
        """
        cls.__file.write(bytes(cls._HEADER_MAGIC, "utf-8"))
        cls.__file.write(struct.pack("<x") * 224)
        cls.__file.write(struct.pack("<I", cls._HEADER_VERSION))
        cls.__file.write(struct.pack("<I", cls.FILE_COUNT))
        cls.__file.write(struct.pack("<I", cls._FILE_OFFSET))
        cls.__file.write(struct.pack("<I", 0))
        cls.__file.write(struct.pack("<x") * 752)

    def __rewrite_header(self):
        """
        Rewrite header with real file count and offset
        """
        self.FILE_COUNT = len(self.__files)
        self._FILE_OFFSET = self.__file.tell()
        # File count and table offset live right after the 256-byte magic
        # field and the 4-byte version.
        self.__file.seek(256 + 4)
        self.__file.write(struct.pack("<I", self.FILE_COUNT))
        self.__file.write(struct.pack("<I", self._FILE_OFFSET))
        self.__file.seek(self._FILE_OFFSET, os.SEEK_SET)

    def __write_data(self):
        """
        Write compressed data to PAK
        """
        for f in self.__files:
            # Record where each payload lands so the footer can point at it.
            f.set_offset(self.__file.tell())
            self.__file.write(f.get_compressed_data())

    def __write_footer(self):
        """
        Write file information to PAK
        """
        self.__rewrite_header()
        for f in self.__files:
            self.__file.write(f.get_file_info())
|
PypiClean
|
/azure-cli-2.51.0.tar.gz/azure-cli-2.51.0/azure/cli/command_modules/appconfig/keyvalue.py
|
# pylint: disable=line-too-long, too-many-locals, too-many-statements, too-many-branches
import json
import time
import sys
from itertools import chain
from knack.log import get_logger
from knack.util import CLIError
from azure.appconfiguration import (ConfigurationSetting,
ResourceReadOnlyError)
from azure.core import MatchConditions
from azure.cli.core.util import user_confirmation
from azure.core.exceptions import (HttpResponseError,
ResourceNotFoundError,
ResourceModifiedError)
import azure.cli.core.azclierror as CLIErrors
from ._constants import (FeatureFlagConstants, KeyVaultConstants,
SearchFilterOptions, StatusCodes,
ImportExportProfiles, CompareFieldsMap,
JsonDiff)
from ._models import (convert_configurationsetting_to_keyvalue, convert_keyvalue_to_configurationsetting)
from ._utils import get_appconfig_data_client, prep_label_filter_for_url_encoding, resolve_store_metadata, get_store_endpoint_from_connection_string, is_json_content_type
from ._kv_helpers import (__read_kv_from_file, __read_features_from_file,
__write_kv_and_features_to_file, __read_kv_from_config_store,
__write_kv_and_features_to_config_store,
__discard_features_from_retrieved_kv, __read_kv_from_app_service,
__write_kv_to_app_service, __print_restore_preview,
__convert_featureflag_list_to_keyvalue_list, __export_kvset_to_file,
__import_kvset_from_file, __delete_configuration_setting_from_config_store, __map_to_appservice_config_reference)
from ._diff_utils import print_preview, KVComparer
from .feature import list_feature
logger = get_logger(__name__)
def import_config(cmd,
                  source,
                  name=None,
                  connection_string=None,
                  label=None,
                  prefix="",  # prefix to add
                  yes=False,
                  skip_features=False,
                  content_type=None,
                  auth_mode="key",
                  endpoint=None,
                  # from-file parameters
                  path=None,
                  format_=None,
                  separator=None,
                  depth=None,
                  profile=ImportExportProfiles.DEFAULT,
                  strict=False,
                  # from-configstore parameters
                  src_name=None,
                  src_connection_string=None,
                  src_key=None,
                  src_label=None,
                  preserve_labels=False,
                  src_auth_mode="key",
                  src_endpoint=None,
                  # from-appservice parameters
                  appservice_account=None):
    """Import key-values (and optionally feature flags) into an App Configuration store.

    ``source`` selects where data is read from: 'file', 'appconfig'
    (another store) or 'appservice'. The flow is: read source KVs, read the
    destination's current KVs, show a diff preview, ask for confirmation
    (unless ``yes``), then write. With ``strict=True``, destination entries
    absent from the source are deleted so the destination mirrors the source.

    Returns None; raises CLIErrors subclasses on invalid argument
    combinations or service failures.
    """
    src_features = []
    dest_features = []
    dest_kvs = []
    source = source.lower()
    profile = profile.lower()
    format_ = format_.lower() if format_ else None
    # Destination (the store being imported INTO).
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    # fetch key values from source
    if source == 'file':
        # The KVSET profile has its own self-contained import path.
        if profile == ImportExportProfiles.KVSET:
            __import_kvset_from_file(client=azconfig_client, path=path, strict=strict, yes=yes)
            return
        if format_ and content_type:
            # JSON content type is only supported with JSON format.
            # Error out if user has provided JSON content type with any other format.
            if format_ != 'json' and is_json_content_type(content_type):
                raise CLIErrors.FileOperationError("Failed to import '{}' file format with '{}' content type. Please provide JSON file format to match your content type.".format(format_, content_type))
        if separator:
            # If separator is provided, use max depth by default unless depth is specified.
            depth = sys.maxsize if depth is None else int(depth)
        else:
            if depth and int(depth) != 1:
                logger.warning("Cannot flatten hierarchical data without a separator. --depth argument will be ignored.")
            depth = 1
        src_kvs = __read_kv_from_file(file_path=path,
                                      format_=format_,
                                      separator=separator,
                                      prefix_to_add=prefix,
                                      depth=depth,
                                      content_type=content_type)
        # Features are also read in strict mode so strict deletion can see them.
        if strict or not skip_features:
            # src_features is a list of KeyValue objects
            src_features = __read_features_from_file(file_path=path, format_=format_)
    elif source == 'appconfig':
        src_azconfig_client = get_appconfig_data_client(cmd, src_name, src_connection_string, src_auth_mode, src_endpoint)
        if label is not None and preserve_labels:
            raise CLIErrors.MutuallyExclusiveArgumentError("Import failed! Please provide only one of these arguments: '--label' or '--preserve-labels'. See 'az appconfig kv import -h' for examples.")
        if preserve_labels:
            # We need label to be the same as src_label for preview later.
            # This will have no effect on label while writing to config store
            # as we check preserve_labels again before labelling KVs.
            label = src_label
        src_kvs = __read_kv_from_config_store(src_azconfig_client,
                                              key=src_key,
                                              label=src_label if src_label else SearchFilterOptions.EMPTY_LABEL,
                                              prefix_to_add=prefix)
        # We need to separate KV from feature flags
        __discard_features_from_retrieved_kv(src_kvs)
        if not skip_features:
            # Get all Feature flags with matching label
            all_features = __read_kv_from_config_store(src_azconfig_client,
                                                       key=FeatureFlagConstants.FEATURE_FLAG_PREFIX + '*',
                                                       label=src_label if src_label else SearchFilterOptions.EMPTY_LABEL)
            for feature in all_features:
                # Keep only entries whose content type marks them as real feature flags.
                if feature.content_type == FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE:
                    src_features.append(feature)
    elif source == 'appservice':
        src_kvs = __read_kv_from_app_service(
            cmd, appservice_account=appservice_account, prefix_to_add=prefix, content_type=content_type)
    # Destination state is needed to compute the diff (and for strict deletes).
    if strict or not yes:
        # fetch key values from user's configstore
        dest_kvs = __read_kv_from_config_store(azconfig_client,
                                               key=prefix + SearchFilterOptions.ANY_KEY if prefix else SearchFilterOptions.ANY_KEY,
                                               label=label if label else SearchFilterOptions.EMPTY_LABEL)
        __discard_features_from_retrieved_kv(dest_kvs)
    # if customer needs preview & confirmation
    # generate preview and wait for user confirmation
    kv_comparer = KVComparer(
        src_kvs=src_kvs,
        compare_fields=CompareFieldsMap[source],
        preserve_labels=source == "appconfig" and preserve_labels,
        label=label,
        content_type=content_type)
    kv_diff = kv_comparer.compare(dest_kvs=dest_kvs, strict=strict)
    # Show indented key-value preview similar to kvset for appconfig source
    indent = 2 if source == "appconfig" else None
    need_kv_change = print_preview(kv_diff, source, yes=yes, strict=strict, title="Key Values", indent=indent)
    need_feature_change = False
    ff_diff = {}
    if strict or (src_features and not skip_features):
        all_features = __read_kv_from_config_store(azconfig_client,
                                                   key=FeatureFlagConstants.FEATURE_FLAG_PREFIX + SearchFilterOptions.ANY_KEY,
                                                   label=label if label else SearchFilterOptions.EMPTY_LABEL)
        # Append all features to dest_features list
        for feature in all_features:
            if feature.content_type == FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE:
                dest_features.append(feature)
        ff_comparer = KVComparer(
            src_kvs=src_features,
            compare_fields=CompareFieldsMap[source],
            preserve_labels=source == "appconfig" and preserve_labels,
            label=label)
        ff_diff = ff_comparer.compare(dest_kvs=dest_features, strict=strict)
        need_feature_change = print_preview(ff_diff, source, yes=yes, strict=strict, title="Feature Flags")
    # Nothing to do: neither key-values nor feature flags differ.
    if not need_kv_change and not need_feature_change:
        return
    if not yes:
        user_confirmation("Do you want to continue? \n")
    # append all feature flags to src_kvs list
    src_kvs.extend(src_features)
    # In strict mode, delete kvs with specific label that are missing from the imported file
    if strict:
        kvs_to_delete = chain(
            kv_diff.get(JsonDiff.DELETE, []),
            ff_diff.get(JsonDiff.DELETE, []))
        for kv in kvs_to_delete:
            __delete_configuration_setting_from_config_store(azconfig_client, kv)
    # import into configstore
    __write_kv_and_features_to_config_store(azconfig_client,
                                            key_values=src_kvs,
                                            label=label,
                                            preserve_labels=preserve_labels,
                                            content_type=content_type)
def export_config(cmd,
                  destination,
                  name=None,
                  connection_string=None,
                  label=None,
                  key=None,
                  prefix="",  # prefix to remove
                  yes=False,
                  skip_features=False,
                  skip_keyvault=False,
                  auth_mode="key",
                  endpoint=None,
                  # to-file parameters
                  path=None,
                  format_=None,
                  separator=None,
                  naming_convention='pascal',
                  resolve_keyvault=False,
                  profile=ImportExportProfiles.DEFAULT,
                  # to-config-store parameters
                  dest_name=None,
                  dest_connection_string=None,
                  dest_label=None,
                  preserve_labels=False,
                  dest_auth_mode="key",
                  dest_endpoint=None,
                  # to-app-service parameters
                  appservice_account=None,
                  export_as_reference=False):
    """Export key-values (and optionally feature flags) out of an App Configuration store.

    ``destination`` selects the target: 'file', 'appconfig' (another store)
    or 'appservice'. The flow mirrors ``import_config``: read the source
    store, read the destination's current state, show a diff preview,
    confirm (unless ``yes``), then write.

    ``export_as_reference`` rewrites entries as App Service app-config
    references instead of plain settings. Returns None.
    """
    src_features = []
    dest_features = []
    dest_kvs = []
    destination = destination.lower()
    profile = profile.lower()
    format_ = format_.lower() if format_ else None
    naming_convention = naming_convention.lower()
    # Source store (the one being exported FROM).
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    dest_azconfig_client = None
    if destination == 'appconfig':
        if dest_label is not None and preserve_labels:
            raise CLIErrors.MutuallyExclusiveArgumentError("Export failed! Please provide only one of these arguments: '--dest-label' or '--preserve-labels'. See 'az appconfig kv export -h' for examples.")
        if preserve_labels:
            # We need dest_label to be the same as label for preview later.
            # This will have no effect on label while writing to config store
            # as we check preserve_labels again before labelling KVs.
            dest_label = label
        dest_azconfig_client = get_appconfig_data_client(cmd, dest_name, dest_connection_string, dest_auth_mode, dest_endpoint)
    # fetch key values from user's configstore
    src_kvs = __read_kv_from_config_store(azconfig_client,
                                          key=key,
                                          label=label if label else SearchFilterOptions.EMPTY_LABEL,
                                          prefix_to_remove=prefix if not export_as_reference else "",
                                          cli_ctx=cmd.cli_ctx if resolve_keyvault else None)
    if skip_keyvault:
        # Drop Key Vault references entirely when asked to.
        src_kvs = [keyvalue for keyvalue in src_kvs if keyvalue.content_type != KeyVaultConstants.KEYVAULT_CONTENT_TYPE]
    # We need to separate KV from feature flags for the default export profile and only need to discard
    # if skip_features is true for the appconfig/kvset export profile.
    if profile == ImportExportProfiles.DEFAULT or (profile == ImportExportProfiles.KVSET and skip_features):
        __discard_features_from_retrieved_kv(src_kvs)
    # The KVSET profile writes directly to file and bypasses preview/diff below.
    if profile == ImportExportProfiles.KVSET:
        __export_kvset_to_file(file_path=path, keyvalues=src_kvs, yes=yes)
        return
    if destination == 'appservice' and export_as_reference:
        if endpoint is None:
            # Endpoint will not be None as it is already resolved in creating azconfig_client
            endpoint = get_store_endpoint_from_connection_string(connection_string) or resolve_store_metadata(cmd, name)[1]
        src_kvs = [__map_to_appservice_config_reference(kv, endpoint, prefix) for kv in src_kvs]
    if not skip_features:
        # Get all Feature flags with matching label
        if (destination == 'file' and format_ == 'properties') or destination == 'appservice':
            skip_features = True
            logger.warning("Exporting feature flags to properties file or appservice is currently not supported.")
        else:
            # src_features is a list of FeatureFlag objects
            src_features = list_feature(cmd,
                                        feature='*',
                                        label=label if label else SearchFilterOptions.EMPTY_LABEL,
                                        name=name,
                                        connection_string=connection_string,
                                        all_=True,
                                        auth_mode=auth_mode,
                                        endpoint=endpoint)
    # Read the destination's current contents so a diff can be previewed.
    if destination == 'appconfig':
        # dest_kvs contains features and KV that match the label
        dest_kvs = __read_kv_from_config_store(dest_azconfig_client,
                                               key=SearchFilterOptions.ANY_KEY,
                                               label=dest_label if dest_label else SearchFilterOptions.EMPTY_LABEL)
        __discard_features_from_retrieved_kv(dest_kvs)
        if not skip_features:
            # Append all features to dest_features list
            dest_features = list_feature(cmd,
                                         feature='*',
                                         label=dest_label if dest_label else SearchFilterOptions.EMPTY_LABEL,
                                         name=dest_name,
                                         connection_string=dest_connection_string,
                                         all_=True,
                                         auth_mode=dest_auth_mode,
                                         endpoint=dest_endpoint)
    elif destination == 'appservice':
        dest_kvs = __read_kv_from_app_service(cmd, appservice_account=appservice_account)
    kv_comparer = KVComparer(
        src_kvs=src_kvs,
        compare_fields=CompareFieldsMap[destination],
        preserve_labels=destination == "appconfig" and preserve_labels,
        label=dest_label)
    kv_diff = kv_comparer.compare(dest_kvs=dest_kvs)
    # Show indented key-value preview similar to kvset for appconfig destination
    indent = 2 if destination == "appconfig" else None
    need_kv_change = print_preview(kv_diff, destination, yes=yes, title="Key Values", indent=indent)
    need_feature_change = False
    ff_diff = {}
    if src_features:
        ff_comparer = KVComparer(
            src_kvs=__convert_featureflag_list_to_keyvalue_list(src_features),
            compare_fields=CompareFieldsMap[destination],
            preserve_labels=destination == "appconfig" and preserve_labels,
            label=dest_label)
        ff_diff = ff_comparer.compare(dest_kvs=__convert_featureflag_list_to_keyvalue_list(dest_features))
        need_feature_change = print_preview(ff_diff, destination, yes=yes, title="Feature Flags")
    # Nothing differs: skip the write entirely.
    if not need_feature_change and not need_kv_change:
        return
    # if customer needs preview & confirmation
    if not yes:
        user_confirmation("Do you want to continue? \n")
    # export to destination
    if destination == 'file':
        __write_kv_and_features_to_file(file_path=path, key_values=src_kvs, features=src_features,
                                        format_=format_, separator=separator, skip_features=skip_features,
                                        naming_convention=naming_convention)
    elif destination == 'appconfig':
        __write_kv_and_features_to_config_store(dest_azconfig_client, key_values=src_kvs, features=src_features,
                                                label=dest_label, preserve_labels=preserve_labels)
    elif destination == 'appservice':
        __write_kv_to_app_service(cmd, key_values=src_kvs, appservice_account=appservice_account)
def set_key(cmd,
            key,
            name=None,
            label=None,
            content_type=None,
            tags=None,
            value=None,
            yes=False,
            connection_string=None,
            auth_mode="key",
            endpoint=None):
    """Create or update a single key-value.

    Uses an optimistic-concurrency loop: read the current setting (if any),
    build the new setting, confirm with the user, then write conditionally
    on the etag. On a 412 (precondition failed) the whole read-modify-write
    cycle is retried up to three times.

    Returns the written setting converted to a KeyValue; raises CLIError /
    CLIErrors subclasses on validation or service failure.
    """
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    if content_type:
        # Nudge users toward the purpose-built commands for special content types.
        if content_type.lower() == KeyVaultConstants.KEYVAULT_CONTENT_TYPE:
            logger.warning("There is a dedicated command to set key vault reference. 'appconfig kv set-keyvault -h'")
        elif content_type.lower() == FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE:
            logger.warning("There is a dedicated command to set feature flag. 'appconfig feature set -h'")
    retry_times = 3
    retry_interval = 1
    # Treat the empty-label sentinel the same as "no label".
    label = label if label and label != SearchFilterOptions.EMPTY_LABEL else None
    for i in range(0, retry_times):
        retrieved_kv = None
        set_kv = None
        new_kv = None
        try:
            retrieved_kv = azconfig_client.get_configuration_setting(key=key, label=label)
        except ResourceNotFoundError:
            logger.debug("Key '%s' with label '%s' not found. A new key-value will be created.", key, label)
        except HttpResponseError as exception:
            raise CLIErrors.AzureResponseError("Failed to retrieve key-values from config store. " + str(exception))
        if retrieved_kv is None:
            # Creating a new setting.
            if is_json_content_type(content_type):
                try:
                    # Ensure that provided value is valid JSON. Error out if value is invalid JSON.
                    value = 'null' if value is None else value
                    json.loads(value)
                except ValueError:
                    raise CLIErrors.ValidationError('Value "{}" is not a valid JSON object, which conflicts with the content type "{}".'.format(value, content_type))
            set_kv = ConfigurationSetting(key=key,
                                          label=label,
                                          value="" if value is None else value,
                                          content_type="" if content_type is None else content_type,
                                          tags=tags)
        else:
            # Updating: keep existing value/content-type/tags unless overridden.
            value = retrieved_kv.value if value is None else value
            content_type = retrieved_kv.content_type if content_type is None else content_type
            if is_json_content_type(content_type):
                try:
                    # Ensure that provided/existing value is valid JSON. Error out if value is invalid JSON.
                    json.loads(value)
                except (TypeError, ValueError):
                    raise CLIErrors.ValidationError('Value "{}" is not a valid JSON object, which conflicts with the content type "{}". Set the value again in valid JSON format.'.format(value, content_type))
            set_kv = ConfigurationSetting(key=key,
                                          label=label,
                                          value=value,
                                          content_type=content_type,
                                          tags=retrieved_kv.tags if tags is None else tags,
                                          read_only=retrieved_kv.read_only,
                                          etag=retrieved_kv.etag)
        # Echo the final setting to the user before writing.
        verification_kv = {
            "key": set_kv.key,
            "label": set_kv.label,
            "content_type": set_kv.content_type,
            "value": set_kv.value,
            "tags": set_kv.tags
        }
        entry = json.dumps(verification_kv, indent=2, sort_keys=True, ensure_ascii=False)
        confirmation_message = "Are you sure you want to set the key: \n" + entry + "\n"
        user_confirmation(confirmation_message, yes)
        try:
            # No etag -> the key did not exist; add instead of conditional set.
            if set_kv.etag is None:
                new_kv = azconfig_client.add_configuration_setting(set_kv)
            else:
                new_kv = azconfig_client.set_configuration_setting(set_kv, match_condition=MatchConditions.IfNotModified)
            return convert_configurationsetting_to_keyvalue(new_kv)
        except ResourceReadOnlyError:
            raise CLIError("Failed to update read only key-value. Unlock the key-value before updating it.")
        except HttpResponseError as exception:
            if exception.status_code == StatusCodes.PRECONDITION_FAILED:
                # Concurrent writer changed the setting: back off and retry the full cycle.
                logger.debug('Retrying setting %s times with exception: concurrent setting operations', i + 1)
                time.sleep(retry_interval)
            else:
                raise CLIError("Failed to set the key-value due to an exception: " + str(exception))
        except Exception as exception:
            raise CLIError("Failed to set the key-value due to an exception: " + str(exception))
    raise CLIError("Failed to set the key '{}' due to a conflicting operation.".format(key))
def set_keyvault(cmd,
                 key,
                 secret_identifier,
                 name=None,
                 label=None,
                 tags=None,
                 yes=False,
                 connection_string=None,
                 auth_mode="key",
                 endpoint=None):
    """Create or update a Key Vault reference key-value.

    The stored value is the JSON document ``{"uri": <secret_identifier>}``
    with the dedicated Key Vault content type. Uses the same optimistic
    read-confirm-write retry loop as ``set_key`` (up to three attempts on
    etag conflicts).

    Returns the written setting converted to a KeyValue.
    """
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    # Compact JSON payload referencing the Key Vault secret.
    keyvault_ref_value = json.dumps({"uri": secret_identifier}, ensure_ascii=False, separators=(',', ':'))
    retry_times = 3
    retry_interval = 1
    # Treat the empty-label sentinel the same as "no label".
    label = label if label and label != SearchFilterOptions.EMPTY_LABEL else None
    for i in range(0, retry_times):
        retrieved_kv = None
        set_kv = None
        new_kv = None
        try:
            retrieved_kv = azconfig_client.get_configuration_setting(key=key, label=label)
        except ResourceNotFoundError:
            logger.debug("Key '%s' with label '%s' not found. A new key-vault reference will be created.", key, label)
        except HttpResponseError as exception:
            raise CLIErrors.AzureResponseError("Failed to retrieve key-values from config store. " + str(exception))
        if retrieved_kv is None:
            # New reference.
            set_kv = ConfigurationSetting(key=key,
                                          label=label,
                                          value=keyvault_ref_value,
                                          content_type=KeyVaultConstants.KEYVAULT_CONTENT_TYPE,
                                          tags=tags)
        else:
            # Update: preserve tags unless overridden; carry etag for conditional write.
            set_kv = ConfigurationSetting(key=key,
                                          label=label,
                                          value=keyvault_ref_value,
                                          content_type=KeyVaultConstants.KEYVAULT_CONTENT_TYPE,
                                          tags=retrieved_kv.tags if tags is None else tags,
                                          read_only=retrieved_kv.read_only,
                                          etag=retrieved_kv.etag)
        # Echo the final setting to the user before writing.
        verification_kv = {
            "key": set_kv.key,
            "label": set_kv.label,
            "content_type": set_kv.content_type,
            "value": set_kv.value,
            "tags": set_kv.tags
        }
        entry = json.dumps(verification_kv, indent=2, sort_keys=True, ensure_ascii=False)
        confirmation_message = "Are you sure you want to set the keyvault reference: \n" + entry + "\n"
        user_confirmation(confirmation_message, yes)
        try:
            # No etag -> the key did not exist; add instead of conditional set.
            if set_kv.etag is None:
                new_kv = azconfig_client.add_configuration_setting(set_kv)
            else:
                new_kv = azconfig_client.set_configuration_setting(set_kv, match_condition=MatchConditions.IfNotModified)
            return convert_configurationsetting_to_keyvalue(new_kv)
        except ResourceReadOnlyError:
            raise CLIError("Failed to update read only key vault reference. Unlock the key vault reference before updating it.")
        except HttpResponseError as exception:
            if exception.status_code == StatusCodes.PRECONDITION_FAILED:
                # Concurrent writer changed the setting: back off and retry the full cycle.
                logger.debug('Retrying setting %s times with exception: concurrent setting operations', i + 1)
                time.sleep(retry_interval)
            else:
                raise CLIErrors.AzureResponseError("Failed to set the keyvault reference due to an exception: " + str(exception))
        except Exception as exception:
            raise CLIError("Failed to set the keyvault reference due to an exception: " + str(exception))
    raise CLIError("Failed to set the keyvault reference '{}' due to a conflicting operation.".format(key))
def delete_key(cmd,
               key,
               name=None,
               label=None,
               yes=False,
               connection_string=None,
               auth_mode="key",
               endpoint=None):
    """Delete all key-values matching the given key and label filters.

    Deletion is conditional on each entry's etag, so entries modified
    between listing and deleting are reported as conflicts rather than
    deleted. Read-only and conflicting entries are collected into an error
    summary; an HttpResponseError aborts the loop immediately.

    Returns the list of successfully deleted entries (as KeyValue objects).
    """
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    # list_configuration_settings returns kv with null label when:
    # label = ASCII null 0x00, or URL encoded %00
    # In delete, import and export commands, we treat missing --label as null label
    # In list, restore and revision commands, we treat missing --label as all labels
    entries = __read_kv_from_config_store(azconfig_client,
                                          key=key,
                                          label=label if label else SearchFilterOptions.EMPTY_LABEL)
    confirmation_message = "Found '{}' key-values matching the specified key and label. Are you sure you want to delete these key-values?".format(len(entries))
    user_confirmation(confirmation_message, yes)
    deleted_entries = []
    exception_messages = []
    for entry in entries:
        try:
            # Conditional delete: only removes the entry if it is unchanged
            # since it was listed (etag match).
            deleted_kv = azconfig_client.delete_configuration_setting(key=entry.key,
                                                                      label=entry.label,
                                                                      etag=entry.etag,
                                                                      match_condition=MatchConditions.IfNotModified)
            deleted_entries.append(convert_configurationsetting_to_keyvalue(deleted_kv))
        except ResourceReadOnlyError:
            exception = "Failed to delete read-only key-value with key '{}' and label '{}'. Unlock the key-value before deleting it.".format(entry.key, entry.label)
            exception_messages.append(exception)
        except ResourceModifiedError:
            exception = "Failed to delete key-value with key '{}' and label '{}' due to a conflicting operation.".format(entry.key, entry.label)
            exception_messages.append(exception)
        except HttpResponseError as ex:
            # Unexpected service error: stop immediately and report everything
            # accumulated so far.
            exception_messages.append(str(ex))
            raise CLIErrors.AzureResponseError('Delete operation failed. The following error(s) occurred:\n' + json.dumps(exception_messages, indent=2, ensure_ascii=False))
    # Log errors if partially succeeded
    if exception_messages:
        if deleted_entries:
            logger.error('Delete operation partially failed. The following error(s) occurred:\n%s\n',
                         json.dumps(exception_messages, indent=2, ensure_ascii=False))
        else:
            raise CLIError('Delete operation failed. \n' + json.dumps(exception_messages, indent=2, ensure_ascii=False))
    return deleted_entries
def lock_key(cmd,
             key,
             label=None,
             name=None,
             connection_string=None,
             yes=False,
             auth_mode="key",
             endpoint=None):
    """Mark a key-value as read-only, retrying on concurrent-write conflicts.

    Returns the locked setting converted to a KeyValue; raises a CLIError /
    CLIErrors subclass on failure or after exhausting the retries.
    """
    client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    max_attempts = 3
    backoff_seconds = 1
    for attempt in range(1, max_attempts + 1):
        # Re-read on every attempt so the conditional write sees a fresh etag.
        try:
            current_kv = client.get_configuration_setting(key=key, label=label)
        except ResourceNotFoundError:
            raise CLIErrors.ResourceNotFoundError("Key '{}' with label '{}' does not exist.".format(key, label))
        except HttpResponseError as exception:
            raise CLIErrors.AzureResponseError("Failed to retrieve key-values from config store. " + str(exception))
        user_confirmation("Are you sure you want to lock the key '{}' with label '{}'".format(key, label), yes)
        try:
            locked_kv = client.set_read_only(current_kv, match_condition=MatchConditions.IfNotModified)
            return convert_configurationsetting_to_keyvalue(locked_kv)
        except HttpResponseError as exception:
            if exception.status_code != StatusCodes.PRECONDITION_FAILED:
                raise CLIErrors.AzureResponseError("Failed to lock the key-value due to an exception: " + str(exception))
            # Someone else modified the setting between read and write; retry.
            logger.debug('Retrying lock operation %s times with exception: concurrent setting operations', attempt)
            time.sleep(backoff_seconds)
        except Exception as exception:
            raise CLIError("Failed to lock the key-value due to an exception: " + str(exception))
    raise CLIError("Failed to lock the key '{}' with label '{}' due to a conflicting operation.".format(key, label))
def unlock_key(cmd,
               key,
               label=None,
               name=None,
               connection_string=None,
               yes=False,
               auth_mode="key",
               endpoint=None):
    """Clear the read-only state of a key-value, retrying on concurrent-write conflicts.

    Returns the unlocked setting converted to a KeyValue; raises a CLIError /
    CLIErrors subclass on failure or after exhausting the retries.
    """
    client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    max_attempts = 3
    backoff_seconds = 1
    for attempt in range(1, max_attempts + 1):
        # Re-read on every attempt so the conditional write sees a fresh etag.
        try:
            current_kv = client.get_configuration_setting(key=key, label=label)
        except ResourceNotFoundError:
            raise CLIErrors.ResourceNotFoundError("Key '{}' with label '{}' does not exist.".format(key, label))
        except HttpResponseError as exception:
            raise CLIErrors.AzureResponseError("Failed to retrieve key-values from config store. " + str(exception))
        user_confirmation("Are you sure you want to unlock the key '{}' with label '{}'".format(key, label), yes)
        try:
            unlocked_kv = client.set_read_only(current_kv, read_only=False, match_condition=MatchConditions.IfNotModified)
            return convert_configurationsetting_to_keyvalue(unlocked_kv)
        except HttpResponseError as exception:
            if exception.status_code != StatusCodes.PRECONDITION_FAILED:
                raise CLIErrors.AzureResponseError("Failed to unlock the key-value due to an exception: " + str(exception))
            # Someone else modified the setting between read and write; retry.
            logger.debug('Retrying unlock operation %s times with exception: concurrent setting operations', attempt)
            time.sleep(backoff_seconds)
        except Exception as exception:
            raise CLIError("Failed to unlock the key-value due to an exception: " + str(exception))
    raise CLIError("Failed to unlock the key '{}' with label '{}' due to a conflicting operation.".format(key, label))
def show_key(cmd,
             key,
             name=None,
             label=None,
             datetime=None,
             connection_string=None,
             auth_mode="key",
             endpoint=None):
    """Show a single key-value, optionally at a point in time (``datetime``).

    Returns the setting converted to a KeyValue. Raises
    CLIErrors.ResourceNotFoundError when the entry does not exist and
    CLIErrors.AzureResponseError on other service failures.

    Note: the final ``raise CLIError(...)`` of the original implementation
    was removed — every path inside the try either returns or raises, so it
    was unreachable dead code.
    """
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    try:
        key_value = azconfig_client.get_configuration_setting(key=key, label=label, accept_datetime=datetime)
        # Defensive: some SDK paths may return None instead of raising.
        if key_value is None:
            raise CLIErrors.ResourceNotFoundError("The key-value does not exist.")
        return convert_configurationsetting_to_keyvalue(key_value)
    except ResourceNotFoundError:
        # azure.core's not-found error; re-raise as the CLI's own type.
        raise CLIErrors.ResourceNotFoundError("Key '{}' with label '{}' does not exist.".format(key, label))
    except HttpResponseError as exception:
        raise CLIErrors.AzureResponseError('Failed to retrieve key-values from config store. ' + str(exception))
def list_key(cmd,
             key=None,
             fields=None,
             name=None,
             label=None,
             datetime=None,
             snapshot=None,
             connection_string=None,
             top=None,
             all_=False,
             resolve_keyvault=False,
             auth_mode="key",
             endpoint=None):
    """List key-values matching the given filters.

    Missing ``key``/``label`` default to "any". ``snapshot`` is mutually
    exclusive with the key/label/datetime filters, and ``fields`` with
    ``resolve_keyvault``.
    """
    if fields and resolve_keyvault:
        raise CLIErrors.MutuallyExclusiveArgumentError("Please provide only one of these arguments: '--fields' or '--resolve-keyvault'. See 'az appconfig kv list -h' for examples.")
    if snapshot and (key or label or datetime):
        raise CLIErrors.MutuallyExclusiveArgumentError("'snapshot' cannot be specified with 'key', 'label', or 'datetime' filters.")
    client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    key_filter = key if key else SearchFilterOptions.ANY_KEY
    label_filter = label if label else SearchFilterOptions.ANY_LABEL
    return __read_kv_from_config_store(client,
                                       key=key_filter,
                                       label=label_filter,
                                       datetime=datetime,
                                       snapshot=snapshot,
                                       fields=fields,
                                       top=top,
                                       all_=all_,
                                       cli_ctx=cmd.cli_ctx if resolve_keyvault else None)
def restore_key(cmd,
                datetime,
                key=None,
                name=None,
                label=None,
                connection_string=None,
                yes=False,
                auth_mode="key",
                endpoint=None):
    """Restore key-values to their state at ``datetime``.

    Reads both the historical snapshot and the current state, computes a
    strict diff (adds, updates, deletes), previews it, then applies the
    changes. Per-item read-only/conflict failures are accumulated and
    logged; an HttpResponseError aborts with a combined error report.

    Returns None.
    """
    azconfig_client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    exception_messages = []
    # State as of the requested point in time.
    restore_keyvalues = __read_kv_from_config_store(azconfig_client,
                                                    key=key if key else SearchFilterOptions.ANY_KEY,
                                                    label=label if label else SearchFilterOptions.ANY_LABEL,
                                                    datetime=datetime)
    # Present-day state for the same filters.
    current_keyvalues = __read_kv_from_config_store(azconfig_client,
                                                    key=key if key else SearchFilterOptions.ANY_KEY,
                                                    label=label if label else SearchFilterOptions.ANY_LABEL)
    try:
        # strict=True so entries that exist now but not in the snapshot are deleted.
        comparer = KVComparer(restore_keyvalues, CompareFieldsMap["restore"])
        restore_diff = comparer.compare(current_keyvalues, strict=True)
        need_change = __print_restore_preview(restore_diff, yes=yes)
        if not yes:
            if need_change is False:
                logger.debug('Canceling the restore operation based on user selection.')
                return
            user_confirmation("Do you want to continue? \n")
        kvs_to_restore = restore_diff.get(JsonDiff.ADD, [])
        kvs_to_modify = [update["new"] for update in restore_diff.get(JsonDiff.UPDATE, [])]
        kvs_to_delete = restore_diff.get(JsonDiff.DELETE, [])
        keys_to_restore = len(kvs_to_restore) + len(kvs_to_modify) + len(kvs_to_delete)
        restored_so_far = 0
        # Re-create added entries and overwrite updated ones.
        for kv in chain(kvs_to_restore, kvs_to_modify):
            set_kv = convert_keyvalue_to_configurationsetting(kv)
            try:
                azconfig_client.set_configuration_setting(set_kv)
                restored_so_far += 1
            except ResourceReadOnlyError:
                exception = "Failed to update read-only key-value with key '{}' and label '{}'. Unlock the key-value before updating it.".format(set_kv.key, set_kv.label)
                exception_messages.append(exception)
            except ResourceModifiedError:
                exception = "Failed to update key-value with key '{}' and label '{}' due to a conflicting operation.".format(set_kv.key, set_kv.label)
                exception_messages.append(exception)
        # Remove entries that did not exist at the restore point.
        for kv in kvs_to_delete:
            try:
                # Conditional on etag so concurrently-modified entries are
                # reported instead of silently deleted.
                azconfig_client.delete_configuration_setting(key=kv.key,
                                                             label=kv.label,
                                                             etag=kv.etag,
                                                             match_condition=MatchConditions.IfNotModified)
                restored_so_far += 1
            except ResourceReadOnlyError:
                exception = "Failed to delete read-only key-value with key '{}' and label '{}'. Unlock the key-value before deleting it.".format(kv.key, kv.label)
                exception_messages.append(exception)
            except ResourceModifiedError:
                exception = "Failed to delete key-value with key '{}' and label '{}' due to a conflicting operation.".format(kv.key, kv.label)
                exception_messages.append(exception)
        if restored_so_far != keys_to_restore:
            logger.error('Failed after restoring %d out of %d keys. The following error(s) occurred:\n%s\n',
                         restored_so_far, keys_to_restore, json.dumps(exception_messages, indent=2, ensure_ascii=False))
        else:
            logger.debug('Successfully restored %d out of %d keys', restored_so_far, keys_to_restore)
        return
    except HttpResponseError as ex:
        exception_messages.append(str(ex))
        raise CLIError('Restore operation failed. The following error(s) occurred:\n' + json.dumps(exception_messages, indent=2, ensure_ascii=False))
def list_revision(cmd,
                  key=None,
                  fields=None,
                  name=None,
                  label=None,
                  datetime=None,
                  connection_string=None,
                  top=None,
                  all_=False,
                  auth_mode="key",
                  endpoint=None):
    """List revisions of key-values matching the given filters.

    Returns up to ``top`` revisions (default 100; unbounded with ``all_``),
    either as full KeyValue objects or, when ``fields`` is given, as dicts
    restricted to those fields.
    """
    client = get_appconfig_data_client(cmd, name, connection_string, auth_mode, endpoint)
    key_filter = key if key else SearchFilterOptions.ANY_KEY
    label_filter = prep_label_filter_for_url_encoding(label if label else SearchFilterOptions.ANY_LABEL)
    try:
        # Ask the service for only the requested fields, if any.
        selected_fields = [field.name.lower() for field in fields] if fields else None
        revisions_iterable = client.list_revisions(key_filter=key_filter,
                                                   label_filter=label_filter,
                                                   accept_datetime=datetime,
                                                   fields=selected_fields)
        if all_:
            limit = float('inf')
        else:
            limit = 100 if top is None else top
        results = []
        for revision in revisions_iterable:
            kv_revision = convert_configurationsetting_to_keyvalue(revision)
            if fields:
                results.append({field.name.lower(): kv_revision.__dict__[field.name.lower()]
                                for field in fields})
            else:
                results.append(kv_revision)
            if len(results) >= limit:
                break
        return results
    except HttpResponseError as ex:
        raise CLIErrors.AzureResponseError('List revision operation failed.\n' + str(ex))
|
PypiClean
|
/aws_audit-0.1.0-py3-none-any.whl/aws_audit/config/regions.py
|
import os

# Every AWS commercial region this tool audits (as of authoring).
all_regions = [
    'us-east-1',
    'us-east-2',
    'us-west-1',
    'us-west-2',
    'af-south-1',
    'ap-east-1',
    'ap-south-1',
    'ap-southeast-1',
    'ap-southeast-2',
    'ap-northeast-1',
    'ap-northeast-2',
    'ca-central-1',
    'eu-central-1',
    'eu-west-1',
    'eu-west-2',
    'eu-west-3',
    'eu-north-1',
    'me-south-1',
    'sa-east-1'
]

# Several services were listed with the identical 18-region set
# (everything except 'af-south-1'); derive it once instead of repeating
# the same literal list per service. Sharing one list object matches the
# existing style, where many entries already alias ``all_regions``.
_all_but_af_south = [r for r in all_regions if r != 'af-south-1']

# Map of service name -> regions in which to audit that service.
# Global services (route53, iam, s3, shield, cloudfront) are queried once
# via 'us-east-1'.
regions = {
    "acm": all_regions,
    "kms": all_regions,
    "secretsmanager": all_regions,
    "route53": ['us-east-1'],
    "ec2": all_regions,
    "rds": all_regions,
    "es": all_regions,
    "elbv2": all_regions,
    "firehose": _all_but_af_south,
    "fsx": [
        'us-east-1',
        'us-east-2',
        'us-west-1',
        'us-west-2',
        'ap-east-1',
        'ap-southeast-1',
        'ap-southeast-2',
        'ap-northeast-1',
        'ap-northeast-2',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'eu-north-1'
    ],
    "kinesis": all_regions,
    "mq": _all_but_af_south,
    "ses": [
        'us-east-1',
        'us-west-2',
        'ap-south-1',
        'ap-southeast-2',
        'ca-central-1',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'sa-east-1'
    ],
    "sns": all_regions,
    "sqs": all_regions,
    "transfer": [
        'us-east-1',
        'us-east-2',
        'us-west-1',
        'us-west-2',
        'ap-south-1',
        'ap-southeast-1',
        'ap-southeast-2',
        'ap-northeast-1',
        'ap-northeast-2',
        'ca-central-1',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'eu-west-3',
        'eu-north-1',
        'sa-east-1'
    ],
    "iam": ['us-east-1'],
    "backup": _all_but_af_south,
    "cloudtrail": all_regions,
    "config": all_regions,
    "dms": all_regions,
    "documentdb": [
        'us-east-1',
        'us-east-2',
        'us-west-2',
        'ap-south-1',
        'ap-southeast-1',
        'ap-southeast-2',
        'ap-northeast-1',
        'ap-northeast-2',
        'ca-central-1',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'eu-west-3'
    ],
    "dynamodb": all_regions,
    "ecr": all_regions,
    "efs": _all_but_af_south,
    "elasticache": all_regions,
    "emr": all_regions,
    "lambda": all_regions,
    "kafka": _all_but_af_south,
    "neptune": [
        'us-east-1',
        'us-east-2',
        'us-west-2',
        'ap-south-1',
        'ap-southeast-1',
        'ap-southeast-2',
        'ap-northeast-1',
        'ap-northeast-2',
        'ca-central-1',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'eu-west-3',
        'eu-north-1',
        'me-south-1'
    ],
    "redshift": all_regions,
    "s3": ['us-east-1'],
    "ssm": all_regions,
    "sagemaker": _all_but_af_south,
    "xray": all_regions,
    "shield": ['us-east-1'],
    "eks": [
        'us-east-1',
        'us-east-2',
        'us-west-2',
        'ap-east-1',
        'ap-south-1',
        'ap-southeast-1',
        'ap-southeast-2',
        'ap-northeast-1',
        'ap-northeast-2',
        'ca-central-1',
        'eu-central-1',
        'eu-west-1',
        'eu-west-2',
        'eu-west-3',
        'eu-north-1',
        'me-south-1',
        'sa-east-1'
    ],
    "cloudfront": ['us-east-1'],
    "apigateway": all_regions
}
|
PypiClean
|
/pylops_gpu-0.0.1.tar.gz/pylops_gpu-0.0.1/pylops_gpu/basicoperators/VStack.py
|
import torch
import numpy as np
from scipy.sparse.linalg.interface import _get_dtype
from pylops_gpu.LinearOperator import LinearOperator
class VStack(LinearOperator):
    r"""Vertical stacking.

    Stack a set of N linear operators vertically.

    Parameters
    ----------
    ops : :obj:`list`
        Linear operators to be stacked
    device : :obj:`str`, optional
        Device to be used
    togpu : :obj:`tuple`, optional
        Move model and data from cpu to gpu prior to applying ``matvec`` and
        ``rmatvec``, respectively (only when ``device='gpu'``)
    tocpu : :obj:`tuple`, optional
        Move data and model from gpu to cpu after applying ``matvec`` and
        ``rmatvec``, respectively (only when ``device='gpu'``)
    dtype : :obj:`str`, optional
        Type of elements in input array

    Attributes
    ----------
    shape : :obj:`tuple`
        Operator shape
    explicit : :obj:`bool`
        Operator contains a matrix that can be solved explicitly (``True``) or
        not (``False``)

    Notes
    -----
    Refer to :class:`pylops.basicoperators.VStack` for
    implementation details.
    """
    def __init__(self, ops, device='cpu', togpu=(False, False),
                 tocpu=(False, False), dtype=torch.float32):
        self.ops = ops
        # Number of rows contributed by each stacked operator.
        # NOTE: use the builtin ``int`` dtype here; ``np.int`` was deprecated
        # in NumPy 1.20 and removed in 1.24, where it raises AttributeError.
        nops = np.zeros(len(ops), dtype=int)
        for iop, oper in enumerate(ops):
            nops[iop] = oper.shape[0]
        self.nops = nops.sum()
        # All stacked operators must share the same number of columns; take
        # it from the first one.
        self.mops = ops[0].shape[1]
        # Cumulative row offsets (with a leading 0) used to slice the stacked
        # data vector into per-operator segments.
        self.nnops = np.insert(np.cumsum(nops), 0, 0)
        self.shape = (self.nops, self.mops)
        self.device = device
        self.togpu = togpu
        self.tocpu = tocpu
        self.dtype = dtype
        self.explicit = False
        self.Op = None

    def _matvec(self, x):
        # Forward: apply each operator to x and write its result into the
        # corresponding row segment of the output vector.
        y = torch.zeros(self.nops, dtype=self.dtype)
        for iop, oper in enumerate(self.ops):
            y[self.nnops[iop]:self.nnops[iop + 1]] = oper.matvec(x).squeeze()
        return y

    def _rmatvec(self, x):
        # Adjoint: accumulate each operator's rmatvec applied to its row
        # segment of x.
        y = torch.zeros(self.mops, dtype=self.dtype)
        for iop, oper in enumerate(self.ops):
            y += oper.rmatvec(x[self.nnops[iop]:self.nnops[iop + 1]]).squeeze()
        return y
|
PypiClean
|
/django-siteajax-1.0.0.tar.gz/django-siteajax-1.0.0/docs/source/advanced.rst
|
Advanced
========
Additional info from the client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ``request.ajax`` object is a ``siteajax.toolbox.Ajax`` instance. It gives you access
to additional information received from the client:
* ``ajax.is_used`` - a flag indicating whether the request is Ajax or not
* ``ajax.restore_history`` - indicates the client side is requesting the entire page
(as opposed to a page fragment request), when the client was unable to restore a browser history state
from the cache.
* ``ajax.url`` - URL from the browser
* ``ajax.target`` - the id of the target element if it exists
* ``ajax.user_input`` - user input given to a prompt (hx-prompt)
* ``ajax.source`` - info about the element that triggered (sourced) the request (``id`` and ``name`` if any)
.. note:: The object is lazily initialized to allow faster
middleware processing.
Without initialization you won't be able to access its attributes.
For the initialization it's enough to check it in boolean context, e.g.::
bool(Ajax(request))
# or
if request.ajax:
...
Driving the client
~~~~~~~~~~~~~~~~~~
Wrap your response into ``siteajax.toolbox.AjaxResponse`` to be able to instruct
your client to do things:
.. code-block:: python
from django.shortcuts import render
from siteajax.toolbox import Ajax, AjaxResponse
def index_page(request):
response = render(request, 'some.html')
# Wrap it
response = AjaxResponse(response)
# Let's trigger `fireThis` event after `swap` step
response.trigger_event(name='fireThis', kwargs={'count': len(news)}, step='swap')
# Add an item to browser history
response.history_item = '/otherurl/'
# Redirect with JS
response.redirect = '/here/'
# Refresh current page
response.refresh = True
return response
CSRF protection
~~~~~~~~~~~~~~~
Include ``siteajax/init_csrf.js`` in your template (page's ``body``) to initialize CSRF
token required to ``POST``, ``PUT``, ``DELETE``.
.. code-block:: html
<script>{% include "siteajax/init_csrf.js" %}</script>
Include htmx from CDN
~~~~~~~~~~~~~~~~~~~~~
You can make use of including ``siteajax/cdn.html`` in your template (page's ``head``)
to get ``htmx`` right from a CDN.
.. code-block:: html
{% include "siteajax/cdn.html" %}
.. note:: If you're not satisfied with the version included you can always
define your own ``<script src=``.
|
PypiClean
|
/wavefront_sdk_python-2.0.0-py3-none-any.whl/wavefront_sdk/common/metrics/registry.py
|
import logging
import threading
import time
from wavefront_sdk.common.metrics import counter, deltacounter, gauge
LOGGER = logging.getLogger('wavefront_sdk.WavefrontSdkMetricsRegistry')
# pylint: disable=too-many-instance-attributes,E0012,R0205
class WavefrontSdkMetricsRegistry(object):
    """Wavefront SDK Metrics Registry.

    Holds internal SDK metrics (counters, delta counters, gauges) and
    periodically reports them through ``wf_metric_sender`` on a daemon timer.
    """

    # pylint: disable=too-many-arguments
    def __init__(self, wf_metric_sender, source=None, tags=None, prefix=None,
                 reporting_interval_secs=60):
        """Construct Wavefront SDK Metrics Registry.

        Args:
            wf_metric_sender: Sender used to emit metrics; when falsy, no
                reporting timer is started.
            source: Source attached to every reported metric.
            tags: Point tags attached to every reported metric.
            prefix: Optional metric-name prefix ('.' is appended when set).
            reporting_interval_secs: Seconds between reporting runs.
        """
        self.wf_metric_sender = wf_metric_sender
        self.source = source
        self.tags = tags
        self.prefix = '' if not prefix else prefix + '.'
        self.reporting_interval_secs = reporting_interval_secs
        self.metrics = {}
        self._closed = False
        self._schedule_lock = threading.RLock()
        self._timer = None
        if wf_metric_sender:
            self._schedule_timer()

    def _schedule_timer(self):
        # Arm a one-shot daemon timer; it is re-armed by _run() after each
        # reporting pass unless the registry has been closed.
        if not self._closed:
            self._timer = threading.Timer(self.reporting_interval_secs,
                                          self._run)
            self._timer.daemon = True
            self._timer.start()

    # pylint: disable=broad-except
    def _report(self, timeout_secs=None):
        """Send all registered metrics, stopping early after timeout_secs."""
        timestamp = time.time()
        # Copying the dict prevents concurrent modification while iterating
        for key, val in self.metrics.copy().items():
            if timeout_secs and time.time() - timestamp > timeout_secs:
                break
            name = self.prefix + key
            try:
                if isinstance(val, gauge.WavefrontSdkGauge):
                    gauge_value = val.get_value()
                    # Compare against None (not truthiness) so a legitimate
                    # 0 / 0.0 gauge reading is still reported; only a missing
                    # value is skipped.
                    if gauge_value is not None:
                        self.wf_metric_sender.send_metric(
                            name, gauge_value, timestamp, self.source,
                            self.tags)
                elif isinstance(val, deltacounter.WavefrontSdkDeltaCounter):
                    delta_count = val.count()
                    self.wf_metric_sender.send_delta_counter(
                        name + '.count', delta_count,
                        self.source, self.tags, timestamp)
                    # Subtract what was just reported instead of resetting,
                    # so increments made during the send are not lost.
                    val.dec(delta_count)
                elif isinstance(val, counter.WavefrontSdkCounter):
                    self.wf_metric_sender.send_metric(
                        name + '.count', val.count(), timestamp,
                        self.source, self.tags)
            except Exception:
                LOGGER.warning('Unable to send internal SDK metric.')

    def _run(self):
        # Timer callback: report, then re-arm (unless closed in the interim).
        try:
            self._report()
        finally:
            with self._schedule_lock:
                if not self._closed:
                    self._schedule_timer()

    def close(self, timeout_secs=None):
        """Close Wavefront SDK Metrics Registry.

        Performs one final report (bounded by timeout_secs) and cancels any
        pending timer.
        """
        try:
            if self.wf_metric_sender:
                self._report(timeout_secs)
        finally:
            with self._schedule_lock:
                self._closed = True
                if self._timer is not None:
                    self._timer.cancel()

    def new_counter(self, name):
        """Get or create a counter from the registry."""
        return self._get_or_add(name, counter.WavefrontSdkCounter())

    def new_delta_counter(self, name):
        """Get or create a delta counter from the registry."""
        return self._get_or_add(name, deltacounter.WavefrontSdkDeltaCounter())

    def new_gauge(self, name, supplier):
        """Get or create a gauge from the registry."""
        return self._get_or_add(name, gauge.WavefrontSdkGauge(supplier))

    def _get_or_add(self, name, metric):
        # dict.setdefault performs lookup-or-insert as a single atomic
        # operation, closing the race in the previous get-then-update code
        # where two threads could register different objects for one name.
        return self.metrics.setdefault(name, metric)
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/ZhimaMerchantOrderCreditPayModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class ZhimaMerchantOrderCreditPayModel(object):
    """Model for a Zhima merchant credit-pay order request.

    Fields are exposed as properties backed by private attributes and can be
    converted to/from the flat dict form exchanged with the Alipay gateway.
    """

    def __init__(self):
        # Every field starts out unset; values arrive via the setters below.
        for field in ('coupon_amount', 'order_operate_type', 'out_order_no',
                      'out_trans_no', 'pay_amount', 'remark', 'use_coupon',
                      'zm_order_no'):
            setattr(self, '_' + field, None)

    @property
    def coupon_amount(self):
        return self._coupon_amount

    @coupon_amount.setter
    def coupon_amount(self, value):
        self._coupon_amount = value

    @property
    def order_operate_type(self):
        return self._order_operate_type

    @order_operate_type.setter
    def order_operate_type(self, value):
        self._order_operate_type = value

    @property
    def out_order_no(self):
        return self._out_order_no

    @out_order_no.setter
    def out_order_no(self, value):
        self._out_order_no = value

    @property
    def out_trans_no(self):
        return self._out_trans_no

    @out_trans_no.setter
    def out_trans_no(self, value):
        self._out_trans_no = value

    @property
    def pay_amount(self):
        return self._pay_amount

    @pay_amount.setter
    def pay_amount(self, value):
        self._pay_amount = value

    @property
    def remark(self):
        return self._remark

    @remark.setter
    def remark(self, value):
        self._remark = value

    @property
    def use_coupon(self):
        return self._use_coupon

    @use_coupon.setter
    def use_coupon(self, value):
        self._use_coupon = value

    @property
    def zm_order_no(self):
        return self._zm_order_no

    @zm_order_no.setter
    def zm_order_no(self, value):
        self._zm_order_no = value

    def to_alipay_dict(self):
        """Serialize the populated (truthy) fields into a plain dict."""
        params = dict()
        for field in ('coupon_amount', 'order_operate_type', 'out_order_no',
                      'out_trans_no', 'pay_amount', 'remark', 'use_coupon',
                      'zm_order_no'):
            value = getattr(self, field)
            if not value:
                continue
            # Nested API objects serialize themselves; plain values pass
            # through unchanged.
            if hasattr(value, 'to_alipay_dict'):
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model instance from a gateway dict; None when d is empty."""
        if not d:
            return None
        o = ZhimaMerchantOrderCreditPayModel()
        for field in ('coupon_amount', 'order_operate_type', 'out_order_no',
                      'out_trans_no', 'pay_amount', 'remark', 'use_coupon',
                      'zm_order_no'):
            if field in d:
                setattr(o, field, d[field])
        return o
|
PypiClean
|
/notebooksmz-7.0.0-py3-none-any.whl/notebook/static/components/MathJax/localization/ja/FontWarnings.js
|
MathJax.Localization.addTranslation("ja","FontWarnings",{version:"2.7.8",isLoaded:true,strings:{webFont:"MathJax \u306F\u3053\u306E\u30DA\u30FC\u30B8\u3067\u3001\u6570\u5F0F\u3092\u8868\u793A\u3059\u308B\u305F\u3081\u306B\u30A6\u30A7\u30D6 \u30D9\u30FC\u30B9\u306E\u30D5\u30A9\u30F3\u30C8\u3092\u4F7F\u7528\u3057\u3066\u3044\u307E\u3059\u3002\u30D5\u30A9\u30F3\u30C8\u306E\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9\u306B\u6642\u9593\u304C\u304B\u304B\u308B\u305F\u3081\u3001\u3042\u306A\u305F\u306E\u30B7\u30B9\u30C6\u30E0\u306E\u30D5\u30A9\u30F3\u30C8 \u30D5\u30A9\u30EB\u30C0\u30FC\u306B\u6570\u5F0F\u30D5\u30A9\u30F3\u30C8\u3092\u76F4\u63A5\u30A4\u30F3\u30B9\u30C8\u30FC\u30EB\u3059\u308B\u3053\u3068\u3067\u30DA\u30FC\u30B8\u306E\u30EC\u30F3\u30C0\u30EA\u30F3\u30B0\u304C\u3088\u308A\u901F\u304F\u306A\u308A\u307E\u3059\u3002",imageFonts:"MathJax \u306F\u30ED\u30FC\u30AB\u30EB \u30D5\u30A9\u30F3\u30C8\u3084 Web \u30D5\u30A9\u30F3\u30C8\u3067\u306F\u306A\u304F\u753B\u50CF\u30D5\u30A9\u30F3\u30C8\u3092\u4F7F\u7528\u3057\u3066\u3044\u307E\u3059\u3002\u63CF\u753B\u304C\u901A\u5E38\u3088\u308A\u9045\u3044\u304A\u305D\u308C\u304C\u3042\u308A\u3001\u30D7\u30EA\u30F3\u30BF\u30FC\u3067\u306E\u9AD8\u89E3\u50CF\u5EA6\u306E\u5370\u5237\u306B\u5411\u304B\u306A\u3044\u304A\u305D\u308C\u304C\u3042\u308A\u307E\u3059\u3002",noFonts:"MathJax \u304C\u6570\u5F0F\u306E\u8868\u793A\u306B\u4F7F\u7528\u3059\u308B\u30D5\u30A9\u30F3\u30C8\u3092\u898B\u3064\u3051\u3089\u308C\u305A\u3001\u753B\u50CF\u30D5\u30A9\u30F3\u30C8\u3082\u5229\u7528\u3067\u304D\u306A\u3044\u305F\u3081\u3001\u4EE3\u308F\u308A\u306B\u6C4E\u7528\u306E Unicode 
\u6587\u5B57\u3092\u4F7F\u7528\u3057\u3066\u3044\u307E\u3059\u3002\u3054\u4F7F\u7528\u4E2D\u306E\u30D6\u30E9\u30A6\u30B6\u30FC\u304C\u8868\u793A\u3067\u304D\u308B\u3082\u306E\u3068\u671F\u5F85\u3057\u3066\u3044\u307E\u3059\u304C\u3001\u4E00\u90E8\u306E\u6587\u5B57\u304C\u9069\u5207\u306B\u8868\u793A\u3055\u308C\u306A\u3044\u3001\u307E\u305F\u306F\u5168\u304F\u8868\u793A\u3055\u308C\u306A\u3044\u304A\u305D\u308C\u304C\u3042\u308A\u307E\u3059\u3002",webFonts:"\u591A\u304F\u306E\u30A6\u30A7\u30D6 \u30D6\u30E9\u30A6\u30B6\u30FC\u306F\u30A6\u30A7\u30D6\u304B\u3089\u30D5\u30A9\u30F3\u30C8\u3092\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9\u3067\u304D\u307E\u3059\u3002\u3054\u4F7F\u7528\u4E2D\u306E\u30D6\u30E9\u30A6\u30B6\u30FC\u3092\u3088\u308A\u65B0\u3057\u3044\u30D0\u30FC\u30B8\u30E7\u30F3\u306B\u66F4\u65B0\u3059\u308B (\u307E\u305F\u306F\u5225\u306E\u30D6\u30E9\u30A6\u30B6\u30FC\u306B\u5909\u66F4\u3059\u308B) \u3053\u3068\u3067\u3001\u3053\u306E\u30DA\u30FC\u30B8\u306E\u6570\u5F0F\u306E\u54C1\u8CEA\u304C\u5411\u4E0A\u3059\u308B\u53EF\u80FD\u6027\u304C\u3042\u308A\u307E\u3059\u3002",fonts:"MathJax \u3067\u306F [STIX \u30D5\u30A9\u30F3\u30C8](%1)\u3084 [MathJax Tex \u30D5\u30A9\u30F3\u30C8](%2)\u3092\u4F7F\u7528\u3067\u304D\u307E\u3059\u3002MathJax \u4F53\u9A13\u3092\u6539\u5584\u3059\u308B\u305F\u3081\u306B\u3001\u30D5\u30A9\u30F3\u30C8\u3092\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9\u304A\u3088\u3073\u30A4\u30F3\u30B9\u30C8\u30FC\u30EB\u3057\u3066\u304F\u3060\u3055\u3044\u3002",STIXPage:"\u3053\u306E\u30DA\u30FC\u30B8\u306F [STIX \u30D5\u30A9\u30F3\u30C8](%1)\u3092\u4F7F\u7528\u3059\u308B\u3088\u3046\u306B\u8A2D\u8A08\u3055\u308C\u3066\u3044\u307E\u3059\u3002MathJax \u4F53\u9A13\u3092\u6539\u5584\u3059\u308B\u305F\u3081\u306B\u3001\u30D5\u30A9\u30F3\u30C8\u3092\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9\u304A\u3088\u3073\u30A4\u30F3\u30B9\u30C8\u30FC\u30EB\u3057\u3066\u304F\u3060\u3055\u3044\u3002",TeXPage:"\u3053\u306E\u30DA\u30FC\u30B8\u306F [MathJax TeX 
\u30D5\u30A9\u30F3\u30C8](%1)\u3092\u4F7F\u7528\u3059\u308B\u3088\u3046\u306B\u8A2D\u8A08\u3055\u308C\u3066\u3044\u307E\u3059\u3002MathJax \u4F53\u9A13\u3092\u6539\u5584\u3059\u308B\u305F\u3081\u306B\u3001\u30D5\u30A9\u30F3\u30C8\u3092\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9\u304A\u3088\u3073\u30A4\u30F3\u30B9\u30C8\u30FC\u30EB\u3057\u3066\u304F\u3060\u3055\u3044\u3002"}});MathJax.Ajax.loadComplete("[MathJax]/localization/ja/FontWarnings.js");
|
PypiClean
|
/gaeframework-2.0.10.tar.gz/gaeframework-2.0.10/google_appengine/google/appengine/tools/gen_protorpc.py
|
import errno
import logging
import optparse
import os
import sys
from protorpc import descriptor
from protorpc import generate_python
from protorpc import protobuf
from protorpc import registry
from protorpc import transport
from protorpc import util
# Packages that ship with ProtoRPC itself; descriptors for these are never
# regenerated, since that would shadow the real implementation modules.
EXCLUDED_PACKAGES = frozenset(['protorpc.registry',
                               'protorpc.messages',
                               'protorpc.descriptor',
                               'protorpc.message_types',
                               ])
# Maps command-line command name -> handler function (populated by @command).
commands = {}
def usage():
    """Print usage help and exit with an error code."""
    parser.print_help()
    # Exit status 2 conventionally signals a command-line usage error.
    sys.exit(2)
def fatal_error(message):
    """Write *message* to stderr and terminate the process.

    Args:
      message: Message to print to stderr before exiting with status 1.
    """
    sys.stderr.write(message)
    sys.exit(1)
def open_input_file(filename):
    """Open file for reading.

    Args:
      filename: Name of input file to open or None to open stdin.

    Returns:
      Opened binary file if a name was provided, stdin if filename is None.
    """
    # TODO(rafek): Detect missing or invalid files, generating user friendly
    # error messages.
    if filename is None:
        return sys.stdin
    try:
        return open(filename, 'rb')
    except IOError as err:
        # 'except ... as' works on Python 2.6+ and 3.x; the old comma form
        # is a syntax error on Python 3.
        fatal_error(str(err))
def generate_file_descriptor(dest_dir, file_descriptor):
    """Generate a single file descriptor to destination directory.

    Will generate a single Python file from a file descriptor under dest_dir.
    The sub-directory where the file is generated is determined by the package
    name of descriptor.

    Descriptors without package names will not be generated.
    Descriptors that are part of the ProtoRPC distribution will not be
    generated.

    Args:
      dest_dir: Directory under which to generate files.
      file_descriptor: FileDescriptor instance to generate source code from.
    """
    package = file_descriptor.package
    if not package:
        # TODO(rafek): Option to cause an error on this condition.
        # logging.warning: logging.warn is a deprecated alias.
        logging.warning('Will not generate descriptor without package name')
        return
    if package in EXCLUDED_PACKAGES:
        # Lazy %-args avoid formatting when the level is disabled.
        logging.warning('Will not generate main ProtoRPC class %s', package)
        return
    package_path = package.split('.')
    directory = package_path[:-1]
    package_file_name = package_path[-1]
    directory_name = os.path.join(dest_dir, *directory)
    output_file_name = os.path.join(directory_name,
                                    '%s.py' % (package_file_name,))
    try:
        os.makedirs(directory_name)
    except OSError as err:
        # Directory already existing is fine; anything else is fatal.
        if err.errno != errno.EEXIST:
            raise
    logging.info('Writing package %s to %s',
                 file_descriptor.package, output_file_name)
    # TODO(rafek): Option to prevent overwriting.
    # 'with' guarantees the output file is flushed and closed (the previous
    # code leaked the handle).
    with open(output_file_name, 'w') as output_file:
        generate_python.format_python_file(file_descriptor, output_file)
@util.positional(1)
def command(name, required=(), optional=()):
    """Decorator used for declaring commands used on command line.

    Each command of this tool can have any number of sequential required
    parameters and optional parameters.  The required and optional parameters
    will be displayed in the command usage.  Arguments passed in to the command
    are checked to ensure they have at least the required parameters and not
    too many parameters beyond the optional ones.  When there are not enough
    or too many parameters the usage message is generated and the program
    exits with an error code.

    Functions decorated thus are added to commands by their name.

    Resulting decorated functions will have required and optional attributes
    assigned to them so that they appear in the usage message.

    Args:
      name: Name of command that will follow the program name on the command
        line.
      required: List of required parameter names as displayed in the usage
        message.
      optional: List of optional parameter names as displayed in the usage
        message.
    """
    def check_params_decorator(function):
        def check_params_wrapper(options, *args):
            # A call is valid when it supplies at least every required
            # parameter and at most required + optional parameters.  (The
            # previous upper bound compared against len(optional) alone,
            # wrongly rejecting calls that used all optional parameters of a
            # command that also has required ones.)
            if not (len(required) <= len(args)
                    <= len(required) + len(optional)):
                sys.stderr.write("Incorrect usage for command '%s'\n\n" % name)
                usage()
            function(options, *args)
        check_params_wrapper.required = required
        check_params_wrapper.optional = optional
        commands[name] = check_params_wrapper
        return check_params_wrapper
    return check_params_decorator
@command('file', optional=['input-filename', 'output-filename'])
def file_command(options, input_filename=None, output_filename=None):
    """Generate a single descriptor file to Python.

    Args:
      options: Parsed command line options.
      input_filename: File to read protobuf FileDescriptor from.  If None
        will read from stdin.
      output_filename: File to write Python source code to.  If None will
        generate to stdout.
    """
    with open_input_file(input_filename) as input_file:
        descriptor_content = input_file.read()
    # Decode before touching the output so a parse failure cannot leave a
    # truncated output file behind.
    file_descriptor = protobuf.decode_message(descriptor.FileDescriptor,
                                              descriptor_content)
    if output_filename:
        # 'with' guarantees the file is flushed and closed (the previous
        # code leaked the handle).
        with open(output_filename, 'w') as output_file:
            generate_python.format_python_file(file_descriptor, output_file)
    else:
        generate_python.format_python_file(file_descriptor, sys.stdout)
@command('fileset', optional=['filename'])
def fileset_command(options, input_filename=None):
    """Generate source directory structure from FileSet.

    Args:
      options: Parsed command line options.
      input_filename: File to read protobuf FileSet from.  If None will read
        from stdin.
    """
    with open_input_file(input_filename) as stream:
        raw_descriptor = stream.read()
    dest_dir = os.path.expanduser(options.dest_dir)
    if os.path.exists(dest_dir) and not os.path.isdir(dest_dir):
        fatal_error("Destination '%s' is not a directory" % dest_dir)
    parsed_set = protobuf.decode_message(descriptor.FileSet, raw_descriptor)
    for fd in parsed_set.files:
        generate_file_descriptor(dest_dir, fd)
@command('registry',
         required=['host'],
         optional=['service-name', 'registry-path'])
def registry_command(options,
                     host,
                     service_name=None,
                     registry_path='/protorpc'):
    """Generate source directory structure from remote registry service.

    Args:
      options: Parsed command line options.
      host: Web service host where registry service is located.  May include
        port.
      service_name: Name of specific service to read.  Will generate only
        Python files that service is dependent on.  If None, will generate
        source code for all services known by the registry.
      registry_path: Path to find registry if not the default 'protorpc'.
    """
    dest_dir = os.path.expanduser(options.dest_dir)
    registry_url = 'http://%s%s' % (host, registry_path)
    stub = registry.RegistryService.Stub(transport.HttpTransport(registry_url))
    if service_name is not None:
        wanted_services = [service_name]
    else:
        # No explicit service: generate for everything the registry knows.
        wanted_services = [service.name for service in stub.services().services]
    file_set = stub.get_file_set(names=wanted_services).file_set
    for fd in file_set.files:
        generate_file_descriptor(dest_dir, fd)
def make_opt_parser():
    """Create options parser with automatically generated command help.

    Will iterate over all functions in commands and generate an appropriate
    usage message for them with all their required and optional parameters.
    """
    command_descriptions = []
    # sorted(commands) iterates key names on both Python 2 and 3;
    # dict.iterkeys() no longer exists on Python 3.
    for name in sorted(commands):
        # Local renamed from 'command' to avoid shadowing the module-level
        # @command decorator.
        cmd = commands[name]
        params = ' '.join(['<%s>' % param for param in cmd.required] +
                          ['[<%s>]' % param for param in cmd.optional])
        command_descriptions.append('%%prog [options] %s %s' % (name, params))
    command_usage = 'usage: %s\n' % '\n '.join(command_descriptions)
    parser = optparse.OptionParser(usage=command_usage)
    parser.add_option('-d', '--dest_dir',
                      dest='dest_dir',
                      default=os.getcwd(),
                      help='Write generated files to DIR',
                      metavar='DIR')
    return parser
parser = make_opt_parser()
def main():
    """Parse the command line and dispatch to the selected command handler."""
    # TODO(rafek): Customize verbosity.
    logging.basicConfig(level=logging.INFO)
    options, positional = parser.parse_args()
    if not positional:
        usage()
    command_name, parameters = positional[0], positional[1:]
    handler = commands.get(command_name)
    if handler is None:
        sys.stderr.write("Unknown command '%s'\n\n" % command_name)
        usage()
    handler(options, *parameters)
# Standard script entry-point guard: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
|
PypiClean
|
/swh.web-0.2.37.tar.gz/swh.web-0.2.37/swh/web/browse/assets/origin_visits/visits-histogram.js
|
import {visitStatusColor} from './utils';
/**
 * Render a stacked histogram of origin visits binned by month and colored by
 * visit status into the given container, with clickable year ranges.
 *
 * @param {*} container - element (passed to d3.select) that receives the SVG
 * @param {Array} visitsData - visit objects; each is read for its `date`
 *     (used for month binning) and `status` (used for stacking/coloring)
 * @param {number} currentYear - year initially highlighted
 * @param {Function} yearClickCallback - invoked with the newly selected year
 *     when the user clicks a year range or a histogram bar
 */
export async function createVisitsHistogram(container, visitsData, currentYear, yearClickCallback) {
  // d3 is loaded lazily into its own webpack chunk
  const d3 = await import(/* webpackChunkName: "d3" */ 'utils/d3');

  // remove previously created histogram and tooltip if any
  d3.select(container).select('svg').remove();
  d3.select('div.d3-tooltip').remove();

  // histogram size and margins
  let width = 1000;
  let height = 200;
  const margin = {top: 20, right: 80, bottom: 30, left: 50};

  // create responsive svg
  const svg = d3.select(container)
                .attr('style',
                      'padding-bottom: ' + Math.ceil(height * 100 / width) + '%')
                .append('svg')
                .attr('viewBox', '0 0 ' + width + ' ' + height);

  // create tooltip div
  const tooltip = d3.select('body')
                    .append('div')
                    .attr('class', 'd3-tooltip')
                    .style('opacity', 0);

  // update width and height without margins
  width = width - margin.left - margin.right;
  height = height - margin.top - margin.bottom;

  // create main svg group element
  const g = svg.append('g').attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');

  // create x scale
  const x = d3.scaleTime().rangeRound([0, width]);

  // create y scale
  const y = d3.scaleLinear().range([height, 0]);

  // create ordinal colorscale mapping visit status
  const colors = d3.scaleOrdinal()
                   .domain(Object.keys(visitStatusColor))
                   .range(Object.values(visitStatusColor));

  // first swh crawls were made in 2015
  const startYear = 2015;
  // set latest display year as the current one
  const now = new Date();
  const endYear = now.getUTCFullYear() + 1;
  const monthExtent = [new Date(Date.UTC(startYear, 0, 1)), new Date(Date.UTC(endYear, 0, 1))];

  // create months bins based on setup extent
  const monthBins = d3.timeMonths(d3.timeMonth.offset(monthExtent[0], -1), monthExtent[1]);
  // create years bins based on setup extent
  const yearBins = d3.timeYears(monthExtent[0], monthExtent[1]);

  // set x scale domain
  x.domain(d3.extent(monthBins));

  // use D3 histogram layout to create a function that will bin the visits by month
  const binByMonth = d3.histogram()
                       .value(d => d.date)
                       .domain(x.domain())
                       .thresholds(monthBins);

  // use D3 nest function to group the visits by status
  const visitsByStatus = d3.groups(visitsData, d => d['status'])
                           .sort((a, b) => d3.ascending(a[0], b[0]));

  // prepare data in order to be able to stack visit statuses by month
  const statuses = [];
  const histData = [];
  for (let i = 0; i < monthBins.length; ++i) {
    histData[i] = {};
  }
  visitsByStatus.forEach(entry => {
    statuses.push(entry[0]);
    const monthsData = binByMonth(entry[1]);
    for (let i = 0; i < monthsData.length; ++i) {
      histData[i]['x0'] = monthsData[i]['x0'];
      histData[i]['x1'] = monthsData[i]['x1'];
      histData[i][entry[0]] = monthsData[i];
    }
  });

  // create function to stack visits statuses by month
  const stacked = d3.stack()
                    .keys(statuses)
                    .value((d, key) => d[key].length);

  // compute the maximum amount of visits by month
  const yMax = d3.max(histData, d => {
    let total = 0;
    for (let i = 0; i < statuses.length; ++i) {
      total += d[statuses[i]].length;
    }
    return total;
  });

  // set y scale domain
  y.domain([0, yMax]);

  // compute ticks values for the y axis (at most 8 ticks)
  const step = Math.floor(yMax / 8) + 1;
  const yTickValues = [];
  for (let i = 0; i <= yMax / step; ++i) {
    yTickValues.push(i * step);
  }
  if (yTickValues.length === 0) {
    for (let i = 0; i <= yMax; ++i) {
      yTickValues.push(i);
    }
  } else if (yMax % step !== 0) {
    yTickValues.push(yMax);
  }

  // add histogram background grid
  g.append('g')
   .attr('class', 'grid')
   .call(d3.axisLeft(y)
           .tickValues(yTickValues)
           .tickSize(-width)
           .tickFormat(''));

  // create one fill only rectangle by displayed year
  // each rectangle will be made visible when hovering the mouse over a year range
  // user will then be able to select a year by clicking in the rectangle
  g.append('g')
   .selectAll('rect')
   .data(yearBins)
   .enter().append('rect')
   .attr('class', d => 'year' + d.getUTCFullYear())
   .attr('fill', 'red')
   .attr('fill-opacity', d => d.getUTCFullYear() === currentYear ? 0.3 : 0)
   .attr('stroke', 'none')
   .attr('x', d => {
     const date = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
     return x(date);
   })
   .attr('y', 0)
   .attr('height', height)
   .attr('width', d => {
     const date = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
     const yearWidth = x(d3.timeYear.offset(date, 1)) - x(date);
     return yearWidth;
   })
   // mouse event callbacks used to show rectangle years
   // when hovering the mouse over the histograms
   .on('mouseover', (event, d) => {
     svg.selectAll('rect.year' + d.getUTCFullYear())
        .attr('fill-opacity', 0.5);
   })
   .on('mouseout', (event, d) => {
     svg.selectAll('rect.year' + d.getUTCFullYear())
        .attr('fill-opacity', 0);
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0.3);
   })
   // callback to select a year after a mouse click
   // in a rectangle year
   .on('click', (event, d) => {
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0);
     svg.selectAll('rect.yearoutline' + currentYear)
        .attr('stroke', 'none');
     currentYear = d.getUTCFullYear();
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0.5);
     svg.selectAll('rect.yearoutline' + currentYear)
        .attr('stroke', 'black');
     yearClickCallback(currentYear);
   });

  // create the stacked histogram of visits
  g.append('g')
   .selectAll('g')
   .data(stacked(histData))
   .enter().append('g')
   .attr('fill', d => colors(d.key))
   .selectAll('rect')
   .data(d => d)
   .enter().append('rect')
   .attr('class', d => 'month' + d.data.x1.getMonth())
   .attr('x', d => x(d.data.x0))
   .attr('y', d => y(d[1]))
   .attr('height', d => y(d[0]) - y(d[1]))
   .attr('width', d => x(d.data.x1) - x(d.data.x0) - 1)
   // mouse event callbacks used to show rectangle years
   // but also to show tooltip when hovering the mouse
   // over the histogram bars
   .on('mouseover', (event, d) => {
     svg.selectAll('rect.year' + d.data.x1.getUTCFullYear())
        .attr('fill-opacity', 0.5);
     tooltip.transition()
            .duration(200)
            .style('opacity', 1);
     const ds = d.data.x1.toISOString().substr(0, 7).split('-');
     let tooltipText = '<b>' + ds[1] + ' / ' + ds[0] + ':</b><br/>';
     for (let i = 0; i < statuses.length; ++i) {
       const visitStatus = statuses[i];
       const nbVisits = d.data[visitStatus].length;
       if (nbVisits === 0) continue;
       tooltipText += nbVisits + ' ' + visitStatus + ' visits';
       if (i !== statuses.length - 1) tooltipText += '<br/>';
     }
     tooltip.html(tooltipText)
            .style('left', event.pageX + 15 + 'px')
            .style('top', event.pageY + 'px');
   })
   .on('mouseout', (event, d) => {
     svg.selectAll('rect.year' + d.data.x1.getUTCFullYear())
        .attr('fill-opacity', 0);
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0.3);
     tooltip.transition()
            .duration(500)
            .style('opacity', 0);
   })
   .on('mousemove', (event) => {
     tooltip.style('left', event.pageX + 15 + 'px')
            .style('top', event.pageY + 'px');
   })
   // callback to select a year after a mouse click
   // inside a histogram bar
   .on('click', (event, d) => {
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0);
     svg.selectAll('rect.yearoutline' + currentYear)
        .attr('stroke', 'none');
     currentYear = d.data.x1.getUTCFullYear();
     svg.selectAll('rect.year' + currentYear)
        .attr('fill-opacity', 0.5);
     svg.selectAll('rect.yearoutline' + currentYear)
        .attr('stroke', 'black');
     yearClickCallback(currentYear);
   });

  // create one stroke only rectangle by displayed year
  // that will be displayed on top of the histogram when the user has selected a year
  g.append('g')
   .selectAll('rect')
   .data(yearBins)
   .enter().append('rect')
   .attr('class', d => 'yearoutline' + d.getUTCFullYear())
   .attr('fill', 'none')
   .attr('stroke', d => d.getUTCFullYear() === currentYear ? 'black' : 'none')
   .attr('x', d => {
     const date = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
     return x(date);
   })
   .attr('y', 0)
   .attr('height', height)
   .attr('width', d => {
     const date = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
     const yearWidth = x(d3.timeYear.offset(date, 1)) - x(date);
     return yearWidth;
   });

  // add x axis with a tick for every 1st day of each year
  const xAxis = g.append('g')
                 .attr('class', 'axis')
                 .attr('transform', 'translate(0,' + height + ')')
                 .call(
                   d3.axisBottom(x)
                     .ticks(d3.timeYear.every(1))
                     .tickFormat(d => {
                       const year = d.getUTCFullYear();
                       if (year >= startYear) {
                         return year;
                       } else {
                         // filter out 2014 tick label
                         return '';
                       }
                     })
                 );

  // shift tick labels in order to display them at the middle
  // of each year range
  xAxis.selectAll('text')
       .attr('transform', d => {
         const year = d.getUTCFullYear();
         const date = new Date(Date.UTC(year, 0, 1));
         const yearWidth = x(d3.timeYear.offset(date, 1)) - x(date);
         return 'translate(' + -yearWidth / 2 + ', 0)';
       });

  // add y axis for the number of visits
  g.append('g')
   .attr('class', 'axis')
   .call(d3.axisLeft(y).tickValues(yTickValues));

  // add legend for visit statuses
  const legendGroup = g.append('g')
                       .attr('font-family', 'sans-serif')
                       .attr('font-size', 10)
                       .attr('text-anchor', 'end');
  legendGroup.append('text')
             .attr('x', width + margin.right - 5)
             .attr('y', 9.5)
             .attr('dy', '0.32em')
             .text('visit status:');
  const legend = legendGroup.selectAll('g')
                            .data(statuses.slice().reverse())
                            .enter().append('g')
                            .attr('transform', (d, i) => 'translate(0,' + (i + 1) * 20 + ')');
  legend.append('rect')
        .attr('x', width + 2 * margin.right / 3)
        .attr('width', 19)
        .attr('height', 19)
        .attr('fill', colors);
  legend.append('text')
        .attr('x', width + 2 * margin.right / 3 - 5)
        .attr('y', 9.5)
        .attr('dy', '0.32em')
        .text(d => d);

  // add text label for the y axis
  g.append('text')
   .attr('transform', 'rotate(-90)')
   .attr('y', -margin.left)
   .attr('x', -(height / 2))
   .attr('dy', '1em')
   .style('text-anchor', 'middle')
   .text('Number of visits');
}
|
PypiClean
|
/prob_distributions_sb-0.3-py3-none-any.whl/prob_distributions_sb/Binomialdistribution.py
|
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) representing the number of trials
    """

    def __init__(self, prob=.5, size=20):
        # p and n must exist before the base-class init, which takes the
        # computed mean/stdev as constructor arguments.
        self.p = prob
        self.n = size
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean from p and n

        Args:
            None

        Returns:
            float: mean of the data set
        """
        # Binomial mean is n * p; cache it on the instance as well.
        self.mean = self.n * self.p
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation from p and n.

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        # Binomial stdev is sqrt(n * p * (1 - p)); cached on the instance.
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set. The function
        updates the p and n variables of the object.

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        # n is the number of trials; p is the observed success fraction.
        # NOTE(review): raises ZeroDivisionError on an empty data set —
        # presumably data is loaded via the base class first; confirm.
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / self.n
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()
        return self.p, self.n

    def plot_bar(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability density function calculator for the binomial distribution.

        Args:
            k (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        # nCk is the binomial coefficient "n choose k".
        nCk = math.factorial(self.n) / (math.factorial(k) * math.factorial(self.n - k))
        return nCk * (self.p ** k) * ((1 - self.p) ** (self.n - k))

    def plot_pdf(self):
        """Function to plot the pdf of the binomial distribution

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        # BUG FIX: the original comprehensions iterated over `i` but
        # referenced the undefined name `k` ([k for i in ...] and
        # [self.pdf(k) for i in ...]), raising NameError at runtime.
        x_axis = list(range(self.n + 1))
        y_axis = [self.pdf(k) for k in x_axis]
        plt.bar(x_axis, y_axis)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x_axis, y_axis

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two distributions have different p values
        """
        # The original wrapped this assert in a try/except that only
        # re-raised; the bare assert has identical behavior.
        assert self.p == other.p, 'p values are not equal'
        return Binomial(self.p, self.n + other.n)

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance

        Args:
            None

        Returns:
            string: characteristics of the Binomial object
        """
        return 'mean {}, standard deviation {}, p {}, n {}'.format(self.mean,
                                                                   self.stdev,
                                                                   self.p,
                                                                   self.n)
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/groups/item/sites/item/lists/item/columns/columns_request_builder.py
|
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ........models import column_definition, column_definition_collection_response
from ........models.o_data_errors import o_data_error
from .count import count_request_builder
from .item import column_definition_item_request_builder
class ColumnsRequestBuilder():
    """
    Provides operations to manage the columns property of the microsoft.graph.list entity.

    Auto-generated Kiota request builder: builds and executes HTTP requests
    against the /groups/{id}/sites/{id}/lists/{id}/columns endpoint.
    """
    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new ColumnsRequestBuilder and sets the default values.
        Args:
            pathParameters: The raw url or the Url template parameters for the request.
            requestAdapter: The request adapter to use to execute the requests.
        Raises:
            Exception: if either argument is None.
        """
        if path_parameters is None:
            raise Exception("path_parameters cannot be undefined")
        if request_adapter is None:
            raise Exception("request_adapter cannot be undefined")
        # Url template to use to build the URL for the current request builder
        # (%2D is an escaped '-', %24 an escaped '$' in OData query parameter names)
        self.url_template: str = "{+baseurl}/groups/{group%2Did}/sites/{site%2Did}/lists/{list%2Did}/columns{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}"

        url_tpl_params = get_path_parameters(path_parameters)
        self.path_parameters = url_tpl_params
        self.request_adapter = request_adapter

    def by_column_definition_id(self,column_definition_id: str) -> column_definition_item_request_builder.ColumnDefinitionItemRequestBuilder:
        """
        Provides operations to manage the columns property of the microsoft.graph.list entity.
        Args:
            column_definition_id: Unique identifier of the item
        Returns: column_definition_item_request_builder.ColumnDefinitionItemRequestBuilder
        """
        if column_definition_id is None:
            raise Exception("column_definition_id cannot be undefined")
        # Local import: generated builders import lazily to avoid circular imports.
        from .item import column_definition_item_request_builder

        # Extend the current path parameters with this item's id.
        url_tpl_params = get_path_parameters(self.path_parameters)
        url_tpl_params["columnDefinition%2Did"] = column_definition_id
        return column_definition_item_request_builder.ColumnDefinitionItemRequestBuilder(self.request_adapter, url_tpl_params)

    async def get(self,request_configuration: Optional[ColumnsRequestBuilderGetRequestConfiguration] = None) -> Optional[column_definition_collection_response.ColumnDefinitionCollectionResponse]:
        """
        Get columns from groups
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[column_definition_collection_response.ColumnDefinitionCollectionResponse]
        """
        request_info = self.to_get_request_information(
            request_configuration
        )
        # Local import to avoid circular imports between generated modules.
        from ........models.o_data_errors import o_data_error

        # Map every 4xx/5xx HTTP status to the ODataError deserializer.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ........models import column_definition_collection_response

        return await self.request_adapter.send_async(request_info, column_definition_collection_response.ColumnDefinitionCollectionResponse, error_mapping)

    async def post(self,body: Optional[column_definition.ColumnDefinition] = None, request_configuration: Optional[ColumnsRequestBuilderPostRequestConfiguration] = None) -> Optional[column_definition.ColumnDefinition]:
        """
        Create new navigation property to columns for groups
        Args:
            body: The request body
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[column_definition.ColumnDefinition]
        """
        if body is None:
            raise Exception("body cannot be undefined")
        request_info = self.to_post_request_information(
            body, request_configuration
        )
        # Local import to avoid circular imports between generated modules.
        from ........models.o_data_errors import o_data_error

        # Map every 4xx/5xx HTTP status to the ODataError deserializer.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ........models import column_definition

        return await self.request_adapter.send_async(request_info, column_definition.ColumnDefinition, error_mapping)

    def to_get_request_information(self,request_configuration: Optional[ColumnsRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        Get columns from groups
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        # Build the request metadata without executing it.
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.GET
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            # Caller-supplied headers/query parameters/options are merged in.
            request_info.add_request_headers(request_configuration.headers)
            request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request_info.add_request_options(request_configuration.options)
        return request_info

    def to_post_request_information(self,body: Optional[column_definition.ColumnDefinition] = None, request_configuration: Optional[ColumnsRequestBuilderPostRequestConfiguration] = None) -> RequestInformation:
        """
        Create new navigation property to columns for groups
        Args:
            body: The request body
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        if body is None:
            raise Exception("body cannot be undefined")
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.POST
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.add_request_options(request_configuration.options)
        # Serialize the body as JSON via the adapter's serialization writer.
        request_info.set_content_from_parsable(self.request_adapter, "application/json", body)
        return request_info

    @property
    def count(self) -> count_request_builder.CountRequestBuilder:
        """
        Provides operations to count the resources in the collection.
        """
        from .count import count_request_builder

        return count_request_builder.CountRequestBuilder(self.request_adapter, self.path_parameters)

    @dataclass
    class ColumnsRequestBuilderGetQueryParameters():
        """
        Get columns from groups
        """
        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                originalName: The original query parameter name in the class.
            Returns: str
            """
            if original_name is None:
                raise Exception("original_name cannot be undefined")
            # %24 is an URL-escaped '$', the OData system query option prefix.
            if original_name == "count":
                return "%24count"
            if original_name == "expand":
                return "%24expand"
            if original_name == "filter":
                return "%24filter"
            if original_name == "orderby":
                return "%24orderby"
            if original_name == "search":
                return "%24search"
            if original_name == "select":
                return "%24select"
            if original_name == "skip":
                return "%24skip"
            if original_name == "top":
                return "%24top"
            return original_name

        # Include count of items
        count: Optional[bool] = None

        # Expand related entities
        expand: Optional[List[str]] = None

        # Filter items by property values
        filter: Optional[str] = None

        # Order items by property values
        orderby: Optional[List[str]] = None

        # Search items by search phrases
        search: Optional[str] = None

        # Select properties to be returned
        select: Optional[List[str]] = None

        # Skip the first n items
        skip: Optional[int] = None

        # Show only the first n items
        top: Optional[int] = None

    @dataclass
    class ColumnsRequestBuilderGetRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, Union[str, List[str]]]] = None

        # Request options
        options: Optional[List[RequestOption]] = None

        # Request query parameters
        query_parameters: Optional[ColumnsRequestBuilder.ColumnsRequestBuilderGetQueryParameters] = None

    @dataclass
    class ColumnsRequestBuilderPostRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, Union[str, List[str]]]] = None

        # Request options
        options: Optional[List[RequestOption]] = None
|
PypiClean
|
/lizard-ui-5.3.tar.gz/lizard-ui-5.3/lizard_ui/static/openlayers/1.12-r7/lib/OpenLayers/Format/WPSDescribeProcess.js
|
* @requires OpenLayers/Format/XML.js
* @requires OpenLayers/Format/OWSCommon/v1_1_0.js
*/
/**
* Class: OpenLayers.Format.WPSDescribeProcess
* Read WPS DescribeProcess responses.
*
* Inherits from:
* - <OpenLayers.Format.XML>
*/
OpenLayers.Format.WPSDescribeProcess = OpenLayers.Class(
    OpenLayers.Format.XML, {

    /**
     * Constant: VERSION
     * {String} 1.0.0
     */
    VERSION: "1.0.0",

    /**
     * Property: namespaces
     * {Object} Mapping of namespace aliases to namespace URIs.
     */
    namespaces: {
        wps: "http://www.opengis.net/wps/1.0.0",
        ows: "http://www.opengis.net/ows/1.1",
        xsi: "http://www.w3.org/2001/XMLSchema-instance"
    },

    /**
     * Property: schemaLocation
     * {String} Schema location
     */
    schemaLocation: "http://www.opengis.net/wps/1.0.0 http://schemas.opengis.net/wps/1.0.0/wpsAll.xsd",

    /**
     * Property: defaultPrefix
     * {String} Namespace prefix assumed for un-prefixed nodes.
     */
    defaultPrefix: "wps",

    /**
     * Property: regExes
     * Compiled regular expressions for manipulating strings.
     */
    regExes: {
        trimSpace: (/^\s*|\s*$/g),
        removeSpace: (/\s*/g),
        splitSpace: (/\s+/),
        trimComma: (/\s*,\s*/g)
    },

    /**
     * Constructor: OpenLayers.Format.WPSDescribeProcess
     *
     * Parameters:
     * options - {Object} An optional object whose properties will be set on
     *     this instance.
     */

    /**
     * APIMethod: read
     * Parse a WPS DescribeProcess and return an object with its information.
     *
     * Parameters:
     * data - {String} or {DOMElement} data to read/parse.
     *
     * Returns:
     * {Object}
     */
    read: function(data) {
        // Accept a raw XML string and parse it into a DOM first.
        if(typeof data == "string") {
            data = OpenLayers.Format.XML.prototype.read.apply(this, [data]);
        }
        // nodeType 9 is DOCUMENT_NODE; descend to the root element.
        if(data && data.nodeType == 9) {
            data = data.documentElement;
        }
        var info = {};
        this.readNode(data, info);
        return info;
    },

    /**
     * Property: readers
     * Contains public functions, grouped by namespace prefix, that will
     *     be applied when a namespaced node is found matching the function
     *     name.  The function will be applied in the scope of this parser
     *     with two arguments: the node being read and a context object passed
     *     from the parent.
     */
    readers: {
        "wps": {
            "ProcessDescriptions": function(node, obj) {
                obj.processDescriptions = {};
                this.readChildNodes(node, obj.processDescriptions);
            },
            "ProcessDescription": function(node, processDescriptions) {
                var processVersion = this.getAttributeNS(node, this.namespaces.wps, "processVersion");
                var processDescription = {
                    processVersion: processVersion,
                    // Attribute values arrive as strings; compare to "true"
                    // to produce booleans.
                    statusSupported: (node.getAttribute("statusSupported") === "true"),
                    storeSupported: (node.getAttribute("storeSupported") === "true")
                };
                this.readChildNodes(node, processDescription);
                // Index descriptions by their ows:Identifier (set by the
                // shared "ows" readers while reading child nodes).
                processDescriptions[processDescription.identifier] = processDescription;
            },
            "DataInputs": function(node, processDescription) {
                processDescription.dataInputs = [];
                this.readChildNodes(node, processDescription.dataInputs);
            },
            "ProcessOutputs": function(node, processDescription) {
                processDescription.processOutputs = [];
                this.readChildNodes(node, processDescription.processOutputs);
            },
            "Output": function(node, processOutputs) {
                var output = {};
                this.readChildNodes(node, output);
                processOutputs.push(output);
            },
            "ComplexOutput": function(node, output) {
                output.complexOutput = {};
                this.readChildNodes(node, output.complexOutput);
            },
            "Input": function(node, dataInputs) {
                var input = {
                    maxOccurs: parseInt(node.getAttribute("maxOccurs")),
                    minOccurs: parseInt(node.getAttribute("minOccurs"))
                };
                this.readChildNodes(node, input);
                dataInputs.push(input);
            },
            "BoundingBoxData": function(node, input) {
                input.boundingBoxData = {};
                this.readChildNodes(node, input.boundingBoxData);
            },
            "CRS": function(node, obj) {
                // Collect supported CRS identifiers as a set (object keys).
                if (!obj.CRSs) {
                    obj.CRSs = {};
                }
                obj.CRSs[this.getChildValue(node)] = true;
            },
            "LiteralData": function(node, input) {
                input.literalData = {};
                this.readChildNodes(node, input.literalData);
            },
            "ComplexData": function(node, input) {
                input.complexData = {};
                this.readChildNodes(node, input.complexData);
            },
            "Default": function(node, complexData) {
                complexData["default"] = {};
                this.readChildNodes(node, complexData["default"]);
            },
            "Supported": function(node, complexData) {
                complexData["supported"] = {};
                this.readChildNodes(node, complexData["supported"]);
            },
            "Format": function(node, obj) {
                var format = {};
                this.readChildNodes(node, format);
                // Collect supported MIME types as a set (object keys).
                if (!obj.formats) {
                    obj.formats = {};
                }
                obj.formats[format.mimeType] = true;
            },
            "MimeType": function(node, format) {
                format.mimeType = this.getChildValue(node);
            }
        },
        // Reuse the shared OWS Common readers (Identifier, Title, ...).
        "ows": OpenLayers.Format.OWSCommon.v1_1_0.prototype.readers["ows"]
    },

    CLASS_NAME: "OpenLayers.Format.WPSDescribeProcess"
});
|
PypiClean
|
/bpy_nibbler-0.1.tar.gz/bpy_nibbler-0.1/bpy_lambda/2.78/scripts/addons_contrib/curve_tools/Operators.py
|
import time
import threading
import bpy
from bpy.props import *
from . import Properties
from . import Curves
from . import CurveIntersections
from . import Util
from . import Surfaces
class OperatorSelectionInfo(bpy.types.Operator):
    bl_idname = "curvetools2.operatorselectioninfo"
    bl_label = "Selection Info"
    bl_description = "Maintains a list of selected objects in the order they were selected"

    @classmethod
    def poll(cls, context):
        """Start a background thread that keeps the ordered selection list
        up to date, then report the operator as available.

        NOTE(review): Blender calls poll() very frequently, so this spawns
        many short-lived threads; UpdateThreadTarget presumably guards
        against concurrent updates via the lock -- confirm before changing.
        """
        selectedObjectNames = Properties.CurveTools2SelectedObject.GetSelectedObjectNames()
        selectedBlenderObjectNames = Properties.CurveTools2SelectedObject.GetSelectedBlenderObjectNames()

        sleepTime = 0.02
        lock = threading.Lock()
        lock_holder = threading.Thread(
            target=Properties.CurveTools2SelectedObject.UpdateThreadTarget,
            args=(lock, sleepTime, selectedObjectNames, selectedBlenderObjectNames),
            name='OperatorSelectionInfoThread')
        # FIX: Thread.setDaemon() is deprecated (and removed in Python 3.13);
        # assigning the daemon attribute is the supported equivalent.
        lock_holder.daemon = True
        lock_holder.start()

        return True

    def execute(self, context):
        """Print the ordered selection to the console and report its size."""
        nrSelectedObjects = bpy.context.scene.curvetools.NrSelectedObjects
        self.report({'INFO'}, "Selection Info: nrSelectedObjects: %d" % nrSelectedObjects)

        selectedObjects = bpy.context.scene.curvetools.SelectedObjects
        selectedObjectValues = selectedObjects.values()
        for i, selectedObject in enumerate(selectedObjectValues):
            print("--", "selected object %d of %d: %s" % (i + 1, nrSelectedObjects, selectedObject.name))

        return {'FINISHED'}
# 1 CURVE SELECTED
# ################
class OperatorCurveInfo(bpy.types.Operator):
    bl_idname = "curvetools2.operatorcurveinfo"
    bl_label = "Info"
    bl_description = "Displays general info about the active/selected curve"

    @classmethod
    def poll(cls, context):
        # Available only when exactly one curve is selected.
        return Util.Selected1Curve()

    def execute(self, context):
        # Wrap the active object and summarize its spline structure.
        wrapped = Curves.Curve(context.active_object)
        splineCount = len(wrapped.splines)
        segmentCount = sum(spl.nrSegments for spl in wrapped.splines)
        emptyCount = sum(1 for spl in wrapped.splines if spl.nrSegments < 1)

        self.report({'INFO'}, "nrSplines: %d; nrSegments: %d; nrEmptySplines: %d" % (splineCount, segmentCount, emptyCount))

        return {'FINISHED'}
class OperatorCurveLength(bpy.types.Operator):
    bl_idname = "curvetools2.operatorcurvelength"
    bl_label = "Length"
    bl_description = "Calculates the length of the active/selected curve"

    @classmethod
    def poll(cls, context):
        # Available only when exactly one curve is selected.
        return Util.Selected1Curve()

    def execute(self, context):
        # Store the computed length on the scene's curvetools settings.
        context.scene.curvetools.CurveLength = Curves.Curve(context.active_object).length
        return {'FINISHED'}
class OperatorSplinesInfo(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsplinesinfo"
    bl_label = "Info"
    bl_description = "Displays general info about the splines of the active/selected curve"

    @classmethod
    def poll(cls, context):
        return Util.Selected1Curve()

    def execute(self, context):
        wrapped = Curves.Curve(context.active_object)
        splineCount = len(wrapped.splines)

        # Per-spline details go to the console; the summary to the UI.
        print("")
        print("OperatorSplinesInfo:", "nrSplines:", splineCount)

        emptyCount = 0
        for position, spl in enumerate(wrapped.splines, start=1):
            print("--", "spline %d of %d: nrSegments: %d" % (position, splineCount, spl.nrSegments))

            if spl.nrSegments < 1:
                emptyCount += 1
                print("--", "--", "## WARNING: spline has no segments and will therefor be ignored in any further calculations")

        self.report({'INFO'}, "nrSplines: %d; nrEmptySplines: %d" % (splineCount, emptyCount) + " -- more info: see console")

        return {'FINISHED'}
class OperatorSegmentsInfo(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsegmentsinfo"
    bl_label = "Info"
    bl_description = "Displays general info about the segments of the active/selected curve"

    @classmethod
    def poll(cls, context):
        return Util.Selected1Curve()

    def execute(self, context):
        wrapped = Curves.Curve(context.active_object)
        splineCount = len(wrapped.splines)
        segmentTotal = 0

        # Per-segment details go to the console; the summary to the UI.
        print("")
        print("OperatorSegmentsInfo:", "nrSplines:", splineCount)

        emptyCount = 0
        for position, spl in enumerate(wrapped.splines, start=1):
            segmentsInSpline = spl.nrSegments
            print("--", "spline %d of %d: nrSegments: %d" % (position, splineCount, segmentsInSpline))

            if segmentsInSpline < 1:
                emptyCount += 1
                print("--", "--", "## WARNING: spline has no segments and will therefor be ignored in any further calculations")
                continue

            for segPosition, seg in enumerate(spl.segments, start=1):
                print("--", "--", "segment %d of %d coefficients:" % (segPosition, segmentsInSpline))
                print("--", "--", "--", "C0: %.6f, %.6f, %.6f" % (seg.coeff0.x, seg.coeff0.y, seg.coeff0.z))

            segmentTotal += segmentsInSpline

        self.report({'INFO'}, "nrSplines: %d; nrSegments: %d; nrEmptySplines: %d" % (splineCount, segmentTotal, emptyCount))

        return {'FINISHED'}
class OperatorOriginToSpline0Start(bpy.types.Operator):
    bl_idname = "curvetools2.operatororigintospline0start"
    bl_label = "OriginToSpline0Start"
    bl_description = "Sets the origin of the active/selected curve to the starting point of the (first) spline. Nice for curve modifiers."

    @classmethod
    def poll(cls, context):
        return Util.Selected1Curve()

    def execute(self, context):
        curveObject = context.active_object
        firstSpline = curveObject.data.splines[0]
        # World-space position of the first bezier point (Blender 2.7x
        # matrix * vector syntax).
        newOrigin = curveObject.matrix_world * firstSpline.bezier_points[0].co

        # Move the 3D cursor to the target, set the origin there, then
        # restore the cursor to where the user had it.
        savedCursor = bpy.context.scene.cursor_location.copy()
        print("--", "origOrigin: %.6f, %.6f, %.6f" % (savedCursor.x, savedCursor.y, savedCursor.z))
        print("--", "newOrigin: %.6f, %.6f, %.6f" % (newOrigin.x, newOrigin.y, newOrigin.z))

        bpy.context.scene.cursor_location = newOrigin
        bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
        bpy.context.scene.cursor_location = savedCursor

        self.report({'INFO'}, "TODO: OperatorOriginToSpline0Start")

        return {'FINISHED'}
# 2 CURVES SELECTED
# #################
class OperatorIntersectCurves(bpy.types.Operator):
    bl_idname = "curvetools2.operatorintersectcurves"
    bl_label = "Intersect"
    bl_description = "Intersects selected curves"

    @classmethod
    def poll(cls, context):
        # Requires exactly two selected curves.
        return Util.Selected2Curves()

    def execute(self, context):
        print("### TODO: OperatorIntersectCurves.execute()")

        settings = context.scene.curvetools
        algo = settings.IntersectCurvesAlgorithm
        print("-- algo:", algo)

        mode = settings.IntersectCurvesMode
        print("-- mode:", mode)
        # NOTE: 'Split' mode used to be rejected here with a warning; the
        # guard is disabled, matching the previous commented-out code.

        affect = settings.IntersectCurvesAffect
        print("-- affect:", affect)

        intersector = CurveIntersections.CurvesIntersector.FromSelection()
        pointCounts = intersector.CalcAndApplyIntersections()
        self.report({'INFO'}, "Active curve points: %d; other curve points: %d" % (pointCounts[0], pointCounts[1]))

        return {'FINISHED'}
class OperatorLoftCurves(bpy.types.Operator):
    bl_idname = "curvetools2.operatorloftcurves"
    bl_label = "Loft"
    bl_description = "Lofts selected curves"

    @classmethod
    def poll(cls, context):
        # Requires exactly two selected curves.
        return Util.Selected2Curves()

    def execute(self, context):
        # Build the lofted surface from the current selection and add it
        # to the scene.
        Surfaces.LoftedSurface.FromSelection().AddToScene()

        self.report({'INFO'}, "OperatorLoftCurves.execute()")

        return {'FINISHED'}
class OperatorSweepCurves(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsweepcurves"
    bl_label = "Sweep"
    bl_description = "Sweeps the active curve along to other curve (rail)"

    @classmethod
    def poll(cls, context):
        # Requires exactly two selected curves (profile + rail).
        return Util.Selected2Curves()

    def execute(self, context):
        # Build the swept surface from the current selection and add it
        # to the scene.
        Surfaces.SweptSurface.FromSelection().AddToScene()

        self.report({'INFO'}, "OperatorSweepCurves.execute()")

        return {'FINISHED'}
# 3 CURVES SELECTED
# #################
class OperatorBirail(bpy.types.Operator):
    bl_idname = "curvetools2.operatorbirail"
    bl_label = "Birail"
    bl_description = "Generates a birailed surface from 3 selected curves -- in order: rail1, rail2 and profile"

    @classmethod
    def poll(cls, context):
        # Requires exactly three selected curves.
        return Util.Selected3Curves()

    def execute(self, context):
        # Build the birailed surface from the current selection and add
        # it to the scene.
        Surfaces.BirailedSurface.FromSelection().AddToScene()

        self.report({'INFO'}, "OperatorBirail.execute()")

        return {'FINISHED'}
# 1 OR MORE CURVES SELECTED
# #########################
class OperatorSplinesSetResolution(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsplinessetresolution"
    bl_label = "SplinesSetResolution"
    bl_description = "Sets the resolution of all splines"

    @classmethod
    def poll(cls, context):
        return Util.Selected1OrMoreCurves()

    def execute(self, context):
        # Apply the configured resolution to every spline of every
        # selected curve.
        resolution = context.scene.curvetools.SplineResolution

        for curveObject in Util.GetSelectedCurves():
            for spl in curveObject.data.splines:
                spl.resolution_u = resolution

        return {'FINISHED'}
class OperatorSplinesRemoveZeroSegment(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsplinesremovezerosegment"
    bl_label = "SplinesRemoveZeroSegment"
    bl_description = "Removes splines with no segments -- they seem to creep up, sometimes.."

    @classmethod
    def poll(cls, context):
        return Util.Selected1OrMoreCurves()

    def execute(self, context):
        for blCurve in Util.GetSelectedCurves():
            curve = Curves.Curve(blCurve)
            totalSplines = curve.nrSplines

            # Collect first, then remove -- never mutate while iterating.
            doomed = [spl for spl in curve.splines if len(spl.segments) < 1]
            for spl in doomed:
                curve.splines.remove(spl)

            if doomed:
                curve.RebuildInScene()

            self.report({'INFO'}, "Removed %d of %d splines" % (len(doomed), totalSplines))

        return {'FINISHED'}
class OperatorSplinesRemoveShort(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsplinesremoveshort"
    bl_label = "SplinesRemoveShort"
    bl_description = "Removes splines with a length smaller than the threshold"

    @classmethod
    def poll(cls, context):
        return Util.Selected1OrMoreCurves()

    def execute(self, context):
        minLength = context.scene.curvetools.SplineRemoveLength

        for blCurve in Util.GetSelectedCurves():
            curve = Curves.Curve(blCurve)
            totalSplines = curve.nrSplines

            removedCount = curve.RemoveShortSplines(minLength)
            # Rebuild only when something actually changed.
            if removedCount > 0:
                curve.RebuildInScene()

            self.report({'INFO'}, "Removed %d of %d splines" % (removedCount, totalSplines))

        return {'FINISHED'}
class OperatorSplinesJoinNeighbouring(bpy.types.Operator):
    bl_idname = "curvetools2.operatorsplinesjoinneighbouring"
    bl_label = "SplinesJoinNeighbouring"
    bl_description = "Joins neighbouring splines within a distance smaller than the threshold"

    @classmethod
    def poll(cls, context):
        return Util.Selected1OrMoreCurves()

    def execute(self, context):
        """Join neighbouring splines on every selected curve and report
        the number of joins per curve."""
        # Hoisted out of the loop: these scene settings are loop-invariant
        # (the original re-read them for every selected curve).
        threshold = context.scene.curvetools.SplineJoinDistance
        startEnd = context.scene.curvetools.SplineJoinStartEnd
        mode = context.scene.curvetools.SplineJoinMode

        selCurves = Util.GetSelectedCurves()

        for blCurve in selCurves:
            curve = Curves.Curve(blCurve)
            nrSplines = curve.nrSplines

            nrJoins = curve.JoinNeighbouringSplines(startEnd, threshold, mode)
            # Rebuild only when something actually changed.
            if nrJoins > 0:
                curve.RebuildInScene()

            self.report({'INFO'}, "Applied %d joins on %d splines; resulting nrSplines: %d" % (nrJoins, nrSplines, curve.nrSplines))

        return {'FINISHED'}
|
PypiClean
|
/cohesity-sdk-1.1.0.tar.gz/cohesity-sdk-1.1.0/cohesity_sdk/cluster/api/remote_storage.py
|
import re # noqa: F401
import sys # noqa: F401
from cohesity_sdk.cluster.api_client import ApiClient, Endpoint as _Endpoint
from cohesity_sdk.cluster.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from cohesity_sdk.cluster.model.error import Error
from cohesity_sdk.cluster.model.registered_remote_storage_list import RegisteredRemoteStorageList
from cohesity_sdk.cluster.model.remote_storage_info import RemoteStorageInfo
class RemoteStorageApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __delete_remote_storage_registration(
self,
id,
**kwargs
):
"""Delete Remote Storage Registration # noqa: E501
Delete remote storage registration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_remote_storage_registration(id, async_req=True)
>>> result = thread.get()
Args:
id (int): Specifies the registration id of the registered remote storage.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.delete_remote_storage_registration = _Endpoint(
settings={
'response_type': None,
'auth': [
'TokenHeader',
'APIKeyHeader'
],
'endpoint_path': '/remote-storage/{id}',
'operation_id': 'delete_remote_storage_registration',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__delete_remote_storage_registration
)
def __get_registered_remote_storage_list(
    self,
    **kwargs
):
    """Get Registered Remote Storage Servers List  # noqa: E501

    Return a summary of every registered remote storage server.  # noqa: E501
    The request is synchronous by default; pass ``async_req=True`` to run
    it asynchronously, in which case the returned thread's ``get()``
    yields the result.

    >>> thread = api.get_registered_remote_storage_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        RegisteredRemoteStorageList, or the request thread when called
        asynchronously.
    """
    # Fill in the framework options the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /remote-storage (no request parameters).
self.get_registered_remote_storage_list = _Endpoint(
    settings={
        'response_type': (RegisteredRemoteStorageList,),
        'auth': [
            'TokenHeader',
            'APIKeyHeader'
        ],
        'endpoint_path': '/remote-storage',
        'operation_id': 'get_registered_remote_storage_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
        ],
        'required': [],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
        },
        'attribute_map': {
        },
        'location_map': {
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_registered_remote_storage_list
)
def __get_remote_storage_details(
    self,
    id,
    **kwargs
):
    """Get remote storage details  # noqa: E501

    Fetch the details of the remote storage registration identified by
    ``id``.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive a request
    thread instead (call ``thread.get()`` for the result).

    >>> thread = api.get_remote_storage_details(id, async_req=True)
    >>> result = thread.get()

    Args:
        id (int): Specifies the id of the registered remote storage.

    Keyword Args:
        include_available_space (bool): also report available capacity on
            the remote storage. [optional] if omitted the server will use
            the default value of False
        include_available_data_vips (bool): also report available data
            vips on the remote storage. [optional] if omitted the server
            will use the default value of False
        include_array_info (bool): include flashblade-specific info such
            as name, software os and version. [optional] if omitted the
            server will use the default value of False
        _return_http_data_only (bool): return the response data without
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        RemoteStorageInfo, or the request thread when called
        asynchronously.
    """
    # Fill in the framework options the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /remote-storage/{id} with optional query
# flags controlling how much detail the server includes in the response.
self.get_remote_storage_details = _Endpoint(
    settings={
        'response_type': (RemoteStorageInfo,),
        'auth': [
            'TokenHeader',
            'APIKeyHeader'
        ],
        'endpoint_path': '/remote-storage/{id}',
        'operation_id': 'get_remote_storage_details',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'id',
            'include_available_space',
            'include_available_data_vips',
            'include_array_info',
        ],
        'required': [
            'id',  # the three include_* flags are optional query params
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'id':
                (int,),
            'include_available_space':
                (bool,),
            'include_available_data_vips':
                (bool,),
            'include_array_info':
                (bool,),
        },
        'attribute_map': {
            # python snake_case name -> wire-format camelCase name
            'id': 'id',
            'include_available_space': 'includeAvailableSpace',
            'include_available_data_vips': 'includeAvailableDataVips',
            'include_array_info': 'includeArrayInfo',
        },
        'location_map': {
            'id': 'path',
            'include_available_space': 'query',
            'include_available_data_vips': 'query',
            'include_array_info': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_remote_storage_details
)
def __register_new_remote_storage(
    self,
    body,
    **kwargs
):
    """Register Remote Storage  # noqa: E501

    Register a remote storage to be used for disaggregated storage.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive a request
    thread instead (call ``thread.get()`` for the result).

    >>> thread = api.register_new_remote_storage(body, async_req=True)
    >>> result = thread.get()

    Args:
        body (RemoteStorageInfo): Specifies the parameters to register a
            remote storage management server.

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        RemoteStorageInfo, or the request thread when called
        asynchronously.
    """
    # Fill in the framework options the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['body'] = body
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for POST /remote-storage; the request payload is a
# JSON-serialized RemoteStorageInfo.
self.register_new_remote_storage = _Endpoint(
    settings={
        'response_type': (RemoteStorageInfo,),
        'auth': [
            'TokenHeader',
            'APIKeyHeader'
        ],
        'endpoint_path': '/remote-storage',
        'operation_id': 'register_new_remote_storage',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'body',
        ],
        'required': [
            'body',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'body':
                (RemoteStorageInfo,),
        },
        'attribute_map': {
        },
        'location_map': {
            'body': 'body',  # sent as the HTTP request body
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__register_new_remote_storage
)
def __update_remote_storage_registration(
    self,
    id,
    body,
    **kwargs
):
    """Update Remote Storage Config  # noqa: E501

    Update the configuration of a registered remote storage.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive a request
    thread instead (call ``thread.get()`` for the result).

    >>> thread = api.update_remote_storage_registration(id, body, async_req=True)
    >>> result = thread.get()

    Args:
        id (int): Specifies the registration id of the registered remote
            storage.
        body (RemoteStorageInfo): Specifies the parameters to update the
            registration.

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        RemoteStorageInfo, or the request thread when called
        asynchronously.
    """
    # Fill in the framework options the caller did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['id'] = id
    kwargs['body'] = body
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /remote-storage/{id}; 'id' is a path
# parameter and the updated RemoteStorageInfo travels as the JSON body.
self.update_remote_storage_registration = _Endpoint(
    settings={
        'response_type': (RemoteStorageInfo,),
        'auth': [
            'TokenHeader',
            'APIKeyHeader'
        ],
        'endpoint_path': '/remote-storage/{id}',
        'operation_id': 'update_remote_storage_registration',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'id',
            'body',
        ],
        'required': [
            'id',
            'body',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'id':
                (int,),
            'body':
                (RemoteStorageInfo,),
        },
        'attribute_map': {
            'id': 'id',
        },
        'location_map': {
            'id': 'path',
            'body': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__update_remote_storage_registration
)
|
PypiClean
|
/pulumi_google_native-0.31.2a1689827148.tar.gz/pulumi_google_native-0.31.2a1689827148/pulumi_google_native/healthcare/v1beta1/dataset_fhir_store_iam_policy.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['DatasetFhirStoreIamPolicyArgs', 'DatasetFhirStoreIamPolicy']
@pulumi.input_type
class DatasetFhirStoreIamPolicyArgs:
    def __init__(__self__, *,
                 dataset_id: pulumi.Input[str],
                 fhir_store_id: pulumi.Input[str],
                 audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]] = None,
                 bindings: Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 update_mask: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a DatasetFhirStoreIamPolicy resource.

        :param pulumi.Input[str] dataset_id: Id of the parent healthcare dataset.
        :param pulumi.Input[str] fhir_store_id: Id of the FHIR store whose IAM
               policy is managed.
        :param pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]] audit_configs:
               Cloud audit logging configuration for this policy.
        :param pulumi.Input[Sequence[pulumi.Input['BindingArgs']]] bindings:
               Associates a list of principals with a role; each binding may carry
               a `condition` that controls when it applies.  A policy may hold up
               to 1,500 principals (at most 250 Google groups), counting each
               occurrence separately.
        :param pulumi.Input[str] etag: Optimistic-concurrency token.  Echo the
               value returned by `getIamPolicy` into `setIamPolicy` so concurrent
               updates do not overwrite each other; omitting it with IAM
               Conditions in play risks silently dropping version `3` conditions.
        :param pulumi.Input[str] update_mask: OPTIONAL FieldMask naming the policy
               fields to modify; defaults to `paths: "bindings, etag"` when absent.
        :param pulumi.Input[int] version: Policy format version (`0`, `1`, or `3`).
               Any operation touching conditional role bindings requires version
               `3`; see https://cloud.google.com/iam/help/conditions/resource-policies.
        """
        # Required inputs are always stored; optional inputs are stored only
        # when supplied, so unset values stay absent rather than explicit None.
        pulumi.set(__self__, "dataset_id", dataset_id)
        pulumi.set(__self__, "fhir_store_id", fhir_store_id)
        for arg_name, arg_value in (
            ("audit_configs", audit_configs),
            ("bindings", bindings),
            ("etag", etag),
            ("location", location),
            ("project", project),
            ("update_mask", update_mask),
            ("version", version),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="datasetId")
    def dataset_id(self) -> pulumi.Input[str]:
        """Id of the parent healthcare dataset."""
        return pulumi.get(self, "dataset_id")

    @dataset_id.setter
    def dataset_id(self, val: pulumi.Input[str]):
        pulumi.set(self, "dataset_id", val)

    @property
    @pulumi.getter(name="fhirStoreId")
    def fhir_store_id(self) -> pulumi.Input[str]:
        """Id of the FHIR store whose IAM policy is managed."""
        return pulumi.get(self, "fhir_store_id")

    @fhir_store_id.setter
    def fhir_store_id(self, val: pulumi.Input[str]):
        pulumi.set(self, "fhir_store_id", val)

    @property
    @pulumi.getter(name="auditConfigs")
    def audit_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]]:
        """
        Specifies cloud audit logging configuration for this policy.
        """
        return pulumi.get(self, "audit_configs")

    @audit_configs.setter
    def audit_configs(self, val: Optional[pulumi.Input[Sequence[pulumi.Input['AuditConfigArgs']]]]):
        pulumi.set(self, "audit_configs", val)

    @property
    @pulumi.getter
    def bindings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]]:
        """
        Role bindings associating a list of principals with a role, optionally
        gated by a `condition`.  Each binding must contain at least one
        principal; a policy can refer to up to 1,500 principals, of which at
        most 250 may be Google groups (each occurrence counts).
        """
        return pulumi.get(self, "bindings")

    @bindings.setter
    def bindings(self, val: Optional[pulumi.Input[Sequence[pulumi.Input['BindingArgs']]]]):
        pulumi.set(self, "bindings", val)

    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        Optimistic-concurrency token guarding read-modify-write policy updates.
        Pass the `etag` returned by `getIamPolicy` back to `setIamPolicy`;
        omitting it can downgrade a version `3` policy and lose its conditions.
        """
        return pulumi.get(self, "etag")

    @etag.setter
    def etag(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", val)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """Location of the dataset."""
        return pulumi.get(self, "location")

    @location.setter
    def location(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", val)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """Project the dataset belongs to."""
        return pulumi.get(self, "project")

    @project.setter
    def project(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", val)

    @property
    @pulumi.getter(name="updateMask")
    def update_mask(self) -> Optional[pulumi.Input[str]]:
        """
        OPTIONAL FieldMask of policy fields to modify.  When no mask is given
        the default `paths: "bindings, etag"` is used.
        """
        return pulumi.get(self, "update_mask")

    @update_mask.setter
    def update_mask(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_mask", val)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[int]]:
        """
        Policy format version (`0`, `1`, or `3`).  Operations that touch
        conditional role bindings must specify version `3`; policies without
        conditions may use any valid version or leave the field unset.  See
        the IAM documentation on resource policies for details.
        """
        return pulumi.get(self, "version")

    @version.setter
    def version(self, val: Optional[pulumi.Input[int]]):
        pulumi.set(self, "version", val)
class DatasetFhirStoreIamPolicy(pulumi.CustomResource):
    # Pulumi resource that sets the IAM policy on a healthcare FHIR store,
    # replacing any existing policy.  The underlying API has no delete
    # operation, so the policy persists on Google Cloud after the resource
    # is removed from Pulumi state.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuditConfigArgs']]]]] = None,
                 bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BindingArgs']]]]] = None,
                 dataset_id: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 fhir_store_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 update_mask: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Construct the resource from individual keyword arguments.

        Sets the access control policy on the specified resource, replacing
        any existing policy.  Can return `NOT_FOUND`, `INVALID_ARGUMENT`,
        and `PERMISSION_DENIED` errors.

        Note - this resource's API doesn't support deletion.  When deleted,
        the resource will persist on Google Cloud even though it will be
        deleted from Pulumi state.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param audit_configs: Cloud audit logging configuration for this policy.
        :param bindings: Role bindings associating principals with a role;
               each binding may carry a `condition` controlling when it applies.
        :param etag: Optimistic-concurrency token; echo the value from
               `getIamPolicy` into `setIamPolicy` to avoid overwriting
               concurrent updates (required when using IAM Conditions).
        :param update_mask: OPTIONAL FieldMask of policy fields to modify;
               defaults to `paths: "bindings, etag"` when omitted.
        :param version: Policy format version (`0`, `1`, or `3`); conditional
               role bindings require version `3`.  See
               https://cloud.google.com/iam/help/conditions/resource-policies.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DatasetFhirStoreIamPolicyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Construct the resource from a DatasetFhirStoreIamPolicyArgs bundle.

        Sets the access control policy on the specified resource, replacing
        any existing policy.  Can return `NOT_FOUND`, `INVALID_ARGUMENT`,
        and `PERMISSION_DENIED` errors.

        Note - this resource's API doesn't support deletion.  When deleted,
        the resource will persist on Google Cloud even though it will be
        deleted from Pulumi state.

        :param str resource_name: The name of the resource.
        :param DatasetFhirStoreIamPolicyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: get_resource_args_opts
        # returns a populated args object when the caller used the
        # args-bundle form, otherwise None.
        resource_args, opts = _utilities.get_resource_args_opts(DatasetFhirStoreIamPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-bundle form: expand the dataclass fields into keywords.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: forward the original call unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       audit_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuditConfigArgs']]]]] = None,
                       bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BindingArgs']]]]] = None,
                       dataset_id: Optional[pulumi.Input[str]] = None,
                       etag: Optional[pulumi.Input[str]] = None,
                       fhir_store_id: Optional[pulumi.Input[str]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       update_mask: Optional[pulumi.Input[str]] = None,
                       version: Optional[pulumi.Input[int]] = None,
                       __props__=None):
        """Shared constructor body used by both __init__ overloads."""
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by id):
            # build the property bag and validate required inputs.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DatasetFhirStoreIamPolicyArgs.__new__(DatasetFhirStoreIamPolicyArgs)

            __props__.__dict__["audit_configs"] = audit_configs
            __props__.__dict__["bindings"] = bindings
            # dataset_id/fhir_store_id are required unless this is a
            # lookup-by-URN, where the engine already knows them.
            if dataset_id is None and not opts.urn:
                raise TypeError("Missing required property 'dataset_id'")
            __props__.__dict__["dataset_id"] = dataset_id
            __props__.__dict__["etag"] = etag
            if fhir_store_id is None and not opts.urn:
                raise TypeError("Missing required property 'fhir_store_id'")
            __props__.__dict__["fhir_store_id"] = fhir_store_id
            __props__.__dict__["location"] = location
            __props__.__dict__["project"] = project
            __props__.__dict__["update_mask"] = update_mask
            __props__.__dict__["version"] = version
        # Changing any identifying field forces a replacement rather than an
        # in-place update, since the policy is addressed by these values.
        replace_on_changes = pulumi.ResourceOptions(replace_on_changes=["dataset_id", "fhir_store_id", "location", "project"])
        opts = pulumi.ResourceOptions.merge(opts, replace_on_changes)
        super(DatasetFhirStoreIamPolicy, __self__).__init__(
            'google-native:healthcare/v1beta1:DatasetFhirStoreIamPolicy',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'DatasetFhirStoreIamPolicy':
        """
        Get an existing DatasetFhirStoreIamPolicy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All outputs start as None; the engine fills them from provider
        # state.  update_mask is deliberately absent: it is an input-only
        # field with no corresponding output property below.
        __props__ = DatasetFhirStoreIamPolicyArgs.__new__(DatasetFhirStoreIamPolicyArgs)

        __props__.__dict__["audit_configs"] = None
        __props__.__dict__["bindings"] = None
        __props__.__dict__["dataset_id"] = None
        __props__.__dict__["etag"] = None
        __props__.__dict__["fhir_store_id"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["project"] = None
        __props__.__dict__["version"] = None
        return DatasetFhirStoreIamPolicy(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="auditConfigs")
    def audit_configs(self) -> pulumi.Output[Sequence['outputs.AuditConfigResponse']]:
        """
        Specifies cloud audit logging configuration for this policy.
        """
        return pulumi.get(self, "audit_configs")

    @property
    @pulumi.getter
    def bindings(self) -> pulumi.Output[Sequence['outputs.BindingResponse']]:
        """
        Role bindings associating a list of principals with a role, optionally
        gated by a `condition`.  Each binding must contain at least one
        principal; a policy can refer to up to 1,500 principals, of which at
        most 250 may be Google groups (each occurrence counts).
        """
        return pulumi.get(self, "bindings")

    @property
    @pulumi.getter(name="datasetId")
    def dataset_id(self) -> pulumi.Output[str]:
        """Id of the parent healthcare dataset."""
        return pulumi.get(self, "dataset_id")

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        Optimistic-concurrency token guarding read-modify-write policy updates.
        Pass the `etag` returned by `getIamPolicy` back to `setIamPolicy`;
        omitting it can downgrade a version `3` policy and lose its conditions.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter(name="fhirStoreId")
    def fhir_store_id(self) -> pulumi.Output[str]:
        """Id of the FHIR store whose IAM policy is managed."""
        return pulumi.get(self, "fhir_store_id")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """Location of the dataset."""
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """Project the dataset belongs to."""
        return pulumi.get(self, "project")

    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[int]:
        """
        Policy format version (`0`, `1`, or `3`).  Operations that touch
        conditional role bindings must specify version `3`; policies without
        conditions may use any valid version or leave the field unset.  See
        the IAM documentation on resource policies for details.
        """
        return pulumi.get(self, "version")
|
PypiClean
|
/ScaffoldGraphDG-1.1.8.tar.gz/ScaffoldGraphDG-1.1.8/scaffoldgraph/tree.py
|
from pickle import FALSE
from rdkit.Chem import rdmolops, MolToMolBlock, MolFromMolBlock, SetAromaticity, AROMATICITY_SIMPLE
from .core import ScaffoldGraph, Scaffold, MurckoRingFragmenter
from .core.fragment import get_murcko_scaffold
from .prioritization import original_ruleset
from .utils import suppress_rdlogger
class ScaffoldTree(ScaffoldGraph):
    """
    Class representing a scaffold tree.

    Explore scaffold-space through the iterative removal of the least-characteristic
    ring from a molecular scaffold. The output is a tree of molecular scaffolds.

    Examples
    --------
    Create a ScaffoldTree from a SMILES file.

    >>> import scaffoldgraph as sg
    >>> tree = sg.ScaffoldTree.from_smiles_file('my_file.smi', progress=True)
    >>> tree.num_scaffold_nodes
    75

    Create a ScaffoldTree from an SDF.

    >>> tree = sg.ScaffoldTree.from_sdf('my_file.sdf', progress=True)

    If the SDF is zipped:

    >>> tree = sg.ScaffoldTree.from_sdf('my_file.sdf.gz', zipped=True)

    Get scaffold nodes:

    >>> list(tree.get_scaffold_nodes())
    ['O=C(OCOC(=O)c1cccc2ncn(Cc3ccccc3)c12)OC1CCCCC1',
     'O=C(OCOC(=O)c1cccc2nc[nH]c12)OC1CCCCC1',
     ...]

    Include node attributes:

    >>> list(tree.get_scaffold_nodes(data=True))
    [('O=C(OCOC(=O)c1cccc2ncn(Cc3ccccc3)c12)OC1CCCCC1', {'type': 'scaffold', 'hierarchy': 4}),
     ('O=C(OCOC(=O)c1cccc2nc[nH]c12)OC1CCCCC1', {'type': 'scaffold', 'hierarchy': 3}),
     ...]

    Get molecule nodes (use data=True to get attributes):

    >>> list(tree.get_molecule_nodes())
    ['DB00006',
     'DB00007',
     'DB00014',
     ...]

    References
    ----------
    .. [1] Schuffenhauer, A., Ertl, P., Roggo, S., Wetzel, S., Koch, M. A., and Waldmann, H. (2007).
       The scaffold tree visualization of the scaffold universe by hierarchical scaffold classification.
       Journal of Chemical Information and Modeling, 47(1), 47–58. PMID: 17238248.

    See Also
    --------
    ScaffoldGraph
    ScaffoldNetwork
    HierS

    """

    def __init__(self, graph=None, prioritization_rules=None, **kwargs):
        """Initialize a ScaffoldTree.

        Parameters
        ----------
        graph : input graph, optional
            Data to initialize graph. If None (default) an empty
            graph is created. The data can be any format that is supported
            by the ``to_networkx_graph()`` function, currently including
            edge list, dict of dicts, dict of lists, NetworkX graph,
            NumPy matrix or 2d ndarray, SciPy sparse matrix,
            or PyGraphviz graph. This argument is passed to the networkx
            DiGraph constructor.
        prioritization_rules : ScaffoldRuleSet
            Ruleset for prioritizing parent scaffolds during tree
            construction.

        """
        super(ScaffoldTree, self).__init__(graph, MurckoRingFragmenter(True), 'tree')
        # Legacy debug counter; kept so any external code touching it keeps working.
        self.count = 0
        self.rules = prioritization_rules if prioritization_rules else original_ruleset

    def _hierarchy_constructor(self, child):
        """Recursively construct the tree above ``child``.

        Fragment ``child`` into candidate parent scaffolds, keep only the
        single parent chosen by the prioritization ruleset, connect it to
        ``child`` and recurse while the chosen parent still has more than
        one ring.
        """
        parents = [p for p in self.fragmenter.fragment(child) if p]
        if not parents:
            return
        parent = self.rules(child, parents)
        if not parent:
            return
        deletion_rule = parent.prioritization_rule
        if parent in self.nodes:
            self.add_scaffold_edge(parent, child, rule=deletion_rule)
        else:
            self.add_scaffold_node(parent)
            self.add_scaffold_edge(parent, child, rule=deletion_rule)
            if parent.rings.count > 1:
                self._hierarchy_constructor(parent)

    @property
    def prioritization_rules(self):
        """ScaffoldRuleSet : Return the prioritization ruleset used."""
        return self.rules
@suppress_rdlogger()
def tree_frags_from_mol(mol, prioritization_rules=None):
    """Generate a scaffold tree from a single molecule without using networkx.

    Parameters
    ----------
    mol : rdkit.Chem.rdchem.Mol
        rdkit molecule for processing.
    prioritization_rules : ScaffoldRuleSet, optional
        rules for prioritizing parent scaffolds. If
        not supplied the original rules are used.
        The default is None.

    Returns
    -------
    parents
        An ordered list of rdkit Mols representing a scaffold tree.

    Examples
    --------
    Generating scaffold tree fragments:

    >>> from rdkit import Chem
    >>> smiles = 'Cc1[nH]cnc1Cn1cccc(-c2ccccc2O)c1=O'
    >>> molecule = Chem.MolFromSmiles(smiles)
    >>> frags = tree_frags_from_mol(molecule)

    """
    root = Scaffold(get_murcko_scaffold(mol))
    rdmolops.RemoveStereochemistry(root.mol)
    fragmenter = MurckoRingFragmenter(use_scheme_4=True)
    ruleset = prioritization_rules if prioritization_rules else original_ruleset
    hierarchy = [root]
    current = root
    # Iteratively peel off the least-characteristic ring: fragment, let the
    # ruleset pick one parent, and continue while that parent is multi-ring.
    while True:
        candidates = [frag for frag in fragmenter.fragment(current) if frag]
        if not candidates:
            break
        current = ruleset(current, candidates)
        hierarchy.append(current)
        if current.rings.count <= 1:
            break
    return [scaffold.mol for scaffold in hierarchy]
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/MybankCreditLoantradeBillBudgetQueryModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MultiCurrencyMoney import MultiCurrencyMoney
from alipay.aop.api.domain.UserVo import UserVo
class MybankCreditLoantradeBillBudgetQueryModel(object):
    """Request model for the Mybank credit loan-trade bill budget query API."""

    # Serialized field names, in the order used by the alipay payload.
    _FIELD_NAMES = (
        'apply_amount',
        'bill_no',
        'out_request_no',
        'product_code',
        'repay_budget_type',
        'user',
    )

    def __init__(self):
        self._apply_amount = None
        self._bill_no = None
        self._out_request_no = None
        self._product_code = None
        self._repay_budget_type = None
        self._user = None

    @property
    def apply_amount(self):
        return self._apply_amount

    @apply_amount.setter
    def apply_amount(self, value):
        # Coerce plain dict payloads into the typed money object.
        self._apply_amount = (
            value
            if isinstance(value, MultiCurrencyMoney)
            else MultiCurrencyMoney.from_alipay_dict(value)
        )

    @property
    def bill_no(self):
        return self._bill_no

    @bill_no.setter
    def bill_no(self, value):
        self._bill_no = value

    @property
    def out_request_no(self):
        return self._out_request_no

    @out_request_no.setter
    def out_request_no(self, value):
        self._out_request_no = value

    @property
    def product_code(self):
        return self._product_code

    @product_code.setter
    def product_code(self, value):
        self._product_code = value

    @property
    def repay_budget_type(self):
        return self._repay_budget_type

    @repay_budget_type.setter
    def repay_budget_type(self, value):
        self._repay_budget_type = value

    @property
    def user(self):
        return self._user

    @user.setter
    def user(self, value):
        # Coerce plain dict payloads into the typed user object.
        self._user = value if isinstance(value, UserVo) else UserVo.from_alipay_dict(value)

    def to_alipay_dict(self):
        """Serialize all truthy fields into an alipay-style dict."""
        params = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value:
                params[name] = (
                    value.to_alipay_dict()
                    if hasattr(value, 'to_alipay_dict')
                    else value
                )
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model instance from an alipay-style dict; None for empty input."""
        if not d:
            return None
        o = MybankCreditLoantradeBillBudgetQueryModel()
        for name in MybankCreditLoantradeBillBudgetQueryModel._FIELD_NAMES:
            if name in d:
                # setattr routes through the property setters above.
                setattr(o, name, d[name])
        return o
|
PypiClean
|
/intel_extension_for_pytorch-2.0.100-cp38-cp38-manylinux2014_x86_64.whl/intel_extension_for_pytorch/tpp/fused_bert.py
|
import math
import torch
from torch import nn
from torch.nn.parameter import Parameter
from torch.nn import init
from torch.autograd import Function
from .utils.blocked_layout import (
BlockedParameter,
BlockedModule,
BlockedTensor,
get_blocking_signature,
)
import time
from contextlib import contextmanager
try:
from transformers.modeling_utils import apply_chunking_to_forward
from transformers.modeling_outputs import BaseModelOutputWithPastAndCrossAttentions
except:
pass
# Module-level switches for the fused TPP BERT layers.
USE_BF16_PARAMS = True  # when bf16 layers are enabled, also store blocked params in bf16
layer_use_bf16 = False  # module-wide flag: run the fused layers in bfloat16
unpad = True  # strip padded positions (see generate_mask) before the fused ops
print_cou = 0  # running file counter for print_grad_hook's gradient dumps
def print_grad_hook(var, name):
    """Attach a debugging hook to ``var``'s grad_fn.

    When backward reaches ``var``, the hook prints the incoming gradient's
    absolute sum and saves the gradient tuple to ``tmp_u_<counter>.pt``,
    bumping the module-level ``print_cou`` counter each time. Objects
    without a ``grad_fn`` attribute are ignored.
    """
    if not hasattr(var, "grad_fn"):
        return

    def _dump_grads(grad_input, grad_output):
        global print_cou
        print(f"TESTGRADU {name}: {var.grad_fn.name()} - {grad_input[0].abs().sum()}")
        torch.save(grad_input, "tmp_u_%d.pt" % print_cou)
        print_cou += 1

    var.grad_fn.register_hook(_dump_grads)
def generate_mask(attention_mask):
    """Convert a ``[B, 1, 1, S]`` additive attention mask into (un)padded form.

    Returns a tuple ``(msk, attention_mask, seq_offsets, seq_sqr_offsets)``:
    ``msk`` is a boolean ``[B, S]`` keep-mask, ``attention_mask`` the kept mask
    values, and the two offset tensors are cumulative per-sequence block
    counts (linear and squared) used by the unpadded fused kernels.

    NOTE(review): the ``+ 10000`` shift assumes padded positions hold the
    usual HuggingFace additive value of -10000 — confirm against callers.
    """
    # BUG FIX (idiom): was `assert not attention_mask is None`.
    assert attention_mask is not None, "attention_mask is None"
    B, _, _, S = attention_mask.shape
    S1, S2 = BlockedModule.default_blocking_factors(S)
    attention_mask = attention_mask.view([B, S]).clone()
    if unpad:
        # Count valid tokens per sequence and round up to a multiple of the
        # S2 block size so whole blocks are kept.
        nnz = (((attention_mask + 10000).count_nonzero(dim=-1) + (S2 - 1)) // S2) * S2
        nnz1 = nnz.unsqueeze(dim=1).expand([-1, S])
        a = torch.arange(S).expand([B, -1])
        msk = a < nnz1
        attention_mask = attention_mask[msk].clone()
        seq_offsets = torch.cat([torch.zeros([1]), nnz // S2]).to(torch.long)
    else:
        # Keep everything: every sequence contributes S // S2 blocks.
        msk = torch.ones_like(attention_mask).to(torch.bool)
        seq_offsets = torch.cat([torch.zeros([1]), torch.ones([B]) * S // S2]).to(torch.long)
    seq_sqr_offsets = seq_offsets * seq_offsets
    seq_offsets = seq_offsets.cumsum(dim=0)
    seq_sqr_offsets = seq_sqr_offsets.cumsum(dim=0)
    return msk, attention_mask, seq_offsets, seq_sqr_offsets
class PadInput(torch.autograd.Function):
    """Scatter unpadded rows back into a zero-filled padded tensor.

    ``forward`` writes the rows of ``input`` into the positions of a new
    zero tensor of ``padded_shape`` selected by the boolean mask ``msk``;
    ``backward`` gathers the gradient rows back out of the padded layout.
    """

    @staticmethod
    def forward(ctx, input, msk, padded_shape):
        ctx.save_for_backward(msk)
        padded = input.new_zeros(padded_shape)
        padded[msk, :] = input
        return padded

    @staticmethod
    def backward(ctx, grad_output):
        (msk,) = ctx.saved_tensors
        # No gradients for msk / padded_shape.
        return grad_output[msk, :], None, None
class UnpadInput(torch.autograd.Function):
    """Gather only the rows selected by a boolean mask (drop padding).

    ``forward`` remembers the original padded shape; ``backward`` scatters
    the gradient back into a zero tensor of that shape.
    """

    @staticmethod
    def forward(ctx, input, msk):
        ctx.save_for_backward(msk)
        ctx.shape = input.shape
        return input[msk, :]

    @staticmethod
    def backward(ctx, grad_output):
        (msk,) = ctx.saved_tensors
        grad_input = grad_output.new_zeros(ctx.shape)
        grad_input[msk, :] = grad_output
        # No gradient for msk.
        return grad_input, None
# class DummyLinear(BlockedModule):
# def __init__(self, in_features, out_features, bias=True):
# super(DummyLinear, self).__init__()
# self.weight = BlockedParameter(torch.Tensor(out_features, in_features))
# if bias:
# self.bias = BlockedParameter(torch.Tensor(out_features))
# else:
# self.register_parameter("bias", None)
# self.reset_parameters()
#
# def reset_parameters(self):
# init.kaiming_uniform_(self.weight, a=math.sqrt(5))
# if self.bias is not None:
# fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
# bound = 1 / math.sqrt(fan_in)
# init.uniform_(self.bias, -bound, bound)
#
# def forward(self, input):
# raise NotImplemented
# return input
class DummyLinear(BlockedModule, torch.nn.Linear):
    """``nn.Linear`` whose parameters are wrapped as ``BlockedParameter``s.

    Placeholder module: its parameters are consumed directly by the fused
    kernels, so ``forward`` must never be called.
    """

    def __init__(self, in_features, out_features, bias=True):
        torch.nn.Linear.__init__(self, in_features, out_features, bias)
        self.weight = BlockedParameter(self.weight.data)
        if bias:
            self.bias = BlockedParameter(self.bias.data)

    def forward(self, input):
        # BUG FIX: `raise NotImplemented` raises a TypeError (NotImplemented
        # is a sentinel, not an exception class); the unreachable `return`
        # after it is also dropped.
        raise NotImplementedError("DummyLinear.forward should never be called")
class DummyLayerNorm(BlockedModule, torch.nn.LayerNorm):
    """``nn.LayerNorm`` whose affine parameters are ``BlockedParameter``s.

    Placeholder module: its parameters are consumed directly by the fused
    kernels, so ``forward`` must never be called.
    """

    def __init__(self, *args, **kwargs):
        torch.nn.LayerNorm.__init__(self, *args, **kwargs)
        if self.elementwise_affine:
            self.weight = BlockedParameter(self.weight.data)
            self.bias = BlockedParameter(self.bias.data)

    def forward(self, input):
        # BUG FIX: `raise NotImplemented` raises a TypeError (NotImplemented
        # is a sentinel, not an exception class); the unreachable `return`
        # after it is also dropped.
        raise NotImplementedError("DummyLayerNorm.forward should never be called")
class BertSelfAttentionFunction(torch.autograd.Function):
    """Autograd wrapper around the torch_ipex fused self-attention kernels.

    ``forward`` calls ``fused_self_attention_fwd_unpad`` and saves the
    intermediates that ``fused_self_attention_bwd_unpad`` needs in
    ``backward``. The ordering of saved tensors is significant and must
    match the backward kernel's expectations.
    """

    @staticmethod
    def forward(ctx, p, training, need_attention_output, *inputs):
        # p: attention-dropout probability (0.0 in eval mode).
        # training: whether dropout masks etc. must be produced for backward.
        # need_attention_output: when True, also return the attention probs.
        # inputs: (qw, qb, kw, kb, vw, vb, hidden_states, attention_mask,
        # head_mask, encoder_hidden_states, encoder_attention_mask,
        # seq_offsets, seq_sqr_offsets) — see BertSelfAttention.forward.
        # print("FWD Called")
        # print("BSAFWD:", [t.shape if isinstance(t, torch.Tensor) else t for t in inputs[6:]])
        (
            context_layer,
            attention_probs_out,
            hs_t,
            ehs_t,
            ql_t,
            kl_tv,
            vl_tv,
            ap,
            apd_t,
            ap_dp_mask,
        ) = torch.ops.torch_ipex.fused_self_attention_fwd_unpad(p, inputs, training)
        (qw, qb, kw, kb, vw, vb, hs, am, hm, ehs, eam, offs, offs2) = inputs
        ctx.save_for_backward(
            qw,
            kw,
            vw,
            hs_t,
            hm,
            ehs_t,
            ql_t,
            kl_tv,
            vl_tv,
            ap,
            apd_t,
            ap_dp_mask,
            offs,
            offs2,
        )
        ctx.p = p
        # Debug helper: scan the fused kernel outputs for NaNs.
        # stop = False
        # for i, t in enumerate([context_layer, attention_probs_out, hs_t, ehs_t, ql_t, kl_tv, vl_tv, ap, apd_t, ap_dp_mask]):
        #     nan = t.isnan().any().item()
        #     stop = stop or nan
        #     if nan: print ("Nan found in %d tensor" % i)
        # if stop: raise "Nan Found"
        # print("Returning from FWD")
        if need_attention_output:
            return context_layer, attention_probs_out
        else:
            return (context_layer,)

    @staticmethod
    def backward(ctx, *grad_outs):
        # print("BWD Called")
        inputs = []
        inputs += [g.contiguous() for g in grad_outs]
        if len(inputs) == 1:
            # The backward kernel expects two gradient slots; pad with an
            # empty tensor when forward returned only the context layer.
            inputs.append(inputs[0].new_empty(0))
        inputs += ctx.saved_tensors
        p = ctx.p
        (
            dqw,
            dqb,
            dkw,
            dkb,
            dvw,
            dvb,
            dhs,
            dehs,
        ) = torch.ops.torch_ipex.fused_self_attention_bwd_unpad(p, inputs)
        # inputs[7] is the saved ehs_t (2 gradient slots followed by saved
        # tensors qw, kw, vw, hs_t, hm, ehs_t, ...).
        ehs = inputs[7]
        if ehs is None:
            dehs = None
        # print("Returning from BWD")
        # print("DHS:", dhs.view([-1])[:4])
        # Leading Nones match the non-tensor forward args (p, training,
        # need_attention_output); trailing Nones match mask/offset inputs.
        return (
            None,
            None,
            None,
            dqw,
            dqb,
            dkw,
            dkb,
            dvw,
            dvb,
            dhs,
            None,
            None,
            dehs,
            None,
            None,
            None,
        )
class BertSelfAttention(BlockedModule):
    r"""PCL Bert Self Attention Layer using libxsmm blocked GEMM."""

    def __init__(self, config):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(
            config, "embedding_size"
        ):
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention "
                "heads (%d)" % (config.hidden_size, config.num_attention_heads)
            )
        self.num_attention_heads = config.num_attention_heads  # N
        self.attention_head_size = int(
            config.hidden_size / config.num_attention_heads
        )  # H
        self.all_head_size = self.num_attention_heads * self.attention_head_size  # NH
        self.hidden_size = config.hidden_size  # HS
        self.attention_probs_dropout_prob = config.attention_probs_dropout_prob
        self.query = DummyLinear(config.hidden_size, self.all_head_size)
        self.key = DummyLinear(config.hidden_size, self.all_head_size)
        self.value = DummyLinear(config.hidden_size, self.all_head_size)
        self.is_decoder = config.is_decoder
        self.position_embedding_type = getattr(
            config, "position_embedding_type", "absolute"
        )
        assert (
            self.position_embedding_type == "absolute"
        ), "self.position_embedding_type other than absolute not supported"
        # Block Q/K/V weights into [head, head] tiles for the fused kernels.
        self.query.weight.set_blocking_param(
            ([self.attention_head_size, self.attention_head_size], [0, 2, 3, 1],)
        )
        self.key.weight.set_blocking_param(
            ([self.attention_head_size, self.attention_head_size], [0, 2, 3, 1],)
        )
        self.value.weight.set_blocking_param(
            ([self.attention_head_size, self.attention_head_size], [0, 2, 3, 1],)
        )
        self.blocked_input_signature = get_blocking_signature("SF", "SFSF")
        if layer_use_bf16 and USE_BF16_PARAMS:
            # bf16 layout: split the inner tile dimension into pairs.
            self.query.weight.set_blocking_param(
                (
                    [self.attention_head_size, [self.attention_head_size // 2, 2]],
                    [0, 2, 3, 1, 4],
                    torch.bfloat16,
                )
            )
            self.key.weight.set_blocking_param(
                (
                    [self.attention_head_size, [self.attention_head_size // 2, 2]],
                    [0, 2, 3, 1, 4],
                    torch.bfloat16,
                )
            )
            self.value.weight.set_blocking_param(
                (
                    [self.attention_head_size, [self.attention_head_size // 2, 2]],
                    [0, 2, 3, 1, 4],
                    torch.bfloat16,
                )
            )
            self.query.bias.set_blocking_param((None, None, torch.bfloat16))
            self.key.bias.set_blocking_param((None, None, torch.bfloat16))
            self.value.bias.set_blocking_param((None, None, torch.bfloat16))
        self.use_bf16 = layer_use_bf16

    def maybe_block_params(self):
        """Apply the registered blocking layouts to the Q/K/V parameters."""
        self.query.weight.block()
        self.key.weight.block()
        self.value.weight.block()
        self.query.bias.block()
        self.key.bias.block()
        self.value.bias.block()

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
        seq_offsets=None,
        seq_sqr_offsets=None,
    ):
        """Run fused self-attention on blocked tensors.

        Returns ``(context_layer,)`` or ``(context_layer, attention_probs)``
        when ``output_attentions`` is True. ``past_key_value`` is not
        supported.
        """
        # Idiom fix: was `assert past_key_value == None`.
        assert past_key_value is None, "past_key_value not supported"
        self.maybe_block_params()
        if encoder_hidden_states is not None:
            assert encoder_hidden_states.shape == hidden_states.shape, (
                "Different shapes not supported(%s != %s)"
                % (encoder_hidden_states.shape, hidden_states.shape,)
            )
            encoder_hidden_states = self.get_blocked_tensor(
                encoder_hidden_states,
                self.blocked_input_signature,
                [None, self.attention_head_size],
            )
        orig_hidden_states = hidden_states
        hidden_states = self.get_blocked_tensor(
            hidden_states,
            self.blocked_input_signature,
            [None, self.attention_head_size],
        )
        inputs = [
            self.query.weight,
            self.query.bias,
            self.key.weight,
            self.key.bias,
            self.value.weight,
            self.value.bias,
        ]
        inputs.append(hidden_states)
        if attention_mask is not None:
            attention_mask = attention_mask.contiguous()
        if head_mask is not None:
            print(f"head_mask: {head_mask.shape}")
        if encoder_attention_mask is not None:
            print(f"encoder_attention_mask: {encoder_attention_mask.shape}")
            assert (
                encoder_attention_mask.size(1) == encoder_attention_mask.size(2) == 1
            ), (
                "unsupported encoder_attention_mask shape %s"
                % (encoder_attention_mask.shape,)
            )
            encoder_attention_mask = encoder_attention_mask.contiguous()
        # Missing optional inputs are passed as empty tensors; the fused
        # kernel uses emptiness as the "not provided" signal.
        inputs.append(attention_mask if attention_mask is not None else torch.Tensor())
        inputs.append(head_mask if head_mask is not None else torch.Tensor())
        inputs.append(
            encoder_hidden_states
            if encoder_hidden_states is not None
            else torch.Tensor()
        )
        inputs.append(
            encoder_attention_mask
            if encoder_attention_mask is not None
            else torch.Tensor()
        )
        inputs.append(seq_offsets if seq_offsets is not None else torch.Tensor())
        inputs.append(
            seq_sqr_offsets if seq_sqr_offsets is not None else torch.Tensor()
        )
        p = self.attention_probs_dropout_prob if self.training else 0.0
        if self.use_bf16:
            inputs = [
                i.to(torch.bfloat16) if i.is_floating_point() else i for i in inputs
            ]
        outputs = BertSelfAttentionFunction.apply(
            p, self.training, output_attentions, *inputs
        )
        context_layer = outputs[0]
        context_layer = BlockedTensor(
            context_layer, self.blocked_input_signature, orig_hidden_states.dtype
        )
        if output_attentions:
            print("Reshaping output_attentions")
            attention_probs = outputs[1]
            # BUG FIX: B and S were previously undefined here (NameError as
            # soon as output_attentions=True). Derive them from the blocked
            # probabilities, assumed to be laid out as [B, S1, N, S2, S1, S2]
            # to match the permute below — TODO confirm against the fused
            # kernel's output layout.
            B, S1, _, S2 = attention_probs.shape[:4]
            S = S1 * S2
            attention_probs = (
                attention_probs.permute([0, 2, 1, 4, 3, 5])
                .contiguous()
                .view([B, self.num_attention_heads, S, S])
                .to(orig_hidden_states.dtype)
            )
        outputs = (
            (context_layer, attention_probs) if output_attentions else (context_layer,)
        )
        return outputs
class BertOutputBaseFunction(torch.autograd.Function):
    """Autograd wrapper for the fused dense -> dropout -> residual LayerNorm op."""

    @staticmethod
    def forward(ctx, p, eps, training, *inputs):
        # p: dropout probability; eps: LayerNorm epsilon.
        # inputs: (hidden_states, residual_input, dense.weight, dense.bias,
        # LayerNorm.weight, LayerNorm.bias)
        (inp, inp2, wt, bias, gamma, beta) = inputs
        # print("A")
        outputs = torch.ops.torch_ipex.fused_dense_dropout_layernorm_fwd_unpad(
            p, eps, inputs, training
        )
        # print("B")
        # out: final output; dout: pre-LayerNorm intermediate; mean/var:
        # LayerNorm statistics; dp_mask: dropout mask — all saved for backward.
        (out, dout, mean, var, dp_mask) = outputs
        ctx.save_for_backward(inp, wt, gamma, mean, var, dout, dp_mask)
        # print("C")
        ctx.p = p
        return out

    @staticmethod
    def backward(ctx, *grad_outs):
        inputs = list(grad_outs)
        inputs += ctx.saved_tensors
        (
            grad_inp,
            grad_inp2,
            grad_wt,
            grad_bias,
            grad_gamma,
            grad_beta,
        ) = torch.ops.torch_ipex.fused_dense_dropout_layernorm_bwd_unpad(ctx.p, inputs)
        # Leading Nones match the non-tensor forward args (p, eps, training).
        return (
            None,
            None,
            None,
            grad_inp,
            grad_inp2,
            grad_wt,
            grad_bias,
            grad_gamma,
            grad_beta,
        )
class BertOutputBase(BlockedModule):
    """Fused dense -> dropout -> residual-add -> LayerNorm block.

    Covers both BERT projections: "self output" (hidden -> hidden) and
    "output" (intermediate -> hidden), selected by ``selfOutput``.
    """

    def __init__(self, config, selfOutput):
        # selfOutput=True: input dim is hidden_size; False: intermediate_size.
        super().__init__()
        ifm = config.hidden_size if selfOutput else config.intermediate_size
        self.dense = DummyLinear(ifm, config.hidden_size)
        # self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.LayerNorm = DummyLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.hidden_dropout_prob = config.hidden_dropout_prob
        self.layer_norm_eps = config.layer_norm_eps
        self.attention_head_size = config.hidden_size // config.num_attention_heads
        # self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Block the dense weight into [head, head] tiles for the fused kernel.
        self.dense.weight.set_blocking_param(
            ([self.attention_head_size, self.attention_head_size], [0, 2, 3, 1],)
        )
        self.blocked_input_signature = get_blocking_signature("SF", "SFSF")
        if layer_use_bf16 == True and USE_BF16_PARAMS:
            # bf16 layout: split the inner tile dimension into pairs.
            self.dense.weight.set_blocking_param(
                (
                    [self.attention_head_size, [self.attention_head_size // 2, 2]],
                    [0, 2, 3, 1, 4],
                    torch.bfloat16,
                )
            )
            self.dense.bias.set_blocking_param((None, None, torch.bfloat16))
            self.LayerNorm.weight.set_blocking_param((None, None, torch.bfloat16))
            self.LayerNorm.bias.set_blocking_param((None, None, torch.bfloat16))
        self.use_bf16 = layer_use_bf16
        # print(f"config.hidden_size = {config.hidden_size}, ifm = {ifm}, p = {config.hidden_dropout_prob}, eps = {config.layer_norm_eps}")

    def maybe_block_params(self):
        """Apply the registered blocking layouts to all parameters."""
        self.dense.weight.block()
        self.dense.bias.block()
        self.LayerNorm.weight.block()
        self.LayerNorm.bias.block()

    def forward(self, hidden_states, input_tensor):
        """Compute ``LayerNorm(dropout(dense(hidden_states)) + input_tensor)`` fused."""
        self.maybe_block_params()
        orig_hidden_states = hidden_states
        hidden_states = self.get_blocked_tensor(
            hidden_states,
            self.blocked_input_signature,
            [None, self.attention_head_size],
        )
        input_tensor = self.get_blocked_tensor(
            input_tensor,
            self.blocked_input_signature,
            [None, self.attention_head_size],
        )
        inputs = [
            hidden_states,
            input_tensor,
            self.dense.weight,
            self.dense.bias,
            self.LayerNorm.weight,
            self.LayerNorm.bias,
        ]
        # Dropout only in training mode.
        p = self.hidden_dropout_prob if self.training else 0.0
        if self.use_bf16:
            inputs = [
                i.to(torch.bfloat16) if i.is_floating_point() else i for i in inputs
            ]
        ret = BertOutputBaseFunction.apply(
            p, self.layer_norm_eps, self.training, *inputs
        )
        # ret = ret.to(hidden_states.dtype)
        ret = BlockedTensor(ret, self.blocked_input_signature, orig_hidden_states.dtype)
        return ret
        # Unfused reference implementation:
        # hidden_states = self.dense(hidden_states)
        # hidden_states = self.dropout(hidden_states)
        # hidden_states = self.LayerNorm(hidden_states + input_tensor)
        # return hidden_states
class BertSelfOutput(BertOutputBase):
    """Attention-output projection (hidden -> hidden) with dropout + residual LayerNorm."""

    def __init__(self, config):
        super().__init__(config, True)
class BertOutput(BertOutputBase):
    """FFN output projection (intermediate -> hidden) with dropout + residual LayerNorm."""

    def __init__(self, config):
        super().__init__(config, False)
class BertIntermediateFunction(torch.autograd.Function):
    """Autograd wrapper for the fused dense + GELU intermediate op."""

    @staticmethod
    def forward(ctx, input, weight, bias, act, training):
        # assert act == "gelu_new", "%s activation type is not supported" % act
        # gelu_in is the pre-activation value, saved for the fused backward.
        gelu_in, output = torch.ops.torch_ipex.fused_dense_gelu_fwd_unpad(
            input, weight, bias, training
        )
        ctx.save_for_backward(input, weight, gelu_in)
        ctx.act = act
        return output

    @staticmethod
    def backward(ctx, grad_out):
        (input, weight, gelu_in) = ctx.saved_tensors
        grad_out = grad_out.contiguous()
        grad_inp, grad_wt, grad_bias = torch.ops.torch_ipex.fused_dense_gelu_bwd_unpad(
            grad_out, gelu_in, input, weight
        )
        # Trailing Nones match the non-tensor forward args (act, training).
        return (grad_inp, grad_wt, grad_bias, None, None)
class BertIntermediate(BlockedModule):
    """Fused BERT intermediate layer: blocked dense projection + GELU."""

    def __init__(self, config):
        super().__init__()
        self.dense = DummyLinear(config.hidden_size, config.intermediate_size)
        self.attention_head_size = config.hidden_size // config.num_attention_heads
        # Block the dense weight into [head, head] tiles for the fused kernel.
        self.dense.weight.set_blocking_param(
            ([self.attention_head_size, self.attention_head_size], [0, 2, 3, 1],)
        )
        assert config.hidden_act in ["gelu", "gelu_new"], (
            "Currently, only GELU new is supported in fused op, %s is given"
            % config.hidden_act
        )
        self.hidden_act = config.hidden_act
        self.blocked_input_signature = get_blocking_signature("SF", "SFSF")
        if layer_use_bf16 and USE_BF16_PARAMS:
            # bf16 layout: split the inner tile dimension into pairs.
            self.dense.weight.set_blocking_param(
                (
                    [self.attention_head_size, [self.attention_head_size // 2, 2]],
                    [0, 2, 3, 1, 4],
                    torch.bfloat16,
                )
            )
            self.dense.bias.set_blocking_param((None, None, torch.bfloat16))
        # Idiom fix: was the redundant `True if layer_use_bf16 else False`.
        self.use_bf16 = bool(layer_use_bf16)

    def maybe_block_params(self):
        """Apply the registered blocking layouts to the dense parameters."""
        self.dense.weight.block()
        self.dense.bias.block()

    def forward(self, hidden_states):
        """Compute ``GELU(dense(hidden_states))`` with the fused blocked kernel."""
        self.maybe_block_params()
        orig_hidden_states = hidden_states
        hidden_states = self.get_blocked_tensor(
            hidden_states,
            self.blocked_input_signature,
            [None, self.attention_head_size],
        )
        inputs = [hidden_states, self.dense.weight, self.dense.bias]
        if self.use_bf16:
            inputs = [
                i.to(torch.bfloat16) if i.is_floating_point() else i for i in inputs
            ]
        ret = BertIntermediateFunction.apply(*inputs, self.hidden_act, self.training)
        hidden_states = BlockedTensor(
            ret, self.blocked_input_signature, orig_hidden_states.dtype
        )
        return hidden_states
class BertEmbeddingsFunction(torch.autograd.Function):
    """Autograd wrapper for the fused embedding lookup + LayerNorm + dropout op."""

    @staticmethod
    def forward(ctx, training, prob, eps, head_size, pad_id, *inputs):
        # inputs: (input_ids, position_ids, token_type_ids, inputs_embeds,
        # LayerNorm.weight, LayerNorm.bias, word/position/token_type
        # embedding weights).
        (ii, pi, ti, ie, g, b, we, pe, te) = inputs
        (
            out,
            eout,
            mean,
            var,
            msk,
        ) = torch.ops.torch_ipex.fused_embedding_layernorm_dropout_fwd_unpad(
            prob, eps, head_size, pad_id, inputs, training
        )
        ctx.save_for_backward(ii, pi, ti, ie, g, we, pe, te, mean, var, eout, msk)
        ctx.prob = prob
        ctx.pad_id = pad_id
        return out

    @staticmethod
    def backward(ctx, *grad_outs):
        prob = ctx.prob
        pad_id = ctx.pad_id
        inputs = []
        inputs += [t.contiguous() for t in grad_outs]
        inputs += ctx.saved_tensors
        (
            die,
            dg,
            db,
            dwe,
            dpe,
            dte,
        ) = torch.ops.torch_ipex.fused_embedding_layernorm_dropout_bwd_unpad(prob, pad_id, inputs)
        # Integer id inputs get no gradient (first three Nones); the leading
        # five Nones in the final tuple match the non-tensor forward args
        # (training, prob, eps, head_size, pad_id).
        grad_inps = (
            None,
            None,
            None,
            die,
            dg,
            db,
            dwe,
            dpe,
            dte,
        )
        return (None, None, None, None, None) + grad_inps
class BertEmbeddings(BlockedModule):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(
            config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id
        )
        self.position_embeddings = nn.Embedding(
            config.max_position_embeddings, config.hidden_size
        )
        self.token_type_embeddings = nn.Embedding(
            config.type_vocab_size, config.hidden_size
        )
        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = DummyLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.layer_norm_eps = config.layer_norm_eps
        # self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.hidden_dropout_prob = config.hidden_dropout_prob
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.pad_token_id = config.pad_token_id
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer(
            "position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))
        )
        self.position_embedding_type = getattr(
            config, "position_embedding_type", "absolute"
        )
        assert (
            self.position_embedding_type == "absolute"
        ), f"position embedding type {self.position_embedding_type} not supported"
        self.blocked_ids_signature = get_blocking_signature("BS", "BSS")
        self.blocked_embed_signature = get_blocking_signature("BSF", "BSFSF")
        self.use_bf16 = layer_use_bf16
        # Log the configuration once (rank 0 only under torch.distributed).
        if not torch.distributed.is_initialized() or torch.distributed.get_rank() == 0:
            print(
                f"config.hidden_size = {config.hidden_size}, config.intermediate_size = {config.intermediate_size}, p = {config.hidden_dropout_prob}, eps = {config.layer_norm_eps}, bf16 = {layer_use_bf16}"
            )

    def forward(
        self,
        input_ids=None,
        token_type_ids=None,
        position_ids=None,
        inputs_embeds=None,
        past_key_values_length=0,
    ):
        """Fused embedding lookup + LayerNorm + dropout on blocked tensors.

        Missing id/embedding arguments are handed to the fused op as empty
        tensors; presumably the kernel substitutes its own defaults —
        TODO confirm against the torch_ipex op's contract.
        """
        assert past_key_values_length == 0, "past_key_values_length != 0 Not supported"
        if input_ids is not None:
            input_shape = input_ids.size()
            input_ids = self.get_blocked_tensor(
                input_ids, self.blocked_ids_signature, [None, None]
            )
        else:
            input_shape = inputs_embeds.size()[:-1]
            input_ids = torch.LongTensor()
            inputs_embeds = self.get_blocked_tensor(
                inputs_embeds,
                self.blocked_embed_signature,
                [None, self.attention_head_size],
            )
        # seq_length = input_shape[1]
        if position_ids is None:
            position_ids = torch.LongTensor()
        else:
            position_ids = self.get_blocked_tensor(
                position_ids, self.blocked_ids_signature, [None, None]
            )
        if token_type_ids is None:
            token_type_ids = torch.LongTensor()
        else:
            token_type_ids = self.get_blocked_tensor(
                token_type_ids, self.blocked_ids_signature, [None, None]
            )
        if inputs_embeds is None:
            inputs_embeds = torch.Tensor()
        # inputs_embeds = self.word_embeddings(input_ids)
        # position_embeddings = self.position_embeddings(position_ids)
        # token_type_embeddings = self.token_type_embeddings(token_type_ids)
        emb_weighs = [
            self.word_embeddings.weight,
            self.position_embeddings.weight,
            self.token_type_embeddings.weight,
        ]
        inputs = [
            input_ids,
            position_ids,
            token_type_ids,
            inputs_embeds,
            self.LayerNorm.weight,
            self.LayerNorm.bias,
        ]
        p = self.hidden_dropout_prob if self.training else 0.0
        if self.use_bf16:
            inputs = [
                i.to(torch.bfloat16) if i.is_floating_point() else i for i in inputs
            ]
        # Embedding weight tables are appended after the optional bf16
        # conversion above, so they keep their original dtype.
        inputs += emb_weighs
        embeddings = BertEmbeddingsFunction.apply(
            self.training,
            p,
            self.layer_norm_eps,
            self.attention_head_size,
            self.pad_token_id,
            *inputs,
        )
        # embeddings = BlockedTensor(embeddings, self.blocked_embed_signature, torch.bfloat16 if self.use_bf16 else torch.float)
        embeddings = BlockedTensor(
            embeddings, self.blocked_embed_signature, torch.float
        )
        # embeddings = inputs_embeds + position_embeddings + token_type_embeddings
        # embeddings = self.LayerNorm(embeddings)
        # embeddings = self.dropout(embeddings)
        return embeddings
class BertAttention(nn.Module):
    """Fused BERT attention block: blocked self-attention + self-output projection."""

    def __init__(self, config):
        super().__init__()
        self.self = BertSelfAttention(config)
        self.output = BertSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        """Prune the given attention heads from the query/key/value/output layers.

        Parameters
        ----------
        heads : collection of int
            Indices of the heads to remove.
        """
        if len(heads) == 0:
            return
        # BUG FIX: these two helpers were referenced but never imported
        # anywhere in this module, so calling prune_heads raised NameError.
        # Import lazily from transformers (already a dependency of this file).
        # NOTE(review): newer transformers versions moved them to
        # transformers.pytorch_utils — confirm against the pinned version.
        from transformers.modeling_utils import (
            find_pruneable_heads_and_indices,
            prune_linear_layer,
        )
        heads, index = find_pruneable_heads_and_indices(
            heads,
            self.self.num_attention_heads,
            self.self.attention_head_size,
            self.pruned_heads,
        )
        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = (
            self.self.attention_head_size * self.self.num_attention_heads
        )
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
        seq_offsets=None,
        seq_sqr_offsets=None,
    ):
        """Run self-attention, then the output projection with residual LayerNorm."""
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            encoder_attention_mask,
            past_key_value,
            output_attentions,
            seq_offsets=seq_offsets,
            seq_sqr_offsets=seq_sqr_offsets,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[
            1:
        ]  # add attentions if we output them
        return outputs
class BertLayer(nn.Module):
    """One transformer layer: self-attention, optional cross-attention, feed-forward."""
    def __init__(self, config):
        super().__init__()
        # Chunking the feed-forward over the sequence dim trades speed for memory.
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = BertAttention(config)
        self.is_decoder = config.is_decoder
        self.add_cross_attention = config.add_cross_attention
        if self.add_cross_attention:
            # Cross-attention only makes sense when this layer attends to an
            # encoder's outputs, i.e. when used as a decoder.
            assert (
                self.is_decoder
            ), f"{self} should be used as a decoder model if cross attention is added"
            self.crossattention = BertAttention(config)
        self.intermediate = BertIntermediate(config)
        self.output = BertOutput(config)
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
        seq_offsets=None,
        seq_sqr_offsets=None,
    ):
        """Apply (cross-)attention and the chunked feed-forward network.

        ``seq_offsets`` / ``seq_sqr_offsets`` are forwarded to the attention
        modules; they describe the unpadded (packed) sequence layout produced
        by the enclosing encoder.  Returns ``(layer_output, *attentions)``
        and, for decoders, the present key/value cache as the last element.
        """
        # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
        self_attn_past_key_value = (
            past_key_value[:2] if past_key_value is not None else None
        )
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask,
            head_mask,
            output_attentions=output_attentions,
            past_key_value=self_attn_past_key_value,
            seq_offsets=seq_offsets,
            seq_sqr_offsets=seq_sqr_offsets,
        )
        attention_output = self_attention_outputs[0]
        # if decoder, the last output is tuple of self-attn cache
        if self.is_decoder:
            outputs = self_attention_outputs[1:-1]
            present_key_value = self_attention_outputs[-1]
        else:
            outputs = self_attention_outputs[
                1:
            ]  # add self attentions if we output attention weights
        cross_attn_present_key_value = None
        if self.is_decoder and encoder_hidden_states is not None:
            assert hasattr(
                self, "crossattention"
            ), f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`"
            # cross_attn cached key/values tuple is at positions 3,4 of past_key_value tuple
            cross_attn_past_key_value = (
                past_key_value[-2:] if past_key_value is not None else None
            )
            cross_attention_outputs = self.crossattention(
                attention_output,
                attention_mask,
                head_mask,
                encoder_hidden_states,
                encoder_attention_mask,
                cross_attn_past_key_value,
                output_attentions,
                seq_offsets=seq_offsets,
                seq_sqr_offsets=seq_sqr_offsets,
            )
            attention_output = cross_attention_outputs[0]
            outputs = (
                outputs + cross_attention_outputs[1:-1]
            )  # add cross attentions if we output attention weights
            # add cross-attn cache to positions 3,4 of present_key_value tuple
            cross_attn_present_key_value = cross_attention_outputs[-1]
            present_key_value = present_key_value + cross_attn_present_key_value
        # Run the FFN in sequence-dim chunks of chunk_size_feed_forward.
        layer_output = apply_chunking_to_forward(
            self.feed_forward_chunk,
            self.chunk_size_feed_forward,
            self.seq_len_dim,
            attention_output,
        )
        outputs = (layer_output,) + outputs
        # if decoder, return the attn key/values as the last output
        if self.is_decoder:
            outputs = outputs + (present_key_value,)
        return outputs
    def feed_forward_chunk(self, attention_output):
        # Intermediate (expansion + activation) then output (projection +
        # residual) applied to one chunk of the sequence.
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        return layer_output
class BertEncoder(nn.Module):
    """Stack of BertLayer modules; runs them over an unpadded (packed) layout."""
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList(
            [BertLayer(config) for _ in range(config.num_hidden_layers)]
        )
        # self.blocked_input_signature = get_blocking_signature(
        #     "SF", "SFSF"
        # )
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=False,
        output_hidden_states=False,
        return_dict=True
    ):
        """Run every layer; unpads the input before and re-pads after the stack.

        ``generate_mask`` derives a packing mask plus per-sequence offsets
        from ``attention_mask``; the hidden states are compacted with
        ``UnpadInput`` before the layer loop and restored to ``padded_shape``
        with ``PadInput`` afterwards.
        """
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = (
            () if output_attentions and self.config.add_cross_attention else None
        )
        next_decoder_cache = () if use_cache else None
        # Accept a BlockedTensor input by unblocking to a plain tensor first.
        if hasattr(hidden_states, "unblocked_tensor"):
            hidden_states = hidden_states.unblocked_tensor()
        padded_shape = hidden_states.shape
        # print_grad_hook(hidden_states, 'BertEncoder:hidden_states')
        msk, attention_mask, seq_offsets, seq_sqr_offsets = generate_mask(
            attention_mask
        )
        hidden_states = UnpadInput.apply(hidden_states, msk)
        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_head_mask = head_mask[i] if head_mask is not None else None
            past_key_value = past_key_values[i] if past_key_values is not None else None
            if getattr(self.config, "gradient_checkpointing", False) and self.training:
                if use_cache:
                    logger.warning(
                        "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                        "`use_cache=False`..."
                    )
                    use_cache = False
                def create_custom_forward(module):
                    # Bind the non-tensor args so checkpoint only sees tensors.
                    def custom_forward(*inputs):
                        return module(*inputs, past_key_value, output_attentions)
                    return custom_forward
                # NOTE(review): seq_offsets/seq_sqr_offsets are passed as kwargs
                # to torch.utils.checkpoint.checkpoint; reentrant checkpointing
                # historically rejects kwargs — confirm against the torch
                # version in use.
                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module),
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    seq_offsets=seq_offsets,
                    seq_sqr_offsets=seq_sqr_offsets,
                )
            else:
                layer_outputs = layer_module(
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    past_key_value,
                    output_attentions,
                    seq_offsets=seq_offsets,
                    seq_sqr_offsets=seq_sqr_offsets,
                )
            hidden_states = layer_outputs[0]
            if use_cache:
                next_decoder_cache += (layer_outputs[-1],)
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if self.config.add_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)
        if hasattr(hidden_states, "unblocked_tensor"):
            hidden_states = hidden_states.unblocked_tensor()
        # Restore the original padded layout for downstream consumers.
        hidden_states = PadInput.apply(hidden_states, msk, padded_shape)
        # print_grad_hook(hidden_states, 'BertEncoder:hidden_states')
        if not return_dict:
            return tuple(
                v
                for v in [
                    hidden_states,
                    next_decoder_cache,
                    all_hidden_states,
                    all_self_attentions,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=next_decoder_cache,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )
class BertPooler(nn.Module):
    """Pools the encoder output by transforming the first token's hidden state."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        # We "pool" the model by simply taking the hidden state corresponding
        # to the first token.
        cls_state = hidden_states[:, 0]
        return self.activation(self.dense(cls_state))
class BertPredictionHeadTransform(nn.Module):
    """Dense -> activation -> LayerNorm transform used before the LM decoder."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        # config.hidden_act may be either a key into ACT2FN or a callable.
        self.transform_act_fn = (
            ACT2FN[config.hidden_act]
            if isinstance(config.hidden_act, str)
            else config.hidden_act
        )
        self.LayerNorm = DummyLayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states):
        """Apply the dense projection, the activation and the layer norm in turn."""
        transformed = self.dense(hidden_states)
        transformed = self.transform_act_fn(transformed)
        return self.LayerNorm(transformed)
class BertLMPredictionHead(nn.Module):
    """Masked-LM head: transform the hidden states, then project onto the vocabulary."""

    def __init__(self, config):
        super().__init__()
        self.transform = BertPredictionHeadTransform(config)
        # The output weights are the same as the input embeddings, but there is
        # an output-only bias for each token.
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
        # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings`
        self.decoder.bias = self.bias

    def forward(self, hidden_states):
        """Return per-position vocabulary logits."""
        return self.decoder(self.transform(hidden_states))
# bm_default_blocking_factors = BlockedModule.default_blocking_factors
# @staticmethod
# def custom_blocking_factors(S):
# print(f"S = {S}")
# if S % 32 == 0: return [S//32, 32]
# return bm_default_blocking_factors
# BlockedModule.default_blocking_factors = custom_blocking_factors
# Patch transformers' is_tensor so BlockedTensor instances are accepted by the
# library's tensor checks.  The patch is best-effort: it is skipped when
# transformers is absent or does not expose file_utils.is_tensor.
try:
    import transformers

    transformers_orig_is_tensor = transformers.file_utils.is_tensor

    def is_tensor(x):
        """Tests if ``x`` is a :obj:`torch.Tensor`, :obj:`tf.Tensor` or :obj:`np.ndarray`."""
        if transformers_orig_is_tensor(x):
            return True
        if isinstance(x, BlockedTensor):
            return True
        return False

    transformers.file_utils.is_tensor = is_tensor
except (ImportError, AttributeError):
    # Previously a bare `except:`, which also swallowed KeyboardInterrupt /
    # SystemExit and hid unrelated bugs; only the two expected failure modes
    # (module missing, attribute layout changed) are ignored now.
    pass
def block(model):
    """Ask every submodule of ``model`` that supports it to block its parameters.

    Walks ``model.modules()`` and invokes ``maybe_block_params`` on each
    module that defines it; modules without the hook are left untouched.
    """
    for module in model.modules():
        blocker = getattr(module, "maybe_block_params", None)
        if blocker is not None:
            blocker()
|
PypiClean
|
/xnni-0.7.4-py3-none-manylinux1_x86_64.whl/xnni-0.7.4.data/data/nni/core/nniDataStore.js
|
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const assert = require("assert");
const ts_deferred_1 = require("ts-deferred");
const component = require("../common/component");
const datastore_1 = require("../common/datastore");
const errors_1 = require("../common/errors");
const experimentStartupInfo_1 = require("../common/experimentStartupInfo");
const log_1 = require("../common/log");
const utils_1 = require("../common/utils");
// Datastore facade for NNI: persists experiment profiles, trial-job events and
// metrics through an injected Database implementation, and reconstructs
// trial-job records by replaying the stored event log.
class NNIDataStore {
    constructor() {
        // Database implementation and logger are resolved via the component container.
        this.db = component.get(datastore_1.Database);
        this.log = log_1.getLogger();
    }
    // Initialize the database exactly once; concurrent/repeat callers share the
    // same deferred promise.  A new experiment first creates the database
    // directory and initializes the DB with createNew = true.
    init() {
        if (this.initTask !== undefined) {
            return this.initTask.promise;
        }
        this.initTask = new ts_deferred_1.Deferred();
        const databaseDir = utils_1.getDefaultDatabaseDir();
        if (experimentStartupInfo_1.isNewExperiment()) {
            utils_1.mkDirP(databaseDir).then(() => {
                this.db.init(true, databaseDir).then(() => {
                    this.log.info('Datastore initialization done');
                    this.initTask.resolve();
                }).catch((err) => {
                    this.initTask.reject(err);
                });
            }).catch((err) => {
                this.initTask.reject(err);
            });
        }
        else {
            this.db.init(false, databaseDir).then(() => {
                this.log.info('Datastore initialization done');
                this.initTask.resolve();
            }).catch((err) => {
                this.initTask.reject(err);
            });
        }
        return this.initTask.promise;
    }
    async close() {
        await this.db.close();
    }
    // Persist the experiment profile; DB failures are wrapped in NNIError.
    async storeExperimentProfile(experimentProfile) {
        try {
            await this.db.storeExperimentProfile(experimentProfile);
        }
        catch (err) {
            throw new errors_1.NNIError('Datastore error', `Datastore error: ${err.message}`, err);
        }
    }
    getExperimentProfile(experimentId) {
        return this.db.queryLatestExperimentProfile(experimentId);
    }
    // Record a trial-job lifecycle event.  The timestamp is taken from the job
    // detail field matching the event (submit/start/end time); when the detail
    // does not provide one, "now" is used instead.
    storeTrialJobEvent(event, trialJobId, hyperParameter, jobDetail) {
        this.log.debug(`storeTrialJobEvent: event: ${event}, data: ${hyperParameter}, jobDetail: ${JSON.stringify(jobDetail)}`);
        let timestamp;
        if (event === 'WAITING' && jobDetail) {
            timestamp = jobDetail.submitTime;
        }
        else if (event === 'RUNNING' && jobDetail) {
            timestamp = jobDetail.startTime;
        }
        else if (['EARLY_STOPPED', 'SUCCEEDED', 'FAILED', 'USER_CANCELED', 'SYS_CANCELED'].includes(event) && jobDetail) {
            timestamp = jobDetail.endTime;
        }
        if (timestamp === undefined) {
            timestamp = Date.now();
        }
        return this.db.storeTrialJobEvent(event, trialJobId, timestamp, hyperParameter, jobDetail).catch((err) => {
            throw new errors_1.NNIError('Datastore error', `Datastore error: ${err.message}`, err);
        });
    }
    // Count trial jobs grouped by status.
    async getTrialJobStatistics() {
        const result = [];
        const jobs = await this.listTrialJobs();
        const map = new Map();
        jobs.forEach((value) => {
            let n = map.get(value.status);
            if (!n) {
                n = 0;
            }
            map.set(value.status, n + 1);
        });
        map.forEach((value, key) => {
            const statistics = {
                trialJobStatus: key,
                trialJobNumber: value
            };
            result.push(statistics);
        });
        return result;
    }
    listTrialJobs(status) {
        return this.queryTrialJobs(status);
    }
    async getTrialJob(trialJobId) {
        const trialJobs = await this.queryTrialJobs(undefined, trialJobId);
        return trialJobs[0];
    }
    // Persist one metric record.  REQUEST_PARAMETER messages are control
    // messages rather than metrics and are ignored.
    async storeMetricData(trialJobId, data) {
        const metrics = JSON.parse(data);
        if (metrics.type === 'REQUEST_PARAMETER') {
            return;
        }
        assert(trialJobId === metrics.trial_job_id);
        try {
            await this.db.storeMetricData(trialJobId, JSON.stringify({
                trialJobId: metrics.trial_job_id,
                parameterId: metrics.parameter_id,
                type: metrics.type,
                sequence: metrics.sequence,
                data: metrics.value,
                timestamp: Date.now()
            }));
        }
        catch (err) {
            throw new errors_1.NNIError('Datastore error', `Datastore error: ${err.message}`, err);
        }
    }
    getMetricData(trialJobId, metricType) {
        return this.db.queryMetricData(trialJobId, metricType);
    }
    // Rebuild trial-job records by replaying stored events, optionally filtered
    // by status and/or a single trial id.  Final metrics are attached only to
    // SUCCEEDED jobs.
    async queryTrialJobs(status, trialJobId) {
        const result = [];
        const trialJobEvents = await this.db.queryTrialJobEvent(trialJobId);
        if (trialJobEvents === undefined) {
            return result;
        }
        const map = this.getTrialJobsByReplayEvents(trialJobEvents);
        const finalMetricsMap = await this.getFinalMetricData(trialJobId);
        for (const key of map.keys()) {
            const jobInfo = map.get(key);
            if (jobInfo === undefined) {
                continue;
            }
            if (!(status !== undefined && jobInfo.status !== status)) {
                if (jobInfo.status === 'SUCCEEDED') {
                    jobInfo.finalMetricData = finalMetricsMap.get(jobInfo.id);
                }
                result.push(jobInfo);
            }
        }
        return result;
    }
    // Group FINAL metrics by trial id.  Multiple FINAL results per trial are
    // accepted only for multi-phase experiments; otherwise they are logged as
    // an error and the extra results are dropped.
    async getFinalMetricData(trialJobId) {
        const map = new Map();
        const metrics = await this.getMetricData(trialJobId, 'FINAL');
        const multiPhase = await this.isMultiPhase();
        for (const metric of metrics) {
            const existMetrics = map.get(metric.trialJobId);
            if (existMetrics !== undefined) {
                if (!multiPhase) {
                    this.log.error(`Found multiple FINAL results for trial job ${trialJobId}, metrics: ${JSON.stringify(metrics)}`);
                }
                else {
                    existMetrics.push(metric);
                }
            }
            else {
                map.set(metric.trialJobId, [metric]);
            }
        }
        return map;
    }
    // Read the experiment's multiPhase flag once and cache it on the instance.
    async isMultiPhase() {
        if (this.multiPhase === undefined) {
            const expProfile = await this.getExperimentProfile(experimentStartupInfo_1.getExperimentId());
            if (expProfile !== undefined) {
                this.multiPhase = expProfile.params.multiPhase;
            }
            else {
                return false;
            }
        }
        if (this.multiPhase !== undefined) {
            return this.multiPhase;
        }
        else {
            return false;
        }
    }
    // Map a raw event name to a job status; most event names are statuses
    // already and are returned unchanged.
    getJobStatusByLatestEvent(oldStatus, event) {
        switch (event) {
            case 'USER_TO_CANCEL':
                return 'USER_CANCELED';
            case 'ADD_CUSTOMIZED':
                return 'WAITING';
            case 'ADD_HYPERPARAMETER':
                return oldStatus;
            default:
        }
        return event;
    }
    // Parse a hyper-parameter JSON string; returns undefined (and logs) on
    // malformed input instead of throwing.
    parseHyperParameter(hParamStr) {
        let hParam;
        try {
            hParam = JSON.parse(hParamStr);
            return hParam;
        }
        catch (err) {
            this.log.error(`Hyper parameter needs to be in json format: ${hParamStr}`);
            return undefined;
        }
    }
    // Fold the ordered event stream into one record per trial id, tracking
    // start/end times, log path, status and de-duplicated hyper-parameters.
    getTrialJobsByReplayEvents(trialJobEvents) {
        this.log.debug('getTrialJobsByReplayEvents begin');
        const map = new Map();
        const hParamIdMap = new Map();
        for (const record of trialJobEvents) {
            let jobInfo;
            if (record.trialJobId === undefined || record.trialJobId.length < 1) {
                continue;
            }
            if (map.has(record.trialJobId)) {
                jobInfo = map.get(record.trialJobId);
            }
            else {
                jobInfo = {
                    id: record.trialJobId,
                    status: this.getJobStatusByLatestEvent('UNKNOWN', record.event),
                    hyperParameters: []
                };
            }
            if (!jobInfo) {
                throw new Error('Empty JobInfo');
            }
            switch (record.event) {
                case 'RUNNING':
                    if (record.timestamp !== undefined) {
                        jobInfo.startTime = record.timestamp;
                    }
                // NOTE(review): no `break` — looks like an intentional
                // fall-through so RUNNING also picks up the shared
                // logPath/startTime handling below; confirm before changing.
                case 'WAITING':
                    if (record.logPath !== undefined) {
                        jobInfo.logPath = record.logPath;
                    }
                    if (jobInfo.startTime === undefined && record.timestamp !== undefined) {
                        jobInfo.startTime = record.timestamp;
                    }
                    break;
                case 'SUCCEEDED':
                case 'FAILED':
                case 'USER_CANCELED':
                case 'SYS_CANCELED':
                case 'EARLY_STOPPED':
                    if (record.logPath !== undefined) {
                        jobInfo.logPath = record.logPath;
                    }
                    jobInfo.endTime = record.timestamp;
                    if (jobInfo.startTime === undefined && record.timestamp !== undefined) {
                        jobInfo.startTime = record.timestamp;
                    }
                default:
            }
            jobInfo.status = this.getJobStatusByLatestEvent(jobInfo.status, record.event);
            if (record.data !== undefined && record.data.trim().length > 0) {
                const newHParam = this.parseHyperParameter(record.data);
                if (newHParam !== undefined) {
                    if (jobInfo.hyperParameters !== undefined) {
                        // De-duplicate hyper-parameters by parameter_index.
                        let hParamIds = hParamIdMap.get(jobInfo.id);
                        if (hParamIds === undefined) {
                            hParamIds = new Set();
                        }
                        if (!hParamIds.has(newHParam.parameter_index)) {
                            jobInfo.hyperParameters.push(JSON.stringify(newHParam));
                            hParamIds.add(newHParam.parameter_index);
                            hParamIdMap.set(jobInfo.id, hParamIds);
                        }
                    }
                    else {
                        assert(false, 'jobInfo.hyperParameters is undefined');
                    }
                }
            }
            if (record.sequenceId !== undefined && jobInfo.sequenceId === undefined) {
                jobInfo.sequenceId = record.sequenceId;
            }
            map.set(record.trialJobId, jobInfo);
        }
        this.log.debug('getTrialJobsByReplayEvents done');
        return map;
    }
}
exports.NNIDataStore = NNIDataStore;
|
PypiClean
|
/Orange3_Timeseries-0.6.0-py3-none-any.whl/orangecontrib/timeseries/widgets/owvarmodel.py
|
from AnyQt.QtCore import Qt
from orangewidget.utils.widgetpreview import WidgetPreview
from Orange.widgets import gui, settings
from orangecontrib.timeseries import Timeseries, VAR
from orangecontrib.timeseries.widgets._owmodel import OWBaseModel
class OWVARModel(OWBaseModel):
    """Orange widget that fits a vector auto-regression (VAR) time-series model."""
    name = 'VAR Model'
    description = 'Model the time series using vector auto-regression (VAR).'
    icon = 'icons/VAR.svg'
    priority = 220
    # Persisted widget settings: maximum AR order and the radio-button indices
    # selecting an entry of IC_LABELS / TREND_LABELS respectively.
    maxlags = settings.Setting(1)
    ic = settings.Setting(0)
    trend = settings.Setting(0)
    # Human-readable label -> information-criterion code passed to VAR.
    IC_LABELS = dict((('None', None),
                      ("Akaike's information criterion (AIC)", 'aic'),
                      ('Bayesian information criterion (BIC)', 'bic'),
                      ('Hannan–Quinn', 'hqic'),
                      ("Final prediction error (FPE)", 'fpe'),
                      ('Average of the above', 'magic')))
    # Human-readable label -> trend code passed to VAR.
    TREND_LABELS = dict((('None', 'n'),
                         ('Constant', 'c'),
                         ('Constant and linear', 'ct'),
                         ('Constant, linear and quadratic', 'ctt')))
    def add_main_layout(self):
        """Build the parameter controls: AR order spin box and two radio groups."""
        box = gui.vBox(self.controlArea, box='Parameters')
        gui.spin(
            box, self, 'maxlags', 1, 100,
            label='Maximum auto-regression order:', alignment=Qt.AlignRight,
            callback=self.apply.deferred)
        gui.separator(self.controlArea, 12)
        box = gui.vBox(self.controlArea, box=True)
        gui.radioButtons(
            box, self, 'ic',
            btnLabels=tuple(self.IC_LABELS),
            label='Optimize AR order by:',
            callback=self.apply.deferred)
        gui.separator(self.controlArea, 12)
        gui.radioButtons(
            box, self, 'trend',
            btnLabels=tuple(self.TREND_LABELS),
            label='Add trend vector(s):',
            callback=self.apply.deferred)
    def create_learner(self):
        """Translate the selected radio indices into VAR constructor arguments."""
        ic = self.IC_LABELS[tuple(self.IC_LABELS.keys())[self.ic]]
        trend = self.TREND_LABELS[tuple(self.TREND_LABELS.keys())[self.trend]]
        return VAR(self.maxlags, ic, trend)
if __name__ == "__main__":
    # Manual smoke test: preview the widget on the bundled dataset.
    data = Timeseries.from_file('airpassengers')
    WidgetPreview(OWVARModel).run(set_data=data)
|
PypiClean
|
/pybanyan-openapi-client-1.0.0.tar.gz/pybanyan-openapi-client-1.0.0/openapi_client/paths/v2_bundle/get.py
|
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from openapi_client import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from openapi_client import schemas # noqa: F401
from . import path
# query params
# Both query parameters of GET /v2/bundle are optional free-form strings used
# to filter the returned bundles.
BundleNameSchema = schemas.StrSchema
CreatedBySchema = schemas.StrSchema
RequestRequiredQueryParams = typing_extensions.TypedDict(
    'RequestRequiredQueryParams',
    {
    }
)
RequestOptionalQueryParams = typing_extensions.TypedDict(
    'RequestOptionalQueryParams',
    {
        'bundle_name': typing.Union[BundleNameSchema, str, ],
        'created_by': typing.Union[CreatedBySchema, str, ],
    },
    total=False
)
class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
    """TypedDict combining required and optional query parameters for this endpoint."""
    pass
# Serialization descriptors (form style, exploded) for the two query params.
request_query_bundle_name = api_client.QueryParameter(
    name="bundle_name",
    style=api_client.ParameterStyle.FORM,
    schema=BundleNameSchema,
    explode=True,
)
request_query_created_by = api_client.QueryParameter(
    name="created_by",
    style=api_client.ParameterStyle.FORM,
    schema=CreatedBySchema,
    explode=True,
)
# Auth schemes required by this endpoint.
_auth = [
    'bearerAuthToken',
]
class SchemaFor200ResponseBodyApplicationJson(
    schemas.DictSchema
):
    """Auto-generated OpenAPI schema for the 200 response of GET /v2/bundle.

    Envelope with ``request_id``, ``error_code``, ``error_description`` and a
    nested ``data`` object describing one service bundle.
    """
    class MetaOapg:
        class properties:
            request_id = schemas.StrSchema
            error_code = schemas.IntSchema
            error_description = schemas.StrSchema
            class data(
                schemas.DictSchema
            ):
                """Bundle payload: identity, flags, member service ids, timestamps."""
                class MetaOapg:
                    class properties:
                        id = schemas.StrSchema
                        bundle_name = schemas.StrSchema
                        description = schemas.StrSchema
                        is_admin_created = schemas.IntSchema
                        bulk_connect = schemas.IntSchema
                        class service_ids(
                            schemas.ListSchema
                        ):
                            """List of service id strings belonging to the bundle."""
                            class MetaOapg:
                                items = schemas.StrSchema
                            def __new__(
                                cls,
                                arg: typing.Union[typing.Tuple[typing.Union[MetaOapg.items, str, ]], typing.List[typing.Union[MetaOapg.items, str, ]]],
                                _configuration: typing.Optional[schemas.Configuration] = None,
                            ) -> 'service_ids':
                                return super().__new__(
                                    cls,
                                    arg,
                                    _configuration=_configuration,
                                )
                            def __getitem__(self, i: int) -> MetaOapg.items:
                                return super().__getitem__(i)
                        created_at = schemas.IntSchema
                        updated_at = schemas.IntSchema
                        created_by_user_id = schemas.StrSchema
                        __annotations__ = {
                            "id": id,
                            "bundle_name": bundle_name,
                            "description": description,
                            "is_admin_created": is_admin_created,
                            "bulk_connect": bulk_connect,
                            "service_ids": service_ids,
                            "created_at": created_at,
                            "updated_at": updated_at,
                            "created_by_user_id": created_by_user_id,
                        }
                # Typed accessor overloads generated for each known property.
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["id"]) -> MetaOapg.properties.id: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["bundle_name"]) -> MetaOapg.properties.bundle_name: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["description"]) -> MetaOapg.properties.description: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["is_admin_created"]) -> MetaOapg.properties.is_admin_created: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["bulk_connect"]) -> MetaOapg.properties.bulk_connect: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["service_ids"]) -> MetaOapg.properties.service_ids: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["created_at"]) -> MetaOapg.properties.created_at: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["updated_at"]) -> MetaOapg.properties.updated_at: ...
                @typing.overload
                def __getitem__(self, name: typing_extensions.Literal["created_by_user_id"]) -> MetaOapg.properties.created_by_user_id: ...
                @typing.overload
                def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
                def __getitem__(self, name: typing.Union[typing_extensions.Literal["id", "bundle_name", "description", "is_admin_created", "bulk_connect", "service_ids", "created_at", "updated_at", "created_by_user_id", ], str]):
                    # dict_instance[name] accessor
                    return super().__getitem__(name)
                # get_item_oapg variants return schemas.Unset for absent keys
                # instead of raising.
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["id"]) -> typing.Union[MetaOapg.properties.id, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["bundle_name"]) -> typing.Union[MetaOapg.properties.bundle_name, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["description"]) -> typing.Union[MetaOapg.properties.description, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["is_admin_created"]) -> typing.Union[MetaOapg.properties.is_admin_created, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["bulk_connect"]) -> typing.Union[MetaOapg.properties.bulk_connect, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["service_ids"]) -> typing.Union[MetaOapg.properties.service_ids, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["created_at"]) -> typing.Union[MetaOapg.properties.created_at, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["updated_at"]) -> typing.Union[MetaOapg.properties.updated_at, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: typing_extensions.Literal["created_by_user_id"]) -> typing.Union[MetaOapg.properties.created_by_user_id, schemas.Unset]: ...
                @typing.overload
                def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
                def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["id", "bundle_name", "description", "is_admin_created", "bulk_connect", "service_ids", "created_at", "updated_at", "created_by_user_id", ], str]):
                    return super().get_item_oapg(name)
                def __new__(
                    cls,
                    *args: typing.Union[dict, frozendict.frozendict, ],
                    id: typing.Union[MetaOapg.properties.id, str, schemas.Unset] = schemas.unset,
                    bundle_name: typing.Union[MetaOapg.properties.bundle_name, str, schemas.Unset] = schemas.unset,
                    description: typing.Union[MetaOapg.properties.description, str, schemas.Unset] = schemas.unset,
                    is_admin_created: typing.Union[MetaOapg.properties.is_admin_created, decimal.Decimal, int, schemas.Unset] = schemas.unset,
                    bulk_connect: typing.Union[MetaOapg.properties.bulk_connect, decimal.Decimal, int, schemas.Unset] = schemas.unset,
                    service_ids: typing.Union[MetaOapg.properties.service_ids, list, tuple, schemas.Unset] = schemas.unset,
                    created_at: typing.Union[MetaOapg.properties.created_at, decimal.Decimal, int, schemas.Unset] = schemas.unset,
                    updated_at: typing.Union[MetaOapg.properties.updated_at, decimal.Decimal, int, schemas.Unset] = schemas.unset,
                    created_by_user_id: typing.Union[MetaOapg.properties.created_by_user_id, str, schemas.Unset] = schemas.unset,
                    _configuration: typing.Optional[schemas.Configuration] = None,
                    **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
                ) -> 'data':
                    return super().__new__(
                        cls,
                        *args,
                        id=id,
                        bundle_name=bundle_name,
                        description=description,
                        is_admin_created=is_admin_created,
                        bulk_connect=bulk_connect,
                        service_ids=service_ids,
                        created_at=created_at,
                        updated_at=updated_at,
                        created_by_user_id=created_by_user_id,
                        _configuration=_configuration,
                        **kwargs,
                    )
            __annotations__ = {
                "request_id": request_id,
                "error_code": error_code,
                "error_description": error_description,
                "data": data,
            }
    # Typed accessor overloads for the envelope's own properties.
    @typing.overload
    def __getitem__(self, name: typing_extensions.Literal["request_id"]) -> MetaOapg.properties.request_id: ...
    @typing.overload
    def __getitem__(self, name: typing_extensions.Literal["error_code"]) -> MetaOapg.properties.error_code: ...
    @typing.overload
    def __getitem__(self, name: typing_extensions.Literal["error_description"]) -> MetaOapg.properties.error_description: ...
    @typing.overload
    def __getitem__(self, name: typing_extensions.Literal["data"]) -> MetaOapg.properties.data: ...
    @typing.overload
    def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
    def __getitem__(self, name: typing.Union[typing_extensions.Literal["request_id", "error_code", "error_description", "data", ], str]):
        # dict_instance[name] accessor
        return super().__getitem__(name)
    @typing.overload
    def get_item_oapg(self, name: typing_extensions.Literal["request_id"]) -> typing.Union[MetaOapg.properties.request_id, schemas.Unset]: ...
    @typing.overload
    def get_item_oapg(self, name: typing_extensions.Literal["error_code"]) -> typing.Union[MetaOapg.properties.error_code, schemas.Unset]: ...
    @typing.overload
    def get_item_oapg(self, name: typing_extensions.Literal["error_description"]) -> typing.Union[MetaOapg.properties.error_description, schemas.Unset]: ...
    @typing.overload
    def get_item_oapg(self, name: typing_extensions.Literal["data"]) -> typing.Union[MetaOapg.properties.data, schemas.Unset]: ...
    @typing.overload
    def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
    def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["request_id", "error_code", "error_description", "data", ], str]):
        return super().get_item_oapg(name)
    def __new__(
        cls,
        *args: typing.Union[dict, frozendict.frozendict, ],
        request_id: typing.Union[MetaOapg.properties.request_id, str, schemas.Unset] = schemas.unset,
        error_code: typing.Union[MetaOapg.properties.error_code, decimal.Decimal, int, schemas.Unset] = schemas.unset,
        error_description: typing.Union[MetaOapg.properties.error_description, str, schemas.Unset] = schemas.unset,
        data: typing.Union[MetaOapg.properties.data, dict, frozendict.frozendict, schemas.Unset] = schemas.unset,
        _configuration: typing.Optional[schemas.Configuration] = None,
        **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
    ) -> 'SchemaFor200ResponseBodyApplicationJson':
        return super().__new__(
            cls,
            *args,
            request_id=request_id,
            error_code=error_code,
            error_description=error_description,
            data=data,
            _configuration=_configuration,
            **kwargs,
        )
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    """Deserialized 200 response; ``body`` holds the parsed JSON schema instance."""
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset
_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationJson),
    },
)
# 401/403/500 are defined without a body schema: only the raw response is kept.
@dataclass
class ApiResponseFor401(api_client.ApiResponse):
    """Unauthorized response; no body schema is declared for it."""
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset
_response_for_401 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor401,
)
@dataclass
class ApiResponseFor403(api_client.ApiResponse):
    """Forbidden response; no body schema is declared for it."""
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset
_response_for_403 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor403,
)
@dataclass
class ApiResponseFor500(api_client.ApiResponse):
    """Server-error response; no body schema is declared for it."""
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset
_response_for_500 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor500,
)
# Dispatch table: HTTP status code (as a string) -> response deserializer.
_status_code_to_response = {
    '200': _response_for_200,
    '401': _response_for_401,
    '403': _response_for_403,
    '500': _response_for_500,
}
_all_accept_content_types = (
    'application/json',
)
class BaseApi(api_client.Api):
    def _v2_bundle_get_oapg(
        self: api_client.Api,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        GET service bundles
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
        used_path = path.value
        # Serialize each supplied query parameter and append it to the path;
        # the prefix/separator iterator alternates '?' and '&' between params.
        prefix_separator_iterator = None
        for parameter in (
            request_query_bundle_name,
            request_query_created_by,
        ):
            parameter_data = query_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            if prefix_separator_iterator is None:
                prefix_separator_iterator = parameter.get_prefix_separator_iterator()
            serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
            for serialized_value in serialized_data.values():
                used_path += serialized_value
        _headers = HTTPHeaderDict()
        # TODO add cookie handling
        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)
        response = self.api_client.call_api(
            resource_path=used_path,
            method='get'.upper(),
            headers=_headers,
            auth_settings=_auth,
            stream=stream,
            timeout=timeout,
        )
        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Deserialize via the status-code dispatch table; unknown statuses
            # fall back to the raw (undeserialized) response wrapper.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        # Non-2xx statuses raise, carrying the (possibly deserialized) response.
        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)
        return api_response
class V2BundleGet(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names

    def v2_bundle_get(
        self: BaseApi,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """operationId-named entry point; delegates to the shared implementation."""
        return self._v2_bundle_get_oapg(
            query_params=query_params,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
class ApiForget(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names

    def get(
        self: BaseApi,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """HTTP-verb-named entry point; delegates to the shared implementation."""
        return self._v2_bundle_get_oapg(
            query_params=query_params,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
|
PypiClean
|
/client_chat_pyqt_June-0.2.tar.gz/client_chat_pyqt_June-0.2/client/common/metaclasses.py
|
import dis
class ServerMaker(type):
'''
Метакласс, проверяющий что в результирующем классе нет клиентских
вызовов таких как: connect. Также проверяется, что серверный
сокет является TCP и работает по IPv4 протоколу.
'''
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
# Атрибуты, вызываемые функциями классов
attrs = []
for func in clsdict:
# Пробуем
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
# Раз функция разбираем код, получая используемые методы и
# атрибуты.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
elif i.opname == 'LOAD_ATTR':
if i.argval not in attrs:
attrs.append(i.argval)
# Если обнаружено использование недопустимого метода connect,
# генерируем исключение:
if 'connect' in methods:
raise TypeError(
'Использование метода connect недопустимо в серверном классе')
# Если сокет не инициализировался константами SOCK_STREAM(TCP)
# AF_INET(IPv4), тоже исключение.
if not ('SOCK_STREAM' in attrs and 'AF_INET' in attrs):
raise TypeError('Некорректная инициализация сокета.')
super().__init__(clsname, bases, clsdict)
class ClientMaker(type):
'''
Метакласс, проверяющий что в результирующем классе нет серверных
вызовов таких как: accept, listen. Также проверяется, что сокет не
создаётся внутри конструктора класса.
'''
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
for func in clsdict:
# Пробуем
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
# Раз функция разбираем код, получая используемые методы.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
# Если обнаружено использование недопустимого метода accept, listen,
# socket бросаем исключение:
for command in ('accept', 'listen', 'socket'):
if command in methods:
raise TypeError(
'В классе обнаружено использование запрещённого метода')
# Вызов get_message или send_message из utils считаем корректным
# использованием сокетов
if 'get_message' in methods or 'send_message' in methods:
pass
else:
raise TypeError(
'Отсутствуют вызовы функций, работающих с сокетами.')
super().__init__(clsname, bases, clsdict)
|
PypiClean
|
/stag-ssg-0.10.1.tar.gz/stag-ssg-0.10.1/src/stag/config.py
|
import os
import copy
from functools import cache
from typing import Optional, List, Any
import attr
import tomli
from stag.utils import attr_access
class ConfigError(Exception):
    """Raised when the site configuration is invalid or cannot be processed."""
    pass
@attr.s(auto_attribs=True)
class TemplateTable:
    # Theme directory, relative to the site root.
    name: str = os.path.join("themes", "default")
    # Mapping of page kind -> template name, wrapped for attribute access.
    templates: dict = attr.ib(factory=dict, converter=attr_access)

    def __attrs_post_init__(self):
        # Guarantee the three built-in template kinds always resolve.
        self.templates.setdefault("page", "page")
        self.templates.setdefault("list", "list")
        self.templates.setdefault("taxonomy", "taxonomy")
@attr.s(auto_attribs=True)
class TaxonomyTable:
    # Configuration key naming the taxonomy (e.g. "tags").
    key: str
    singular: Optional[str] = None
    plural: Optional[str] = None
    possible_terms: Optional[List[str]] = None

    def __attrs_post_init__(self):
        # Default both display names to the key when not provided explicitly.
        if self.singular is None:
            self.singular = self.key
        if self.plural is None:
            self.plural = self.key
def init_table(cls):
    """Build an attrs converter that coerces raw config values into *cls*.

    The returned callable accepts a mapping (keyword-expanded into *cls*),
    a list/tuple of mappings (each element converted, container type
    preserved), an existing *cls* instance (returned untouched), or any
    other value (passed positionally to *cls*).
    """
    def convert(value):
        if isinstance(value, dict):
            return cls(**value)
        if isinstance(value, (tuple, list)):
            container = type(value)
            return container(cls(**item) for item in value)
        return value if isinstance(value, cls) else cls(value)
    return convert
@attr.s(auto_attribs=True)
class Config:
    # Site metadata.
    title: str = "MySite"
    url: str = "https://example.com"
    language: str = "en"
    timezone: str = "+0000"
    # Plugin discovery directory and explicit opt-outs.
    plugins_path: str = "plugins"
    plugins_disabled: list[str] = attr.ib(factory=list)
    # Input/output/cache directories.
    content: str = "content"
    output: str = "_output"
    cache: str = ".cache"
    no_cache: bool = False
    # Nested tables: raw TOML dicts are coerced by the init_table converters.
    taxonomies: list[TaxonomyTable] = attr.ib(
        factory=list, converter=init_table(TaxonomyTable)
    )
    template: TemplateTable = attr.ib(
        factory=TemplateTable, converter=init_table(TemplateTable)
    )
    # Free-form sections exposed with attribute access.
    user: Any = attr.ib(factory=dict, converter=attr_access)
    plugins: Any = attr.ib(factory=dict, converter=attr_access)

    def get(self, name, default=None):
        # Dict-like accessor so Config can stand in where a mapping is expected.
        return getattr(self, name, default)

    def update_plugin_table(self, name, table):
        # Merge user-provided plugin settings over the plugin's defaults;
        # a deep copy keeps the caller's default table object untouched.
        tc = copy.deepcopy(table)
        if name in self.plugins:
            tc.__dict__.update(self.plugins[name])
            self.plugins[name] = tc.__dict__
        else:
            self.plugins[name] = tc.__dict__
@cache
def read_config(path):
    # Cached so repeated reads of the same path parse the TOML only once.
    try:
        with open(path) as f:
            config_dct = tomli.loads(f.read())
    except FileNotFoundError:
        # A missing config file falls back to an all-defaults Config.
        config_dct = {}
    return Config(**config_dct)
|
PypiClean
|
/huobi_sdk_hash_7-2.3.7-py3-none-any.whl/huobi/utils/print_mix_object.py
|
import sys
import time
# Scalar types treated as "basic" printable values.
BASIC_DATA_TYPE = (int, str, float)
# Fixed: `(bool)` is just `bool`, not a tuple; a tuple is what isinstance expects.
BASIC_DATA_TYPE_BOOL = (bool,)

# Category tags returned by TypeCheck.get_obj_type().
TYPE_BASIC = "type_basic"
TYPE_BOOL = "type_bool"
TYPE_OBJECT = "type_object"
TYPE_LIST = "type_list"
TYPE_DICT = "type_dict"
TYPE_UNDEFINED = "type_undefined"


class TypeCheck:
    """Classify a value into one of the TYPE_* categories used for printing."""

    @staticmethod
    def is_list(obj):
        return type(obj) == list and isinstance(obj, list)

    @staticmethod
    def is_dict(obj):
        return type(obj) == dict and isinstance(obj, dict)

    @staticmethod
    def is_object(obj):
        return isinstance(obj, object)

    @staticmethod
    def is_basic(obj):
        return isinstance(obj, BASIC_DATA_TYPE)

    @staticmethod
    def is_bool(obj):
        return isinstance(obj, bool)

    @staticmethod
    def get_obj_type(obj):
        """Return the TYPE_* tag describing *obj*.

        bool must be tested before the basic types: bool is a subclass of
        int, so the is_basic() check would otherwise classify True/False as
        TYPE_BASIC and make TYPE_BOOL unreachable (the bug in the original
        ordering).
        """
        if TypeCheck.is_bool(obj):
            return TYPE_BOOL
        elif TypeCheck.is_basic(obj):
            return TYPE_BASIC
        elif TypeCheck.is_list(obj):
            return TYPE_LIST
        elif TypeCheck.is_dict(obj):
            return TYPE_DICT
        elif TypeCheck.is_object(obj):
            # Catch-all: everything in Python is an object.
            return TYPE_OBJECT
        else:
            return TYPE_UNDEFINED
class PrintBasic:
    """Helpers that print scalar values and plain objects to stdout."""

    @staticmethod
    def print_basic(data, name=None):
        """Print *data*, prefixed with "name : " when a non-empty name is given."""
        if name and len(name):
            print(str(name) + " : " + str(data))
        else:
            print(str(data))

    @staticmethod
    def print_basic_bool(data, name=None):
        """Print a truthy/falsy value as "True"/"False", optionally prefixed."""
        bool_desc = "True"
        if not data:
            bool_desc = "False"
        if name and len(name):
            print(str(name) + " : " + str(bool_desc))
        else:
            print(str(bool_desc))

    @staticmethod
    def print_obj(obj):
        """Print every public, non-callable attribute of *obj* as "name:value".

        Returns 0 on success, -1 when *obj* is falsy.
        """
        if not obj:
            return -1
        # Bug fix: the original tested callable(attr) on the attribute *name*
        # (a str, which is never callable), so bound methods slipped through
        # the filter and were printed too.  Test the attribute value instead
        # so only data attributes are listed.
        members = [
            attr for attr in dir(obj)
            if not callable(getattr(obj, attr)) and not attr.startswith("__")
        ]
        for member_def in members:
            val_str = str(getattr(obj, member_def))
            print(member_def + ":" + val_str)
        return 0
class PrintList:
    """Helpers that pretty-print lists and dicts of arbitrary objects."""

    @staticmethod
    def print_list_data(obj):
        """Print each element of a plain list; -1 for falsy input, -2 otherwise."""
        if not obj:
            print("object is None")
            return -1
        if TypeCheck.get_obj_type(obj) != TYPE_LIST:
            return -2
        for element in obj:
            PrintBasic.print_basic(element)
        return 0

    @staticmethod
    def print_origin_object(obj):
        """Dispatch *obj* to the matching PrintBasic printer.

        Returns 0 on success, -1 for falsy input, 1 for unsupported kinds
        (lists/dicts are handled by the dedicated methods below).
        """
        if not obj:
            print("object is None")
            return -1
        kind = TypeCheck.get_obj_type(obj)
        printers = {
            TYPE_BASIC: PrintBasic.print_basic,
            TYPE_BOOL: PrintBasic.print_basic_bool,
            TYPE_OBJECT: PrintBasic.print_obj,
        }
        printer = printers.get(kind)
        if printer is None:
            return 1
        printer(obj)
        return 0

    @staticmethod
    def print_object_list(obj_list):
        """Print a list of objects with a count header and per-item banners."""
        if not obj_list:
            return -1
        if TypeCheck.get_obj_type(obj_list) != TYPE_LIST:
            return -2
        print("data count : ", len(obj_list))
        print("\n")
        for position, value in enumerate(obj_list):
            print("data number " + (str(position)) + " :")
            PrintList.print_origin_object(value)
            print("\n")
        print("\n\n")
        return 0

    @staticmethod
    def print_object_dict(obj_dict):
        """Print a dict of objects as "key :" headers followed by each value."""
        if not obj_dict:
            return -1
        if TypeCheck.get_obj_type(obj_dict) != TYPE_DICT:
            return -2
        print("data count : ", len(obj_dict))
        print("\n")
        for key, value in obj_dict.items():
            PrintBasic.print_basic(str(key) + " :")
            PrintList.print_origin_object(value)
            print("\n")
        print("\n\n")
        return 0
class PrintMix:
    """Single entry point that prints any supported data shape."""

    @staticmethod
    def print_data(data):
        """Route *data* to the right printer by its detected type.

        Returns 0 on success, -1 for falsy input (note: 0, False and empty
        containers are all reported as "none data"), -2 for unknown kinds.
        """
        if not data:
            print(sys._getframe().f_code.co_name + " none data")
            return -1
        kind = TypeCheck.get_obj_type(data)
        dispatch = {
            TYPE_BASIC: PrintBasic.print_basic,
            TYPE_BOOL: PrintBasic.print_basic_bool,
            TYPE_LIST: PrintList.print_object_list,
            TYPE_DICT: PrintList.print_object_dict,
            TYPE_OBJECT: PrintList.print_origin_object,
        }
        printer = dispatch.get(kind)
        if printer is None:
            print(sys._getframe().f_code.co_name + " enter unknown")
            return -2
        printer(data)
        return 0
class PrintDate:
    """Helpers for rendering millisecond timestamps in a readable form."""

    @staticmethod
    def timestamp_to_date(ts_minsecond):
        """Print a millisecond timestamp as "ping <ms>:<local datetime>".

        Invalid input is reported by printing the conversion error rather
        than raising.
        """
        try:
            millis = int(ts_minsecond)
            seconds = int(millis / 1000)
            formatted = time.strftime(
                "%Y-%m-%d %H:%M:%S", time.localtime(seconds))
            print("ping " + str(millis) + ":" + formatted)
        except Exception as error:
            print(error)
"""
if __name__ == "__main__":
ping_ts = 1569319465421
PrintDate.timestamp_to_date(ping_ts)
PrintDate.timestamp_to_date(int(ping_ts), ("ping " + str(ping_ts)))
"""
if __name__ == "__main__":
"""
from huobi.model.symbol import Symbol
symbol_1 = Symbol()
symbol_1.amount_precision = 10009
symbol_1.symbol = "btcusdt"
symbol_2 = Symbol()
symbol_2.amount_precision = 28
symbol_2.symbol = "htusdt"
symbol_3 = Symbol()
symbol_3.amount_precision = 26
symbol_3.symbol = "eosusdt"
symbol_list = [symbol_1, symbol_2, symbol_3]
symbol_dict = {"one": symbol_1, "two": symbol_2, "three": symbol_3}
PrintMix.print_data(symbol_list)
PrintMix.print_data(symbol_dict)
print(type(symbol_list) == list)
print(type(symbol_dict) == dict)
print(type(symbol_list) == object)
print(isinstance(symbol_list, list))
print(isinstance(symbol_list, object))
print(isinstance(symbol_dict, dict))
print(isinstance(symbol_dict, object))
"""
a=['s', 'h', 'i']
PrintList.print_list_data(a)
|
PypiClean
|
/alipay-python-3.3.17.tar.gz/alipay-python-3.3.17/alipay/aop/api/request/AlipayMobilePublicGisGetRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
class AlipayMobilePublicGisGetRequest(object):
    """Request wrapper for the ``alipay.mobile.public.gis.get`` gateway API.

    Follows the generated Alipay SDK request pattern: private fields with
    property accessors, plus get_params()/get_multipart_params() used by the
    client when submitting the request.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        self._biz_content = value

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Non-dict values are silently ignored (SDK-wide generated behavior).
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach an extra user-defined text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Assemble the flat parameter dict sent to the Alipay gateway.

        biz_model takes precedence over biz_content for the biz_content
        payload; both are JSON-serialized with sorted keys and no ASCII
        escaping, as the gateway's signature scheme expects.
        """
        params = dict()
        params[P_METHOD] = 'alipay.mobile.public.gis.get'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        # This API uploads no files, so the multipart dict is always empty.
        multipart_params = dict()
        return multipart_params
|
PypiClean
|
/matts-majestic-monads-0.0.5.tar.gz/matts-majestic-monads-0.0.5/monads/maybe.py
|
from .base import Monad
from .operators import MonadOperator
class Maybe(Monad, MonadOperator):  # pylint: disable=abstract-method
    """Monad for optional values: either Just(value) or Nothing."""

    @classmethod
    def lift(cls, value):
        """lift :: a -> Maybe a -- wrap a plain value in Just."""
        return Just(value)

    @classmethod
    def convert(cls, function):
        """Wrap *function* so it returns Just(result), or Nothing on any error."""
        def safe_call(*args, **kwargs):
            try:
                return Just(function(*args, **kwargs))
            except Exception:  # pylint: disable=broad-except
                return Nothing
        return safe_call
class _Nothing(Maybe):
    """The absent value: every monadic operation short-circuits to itself."""

    def apply(self, applicative):
        """ apply :: m a -> m (a -> b) -> m b
        Create a new m b, from an m a using the results of calling a lifted function on every value in the m a. """
        return self

    def bind(self, fun):
        """ bind :: m a -> (a -> m b) -> m b
        Create a new m b, from an m a using the results of calling a function on every value in the m a. """
        return self

    def __repr__(self):
        return 'Nothing'

    def __eq__(self, other):
        # All Nothing instances compare equal to one another.
        return isinstance(other, self.__class__)

    def __hash__(self):
        # Constant hash, consistent with the all-instances-equal __eq__.
        return hash(0)


# The shared singleton; user code compares against/returns this instance.
Nothing = _Nothing()  # pylint: disable=invalid-name
class Just(Maybe):
    """Maybe variant that carries an actual value."""

    def __init__(self, value):
        self.value = value

    def apply(self, applicative):
        """ apply :: m a -> m (a -> b) -> m b
        Create a new m b, from an m a using the results of calling a lifted function on every value in the m a. """
        # Narrowed from a bare `except:`: a bare except also swallows
        # SystemExit/KeyboardInterrupt, which should propagate.
        try:
            return self.bind(lambda v: applicative.fmap(lambda f: f(v)))
        except Exception:  # pylint: disable=broad-except
            return Nothing

    def bind(self, fun):
        """ bind :: m a -> (a -> m b) -> m b
        Create a new m b, from an m a using the results of calling a function on every value in the m a. """
        try:
            return fun(self.value)
        except Exception:  # pylint: disable=broad-except
            return Nothing

    def __repr__(self):
        return f'Just({self.value})'

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.value == other.value

    def __hash__(self):
        return hash(self.value)
|
PypiClean
|
/HtmlNode-0.1.8.tar.gz/HtmlNode-0.1.8/html_node/templates.py
|
from .tags import Node, Tag, html_node as n
__all__ = ['Placeholder', 'placeholder', 'BaseTemplate', 'BaseLayout']
class Placeholder(Node):
    """Stub to fill in other template."""
    def __init__(self, name):
        # Name used by fill() to look up the replacement content.
        self.name = name
def fill(tag, placeholders):
    """Recursively fill in the placeholders of descendents of any Tag in place."""
    if isinstance(tag, Tag):
        for index, child in enumerate(tag.children):
            if isinstance(child, Placeholder):
                replacement = placeholders[child.name]
            else:
                replacement = child
            tag.children[index] = fill(replacement, placeholders=placeholders)
    return tag
def placeholder(arg):
    """Decorate a class method to create a placeholder with the same name as the method name.

    May also be called with a string to drop a Placeholder anchor directly
    into a template.

    ... 2013-07-07 - add ability to be use as Placeholder anchor in the template also
    """
    # Bug fix: `basestring` exists only on Python 2; on Python 3 this
    # isinstance check raised NameError before either branch could run,
    # breaking even the decorator usage.  `str` is the Python 3 equivalent.
    if isinstance(arg, str):
        # put an anchor
        return Placeholder(arg)
    elif hasattr(arg, '__call__'):
        # register the function to relate to the anchor
        arg.is_placeholder = True
        return arg
class BaseTemplate(Node):
    # Class-level defaults merged into every instance's context.
    default_context = {}

    def __init__(self, context={}, **kwargs):
        """The most basic template, which uses placeholder for rendering.

        :param context: A dictionary of Context to bring into the template
        :param kwargs: Key word argument which overrides the value in the context
        """
        # NOTE(review): the mutable default `context={}` is only read (its
        # entries are copied into self.context), so it is not mutated here,
        # but a `None` sentinel would be the safer idiom.
        self.__placeholders = {}
        self.context = {}
        # Precedence: class defaults < passed context < keyword overrides.
        self.context.update(self.default_context)
        self.context.update(context)
        self.context.update(kwargs)
        # Make shorthand available
        self.c = self.context.get
        self.__extract_placeholders()

    def __extract_placeholders(self):
        """Get placeholders from its declared methods which decorated as `placeholder`."""
        for key in dir(self):
            method = getattr(self, key)
            if getattr(method, 'is_placeholder', False):
                # Call the method now; its result replaces the matching
                # Placeholder anchor when the template is rendered.
                self.__placeholders[method.__name__] = method()

    def template(self):
        # Subclasses must return the Tag tree containing Placeholder anchors.
        raise NotImplementedError

    def render(self):
        return self.__unicode__()

    def __unicode__(self):
        # Fill all anchors in the template tree, then render it to markup.
        return fill(self.template(), self.__placeholders).render()

    def __str__(self):
        return self.__unicode__()
class BaseLayout(BaseTemplate):
    # Default page title; subclasses override this class attribute.
    title = 'Welcome!'

    def template(self):
        # Skeleton HTML5 document; the anchors are filled by the decorated
        # head()/body() methods below.
        return n.html5(n.head(Placeholder('head')), n.body(Placeholder('body')))

    @placeholder
    def head(self):
        return n.title(self.title)

    @placeholder
    def body(self):
        return n.div(class_="container")
|
PypiClean
|
/Nevow-0.14.5.tar.gz/Nevow-0.14.5/examples/athenademo/calculator.py
|
import sys
from twisted.python.filepath import FilePath
from twisted.python import log
from twisted.internet import reactor
from nevow.athena import LivePage, LiveElement, expose
from nevow.loaders import xmlfile
from nevow.appserver import NevowSite
# Handy helper for finding external resources nearby.
sibling = FilePath(__file__).sibling
class Calculator(object):
    """
    The model object for the calculator demo.  This is the object which
    actually knows how to perform calculations.

    @ivar expression: A C{str} giving the current expression which has been
        entered into the calculator.  For example, if the buttons '3', '5', and
        '+' have been pressed (in that order), C{expression} will be C{'35+'}.
    """
    defaultExpression = u'0'
    errorExpression = u'E'

    def __init__(self):
        self.expression = self.defaultExpression

    def buttonClicked(self, symbol):
        """
        Change the current expression by performing the operation indicated by
        C{symbol} (clearing it or computing it) or by extending it (with a
        digit or operator).

        @param symbol: C{'C'} to clear the expression, C{'='} to evaluate the
            expression, or one of C{'0'}-C{'9'}.
        @rtype: C{unicode}
        @return: The expression after interpreting the new symbol.
        """
        # Clear
        if symbol == 'C':
            self.expression = self.defaultExpression
            return self.expression
        # Check the expression is currently valid
        if self.expression == self.errorExpression:
            return self.expression
        # Evaluate the expression
        if symbol == '=':
            # eval() is tolerable here only because CalculatorElement
            # restricts the input alphabet to digits and arithmetic operators.
            try:
                # Py3 fix: `unicode` no longer exists; use str().
                self.expression = str(eval(self.expression))
            except Exception:
                # Broadened from ZeroDivisionError: an incomplete expression
                # such as "35+" raises SyntaxError, which previously escaped
                # and crashed the handler instead of showing the error state.
                self.expression = self.errorExpression
            return self.expression
        # Replace or add to the expression
        if self.expression == self.defaultExpression:
            self.expression = symbol
        else:
            self.expression += symbol
        return self.expression
class CalculatorElement(LiveElement):
    """
    A "live" calculator.

    All buttons presses in the browser are sent to the server.  The server
    evaluates the expression and sets the output in the browser.

    @ivar validSymbols: A C{str} giving all of the symbols which the browser is
        allowed to submit to us.  Input is checked against this before being
        submitted to the model.
    @ivar calc: A L{Calculator} which will be used to handle all inputs and
        generate computed outputs.
    """
    # Template pattern and the client-side JavaScript class backing this element.
    docFactory = xmlfile(sibling('calculator.html').path, 'CalculatorPattern')
    jsClass = u"CalculatorDemo.Calculator"

    validSymbols = '0123456789/*-=+.C'

    def __init__(self, calc):
        LiveElement.__init__(self)
        self.calc = calc

    def buttonClicked(self, symbol):
        """
        Accept a symbol from the browser, perform input validation on it,
        provide it to the underlying L{Calculator} if appropriate, and return
        the result.

        @type symbol: C{unicode}
        @rtype: C{unicode}
        """
        # Remember ... never trust a browser
        if symbol not in self.validSymbols:
            raise ValueError('Invalid symbol')
        return self.calc.buttonClicked(symbol)
    # Make buttonClicked callable from the client-side Athena code.
    expose(buttonClicked)
class CalculatorParentPage(LivePage):
    """
    A "live" container page for L{CalculatorElement}.
    """
    docFactory = xmlfile(sibling('calculator.html').path)

    def __init__(self, *a, **kw):
        # NOTE(review): extra arguments (e.g. the `calc` keyword passed by
        # main()) are accepted but silently discarded; each render creates
        # its own Calculator in render_calculator below.
        LivePage.__init__(self)
        # Update the mapping of known JavaScript modules so that the
        # client-side code for this example can be found and served to the
        # browser.
        self.jsModules.mapping[u'CalculatorDemo'] = sibling(
            'calculator.js').path

    def render_calculator(self, ctx, data):
        """
        Replace the tag with a new L{CalculatorElement}.
        """
        c = CalculatorElement(Calculator())
        c.setFragmentParent(self)
        return c
def main():
    """Serve the calculator demo on port 8080, logging to stdout."""
    log.startLogging(sys.stdout)
    site = NevowSite(CalculatorParentPage(calc=Calculator()))
    reactor.listenTCP(8080, site)
    reactor.run()


if __name__ == '__main__':
    main()
|
PypiClean
|
/cohesity-sdk-1.1.0.tar.gz/cohesity-sdk-1.1.0/cohesity_sdk/cluster/model/recover_cassandra_snapshot_params_all_of.py
|
import re # noqa: F401
import sys # noqa: F401
from cohesity_sdk.cluster.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Imported lazily (and published via globals()) to break the circular
    # import between generated model modules.
    from cohesity_sdk.cluster.model.recover_cassandra_no_sql_object_params import RecoverCassandraNoSqlObjectParams
    globals()['RecoverCassandraNoSqlObjectParams'] = RecoverCassandraNoSqlObjectParams
class RecoverCassandraSnapshotParamsAllOf(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum constraints are defined for this model.
    allowed_values = {
    }

    # No value validations are defined for this model.
    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'objects': ([RecoverCassandraNoSqlObjectParams], none_type,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model participates in no polymorphic (oneOf/anyOf) dispatch.
        return None

    # Python attribute name -> JSON key in the OpenAPI definition.
    attribute_map = {
        'objects': 'objects',  # noqa: E501
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes that must never be treated as model data.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """RecoverCassandraSnapshotParamsAllOf - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            objects ([RecoverCassandraNoSqlObjectParams], none_type): Specifies details of objects to be recovered.. [optional]  # noqa: E501
        """

        # Pop the framework-control keywords before treating the rest as data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Generated models are keyword-only; positional arguments are rejected.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Optionally drop unknown keys when the configuration asks for it
            # and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
PypiClean
|
/django2_bootstrap3_datetimepicker-3.0.3-py3-none-any.whl/bootstrap3_datetime/static/bootstrap3_datetime/js/locales/bootstrap-datetimepicker.uk.js
|
// Ukrainian locale for moment.js, UMD-wrapped so it loads under AMD,
// CommonJS (Node), or as a browser global.
(function (factory) {
    if (typeof define === 'function' && define.amd) {
        define(['moment'], factory); // AMD
    } else if (typeof exports === 'object') {
        module.exports = factory(require('../moment')); // Node
    } else {
        factory(window.moment); // Browser global
    }
}(function (moment) {
    // Pick the Ukrainian plural form (one / few / many) for `num`.
    function plural(word, num) {
        var forms = word.split('_');
        return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? forms[1] : forms[2]);
    }

    // Relative-time formatter that applies the plural rules above;
    // bare 'm'/'h' need the accusative case when used with a suffix.
    function relativeTimeWithPlural(number, withoutSuffix, key) {
        var format = {
            'mm': 'хвилина_хвилини_хвилин',
            'hh': 'година_години_годин',
            'dd': 'день_дні_днів',
            'MM': 'місяць_місяці_місяців',
            'yy': 'рік_роки_років'
        };
        if (key === 'm') {
            return withoutSuffix ? 'хвилина' : 'хвилину';
        }
        else if (key === 'h') {
            return withoutSuffix ? 'година' : 'годину';
        }
        else {
            return number + ' ' + plural(format[key], +number);
        }
    }

    // Month names decline: use the accusative case after a day-of-month.
    function monthsCaseReplace(m, format) {
        var months = {
            'nominative': 'січень_лютий_березень_квітень_травень_червень_липень_серпень_вересень_жовтень_листопад_грудень'.split('_'),
            'accusative': 'січня_лютого_березня_квітня_травня_червня_липня_серпня_вересня_жовтня_листопада_грудня'.split('_')
        },
        nounCase = (/D[oD]? *MMMM?/).test(format) ?
            'accusative' :
            'nominative';
        return months[nounCase][m.month()];
    }

    // Weekday names decline depending on the surrounding phrase.
    function weekdaysCaseReplace(m, format) {
        var weekdays = {
            'nominative': 'неділя_понеділок_вівторок_середа_четвер_п’ятниця_субота'.split('_'),
            'accusative': 'неділю_понеділок_вівторок_середу_четвер_п’ятницю_суботу'.split('_'),
            'genitive': 'неділі_понеділка_вівторка_середи_четверга_п’ятниці_суботи'.split('_')
        },
        nounCase = (/(\[[ВвУу]\]) ?dddd/).test(format) ?
            'accusative' :
            ((/\[?(?:минулої|наступної)? ?\] ?dddd/).test(format) ?
                'genitive' :
                'nominative');
        return weekdays[nounCase][m.day()];
    }

    // Calendar phrases end in "at N o'clock"; 11 takes a different preposition form.
    function processHoursFunction(str) {
        return function () {
            return str + 'о' + (this.hours() === 11 ? 'б' : '') + '] LT';
        };
    }

    return moment.lang('uk', {
        months : monthsCaseReplace,
        monthsShort : "січ_лют_бер_квіт_трав_черв_лип_серп_вер_жовт_лист_груд".split("_"),
        weekdays : weekdaysCaseReplace,
        weekdaysShort : "нд_пн_вт_ср_чт_пт_сб".split("_"),
        weekdaysMin : "нд_пн_вт_ср_чт_пт_сб".split("_"),
        longDateFormat : {
            LT : "HH:mm",
            L : "DD.MM.YYYY",
            LL : "D MMMM YYYY р.",
            LLL : "D MMMM YYYY р., LT",
            LLLL : "dddd, D MMMM YYYY р., LT"
        },
        calendar : {
            sameDay: processHoursFunction('[Сьогодні '),
            nextDay: processHoursFunction('[Завтра '),
            lastDay: processHoursFunction('[Вчора '),
            nextWeek: processHoursFunction('[У] dddd ['),
            lastWeek: function () {
                // "Last <weekday>" needs a gendered adjective per weekday.
                switch (this.day()) {
                case 0:
                case 3:
                case 5:
                case 6:
                    return processHoursFunction('[Минулої] dddd [').call(this);
                case 1:
                case 2:
                case 4:
                    return processHoursFunction('[Минулого] dddd [').call(this);
                }
            },
            sameElse: 'L'
        },
        relativeTime : {
            future : "за %s",
            past : "%s тому",
            s : "декілька секунд",
            m : relativeTimeWithPlural,
            mm : relativeTimeWithPlural,
            h : "годину",
            hh : relativeTimeWithPlural,
            d : "день",
            dd : relativeTimeWithPlural,
            M : "місяць",
            MM : relativeTimeWithPlural,
            y : "рік",
            yy : relativeTimeWithPlural
        },
        // M. E.: those two are virtually unused but a user might want to implement them for his/her website for some reason
        meridiem : function (hour, minute, isLower) {
            if (hour < 4) {
                return "ночі";
            } else if (hour < 12) {
                return "ранку";
            } else if (hour < 17) {
                return "дня";
            } else {
                return "вечора";
            }
        },
        ordinal: function (number, period) {
            switch (period) {
            case 'M':
            case 'd':
            case 'DDD':
            case 'w':
            case 'W':
                return number + '-й';
            case 'D':
                return number + '-го';
            default:
                return number;
            }
        },
        week : {
            dow : 1, // Monday is the first day of the week.
            doy : 7 // The week that contains Jan 1st is the first week of the year.
        }
    });
}));
|
PypiClean
|
/OctoBot-Pro-0.0.10.tar.gz/OctoBot-Pro-0.0.10/CHANGELOG.md
|
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.0.10] - 2022-05-13
### Update
- Added python 3.9 and 3.10 support
- Update to OctoBot 0.4.50
## [0.0.9] - 2022-05-02
### Update
- Update to OctoBot 0.4.49
## [0.0.8] - 2022-03-24
### Update
- Update to OctoBot 0.4.45
## [0.0.7] - 2022-03-07
### Update
- Update to OctoBot 0.4.41
## [0.0.6] - 2022-01-21
### Fix
- Typeerror and report issues
## [0.0.5] - 2022-01-14
### Fix
- Installation: remove cryptofeed requirement in OctoBot
## [0.0.4] - 2021-12-30
### Added
- Report generation time
## [0.0.3] - 2021-12-29
### Fixed
- Install
## [0.0.2] - 2021-12-29
### Updated
- Install method
## [0.0.1] - 2021-12-10
### Added
- OctoBot Pro alpha version
|
PypiClean
|
/VIP-yt-2022.7.26.tar.gz/VIP-yt-2022.7.26/VIP_yt/extractor/fivetv.py
|
from .common import InfoExtractor
from ..utils import int_or_none
class FiveTVIE(InfoExtractor):
    """Extractor for videos hosted on 5-tv.ru.

    The URL pattern captures either a numeric media id from a nested path
    (e.g. ``/news/96814/``) or, failing that, a bare section name
    (e.g. ``/glavnoe/``), which then doubles as the video id.
    """

    _VALID_URL = r'''(?x)
                    https?://
                        (?:www\.)?5-tv\.ru/
                        (?:
                            (?:[^/]+/)+(?P<id>\d+)|
                            (?P<path>[^/?#]+)(?:[/?#])?
                        )
                    '''
    _TESTS = [{
        'url': 'http://5-tv.ru/news/96814/',
        'md5': 'bbff554ad415ecf5416a2f48c22d9283',
        'info_dict': {
            'id': '96814',
            'ext': 'mp4',
            'title': 'Россияне выбрали имя для общенациональной платежной системы',
            'description': 'md5:a8aa13e2b7ad36789e9f77a74b6de660',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 180,
        },
    }, {
        'url': 'http://5-tv.ru/video/1021729/',
        'info_dict': {
            'id': '1021729',
            'ext': 'mp4',
            'title': '3D принтер',
            'description': 'md5:d76c736d29ef7ec5c0cf7d7c65ffcb41',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 180,
        },
    }, {
        # redirect to https://www.5-tv.ru/projects/1000095/izvestia-glavnoe/
        'url': 'http://www.5-tv.ru/glavnoe/#itemDetails',
        'info_dict': {
            'id': 'glavnoe',
            'ext': 'mp4',
            'title': r're:^Итоги недели с \d+ по \d+ \w+ \d{4} года$',
            'thumbnail': r're:^https?://.*\.jpg$',
        },
        'skip': 'redirect to «Известия. Главное» project page',
    }, {
        'url': 'http://www.5-tv.ru/glavnoe/broadcasts/508645/',
        'only_matching': True,
    }, {
        'url': 'http://5-tv.ru/films/1507502/',
        'only_matching': True,
    }, {
        'url': 'http://5-tv.ru/programs/broadcast/508713/',
        'only_matching': True,
    }, {
        'url': 'http://5-tv.ru/angel/',
        'only_matching': True,
    }, {
        'url': 'http://www.5-tv.ru/schedule/?iframe=true&width=900&height=450',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = self._match_valid_url(url)
        # Prefer the numeric id; fall back to the named path segment.
        video_id = mobj.group('id') or mobj.group('path')
        webpage = self._download_webpage(url, video_id)
        # The media URL lives either on the player <div>'s data-href
        # attribute or on an <a class="videoplayer"> link, depending on
        # the page layout.
        video_url = self._search_regex(
            [r'<div[^>]+?class="(?:flow)?player[^>]+?data-href="([^"]+)"',
             r'<a[^>]+?href="([^"]+)"[^>]+?class="videoplayer"'],
            webpage, 'video url')
        title = self._og_search_title(webpage, default=None) or self._html_extract_title(webpage)
        duration = int_or_none(self._og_search_property(
            'video:duration', webpage, 'duration', default=None))
        # description/thumbnail/duration are best-effort: each may be None
        # when the page lacks the corresponding OpenGraph tag.
        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': self._og_search_description(webpage, default=None),
            'thumbnail': self._og_search_thumbnail(webpage, default=None),
            'duration': duration,
        }
|
PypiClean
|
/SQLAlchemy-2.0.20.tar.gz/SQLAlchemy-2.0.20/doc/build/index.rst
|
:orphan:
.. _index_toplevel:
========================
SQLAlchemy Documentation
========================
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Getting Started
.. container::
New to SQLAlchemy? Start here:
* **For Python Beginners:** :ref:`Installation Guide <installation>` - basic guidance on installing with pip and similar
* **For Python Veterans:** :doc:`SQLAlchemy Overview <intro>` - brief architectural overview
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Tutorials
.. container::
New users of SQLAlchemy, as well as veterans of older SQLAlchemy
release series, should start with the
:doc:`/tutorial/index`, which covers everything an Alchemist needs
to know when using the ORM or just Core.
* **For a quick glance:** :doc:`/orm/quickstart` - a glimpse at what working with the ORM looks like
* **For all users:** :doc:`/tutorial/index` - In depth tutorial for Core and ORM
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Migration Notes
.. container::
Users coming from older versions of SQLAlchemy, especially those transitioning
from the 1.x style of working, will want to review this documentation.
* :doc:`Migrating to SQLAlchemy 2.0 <changelog/migration_20>` - Complete background on migrating from 1.3 or 1.4 to 2.0
* :doc:`What's New in SQLAlchemy 2.0? <changelog/whatsnew_20>` - New 2.0 features and behaviors beyond the 1.x migration
* :doc:`Changelog catalog <changelog/index>` - Detailed changelogs for all SQLAlchemy Versions
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Reference and How To
.. container:: orm
**SQLAlchemy ORM** - Detailed guides and API reference for using the ORM
* **Mapping Classes:**
:doc:`Mapping Python Classes <orm/mapper_config>` |
:doc:`Relationship Configuration <orm/relationships>`
* **Using the ORM:**
:doc:`Using the ORM Session <orm/session>` |
:doc:`ORM Querying Guide <orm/queryguide/index>` |
:doc:`Using AsyncIO <orm/extensions/asyncio>`
* **Configuration Extensions:**
:doc:`Association Proxy <orm/extensions/associationproxy>` |
:doc:`Hybrid Attributes <orm/extensions/hybrid>` |
:doc:`Mutable Scalars <orm/extensions/mutable>` |
:doc:`Automap <orm/extensions/automap>` |
:doc:`All extensions <orm/extensions/index>`
* **Extending the ORM:**
:doc:`ORM Events and Internals <orm/extending>`
* **Other:**
:doc:`Introduction to Examples <orm/examples>`
.. container:: core
**SQLAlchemy Core** - Detailed guides and API reference for working with Core
* **Engines, Connections, Pools:**
:doc:`Engine Configuration <core/engines>` |
:doc:`Connections, Transactions, Results <core/connections>` |
:doc:`AsyncIO Support <orm/extensions/asyncio>` |
:doc:`Connection Pooling <core/pooling>`
* **Schema Definition:**
:doc:`Overview <core/schema>` |
:ref:`Tables and Columns <metadata_describing_toplevel>` |
:ref:`Database Introspection (Reflection) <metadata_reflection_toplevel>` |
:ref:`Insert/Update Defaults <metadata_defaults_toplevel>` |
:ref:`Constraints and Indexes <metadata_constraints_toplevel>` |
:ref:`Using Data Definition Language (DDL) <metadata_ddl_toplevel>`
* **SQL Statements:**
:doc:`SQL Expression Elements <core/sqlelement>` |
:doc:`Operator Reference <core/operators>` |
:doc:`SELECT and related constructs <core/selectable>` |
:doc:`INSERT, UPDATE, DELETE <core/dml>` |
:doc:`SQL Functions <core/functions>` |
:doc:`Table of Contents <core/expression_api>`
* **Datatypes:**
:ref:`Overview <types_toplevel>` |
:ref:`Building Custom Types <types_custom>` |
:ref:`Type API Reference <types_api>`
* **Core Basics:**
:doc:`Overview <core/api_basics>` |
:doc:`Runtime Inspection API <core/inspection>` |
:doc:`Event System <core/event>` |
:doc:`Core Event Interfaces <core/events>` |
:doc:`Creating Custom SQL Constructs <core/compiler>`
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Dialect Documentation
.. container::
The **dialect** is the system SQLAlchemy uses to communicate with various types of DBAPIs and databases.
This section describes notes, options, and usage patterns regarding individual dialects.
:doc:`PostgreSQL <dialects/postgresql>` |
:doc:`MySQL <dialects/mysql>` |
:doc:`SQLite <dialects/sqlite>` |
:doc:`Oracle <dialects/oracle>` |
:doc:`Microsoft SQL Server <dialects/mssql>`
:doc:`More Dialects ... <dialects/index>`
.. container:: left_right_container
.. container:: leftmost
.. rst-class:: h2
Supplementary
.. container::
* :doc:`Frequently Asked Questions <faq/index>` - A collection of common problems and solutions
* :doc:`Glossary <glossary>` - Terms used in SQLAlchemy's documentation
* :doc:`Error Message Guide <errors>` - Explanations of many SQLAlchemy Errors
* :doc:`Complete table of contents <contents>`
* :ref:`Index <genindex>`
|
PypiClean
|
/elixirnote-4.0.0a28-py3-none-any.whl/jupyterlab/static/9030.2ba1a8d0e8831066ca6c.js
|
"use strict";(self["webpackChunk_jupyterlab_application_top"]=self["webpackChunk_jupyterlab_application_top"]||[]).push([[9030],{9030:(e,t,O)=>{O.r(t);O.d(t,{completeFromSchema:()=>j,xml:()=>A,xmlLanguage:()=>Z});var r=O(11705);var n=O(96006);const a=1,s=2,o=3,l=4,i=5,d=35,c=36,p=37,u=11,$=13;function f(e){return e==45||e==46||e==58||e>=65&&e<=90||e==95||e>=97&&e<=122||e>=161}function k(e){return e==9||e==10||e==13||e==32}let m=null,g=null,h=0;function T(e,t){let O=e.pos+t;if(g==e&&h==O)return m;while(k(e.peek(t)))t++;let r="";for(;;){let O=e.peek(t);if(!f(O))break;r+=String.fromCharCode(O);t++}g=e;h=O;return m=r||null}function v(e,t){this.name=e;this.parent=t;this.hash=t?t.hash:0;for(let O=0;O<e.length;O++)this.hash+=(this.hash<<4)+e.charCodeAt(O)+(e.charCodeAt(O)<<8)}const P=new r.IK({start:null,shift(e,t,O,r){return t==a?new v(T(r,1)||"",e):e},reduce(e,t){return t==u&&e?e.parent:e},reuse(e,t,O,r){let n=t.type.id;return n==a||n==$?new v(T(r,1)||"",e):e},hash(e){return e?e.hash:0},strict:false});const b=new r.Jq(((e,t)=>{if(e.next!=60)return;e.advance();if(e.next==47){e.advance();let O=T(e,0);if(!O)return e.acceptToken(i);if(t.context&&O==t.context.name)return e.acceptToken(s);for(let r=t.context;r;r=r.parent)if(r.name==O)return e.acceptToken(o,-2);e.acceptToken(l)}else if(e.next!=33&&e.next!=63){return e.acceptToken(a)}}),{contextual:true});function W(e,t){return new r.Jq((O=>{for(let r=0,n=0;;n++){if(O.next<0){if(n)O.acceptToken(e);break}if(O.next==t.charCodeAt(r)){r++;if(r==t.length){if(n>t.length)O.acceptToken(e,1-t.length);break}}else{r=O.next==t.charCodeAt(0)?1:0}O.advance()}}))}const C=W(d,"--\x3e");const w=W(c,"?>");const x=W(p,"]]>");const y=(0,n.styleTags)({Text:n.tags.content,"StartTag StartCloseTag EndTag SelfCloseEndTag":n.tags.angleBracket,TagName:n.tags.tagName,"MismatchedCloseTag/Tagname":[n.tags.tagName,n.tags.invalid],AttributeName:n.tags.attributeName,AttributeValue:n.tags.attributeValue,Is:n.tags.definitionOperator,"EntityReference 
CharacterReference":n.tags.character,Comment:n.tags.blockComment,ProcessingInst:n.tags.processingInstruction,DoctypeDecl:n.tags.documentMeta,Cdata:n.tags.special(n.tags.string)});const S=r.WQ.deserialize({version:14,states:",SOQOaOOOrOxO'#CfOzOpO'#CiO!tOaO'#CgOOOP'#Cg'#CgO!{OrO'#CrO#TOtO'#CsO#]OpO'#CtOOOP'#DS'#DSOOOP'#Cv'#CvQQOaOOOOOW'#Cw'#CwO#eOxO,59QOOOP,59Q,59QOOOO'#Cx'#CxO#mOpO,59TO#uO!bO,59TOOOP'#C{'#C{O$TOaO,59RO$[OpO'#CoOOOP,59R,59ROOOQ'#C|'#C|O$dOrO,59^OOOP,59^,59^OOOS'#C}'#C}O$lOtO,59_OOOP,59_,59_O$tOpO,59`O$|OpO,59`OOOP-E6t-E6tOOOW-E6u-E6uOOOP1G.l1G.lOOOO-E6v-E6vO%UO!bO1G.oO%UO!bO1G.oO%dOpO'#CkO%lO!bO'#CyO%zO!bO1G.oOOOP1G.o1G.oOOOP1G.w1G.wOOOP-E6y-E6yOOOP1G.m1G.mO&VOpO,59ZO&_OpO,59ZOOOQ-E6z-E6zOOOP1G.x1G.xOOOS-E6{-E6{OOOP1G.y1G.yO&gOpO1G.zO&gOpO1G.zOOOP1G.z1G.zO&oO!bO7+$ZO&}O!bO7+$ZOOOP7+$Z7+$ZOOOP7+$c7+$cO'YOpO,59VO'bOpO,59VO'jO!bO,59eOOOO-E6w-E6wO'xOpO1G.uO'xOpO1G.uOOOP1G.u1G.uO(QOpO7+$fOOOP7+$f7+$fO(YO!bO<<GuOOOP<<Gu<<GuOOOP<<G}<<G}O'bOpO1G.qO'bOpO1G.qO(eO#tO'#CnOOOO1G.q1G.qO(sOpO7+$aOOOP7+$a7+$aOOOP<<HQ<<HQOOOPAN=aAN=aOOOPAN=iAN=iO'bOpO7+$]OOOO7+$]7+$]OOOO'#Cz'#CzO({O#tO,59YOOOO,59Y,59YOOOP<<G{<<G{OOOO<<Gw<<GwOOOO-E6x-E6xOOOO1G.t1G.t",stateData:")Z~OPQOSVOTWOVWOWWOXWOiXOxPO}TO!PUO~OuZOw]O~O^`Oy^O~OPQOQcOSVOTWOVWOWWOXWOxPO}TO!PUO~ORdO~P!SOseO|gO~OthO!OjO~O^lOy^O~OuZOwoO~O^qOy^O~O[vO`sOdwOy^O~ORyO~P!SO^{Oy^O~OseO|}O~OthO!O!PO~O^!QOy^O~O[!SOy^O~O[!VO`sOd!WOy^O~Oa!YOy^O~Oy^O[mX`mXdmX~O[!VO`sOd!WO~O^!]Oy^O~O[!_Oy^O~O[!aOy^O~O[!cO`sOd!dOy^O~O[!cO`sOd!dO~Oa!eOy^O~Oy^Oz!gO~Oy^O[ma`madma~O[!jOy^O~O[!kOy^O~O[!lO`sOd!mO~OW!pOX!pOz!rO{!pO~O[!sOy^O~OW!pOX!pOz!vO{!pO~O",goto:"%[wPPPPPPPPPPxxP!OP!UPP!_!iP!oxxxP!u!{#R$Z$j$p$v$|PPPP%SXWORYbXRORYb_t`qru!T!U!bQ!h!YS!o!e!fR!t!nQdRRybXSORYbQYORmYQ[PRn[Q_QQkVjp_krz!R!T!X!Z!^!`!f!i!nQr`QzcQ!RlQ!TqQ!XsQ!ZtQ!^{Q!`!QQ!f!YQ!i!]R!n!eQu`S!UqrU![u!U!bR!b!TQ!q!gR!u!qQbRRxbQfTR|fQiUR!OiSXOYTaRb",nodeNames:"⚠ StartTag StartCloseTag MissingCloseTag StartCloseTag StartCloseTag Document Text EntityReference CharacterReference Cdata 
Element EndTag OpenTag TagName Attribute AttributeName Is AttributeValue CloseTag SelfCloseEndTag SelfClosingTag Comment ProcessingInst MismatchedCloseTag DoctypeDecl",maxTerm:47,context:P,nodeProps:[["closedBy",1,"SelfCloseEndTag EndTag",13,"CloseTag MissingCloseTag"],["openedBy",12,"StartTag StartCloseTag",19,"OpenTag",20,"StartTag"]],propSources:[y],skippedNodes:[0],repeatNodeCount:8,tokenData:"Az~R!WOX$kXY%rYZ%rZ]$k]^%r^p$kpq%rqr$krs&tsv$kvw'Uw}$k}!O(q!O!P$k!P!Q*n!Q![$k![!]+z!]!^$k!^!_/s!_!`=i!`!a>U!a!b>q!b!c$k!c!}+z!}#P$k#P#Q?}#Q#R$k#R#S+z#S#T$k#T#o+z#o%W$k%W%o+z%o%p$k%p&a+z&a&b$k&b1p+z1p4U$k4U4d+z4d4e$k4e$IS+z$IS$I`$k$I`$Ib+z$Ib$Kh$k$Kh%#t+z%#t&/x$k&/x&Et+z&Et&FV$k&FV;'S+z;'S;:j/S;:j?&r$k?&r?Ah+z?Ah?BY$k?BY?Mn+z?Mn~$kX$rUVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$kP%ZRVPOv%Uw!^%U!_~%UW%iR{WOr%dsv%dw~%d_%{]VP{WyUOX$kXY%rYZ%rZ]$k]^%r^p$kpq%rqr$krs%Usv$kw!^$k!^!_%d!_~$kZ&{RzYVPOv%Uw!^%U!_~%U~'XTOp'hqs'hst(Pt!]'h!^~'h~'kTOp'hqs'ht!]'h!]!^'z!^~'h~(POW~~(SROp(]q!](]!^~(]~(`SOp(]q!](]!]!^(l!^~(]~(qOX~Z(xWVP{WOr$krs%Usv$kw}$k}!O)b!O!^$k!^!_%d!_~$kZ)iWVP{WOr$krs%Usv$kw!^$k!^!_%d!_!`$k!`!a*R!a~$kZ*[U|QVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$k]*uWVP{WOr$krs%Usv$kw!^$k!^!_%d!_!`$k!`!a+_!a~$k]+hUdSVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$k_,V}`S^QVP{WOr$krs%Usv$kw}$k}!O+z!O!P+z!P!Q$k!Q![+z![!]+z!]!^$k!^!_%d!_!c$k!c!}+z!}#R$k#R#S+z#S#T$k#T#o+z#o$}$k$}%O+z%O%W$k%W%o+z%o%p$k%p&a+z&a&b$k&b1p+z1p4U+z4U4d+z4d4e$k4e$IS+z$IS$I`$k$I`$Ib+z$Ib$Je$k$Je$Jg+z$Jg$Kh$k$Kh%#t+z%#t&/x$k&/x&Et+z&Et&FV$k&FV;'S+z;'S;:j/S;:j?&r$k?&r?Ah+z?Ah?BY$k?BY?Mn+z?Mn~$k_/ZWVP{WOr$krs%Usv$kw!^$k!^!_%d!_;=`$k;=`<%l+z<%l~$kX/xU{WOq%dqr0[sv%dw!a%d!a!b=X!b~%dX0aZ{WOr%dsv%dw}%d}!O1S!O!f%d!f!g1x!g!}%d!}#O5s#O#W%d#W#X:k#X~%dX1XT{WOr%dsv%dw}%d}!O1h!O~%dX1oR}P{WOr%dsv%dw~%dX1}T{WOr%dsv%dw!q%d!q!r2^!r~%dX2cT{WOr%dsv%dw!e%d!e!f2r!f~%dX2wT{WOr%dsv%dw!v%d!v!w3W!w~%dX3]T{WOr%dsv%dw!{%d!{!|3l!|~%dX3qT{WOr%dsv%dw!r%d!r!s4Q!s~%dX4VT{WOr%dsv%dw!g%d!g!h4f!h~%dX4kV{WOr4frs5Qsv4fvw5Qw!`4f!`!a5c!a~4fP5TRO!`5Q!`!a5^!a~5QP5cOiPX5jRiP{WOr%dsv%dw~%dX5xV{WOr%dsv%
dw!e%d!e!f6_!f#V%d#V#W8w#W~%dX6dT{WOr%dsv%dw!f%d!f!g6s!g~%dX6xT{WOr%dsv%dw!c%d!c!d7X!d~%dX7^T{WOr%dsv%dw!v%d!v!w7m!w~%dX7rT{WOr%dsv%dw!c%d!c!d8R!d~%dX8WT{WOr%dsv%dw!}%d!}#O8g#O~%dX8nR{WxPOr%dsv%dw~%dX8|T{WOr%dsv%dw#W%d#W#X9]#X~%dX9bT{WOr%dsv%dw#T%d#T#U9q#U~%dX9vT{WOr%dsv%dw#h%d#h#i:V#i~%dX:[T{WOr%dsv%dw#T%d#T#U8R#U~%dX:pT{WOr%dsv%dw#c%d#c#d;P#d~%dX;UT{WOr%dsv%dw#V%d#V#W;e#W~%dX;jT{WOr%dsv%dw#h%d#h#i;y#i~%dX<OT{WOr%dsv%dw#m%d#m#n<_#n~%dX<dT{WOr%dsv%dw#d%d#d#e<s#e~%dX<xT{WOr%dsv%dw#X%d#X#Y4f#Y~%dX=`R!PP{WOr%dsv%dw~%dZ=rUaQVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$k_>_U[UVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$kZ>xWVP{WOr$krs%Usv$kw!^$k!^!_%d!_!`$k!`!a?b!a~$kZ?kU!OQVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$kZ@UWVP{WOr$krs%Usv$kw!^$k!^!_%d!_#P$k#P#Q@n#Q~$kZ@uWVP{WOr$krs%Usv$kw!^$k!^!_%d!_!`$k!`!aA_!a~$kZAhUwQVP{WOr$krs%Usv$kw!^$k!^!_%d!_~$k",tokenizers:[b,C,w,x,0,1,2,3],topRules:{Document:[0,6]},tokenPrec:0});var X=O(16888);function _(e,t){let O=t&&t.getChild("TagName");return O?e.sliceString(O.from,O.to):""}function z(e,t){let O=t&&t.firstChild;return!O||O.name!="OpenTag"?"":_(e,O)}function Q(e,t,O){let r=t&&t.getChildren("Attribute").find((e=>e.from<=O&&e.to>=O));let n=r&&r.getChild("AttributeName");return n?e.sliceString(n.from,n.to):""}function U(e){for(let t=e&&e.parent;t;t=t.parent)if(t.name=="Element")return t;return null}function V(e,t){var O;let r=(0,X.qz)(e).resolveInner(t,-1),n=null;for(let a=r;!n&&a.parent;a=a.parent)if(a.name=="OpenTag"||a.name=="CloseTag"||a.name=="SelfClosingTag"||a.name=="MismatchedCloseTag")n=a;if(n&&(n.to>t||n.lastChild.type.isError)){let e=n.parent;if(r.name=="TagName")return n.name=="CloseTag"||n.name=="MismatchedCloseTag"?{type:"closeTag",from:r.from,context:e}:{type:"openTag",from:r.from,context:U(e)};if(r.name=="AttributeName")return{type:"attrName",from:r.from,context:n};if(r.name=="AttributeValue")return{type:"attrValue",from:r.from,context:n};let O=r==n||r.name=="Attribute"?r.childBefore(t):r;if((O===null||O===void 0?void 
0:O.name)=="StartTag")return{type:"openTag",from:t,context:U(e)};if((O===null||O===void 0?void 0:O.name)=="StartCloseTag"&&O.to<=t)return{type:"closeTag",from:t,context:e};if((O===null||O===void 0?void 0:O.name)=="Is")return{type:"attrValue",from:t,context:n};if(O)return{type:"attrName",from:t,context:n};return null}else if(r.name=="StartCloseTag"){return{type:"closeTag",from:t,context:r.parent}}while(r.parent&&r.to==t&&!((O=r.lastChild)===null||O===void 0?void 0:O.type.isError))r=r.parent;if(r.name=="Element"||r.name=="Text"||r.name=="Document")return{type:"tag",from:t,context:r.name=="Element"?r:U(r)};return null}class R{constructor(e,t,O){this.attrs=t;this.attrValues=O;this.children=[];this.name=e.name;this.completion=Object.assign(Object.assign({type:"type"},e.completion||{}),{label:this.name});this.openCompletion=Object.assign(Object.assign({},this.completion),{label:"<"+this.name});this.closeCompletion=Object.assign(Object.assign({},this.completion),{label:"</"+this.name+">",boost:2});this.closeNameCompletion=Object.assign(Object.assign({},this.completion),{label:this.name+">"});this.text=e.textContent?e.textContent.map((e=>({label:e,type:"text"}))):[]}}const G=/^[:\-\.\w\u00b7-\uffff]*$/;function E(e){return Object.assign(Object.assign({type:"property"},e.completion||{}),{label:e.name})}function q(e){return typeof e=="string"?{label:`"${e}"`,type:"constant"}:/^"/.test(e.label)?e:Object.assign(Object.assign({},e),{label:`"${e.label}"`})}function j(e,t){let O=[],r=[];let n=Object.create(null);for(let l of t){let e=E(l);O.push(e);if(l.global)r.push(e);if(l.values)n[l.name]=l.values.map(q)}let a=[],s=[];let o=Object.create(null);for(let l of e){let e=r,t=n;if(l.attributes)e=e.concat(l.attributes.map((e=>{if(typeof e=="string")return O.find((t=>t.label==e))||{label:e,type:"property"};if(e.values){if(t==n)t=Object.create(t);t[e.name]=e.values.map(q)}return E(e)})));let i=new R(l,e,t);o[i.name]=i;a.push(i);if(l.top)s.push(i)}if(!s.length)s=a;for(let 
l=0;l<a.length;l++){let t=e[l],O=a[l];if(t.children){for(let e of t.children)if(o[e])O.children.push(o[e])}else{O.children=a}}return e=>{var t;let{doc:O}=e.state,l=V(e.state,e.pos);if(!l||l.type=="tag"&&!e.explicit)return null;let{type:i,from:d,context:c}=l;if(i=="openTag"){let e=s;let t=z(O,c);if(t){let O=o[t];e=(O===null||O===void 0?void 0:O.children)||a}return{from:d,options:e.map((e=>e.completion)),validFor:G}}else if(i=="closeTag"){let r=z(O,c);return r?{from:d,to:e.pos+(O.sliceString(e.pos,e.pos+1)==">"?1:0),options:[((t=o[r])===null||t===void 0?void 0:t.closeNameCompletion)||{label:r+">",type:"type"}],validFor:G}:null}else if(i=="attrName"){let e=o[_(O,c)];return{from:d,options:(e===null||e===void 0?void 0:e.attrs)||r,validFor:G}}else if(i=="attrValue"){let t=Q(O,c,d);if(!t)return null;let r=o[_(O,c)];let a=((r===null||r===void 0?void 0:r.attrValues)||n)[t];if(!a||!a.length)return null;return{from:d,to:e.pos+(O.sliceString(e.pos,e.pos+1)=='"'?1:0),options:a,validFor:/^"[^"]*"?$/}}else if(i=="tag"){let t=z(O,c),r=o[t];let n=[],l=c&&c.lastChild;if(t&&(!l||l.name!="CloseTag"||_(O,l)!=t))n.push(r?r.closeCompletion:{label:"</"+t+">",type:"type",boost:2});let i=n.concat(((r===null||r===void 0?void 0:r.children)||(c?a:s)).map((e=>e.openCompletion)));if(c&&(r===null||r===void 0?void 0:r.text.length)){let t=c.firstChild;if(t.to>e.pos-20&&!/\S/.test(e.state.sliceDoc(t.to,e.pos)))i=i.concat(r.text)}return{from:d,options:i,validFor:/^<\/?[:\-\.\w\u00b7-\uffff]*$/}}else{return null}}}const Z=X.qp.define({parser:S.configure({props:[X.uj.add({Element(e){let t=/^\s*<\//.test(e.textAfter);return e.lineIndent(e.node.from)+(t?0:e.unit)},"OpenTag CloseTag SelfClosingTag"(e){return e.column(e.node.from)+e.unit}}),X.x0.add({Element(e){let t=e.firstChild,O=e.lastChild;if(!t||t.name!="OpenTag")return null;return{from:t.to,to:O.name=="CloseTag"?O.from:e.to}}})]}),languageData:{commentTokens:{block:{open:"\x3c!--",close:"--\x3e"}},indentOnInput:/^\s*<\/$/}});function A(e={}){return 
new X.ri(Z,Z.data.of({autocomplete:j(e.elements||[],e.attributes||[])}))}}}]);
//# sourceMappingURL=9030.2ba1a8d0e8831066ca6c.js.map?v=2ba1a8d0e8831066ca6c
|
PypiClean
|
/hirise_tools-0.7.3.tar.gz/hirise_tools-0.7.3/docs/pyrise.rst
|
hirise\_tools package
=====================
Submodules
----------
hirise\_tools\.cli module
-------------------------
.. automodule:: pyrise.cli
:members:
:undoc-members:
:show-inheritance:
hirise\_tools\.data module
--------------------------
.. automodule:: pyrise.data
:members:
:undoc-members:
:show-inheritance:
hirise\_tools\.downloads module
-------------------------------
.. automodule:: pyrise.downloads
:members:
:undoc-members:
:show-inheritance:
hirise\_tools\.indexfiles module
--------------------------------
.. automodule:: pyrise.indexfiles
:members:
:undoc-members:
:show-inheritance:
hirise\_tools\.labels module
----------------------------
.. automodule:: pyrise.labels
:members:
:undoc-members:
:show-inheritance:
hirise\_tools\.products module
------------------------------
.. automodule:: pyrise.products
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: pyrise
:members:
:undoc-members:
:show-inheritance:
|
PypiClean
|
/lightdash_client_python-0.753.0-py3-none-any.whl/lightdash_client/models/additional_metric_filters_item.py
|
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast
import attr
from ..models.additional_metric_filters_item_operator import (
AdditionalMetricFiltersItemOperator,
)
from ..types import UNSET, Unset
if TYPE_CHECKING:
from ..models.additional_metric_filters_item_target import (
AdditionalMetricFiltersItemTarget,
)
T = TypeVar("T", bound="AdditionalMetricFiltersItem")
@attr.s(auto_attribs=True)
class AdditionalMetricFiltersItem:
    """A single filter attached to an additional metric.

    Attributes:
        operator (AdditionalMetricFiltersItemOperator):
        id (str):
        target (AdditionalMetricFiltersItemTarget):
        values (Union[Unset, List[Any]]):
        settings (Union[Unset, Any]):
        disabled (Union[Unset, bool]):
    """

    operator: AdditionalMetricFiltersItemOperator
    id: str
    target: "AdditionalMetricFiltersItemTarget"
    values: Union[Unset, List[Any]] = UNSET
    settings: Union[Unset, Any] = UNSET
    disabled: Union[Unset, bool] = UNSET

    def to_dict(self) -> Dict[str, Any]:
        """Serialise to a plain JSON-compatible dict.

        Required members are always emitted; optional members are emitted
        only when they were explicitly set (i.e. are not ``UNSET``).
        """
        serialised: Dict[str, Any] = {
            "operator": self.operator.value,
            "id": self.id,
            "target": self.target.to_dict(),
        }
        if not isinstance(self.values, Unset):
            serialised["values"] = self.values
        if self.settings is not UNSET:
            serialised["settings"] = self.settings
        if self.disabled is not UNSET:
            serialised["disabled"] = self.disabled
        return serialised

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a decoded JSON dict (inverse of ``to_dict``).

        Missing optional keys become ``UNSET``; keys other than the known
        fields are ignored.
        """
        # Imported here (as in the generated original) to avoid a circular
        # import between the model modules.
        from ..models.additional_metric_filters_item_target import (
            AdditionalMetricFiltersItemTarget,
        )

        payload = src_dict.copy()
        return cls(
            operator=AdditionalMetricFiltersItemOperator(payload.pop("operator")),
            id=payload.pop("id"),
            target=AdditionalMetricFiltersItemTarget.from_dict(payload.pop("target")),
            values=cast(List[Any], payload.pop("values", UNSET)),
            settings=payload.pop("settings", UNSET),
            disabled=payload.pop("disabled", UNSET),
        )
|
PypiClean
|
/elixirnote-4.0.0a28-py3-none-any.whl/jupyterlab/static/9362.e17eaf97a4562063f27b.js
|
"use strict";(self["webpackChunk_jupyterlab_application_top"]=self["webpackChunk_jupyterlab_application_top"]||[]).push([[9362],{19362:(e,t,n)=>{n.r(t);n.d(t,{tiki:()=>_});function r(e,t,n){return function(r,i){while(!r.eol()){if(r.match(t)){i.tokenize=u;break}r.next()}if(n)i.tokenize=n;return e}}function i(e){return function(t,n){while(!t.eol()){t.next()}n.tokenize=u;return e}}function u(e,t){function n(n){t.tokenize=n;return n(e,t)}var a=e.sol();var o=e.next();switch(o){case"{":e.eat("/");e.eatSpace();e.eatWhile(/[^\s\u00a0=\"\'\/?(}]/);t.tokenize=c;return"tag";case"_":if(e.eat("_"))return n(r("strong","__",u));break;case"'":if(e.eat("'"))return n(r("em","''",u));break;case"(":if(e.eat("("))return n(r("link","))",u));break;case"[":return n(r("url","]",u));break;case"|":if(e.eat("|"))return n(r("comment","||"));break;case"-":if(e.eat("=")){return n(r("header string","=-",u))}else if(e.eat("-")){return n(r("error tw-deleted","--",u))}break;case"=":if(e.match("=="))return n(r("tw-underline","===",u));break;case":":if(e.eat(":"))return n(r("comment","::"));break;case"^":return n(r("tw-box","^"));break;case"~":if(e.match("np~"))return n(r("meta","~/np~"));break}if(a){switch(o){case"!":if(e.match("!!!!!")){return n(i("header string"))}else if(e.match("!!!!")){return n(i("header string"))}else if(e.match("!!!")){return n(i("header string"))}else if(e.match("!!")){return n(i("header string"))}else{return n(i("header string"))}break;case"*":case"#":case"+":return n(i("tw-listitem bracket"));break}}return null}var a,o;function c(e,t){var n=e.next();var r=e.peek();if(n=="}"){t.tokenize=u;return"tag"}else if(n=="("||n==")"){return"bracket"}else if(n=="="){o="equals";if(r==">"){e.next();r=e.peek()}if(!/[\'\"]/.test(r)){t.tokenize=s()}return"operator"}else if(/[\'\"]/.test(n)){t.tokenize=f(n);return t.tokenize(e,t)}else{e.eatWhile(/[^\s\u00a0=\"\'\/?]/);return"keyword"}}function f(e){return 
function(t,n){while(!t.eol()){if(t.next()==e){n.tokenize=c;break}}return"string"}}function s(){return function(e,t){while(!e.eol()){var n=e.next();var r=e.peek();if(n==" "||n==","||/[ )}]/.test(r)){t.tokenize=c;break}}return"string"}}var l,k;function p(){for(var e=arguments.length-1;e>=0;e--)l.cc.push(arguments[e])}function d(){p.apply(null,arguments);return true}function h(e,t){var n=l.context&&l.context.noIndent;l.context={prev:l.context,pluginName:e,indent:l.indented,startOfLine:t,noIndent:n}}function g(){if(l.context)l.context=l.context.prev}function b(e){if(e=="openPlugin"){l.pluginName=a;return d(v,m(l.startOfLine))}else if(e=="closePlugin"){var t=false;if(l.context){t=l.context.pluginName!=a;g()}else{t=true}if(t)k="error";return d(x(t))}else if(e=="string"){if(!l.context||l.context.name!="!cdata")h("!cdata");if(l.tokenize==u)g();return d()}else return d()}function m(e){return function(t){if(t=="selfclosePlugin"||t=="endPlugin")return d();if(t=="endPlugin"){h(l.pluginName,e);return d()}return d()}}function x(e){return function(t){if(e)k="error";if(t=="endPlugin")return d();return p()}}function v(e){if(e=="keyword"){k="attribute";return d(v)}if(e=="equals")return d(w,v);return p()}function w(e){if(e=="keyword"){k="string";return d()}if(e=="string")return d(z);return p()}function z(e){if(e=="string")return d(z);else return p()}const _={startState:function(){return{tokenize:u,cc:[],indented:0,startOfLine:true,pluginName:null,context:null}},token:function(e,t){if(e.sol()){t.startOfLine=true;t.indented=e.indentation()}if(e.eatSpace())return null;k=o=a=null;var n=t.tokenize(e,t);if((n||o)&&n!="comment"){l=t;while(true){var r=t.cc.pop()||b;if(r(o||n))break}}t.startOfLine=false;return k||n},indent:function(e,t,n){var r=e.context;if(r&&r.noIndent)return 0;if(r&&/^{\//.test(t))r=r.prev;while(r&&!r.startOfLine)r=r.prev;if(r)return r.indent+n.unit;else return 0}}}}]);
//# sourceMappingURL=9362.e17eaf97a4562063f27b.js.map?v=e17eaf97a4562063f27b
|
PypiClean
|
/odoo14_addon_ssi_project_type-14.0.1.3.0-py3-none-any.whl/odoo/addons/ssi_project_type/README.rst
|
.. image:: https://img.shields.io/badge/licence-LGPL--3-blue.svg
:target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html
:alt: License: LGPL-3
============
Project Type
============
Installation
============
To install this module, you need to:
1. Clone the branch 14.0 of the repository https://github.com/open-synergy/opnsynid-project
2. Add the path to this repository in your configuration (addons-path)
3. Update the module list (Must be on developer mode)
4. Go to menu *Apps -> Apps -> Main Apps*
5. Search For *Project Type*
6. Install the module
Bug Tracker
===========
Bugs are tracked on `GitHub Issues
<https://github.com/open-synergy/opnsynid-project/issues>`_. In case of trouble, please
check there if your issue has already been reported. If you spotted it first,
help us smash it by providing detailed and welcomed feedback.
Credits
=======
Contributors
------------
* Andhitia Rama <[email protected]>
Maintainer
----------
.. image:: https://simetri-sinergi.id/logo.png
:alt: PT. Simetri Sinergi Indonesia
   :target: https://simetri-sinergi.id
This module is maintained by PT. Simetri Sinergi Indonesia.
|
PypiClean
|
/performanceplatform-client-0.11.5.tar.gz/performanceplatform-client-0.11.5/performanceplatform/client/admin.py
|
import json
import logging
import urllib
from .base import BaseClient, return_none_on
log = logging.getLogger(__name__)
try:
url_quote = urllib.quote_plus
except AttributeError:
url_quote = urllib.parse.quote_plus
class AdminAPI(BaseClient):
    """Client for the Performance Platform admin API.

    Each method is a thin wrapper mapping one-to-one onto an admin API
    endpoint through the HTTP helpers inherited from ``BaseClient``
    (``_get``, ``_post``, ``_put``, ``_delete``).
    """

    def __init__(self, base_url, token, dry_run=False, request_id_fn=None):
        """Initialise the client; all arguments are passed to ``BaseClient``."""
        super(AdminAPI, self).__init__(
            base_url,
            token,
            dry_run,
            request_id_fn)
        # NOTE(review): gzip is disabled for this client -- presumably the
        # admin API rejects compressed request bodies; confirm in BaseClient.
        self.should_gzip = False

    @return_none_on(404)
    def get_data_set(self, data_group, data_type):
        """Return the first data set matching *data_group* and *data_type*.

        Returns ``None`` on a 404 response (via the decorator) or when the
        query matches nothing.
        """
        query_result = self._get(
            path='/data-sets',
            params={"data-group": data_group, "data-type": data_type}
        )
        # The endpoint returns a list; reduce it to one record or None.
        if query_result is not None:
            query_result = query_result[0] if len(query_result) > 0 else None
        return query_result

    @return_none_on(404)
    def get_data_set_by_name(self, name):
        """Return the data set named *name*, or ``None`` on 404."""
        return self._get('/data-sets/{0}'.format(name))

    def get_data_set_transforms(self, name):
        """Return the transforms configured for data set *name*."""
        return self._get('/data-sets/{0}/transform'.format(name))

    def get_data_set_dashboard(self, name):
        """Return the dashboard data associated with data set *name*."""
        return self._get('/data-sets/{0}/dashboard'.format(name))

    def list_data_sets(self):
        """Return all data sets."""
        return self._get('/data-sets')

    @return_none_on(404)
    def get_data_group(self, data_group):
        """Return the data group named *data_group*, or ``None`` if absent."""
        query_result = self._get(
            path='/data-groups',
            params={'name': data_group},
        )
        # As in get_data_set, unwrap the single-element list result.
        if query_result is not None:
            query_result = query_result[0] if len(query_result) > 0 else None
        return query_result

    def get_user(self, email):
        """Return the user record for *email*.

        The email is URL-quoted because it appears as a path segment.
        """
        return self._get(
            '/users/{0}'.format(url_quote(email)))

    def list_dashboards(self):
        """Return all dashboards (``/dashboards`` endpoint)."""
        return self._get(
            '/dashboards')

    def get_dashboard(self, dashboard_id):
        """Return the dashboard with id *dashboard_id*."""
        return self._get(
            '/dashboard/{0}'.format(dashboard_id))

    def get_module(self, module_id):
        """Return the module with id *module_id*."""
        return self._get(
            '/module/{0}'.format(module_id))

    def get_dashboards(self, params=None):
        """Return dashboards, optionally filtered by query *params*."""
        return self._get('/dashboard', params=params)

    def get_dashboard_by_tx_id(self, tx_id):
        """Return the dashboard for Transactions Explorer service *tx_id*."""
        return self._get(
            '/transactions-explorer-service/{}/dashboard'.format(tx_id),
        )

    def get_transform_types(self):
        """Return all transform types."""
        return self._get('/transform-type')

    def create_data_set(self, data):
        """Create a data set from the dict *data*."""
        return self._post('/data-sets', json.dumps(data))

    def create_data_group(self, data):
        """Create a data group from the dict *data*."""
        return self._post('/data-groups', json.dumps(data))

    def create_transform(self, data):
        """Create a transform from the dict *data*."""
        return self._post('/transform', json.dumps(data))

    def create_dashboard(self, data):
        """Create a dashboard from the dict *data*."""
        return self._post('/dashboard', json.dumps(data))

    def update_dashboard(self, dashboard_id, data):
        """Update dashboard *dashboard_id* with the dict *data* (PUT)."""
        return self._put('/dashboard/{}'.format(dashboard_id),
                         json.dumps(data))

    def delete_dashboard(self, dashboard_id):
        """Delete dashboard *dashboard_id*."""
        return self._delete('/dashboard/{}'.format(dashboard_id))

    def list_organisations(self, query=None):
        """Return organisation nodes, optionally filtered by *query*."""
        return self._get(path='/organisation/node', params=query)

    def list_modules_on_dashboard(self, dashboard_id):
        """Return the modules attached to dashboard *dashboard_id*."""
        return self._get('/dashboard/{}/module'.format(dashboard_id))

    def add_module_to_dashboard(self, dashboard_id, data):
        """Attach a module described by dict *data* to *dashboard_id*."""
        return self._post('/dashboard/{}/module'.format(dashboard_id),
                          json.dumps(data))

    def list_module_types(self):
        """Return all module types."""
        return self._get('/module-type')

    def add_module_type(self, data):
        """Create a module type from the dict *data*."""
        return self._post('/module-type', json.dumps(data))

    def reauth(self, uid):
        """POST a reauth request for the user with id *uid* (empty body)."""
        return self._post('/auth/gds/api/users/{}/reauth'.format(uid), None)
|
PypiClean
|
/odoo_addon_stock_picking_kind-16.0.1.0.0.4-py3-none-any.whl/odoo/addons/stock_picking_kind/hooks.py
|
import logging
from odoo.tools import sql
_logger = logging.getLogger(__name__)
def pre_init_hook(cr):
    """Initialize picking_kind field based on location_id and location_dest_id.

    Runs before the module is installed: creates the ``picking_kind``
    column if it does not exist yet, then backfills it in raw SQL from
    the ``usage`` of each picking's source and destination locations.

    :param cr: database cursor supplied by Odoo's pre-init machinery.
    """
    if not sql.column_exists(cr, "stock_picking", "picking_kind"):
        _logger.info("Create picking_kind column")
        cr.execute(
            """
            ALTER TABLE stock_picking
            ADD COLUMN picking_kind character varying;
            """
        )
    _logger.info("Initialize picking_kind field")
    # Classify every picking by joining its source ("origin") and
    # destination location rows and comparing their usage values.
    # Branch order matters: drop-shipping cases must win over the
    # single-sided customer/supplier cases.
    cr.execute(
        """
        UPDATE stock_picking
        SET picking_kind = (
            CASE
                WHEN
                    origin.usage = 'supplier'
                    AND destination.usage = 'customer'
                    THEN 'drop_shipping'
                WHEN
                    origin.usage = 'customer'
                    AND destination.usage = 'supplier'
                    THEN 'drop_shipping_return'
                WHEN
                    origin.usage = 'customer'
                    AND destination.usage != 'customer'
                    THEN 'customer_return'
                WHEN
                    origin.usage != 'customer'
                    AND destination.usage = 'customer'
                    THEN 'customer_out'
                WHEN
                    origin.usage = 'supplier'
                    AND destination.usage != 'supplier'
                    THEN 'supplier_in'
                WHEN
                    origin.usage != 'supplier'
                    AND destination.usage = 'supplier'
                    THEN 'supplier_return'
                ELSE NULL
            END
        )
        FROM stock_location origin, stock_location destination
        WHERE stock_picking.location_id = origin.id
        AND stock_picking.location_dest_id = destination.id
        """
    )
    # Lazy %-formatting: the argument is only rendered if the record is
    # actually emitted (avoids eager f-string work in logging calls).
    _logger.info("%s rows updated", cr.rowcount)
|
PypiClean
|
/uniohomeassistant-0.1.3.tar.gz/uniohomeassistant-0.1.3/homeassistant/components/tcp/sensor.py
|
import logging
import select
import socket
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PAYLOAD,
CONF_PORT,
CONF_TIMEOUT,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)

# Platform-specific configuration keys (no homeassistant.const equivalents).
CONF_BUFFER_SIZE = "buffer_size"
CONF_VALUE_ON = "value_on"

# Defaults applied through PLATFORM_SCHEMA below.
DEFAULT_BUFFER_SIZE = 1024
DEFAULT_NAME = "TCP Sensor"
DEFAULT_TIMEOUT = 10

# host, port and payload are mandatory; everything else falls back to the
# defaults above or to None.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
        vol.Required(CONF_PAYLOAD): cv.string,
        vol.Optional(CONF_BUFFER_SIZE, default=DEFAULT_BUFFER_SIZE): cv.positive_int,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_ON): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the TCP Sensor from a validated platform config."""
    sensor = TcpSensor(hass, config)
    add_entities([sensor])
class TcpSensor(Entity):
    """Implementation of a TCP socket based sensor.

    Each update opens a fresh TCP connection, sends the configured
    payload, waits (via select) for the socket to become readable, and
    stores the decoded reply — optionally rendered through a value
    template — as the entity state.
    """

    # NOTE(review): not referenced anywhere in this class — presumably kept
    # for compatibility with a sibling platform; confirm before removing.
    required = ()

    def __init__(self, hass, config):
        """Set all the config values if they exist and get initial state."""
        value_template = config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            # Templates need a hass reference before they can be rendered.
            value_template.hass = hass
        self._hass = hass
        # Keep only the keys this entity reads, taken from the validated config.
        self._config = {
            CONF_NAME: config.get(CONF_NAME),
            CONF_HOST: config.get(CONF_HOST),
            CONF_PORT: config.get(CONF_PORT),
            CONF_TIMEOUT: config.get(CONF_TIMEOUT),
            CONF_PAYLOAD: config.get(CONF_PAYLOAD),
            CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT),
            CONF_VALUE_TEMPLATE: value_template,
            CONF_VALUE_ON: config.get(CONF_VALUE_ON),
            CONF_BUFFER_SIZE: config.get(CONF_BUFFER_SIZE),
        }
        self._state = None
        # Fetch an initial state synchronously at construction time.
        self.update()

    @property
    def name(self):
        """Return the name of this sensor."""
        name = self._config[CONF_NAME]
        if name is not None:
            return name
        return super().name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return self._config[CONF_UNIT_OF_MEASUREMENT]

    def update(self):
        """Get the latest value for this sensor."""
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(self._config[CONF_TIMEOUT])
            try:
                sock.connect((self._config[CONF_HOST], self._config[CONF_PORT]))
            except OSError as err:
                _LOGGER.error(
                    "Unable to connect to %s on port %s: %s",
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                    err,
                )
                return
            try:
                sock.send(self._config[CONF_PAYLOAD].encode())
            except OSError as err:
                _LOGGER.error(
                    "Unable to send payload %r to %s on port %s: %s",
                    self._config[CONF_PAYLOAD],
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                    err,
                )
                return
            # Wait until the server has data for us, bounded by the configured
            # timeout; on timeout the previous state is kept unchanged.
            readable, _, _ = select.select([sock], [], [], self._config[CONF_TIMEOUT])
            if not readable:
                _LOGGER.warning(
                    "Timeout (%s second(s)) waiting for a response after "
                    "sending %r to %s on port %s",
                    self._config[CONF_TIMEOUT],
                    self._config[CONF_PAYLOAD],
                    self._config[CONF_HOST],
                    self._config[CONF_PORT],
                )
                return
            # Single read up to buffer_size bytes; reply is assumed decodable
            # with the default encoding.
            value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode()
            if self._config[CONF_VALUE_TEMPLATE] is not None:
                try:
                    self._state = self._config[CONF_VALUE_TEMPLATE].render(
                        parse_result=False, value=value
                    )
                    return
                except TemplateError:
                    # Render failure: log and keep the previous state.
                    _LOGGER.error(
                        "Unable to render template of %r with value: %r",
                        self._config[CONF_VALUE_TEMPLATE],
                        value,
                    )
                    return
            # No template configured: store the raw decoded reply.
            self._state = value
|
PypiClean
|
/azure-multiapi-storage-1.2.0.tar.gz/azure-multiapi-storage-1.2.0/azure/multiapi/storagev2/blob/v2021_08_06/_shared_access_signature.py
|
from typing import ( # pylint: disable=unused-import
Union, Optional, Any, TYPE_CHECKING
)
from ._shared import sign_string, url_quote
from ._shared.constants import X_MS_VERSION
from ._shared.models import Services, UserDelegationKey
from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \
QueryStringConstants
if TYPE_CHECKING:
from datetime import datetime
from ..blob import (
ResourceTypes,
AccountSasPermissions,
ContainerSasPermissions,
BlobSasPermissions
)
class BlobQueryStringConstants(object):
    """Blob-specific SAS query-string parameter names."""

    # Timestamp of the snapshot/version the signature applies to.
    SIGNED_TIMESTAMP = 'snapshot'
class BlobSharedAccessSignature(SharedAccessSignature):
    '''
    Provides a factory for creating blob and container access
    signature tokens with a common account name and account key. Users can either
    use the factory or can construct the appropriate service and use the
    generate_*_shared_access_signature method directly.
    '''

    def __init__(self, account_name, account_key=None, user_delegation_key=None):
        '''
        :param str account_name:
            The storage account name used to generate the shared access signatures.
        :param str account_key:
            The access key to generate the shared access signatures.
        :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key:
            Instead of an account key, the user could pass in a user delegation key.
            A user delegation key can be obtained from the service by authenticating with an AAD identity;
            this can be accomplished by calling get_user_delegation_key on any Blob service object.
        '''
        super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION)
        self.user_delegation_key = user_delegation_key

    def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None,
                      expiry=None, start=None, policy_id=None, ip=None, protocol=None,
                      cache_control=None, content_disposition=None,
                      content_encoding=None, content_language=None,
                      content_type=None, **kwargs):
        '''
        Generates a shared access signature for the blob or one of its snapshots.
        Use the returned signature with the sas_token parameter of any BlobService.
        :param str container_name:
            Name of container.
        :param str blob_name:
            Name of blob.
        :param str snapshot:
            The snapshot parameter is an opaque DateTime value that,
            when present, specifies the blob snapshot to grant permission.
        :param permission:
            The permissions associated with the shared access signature. The
            user is restricted to operations allowed by the permissions.
            Permissions must be ordered racwdxytmei.
            Required unless an id is given referencing a stored access policy
            which contains this field. This field must be omitted if it has been
            specified in an associated stored access policy.
        :type permission: str or BlobSasPermissions
        :param expiry:
            The time at which the shared access signature becomes invalid.
            Required unless an id is given referencing a stored access policy
            which contains this field. This field must be omitted if it has
            been specified in an associated stored access policy. Azure will always
            convert values to UTC. If a date is passed in without timezone info, it
            is assumed to be UTC.
        :type expiry: datetime or str
        :param start:
            The time at which the shared access signature becomes valid. If
            omitted, start time for this call is assumed to be the time when the
            storage service receives the request. Azure will always convert values
            to UTC. If a date is passed in without timezone info, it is assumed to
            be UTC.
        :type start: datetime or str
        :param str policy_id:
            A unique value up to 64 characters in length that correlates to a
            stored access policy. To create a stored access policy, use
            set_blob_service_properties.
        :param str ip:
            Specifies an IP address or a range of IP addresses from which to accept requests.
            If the IP address from which the request originates does not match the IP address
            or address range specified on the SAS token, the request is not authenticated.
            For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
            restricts the request to those IP addresses.
        :param str protocol:
            Specifies the protocol permitted for a request made. The default value
            is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values.
        :param str cache_control:
            Response header value for Cache-Control when resource is accessed
            using this shared access signature.
        :param str content_disposition:
            Response header value for Content-Disposition when resource is accessed
            using this shared access signature.
        :param str content_encoding:
            Response header value for Content-Encoding when resource is accessed
            using this shared access signature.
        :param str content_language:
            Response header value for Content-Language when resource is accessed
            using this shared access signature.
        :param str content_type:
            Response header value for Content-Type when resource is accessed
            using this shared access signature.
        '''
        resource_path = container_name + '/' + blob_name
        sas = _BlobSharedAccessHelper()
        sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
        sas.add_id(policy_id)
        # Signed-resource precedence: directory > version > snapshot > blob.
        resource = 'bs' if snapshot else 'b'
        resource = 'bv' if version_id else resource
        resource = 'd' if kwargs.pop("is_directory", None) else resource
        sas.add_resource(resource)
        sas.add_timestamp(snapshot or version_id)
        sas.add_override_response_headers(cache_control, content_disposition,
                                          content_encoding, content_language,
                                          content_type)
        sas.add_encryption_scope(**kwargs)
        sas.add_info_for_hns_account(**kwargs)
        sas.add_resource_signature(self.account_name, self.account_key, resource_path,
                                   user_delegation_key=self.user_delegation_key)
        return sas.get_token()

    def generate_container(self, container_name, permission=None, expiry=None,
                           start=None, policy_id=None, ip=None, protocol=None,
                           cache_control=None, content_disposition=None,
                           content_encoding=None, content_language=None,
                           content_type=None, **kwargs):
        '''
        Generates a shared access signature for the container.
        Use the returned signature with the sas_token parameter of any BlobService.
        :param str container_name:
            Name of container.
        :param permission:
            The permissions associated with the shared access signature. The
            user is restricted to operations allowed by the permissions.
            Permissions must be ordered racwdxyltfmei.
            Required unless an id is given referencing a stored access policy
            which contains this field. This field must be omitted if it has been
            specified in an associated stored access policy.
        :type permission: str or ContainerSasPermissions
        :param expiry:
            The time at which the shared access signature becomes invalid.
            Required unless an id is given referencing a stored access policy
            which contains this field. This field must be omitted if it has
            been specified in an associated stored access policy. Azure will always
            convert values to UTC. If a date is passed in without timezone info, it
            is assumed to be UTC.
        :type expiry: datetime or str
        :param start:
            The time at which the shared access signature becomes valid. If
            omitted, start time for this call is assumed to be the time when the
            storage service receives the request. Azure will always convert values
            to UTC. If a date is passed in without timezone info, it is assumed to
            be UTC.
        :type start: datetime or str
        :param str policy_id:
            A unique value up to 64 characters in length that correlates to a
            stored access policy. To create a stored access policy, use
            set_blob_service_properties.
        :param str ip:
            Specifies an IP address or a range of IP addresses from which to accept requests.
            If the IP address from which the request originates does not match the IP address
            or address range specified on the SAS token, the request is not authenticated.
            For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
            restricts the request to those IP addresses.
        :param str protocol:
            Specifies the protocol permitted for a request made. The default value
            is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values.
        :param str cache_control:
            Response header value for Cache-Control when resource is accessed
            using this shared access signature.
        :param str content_disposition:
            Response header value for Content-Disposition when resource is accessed
            using this shared access signature.
        :param str content_encoding:
            Response header value for Content-Encoding when resource is accessed
            using this shared access signature.
        :param str content_language:
            Response header value for Content-Language when resource is accessed
            using this shared access signature.
        :param str content_type:
            Response header value for Content-Type when resource is accessed
            using this shared access signature.
        '''
        sas = _BlobSharedAccessHelper()
        sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
        sas.add_id(policy_id)
        # 'c' marks the signed resource as a container.
        sas.add_resource('c')
        sas.add_override_response_headers(cache_control, content_disposition,
                                          content_encoding, content_language,
                                          content_type)
        sas.add_encryption_scope(**kwargs)
        sas.add_info_for_hns_account(**kwargs)
        sas.add_resource_signature(self.account_name, self.account_key, container_name,
                                   user_delegation_key=self.user_delegation_key)
        return sas.get_token()
class _BlobSharedAccessHelper(_SharedAccessHelper):
    """Blob-flavoured SAS helper: builds the string-to-sign and final token."""

    def add_timestamp(self, timestamp):
        # Snapshot/version timestamp; excluded again from the token in get_token().
        self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp)

    def add_info_for_hns_account(self, **kwargs):
        # Optional fields used by hierarchical-namespace (Data Lake) accounts.
        self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None))
        self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None))
        self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None))
        self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None))

    def get_value_to_append(self, query):
        # Missing/empty values still contribute an empty line to the
        # string-to-sign, so always append the trailing newline.
        return_value = self.query_dict.get(query) or ''
        return return_value + '\n'

    def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None):
        # pylint: disable = no-member
        if path[0] != '/':
            path = '/' + path
        canonicalized_resource = '/blob/' + account_name + path + '\n'
        # Form the string to sign from shared_access_policy and canonicalized
        # resource. The order of values is important.
        string_to_sign = \
            (self.get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) +
             self.get_value_to_append(QueryStringConstants.SIGNED_START) +
             self.get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) +
             canonicalized_resource)
        if user_delegation_key is not None:
            # User-delegation SAS: the key's own fields are folded into the
            # string-to-sign instead of the stored-policy identifier.
            self._add_query(QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid)
            self._add_query(QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid)
            self._add_query(QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start)
            self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry)
            self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service)
            self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version)
            string_to_sign += \
                (self.get_value_to_append(QueryStringConstants.SIGNED_OID) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_TID) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_KEY_START) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_KEY_EXPIRY) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_KEY_SERVICE) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_KEY_VERSION) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_AUTHORIZED_OID) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_UNAUTHORIZED_OID) +
                 self.get_value_to_append(QueryStringConstants.SIGNED_CORRELATION_ID))
        else:
            string_to_sign += self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER)
        string_to_sign += \
            (self.get_value_to_append(QueryStringConstants.SIGNED_IP) +
             self.get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) +
             self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) +
             self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) +
             self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) +
             self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) +
             self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) +
             self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) +
             self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) +
             self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) +
             self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE))
        # remove the trailing newline
        if string_to_sign[-1] == '\n':
            string_to_sign = string_to_sign[:-1]
        # Sign with the delegation key when present, otherwise the account key.
        self._add_query(QueryStringConstants.SIGNED_SIGNATURE,
                        sign_string(account_key if user_delegation_key is None else user_delegation_key.value,
                                    string_to_sign))

    def get_token(self):
        # a conscious decision was made to exclude the timestamp in the generated token
        # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp
        exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP]
        return '&'.join(['{0}={1}'.format(n, url_quote(v))
                         for n, v in self.query_dict.items() if v is not None and n not in exclude])
def generate_account_sas(
        account_name,  # type: str
        account_key,  # type: str
        resource_types,  # type: Union[ResourceTypes, str]
        permission,  # type: Union[AccountSasPermissions, str]
        expiry,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):  # type: (...) -> str
    """Generates a shared access signature for the blob service.
    Use the returned signature with the credential parameter of any BlobServiceClient,
    ContainerClient or BlobClient.
    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str account_key:
        The account key, also called shared key or access key, to generate the shared access signature.
    :param resource_types:
        Specifies the resource types that are accessible with the account SAS.
    :type resource_types: str or ~azure.storage.blob.ResourceTypes
    :param permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has been
        specified in an associated stored access policy.
    :type permission: str or ~azure.storage.blob.AccountSasPermissions
    :param expiry:
        The time at which the shared access signature becomes invalid.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has
        been specified in an associated stored access policy. Azure will always
        convert values to UTC. If a date is passed in without timezone info, it
        is assumed to be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the shared access signature becomes valid. If
        omitted, start time for this call is assumed to be the time when the
        storage service receives the request. Azure will always convert values
        to UTC. If a date is passed in without timezone info, it is assumed to
        be UTC.
    :type start: ~datetime.datetime or str
    :param str ip:
        Specifies an IP address or a range of IP addresses from which to accept requests.
        If the IP address from which the request originates does not match the IP address
        or address range specified on the SAS token, the request is not authenticated.
        For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
        restricts the request to those IP addresses.
    :keyword str protocol:
        Specifies the protocol permitted for a request made. The default value is https.
    :keyword str encryption_scope:
        Specifies the encryption scope for a request made so that all write operations will be service encrypted.
    :return: A Shared Access Signature (sas) token.
    :rtype: str
    .. admonition:: Example:
        .. literalinclude:: ../samples/blob_samples_authentication.py
            :start-after: [START create_sas_token]
            :end-before: [END create_sas_token]
            :language: python
            :dedent: 8
            :caption: Generating a shared access signature.
    """
    # Account-level SAS is scoped to the blob service only.
    sas = SharedAccessSignature(account_name, account_key)
    return sas.generate_account(
        services=Services(blob=True),
        resource_types=resource_types,
        permission=permission,
        expiry=expiry,
        start=start,
        ip=ip,
        **kwargs
    )  # type: ignore
def generate_container_sas(
        account_name,  # type: str
        container_name,  # type: str
        account_key=None,  # type: Optional[str]
        user_delegation_key=None,  # type: Optional[UserDelegationKey]
        permission=None,  # type: Optional[Union[ContainerSasPermissions, str]]
        expiry=None,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        policy_id=None,  # type: Optional[str]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
    # type: (...) -> Any
    """Generates a shared access signature for a container.
    Use the returned signature with the credential parameter of any BlobServiceClient,
    ContainerClient or BlobClient.
    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str container_name:
        The name of the container.
    :param str account_key:
        The account key, also called shared key or access key, to generate the shared access signature.
        Either `account_key` or `user_delegation_key` must be specified.
    :param ~azure.storage.blob.UserDelegationKey user_delegation_key:
        Instead of an account shared key, the user could pass in a user delegation key.
        A user delegation key can be obtained from the service by authenticating with an AAD identity;
        this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`.
        When present, the SAS is signed with the user delegation key instead.
    :param permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Permissions must be ordered racwdxyltfmei.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has been
        specified in an associated stored access policy.
    :type permission: str or ~azure.storage.blob.ContainerSasPermissions
    :param expiry:
        The time at which the shared access signature becomes invalid.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has
        been specified in an associated stored access policy. Azure will always
        convert values to UTC. If a date is passed in without timezone info, it
        is assumed to be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the shared access signature becomes valid. If
        omitted, start time for this call is assumed to be the time when the
        storage service receives the request. Azure will always convert values
        to UTC. If a date is passed in without timezone info, it is assumed to
        be UTC.
    :type start: ~datetime.datetime or str
    :param str policy_id:
        A unique value up to 64 characters in length that correlates to a
        stored access policy. To create a stored access policy, use
        :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`.
    :param str ip:
        Specifies an IP address or a range of IP addresses from which to accept requests.
        If the IP address from which the request originates does not match the IP address
        or address range specified on the SAS token, the request is not authenticated.
        For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
        restricts the request to those IP addresses.
    :keyword str protocol:
        Specifies the protocol permitted for a request made. The default value is https.
    :keyword str cache_control:
        Response header value for Cache-Control when resource is accessed
        using this shared access signature.
    :keyword str content_disposition:
        Response header value for Content-Disposition when resource is accessed
        using this shared access signature.
    :keyword str content_encoding:
        Response header value for Content-Encoding when resource is accessed
        using this shared access signature.
    :keyword str content_language:
        Response header value for Content-Language when resource is accessed
        using this shared access signature.
    :keyword str content_type:
        Response header value for Content-Type when resource is accessed
        using this shared access signature.
    :keyword str encryption_scope:
        Specifies the encryption scope for a request made so that all write operations will be service encrypted.
    :return: A Shared Access Signature (sas) token.
    :rtype: str
    .. admonition:: Example:
        .. literalinclude:: ../samples/blob_samples_containers.py
            :start-after: [START generate_sas_token]
            :end-before: [END generate_sas_token]
            :language: python
            :dedent: 12
            :caption: Generating a sas token.
    """
    if not user_delegation_key and not account_key:
        raise ValueError("Either user_delegation_key or account_key must be provided.")
    # Tolerate a UserDelegationKey passed positionally as account_key.
    if isinstance(account_key, UserDelegationKey):
        user_delegation_key = account_key
    if user_delegation_key:
        sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key)
    else:
        sas = BlobSharedAccessSignature(account_name, account_key=account_key)
    return sas.generate_container(
        container_name,
        permission=permission,
        expiry=expiry,
        start=start,
        policy_id=policy_id,
        ip=ip,
        **kwargs
    )
def generate_blob_sas(
        account_name,  # type: str
        container_name,  # type: str
        blob_name,  # type: str
        snapshot=None,  # type: Optional[str]
        account_key=None,  # type: Optional[str]
        user_delegation_key=None,  # type: Optional[UserDelegationKey]
        permission=None,  # type: Optional[Union[BlobSasPermissions, str]]
        expiry=None,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        policy_id=None,  # type: Optional[str]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
    # type: (...) -> Any
    """Generates a shared access signature for a blob.
    Use the returned signature with the credential parameter of any BlobServiceClient,
    ContainerClient or BlobClient.
    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str container_name:
        The name of the container.
    :param str blob_name:
        The name of the blob.
    :param str snapshot:
        An optional blob snapshot ID. Mutually exclusive with ``version_id``.
    :param str account_key:
        The account key, also called shared key or access key, to generate the shared access signature.
        Either `account_key` or `user_delegation_key` must be specified.
    :param ~azure.storage.blob.UserDelegationKey user_delegation_key:
        Instead of an account shared key, the user could pass in a user delegation key.
        A user delegation key can be obtained from the service by authenticating with an AAD identity;
        this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`.
        When present, the SAS is signed with the user delegation key instead.
    :param permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Permissions must be ordered racwdxytmei.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has been
        specified in an associated stored access policy.
    :type permission: str or ~azure.storage.blob.BlobSasPermissions
    :param expiry:
        The time at which the shared access signature becomes invalid.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has
        been specified in an associated stored access policy. Azure will always
        convert values to UTC. If a date is passed in without timezone info, it
        is assumed to be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the shared access signature becomes valid. If
        omitted, start time for this call is assumed to be the time when the
        storage service receives the request. Azure will always convert values
        to UTC. If a date is passed in without timezone info, it is assumed to
        be UTC.
    :type start: ~datetime.datetime or str
    :param str policy_id:
        A unique value up to 64 characters in length that correlates to a
        stored access policy. To create a stored access policy, use
        :func:`~azure.storage.blob.ContainerClient.set_container_access_policy()`.
    :param str ip:
        Specifies an IP address or a range of IP addresses from which to accept requests.
        If the IP address from which the request originates does not match the IP address
        or address range specified on the SAS token, the request is not authenticated.
        For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
        restricts the request to those IP addresses.
    :keyword str version_id:
        An optional blob version ID. This parameter is only for versioning enabled account
        .. versionadded:: 12.4.0
            This keyword argument was introduced in API version '2019-12-12'.
    :keyword str protocol:
        Specifies the protocol permitted for a request made. The default value is https.
    :keyword str cache_control:
        Response header value for Cache-Control when resource is accessed
        using this shared access signature.
    :keyword str content_disposition:
        Response header value for Content-Disposition when resource is accessed
        using this shared access signature.
    :keyword str content_encoding:
        Response header value for Content-Encoding when resource is accessed
        using this shared access signature.
    :keyword str content_language:
        Response header value for Content-Language when resource is accessed
        using this shared access signature.
    :keyword str content_type:
        Response header value for Content-Type when resource is accessed
        using this shared access signature.
    :keyword str encryption_scope:
        Specifies the encryption scope for a request made so that all write operations will be service encrypted.
    :return: A Shared Access Signature (sas) token.
    :rtype: str
    """
    if not user_delegation_key and not account_key:
        raise ValueError("Either user_delegation_key or account_key must be provided.")
    # Tolerate a UserDelegationKey passed positionally as account_key.
    if isinstance(account_key, UserDelegationKey):
        user_delegation_key = account_key
    version_id = kwargs.pop('version_id', None)
    # A SAS can target a snapshot or a version, never both at once.
    if version_id and snapshot:
        raise ValueError("snapshot and version_id cannot be set at the same time.")
    if user_delegation_key:
        sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key)
    else:
        sas = BlobSharedAccessSignature(account_name, account_key=account_key)
    return sas.generate_blob(
        container_name,
        blob_name,
        snapshot=snapshot,
        version_id=version_id,
        permission=permission,
        expiry=expiry,
        start=start,
        policy_id=policy_id,
        ip=ip,
        **kwargs
    )
|
PypiClean
|
/ethereum-tester-0.1.0b2.tar.gz/ethereum-tester-0.1.0b2/eth_tester/backends/pyethereum/v16/serializers.py
|
from __future__ import unicode_literals
import rlp
from eth_tester.utils.encoding import (
zpad,
zpad32,
int_to_32byte_big_endian,
)
def serialize_transaction_receipt(block, transaction, transaction_index, is_pending):
    """Build a JSON-RPC style receipt dictionary for a transaction.

    Block-level fields (``transaction_index``, ``block_number``,
    ``block_hash``) are reported as ``None`` while the transaction is still
    pending.  ``cumulative_gas_used`` is reconstructed from the start gas of
    the block's first transaction plus the gas consumed by this one.
    """
    receipt = block.get_receipt(transaction_index)
    origin_gas = block.transaction_list[0].startgas
    mined = not is_pending
    serialized_logs = [
        serialize_log(block, transaction, transaction_index, log_entry, idx, is_pending)
        for idx, log_entry in enumerate(receipt.logs)
    ]
    return {
        "transaction_hash": transaction.hash,
        "transaction_index": transaction_index if mined else None,
        "block_number": block.number if mined else None,
        "block_hash": block.hash if mined else None,
        "cumulative_gas_used": origin_gas - transaction.startgas + receipt.gas_used,
        "gas_used": receipt.gas_used,
        "contract_address": transaction.creates if transaction.creates is not None else None,
        "logs": serialized_logs,
    }
def serialize_transaction_hash(block, transaction, transaction_index, is_pending):
    """Serializer variant that reduces a transaction to its hash only.

    The unused parameters keep the signature interchangeable with the other
    ``serialize_transaction*`` callables passed to ``serialize_block``.
    """
    return transaction.hash
def serialize_transaction(block, transaction, transaction_index, is_pending):
    """Render a transaction as a JSON-RPC style dictionary.

    Fields tied to a mined block (``block_hash``, ``block_number``,
    ``transaction_index``) are ``None`` while the transaction is pending.
    """
    if is_pending:
        block_hash = block_number = index = None
    else:
        block_hash = block.hash
        block_number = block.number
        index = transaction_index
    return {
        "hash": transaction.hash,
        "nonce": transaction.nonce,
        "block_hash": block_hash,
        "block_number": block_number,
        "transaction_index": index,
        "from": transaction.sender,
        "to": transaction.to,
        "value": transaction.value,
        "gas": transaction.startgas,
        "gas_price": transaction.gasprice,
        "data": transaction.data,
        "v": transaction.v,
        "r": transaction.r,
        "s": transaction.s,
    }
def serialize_log(block, transaction, transaction_index, log, log_index, is_pending):
    """Render one log entry emitted by a transaction.

    Topics are converted to 32-byte big-endian values; block-related fields
    are reported as ``None`` for pending transactions.
    """
    mined = not is_pending
    return {
        "type": "mined" if mined else "pending",
        "log_index": log_index,
        "transaction_index": transaction_index if mined else None,
        "transaction_hash": transaction.hash,
        "block_hash": block.hash if mined else None,
        "block_number": block.number if mined else None,
        "address": log.address,
        "data": log.data,
        "topics": [int_to_32byte_big_endian(topic) for topic in log.topics],
    }
def serialize_block(block, transaction_serialize_fn, is_pending):
    """Render a block as a JSON-RPC style dictionary.

    Per-transaction rendering is delegated to ``transaction_serialize_fn``
    (e.g. hash-only via ``serialize_transaction_hash`` or full dictionaries
    via ``serialize_transaction``).
    """
    serialized_transactions = []
    for index, tx in enumerate(block.transaction_list):
        serialized_transactions.append(
            transaction_serialize_fn(block, tx, index, is_pending)
        )
    return {
        "number": block.number,
        "hash": block.hash,
        "parent_hash": block.prevhash,
        "nonce": zpad(block.nonce, 8),
        "sha3_uncles": block.uncles_hash,
        "logs_bloom": block.bloom,
        "transactions_root": block.tx_list_root,
        "receipts_root": block.receipts_root,
        "state_root": block.state_root,
        "miner": block.coinbase,
        "difficulty": block.difficulty,
        "total_difficulty": block.chain_difficulty(),
        "size": len(rlp.encode(block)),
        "extra_data": zpad32(block.extra_data),
        "gas_limit": block.gas_limit,
        "gas_used": block.gas_used,
        "timestamp": block.timestamp,
        "transactions": serialized_transactions,
        "uncles": block.uncles,
    }
|
PypiClean
|
/valer.jsonapi-1.2.3rc5.zip/valer.jsonapi-1.2.3rc5/src/senaite/jsonapi/dataproviders.py
|
from zope import interface
from zope import component
from plone.dexterity.interfaces import IDexterityContent
from AccessControl import Unauthorized
from Products.CMFCore.interfaces import ISiteRoot
from Products.ZCatalog.interfaces import ICatalogBrain
from Products.ATContentTypes.interfaces import IATContentType
from senaite.jsonapi import api
from senaite.jsonapi import logger
from senaite.jsonapi.interfaces import IInfo
from senaite.jsonapi.interfaces import ICatalog
from senaite.jsonapi.interfaces import IDataManager
_marker = object
class Base(object):
    """Base ``IInfo`` adapter: serializes a wrapped content object into a
    plain (JSON-ready) dictionary.

    Subclasses populate ``self.keys`` (schema field names to extract) and
    ``self.ignore`` (names to skip).  ``self.attributes`` maps output keys to
    attribute/method names that are always included in the result.
    """
    # NOTE: Python 2 / zope.interface style declaration.
    interface.implements(IInfo)

    def __init__(self, context):
        # the wrapped content object (or catalog brain) to serialize
        self.context = context
        # schema field names to extract -- filled in by subclasses
        self.keys = []
        # field/attribute names to skip during extraction
        self.ignore = []
        # Mapped attributes to extract from the object besides the schema keys.
        # These keys are always included
        self.attributes = {
            "id": "getId",
            "uid": "UID",
            "title": "Title",
            "description": "Description",
            "created": "created",
            "modified": "modified",
            "effective": "effective",
            "portal_type": "portal_type",
            "tags": "Subject",
            "author": "Creator",
            # "_x_"-prefixed names resolve to methods defined on this adapter
            # itself rather than on the context (see fallback in to_dict).
            "path": "_x_get_physical_path",
            "parent_path": "_x_get_parent_path",
        }

    def _x_get_physical_path(self):
        """Generate the physical path
        """
        path = self.context.getPhysicalPath()
        return "/".join(path)

    def _x_get_parent_path(self):
        """Generate the parent path
        """
        path = self.context.getPhysicalPath()
        return "/".join(path[:-1])

    def to_dict(self):
        """ extract the data of the content and return it as a dictionary
        """
        # 1. extract the schema fields
        data = self.extract_fields()
        # 2. include custom key-value pairs listed in the mapping dictionary
        for key, attr in self.attributes.iteritems():
            if key in self.ignore:
                continue  # skip ignores
            # fetch the mapped attribute
            value = getattr(self.context, attr, None)
            if value is None:
                # fall back to a method defined on this adapter itself
                value = getattr(self, attr, None)
            # handle function calls
            if callable(value):
                value = value()
            # map the value to the given key from the mapping
            data[key] = api.to_json_value(self.context, key, value)
        return data

    def extract_fields(self):
        """Extract the given fieldnames from the object
        :returns: Schema name/value mapping
        :rtype: dict
        """
        # get the proper data manager for the object
        dm = IDataManager(self.context)
        # filter out ignored fields
        fieldnames = filter(lambda name: name not in self.ignore, self.keys)
        # schema mapping
        out = dict()
        for fieldname in fieldnames:
            try:
                # get the field value with the data manager
                fieldvalue = dm.json_data(fieldname)
            # https://github.com/collective/plone.jsonapi.routes/issues/52
            # -> skip restricted fields
            except Unauthorized:
                logger.debug("Skipping restricted field '%s'" % fieldname)
                continue
            except ValueError:
                logger.debug("Skipping invalid field '%s'" % fieldname)
                continue
            out[fieldname] = api.to_json_value(self.context, fieldname, fieldvalue)
        return out

    def __call__(self):
        # Calling the adapter is a shortcut for ``to_dict()``.
        return self.to_dict()
class ZCDataProvider(Base):
    """Catalog Brain Adapter.

    Serializes the catalog metadata columns of a brain instead of waking up
    the full content object.
    """
    interface.implements(IInfo)
    component.adapts(ICatalogBrain)

    def __init__(self, context):
        super(ZCDataProvider, self).__init__(context)
        catalog_adapter = ICatalog(context)
        # extract the metadata
        self.keys = catalog_adapter.get_schema()
        # ignore some metadata values, which we already mapped
        self.ignore = [
            'CreationDate',
            'Creator',
            'Date',
            'Description',
            'EffectiveDate',
            'ExpirationDate',
            'ModificationDate',
            'Subject',
            'Title',
            'Type',
            'UID',
            'cmf_uid',
            'getIcon',
            'getId',
            'getObjSize',
            'getRemoteUrl',
            'listCreators',
            'meta_type',
        ]

    def _x_get_parent_path(self):
        """Generate the parent path
        """
        # derive the parent from the (already portal-prefixed) physical path
        path = self._x_get_physical_path().split("/")
        return "/".join(path[:-1])

    def _x_get_physical_path(self):
        """Generate the physical path
        """
        # brains only know their catalog path; prefix the portal path when
        # the stored path is relative to the portal root
        path = self.context.getPath()
        portal_path = api.get_path(api.get_portal())
        if portal_path not in path:
            return "{}/{}".format(portal_path, path)
        return path
class DexterityDataProvider(Base):
    """Data Provider for Dexterity based content types.

    Extracts both the content-type schema fields and any behavior-provided
    fields.
    """
    interface.implements(IInfo)
    component.adapts(IDexterityContent)

    def __init__(self, context):
        super(DexterityDataProvider, self).__init__(context)
        # get the behavior and schema fields from the data manager
        schema = api.get_schema(context)
        behaviors = api.get_behaviors(context)
        self.keys = schema.names() + behaviors.keys()
class ATDataProvider(Base):
    """Archetypes Adapter.

    Extracts the Archetypes schema fields of the wrapped content object.
    """
    interface.implements(IInfo)
    component.adapts(IATContentType)

    def __init__(self, context):
        super(ATDataProvider, self).__init__(context)
        # get the schema fields from the data manager
        schema = api.get_schema(context)
        self.keys = schema.keys()
class SiteRootDataProvider(Base):
    """Site Root Adapter.

    The portal root has no regular schema; only virtual keys handled by the
    data manager are extracted.
    """
    interface.implements(IInfo)
    component.adapts(ISiteRoot)

    def __init__(self, context):
        super(SiteRootDataProvider, self).__init__(context)
        # virtual keys, which are handled by the data manager
        self.keys = ["uid", "path"]
|
PypiClean
|
/python_amazon_ad_api-0.4.9-py3-none-any.whl/ad_api/api/sb/product_targeting.py
|
from ad_api.base import Client, sp_endpoint, fill_query_params, ApiResponse
class Targets(Client):
    """
    Use the Amazon Advertising API for Sponsored Brands for campaign, ad group, keyword, negative keyword, drafts, Stores, landing pages, and Brands management operations. For more information about Sponsored Brands, see the Sponsored Brands Support Center. For onboarding information, see the account setup topic.
    """

    @sp_endpoint('/sb/targets/list', method='POST')
    def list_products_targets(self, **kwargs) -> ApiResponse:
        r"""
        Gets a list of product targets associated with the client identifier passed in the authorization header, filtered by specified criteria.
        Request Body
        | '**nextToken**': *string*, {'description': 'Operations that return paginated results include a pagination token in this field. To retrieve the next page of results, call the same operation and specify this token in the request. If the NextToken field is empty, there are no further results.'}
        | '**maxResults**': *number*, {'description': 'The maximum number of results to include in the response.'}
        | '**filters**': *string*, {'Restricts results to targets with the specified filters. Filters are inclusive. Filters are joined using 'and' logic. Specify one type of each filter. Specifying multiples of the same type of filter results in an error.'}
        | '**filterType**': *string*, {'CREATIVE_TYPE': '[ CREATIVE_TYPE ]'}
        | '**filterType**': *string*, {'TARGETING_STATE': '[ archived, paused, pending, enabled ]'}
        | '**filterType**': *string*, {'CAMPAIGN_ID': '[ CAMPAIGN_ID ]'}
        | '**filterType**': *string*, {'AD_GROUP_ID': '[ SBAdGroupId ]'}
        Returns:
            ApiResponse
        """
        # contentType = 'application/vnd.sblisttargetsrequest.v3.0+json'
        # headers = {'Content-Type': contentType}
        # return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs, headers=headers)
        # hotfix: up until now (2022-04-29) it just seems to consume application/json content
        return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)

    @sp_endpoint('/sb/targets', method='PUT')
    def edit_products_targets(self, **kwargs) -> ApiResponse:
        r"""
        Updates one or more targets.
        Request Body
        | '**targetId**': *integer($int64)*, {'description': 'The identifier of the target.'}
        | '**adGroupId**': *integer($int64)*, {'description': 'The identifier of the ad group to which the target is associated.'}
        | '**campaignId**': *integer($int64)*, {'description': 'The identifier of the campaign to which the target is associated.'}
        | '**state**': *string*, {'values': '[ enabled, paused, pending, archived, draft ]'}
        | '**bid**': *number*, {'description': 'The associated bid. Note that this value must be less than the budget associated with the Advertiser account. For more information, see supported features.'}
        Returns:
            ApiResponse
        """
        return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)

    @sp_endpoint('/sb/targets', method='POST')
    def create_products_targets(self, **kwargs) -> ApiResponse:
        r"""
        Create one or more targets.
        Request Body
        | '**adGroupId**': *integer($int64)*, {'description': 'The identifier of the ad group to which the target is associated.'}
        | '**campaignId**': *integer($int64)*, {'description': 'The identifier of the campaign to which the target is associated.'}
        | '**expressions**': *SBExpression*, {'type': 'asinCategorySameAs, asinBrandSameAs, asinPriceLessThan, asinPriceBetween, asinPriceGreaterThan, asinReviewRatingLessThan, asinReviewRatingBetween, asinReviewRatingGreaterThan, asinSameAs', 'values': 'The text of the targeting expression. The - token defines a range. For example, 2-4 defines a range of 2, 3, and 4.'}
        | '**bid**': *number*, {'description': 'The associated bid. Note that this value must be less than the budget associated with the Advertiser account. For more information, see supported features.'}
        Returns:
            ApiResponse
        """
        return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)

    @sp_endpoint('/sb/targets/{}', method='GET')
    def get_products_target(self, targetId, **kwargs) -> ApiResponse:
        r"""
        Gets a target specified by identifier.
        Keyword Args
            path **targetId**:*number* | Required. The identifier of an existing target.
        Returns
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), targetId), params=kwargs)

    @sp_endpoint('/sb/targets/{}', method='DELETE')
    def delete_products_target(self, targetId, **kwargs) -> ApiResponse:
        r"""
        Archives a target specified by identifier. Note that archiving is permanent, and once a target has been archived it can't be made active again.
        Keyword Args
            path **targetId**:*number* | Required. The identifier of an existing target.
        Returns
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), targetId), params=kwargs)
|
PypiClean
|
/domain-paas-sdk-python-1.0.7.tar.gz/domain-paas-sdk-python-1.0.7/src/identity_service/api_client.py
|
from __future__ import absolute_import
import atexit
import datetime
from dateutil.parser import parse
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from identity_service.configuration import Configuration
import identity_service.models
from identity_service import rest
from identity_service.exceptions import ApiValueError, ApiException
class ApiClient(object):
    """Generic API client for OpenAPI client library builds.
    OpenAPI generic API client. This client handles the client-
    server communication, and is invariant across implementations. Specifics of
    the methods and models for each application are generated from the OpenAPI
    templates.
    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    :param configuration: .Configuration object for this client
    :param header_name: a header to pass when making calls to the API.
    :param header_value: a header value to pass when making calls to
        the API.
    :param cookie: a cookie to include in the header when making calls
        to the API
    :param pool_threads: The number of threads to use for async requests
        to the API. More threads means more concurrent API requests.
    """

    # Types that are passed through (de)serialization unchanged.
    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    # Mapping from OpenAPI spec type names to Python types.
    NATIVE_TYPES_MAPPING = {
        'int': int,
        'long': int if six.PY3 else long,  # noqa: F821
        'float': float,
        'str': str,
        'bool': bool,
        'date': datetime.date,
        'datetime': datetime.datetime,
        'object': object,
    }
    # Lazily-created thread pool for async requests (see ``pool`` property).
    _pool = None

    def __init__(self, configuration=None, header_name=None, header_value=None,
                 cookie=None, pool_threads=1):
        if configuration is None:
            configuration = Configuration.get_default_copy()
        self.configuration = configuration
        self.pool_threads = pool_threads
        self.rest_client = rest.RESTClientObject(configuration)
        self.default_headers = {}
        if header_name is not None:
            self.default_headers[header_name] = header_value
        self.cookie = cookie
        # Set default User-Agent.
        self.user_agent = 'OpenAPI-Generator/1.0.0/python'
        self.client_side_validation = configuration.client_side_validation

    def __enter__(self):
        # Context-manager support: ``with ApiClient(...) as client: ...``
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def close(self):
        """Shut down the async thread pool (if one was created)."""
        if self._pool:
            self._pool.close()
            self._pool.join()
            self._pool = None
            if hasattr(atexit, 'unregister'):
                atexit.unregister(self.close)

    @property
    def pool(self):
        """Create thread pool on first request
        avoids instantiating unused threadpool for blocking clients.
        """
        if self._pool is None:
            # make sure threads are joined on interpreter shutdown
            atexit.register(self.close)
            self._pool = ThreadPool(self.pool_threads)
        return self._pool

    @property
    def user_agent(self):
        """User agent for this API client"""
        return self.default_headers['User-Agent']

    @user_agent.setter
    def user_agent(self, value):
        self.default_headers['User-Agent'] = value

    def set_default_header(self, header_name, header_value):
        # Header added to every request issued by this client.
        self.default_headers[header_name] = header_value

    def __call_api(
            self, resource_path, method, path_params=None,
            query_params=None, header_params=None, body=None, post_params=None,
            files=None, response_type=None, auth_settings=None,
            _return_http_data_only=None, collection_formats=None,
            _preload_content=True, _request_timeout=None, _host=None):
        # Core request pipeline: sanitize params, substitute path templates,
        # apply auth, perform the HTTP call and deserialize the response.
        config = self.configuration
        # header parameters
        header_params = header_params or {}
        header_params.update(self.default_headers)
        if self.cookie:
            header_params['Cookie'] = self.cookie
        if header_params:
            header_params = self.sanitize_for_serialization(header_params)
            header_params = dict(self.parameters_to_tuples(header_params,
                                                           collection_formats))
        # path parameters
        if path_params:
            path_params = self.sanitize_for_serialization(path_params)
            path_params = self.parameters_to_tuples(path_params,
                                                    collection_formats)
            for k, v in path_params:
                # specified safe chars, encode everything
                resource_path = resource_path.replace(
                    '{%s}' % k,
                    quote(str(v), safe=config.safe_chars_for_path_param)
                )
        # query parameters
        if query_params:
            query_params = self.sanitize_for_serialization(query_params)
            query_params = self.parameters_to_tuples(query_params,
                                                     collection_formats)
        # post parameters
        if post_params or files:
            post_params = post_params if post_params else []
            post_params = self.sanitize_for_serialization(post_params)
            post_params = self.parameters_to_tuples(post_params,
                                                    collection_formats)
            post_params.extend(self.files_parameters(files))
        # auth setting
        self.update_params_for_auth(header_params, query_params, auth_settings)
        # body
        if body:
            body = self.sanitize_for_serialization(body)
        # request url
        if _host is None:
            url = self.configuration.host + resource_path
        else:
            # use server/host defined in path or operation instead
            url = _host + resource_path
        try:
            # perform request and return response
            response_data = self.request(
                method, url, query_params=query_params, headers=header_params,
                post_params=post_params, body=body,
                _preload_content=_preload_content,
                _request_timeout=_request_timeout)
        except ApiException as e:
            e.body = e.body.decode('utf-8') if six.PY3 else e.body
            raise e
        content_type = response_data.getheader('content-type')
        self.last_response = response_data
        return_data = response_data
        if not _preload_content:
            # caller asked for the raw urllib3 response
            return return_data
        if six.PY3 and response_type not in ["file", "bytes"]:
            # decode the body using the charset advertised by the server,
            # defaulting to utf-8
            match = None
            if content_type is not None:
                match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type)
            encoding = match.group(1) if match else "utf-8"
            response_data.data = response_data.data.decode(encoding)
        # deserialize response data
        if response_type:
            return_data = self.deserialize(response_data, response_type)
        else:
            return_data = None
        if _return_http_data_only:
            return (return_data)
        else:
            return (return_data, response_data.status,
                    response_data.getheaders())

    def sanitize_for_serialization(self, obj):
        """Builds a JSON POST object.
        If obj is None, return None.
        If obj is str, int, long, float, bool, return directly.
        If obj is datetime.datetime, datetime.date
            convert to string in iso8601 format.
        If obj is list, sanitize each element in the list.
        If obj is dict, return the dict.
        If obj is OpenAPI model, return the properties dict.
        :param obj: The data to serialize.
        :return: The serialized form of data.
        """
        if obj is None:
            return None
        elif isinstance(obj, self.PRIMITIVE_TYPES):
            return obj
        elif isinstance(obj, list):
            return [self.sanitize_for_serialization(sub_obj)
                    for sub_obj in obj]
        elif isinstance(obj, tuple):
            return tuple(self.sanitize_for_serialization(sub_obj)
                         for sub_obj in obj)
        elif isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, dict):
            obj_dict = obj
        else:
            # Convert model obj to dict except
            # attributes `openapi_types`, `attribute_map`
            # and attributes which value is not None.
            # Convert attribute name to json key in
            # model definition for request.
            obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
                        for attr, _ in six.iteritems(obj.openapi_types)
                        if getattr(obj, attr) is not None}
        return {key: self.sanitize_for_serialization(val)
                for key, val in six.iteritems(obj_dict)}

    def deserialize(self, response, response_type):
        """Deserializes response into an object.
        :param response: RESTResponse object to be deserialized.
        :param response_type: class literal for
            deserialized object, or string of class name.
        :return: deserialized object.
        """
        # handle file downloading
        # save response body into a tmp file and return the instance
        if response_type == "file":
            return self.__deserialize_file(response)
        # fetch data from response object
        try:
            data = json.loads(response.data)
        except ValueError:
            data = response.data
        # NOTE: model deserialization is deliberately bypassed in this build;
        # the parsed JSON (or raw body) is returned instead of typed models.
        return data
        #return self.__deserialize(data, response_type)

    def __deserialize(self, data, klass):
        """Deserializes dict, list, str into an object.
        :param data: dict, list or str.
        :param klass: class literal, or string of class name.
        :return: object.
        """
        if data is None:
            return None
        if type(klass) == str:
            # 'list[X]' and 'dict(K, V)' type strings recurse element-wise
            if klass.startswith('list['):
                sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
                return [self.__deserialize(sub_data, sub_kls)
                        for sub_data in data]
            if klass.startswith('dict('):
                sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
                return {k: self.__deserialize(v, sub_kls)
                        for k, v in six.iteritems(data)}
            # convert str to class
            if klass in self.NATIVE_TYPES_MAPPING:
                klass = self.NATIVE_TYPES_MAPPING[klass]
            else:
                klass = getattr(identity_service.models, klass)
        if klass in self.PRIMITIVE_TYPES:
            return self.__deserialize_primitive(data, klass)
        elif klass == object:
            return self.__deserialize_object(data)
        elif klass == datetime.date:
            return self.__deserialize_date(data)
        elif klass == datetime.datetime:
            return self.__deserialize_datetime(data)
        else:
            return self.__deserialize_model(data, klass)

    def call_api(self, resource_path, method,
                 path_params=None, query_params=None, header_params=None,
                 body=None, post_params=None, files=None,
                 response_type=None, auth_settings=None, async_req=None,
                 _return_http_data_only=None, collection_formats=None,
                 _preload_content=True, _request_timeout=None, _host=None):
        """Makes the HTTP request (synchronous) and returns deserialized data.
        To make an async_req request, set the async_req parameter.
        :param resource_path: Path to method endpoint.
        :param method: Method to call.
        :param path_params: Path parameters in the url.
        :param query_params: Query parameters in the url.
        :param header_params: Header parameters to be
            placed in the request header.
        :param body: Request body.
        :param post_params dict: Request post form parameters,
            for `application/x-www-form-urlencoded`, `multipart/form-data`.
        :param auth_settings list: Auth Settings names for the request.
        :param response: Response data type.
        :param files dict: key -> filename, value -> filepath,
            for `multipart/form-data`.
        :param async_req bool: execute request asynchronously
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param collection_formats: dict of collection formats for path, query,
            header, and post parameters.
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return:
            If async_req parameter is True,
            the request will be called asynchronously.
            The method will return the request thread.
            If parameter async_req is False or missing,
            then the method will return the response directly.
        """
        if not async_req:
            return self.__call_api(resource_path, method,
                                   path_params, query_params, header_params,
                                   body, post_params, files,
                                   response_type, auth_settings,
                                   _return_http_data_only, collection_formats,
                                   _preload_content, _request_timeout, _host)
        # async path: run the same pipeline on the thread pool
        return self.pool.apply_async(self.__call_api, (resource_path,
                                                       method, path_params,
                                                       query_params,
                                                       header_params, body,
                                                       post_params, files,
                                                       response_type,
                                                       auth_settings,
                                                       _return_http_data_only,
                                                       collection_formats,
                                                       _preload_content,
                                                       _request_timeout,
                                                       _host))

    def request(self, method, url, query_params=None, headers=None,
                post_params=None, body=None, _preload_content=True,
                _request_timeout=None):
        """Makes the HTTP request using RESTClient."""
        if method == "GET":
            return self.rest_client.GET(url,
                                        query_params=query_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        headers=headers)
        elif method == "HEAD":
            return self.rest_client.HEAD(url,
                                         query_params=query_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         headers=headers)
        elif method == "OPTIONS":
            return self.rest_client.OPTIONS(url,
                                            query_params=query_params,
                                            headers=headers,
                                            _preload_content=_preload_content,
                                            _request_timeout=_request_timeout)
        elif method == "POST":
            return self.rest_client.POST(url,
                                         query_params=query_params,
                                         headers=headers,
                                         post_params=post_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         body=body)
        elif method == "PUT":
            return self.rest_client.PUT(url,
                                        query_params=query_params,
                                        headers=headers,
                                        post_params=post_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        body=body)
        elif method == "PATCH":
            return self.rest_client.PATCH(url,
                                          query_params=query_params,
                                          headers=headers,
                                          post_params=post_params,
                                          _preload_content=_preload_content,
                                          _request_timeout=_request_timeout,
                                          body=body)
        elif method == "DELETE":
            return self.rest_client.DELETE(url,
                                           query_params=query_params,
                                           headers=headers,
                                           _preload_content=_preload_content,
                                           _request_timeout=_request_timeout,
                                           body=body)
        else:
            raise ApiValueError(
                "http method must be `GET`, `HEAD`, `OPTIONS`,"
                " `POST`, `PATCH`, `PUT` or `DELETE`."
            )

    def parameters_to_tuples(self, params, collection_formats):
        """Get parameters as list of tuples, formatting collections.
        :param params: Parameters as dict or list of two-tuples
        :param dict collection_formats: Parameter collection formats
        :return: Parameters as list of tuples, collections formatted
        """
        new_params = []
        if collection_formats is None:
            collection_formats = {}
        for k, v in six.iteritems(params) if isinstance(params, dict) else params:  # noqa: E501
            if k in collection_formats:
                collection_format = collection_formats[k]
                if collection_format == 'multi':
                    # repeat the key once per value
                    new_params.extend((k, value) for value in v)
                else:
                    if collection_format == 'ssv':
                        delimiter = ' '
                    elif collection_format == 'tsv':
                        delimiter = '\t'
                    elif collection_format == 'pipes':
                        delimiter = '|'
                    else:  # csv is the default
                        delimiter = ','
                    new_params.append(
                        (k, delimiter.join(str(value) for value in v)))
            else:
                new_params.append((k, v))
        return new_params

    def files_parameters(self, files=None):
        """Builds form parameters.
        :param files: File parameters.
        :return: Form parameters with files.
        """
        params = []
        if files:
            for k, v in six.iteritems(files):
                if not v:
                    continue
                file_names = v if type(v) is list else [v]
                for n in file_names:
                    with open(n, 'rb') as f:
                        filename = os.path.basename(f.name)
                        filedata = f.read()
                        mimetype = (mimetypes.guess_type(filename)[0] or
                                    'application/octet-stream')
                        params.append(
                            tuple([k, tuple([filename, filedata, mimetype])]))
        return params

    def select_header_accept(self, accepts):
        """Returns `Accept` based on an array of accepts provided.
        :param accepts: List of headers.
        :return: Accept (e.g. application/json).
        """
        if not accepts:
            return
        accepts = [x.lower() for x in accepts]
        if 'application/json' in accepts:
            return 'application/json'
        else:
            return ', '.join(accepts)

    def select_header_content_type(self, content_types):
        """Returns `Content-Type` based on an array of content_types provided.
        :param content_types: List of content-types.
        :return: Content-Type (e.g. application/json).
        """
        if not content_types:
            return 'application/json'
        content_types = [x.lower() for x in content_types]
        if 'application/json' in content_types or '*/*' in content_types:
            return 'application/json'
        else:
            return content_types[0]

    def update_params_for_auth(self, headers, querys, auth_settings):
        """Updates header and query params based on authentication setting.
        :param headers: Header parameters dict to be updated.
        :param querys: Query parameters tuple list to be updated.
        :param auth_settings: Authentication setting identifiers list.
        """
        if not auth_settings:
            return
        for auth in auth_settings:
            auth_setting = self.configuration.auth_settings().get(auth)
            if auth_setting:
                if auth_setting['in'] == 'cookie':
                    headers['Cookie'] = auth_setting['value']
                elif auth_setting['in'] == 'header':
                    headers[auth_setting['key']] = auth_setting['value']
                elif auth_setting['in'] == 'query':
                    querys.append((auth_setting['key'], auth_setting['value']))
                else:
                    raise ApiValueError(
                        'Authentication token must be in `query` or `header`'
                    )

    def __deserialize_file(self, response):
        """Deserializes body to file
        Saves response body into a file in a temporary folder,
        using the filename from the `Content-Disposition` header if provided.
        :param response: RESTResponse.
        :return: file path.
        """
        fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
        os.close(fd)
        os.remove(path)
        content_disposition = response.getheader("Content-Disposition")
        if content_disposition:
            # honour the server-provided filename when present
            filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
                                 content_disposition).group(1)
            path = os.path.join(os.path.dirname(path), filename)
        with open(path, "wb") as f:
            f.write(response.data)
        return path

    def __deserialize_primitive(self, data, klass):
        """Deserializes string to primitive type.
        :param data: str.
        :param klass: class literal.
        :return: int, long, float, str, bool.
        """
        try:
            return klass(data)
        except UnicodeEncodeError:
            return six.text_type(data)
        except TypeError:
            return data

    def __deserialize_object(self, value):
        """Return an original value.
        :return: object.
        """
        return value

    def __deserialize_date(self, string):
        """Deserializes string to date.
        :param string: str.
        :return: date.
        """
        try:
            return parse(string).date()
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason="Failed to parse `{0}` as date object".format(string)
            )

    def __deserialize_datetime(self, string):
        """Deserializes string to datetime.
        The string should be in iso8601 datetime format.
        :param string: str.
        :return: datetime.
        """
        try:
            return parse(string)
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason=(
                    "Failed to parse `{0}` as datetime object"
                    .format(string)
                )
            )

    def __deserialize_model(self, data, klass):
        """Deserializes list or dict to model.
        :param data: dict, list.
        :param klass: class literal.
        :return: model object.
        """
        has_discriminator = False
        if (hasattr(klass, 'get_real_child_model')
                and klass.discriminator_value_class_map):
            has_discriminator = True
        if not klass.openapi_types and has_discriminator is False:
            # no declared attributes and no discriminator -> return raw data
            return data
        kwargs = {}
        if (data is not None and
                klass.openapi_types is not None and
                isinstance(data, (list, dict))):
            for attr, attr_type in six.iteritems(klass.openapi_types):
                if klass.attribute_map[attr] in data:
                    value = data[klass.attribute_map[attr]]
                    kwargs[attr] = self.__deserialize(value, attr_type)
        instance = klass(**kwargs)
        if has_discriminator:
            # re-deserialize as the concrete child model when discriminated
            klass_name = instance.get_real_child_model(data)
            if klass_name:
                instance = self.__deserialize(data, klass_name)
        return instance
|
PypiClean
|
/django-static-angular-v1.3.15.tar.gz/django-static-angular-v1.3.15/django_static_angular/static/static_angular/js/i18n/angular-locale_si-lk.js
|
'use strict';
// Angular i18n bundle registering the "si-lk" (Sinhala, Sri Lanka) locale.
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
  // Number of digits after the decimal point in the string form of n.
  var s = n + '';
  var dot = s.indexOf('.');
  return (dot === -1) ? 0 : s.length - dot - 1;
}
function getVF(n, opt_precision) {
  // v: count of visible fraction digits (defaults to min(actual, 3));
  // f: those fraction digits read as an integer value.
  var v = opt_precision;
  if (v === undefined) {
    v = Math.min(getDecimals(n), 3);
  }
  var base = Math.pow(10, v);
  return {v: v, f: ((n * base) | 0) % base};
}
// Locale table consumed by Angular's $locale service: date/time names and
// patterns, number/currency patterns, and the CLDR plural rule for Sinhala.
$provide.value("$locale", {
  "DATETIME_FORMATS": {
    "AMPMS": [
      "\u0db4\u0dd9.\u0dc0.",
      "\u0db4.\u0dc0."
    ],
    "DAY": [
      "\u0d89\u0dbb\u0dd2\u0daf\u0dcf",
      "\u0dc3\u0db3\u0dd4\u0daf\u0dcf",
      "\u0d85\u0d9f\u0dc4\u0dbb\u0dd4\u0dc0\u0dcf\u0daf\u0dcf",
      "\u0db6\u0daf\u0dcf\u0daf\u0dcf",
      "\u0db6\u0dca\u200d\u0dbb\u0dc4\u0dc3\u0dca\u0db4\u0dad\u0dd2\u0db1\u0dca\u0daf\u0dcf",
      "\u0dc3\u0dd2\u0d9a\u0dd4\u0dbb\u0dcf\u0daf\u0dcf",
      "\u0dc3\u0dd9\u0db1\u0dc3\u0dd4\u0dbb\u0dcf\u0daf\u0dcf"
    ],
    "ERANAMES": [
      "\u0d9a\u0dca\u200d\u0dbb\u0dd2\u0dc3\u0dca\u0dad\u0dd4 \u0db4\u0dd6\u0dbb\u0dca\u200d\u0dc0",
      "\u0d9a\u0dca\u200d\u0dbb\u0dd2\u0dc3\u0dca\u0dad\u0dd4 \u0dc0\u0dbb\u0dca\u200d\u0dc2"
    ],
    "ERAS": [
      "\u0d9a\u0dca\u200d\u0dbb\u0dd2.\u0db4\u0dd6.",
      "\u0d9a\u0dca\u200d\u0dbb\u0dd2.\u0dc0."
    ],
    "MONTH": [
      "\u0da2\u0db1\u0dc0\u0dcf\u0dbb\u0dd2",
      "\u0db4\u0dd9\u0db6\u0dbb\u0dc0\u0dcf\u0dbb\u0dd2",
      "\u0db8\u0dcf\u0dbb\u0dca\u0dad\u0dd4",
      "\u0d85\u0db4\u0dca\u200d\u0dbb\u0dda\u0dbd\u0dca",
      "\u0db8\u0dd0\u0dba\u0dd2",
      "\u0da2\u0dd6\u0db1\u0dd2",
      "\u0da2\u0dd6\u0dbd\u0dd2",
      "\u0d85\u0d9c\u0ddd\u0dc3\u0dca\u0dad\u0dd4",
      "\u0dc3\u0dd0\u0db4\u0dca\u0dad\u0dd0\u0db8\u0dca\u0db6\u0dbb\u0dca",
      "\u0d94\u0d9a\u0dca\u0dad\u0ddd\u0db6\u0dbb\u0dca",
      "\u0db1\u0ddc\u0dc0\u0dd0\u0db8\u0dca\u0db6\u0dbb\u0dca",
      "\u0daf\u0dd9\u0dc3\u0dd0\u0db8\u0dca\u0db6\u0dbb\u0dca"
    ],
    "SHORTDAY": [
      "\u0d89\u0dbb\u0dd2\u0daf\u0dcf",
      "\u0dc3\u0db3\u0dd4\u0daf\u0dcf",
      "\u0d85\u0d9f\u0dc4",
      "\u0db6\u0daf\u0dcf\u0daf\u0dcf",
      "\u0db6\u0dca\u200d\u0dbb\u0dc4\u0dc3\u0dca",
      "\u0dc3\u0dd2\u0d9a\u0dd4",
      "\u0dc3\u0dd9\u0db1"
    ],
    "SHORTMONTH": [
      "\u0da2\u0db1",
      "\u0db4\u0dd9\u0db6",
      "\u0db8\u0dcf\u0dbb\u0dca\u0dad\u0dd4",
      "\u0d85\u0db4\u0dca\u200d\u0dbb\u0dda\u0dbd\u0dca",
      "\u0db8\u0dd0\u0dba\u0dd2",
      "\u0da2\u0dd6\u0db1\u0dd2",
      "\u0da2\u0dd6\u0dbd\u0dd2",
      "\u0d85\u0d9c\u0ddd",
      "\u0dc3\u0dd0\u0db4\u0dca",
      "\u0d94\u0d9a\u0dca",
      "\u0db1\u0ddc\u0dc0\u0dd0",
      "\u0daf\u0dd9\u0dc3\u0dd0"
    ],
    "fullDate": "y MMMM d, EEEE",
    "longDate": "y MMMM d",
    "medium": "y MMM d a h.mm.ss",
    "mediumDate": "y MMM d",
    "mediumTime": "a h.mm.ss",
    "short": "y-MM-dd a h.mm",
    "shortDate": "y-MM-dd",
    "shortTime": "a h.mm"
  },
  "NUMBER_FORMATS": {
    "CURRENCY_SYM": "Rs",
    "DECIMAL_SEP": ".",
    "GROUP_SEP": ",",
    // PATTERNS[0] = decimal pattern, PATTERNS[1] = currency pattern.
    "PATTERNS": [
      {
        "gSize": 3,
        "lgSize": 3,
        "maxFrac": 3,
        "minFrac": 0,
        "minInt": 1,
        "negPre": "-",
        "negSuf": "",
        "posPre": "",
        "posSuf": ""
      },
      {
        "gSize": 3,
        "lgSize": 3,
        "maxFrac": 2,
        "minFrac": 2,
        "minInt": 1,
        "negPre": "\u00a4-",
        "negSuf": "",
        "posPre": "\u00a4",
        "posSuf": ""
      }
    ]
  },
  "id": "si-lk",
  // CLDR plural rule for Sinhala: ONE for n in {0, 1} and for 0.1-style
  // values (integer part 0, visible fraction digits equal to 1).
  "pluralCat": function(n, opt_precision) {  var i = n | 0;  var vf = getVF(n, opt_precision);  if ((n == 0 || n == 1) || i == 0 && vf.f == 1) {    return PLURAL_CATEGORY.ONE;  }  return PLURAL_CATEGORY.OTHER;}
});
}]);
|
PypiClean
|
/mypy-boto3-cloudcontrol-1.28.36.tar.gz/mypy-boto3-cloudcontrol-1.28.36/mypy_boto3_cloudcontrol/literals.py
|
import sys
if sys.version_info >= (3, 12):
from typing import Literal
else:
from typing_extensions import Literal
# Literal type aliases for the Cloud Control API type stubs.  These exist so
# static type checkers can verify string arguments passed to the client.

# Public names exported by this module.
__all__ = (
    "HandlerErrorCodeType",
    "ListResourceRequestsPaginatorName",
    "ListResourcesPaginatorName",
    "OperationStatusType",
    "OperationType",
    "ResourceRequestSuccessWaiterName",
    "CloudControlApiServiceName",
    "ServiceName",
    "ResourceServiceName",
    "PaginatorName",
    "WaiterName",
    "RegionName",
)
# Error codes that a resource handler can report for a failed operation.
HandlerErrorCodeType = Literal[
    "AccessDenied",
    "AlreadyExists",
    "GeneralServiceException",
    "InternalFailure",
    "InvalidCredentials",
    "InvalidRequest",
    "NetworkFailure",
    "NotFound",
    "NotStabilized",
    "NotUpdatable",
    "ResourceConflict",
    "ServiceInternalError",
    "ServiceLimitExceeded",
    "ServiceTimeout",
    "Throttling",
]
# Names accepted by client.get_paginator().
ListResourceRequestsPaginatorName = Literal["list_resource_requests"]
ListResourcesPaginatorName = Literal["list_resources"]
# Lifecycle states of a resource operation request.
OperationStatusType = Literal[
    "CANCEL_COMPLETE", "CANCEL_IN_PROGRESS", "FAILED", "IN_PROGRESS", "PENDING", "SUCCESS"
]
# Kinds of mutating operations tracked by resource requests.
OperationType = Literal["CREATE", "DELETE", "UPDATE"]
# Name accepted by client.get_waiter().
ResourceRequestSuccessWaiterName = Literal["resource_request_success"]
# The boto3 service name for this client.
CloudControlApiServiceName = Literal["cloudcontrol"]
# All boto3 low-level client service names known to this stubs release.
ServiceName = Literal[
    "accessanalyzer",
    "account",
    "acm",
    "acm-pca",
    "alexaforbusiness",
    "amp",
    "amplify",
    "amplifybackend",
    "amplifyuibuilder",
    "apigateway",
    "apigatewaymanagementapi",
    "apigatewayv2",
    "appconfig",
    "appconfigdata",
    "appfabric",
    "appflow",
    "appintegrations",
    "application-autoscaling",
    "application-insights",
    "applicationcostprofiler",
    "appmesh",
    "apprunner",
    "appstream",
    "appsync",
    "arc-zonal-shift",
    "athena",
    "auditmanager",
    "autoscaling",
    "autoscaling-plans",
    "backup",
    "backup-gateway",
    "backupstorage",
    "batch",
    "billingconductor",
    "braket",
    "budgets",
    "ce",
    "chime",
    "chime-sdk-identity",
    "chime-sdk-media-pipelines",
    "chime-sdk-meetings",
    "chime-sdk-messaging",
    "chime-sdk-voice",
    "cleanrooms",
    "cloud9",
    "cloudcontrol",
    "clouddirectory",
    "cloudformation",
    "cloudfront",
    "cloudhsm",
    "cloudhsmv2",
    "cloudsearch",
    "cloudsearchdomain",
    "cloudtrail",
    "cloudtrail-data",
    "cloudwatch",
    "codeartifact",
    "codebuild",
    "codecatalyst",
    "codecommit",
    "codedeploy",
    "codeguru-reviewer",
    "codeguru-security",
    "codeguruprofiler",
    "codepipeline",
    "codestar",
    "codestar-connections",
    "codestar-notifications",
    "cognito-identity",
    "cognito-idp",
    "cognito-sync",
    "comprehend",
    "comprehendmedical",
    "compute-optimizer",
    "config",
    "connect",
    "connect-contact-lens",
    "connectcampaigns",
    "connectcases",
    "connectparticipant",
    "controltower",
    "cur",
    "customer-profiles",
    "databrew",
    "dataexchange",
    "datapipeline",
    "datasync",
    "dax",
    "detective",
    "devicefarm",
    "devops-guru",
    "directconnect",
    "discovery",
    "dlm",
    "dms",
    "docdb",
    "docdb-elastic",
    "drs",
    "ds",
    "dynamodb",
    "dynamodbstreams",
    "ebs",
    "ec2",
    "ec2-instance-connect",
    "ecr",
    "ecr-public",
    "ecs",
    "efs",
    "eks",
    "elastic-inference",
    "elasticache",
    "elasticbeanstalk",
    "elastictranscoder",
    "elb",
    "elbv2",
    "emr",
    "emr-containers",
    "emr-serverless",
    "entityresolution",
    "es",
    "events",
    "evidently",
    "finspace",
    "finspace-data",
    "firehose",
    "fis",
    "fms",
    "forecast",
    "forecastquery",
    "frauddetector",
    "fsx",
    "gamelift",
    "gamesparks",
    "glacier",
    "globalaccelerator",
    "glue",
    "grafana",
    "greengrass",
    "greengrassv2",
    "groundstation",
    "guardduty",
    "health",
    "healthlake",
    "honeycode",
    "iam",
    "identitystore",
    "imagebuilder",
    "importexport",
    "inspector",
    "inspector2",
    "internetmonitor",
    "iot",
    "iot-data",
    "iot-jobs-data",
    "iot-roborunner",
    "iot1click-devices",
    "iot1click-projects",
    "iotanalytics",
    "iotdeviceadvisor",
    "iotevents",
    "iotevents-data",
    "iotfleethub",
    "iotfleetwise",
    "iotsecuretunneling",
    "iotsitewise",
    "iotthingsgraph",
    "iottwinmaker",
    "iotwireless",
    "ivs",
    "ivs-realtime",
    "ivschat",
    "kafka",
    "kafkaconnect",
    "kendra",
    "kendra-ranking",
    "keyspaces",
    "kinesis",
    "kinesis-video-archived-media",
    "kinesis-video-media",
    "kinesis-video-signaling",
    "kinesis-video-webrtc-storage",
    "kinesisanalytics",
    "kinesisanalyticsv2",
    "kinesisvideo",
    "kms",
    "lakeformation",
    "lambda",
    "lex-models",
    "lex-runtime",
    "lexv2-models",
    "lexv2-runtime",
    "license-manager",
    "license-manager-linux-subscriptions",
    "license-manager-user-subscriptions",
    "lightsail",
    "location",
    "logs",
    "lookoutequipment",
    "lookoutmetrics",
    "lookoutvision",
    "m2",
    "machinelearning",
    "macie",
    "macie2",
    "managedblockchain",
    "managedblockchain-query",
    "marketplace-catalog",
    "marketplace-entitlement",
    "marketplacecommerceanalytics",
    "mediaconnect",
    "mediaconvert",
    "medialive",
    "mediapackage",
    "mediapackage-vod",
    "mediapackagev2",
    "mediastore",
    "mediastore-data",
    "mediatailor",
    "medical-imaging",
    "memorydb",
    "meteringmarketplace",
    "mgh",
    "mgn",
    "migration-hub-refactor-spaces",
    "migrationhub-config",
    "migrationhuborchestrator",
    "migrationhubstrategy",
    "mobile",
    "mq",
    "mturk",
    "mwaa",
    "neptune",
    "network-firewall",
    "networkmanager",
    "nimble",
    "oam",
    "omics",
    "opensearch",
    "opensearchserverless",
    "opsworks",
    "opsworkscm",
    "organizations",
    "osis",
    "outposts",
    "panorama",
    "payment-cryptography",
    "payment-cryptography-data",
    "personalize",
    "personalize-events",
    "personalize-runtime",
    "pi",
    "pinpoint",
    "pinpoint-email",
    "pinpoint-sms-voice",
    "pinpoint-sms-voice-v2",
    "pipes",
    "polly",
    "pricing",
    "privatenetworks",
    "proton",
    "qldb",
    "qldb-session",
    "quicksight",
    "ram",
    "rbin",
    "rds",
    "rds-data",
    "redshift",
    "redshift-data",
    "redshift-serverless",
    "rekognition",
    "resiliencehub",
    "resource-explorer-2",
    "resource-groups",
    "resourcegroupstaggingapi",
    "robomaker",
    "rolesanywhere",
    "route53",
    "route53-recovery-cluster",
    "route53-recovery-control-config",
    "route53-recovery-readiness",
    "route53domains",
    "route53resolver",
    "rum",
    "s3",
    "s3control",
    "s3outposts",
    "sagemaker",
    "sagemaker-a2i-runtime",
    "sagemaker-edge",
    "sagemaker-featurestore-runtime",
    "sagemaker-geospatial",
    "sagemaker-metrics",
    "sagemaker-runtime",
    "savingsplans",
    "scheduler",
    "schemas",
    "sdb",
    "secretsmanager",
    "securityhub",
    "securitylake",
    "serverlessrepo",
    "service-quotas",
    "servicecatalog",
    "servicecatalog-appregistry",
    "servicediscovery",
    "ses",
    "sesv2",
    "shield",
    "signer",
    "simspaceweaver",
    "sms",
    "sms-voice",
    "snow-device-management",
    "snowball",
    "sns",
    "sqs",
    "ssm",
    "ssm-contacts",
    "ssm-incidents",
    "ssm-sap",
    "sso",
    "sso-admin",
    "sso-oidc",
    "stepfunctions",
    "storagegateway",
    "sts",
    "support",
    "support-app",
    "swf",
    "synthetics",
    "textract",
    "timestream-query",
    "timestream-write",
    "tnb",
    "transcribe",
    "transfer",
    "translate",
    "verifiedpermissions",
    "voice-id",
    "vpc-lattice",
    "waf",
    "waf-regional",
    "wafv2",
    "wellarchitected",
    "wisdom",
    "workdocs",
    "worklink",
    "workmail",
    "workmailmessageflow",
    "workspaces",
    "workspaces-web",
    "xray",
]
# Service names that also expose a boto3 high-level resource interface.
ResourceServiceName = Literal[
    "cloudformation",
    "cloudwatch",
    "dynamodb",
    "ec2",
    "glacier",
    "iam",
    "opsworks",
    "s3",
    "sns",
    "sqs",
]
# Union of all paginator / waiter names for this service.
PaginatorName = Literal["list_resource_requests", "list_resources"]
WaiterName = Literal["resource_request_success"]
# AWS region identifiers valid for this service at stub-generation time.
RegionName = Literal[
    "af-south-1",
    "ap-east-1",
    "ap-northeast-1",
    "ap-northeast-2",
    "ap-northeast-3",
    "ap-south-1",
    "ap-south-2",
    "ap-southeast-1",
    "ap-southeast-2",
    "ap-southeast-3",
    "ap-southeast-4",
    "ca-central-1",
    "eu-central-1",
    "eu-central-2",
    "eu-north-1",
    "eu-south-1",
    "eu-south-2",
    "eu-west-1",
    "eu-west-2",
    "eu-west-3",
    "il-central-1",
    "me-central-1",
    "me-south-1",
    "sa-east-1",
    "us-east-1",
    "us-east-2",
    "us-west-1",
    "us-west-2",
]
|
PypiClean
|
/pybnfalabuda-1.2.0-py3-none-any.whl/pybnf/pybnf.py
|
from .parse import load_config
from .config import init_logging
from .printing import print0, print1, print2, PybnfError
from .cluster import Cluster
from .pset import Trajectory
import pybnf.algorithms as algs
import pybnf.printing as printing
from subprocess import run
from numpy import inf
import logging
import argparse
import os
import shutil
import time
import traceback
import pickle
__version__ = "1.2.0"  # PyBNF release version, reported at startup and in logs
def main():
    """The main function for running a fitting job.

    Parses command-line arguments, loads the configuration, builds (or
    resumes) the selected fitting algorithm, runs it on a Dask cluster,
    then optionally refines the best fit and performs bootstrapping.
    Always terminates the interpreter: exit status 0 on success, 1 on
    failure.
    """
    start_time = time.time()
    success = False
    alg = None
    cluster = None
    # ---- Command-line interface ----
    parser = argparse.ArgumentParser(description='Performs parameter fitting on systems biology models defined in '
                                                 'BNGL or SBML. For documentation, examples, and source code, go to '
                                                 'https://github.com/lanl/PyBNF')
    parser.add_argument('-c', action='store', dest='conf_file',
                        help='Path to the BioNetFit configuration file', metavar='config.conf')
    parser.add_argument('-o', '--overwrite', action='store_true',
                        help='automatically overwrites existing folders if necessary')
    parser.add_argument('-t', '--cluster_type', action='store',
                        help='optional string denoting the type of cluster')
    parser.add_argument('-s', '--scheduler_file', action='store',
                        help='optional file on shared filesystem to get scheduler location, should be same as passed to dask-scheduler and dask-worker.')
    parser.add_argument('-r', '--resume', action='store', nargs='?', const=0, default=None, type=int,
                        metavar='iterations',
                        help='automatically resume the previously stopped fitting run; '
                             'if a number is passed with this flag, add that many iterations to the fitting run.')
    parser.add_argument('-d', '--debug_logging', action='store_true',
                        help='outputs a separate debugging log (file could be very large)')
    # NOTE(review): help text below is missing its closing parenthesis.
    parser.add_argument('-l', '--log_prefix', action='store',
                        help='specifies a custom prefix for the log files (will overwrite if files already exist')
    parser.add_argument('-L', '--log_level', type=str.lower, default='i',
                        choices=['debug', 'info', 'warning', 'error', 'critical', 'none', 'd', 'i', 'w', 'e', 'c', 'n'],
                        help='set the level of output to the log file. Options in decreasing order of verbosity are: '
                             'debug, info, warning, error, critical, none.')
    cmdline_args = parser.parse_args()
    # ---- Logging setup: custom or timestamped prefix ----
    if cmdline_args.log_prefix:
        log_prefix = cmdline_args.log_prefix
    else:
        log_prefix = 'bnf_%s' % time.strftime('%Y%m%d-%H%M%S')
    debug = cmdline_args.debug_logging
    # Overwrite log file if it exists
    if os.path.isfile('%s_debug.log' % log_prefix):
        os.remove('%s_debug.log' % log_prefix)
    if os.path.isfile('%s.log' % log_prefix):
        os.remove('%s.log' % log_prefix)
    init_logging(log_prefix, debug, cmdline_args.log_level)
    logger = logging.getLogger(__name__)
    print0("PyBNF v%s" % __version__)
    logger.info('Running PyBNF v%s' % __version__)
    try:
        # Load the conf file and create the algorithm
        if cmdline_args.conf_file is None:
            # NOTE(review): the doubled single quote below is SQL-style
            # escaping; in Python it concatenates to "won t" without the
            # apostrophe, so this prints "wont do anything".
            print0('No configuration file given, so I won''t do anything.\nFor more information, try pybnf --help')
            exit(0)
        logger.info('Loading configuration file: %s' % cmdline_args.conf_file)
        config = load_config(cmdline_args.conf_file)
        if 'verbosity' in config.config:
            printing.verbosity = config.config['verbosity']
        if cmdline_args.resume is not None and cmdline_args.overwrite:
            raise PybnfError("Options --overwrite and --resume are contradictory. Use --resume to continue a previous "
                             "run, or --overwrite to overwrite the previous run with a new one.")
        # ---- Decide whether to resume a saved run (pickled algorithm) ----
        continue_file = None
        if cmdline_args.resume is not None:
            if os.path.exists(config.config['output_dir'] + '/alg_backup.bp'):
                continue_file = config.config['output_dir'] + '/alg_backup.bp'
            elif os.path.exists(config.config['output_dir'] + '/alg_finished.bp'):
                if cmdline_args.resume <= 0:
                    raise PybnfError('The fitting run saved in %s already finished. If you want to continue the '
                                     'fitting with more iterations, pass a number of iterations with the '
                                     '--resume flag.' % config.config['output_dir'])
                continue_file = config.config['output_dir'] + '/alg_finished.bp'
            else:
                raise PybnfError('No algorithm found to resume in %s' % (config.config['output_dir']))
        elif os.path.exists(config.config['output_dir'] + '/alg_backup.bp') and not cmdline_args.overwrite:
            # A backup exists but neither --resume nor --overwrite was given:
            # ask interactively.
            ans = 'x'
            while ans.lower() not in ['y', 'yes', 'n', 'no', '']:
                ans = input('Your output_dir contains an in-progress run.\nContinue that run? [y/n] (y) ')
            if ans.lower() in ('y', 'yes', ''):
                logger.info('Resuming a previous run')
                continue_file = config.config['output_dir'] + '/alg_backup.bp'
                cmdline_args.resume = 0
        if continue_file:
            # Restart the loaded algorithm
            logger.info('Reloading algorithm')
            f = open(continue_file, 'rb')
            alg, pending = pickle.load(f)
            logger.debug('Loaded algorithm is the %s algorithm' % ('refinement' if alg.refine else 'configured'))
            config = alg.config
            logger.debug('Checking for Simulations directory')
            if not os.path.exists(alg.sim_dir):
                os.makedirs(alg.sim_dir)
            if alg.bootstrap_number is not None:
                print0('Resuming a bootstrapping run')
                logger.info('Resuming a bootstrapping run')
                # NOTE(review): `resume` is always non-None on this path (set
                # above); the `is not None` test is redundant and, if it could
                # be None, would need to come before the `> 0` comparison.
                if cmdline_args.resume > 0 and cmdline_args.resume is not None:
                    raise PybnfError("Cannot increase the number of iterations in a boostrapping run")
            else:
                print0('Resuming a fitting run')
                alg.add_iterations(cmdline_args.resume)
            f.close()
            if isinstance(alg, algs.SimplexAlgorithm):
                # The continuing alg is already on the Simplex stage, so don't restart simplex after completion
                alg.config.config['refine'] = 0
        else:
            # Create output folders, checking for overwrites.
            subdirs = ('Simulations', 'Results', 'Initialize', 'FailedSimLogs')
            subfiles = ('alg_backup.bp', 'alg_finished.bp', 'alg_refine_finished.bp')
            will_overwrite = [subdir for subdir in subdirs + subfiles
                              if os.path.exists(config.config['output_dir'] + '/' + subdir)]
            if config.config['simulation_dir']:
                simdir = config.config['simulation_dir'] + '/Simulations'
                if os.path.exists(simdir):
                    will_overwrite.append(simdir)
            if len(will_overwrite) > 0:
                if not cmdline_args.overwrite:
                    logger.info("Output directory already exists... querying user for overwrite permission")
                    ans = 'x'
                    while ans.lower() not in ['y', 'yes', 'n', 'no', '']:
                        print0('Your output directory contains files from a previous run: %s.' % ', '.join(will_overwrite))
                        ans = input(
                            'Overwrite them with the current run? [y/n] (n) ')
                    if not(ans.lower() == 'y' or ans.lower() == 'yes'):
                        logger.info("Overwrite rejected... exiting")
                        print0('Quitting')
                        exit(0)
                # If we get here, safe to overwrite files
                for subdir in subdirs:
                    try:
                        shutil.rmtree(config.config['output_dir'] + '/' + subdir)
                        logger.info('Deleted old directory %s' % config.config['output_dir'] + '/' + subdir)
                    except OSError:
                        logger.debug('Directory %s does not already exist' % config.config['output_dir'] + '/' + subdir)
                for subfile in subfiles:
                    try:
                        os.remove(config.config['output_dir'] + '/' + subfile)
                        logger.info('Deleted old file %s' % config.config['output_dir'] + '/' + subfile)
                    except OSError:
                        logger.debug('File %s does not already exist' % config.config['output_dir'] + '/' + subfile)
                if config.config['simulation_dir']:
                    try:
                        shutil.rmtree(config.config['simulation_dir']+'/Simulations')
                        logger.info('Deleted old simulation directory %s' %
                                    config.config['simulation_dir']+'/Simulations')
                    except OSError:
                        logger.debug('Simulation directory %s does not already exist' %
                                     config.config['simulation_dir']+'/Simulations')
            # Create new directories for the current run.
            os.makedirs(config.config['output_dir'] + '/Results')
            if config.config['simulation_dir']:
                os.makedirs(config.config['simulation_dir'] + '/Simulations')
            else:
                os.mkdir(config.config['output_dir'] + '/Simulations')
            shutil.copy(cmdline_args.conf_file, config.config['output_dir'] + '/Results')
            pending = None
            # ---- Instantiate the algorithm selected by fit_type ----
            if config.config['fit_type'] == 'pso':
                alg = algs.ParticleSwarm(config)
            elif config.config['fit_type'] == 'de':
                alg = algs.DifferentialEvolution(config)
            elif config.config['fit_type'] == 'ss':
                alg = algs.ScatterSearch(config)
            elif config.config['fit_type'] == 'mh' or config.config['fit_type'] == 'pt':
                # Note: mh vs pt difference is handled in Config by setting or not setting the exchange_every key.
                alg = algs.BasicBayesMCMCAlgorithm(config)
            elif config.config['fit_type'] == 'am':
                alg = algs.Adaptive_MCMC(config)
            elif config.config['fit_type'] == 'sa':
                alg = algs.BasicBayesMCMCAlgorithm(config, sa=True)
            elif config.config['fit_type'] == 'sim':
                alg = algs.SimplexAlgorithm(config)
            elif config.config['fit_type'] == 'ade':
                alg = algs.AsynchronousDifferentialEvolution(config)
            elif config.config['fit_type'] == 'dream':
                alg = algs.DreamAlgorithm(config)
            elif config.config['fit_type'] == 'check':
                alg = algs.ModelCheck(config)
            else:
                raise PybnfError('Invalid fit_type %s. Options are: pso, de, ade, ss, mh, pt, sa, sim, am check' % config.config['fit_type'])
        # Override configuration values if provided on command line
        if cmdline_args.cluster_type:
            config.config['cluster_type'] = cmdline_args.cluster_type
        if cmdline_args.scheduler_file:
            config.config['scheduler_file'] = cmdline_args.scheduler_file
        if config.config['fit_type'] != 'check':
            # Set up cluster
            cluster = Cluster(config, log_prefix, debug, cmdline_args.log_level)
            # Run the algorithm!
            logger.debug('Algorithm initialization')
            alg.run(cluster.client, resume=pending, debug=debug)
        else:
            # Run model checking
            logger.debug('Model checking initialization')
            alg.run_check(debug=debug)
        # ---- Optional Simplex refinement of the best fit ----
        if config.config['refine'] == 1:
            logger.debug('Refinement requested for best fit parameter set')
            if config.config['fit_type'] == 'sim':
                logger.debug('Cannot refine further if Simplex algorithm was used for original fit')
                print1("You specified refine=1, but refine uses the Simplex algorithm, which you already just ran."
                       "\nSkipping refine.")
            else:
                logger.debug('Refining further using the Simplex algorithm')
                print1("Refining the best fit by the Simplex algorithm")
                config.config['simplex_start_point'] = alg.trajectory.best_fit()
                simplex = algs.SimplexAlgorithm(config, refine=True)
                simplex.trajectory = alg.trajectory  # Reuse existing trajectory; don't start a new one.
                simplex.run(cluster.client, debug=debug)
        if alg.bootstrap_number is None:
            print0('Fitting complete')
        # Bootstrapping (optional)
        if config.config['bootstrap'] > 0:
            # Bootstrapping setup
            if config.config['bootstrap_max_obj']:
                bootstrap_max_obj = config.config['bootstrap_max_obj']
            else:
                bootstrap_max_obj = inf
                logger.info('No bootstrap_max_obj specified; set to infinity')
                print1('No bootstrap_max_obj specified. All bootstrap replicates will be accepted regardless of '
                       'objective value.')
            num_to_bootstrap = config.config['bootstrap']
            completed_bootstrap_runs = 0
            if alg.bootstrap_number is None:
                bootstrapped_psets = Trajectory(num_to_bootstrap)
            else:  # Check if finished a resumed bootstrap fitting run
                completed_bootstrap_runs += alg.bootstrap_number
                if completed_bootstrap_runs == 0:
                    bootstrapped_psets = Trajectory(num_to_bootstrap)
                else:
                    if completed_bootstrap_runs > 0:
                        # Reload replicates already accepted by the resumed run.
                        bootstrapped_psets = Trajectory.load_trajectory(config.config['output_dir'] +
                                                                        '/Results/bootstrapped_parameter_sets.txt',
                                                                        config.variables,
                                                                        config.config['num_to_output'])
                    if alg.best_fit_obj <= bootstrap_max_obj:
                        logger.info('Bootstrap run %s complete' % completed_bootstrap_runs)
                        bootstrapped_psets.add(alg.trajectory.best_fit(), alg.best_fit_obj,
                                               'bootstrap_run_%s' % completed_bootstrap_runs,
                                               config.config['output_dir'] + '/Results/bootstrapped_parameter_sets.txt',
                                               completed_bootstrap_runs == 0)
                        logger.info('Succesfully completed resumed bootstrapping run %s' % completed_bootstrap_runs)
                        completed_bootstrap_runs += 1
                    else:
                        shutil.rmtree(alg.res_dir)
                        if os.path.exists(alg.sim_dir):
                            shutil.rmtree(alg.sim_dir)
                        print0("Bootstrap run did not achieve maximum allowable objective function value. Retrying")
                        logger.info('Resumed bootstrapping run %s did not achieve maximum allowable objective function '
                                    'value. Retrying' % completed_bootstrap_runs)
            # Run bootstrapping
            consec_failed_bootstrap_runs = 0
            while completed_bootstrap_runs < num_to_bootstrap:
                # Resample experimental weights and re-run the whole fit.
                alg.reset(bootstrap=completed_bootstrap_runs)
                for model in alg.exp_data:
                    for name, data in alg.exp_data[model].items():
                        data.gen_bootstrap_weights()
                        data.weights_to_file('%s/%s_weights_%s.txt' % (alg.res_dir, name, completed_bootstrap_runs))
                logger.info('Beginning bootstrap run %s' % completed_bootstrap_runs)
                print0("Beginning bootstrap run %s" % completed_bootstrap_runs)
                alg.run(cluster.client, debug=debug)
                if config.config['refine'] == 1:
                    logger.debug('Refinement requested for best fit parameter set')
                    if config.config['fit_type'] == 'sim':
                        logger.debug('Cannot refine further if Simplex algorithm was used for original fit')
                        print1("You specified refine=1, but refine uses the Simplex algorithm, which you already just ran."
                               "\nSkipping refine.")
                    else:
                        logger.debug('Refining further using the Simplex algorithm')
                        print1("Refining the best fit by the Simplex algorithm")
                        config.config['simplex_start_point'] = alg.trajectory.best_fit()
                        simplex = algs.SimplexAlgorithm(config, refine=True)
                        simplex.trajectory = alg.trajectory  # Reuse existing trajectory; don't start a new one.
                        simplex.run(cluster.client, debug=debug)
                best_fit_pset = alg.trajectory.best_fit()
                if alg.best_fit_obj <= bootstrap_max_obj:
                    logger.info('Bootstrap run %s complete' % completed_bootstrap_runs)
                    bootstrapped_psets.add(best_fit_pset, alg.best_fit_obj, 'bootstrap_run_%s' % completed_bootstrap_runs,
                                           config.config['output_dir'] + '/Results/bootstrapped_parameter_sets.txt',
                                           completed_bootstrap_runs == 0)
                    completed_bootstrap_runs += 1
                    consec_failed_bootstrap_runs = 0
                else:
                    consec_failed_bootstrap_runs += 1
                    print0("Bootstrap run did not achieve maximum allowable objective function value. Retrying")
                    logger.warning("Bootstrap run did not achieve maximum allowable objective function value.")
                    if consec_failed_bootstrap_runs > 20:  # Arbitrary... should we make this configurable or smaller?
                        raise PybnfError("20 consecutive bootstrap runs failed to achieve maximum allowable objective "
                                         "function values. Check 'bootstrap_max_obj' configuration key")
            # bootstrapped_psets.write_to_file(config.config['output_dir'] + "/Results/bootstrapped_parameter_sets.txt")
            print0('Bootstrapping complete')
        success = True
    except PybnfError as e:
        # Exceptions generated by problems such as bad user input should be caught here and print a useful message
        # before quitting
        logger.error('Terminating due to a PybnfError:')
        logger.error(e.log_message)
        print0('Error: %s' % e.message)
    except KeyboardInterrupt:
        print0('Fitting aborted.')
        logger.info('Terminating due to keyboard interrupt')
        logger.exception('Keyboard interrupt')
    except Exception:
        # Sends any unhandled errors to log instead of to user output
        logger.exception('Internal error')
        exceptiondata = traceback.format_exc().splitlines()
        print0('Sorry, an unknown error occurred: %s\n'
               'Logs have been saved to %s.log.\n'
               'Please report this bug to help us improve PyBNF.' % (exceptiondata[-1], log_prefix))
    finally:
        # Stop dask-ssh regardless of success
        if cluster:
            try:
                cluster.teardown()
                if not cluster.local:
                    time.sleep(10)  # wait for teardown before continuing
            except Exception:
                logging.exception('Failed to tear down cluster')
        else:
            logging.info('No cluster to tear down')
        # Attempt to remove dask-worker-space directory if necessary
        # (exists in directory where workers were instantiated)
        # Tries current and home directories
        if os.path.isdir('dask-worker-space'):
            if os.name == 'nt':  # Windows
                shutil.rmtree('dask-worker-space', ignore_errors=True)
            else:
                run(['rm', '-rf', 'dask-worker-space'])  # More likely to succeed than rmtree()
        home_dask_dir = os.path.expanduser(os.path.join('~', 'dask-worker-space'))
        if os.path.isdir(home_dask_dir):
            if os.name == 'nt':  # Windows
                shutil.rmtree(home_dask_dir, ignore_errors=True)
            else:
                run(['rm', '-rf', home_dask_dir])
        # After any error, try to clean up.
        try:
            if not success:
                logger.info('Fitting unsuccessful. Attempting cleanup')
                if alg:
                    alg.cleanup()
                    logger.info('Completed cleanup after exception')
        except:
            logger.exception('During cleanup, another exception occurred')
        finally:
            end_time = time.time()
            secs = end_time - start_time
            mins, secs = divmod(secs, 60)
            hrs, mins = divmod(mins, 60)
            print2('Total fitting time: %d:%02d:%02d' % (hrs, mins, secs))
            logger.info('Total fitting time: %d:%02d:%02d' % (hrs, mins, secs))
            exit(0 if success else 1)
|
PypiClean
|
/django_nested_inline_bug_fixed-0.4.4-py3-none-any.whl/nested_inline/admin.py
|
from django import VERSION, forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import helpers
from django.contrib.admin.options import InlineModelAdmin, reverse
from django.contrib.admin.utils import unquote
from django.core.exceptions import FieldDoesNotExist
from django.core.exceptions import PermissionDenied
from django.db import models, transaction
from django.forms.formsets import all_valid
from django.http import Http404
from django.templatetags.static import static
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_protect
# Reusable method decorator applying Django's CSRF protection to admin views.
csrf_protect_m = method_decorator(csrf_protect)
class InlineInstancesMixin():
    """Mixin providing permission-aware instantiation of inline admins.

    Mirrors Django's ``ModelAdmin.get_inline_instances()``, handling the
    Django 2.1 signature change that added the ``obj`` argument to
    ``InlineModelAdmin.has_add_permission()``.
    """

    def get_inline_instances(self, request, obj=None):
        """Build one inline admin instance per class in ``self.inlines``.

        When a request is given, inlines the user may neither add, change,
        nor delete are omitted, and inlines the user may not add get
        ``max_num = 0`` (no extra blank forms).
        """
        instances = []
        for inline_class in self.inlines:
            inline = inline_class(self.model, self.admin_site)
            if request:
                # has_add_permission() took only ``request`` before Django 2.1.
                if VERSION < (2, 1, 0):
                    add_args = (request,)
                else:
                    add_args = (request, obj)
                if not (inline.has_add_permission(*add_args) or
                        inline.has_change_permission(request, obj) or
                        inline.has_delete_permission(request, obj)):
                    continue
                if not inline.has_add_permission(*add_args):
                    inline.max_num = 0
            instances.append(inline)
        return instances
class NestedModelAdmin(InlineInstancesMixin, admin.ModelAdmin):
    class Media:
        # Static assets injected into the admin change form to render and
        # manage nested inline formsets; the minified JS is served whenever
        # settings.DEBUG is off.
        css = {
            "all": ('admin/css/forms-nested.css',)
        }
        js = ('admin/js/inlines-nested%s.js' % ('' if settings.DEBUG else '.min'),)
def save_formset(self, request, form, formset, change):
"""
Given an inline formset save it to the database.
"""
formset.save()
for form in formset.forms:
if hasattr(form, 'nested_formsets') and form not in formset.deleted_forms:
for nested_formset in form.nested_formsets:
self.save_formset(request, form, nested_formset, change)
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
    def add_nested_inline_formsets(self, request, inline, formset, depth=0):
        """Attach child inline formsets to every form of ``formset``.

        For each form, builds one formset per child inline of ``inline``
        (bound to POST data when the request submitted fields for that
        prefix), stores them on ``form.nested_formsets``, and recurses into
        deeper inline levels.

        :raises Exception: when nesting exceeds 5 levels.
        """
        if depth > 5:
            raise Exception("Maximum nesting depth reached (5)")
        for form in formset.forms:
            nested_formsets = []
            for nested_inline in inline.get_inline_instances(request):
                InlineFormSet = nested_inline.get_formset(request, form.instance)
                prefix = "%s-%s" % (form.prefix, InlineFormSet.get_default_prefix())
                # Bind to submitted data only when this POST actually carries
                # fields for this prefix; otherwise build an unbound formset.
                if request.method == 'POST' and any(s.startswith(prefix) for s in request.POST.keys()):
                    nested_formset = InlineFormSet(request.POST, request.FILES,
                                                   instance=form.instance,
                                                   prefix=prefix, queryset=nested_inline.get_queryset(request))
                else:
                    nested_formset = InlineFormSet(instance=form.instance,
                                                   prefix=prefix, queryset=nested_inline.get_queryset(request))
                nested_formsets.append(nested_formset)
                if nested_inline.inlines:
                    self.add_nested_inline_formsets(request, nested_inline, nested_formset, depth=depth + 1)
            form.nested_formsets = nested_formsets
    def wrap_nested_inline_formsets(self, request, inline, formset):
        """Wrap each nested formset in a ``helpers.InlineAdminFormSet`` so
        the admin templates can render it, recursing through all levels.

        Replaces ``form.nested_formsets`` with the wrapped versions.

        :return: the accumulated form Media of all wrapped nested formsets,
            or None when there were none.
        """
        media = None
        def get_media(extra_media):
            # Accumulate Media across formsets; reads ``media`` from the
            # enclosing scope (read-only closure, no nonlocal needed).
            if media:
                return media + extra_media
            else:
                return extra_media
        for form in formset.forms:
            wrapped_nested_formsets = []
            for nested_inline, nested_formset in zip(inline.get_inline_instances(request), form.nested_formsets):
                # Only pass a persisted instance to the fieldset/readonly
                # lookups; unsaved parents get None.
                if form.instance.pk:
                    instance = form.instance
                else:
                    instance = None
                fieldsets = list(nested_inline.get_fieldsets(request, instance))
                readonly = list(nested_inline.get_readonly_fields(request, instance))
                prepopulated = dict(nested_inline.get_prepopulated_fields(request, instance))
                wrapped_nested_formset = helpers.InlineAdminFormSet(
                    nested_inline, nested_formset,
                    fieldsets, prepopulated, readonly, model_admin=self,
                )
                wrapped_nested_formsets.append(wrapped_nested_formset)
                media = get_media(wrapped_nested_formset.media)
                if nested_inline.inlines:
                    media = get_media(self.wrap_nested_inline_formsets(request, nested_inline, nested_formset))
            form.nested_formsets = wrapped_nested_formsets
        return media
    def formset_has_nested_data(self, formsets):
        """Return True if any form in ``formsets`` (or in its nested
        formsets, recursively) carries cleaned data; otherwise falls
        through and returns None.
        """
        for formset in formsets:
            if not formset.is_bound:
                # NOTE(review): ``pass`` means unbound formsets are still
                # scanned below; ``continue`` may have been intended —
                # confirm before changing.
                pass
            for form in formset:
                if hasattr(form, 'cleaned_data') and form.cleaned_data:
                    return True
                elif hasattr(form, 'nested_formsets'):
                    if self.formset_has_nested_data(form.nested_formsets):
                        return True
    def all_valid_with_nesting(self, formsets):
        """Recursively validate all nested formsets.

        Returns False as soon as any formset (at any depth) is invalid, or
        when a form carries no data of its own but its nested formsets do
        (a child row cannot be saved without its parent).
        """
        if not all_valid(formsets):
            return False
        for formset in formsets:
            if not formset.is_bound:
                # NOTE(review): ``pass`` leaves unbound formsets subject to
                # the checks below; ``continue`` may have been intended.
                pass
            for form in formset:
                if hasattr(form, 'nested_formsets'):
                    if not self.all_valid_with_nesting(form.nested_formsets):
                        return False
                    # TODO - find out why this breaks when extra = 1 and just adding new item with no sub items
                    if (not hasattr(form, 'cleaned_data') or not form.cleaned_data) and\
                            self.formset_has_nested_data(form.nested_formsets):
                        form._errors["__all__"] = form.error_class(
                            [u"Parent object must be created when creating nested inlines."]
                        )
                        return False
        return True
@csrf_protect_m
@transaction.atomic
def add_view(self, request, form_url='', extra_context=None):
"The 'add' admin view for this model."
model = self.model
opts = model._meta
if not self.has_add_permission(request):
raise PermissionDenied
ModelForm = self.get_form(request)
formsets = []
inline_instances = self.get_inline_instances(request, None)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES)
if form.is_valid():
new_object = self.save_form(request, form, change=False)
form_validated = True
else:
form_validated = False
new_object = self.model()
prefixes = {}
for FormSet, inline in self.get_formsets_with_inlines(request):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(data=request.POST, files=request.FILES,
instance=new_object,
save_as_new="_saveasnew" in request.POST,
prefix=prefix, queryset=inline.get_queryset(request))
formsets.append(formset)
if inline.inlines:
self.add_nested_inline_formsets(request, inline, formset)
if self.all_valid_with_nesting(formsets) and form_validated:
self.save_model(request, new_object, form, False)
self.save_related(request, form, formsets, False)
args = ()
# Provide `add_message` argument to ModelAdmin.log_addition for
# Django 1.9 and up.
if VERSION[:2] >= (1, 9):
add_message = self.construct_change_message(
request, form, formsets, add=True
)
args = (request, new_object, add_message)
else:
args = (request, new_object)
self.log_addition(*args)
return self.response_add(request, new_object)
else:
# Prepare the dict of initial data from the request.
# We have to special-case M2Ms as a list of comma-separated PKs.
initial = dict(request.GET.items())
for k in initial:
try:
f = opts.get_field(k)
except FieldDoesNotExist:
continue
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
form = ModelForm(initial=initial)
prefixes = {}
for FormSet, inline in self.get_formsets_with_inlines(request):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(instance=self.model(), prefix=prefix,
queryset=inline.get_queryset(request))
formsets.append(formset)
if hasattr(inline, 'inlines') and inline.inlines:
self.add_nested_inline_formsets(request, inline, formset)
adminForm = helpers.AdminForm(form, list(self.get_fieldsets(request)),
self.get_prepopulated_fields(request),
self.get_readonly_fields(request),
model_admin=self)
media = self.media + adminForm.media
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request))
readonly = list(inline.get_readonly_fields(request))
prepopulated = dict(inline.get_prepopulated_fields(request))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
media = media + inline_admin_formset.media
if hasattr(inline, 'inlines') and inline.inlines:
extra_media = self.wrap_nested_inline_formsets(
request, inline, formset)
if extra_media:
media += extra_media
context = {
'title': _('Add %s') % force_text(opts.verbose_name),
'adminform': adminForm,
'is_popup': "_popup" in request.GET,
'show_delete': False,
'media': media,
'inline_admin_formsets': inline_admin_formsets,
'errors': helpers.AdminErrorList(form, formsets),
'app_label': opts.app_label,
}
context.update(self.admin_site.each_context(request))
context.update(extra_context or {})
return self.render_change_form(request, context, form_url=form_url, add=True)
    @csrf_protect_m
    @transaction.atomic
    def change_view(self, request, object_id, form_url='', extra_context=None):
        """The 'change' admin view for this model.

        Mirrors Django's stock ``ModelAdmin.change_view`` extended to build,
        validate and render nested inline formsets.
        """
        model = self.model
        opts = model._meta
        obj = self.get_object(request, unquote(object_id))
        if not self.has_change_permission(request, obj):
            raise PermissionDenied
        if obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
                'name': force_text(opts.verbose_name), 'key': escape(object_id)})
        if request.method == 'POST' and "_saveasnew" in request.POST:
            # NOTE(review): ``opts.module_name`` was removed in Django 1.8 in
            # favour of ``opts.model_name`` -- this save-as-new redirect would
            # raise on modern Django. Confirm the supported version range.
            return self.add_view(request, form_url=reverse('admin:%s_%s_add' %
                                                           (opts.app_label,
                                                            opts.module_name),
                                                           current_app=self.admin_site.name))
        ModelForm = self.get_form(request, obj)
        formsets = []
        inline_instances = self.get_inline_instances(request, obj)
        if request.method == 'POST':
            form = ModelForm(request.POST, request.FILES, instance=obj)
            if form.is_valid():
                form_validated = True
                new_object = self.save_form(request, form, change=True)
            else:
                form_validated = False
                new_object = obj
            prefixes = {}
            for FormSet, inline in self.get_formsets_with_inlines(request, new_object):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    # Disambiguate duplicate default prefixes.
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(
                    request.POST, request.FILES, instance=new_object,
                    prefix=prefix, queryset=inline.get_queryset(request),
                )
                formsets.append(formset)
                if hasattr(inline, 'inlines') and inline.inlines:
                    self.add_nested_inline_formsets(request, inline, formset)
            if self.all_valid_with_nesting(formsets) and form_validated:
                self.save_model(request, new_object, form, True)
                self.save_related(request, form, formsets, True)
                change_message = self.construct_change_message(request, form, formsets)
                self.log_change(request, new_object, change_message)
                return self.response_change(request, new_object)
        else:
            form = ModelForm(instance=obj)
            prefixes = {}
            for FormSet, inline in self.get_formsets_with_inlines(request, obj):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(instance=obj, prefix=prefix, queryset=inline.get_queryset(request))
                formsets.append(formset)
                if hasattr(inline, 'inlines') and inline.inlines:
                    self.add_nested_inline_formsets(request, inline, formset)
        # Build the rendering context: wrap the main form and every inline
        # formset (plus their nested formsets) and collect their media.
        adminForm = helpers.AdminForm(
            form, self.get_fieldsets(request, obj),
            self.get_prepopulated_fields(request, obj),
            self.get_readonly_fields(request, obj),
            model_admin=self,
        )
        media = self.media + adminForm.media
        inline_admin_formsets = []
        for inline, formset in zip(inline_instances, formsets):
            fieldsets = list(inline.get_fieldsets(request, obj))
            readonly = list(inline.get_readonly_fields(request, obj))
            prepopulated = dict(inline.get_prepopulated_fields(request, obj))
            inline_admin_formset = helpers.InlineAdminFormSet(
                inline, formset, fieldsets, prepopulated, readonly, model_admin=self,
            )
            inline_admin_formsets.append(inline_admin_formset)
            media = media + inline_admin_formset.media
            if hasattr(inline, 'inlines') and inline.inlines:
                extra_media = self.wrap_nested_inline_formsets(request, inline, formset)
                if extra_media:
                    media += extra_media
        context = {
            'title': _('Change %s') % force_text(opts.verbose_name),
            'adminform': adminForm,
            'object_id': object_id,
            'original': obj,
            'is_popup': "_popup" in request.GET,
            'media': media,
            'inline_admin_formsets': inline_admin_formsets,
            'errors': helpers.AdminErrorList(form, formsets),
            'app_label': opts.app_label,
        }
        context.update(self.admin_site.each_context(request))
        context.update(extra_context or {})
        return self.render_change_form(request, context, change=True, obj=obj, form_url=form_url)
class NestedInline(InlineInstancesMixin, InlineModelAdmin):
    """Inline admin that may itself declare nested ``inlines``."""
    # Nested inline admin classes rendered inside each form of this inline.
    inlines = []
    # NOTE(review): mutable class-level list shared by all instances --
    # presumably only ever rebound, never mutated in place. TODO confirm.
    new_objects = []
    @property
    def media(self):
        # Serve unminified assets while DEBUG is on.
        extra = '' if settings.DEBUG else '.min'
        if VERSION[:2] >= (1, 9):
            # Django >= 1.9 ships jQuery under the vendor/ directory.
            js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js']
        else:
            js = ['jquery%s.js' % extra, 'jquery.init.js']
        js.append('inlines-nested%s.js' % extra)
        if self.prepopulated_fields:
            js.extend(['urlify.js', 'prepopulate%s.js' % extra])
        if self.filter_vertical or self.filter_horizontal:
            js.extend(['SelectBox.js', 'SelectFilter2.js'])
        return forms.Media(js=[static('admin/js/%s' % url) for url in js])
    def get_formsets_with_inlines(self, request, obj=None):
        """Yield (FormSet, inline) pairs for this inline's own nested inlines."""
        for inline in self.get_inline_instances(request):
            yield inline.get_formset(request, obj), inline
class NestedStackedInline(NestedInline):
    """Nested inline rendered with the stacked layout."""
    template = 'admin/edit_inline/stacked-nested.html'
class NestedTabularInline(NestedInline):
    """Nested inline rendered with the tabular layout."""
    template = 'admin/edit_inline/tabular-nested.html'
|
PypiClean
|
/keras_segmentation-0.3.0.tar.gz/keras_segmentation-0.3.0/keras_segmentation/models/segnet.py
|
from keras.models import *
from keras.layers import *
from .config import IMAGE_ORDERING
from .model_utils import get_segmentation_model
from .vgg16 import get_vgg_encoder
from .mobilenet import get_mobilenet_encoder
from .basic_models import vanilla_encoder
from .resnet50 import get_resnet50_encoder
def segnet_decoder(f, n_classes, n_up=3):
    """Build the SegNet decoder head on top of encoder features ``f``.

    One 512-filter conv block is applied first, followed by ``n_up``
    upsampling stages (256 filters, then 128 repeated, then 64) and a
    final ``n_classes`` convolution.
    """
    assert n_up >= 2
    o = ZeroPadding2D((1, 1), data_format=IMAGE_ORDERING)(f)
    o = Conv2D(512, (3, 3), padding='valid', data_format=IMAGE_ORDERING)(o)
    o = BatchNormalization()(o)
    # One upsampling stage per entry: 256 once, 128 for the (n_up - 2)
    # middle stages, and 64 for the last -- n_up stages in total, exactly
    # matching the original unrolled sequence.
    for n_filters in [256] + [128] * (n_up - 2) + [64]:
        o = UpSampling2D((2, 2), data_format=IMAGE_ORDERING)(o)
        o = ZeroPadding2D((1, 1), data_format=IMAGE_ORDERING)(o)
        o = Conv2D(n_filters, (3, 3), padding='valid',
                   data_format=IMAGE_ORDERING)(o)
        o = BatchNormalization()(o)
    o = Conv2D(n_classes, (3, 3), padding='same',
               data_format=IMAGE_ORDERING)(o)
    return o
def _segnet(n_classes, encoder, input_height=416, input_width=608,
            encoder_level=3):
    """Assemble a segmentation model: ``encoder`` features + SegNet decoder."""
    img_input, levels = encoder(
        input_height=input_height, input_width=input_width)
    decoder_out = segnet_decoder(levels[encoder_level], n_classes, n_up=3)
    return get_segmentation_model(img_input, decoder_out)
def segnet(n_classes, input_height=416, input_width=608, encoder_level=3):
    """SegNet with the vanilla (plain conv) encoder."""
    m = _segnet(n_classes, vanilla_encoder,
                input_height=input_height, input_width=input_width,
                encoder_level=encoder_level)
    m.model_name = "segnet"
    return m
def vgg_segnet(n_classes, input_height=416, input_width=608, encoder_level=3):
    """SegNet with a VGG16 encoder."""
    m = _segnet(n_classes, get_vgg_encoder,
                input_height=input_height, input_width=input_width,
                encoder_level=encoder_level)
    m.model_name = "vgg_segnet"
    return m
def resnet50_segnet(n_classes, input_height=416, input_width=608,
                    encoder_level=3):
    """SegNet with a ResNet-50 encoder."""
    m = _segnet(n_classes, get_resnet50_encoder,
                input_height=input_height, input_width=input_width,
                encoder_level=encoder_level)
    m.model_name = "resnet50_segnet"
    return m
def mobilenet_segnet(n_classes, input_height=224, input_width=224,
                     encoder_level=3):
    """SegNet with a MobileNet encoder (224x224 default input)."""
    m = _segnet(n_classes, get_mobilenet_encoder,
                input_height=input_height, input_width=input_width,
                encoder_level=encoder_level)
    m.model_name = "mobilenet_segnet"
    return m
if __name__ == '__main__':
    # Smoke-test: build two model variants (no training, no data needed).
    m = vgg_segnet(101)
    m = segnet(101)
    # m = mobilenet_segnet( 101 )
    # from keras.utils import plot_model
    # plot_model( m , show_shapes=True , to_file='model.png')
|
PypiClean
|
/nonebot-plugin-kawaii-robot-4.0.0.tar.gz/nonebot-plugin-kawaii-robot-4.0.0/README.md
|
<!-- markdownlint-disable MD031 MD033 MD036 MD041 -->
<div align="center">
<a href="https://v2.nonebot.dev/store">
<img src="https://raw.githubusercontent.com/A-kirami/nonebot-plugin-template/resources/nbp_logo.png" width="180" height="180" alt="NoneBotPluginLogo">
</a>
<p>
<img src="https://raw.githubusercontent.com/A-kirami/nonebot-plugin-template/resources/NoneBotPlugin.svg" width="240" alt="NoneBotPluginText">
</p>
# NoneBot-Plugin-Kawaii-Robot
_✨ 使用 [Kyomotoi/AnimeThesaurus](https://github.com/Kyomotoi/AnimeThesaurus) 的 NoneBot2 的回复(文 i)插件 ✨_
<img src="https://img.shields.io/badge/python-3.8+-blue.svg" alt="python">
<a href="https://pdm.fming.dev">
<img src="https://img.shields.io/badge/pdm-managed-blueviolet" alt="pdm-managed">
</a>
<!-- <a href="https://wakatime.com/badge/user/b61b0f9a-f40b-4c82-bc51-0a75c67bfccf/project/f4778875-45a4-4688-8e1b-b8c844440abb">
<img src="https://wakatime.com/badge/user/b61b0f9a-f40b-4c82-bc51-0a75c67bfccf/project/f4778875-45a4-4688-8e1b-b8c844440abb.svg" alt="wakatime">
</a> -->
<br />
<a href="./LICENSE">
<img src="https://img.shields.io/github/license/KarisAya/nonebot_plugin_kawaii_robot.svg" alt="license">
</a>
<a href="https://pypi.python.org/pypi/nonebot_plugin_kawaii_robot">
<img src="https://img.shields.io/pypi/v/nonebot_plugin_kawaii_robot.svg" alt="pypi">
</a>
<a href="https://pypi.python.org/pypi/nonebot_plugin_kawaii_robot">
<img src="https://img.shields.io/pypi/dm/nonebot_plugin_kawaii_robot" alt="pypi download">
</a>
</div>
## 📖 介绍
**WARNING:高二次元浓度警告**
### 词库回复
当用户 @机器人 或者 提到机器人昵称时,会根据词库回复一条消息
### 戳一戳回复
当用户戳机器人的时候,机器人会戳回去,或者随机回复一条词库中消息
### 群聊(打断)复读姬
现在可以复读啦!~~谁不喜欢 +1 呢~~
当然也可以打断复读...~~谁不喜欢打断复读呢~~
## 💿 安装
以下提到的方法 任选**其一** 即可
<details open>
<summary>[推荐] 使用 nb-cli 安装</summary>
在 nonebot2 项目的根目录下打开命令行, 输入以下指令即可安装
```bash
nb plugin install nonebot_plugin_kawaii_robot
```
</details>
<details>
<summary>使用包管理器安装</summary>
在 nonebot2 项目的插件目录下, 打开命令行, 根据你使用的包管理器, 输入相应的安装命令
<details>
<summary>pip</summary>
```bash
pip install nonebot_plugin_kawaii_robot
```
</details>
<details>
<summary>pdm</summary>
```bash
pdm add nonebot_plugin_kawaii_robot
```
</details>
<details>
<summary>poetry</summary>
```bash
poetry add nonebot_plugin_kawaii_robot
```
</details>
<details>
<summary>conda</summary>
```bash
conda install nonebot_plugin_kawaii_robot
```
</details>
打开 nonebot2 项目根目录下的 `pyproject.toml` 文件, 在 `[tool.nonebot]` 部分的 `plugins` 项里追加写入
```toml
[tool.nonebot]
plugins = [
# ...
"nonebot_plugin_kawaii_robot"
]
```
</details>
## ⚙️ 配置
### 插件
在 NoneBot2 项目的 `.env` 文件中按需添加下面的配置项
```properties
# 机器人昵称
NICKNAME=[]
# 词库回复权限,`ALL` 就是全部聊天都会触发回复,`GROUP` 就是仅群聊
LEAF_PERMISSION=ALL
# 忽略词,指令以本列表中的元素开头不会触发回复
# 例:[".", "#", "你好"]
LEAF_IGNORE=[]
# 回复模式,`-1` 关闭全部 at 回复,`0` 仅启用词库回复,`1` 开启所有回复
LEAF_REPLY_TYPE=1
# 戳一戳回复文字概率,范围 `0` ~ `100`,`-1` 关闭戳一戳回复,`0` 代表始终戳回去
LEAF_POKE_RAND=20
# 触发复读或打断次数,群内复读 `{0}` ~ `{1}` 次数后触发复读或打断
LEAF_REPEATER_LIMIT=[2, 6]
# 打断概率,范围 `0` ~ `100`,`0` 关闭打断
LEAF_INTERRUPT=20
# 词库回复匹配模式,`0` 是精确匹配,`1` 是关键词匹配
LEAF_MATCH_PATTERN=1
# 词库回复是否需要 @机器人 或包含机器人昵称
LEAF_NEED_AT=True
# 当 `LEAF_NEED_AT` 为 `False` 时,非 @机器人 时的词库回复触发概率,范围 `0` ~ `100`
LEAF_TRIGGER_PERCENT=5
# 戳一戳回复延时,单位秒
LEAF_POKE_ACTION_DELAY=[0.5, 1.5]
# 当回复存在多条消息时,发送消息的间隔时间,单位秒
LEAF_MULTI_REPLY_DELAY=[1.0, 3.0]
# 是否载入内置回复词库
# 内置了 Kyomotoi/AnimeThesaurus 词库(data.json),还有咱自制的 bot 的词库(leaf.json)
LEAF_LOAD_BUILTIN_DICT=True
# 是否载入内置特殊回复词库
LEAF_LOAD_BUILTIN_SPECIAL=True
```
### 附加词库
#### 加载
把你自己的词库(json 文件)扔到 `data/kawaii_robot` 文件夹里就可以加载啦!
可以加载多个 json 文件。
会忽略文件名以 `_` 开头的文件。
如果扔进了奇怪的东西大概会加载失败,然后。。。跳过,继续加载下一个文件。
~~不要把奇怪的东西扔进资源里呀 kora~~
~~顺便一提,自己的词库是最优先的。~~ 现在并到一起了
#### 编写
参考 [Kyomotoi/AnimeThesaurus](https://github.com/Kyomotoi/AnimeThesaurus) 的 json 字典格式,键是关键词字符串,值是回复列表
**注意:词库要符合 json 格式 如果报解码错误(`UnicodeDecodeError`)先检查自己的词库是不是 无 BOM 的 UTF-8 编码格式**
回复里可以写变量,目前用 `str.format()` 格式化;也可以往里写 CQ 码。
如果回复中需要用到 `{` 或 `}`,请用 `{{` 或 `}}` 代替。
支持的变量:
- `{user_id}`:发送者 QQ 号
- `{username}`:发送者昵称(获取失败则默认为 `你`)
- `{bot_nickname}`:机器人昵称(没有设置则默认为 `可爱的咱`)
- `{segment}`:用于分割消息,该变量前的文本将会单独为一条消息发送
示例:
```jsonc
{
"呐": [
"嗯?{bot_nickname}在哦~{username}有什么事吗?"
// ...
]
}
```
#### 特殊词库
在 `data/kawaii_robot` 文件夹里有几个特殊的附加词库文件(在 `const.py` 中有对应的内置词库):
- `_hello.json`:用户只 at 了机器人,没有带任何其他文本消息时回复的内容
- `_poke.json`:用户戳一戳机器人时回复的文本内容
- `_unknown.json`:用户发送的消息没有匹配到任何词库内容时回复的消息
- `_interrupt.json`:打断复读时回复的消息
这些词库的格式是一个文本数组,每个元素是一条回复,同样可以使用上面提到的变量
示例:
```jsonc
[
"{username}你好~",
"{bot_nickname}在哦~"
// ...
]
```
<!--
## 🎉 使用
### 指令表
| 指令 | 权限 | 需要@ | 范围 | 说明 |
| :----: | :--: | :---: | :--: | :------: |
| 指令 1 | 主人 | 否 | 私聊 | 指令说明 |
| 指令 2 | 群员 | 是 | 群聊 | 指令说明 |
### 效果图
如果有效果图的话
-->
<!--
## 📞 联系
...
-->
## 💡 鸣谢
- 插件改编~~抄~~自 [nonebot_plugin_smart_reply](https://github.com/Special-Week/nonebot_plugin_smart_reply):使用了青云客 api 的的智能~障~回复插件
- 复读姬借鉴~~抄~~自 [nonebot_plugin_repeater](https://github.com/ninthseason/nonebot-plugin-repeater):群聊复读机
## 📝 更新日志
### 4.0.0
- 完全重构插件代码,更改项目结构,使用 `pdm` 管理项目
- 词库优化(详见 [附加词库](#附加词库)):
- 加载:现在可以直接往 `data/kawaii_robot` 文件夹里扔你自己的 json 词库了
- 编写:支持了一些变量
- 配置项的增加与修改(详见 [配置](#%EF%B8%8F-配置)):
- 修改 `LEAF_IGNORE`:修改类型为 `Set[str]`,配置书写方式不变
- 修改 `LEAF_AT_MOD`:更名为 `LEAF_NEED_AT`,修改类型为 `bool`
- 增加 `LEAF_TRIGGER_PERCENT`
- 增加 `LEAF_POKE_ACTION_DELAY`
- 增加 `LEAF_LOAD_BUILTIN_DICT`
- 增加 `LEAF_LOAD_BUILTIN_SPECIAL`
- 增加 `LEAF_MULTI_REPLY_DELAY`
- 还有的可能没列出来,问就是我忘了,qwq
|
PypiClean
|
/zipline_crypto-0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/zipline/finance/blotter/blotter.py
|
from abc import ABCMeta, abstractmethod
from zipline.extensions import extensible
from zipline.finance.cancel_policy import NeverCancel
@extensible
class Blotter(metaclass=ABCMeta):
    """Abstract interface for order blotters.

    A blotter tracks a session's orders and converts them into transactions
    as market data arrives. Concrete subclasses implement order placement,
    cancellation, corporate-action handling and fill processing.
    """
    def __init__(self, cancel_policy=None):
        # Policy deciding when open orders are cancelled automatically;
        # defaults to never cancelling.
        self.cancel_policy = cancel_policy if cancel_policy else NeverCancel()
        # Current simulation/trading datetime; updated via set_date().
        self.current_dt = None
    def set_date(self, dt):
        """Set the datetime used to timestamp subsequently placed orders."""
        self.current_dt = dt
    @abstractmethod
    def order(self, asset, amount, style, order_id=None):
        """Place an order.
        Parameters
        ----------
        asset : zipline.assets.Asset
            The asset that this order is for.
        amount : int
            The amount of shares to order. If ``amount`` is positive, this is
            the number of shares to buy or cover. If ``amount`` is negative,
            this is the number of shares to sell or short.
        style : zipline.finance.execution.ExecutionStyle
            The execution style for the order.
        order_id : str, optional
            The unique identifier for this order.
        Returns
        -------
        order_id : str or None
            The unique identifier for this order, or None if no order was
            placed.
        Notes
        -----
        amount > 0 :: Buy/Cover
        amount < 0 :: Sell/Short
        Market order: order(asset, amount)
        Limit order: order(asset, amount, style=LimitOrder(limit_price))
        Stop order: order(asset, amount, style=StopOrder(stop_price))
        StopLimit order: order(asset, amount, style=StopLimitOrder(limit_price,
        stop_price))
        """
        raise NotImplementedError("order")
    def batch_order(self, order_arg_lists):
        """Place a batch of orders.
        Parameters
        ----------
        order_arg_lists : iterable[tuple]
            Tuples of args that `order` expects.
        Returns
        -------
        order_ids : list[str or None]
            The unique identifier (or None) for each of the orders placed
            (or not placed).
        Notes
        -----
        This is required for `Blotter` subclasses to be able to place a batch
        of orders, instead of being passed the order requests one at a time.
        """
        return [self.order(*order_args) for order_args in order_arg_lists]
    @abstractmethod
    def cancel(self, order_id, relay_status=True):
        """Cancel a single order
        Parameters
        ----------
        order_id : int
            The id of the order
        relay_status : bool
            Whether or not to record the status of the order
        """
        raise NotImplementedError("cancel")
    @abstractmethod
    def cancel_all_orders_for_asset(self, asset, warn=False, relay_status=True):
        """
        Cancel all open orders for a given asset.
        """
        raise NotImplementedError("cancel_all_orders_for_asset")
    @abstractmethod
    def execute_cancel_policy(self, event):
        # Apply ``self.cancel_policy`` in response to a market *event*
        # (e.g. cancel open orders at session end, policy permitting).
        raise NotImplementedError("execute_cancel_policy")
    @abstractmethod
    def reject(self, order_id, reason=""):
        """
        Mark the given order as 'rejected', which is functionally similar to
        cancelled. The distinction is that rejections are involuntary (and
        usually include a message from a broker indicating why the order was
        rejected) while cancels are typically user-driven.
        """
        raise NotImplementedError("reject")
    @abstractmethod
    def hold(self, order_id, reason=""):
        """
        Mark the order with order_id as 'held'. Held is functionally similar
        to 'open'. When a fill (full or partial) arrives, the status
        will automatically change back to open/filled as necessary.
        """
        raise NotImplementedError("hold")
    @abstractmethod
    def process_splits(self, splits):
        """
        Processes a list of splits by modifying any open orders as needed.
        Parameters
        ----------
        splits: list
            A list of splits. Each split is a tuple of (asset, ratio).
        Returns
        -------
        None
        """
        raise NotImplementedError("process_splits")
    @abstractmethod
    def get_transactions(self, bar_data):
        """
        Creates a list of transactions based on the current open orders,
        slippage model, and commission model.
        Parameters
        ----------
        bar_data: zipline._protocol.BarData
        Notes
        -----
        This method book-keeps the blotter's open_orders dictionary, so that
        it is accurate by the time we're done processing open orders.
        Returns
        -------
        transactions_list: List
            transactions_list: list of transactions resulting from the current
            open orders. If there were no open orders, an empty list is
            returned.
        commissions_list: List
            commissions_list: list of commissions resulting from filling the
            open orders. A commission is an object with "asset" and "cost"
            parameters.
        closed_orders: List
            closed_orders: list of all the orders that have filled.
        """
        raise NotImplementedError("get_transactions")
    @abstractmethod
    def prune_orders(self, closed_orders):
        """
        Removes all given orders from the blotter's open_orders list.
        Parameters
        ----------
        closed_orders: iterable of orders that are closed.
        Returns
        -------
        None
        """
        raise NotImplementedError("prune_orders")
|
PypiClean
|
/EARL-pytorch-0.5.1.tar.gz/EARL-pytorch-0.5.1/earl_pytorch/dataset/create_dataset_v3.py
|
import json
import os
import subprocess
import sys
import ballchasing as bc
import numpy as np
import pandas as pd
from earl_pytorch import EARL
# carball CLI template: first {} is the input .replay path, second {} is the
# output folder for the parquet export.
command = r'carball.exe -i "{}" -o "{}" parquet'
ENV = os.environ.copy()
# Disable ANSI colour codes in carball's log output.
ENV["NO_COLOR"] = "1"
class CarballAnalysis:
    """Loads the artifacts written by a ``carball ... parquet`` export."""
    # File names carball emits inside a processed-replay folder.
    METADATA_FNAME = "metadata.json"
    ANALYZER_FNAME = "analyzer.json"
    BALL_FNAME = "__ball.parquet"
    GAME_FNAME = "__game.parquet"
    PLAYER_FNAME = "player_{}.parquet"
    def __init__(self, processed_folder: str):
        """Read metadata/analyzer JSON and the ball/game/player frames.

        processed_folder: directory previously written by ``process_replay``.
        """
        # print(processed_folder, self.METADATA_FNAME)
        # Fixed: use context managers so the JSON file handles are closed
        # deterministically instead of leaking until garbage collection.
        with open(os.path.join(processed_folder, self.METADATA_FNAME)) as fh:
            self.metadata = json.load(fh)
        with open(os.path.join(processed_folder, self.ANALYZER_FNAME)) as fh:
            self.analyzer = json.load(fh)
        self.ball = pd.read_parquet(os.path.join(processed_folder, self.BALL_FNAME))
        self.game = pd.read_parquet(os.path.join(processed_folder, self.GAME_FNAME))
        self.players = {}
        for player in self.metadata["players"]:
            uid = player["unique_id"]
            player_path = os.path.join(processed_folder, self.PLAYER_FNAME.format(uid))
            # NOTE(review): players without a parquet file are silently
            # skipped -- presumably intentional; confirm.
            if os.path.exists(player_path):
                self.players[uid] = pd.read_parquet(player_path)
def download_replays(n=1_000):
    """Download up to *n* SSL-rank replays per ranked playlist.

    Relies on the module globals ``working_dir`` and ``api`` assigned in the
    ``__main__`` block.
    """
    for gamemode in bc.Playlist.RANKED:
        gm_folder = os.path.join(working_dir, "replays", gamemode)
        os.makedirs(gm_folder, exist_ok=True)
        replay_iter = api.get_replays(
            min_rank=bc.Rank.SUPERSONIC_LEGEND,
            max_rank=bc.Rank.SUPERSONIC_LEGEND,
            season=bc.Season.SEASON_5_FTP,
            count=n
        )
        for replay in replay_iter:
            # Skip replays already fetched on a previous run.
            if not os.path.exists(os.path.join(gm_folder, replay["id"])):
                api.download_replay(replay["id"], gm_folder)
                print(replay["id"], "downloaded")
def process_replay(replay_path, output_folder):
    """Run carball on one ``.replay`` file, writing parquet output and logs.

    Returns the ``subprocess.CompletedProcess`` of the carball invocation.
    """
    folder, fn = os.path.split(replay_path)
    replay_name = fn.replace(".replay", "")
    processed_folder = os.path.join(output_folder, replay_name)
    os.makedirs(processed_folder, exist_ok=True)
    # carball's stdout/stderr are captured into log files next to the output.
    with open(os.path.join(processed_folder, "carball.o.log"), "w", encoding="utf8") as stdout_f:
        with open(os.path.join(processed_folder, "carball.e.log"), "w", encoding="utf8") as stderr_f:
            # NOTE(review): the command is built by string formatting; paths
            # containing double quotes would break the quoting. The hardcoded
            # ``carball.exe`` suggests Windows-only usage -- confirm.
            return subprocess.run(
                command.format(replay_path, processed_folder),
                stdout=stdout_f,
                stderr=stderr_f,
                env=ENV
            )
def parse_replays():
    """Run carball over every downloaded replay, per ranked playlist.

    Uses the module global ``working_dir`` assigned in ``__main__``.
    """
    for gamemode in bc.Playlist.RANKED:
        replay_folder = os.path.join(working_dir, "replays", gamemode)
        parsed_folder = os.path.join(working_dir, "parsed", gamemode)
        for replay in os.listdir(replay_folder):
            process_replay(os.path.join(replay_folder, replay), parsed_folder)
            print(replay, "processed")
def train_model():
    """Train an EARL model on parsed replays.

    NOTE(review): appears unfinished -- ``CarballAnalysis()`` is called
    without its required ``processed_folder`` argument (would raise
    TypeError), and ``data`` is allocated but never filled or used.
    """
    model = EARL()
    shard_size = 1_000_000
    for epoch in range(100):
        data = np.zeros((shard_size, 41, 24))
        analysis = CarballAnalysis()
def main():
    """Fetch SSL replays from ballchasing, then parse them with carball."""
    for step in (download_replays, parse_replays):
        step()
if __name__ == '__main__':
    # Usage: create_dataset_v3.py <working_dir> <ballchasing_api_token>
    working_dir = sys.argv[1]
    api = bc.Api(sys.argv[2])
    main()
|
PypiClean
|
/sphinxcontrib-getthecode-1.2.1.tar.gz/sphinxcontrib-getthecode-1.2.1/sphinxcontrib/getthecode.py
|
from pathlib import Path
import codecs
import os
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.writers.html4css1 import HTMLTranslator as BaseTranslator
from sphinx.util.nodes import set_source_info
import jinja2
####################################################################################################
# class="reference download internal"
# '<button id="copy-button" data-clipboard-target="clipboard_pre">Copy to Clipboard</button>'
# '<pre id="clipboard_pre">' + node.rawsource + </pre>'
# Jinja2 template for the header bar rendered above each code block.
# Context variables: filename, url, and optionally notebook_filename /
# notebook_url (the notebook block is emitted only when notebook_url is set).
HEADER_TEMPLATE = '''
<div class="getthecode-header">
<ul>
<li class="getthecode-filename">{{ filename }}</li>
<li class="getthecode-filename-link">
<a href="{{ url }}" download={{ filename }} type="text/x-python" target="_blank" rel="noreferrer noopener"><span>
{{ filename }}
</span></a>
</li>
{% if notebook_url %}
<li class="getthecode-notebook-link">
<a href="{{ notebook_url }}" download={{ notebook_filename }} type="application/x-ipynb+json" target="_blank" rel="noreferrer noopener"><span>
{{ notebook_filename }}
</span></a>
</li>
{% endif %}
</ul>
</div>
'''
####################################################################################################
class GetTheCode(nodes.literal_block):
    """Docutils node marking a literal block produced by ``getthecode``."""
    pass
####################################################################################################
class GetTheCodeDirective(Directive):
    """This code is a copy-paste from :file:`sphinx/directives/code.py` :class:`LiteralInclude`. See
    also :file:`sphinx/roles.py` :class:`XRefRole`.
    """
    ##############################################
    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'encoding': directives.encoding,
        'hidden': directives.flag,
        'language': directives.unchanged_required,
        'linenos': directives.flag,
        'notebook': directives.flag,
    }
    ##############################################
    def run(self):
        """Read the referenced source file and return a GetTheCode node."""
        document = self.state.document
        if not document.settings.file_insertion_enabled:
            return [document.reporter.warning('File insertion disabled', line=self.lineno)]
        env = document.settings.env
        # arguments = [relative_source_path, ]
        relative_source_path, source_path = env.relfn2path(self.arguments[0])
        encoding = self.options.get('encoding', env.config.source_encoding)
        codec_info = codecs.lookup(encoding)
        try:
            # Decode the file using the explicitly requested (or configured)
            # encoding rather than the platform default.
            fh = codecs.StreamReaderWriter(open(source_path, 'rb'), codec_info[2], codec_info[3], 'strict')
            text = fh.read()
            fh.close()
        except (IOError, OSError):
            return [
                document.reporter.warning(
                    'Include file {} not found or reading it failed'.format(source_path),
                    line=self.lineno,
                )
            ]
        except UnicodeError:
            # NOTE(review): ``fh`` is not closed on this path (leaks until GC).
            template = 'Encoding {} used for reading included file {} seems to be wrong, try giving an :encoding: option'
            return [document.reporter.warning(template.format(encoding, source_path))]
        env.note_dependency(relative_source_path)
        node = GetTheCode(text, text, source=source_path, filename=None)
        set_source_info(self, node)
        if self.options.get('language', ''):
            node['language'] = self.options['language']
        if 'linenos' in self.options:
            node['linenos'] = True
        if 'hidden' in self.options:
            node['hidden'] = True
        if 'notebook' in self.options:
            # node['notebook'] = True
            # A sibling notebook shares the source file's stem with .ipynb.
            source_path = Path(source_path)
            notebook_name = source_path.stem + '.ipynb'
            notebook_path = source_path.parent.joinpath(notebook_name)
            node['notebook_path'] = notebook_path
        return [node]
####################################################################################################
def process_getthedoc(app, doctree):
    """ This function is a *doctree-read* callback. It copies the download-able files to the
    directory :directory:`_downloads`.
    This code is a copy-paste with few modifications of the
    :meth:`BuildEnvironment.process_downloads` method.
    """
    # Called before visit_GetTheCode_html
    # print('process_getthedoc')
    env = app.builder.env
    document_name = env.docname  # examples/document-generator/full-test .rst
    for node in doctree.traverse(GetTheCode):
        # targetname = node['reftarget']
        # /home/.../doc/sphinx/source/examples/document-generator/full-test.py
        source_path = Path(node['source'])
        relative_source_path, source_path = env.relfn2path(source_path.name, document_name)
        # Register the file so Sphinx rebuilds when it changes.
        env.dependencies.setdefault(document_name, set()).add(relative_source_path)
        if not os.access(source_path, os.R_OK):
            env.warn_node('download file not readable: {}'.format(source_path), node)
            continue
        # c3e20896d45729b3dd37b566def9e52a/full-test.py
        # dlfiles assigns a collision-free name under _downloads/.
        unique_name = env.dlfiles.add_file(document_name, source_path)
        node['filename'] = unique_name  # Fixme: filename -> ... ?
        notebook_path = node.get('notebook_path', None)
        if notebook_path is not None:
            if not os.access(notebook_path, os.R_OK):
                env.warn_node('download file not readable: {}'.format(notebook_path), node)
                continue
            unique_name = env.dlfiles.add_file(document_name, str(notebook_path))
            node['notebook_download_path'] = unique_name  # Fixme: -> ... ?
####################################################################################################
def visit_GetTheCode_html(self, node):
    """HTML visitor for GetTheCode nodes: emits the header bar followed by
    the highlighted source block.

    This code is a copy-paste from :file:`sphinx/writers/html.py`.
    """
    # print('visit_GetTheCode_html')
    # {
    #     'rawsource': u"...",
    #     'parent': <section "introduction": <title...><paragraph...><paragraph...><literal_block...> ...>,
    #     'source': '/home.../open-source-frontends.rst',
    #     'tagname': 'GetTheCode',
    #     'attributes': {'language': 'python', 'dupnames': [], 'xml:space': 'preserve', 'ids': [], 'backrefs': [],
    #                    'source': u'/home.../SimpleRectifierWithTransformer-jmodelica.py',
    #                    'classes': [], 'names': []},
    #     'line': 42,
    #     'document': <document: <comment...><comment...><section "open source frontends"...>>,
    #     'children': [<#text: 'from pymodelica import compile_fmu\nfrom pyfmi import load_fmu\n\ni ...'>]
    # }
    # Open the top div
    self.body.append(self.starttag(node, 'div', CLASS=('getthecode')))
    # self.context.append('</div>\n')
    # c3e20896d45729b3dd37b566def9e52a/full-test.py
    relative_path = Path(node['filename'])
    download_path = Path(self.builder.dlpath)
    # ../../_downloads/c3e20896d45729b3dd37b566def9e52a/full-test.py
    url = download_path.joinpath(relative_path)
    filename = relative_path.name
    notebook_path = node.get('notebook_download_path', None)
    if notebook_path is not None:
        notebook_path = Path(notebook_path)
        notebook_filename = notebook_path.name
        notebook_url = download_path.joinpath(notebook_path)
    else:
        notebook_filename = None
        notebook_url = None
    # Render the configurable header bar (filename + download links).
    template_str = self.builder.config.getthecode_header_template
    template = jinja2.Template(template_str)
    self.body.append(template.render(
        filename=filename, url=url,
        notebook_filename=notebook_filename, notebook_url=notebook_url
    ))
    if node.rawsource != node.astext():
        # most probably a parsed-literal block -- don't highlight
        return BaseTranslator.visit_literal_block(self, node)
    lang = node.get('language', 'default')
    linenos = node.get('linenos', False)
    highlight_args = node.get('highlight_args', {})
    highlight_args['force'] = node.get('force', False)
    if lang is self.builder.config.highlight_language:
        # only pass highlighter options for original language
        opts = self.builder.config.highlight_options
    else:
        opts = {}
    highlighted = self.highlighter.highlight_block(
        node.rawsource, lang, opts=opts, linenos=linenos,
        location=(self.builder.current_docname, node.line), **highlight_args
    )
    _class = 'highlight-{}'.format(lang)
    if node.get('hidden', False):
        # CSS class consumed by getthecode.js to collapse the block.
        _class += ' highlight-hidden'
    starttag = self.starttag(node, 'div', suffix='', CLASS=_class)
    self.body.append(starttag + highlighted + '</div>\n')
    # Close the top div
    self.body.append('</div>\n')
    # don't call depart_GetTheCode_html else dump source code
    raise nodes.SkipNode
####################################################################################################
def depart_GetTheCode_html(self, node):
    """No-op departure handler.

    The visitor raises ``SkipNode`` on the highlighted path, so this only
    runs for the parsed-literal fallback -- presumably intentionally left
    empty there. TODO confirm.
    """
    # print 'depart_GetTheCode_html'
    pass
    # BaseTranslator.depart_literal_block(self, node)
    # self.body.append('\n</pre>\n')
####################################################################################################
def setup(app):
    """Sphinx extension entry point: wire up the *getthecode* extension.

    Registers the client-side script, the header-template config value,
    the ``GetTheCode`` node with its HTML visitors, the ``getthecode``
    directive and the doctree-read hook on the given application.
    """
    # https://www.sphinx-doc.org/en/master/extdev/appapi.html#sphinx.application.Sphinx.add_js_file
    app.add_js_file('getthecode.js')  # , async='async'
    # Jinja template rendered above each downloadable code block; the third
    # argument is Sphinx's ``rebuild`` flag (False: no rebuild on change).
    app.add_config_value('getthecode_header_template', HEADER_TEMPLATE, False)
    app.add_node(
        GetTheCode,
        html=(visit_GetTheCode_html, depart_GetTheCode_html),
        # text=(visit_GetTheCode_text, depart_GetTheCode_text),
    )
    app.add_directive('getthecode', GetTheCodeDirective)
    app.connect('doctree-read', process_getthedoc)
|
PypiClean
|
/juno_sdk-0.0.1-py3-none-any.whl/juno_sdk/client/lcd/api/tendermint.py
|
from typing import Optional
from ._base import BaseAsyncAPI, sync_bind
from ..params import APIParams
__all__ = ["AsyncTendermintAPI", "TendermintAPI"]
class AsyncTendermintAPI(BaseAsyncAPI):
    """Async accessors for the Cosmos ``/cosmos/base/tendermint/v1beta1`` REST endpoints."""

    async def node_info(self, params: Optional[APIParams] = None) -> dict:
        """Fetches the current connected node's information.

        Args:
            params (APIParams): optional parameters

        Returns:
            dict: node information
        """
        res = await self._c._get("/cosmos/base/tendermint/v1beta1/node_info", params)
        return {
            "default_node_info": res["default_node_info"],
            # Fixed: was the accidental adjacent-literal key
            # ``res["application_version" ""]`` (implicitly concatenated to
            # the same string); spelled plainly now.
            "application_version": res["application_version"],
        }

    async def syncing(self, params: Optional[APIParams] = None) -> bool:
        """Fetches whether the current connected node is syncing with the network.

        Args:
            params (APIParams): optional parameters

        Returns:
            bool: syncing status
        """
        return (await self._c._get("/cosmos/base/tendermint/v1beta1/syncing", params))[
            "syncing"
        ]

    async def validator_set(self, height: Optional[int] = None, params: Optional[APIParams] = None) -> dict:
        """Fetches the validator set for a height. If no height is given, defaults to latest.

        Args:
            height (int, optional): block height.
            params (APIParams): optional parameters

        Returns:
            dict: validator set
        """
        # The REST API accepts the literal path segment "latest" as well as
        # a concrete block height.
        x = "latest" if height is None else height
        return await self._c._get(f"/cosmos/base/tendermint/v1beta1/validatorsets/{x}", params)

    async def block_info(self, height: Optional[int] = None, params: Optional[APIParams] = None) -> dict:
        """Fetches the block information for a given height. If no height is given, defaults to latest block.

        Args:
            height (int, optional): block height.
            params (APIParams): optional parameters

        Returns:
            dict: block info
        """
        x = "latest" if height is None else height
        return await self._c._get(f"/cosmos/base/tendermint/v1beta1/blocks/{x}", params)
class TendermintAPI(AsyncTendermintAPI):
    """Blocking counterpart of :class:`AsyncTendermintAPI`.

    Each stub below is wrapped by ``sync_bind`` around the matching
    coroutine of the async class — presumably running it to completion
    synchronously (see ``._base.sync_bind``; confirm there). The ``pass``
    bodies are never executed; the decorator supplies the behavior, and
    the docstrings are copied over explicitly afterwards.
    """

    @sync_bind(AsyncTendermintAPI.node_info)
    def node_info(self, params: Optional[APIParams] = None) -> dict:
        pass

    node_info.__doc__ = AsyncTendermintAPI.node_info.__doc__

    @sync_bind(AsyncTendermintAPI.syncing)
    def syncing(self, params: Optional[APIParams] = None) -> bool:
        pass

    syncing.__doc__ = AsyncTendermintAPI.syncing.__doc__

    @sync_bind(AsyncTendermintAPI.validator_set)
    def validator_set(self, height: Optional[int] = None, params: Optional[APIParams] = None) -> dict:
        pass

    validator_set.__doc__ = AsyncTendermintAPI.validator_set.__doc__

    @sync_bind(AsyncTendermintAPI.block_info)
    def block_info(self, height: Optional[int] = None, params: Optional[APIParams] = None) -> dict:
        pass

    block_info.__doc__ = AsyncTendermintAPI.block_info.__doc__
|
PypiClean
|
/discord-pda-1.0.1a0.tar.gz/discord-pda-1.0.1a0/pda/shard.py
|
from __future__ import annotations
import asyncio
import logging
import aiohttp
from .state import AutoShardedConnectionState
from .client import Client
from .backoff import ExponentialBackoff
from .gateway import *
from .errors import (
ClientException,
HTTPException,
GatewayNotFound,
ConnectionClosed,
PrivilegedIntentsRequired,
)
from .enums import Status
from typing import TYPE_CHECKING, Any, Callable, Tuple, Type, Optional, List, Dict, TypeVar
if TYPE_CHECKING:
from .gateway import DiscordWebSocket
from .activity import BaseActivity
from .enums import Status
# TypeVar used by EventItem's comparison methods below.
EI = TypeVar('EI', bound='EventItem')
# Public API of this module.
__all__ = (
    'AutoShardedClient',
    'ShardInfo',
)
# Module-level logger.
_log = logging.getLogger(__name__)
class EventType:
    # Integer priorities for shard gateway events. Lower values sort first:
    # EventItem orders by this field and AutoShardedClient.connect reads the
    # items out of an asyncio.PriorityQueue.
    close = 0
    reconnect = 1
    resume = 2
    identify = 3
    terminate = 4
    clean_close = 5
class EventItem:
    """Prioritised wrapper for a shard gateway event.

    Ordering, equality and hashing are based solely on the numeric event
    type (see :class:`EventType`) so instances can live in a priority
    queue; ``shard`` and ``error`` carry the payload but never influence
    comparisons.
    """

    __slots__ = ('type', 'shard', 'error')

    def __init__(self, etype: int, shard: Optional['Shard'], error: Optional[Exception]) -> None:
        self.type: int = etype
        self.shard: Optional['Shard'] = shard
        self.error: Optional[Exception] = error

    def __lt__(self: EI, other: EI) -> bool:
        # Only comparable against other EventItem instances.
        if isinstance(other, EventItem):
            return self.type < other.type
        return NotImplemented

    def __eq__(self: EI, other: EI) -> bool:
        if isinstance(other, EventItem):
            return self.type == other.type
        return NotImplemented

    def __hash__(self) -> int:
        # Consistent with __eq__: hash only the event type.
        return hash(self.type)
class Shard:
    """Wraps one gateway websocket connection plus its read-loop task.

    The worker task polls the websocket and translates failures into
    :class:`EventItem` objects pushed onto the parent client's priority
    queue, where ``AutoShardedClient.connect`` decides how to react
    (resume, identify, reconnect, terminate, or close).
    """

    def __init__(self, ws: DiscordWebSocket, client: AutoShardedClient, queue_put: Callable[[EventItem], None]) -> None:
        self.ws: DiscordWebSocket = ws
        self._client: Client = client
        self._dispatch: Callable[..., None] = client.dispatch
        # Callable that enqueues an EventItem on the client's queue.
        self._queue_put: Callable[[EventItem], None] = queue_put
        self.loop: asyncio.AbstractEventLoop = self._client.loop
        self._disconnect: bool = False
        # Mirrors the client's reconnect policy at construction time.
        self._reconnect = client._reconnect
        self._backoff: ExponentialBackoff = ExponentialBackoff()
        # The currently running worker() task, if launched.
        self._task: Optional[asyncio.Task] = None
        # Exception types treated as recoverable connection problems; any
        # other exception terminates the whole client (see worker()).
        self._handled_exceptions: Tuple[Type[Exception], ...] = (
            OSError,
            HTTPException,
            GatewayNotFound,
            ConnectionClosed,
            aiohttp.ClientError,
            asyncio.TimeoutError,
        )

    @property
    def id(self) -> int:
        """The shard ID of the underlying websocket."""
        # DiscordWebSocket.shard_id is set in the from_client classmethod
        return self.ws.shard_id  # type: ignore

    def launch(self) -> None:
        """Start the read-loop task on the client's event loop."""
        self._task = self.loop.create_task(self.worker())

    def _cancel_task(self) -> None:
        # Cancel the worker task if it is still running.
        if self._task is not None and not self._task.done():
            self._task.cancel()

    async def close(self) -> None:
        """Stop the worker and close the websocket with a clean 1000 code."""
        self._cancel_task()
        await self.ws.close(code=1000)

    async def disconnect(self) -> None:
        """Close the shard and dispatch the ``shard_disconnect`` event."""
        await self.close()
        self._dispatch('shard_disconnect', self.id)

    async def _handle_disconnect(self, e: Exception) -> None:
        # Classify a connection failure and enqueue the appropriate event.
        self._dispatch('disconnect')
        self._dispatch('shard_disconnect', self.id)
        if not self._reconnect:
            # Reconnecting is disabled: ask the client to close.
            self._queue_put(EventItem(EventType.close, self, e))
            return
        if self._client.is_closed():
            return
        if isinstance(e, OSError) and e.errno in (54, 10054):
            # If we get Connection reset by peer then always try to RESUME the connection.
            exc = ReconnectWebSocket(self.id, resume=True)
            self._queue_put(EventItem(EventType.resume, self, exc))
            return
        if isinstance(e, ConnectionClosed):
            if e.code == 4014:
                # Close code 4014: missing privileged intents — unrecoverable.
                self._queue_put(EventItem(EventType.terminate, self, PrivilegedIntentsRequired(self.id)))
                return
            if e.code != 1000:
                # Any non-clean close code is treated as fatal for the client.
                self._queue_put(EventItem(EventType.close, self, e))
                return
        # Otherwise back off exponentially, then request a reconnect.
        retry = self._backoff.delay()
        _log.error('Attempting a reconnect for shard ID %s in %.2fs', self.id, retry, exc_info=e)
        await asyncio.sleep(retry)
        self._queue_put(EventItem(EventType.reconnect, self, e))

    async def worker(self) -> None:
        """Read-loop: poll the websocket until the client closes or an error occurs."""
        while not self._client.is_closed():
            try:
                await self.ws.poll_event()
            except ReconnectWebSocket as e:
                # The gateway asked us to resume or re-identify.
                etype = EventType.resume if e.resume else EventType.identify
                self._queue_put(EventItem(etype, self, e))
                break
            except self._handled_exceptions as e:
                # Recoverable network error; let the handler decide next steps.
                await self._handle_disconnect(e)
                break
            except asyncio.CancelledError:
                break
            except Exception as e:
                # Unknown failure: terminate the whole client.
                self._queue_put(EventItem(EventType.terminate, self, e))
                break

    async def reidentify(self, exc: ReconnectWebSocket) -> None:
        """Re-open the websocket, resuming or re-identifying per *exc*."""
        self._cancel_task()
        self._dispatch('disconnect')
        self._dispatch('shard_disconnect', self.id)
        _log.info('Got a request to %s the websocket at Shard ID %s.', exc.op, self.id)
        try:
            # Keep the old session/sequence so a RESUME can pick up where we left off.
            coro = DiscordWebSocket.from_client(
                self._client,
                resume=exc.resume,
                shard_id=self.id,
                session=self.ws.session_id,
                sequence=self.ws.sequence,
            )
            self.ws = await asyncio.wait_for(coro, timeout=60.0)
        except self._handled_exceptions as e:
            await self._handle_disconnect(e)
        except asyncio.CancelledError:
            return
        except Exception as e:
            self._queue_put(EventItem(EventType.terminate, self, e))
        else:
            self.launch()

    async def reconnect(self) -> None:
        """Open a brand-new websocket connection (no resume) and relaunch the worker."""
        self._cancel_task()
        try:
            coro = DiscordWebSocket.from_client(self._client, shard_id=self.id)
            self.ws = await asyncio.wait_for(coro, timeout=60.0)
        except self._handled_exceptions as e:
            await self._handle_disconnect(e)
        except asyncio.CancelledError:
            return
        except Exception as e:
            self._queue_put(EventItem(EventType.terminate, self, e))
        else:
            self.launch()
class ShardInfo:
    """A class that gives information and control over a specific shard.

    You can retrieve this object via :meth:`AutoShardedClient.get_shard`
    or :attr:`AutoShardedClient.shards`.

    .. versionadded:: 1.4

    Attributes
    ------------
    id: :class:`int`
        The shard ID for this shard.
    shard_count: Optional[:class:`int`]
        The shard count for this cluster. If this is ``None`` then the bot has not started yet.
    """

    __slots__ = ('_parent', 'id', 'shard_count')

    def __init__(self, parent: Shard, shard_count: Optional[int]) -> None:
        self._parent: Shard = parent
        self.id: int = parent.id
        self.shard_count: Optional[int] = shard_count

    def is_closed(self) -> bool:
        """:class:`bool`: Whether the shard connection is currently closed."""
        return not self._parent.ws.open

    async def disconnect(self) -> None:
        """|coro|

        Disconnects a shard. When this is called, the shard connection will no
        longer be open.

        If the shard is already disconnected this does nothing.
        """
        # Only act when the connection is actually open.
        if not self.is_closed():
            await self._parent.disconnect()

    async def reconnect(self) -> None:
        """|coro|

        Disconnects and then connects the shard again.
        """
        if not self.is_closed():
            await self._parent.disconnect()
        await self._parent.reconnect()

    async def connect(self) -> None:
        """|coro|

        Connects a shard. If the shard is already connected this does nothing.
        """
        # A no-op unless the connection is currently closed.
        if self.is_closed():
            await self._parent.reconnect()

    @property
    def latency(self) -> float:
        """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds for this shard."""
        return self._parent.ws.latency

    def is_ws_ratelimited(self) -> bool:
        """:class:`bool`: Whether the websocket is currently rate limited.

        This can be useful to know when deciding whether you should query members
        using HTTP or via the gateway.

        .. versionadded:: 1.6
        """
        return self._parent.ws.is_ratelimited()
class AutoShardedClient(Client):
    """A client similar to :class:`Client` except it handles the complications
    of sharding for the user into a more manageable and transparent single
    process bot.

    When using this client, you will be able to use it as-if it was a regular
    :class:`Client` with a single shard when implementation wise internally it
    is split up into multiple shards. This allows you to not have to deal with
    IPC or other complicated infrastructure.

    It is recommended to use this client only if you have surpassed at least
    1000 guilds.

    If no :attr:`.shard_count` is provided, then the library will use the
    Bot Gateway endpoint call to figure out how many shards to use.

    If a ``shard_ids`` parameter is given, then those shard IDs will be used
    to launch the internal shards. Note that :attr:`.shard_count` must be provided
    if this is used. By default, when omitted, the client will launch shards from
    0 to ``shard_count - 1``.

    Attributes
    ------------
    shard_ids: Optional[List[:class:`int`]]
        An optional list of shard_ids to launch the shards with.
    """

    if TYPE_CHECKING:
        _connection: AutoShardedConnectionState

    def __init__(self, *args: Any, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any) -> None:
        # A fixed shard_id makes no sense for an auto-sharded client.
        kwargs.pop('shard_id', None)
        self.shard_ids: Optional[List[int]] = kwargs.pop('shard_ids', None)
        super().__init__(*args, loop=loop, **kwargs)
        if self.shard_ids is not None:
            if self.shard_count is None:
                raise ClientException('When passing manual shard_ids, you must provide a shard_count.')
            elif not isinstance(self.shard_ids, (list, tuple)):
                raise ClientException('shard_ids parameter must be a list or a tuple.')
        # instead of a single websocket, we have multiple
        # the key is the shard_id
        self.__shards = {}
        self._connection._get_websocket = self._get_websocket
        self._connection._get_client = lambda: self
        # EventItem sorts by EventType value, so lower-numbered (more severe)
        # events are handled first by connect().
        self.__queue = asyncio.PriorityQueue()

    def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket:
        # Resolve the websocket that owns a guild (or an explicit shard id).
        if shard_id is None:
            # guild_id won't be None if shard_id is None and shard_count won't be None here
            # Shard id derived from the guild snowflake: (guild_id >> 22) % shard_count.
            shard_id = (guild_id >> 22) % self.shard_count  # type: ignore
        return self.__shards[shard_id].ws

    def _get_state(self, **options: Any) -> AutoShardedConnectionState:
        # Sharded clients use the sharded connection-state implementation.
        return AutoShardedConnectionState(
            dispatch=self.dispatch,
            handlers=self._handlers,
            hooks=self._hooks,
            http=self.http,
            loop=self.loop,
            **options,
        )

    @property
    def latency(self) -> float:
        """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.

        This operates similarly to :meth:`Client.latency` except it uses the average
        latency of every shard's latency. To get a list of shard latency, check the
        :attr:`latencies` property. Returns ``nan`` if there are no shards ready.
        """
        if not self.__shards:
            return float('nan')
        return sum(latency for _, latency in self.latencies) / len(self.__shards)

    @property
    def latencies(self) -> List[Tuple[int, float]]:
        """List[Tuple[:class:`int`, :class:`float`]]: A list of latencies between a HEARTBEAT and a HEARTBEAT_ACK in seconds.

        This returns a list of tuples with elements ``(shard_id, latency)``.
        """
        return [(shard_id, shard.ws.latency) for shard_id, shard in self.__shards.items()]

    def get_shard(self, shard_id: int) -> Optional[ShardInfo]:
        """Optional[:class:`ShardInfo`]: Gets the shard information at a given shard ID or ``None`` if not found."""
        try:
            parent = self.__shards[shard_id]
        except KeyError:
            return None
        else:
            return ShardInfo(parent, self.shard_count)

    @property
    def shards(self) -> Dict[int, ShardInfo]:
        """Mapping[int, :class:`ShardInfo`]: Returns a mapping of shard IDs to their respective info object."""
        return {shard_id: ShardInfo(parent, self.shard_count) for shard_id, parent in self.__shards.items()}

    async def launch_shard(self, gateway: str, shard_id: int, *, initial: bool = False) -> None:
        # Open one shard's websocket, retrying forever with a 5-second pause.
        try:
            coro = DiscordWebSocket.from_client(self, initial=initial, gateway=gateway, shard_id=shard_id)
            ws = await asyncio.wait_for(coro, timeout=180.0)
        except Exception:
            _log.exception('Failed to connect for shard_id: %s. Retrying...', shard_id)
            await asyncio.sleep(5.0)
            # NOTE(review): the retry does not forward ``initial`` — the flag
            # is silently dropped on subsequent attempts; confirm intended.
            return await self.launch_shard(gateway, shard_id)
        # keep reading the shard while others connect
        self.__shards[shard_id] = ret = Shard(ws, self, self.__queue.put_nowait)
        ret.launch()

    async def launch_shards(self) -> None:
        # Determine the shard count (from the Bot Gateway endpoint if not
        # configured) and launch every shard sequentially.
        if self.shard_count is None:
            self.shard_count, gateway = await self.http.get_bot_gateway()
        else:
            gateway = await self.http.get_gateway()
        self._connection.shard_count = self.shard_count
        shard_ids = self.shard_ids or range(self.shard_count)
        self._connection.shard_ids = shard_ids
        for shard_id in shard_ids:
            # Only the first shard is marked "initial".
            initial = shard_id == shard_ids[0]
            await self.launch_shard(gateway, shard_id, initial=initial)
        self._connection.shards_launched.set()

    async def connect(self, *, reconnect: bool = True) -> None:
        # Launch all shards, then service their events from the priority
        # queue until the client is closed.
        self._reconnect = reconnect
        await self.launch_shards()
        while not self.is_closed():
            item = await self.__queue.get()
            if item.type == EventType.close:
                # A shard hit an unrecoverable close: shut everything down
                # and re-raise fatal close codes to the caller.
                await self.close()
                if isinstance(item.error, ConnectionClosed):
                    if item.error.code != 1000:
                        raise item.error
                    if item.error.code == 4014:
                        raise PrivilegedIntentsRequired(item.shard.id) from None
                return
            elif item.type in (EventType.identify, EventType.resume):
                await item.shard.reidentify(item.error)
            elif item.type == EventType.reconnect:
                await item.shard.reconnect()
            elif item.type == EventType.terminate:
                await self.close()
                raise item.error
            elif item.type == EventType.clean_close:
                return

    async def close(self) -> None:
        """|coro|

        Closes the connection to pda.
        """
        if self.is_closed():
            return
        self._closed = True
        # Best-effort: force-disconnect voice clients, ignoring failures.
        for vc in self.voice_clients:
            try:
                await vc.disconnect(force=True)
            except Exception:
                pass
        # NOTE(review): the explicit ``loop=`` argument to ensure_future is
        # deprecated in newer asyncio versions — confirm target Python range.
        to_close = [asyncio.ensure_future(shard.close(), loop=self.loop) for shard in self.__shards.values()]
        if to_close:
            await asyncio.wait(to_close)
        await self.http.close()
        # Wake up connect() so it returns cleanly.
        self.__queue.put_nowait(EventItem(EventType.clean_close, None, None))

    async def change_presence(
        self,
        *,
        activity: Optional[BaseActivity] = None,
        status: Optional[Status] = None,
        shard_id: Optional[int] = None,
    ) -> None:
        """|coro|

        Changes the client's presence.

        Example: ::

            game = pda.Game("with the API")
            await client.change_presence(status=pda.Status.idle, activity=game)

        .. versionchanged:: 2.0
            Removed the ``afk`` keyword-only parameter.

        Parameters
        ----------
        activity: Optional[:class:`BaseActivity`]
            The activity being done. ``None`` if no currently active activity is done.
        status: Optional[:class:`Status`]
            Indicates what status to change to. If ``None``, then
            :attr:`Status.online` is used.
        shard_id: Optional[:class:`int`]
            The shard_id to change the presence to. If not specified
            or ``None``, then it will change the presence of every
            shard the bot can see.

        Raises
        ------
        InvalidArgument
            If the ``activity`` parameter is not of proper type.
        """
        # Map the Status enum to the wire-format string the gateway expects.
        if status is None:
            status_value = 'online'
            status_enum = Status.online
        elif status is Status.offline:
            status_value = 'invisible'
            status_enum = Status.offline
        else:
            status_enum = status
            status_value = str(status)
        if shard_id is None:
            # Broadcast the presence change on every shard.
            for shard in self.__shards.values():
                await shard.ws.change_presence(activity=activity, status=status_value)
            guilds = self._connection.guilds
        else:
            shard = self.__shards[shard_id]
            await shard.ws.change_presence(activity=activity, status=status_value)
            guilds = [g for g in self._connection.guilds if g.shard_id == shard_id]
        # Mirror the change onto the cached member objects for consistency.
        activities = () if activity is None else (activity,)
        for guild in guilds:
            me = guild.me
            if me is None:
                continue
            # Member.activities is typehinted as Tuple[ActivityType, ...], we may be setting it as Tuple[BaseActivity, ...]
            me.activities = activities  # type: ignore
            me.status = status_enum

    def is_ws_ratelimited(self) -> bool:
        """:class:`bool`: Whether the websocket is currently rate limited.

        This can be useful to know when deciding whether you should query members
        using HTTP or via the gateway.

        This implementation checks if any of the shards are rate limited.
        For more granular control, consider :meth:`ShardInfo.is_ws_ratelimited`.

        .. versionadded:: 1.6
        """
        return any(shard.ws.is_ratelimited() for shard in self.__shards.values())
|
PypiClean
|
/microsoft_kiota_serialization_json-0.4.0.tar.gz/microsoft_kiota_serialization_json-0.4.0/CHANGELOG.md
|
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.4.0] - 2023-07-27
### Added
### Changed
- Enabled backing store support
## [0.3.7] - 2023-07-04
### Added
### Changed
- Fixes the key assignment to the writer in write_bytes_value.
## [0.3.6] - 2023-06-27
### Added
### Changed
- Fixed a bug with loading json response in method to get root parse node.
## [0.3.5] - 2023-06-14
### Added
- Added support for composed types (de)serialization.
### Changed
- Fixed a bug with assigning field values.
## [0.3.4] - 2023-05-17
### Added
### Changed
- Fixed a bug with assigning field values.
## [0.3.3] - 2023-04-27
### Added
### Changed
- Fixed a bug with deserializing collection of enum values.
## [0.3.2] - 2023-04-27
### Added
### Changed
- Fixed a bug with deserializing models with additional data.
## [0.3.1] - 2023-03-20
### Added
### Changed
- Fixed a bug with deserializing bytes responses.
## [0.3.0] - 2023-03-09
### Added
### Changed
- Stopped snake-casing API response keys to prevent key-mismatch scenarios.
- Fixed a bug with getting child node using deserializer identifier
## [0.2.2] - 2023-02-21
### Added
### Changed
- Fixed a bug with deserializing 'None' string values in enums.
|
PypiClean
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.