```python
def slack(value):
    """
    Returns a markdown-style link to any Slack channels mentioned in the text
    (must be prefixed with #)
    """
    # `regex` is expected to be a module-level compiled pattern with a
    # `channel_id` named group that matches "#<channel>" mentions
    new_value = value
    it = regex.finditer(value)
    for m in it:
        r = m.groupdict()
        channel = r['channel_id']
        new_value = new_value.replace(
            m.group(),
            '[#%s](https://wpilnl.slack.com/app_redirect?channel=%s)' % (channel, channel)
        )
    return new_value
```
```python
def load_channels(archived=False):
    """
    Get a list of all the public channels in Slack

    :param archived: Boolean - Include archived channels
    :returns: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.conversations_list(exclude_archived=not archived)
        assert response['ok'] is True

        channels = []
        for channel in response['channels']:
            channels.append((channel['id'], channel['name']))
        return {'ok': True, 'channels': channels}
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
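Note that `conversations_list` is paginated, so the call above only sees the first page of results (100 channels by default). A hedged sketch of following Slack's cursor to collect every channel, reusing the same `WebClient`:

```python
def load_all_channels(client):
    """Collect every public channel by following Slack's pagination cursor."""
    channels = []
    cursor = None
    while True:
        response = client.conversations_list(cursor=cursor, limit=200)
        channels.extend((c['id'], c['name']) for c in response['channels'])
        cursor = response.get('response_metadata', {}).get('next_cursor')
        if not cursor:  # Slack returns an empty cursor on the last page
            return channels
```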
```python
def join_channel(channel):
    """
    If the app gets the 'not_in_channel' error when accessing a public channel, call this method

    :param channel: The channel to join
    :returns: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.conversations_join(channel=channel)
        assert response['ok'] is True
        return {'ok': response['ok']}
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
```python
def upload(attachment, filename, title=None, message=None, channels=None):
    """
    Upload a new file to Slack

    :param attachment: File path to the file
    :param filename: Filename with file extension (i.e. example.pdf)
    :param title: Title of the file to display in Slack
    :param message: The message text introducing the file in the specified ``channels``
    :param channels: Comma-separated list of channel names or ids where the file should be posted (i.e. C1234567890)
    :returns: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)
    client.timeout = 600

    try:
        if channels:
            response = client.files_upload(channels=channels, file=attachment, filename=filename,
                                           initial_comment=message, title=title)
        else:
            response = client.files_upload(file=attachment, filename=filename, title=title)
        assert response['ok'] is True
        return {'ok': True, 'file': response['file']}
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
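Recent `slack_sdk` releases deprecate `files_upload` in favor of `files_upload_v2`. If the project upgrades, the call would change roughly as sketched below; this is an assumption about a future migration, not part of the original code (v2 takes a single `channel` ID rather than a comma-separated `channels` string):

```python
# Hedged sketch, assuming slack_sdk >= 3.19 where files_upload_v2 is available
response = client.files_upload_v2(
    channel='C1234567890',    # illustrative channel ID
    file=attachment,
    filename=filename,
    title=title,
    initial_comment=message,
)
```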
```python
def slack_post(channel, thread=None, text=None, content=None, username=None, icon_url=None, attachment=None):
    """
    Post a message on Slack

    The `text` parameter is not required when the `content` parameter is provided, however including it
    is still highly recommended.

    :param channel: The identifier of the Slack conversation to post to
    :param thread: The timestamp of another message to post this message as a reply to
    :param text: Message text (Formatting: https://api.slack.com/reference/surfaces/formatting)
    :param content: List of valid blocks data (https://api.slack.com/block-kit)
    :param username: Name displayed by the bot
    :param icon_url: The URL to an image / icon to display next to the message (profile picture)
    :param attachment: Dictionary with file details - {'name': 'Example File', 'filepath': '/media/slack/example.pdf'}
    :returns: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    if attachment:
        filename = attachment['filepath'].split('/')[-1]
        return upload(attachment['filepath'], filename, attachment['name'], text, channel)

    if content:
        try:
            if username:
                response = client.chat_postMessage(channel=channel, thread_ts=thread, blocks=content, text=text,
                                                   username=username, icon_url=icon_url)
            else:
                response = client.chat_postMessage(channel=channel, thread_ts=thread, blocks=content, text=text)
            assert response['ok'] is True
            return {'ok': True, 'message': response['message']}
        except SlackApiError as e:
            assert e.response['ok'] is False
            return e.response
    elif text:
        try:
            if username:
                response = client.chat_postMessage(channel=channel, thread_ts=thread, text=text,
                                                   username=username, icon_url=icon_url)
            else:
                response = client.chat_postMessage(channel=channel, thread_ts=thread, text=text)
            assert response['ok'] is True
            return {'ok': True, 'message': response['message']}
        except SlackApiError as e:
            assert e.response['ok'] is False
            return e.response
    return {'ok': False, 'error': 'no_text'}
```
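A minimal usage sketch (the channel ID and message text are illustrative):

```python
result = slack_post('C1234567890', text='The meeting starts in 10 minutes!')
if not result['ok']:
    print("Slack post failed: %s" % result.get('error', 'unknown_error'))
```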
```python
def post_ephemeral(channel, text, user, username=None):
    """
    Send an ephemeral message to a user in a channel. This message will only be visible to the
    target user.

    :param channel: The identifier of the Slack conversation to post to
    :param text: Message text (Formatting: https://api.slack.com/reference/surfaces/formatting)
    :param user: The identifier of the specified user
    :param username: Name displayed by the bot
    :return: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.chat_postEphemeral(channel=channel, text=text, user=user, username=username)
        assert response['ok'] is True
        return response
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
```python
def retrieve_message(channel, message_id):
    """
    Retrieve a single message from Slack

    :param channel: The channel the message was posted to
    :param message_id: The timestamp of the message
    :return: The message details
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.conversations_history(channel=channel, latest=message_id, inclusive=True, limit=1)
        assert response['ok'] is True
        return response
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
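Because `conversations_history` is called with `latest=<ts>`, `inclusive=True`, and `limit=1`, a successful response contains exactly the requested message as the first element of `messages`. A hedged usage sketch with illustrative identifiers:

```python
resp = retrieve_message('C1234567890', '1606426980.000100')
if resp['ok'] and resp.get('messages'):
    message = resp['messages'][0]
    print(message['text'])
```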
```python
def replace_message(channel, message_id, text=None, content=None):
    """
    Replace an existing message in Slack. The message will need to have been published by the bot.

    The `text` parameter is not required when the `content` parameter is provided, however including it
    is still highly recommended.

    :param channel: The identifier of the Slack conversation the message was posted to
    :param message_id: The timestamp of the message to be updated
    :param text: Message text (Formatting: https://api.slack.com/reference/surfaces/formatting)
    :param content: List of valid blocks data (https://api.slack.com/block-kit)
    :return: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    if content or text:
        try:
            response = client.chat_update(channel=channel, ts=message_id, as_user=True, text=text,
                                          blocks=content, link_names=True)
            assert response['ok'] is True
            return {'ok': True, 'message': response['message']}
        except SlackApiError as e:
            assert e.response['ok'] is False
            return e.response
    return {'ok': False, 'error': 'no_text'}
```
```python
def user_add(channel, users):
    """
    Invite users to join a Slack channel. The bot must be a member of the channel.

    :param channel: The identifier of the Slack channel to invite the users to
    :param users: The identifiers of the specified users (List of up to 1000)
    :return: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.conversations_invite(channel=channel, users=users)
        assert response['ok'] is True
        return {'ok': response['ok']}
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
```python
def user_kick(channel, user):
    """
    Remove a user from a Slack channel. The bot must be a member of the channel.

    :param channel: The identifier of the Slack channel to remove users from
    :param user: The identifier of the specified user
    :return: Response object (Dictionary)
    """
    if not settings.SLACK_TOKEN:
        return {'ok': False, 'error': 'config_error'}

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.conversations_kick(channel=channel, user=user)
        assert response['ok'] is True
        return {'ok': response['ok']}
    except SlackApiError as e:
        assert e.response['ok'] is False
        return e.response
```
```python
def lookup_user(email):
    """
    Search for a user in the Slack workspace using their email address

    :param email: The email address for the user
    :return: The identifier for the user in Slack (`None` if the search returns nothing)
    """
    if not settings.SLACK_TOKEN:
        return None

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.users_lookupByEmail(email=email)
        assert response['ok'] is True
        return response['user']['id']
    except SlackApiError as e:
        assert e.response['ok'] is False
        return None
```
```python
def open_modal(trigger_id, blocks):
    """
    Opens a modal view (in Slack) in response to user action

    :param trigger_id: The trigger id provided by the API during the user's last interaction
    :param blocks: Block configuration (https://api.slack.com/block-kit)
    :return: View ID if successful; None otherwise
    """
    if not settings.SLACK_TOKEN:
        return None

    client = WebClient(token=settings.SLACK_TOKEN)

    try:
        response = client.views_open(trigger_id=trigger_id, view=blocks)
        assert response['ok'] is True
        return response['view']['id']
    except SlackApiError as e:
        assert e.response['ok'] is False
        return None
```
```python
def handle_event(request):
    """
    Event endpoint for the Slack API. Slack will send POST requests here whenever certain events
    have been triggered.
    """
    payload = json.loads(request.body)
    if payload['type'] == "url_verification":
        return JsonResponse({"challenge": payload['challenge']})
    elif payload['type'] == "event_callback":
        event = payload['event']
        if event['type'] == "team_join":
            slack_post(event['user']['id'], text="Welcome to LNL!", content=views.welcome_message())
        elif event['type'] == "app_home_opened":
            load_app_home(event['user'])
        return HttpResponse()
    return HttpResponse("Not implemented")
```
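Slack recommends verifying that event payloads were actually signed by Slack before acting on them, and this endpoint doesn't show that step. A minimal sketch using `slack_sdk`'s `SignatureVerifier`, assuming a `SLACK_SIGNING_SECRET` setting that is not part of the original code:

```python
from slack_sdk.signature import SignatureVerifier

verifier = SignatureVerifier(signing_secret=settings.SLACK_SIGNING_SECRET)

def is_valid_slack_request(request):
    """Check Slack's HMAC signature header before trusting the payload."""
    return verifier.is_valid_request(request.body, dict(request.headers))
```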
```python
def handle_interaction(request):
    """
    Interaction endpoint for the Slack API. Slack will send POST requests here when users interact
    with a shortcut or interactive component.
    """
    payload = json.loads(request.POST['payload'])
    interaction_type = payload.get('type', None)

    # Handle shortcut
    if interaction_type == "shortcut":
        callback_id = payload.get('callback_id', None)
        if callback_id == "tfed":
            blocks = views.tfed_modal()
            modal_id = open_modal(payload.get('trigger_id', None), blocks)
            if modal_id:
                return HttpResponse()
            return HttpResponseServerError("Failed to open modal")

    # Handle modal view submission
    if interaction_type == "view_submission":
        values = payload['view']['state']['values']
        callback_id = payload['view'].get('callback_id', None)

        # TFed ticket submission
        if callback_id == "tfed-modal":
            subject = values['subject']['subject-action']['value']
            description = values['description']['description-action']['value']
            topic = values['rt_topic']['rt_topic-action']['selected_option']['value']
            user_id = payload['user']['id']
            user = user_profile(user_id)
            if user['ok']:
                __create_ticket(user, subject, description, topic)
                return HttpResponse()
            return HttpResponseServerError("Failed to obtain user information")

        # Update TFed ticket
        elif callback_id == "ticket-update-modal":
            ticket_info = payload['view']['blocks'][1]
            owner_id = None
            if ticket_info['type'] != "divider":
                ticket_info = payload['view']['blocks'][2]
                owner_id = values['ticket_assignee']['ticket_assignee-action']['selected_user']
            ticket_id = ticket_info['block_id'].split("#")[0]
            channel = ticket_info['block_id'].split("#")[1]
            ts = ticket_info['block_id'].split("#")[2]
            status = values['ticket_status']['ticket_status-action']['selected_option']
            if status:
                status = status['value']
            comments = values['ticket_comment']['ticket_comment-action']['value']
            checkboxes = values['email_requestor']['email_requestor-action']['selected_options']
            notify_requestor = len(checkboxes) > 0

            # Obtain user's RT token
            user_id = payload['user']['id']
            token = __retrieve_rt_token(user_id)

            __update_ticket(ticket_id, status, owner_id, comments, notify_requestor, token, user_id,
                            channel, ts)
            return HttpResponse()
        elif callback_id == "ticket-comment-modal":
            ticket_id = payload['view']['blocks'][0]['block_id']
            comments = values[ticket_id]['comment-action']['value']
            user_id = payload['user']['id']
            token = __retrieve_rt_token(user_id)
            __post_ticket_comment(ticket_id, user_id, comments, token)
            return HttpResponse()
        return HttpResponseNotFound()

    # Handle block interaction event
    if interaction_type == "block_actions":
        action = payload['actions'][0]['action_id']
        channel = payload.get('channel', None)
        if channel:
            channel = channel['id']
        message = payload.get('message', None)
        view = payload.get('view', None)

        # TFed message
        if channel in [settings.SLACK_TARGET_TFED, settings.SLACK_TARGET_TFED_DB] and message and not view:
            ticket_id = message['blocks'][0]['block_id'].split('~')[0]
            blocks = views.ticket_update_modal(ticket_id, channel, message['ts'], action)

            # Get current ticket from RT
            __refresh_ticket_async(channel, message)

            # Check that user has token, if not display a warning
            user_id = payload['user']['id']
            token = __retrieve_rt_token(user_id)
            if not token:
                error_message = "Hi there! Before you can update tickets, you'll need to set up access to your RT " \
                                "account. Visit https://lnl.wpi.edu" + reverse("support:link-account") + \
                                " to get started."
                post_ephemeral(channel, error_message, user_id, 'Request Tracker')
                return HttpResponse()

            modal_id = open_modal(payload.get('trigger_id', None), blocks)
            if modal_id:
                return HttpResponse()
            return HttpResponseServerError("Failed to open modal")

        # Home tab menu options
        if action == "home-ticket-update":
            ticket_id = payload['actions'][0]['block_id']
            option = payload['actions'][0]['selected_option']['value']
            if option == 'Comment':
                blocks = views.ticket_comment_modal(ticket_id)
                modal_id = open_modal(payload.get('trigger_id', None), blocks)
                if not modal_id:
                    return HttpResponseServerError("Failed to open modal")
            return HttpResponse()
    return HttpResponseNotFound()
```
```python
def __create_ticket(user, subject, description, topic):
    """
    Handler for creating a new TFed ticket

    :param user: The user that submitted the ticket
    :param subject: The ticket's subject line
    :param description: The contents of the ticket
    :param topic: The Queue in RT to post the ticket to
    """
    target = settings.SLACK_TARGET_TFED
    if topic == 'Database':
        target = settings.SLACK_TARGET_TFED_DB
    user_email = user['user']['profile'].get('email', '[email protected]')
    display_name = user['user']['profile']['real_name']
    resp = rt_api.create_ticket(topic, user_email, subject, description + "\n\n- " + display_name)
    ticket_id = resp.get('id', None)
    if ticket_id:
        ticket_info = {
            "url": 'https://lnl-rt.wpi.edu/rt/Ticket/Display.html?id=' + ticket_id,
            "id": ticket_id,
            "subject": subject,
            "description": description,
            "status": "New",
            "assignee": None,
            "reporter": user['user']['name']
        }
        ticket = views.tfed_ticket(ticket_info)
        slack_post(target, text=description, content=ticket, username='Request Tracker')
        return
    error_message = "Whoops! It appears something went wrong while attempting to submit your request. " \
                    "Please wait a few minutes then try again. If the problem persists, please email " \
                    "us directly at [email protected]."
    post_ephemeral(target, error_message, user['user']['id'], username="Request Tracker")
```
```python
def __update_ticket(ticket_id, status, owner_id, comments, notify_requestor, token, user_id, channel, ts):
    """
    Handler for updating an existing TFed ticket

    :param ticket_id: The ticket number
    :param status: The new status to assign to the ticket in RT
    :param owner_id: The Slack user ID for the ticket owner (who the ticket will be assigned to)
    :param comments: Comments to add to the ticket history
    :param notify_requestor: If True, the ticket creator will receive an email with the comments
    :param token: The RT auth token for the user that triggered this action
    :param user_id: The Slack user ID for the user that triggered this action
    :param channel: The identifier of the Slack channel this ticket was posted to
    :param ts: The timestamp of the original ticket message in Slack
    """
    # Update ticket metadata
    owner = user_profile(owner_id)
    username = ''
    if owner['ok']:
        username = owner['user']['profile'].get('email', '').split('@')[0]
    resp = rt_api.update_ticket(ticket_id, token, status, username)
    if rt_api.permission_error(resp):
        error_message = "Sorry, it appears you do not have permission to perform this action."
        post_ephemeral(channel, error_message, user_id, 'Request Tracker')
        return

    # Update ticket in Slack
    current_message = retrieve_message(channel, ts)
    if current_message.get('error', '') == 'not_in_channel':
        join_channel(channel)
        current_message = retrieve_message(channel, ts)
    resp = refresh_ticket_message(channel, current_message['messages'][0])
    if not resp['ok']:
        logger.warning("Failed to update ticket in Slack. Please check RT to see if your changes were applied.")

    # Post comments / replies, if applicable
    if comments:
        slack_user = user_profile(user_id)
        display_name = slack_user['user']['profile']['real_name']
        resp = rt_api.ticket_comment(ticket_id, comments + "\n\n- " + display_name, notify_requestor, token=token)
        if rt_api.permission_error(resp):
            error_message = "Sorry, it appears you do not have permission to perform this action."
            post_ephemeral(channel, error_message, user_id, 'Request Tracker')
            return
        profile_photo = slack_user['user']['profile']['image_original']
        slack_post(channel, ts, comments, username=display_name, icon_url=profile_photo)
```
```python
def __post_ticket_comment(ticket_id, user_id, comments, token):
    """
    Comment on a TFed ticket (background process).

    :param ticket_id: The ticket number
    :param user_id: The Slack user ID for the user that triggered the action
    :param comments: The comments to be added to the ticket
    :param token: The RT auth token for the user that triggered the action (if applicable)
    """
    user = user_profile(user_id)
    display_name = user['user']['profile']['real_name']
    rt_api.ticket_comment(ticket_id, comments + "\n\n- " + display_name, True, token=token)
```
```python
def refresh_ticket_message(channel, message):
    """
    Update a TFed ticket message with the latest information

    :param channel: The channel the ticket was posted to
    :param message: The original message object
    :return: Response from Slack API after attempting to update the message
    """
    ticket_id = message['blocks'][0]['block_id'].split('~')[0]
    ticket_reporter = message['blocks'][0]['block_id'].split('~')[1]
    ticket_description = message['blocks'][1]['text']['text']
    ticket = rt_api.fetch_ticket(ticket_id)
    if ticket.get('message'):
        return {"ok": False}
    ticket_owner = ticket['Owner']['id']
    if ticket_owner == "Nobody":
        ticket_owner = None
    ticket_info = {
        "url": 'https://lnl-rt.wpi.edu/rt/Ticket/Display.html?id=' + ticket_id,
        "id": ticket_id,
        "subject": ticket.get('Subject'),
        "description": ticket_description,
        "status": ticket.get('Status').capitalize(),
        "assignee": ticket_owner,
        "reporter": ticket_reporter
    }
    new_message = views.tfed_ticket(ticket_info)
    return replace_message(channel, message['ts'], ticket_description, new_message)
```
```python
def __refresh_ticket_async(channel, message):
    """
    Update a TFed ticket message with the latest information in the background

    :param channel: The channel the ticket was posted to
    :param message: The original message object
    """
    resp = refresh_ticket_message(channel, message)
    if not resp['ok']:
        logger.warning("Failed to update ticket in Slack. Please check RT to see if your changes were applied.")
```
```python
def __retrieve_rt_token(user_id):
    """
    Retrieve a user's RT auth token (if it exists)

    :param user_id: The Slack user's identifier
    :return: Auth token; `None` if it doesn't exist
    """
    slack_user = user_profile(user_id)
    if slack_user['ok']:
        username = slack_user['user']['profile'].get('email', '').split('@')[0]
        user = get_user_model().objects.filter(username=username).first()
        if user:
            prefs = UserPreferences.objects.filter(user=user).first()
            if prefs and prefs.rt_token:
                cipher_suite = Fernet(settings.RT_CRYPTO_KEY)
                return cipher_suite.decrypt(prefs.rt_token.encode('utf-8')).decode('utf-8')
    return None
```
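For context, the write path would be the mirror image of this: encrypt the raw token with the same site-wide Fernet key before persisting it on `UserPreferences`. A minimal sketch under that assumption (the project may store it differently):

```python
from cryptography.fernet import Fernet

def store_rt_token(prefs, raw_token):
    """Encrypt an RT auth token with the site-wide key, then save it."""
    cipher_suite = Fernet(settings.RT_CRYPTO_KEY)
    prefs.rt_token = cipher_suite.encrypt(raw_token.encode('utf-8')).decode('utf-8')
    prefs.save()
```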
```python
def has_perm(self, perm, obj=None):
    """
    Returns True if the user has the specified permission. This method queries all available auth
    backends, but returns immediately if any backend returns True. Thus, a user who has permission
    from a single auth backend is assumed to have permission in general. If an object is provided,
    permissions for this specific object are checked.

    This differs from the default in that superusers, while still having every permission, are only
    allowed after the backend logic has executed. This helps catch typos in permission strings.
    """
    has_perm = _user_has_perm(self, perm, obj)

    # Active superusers have all permissions.
    if self.is_active and self.is_superuser:
        return True
    return has_perm
```
```python
def is_complete(self):
    """
    Returns False if the user's profile is incomplete. Incomplete profiles trigger a persistent
    reminder prompting the user to fill in the missing details.
    """
    return self.first_name and self.last_name and self.email and (not self.is_lnl or self.class_year)
```
```python
def all_orgs(self):
    """All organizations the user is associated with"""
    return Organization.objects.complex_filter(
        Q(user_in_charge=self) | Q(associated_users=self)
    ).distinct()
```
```python
def path_and_rename(instance, filename):
    """
    Determine the path for storing officer headshots. Renames the file after the officer's username.

    :param instance: An OfficerImg instance
    :param filename: The original name of the uploaded file
    :returns: New path to save file to
    """
    upload_to = 'officers'
    ext = filename.split('.')[-1]
    if instance.officer.get_username():
        filename = "{}.{}".format(instance.officer.get_username(), ext)
    return os.path.join(upload_to, filename)
```
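Django calls a callable `upload_to` with the model instance and the original filename, so this function would be wired to the image field roughly as follows. The model definition isn't shown in the source, so the field types here are assumptions; only the `officer` and `img` field names are grounded in the surrounding code:

```python
from django.conf import settings
from django.db import models

class OfficerImg(models.Model):
    officer = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    img = models.ImageField(upload_to=path_and_rename)
```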
```python
def officer_img_cleanup(sender, instance, **kwargs):
    """
    When an instance of OfficerImg is deleted, delete the respective files as well.

    :param instance: An OfficerImg instance
    """
    instance.img.delete(False)
```
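This handler only fires if it is connected to Django's `post_delete` signal somewhere in the app; a minimal sketch of that wiring, assuming it lives alongside the model:

```python
from django.db.models.signals import post_delete

# The `False` argument to instance.img.delete(False) above skips
# re-saving the model, which is already being deleted at this point
post_delete.connect(officer_img_cleanup, sender=OfficerImg)
```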
```python
def updateevent(request, mtg_id, event_id):
    """
    Update crew chief assignments for an event

    :param mtg_id: The primary key value of the meeting (redirects to meeting detail page)
    :param event_id: The primary key value of the event (pre-2019 events only)
    """
    context = {}
    perms = ('meetings.edit_mtg',)
    event = get_object_or_404(BaseEvent, pk=event_id)
    if not (request.user.has_perms(perms) or request.user.has_perms(perms, event)):
        raise PermissionDenied
    context['event'] = event.event_name
    cc_formset = inlineformset_factory(BaseEvent, EventCCInstance, extra=3, exclude=[])
    cc_formset.form = curry_class(CCIForm, event=event)
    if request.method == 'POST':
        formset = cc_formset(request.POST, instance=event, prefix="main")
        if formset.is_valid():
            formset.save()
            url = reverse('meetings:detail', args=(mtg_id,)) + "#events"
            return HttpResponseRedirect(url)
    else:
        formset = cc_formset(instance=event, prefix="main")
    context['formset'] = formset
    return render(request, 'formset_crispy_helpers.html', context)
```
```python
def download_invite(request, mtg_id):
    """ Generate and download an ics file """
    meeting = get_object_or_404(Meeting, pk=mtg_id)
    invite = generate_ics([meeting], None)
    response = HttpResponse(invite, content_type="text/calendar")
    response['Content-Disposition'] = "attachment; filename=invite.ics"
    return response
```
```python
def __core(self):
    """
    Private function that sets up the feedforward neuron-astrocyte network

    :return: poisson_spikes: list
    :return: pre_2_post_conn: nx.Connection
    :return: post_neurons: nx.CompartmentGroup
    :return: astrocyte: combra.Astrocyte
    """
    """ define spike generator as presynaptic neurons """
    pre_neurons = self.net.createSpikeGenProcess(self.pre_num)
    random_spikes = np.random.rand(self.pre_num, self.sim_time) < (self.pre_fr / 1000.)
    poisson_spikes = [np.where(random_spikes[num, :])[0].tolist() for num in range(self.pre_num)]
    # add spikes to spike generator
    pre_neurons.addSpikes(
        spikeInputPortNodeIds=[num for num in range(self.pre_num)],
        spikeTimes=poisson_spikes
    )
    """ define post synaptic neurons """
    post_neurons_prototype = nx.CompartmentPrototype(
        vThMant=self.post_vth,
        compartmentCurrentDecay=self.post_cdecay,
        compartmentVoltageDecay=self.post_vdecay,
        functionalState=nx.COMPARTMENT_FUNCTIONAL_STATE.IDLE
    )
    post_neurons = self.net.createCompartmentGroup(
        size=self.post_num,
        prototype=post_neurons_prototype
    )
    """ define astrocyte """
    astrocyte = Astrocyte(self.net)
    """ define connection between presynaptic neurons and postsynaptic neurons """
    pre_2_post_conn_prototype = nx.ConnectionPrototype()
    mask = np.int_(np.random.rand(self.post_num, self.pre_num) < self.pre_post_conn_p)
    weight = self.pre_post_w * mask
    pre_2_post_conn = pre_neurons.connect(
        post_neurons,
        prototype=pre_2_post_conn_prototype,
        connectionMask=mask,
        weight=weight
    )
    """ define connection between neurons and astrocyte """
    astrocyte.connectInputNeurons(pre_neurons, self.pre_num)
    astrocyte.connectOutputNeurons(post_neurons, self.post_num)
    """ return """
    return poisson_spikes, pre_2_post_conn, post_neurons, astrocyte
```
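The `self.pre_fr / 1000.` term converts a firing rate in Hz into a per-millisecond spike probability, i.e. a Bernoulli approximation of a Poisson process at 1 ms resolution. A standalone illustration with made-up numbers:

```python
import numpy as np

rate_hz, sim_time_ms = 40.0, 1000                 # illustrative values
spikes = np.random.rand(sim_time_ms) < (rate_hz / 1000.)
spike_times = np.where(spikes)[0].tolist()        # same encoding passed to addSpikes
print(len(spike_times))                           # expected spike count ~= 40
```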
```python
def __core(self):
    """
    Private function for the core of the Astrocyte computation

    The point astrocyte consists of 4 compartments:
    spike_receiver: spiking compartment that receives all spikes from presynaptic neurons
    ip3_integrator: slow spiking compartment that integrates spikes from spike_receiver
    sic_generator: non-spiking compartment that generates the SIC voltage from ip3 spikes
    spike_generator: spiking compartment

    :return: spike_receiver: nx.Compartment
    :return: sr_2_ip3_conn: nx.Connection
    :return: ip3_integrator: nx.Compartment
    :return: ip3_2_sic_conn: nx.Connection
    :return: sic_generator: nx.Compartment
    :return: spike_generator: nx.CompartmentGroup
    """
    spike_receiver_prototype = nx.CompartmentPrototype(
        vThMant=self.srVThMant,
        compartmentCurrentDecay=self.srCurrentDecay,
        compartmentVoltageDecay=self.srVoltageDecay,
        activityImpulse=self.srActivityImpulse,
        activityTimeConstant=self.srActivityTimeConstant,
        enableHomeostasis=self.srEnableHomeostasis,
        maxActivity=self.srMaxActivity,
        minActivity=self.srMinActivity,
        homeostasisGain=self.srHomeostasisGain,
        functionalState=nx.COMPARTMENT_FUNCTIONAL_STATE.IDLE
    )
    ip3_integrator_prototype = nx.CompartmentPrototype(
        vThMant=self.ip3VThMant,
        compartmentCurrentDecay=self.ip3CurrentDecay,
        compartmentVoltageDecay=self.ip3VoltageDecay,
        functionalState=nx.COMPARTMENT_FUNCTIONAL_STATE.IDLE
    )
    sic_generator_prototype = nx.CompartmentPrototype(
        compartmentCurrentDecay=self.sicCurrentDecay,
        compartmentVoltageDecay=self.sicVoltageDecay,
        thresholdBehavior=nx.COMPARTMENT_THRESHOLD_MODE.NO_SPIKE_AND_PASS_V_LG_VTH_TO_PARENT,
        functionalState=nx.COMPARTMENT_FUNCTIONAL_STATE.IDLE,
        stackOut=nx.COMPARTMENT_OUTPUT_MODE.PUSH
    )
    spike_generator_prototype = nx.CompartmentPrototype(
        vThMant=self.sgVThMant,
        compartmentCurrentDecay=self.sgCurrentDecay,
        compartmentVoltageDecay=self.sgVoltageDecay,
        functionalState=nx.COMPARTMENT_FUNCTIONAL_STATE.IDLE,
        compartmentJoinOperation=nx.COMPARTMENT_JOIN_OPERATION.ADD,
        stackIn=nx.COMPARTMENT_INPUT_MODE.POP_A
    )
    sr_2_ip3_conn_prototype = nx.ConnectionPrototype(signMode=2, numWeightBits=8, weight=self.sr2ip3Weight)
    ip3_2_sic_conn_prototype = nx.ConnectionPrototype(signMode=2, numWeightBits=8, weight=self.ip32sicWeight)
    """ Astrocyte model part 1: simulate IP3 integration """
    spike_receiver = self.net.createCompartment(prototype=spike_receiver_prototype)
    ip3_integrator = self.net.createCompartment(prototype=ip3_integrator_prototype)
    sr_2_ip3_conn = spike_receiver.connect(ip3_integrator, prototype=sr_2_ip3_conn_prototype)
    """ Astrocyte model part 2: simulate SIC """
    sic_generator = self.net.createCompartment(prototype=sic_generator_prototype)
    spike_generator_tmp = self.net.createCompartment(prototype=spike_generator_prototype)
    spike_generator = self.net.createCompartmentGroup()
    spike_generator.addCompartments([spike_generator_tmp])
    ip3_2_sic_conn = ip3_integrator.connect(sic_generator, prototype=ip3_2_sic_conn_prototype)
    """ return """
    return [spike_receiver, sr_2_ip3_conn, ip3_integrator, ip3_2_sic_conn, sic_generator, spike_generator]
```
```python
def SavePlot(figure: matplotlib.figure.Figure, directory: str, name: str, filetype: str):
    """
    Save a matplotlib figure to a file

    :param figure: matplotlib figure
    :param directory: directory for the file
    :param name: name of the file
    :param filetype: file type of the saved figure (only png and svg are supported)
    :return:
    """
    fileName = directory + name
    print("Plot " + name + " is saved to file: " + fileName + ".")
    if filetype == 'svg':
        figure.savefig(fileName + '.svg', format='svg')
    elif filetype == 'png':
        figure.savefig(fileName + '.png', format='png')
    else:
        print("File type " + filetype + " is not supported by PlotHelper.")
```
def SavePlot(figure: matplotlib.figure.Figure, directory: str, name: str, filetype: str): """ Save matplotlib figure to a file :param figure: matplotlib figure :param directory: directory to the file :param name: name of the file :param filetype: file type of saved figure (only support png and svg) :return: """ fileName = directory + name print("Plot " + name + " is save to file: " + fileName + ".") if filetype == 'svg': figure.savefig(fileName + '.svg', format='svg') elif filetype == 'png': figure.savefig(fileName + '.png', format='png') else: print("File type " + filetype + " is not supported by PlotHelper.")
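A minimal usage sketch; note that `directory` must include its trailing separator, since the path is built by plain string concatenation:

import matplotlib
matplotlib.use('Agg')  # headless backend for scripted use
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
SavePlot(fig, './', 'example_plot', 'png')  # writes ./example_plot.png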
Python
def MultiRowVoltagePlot(name: str, directory: str, data: np.ndarray, filetype: str): """ Plot multiple rows of voltage data for each compartment separately :param name: name of the figure :param directory: directory to the file :param data: data of the figure from probe :param filetype: file type of saved figure :return: figure: matplotlib figure """ if type(data) == list: data = np.array(data).reshape(1, len(data)) row_num = data.shape[0] col_num = data.shape[1] x_list = np.arange(col_num) figure_size = (col_num / 500., row_num * 2) figure, ax = plt.subplots(row_num, 1, sharex='col', figsize=figure_size) if row_num == 1: ax = [ax] for num in range(row_num): ax[row_num - 1 - num].plot(x_list, data[num, :]) ax[row_num - 1 - num].set_ylabel(str(num)) ax[0].set_title(name) plt.xlabel("Simulation time (ms)") SavePlot(figure, directory, name, filetype) return figure
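For example, plotting three synthetic 1000 ms voltage traces (random-walk placeholder data):

import numpy as np

voltages = np.cumsum(np.random.randn(3, 1000), axis=1)  # 3 compartments, 1000 steps
MultiRowVoltagePlot('synthetic_voltages', './', voltages, 'png')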
Python
def MultiRowCurrentPlot(name: str, directory: str, data: np.ndarray, filetype: str): """ Plot multiple rows of current data for each compartment separately :param name: name of the figure :param directory: directory to the file :param data: data of the figure from probe :param filetype: file type of saved figure :return: figure: matplotlib figure """ figure = MultiRowVoltagePlot(name, directory, data, filetype) return figure
Python
def FiringRateCompute(data: np.ndarray, window: int): """ Compute firing rate of single or multiple neurons using sliding window :param data: data of neuron spikes :param window: window size in ms :return: fr_data: data of firing rates :return: fr_x: x axis of firing rates """ if type(data) == list: data = np.array(data).reshape(1, len(data)) row_num = data.shape[0] col_num = data.shape[1] fr_data = np.zeros((row_num, col_num - window)) for num in range(col_num-window): fr_data[:, num] = data[:, num:num+window].sum(axis=1) / (window / 1000.) fr_x = np.arange(col_num - window) + int(window / 2) return fr_data, fr_x
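A quick sanity check on a synthetic ~50 Hz Poisson train; the `window / 1000.` scaling assumes one time step per millisecond:

import numpy as np

spikes = (np.random.rand(1, 10000) < 0.05).astype(int)  # ~50 Hz at 1 ms steps
fr_data, fr_x = FiringRateCompute(spikes, window=500)
print(fr_data.mean())  # should be close to 50 (Hz)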
Python
def FiringRateComputeGap(data: np.ndarray): """ Compute firing rate of single or multiple neurons using spike gap time :param data: data of neuron spikes :return: fr_data: data of firing rates :return: fr_x: x axis of firing rates """ if type(data) == list: data = np.array(data).reshape(1, len(data)) row_num = data.shape[0] col_num = data.shape[1] fr_data = np.zeros((row_num, col_num)) fr_x = np.arange(col_num) spike_times = Spikes2SpikeTime(data) for r in range(row_num): for t in range(len(spike_times[r]) - 1): firing_rate = 1000. / (spike_times[r][t+1] - spike_times[r][t]) fr_data[r, spike_times[r][t]:spike_times[r][t+1]] = firing_rate return fr_data, fr_x
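`Spikes2SpikeTime` is defined elsewhere in the module; for reference, this self-contained single-row sketch applies the same inter-spike-interval logic:

import numpy as np

def gap_firing_rate(spikes):
    """Rate between consecutive spikes: 1000 / inter-spike interval (ms)."""
    fr = np.zeros(spikes.size)
    times = np.nonzero(spikes)[0]
    for a, b in zip(times[:-1], times[1:]):
        fr[a:b] = 1000.0 / (b - a)
    return fr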
Python
def FiringRatePlot(name: str, directory: str, data: np.ndarray, filetype: str, enable_gap=False, window=250):
    """
    Plot firing rate of spike data

    :param name: name of the figure
    :param directory: directory of the file
    :param data: data of the figure in neuron spikes
    :param filetype: file type of saved figure
    :param enable_gap: whether to use spike gaps to compute the firing rate
    :param window: window size in ms
    :return: figure: matplotlib figure
    """
    if type(data) == list:
        data = np.array(data).reshape(1, len(data))
    row_num = data.shape[0]
    col_num = data.shape[1]
    if col_num < window:
        window = int(col_num / 4)
    if enable_gap:
        fr_data, fr_x = FiringRateComputeGap(data)
    else:
        fr_data, fr_x = FiringRateCompute(data, window)
    figure_size = (col_num / 500., row_num * 2)
    figure, ax = plt.subplots(row_num, 1, sharex='col', figsize=figure_size)
    if row_num == 1:
        ax = [ax]
    for num in range(row_num):
        ax[row_num - 1 - num].plot(fr_x, fr_data[num, :])
        ax[row_num - 1 - num].set_ylabel(str(num))
    ax[0].set_title(name)
    plt.xlabel("Simulation time (ms)")
    SavePlot(figure, directory, name, filetype)
    return figure
Python
def SpikeTime2Spikes(spike_times: list, time_steps):
    """
    Transform spike times to spikes of each time step in a ndarray

    :param spike_times: time of spikes
    :param time_steps: number of time steps
    :return: spike_data: ndarray of spikes
    """
    row_num = len(spike_times)
    spike_data = np.zeros((row_num, time_steps))
    for num in range(row_num):
        spike_data[num, spike_times[num]] = 1
    spike_data = np.int_(spike_data)
    return spike_data
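For example:

spike_times = [[3, 7, 9], [0, 5]]  # two neurons, spike times in time steps
spikes = SpikeTime2Spikes(spike_times, time_steps=12)
print(spikes[0])  # [0 0 0 1 0 0 0 1 0 1 0 0]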
Python
def ip3Sensitivity(self):
    """
    Read the ip3 sensitivity, i.e. the ip3 integrator spike time, in ms

    :return:
    """
    return self._ip3Sensitivity
Python
def sicAmplitude(self):
    """
    Read the sic amplitude, i.e. the maximum sic spike generator firing rate, in Hz

    :return:
    """
    return self._sicAmplitude
Python
def sicWindow(self):
    """
    Read the sic window, i.e. the sic spike generator spike window, in ms

    :return:
    """
    return self._sicWindow
Python
def ip3Sensitivity(self, val): """ Set ip3 sensitivity and transform into Loihi Parameters :param val: ip3 spike time in ms :return: """ self._validate_ip3_sensitivity(val) self._ip3Sensitivity = val self.sr2ip3Weight = self._ip3Sensitivity
Python
def sicAmplitude(self, val): """ Set sic amplitude and transform into Loihi Parameters :param val: sic firing rate in hz :return: """ self._validate_sic_firing_rate(val) self._sicAmplitude = val self.ip32sicWeight, self.sicCurrentDecay = AstrocytePrototypeBase._calculate_sic_props(self._sicAmplitude, self._sicWindow) self.sicCurrentDecay = int(self.sicCurrentDecay * 2 ** 12)
Python
def sicWindow(self, val): """ Set sic window and transform into Loihi Parameters :param val: sic firing window in ms :return: """ self._validate_sic_window(val) self._sicWindow = val self.ip32sicWeight, self.sicCurrentDecay = AstrocytePrototypeBase._calculate_sic_props(self._sicAmplitude, self._sicWindow) self.sicCurrentDecay = int(self.sicCurrentDecay * 2 ** 12)
Python
def _calculate_sic_props(firing_rate, window_size):
    """
    Calculate the optimal values to achieve closest specifications to those provided for the SIC.

    :param firing_rate: target SIC firing rate in Hz
    :param window_size: target SIC window in ms
    :return: ip32sicWeight, sicCurrentDecay
    """
    configs = np.load(os.path.join(os.path.dirname(__file__), "sic_data_table.npy"))
    # Seed with the first config, then scan the full table for the lowest cost.
    optimal_config = configs[0]
    min_diff = AstrocytePrototypeBase._calc_diff(optimal_config[2], optimal_config[3], firing_rate, window_size)
    for config in configs:
        cost = AstrocytePrototypeBase._calc_diff(config[2], config[3], firing_rate, window_size)
        if min_diff > cost:
            min_diff = cost
            optimal_config = config
    return optimal_config[0], optimal_config[1]
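The lookup is a brute-force nearest-neighbour search over a precomputed table. A sketch with a synthetic table and an assumed squared-error cost (the real `_calc_diff` and the `[weight, decay, rate, window]` row layout of `sic_data_table.npy` may differ):

import numpy as np

def calc_diff(rate, window, target_rate, target_window):
    # Assumed cost: normalised squared error in both dimensions.
    return ((rate - target_rate) / target_rate) ** 2 \
        + ((window - target_window) / target_window) ** 2

# Synthetic [weight, decay, rate_hz, window_ms] rows standing in for the table.
configs = np.array([[10, 0.01, 20, 400], [20, 0.02, 50, 250], [30, 0.04, 80, 150]])
best = min(configs, key=lambda c: calc_diff(c[2], c[3], 55, 240))
print(best[0], best[1])  # weight and decay of the closest config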
Python
def parse_gcov_file(gcov_file):
    """Parses the content of .gcov file written by gcov -i

    Returns:
        str: Source file name
        dict: coverage info { line_number: hits }
    """
    count = {}
    src_file = None
    with open(gcov_file) as fh:
        for line in fh:
            # Split on the first ':' only, so values containing ':' stay intact.
            tag, value = line.split(':', 1)
            if tag == 'file':
                src_file = value.rstrip()
            elif tag == 'lcount':
                line_num, exec_count = value.split(',')
                count[int(line_num)] = int(exec_count)
    return src_file, count
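The `gcov -i` intermediate format is plain `tag:value` lines, so a round-trip check needs only a temporary file:

import os
import tempfile

content = "file:src/foo.c\nlcount:1,5\nlcount:2,0\nlcount:7,12\n"
with tempfile.NamedTemporaryFile('w', suffix='.gcov', delete=False) as fh:
    fh.write(content)
try:
    src, counts = parse_gcov_file(fh.name)
    assert src == 'src/foo.c' and counts == {1: 5, 2: 0, 7: 12}
finally:
    os.remove(fh.name)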
Python
def run_gcov(filename, coverage, args):
    """
    * run gcov on given file
    * parse generated .gcov files and update coverage structure
    * delete .gcov files
    """
    if args.verbose:
        warn("calling:", 'gcov', '-i', filename)
        stdout = None
    else:
        # gcov is noisy and doesn't have a quiet flag, so redirect stdout to /dev/null
        stdout = subprocess.DEVNULL
    subprocess.check_call(['gcov', '-i', filename], stdout=stdout)
    for gcov_file in glob('*.gcov'):
        if args.verbose:
            warn('parsing', gcov_file)
        src_file, count = parse_gcov_file(gcov_file)
        os.remove(gcov_file)
        if src_file not in coverage:
            coverage[src_file] = defaultdict(int, count)
        else:
            # sum execution counts
            for line, exe_cnt in count.items():
                coverage[src_file][line] += exe_cnt
Python
def _dump_luacov_stats(statsfile, coverage):
    """
    Saves data to the luacov stats file. An existing file is overwritten.
    """
    src_files = sorted(coverage)
    with open(statsfile, 'w') as fh:
        for src in src_files:
            stats = " ".join(str(n) for n in coverage[src])
            fh.write("%s:%s\n%s\n" % (len(coverage[src]), src, stats))
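The resulting stats format is a `<line_count>:<path>` header followed by the per-line hit counts, for example:

coverage = {'a.lua': [1, 0, 3], 'b.lua': [2]}
_dump_luacov_stats('luacov.stats.out', coverage)
# luacov.stats.out now contains:
# 3:a.lua
# 1 0 3
# 1:b.lua
# 2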
Python
def do_GET(self):
    """Respond to a GET request."""
    response = b"hello world"
    if self.path == "/empty":
        self.finish()
        return
    if self.path == "/timeout":
        time.sleep(2)
    if self.path == "/error_403":
        self.send_response(403)
    else:
        self.send_response(200)
    if self.path == "/content-length":
        self.send_header("Content-Length", str(len(response)))
    self.send_header("Content-type", "text/plain")
    self.end_headers()
    self.wfile.write(response)
    self.log_message("to be closed: %d, headers: %s, conn:'%s'" % (self.close_connection, str(self.headers), self.headers.get('Connection', "").lower()))
    conntype = self.headers.get('Connection', "").lower()
    if conntype != 'keep-alive':
        self.close_connection = True
    self.log_message("ka:'%s', pv:%s[%s]" % (str(conntype == 'keep-alive'), str(self.protocol_version >= "HTTP/1.1"), self.protocol_version))
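These handlers can be exercised with the standard library alone. A loopback sketch, assuming `do_GET` above is available at module scope (in the original suite it lives on a BaseHTTPRequestHandler subclass):

import threading
import urllib.request
from http.server import BaseHTTPRequestHandler, HTTPServer

class Handler(BaseHTTPRequestHandler):
    pass

Handler.do_GET = do_GET  # attach the handler defined above

server = HTTPServer(('127.0.0.1', 0), Handler)
threading.Thread(target=server.serve_forever, daemon=True).start()
url = 'http://127.0.0.1:%d/content-length' % server.server_port
print(urllib.request.urlopen(url).read())  # b'hello world'
server.shutdown()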
Python
def do_POST(self):
    """Respond to a POST request."""
    response = b"hello post"
    if self.path == "/empty":
        self.finish()
        return
    if self.path == "/timeout":
        time.sleep(2)
    if self.path == "/error_403":
        self.send_response(403)
    else:
        self.send_response(200)
    if self.path == "/content-length":
        self.send_header("Content-Length", str(len(response)))
    self.send_header("Content-type", "text/plain")
    self.end_headers()
    self.wfile.write(response)
Python
def create_hive_client(port):
    """
    Creates a HMS client to an externally running metastore service at the provided port
    """
    trans_type = 'buffered'
    if pytest.config.option.use_kerberos:
        trans_type = 'kerberos'
    hive_transport = create_transport(
        host=pytest.config.option.metastore_server.split(':')[0],
        port=port,
        service=pytest.config.option.hive_service_name,
        transport_type=trans_type)
    protocol = TBinaryProtocol.TBinaryProtocol(hive_transport)
    hive_client = ThriftHiveMetastore.Client(protocol)
    hive_transport.open()
    return hive_client, hive_transport
Python
def create_impala_clients(cls):
    """Creates Impala clients for all supported protocols."""
    # The default connection (self.client) is Beeswax so that existing tests, which assume
    # Beeswax, do not need modification (yet).
    cls.client = cls.create_impala_client(protocol='beeswax')
    cls.hs2_client = None
    try:
        cls.hs2_client = cls.create_impala_client(protocol='hs2')
    except Exception as e:
        # HS2 connection can fail for benign reasons, e.g. running with unsupported auth.
        LOG.info("HS2 connection setup failed, continuing...: {0}".format(e))
    cls.hs2_http_client = None
    try:
        cls.hs2_http_client = cls.create_impala_client(protocol='hs2-http')
    except Exception as e:
        # HS2 HTTP connection can fail for benign reasons, e.g. running with unsupported
        # auth.
        LOG.info("HS2 HTTP connection setup failed, continuing...: {0}".format(e))
Python
def close_impala_clients(cls): """Closes Impala clients created by create_impala_clients().""" if cls.client: cls.client.close() cls.client = None if cls.hs2_client: cls.hs2_client.close() cls.hs2_client = None if cls.hs2_http_client: cls.hs2_http_client.close() cls.hs2_http_client = None
Python
def __verify_results_and_errors(self, vector, test_section, result, use_db):
    """Verifies that both results and error sections are as expected. Rewrites both
    by replacing $NAMENODE, $DATABASE and $IMPALA_HOME with their actual values, and
    optionally rewriting filenames with __HDFS_FILENAME__, to ensure that expected and
    actual values are easily compared.
    """
    replace_filenames_with_placeholder = True
    for section_name in ('RESULTS', 'ERRORS'):
        if section_name in test_section:
            if "$NAMENODE" in test_section[section_name]:
                replace_filenames_with_placeholder = False
            test_section[section_name] = test_section[section_name] \
                .replace('$NAMENODE', NAMENODE) \
                .replace('$IMPALA_HOME', IMPALA_HOME) \
                .replace('$USER', getuser()) \
                .replace('$FILESYSTEM_NAME', FILESYSTEM_NAME) \
                .replace('$INTERNAL_LISTEN_HOST', INTERNAL_LISTEN_HOST) \
                .replace('$INTERNAL_LISTEN_IP', INTERNAL_LISTEN_IP) \
                .replace('$MANAGED_WAREHOUSE_DIR', MANAGED_WAREHOUSE_DIR) \
                .replace('$EXTERNAL_WAREHOUSE_DIR', EXTERNAL_WAREHOUSE_DIR)
            if use_db:
                test_section[section_name] = test_section[section_name].replace('$DATABASE', use_db)
    result_section, type_section = 'RESULTS', 'TYPES'
    if vector.get_value('protocol').startswith('hs2'):  # hs2 or hs2-http
        if 'HS2_TYPES' in test_section:
            assert 'TYPES' in test_section,\
                "Base TYPES section must always be included alongside HS2_TYPES"
            # In some cases HS2 types are expected to differ from Beeswax types (e.g. see
            # IMPALA-914), so use the HS2-specific section if present.
            type_section = 'HS2_TYPES'
    verify_raw_results(test_section, result, vector.get_value('table_format').file_format,
                       result_section, type_section, pytest.config.option.update_results,
                       replace_filenames_with_placeholder)
Python
def execute_query_expect_failure(cls, impalad_client, query, query_options=None, user=None):
    """Executes a query that is expected to fail and returns the error; asserts if the
    query unexpectedly succeeds."""
    result = None
    try:
        result = cls.__execute_query(impalad_client, query, query_options, user)
    except Exception as e:
        return e
    assert not result.success, "No failure encountered for query %s" % query
    return result
Python
def exec_and_time(self, query, query_options=None, impalad=0):
    """Executes a given query on the given impalad and returns the time taken in
    milliseconds as seen by the client."""
    client = self.create_client_for_nth_impalad(impalad)
    if query_options is not None:
        client.set_configuration(query_options)
    start_time = int(round(time.time() * 1000))
    client.execute(query)
    end_time = int(round(time.time() * 1000))
    return end_time - start_time
Python
def exec_with_jdbc(self, stmt): """Pass 'stmt' to IMPALA via Impala JDBC client and execute it""" # execute_using_jdbc expects a Query object. Convert the query string into a Query # object query = Query() query.query_str = stmt # Run the statement targeting Impala exec_opts = JdbcQueryExecConfig(impalad=IMPALAD_HS2_HOST_PORT, transport='NOSASL') return execute_using_jdbc(query, exec_opts).data
Python
def exec_with_jdbc_and_compare_result(self, stmt, expected): """Execute 'stmt' via Impala JDBC client and compare the result with 'expected'""" result = self.exec_with_jdbc(stmt) # Check the results assert (result is not None) and (result == expected)
Python
def run_stmt_in_hive(self, stmt, username=None):
    """
    Run a statement in Hive, returning stdout if successful and throwing
    RuntimeError(stderr) if not.
    """
    # Remove HADOOP_CLASSPATH from environment. Beeline doesn't need it,
    # and doing so avoids Hadoop 3's classpath de-duplication code from
    # placing $HADOOP_CONF_DIR too late in the classpath to get the right
    # log4j configuration file picked up. Some log4j configuration files
    # in Hadoop's jars send logging to stdout, confusing Impala's test
    # framework.
    env = os.environ.copy()
    env.pop("HADOOP_CLASSPATH", None)
    call = subprocess.Popen(
        ['beeline',
         '--outputformat=csv2',
         '-u', 'jdbc:hive2://' + pytest.config.option.hive_server2,
         '-n', username or getuser(),
         '-e', stmt],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        # Beeline in Hive 2.1 will read from stdin even when "-e"
        # is specified; explicitly make sure there's nothing to
        # read to avoid hanging, especially when running interactively
        # with py.test.
        stdin=open(os.devnull),
        env=env)
    (stdout, stderr) = call.communicate()
    call.wait()
    if call.returncode != 0:
        raise RuntimeError(stderr)
    return stdout
Python
def wait_for_progress(self, handle, expected_progress, timeout, client=None): """Waits for the given query handle to reach expected progress rate""" if client is None: client = self.client start_time = time.time() summary = client.get_exec_summary(handle) while time.time() - start_time < timeout and \ self.__get_query_progress_rate(summary.progress) <= expected_progress: summary = client.get_exec_summary(handle) time.sleep(0.5) actual_progress = self.__get_query_progress_rate(summary.progress) if actual_progress <= expected_progress: raise Timeout("query {0} did not reach the expected progress {1}, " "current progress {2}".format(handle.get_handle().id, expected_progress, actual_progress)) return actual_progress
Python
def confirm_db_exists(self, db_name): """Confirm the database with 'db_name' is present in the impalad's local catalog. Fail if the db is not present""" # This will throw an exception if the database is not present. self.client.execute("describe database `{db_name}`".format(db_name=db_name)) return
Python
def confirm_table_exists(self, db_name, tbl_name): """Confirms if the table exists. The describe table command will fail if the table does not exist.""" self.client.execute("describe `{0}`.`{1}`".format(db_name, tbl_name)) return
Python
def assert_eventually(self, timeout_s, period_s, condition, error_msg=None): """Assert that the condition (a function with no parameters) returns True within the given timeout. The condition is executed every period_s seconds. The check assumes that once the condition returns True, it continues to return True. Throws a Timeout if the condition does not return true within timeout_s seconds. 'error_msg' is an optional function that must return a string. If set, the result of the function will be included in the Timeout error message.""" count = 0 start_time = time.time() while not condition() and time.time() - start_time < timeout_s: time.sleep(period_s) count += 1 if not condition(): error_msg_str = " error message: " + error_msg() if error_msg else "" raise Timeout( "Check failed to return True after {0} tries and {1} seconds{2}".format( count, timeout_s, error_msg_str))
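An illustrative call site; `self` is assumed to be a test-suite instance exposing this helper, and the condition here is a stand-in for a real check such as a metric appearing:

import time

deadline = time.time() + 5

def condition():
    return time.time() > deadline  # flips to True after ~5 seconds

self.assert_eventually(timeout_s=30, period_s=1, condition=condition,
                       error_msg=lambda: "still false at %s" % time.time())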
Python
def assert_impalad_log_contains(self, level, line_regex, expected_count=1, timeout_s=6): """ Convenience wrapper around assert_log_contains for impalad logs. """ self.assert_log_contains("impalad", level, line_regex, expected_count, timeout_s)
Python
def assert_catalogd_log_contains(self, level, line_regex, expected_count=1, timeout_s=6): """ Convenience wrapper around assert_log_contains for catalogd logs. """ self.assert_log_contains("catalogd", level, line_regex, expected_count, timeout_s)
Python
def assert_log_contains(self, daemon, level, line_regex, expected_count=1, timeout_s=6): """ Assert that the daemon log with specified level (e.g. ERROR, WARNING, INFO) contains expected_count lines with a substring matching the regex. When expected_count is -1, at least one match is expected. Retries until 'timeout_s' has expired. The default timeout is the default minicluster log buffering time (5 seconds) with a one second buffer. When using this method to check log files of running processes, the caller should make sure that log buffering has been disabled, for example by adding '-logbuflevel=-1' to the daemon startup options or set timeout_s to a value higher than the log flush interval. """ pattern = re.compile(line_regex) start_time = time.time() while True: try: found = 0 if hasattr(self, "impala_log_dir"): log_dir = self.impala_log_dir else: log_dir = EE_TEST_LOGS_DIR log_file_path = os.path.join(log_dir, daemon + "." + level) # Resolve symlinks to make finding the file easier. log_file_path = os.path.realpath(log_file_path) with open(log_file_path) as log_file: for line in log_file: if pattern.search(line): found += 1 if expected_count == -1: assert found > 0, "Expected at least one line in file %s matching regex '%s'"\ ", but found none." % (log_file_path, line_regex) else: assert found == expected_count, \ "Expected %d lines in file %s matching regex '%s', but found %d lines. "\ "Last line was: \n%s" %\ (expected_count, log_file_path, line_regex, found, line) return except AssertionError as e: # Re-throw the exception to the caller only when the timeout is expired. Otherwise # sleep before retrying. if time.time() - start_time > timeout_s: raise LOG.info("Expected log lines could not be found, sleeping before retrying: %s", str(e)) time.sleep(1)
Python
def home(): """List all available api routes.""" return ( f"Hawaii Climate Analysis<br/>" f"Available Routes:<br/>" f"/api/v1.0/precipitation<br/>" f"/api/v1.0/stations<br/>" f"/api/v1.0/tobs<br/>" f"/api/v1.0/YYYY-MM-DD<start><br/>" f"/api/v1.0/<start>/<end>" )
Python
def client(
    username: typing.Optional[str] = None,
    password: typing.Optional[str] = None,
    hostname: str = 'unspecified',
    service: str = 'host',
    channel_bindings: typing.Optional[GssChannelBindings] = None,
    context_req: ContextReq = ContextReq.default,
    protocol: str = 'negotiate',
    options: NegotiateOptions = NegotiateOptions.none,
    **kwargs: typing.Any,
) -> ContextProxy:
    """Create a client context to be used for authentication.

    Args:
        username: The username to authenticate with. Certain providers can use a cache if omitted.
        password: The password to authenticate with. Certain providers can use a cache if omitted.
        hostname: The principal part of the SPN. This is required for Kerberos auth to build the SPN.
        service: The service part of the SPN. This is required for Kerberos auth to build the SPN.
        channel_bindings: The optional :class:`spnego.channel_bindings.GssChannelBindings` for the context.
        context_req: The :class:`spnego.ContextReq` flags to use when setting up the context.
        protocol: The protocol to authenticate with, can be `ntlm`, `kerberos`, `negotiate`, or `credssp`.
        options: The :class:`spnego.NegotiateOptions` that define pyspnego specific options to control the
            negotiation.
        kwargs: Optional arguments to pass through to the authentication context.

    Returns:
        ContextProxy: The context proxy for a client.
    """
    return _new_context(username, password, hostname, service, channel_bindings, context_req, protocol, options,
                        'initiate', **kwargs)
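Typical client-side usage looks like the following sketch (NTLM chosen so no KDC is required; delivering the tokens to a real server, e.g. in an HTTP Authorization header, is out of scope here):

import base64
import spnego

ctx = spnego.client('user', 'pass', hostname='server01', protocol='ntlm')
negotiate = ctx.step()                       # first NTLM token (NEGOTIATE)
print(base64.b64encode(negotiate).decode())
# Subsequent rounds would call ctx.step(in_token) with the server's CHALLENGE.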
Python
def server(
    hostname: str = 'unspecified',
    service: str = 'host',
    channel_bindings: typing.Optional[GssChannelBindings] = None,
    context_req: ContextReq = ContextReq.default,
    protocol: str = 'negotiate',
    options: NegotiateOptions = NegotiateOptions.none,
    **kwargs: typing.Any,
) -> ContextProxy:
    """Create a server context to be used for authentication.

    Args:
        hostname: The principal part of the SPN. This is required for Kerberos auth to build the SPN.
        service: The service part of the SPN. This is required for Kerberos auth to build the SPN.
        channel_bindings: The optional :class:`spnego.channel_bindings.GssChannelBindings` for the context.
        context_req: The :class:`spnego.ContextReq` flags to use when setting up the context.
        protocol: The protocol to authenticate with, can be `ntlm`, `kerberos`, `negotiate`, or `credssp`.
        options: The :class:`spnego.NegotiateOptions` that define pyspnego specific options to control the
            negotiation.
        kwargs: Optional arguments to pass through to the authentication context.

    Returns:
        ContextProxy: The context proxy for a server.
    """
    return _new_context(None, None, hostname, service, channel_bindings, context_req, protocol, options,
                        'accept', **kwargs)
Python
def _enum_labels( value: typing.Union[int, str, enum.Enum], enum_type: typing.Optional[typing.Type] = None, ) -> typing.Dict[int, str]: """ Gets the human friendly labels of a known enum and what value they map to. """ def get_labels(v): return getattr(v, 'native_labels', lambda: {})() return get_labels(enum_type) if enum_type else get_labels(value)
Python
def parse_enum( value: typing.Union[int, str, enum.Enum], enum_type: typing.Optional[typing.Type] = None, ) -> str: """ Parses an IntEnum into a human representative object of that enum. """ enum_name = 'UNKNOWN' labels = _enum_labels(value, enum_type) value = int(value) if isinstance(value, int) else value for v, name in labels.items(): if value == v: enum_name = name break return "%s (%s)" % (enum_name, value)
Python
def parse_flags( value: typing.Union[int, enum.IntFlag], enum_type: typing.Optional[typing.Type] = None, ) -> typing.Dict[str, typing.Any]: """ Parses an IntFlag into each flag value that is set. """ raw_value = int(value) flags = [] labels = _enum_labels(value, enum_type) value = int(value) for v, name in labels.items(): if value & v == v: value &= ~v flags.append("%s (%d)" % (name, v)) if value != 0: flags.append('UNKNOWN (%d)' % value) return { 'raw': raw_value, 'flags': flags, }
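`_enum_labels` expects the enum type to expose a `native_labels()` callable returning an `{int: str}` map; a toy flag type shows the decomposition:

import enum

class ToyFlags(enum.IntFlag):
    read = 1
    write = 2
    admin = 4

    @classmethod
    def native_labels(cls):
        return {1: 'READ', 2: 'WRITE', 4: 'ADMIN'}

print(parse_flags(ToyFlags.read | ToyFlags.admin))
# {'raw': 5, 'flags': ['READ (1)', 'ADMIN (4)']}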
Python
def parse_kerberos_token( token: "KerberosV5Msg", secret: typing.Optional[str] = None, encoding: typing.Optional[str] = None, ) -> typing.Dict[str, typing.Any]: """ Parses a KerberosV5Msg object to a dict. """ if not encoding: encoding = 'utf-8' def parse_default(value: typing.Any) -> typing.Any: return value def parse_datetime(value: datetime.datetime) -> str: return value.isoformat() def parse_text(value: bytes) -> str: return to_text(value, encoding=encoding, errors='replace') def parse_bytes(value: bytes) -> str: return base64.b16encode(value).decode() def parse_principal_name(value: PrincipalName) -> typing.Dict[str, typing.Any]: return { 'name-type': parse_enum(value.name_type), 'name-string': [parse_text(v) for v in value.value], } def parse_host_address(value: HostAddress) -> typing.Dict[str, typing.Any]: return { 'addr-type': parse_enum(value.addr_type), 'address': parse_text(value.value), } def parse_token(value: typing.Any) -> typing.Dict[str, typing.Any]: return parse_kerberos_token(value, secret, encoding) if isinstance(token, bytes): return parse_bytes(token) msg = {} for name, attr_name, attr_type in token.PARSE_MAP: attr_value = getattr(token, attr_name) parse_args = [] if isinstance(attr_type, tuple): parse_args.append(attr_type[1]) attr_type = attr_type[0] parse_func = { ParseType.default: parse_default, ParseType.enum: parse_enum, ParseType.flags: parse_flags, ParseType.datetime: parse_datetime, ParseType.text: parse_text, ParseType.bytes: parse_bytes, ParseType.principal_name: parse_principal_name, ParseType.host_address: parse_host_address, ParseType.token: parse_token, }[attr_type] if attr_value is None: parsed_value = None elif isinstance(attr_value, list): parsed_value = [parse_func(v, *parse_args) if v is not None else None for v in attr_value] else: parsed_value = parse_func(attr_value, *parse_args) msg[name] = parsed_value return msg
Python
def unpack_hostname(value: ASN1Value) -> "HostAddress": """ Unpacks an ASN.1 value to a HostAddress. """ s = unpack_asn1_tagged_sequence(value) name_type = KerberosHostAddressType(get_sequence_value(s, 0, 'HostAddress', 'addr-type', unpack_asn1_integer)) name = get_sequence_value(s, 1, 'HostAddress', 'address', unpack_asn1_octet_string) return HostAddress(name_type, name)
Python
def unpack_principal_name(value: ASN1Value) -> "PrincipalName": """ Unpacks an ASN.1 value to a PrincipalName. """ s = unpack_asn1_tagged_sequence(value) name_type = KerberosPrincipalNameType(get_sequence_value(s, 0, 'PrincipalName', 'name-type', unpack_asn1_integer)) name = [unpack_asn1_general_string(n) for n in get_sequence_value(s, 1, 'PrincipalName', 'name-string', unpack_asn1_sequence)] return PrincipalName(name_type, name)
Python
def extract_asn1_tlv( tlv: typing.Union[bytes, ASN1Value], tag_class: TagClass, tag_number: typing.Union[int, TypeTagNumber], ) -> bytes: """ Extract the bytes and validates the existing tag of an ASN.1 value. """ if isinstance(tlv, ASN1Value): if tag_class == TagClass.universal: label_name = TypeTagNumber.native_labels().get(tag_number, 'Unknown tag type') msg = "Invalid ASN.1 %s tags, actual tag class %s and tag number %s" \ % (label_name, tlv.tag_class, tlv.tag_number) else: msg = "Invalid ASN.1 tags, actual tag %s and number %s, expecting class %s and number %s" \ % (tlv.tag_class, tlv.tag_number, tag_class, tag_number) if tlv.tag_class != tag_class or tlv.tag_number != tag_number: raise ValueError(msg) return tlv.b_data return tlv
Python
def pack_asn1_enumerated( value: int, tag: bool = True, ) -> bytes: """ Packs an int into an ASN.1 ENUMERATED byte value with optional universal tagging. """ b_data = pack_asn1_integer(value, tag=False) if tag: b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.enumerated, b_data) return b_data
Python
def pack_asn1_general_string( value: typing.Union[str, bytes], tag: bool = True, encoding: str = 'ascii', ) -> bytes: """ Packs an string value into an ASN.1 GeneralString byte value with optional universal tagging. """ b_data = to_bytes(value, encoding=encoding) if tag: b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.general_string, b_data) return b_data
Python
def pack_asn1_integer( value: int, tag: bool = True, ) -> bytes: """ Packs an int value into an ASN.1 INTEGER byte value with optional universal tagging. """ # Thanks to https://github.com/andrivet/python-asn1 for help with the negative value logic. is_negative = False limit = 0x7f if value < 0: value = -value is_negative = True limit = 0x80 b_int = bytearray() while value > limit: val = value & 0xFF if is_negative: val = 0xFF - val b_int.append(val) value >>= 8 b_int.append(((0xFF - value) if is_negative else value) & 0xFF) if is_negative: for idx, val in enumerate(b_int): if val < 0xFF: b_int[idx] += 1 break b_int[idx] = 0 if is_negative and b_int[-1] == 0x7F: # Two's complement corner case b_int.append(0xFF) b_int.reverse() b_value = bytes(b_int) if tag: b_value = pack_asn1(TagClass.universal, False, TypeTagNumber.integer, b_value) return b_value
def pack_asn1_object_identifier(
    oid: str,
    tag: bool = True,
) -> bytes:
    """ Packs a str value into an ASN.1 OBJECT IDENTIFIER byte value with optional universal tagging. """
    b_oid = bytearray()
    oid_split = [int(i) for i in oid.split('.')]
    if len(oid_split) < 2:
        raise ValueError("An OID must have 2 or more elements split by '.'")

    # The first byte of the OID is the first 2 elements (x.y) as (x * 40) + y
    b_oid.append((oid_split[0] * 40) + oid_split[1])

    for val in oid_split[2:]:
        b_oid.extend(_pack_asn1_octet_number(val))

    b_value = bytes(b_oid)
    if tag:
        b_value = pack_asn1(TagClass.universal, False, TypeTagNumber.object_identifier, b_value)

    return b_value
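# Illustrative sketch: the well-known Kerberos v5 mechanism OID encodes with a
# first content octet of 40 * 1 + 2 = 0x2a, then each remaining component in
# base-128 (840 -> 86 48, 113554 -> 86 f7 12).
def _example_pack_oid() -> None:
    b_oid = pack_asn1_object_identifier("1.2.840.113554.1.2.2")
    assert b_oid == b"\x06\x09\x2a\x86\x48\x86\xf7\x12\x01\x02\x02"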
def pack_asn1_octet_string(
    b_data: bytes,
    tag: bool = True,
) -> bytes:
    """ Packs a bytes value into an ASN.1 OCTET STRING byte value with optional universal tagging. """
    if tag:
        b_data = pack_asn1(TagClass.universal, False, TypeTagNumber.octet_string, b_data)

    return b_data
def pack_asn1_sequence( sequence: typing.List[bytes], tag: bool = True, ) -> bytes: """ Packs a list of encoded bytes into an ASN.1 SEQUENCE byte value with optional universal tagging. """ b_data = b"".join(sequence) if tag: b_data = pack_asn1(TagClass.universal, True, TypeTagNumber.sequence, b_data) return b_data
def _pack_asn1_octet_number(num: int) -> bytes:
    """ Packs an int number into a base-128 ASN.1 octet value that can span multiple octets. """
    num_octets = bytearray()

    while num:
        # Get the 7 bit value of the number.
        octet_value = num & 0b01111111

        # Set the MSB if this isn't the first octet we are processing (overall last octet)
        if len(num_octets):
            octet_value |= 0b10000000

        num_octets.append(octet_value)

        # Shift the number by 7 bits as we've just processed them.
        num >>= 7

    # A value of 0 still needs one octet emitted, otherwise nothing would be packed.
    if not num_octets:
        num_octets.append(0)

    # Finally we reverse the order so the higher octets are first.
    num_octets.reverse()

    return bytes(num_octets)
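# Worked example (illustrative): 840 is 6 * 128 + 72, emitted high-order octet
# first with the continuation bit (MSB) set on every octet except the last.
def _example_octet_number() -> None:
    assert _pack_asn1_octet_number(840) == b"\x86\x48"  # (6 | 0x80), 72
    assert _pack_asn1_octet_number(0) == b"\x00"  # relies on the zero guard above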
def unpack_asn1(b_data: bytes) -> typing.Tuple[ASN1Value, bytes]:
    """Unpacks an ASN.1 TLV into each element.

    Unpacks the raw ASN.1 value into an `ASN1Value` tuple and returns the remaining bytes that are not part of the
    ASN.1 TLV.

    Args:
        b_data: The raw bytes to unpack as an ASN.1 TLV.

    Returns:
        ASN1Value: The ASN.1 value that is unpacked from the raw bytes passed in.
        bytes: Any remaining bytes that are not part of the ASN1Value.
    """
    octet1 = struct.unpack("B", b_data[:1])[0]
    tag_class = TagClass((octet1 & 0b11000000) >> 6)
    constructed = bool(octet1 & 0b00100000)
    tag_number = octet1 & 0b00011111

    length_offset = 1
    if tag_number == 31:
        tag_number, octet_count = _unpack_asn1_octet_number(b_data[1:])
        length_offset += octet_count

    if tag_class == TagClass.universal:
        tag_number = TypeTagNumber(tag_number)

    b_data = b_data[length_offset:]

    length = struct.unpack("B", b_data[:1])[0]
    length_octets = 1

    if length & 0b10000000:
        # If the MSB is set then the length octet just contains the number of octets that encode the actual length.
        length_octets += length & 0b01111111
        length = 0

        for idx in range(1, length_octets):
            octet_val = struct.unpack("B", b_data[idx:idx + 1])[0]
            length += octet_val << (8 * (length_octets - 1 - idx))

    value = ASN1Value(tag_class=tag_class, constructed=constructed, tag_number=tag_number,
                      b_data=b_data[length_octets:length_octets + length])

    return value, b_data[length_octets + length:]
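# Round-trip sketch (illustrative, assumes the pack_* helpers above): build a
# SEQUENCE of an INTEGER and an OCTET STRING, then walk the TLVs back out.
def _example_unpack_roundtrip() -> None:
    b_seq = pack_asn1_sequence([
        pack_asn1_integer(42),
        pack_asn1_octet_string(b"data"),
    ])

    value, remaining = unpack_asn1(b_seq)
    assert remaining == b""
    assert value.tag_number == TypeTagNumber.sequence and value.constructed

    inner = unpack_asn1_sequence(value)
    assert unpack_asn1_integer(inner[0]) == 42
    assert inner[1].b_data == b"data"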
def unpack_asn1_bit_string(value: typing.Union[ASN1Value, bytes]) -> bytes:
    """ Unpacks an ASN.1 BIT STRING value. """
    b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.bit_string)

    # First octet is the number of unused bits in the last octet from the LSB.
    unused_bits = struct.unpack("B", b_data[:1])[0]
    if len(b_data) == 1:
        # An empty BIT STRING is just the unused-bits octet with no data octets.
        return b""

    last_octet = struct.unpack("B", b_data[-2:-1])[0]
    last_octet = (last_octet >> unused_bits) << unused_bits

    return b_data[1:-1] + struct.pack("B", last_octet)
def unpack_asn1_boolean(value: typing.Union[ASN1Value, bytes]) -> bool: """ Unpacks an ASN.1 BOOLEAN value. """ b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.boolean) return b_data != b"\x00"
def unpack_asn1_enumerated(value: typing.Union[ASN1Value, bytes]) -> int: """ Unpacks an ASN.1 ENUMERATED value. """ b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.enumerated) return unpack_asn1_integer(b_data)
def unpack_asn1_generalized_time(value: typing.Union[ASN1Value, bytes]) -> datetime.datetime:
    """ Unpacks an ASN.1 GeneralizedTime value. """
    data = to_text(extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.generalized_time))

    # While ASN.1 can have a timezone encoded, KerberosTime is the only thing we use and it is always in UTC with the
    # Z suffix. We strip out the Z because Python 2 doesn't support the %z identifier and add the UTC tz to the object.
    # https://www.rfc-editor.org/rfc/rfc4120#section-5.2.3
    if data.endswith('Z'):
        data = data[:-1]

    err = None
    for datetime_format in ['%Y%m%d%H%M%S.%f', '%Y%m%d%H%M%S']:
        try:
            dt = datetime.datetime.strptime(data, datetime_format)
            return dt.replace(tzinfo=datetime.timezone.utc)
        except ValueError as e:
            err = e

    raise err
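# Illustrative sketch: because extract_asn1_tlv passes plain bytes through, the
# parser can be exercised directly with a KerberosTime style value. Assumes the
# module's to_text helper decodes ASCII bytes to str.
def _example_generalized_time() -> None:
    dt = unpack_asn1_generalized_time(b"20230102030405Z")
    assert dt == datetime.datetime(2023, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc)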
def unpack_asn1_integer(value: typing.Union[ASN1Value, bytes]) -> int:
    """ Unpacks an ASN.1 INTEGER value. """
    b_int = bytearray(extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.integer))

    is_negative = b_int[0] & 0b10000000
    if is_negative:
        # Get the two's complement - invert every octet then add one, propagating the carry across octets that
        # overflow (e.g. the complemented form of -65536 ends in multiple 0xFF octets).
        for i in range(len(b_int)):
            b_int[i] = 0xFF - b_int[i]

        for i in range(len(b_int) - 1, -1, -1):
            if b_int[i] == 0xFF:
                b_int[i] = 0

            else:
                b_int[i] += 1
                break

    int_value = 0
    for val in b_int:
        int_value = (int_value << 8) | val

    if is_negative:
        int_value *= -1

    return int_value
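# Round-trip sketch (illustrative): untagged content octets from
# pack_asn1_integer feed straight back into unpack_asn1_integer, since plain
# bytes bypass the tag check in extract_asn1_tlv.
def _example_integer_roundtrip() -> None:
    for value in (0, 1, 127, 128, -1, -128, -32768, -65536, 2 ** 64):
        assert unpack_asn1_integer(pack_asn1_integer(value, tag=False)) == value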
def unpack_asn1_object_identifier(value: typing.Union[ASN1Value, bytes]) -> str: """ Unpacks an ASN.1 OBJECT IDENTIFIER value. """ b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.object_identifier) first_element = struct.unpack("B", b_data[:1])[0] second_element = first_element % 40 ids = [(first_element - second_element) // 40, second_element] idx = 1 while idx != len(b_data): oid, octet_len = _unpack_asn1_octet_number(b_data[idx:]) ids.append(oid) idx += octet_len return ".".join([str(i) for i in ids])
def unpack_asn1_sequence(value: typing.Union[ASN1Value, bytes]) -> typing.List[ASN1Value]: """ Unpacks an ASN.1 SEQUENCE value. """ b_data = extract_asn1_tlv(value, TagClass.universal, TypeTagNumber.sequence) values = [] while b_data: v, b_data = unpack_asn1(b_data) values.append(v) return values
def _unpack_asn1_octet_number(b_data: bytes) -> typing.Tuple[int, int]:
    """ Unpacks a base-128 number that can span multiple octets, returning the value and the octets consumed. """
    i = 0
    idx = 0
    while True:
        element = struct.unpack("B", b_data[idx:idx + 1])[0]
        idx += 1

        i = (i << 7) + (element & 0b01111111)
        if not element & 0b10000000:
            break

    return i, idx
def seal( flags: int, handle: RC4Handle, signing_key: bytes, seq_num: int, b_data: bytes ) -> typing.Tuple[bytes, bytes]: """Create a sealed NTLM message. Creates a sealed NTLM message as documented at `NTLM Message Confidentiality`_. Args: flags: The negotiated flags between the initiator and acceptor. handle: The RC4 handle for the negotiated context. signing_key: The key used to sign the message. seq_num: The sequence number for the message. b_data: The data/message bytes to seal. Returns: Tuple[bytes, bytes]: The sealed message bytes and the message signature. .. _NTLM Message Confidentiality: https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/115f9c7d-bc30-4262-ae96-254555c14ea6 """ seal_msg = rc4(handle, b_data) signature = sign(flags, handle, signing_key, seq_num, b_data) return seal_msg, signature
def sign(
    flags: int,
    handle: RC4Handle,
    signing_key: bytes,
    seq_num: int,
    b_data: bytes,
) -> bytes:
    """Create an NTLM signature.

    Creates an NTLM signature for the message as documented at `NTLM Message Integrity`_.

    Args:
        flags: The negotiated flags between the initiator and acceptor.
        handle: The RC4 handle for the negotiated context.
        signing_key: The key used to sign the message.
        seq_num: The sequence number for the signature.
        b_data: The data/message bytes to sign.

    Returns:
        bytes: The 16 byte signature for the message.

    .. _NTLM Message Integrity:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/131b0062-7958-460e-bca5-c7a9f9086652
    """
    if flags & NegotiateFlags.sign == 0:
        if flags & NegotiateFlags.always_sign == 0:
            raise OperationNotAvailableError(context_msg="Signing without integrity.")

        # This is the behaviour seen with SSPI when signing data with NTLMSSP_NEGOTIATE_ALWAYS_SIGN.
        return b"\x01" + b"\x00" * 15

    elif flags & NegotiateFlags.extended_session_security:
        return _mac_with_ess(flags, handle, signing_key, seq_num, b_data)

    else:
        return _mac_without_ess(handle, seq_num, b_data)
def _mac_with_ess( flags: int, handle: RC4Handle, signing_key: bytes, seq_num: int, b_data: bytes ) -> bytes: """NTLM MAC with Extended Session Security Generates the NTLM signature when Extended Session Security has been negotiated. The structure of the signature is documented at `NTLM signature with ESS`_. The algorithm as documented by `MAC with ESS`_ is:: Define MAC(Handle, SigningKey, SeqNum, Message) as Set NTLMSSP_MESSAGE_SIGNATURE.Version to 0x00000001 Set NTLMSSP_MESSAGE_SIGNATURE.Checksum to HMAC_MD5(SigningKey, ConcatenationOf(SeqNum, Message))[0..7] Set NTLMSSP_MESSAGE_SIGNATURE.SeqNum to SeqNum Set SeqNum to SeqNum + 1 EndDefine # When NegotiateFlags.key_exch Define MAC(Handle, SigningKey, SeqNum, Message) as Set NTLMSSP_MESSAGE_SIGNATURE.Version to 0x00000001 Set NTLMSSP_MESSAGE_SIGNATURE.Checksum to RC4(Handle, HMAC_MD5(SigningKey, ConcatenationOf(SeqNum, Message))[0..7]) Set NTLMSSP_MESSAGE_SIGNATURE.SeqNum to SeqNum Set SeqNum to SeqNum + 1 EndDefine Args: flags: The negotiated flags between the initiator and acceptor. handle: The RC4 handle for the negotiated context. signing_key: The key used to sign the message. seq_num: The sequence number for the signature. b_data: The data/message bytes to sign. Returns: bytes: The NTLM with ESS signature. .. _NTLM signature with ESS: https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/2c3b4689-d6f1-4dc6-85c9-0bf01ea34d9f .. _MAC with ESS: https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/a92716d5-d164-4960-9e15-300f4eef44a8 """ b_seq_num = struct.pack("<I", seq_num) checksum = hmac_md5(signing_key, b_seq_num + b_data)[:8] if flags & NegotiateFlags.key_exch: checksum = handle.update(checksum) return b"\x01\x00\x00\x00" + checksum + b_seq_num
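# Minimal standalone sketch of the ESS checksum, assuming hmac_md5 above is
# plain HMAC-MD5 from the stdlib; the resulting signature is always 16 bytes:
# version (4) + checksum (8) + sequence number (4). The key_exch RC4 pass over
# the checksum is omitted here.
import hashlib
import hmac

def _example_ess_signature(signing_key: bytes, seq_num: int, b_data: bytes) -> bytes:
    b_seq_num = struct.pack("<I", seq_num)
    checksum = hmac.new(signing_key, b_seq_num + b_data, hashlib.md5).digest()[:8]
    return b"\x01\x00\x00\x00" + checksum + b_seq_num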
def mic(self) -> typing.Optional[bytes]: """ The MIC for the Authenticate message. """ mic_offset = self._get_mic_offset() if mic_offset: return self._data.tobytes()[mic_offset:mic_offset + 16] else: return None
def _get_mic_offset(self) -> int: """ Gets the offset of the MIC structure if present. """ payload_offset = self._payload_offset # If the payload offset is 88 or more then we must have the Version (8 bytes) and the MIC (16 bytes) plus # any random data after that. if payload_offset >= 88: return 72 # If the payload offset is between 80 and 88, then we should have just the MIC and no Version. elif payload_offset >= 80: return 64 # Not enough room for a MIC between the minimum size and the payload offset. else: return 0
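# Worked arithmetic behind those offsets (illustrative constants, not part of
# the original module; field sizes per the MS-NLMP AUTHENTICATE_MESSAGE): the
# fixed header is Signature (8) + MessageType (4) + six payload field
# descriptors (6 * 8) + NegotiateFlags (4) = 64 bytes. An optional Version adds
# 8 and the MIC itself adds 16, pushing the first payload byte to 88 or later.
_FIXED_HEADER_LEN = 8 + 4 + 6 * 8 + 4  # 64 - MIC starts here when no Version
_WITH_VERSION_LEN = _FIXED_HEADER_LEN + 8  # 72 - MIC offset when Version present
_WITH_MIC_LEN = _WITH_VERSION_LEN + 16  # 88 - minimum payload offset with both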
def pack(self) -> bytes:
    """ Packs the structure to bytes. """
    # Make sure we are dealing with a timezone aware datetime
    utc_tz = datetime.timezone.utc
    utc_dt = self.replace(tzinfo=self.tzinfo if self.tzinfo else utc_tz)

    # Get the time since the UTC EPOCH in microseconds
    td = utc_dt.astimezone(utc_tz) - datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=utc_tz)
    epoch_time_us = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6)

    # Add the EPOCH_FILETIME to the microseconds since EPOCH and finally the nanoseconds part.
    ns100 = FileTime._EPOCH_FILETIME + (epoch_time_us * 10) + (self.nanosecond // 100)

    return struct.pack("<Q", ns100)
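# Illustrative standalone equivalent for an aware UTC datetime, assuming
# _EPOCH_FILETIME is the usual 116444736000000000 (the number of 100 ns
# intervals between 1601-01-01 and 1970-01-01):
def _example_filetime_pack(dt: datetime.datetime) -> bytes:
    ns100 = int(dt.timestamp() * 10 ** 7) + 116444736000000000
    return struct.pack("<Q", ns100)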
def unpack(b_data: bytes) -> "FileTime":
    """ Unpacks the structure from bytes. """
    filetime = struct.unpack("<Q", b_data)[0]  # 100 nanosecond intervals since 1601-01-01.

    # Create a datetime object based on the microseconds since the UNIX epoch
    epoch_time_us = (filetime - FileTime._EPOCH_FILETIME) // 10
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(microseconds=epoch_time_us)

    # Create the FileTime object from the datetime object and add the nanoseconds.
    ns = int(filetime % 10) * 100
    return FileTime.from_datetime(dt, ns=ns)
def pack(self) -> bytes:
    """ Packs the structure to bytes. """
    b_data = io.BytesIO()

    for av_id, value in self.items():
        # MsvAvEOL is always written last, so skip any explicit entries here.
        if av_id == AvId.eol:
            continue

        if av_id in self._FIELD_TYPES['text']:
            b_value = value.encode('utf-16-le')

        elif av_id in self._FIELD_TYPES['int32']:
            b_value = struct.pack("<I", value)

        elif av_id in self._FIELD_TYPES['struct']:
            b_value = value.pack()

        else:
            b_value = value

        b_data.write(struct.pack("<HH", av_id, len(b_value)) + b_value)

    b_data.write(b"\x00\x00\x00\x00")  # MsvAvEOL
    return b_data.getvalue()
def unpack(b_data: bytes) -> "TargetInfo": """ Unpacks the structure from bytes. """ target_info = TargetInfo() b_io = io.BytesIO(b_data) b_av_id = b_io.read(2) while b_av_id: av_id = struct.unpack("<H", b_av_id)[0] length = struct.unpack("<H", b_io.read(2))[0] b_value = b_io.read(length) value: typing.Any if av_id in TargetInfo._FIELD_TYPES['text']: # All AV_PAIRS are UNICODE encoded. value = b_value.decode('utf-16-le') elif av_id in TargetInfo._FIELD_TYPES['int32']: value = AvFlags(struct.unpack("<I", b_value)[0]) elif av_id == AvId.timestamp: value = FileTime.unpack(b_value) elif av_id == AvId.single_host: value = SingleHost.unpack(b_value) else: value = b_value target_info[AvId(av_id)] = value b_av_id = b_io.read(2) return target_info
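# Round-trip sketch (illustrative): the AvId member name used below is an
# assumption based on the MS-NLMP AV_PAIR ids (MsvAvNbDomainName as a 'text'
# field), not confirmed by this module.
def _example_target_info_roundtrip() -> None:
    target_info = TargetInfo()
    target_info[AvId.nb_domain_name] = "DOMAIN"

    unpacked = TargetInfo.unpack(target_info.pack())
    assert unpacked[AvId.nb_domain_name] == "DOMAIN"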
def unpack_text_field( sequence: typing.Dict[int, ASN1Value], idx: int, structure: str, name: str, **kwargs: typing.Optional[str], ) -> typing.Optional[str]: """ Extracts a text field from a tagged ASN.1 sequence. """ raw_value = get_sequence_value(sequence, idx, structure, name, unpack_asn1_octet_string) if raw_value is None: if 'default' not in kwargs: raise ValueError("Missing mandatory text field '%s' in '%s'" % (name, structure)) return kwargs['default'] return raw_value.decode('utf-16-le')