content
stringlengths
10
4.9M
package org.learning.parallelprocessor.framework; import org.learning.parallelprocessor.framework.connector.Connector; import org.learning.parallelprocessor.framework.merger.Key; import java.util.concurrent.BlockingQueue; public interface ISource<T> extends Component { void pipe(ISink<T> next); <Y> Connector<T, Y> pipe(Connector<T, Y> next); }
#!/usr/bin/env python # -*- coding: utf-8 -*- ''' Copyright (C) 2016 By <NAME>. All rights reserved. @author : <NAME> @version: 1.0 @created: 2016-09-22 ''' import os import re import time import shutil import zipfile import mimetypes import urllib import copy import random import uuid import email import smtplib import string from email.utils import formatdate from email.MIMEText import MIMEText from email.MIMEBase import MIMEBase from email.MIMEMultipart import MIMEMultipart from email.MIMENonMultipart import MIMENonMultipart from email import Encoders import gevent from email.header import Header from django.conf import settings from django import template from django.template import Context ATTACH_SAVE_PATH = settings.ATTACH_DATA_PATH from .tools import safe_format CHARS = string.lowercase + string.digits ### 解码 ### def decode_str(s, charset=None): if charset is not None: try: return s.decode(charset, 'replace') except Exception: return s.decode('utf-8', 'replace') else: try: return s.decode('utf-8') except UnicodeDecodeError: try: return s.decode('gb18030') except UnicodeDecodeError: return s.decode('utf-8', 'replace') ### 编码 ### def encode_str(s, charset=None): if charset is not None: try: return s.encode(charset, 'replace') except Exception: return s.encode('utf-8', 'replace') else: try: return s.encode('utf-8') except UnicodeEncodeError: try: return s.encode('gb18030') except UnicodeEncodeError: return s.encode('utf-8', 'replace') def decode_encode_str(s): try: return decode_str(s) except: return encode_str(s).decode('utf-8') def html_add_footer(html, footer): m = re.search(r'</\s*(body|html)>', html, re.IGNORECASE) if m is not None: s = m.start() else: s = len(html) return html[:s] + footer + html[s:] ############################################# ### html内容删除外部资源(css、js) ### def del_jscss_from_html(html): html = re.sub(r'<script.*?</script>', '', html, flags=re.DOTALL|re.IGNORECASE) # html = re.sub(r'<style.*?</style>', '', html, 
flags=re.DOTALL|re.IGNORECASE) html = re.sub(r'<link .*?>', '', html, flags=re.DOTALL|re.IGNORECASE) return html ### 从url导入:将相对url替换成绝对url ### def replace_srchref_from_html(url, url2, html): findall = re.findall(r'src="(.+?)"', html) p = re.compile(r'^((https|http|ftp|rtsp|mms)+://)+') for value in findall: if value.startswith('/') and not value.startswith('//'): html = re.sub(value, url2 + value, html) elif not value.startswith('//') and not p.search(value): html = re.sub(value, url + '/' + value, html) return html ### 从html导入:判断是否包含相对路径 ### def clear_relative_url(html): findall = re.findall(r'src="(.+?)"', html) for value in findall: if value.startswith('/') and not value.startswith('//'): m = 'src="' + value + '"' html = re.sub(m, '', html) return html ### 获取附件列表渲染模板 ### def get_render_attach_template(): path = os.path.join(settings.BASE_DIR, 'app', 'template', 'templates', 'template', 'render_attch_list.html') with open(path) as f: html = f.read() return html ### 获取参考模板列表渲染模板 ### def get_render_refimg_template(): path = os.path.join(settings.BASE_DIR, 'app', 'template', 'templates', 'template', 'render_ref_template_imglist.html') with open(path) as f: html = f.read() return html ### 获取网络附件渲染模板 ### def get_render_net_template(): # <p style="text-align:center"> att_html = u''' <span>附件:</span> <a href="{{ ajax_url }}?id={{ template_id }}&ufile_name={{ ufile_name }}&aid=1&download=1">{{ file_name }}</a> <span>({{ file_size }})</span> ''' return att_html ### 获取HTML内容 ### def get_html_content(content, ajax_url, template_id, ufile_name, file_type, file_name, file_size): att_html = get_render_net_template() vlas = { 'ajax_url': ajax_url, 'template_id': template_id, 'ufile_name': ufile_name, 'file_type': file_type, 'file_name': file_name, 'file_size': file_size, } t = template.Template(att_html) att_html = t.render(Context(vlas)) content = html_add_footer(content, att_html) return content ############################################# #创建指定目录 def 
create_filepath(path=None): if not os.path.exists(path): os.makedirs(path) return True #删除指定目录下的文件以及文件夹 def del_filepath(path_list=None): path_list = path_list if isinstance(path_list, (list, tuple)) else [path_list] for path in path_list: if os.path.exists(path): if os.path.isfile(path): os.remove(path) elif os.path.isdir(path): shutil.rmtree(path, True) return True ############################################# ### 附件处理 ### def handle_uploaded_attachfile(template_id, file): file_name = file.name suffix = file_name.split('.')[-1] ufile_name = '{}.{}'.format(uuid.uuid1(), suffix) tpl_path = os.path.join(ATTACH_SAVE_PATH, str(template_id)) file_path = os.path.join(tpl_path, ufile_name) create_filepath(tpl_path) with open(file_path, 'w') as f: f.write(file.read()) return file_name, ufile_name ############################################# ### 模板类型html: eml文件上传保存 ### ### 模板类型html: zip文件上传保存 ### ### 模板类型eml: eml文件上传保存 ### def handle_uploaded_file(template_id=None, file=None, path=None, suffix='txt'): create_filepath(path) file_path = os.path.join(path, '{}.{}'.format(template_id, suffix)) dest = open(file_path, 'wb+') for chunk in file.chunks(): dest.write(chunk) dest.close() return file.name ############################################# ### 模板类型html:获取eml文件内容 ### ### 模板类型eml:获取eml文件内容 ### def handle_get_file(template_id, path): file_path = os.path.join(path, '{}.txt'.format(template_id)) with open(file_path, 'r') as f: content = f.read() del_filepath(file_path) return content ############################################# ### 从eml导入:附件处理 ### def handle_html_attach(template_id, attach, tpl_attachtype): file_name = attach.get('decode_name', '') suffix = file_name.split('.')[-1] ufile_name = '{}.{}'.format(uuid.uuid1(), suffix) file_type = attach.get('content_type', '').split(';')[0] attachtype = 'common' if tpl_attachtype == 'html' or 'content_id' in attach: attachtype = 'html' attachsize = 0 if attachtype == 'common': attachsize = attach.get('size', 0) att_path = 
os.path.join(ATTACH_SAVE_PATH, str(template_id)) file_path = os.path.join(att_path, ufile_name) create_filepath(att_path) with open(file_path, 'w') as f: f.write(attach['data']) return file_name, ufile_name, file_type, attachtype, attachsize ############################################# ### 模板类型html:解压zip文件 ### def handle_get_rarfile(template_id, path): zipfile_path = os.path.join(path, '{}.rar'.format(template_id)) save_path = os.path.abspath(os.path.join(path, str(template_id))) create_filepath(save_path) count = 0 suffix = ('html', 'htm') import rarfile f = rarfile.RarFile(zipfile_path) html_file, att_files= '', [] for name in f.namelist(): try: utf8name = decode_str(name).encode('utf-8') except: utf8name = encode_str(name) #utf8name = utf8name.decode('utf-8') # 判断文件格式最多两层:一个html文件 和 文件夹 if len(utf8name.split('/')) > 2: f.close() return '', [], '' file_path = os.path.join(save_path, utf8name) zip_path_name = os.path.dirname(utf8name) pathname = os.path.abspath(os.path.join(save_path, zip_path_name)) # 第一层只有一个html文件 if zip_path_name == '': # 获取html文件路径 T = utf8name.split('.') if len(T)>=2: count += 1 html_file = utf8name if T[-1] not in suffix: f.close() return '', [], '' else: continue if not os.path.exists(pathname) and zip_path_name!= '': os.makedirs(pathname) data = f.read(name) if not os.path.isdir(file_path): # 获取附件或者引用文件名路径 if zip_path_name != '': att_files.append(utf8name) fo = open(file_path, "w") fo.write(data) fo.close if count != 1: f.close() return '', [], '' f.close() del_filepath(zipfile_path) return html_file, att_files, save_path ### 模板类型html:解压zip文件 ### def handle_get_zipfile(template_id, path): zipfile_path = os.path.join(path, '{}.zip'.format(template_id)) save_path = os.path.abspath(os.path.join(path, str(template_id))) create_filepath(save_path) count = 0 suffix = ('html', 'htm') f = zipfile.ZipFile(zipfile_path, "r") html_file, att_files= '', [] for name in f.namelist(): try: utf8name = decode_str(name).encode('utf-8') except: utf8name = 
encode_str(name) #utf8name = utf8name.decode('utf-8') # 判断文件格式最多两层:一个html文件 和 文件夹 if len(utf8name.split('/')) > 2: f.close() return '', [], '' file_path = os.path.join(save_path, utf8name) zip_path_name = os.path.dirname(utf8name) pathname = os.path.abspath(os.path.join(save_path, zip_path_name)) # 第一层只有一个html文件 if zip_path_name == '': count += 1 # 获取html文件路径 html_file = utf8name if utf8name.split('.')[-1] not in suffix: f.close() return '', [], '' if not os.path.exists(pathname) and zip_path_name!= '': os.makedirs(pathname) data = f.read(name) if not os.path.isdir(file_path): # 获取附件或者引用文件名路径 if zip_path_name != '': att_files.append(utf8name) fo = open(file_path, "w") fo.write(data) fo.close if count != 1: f.close() return '', [], '' f.close() del_filepath(zipfile_path) return html_file, att_files, save_path ### 从zip导入:附件处理 ### def handle_html_zip_attach(template_id, file_path, tpl_attachtype, attachtype='html'): file_name = file_path.split('/')[-1] suffix = file_name.split('.')[-1] ufile_name = '{}.{}'.format(uuid.uuid1(), suffix) file_type = mimetypes.guess_type(file_path)[0] if tpl_attachtype == 'html': attachtype = 'html' tpl_path = os.path.join(ATTACH_SAVE_PATH, str(template_id)) create_filepath(tpl_path) dir_file_path = os.path.join(tpl_path, ufile_name) if isinstance(file_path, unicode): file_path = file_path.encode('utf-8') shutil.copyfile(file_path, dir_file_path) file_size = os.path.getsize(dir_file_path) return file_name, ufile_name, file_type, attachtype, file_size ############################################# ### 发送邮件模板 ### def send_template(host='127.0.0.1', port=10027, use_ssl=None, sender='<EMAIL>', receiver=None, message=None): deliver_ip = None receive_ip = None try: with gevent.Timeout(120): if use_ssl: s = smtplib.SMTP_SSL(host, port) else: s = smtplib.SMTP(host, port) deliver_ip = s.sock.getsockname()[0] receive_ip = s.sock.getpeername()[0] s.sendmail(sender, receiver, message) s.quit() code, msg = 250, 'ok' except smtplib.SMTPResponseException 
as e: code, msg = e.smtp_code, e.smtp_error except smtplib.SMTPRecipientsRefused as e: senderrs = e.recipients code, msg = senderrs[receiver] except gevent.Timeout: code, msg = -1, u'发送超时' except BaseException as e: code, msg = -1, repr(e) return code, msg, deliver_ip, receive_ip ############################################# # 账号密码发送邮件 def smtp_send_email(host=None, port=25, account=None, password=<PASSWORD>, sender=None, receivers=None, message=None): try: smtpObj = smtplib.SMTP() smtpObj.connect(host, port) # 25 为 SMTP 端口号 smtpObj.login(account, password) smtpObj.sendmail(sender, receivers, message) code, msg = 250, 'ok' except smtplib.SMTPException: code, msg = -1, u"Error: 无法发送邮件" except BaseException as e: code, msg = -1, repr(e) return code, msg ############################################# ### 通过邮件模板构造一个可以发送html、纯文本、附件的邮件 ### # html_comments_oneline = re.compile(r'\<!--[^[#][^\r\n]+?--\>') # html_comments_oneline = re.compile(r'\<!--(.*?)--\>') class MulTemplateEmail(object): def __init__(self, content_type=1, character='utf-8', encoding='base64', template_id=0, mail_from=u'<EMAIL>', mail_to=u'<EMAIL>', reply_to=None, task_id=None, send_maillist_id=None, subject='', content='', text_content=None, attachment=None, user_id=0, replace=False, edm_check_result='', is_need_receipt=False, track_domain=None, sys_track_domain=None): """ :param content_type: 判断是eml格式发送,还是html编辑模式发送 :param character: 设置发送编码(转换字符集) :param encoding: 设置邮件编码(附件编码) :param attachtype: 判断是传统附件common,还是在线附件html(在线附件则以网络附件发送) :param template_id: 暂时生成附件的保存路径 :param mail_from: 发件人 :param mail_to: 收件人 :param subject: 主题 :param content: html内容 :param text_content: 纯文本 :param eml_content: eml内容 :param attachment: 附件信息,字典列表, 格式:[{'filepath': 'XXX', 'filetype': 'application/octet-stream', 'filename': 'xxx.txt', 'attachtype': 'html'},...] 
:param replace: 变量替换标志 :return: 返回邮件信息 """ self.content_type = content_type if not character: character='utf-8' # if not encoding: encoding='base64' if encoding not in ["base64", "quoted-printable"]: encoding = 'base64' self.character = character self.encoding = encoding self.template_id = template_id self.mail_from = mail_from self.mail_to = mail_to self.reply_to = reply_to self.edm_check_result = edm_check_result self.is_need_receipt = is_need_receipt self.track_domain = track_domain self.sys_track_domain = sys_track_domain self._common_kwargs() if not subject: subject = '' if not content: content = '' # m = html_comments_oneline.match(content) # if m: # content = content.replace(m.group(), '', count=1) # content = html_comments_oneline.match(content) self._relace_subject(subject, replace) self._relace_content(content, replace, template_id, user_id, task_id, send_maillist_id) if not text_content: text_content = u'''如果邮件内容无法正常显示请以超文本格式显示HTML邮件!\n (If the content of the message does not display properly, please display the HTML message in hypertext format!)''' self.text_content = text_content self.attachment = attachment self.attachment_path = ATTACH_SAVE_PATH # self.msgAlternative = MIMEMultipart('alternative') self.message = MIMEMultipart('alternative') if user_id==2369: self.message['List-Unsubscribe'] = "<mailto:<EMAIL>?subject=unsubscribe>, <http://www.bestedm.org/>" # if user_id==2369: # self.message['List-Unsubscribe'] = Header("<mailto:<EMAIL>?subject=unsubscribe>", None) # self.message['List-Unsubscribe'] = Header("<mailto:<EMAIL>>", None) # self.message['List-Unsubscribe'] = Header("<https://www.ceshi.magvision.com/login?next=/>, <mailto:<EMAIL>?subject=unsubscribe>", None) def _replace(self, content, s, replace_s): content = content.replace(s, str(replace_s)) content = content.replace(urllib.quote_plus(s), str(replace_s)) return content def _common_kwargs(self): self.kwargs = {} self.kwargs.update( FULLNAME='', RECIPIENTS= '', DATE='', RANDOM_NUMBER='', 
SEX='', BIRTHDAY='', PHONE='', AREA='', VAR1='', VAR2='', VAR3='', VAR4='', VAR5='', VAR6='', VAR7='', VAR8='', VAR9='', VAR10='', JOKE='', MOTTO='', HEALTH='', SUBJECT_STRING='', TEMPLATE_ID='', USER_ID='', SEND_ID='', MAILLIST_ID='', ) return def _relace_subject(self, subject, replace): if replace: subject = safe_format(subject, **self.kwargs) self.subject = subject return # 生成一个start-end 随机长度的字符 def _make_rand_rand_chars(self, start=5, end=10): return "".join([random.choice(CHARS) for i in range(random.randint(start, end))]) # 内容里面链接替换成客户的域名 def _replace_href_domain(self, content): def encrypt_url(matched): domain = self.track_domain if self.track_domain else '{}.{}'.format(self._make_rand_rand_chars(), settings.TRACK_DOMAIN_DEFAULT) search_url = matched.group(1) if search_url.startswith('http://'): search_url2 = (search_url.replace('http://', '')).split('/')[0] search_url2 = 'http://{}'.format(search_url2) if search_url2 in self.sys_track_domain: search_url = search_url.replace(search_url2, 'http://{}'.format(domain)) return 'href="{}"'.format(search_url) return re.sub('href="?\'?([^"\'>]*)', encrypt_url, content) # 图片链接替换 def _replace_src_domain(self, content): def encrypt_url(matched): domain = self.track_domain if self.track_domain else '{}.{}'.format(self._make_rand_rand_chars(), settings.TRACK_DOMAIN_DEFAULT) search_url = matched.group(1) if search_url.startswith('http://'): search_url2 = (search_url.replace('http://', '')).split('/')[0] search_url2 = 'http://{}'.format(search_url2) if search_url2 in self.sys_track_domain: search_url = search_url.replace(search_url2, 'http://{}'.format(domain)) return 'src="{}"'.format(search_url) return re.sub('src="?\'?([^"\'>]*)', encrypt_url, content) def _relace_content(self, content, replace, template_id, user_id, task_id, send_maillist_id): content = content.replace("\r\n", "\n") if template_id: content = self._replace(content, '{TEMPLATE_ID}', template_id) if user_id: content = self._replace(content, '{USER_ID}', 
user_id) if task_id: content = self._replace(content, '{SEND_ID}', task_id) if send_maillist_id is not None: content = self._replace(content, '{MAILLIST_ID}', '{}_{}'.format(user_id, send_maillist_id)) if replace: content = self._replace(content, '{JOKE-MOTTO}', '') content = safe_format(content, **self.kwargs) if self.track_domain is not None: content = self._replace_href_domain(content) content = self._replace_src_domain(content) self.content = content return @staticmethod def encode_(s): try: return s.encode('utf-8') except UnicodeDecodeError: try: return s.encode('gb18030') except UnicodeDecodeError: return s.encode('utf-8', 'replace') @staticmethod def decode_(s): try: return s.decode('utf-8') except UnicodeDecodeError: try: return s.decode('gb18030') except UnicodeDecodeError: return s.decode('utf-8', 'replace') def get_message(self): if self.content_type == 2: return self._eml() else: return self._html() def _eml(self): try: content = MulTemplateEmail.encode_(self.content) except: content = MulTemplateEmail.decode_(self.content) self.message = email.message_from_string(content) return self.message.as_string() # 生成一个长度为n的数字串 def _make_rand_nums(self, n=10): # noinspection PyUnusedLocal return "".join([str(random.randint(0, 9)) for i in range(n)]) # 生成 Message-Id def _makeMsgId(self): msgid_stat = random.randint(1,10000) user_id = random.randint(1,10000) msgid_domain = self.mail_from.split('@')[-1] task_ident = '{}-{}-{}'.format(time.strftime('%Y%m%d%H%M%S'), user_id, random.randint(10, 100)) mid = "<%s.{RANDOM}-{%s:%s}-{COUNT}@{DOMAIN}>" % (time.strftime("%Y%m%d%H%M%S"), user_id, task_ident) mid = mid.replace('{COUNT}', "%07d" % msgid_stat) mid = mid.replace('{RANDOM}', self._make_rand_nums(5)) mid = mid.replace('{DOMAIN}', msgid_domain) return mid def _html(self): """ # 发送一个包含纯文本、html和附件邮件: # 发送成功少纯文本的内容,代码没有报错,把其他的代码注掉仅发送纯文本内容,纯文本中的内容在邮件中是能看到的。 """ # mul Header # self.message['Content-Transfer-Encoding'] = self.encoding # 
self.message.replace_header('Content-Transfer-Encoding', self.encoding) self.message['Message-Id'] = Header(self._makeMsgId(), self.character) if self.reply_to: self.message['Reply-to'] = Header(self.reply_to, self.character) self.message['Subject'] = Header(self.subject, self.character) self.message['From'] = Header(self.mail_from, self.character) self.message['To'] = Header(self.mail_to, self.character) self.message["Date"] = formatdate(localtime=True) if self.is_need_receipt: self.message['Disposition-Notification-To'] = Header(self.mail_from, self.character) # self.message['Disposition-Notification-To'] = Header('1248644045@qq.<EMAIL>') if self.edm_check_result: self.message['Edm-Check-Result'] = Header(self.edm_check_result, self.character) # mul Content(html或纯文本) if self.text_content: if self.encoding == "base64": mt = MIMEText(self.text_content, 'plain', self.character) else: mt = MIMEText(None, _subtype="plain") mt.replace_header('content-transfer-encoding', self.encoding) mt.set_payload(self.text_content.encode(self.character).encode('quoted-printable'), self.character) self.message.attach(mt) if self.content: if self.encoding == "base64": mt = MIMEText(self.content, 'html', self.character) else: mt = MIMEText(None, _subtype="html") mt.replace_header('content-transfer-encoding', self.encoding) mt.set_payload(self.content.encode(self.character).encode('quoted-printable'), self.character) self.message.attach(mt) # self.message.attach(self.msgAlternative) # mul Attachment(附件,传统附件解析) for filepath, filetype, filename in self.attachment: try: real_filepath = os.path.join(self.attachment_path, str(self.template_id), filepath.encode('utf-8')) attachment = MIMEText(open(real_filepath, 'r').read(), self.encoding, self.character) attachment['Content-Type'] = filetype attachment['Content-Disposition'] = 'attachment;filename="%s"' % Header(filename, self.character) self.message.attach(attachment) except BaseException as e: print e continue return 
self.message.as_string()
# -*- coding: utf-8 -*- """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" ### Alias : CertServer.init & Last Modded : 2022.02.24. ### Coded with Python 3.10 Grammar by IRACK000 Description : This is a generator script to generate a CertSercer-signed certificate. Reference : [CA certificate] https://www.openssl.org/docs/manmaster/man5/x509v3_config.html [add subject, authority key] https://stackoverflow.com/questions/14972345/creating-self-signed-certificate-using-pyopenssl """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" from os import mkdir, chmod, environ import requests from settings import * isfile = path.isfile # check root permission if OS != "Windows": if 'SUDO_UID' not in environ.keys(): print("ERROR: this program requires super user priv.") sys.exit(1) # [import root CA certificate.] __ROOT_CA = RootCA() # check if server certificate is exist. => if it does, skip the process. if isfile(f"{CERT_DIR}/{PASS_FILE}") and isfile(f"{CERT_DIR}/{CERT_FILE}") and isfile(f"{CERT_DIR}/{KEY_FILE}"): print("INFO: server certificate already exists. Init process skipped.") sys.exit(0) else: print(f"INFO: Certificate files not found. Create a directory called '{CERT_DIR}' automatically " f"in the same directory as this python file and generate '{CERT_FILE}' and '{KEY_FILE}' files.\n") def set_certificate_passphrase(): """ Get a passphrase for the certificate, and save it to a file. """ # get rootCA certificate password. while True: __PASSPHRASE__ = getpass("Enter passphrase: ").replace(" ", "") if __PASSPHRASE__ == "": print("ERROR: passphrase cannot be empty.\n") continue elif '$' in __PASSPHRASE__: print("ERROR: you should not use '$' in passphrase for bash auto input compatibility.\n") continue elif __PASSPHRASE__ == getpass("Enter passphrase again: ").replace(" ", ""): # check passphrase is same. break else: print("ERROR: Passphrase is not same. retry.\n") # write rootCA certificate password to file. 
with open(f"{CERT_DIR}/{PASS_FILE}", 'w+') as pass_file: pass_file.write(__PASSPHRASE__) chmod(f"{CERT_DIR}/{PASS_FILE}", 0o600) # can only root user read and write. return __PASSPHRASE__ def proceed_certificate_authority_generation(): """ Generate CertServer crt file and key file with a 4096bit RSA key. """ __PASSPHRASE__ = set_certificate_passphrase() keypair = crypto.PKey() keypair.generate_key(TYPE_RSA, 4096) public_ip = requests.get(IP_API_URL).content.decode() country = input("\nEnter your Country Name: ") region = input("Enter your State: ") city = input("Enter your Location(City): ") crt = crypto.X509() crt.set_version(2) crt.set_serial_number(1) # serial number must be unique, but we don't care. ^^ crt.gmtime_adj_notBefore(0) # start time from now crt.gmtime_adj_notAfter(ONE_YEAR * HOW_MANY_YEARS) # end time subject = crt.get_subject() subject.CN = public_ip # external ip subject.C = country subject.ST = region subject.L = city subject.O = ORGANIZATION subject.OU = UnitType.CERT crt.add_extensions([ # add extensions; crt does not ues domain name, so need to add subject alternative name. # [set this certificate belongs to Certificate Authority(CA)] # This is a multivalued extension which indicates whether a certificate is a CA certificate. # The first value is CA followed by TRUE or FALSE. If CA is TRUE then an optional pathlen name followed # by a non-negative value can be included. crypto.X509Extension(b'basicConstraints', True, b'CA:TRUE'), # The SKID extension specification has a value with three choices. If the value is the word none then # no SKID extension will be included. If the value is the word hash, or by default for the x509, req, # and ca apps, the process specified in RFC 5280 section 4.2.1.2. (1) is followed: The keyIdentifier is # composed of the 160-bit SHA-1 hash of the value of the BIT STRING subjectPublicKey (excluding the tag, # length, and number of unused bits). 
crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=crt), crypto.X509Extension(b"subjectAltName", False, f"IP:{public_ip}".encode('utf-8')) ]) # if the client's ip is not exists at crt ip list, the certificate will be disabled. # in this situation, authority key using a reference to CA, which is subject key # so, if add 'subjectKeyIdentifier' and 'authorityKeyIdentifier' extensions at the same time, # it will make error crt.add_extensions([ # The AKID extension specification may have the value none indicating that no AKID shall be included. # Otherwise, it may have the value keyid or issuer or both of them, separated by ,. Either or both can have # the option always, indicated by putting a colon : between the value and this option. For self-signed # certificates the AKID is suppressed unless always is present. By default, the x509, req, and ca apps # behave as if none was given for self-signed certificates and keyid, issuer otherwise. crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always", issuer=crt) ]) crt.set_subject(crt.get_subject()) crt.set_issuer(crt.get_subject()) crt.set_pubkey(keypair) crt.sign(keypair, 'SHA256') # sign with the CA(CS) private key. key_dump = crypto.dump_privatekey(FILETYPE_PEM, keypair, cipher='AES256', passphrase=__PASSPHRASE__.encode('utf-8')) crt_dump = crypto.dump_certificate(FILETYPE_PEM, crt) with open(path.join(CERT_DIR, KEY_FILE), 'w+') as ca_key_file, \ open(path.join(CERT_DIR, CERT_FILE), 'w+') as ca_crt_file: ca_key_file.write(key_dump.decode()) ca_crt_file.write(crt_dump.decode()) print("RESULT: Certificate Authority generated successfully.\n") chmod(path.join(CERT_DIR, KEY_FILE), 0o600) # can only root user read and write chmod(path.join(CERT_DIR, CERT_FILE), 0o644) # can any user read if __name__ == '__main__': # set up the certificate password and create rootCA certificate. proceed_certificate_authority_generation()
Skeletal muscle oxidative capacity in young and older women and men. It has been suggested that a decline in skeletal muscle oxidative capacity is a general consequence of aging in humans. However, previous studies have not always controlled for the effects of varying levels of physical activity on muscle oxidative capacity. To test the hypothesis that, when matched for comparable habitual physical activity levels, there would be no age-related decline in the oxidative capacity of a locomotor muscle, the postexercise recovery time of phosphocreatine was compared in the tibialis anterior muscle of young and older healthy women and men of similar, relatively low, activity levels. The intramuscular metabolic measurements were accomplished by using phosphorus magnetic resonance spectroscopy. The results indicate that there was no age effect on the postexercise recovery time of phosphocreatine recovery, thus supporting the stated hypothesis. These data suggest that there is no requisite decline in skeletal muscle oxidative capacity with aging in humans, at least through the seventh decade.
<gh_stars>10-100 // Copyright 2016 The Serviced Authors. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package agent implements a service that runs on a serviced node. It is // responsible for ensuring that a particular node is running the correct services // and reporting the state and health of those services back to the master // serviced. package facade import ( log "github.com/Sirupsen/logrus" "github.com/control-center/serviced/datastore" userdomain "github.com/control-center/serviced/domain/user" "github.com/control-center/serviced/utils" "crypto/sha1" "errors" "fmt" "io" "strings" ) // each time Serviced starts up a new password will be generated. 
This will be passed into // the containers so that they can authenticate against the API var SYSTEM_USER_NAME = "system_user" var INSTANCE_PASSWORD string //hashPassword returns the sha-1 of a password func hashPassword(password string) string { h := sha1.New() io.WriteString(h, password) return fmt.Sprintf("% x", h.Sum(nil)) } // AddUser adds a new user record func (f *Facade) AddUser(ctx datastore.Context, newUser userdomain.User) error { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.AddUser")) var err error logger := plog.WithFields(log.Fields{ "newUserName": newUser.Name, }) logger.Debug("Started Facade.AddUser") defer logger.WithError(err).Debug("Finished Facade.AddUser") name := strings.TrimSpace(newUser.Name) newUser.Password = <PASSWORD>(newUser.Password) _, err = f.GetUser(ctx, name) if err != nil && !datastore.IsErrNoSuchEntity(err) { return err } err = f.userStore.Put(ctx, userdomain.Key(name), &newUser) return err } // UpdateUser updates the user record. NOTE: It is assumed the pasword // is NOT hashed when updating the user record func (f *Facade) UpdateUser(ctx datastore.Context, user userdomain.User) error { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.UpdateUser")) var err error logger := plog.WithField("userName", user.Name) logger.Debug("Started Facade.UpdateUser") defer logger.WithError(err).Debug("Finished Facade.UpdateUser") id := strings.TrimSpace(user.Name) if id == "" { err = errors.New("empty User.Name not allowed") return err } user.Name = id user.Password = <PASSWORD>(user.Password) err = f.userStore.Put(ctx, userdomain.Key(user.Name), &user) return err } func (f *Facade) GetUser(ctx datastore.Context, userName string) (userdomain.User, error) { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.GetUser")) var err error logger := plog.WithField("userName", userName) logger.Debug("Started Facade.GetUser") defer logger.WithError(err).Debug("Finished Facade.GetUser") var user userdomain.User err = f.userStore.Get(ctx, 
userdomain.Key(userName), &user) return user, err } // RemoveUser removes the user specified by the userName string func (f *Facade) RemoveUser(ctx datastore.Context, userName string) error { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.RemoveUser")) var err error logger := plog.WithField("userName", userName) logger.Debug("Started Facade.RemoveUser") defer logger.WithError(err).Debug("Finished Facade.RemoveUser") err = f.userStore.Delete(ctx, userdomain.Key(userName)) return err } // ValidateCredentials takes a user name and password and validates them against a stored user func (f *Facade) ValidateCredentials(ctx datastore.Context, user userdomain.User) (bool, error) { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.ValidateCredentials")) var err error logger := plog.WithField("userName", user.Name) logger.Debug("Started Facade.ValidateCredentials") defer logger.WithError(err).Debug("Finished Facade.ValidateCredentials") var storedUser userdomain.User storedUser, err = f.GetUser(ctx, user.Name) if err != nil { return false, err } // hash the passed in password hashedPassword := hashPassword(user.Password) // confirm the password if storedUser.Password != hashedPassword { return false, nil } // at this point we found the user and confirmed the password return true, nil } // GetSystemUser returns the system user's credentials. 
func (f *Facade) GetSystemUser(ctx datastore.Context) (userdomain.User, error) { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.GetSystemUser")) plog.Debug("Started Facade.GetSystemUser") defer plog.Debug("Finished Facade.GetSystemUser") systemUser := userdomain.User{ Name: SYSTEM_USER_NAME, Password: <PASSWORD>, } return systemUser, nil } // createSystemUser updates the running instance password as well as the user record in elastic func (f *Facade) CreateSystemUser(ctx datastore.Context) error { defer ctx.Metrics().Stop(ctx.Metrics().Start("Facade.CreateSystemUser")) user, err := f.GetUser(ctx, SYSTEM_USER_NAME) if err != nil { plog.WithError(err).Warning("Default user not found; creating one.") // create the system user user := userdomain.User{} user.Name = SYSTEM_USER_NAME if err := f.AddUser(ctx, user); err != nil { return err } } // update the instance password password, err := utils.NewUUID36() if err != nil { return err } user.Name = SYSTEM_USER_NAME user.Password = password INSTANCE_PASSWORD = password return f.UpdateUser(ctx, user) }
/**
 * Demonstrates Java field-initialization order: the static initializer for z
 * runs once at class-initialization time, while the instance initializer for
 * y runs on every `new App()`, each bumping the shared static counter x.
 */
public class App {
    // Shared counter, incremented by each final-field initializer below.
    static int x = 0;
    // Instance field: evaluated once per instance, so each App captures a
    // different snapshot of x.
    final int y = x++;
    // Static field: evaluated exactly once, when the class is initialized
    // (after x = 0 above), so z is 0 for every instance.
    final static int z = x++;

    public String toString() {
        return "x: " + x + " y: " + y + " z: " + z;
    }

    public static void main(String[] args) {
        App a = new App();
        System.out.println(a); // prints "x: 2 y: 1 z: 0"
        App b = new App();
        System.out.println(b); // prints "x: 3 y: 2 z: 0"
    }
}
"""Packaging metadata for the channels-benchmark helper package."""
from setuptools import find_packages, setup

setup(
    name='channels-benchmark',
    packages=find_packages(),
    # benchmark.py lives at the repository root, outside any package.
    py_modules=['benchmark'],
    install_requires=[
        'autobahn',
        'Twisted',
        # statistics is in the stdlib from Python 3.4 on; the backport is
        # only required when installing under Python 2.
        'statistics ; python_version < "3.0"',
    ],
)
<gh_stars>1-10 package com.jcrspace.manager_account; import android.content.SharedPreferences; import com.blankj.utilcode.utils.EncryptUtils; import com.jcrspace.common.Qs; import com.jcrspace.common.config.QsCommonConfig; import com.jcrspace.common.lander.UserLander; import com.jcrspace.common.manager.BaseManager; import com.jcrspace.common.manager.TokenManager; import com.jcrspace.manager_account.model.AccountDO; import com.jcrspace.manager_account.model.AccountSO; import org.json.JSONArray; import org.xutils.DbManager; import org.xutils.ex.DbException; import cn.bmob.v3.BmobQuery; import cn.bmob.v3.exception.BmobException; import cn.bmob.v3.listener.QueryListener; import cn.bmob.v3.listener.SaveListener; import cn.bmob.v3.listener.UpdateListener; /** * Created by jiangchaoren on 2017/2/27. */ /** * 用户信息管理类 * 规则: * 1、在用户登录过后,会创建该用户的一个数据库名为account_13333333333 * 这个数据库中有一张表为user_info,这张表只有一条数据,这条数据存储该用户的详细信息。 */ public class AccountManager extends BaseManager{ private UserLander lander; //存储当前信息的INDEX,因为这个表是在该用户的数据库中,所以user_info表只有一条记录,这一条记录就是当前用户的信息 private static final int CURRENT_INDEX_ID=1; public static AccountManager getInstance(UserLander userLander){ try { return userLander.getManager(AccountManager.class); } catch (ClassNotFoundException e){ return userLander.putManager(new AccountManager(userLander)); } } public AccountManager(UserLander lander) { this.lander = lander; dbManager = lander.getDbManager(); } /** * 从本地数据库读取用户信息 * @return * @throws DbException */ public AccountDO readUserInfo() throws DbException { AccountDO accountDO = dbManager.findById(AccountDO.class,CURRENT_INDEX_ID); return accountDO; } /** * 更新信息 * @param accountDO * @throws DbException */ public void updateUserInfo(AccountDO accountDO) throws DbException { dbManager.update(accountDO); } /** * 创建用户信息 * @param accountDO * @throws DbException */ public void createUserInfo(AccountDO accountDO) throws DbException{ dbManager.dropTable(AccountDO.class); dbManager.saveOrUpdate(accountDO); } /** 
     * Registers the user with the server.
     * @param userName
     * @param password
     * @param saveListener
     */
    public void register(String userName,String password,SaveListener saveListener){
        AccountSO so = new AccountSO();
        so.mobile = userName;
        so.register_time = System.currentTimeMillis();
        // NOTE(review): "<PASSWORD>" is a secret-scrubber artifact — the
        // original call (likely an EncryptUtils hash of the password) was
        // redacted and must be restored before this compiles.
        so.password = <PASSWORD>(password);
        so.device_token = TokenManager.calcToken(userName);
        so.save(saveListener);
    }

    public void updateUserSex(String sex,UpdateListener listener){
        AccountDO accountDO = null;
        try {
            accountDO = readUserInfo();
        } catch (DbException e) {
            e.printStackTrace();
        }
        // NOTE(review): if readUserInfo() threw, accountDO is still null and
        // the next line raises a NullPointerException.
        accountDO.sex = sex;
        AccountSO accountSO = convert(accountDO);
        accountSO.update(accountDO.objectID,listener);
    }

    /**
     * Pulls the matching account data from the server.
     * @param name
     * @param listener
     */
    public void findAccountFromServer(String name, QueryListener<JSONArray> listener){
        BmobQuery query = new BmobQuery("user");
        query.addWhereEqualTo("mobile",name);
        query.setLimit(1);
        query.findObjectsByTable(listener);
    }

    /**
     * Changes the password.
     * @param newPassword
     * @param listener
     */
    public void updatePassword(String newPassword,UpdateListener listener){
        AccountDO accountDO = null;
        try {
            accountDO = readUserInfo();
        } catch (DbException e) {
            e.printStackTrace();
        }
        AccountSO accountSO = convert(accountDO);
        // NOTE(review): both tokens below were redacted by a secret scrubber;
        // the original presumably hashed newPassword here.
        accountSO.password = <PASSWORD>(<PASSWORD>);
        accountSO.update(accountDO.objectID,listener);
    }

    /**
     * Logs out: switches back to the default local user and persists it as the
     * auto-login account.
     */
    public void logout(){
        lander.changeAccount(UserLander.DEFAULT_LOCAL_USER_ID);
        Qs.getConfigSharedPreferences().edit().putString(QsCommonConfig.SP_AUTO_LOGIN_NAME,UserLander.DEFAULT_LOCAL_USER_ID).apply();
    }

    /**
     * Sets the account that is logged in automatically on startup.
     * @param username
     */
    public void setAutoLoginUser(String username){
        SharedPreferences sharedPreferences = Qs.getConfigSharedPreferences();
        sharedPreferences.edit().putString(QsCommonConfig.SP_AUTO_LOGIN_NAME,username).apply();
    }

    /**
     * Updates the user's nickname.
     * @param nickname
     * @param listener
     */
    public void updateUserNickname(String nickname,UpdateListener listener){
        AccountDO accountDO = null;
        try {
            accountDO = readUserInfo();
        } catch (DbException e) {
            e.printStackTrace();
        }
        accountDO.nick_name = nickname;
        AccountSO accountSO = convert(accountDO);
        accountSO.update(accountDO.objectID,listener);
    }

    /**
     * Conversion helper: maps a local AccountDO row onto its server-side
     * AccountSO representation field by field.
     * @param accountDO
     * @return
     */
    public AccountSO convert(AccountDO accountDO){
        AccountSO so = new AccountSO();
        so.id = accountDO.aid;
        so.device_token = accountDO.device_token;
        so.last_login_time = accountDO.last_login_time;
        so.mobile = accountDO.mobile;
        so.nick_name = accountDO.nick_name;
        so.register_time = accountDO.register_time;
        so.sex = accountDO.sex;
        so.status = accountDO.status;
        so.setObjectId(accountDO.objectID);
        return so;
    }
}
<reponame>AlexDid/simple-debts-api const EMAIL_PATTERN = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; const EMAIL_NAME_PATTERN = /^.*(?=@)/; const IMAGES_FOLDER_FILE_PATTERN = /\/images\/.*/; const ACCESS_TOKEN_EXP_SECONDS = 60 * 60; const REFRESH_TOKEN_EXP_SECONDS = 60 * 60 * 24 * 30; export {EMAIL_PATTERN, EMAIL_NAME_PATTERN, IMAGES_FOLDER_FILE_PATTERN, ACCESS_TOKEN_EXP_SECONDS, REFRESH_TOKEN_EXP_SECONDS};
package com.github.programmerr47.vkgroups.background.objects.special;

/**
 * @author <NAME>
 * @since 2016-01-12
 */
public class RepostsInfo {
    // Repost counter — presumably the total number of reposts; confirm against callers.
    private int count;
    // Presumably whether the current user may repost the item.
    private boolean canRepost;
    // Presumably whether the current user has already reposted it.
    private boolean hasRepost;
}
import numpy as np
from sklearn.metrics import mean_squared_error, accuracy_score


class BaseModel(object):
    """Common driver for benchmarking a boosting library.

    Subclasses plug in library-specific behavior through the
    ``_config_model`` / ``_train_model`` / ``_predict`` hooks; ``run_model``
    orchestrates a full configure/train/score cycle.
    """

    def __init__(self):
        # Shared hyper-parameters applied to every benchmarked library.
        self.max_depth = 6
        self.learning_rate = 1
        self.min_split_loss = 1
        self.min_weight = 1
        self.L1_reg = 1
        self.L2_reg = 1
        self.num_rounds = 40
        self.max_bin = 255
        self.use_gpu = True
        self.params = {}
        # The concrete model object differs per library; hooks populate it.
        self.model = None

    def _config_model(self, data):
        """Hook: translate the shared params into library-specific config."""
        pass

    def _train_model(self, data):
        """Hook: train the model and return the elapsed wall-clock time.

        :param data:
        :return:
        """
        pass

    def _predict(self, data):
        """Hook: produce predictions on the test split and return the metric."""
        pass

    def eval(self, data, pred):
        """Score *pred* against ``data.y_test`` using ``data.metric``.

        Supports "RMSE" and "Accuracy"; raises ValueError otherwise.

        :param data:
        :param pred:
        :return:
        """
        metric = data.metric
        if metric == "RMSE":
            # Dump the raw predictions for offline inspection, one per line.
            with open('pred', 'w') as dump:
                dump.writelines(str(value) + '\n' for value in pred)
            return np.sqrt(mean_squared_error(data.y_test, pred))
        if metric == "Accuracy":
            labels = pred
            if data.task == "Classification":
                # Binary task: threshold the probabilities at 0.5.
                labels = pred > 0.5
            elif data.task == "Multiclass classification":
                if pred.ndim > 1:
                    # Per-class scores: pick the argmax class per row.
                    labels = np.argmax(pred, axis=1)
            return accuracy_score(data.y_test, labels)
        raise ValueError("Unknown metric: " + data.metric)

    def run_model(self, data):
        """Configure, train and score the model.

        :param data:
        :return: (elapsed, metric) tuple
        """
        self._config_model(data)
        elapsed = self._train_model(data)
        metric = self._predict(data)
        print("##### Elapsed time: %.5f #####" % (elapsed))
        print("##### Predict %s: %.4f #####" % (data.metric, metric))
        return elapsed, metric

    def model_name(self):
        """Hook: return a human-readable name for the benchmarked library."""
        pass
from blinks import *
import cv2
import dlib
if conf.is_raspi:
    import picamera
from utils import *
from train_face import *
import Copilot.copilot as copi


def most_common(lst):
    """Return the most frequent element of *lst* (ties broken arbitrarily)."""
    return max(set(lst), key=lst.count)


def circle_list(circular, max_elmnts, element):
    """Append *element* to *circular*, evicting the oldest entry once the
    list holds *max_elmnts* items (a simple ring buffer)."""
    if len(circular) >= max_elmnts:
        circular.pop(0)
        circular.append(element)
    else:
        circular.append(element)
    return circular


def main():
    """Identify the driver via face recognition, then monitor for drowsiness.

    Phase 1 loops until a face is seen in 15+ consecutive frames and collects
    candidate identities; phase 2 runs the asleep detector per frame and
    triggers the Copilot alarm when a sustained ASLEEP state is observed.
    """
    # Create Copilot object.
    alicia = copi.copilot_obj()
    counter_asleep = {}
    COUNTER = 0
    TOTAL = 0
    frames_count = 0
    out = cv2.VideoWriter('./output.avi', -1, 20.0, (640,480))
    # initialize dlib's face detector (HOG-based) and then create
    # the facial landmark predictor
    print("[INFO] loading facial landmark predictor...")
    detector = dlib.get_frontal_face_detector()
    predictor = dlib.shape_predictor(conf.shape_predictor)
    # vs = cv2.VideoCapture(1)
    # for i in range(200):
    X_pca, pca, y = train_pca()
    if conf.is_raspi:
        camera = picamera.PiCamera()
        stream = picamera.array.PiRGBArray(camera)
    else:
        stream = cv2.VideoCapture(0)
        # stream = cv2.VideoCapture('6.h264')
    no_faces = True
    detected_frames = 0
    # cv2.destroyAllWindows()
    id_person = []
    initial_val = 0
    # Phase 1: keep grabbing frames until a face has been present for >15
    # consecutive frames, accumulating close-enough (dist < 8) identity votes.
    while no_faces == True:
        ret, frame = stream.read()
        img, faces, coor = face_recognition_2(frame)
        out.write(img)
        if len(faces) != 0:
            detected_frames += 1
            if len(id_person) > 0:
                retrieved_id, dist = test_id(faces, X_pca, pca, y, img, coor, most_common(id_person))
                if dist < 8:
                    id_person.append(retrieved_id)
            else:
                retrieved_id, dist = test_id(faces, X_pca, pca, y, img, coor, '')
                if dist < 8:
                    id_person.append(retrieved_id)
        else:
            detected_frames = 0
        if detected_frames > 15:
            no_faces = False
        cv2.imwrite('gif/00'+str(initial_val)+'.jpg', frame)
        initial_val+=1
    i = 0
    # Require a clear majority of votes before trusting the identity.
    if len(id_person) > 10:
        id_driver = most_common(id_person)
    else:
        id_driver = ''
    cv2.destroyAllWindows()
    key = cv2.waitKey(1) & 0xFF
    if id_driver == 'negative':
        id_driver = ''
        alicia.error_recognition()
        # Unknown driver: capture 100 training frames for later enrollment.
        for i in range(100):
            ret, frame = stream.read()
            img, eyes, faces = face_recognition_train(frame, i, 'unknown')
            cv2.imshow('img',img)
            if cv2.waitKey(1) == 27:
                break  # esc to quit
    alicia.start(id_driver)
    list_asleep = []
    # Phase 2: per-frame drowsiness detection with a 15-frame majority vote.
    while stream.isOpened():
        print(most_common(id_person))
        frame, frames_count, counter_asleep, ASLEEP = initiate_asleep_detector(frames_count, counter_asleep, detector, predictor, stream)
        print(ASLEEP)
        ASLEEP = most_common(circle_list(list_asleep, 15, ASLEEP))
        # Fixed: was `frame == None`, which raises on a numpy array frame.
        if frame is None:
            break
        out.write(frame)
        # show the frame
        print(id_driver)
        cv2.putText(frame, id_driver, (15, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
        cv2.imshow("Frame", frame)
        cv2.imwrite('gif/00'+str(i+initial_val)+'.jpg', frame)
        i += 1
        key = cv2.waitKey(1) & 0xFF
        # if the `q` key was pressed, break from the loop
        if key == ord("q"):
            break
        # Check response for the server
        if ASLEEP and len(list_asleep)>10:
            cv2.putText(frame, "TIRED", (250, 400), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
            # Fixed: was a Python 2 print statement; this form works on 2 and 3.
            print('Driver is sleeping! ')
            alicia.run()
            list_asleep = []
        if conf.is_raspi:
            stream.seek(0)
            stream.truncate()
    # do a bit of cleanup
    # vs.stop()
    stream.release()
    out.release()
    cv2.destroyAllWindows()


# Entry point of the script
if __name__ == "__main__":
    main()
# Competitive-programming script: for each test case, decide whether the n
# numbers (n even) can be fully partitioned into pairs where each pair is
# either same-parity or differs by exactly 1. Reads from stdin, prints YES/NO.
t = int(input())


# NOTE(review): shadows the built-in max(); kept as-is to preserve behavior.
def max(x,y):
    if x > y:
        return x
    else:
        return y


for i in range(0,t):
    n = int(input())  # always an even number
    l = input()
    l = l.split()
    zeros = 0
    ones = 0
    boo = []
    # boo[j] holds the parity (0 even / 1 odd) of l[j]; None marks "paired".
    for j in range(0,n):
        l[j] = int(l[j])
        if l[j] % 2 == 0:
            zeros += 1
            boo.append(0)
        else:
            ones += 1
            boo.append(1)
    #print(l)
    #print(boo)
    done = 0
    # If both parity groups are odd-sized, try to find ONE cross-parity pair
    # of values differing by exactly 1 to even both groups out.
    if max(ones,zeros) % 2 != 0:
        for j in range(0,n):
            if done != 1:
                for k in range(0,n):
                    if done != 1:
                        if ((l[j] - l[k]) == 1) or ((l[j] - l[k]) == -1):
                            if done != 1:
                                boo[j] = None
                                boo[k] = None
                                ones -= 1
                                zeros -= 1
                                done = 1
    #print(ones,zeros)
    #if max(ones,zeros) % 2 == 0:
    #print(boo)
    # Greedily pair up the remaining same-parity elements.
    for jj in range(0,n):
        if boo[jj] != None:
            for kk in range(0,n):
                #print(kk)
                if jj != kk:
                    if (boo[kk] != None):
                        if (boo[jj] == 1) and (boo[kk] == 1):
                            boo[jj] = None
                            boo[kk] = None
                            ones -= 2
                        if (boo[jj] == 0) and (boo[kk] == 0):
                            boo[jj] = None
                            boo[kk] = None
                            zeros -=2
    #print(boo)
    # Anything left unpaired means the partition failed.
    if (1 in boo) or (0 in boo):
        print("NO")
    else:
        print("YES")
/**
 * Displays the correct info in the different views.
 */
private void displayInfo() {
    switch (result) {
        case RESULTWINNER:
            bigResultTextView.setText(R.string.game_result_winner_big);
            String winText = "Congratulations, " + winner + " has won.";
            resultTextView.setText(winText);
            // NOTE(review): winner is dereferenced/recorded here BEFORE the
            // null check below — if winner can be null, this call (and the
            // winText concatenation) already ran with it.
            dataSource.addHighScore(winner, HighScoresDataSource.WIN);
            if (winner != null) {
                // Record a loss for whichever of the two players did not win.
                if (winner.equals(playerNames.get(0))) {
                    dataSource.addHighScore(playerNames.get(1), HighScoresDataSource.LOSS);
                } else {
                    dataSource.addHighScore(playerNames.get(0), HighScoresDataSource.LOSS);
                }
            } else {
                Log.e(TAG, "Winner pointed to null");
            }
            break;
        case RESULTTIE:
            // NOTE(review): the tie headline uses R.string.gameresultactivity,
            // which looks like a generic activity title rather than a
            // draw-specific string — confirm the resource is intended.
            bigResultTextView.setText(R.string.gameresultactivity);
            String drawText = "Between " + playerNames.get(0) + " and " + playerNames.get(1) + ".";
            resultTextView.setText(drawText);
            dataSource.addHighScore(playerNames.get(0), HighScoresDataSource.DRAW);
            dataSource.addHighScore(playerNames.get(1), HighScoresDataSource.DRAW);
            break;
    }
    // Mark that at least one high-score entry now exists, then release the DB.
    prefs.edit().putBoolean(SharedPreferenceConstants.HIGHSCOREEXISTS, true).apply();
    dataSource.close();
}
package com.ayush.playsoduko.playsoduko.storyboard;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;

import com.ayush.playsoduko.playsoduko.storyboard.play_mode.SinglePlayerGame;
import com.ayush.playsoduko.playsoduko.utilities.SudokuBoard;
import com.ayush.playsoduko.playsoduko.storyboard.play_mode.MultiPlayerGame;
import com.ayush.playsoduko.playsoduko.R;

/**
 * This activity represents the activity which displays the difficulty difficultySeek bar and the user has the
 * option of playing locally or online with an opponent.
 *
 * @author ayushranjan
 * @see SinglePlayerGame
 * @since 30/10/16.
 */
public class PlayActivity extends Activity {

    private SeekBar difficultySeek;
    private TextView difficultyText;
    private Button localPlayButton;
    private Button onlinePLayButton;
    // Highest selectable difficulty; seek-bar progress is 0-based, so the
    // displayed/forwarded difficulty is always progress + 1.
    public static final int MAX_DIFFICULTY = 5;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.play_layout);
        initElements();
    }

    // Wires up the seek bar and the two play-mode buttons.
    private void initElements() {
        difficultySeek = findViewById(R.id.difficulty_seek_bar);
        difficultyText = findViewById(R.id.seek_bar_text_view);
        localPlayButton = findViewById(R.id.play_locally_button);
        onlinePLayButton = findViewById(R.id.play_online_button);

        // set default value on seek bar
        difficultySeek.setProgress(2);
        difficultyText.setText(String.format("%d/%d", 3, MAX_DIFFICULTY));

        // Keep the "n/5" label in sync while the user drags the bar.
        difficultySeek.setOnSeekBarChangeListener(
                new SeekBar.OnSeekBarChangeListener() {
                    @Override
                    public void onStopTrackingTouch(SeekBar seekBar) {
                    }

                    @Override
                    public void onStartTrackingTouch(SeekBar seekBar) {
                    }

                    @Override
                    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                        difficultyText.setText(String.format("%d/%d", (progress + 1), MAX_DIFFICULTY));
                    }
                }
        );

        // Launch a local single-player game with the chosen difficulty.
        localPlayButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getApplicationContext(), SinglePlayerGame.class);
                intent.putExtra(SudokuBoard.DIFFICULTY_TAG, (difficultySeek.getProgress() + 1));
                startActivity(intent);
            }
        });

        // Launch an online multi-player game with the chosen difficulty.
        onlinePLayButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getApplicationContext(), MultiPlayerGame.class);
                intent.putExtra(SudokuBoard.DIFFICULTY_TAG, (difficultySeek.getProgress() + 1) );
                startActivity(intent);
            }
        });
    }

    public void onBackPressed() {
        finish();
    }
}
/**
 * lab 6
 * The distance between 2 points on a cartesian plane
 * via user input
 *
 * @author PMCampbell
 * @version 2020-09-21
 */
public class Distance {

    public static void main(String[] args) {
        // Fully qualified on purpose: this file has no
        // "import java.util.Scanner;" line, so the unqualified name used by
        // the original would not compile.
        java.util.Scanner stdin = new java.util.Scanner(System.in);
        double dist, x1, x2, y1, y2;

        System.out.println("Distance between 2 points");
        System.out.print("x1: ");
        x1 = stdin.nextDouble();
        System.out.print("x2: ");
        x2 = stdin.nextDouble();
        System.out.print("y1: ");
        y1 = stdin.nextDouble();
        System.out.print("y2: ");
        y2 = stdin.nextDouble();

        // Euclidean distance: sqrt((x2-x1)^2 + (y2-y1)^2).
        dist = Math.sqrt(((x2 - x1)*(x2 -x1)) + ((y2 - y1)*(y2 -y1)));
        System.out.println("The distance is " + dist);
    }
}
use unixstring::UnixString;

use crate::drop_zeroed::DropZeroed;

/// Safely reads the password from a terminal.
///
/// Prompts for `username`'s password on the controlling TTY and returns it as
/// a `UnixString`, zeroing every intermediate copy of the secret on the way.
/// Returns `None` if the TTY read fails or the bytes cannot be pushed into
/// the `UnixString` (e.g. they contain an interior NUL).
pub fn ask_for_password(username: impl AsRef<str>) -> Option<UnixString> {
    println!("[kindly] Password for {}:", username.as_ref());

    let password = rpassword::read_password_from_tty(None).ok()?;

    // As pointed out by @ferrouille, we must not use `UnixString::try_from` to convert here
    // since it could lead to a copy of the password being left unzeroed somewhere in the memory.
    let mut unx = UnixString::with_capacity(password.len());

    let push_worked = unx.push_bytes(password.as_bytes()).is_ok();

    // Zero the intermediate String whether or not the push succeeded.
    password.drop_zeroed();

    if push_worked {
        Some(unx)
    } else {
        // Push failed: zero the (possibly partially filled) buffer too
        // before reporting failure.
        unx.drop_zeroed();

        None
    }
}
If you've ever preordered a videogame and felt disappointed by your purchase later, let me suggest a great thing to try: Stop preordering videogames. It must be difficult to ponder such a thing when the entire blockbuster videogame industry seems intent on getting you to preorder every game, every time. That up-sell was once confined to the checkout counter at GameStop, where if you so much as took a sideways glance into the store on the way to Wetzel's Pretzels, you'd be asked if you wanted to put down five bucks on an upcoming game. Today, it's a full-court press from the game publishers themselves. Just to name one example, if you go to the Penny Arcade Expo convention in Seattle this weekend, Nintendo will be recruiting players visiting its booth to place a preorder for the new Pokemon games by offering "a collectable gift." Today's preorder campaigns start early and get flogged hard. Square Enix is promising Final Fantasy fans a boatload of physical and digital exclusive bonus items if they would only pay full price, sight unseen, for a game that's spent 10 years in development hell. What could possibly go wrong? Maybe you're such a Pokemon or Final Fantasy devotee that you already know you're going to play the game on day one regardless of its quality. But if you think there's even a slight chance you're going to feel buyer's remorse, you should probably do yourself a lifelong favor and stop paying for games before you know anything about them. And if you're looking for precedent, look no further than No Man's Sky. Sony spent the last few years hyping up the planetary exploration game as one of the most brilliant things ever created by humankind. That's fine; that's exactly what Sony's marketing machine exists to do. But what you are supposed to do, potential customer, is not believe any of this, and instead wait for outside opinions from disinterested parties before you spend your money on the product. 
As it happens, No Man's Sky differed in a variety of ways from the product that was described in interviews, trailers, and other pre-release avenues of information. Hello Games Games change during development, especially ones that, like No Man's Sky, are experimenting with novel gameplay formats and mechanics (in this case, a procedurally-generated universe with a reported 18 quintillion planets). A game developer doesn't have to lie to talk up features that don't make it into the final product—a team can simply have grand ambitions and then have to whittle them down so they can actually ship. Granted, underdelivering on one single game does not a Peter Molyneux make. Molyneux isn't Molyneux because he overpromised once; the Fable creator earned his reputation as the Baron Munchausen of the games industry because he continued to escalate his impossible promises from game to game, a practice that finally came to its sublime conclusion when he claimed that the ending of his nonsense mobile clicker game would be literally "life-changing." Game developers can find themselves under an insane amount of scrutiny during a game's gestation; they're constantly asked questions about their work in progress. We can hope that Hello Games learns from being burned. Smart developers would learn to underpromise and overdeliver. Maybe it's even knuckling down as we speak to add those missing features into the game. Who knows? But whether or not you think what happened with No Man's Sky is egregious, the fact remains you could have escaped any damage by simply waiting until a few days after it launched to spend your money. And maybe a game that would have disappointed you at full price would have been perfectly fine during a flash sale. Or maybe you could borrow a friend's copy. Or a dozen other ways to play the game that don't involve paying $60 for what's essentially Box #2 from Let's Make a Deal. 
It's disappointing to see players attempt to get refunds for No Man's Sky after playing the game for 50 hours. I am not a lawyer, but I watch a lot of Judge Judy, and one of Her Televised Honor's favorite legal metaphors goes something like this: "You go into a restaurant, they bring you a steak, you eat a bite of it. You say, 'this is no good, I don't want it.' That's fine. But if you eat the steak? The whole steak? And then you say you want your money back? No way." Square Enix Square Enix's Deus Ex: Mankind Divided has also come under fire as of late for its preorder campaign. The simple version: Bonus items that were awarded with preorder turned out—after money had already changed hands and the game was released—to be single-use items, akin to buying "boosts" in mobile games that disappear after being consumed. It's disconcerting to see pay-for-play mechanics becoming more prevalent—and why wouldn't it? A premium-priced game isn't supposed to hit you up for more cash mid-playthrough like a free-to-play mobile title. We're going to see this happen more and more often in the future, though, and an easy insurance policy would be not spend that money before you learn how the game is really structured. The point of preordering a game used to be to guarantee yourself a copy on day one. But today? For a massive tentpole release? It's highly unlikely anyone will run out of physical discs, and certainly nobody's going to run out of downloads. Today, preordering benefits the game's publisher, or GameStop, but not the consumer. And those who benefit know that—hence the increasingly elaborate bonus gewgaws they offer you in return for your money. Again, if $60 is the sort of cash you might find between your couch cushions, then go nuts. But if there's even the slightest chance you'll feel burned, you have an option: just wait.
{-# LANGUAGE DuplicateRecordFields,RecordWildCards,TemplateHaskell #-}
-- | hbgp router entry point: loads the configuration, spins up the
-- redistributor / console / monitor / session threads, then blocks on the
-- exit flag until shutdown is requested.
module Main where

import Development.GitRev
import Paths_hbgp(version)
import Data.Version(showVersion)
import System.Environment(getArgs)
import System.Exit
import Data.List(intersect)
import Network.Socket
import Control.Monad(unless)
import Control.Concurrent
import qualified Data.Map.Strict as Data.Map
import System.FilePath.Posix(takeBaseName)

import qualified Session.Session as Session
import BGPRib.BGPRib
import Router.Config
import Router.BgpFSM
import Router.Collision
import Router.Global
import Router.Redistributor(redistribute)
import Router.Console(startConsole)
import Router.Monitor(startMonitor)
import Router.Log

main :: IO ()
main = do
    info banner
    (rawConfig, fileBaseName) <- getConfig
    config <- checkCapabilities rawConfig >>= fixCapabilities
    global <- buildGlobal config fileBaseName
    -- Background services: route redistribution, operator console, monitor.
    _ <- forkIO (redistribute global)
    _ <- forkIO (startConsole global)
    _ <- forkIO (startMonitor global)
    let app = bgpFSM global
    debug $ "connecting to " ++ show (activePeers config)
    debug $ "activeOnly = " ++ show (activeOnly config)
    -- BGP listens on the well-known port 179; passive connections are only
    -- accepted when activeOnly is disabled.
    _ <- forkIO $ Session.session 179 app (configListenAddress config) (activePeers config) (not $ activeOnly config)
    info $ "Router " ++ fileBaseName ++ " ready"
    takeMVar (exitFlag global)
    -- gracefull cleanup would have to be called here
    -- currently, sessions just fall of a cliff edge (TCP reset)

-- Version banner; appends the git branch unless built from master.
banner = "hbgp " ++ showVersion version ++ if "master" == $(gitBranch) then "" else " (" ++ $(gitBranch)++ ")"

-- Reads the configuration file named on the command line (default
-- "bgp.conf"); exits immediately when a version flag is given.
getConfig :: IO (Config, String)
getConfig = do
    args <- getArgs
    unless (null $ intersect args ["--version","-V","-v"]) exitSuccess
    let (configPath,configName) = if null args then ("bgp.conf","Router") else (head args, takeBaseName $ head args)
    configString <- readFile configPath
    let rawConfig = read configString :: Config
    return (buildPeerConfigs rawConfig,configName)

-- Assembles the shared Global state (RIB, collision detector, session map,
-- monitor channel, exit flag) from the parsed configuration.
buildGlobal :: Config -> String -> IO Global
buildGlobal c@Config{..} configName = do
    let config = c
        gd = GlobalData { myAS = configAS , myBGPid = configBGPID }
        routerName = configName
        ld = localPeer gd
        delayOpenTimer = configDelayOpenTimer
        initialHoldTimer = configInitialHoldTimer
        -- TODO - configure this in configuration file
        listenAddress = SockAddrInet 179 0 -- listen on all intefaces by default...
        -- TODO the map creation should be in Config...
        peerMap = Data.Map.fromList $ map (\pc -> (peerConfigIPv4 pc,pc)) configConfiguredPeers
        logger = putStrLn
    exitFlag <- newEmptyMVar
    collisionDetector <- mkCollisionDetector
    sessions <- newMVar Data.Map.empty
    rib <- BGPRib.BGPRib.newRib ld
    monitorChannel <- newChan
    return Global {..}
/***************************************
 * Adds a sub-fragment to be displayed as a view. The parameter for the view
 * fragment will be added automatically to the input parameters of this
 * instance. Therefore the parameter lists of this instance MUST be mutable!
 *
 * <p>Because a view has no explicit buttons like dialogs it must be closed
 * by the creating code by invoking the {@link ViewFragment#hide()} method
 * on the returned view fragment instance on a corresponding interaction.
 * </p>
 *
 * @param sParamNameTemplate The name template to be used for generated
 *                           view parameter names
 * @param rContentFragment   The fragment to be displayed as the view
 *                           content
 * @param bModal             TRUE for a modal view
 *
 * @return The new view fragment to provide access to it's method {@link
 *         ViewFragment#hide()}
 */
public ViewFragment showView(String sParamNameTemplate, InteractionFragment rContentFragment, boolean bModal)
{
    // Resolve the display mode first, then build and show the fragment.
    ViewDisplayType eDisplayType = bModal ? ViewDisplayType.MODAL_VIEW : ViewDisplayType.VIEW;

    ViewFragment rViewFragment = new ViewFragment(sParamNameTemplate, rContentFragment, eDisplayType);

    rViewFragment.show(this);

    return rViewFragment;
}
/**
 * Used by reverse ajax proxies to send data back to the server
 * @param key The unique id under which a callback is registered
 * @param data The data to decode and pass to the callback
 */
public void activateCallback(String key, RealRawData data)
{
    try
    {
        DefaultCallbackHelper.executeCallback(key, data);
    }
    catch (Exception ex)
    {
        // Swallow after logging so one failing callback cannot break the
        // delivery of the remaining ones.
        // NOTE(review): assumption based on the broad catch here — confirm
        // callers rely on this best-effort behavior.
        log.error("Failed to marshall data from callback", ex);
    }
}
import * as React from 'react'; import { Bar } from 'react-chartjs-2'; export const BarChart = ({ labels, title, data }) => { const barData = { labels, datasets: [ { label: title, backgroundColor: 'rgba(54, 162, 235, 1)', data, maxBarThickness: 30, }, ], }; const barOptions = { scales: { xAxes: [ { stacked: true, }, ], yAxes: [ { ticks: { beginAtZero: true, stepSize: 3600, suggestedMin: 0, suggestedMax: 3600, callback: value => { const seconds = Math.abs(value); const hours = (seconds / 3600).toFixed(2); return `${hours} h`; }, }, }, ], }, tooltips: { enabled: true, callbacks: { label: function(tooltipItem, data) { const seconds = Math.abs(tooltipItem.yLabel); const hours = (seconds / 3600).toFixed(2); return `${hours} h`; }, }, }, responsive: true, }; return <Bar data={barData} options={barOptions} width={350} />; };
/************************************************************************************************************************
  SiLabs_Scan_Loop function
  Use:      scan the band using SiLabs_API_Channel_Seek_Init and SiLabs_API_Channel_Seek_Next
              Used to fill the carriers table
  Returns: the number of detected channels during this loop
************************************************************************************************************************/
signed int SiLabs_Scan_Loop (void) {
  int previous_count;
  int new_count;
  int standard;
  int i;
  int start;
  previous_count = SiLabs_Scan_Table_Count();
  standard = Silabs_UserInput_standard();
  /* Bail out if the user aborted the standard selection. */
  if (standard < 0 ) return 0;
  SiLabs_API_switch_to_standard (front_end, standard, 0);
  Silabs_UserInput_SeekInit();
  start = system_time();
  /* Keep seeking until no further carrier is found. */
  while (Silabs_UserInput_SeekNext()) { }
  SiLabs_API_Channel_Seek_End(front_end);
  printf("\nScan duration %.3f s\n", (system_time() - start)/1000.0 );
  new_count = SiLabs_Scan_Table_Count();
  if (new_count - previous_count > 0) {printf("There are %d carriers now in the table (%d new carriers)\n", new_count, new_count - previous_count);}
  /* Print the textual description of each carrier found during this loop. */
  for (i = previous_count; i < new_count; i++) {
    SiLabs_Scan_Table_Carrier_Text(i, messageBuffer);
    printf("%s\n", messageBuffer);
  }
  return SiLabs_Scan_Table_Count() - previous_count;
}
/** Creates a new element DebugValueInfo.
 @param identifier numeric id of the debug value
 @param textValue  textual representation of the value
*/
_DebugValueInfo(long long identifier, const typename EncodingT::string_t& textValue)
: m_identifier(identifier),
  m_textValue(textValue)
{
    // Category instances are managed by the logging framework; we only keep
    // a borrowed pointer here.
    m_logger = &Category::getInstance(LOGNAME);
    m_logger->debugStream() << "constructor _DebugValueInfo " << m_identifier << ' ' << A(m_textValue) << ' ';
}
def parse_datetime_string(s, *args, **kwargs):
    """Extract the first date/datetime found in *s*.

    Extra positional/keyword arguments are forwarded to
    ``DateExtractor.extract``; the extraction resolution is forced to seconds.

    :param s: text to scan for a date or datetime
    :return: tuple ``(dt_str, wd_precision)`` where *dt_str* is the extracted
        value and *wd_precision* is the matching Wikidata ``Precision`` member
        (or ``None`` when the detected resolution has no mapping)
    :raises ValueError: if no date / datetime is detected
    """
    kwargs['date_value_resolution'] = DateResolution.SECOND
    de = DateExtractor()
    e = de.extract(s, *args, **kwargs)
    if not e:
        raise ValueError('No date / datetime detected')
    dt_str = e[0].value
    # Single mapping table instead of the original broken-up if/elif chain
    # (the chain mixed `if` and `elif`, which only worked because the
    # resolutions are mutually exclusive).
    resolution_to_precision = {
        DateResolution.SECOND: Precision.second,
        DateResolution.MINUTE: Precision.minute,
        DateResolution.HOUR: Precision.hour,
        DateResolution.DAY: Precision.day,
        DateResolution.MONTH: Precision.month,
        DateResolution.YEAR: Precision.year,
    }
    # NOTE(review): relies on the extractor's private attribute, as before.
    wd_precision = resolution_to_precision.get(de._last_original_resolution)
    return dt_str, wd_precision
/** * Service Implementation for managing Restaurant. */ @Service @Transactional public class RestaurantServiceImpl implements RestaurantService { private final Logger log = LoggerFactory.getLogger(RestaurantServiceImpl.class); private final RestaurantRepository restaurantRepository; public RestaurantServiceImpl(RestaurantRepository restaurantRepository) { this.restaurantRepository = restaurantRepository; } /** * Save a restaurant. * * @param restaurant the entity to save * @return the persisted entity */ @Override public Restaurant save(Restaurant restaurant) { log.debug("Request to save Restaurant : {}", restaurant); return restaurantRepository.save(restaurant); } /** * Get all the restaurants. * * @param pageable the pagination information * @return the list of entities */ @Override @Transactional(readOnly = true) public Page<Restaurant> findAll(Pageable pageable) { log.debug("Request to get all Restaurants"); return restaurantRepository.findAll(pageable); } /** * Get one restaurant by id. * * @param id the id of the entity * @return the entity */ @Override @Transactional(readOnly = true) public Optional<Restaurant> findOne(Long id) { log.debug("Request to get Restaurant : {}", id); return restaurantRepository.findById(id); } /** * Delete the restaurant by id. * * @param id the id of the entity */ @Override public void delete(Long id) { log.debug("Request to delete Restaurant : {}", id); restaurantRepository.deleteById(id); } }
/// Unwraps the underlying HANDLE and transfers ownership to the caller.
///
/// # Safety
/// After this call the caller is responsible for closing the returned
/// handle; this wrapper will no longer do so.
pub unsafe fn into_inner(self) -> RawHandle {
    // Make sure our destructor doesn't actually close
    // the handle we just transferred to the caller.
    let handle = self.inner();
    mem::forget(self);
    handle
}
<filename>logigator-editor/src/app/models/bi-directional-map.ts export class BiDirectionalMap<K, V> { private readonly map = new Map<K, V>(); private readonly reverse = new Map<V, K>(); constructor(map?: object | Map<K, V> | Array<[K, V]>) { if (!map) return; if (map instanceof Map) { map.forEach((value, key) => { this.set(key, value); }); } else if (Array.isArray(map)) { map.forEach((entry) => { this.set(entry[0], entry[1]); }); } else { Object.keys(map).forEach((key) => { this.set(key as any, map[key]); }); } } get size(): number { return this.map.size; } public set(key: K, value: V): this { if (this.map.has(key)) { const existingValue = this.map.get(key); this.reverse.delete(existingValue); } if (this.reverse.has(value)) { const existingKey = this.reverse.get(value); this.map.delete(existingKey); } this.map.set(key, value); this.reverse.set(value, key); return this; } public clear(): void { this.map.clear(); this.reverse.clear(); } public getValue(key: K): V { return this.map.get(key); } public getKey(value: V): K { return this.reverse.get(value); } public deleteKey(key: K): boolean { const value = this.map.get(key); this.reverse.delete(value); return this.map.delete(key); } public deleteValue(value: V): boolean { const key = this.reverse.get(value); this.map.delete(key); return this.reverse.delete(value); } public hasKey(key: K): boolean { return this.map.has(key); } public hasValue(value): boolean { return this.reverse.has(value); } public keys(): IterableIterator<K> { return this.map.keys(); } public values(): IterableIterator<V> { return this.reverse.keys(); } public entries(): IterableIterator<[K, V]> { return this.map.entries(); } public forEach(callbackfn: (value: V, key: K, map: Map<K, V>) => void): void { return this.map.forEach(callbackfn); } }
from copy import deepcopy
from functools import wraps
from itertools import count

from django.db.models.query import QuerySet, BaseIterable

VERSION = (0, 2, 3)
__all__ = ['NV', 'model_fields_map']
__title__ = 'DjangoNestedValues'
__version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2]))
__author__ = '<NAME>'
__contact__ = '<EMAIL>'
__homepage__ = 'https://github.com/aklim007/DjangoNestedValues'
__copyright__ = '<NAME> <<EMAIL>>'


def model_fields_map(model, fields=None, exclude=None, prefix='', prefixm='', attname=True, rename=None):
    """Build a list of (ORM path, output name) tuples for a model's fields.

    Only regular concrete fields are handled; m2m and generic relations are
    not included.

    :param model: model class or instance whose fields are mapped
    :param None | collections.Container fields: whitelist of field names to include
    :param None | collections.Container exclude: blacklist of field names to skip
    :param str prefix: ORM path prefix under which the model lives in the query
    :param str prefixm: prefix prepended to each resulting output name
    :param bool attname: use ``attname`` (``user_id``) instead of ``name``
        (``user``); the two differ only for foreign keys
    :param None | dict rename: mapping of original field names to new output names
    :rtype: list[tuple[str]]
    """
    data = []
    rename = rename or {}
    attribute = 'attname' if attname else 'name'
    for f in model._meta.concrete_fields:
        if fields and f.attname not in fields and f.name not in fields:
            continue
        # Skip the field if either of its names is blacklisted.
        # BUGFIX: the previous condition (`f.attname in exclude and
        # f.name not in exclude`) never excluded a foreign key referenced by
        # its `name` (e.g. exclude=['user'] with attname 'user_id'), making it
        # asymmetric with the `fields` whitelist check above.
        if exclude and (f.attname in exclude or f.name in exclude):
            continue
        param_name = getattr(f, attribute)
        new_param_name = rename[param_name] if param_name in rename else param_name
        data.append(('{}{}'.format(prefix, param_name), '{}{}'.format(prefixm, new_param_name)))
    return data


class NV(object):
    """Describes how flat ``values_list`` rows are reassembled into nested dicts."""

    __slots__ = ('_fieldsmap', '_nest', '_values_list', '_lfieldsmap', '_parent', '_c', 'ifnone')

    def __init__(self, fieldsmap, nest=None, ifnone=None):
        """
        :param fieldsmap: list of (ORM path, output key) tuples; plain strings
            are accepted and treated as (path, path)
        :type fieldsmap: list[tuple[str]]
        :param nest: mapping of output key -> nested fieldsmap (or NV instance)
        :type nest: None | dict[str, list[tuple[str]] | NV]
        :param ifnone: output key which, when its value is None, collapses the
            whole nested dict to None
        :type ifnone: None | str
        """
        self._fieldsmap = fieldsmap
        # (output key, row index) pairs, filled lazily by `values_list`.
        #: :type: list[tuple]
        self._lfieldsmap = []
        #: :type: dict[str, NV | list[tuple[str]]]
        self._nest = nest or {}
        self._values_list = None
        # Parent NV in the tree (None for the root).
        #: :type: None | NV
        self._parent = None
        # Shared positional counter; the root owns it, children reuse it.
        self._c = None
        self.ifnone = ifnone
        for key, nest in self._nest.items():
            if not isinstance(nest, NV):
                self._nest[key] = NV(fieldsmap=nest)
            # Every nested NV (including ones passed in ready-made) must point
            # back to its parent so the positional counter is shared.
            self._nest[key]._parent = self

    def _parse_value(self, value):
        """Convert one flat result row into a (possibly nested) dict.

        :type value: tuple
        :rtype: dict | None
        """
        lv = {}
        for newkey, indx in self._lfieldsmap:
            lv[newkey] = value[indx]
        # Collapse the whole dict to None when the sentinel key is None
        # (typically the PK of an optional relation).
        if self.ifnone is not None and lv[self.ifnone] is None:
            return None
        for key, nest in self._nest.items():
            lv[key] = nest._parse_value(value)
        return lv

    @property
    def values_list(self):
        """Flat list of ORM paths (duplicates possible), in the exact order in
        which they must be passed to ``QuerySet.values_list``.

        As a side effect, assigns every output key its positional index in the
        result row (``_lfieldsmap``), sharing one counter across the NV tree.

        :rtype: list[str]
        """
        if self._values_list is not None:
            return self._values_list
        # Normalize plain-string entries to (path, path) tuples.
        for indx, key in enumerate(self._fieldsmap):
            if isinstance(key, str):
                self._fieldsmap[indx] = (key, key)
        self._c = count() if self._parent is None else self._parent._c
        v = [key[0] for key in self._fieldsmap]
        self._lfieldsmap = [(key[1], next(self._c)) for key in self._fieldsmap]
        for nest in self._nest.values():
            v += nest.values_list
        self._values_list = v
        return self._values_list


class NestedValuesIterable(BaseIterable):
    """Iterable that yields the query results in the requested nested format."""

    def __iter__(self):
        queryset = self.queryset
        query = queryset.query
        compiler = query.get_compiler(queryset.db)
        _parse_value = queryset._nested_values._parse_value
        _lfieldsmap = queryset._nested_values._lfieldsmap
        values_list = queryset._nested_values.values_list
        if not query.extra_select and not query.annotation_select:
            # Fast path: row positions already match the requested order.
            for row in compiler.results_iter():
                yield _parse_value(row)
        else:
            # extra/annotation columns shift positions in the raw row:
            # extras come first, then plain fields, then annotations.
            # Re-point every output key at its actual row index.
            # NOTE(review): only the root NV's _lfieldsmap is re-indexed here;
            # nested NVs keep their original positions — confirm behavior when
            # extra/annotations are combined with `nest`.
            field_names = list(query.values_select)
            extra_names = list(query.extra_select)
            afields = list(query.annotation_select)
            correction = len(extra_names)
            fnames_len = len(field_names)
            afields_start = correction + fnames_len
            for indx, v in enumerate(values_list):
                key, position = _lfieldsmap[indx]
                if v in query.annotation_select:
                    correction -= 1
                    position = afields_start + afields.index(v)
                else:
                    position += correction
                _lfieldsmap[indx] = key, position
            for row in compiler.results_iter():
                yield _parse_value(row)


def _clone(self, **kwargs):
    """Reference re-implementation of ``QuerySet._clone``.

    NOTE(review): this function is never attached to anything in this module
    (only ``clone_wrapper`` is used for patching); it appears to be kept for
    reference/compatibility — confirm before removing.
    """
    query = self.query.clone()
    if self._sticky_filter:
        query.filter_is_sticky = True
    clone = self.__class__(model=self.model, query=query, using=self._db, hints=self._hints)
    clone._for_write = self._for_write
    clone._prefetch_related_lookups = self._prefetch_related_lookups[:]
    clone._known_related_objects = self._known_related_objects
    clone._iterable_class = self._iterable_class
    clone._fields = self._fields
    clone.__dict__.update(kwargs)
    return clone


def clone_wrapper(old_obj: QuerySet):
    """Decorator around ``QuerySet._clone`` that carries ``_nested_values``
    over to every clone.

    Only the QuerySet on which ``nested_values`` was called gets patched, so
    the patch is applied per-instance rather than globally.  The object (not a
    function) is accepted because at this point the method is already bound to
    a concrete instance.
    """
    # Remember the original clone function of the given instance.
    _clone = old_obj._clone

    @wraps(_clone)
    def wrapper(**kwargs):
        new_obj = _clone(**kwargs)
        # Carry a copy of _nested_values over to the new object...
        new_obj._nested_values = deepcopy(getattr(old_obj, '_nested_values', None))
        # ...and patch its clone function as well, so chained clones keep it.
        new_obj._clone = clone_wrapper(new_obj)
        return new_obj
    return wrapper


def _nested_values(self, fieldsmap, nest=None) -> QuerySet:
    """Entry point installed as ``QuerySet.nested_values`` by :func:`setup`.

    Returns a clone of the queryset that yields nested dicts as described by
    ``fieldsmap``/``nest`` instead of flat tuples.
    """
    nv = NV(fieldsmap=fieldsmap, nest=nest)
    clone = self._values(*nv.values_list)
    clone._iterable_class = NestedValuesIterable
    clone._nested_values = nv
    clone._clone = clone_wrapper(clone)
    return clone


def setup():
    """Initialize the module: attach ``nested_values`` to ``QuerySet``."""
    QuerySet.nested_values = _nested_values
/**
 * Model tests for CharacterMiningResponse.
 *
 * NOTE: auto-generated test skeleton — the TODO bodies below have not been
 * filled in yet, so these tests currently only verify that the model class
 * can be instantiated.
 */
public class CharacterMiningResponseTest {

    // Instance under test; constructing it exercises the default constructor.
    private final CharacterMiningResponse model = new CharacterMiningResponse();

    /**
     * Model tests for CharacterMiningResponse
     */
    @Test
    public void testCharacterMiningResponse() {
        // TODO: test CharacterMiningResponse
    }

    /**
     * Test the property 'date'
     */
    @Test
    public void dateTest() {
        // TODO: test date
    }

    /**
     * Test the property 'quantity'
     */
    @Test
    public void quantityTest() {
        // TODO: test quantity
    }

    /**
     * Test the property 'typeId'
     */
    @Test
    public void typeIdTest() {
        // TODO: test typeId
    }

    /**
     * Test the property 'solarSystemId'
     */
    @Test
    public void solarSystemIdTest() {
        // TODO: test solarSystemId
    }

}
Adenine and adenosine metabolism in intact erythrocytes deficient in adenosine monophosphate-pyrophosphate phosphoribosyltransferase: a study of two families. 1. We have studied the metabolism of adenine and adenosine in intact erythrocytes from three children and four adults of two separate families. The children were homozygotes for a deficiency of AMP-pyrophosphate phosphoribosyltransferase (adenine phosphoribosyltransferase, EC 2.4.2.7), and the parents heterozygotes. 2. In this intact cell system at physiological concentrations of inorganic phosphate (Pi) and adenine heterozygote adenine phosphoribosyltransferase activity was normal, whereas at higher adenine concentrations (1.0–10 μmol/l) the four heterozygotes showed approximately 50% of control activity. At high concentrations of Pi (18 mmol/l) and adenine (10 μmol/l) and with extended incubation heterozygote activity was again indistinguishable from normal. Activity of the enzyme in homozygotes was negligible under all these conditions. 3. Formation of inosine 5′-monophosphate from hypoxanthine at high Pi concentrations was normal in both homozygotes and heterozygotes. Thus no abnormality of either of the enzymes IMP—pyrophosphate phosphoribosyltransferase (hypoxanthine phosphoribosyltransferase, EC 2.4.2.8) or phosphoribosyl pyrophosphate synthetase (ribose phosphate pyrophosphokinase, EC 2.7.6.1) was found. 4. To ascertain whether deficiency of adenine phosphoribosyltransferase was linked with enhanced conversion of adenosine into adenine nucleotides at the expense of its deamination pathways, the complete metabolism of adenosine was studied over the range 0.1–9.0 μmol/l. No such increase was observed for either heterozygotes or homozygotes. 5. 
Insignificant formation of either adenine from adenosine or vice versa in the erythrocytes of homozygotes lacking adenine phosphoribosyltransferase but capable of forming hypoxanthine from adenosine, supports the concept that adenine and adenosine are not directly interconvertible and do not utilize purine nucleoside phosphorylase (EC 2.4.2.1) in erythrocytes. 6. Measurement of erythrocyte nucleotide concentrations by high-performance liquid chromatography revealed no obvious abnormalities. ATP:ADP:AMP proportions were normal (approximately 10:1:0.1) and the total adenine nucleotide pool was at the lower limit of the normal range for both homozygotes and heterozygotes. These observations imply that deficiency of adenine phosphoribosyltransferase does not grossly affect adenine nucleotide synthesis in the erythrocyte.
import ReactDOM from 'react-dom'
import App from './App'
import { RootStoreProvider } from '@src/components/StoreProvider'
import * as stores from '@src/stores/index'

// Application entry point: mount the root <App /> wrapped in the store
// provider (exposing everything exported from @src/stores) at #emp-root.
ReactDOM.render(
  <RootStoreProvider store={stores}>
    <App />
  </RootStoreProvider>,
  document.getElementById('emp-root'),
)
After a broken bone or something as simple as a burn or cut, the body needs time to heal. Skin regrowth and cell regeneration can be stimulated by certain foods to help the body heal faster. Science has shown that foods that are high in vitamin C and zinc aid in this healing process. Vitamin C is a primary nutrient that is used in the process of healing anything as simple as a paper cut or something more significant such as major surgery or torn ligaments. Zinc is a mineral that boosts the immune system, which directly influences the healing process. Some of these foods include meats, fruits, vegetables, and nuts. Meats There are many meat products that help the body heal faster because of the amount of zinc each contains. For example: Beef has the highest content of the zinc nutrient. Other leaner meats such as chicken and turkey have lower amounts of zinc; however, they are still useful in boosting the immune system. A vegetarian can find similar results in protein-packed foods such as peanut butter and almonds. Fruits Vitamin C is a key nutrient found in many fruits that helps the body heal faster. Citrus fruits such as oranges, grapefruit, and even papayas and mangos have a higher content of vitamin C. Fruits such as strawberries and various other berries may have a low percentage of vitamin C but are no less beneficial in the fight for the body to heal faster. Vegetables Strange as it may seem, even certain vegetables contain levels of vitamin C used in the body's healing process. Many studies show that kelp has one of the highest rates of vitamin C. Peppers such as bell peppers, red peppers, and green peppers also contain large amounts of vitamin C. Other vegetables such as broccoli and cauliflower have lower percentages of vitamin C; however, they are still no less important. The key is to make sure vegetables are eaten regularly throughout the day to ensure optimal healing of the body. 
Nuts Nuts naturally contain many nutrients and minerals, such as vitamin C and zinc, that aid the body's healing process. Studies show that nuts have a high content of antioxidants, which help strengthen the immune system. Nuts such as almonds, pecans, walnuts, and Brazil nuts are all great choices to aid in a speedy recovery. This is a great option for the on-the-go individual who may not have time to eat a balanced diet. Just a handful of nuts a day can go a long way. Many foods such as meats, fruits, vegetables, and nuts contain vital nutrients and minerals, such as vitamin C and zinc, which allow the body to heal. By boosting the immune system and speeding up the cell regeneration process, consuming a higher percentage of zinc and vitamin C helps the body heal at a more rapid rate. The key is to make sure to follow specified dietary guidelines to ensure that the proper servings of each food group are eaten daily.
#ifndef VECGEOM_BASE_FpeEnable_H_
#define VECGEOM_BASE_FpeEnable_H_

// Enables floating-point exceptions (SIGFPE on invalid operations and
// divide-by-zero) for any test binary that includes this header.
// feenableexcept is a glibc extension, hence the platform guard below.
#if defined(__GNUC__) && !defined(__APPLE__)
#include <fenv.h>

// Runs automatically before main() via the GCC `constructor` attribute.
static void __attribute__((constructor)) EnableFpeForTests()
{
  // this function is not offered on APPLE MACOS
  feenableexcept(FE_INVALID | FE_DIVBYZERO);
  // feenableexcept(FE_ALL_EXCEPT & ~FE_INEXACT);
}
#endif

// Possible macOS alternative via SSE exception masks (left disabled):
// #ifdef __APPLE__
// #include <xmmintrin.h>
// _MM_SET_EXCEPTION_MASK(_MM_GET_EXCEPTION_MASK() & ~_MM_MASK_INVALID);
// #endif

#endif // VECGEOM_BASE_FpeEnable_H_
Nutritional support of the gut: how and why. Trauma, sepsis, and other conditions of stress are characterized by a hypermetabolic state, in which markedly increased substrate availability is required to meet energy demands for tissue repair and host defenses. Inability to meet these increased metabolic demands results in accelerated visceral protein depletion, impaired immune function, and impaired wound healing. In addition, alteration of the gut flora, impaired host immune defenses, or direct gut mucosal injury may result in gut barrier failure. All of the above derangements, acting in concert, may ultimately lead to sepsis or multiple organ failure. Since enteral nutrition improves resistance to experimentally induced infections, blunts the hypermetabolic response to injury, and maintains intestinal structure and function better than parenteral nutrition, there are many reasons to favor a policy of early enteral feeding in critically ill patients. We demonstrated the safety and efficacy of immediate enteral feeding in patients with major thermal injury, and have found this practice to be applicable to a wide variety of other critically ill patients. These patients had not been felt to be candidates for enteral nutrition due to unfounded fears related to the presence of ileus or fresh gastrointestinal anastomosis.
// dereference is the private locked version of Dereference.
// It drops one parent->child reference and, if the child's reference count
// hits zero, unlinks the child from the flush-list and recursively
// dereferences (and deletes) its own children.
func (intermediateWriter *IntermediateWriter) dereference(child common.Hash, parent common.Hash) {
	// Drop the parent->child edge and its per-parent reference count.
	node := intermediateWriter.dirties[parent]
	if node.children != nil && node.children[child] > 0 {
		node.children[child]--
		if node.children[child] == 0 {
			delete(node.children, child)
			// Account for the removed child hash plus its counter
			// (presumably a 2-byte count — confirm against cachedNode).
			intermediateWriter.childrenSize -= (common.HashSize + 2)
		}
	}
	// If the child is not tracked as dirty, it was already committed
	// elsewhere; nothing further to release.
	node, ok := intermediateWriter.dirties[child]
	if !ok {
		return
	}
	if node.parents > 0 {
		// Guarded decrement: a node can be referenced both by external
		// callers and by other dirty nodes.
		node.parents--
	}
	if node.parents == 0 {
		// Unlink the node from the doubly-linked flush-list, handling the
		// cases where it is the current head (oldest) or tail (newest).
		switch child {
		case intermediateWriter.oldest:
			intermediateWriter.oldest = node.flushNext
			intermediateWriter.dirties[node.flushNext].flushPrev = common.Hash{}
		case intermediateWriter.newest:
			intermediateWriter.newest = node.flushPrev
			intermediateWriter.dirties[node.flushPrev].flushNext = common.Hash{}
		default:
			intermediateWriter.dirties[node.flushPrev].flushNext = node.flushNext
			intermediateWriter.dirties[node.flushNext].flushPrev = node.flushPrev
		}
		// Cascade: release all of this node's children, then delete the
		// node itself and adjust the bookkeeping sizes.
		for _, hash := range node.childs() {
			intermediateWriter.dereference(hash, child)
		}
		delete(intermediateWriter.dirties, child)
		intermediateWriter.dirtiesSize -= common.StorageSize(common.HashSize + int(node.size))
		if node.children != nil {
			intermediateWriter.childrenSize -= cachedNodeChildrenSize
		}
	}
}
/**
 * Frees all the stored resources by calling {@link AutoCloseable#close()} on
 * each one. All resources are closed even when some of them throw.
 *
 * @throws Throwable the first exception thrown while closing; any further
 *         exceptions are attached to it via
 *         {@link Throwable#addSuppressed(Throwable)}.
 */
public void freeAll() throws Throwable {
    Throwable firstFailure = null;
    for (AutoCloseable resource : mResources) {
        try {
            resource.close();
        } catch (Throwable failure) {
            // Keep the first failure as primary; fold the rest into it.
            if (firstFailure == null) {
                firstFailure = failure;
            } else {
                firstFailure.addSuppressed(failure);
            }
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
<reponame>taemon1337/vault-operator // Copyright 2018 The vault-operator Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package operator import ( "context" "reflect" "time" api "github.com/coreos/vault-operator/pkg/apis/vault/v1alpha1" "github.com/coreos/vault-operator/pkg/util/k8sutil" "github.com/coreos/vault-operator/pkg/util/vaultutil" vaultapi "github.com/hashicorp/vault/api" "github.com/sirupsen/logrus" "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/labels" ) // monitorAndUpdateStatus monitors the vault service and replicas statuses, and // updates the status resource in the vault CR item. func (vs *Vaults) monitorAndUpdateStatus(ctx context.Context, vr *api.VaultService) { var tlsConfig *vaultapi.TLSConfig s := api.VaultServiceStatus{ Phase: api.ClusterPhaseRunning, ServiceName: vr.GetName(), ClientPort: k8sutil.VaultClientPort, } for { // Do not wait to update Phase ASAP. 
latest, err := vs.updateVaultCRStatus(ctx, vr.GetName(), vr.GetNamespace(), s) if err != nil { logrus.Errorf("failed updating the status for the vault service: %s (%v)", vr.GetName(), err) } if latest != nil { vr = latest } select { case err := <-ctx.Done(): logrus.Infof("stop monitoring vault (%s), reason: %v", vr.GetName(), err) return case <-time.After(10 * time.Second): } if tlsConfig == nil { var err error tlsConfig, err = k8sutil.VaultTLSFromSecret(vs.kubecli, vr) if err != nil { logrus.Errorf("failed to read TLS config for vault client: %v", err) continue } } vs.updateLocalVaultCRStatus(ctx, vr, &s, tlsConfig) } } // updateLocalVaultCRStatus updates local vault CR status by querying each vault pod's API. func (vs *Vaults) updateLocalVaultCRStatus(ctx context.Context, vr *api.VaultService, s *api.VaultServiceStatus, tlsConfig *vaultapi.TLSConfig) { name, namespace := vr.Name, vr.Namespace sel := k8sutil.LabelsForVault(name) // TODO: handle upgrades when pods from two replicaset can co-exist :( opt := metav1.ListOptions{LabelSelector: labels.SelectorFromSet(sel).String()} pods, err := vs.kubecli.CoreV1().Pods(namespace).List(opt) if err != nil { logrus.Errorf("failed to update vault replica status: failed listing pods for the vault service (%s.%s): %v", name, namespace, err) return } var sealNodes []string var standByNodes []string var updated []string inited := false // If it can't talk to any vault pod, we are not going to change the status. changed := false for _, p := range pods.Items { // If a pod is Terminating, it is still Running but has no IP. 
if p.Status.Phase != v1.PodRunning || p.DeletionTimestamp != nil { continue } vapi, err := vaultutil.NewClient(k8sutil.PodDNSName(p), "8200", tlsConfig) if err != nil { logrus.Errorf("failed to update vault replica status: failed creating client for the vault pod (%s/%s): %v", namespace, p.GetName(), err) continue } hr, err := vapi.Sys().Health() if err != nil { logrus.Errorf("failed to update vault replica status: failed requesting health info for the vault pod (%s/%s): %v", namespace, p.GetName(), err) continue } changed = true if k8sutil.IsVaultVersionMatch(p.Spec, vr.Spec) { updated = append(updated, p.GetName()) } // TODO: add to vaultutil? if hr.Initialized && !hr.Sealed && !hr.Standby { s.VaultStatus.Active = p.GetName() } if hr.Initialized && !hr.Sealed && hr.Standby { standByNodes = append(standByNodes, p.GetName()) } if hr.Sealed { sealNodes = append(sealNodes, p.GetName()) } if hr.Initialized { inited = true } } if !changed { return } s.VaultStatus.Standby = standByNodes s.VaultStatus.Sealed = sealNodes s.Initialized = inited s.UpdatedNodes = updated } // updateVaultCRStatus updates the status field of the Vault CR. func (vs *Vaults) updateVaultCRStatus(ctx context.Context, name, namespace string, status api.VaultServiceStatus) (*api.VaultService, error) { vault, err := vs.vaultsCRCli.VaultV1alpha1().VaultServices(namespace).Get(name, metav1.GetOptions{}) if err != nil { return nil, err } if reflect.DeepEqual(vault.Status, status) { return vault, nil } vault.Status = status _, err = vs.vaultsCRCli.VaultV1alpha1().VaultServices(namespace).Update(vault) return vault, err }
from sstcam_sandbox import get_checs


class Dataset:
    """Base dataset: fixes the pixel of interest shared by all datasets."""

    def __init__(self, **kwargs):
        self.poi = 888


class MC(Dataset):
    """Monte-Carlo dataset: DL1 files for runs 43505-43520 plus SPE config."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Runs 43490-43504 are deliberately left out of the fit.
        self.dl1_paths = [
            get_checs("d190312_spectra_fitting/mc/run{}_dl1.h5".format(run))
            for run in range(43505, 43521)
        ]
        self.spe_config = get_checs("d190312_spectra_fitting/mc/spe_config.yml")
RENO, Nev. — The Nevada Democratic Party said it would work with law enforcement "to prosecute anyone who falsely registers as a Democrat to caucus" on Saturday. In the statement, Chairwoman Roberta Lange said, "After reviewing Nevada law, we believe that registering under false pretenses in order to participate in the Democratic caucuses for purposes of manipulating the presidential nominating process is a felony." The statement cites NRS 293.800, which reads: "A person who, for himself, herself or another person, willfully gives a false answer or answers to questions propounded to the person by the registrar or field registrar of voters relating to the information called for by the application to register to vote, or who willfully falsifies the application in any particular, or who violates any of the provisions of the election laws of this State or knowingly encourages another person to violate those laws is guilty of a category E felony and shall be punished as provided in NRS 193.130." The party's statement comes in response to a press release from the University of Nevada, Reno College Republicans, which said that it appeared to be legal for people to register as both a Republican and a Democrat for Nevada's caucuses. "Asking individuals to participate in both is not a normal activity but it is also not illegal; nobody will get arrested," chapter president Miranda Hoover said in the statement. "I am hopeful that the loophole will be fixed in 2020 and while I will neither endorse nor demean the act of Republicans taking part in both caucuses, but it is important for this issue to be recognized." That statement came in response to an article saying "a quirk in the system" could allow anyone registered as a Democrat or Republican to vote in both caucuses. "Republicans closed their registration rolls on Feb. 13, and that is the file that will be used on Feb. 23. Democrats are allowing same-day registration on Saturday," Ralston wrote. 
"So: A Republican registered by Feb. 13 could show up at a Democratic caucus site on Saturday, switch to the Democratic Party, vote and then still participate on Tuesday because the party switch would not show up on the GOP caucus rolls." The attention was enough that Sen. Harry Reid, D-Nev., released a statement accusing Republicans of "resorting to trickery and gimmicks." "The American people deserve a fair voting process, and I will do everything in my power to ensure that these disgraceful Republican tactics do not interfere with the voice of Nevada voters," Reid said in his statement. The president of the College Republicans chapter at the University of Nevada, Las Vegas reportedly slammed the statement from the Reno chapter. "The UNLV College Republicans strongly denounce the actions of the UNR College Republican president, Miranda Hoover, to subvert the electoral process during the Nevada caucuses," Julian Batz, president of the UNLV chapter, said. "As a millennial Republican I expect more from my counterpart, and would hope that both the Nevada Democratic and Republican Parties will take steps to ensure that Miranda's malevolent efforts are not tolerated." The Washoe County Republican Party released a revised statement to oppose the idea of voting in both caucuses. "I believe that if we participate to intentionally affect the results of the Democrat nomination, then we are ultimately losing the very freedoms that caused many of our parents and grandparents to come to this country," Chairman Adam Khan said in the statement. When asked about the possible legality of caucusing twice, the Washoe County Democratic Party Executive Director, Pam duPre, said the following: "This is apparently a Republican scheme to undermine the integrity of our nominating process. It's really, incredibly disappointing to know that Republicans in this county and across the state of Nevada would actually be encouraging their people to undermine the democratic process. 
And it will not be the job of our volunteers to try to enforce rules and regulations tomorrow. They have precinct caucuses to run."
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

"""
Metrics that allow to retrieve curves of partial results.
Typically used to retrieve partial learning curves of ML training jobs.
"""

from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Any, Dict, Iterable, Optional, Union

import numpy as np
import pandas as pd
from ax.core.base_trial import BaseTrial
from ax.core.data import Data
from ax.core.experiment import Experiment
from ax.core.map_data import MapData, MapKeyInfo
from ax.core.map_metric import MapMetric
from ax.core.metric import Metric
from ax.core.trial import Trial
from ax.early_stopping.utils import align_partial_results
from ax.utils.common.logger import get_logger
from ax.utils.common.typeutils import checked_cast

logger = get_logger(__name__)


class AbstractCurveMetric(MapMetric, ABC):
    """Metric representing (partial) learning curves of ML model training jobs."""

    # Progression key under which curve values are indexed in MapData.
    MAP_KEY = MapKeyInfo(key="training_rows", default_value=0.0)

    def __init__(
        self,
        name: str,
        curve_name: str,
        lower_is_better: bool = True,
    ) -> None:
        """Inits Metric.

        Args:
            name: The name of the metric.
            curve_name: The name of the learning curve in the training output
                (there may be multiple outputs e.g. for MTML models).
            lower_is_better: If True, lower curve values are considered better.
        """
        super().__init__(name=name, lower_is_better=lower_is_better)
        self.curve_name = curve_name

    @classmethod
    def is_available_while_running(cls) -> bool:
        # Partial curves can be fetched before the trial completes.
        return True

    def fetch_trial_data(self, trial: BaseTrial, **kwargs: Any) -> Data:
        """Fetch data for one trial."""
        return self.fetch_trial_data_multi(trial=trial, metrics=[self], **kwargs)

    @classmethod
    def fetch_trial_data_multi(
        cls, trial: BaseTrial, metrics: Iterable[Metric], **kwargs: Any
    ) -> Data:
        """Fetch multiple metrics data for one trial."""
        return cls.fetch_experiment_data_multi(
            experiment=trial.experiment, metrics=metrics, trials=[trial], **kwargs
        )

    @classmethod
    def fetch_experiment_data_multi(
        cls,
        experiment: Experiment,
        metrics: Iterable[Metric],
        trials: Optional[Iterable[BaseTrial]] = None,
        **kwargs: Any,
    ) -> Data:
        """Fetch multiple metrics data for an experiment."""
        if trials is None:
            trials = list(experiment.trials.values())
        # Only consider trials in a state where data is expected.
        trials = [trial for trial in trials if trial.status.expecting_data]
        if any(not isinstance(trial, Trial) for trial in trials):
            raise RuntimeError(
                f"Only (non-batch) Trials are supported by {cls.__name__}"
            )
        trial_idx_to_id = cls.get_ids_from_trials(trials=trials)
        if len(trial_idx_to_id) == 0:
            logger.debug("Could not get ids from trials. Returning empty data.")
            return MapData(map_key_infos=[cls.MAP_KEY])
        all_curve_series = cls.get_curves_from_ids(ids=trial_idx_to_id.values())
        if all(id_ not in all_curve_series for id_ in trial_idx_to_id.values()):
            logger.debug("Could not get curves from ids. Returning empty data.")
            return MapData(map_key_infos=[cls.MAP_KEY])
        df = cls.get_df_from_curve_series(
            experiment=experiment,
            all_curve_series=all_curve_series,
            metrics=metrics,
            trial_idx_to_id=trial_idx_to_id,
        )
        return MapData(df=df, map_key_infos=[cls.MAP_KEY])

    @classmethod
    def get_df_from_curve_series(
        cls,
        experiment: Experiment,
        all_curve_series: Dict[Union[int, str], Dict[str, pd.Series]],
        metrics: Iterable[Metric],
        trial_idx_to_id: Dict[int, Union[int, str]],
    ) -> Optional[pd.DataFrame]:
        """Convert a `all_curve_series` dict (from `get_curves_from_ids`) into
        a dataframe. For each metric, we get one curve (of name `curve_name`).

        Args:
            experiment: The experiment.
            all_curve_series: A dict containing curve data, as output from
                `get_curves_from_ids`.
            metrics: The metrics from which data is being fetched.
            trial_idx_to_id: A dict mapping trial index to ids.

        Returns:
            A dataframe containing curve data or None if no curve data could be
            found.
        """
        dfs = []
        for trial_idx, id_ in trial_idx_to_id.items():
            if id_ not in all_curve_series:
                logger.debug(f"Could not get curve data for id {id_}. Ignoring.")
                continue
            curve_series = all_curve_series[id_]
            for m in metrics:
                if m.curve_name in curve_series:  # pyre-ignore [16]
                    dfi = _get_single_curve(
                        curve_series=curve_series,
                        curve_name=m.curve_name,
                        metric_name=m.name,
                        map_key=cls.MAP_KEY.key,
                        trial=experiment.trials[trial_idx],
                    )
                    dfs.append(dfi)
                else:
                    logger.debug(
                        f"{m.curve_name} not yet present in curves from {id_}. "
                        "Returning without this metric."
                    )
        if len(dfs) == 0:
            return None
        return pd.concat(dfs, axis=0, ignore_index=True)

    @classmethod
    @abstractmethod
    def get_ids_from_trials(
        cls, trials: Iterable[BaseTrial]
    ) -> Dict[int, Union[int, str]]:
        """Get backend run ids associated with trials.

        Args:
            trials: The trials for which to retrieve the associated
                ids that can be used to identify the corresponding runs
                on the backend.

        Returns:
            A dictionary mapping the trial indices to the identifiers
            (ints or strings) corresponding to the backend runs associated
            with the trials. Trials whose corresponding ids could not be found
            should be omitted.
        """
        ...  # pragma: nocover

    @classmethod
    @abstractmethod
    def get_curves_from_ids(
        cls, ids: Iterable[Union[int, str]]
    ) -> Dict[Union[int, str], Dict[str, pd.Series]]:
        """Get partial result curves from backend ids.

        Args:
            ids: The ids of the backend runs for which to fetch the
                partial result curves.

        Returns:
            A dictionary mapping the backend id to the partial result curves,
            each of which is represented as a mapping from the metric name to a
            pandas Series indexed by the progression (which will be mapped to
            the `MAP_KEY` of the metric class). E.g. if `curve_name=loss` and
            `MAP_KEY=training_rows`, then a Series should look like:

                 training_rows (index) | loss
                -----------------------|------
                                   100 | 0.5
                                   200 | 0.2
        """
        ...  # pragma: nocover


class AbstractScalarizedCurveMetric(AbstractCurveMetric):
    """A linear scalarization of (partial) learning curves of ML model training
    jobs:

        scalarized_curve = offset + sum_i(coefficients[i] * curve[i]).

    It is assumed that the output of `get_curves_from_ids` contains all of the
    curves necessary for performing the scalarization.
    """

    def __init__(
        self,
        name: str,
        coefficients: Dict[str, float],
        offset: float = 0.0,
        lower_is_better: bool = True,
    ) -> None:
        """Construct a AbstractScalarizedCurveMetric.

        Args:
            name: Name of metric.
            coefficients: A mapping from learning curve names to their
                scalarization coefficients.
            offset: The offset of the affine scalarization.
            lower_is_better: If True, lower values (of the scalarized metric)
                are considered better.
        """
        # Calls MapMetric.__init__ directly: AbstractCurveMetric.__init__
        # requires a single `curve_name`, which does not apply here.
        MapMetric.__init__(self, name=name, lower_is_better=lower_is_better)
        self.coefficients = coefficients
        self.offset = offset

    @classmethod
    def get_df_from_curve_series(
        cls,
        experiment: Experiment,
        all_curve_series: Dict[Union[int, str], Dict[str, pd.Series]],
        metrics: Iterable[Metric],
        trial_idx_to_id: Dict[int, Union[int, str]],
    ) -> Optional[pd.DataFrame]:
        """Convert a `all_curve_series` dict (from `get_curves_from_ids`) into
        a dataframe. For each metric, we first get all curves represented in
        `coefficients` and then perform scalarization.

        Args:
            experiment: The experiment.
            all_curve_series: A dict containing curve data, as output from
                `get_curves_from_ids`.
            metrics: The metrics from which data is being fetched.
            trial_idx_to_id: A dict mapping trial index to ids.

        Returns:
            A dataframe containing curve data or None if no curve data could be
            found.
        """
        dfs = []
        complete_metrics_by_trial = {
            trial_idx: [] for trial_idx in trial_idx_to_id.keys()
        }
        for trial_idx, id_ in trial_idx_to_id.items():
            if id_ not in all_curve_series:
                logger.debug(f"Could not get curve data for id {id_}. Ignoring.")
                continue
            curve_series = all_curve_series[id_]
            for m in metrics:
                curve_dfs = []
                for curve_name in m.coefficients.keys():  # pyre-ignore[16]
                    if curve_name in curve_series:
                        curve_df = _get_single_curve(
                            curve_series=curve_series,
                            curve_name=curve_name,
                            map_key=cls.MAP_KEY.key,
                            trial=experiment.trials[trial_idx],
                        )
                        curve_dfs.append(curve_df)
                    else:
                        logger.debug(
                            f"{curve_name} not present in curves from {id_}, so the "
                            f"scalarization for {m.name} cannot be computed. Returning "
                            "without this metric."
                        )
                        break
                if len(curve_dfs) == len(m.coefficients):
                    # only keep if all curves needed by the metric are available
                    dfs.extend(curve_dfs)
                    # mark metrics who have all underlying curves
                    complete_metrics_by_trial[trial_idx].append(m)
        if len(dfs) == 0:
            return None
        all_data_df = pd.concat(dfs, axis=0, ignore_index=True)
        sub_dfs = []
        # Do not create a common index across trials, only across the curves
        # involved in the scalarized metric.
        for trial_idx, dfi in all_data_df.groupby("trial_index"):
            # the `do_forward_fill = True` pads with the latest
            # observation to handle situations where learning curves
            # report different amounts of data.
            trial_curves = dfi["metric_name"].unique().tolist()
            dfs_mean, dfs_sem = align_partial_results(
                dfi,
                progr_key=cls.MAP_KEY.key,
                metrics=trial_curves,
                do_forward_fill=True,
            )
            for metric in complete_metrics_by_trial[trial_idx]:
                sub_df = _get_scalarized_curve_metric_sub_df(
                    dfs_mean=dfs_mean,
                    dfs_sem=dfs_sem,
                    metric=metric,
                    trial=checked_cast(Trial, experiment.trials[trial_idx]),
                )
                sub_dfs.append(sub_df)
        return pd.concat(sub_dfs, axis=0, ignore_index=True)


def _get_single_curve(
    curve_series: Dict[str, pd.Series],
    curve_name: str,
    map_key: str,
    trial: BaseTrial,
    metric_name: Optional[str] = None,
) -> pd.DataFrame:
    """Get a single curve from `curve_series` and return as a dataframe.
    By default, the `metric_name` is set to be the `curve_name`, but if
    an additional `metric_name` is passed, it will be used instead.
""" if metric_name is None: metric_name = curve_name cs = curve_series[curve_name].rename("mean") # pyre-ignore [6] dfi = cs.reset_index().rename(columns={"index": map_key}) # pyre-ignore [16] dfi["trial_index"] = trial.index dfi["arm_name"] = trial.arm.name # pyre-ignore [16] dfi["metric_name"] = metric_name dfi["sem"] = float("nan") return dfi.drop_duplicates() def _get_scalarized_curve_metric_sub_df( dfs_mean: Dict[str, pd.DataFrame], dfs_sem: Dict[str, pd.DataFrame], metric: AbstractScalarizedCurveMetric, trial: Trial, ) -> pd.DataFrame: """Helper to construct sub-dfs for a ScalarizedCurveMetric. Args: df_mean: A mapping from Curve metric names to a dataframe containing the means of the respective metric. The progression indices are assumed to be aliged across metrics (e.g. as obtained via `align_partial_results`). df_sem: A mapping from Curve metric names to a dataframe containing the sems of the respective metric. If empty, assume the metrics are subject to noise of unknown magnitude. metric: The ScalarizedCurveMetric to perform the aggregation for. trial: The trial associated with the data in `df_mean` and `df_sem`. Returns: A dataframe with the scalarized mean and sem in `mean` and `sem` columns, respectively. 
""" sub_df = metric.offset + sum( coeff * dfs_mean[metric] # pyre-ignore [58] for metric, coeff in metric.coefficients.items() ) sub_df = sub_df.rename(columns={trial.index: "mean"}) # pyre-ignore [16] if dfs_sem: var_df = sum( (coeff * dfs_sem[metric]) ** 2 # pyre-ignore [58] for metric, coeff in metric.coefficients.items() ) sem_df = var_df.apply(np.sqrt).rename( # pyre-ignore [16] columns={trial.index: "sem"} ) sub_df = pd.concat([sub_df, sem_df], axis=1) else: sub_df["sem"] = float("nan") sub_df = sub_df.reset_index() sub_df["trial_index"] = trial.index sub_df["arm_name"] = trial.arm.name # pyre-ignore [16] sub_df["metric_name"] = metric.name # When scalarizing curves, sometimes the last progression will be different # across curves, even for the same trial. This dropna() will only keep the # progressions that are available for all curves. return sub_df.dropna(subset=["mean"])
// Get returns a map of bundled modules. func Get() map[string]string { return map[string]string{ "binding": bindingElv, "epm": epmElv, "readline-binding": readlineBindingElv, } }
By Alex Stevenson

Professor Richard Dawkins has hit out at science teachers being "so ignorant of science" after a poll showed a majority backed the teaching of creationism in schools.

The outspoken Oxford professor was responding to research out last month by pollster Ipsos Mori which found 65 per cent of teaching staff backed its discussion.

Creationism remains a politically sensitive subject. Many believe there is no place in science lessons for the belief that the Earth was created by God as told in the Bible.

And just one in four teachers agreed with the view that creationism should not be taught in schools. Although a majority agrees science lessons are not the place for it to be discussed Prof Dawkins, author of The God Delusion, blamed Tony Blair's attitude for current guidance from the Department for Children, Schools and Families.

"Tony Blair was always soft on that," he told politics.co.uk. "He would say things like 'in the interests of diversity' and things like that. I don't know about the Brown government."

Commenting on the Ipsos Mori poll, Prof Dawkins added: "I think there's a misperception it's kind of fair there are these two theories [creationism and evolution]. If there were two theories, of course it would be fair. There aren't."

He added: "I'm a bit shocked that science teachers should be so ignorant of science."

Prof Dawkins was speaking as he supported the launch of the Atheist Bus Campaign, which will see bus advertisements bearing the slogan: "There's probably no God. Now stop worrying and enjoy your life."

Though pleased with the adverts, which will feature on 800 buses in Britain, he said he would have liked to see different slogans used to address the ways in which children are influenced on religious issues outside school.

"Slogans which I'm particularly keen on are with respect to children - 'there's no such thing as a Christian child', for example," he commented.
"I think the automatic labelling of children with the religion of their parents is absolutely despicable." Revd Jan Ainsworth, the Church of England's chief education officer, said both she and the Archbishop of Canterbury were against teaching creationism as if it was a scientific theory in schools. She added: "Many schools rightly include the subject in RE lessons or in the discussion of the development of scientific ideas. "Part of the role of education is to explore and challenge contrasting ideas and viewpoints, and to encourage students to debate these and reach their own informed opinions - and these discussions should include creationism."
import bisect


def bifind(r, z):
    """Return [floor, ceil]: the nearest values in sorted list r on each side of z.

    Values off either end of r clamp to the nearest endpoint.  Fixes the
    original's edge case where z == r[0] made bisect_left return 0 and the
    subsequent -1 index wrap around to r[-1].

    Args:
        r: non-empty sorted list of ints.
        z: query position.

    Returns:
        Two-element list [largest value <= z (or r[0]), smallest value >= z
        (or r[-1])].
    """
    if z <= r[0]:
        return [r[0], r[0]]
    if z >= r[-1]:
        return [r[-1], r[-1]]
    i = bisect.bisect_left(r, z)
    if r[i] == z:
        # z itself is present: floor == ceil == z.
        return [z, z]
    return [r[i - 1], r[i]]


def main():
    """Read counts and positions, then print the shortest visit distance per query.

    For each query x we must visit one value from s and one from t; we may
    visit them in either order, so try all four floor/ceil neighbor pairs and
    both visit orders and take the minimum total distance.
    """
    a, b, q = map(int, input().split())
    s = [int(input()) for _ in range(a)]
    t = [int(input()) for _ in range(b)]
    queries = [int(input()) for _ in range(q)]
    for x in queries:
        ls = bifind(s, x)
        lt = bifind(t, x)
        candidates = []
        for sv in ls:
            for tv in lt:
                between = abs(tv - sv)
                candidates.append(abs(sv - x) + between)  # visit s first, then t
                candidates.append(abs(tv - x) + between)  # visit t first, then s
        print(min(candidates))


if __name__ == "__main__":
    main()
/**
 * Builds a description -> nearest-occurrence map for the given timers.
 *
 * @param timers list of timers to describe
 * @return map keyed by timer description; the value is the timer's nearest
 *         occurrence, or the string "not scheduled" when none is planned
 */
private Map<String, Object> detailsOf(List<Timer> timers) {
    Map<String, Object> details = new HashMap<>();
    for (Timer timer : timers) {
        Object occurrence = timer.getNearestOccurrence().orElse(null);
        details.put(timer.getDescription(), occurrence == null ? "not scheduled" : occurrence);
    }
    return details;
}
def update(self, g, dt):
    """Advance the pendulum state by one time step of length dt.

    g is a 2-component gravity/acceleration vector (raw units, scaled by
    1/1024 — presumably accelerometer counts; TODO confirm).  Inactive
    pendulums are left untouched.
    """
    if not self.active:
        return
    # Remember the previous angle before integrating.
    self.oangle = self.angle
    # Tangential acceleration: gravity projected onto the swing direction,
    # minus a velocity-proportional friction term.
    accel = (g[1] / 1024) * math.cos(self.angle) - (g[0] / 1024) * math.sin(self.angle)
    accel -= friction * self.velocity
    # Integrate the angle with the current velocity, then update the velocity.
    self.angle = (self.angle + speed * self.velocity * dt) % (2 * math.pi)
    self.velocity += accel * dt
def evaluate(images, labels, thetas):
    """Count how many images the network classifies correctly.

    Runs forward propagation, takes the arg-max of each output-layer row as
    the predicted class, and compares it with the matching label.

    Args:
        images: input batch fed to forward_prop.
        labels: iterable of integer class labels, aligned with images.
        thetas: network weights, passed through to forward_prop.

    Returns:
        int: number of predictions equal to their label.
    """
    # The last element returned by forward_prop is the output-layer
    # activation matrix (assumed one 1-D row per image — TODO confirm).
    outputs = forward_prop(images, thetas)[-1]
    # np.argmax gives the first index of the row maximum, equivalent to the
    # original np.where(r == r.max())[0][0].
    predictions = (int(np.argmax(row)) for row in outputs)
    return sum(1 for p, a in zip(predictions, labels) if p == a)
Material Properties' Identification Using Full Field Measurements on Vibrating Plates

Abstract: The paper presents an experimental application of an inverse method leading to the identification of the elastic and damping material properties of isotropic vibrating plates. The theory assumes that the searched parameters can be extracted from curvature and deflection fields measured on the whole surface of the plate at two particular instants of the vibrating motion. The experimental application consists of an original excitation fixture, a particular adaptation of an optical full field measurement technique, a data preprocessing giving the curvatures and deflection fields and finally the identification process using the Virtual Fields Method (VFM). The principle of the deflectometry technique used for the measurements is presented. First results of identification on acrylic plates are presented and compared to reference values. Results are discussed and improvements of the method are proposed.
// Create creates a Kubernetes resource based on the given APIVersion/Kind, // the name and the object definition itself. It allows us to easily manage // all resources through Unstructured object with the "official" Kubernetes // client.Client interface. func (ctx *FeatureContext) Create( groupVersionKind schema.GroupVersionKind, namespacedName types.NamespacedName, obj *unstructured.Unstructured, opts ...client.CreateOption, ) error { obj.SetGroupVersionKind(groupVersionKind) obj.SetUID(types.UID(uuid.New().String())) obj.SetName(namespacedName.Name) obj.SetNamespace(namespacedName.Namespace) kobj, err := ctx.scheme.New(groupVersionKind) if err != nil { return err } err = runtime.DefaultUnstructuredConverter.FromUnstructured(obj.Object, kobj) if err != nil { return err } return ctx.client.Create(ctx.ctx, kobj, opts...) }
""" Uber object for calibration images, e.g. arc, flat .. include common links, assuming primary doc root is up one directory .. include:: ../include/links.rst """ from IPython import embed import numpy as np from pypeit import msgs from pypeit.par import pypeitpar from pypeit.images import combineimage from pypeit.images import pypeitimage from pypeit.core.framematch import valid_frametype class ArcImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Arc Image """ # version is inherited from PypeItImage # I/O output_to_disk = ('ARC_IMAGE', 'ARC_FULLMASK', 'ARC_DETECTOR', 'ARC_DET_IMG', # For echelle multi-detector wavelengths ) hdu_prefix = 'ARC_' calib_type = 'Arc' class AlignImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Alignment Image """ # version is inherited from PypeItImage # I/O output_to_disk = ('ALIGN_IMAGE', 'ALIGN_FULLMASK', 'ALIGN_DETECTOR') hdu_prefix = 'ALIGN_' calib_type = 'Align' class BiasImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Bias Image """ # version is inherited from PypeItImage # Output to disk output_to_disk = ('BIAS_IMAGE', 'BIAS_IVAR', 'BIAS_DETECTOR') hdu_prefix = 'BIAS_' calib_type = 'Bias' class DarkImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Dark Image """ # version is inherited from PypeItImage # Output to disk output_to_disk = ('DARK_IMAGE', 'DARK_IVAR', 'DARK_DETECTOR') hdu_prefix = 'DARK_' calib_type = 'Dark' class TiltImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Tilt Image """ # version is inherited from PypeItImage # I/O output_to_disk = ('TILT_IMAGE', 'TILT_FULLMASK', 'TILT_DETECTOR') hdu_prefix = 'TILT_' calib_type = 'Tiltimg' class TraceImage(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the Trace Image """ # version is inherited from PypeItImage # I/O output_to_disk = ('TRACE_IMAGE', 'TRACE_FULLMASK', 'TRACE_DETECTOR') hdu_prefix = 'TRACE_' calib_type = 
'Trace' # TODO: This doesn't need to inherit from PypeItCalibrationImage. It can just # be a Calibframe with a short datamodel that holds the mask. And we might want # to find a place for it that makes more sense. class SkyRegions(pypeitimage.PypeItCalibrationImage): """ Simple DataContainer for the SkyRegions Image """ # version is inherited from PypeItImage # I/O output_to_disk = ('SKYREG_IMAGE') hdu_prefix = 'SKYREG_' calib_type = 'SkyRegions' calib_file_format = 'fits.gz' @classmethod def construct_file_name(cls, calib_key, calib_dir=None, basename=None): """ Override the base-class filename construction to optionally include a basename. Args: calib_key (:obj:`str`): String identifier of the calibration group. See :func:`construct_calib_key`. calib_dir (:obj:`str`, `Path`_, optional): If provided, return the full path to the file given this directory. basename (:Obj:`str`, optional): If provided include this in the output file name. Returns: :obj:`str`: File path or file name """ filename = str(super().construct_file_name(calib_key, calib_dir=calib_dir)) if basename is None: return filename return filename.replace(f'.{cls.calib_file_format}', f'_{basename}.{cls.calib_file_format}') frame_image_classes = dict( bias=BiasImage, dark=DarkImage, arc=ArcImage, tilt=TiltImage, trace=TraceImage, align=AlignImage) """ The list of classes that :func:`buildimage_fromlist` should use to decorate the output for the specified frame types. All of these **must** subclass from :class:`~pypeit.images.pypeitimage.PypeItCalibrationImage`. """ def buildimage_fromlist(spectrograph, det, frame_par, file_list, bias=None, bpm=None, dark=None, flatimages=None, maxiters=5, ignore_saturation=True, slits=None, mosaic=None, calib_dir=None, setup=None, calib_id=None): """ Perform basic image processing on a list of images and combine the results. .. 
warning:: For image mosaics (when ``det`` is a tuple) the processing behavior is hard-coded such that bias and dark frames are *not* reformatted into a mosaic image. They are saved in their native multi-image format. Bad-pixel masks are also expected to be in multi-image format. See :class:`~pypeit.images.rawimage.RawImage`. Args: spectrograph (:class:`~pypeit.spectrographs.spectrograph.Spectrograph`): Spectrograph used to take the data. det (:obj:`int`, :obj:`tuple`): The 1-indexed detector number(s) to process. If a tuple, it must include detectors viable as a mosaic for the provided spectrograph; see :func:`~pypeit.spectrographs.spectrograph.Spectrograph.allowed_mosaics`. frame_par (:class:`~pypeit.par.pypeitpar.FramePar`): Parameters that dictate the processing of the images. See :class:`~pypeit.par.pypeitpar.ProcessImagesPar` for the defaults. file_list (:obj:`list`): List of files bias (:class:`~pypeit.images.buildimage.BiasImage`, optional): Bias image for bias subtraction; passed directly to :func:`~pypeit.images.rawimage.RawImage.process` for all images. bpm (`numpy.ndarray`_, optional): Bad pixel mask; passed directly to :func:`~pypeit.images.rawimage.RawImage.process` for all images. dark (:class:`~pypeit.images.buildimage.DarkImage`, optional): Dark-current image; passed directly to :func:`~pypeit.images.rawimage.RawImage.process` for all images. flatimages (:class:`~pypeit.flatfield.FlatImages`, optional): Flat-field images for flat fielding; passed directly to :func:`~pypeit.images.rawimage.RawImage.process` for all images. maxiters (:obj:`int`, optional): When ``combine_method='mean'``) and sigma-clipping (``sigma_clip`` is True), this sets the maximum number of rejection iterations. If None, rejection iterations continue until no more data are rejected; see :func:`~pypeit.core.combine.weighted_combine``. ignore_saturation (:obj:`bool`, optional): If True, turn off the saturation flag in the individual images before stacking. 
This avoids having such values set to 0, which for certain images (e.g. flat calibrations) can have unintended consequences. slits (:class:`~pypeit.slittrace.SlitTraceSet`, optional): Edge traces for all slits. These are used to calculate spatial flexure between the image and the slits, and for constructing the slit-illumination correction. See :class:`pypeit.images.rawimage.RawImage.process`. mosaic (:obj:`bool`, optional): Flag processed image will be a mosaic of multiple detectors. By default, this is determined by the format of ``det`` and whether or not this is a bias or dark frame. *Only used for testing purposes.* calib_dir (:obj:`str`, `Path`_, optional): The directory for processed calibration files. Required for elements of :attr:`frame_image_classes`, ignored otherwise. setup (:obj:`str`, optional): The setup/configuration identifier to use for this dataset. Required for elements of :attr:`frame_image_classes`, ignored otherwise. calib_id (:obj:`str`, optional): The string listing the set of calibration groups associated with this dataset. Required for elements of :attr:`frame_image_classes`, ignored otherwise. Returns: :class:`~pypeit.images.pypeitimage.PypeItImage`, :class:`~pypeit.images.pypeitimage.PypeItCalibrationImage`: The processed and combined image. """ # Check if not isinstance(frame_par, pypeitpar.FrameGroupPar): msgs.error('Provided ParSet must be type FrameGroupPar, not ' f'{frame_par.__class__.__name__}.') if not valid_frametype(frame_par['frametype'], quiet=True): # NOTE: This should not be necessary because FrameGroupPar explicitly # requires frametype to be valid msgs.error(f'{frame_par["frametype"]} is not a valid PypeIt frame type.') # Should the detectors be reformatted into a single image mosaic? 
if mosaic is None: mosaic = isinstance(det, tuple) and frame_par['frametype'] not in ['bias', 'dark'] # Do it combineImage = combineimage.CombineImage(spectrograph, det, frame_par['process'], file_list) pypeitImage = combineImage.run(bias=bias, bpm=bpm, dark=dark, flatimages=flatimages, sigma_clip=frame_par['process']['clip'], sigrej=frame_par['process']['comb_sigrej'], maxiters=maxiters, ignore_saturation=ignore_saturation, slits=slits, combine_method=frame_par['process']['combine'], mosaic=mosaic) # Return class type, if returning any of the frame_image_classes cls = frame_image_classes[frame_par['frametype']] \ if frame_par['frametype'] in frame_image_classes.keys() else None # Either return the image directly, or decorate and return according to the # type of calibration. For the latter, this specific use of # from_pypeitimage means that the class *must* be a subclass of # PypeItCalibrationImage! return pypeitImage if cls is None \ else cls.from_pypeitimage(pypeitImage, calib_dir=calib_dir, setup=setup, calib_id=calib_id, detname=spectrograph.get_det_name(det))
# Copyright 2017 The Rudders Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""File with logic specific to preprocess and build amazon relations triplets"""
from rudders.datasets.amazon import load_metadata
from rudders.relations import Relations
from rudders.utils import add_to_train_split


def get_co_triplets(item_metas, get_aspect_func, iid2id, relation_id):
    """
    Creates triplets based on co_buy or co_view relations taken from metadata

    :param item_metas: list of amazon metadata objects
    :param get_aspect_func: a function that extracts either co_buy or co_view data
    :param iid2id: dict of item ids
    :param relation_id: relation index
    :return: list of triplets
    """
    # Build as a set comprehension so repeated co-relations collapse to one
    # triplet; unknown co-related items (not in iid2id) are skipped.
    triplets = {(iid2id[item.id], relation_id, iid2id[co_rel_id])
                for item in item_metas
                for co_rel_id in get_aspect_func(item)
                if co_rel_id in iid2id}
    return list(triplets)


def get_category_triplets(item_metas, cat2id, iid2id, relation_id):
    """
    Builds triplets based on categorical labels.
    For each item: (item, has_category, category)

    :param item_metas: list of amazon metadata objects
    :param cat2id: dict of category ids
    :param iid2id: dict of item ids
    :param relation_id: relation index for has_category relation
    :return: list of triplets
    """
    triplets = {(iid2id[it_meta.id], relation_id, cat2id[cat])
                for it_meta in item_metas
                for cat in it_meta.categories}
    return list(triplets)


def get_brand_triplets(item_metas, brand2id, iid2id, relation_id):
    """
    Builds triplets based on brands.
    For each item: (item, has_brand, brand)

    :param item_metas: list of amazon metadata objects
    :param brand2id: dict of brand ids
    :param iid2id: dict of item ids
    :param relation_id: relation index for has_brand relation
    :return: list of triplets
    """
    # Items without a brand (empty/None) contribute no triplet.
    triplets = {(iid2id[it_meta.id], relation_id, brand2id[it_meta.brand])
                for it_meta in item_metas
                if it_meta.brand}
    return list(triplets)


def get_cat2id(item_metas, n_entities):
    """Extracts all categories from item metadata and maps them to an id"""
    # New category ids are appended after the existing entity id range.
    categories = {cat for it_meta in item_metas for cat in it_meta.categories}
    return {cate: n_entities + i for i, cate in enumerate(categories)}


def load_relations(metadata_file, data, iid2id, n_entities):
    """
    Loads relations extracted from the amazon dataset.
    Modifies training data (in data variable) in place.

    :param metadata_file: path to metadata file
    :param data: dict with train split as key to extend it with new triplets
    :param iid2id: dict of item_ids to numerical index
    :param n_entities: current amount of entities in the data
    :return: updated number of entities after adding new relations
    """
    item_metas = load_metadata(metadata_file)
    # Only keep metadata for items that actually appear in the dataset.
    item_metas = [it_meta for it_meta in item_metas if it_meta.id in iid2id]

    # co buy relations
    cobuy_triplets = get_co_triplets(item_metas, lambda x: x.cobuys, iid2id, Relations.COBUY.value)
    add_to_train_split(data, cobuy_triplets)
    print(f"Added co-buy triplets: {len(cobuy_triplets)}")

    # co view relations
    coview_triplets = get_co_triplets(item_metas, lambda x: x.coviews, iid2id,
                                      Relations.COVIEW.value)
    add_to_train_split(data, coview_triplets)
    print(f"Added co-view triplets: {len(coview_triplets)}")

    # category relations
    cat2id = get_cat2id(item_metas, n_entities)
    category_triplets = get_category_triplets(item_metas, cat2id, iid2id, Relations.CATEGORY.value)
    add_to_train_split(data, category_triplets)
    print(f"Added categorical triplets: {len(category_triplets)}")
    n_entities += len(cat2id)
    data["id2cat"] = {cid: cat for cat, cid in cat2id.items()}

    # brand relations
    all_brands = {it_meta.brand for it_meta in item_metas if it_meta.brand}
    brand2id = {br: n_entities + i for i, br in enumerate(all_brands)}
    brand_triplets = get_brand_triplets(item_metas, brand2id, iid2id, Relations.BRAND.value)
    add_to_train_split(data, brand_triplets)
    print(f"Added brand triplets: {len(brand_triplets)}")
    n_entities += len(brand2id)
    data["id2brand"] = {bid: brand for brand, bid in brand2id.items()}
    return n_entities
// common code to draw a text string in a listbox item void CMainDlgWindow::DrawItemText(const CString& text, int off) { RECT& rect = m_item->rcItem; m_dc->ExtTextOut(rect.left + off + 2, rect.top, off ? 0 : ETO_OPAQUE, &rect, text, text.GetLength(), 0); }
<reponame>Minionguyjpro/Ghostly-Skills<gh_stars>1-10
// Obfuscated Google Mobile Services (ads) message handler.  NOTE(review):
// names are machine-generated; do not restyle — keep byte-identical to the
// decompiled SDK source.
package com.google.android.gms.ads.internal.gmsg;

import com.google.android.gms.internal.ads.zzaqw;
import java.util.Map;

// Handler for "action" messages dispatched to a zzaqw instance.
final class zzs implements zzv<zzaqw> {
    zzs() {
    }

    // Dispatches on map["action"]: "pause" -> zzcl(), "resume" -> zzcm().
    // NOTE(review): zzcl()/zzcm() presumably pause/resume the target —
    // confirm against a deobfuscated SDK mapping.  Any other action value
    // is silently ignored.
    public final /* synthetic */ void zza(Object obj, Map map) {
        zzaqw zzaqw = (zzaqw) obj;
        String str = (String) map.get("action");
        if ("pause".equals(str)) {
            zzaqw.zzcl();
        } else if ("resume".equals(str)) {
            zzaqw.zzcm();
        }
    }
}
Simultaneous determination of cyproterone acetate and ethinylestradiol in tablets by derivative spectrophotometry.
Derivative spectrophotometry offers a useful approach for the analysis of drugs in multi-component mixtures. In this study a third-derivative spectrophotometric method was used for simultaneous determination of cyproterone acetate and ethinylestradiol using the zero-crossing technique. The measurements were carried out at wavelengths of 316 and 226 nm for cyproterone acetate and ethinylestradiol respectively. The method was found to be linear (r2>0.999) in the range of 0.5-6 mg/100 ml for cyproterone acetate in the presence of 35 microg/100 ml ethinylestradiol at 316 nm. The same linear correlation (r2>0.999) was obtained in the range of 10-80 microg/100 ml of ethinylestradiol in the presence of 2 mg/100 ml of cyproterone acetate at 226 nm. The limit of determination was 0.5 mg/100 ml and 10 microg/100 ml for cyproterone acetate and ethinylestradiol respectively. The method was successfully applied for simultaneous determination of cyproterone acetate and ethinylestradiol in pharmaceutical preparations without any interferences from excipients.
package com.gnd.calificaprofesores.RecyclerForClassFrontPageCapital; public class AdapterElement { private Integer type; public AdapterElement(Integer _type){ type = _type; } public Integer GetType() { return type; } public void SetType(Integer type) { this.type = type; } }
Tokyo is big without a phone!

Dailymotion link: http://www.dailymotion.com/video/x4ndd3e_testing-bonds-without-phones-part-1_fun

Download link: https://mega.nz/#!WNYjkQCB!gIDPF2iZm4JIaY5Ywj_7W9upJA_kcwY4pnnbAapdqZs

Hey! Hope you’re all having a great day so far. Most of you saw the teaser last week and knew what would be coming. It’s here! Thank you so so much Jose for the translation. Great as always. Also Derek for typesetting. Good work you two!

“Testing bonds without phones”. Sounds strange but to sum it up, it’s more or less a teamwork challenge.

What the episode is about: This challenge is a bit more casual and it’s very nice to see the cast’s personal sides while out in public. So the point of the segment is that they first split up and then have to find each other again using only 1 common keyword (A place in Tokyo). Whatever it is they’re thinking of they have to go there by car hoping that one of the others thought of the same thing. Trying to figure out what the others are thinking proves to be almost impossible. Watch out for Tanaka and his made up rules. When the timer is out, they phone each other and ask where everybody else went and get super annoyed by each other that they didn’t pick the same location. Most keywords are things like “Big parking lot” or “Big billboard” something very generic which makes the challenge even harder.

The episode was aired in two parts in Japan. This is the first one. Second part is translated and ready. It will need patching up as always. Let’s aim for next week!

Who will find each other in the first part? Check it out! Enjoy!

Discuss this episode here
<reponame>jquintanas/cloudsim
/*
 * <NAME> & <NAME>
 * Computer Communication Networks (CCN) Lab
 * Dept of Electrical & Computer Engineering
 * National University of Singapore
 * August 2004
 *
 * Licence: GPL - http://www.gnu.org/copyleft/gpl.html
 * Copyright (c) 2004, The University of Melbourne, Australia and National
 * University of Singapore
 * Packet.java - Interface of a Network Packet.
 *
 */

package org.cloudbus.cloudsim;

/**
 * Defines the structure for a network packet.
 *
 * @author <NAME>
 * @author <NAME>, National University of Singapore
 * @since CloudSim Toolkit 1.0
 */
public interface Packet {

	/**
	 * Returns a string describing this packet in detail.
	 *
	 * @return description of this packet
	 * @pre $none
	 * @post $none
	 */
	@Override
	String toString();

	/**
	 * Returns the size of this packet.
	 *
	 * @return size of the packet
	 * @pre $none
	 * @post $none
	 */
	long getSize();

	/**
	 * Sets the size of this packet.
	 *
	 * @param size size of the packet
	 * @return <tt>true</tt> if it is successful, <tt>false</tt> otherwise
	 * @pre size >= 0
	 * @post $none
	 */
	boolean setSize(long size);

	/**
	 * Returns the destination id of this packet.
	 *
	 * @return destination id
	 * @pre $none
	 * @post $none
	 */
	int getDestId();

	/**
	 * Returns the ID of this packet.
	 *
	 * @return packet ID
	 * @pre $none
	 * @post $none
	 */
	int getId();

	/**
	 * Returns the ID of the source of this packet.
	 *
	 * @return source id
	 * @pre $none
	 * @post $none
	 */
	int getSrcId();

	/**
	 * Gets the network service type of this packet.
	 *
	 * @return the network service type
	 * @pre $none
	 * @post $none
	 *
	 * @todo Is it the Type of Service (ToS) of IPv4, like in
	 *       the {@link Cloudlet#netToS}? If so, the names should
	 *       be standardized.
	 */
	int getNetServiceType();

	/**
	 * Sets the network service type of this packet.
	 * <p>
	 * By default, the service type is 0 (zero). It depends on the packet
	 * scheduler to determine the priority of this service level.
	 *
	 * @param serviceType this packet's service type
	 * @pre serviceType >= 0
	 * @post $none
	 */
	void setNetServiceType(int serviceType);

	/**
	 * Gets the entity ID of the last hop that this packet has traversed.
	 *
	 * @return an entity ID
	 * @pre $none
	 * @post $none
	 */
	int getLast();

	/**
	 * Sets the entity ID of the last hop that this packet has traversed.
	 *
	 * @param last an entity ID from the last hop
	 * @pre last > 0
	 * @post $none
	 */
	void setLast(int last);

	/**
	 * Gets this packet's tag.
	 *
	 * @return this packet tag
	 * @pre $none
	 * @post $none
	 */
	int getTag();
}
def extract_image_from_mjpeg(stream):
    """Return the first complete JPEG frame found in an MJPEG byte stream.

    Accumulates chunks from ``stream`` (an iterable of ``bytes``) until a
    full frame -- a JPEG start-of-image marker (FF D8) followed by an
    end-of-image marker (FF D9) -- is present, then returns that frame as
    ``bytes``. Returns ``None`` if the stream ends without a complete frame.
    """
    data = b''
    for chunk in stream:
        data += chunk
        start = data.find(b'\xff\xd8')
        if start == -1:
            continue
        # Search for the end marker strictly *after* the start marker.
        # Searching from offset 0 (as the original did) could pair a stale
        # FF D9 left over from a previous partial frame with this start
        # marker and return an empty or garbage slice.
        end = data.find(b'\xff\xd9', start + 2)
        if end != -1:
            return data[start:end + 2]
    # Stream exhausted without finding a complete frame.
    return None
/**
 * Copyright (c) 2011-2017, SpaceToad and the BuildCraft Team
 * http://www.mod-buildcraft.com
 * <p/>
 * BuildCraft is distributed under the terms of the Minecraft Mod Public
 * License 1.0, or MMPL. Please check the contents of the license located in
 * http://www.mod-buildcraft.com/MMPL-1.0.txt
 */
package buildcraft.robotics.ai;

import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.common.util.ForgeDirection;

import buildcraft.api.core.BlockIndex;
import buildcraft.api.robots.AIRobot;
import buildcraft.api.robots.DockingStation;
import buildcraft.api.robots.EntityRobotBase;

/**
 * AI task that sends a robot to a docking station and links ("docks") it
 * there. The approach is two-phased: a pathfinding move to a block two
 * blocks away from the station along its facing (AIRobotGotoBlock), then a
 * straight-line move onto the dock itself (AIRobotStraightMoveTo).
 */
public class AIRobotGoAndLinkToDock extends AIRobot {

	// Target station. When constructed without one (e.g. after NBT load with
	// no saved index), it falls back to the robot's linked station.
	private DockingStation station;

	public AIRobotGoAndLinkToDock(EntityRobotBase iRobot) {
		super(iRobot);
	}

	public AIRobotGoAndLinkToDock(EntityRobotBase iRobot, DockingStation iStation) {
		this(iRobot);

		station = iStation;
	}

	@Override
	public void start() {
		// Already linked and docked at the requested station: nothing to do.
		if (station == robot.getLinkedStation() && station == robot.getDockingStation()) {
			terminate();
		} else {
			// Reserve the station first; if another robot holds it, fail early.
			if (station != null && station.takeAsMain(robot)) {
				// Path to the block 2 away from the dock along its side vector.
				startDelegateAI(new AIRobotGotoBlock(robot, station.x() + station.side().offsetX * 2,
						station.y() + station.side().offsetY * 2, station.z() + station.side().offsetZ * 2));
			} else {
				setSuccess(false);
				terminate();
			}
		}
	}

	@Override
	public void delegateAIEnded(AIRobot ai) {
		if (ai instanceof AIRobotGotoBlock) {
			if (ai.success()) {
				// Final approach: move straight onto the dock face (half-block
				// offset centers the robot on the station).
				startDelegateAI(new AIRobotStraightMoveTo(robot, station.x() + 0.5F + station.side().offsetX * 0.5F,
						station.y() + 0.5F + station.side().offsetY * 0.5F,
						station.z() + 0.5F + station.side().offsetZ * 0.5F));
			} else {
				terminate();
			}
		} else if (ai instanceof AIRobotStraightMoveTo) {
			// Only dock if the final approach succeeded; terminate either way.
			if (ai.success()) {
				robot.dock(station);
			}

			terminate();
		}
	}

	@Override
	public boolean canLoadFromNBT() {
		return true;
	}

	@Override
	public void writeSelfToNBT(NBTTagCompound nbt) {
		super.writeSelfToNBT(nbt);

		// Persist the station by block index + side so it can be looked up
		// again in the registry on load.
		if (station != null && station.index() != null) {
			NBTTagCompound indexNBT = new NBTTagCompound();
			station.index().writeTo(indexNBT);
			nbt.setTag("stationIndex", indexNBT);
			nbt.setByte("stationSide", (byte) station.side().ordinal());
		}
	}

	@Override
	public void loadSelfFromNBT(NBTTagCompound nbt) {
		if (nbt.hasKey("stationIndex")) {
			BlockIndex index = new BlockIndex(nbt.getCompoundTag("stationIndex"));
			ForgeDirection side = ForgeDirection.values()[nbt.getByte("stationSide")];

			station = robot.getRegistry().getStation(index.x, index.y, index.z, side);
		} else {
			// No saved station: fall back to the robot's linked station.
			station = robot.getLinkedStation();
		}
	}
}
import fontStyles from '@celo/react-components/styles/fonts'
import { componentStyles } from '@celo/react-components/styles/styles'
import * as React from 'react'
import { withNamespaces, WithNamespaces } from 'react-i18next'
import { Platform, StyleSheet, View } from 'react-native'
import { WebView } from 'react-native-webview'
import { NavigationScreenProps } from 'react-navigation'
import componentWithAnalytics from 'src/analytics/wrapper'
import BackButton from 'src/components/BackButton'
import i18n, { Namespaces } from 'src/i18n'

// Platform-specific location of the bundled license text. The iOS path is a
// placeholder until the file is actually bundled there (see comment below).
const licenseURI = Platform.select({
  ios: './LicenseDisclaimer.txt', // For when iOS is implemented!
  android: 'file:///android_asset/custom/LicenseDisclaimer.txt',
})

type Props = {} & WithNamespaces

/**
 * Settings screen that renders the bundled third-party license disclaimer
 * text inside a WebView.
 */
class Licenses extends React.Component<Props> {
  static navigationOptions = ({ navigation }: NavigationScreenProps) => ({
    headerStyle: {
      elevation: 0,
    },
    headerTitle: i18n.t('accountScreen10:licenses'),
    headerTitleStyle: [fontStyles.headerTitle, componentStyles.screenHeader],
    headerRight: <View />, // This helps vertically center the title
    headerLeft: <BackButton />,
  })

  render() {
    return (
      <WebView
        style={styles.licensesWebView}
        source={{ uri: licenseURI }}
        startInLoadingState={true}
        // Only allow loading local bundled files, nothing remote.
        originWhitelist={['file://']}
      />
    )
  }
}

const styles = StyleSheet.create({
  licensesWebView: {
    marginHorizontal: 20,
  },
})

export default componentWithAnalytics(withNamespaces(Namespaces.accountScreen10)(Licenses))
/*
 * Copyright 2007, <NAME>, <EMAIL>. All rights reserved.
 * Distributed under the terms of the MIT License.
 */
#ifndef RAW_TRANSLATOR_H
#define RAW_TRANSLATOR_H


#include "BaseTranslator.h"

#include <ByteOrder.h>
#include <DataIO.h>
#include <File.h>
#include <GraphicsDefs.h>
#include <InterfaceDefs.h>
#include <TranslationDefs.h>
#include <Translator.h>
#include <TranslatorFormats.h>

#define RAW_TRANSLATOR_VERSION B_TRANSLATION_MAKE_VERSION(0, 5, 0)
#define RAW_IMAGE_FORMAT 'RAWI'

// Quality/capability ratings used when registering the translator's
// supported input and output formats with the Translation Kit.
#define RAW_IN_QUALITY 0.90
#define RAW_IN_CAPABILITY 0.90
#define BITS_IN_QUALITY 1
#define BITS_IN_CAPABILITY 1

#define RAW_OUT_QUALITY 0.8
#define RAW_OUT_CAPABILITY 0.8
#define BITS_OUT_QUALITY 1
#define BITS_OUT_CAPABILITY 0.9


// Translator for digital-camera RAW images, built on BaseTranslator.
class RAWTranslator : public BaseTranslator {
	public:
		RAWTranslator();
		virtual ~RAWTranslator();

		// Decide whether inSource is a format this translator understands.
		virtual status_t DerivedIdentify(BPositionIO *inSource,
			const translation_format *inFormat, BMessage *ioExtension,
			translator_info *outInfo, uint32 outType);

		// Perform the actual conversion of inSource into outType.
		virtual status_t DerivedTranslate(BPositionIO *inSource,
			const translator_info *inInfo, BMessage *ioExtension,
			uint32 outType, BPositionIO *outDestination, int32 baseType);

		virtual BView *NewConfigView(TranslatorSettings *settings);

	private:
		// Callback reporting decode progress; data is caller-supplied context.
		static void _ProgressMonitor(const char* message, float percentage,
			void* data);
};

#endif	// RAW_TRANSLATOR_H
// TrimCommonFunctionNamePrefix trims common function-name prefix from name. func TrimCommonFunctionNamePrefix(name string) string { name = strings.TrimPrefix(name, "create") name = strings.TrimPrefix(name, "get") if len(name) > 4 && unicode.IsUpper(rune(name[3])) { name = strings.TrimPrefix(name, "Get") } switch name { case "CXXManglings", "ObjCManglings": default: name = TrimLanguagePrefix(name) } return name }
/// [cfg(target_os = "android")]: Compiler flag ("cfg") which exposes
/// the JNI interface for targeting Android in this case
///
/// [allow(non_snake_case)]: Tells the compiler not to warn if
/// we are not using snake_case for a variable or function names.
/// For Android Development we want to be consistent with code style.
#[cfg(target_os = "android")]
#[allow(non_snake_case)]
pub mod android {
    extern crate jni;

    // https://docs.rs/jni/0.19.0/jni/
    use self::jni::JNIEnv;
    use self::jni::objects::{JClass, JString};
    use self::jni::sys::jstring;

    use cryptor::encrypt;
    use cryptor::decrypt;

    /// Encrypts a String.
    #[no_mangle]
    pub unsafe extern fn Java_com_fernandocejas_rust_Cryptor_encrypt(
        env: JNIEnv,
        _: JClass,
        java_string: JString,
    ) -> jstring {
        // Let's call the Rust Library for encryption
        let to_encrypt = get_string(&env, java_string);
        let encrypted_str = encrypt(&to_encrypt);

        let output = env.new_string(&encrypted_str).expect("Couldn't create Java String!");
        output.into_inner()
    }

    /// Decrypts a String.
    #[no_mangle]
    pub unsafe extern fn Java_com_fernandocejas_rust_Cryptor_decrypt(
        env: JNIEnv,
        _: JClass,
        java_string: JString
    ) -> jstring {
        // Let's call the Rust Library for decryption
        let to_decrypt = get_string(&env, java_string);
        let decrypted_str = decrypt(&to_decrypt);

        let output = env.new_string(&decrypted_str).expect("Couldn't create Java String!");
        output.into_inner()
    }

    /// Reads and validates a Java String, returning it as an owned Rust String.
    ///
    /// BUG FIX: the previous version returned `as_ptr()` of the temporary
    /// `JavaStr` produced by `env.get_string(..)`. That temporary was dropped
    /// at the end of this function, so the returned `*const c_char` dangled
    /// and was then dereferenced via `CStr::from_ptr` in the callers
    /// (use-after-free / undefined behavior). Returning an owned `String`
    /// copies the data while the `JavaStr` is still alive.
    fn get_string(
        env: &JNIEnv,
        java_string: JString
    ) -> String {
        env.get_string(java_string)
            .expect("Invalid Pattern String")
            .into()
    }
}
/** * Applies this filter to an array with default samplings. * @param x the input array. * @return the output array. */ public float[][] apply(float[][] x) { int n1 = x[0].length; int n2 = x.length; float[][] y = new float[n2][n1]; apply(x,y); return y; }
/**
 * SameThreadExecutor executes task in the same thread as a method that made a
 * call to run(Runnable). It is suitable for situations where tasks finish very
 * quickly.
 *
 * @author Vjekoslav Nesek
 */
public class SameThreadExecutor implements Executor {

	private static Logger log = LoggerFactory.getLogger(SameThreadExecutor.class);

	/**
	 * No resources to release; this executor owns no threads.
	 *
	 * @see com.nmote.util.Executor#dispose()
	 */
	@Override
	public void dispose() {
	}

	/**
	 * Runs the task synchronously on the calling thread. While the task runs,
	 * the thread is temporarily renamed to task.toString() and set to the
	 * requested priority, then restored afterwards. If a SecurityManager
	 * forbids changing the thread's name/priority, the task still runs but
	 * no rename/re-prioritization (and hence no restore) is attempted.
	 * Exceptions thrown by the task are logged, not propagated.
	 *
	 * @see Executor#exec(Runnable, int)
	 */
	@Override
	public void exec(Runnable task, int priority) {
		Thread t = Thread.currentThread();
		String oldName;
		int oldPriority;
		boolean securityOk;
		try {
			// Save and replace the thread's identity for the task's duration.
			// NOTE(review): if setPriority throws after setName succeeded,
			// the name is intentionally not restored (securityOk stays false).
			oldName = t.getName();
			oldPriority = t.getPriority();
			t.setName(task.toString());
			t.setPriority(priority);
			securityOk = true;
		} catch (SecurityException e) {
			oldName = null;
			oldPriority = 0;
			securityOk = false;
		}

		try {
			task.run();
		} catch (Throwable error) {
			// Swallow and log so a failing task cannot kill the caller.
			log.error("Failed to execute {}", task, error);
		} finally {
			if (securityOk) {
				t.setName(oldName);
				t.setPriority(oldPriority);
			}
		}
	}
}
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>      /* read, write, close (previously missing) */
#include <sys/socket.h>  /* socket, bind, listen, accept */
#include <netdb.h>
#include <netinet/in.h>

/*
 * Minimal TCP server: listens on port 7000, accepts a single connection,
 * prints the client's message and replies "Hello client!".
 *
 * Fixes over the original: missing headers for the POSIX I/O calls,
 * leaked socket descriptors on every path, and a typo in one error message.
 */
int main(int argc, char *argv[])
{
    int sockfd, newsockfd, bfd;
    char buf[256];
    struct sockaddr_in servadd;
    int n;

    sockfd = socket(AF_INET, SOCK_STREAM, 0);
    if (sockfd < 0) {
        perror("ERROR opening socket");
        exit(1);
    }

    /* memset is the portable replacement for legacy bzero(). */
    memset(&servadd, 0, sizeof(servadd));
    servadd.sin_family = AF_INET;
    servadd.sin_addr.s_addr = INADDR_ANY;
    servadd.sin_port = htons(7000);

    bfd = bind(sockfd, (struct sockaddr *)&servadd, sizeof(servadd));
    if (bfd < 0) {
        perror("ERROR on binding");
        close(sockfd);
        exit(1);
    }

    listen(sockfd, 5);

    newsockfd = accept(sockfd, 0, 0);
    if (newsockfd < 0) {
        perror("ERROR on accept");
        close(sockfd);
        exit(1);
    }

    memset(buf, 0, sizeof(buf));
    n = read(newsockfd, buf, 255);
    if (n < 0) {
        perror("ERROR on reading from socket");
        close(newsockfd);
        close(sockfd);
        exit(1);
    }
    printf("Client's message: %s", buf);

    n = write(newsockfd, "Hello client!", 13);
    if (n < 0) {
        perror("ERROR writing to socket"); /* was: "ERROR writine to socket" */
        close(newsockfd);
        close(sockfd);
        exit(1);
    }

    /* Release both descriptors before exiting (previously leaked). */
    close(newsockfd);
    close(sockfd);
    return 0;
}
import { Command, GuildStorage, Client } from 'yamdbf';
import { GuildMember, Message, RichEmbed, User, Role } from 'discord.js';
import { SweeperClient } from '../../util/lib/SweeperClient';
import Constants from '../../util/Constants';
import * as _ from 'lodash';

/**
 * `.chan` command: lets users list, join and leave opt-in channels that are
 * gated by Discord roles. All replies are sent via DM; the triggering
 * message is deleted from text channels to keep them clean.
 */
export default class ListChannels extends Command<SweeperClient> {
	// Guild the command operates against; falls back to the configured
	// default guild when the command arrives via DM (no guild on message).
	public guildId: string;

	public constructor() {
		super({
			name: 'chan',
			desc: 'Handle Search',
			usage: '<prefix>chan <Argument> <Argument>',
			info: 'Argument information below...\u000d\u000d' +
				'empty : list available channels\u000d' +
				'add : add yourself to this channel\u000d' +
				'del : remove yourself from this channel\u000d',
			group: 'search',
			guildOnly: false
		});
	}

	/**
	 * Dispatches on the first argument: no args lists channels, `add`/`del`
	 * join or leave the role named by the remaining args.
	 */
	public async action(message: Message, args: Array<string>): Promise<any> {
		this.guildId = message.guild ? message.guild.id : Constants.defaultGuildId;
		let ret: Promise<any>;

		// no user specified
		if (!args[0]) {
			ret = this.sendRoleList(message);
		}

		// if there was an attempt, args[0] was too short
		if (args[0] && args[0] === 'add') {
			// Role names may contain spaces, so rejoin the remaining args.
			ret = this.addChannel(message, args.slice(1).join(' '));
		} else if (args[0] && args[0] === 'del') {
			ret = this.delChannel(message, args.slice(1).join(' '));
		}

		// Remove the invoking message from public text channels.
		if (message.channel.type === 'text') {
			message.delete().catch( err => console.log('error removing message', err));
		}

		return ret;
	}

	/** DMs the caller the list of joinable role-backed channels. */
	private async sendRoleList(message: Message): Promise<any> {
		const guild = this.client.guilds.get(this.guildId);
		const roles = guild.roles;
		let roleMessage: string = 'Here are the channels you may join:\u000d\u000d';

		roles.forEach((role: Role) => {
			// Hide administrative/system roles from the listing.
			if (!_.includes(Constants.ExcludedRoles, role.name)) {
				roleMessage += `${role.name}\u000d`;
			}
		});

		roleMessage += 'type \'.chan add <channel name>\' to join the channel';

		let dm = await message.author.createDM();
		return dm.send(roleMessage);
	}

	/** Adds the caller to the role backing the named channel (via DM feedback). */
	private async addChannel(message: Message, roleName: string): Promise<any> {
		const guild = this.client.guilds.get(this.guildId);
		let dm = await message.author.createDM();
		let author = await guild.fetchMember(message.author.id);
		let foundRole = this.findRole(roleName);

		if (!foundRole) {
			return dm.send(`The ${roleName} channel is not available`);
		} else {
			if (author.roles.has(foundRole.id)) {
				return dm.send(`You are already in the ${roleName} channel. Use .chan del to leave.`);
			} else {
				await dm.send(`Adding you to the ${roleName} channel...`);
				return await author.addRole(foundRole).then(null, Constants.reportError);
			}
		}
	}

	/** Removes the caller from the role backing the named channel (via DM feedback). */
	private async delChannel(message: Message, roleName: string): Promise<any> {
		const guild = this.client.guilds.get(this.guildId);
		let dm = await message.author.createDM();
		let author = await guild.fetchMember(message.author.id);
		let foundRole = this.findRole(roleName);

		if (!foundRole) {
			return dm.send(`The ${roleName} channel is not available`);
		} else {
			if (!author.roles.has(foundRole.id)) {
				return dm.send(`You are not in the ${roleName} channel. Use .chan add to join.`);
			} else {
				await dm.send(`Removing you from the ${roleName} channel...`);
				return await author.removeRole(foundRole).then(null, Constants.reportError);
			}
		}
	}

	/**
	 * Finds a joinable role by exact name.
	 * NOTE(review): the `availability` expression admits any role that is not
	 * excluded OR is grey-listed; presumably GreyRoles is a subset of
	 * ExcludedRoles that should still be joinable — confirm against Constants.
	 */
	private findRole(roleName: string): Role {
		const guild = this.client.guilds.get(this.guildId);
		const roles = guild.roles;
		let availableRoles: Role[] = [];

		roles.forEach((role: Role) => {
			let available = !_.includes(Constants.ExcludedRoles, role.name) || _.includes(Constants.GreyRoles, role.name);
			if (available) {
				availableRoles.push(role);
			}
		});

		return availableRoles.find((role: Role) => { return role.name === roleName; });
	}
}
// PrettyPrint outputs human-readable information about the user to the given writer at some level of detail. func (user User) PrettyPrint(wr io.Writer, detail prettyprint.DetailLevel) error { userTpl := ` {{ define "user_sgl" }}{{ .Username }}{{ end }} {{ define "user_medium" }}{{ .Username }} - {{ .Email }}{{ end }} {{ define "user_full " }}{{ template "user_medium" }} Authorized keys: {{ range .AuthorizedKeys }} {{ . }} {{ end }} ` return prettyprint.Run(wr, userTpl, "user"+string(detail), user) }
// Throw will throw error out with default message func (e *ErrManager) Throw() *Throwable { if e.isError { return createThrowable(e.err, nil) } return createEmptyThrowable() }
Hi - We've taken the decision to stop providing the RTB dedicated hosting service beyond the 16th of February. Our hosting provider recently changed their policy on DoS attacks meaning they'll no longer be reconnecting servers after they get null-routed for DoS attacks - and the RTB service gets DDoS'd fairly regularly. We've considered a number of options to enable us to continue providing the service including switching hosts and paying for DoS protection but the effort involved for us just isn't worth it. With the Steam release, we were hoping for access to a larger client base to enable us to provide more reliable service with the additional income. Unfortunately it's been two months and we're still having to turn away all Steam users because there's no way for them to authenticate their dedicated servers without a key. This lack of interest and co-operation from Blockland makes it too hard to justify our continued efforts to provide an excellent service under these circumstances so we've opted to refocus our attention on some other indie game communities instead. To be frank, having to deal with a completely ambivalent development team when we're providing such a high quality professional level of service to a community is insulting and not something we're interested in continuing to do. The service will be shut down from February 16th and everyone with an active contract will be refunded via PayPal. Details on refunds will be forthcoming shortly. We strongly recommend you retrieve any files you want to keep from your servers before this date, but we will manually retrieve files for people for up to a week after the shut-off date. We apologise for the lack of notice and our unfortunate inability to fulfil our obligations to our clients but the situation is unavoidable, we hope the generous refund goes some way to making up for this. 
Finally we'd like to thank all our clients that purchased service from us, we appreciate your trust in us and hope the experience was enjoyable - it's been our absolute pleasure to host Blockland's most popular servers for the past year. Thanks.
<reponame>KaninchenSpeed/NoCopyrightSounds-API export const pass = (name: string) => { console.log(`✔️ passed | ${name}`) } export const fail = (name: string, reason: any) => { console.error(`❌ failed | ${name} | ${reason}`) } export const allPass = () => { console.log(`\n\n ----------------------------- ✔️ all tests passed ----------------------------- `) } export const allFail = () => { console.log(`\n\n ------------------------------------ ❌ at least one test failed ------------------------------------ `) }
// verifyBlockBlobDirUpload verifies the directory recursively uploaded to the container. func verifyBlockBlobDirUpload(testBlobCmd TestBlobCommand) { sasUrl, err := url.Parse(testBlobCmd.Subject) if err != nil { fmt.Println("error parsing the container sas ", testBlobCmd.Subject) os.Exit(1) } containerName := strings.SplitAfterN(sasUrl.Path[1:], "/", 2)[0] sasUrl.Path = "/" + containerName p := ste.NewBlobPipeline(azblob.NewAnonymousCredential(), azblob.PipelineOptions{ Telemetry: azblob.TelemetryOptions{ Value: common.UserAgent, }, }, ste.XferRetryOptions{ Policy: 0, MaxTries: ste.UploadMaxTries, TryTimeout: 10 * time.Minute, RetryDelay: ste.UploadRetryDelay, MaxRetryDelay: ste.UploadMaxRetryDelay}, nil) containerUrl := azblob.NewContainerURL(*sasUrl, p) testCtx := context.WithValue(context.Background(), ste.ServiceAPIVersionOverride, defaultServiceApiVersion) dirName := strings.Split(testBlobCmd.Object, "/") searchPrefix := dirName[len(dirName)-1] + "/" for marker := (azblob.Marker{}); marker.NotDone(); { listBlob, err := containerUrl.ListBlobsFlatSegment(testCtx, marker, azblob.ListBlobsSegmentOptions{Prefix: searchPrefix}) if err != nil { fmt.Println("error listing blobs inside the container. 
Please check the container sas") os.Exit(1) } for _, blobInfo := range listBlob.Segment.BlobItems { size := blobInfo.Properties.ContentLength get, err := containerUrl.NewBlobURL(blobInfo.Name).Download(testCtx, 0, *size, azblob.BlobAccessConditions{}, false) if err != nil { fmt.Println(fmt.Sprintf("error downloading the blob %s", blobInfo.Name)) os.Exit(1) } blobBytesDownloaded, err := ioutil.ReadAll(get.Body(azblob.RetryReaderOptions{})) if err != nil { fmt.Println(fmt.Sprintf("error reading the body of blob %s downloaded and failed with error %s", blobInfo.Name, err.Error())) os.Exit(1) } blobName := strings.Replace(blobInfo.Name, searchPrefix, "", 1) objectLocalPath := testBlobCmd.Object + string(os.PathSeparator) + blobName sFileInfo, err := os.Stat(objectLocalPath) if err != nil { fmt.Println("error geting the subject blob file info on local disk ") os.Exit(1) } sFile, err := os.Open(objectLocalPath) if err != nil { fmt.Println("error opening file ", sFile) os.Exit(1) } sMap, err := NewMMF(sFile, false, 0, int64(sFileInfo.Size())) if err != nil { fmt.Println("error memory mapping the file ", sFileInfo.Name()) } actualMd5 := md5.Sum(blobBytesDownloaded) expectedMd5 := md5.Sum(sMap) if actualMd5 != expectedMd5 { fmt.Println("the upload blob md5 is not equal to the md5 of actual blob on disk for blob ", blobInfo.Name) os.Exit(1) } } marker = listBlob.NextMarker } }
// Copyright 2018 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package protocols

import (
	"testing"

	"github.com/ie310mu/ie310go/forks/github.com/ethereum/go-ethereum/p2p"
	"github.com/ie310mu/ie310go/forks/github.com/ethereum/go-ethereum/p2p/simulations/adapters"
	"github.com/ie310mu/ie310go/forks/github.com/ethereum/go-ethereum/rlp"
)

// dummy Balance implementation: records the last accounted amount and peer so
// the tests can inspect what the accounting hook charged.
type dummyBalance struct {
	amount int64
	peer   *Peer
}

// dummy Prices implementation
type dummyPrices struct{}

// a dummy message which needs size based accounting
// sender pays
type perBytesMsgSenderPays struct {
	Content string
}

// a dummy message which needs size based accounting
// receiver pays
type perBytesMsgReceiverPays struct {
	Content string
}

// a dummy message which is paid for per unit
// sender pays
type perUnitMsgSenderPays struct{}

// receiver pays
type perUnitMsgReceiverPays struct{}

// a dummy message which has zero as its price
type zeroPriceMsg struct{}

// a dummy message which has no accounting
type nilPriceMsg struct{}

// Price returns the fixed price for each of the dummy message types above;
// nil means the message is exempt from accounting.
func (d *dummyPrices) Price(msg interface{}) *Price {
	switch msg.(type) {
	// size based message cost, receiver pays
	case *perBytesMsgReceiverPays:
		return &Price{
			PerByte: true,
			Value:   uint64(100),
			Payer:   Receiver,
		}
	// size based message cost, sender pays
	case *perBytesMsgSenderPays:
		return &Price{
			PerByte: true,
			Value:   uint64(100),
			Payer:   Sender,
		}
	// unitary cost, receiver pays
	case *perUnitMsgReceiverPays:
		return &Price{
			PerByte: false,
			Value:   uint64(99),
			Payer:   Receiver,
		}
	// unitary cost, sender pays
	case *perUnitMsgSenderPays:
		return &Price{
			PerByte: false,
			Value:   uint64(99),
			Payer:   Sender,
		}
	case *zeroPriceMsg:
		return &Price{
			PerByte: false,
			Value:   uint64(0),
			Payer:   Sender,
		}
	case *nilPriceMsg:
		return nil
	}
	return nil
}

// dummy accounting implementation, only stores values for later check
func (d *dummyBalance) Add(amount int64, peer *Peer) error {
	d.amount = amount
	d.peer = peer
	return nil
}

// testCase pairs a message with its size and the balance deltas expected on
// the send and receive paths respectively.
type testCase struct {
	msg        interface{}
	size       uint32
	sendResult int64
	recvResult int64
}

// lowest level unit test
func TestBalance(t *testing.T) {
	// create instances
	balance := &dummyBalance{}
	prices := &dummyPrices{}
	// create the spec
	spec := createTestSpec()
	// create the accounting hook for the spec
	acc := NewAccounting(balance, prices)
	// create a peer
	id := adapters.RandomNodeConfig().ID
	p := p2p.NewPeer(id, "testPeer", nil)
	peer := NewPeer(p, &dummyRW{}, spec)
	// price depends on size, receiver pays
	msg := &perBytesMsgReceiverPays{Content: "testBalance"}
	size, _ := rlp.EncodeToBytes(msg)

	testCases := []testCase{
		{
			msg,
			uint32(len(size)),
			int64(len(size) * 100),
			int64(len(size) * -100),
		},
		{
			&perBytesMsgSenderPays{Content: "testBalance"},
			uint32(len(size)),
			int64(len(size) * -100),
			int64(len(size) * 100),
		},
		{
			&perUnitMsgSenderPays{},
			0,
			int64(-99),
			int64(99),
		},
		{
			&perUnitMsgReceiverPays{},
			0,
			int64(99),
			int64(-99),
		},
		{
			&zeroPriceMsg{},
			0,
			int64(0),
			int64(0),
		},
		{
			&nilPriceMsg{},
			0,
			int64(0),
			int64(0),
		},
	}
	// every case is exercised on both the send and the receive path.
	checkAccountingTestCases(t, testCases, acc, peer, balance, true)
	checkAccountingTestCases(t, testCases, acc, peer, balance, false)
}

// checkAccountingTestCases runs each test case through either Send or Receive
// (selected by the send flag) and verifies the resulting balance delta.
func checkAccountingTestCases(t *testing.T, cases []testCase, acc *Accounting, peer *Peer, balance *dummyBalance, send bool) {
	for _, c := range cases {
		var err error
		var expectedResult int64
		// reset balance before every check
		balance.amount = 0
		if send {
			err = acc.Send(peer, c.size, c.msg)
			expectedResult = c.sendResult
		} else {
			err = acc.Receive(peer, c.size, c.msg)
			expectedResult = c.recvResult
		}

		checkResults(t, err, balance, peer, expectedResult)
	}
}

// checkResults asserts that accounting succeeded, charged the right peer and
// produced the expected balance delta.
func checkResults(t *testing.T, err error, balance *dummyBalance, peer *Peer, result int64) {
	if err != nil {
		t.Fatal(err)
	}
	if balance.peer != peer {
		t.Fatalf("expected Add to be called with peer %v, got %v", peer, balance.peer)
	}
	if balance.amount != result {
		t.Fatalf("Expected balance to be %d but is %d", result, balance.amount)
	}
}

// create a test spec
func createTestSpec() *Spec {
	spec := &Spec{
		Name:       "test",
		Version:    42,
		MaxMsgSize: 10 * 1024,
		Messages: []interface{}{
			&perBytesMsgReceiverPays{},
			&perBytesMsgSenderPays{},
			&perUnitMsgReceiverPays{},
			&perUnitMsgSenderPays{},
			&zeroPriceMsg{},
			&nilPriceMsg{},
		},
	}
	return spec
}
def optimize_basic(self, verbose=False):
    """Tune every mesh layer once, in order, toward the target output power.

    After all layers are tuned, the mesh matrices are recomputed and the
    input field is re-coupled into the mesh.
    """
    for layer_index in range(self.M):
        if verbose:
            print('working on layer {} of {}'.format(layer_index, self.M))
        # Field values feeding this layer, and the layer itself.
        inputs = self.mesh.partial_values[layer_index]
        current = self.mesh.layers[layer_index]
        # Desired power distribution derived from the target output field.
        desired = power_vec(self.output_target)
        self.mesh.layers[layer_index] = self.tune_layer(
            L=current,
            input_values=inputs,
            desired_power=desired,
            verbose=verbose,
        )
    self.mesh.recompute_matrices()
    self.mesh.input_couple(self.input_values)
package util

import (
	"math/big"
	"testing"

	"github.com/stretchr/testify/assert"
)

// NOTE(review): several test names below misspell "Parse" as "Pasre". They
// are left unchanged because `go test` discovers tests by name and a corrected
// TestParseUnit already exists at the bottom of this file.

// TestPasreUnit checks that every accepted unit spelling (meycoin/gaer/aer,
// any case, or no unit at all) parses to the expected aer amount.
func TestPasreUnit(t *testing.T) {
	amount, err := ParseUnit("1 meycoin")
	assert.NoError(t, err, "parsing meycoin")
	t.Log(amount)
	amount, err = ParseUnit("101 MeyCoin")
	assert.NoError(t, err, "parsing MeyCoin")
	t.Log(amount)
	amount, err = ParseUnit("101 MEYCOIN")
	assert.NoError(t, err, "parsing MEYCOIN")
	t.Log(amount)
	amount, err = ParseUnit("123 aer")
	assert.NoError(t, err, "parsing aer")
	assert.Equal(t, new(big.Int).SetUint64(123), amount, "123 aer")
	amount, err = ParseUnit("4567 Aer")
	assert.NoError(t, err, "parsing Aer")
	assert.Equal(t, new(big.Int).SetUint64(4567), amount, "4567 Aer")
	amount, err = ParseUnit("101 AER")
	assert.NoError(t, err, "parsing AER")
	assert.Equal(t, new(big.Int).SetUint64(101), amount, "101 AER")
	amount, err = ParseUnit("1 gaer")
	assert.NoError(t, err, "parsing gaer")
	assert.Equal(t, new(big.Int).SetUint64(1000000000), amount, "1 gaer")
	amount, err = ParseUnit("1010")
	assert.Equal(t, new(big.Int).SetUint64(1010), amount, "1010")
	assert.NoError(t, err, "parsing implicit unit")
	t.Log(amount)
}

// TestPasreDecimalUnit checks parsing of fractional amounts up to the maximum
// supported number of decimal places (18 for meycoin, 9 for gaer).
func TestPasreDecimalUnit(t *testing.T) {
	amount, err := ParseUnit("1.01 meycoin")
	assert.NoError(t, err, "parsing point meycoin")
	assert.Equal(t, new(big.Int).SetUint64(1010000000000000000), amount, "converting result")
	amount, err = ParseUnit("1.01 gaer")
	assert.NoError(t, err, "parsing point gaer")
	assert.Equal(t, new(big.Int).SetUint64(1010000000), amount, "converting result")
	amount, err = ParseUnit("0.123456789012345678 meycoin")
	assert.NoError(t, err, "parsing point")
	t.Log(amount)
	amount, err = ParseUnit("0.100000000000000001 meycoin")
	assert.NoError(t, err, "parsing point max length of decimal")
	t.Log(amount)
	amount, err = ParseUnit("499999999.100000000000000001 meycoin")
	assert.NoError(t, err, "parsing point max length of decimal")
	t.Log(amount)
	amount, err = ParseUnit("499999999100000000000000001 aer")
	assert.NoError(t, err, "parsing point max length of decimal")
	t.Log(amount)
}

// TestFailPasreUnit checks that malformed amounts, unknown units, decimals on
// integer-only units and misplaced dots are all rejected.
func TestFailPasreUnit(t *testing.T) {
	amount, err := ParseUnit("0.0000000000000000001 meycoin")
	assert.Error(t, err, "exceed max length of decimal")
	t.Log(amount)
	amount, err = ParseUnit("499999999100000000000000001.1 aer")
	assert.Error(t, err, "parsing point max length of decimal")
	amount, err = ParseUnit("1 meycoina")
	assert.Error(t, err, "parsing meycoina")
	amount, err = ParseUnit("1 meey")
	assert.Error(t, err, "parsing meey")
	amount, err = ParseUnit("1 ameycoin")
	assert.Error(t, err, "parsing ameycoin")
	amount, err = ParseUnit("1 meycoin ")
	assert.Error(t, err, "check fail")
	amount, err = ParseUnit("1meycoin.1aer")
	assert.Error(t, err, "check fail")
	amount, err = ParseUnit("0.1")
	assert.Error(t, err, "default unit assumed meycoin")
	amount, err = ParseUnit("0.1.1")
	assert.Error(t, err, "only one dot is allowed")
}

// TestConvertUnit checks formatting of aer amounts back into human-readable
// unit strings, including sub-unit fractions and zero.
func TestConvertUnit(t *testing.T) {
	result, err := ConvertUnit(new(big.Int).SetUint64(1000000000000000000), "meycoin")
	assert.NoError(t, err, "convert 1 meycoin")
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(1020300000000000000), "meycoin")
	assert.NoError(t, err, "convert 1.0203 meycoin")
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(1000000000), "gaer")
	assert.NoError(t, err, "convert 1 gaer")
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(1), "gaer")
	assert.NoError(t, err, "convert 0.000000001 gaer")
	assert.Equal(t, "0.000000001 gaer", result)
	result, err = ConvertUnit(new(big.Int).SetUint64(10), "gaer")
	assert.NoError(t, err, "convert 0.00000001 gaer")
	assert.Equal(t, "0.00000001 gaer", result)
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(0), "gaer")
	assert.NoError(t, err, "convert 0 gaer")
	assert.Equal(t, "0 gaer", result)
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(1), "aer")
	assert.NoError(t, err, "convert 1 aer")
	assert.Equal(t, "1 aer", result)
	t.Log(result)
	result, err = ConvertUnit(new(big.Int).SetUint64(1000000000000000000), "gaer")
	assert.NoError(t, err, "convert 1000000000 gaer")
	t.Log(result)
}

// TestParseUnit is the table-driven variant of the parsing tests above.
func TestParseUnit(t *testing.T) {
	n100 := big.NewInt(100)
	n1000 := big.NewInt(1000)
	OneGaer := big.NewInt(1000000000)
	OneMeyCoin := big.NewInt(1000000000).Mul(OneGaer, OneGaer)
	tests := []struct {
		name    string
		args    string
		want    *big.Int
		wantErr bool
	}{
		{"TNum", "10000", big.NewInt(10000), false},
		{"TNumDot", "1000.5", big.NewInt(0), true},
		{"TNum20pow", "100000000000000000000", big.NewInt(1).Mul(OneMeyCoin, n100), false},
		{"TAer", "10000aer", big.NewInt(10000), false},
		{"TAerDot", "1000.5aer", big.NewInt(0), true},
		{"TAer20pow", "100000000000000000000", big.NewInt(1).Mul(OneMeyCoin, n100), false},
		{"TGaer", "1000gaer", big.NewInt(1).Mul(OneGaer, n1000), false},
		{"TGaerDot", "1000.21245gaer", big.NewInt(1).Add(big.NewInt(0).Mul(OneGaer, n1000), big.NewInt(212450000)), false},
		{"TGaerDot2", "0.21245gaer", big.NewInt(212450000), false},
		{"TGaerDot3", ".21245gaer", big.NewInt(212450000), false},
		{"TGaer11pow", "100000000000gaer", big.NewInt(1).Mul(OneMeyCoin, n100), false},
		{"TMeyCoin", "100meycoin", big.NewInt(1).Mul(OneMeyCoin, n100), false},
		{"TMeyCoinDot", "1000.00000000021245meycoin", big.NewInt(1).Add(big.NewInt(0).Mul(OneMeyCoin, n1000), big.NewInt(212450000)), false},
		{"TWrongNum", "100d0.321245", big.NewInt(1), true},
		{"TWrongNum2", "100d0.321245meycoin", big.NewInt(1), true},
		{"TWrongUnit", "100d0.321245argo", big.NewInt(1), true},
		{"TWrongUnit", "100d0.321245ear", big.NewInt(1), true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := ParseUnit(tt.args)
			if (err != nil) != tt.wantErr {
				t.Errorf("ParseUnit() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if err == nil && (tt.want.Cmp(got) != 0) {
				t.Errorf("ParseUnit() got = %v, want %v", got, tt.want)
			}
		})
	}
}
A bacterial high-affinity GABA binding protein: isolation and characterization. A gamma-aminobutyric acid (GABA) binding protein (GBP) was isolated from a bacterial mutant which has high-affinity GABA binding characteristics comparable with the GABA(A) brain receptor in mammals. The GBP was partially purified and characterized and was shown to be a periplasmic protein of approximately 42,000 molecular weight. To determine the molecular weight, a bacterial GABA binding assay was used with SDS-PAGE. This procedure did not require large amounts or complete purification of protein and may be useful as a simple method in estimating the molecular weight of other bacterial binding proteins.
// newScanScmResp sets protobuf SCM scan response with module or namespace info. func newScanScmResp(inResp *scm.ScanResponse, inErr error) (*ctlpb.ScanScmResp, error) { outResp := new(ctlpb.ScanScmResp) outResp.State = new(ctlpb.ResponseState) if inErr != nil { outResp.State = newResponseState(inErr, ctlpb.ResponseStatus_CTL_ERR_SCM, "") return outResp, nil } if len(inResp.Namespaces) == 0 { outResp.Modules = make(proto.ScmModules, 0, len(inResp.Modules)) if err := (*proto.ScmModules)(&outResp.Modules).FromNative(inResp.Modules); err != nil { return nil, err } return outResp, nil } outResp.Namespaces = make(proto.ScmNamespaces, 0, len(inResp.Namespaces)) if err := (*proto.ScmNamespaces)(&outResp.Namespaces).FromNative(inResp.Namespaces); err != nil { return nil, err } return outResp, nil }
// lld can sometimes produce a build with an imported mutable __stack_pointer // (i.e. when linking with -fpie). This method internalizes the // __stack_pointer and initializes it from an immutable global instead. // For -shared builds we instead call replaceStackPointerGlobal. void EmscriptenGlueGenerator::internalizeStackPointerGlobal() { Global* stackPointer = getStackPointerGlobal(wasm); if (!stackPointer || !stackPointer->imported() || !stackPointer->mutable_) { return; } Name internalName = stackPointer->name; Name externalName = internalName.c_str() + std::string("_import"); stackPointer->name = externalName; stackPointer->mutable_ = false; wasm.updateMaps(); Builder builder(wasm); auto* init = builder.makeGlobalGet(externalName, stackPointer->type); auto* sp = builder.makeGlobal( internalName, stackPointer->type, init, Builder::Mutable); wasm.addGlobal(sp); }
// RewriteRegexConditions rewrites regex conditions to make better use of the
// database index.
//
// Conditions that can currently be simplified are:
//
// - host =~ /^foo$/ becomes host = 'foo'
// - host !~ /^foo$/ becomes host != 'foo'
//
// Note: if the regex contains groups, character classes, repetition or
// similar, it's likely it won't be rewritten. In order to support rewriting
// regexes with these characters would be a lot more work.
func (s *SelectStatement) RewriteRegexConditions() {
	s.Condition = RewriteExpr(s.Condition, func(e Expr) Expr {
		be, ok := e.(*BinaryExpr)
		if !ok || (be.Op != EQREGEX && be.Op != NEQREGEX) {
			// Only regex comparison expressions are candidates.
			return e
		}

		// The RHS of an EQREGEX/NEQREGEX comparison is always a regex literal.
		rhs := be.RHS.(*RegexLiteral)

		// matchExactRegex reports the literal string(s) an anchored regex
		// matches exactly; ok is false when the pattern is too complex to
		// rewrite (groups, classes, repetition, ...).
		vals, ok := matchExactRegex(rhs.Val.String())
		if !ok {
			return e
		}

		// =~ turns into equality joined by OR; !~ into inequality joined by AND.
		var concatOp Token
		if be.Op == EQREGEX {
			be.Op = EQ
			concatOp = OR
		} else {
			be.Op = NEQ
			concatOp = AND
		}

		switch {
		case len(vals) == 0:
			// Pattern matches only the empty string.
			be.RHS = &StringLiteral{}
		case len(vals) == 1:
			be.RHS = &StringLiteral{Val: vals[0]}
		default:
			// Multiple alternatives: build a left-leaning chain of
			// comparisons joined by concatOp, parenthesized so the
			// rewritten subtree keeps its precedence in the larger condition.
			expr := &BinaryExpr{
				Op:  be.Op,
				LHS: be.LHS,
				RHS: &StringLiteral{Val: vals[0]},
			}
			for i := 1; i < len(vals); i++ {
				expr = &BinaryExpr{
					Op:  concatOp,
					LHS: expr,
					RHS: &BinaryExpr{
						Op:  be.Op,
						LHS: be.LHS,
						RHS: &StringLiteral{Val: vals[i]},
					},
				}
			}
			return &ParenExpr{Expr: expr}
		}
		return be
	})

	// Unwrap a paren that may have been introduced when the whole condition
	// was a single multi-alternative regex.
	if cond, ok := s.Condition.(*ParenExpr); ok {
		s.Condition = cond.Expr
	}
}
// FromYamlDSL creates a slice of flows from a Camel YAML DSL stream. func FromYamlDSL(reader io.Reader) ([]v1.Flow, error) { buffered, err := ioutil.ReadAll(reader) if err != nil { return nil, err } var flows []v1.Flow jsonData, err := yaml.ToJSON(buffered) if err != nil { return nil, err } if err = json.Unmarshal(jsonData, &flows); err != nil { return nil, err } return flows, err }
/** * Extracts details about regions * @author Gregory Green * */ public class RegionCsvStatsVisitor implements StatsVisitor { private static final String [] defaultStateNames = {"dataStoreEntryCount", "dataStoreBytesInUse", "lowRedundancyBucketCount", "localMaxMemory"}; private final CsvWriter csvWriter; private final String[] statNames; /** * * @param file the STAT file */ public RegionCsvStatsVisitor(File file) { this(file,null); }//------------------------------------------------ public RegionCsvStatsVisitor(File file,String[] statNames) { if(statNames !=null) { this.statNames = statNames; } else { this.statNames = Config.getPropertyStrings(RegionCsvStatsVisitor.class,"statNames",defaultStateNames); } csvWriter = new CsvWriter(file); } @Override public void visitResourceInst(ResourceInst resourceInst) { String name = resourceInst.getName(); if(!resourceInst.getType().isRegion()) return; ArrayList<String> values = new ArrayList<String>(); ArrayList<String> headers = new ArrayList<String>(); headers.add("machine"); headers.add("region"); values.add(resourceInst.getArchive().getArchiveInfo().getMachine()); values.add(name); StatValue[] statValues = resourceInst.getStatValues(); if(statValues == null) return; /* * dataStoreEntryCount * dataStoreBytesInUse * lowRedundancyBucketCount * configuredRedundantCopies * actualRedundantCopies * localMaxMemory */ for (String statName : statNames) { //String statName = statValue.getDescriptor().getName(); StatValue dataStoreEntryCount = resourceInst.getStatValue(statName); headers.add(statName+" "+resourceInst.getType().getStat(statName).getDescription()); values.add(String.valueOf(dataStoreEntryCount.getSnapshotsMaximum())); } try { csvWriter.writeHeader(headers); csvWriter.appendRow(values); } catch (IOException e) { throw new RuntimeException(e); } } }
/**
 * Checks inputs.
 *
 * @param selected
 *            Selected pilot points.
 * @return True if inputs are acceptable.
 */
private boolean checkInputs(ObservableList<PilotPointTableItem> selected) {

	// no A/C model loaded -> nothing can be linked
	if (model_ == null)
		return false;

	// nothing selected
	if ((selected == null) || selected.isEmpty()) {
		showWarning("Please select at least 1 pilot point to link.", 30, link_);
		return false;
	}

	// validate every selected pilot point
	ArrayList<String> seenMissionEids = new ArrayList<>();
	for (PilotPointTableItem item : selected) {

		// must belong to the same A/C program as the model
		String program = item.getProgram();
		if ((program == null) || !program.equals(model_.getProgram())) {
			showWarning("There are pilot points with different A/C programs. Pilot points must belong to same A/C program with the A/C model.", 50, link_);
			return false;
		}

		// element ID is mandatory for linking
		String eid = item.getEid();
		if ((eid == null) || eid.equals("N/A")) {
			showWarning("There are pilot points without EID. Element ID is required to link the pilot point to A/C model.", 50, link_);
			return false;
		}

		// mission + EID combination must be unique
		String missionEid = item.getMission() + "_" + eid;
		if (seenMissionEids.contains(missionEid)) {
			showWarning("There are duplicate pilot points. Element IDs must be unique for each fatigue mission.", 50, link_);
			return false;
		}
		seenMissionEids.add(missionEid);
	}

	// all checks passed
	return true;
}
/**
 * Abstract implementation of the {@link org.axonframework.queryhandling.responsetypes.ResponseType} which contains
 * match functions for the majority of the {@link java.lang.reflect.Type} options available.
 * For single instance response types, a direct assignable to check will be performed. For multiple instances response
 * types, the match will be performed against the containing type of that array/collection/etc.
 * Proves useful for reuse among ResponseType implementations.
 *
 * @param <R> The response type which will be matched against and converted to
 * @author Steven van Beelen
 * @since 3.2
 */
public abstract class AbstractResponseType<R> implements ResponseType<R> {

    /** The raw class that all match helpers below compare candidate types against. */
    protected final Class<?> expectedResponseType;

    /**
     * Instantiate a {@link org.axonframework.queryhandling.responsetypes.ResponseType} with the given
     * {@code expectedResponseType} as the type to be matched against and to which the query response should be
     * converted to, as is or as the contained type for an array/list/etc.
     *
     * @param expectedResponseType the response type which is expected to be matched against and to be returned, as is
     *                             or as the contained type for an array/list/etc
     */
    protected AbstractResponseType(Class<?> expectedResponseType) {
        this.expectedResponseType = expectedResponseType;
    }

    @Override
    public Class<?> getExpectedResponseType() {
        return expectedResponseType;
    }

    /**
     * Tries to unwrap generic type if provided {@code type} is of type {@link Future}.
     *
     * @param type to be unwrapped
     * @return unwrapped generic, or original if provided {@code type} is not of type {@link Future}
     */
    protected Type unwrapIfTypeFuture(Type type) {
        Type futureType = TypeReflectionUtils.getExactSuperType(type, Future.class);
        if (futureType instanceof ParameterizedType) {
            Type[] actualTypeArguments = ((ParameterizedType) futureType).getActualTypeArguments();
            if (actualTypeArguments.length == 1) {
                // Future<T> -> T
                return actualTypeArguments[0];
            }
        }
        return type;
    }

    /** Whether {@code responseType} is an {@link Iterable} parameterized with the expected type. */
    protected boolean isIterableOfExpectedType(Type responseType) {
        Type iterableType = TypeReflectionUtils.getExactSuperType(responseType, Iterable.class);
        return iterableType != null && isParameterizedTypeOfExpectedType(iterableType);
    }

    /** Whether {@code responseType} is a {@link Stream} parameterized with the expected type. */
    protected boolean isStreamOfExpectedType(Type responseType) {
        Type streamType = TypeReflectionUtils.getExactSuperType(responseType, Stream.class);
        return streamType != null && isParameterizedTypeOfExpectedType(streamType);
    }

    /**
     * Whether {@code responseType} is a parameterized type with exactly one type argument that is either directly
     * assignable to, generically bounded by, or a wildcard whose upper bound matches the expected response type.
     */
    protected boolean isParameterizedTypeOfExpectedType(Type responseType) {
        boolean isParameterizedType = isParameterizedType(responseType);
        if (!isParameterizedType) {
            return false;
        }

        Type[] actualTypeArguments = ((ParameterizedType) responseType).getActualTypeArguments();
        boolean hasOneTypeArgument = actualTypeArguments.length == 1;
        if (!hasOneTypeArgument) {
            return false;
        }

        Type actualTypeArgument = actualTypeArguments[0];
        return isAssignableFrom(actualTypeArgument) ||
                isGenericAssignableFrom(actualTypeArgument) ||
                isWildcardTypeWithMatchingUpperBound(actualTypeArgument);
    }

    /** Whether {@code responseType} is a {@link ParameterizedType}. */
    protected boolean isParameterizedType(Type responseType) {
        return responseType instanceof ParameterizedType;
    }

    /** Whether {@code responseType} is a wildcard (e.g. {@code ? extends X}) with a matching upper bound. */
    protected boolean isWildcardTypeWithMatchingUpperBound(Type responseType) {
        boolean isWildcardType = isWildcardType(responseType);
        if (!isWildcardType) {
            return false;
        }

        Type[] upperBounds = ((WildcardType) responseType).getUpperBounds();
        return Arrays.stream(upperBounds).anyMatch(this::isAssignableFrom) ||
                Arrays.stream(upperBounds).anyMatch(this::isGenericAssignableFrom);
    }

    /** Whether {@code responseType} is a {@link WildcardType}. */
    protected boolean isWildcardType(Type responseType) {
        return responseType instanceof WildcardType;
    }

    /** Whether {@code responseType} is an array class whose component type matches the expected type. */
    protected boolean isArrayOfExpectedType(Type responseType) {
        return isArray(responseType) && isAssignableFrom(((Class) responseType).getComponentType());
    }

    /** Whether {@code responseType} is an array {@link Class}. */
    protected boolean isArray(Type responseType) {
        return responseType instanceof Class && ((Class) responseType).isArray();
    }

    /** Whether {@code responseType} is a generic array whose component type matches the expected type. */
    protected boolean isGenericArrayOfExpectedType(Type responseType) {
        return isGenericArrayType(responseType) &&
                isGenericAssignableFrom(((GenericArrayType) responseType).getGenericComponentType());
    }

    /** Whether {@code responseType} is a {@link GenericArrayType} (e.g. {@code T[]}). */
    protected boolean isGenericArrayType(Type responseType) {
        return responseType instanceof GenericArrayType;
    }

    /** Whether {@code responseType} is a type variable with at least one bound assignable to the expected type. */
    protected boolean isGenericAssignableFrom(Type responseType) {
        return isTypeVariable(responseType) &&
                Arrays.stream(((TypeVariable) responseType).getBounds())
                      .anyMatch(this::isAssignableFrom);
    }

    /** Whether {@code responseType} is a {@link TypeVariable}. */
    protected boolean isTypeVariable(Type responseType) {
        return responseType instanceof TypeVariable;
    }

    /** Whether {@code responseType} is a raw {@link Class} assignable to the expected response type. */
    protected boolean isAssignableFrom(Type responseType) {
        return responseType instanceof Class && expectedResponseType.isAssignableFrom((Class) responseType);
    }
}
def pct_high(self):
    """Return the stored ``pct_high`` value (read-only accessor for the
    name-mangled ``__pct_high`` attribute)."""
    return self.__pct_high
/** convert a real into our preferred form compatible with 10303-11
 * (i.e. decimal point is required; no trailing zeros)
 * uses a static buffer, so NOT thread safe
 * \param r the real to convert
 * \returns const char pointer to static buffer containing ascii representation of real
 */
const char * real2exp( double r ) {
#define PP_SMALL_BUF_SZ 80
    static char result[PP_SMALL_BUF_SZ] = { 0 };
    char * pos = result, * lcNumeric = setlocale( LC_NUMERIC, NULL );
    /* compute an upper bound on the number of digits needed to print the
     * largest decimal exponent on this platform */
    unsigned int exponentDigits = 2, expMax = DBL_MAX_10_EXP;
    while( expMax >= 10 ) {
        exponentDigits++;
        expMax /= 10;
    }
    /* sanity check: mantissa digits + exponent digits + sign/point/'e'
     * must fit the static buffer */
    if( !( ( DBL_DIG + exponentDigits + 3 ) < PP_SMALL_BUF_SZ ) ) {
        fprintf( stderr, "ERROR: buffer undersized at %s:%d\n", __FILE__, __LINE__ );
        abort();
    }
    /* 10303-11 needs '.' as the decimal separator; warn and force the "C"
     * locale if something (e.g. Qt) changed LC_NUMERIC.
     * NOTE(review): setlocale() may in principle return NULL, in which case
     * strcmp would crash -- confirm this cannot happen on supported platforms. */
    if( strcmp( "C", lcNumeric ) ) {
        fprintf( stderr, "WARNING: locale has been set to \"%s\", not \"C\" %s", lcNumeric,
                 "(are you calling exppp from Qt?). Incorrect formatting is possible.\n" );
        setlocale( LC_NUMERIC, "C" );
    }
    /* '#' forces a decimal point to be printed; DBL_DIG keeps full precision */
    snprintf( result, PP_SMALL_BUF_SZ, "%#.*g", DBL_DIG, r );
    assert( strlen( result ) < PP_SMALL_BUF_SZ - 1 );
    /* locate the decimal point */
    while( ( *pos != '.' ) && ( *pos != '\0' ) ) {
        pos++;
    }
    if( *pos != '\0' ) {
        /* scan the fraction; firstUnnecessaryDigit marks the start of a
         * trailing run of zeros (reset whenever a non-zero digit follows) */
        char * firstUnnecessaryDigit = NULL;
        pos++;
        while( isdigit( *pos ) ) {
            if( ( *pos == '0' ) && ( firstUnnecessaryDigit == NULL ) ) {
                firstUnnecessaryDigit = pos;
            } else if( *pos != '0' ) {
                firstUnnecessaryDigit = NULL;
            }
            pos++;
        }
        if( ( firstUnnecessaryDigit != NULL ) && ( firstUnnecessaryDigit < pos ) ) {
            if( ( *( firstUnnecessaryDigit - 1 ) == '.' ) && ( *pos == '\0' ) ) {
                /* fraction is all zeros and nothing follows: truncate at the
                 * '.' (drops the whole ".000..." suffix) */
                *( firstUnnecessaryDigit - 1 ) = '\0';
            } else {
                /* shift any exponent suffix left over the trailing zeros */
                memmove( firstUnnecessaryDigit, pos, strlen( pos ) + 1 );
            }
        }
    }
    assert( strlen( result ) < PP_SMALL_BUF_SZ - 1 );
    return result;
#undef PP_SMALL_BUF_SZ
}
<reponame>ONSdigital/dp-collection-api
// Package api_test exercises the collection-events endpoint of the
// collection API using mocked paginator and collection-store dependencies.
package api_test

import (
	"context"
	"encoding/json"
	"github.com/ONSdigital/dp-collection-api/api"
	"github.com/ONSdigital/dp-collection-api/api/mock"
	"github.com/ONSdigital/dp-collection-api/collections"
	"github.com/ONSdigital/dp-collection-api/models"
	"github.com/ONSdigital/dp-collection-api/pagination"
	"github.com/gorilla/mux"
	"github.com/pkg/errors"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

// TestGetEvents covers the happy path: pagination params are read, the store
// is queried with them, and the JSON response carries the expected events.
func TestGetEvents(t *testing.T) {
	Convey("Given a request to GET collection events", t, func() {
		paginator := mockPaginator()
		collectionStore := mockCollectionStore()
		r := httptest.NewRequest("GET", "http://localhost:26000/collections/123/events", nil)
		w := httptest.NewRecorder()

		Convey("When the request is sent to the API", func() {
			api := api.Setup(context.Background(), mux.NewRouter(), paginator, collectionStore)
			api.Router.ServeHTTP(w, r)

			Convey("Then the paginator is called to extract pagination parameters", func() {
				So(len(paginator.ReadPaginationParametersCalls()), ShouldEqual, 1)
			})

			Convey("Then the collection store is called to get collection data", func() {
				So(len(collectionStore.GetCollectionEventsCalls()), ShouldEqual, 1)
				getCollectionsCall := collectionStore.GetCollectionEventsCalls()[0]
				// The query must carry the paginator's limit/offset and the
				// collection ID from the URL path.
				So(getCollectionsCall.QueryParams.Limit, ShouldEqual, limit)
				So(getCollectionsCall.QueryParams.Offset, ShouldEqual, offset)
				So(getCollectionsCall.QueryParams.CollectionID, ShouldEqual, "123")
			})

			Convey("Then the response has the expected status code", func() {
				So(w.Code, ShouldEqual, http.StatusOK)
			})

			Convey("Then the response body should contain the collection events", func() {
				body, err := ioutil.ReadAll(w.Body)
				So(err, ShouldBeNil)

				response := models.EventsResponse{}
				err = json.Unmarshal(body, &response)
				So(err, ShouldBeNil)

				// NOTE(review): TotalCount is asserted twice below; one of
				// the two checks is redundant.
				So(response.TotalCount, ShouldEqual, totalCount)
				So(response.Count, ShouldEqual, len(response.Items))
				So(response.Offset, ShouldEqual, offset)
				So(response.Limit, ShouldEqual, limit)
				So(response.TotalCount, ShouldEqual, totalCount)
				So(response.Items[0].Type, ShouldEqual, "CREATED")
				So(response.Items[0].Email, ShouldEqual, "<EMAIL>")
			})
		})
	})
}

// TestGetEvents_paginationError verifies that a recognised pagination error
// maps to HTTP 400.
func TestGetEvents_paginationError(t *testing.T) {
	Convey("Given a paginator that returns an error", t, func() {
		paginator := &mock.PaginatorMock{
			ReadPaginationParametersFunc: func(r *http.Request) (int, int, error) {
				return 1, 0, pagination.ErrInvalidLimitParameter
			},
		}
		collectionStore := &mock.CollectionStoreMock{}

		Convey("When the request is made to GET collection events", func() {
			r := httptest.NewRequest("GET", "http://localhost:26000/collections/123/events", nil)
			w := httptest.NewRecorder()
			api := api.Setup(context.Background(), mux.NewRouter(), paginator, collectionStore)
			api.Router.ServeHTTP(w, r)

			Convey("Then the paginator is called to extract pagination parameters", func() {
				So(len(paginator.ReadPaginationParametersCalls()), ShouldEqual, 1)
			})

			Convey("Then the expected error code is returned", func() {
				So(w.Code, ShouldEqual, http.StatusBadRequest)
			})
		})
	})
}

// TestGetEvents_collectionStoreError verifies that a store failure maps to
// HTTP 500.
func TestGetEvents_collectionStoreError(t *testing.T) {
	Convey("Given a collection store that returns an error", t, func() {
		paginator := mockPaginator()
		collectionStore := &mock.CollectionStoreMock{
			GetCollectionByIDFunc: func(ctx context.Context, id string, eTagSelector string) (*models.Collection, error) {
				return nil, nil
			},
			GetCollectionEventsFunc: func(ctx context.Context, queryParams collections.EventsQueryParams) ([]models.Event, int, error) {
				return nil, 0, errors.New("store error")
			},
		}

		Convey("When the request is made to GET collection events", func() {
			r := httptest.NewRequest("GET", "http://localhost:26000/collections/123/events", nil)
			w := httptest.NewRecorder()
			api := api.Setup(context.Background(), mux.NewRouter(), paginator, collectionStore)
			api.Router.ServeHTTP(w, r)

			Convey("Then the paginator is called to extract pagination parameters", func() {
				So(len(paginator.ReadPaginationParametersCalls()), ShouldEqual, 1)
			})

			Convey("Then the expected error code is returned", func() {
				So(w.Code, ShouldEqual, http.StatusInternalServerError)
			})
		})
	})
}

// TestGetEvents_internalError verifies that an unrecognised pagination error
// maps to HTTP 500 rather than 400.
func TestGetEvents_internalError(t *testing.T) {
	Convey("Given a paginator that returns an unrecognised error", t, func() {
		paginator := &mock.PaginatorMock{
			ReadPaginationParametersFunc: func(r *http.Request) (int, int, error) {
				return 1, 0, errors.New("unrecognised error")
			},
		}
		collectionStore := &mock.CollectionStoreMock{}

		Convey("When the request is made to GET collection events", func() {
			r := httptest.NewRequest("GET", "http://localhost:26000/collections/123/events", nil)
			w := httptest.NewRecorder()
			api := api.Setup(context.Background(), mux.NewRouter(), paginator, collectionStore)
			api.Router.ServeHTTP(w, r)

			Convey("Then the paginator is called to extract pagination parameters", func() {
				So(len(paginator.ReadPaginationParametersCalls()), ShouldEqual, 1)
			})

			Convey("Then an internal server error is returned", func() {
				So(w.Code, ShouldEqual, http.StatusInternalServerError)
			})
		})
	})
}
Thyroid Tuberculosis: Diagnosis and Treatment Objective: The aim of this study was to report the clinical characteristics and treatment of thyroid tuberculosis (TT). Methods: Over a period of 16 years, 2,426 patients underwent thyroid surgery in surgical department ‘A’ of Ibn Sina Hospital, Rabat, Morocco. Histopathological findings of the resected thyroid specimens were reviewed for evidence of tuberculosis. Results: Eight cases of TT were diagnosed. Five patients had a goiter and 3 patients had an isolated thyroid nodule. In one case, fine-needle aspiration cytology established the diagnosis of TT preoperatively; this patient underwent complete drainage of the abscess. In all other patients, the diagnosis was made only after surgery. All patients additionally received antituberculous drugs for 6 months, and follow-up was satisfactory. Conclusion: TT has no consistent presenting symptoms. Fine-needle aspiration is the best method for diagnosis and, when positive, may make surgery avoidable.
import DialogConfirmation from "../../entities/dialog-confirmation";

// Action type constants handled by the confirmation-dialog reducer.
export const SHOW_CONFIRMATION_DIALOG = 'SHOW_CONFIRMATION_DIALOG'
export const HIDE_CONFIRMATION_DIALOG = 'HIDE_CONFIRMATION_DIALOG'

// Opens the confirmation dialog described by the payload.
interface ShowConfirmationDialogAction {
    type: typeof SHOW_CONFIRMATION_DIALOG;
    payload: DialogConfirmation;
}

// Closes the currently shown confirmation dialog.
interface HideConfirmationDialogAction {
    type: typeof HIDE_CONFIRMATION_DIALOG;
}

// Union of all actions the modal-dialog reducer accepts.
export type ModalDialogActionTypes = ShowConfirmationDialogAction | HideConfirmationDialogAction
import { FormEvent, useState } from 'react'
import { SEO } from 'components/SEO'
import { Success } from 'components/Alerts/Success'
import { Error } from 'components/Alerts/Error'
import { Warning } from 'components/Alerts/Warning'
import Axios from 'axios'
import { SpinnerIcon } from 'components/Icons/Spinner'
import { validateEmailAddress, validateLength } from 'functions/validation'
import { CrispChat } from 'components/Scripts/CrispChat'
import { LayoutSidebar } from 'components/Layout/LayoutSidebar'

// Shape of the JSON payload POSTed to the contact API endpoint.
interface MessageData {
	[key: string]: string
}

// Contact page: SEO metadata, chat widget, intro copy and the contact form.
export default function Contact() {
	return (
		<>
			<SEO
				title="Contact Us"
				description="This page contains a contact form. Use this form if you have questions or suggestions about the content on this site."
				canonical="/contact/"
			/>
			<CrispChat />
			<LayoutSidebar heading="Contact Us" url="/contact/">
				<p>
					We would love to hear from you. Please submit a message using the
					contact form below and we will get back to you as soon as possible.
					You can also send an email directly to{' '}
					<a className="bll" href="mailto:<EMAIL>">
						<EMAIL>
					</a>
					.
				</p>
				<ContactForm />
			</LayoutSidebar>
		</>
	)
}

// Controlled contact form: validates input locally, POSTs to the contact API
// and renders success / error / warning feedback.
function ContactForm() {
	// Controlled field state.
	const [name, setName] = useState('')
	const [email, setEmail] = useState('')
	const [message, setMessage] = useState('')
	const [subject, setSubject] = useState('')
	// 'success' | 'error' | null -- outcome of the last submission.
	const [responseType, setResponseType] = useState<string | null>(null)
	// True while the POST request is in flight (drives the spinner).
	const [loading, setLoading] = useState(false)
	// Local validation message, shown above the form.
	const [warning, setWarning] = useState<string | null>(null)

	const url = '/api/contact/'

	// Validate, then POST the message; clear the fields on success.
	const handleSubmit = async (e: FormEvent) => {
		e.preventDefault()
		setResponseType(null)
		setWarning(null)

		// Client-side validation before hitting the API.
		if (!validateEmailAddress(email)) {
			return setWarning('Please enter a valid email address.')
		}
		if (!validateLength(message, 10)) {
			return setWarning(
				'Your message is too short to be meaningful. Add more details.'
			)
		}

		setLoading(true)
		try {
			const messageData: MessageData = { name, email, subject, message }
			const res = await Axios.post(url, messageData)
			const data = res.data
			if (data.status === 'email_sent') {
				// Reset the form so the user can send another message.
				setResponseType('success')
				setName('')
				setEmail('')
				setSubject('')
				setMessage('')
			} else {
				setResponseType('error')
			}
		} catch (err) {
			console.error({ err })
			setResponseType('error')
		} finally {
			setLoading(false)
		}
	}

	return (
		<div className="bg-white">
			<div className="relative px-2 mx-auto">
				{warning && <Warning message={warning} />}
				<div className="mt-4">
					<form
						action="#"
						method="POST"
						className="space-y-6"
						onSubmit={handleSubmit}
					>
						<div>
							<label
								htmlFor="name"
								className="block text-sm font-medium text-gray-700"
							>
								Name
							</label>
							<div className="mt-1">
								<input
									type="text"
									name="name"
									id="name"
									autoComplete="given-name"
									className="py-3 px-4 block w-full shadow-sm focus:ring-blue-500 focus:border-blue-500 border-gray-300 rounded-md"
									value={name}
									onChange={event => setName(event.target.value)}
								/>
							</div>
						</div>
						<div>
							<label
								htmlFor="email"
								className="block text-sm font-medium text-gray-700"
							>
								Email
							</label>
							<div className="mt-1">
								<input
									name="email"
									type="email"
									id="email"
									autoComplete="email"
									required
									className="py-3 px-4 block w-full shadow-sm focus:ring-blue-500 focus:border-blue-500 border-gray-300 rounded-md"
									value={email}
									onChange={event => setEmail(event.target.value)}
								/>
							</div>
						</div>
						<div>
							<label
								htmlFor="subject"
								className="block text-sm font-medium text-gray-700"
							>
								Subject
							</label>
							<div className="mt-1">
								<input
									name="subject"
									type="text"
									id="subject"
									autoComplete="off"
									className="py-3 px-4 block w-full shadow-sm focus:ring-blue-500 focus:border-blue-500 border-gray-300 rounded-md"
									value={subject}
									onChange={event => setSubject(event.target.value)}
								/>
							</div>
						</div>
						<div>
							<label
								htmlFor="message"
								className="block text-sm font-medium text-gray-700"
							>
								Message
							</label>
							<div className="mt-1">
								<textarea
									id="message"
									name="message"
									required
									rows={4}
									className="py-3 px-4 block w-full shadow-sm focus:ring-blue-500 focus:border-blue-500 border-gray-300 rounded-md"
									value={message}
									onChange={event => setMessage(event.target.value)}
								/>
							</div>
						</div>
						<div className="pt-1">
							<button
								type="submit"
								className="w-full inline-flex items-center justify-center px-6 py-3 border border-transparent rounded-md shadow-sm text-base font-medium text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500"
							>
								{loading ? (
									<>
										<SpinnerIcon />
										Sending...
									</>
								) : (
									'Send Message'
								)}
							</button>
						</div>
					</form>
				</div>
				{responseType && (
					<div className="my-8">
						{responseType === 'success' ? (
							<Success message="Your message was sent successfully. We will get back to you soon." />
						) : (
							<Error message="There was an error sending your message. Please send an email directly to <EMAIL> instead." />
						)}
					</div>
				)}
			</div>
		</div>
	)
}
<filename>src/easymql/meta.py
from pyparsing import ParserElement

# Enable packrat memoization globally for faster recursive-descent parsing.
ParserElement.enablePackrat()


class MetaGrammar(type):
    """Metaclass that lets Grammar *classes* behave like pyparsing elements.

    Operator overloads (``+``, ``|``, ``&``, ``*``, ``-``, ``^``, ``[]``) and
    helper methods are delegated to the class-level ``grammar`` attribute, so
    grammar classes can be composed directly, e.g. ``A + B | C``.
    """

    def __init__(cls, name, bases, dct):
        super(MetaGrammar, cls).__init__(name, bases, dct)
        # Wire up optional per-class hooks; both may legitimately be absent
        # on a given grammar class, hence the AttributeError guards.
        try:
            cls._set_parse_action(cls.action)
        except AttributeError as e:
            pass
        try:
            cls.set_name(cls.name)
        except AttributeError as e:
            pass

    # --- operator delegation to the underlying grammar object -------------

    def __add__(cls, other):
        return cls.grammar.__add__(other)

    def __and__(cls, other):
        return cls.grammar.__and__(other)

    # def __eq__(cls, other):
    #     return cls.grammar.__eq__(other)

    def __getitem__(cls, key):
        return cls.grammar[key]

    def __mul__(cls, other):
        return cls.grammar.__mul__(other)

    # def __ne__(cls, other):
    #     return cls.grammar.__ne__(other)

    def __or__(cls, other):
        return cls.grammar.__or__(other)

    def __radd__(cls, other):
        return cls.grammar.__radd__(other)

    def __rand__(cls, other):
        return cls.grammar.__rand__(other)

    def __req__(cls, other):
        return cls.grammar.__req__(other)

    def __rmul__(cls, other):
        return cls.grammar.__rmul__(other)

    def __rne__(cls, other):
        return cls.grammar.__rne__(other)

    def __ror__(cls, other):
        return cls.grammar.__ror__(other)

    def __rsub__(cls, other):
        return cls.grammar.__rsub__(other)

    def __rxor__(cls, other):
        return cls.grammar.__rxor__(other)

    def __sub__(cls, other):
        return cls.grammar.__sub__(other)

    def __xor__(cls, other):
        return cls.grammar.__xor__(other)

    # --- helper delegation -------------------------------------------------

    def get_adapter_grammar(cls):
        # Delegate to the adapter wrapper held by the grammar.
        return cls.grammar.get_adapter_grammar()

    @property
    def _grammar(cls):
        """Return pyparsing grammar contained in this class travelling from
        MetaGrammar --> Grammar --> Adapter --> PyParsing
        """
        return cls.grammar._grammar

    def ignore(cls, expr):
        return cls._grammar.ignore(expr)

    def _set_parse_action(cls, action):
        # Best-effort: not every grammar supports parse actions.
        try:
            cls.grammar._set_parse_action(action)
        except AttributeError:
            pass

    def parse(cls, string, explode=True):
        return cls.grammar.parse(string, explode)

    def set_name(cls, name):
        # Best-effort: not every grammar supports naming.
        try:
            cls.grammar.set_name(name)
        except AttributeError:
            pass


class Grammar(metaclass=MetaGrammar):
    """Base class for grammar result objects produced by parse actions."""

    def __init__(self, value):
        # The parsed value wrapped by this grammar node.
        self.value = value

    def __repr__(self):
        return f"{self.__class__.__name__}({repr(self.value)})"

    def __eq__(self, other):
        # Equal only to instances of the exact same grammar class with an
        # equal wrapped value.
        return isinstance(other, self.__class__) and self.value == other.value

    def __ne__(self, other):
        return not self == other
// If a range is defined in a deferred block, we can expect all the range // to only cover positions in deferred blocks. Otherwise, a block on the // hot path would be dominated by a deferred block, meaning it is unreachable // without passing through the deferred block, which is contradictory. // In particular, when such a range contributes a result back on the hot // path, it will be as one of the inputs of a phi. In that case, the value // will be transferred via a move in the Gap::END's of the last instruction // of a deferred block. bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() { for (const TopLevelLiveRange* range : live_ranges()) { if (range == nullptr || range->IsEmpty() || !code() ->GetInstructionBlock(range->Start().ToInstructionIndex()) ->IsDeferred()) { continue; } for (const UseInterval* i = range->first_interval(); i != nullptr; i = i->next()) { int first = i->FirstGapIndex(); int last = i->LastGapIndex(); for (int instr = first; instr <= last;) { const InstructionBlock* block = code()->GetInstructionBlock(instr); if (!block->IsDeferred()) return false; instr = block->last_instruction_index() + 1; } } } return true; }
import xlrd
import xlwt  # NOTE(review): appears unused here; xlutils.copy handles writing
from xlutils.copy import copy

# Open the Excel file to be modified (formatting_info=True preserves styles).
old_excel = xlrd.open_workbook('32-案件恢复审调查呈批表.xls', formatting_info=True)
# Copy the read-only workbook into a writable workbook object.
new_excel = copy(old_excel)
# Get the first sheet.
ws = new_excel.get_sheet(0)
# Write data: write(row, column, value). The cell texts below are sample
# labels ("row N, column M") written into the sheet as data.
ws.write(4, 1, '第一行,第一列')
ws.write(5, 1, '第一行,第二列')
ws.write(5, 3, '第一行,第三列')
ws.write(6, 1, '第二行,第一列')
ws.write(6, 3, '第二行,第二列')
ws.write(7, 1, '第二行,第三列')
# Save as a new Excel file under a new name; reusing the original name
# would overwrite the source file.
new_excel.save('new_mcw_test.xls')
<reponame>ComansServices/pinetime-rust-mynewt
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
//  Watch Face for PineTime on Mynewt
#include <assert.h>
#include <os/os.h>
#include <console/console.h>
#include <datetime/datetime.h>
#include <pinetime_lvgl_mynewt/pinetime_lvgl_mynewt.h>  //  This includes "lvgl.h"

int pinetime_lvgl_mynewt_render(void);  //  TODO: Move to pinetime_lvgl_mynewt.h

//  Forward declaration of the periodic timer callback.
static void watch_face_callback(struct os_event *ev);

static lv_obj_t *btn;    //  Button
static lv_obj_t *label;  //  Label (shows the formatted time)
static struct os_callout watch_face_callout;  //  Timer that is triggered every minute

/// Render a watch face. Called by main() in rust/app/src/lib.rs
int create_watch_face(void) {
    console_printf("Create watch face...\n"); console_flush();
    btn = lv_btn_create(lv_scr_act(), NULL);  //  Add a button the current screen
    lv_obj_set_pos(btn, 10, 10);              //  Set its position
    lv_obj_set_size(btn, 220, 50);            //  Set its size

    label = lv_label_create(btn, NULL);       //  Add a label to the button
    lv_label_set_text(label, "Time Sync");    //  Set the label text

    //  Set a timer to update the watch face every minute
    //  TODO: Move this code to the caller
    os_callout_init(
        &watch_face_callout,   //  Timer for the watch face
        os_eventq_dflt_get(),  //  Use default event queue
        watch_face_callback,   //  Callback function for the timer
        NULL
    );

    //  Trigger the timer in 60 seconds
    os_callout_reset(
        &watch_face_callout,   //  Timer for the watch face
        OS_TICKS_PER_SEC * 60  //  Trigger timer in 60 seconds
    );
    return 0;
}

/// Update the watch face. Returns 0 on success, a distinct non-zero code for
/// each failure step.
int update_watch_face(void) {
    console_printf("Update watch face...\n"); console_flush();
    //  If button or label not created, quit
    if (btn == NULL || label == NULL) { return 1; }

    //  Get the system time
    struct os_timeval tv;
    struct os_timezone tz;
    int rc = os_gettimeofday(&tv, &tz);
    if (rc != 0) { console_printf("Can't get time: %d\n", rc); return 2; }

    //  Convert the time
    //  NOTE(review): ct is only used to validate the conversion; the label is
    //  formatted from tv/tz below -- confirm the conversion check is intended.
    struct clocktime ct;
    rc = timeval_to_clocktime(&tv, &tz, &ct);
    if (rc != 0) { console_printf("Can't convert time: %d\n", rc); return 3; }

    //  Format the time as 2020-10-04T13:20:26.839843+00:00
    char buf[50];
    rc = datetime_format(&tv, &tz, buf, sizeof(buf));
    if (rc != 0) { console_printf("Can't format time: %d\n", rc); return 4; }

    //  Truncate after minute: 2020-10-04T13:20
    buf[16] = 0;

    //  Set the label text
    lv_label_set_text(label, buf);
    return 0;
}

/// Timer callback that is called every minute
static void watch_face_callback(struct os_event *ev) {
    assert(ev != NULL);

    //  Update the watch face
    update_watch_face();

    //  Render the watch face
    pinetime_lvgl_mynewt_render();

    //  Re-arm the timer so the face keeps updating every minute
    os_callout_reset(
        &watch_face_callout,   //  Timer for the watch face
        OS_TICKS_PER_SEC * 60  //  Trigger timer in 60 seconds
    );
}

#ifdef NOTUSED
Debug Log:
TMP create temp_stub_0
NET hwid 4a f8 cf 95 6a be c1 f6 89 ba 12 1a
NET standalone node
Init LVGL...
Create watch face...
Render LVGL display...
Flush display: left=0, top=0, right=239, bottom=9...
Flush display: left=0, top=10, right=239, bottom=19...
Flush display: left=0, top=20, right=239, bottom=29...
Flush display: left=0, top=30, right=239, bottom=39...
Flush display: left=0, top=40, right=239, bottom=49...
Flush display: left=0, top=50, right=239, bottom=59...
Flush display: left=0, top=60, right=239, bottom=69...
Flush display: left=0, top=70, right=239, bottom=79...
Flush display: left=0, top=80, right=239, bottom=89...
Flush display: left=0, top=90, right=239, bottom=99...
Remote PineTime Log
Flush display: left=0, top=100, right=239, bottom=109...
Flush display: left=0, top=110, right=239, bottom=119...
Flush display: left=0, top=120, right=239, bottom=129...
Flush display: left=0, top=130, right=239, bottom=139...
Flush display: left=0, top=140, right=239, bottom=149...
Flush display: left=0, top=150, right=239, bottom=159...
Flush display: left=0, top=160, right=239, bottom=169...
Flush display: left=0, top=170, right=239, bottom=179...
Flush display: left=0, top=180, right=239, bottom=189...
Flush display: left=0, top=190, right=239, bottom=199...
Flush display: left=0, top=200, right=239, bottom=209...
Flush display: left=0, top=210, right=239, bottom=219...
Flush display: left=0, top=220, right=239, bottom=229...
Flush display: left=0, top=230, right=239, bottom=239...
Starting BLE...
BLE started
Render LVGL display...
Remote PineTime Log
Flush display: left=63, top=27, right=196, bottom=42...
Remote PineTime Log connection established; status=0 handle=1 our_ota_addr_type=1 our_ota_addr= our_id_addr_type=1 our_id_addr= peer_ota_addr_type=1 peer_ota_addr= peer_id_addr_type=1 peer_id_addr= conn_itvl=36 conn_latency=0 supervision_timeout=500 encrypted=0 authenticated=0 bonded=0 connection updated; status=0 handle=1 our_ota_addr_type=1 our_ota_addr= our_id_addr_type=1 our_id_addr= peer_ota_addr_type=1 peer_ota_addr= peer_id_addr_type=1 peer_id_addr= conn_itvl=6 conn_latency=0 supervision_timeout=500 encrypted=0 authenticated=0 bonded=0 Service discovery complete; status=0 conn_handle=1 Read complete; status=0 conn_handle=1 attr_handle=67 value=e4 07 0a 04 0e 05 29 07 87 00 Current Time: 2020-10-04T14:05:41.527343+00:00 connection updated; status=0 handle=1 our_ota_addr_type=1 our_ota_addr= our_id_addr_type=1 our_id_addr= peer_ota_addr_type=1 peer_ota_addr= peer_id_addr_type=1 peer_id_addr= conn_itvl=36 conn_latency=0 supervision_timeout=500 encrypted=0 authenticated=0 bonded=0 disconnect; reason=531 handle=1 our_ota_addr_type=1 our_ota_addr= our_id_addr_type=1 our_id_addr= peer_ota_addr_type=1 peer_ota_addr= peer_id_addr_type=1 peer_id_addr= conn_itvl=36 conn_latency=0 supervision_timeout=500 encrypted=0 authenticated=0 bonded=0 Remote PineTime Log Render LVGL display... Remote PineTime Log Flush display: left=60, top=27, right=183, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=59, top=27, right=181, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=59, top=27, right=180, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=59, top=27, right=181, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=59, top=27, right=180, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=59, top=27, right=180, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=61, top=27, right=178, bottom=42... 
Remote PineTime Log Render LVGL display... Flush display: left=62, top=27, right=179, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=62, top=27, right=178, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=61, top=27, right=179, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=61, top=27, right=178, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=61, top=27, right=179, bottom=42... Remote PineTime Log Render LVGL display... Flush display: left=61, top=27, right=178, bottom=42... Remote PineTime Log #endif // NOTUSED
USA Today Updates from Sunday, March 2 Steve Popper of NorthJersey.com confirms Iman Shumpert will play against the Bulls: Updates from Thursday, Feb. 20 Ken Berger of CBS Sports provides an update on Iman Shumpert's status after undergoing an MRI: Marc Stein of ESPN had more on Shumpert's injury and a timetable for his return: The Knicks later updated Shumpert's status after his MRI: Original Text New York Knicks shooting guard Iman Shumpert has sustained a knee injury and will not return against the New Orleans Pelicans. Frank Isola from the New York Daily News first tweeted that Shumpert suffered a sprained left knee on Wednesday night and would not return to the game: After New York's 98-91 win, the Knicks PR account provided an update: Marc Berman from the New York Post provided a statement from Shumpert discussing the knee injury: “They said my ACL is fine,” Shumpert said. “That was my biggest concern. They didn’t seem to think it was bad. I just hope it’s not.” Yahoo! Sports' Adrian Wojnarowski added more detail about the injury: As Berman points out, this is the same knee that Shumpert had ACL surgery on, which makes the current trade rumors surrounding him even more interesting: Given the team's current roster, the Knicks will likely be calling upon players like Metta World Peace and J.R. Smith to play additional minutes. While the 23-year-old Shumpert has shown promise in his first three years in the NBA, this season hasn't been ideal for the former Georgia Tech Yellow Jacket. Through 51 games, he's averaged just 7.0 points per game while shooting just 38.1 percent from the floor and 34.7 percent from behind the arc. According to Adrian Wojnarowski from Yahoo! Sports, the Los Angeles Clippers are attempting to trade for Shumpert, but this knee injury could make things more interesting if he is forced to miss time.
import torch
from torchvision.utils import save_image

from loss import loss_function


def test(args, model, test_loader, device, epoch):
    """Evaluate the VAE on the test set for one epoch.

    Runs the model over ``test_loader`` without gradient tracking,
    accumulates the total VAE loss, and for the first batch saves a
    side-by-side image of inputs vs. reconstructions to
    ``../results/reconstruction_<epoch>.png``.

    Args:
        args: Namespace carrying ``batch_size`` (kept for interface
            compatibility; the batch's own shape is used for reshaping).
        model: VAE whose forward returns ``(reconstruction, mu, logvar)``.
        test_loader: DataLoader yielding ``(data, target)`` batches;
            targets are ignored. Assumes images are (N, 1, 28, 28) MNIST —
            TODO confirm against the training script.
        device: Device to run evaluation on.
        epoch: Epoch number, used only in the output filename.
    """
    model.eval()
    test_loss = 0
    with torch.no_grad():
        for i, (data, _) in enumerate(test_loader):
            data = data.to(device)
            recon_batch, mu, logvar = model(data)
            test_loss += loss_function(recon_batch, data, mu, logvar).item()
            if i == 0:
                # Compare the first up-to-8 inputs with their reconstructions.
                # Reshape by the batch's own shape (not args.batch_size) so a
                # first batch smaller than batch_size does not crash view().
                n = min(data.size(0), 8)
                comparison = torch.cat(
                    [data[:n], recon_batch.view_as(data)[:n]])
                save_image(comparison.cpu(),
                           '../results/reconstruction_' + str(epoch) + '.png',
                           nrow=n)
    # Report the average loss per test example
    test_loss /= len(test_loader.dataset)
    print('====> Test set loss: {:.4f}'.format(test_loss))
package org.jentrata.ebms.messaging.internal;

import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.commons.io.IOUtils;
import org.hamcrest.Matchers;
import org.jentrata.ebms.EbmsConstants;
import org.jentrata.ebms.MessageStatusType;
import org.jentrata.ebms.messaging.MessageStore;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.is;

/**
 * Unit Test for org.jentrata.ebms.messaging.internal.FileMessageStore
 *
 * @author aaronwalker
 */
public class FileMessageStoreTest extends CamelTestSupport {

    private FileMessageStore messageStore;
    private String baseDir;

    /**
     * Storing a message must echo back the message id and make the
     * message retrievable by id from the store.
     */
    @Test
    public void testFileMessageStore() throws IOException {
        Exchange request = new DefaultExchange(context());
        request.getIn().setHeader(EbmsConstants.EBMS_VERSION, EbmsConstants.EBMS_V3);
        request.getIn().setHeader(EbmsConstants.MESSAGE_ID, "testMessageID");
        request.getIn().setBody(new ByteArrayInputStream("test".getBytes()));
        Exchange response = context().createProducerTemplate().send(MessageStore.DEFAULT_MESSAGE_STORE_ENDPOINT, request);
        String msgId = response.getIn().getHeader(MessageStore.JENTRATA_MESSAGE_ID, String.class);
        assertThat(msgId, equalTo("testMessageID"));
        assertThat(messageStore.findByMessageId(msgId, EbmsConstants.MESSAGE_DIRECTION_INBOUND).getMessageId(), equalTo(msgId));
    }

    /**
     * Updating a message's status must write a "{messageId}.{STATUS}" file
     * containing the status description.
     */
    @Test
    public void testUpdateMessageInMessageStore() throws Exception {
        messageStore.updateMessage("testMessageID", EbmsConstants.MESSAGE_DIRECTION_INBOUND,
                MessageStatusType.RECEIVED, "Received");
        File expectedFile = new File(baseDir, "testMessageID.RECEIVED");
        assertThat(expectedFile.exists(), is(true));
        // Actual value first, expectation inside the matcher (was reversed,
        // which produced misleading failure messages); close the stream to
        // avoid leaking the file handle.
        FileInputStream in = new FileInputStream(expectedFile);
        try {
            assertThat(IOUtils.toString(in), equalTo("Received"));
        } finally {
            in.close();
        }
    }

    /**
     * A stored payload must be retrievable by message id.
     */
    @Test
    public void testFindPayloadById() throws Exception {
        testFileMessageStore();
        InputStream stream = messageStore.findPayloadById("testMessageID");
        try {
            assertThat(IOUtils.toString(stream), equalTo("test"));
        } finally {
            stream.close();  // don't leak the payload stream
        }
    }

    /**
     * Routes the default message store endpoint to a FileMessageStore
     * rooted in the system temp directory.
     */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        messageStore = new FileMessageStore();
        baseDir = System.getProperty("java.io.tmpdir");
        messageStore.setBaseDir(baseDir);
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from(MessageStore.DEFAULT_MESSAGE_STORE_ENDPOINT)
                        .bean(messageStore, "store")
                        .routeId("_jentrataMessageStoreTest");
            }
        };
    }
}
package session

import (
	"bytes"
	"encoding/hex"
	"io"
	"math/rand"
	"strings"
	"testing"
	"testing/iotest"
)

// TestUFormat tests all 6 function compositions.
func TestUFormat(t *testing.T) {
	// Each entry pairs a U-frame function bit pattern with its expected
	// human-readable name and the hex encoding of its 6-byte APCI serial.
	// The last entry combines two functions to exercise the illegal case.
	var golden = []struct {
		f            function
		name, serial string
	}{
		{bringUp, "STARTDT_ACT", "680407000000"},
		{bringUpOK, "STARTDT_CON", "68040b000000"},
		{bringDown, "STOPDT_ACT", "680413000000"},
		{bringDownOK, "STOPDT_CON", "680423000000"},
		{keepAlive, "TESTFR_ACT", "680443000000"},
		{keepAliveOK, "TESTFR_CON", "680483000000"},
		{bringUp | bringDown, "<illegal 0x14>", "680417000000"},
	}

	for _, gold := range golden {
		u := newFunc(gold.f)

		// Classification, name and raw bytes must all round-trip.
		if got := u.Format(); got != uFrame {
			t.Errorf("%s(%s): got %c-frame", gold.serial, gold.name, got)
		}
		if got := u.Function().String(); got != gold.name {
			t.Errorf("%s(%s): got function %q", gold.serial, gold.name, got)
		}
		if got := hex.EncodeToString(u[:]); !strings.HasPrefix(got, gold.serial) {
			t.Errorf("%s(%s): got serial 0x%s", gold.serial, gold.name, got)
		}

		// Marshalling must write exactly the 6 APCI bytes.
		var buf bytes.Buffer
		if n, err := u.Marshal(&buf, 0); n != 6 || err != nil {
			t.Errorf("%s(%s): marshall returned (%d, %#v)", gold.serial, gold.name, n, err)
		}
		if got := hex.EncodeToString(buf.Bytes()); got != gold.serial {
			t.Errorf("%s(%s): marshalled 0x%s", gold.serial, gold.name, got)
		}
	}
}

// TestSFormat tests all sequence number acknowledges.
func TestSFormat(t *testing.T) {
	// Exhaustively cover the 15-bit sequence number space.
	for seqNo := uint(0); seqNo < 1<<15; seqNo++ {
		u := newAck(seqNo)
		if got := u.RecvSeqNo(); got != seqNo {
			t.Fatalf("got sequence number %d, want %d", got, seqNo)
		}
		if got := u.Format(); got != sFrame {
			t.Fatalf("acknowledge %d %c-frame", seqNo, got)
		}

		// An S-frame always marshals to 6 bytes starting 68 04 01 00.
		var buf bytes.Buffer
		if n, err := u.Marshal(&buf, 0); n != 6 || err != nil {
			t.Fatalf("acknowledge %d marshall returned (%d, %#v)", seqNo, n, err)
		}
		if got := hex.EncodeToString(buf.Bytes()); !strings.HasPrefix(got, "68040100") {
			t.Fatalf("acknowledge %d marshalled 0x%s", seqNo, got)
		}
	}
}

// TestIFormat tests all sequence numbers with ASDU payloads.
func TestIFormat(t *testing.T) {
	// NOTE(review): this u is shadowed by the := inside the loop below, so
	// the junk fill never reaches the assertions — presumably intended to
	// show packASDU overwrites a dirty buffer; confirm before removing.
	var u apdu
	rand.Read(u[:]) // populate with junk

	// feed holds 249 distinct non-zero bytes used as ASDU payload material.
	var feed [249]byte
	for i := range feed {
		feed[i] = byte(i + 1)
	}

	// Exhaustively cover the 15-bit sequence number space, varying the
	// payload length with the sequence number.
	for seqNo := uint(0); seqNo < 1<<15; seqNo++ {
		seqNo2 := (seqNo + 1) % (1 << 15)
		asduLen := int(seqNo) % len(feed)
		u, err := packASDU(feed[:asduLen], seqNo, seqNo2)
		if err != nil {
			t.Fatal("ASDU wrap error:", err)
		}

		// Both sequence numbers must round-trip.
		if got := u.SendSeqNo(); got != seqNo {
			t.Fatalf("got send sequence number %d, want %d", got, seqNo)
		}
		if got := u.RecvSeqNo(); got != seqNo2 {
			t.Fatalf("got receive sequence number %d, want %d", got, seqNo2)
		}

		// The payload must keep its length, and (when non-empty) its last
		// byte is feed[asduLen-1] == asduLen by construction of feed.
		got := u.Payload()
		if len(got) != asduLen || (len(got) != 0 && int(got[len(got)-1]) != asduLen) {
			t.Fatalf("want %d byte payload, got %#x", asduLen, got)
		}
		if got := u.Format(); got != iFrame {
			t.Error("got ", got)
		}
	}
}

// goldenUnmarshals maps raw hex input to the error Unmarshal must return
// (nil for well-formed frames). Covers truncation at every byte position,
// bad start bytes and out-of-range length octets.
var goldenUnmarshals = []struct {
	hex string
	err error
}{
	{"", io.EOF},
	{"79", errStart},
	{"68", io.ErrUnexpectedEOF},
	{"6800", errLength},
	{"6801", errLength},
	{"68fe", errLength},
	{"680100", errLength},
	{"680200", errLength},
	{"68020000", errLength},
	{"68030000", errLength},
	{"6803000000", errLength},
	{"6804", io.ErrUnexpectedEOF},
	{"680400", io.ErrUnexpectedEOF},
	{"68040000", io.ErrUnexpectedEOF},
	{"6804000000", io.ErrUnexpectedEOF},
	{"680400000000", nil},
	{"68040100ffff", nil},
	{"68050100ffffee", nil},
	{"680407000000", nil},
	{"570400000000", errStart},
	{"68fd" + strings.Repeat("00", 253), nil},
}

// TestUnmarshals runs the goldenUnmarshals table; the one-byte reader
// forces Unmarshal to cope with short reads.
func TestUnmarshals(t *testing.T) {
	for _, gold := range goldenUnmarshals {
		serial, err := hex.DecodeString(gold.hex)
		if err != nil {
			t.Fatal(err)
		}

		var u apdu
		n, err := u.Unmarshal(iotest.OneByteReader(bytes.NewReader(serial)), 0)
		if gold.err != nil {
			if err != gold.err {
				t.Errorf("%s: got error %#v, want %#v", gold.hex, err, gold.err)
			}
			continue
		}
		if n != len(serial) {
			t.Errorf("%s: unmarshaled %d bytes", gold.hex, n)
		}
		if err != nil {
			t.Errorf("%s: unmarshal error: %s", gold.hex, err)
		}
	}
}
Former Cobb County Republican Party leader Joseph Dendy was given a life sentence after pleading guilty Friday to sexually abusing children. Dendy, 72, must serve 30 years of that sentence in prison. Dendy admitted that he had committed repeated sexual offenses against two boys, according to a statement from Cobb District Attorney Vic Reynolds. The crimes occurred between 2004 and 2011 at Dendy’s home, his church, and in a store dressing room, the statement said. One victim, now an adult, told the court that he has nightmares of the abuse and became a military police officer to help other victims. “I’m stronger than you,” he told Dendy. Dendy served four years as chair of the Cobb County GOP. His second, two-year term ended in 2015. The defendant’s guilty plea came as a surprise during what was to be a pretrial hearing. Prosecutors were expected to call several additional witnesses who said they had also been abused. Dendy was able to hide behind his "public persona as a prominent community leader," weaving a "path of destruction through his family for decades,” prosecutor Susan Treadaway was quoted saying in the statement. “That ends today.” Cobb Superior Court Judge Kimberly Childs presided over the case. Dendy’s attorney, Brian Steel, did not return a message left at his office. Dendy has been in custody since his arrest in May 2016.
#include <iostream> #include "data_processing_advanced_instruction.h" namespace Instructions { DataProcessingAdvancedInstruction::DataProcessingAdvancedInstruction( IntegerType inst ) {} void DataProcessingAdvancedInstruction::print_to(std::ostream &out) const { out << "TODO: Data processing advanced instruction"; } }
/**
 * Loads the main scene, replacing whatever scene was loaded before.
 * <p>
 * Configures the scene with game-play systems (bounds, sound, main page,
 * physics) plus a shape renderer hooked into the render batching system,
 * then loads the scene synchronously.
 *
 * @see #unloadPreviousScene() called first to unload the previous scene
 *      from the VisAssetManager
 */
public void loadMainScene () {
    unloadPreviousScene();

    SceneLoader.SceneParameter parameter = new SceneLoader.SceneParameter();
    parameter.config.disable(SceneFeature.BOX2D_DEBUG_RENDER_SYSTEM);
    parameter.config.addSystem(BoundsCreator.class);
    parameter.config.addSystem(BoundsUpdater.class);
    parameter.config.addSystem(SoundEffectSystem.class);
    parameter.config.addSystem(MainPageSystem.class);
    // HIGH priority — presumably so physics ticks before dependent systems
    // each frame; confirm against SceneConfig.Priority semantics.
    parameter.config.addSystem(PhysicsUpdateSystem.class, SceneConfig.Priority.HIGH);
    parameter.config.addSystem(PhysicsManager.class, SceneConfig.Priority.HIGH);
    // ShapeRendererSystem needs the RenderBatchingSystem instance, which only
    // exists once the engine configuration is built, hence the provider.
    parameter.config.addSystem(new SystemProvider() {
        @Override
        public BaseSystem create(EntityEngineConfiguration config, RuntimeContext context, SceneData data) {
            return new ShapeRendererSystem(config.getSystem(RenderBatchingSystem.class));
        }
    });

    scene = manager.loadSceneNow(scenePath, parameter);
}