package ssh
import (
"errors"
"io"
"net"
)
// streamLocalChannelOpenDirectMsg is a struct used for SSH_MSG_CHANNEL_OPEN message
// with "direct-streamlocal@openssh.com" string.
//
// See openssh-portable/PROTOCOL, section 2.4. connection: Unix domain socket forwarding
// https://github.com/openssh/openssh-portable/blob/master/PROTOCOL#L235
type streamLocalChannelOpenDirectMsg struct {
socketPath string
reserved0 string
reserved1 uint32
}
// forwardedStreamLocalPayload is a struct used for SSH_MSG_CHANNEL_OPEN message
// with "forwarded-streamlocal@openssh.com" string.
type forwardedStreamLocalPayload struct {
SocketPath string
Reserved0 string
}
// streamLocalChannelForwardMsg is a struct used for SSH2_MSG_GLOBAL_REQUEST message
// with "streamlocal-forward@openssh.com"/"cancel-streamlocal-forward@openssh.com" string.
type streamLocalChannelForwardMsg struct {
socketPath string
}
// ListenUnix is similar to ListenTCP but uses a Unix domain socket.
func (c *Client) ListenUnix(socketPath string) (net.Listener, error) {
c.handleForwardsOnce.Do(c.handleForwards)
m := streamLocalChannelForwardMsg{
socketPath,
}
// send message
	ok, _, err := c.SendRequest("streamlocal-forward@openssh.com", true, Marshal(&m))
if err != nil {
return nil, err
}
if !ok {
		return nil, errors.New("ssh: streamlocal-forward@openssh.com request denied by peer")
}
ch := c.forwards.add(&net.UnixAddr{Name: socketPath, Net: "unix"})
return &unixListener{socketPath, c, ch}, nil
}
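// listenUnixExample is an illustrative sketch, not part of the original file: it
// shows one way a caller might use ListenUnix to serve connections forwarded from
// a remote Unix socket. The socket path and the drain-only handler are hypothetical.
func listenUnixExample(c *Client) error {
	l, err := c.ListenUnix("/var/run/example.sock") // hypothetical remote socket path
	if err != nil {
		return err
	}
	defer l.Close()
	for {
		conn, err := l.Accept() // each forwarded stream arrives as a net.Conn
		if err != nil {
			return err
		}
		go func(conn net.Conn) {
			defer conn.Close()
			io.Copy(io.Discard, conn) // placeholder handler: drain and close
		}(conn)
	}
}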
func (c *Client) dialStreamLocal(socketPath string) (Channel, error) {
msg := streamLocalChannelOpenDirectMsg{
socketPath: socketPath,
}
	ch, in, err := c.OpenChannel("direct-streamlocal@openssh.com", Marshal(&msg))
if err != nil {
return nil, err
}
go DiscardRequests(in)
return ch, err
}
type unixListener struct {
socketPath string
conn *Client
in <-chan forward
}
// Accept waits for and returns the next connection to the listener.
func (l *unixListener) Accept() (net.Conn, error) {
s, ok := <-l.in
if !ok {
return nil, io.EOF
}
ch, incoming, err := s.newCh.Accept()
if err != nil {
return nil, err
}
go DiscardRequests(incoming)
return &chanConn{
Channel: ch,
laddr: &net.UnixAddr{
Name: l.socketPath,
Net: "unix",
},
raddr: &net.UnixAddr{
Name: "@",
Net: "unix",
},
}, nil
}
// Close closes the listener.
func (l *unixListener) Close() error {
// this also closes the listener.
l.conn.forwards.remove(&net.UnixAddr{Name: l.socketPath, Net: "unix"})
m := streamLocalChannelForwardMsg{
l.socketPath,
}
	ok, _, err := l.conn.SendRequest("cancel-streamlocal-forward@openssh.com", true, Marshal(&m))
if err == nil && !ok {
		err = errors.New("ssh: cancel-streamlocal-forward@openssh.com failed")
}
return err
}
// Addr returns the listener's network address.
func (l *unixListener) Addr() net.Addr {
return &net.UnixAddr{
Name: l.socketPath,
Net: "unix",
}
}
/*
* Copyright (c) 2016 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Foundation
import HubFramework
/**
* A header component that applies a parallax-like effect to its background image when scrolled
*
* This component is compatible with the following model data:
*
* - title
* - backgroundImageData
*/
class HeaderComponent: HUBComponentContentOffsetObserver, HUBComponentWithImageHandling, HUBComponentViewObserver {
var view: UIView?
private lazy var imageView = UIImageView()
private lazy var titleLabel = UILabel()
private var minimumHeight: CGFloat { return 64 }
private var maximumHeight: CGFloat { return 250 }
private var minimumFontSize: CGFloat { return 18 }
private var maximumFontSize: CGFloat { return 30 }
var layoutTraits: Set<HUBComponentLayoutTrait> {
return [.fullWidth, .stackable]
}
func loadView() {
imageView.alpha = 0.6
titleLabel.textColor = .white
let containerView = UIView()
containerView.clipsToBounds = true
containerView.backgroundColor = .darkGray
containerView.accessibilityIdentifier = "header"
containerView.addSubview(imageView)
containerView.addSubview(titleLabel)
view = containerView
}
func preferredViewSize(forDisplaying model: HUBComponentModel, containerViewSize: CGSize) -> CGSize {
return CGSize(width: containerViewSize.width, height: maximumHeight)
}
func prepareViewForReuse() {
imageView.image = nil
}
func configureView(with model: HUBComponentModel, containerViewSize: CGSize) {
titleLabel.text = model.title
}
func updateView(forChangedContentOffset contentOffset: CGPoint) {
guard let view = view else {
return
}
view.frame.size.height = max(maximumHeight - contentOffset.y, minimumHeight)
let relativeHeight = view.frame.height / maximumHeight
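        // Added note: when contentOffset.y is negative (the header is pulled down past the
        // top), relativeHeight exceeds 1, so the background image is scaled up and re-centred
        // to create the parallax/zoom effect; otherwise the image keeps its full maximumHeight
        // anchored at the origin and is clipped as the header collapses.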
if relativeHeight > 1 {
let imageViewSize = CGSize(width: view.frame.width * relativeHeight, height: view.frame.height)
imageView.bounds = CGRect(origin: CGPoint(), size: imageViewSize)
imageView.center = view.center
} else {
let imageViewSize = CGSize(width: view.frame.width, height: maximumHeight)
imageView.frame = CGRect(origin: CGPoint(), size: imageViewSize)
}
var fontSize = maximumFontSize * relativeHeight
if fontSize > maximumFontSize {
fontSize = maximumFontSize
} else if fontSize < minimumFontSize {
fontSize = minimumFontSize
}
titleLabel.font = .boldSystemFont(ofSize: fontSize)
}
func preferredSizeForImage(from imageData: HUBComponentImageData, model: HUBComponentModel, containerViewSize: CGSize) -> CGSize {
return preferredViewSize(forDisplaying: model, containerViewSize: containerViewSize)
}
func updateView(forLoadedImage image: UIImage, from imageData: HUBComponentImageData, model: HUBComponentModel, animated: Bool) {
imageView.setImage(image, animated: animated)
}
func viewDidResize() {
titleLabel.sizeToFit()
titleLabel.center = view!.center
let minimumTitleLabelCenterY = (minimumHeight + minimumFontSize) / 2
if titleLabel.center.y < minimumTitleLabelCenterY {
titleLabel.center.y = minimumTitleLabelCenterY
}
}
func viewWillAppear() {
// No-op
}
}
<?php
namespace wcf\data\object\type\definition;
use wcf\data\AbstractDatabaseObjectAction;
/**
* Executes object type definition-related actions.
*
* @author Alexander Ebert
* @copyright 2001-2019 WoltLab GmbH
* @license GNU Lesser General Public License <http://opensource.org/licenses/lgpl-license.php>
* @package WoltLabSuite\Core\Data\Object\Type\Definition
*
* @method ObjectTypeDefinition create()
* @method ObjectTypeDefinitionEditor[] getObjects()
* @method ObjectTypeDefinitionEditor getSingleObject()
*/
class ObjectTypeDefinitionAction extends AbstractDatabaseObjectAction {
/**
* @inheritDoc
*/
protected $className = ObjectTypeDefinitionEditor::class;
}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.5.0_08) on Tue Jun 03 16:14:37 GMT-05:00 2008 -->
<TITLE>
AntTask.PhaseOptbb_ule (Soot API)
</TITLE>
<META NAME="keywords" CONTENT="soot.AntTask.PhaseOptbb_ule class">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
parent.document.title="AntTask.PhaseOptbb_ule (Soot API)";
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/AntTask.PhaseOptbb_ule.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../soot/AntTask.PhaseOptbb_pho.html" title="class in soot"><B>PREV CLASS</B></A>
<A HREF="../soot/AntTask.PhaseOptbop.html" title="class in soot"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../index.html?soot/AntTask.PhaseOptbb_ule.html" target="_top"><B>FRAMES</B></A>
<A HREF="AntTask.PhaseOptbb_ule.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
soot</FONT>
<BR>
Class AntTask.PhaseOptbb_ule</H2>
<PRE>
<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html" title="class or interface in java.lang">java.lang.Object</A>
<IMG SRC="../resources/inherit.gif" ALT="extended by "><B>soot.AntTask.PhaseOptbb_ule</B>
</PRE>
<DL>
<DT><B>Enclosing class:</B><DD><A HREF="../soot/AntTask.html" title="class in soot">AntTask</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public class <B>AntTask.PhaseOptbb_ule</B><DT>extends <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html" title="class or interface in java.lang">Object</A></DL>
</PRE>
<P>
<HR>
<P>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../soot/AntTask.PhaseOptbb_ule.html#AntTask.PhaseOptbb_ule()">AntTask.PhaseOptbb_ule</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../soot/AntTask.PhaseOptbb_ule.html#setenabled(boolean)">setenabled</A></B>(boolean arg)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html" title="class or interface in java.lang">Object</A></B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#clone()" title="class or interface in java.lang">clone</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#equals(java.lang.Object)" title="class or interface in java.lang">equals</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#finalize()" title="class or interface in java.lang">finalize</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#getClass()" title="class or interface in java.lang">getClass</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#hashCode()" title="class or interface in java.lang">hashCode</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#notify()" title="class or interface in java.lang">notify</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#notifyAll()" title="class or interface in java.lang">notifyAll</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#toString()" title="class or interface in java.lang">toString</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#wait()" title="class or interface in java.lang">wait</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#wait(long)" title="class or interface in java.lang">wait</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html#wait(long, int)" title="class or interface in java.lang">wait</A></CODE></TD>
</TR>
</TABLE>
<P>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="AntTask.PhaseOptbb_ule()"><!-- --></A><H3>
AntTask.PhaseOptbb_ule</H3>
<PRE>
public <B>AntTask.PhaseOptbb_ule</B>()</PRE>
<DL>
</DL>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="setenabled(boolean)"><!-- --></A><H3>
setenabled</H3>
<PRE>
public void <B>setenabled</B>(boolean arg)</PRE>
<DL>
<DD><DL>
</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/AntTask.PhaseOptbb_ule.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../soot/AntTask.PhaseOptbb_pho.html" title="class in soot"><B>PREV CLASS</B></A>
<A HREF="../soot/AntTask.PhaseOptbop.html" title="class in soot"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../index.html?soot/AntTask.PhaseOptbb_ule.html" target="_top"><B>FRAMES</B></A>
<A HREF="AntTask.PhaseOptbb_ule.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
/*
* Copyright (c) 1996, 1999, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.io;
/**
* @author Limin Shi
*/
public class CharToByteEUC_JP extends CharToByteJIS0208 {
CharToByteJIS0201 cbJIS0201 = new CharToByteJIS0201();
CharToByteJIS0212 cbJIS0212 = new CharToByteJIS0212();
public String getCharacterEncoding() {
return "EUC_JP";
}
protected int convSingleByte(char inputChar, byte[] outputByte) {
byte b;
if (inputChar == 0) {
outputByte[0] = (byte)0;
return 1;
}
if ((b = cbJIS0201.getNative(inputChar)) == 0)
return 0;
if (b > 0 && b < 128) {
outputByte[0] = b;
return 1;
}
outputByte[0] = (byte)0x8E;
outputByte[1] = b;
return 2;
}
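    // Note (added for clarity, not in the original source): EUC-JP encodes a
    // JIS X 0208 code point by setting the high bit of both bytes (the + 0x8080
    // below), while a JIS X 0212 code point carries the + 0x8F8080 offset so that
    // convert() can emit the single-shift prefix byte 0x8F before the two bytes.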
protected int getNative(char ch) {
int offset = index1[((ch & 0xff00) >> 8 )] << 8;
int r = index2[offset >> 12].charAt((offset & 0xfff) + (ch & 0xff));
if (r != 0)
return r + 0x8080;
r = cbJIS0212.getNative(ch);
if (r == 0)
return r;
return r + 0x8F8080;
}
/**
* Converts characters to sequences of bytes.
* Conversions that result in Exceptions can be restarted by calling
* convert again, with appropriately modified parameters.
     * @return the number of bytes written to the output buffer.
     * @param input char array containing text in Unicode
     * @param inOff offset of the first character to convert
     * @param inEnd offset just past the last character to convert
     * @param output byte array to receive the conversion result
     * @param outOff offset at which to start writing output bytes
     * @param outEnd offset just past the last byte that may be written
     * @exception UnknownCharacterException for any character
     * that cannot be converted to the external character set.
     * @exception MalformedInputException for a malformed surrogate sequence in the input.
     * @exception ConversionBufferFullException if the output buffer cannot hold the result.
*/
public int convert(char[] input, int inOff, int inEnd,
byte[] output, int outOff, int outEnd)
throws MalformedInputException, UnknownCharacterException,
ConversionBufferFullException
{
char inputChar; // Input character to be converted
byte[] outputByte; // Output byte written to output
int inputSize = 0; // Size of input
int outputSize = 0; // Size of output
byte[] tmpbuf = new byte[4];
// Record beginning offsets
charOff = inOff;
byteOff = outOff;
if (highHalfZoneCode != 0) {
inputChar = highHalfZoneCode;
highHalfZoneCode = 0;
if (input[inOff] >= 0xdc00 && input[inOff] <= 0xdfff) {
// This is legal UTF16 sequence.
badInputLength = 1;
throw new UnknownCharacterException();
} else {
// This is illegal UTF16 sequence.
badInputLength = 0;
throw new MalformedInputException();
}
}
// Loop until we hit the end of the input
while(charOff < inEnd) {
inputSize = 1;
outputByte = tmpbuf;
inputChar = input[charOff]; // Get the input character
// Is this a high surrogate?
if(inputChar >= '\uD800' && inputChar <= '\uDBFF') {
// Is this the last character of the input?
if (charOff + 1 >= inEnd) {
highHalfZoneCode = inputChar;
break;
}
// Is there a low surrogate following?
inputChar = input[charOff + 1];
if (inputChar >= '\uDC00' && inputChar <= '\uDFFF') {
// We have a valid surrogate pair. Too bad we don't do
// surrogates. Is substitution enabled?
if (subMode) {
outputByte = subBytes;
outputSize = subBytes.length;
inputSize = 2;
} else {
badInputLength = 2;
throw new UnknownCharacterException();
}
} else {
// We have a malformed surrogate pair
badInputLength = 1;
throw new MalformedInputException();
}
}
// Is this an unaccompanied low surrogate?
else if (inputChar >= '\uDC00' && inputChar <= '\uDFFF') {
badInputLength = 1;
throw new MalformedInputException();
} else {
outputSize = convSingleByte(inputChar, outputByte);
if (outputSize == 0) { // DoubleByte
int ncode = getNative(inputChar);
if (ncode != 0 ) {
if ((ncode & 0xFF0000) == 0) {
outputByte[0] = (byte) ((ncode & 0xff00) >> 8);
outputByte[1] = (byte) (ncode & 0xff);
outputSize = 2;
} else {
outputByte[0] = (byte) 0x8F;
outputByte[1] = (byte) ((ncode & 0xff00) >> 8);
outputByte[2] = (byte) (ncode & 0xff);
outputSize = 3;
}
} else {
if (subMode) {
outputByte = subBytes;
outputSize = subBytes.length;
} else {
badInputLength = 1;
throw new UnknownCharacterException();
}
}
}
}
// If we don't have room for the output, throw an exception
if (byteOff + outputSize > outEnd)
throw new ConversionBufferFullException();
// Put the byte in the output buffer
for (int i = 0; i < outputSize; i++) {
output[byteOff++] = outputByte[i];
}
charOff += inputSize;
}
// Return the length written to the output buffer
return byteOff - outOff;
}
/**
     * Returns the maximum number of bytes needed to hold a converted char.
     * @return the maximum number of bytes needed for a converted char
*/
public int getMaxBytesPerChar() {
return 3;
}
}
import _plotly_utils.basevalidators
class BgcolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self,
plotly_name="bgcolor",
parent_name="scattercarpet.marker.colorbar",
**kwargs
):
super(BgcolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "style"),
**kwargs
)
{
"nome": "Montù Beccaria",
"codice": "018100",
"zona": {
"codice": "1",
"nome": "Nord-ovest"
},
"regione": {
"codice": "03",
"nome": "Lombardia"
},
"provincia": {
"codice": "018",
"nome": "Pavia"
},
"sigla": "PV",
"codiceCatastale": "F701",
"cap": [
"27040"
],
"popolazione": 1722
}
{
"proportion_index": [0.7754, 0.6457, 0.6491, 0.243, 0.4318],
"structural": {
"Torso_BreastTone": 0.7,
"Cheeks_Tone": -0.5,
"Shoulders_Tone": -0.5,
"Legs_UpperlegsMass": 0.61,
"Feet_Mass": 0.61,
"Abdomen_Tone": -1.0,
"Legs_UpperlegsTone": -0.5,
"Neck_Mass": 0.61,
"Legs_LowerlegsTone": -0.5,
"Waist_Size": 0.512,
"Shoulders_Mass": 0.61,
"Torso_Tone": -0.5,
"Cheeks_Mass": 0.61,
"Torso_BreastMass": 0.25,
"Arms_UpperarmMass": 0.61,
"Pelvis_GluteusMass": 0.61,
"Abdomen_Mass": 0.11,
"Arms_UpperarmTone": -0.5,
"Stomach_LocalFat": 0.5022,
"Pelvis_GluteusTone": -0.5,
"Stomach_Volume": 0.5022,
"Legs_KneeSize": 0.522,
"Arms_ForearmTone": -0.5,
"Torso_Mass": 0.61,
"Wrists_Size": 0.461,
"Arms_ForearmMass": 0.61,
"Hands_Mass": 0.61,
"Neck_Tone": -0.5,
"Legs_LowerlegsMass": 0.61
},
"manuellab_vers": [1, 6, 0],
"metaproperties": {
"last_character_age": 0.0,
"character_tone": -1.0,
"last_character_mass": 0.11,
"character_mass": 0.11,
"character_age": 0.0,
"last_character_tone": -1.0
},
"materialproperties": {}
}
// Copyright 2013 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package mergo merges same-type structs and maps by setting default values in zero-value fields.
Mergo won't merge unexported (private) fields but will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection).
Usage
From my own work-in-progress project:
type networkConfig struct {
Protocol string
Address string
	ServerType string `json:"server_type"`
Port uint16
}
type FssnConfig struct {
Network networkConfig
}
var fssnDefault = FssnConfig {
networkConfig {
"tcp",
"127.0.0.1",
"http",
31560,
},
}
// Inside a function [...]
if err := mergo.Merge(&config, fssnDefault); err != nil {
log.Fatal(err)
}
// More code [...]
*/
package mergo
use bytecodec::json_codec::{JsonDecoder, JsonEncoder};
use bytecodec::null::NullDecoder;
use fibers_http_server::{HandleRequest, Reply, Req, ServerBuilder as HttpServerBuilder, Status};
use fibers_rpc::client::ClientServiceHandle as RpcServiceHandle;
use futures::Future;
use httpcodec::{BodyDecoder, BodyEncoder};
use libfrugalos::client::config::Client as ConfigRpcClient;
use libfrugalos::entity::bucket::{Bucket, BucketSummary};
use libfrugalos::entity::device::{Device, DeviceSummary};
use libfrugalos::entity::server::{Server, ServerSummary};
use std::net::SocketAddr;
use url::Url;
use http::{make_json_response, not_found, HttpResult};
use {Error, Result};
#[derive(Clone)]
pub struct ConfigServer {
rpc_service: RpcServiceHandle,
local_addr: SocketAddr,
}
impl ConfigServer {
pub fn new(rpc_service: RpcServiceHandle, local_addr: SocketAddr) -> Self {
ConfigServer {
rpc_service,
local_addr,
}
}
pub fn register(self, builder: &mut HttpServerBuilder) -> Result<()> {
track!(builder.add_handler(ListServers(self.clone())))?;
track!(builder.add_handler(PutServer(self.clone())))?;
track!(builder.add_handler(GetServer(self.clone())))?;
track!(builder.add_handler(ListDevices(self.clone())))?;
track!(builder.add_handler(PutDevice(self.clone())))?;
track!(builder.add_handler(GetDevice(self.clone())))?;
track!(builder.add_handler(ListBuckets(self.clone())))?;
track!(builder.add_handler(PutBucket(self.clone())))?;
track!(builder.add_handler(GetBucket(self.clone())))?;
track!(builder.add_handler(DeleteBucket(self.clone())))?;
        // We don't want to delete just one of the clone() calls above, so drop self here instead.
drop(self);
Ok(())
}
fn client(&self) -> ConfigRpcClient {
ConfigRpcClient::new(self.local_addr, self.rpc_service.clone())
}
}
struct ListServers(ConfigServer);
impl HandleRequest for ListServers {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/servers";
type ReqBody = ();
type ResBody = HttpResult<Vec<ServerSummary>>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, _req: Req<Self::ReqBody>) -> Self::Reply {
let future = self.0.client().list_servers().then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct PutServer(ConfigServer);
impl HandleRequest for PutServer {
const METHOD: &'static str = "PUT";
const PATH: &'static str = "/v1/servers/*";
type ReqBody = Server;
type ResBody = HttpResult<Server>;
type Decoder = BodyDecoder<JsonDecoder<Self::ReqBody>>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let server = req.into_body();
let future = self.0.client().put_server(server).then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct GetServer(ConfigServer);
impl HandleRequest for GetServer {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/servers/*";
type ReqBody = ();
type ResBody = HttpResult<Server>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let server_id = get_id(&req.url());
let future = self.0.client().get_server(server_id).then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(None) => (Status::NotFound, Err(track!(not_found()))),
Ok(Some(v)) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct ListDevices(ConfigServer);
impl HandleRequest for ListDevices {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/devices";
type ReqBody = ();
type ResBody = HttpResult<Vec<DeviceSummary>>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, _req: Req<Self::ReqBody>) -> Self::Reply {
let future = self.0.client().list_devices().then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct PutDevice(ConfigServer);
impl HandleRequest for PutDevice {
const METHOD: &'static str = "PUT";
const PATH: &'static str = "/v1/devices/*";
type ReqBody = Device;
type ResBody = HttpResult<Device>;
type Decoder = BodyDecoder<JsonDecoder<Self::ReqBody>>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let device = req.into_body();
let future = self.0.client().put_device(device).then(|result| {
let (status, body) = match track!(result) {
Err(e) => {
if let libfrugalos::ErrorKind::InvalidInput = e.kind() {
(Status::BadRequest, Err(Error::from(e)))
} else {
(Status::InternalServerError, Err(Error::from(e)))
}
}
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct GetDevice(ConfigServer);
impl HandleRequest for GetDevice {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/devices/*";
type ReqBody = ();
type ResBody = HttpResult<Device>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let device_id = get_id(&req.url());
let future = self.0.client().get_device(device_id).then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(None) => (Status::NotFound, Err(track!(not_found()))),
Ok(Some(v)) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct ListBuckets(ConfigServer);
impl HandleRequest for ListBuckets {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/buckets";
type ReqBody = ();
type ResBody = HttpResult<Vec<BucketSummary>>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, _req: Req<Self::ReqBody>) -> Self::Reply {
let future = self.0.client().list_buckets().then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct PutBucket(ConfigServer);
impl HandleRequest for PutBucket {
const METHOD: &'static str = "PUT";
const PATH: &'static str = "/v1/buckets/*";
type ReqBody = Bucket;
type ResBody = HttpResult<Bucket>;
type Decoder = BodyDecoder<JsonDecoder<Self::ReqBody>>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let bucket = req.into_body();
let future = self.0.client().put_bucket(bucket).then(|result| {
let (status, body) = match track!(result) {
Err(e) => {
if let libfrugalos::ErrorKind::InvalidInput = e.kind() {
(Status::BadRequest, Err(Error::from(e)))
} else {
(Status::InternalServerError, Err(Error::from(e)))
}
}
Ok(v) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct GetBucket(ConfigServer);
impl HandleRequest for GetBucket {
const METHOD: &'static str = "GET";
const PATH: &'static str = "/v1/buckets/*";
type ReqBody = ();
type ResBody = HttpResult<Bucket>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let bucket_id = get_id(&req.url());
let future = self.0.client().get_bucket(bucket_id).then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(None) => (Status::NotFound, Err(track!(not_found()))),
Ok(Some(v)) => (Status::Ok, Ok(v)),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
struct DeleteBucket(ConfigServer);
impl HandleRequest for DeleteBucket {
const METHOD: &'static str = "DELETE";
const PATH: &'static str = "/v1/buckets/*";
type ReqBody = ();
type ResBody = HttpResult<Option<Bucket>>;
type Decoder = BodyDecoder<NullDecoder>;
type Encoder = BodyEncoder<JsonEncoder<Self::ResBody>>;
type Reply = Reply<Self::ResBody>;
fn handle_request(&self, req: Req<Self::ReqBody>) -> Self::Reply {
let bucket_id = get_id(&req.url());
let future = self.0.client().delete_bucket(bucket_id).then(|result| {
let (status, body) = match track!(result) {
Err(e) => (Status::InternalServerError, Err(Error::from(e))),
Ok(None) => (Status::NotFound, Err(not_found())),
Ok(Some(v)) => (Status::Ok, Ok(Some(v))),
};
Ok(make_json_response(status, body))
});
Box::new(future)
}
}
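// Note (added for clarity, not in the original source): every handler path above has the
// form "/v1/<resource>/<id>", so path_segments() yields ["v1", "<resource>", "<id>"] and
// nth(2) is the id; e.g. for "/v1/buckets/foo" this returns "foo".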
fn get_id(url: &Url) -> String {
url.path_segments()
.expect("Never fails")
.nth(2)
.expect("Never fails")
.to_string()
}
from __future__ import print_function
import sys
import os
import argparse
import torch
import io
import time
import codecs
import json
import threading
from onmt.translate.Translator import make_translator
import onmt
import onmt.opts
import onmt.translate
class Timer:
def __init__(self, start=False):
self.stime = -1
self.prev = -1
self.times = {}
if start:
self.start()
def start(self):
self.stime = time.time()
self.prev = self.stime
self.times = {}
def tick(self, name=None, tot=False):
t = time.time()
if not tot:
elapsed = t - self.prev
else:
elapsed = t - self.stime
self.prev = t
if name is not None:
self.times[name] = elapsed
return elapsed
class ServerModelError(Exception):
pass
class TranslationServer():
def __init__(self):
self.models = {}
self.next_id = 0
def start(self, config_file):
"""Read the config file and pre-/load the models
"""
self.config_file = config_file
with open(self.config_file) as f:
self.confs = json.load(f)
self.models_root = self.confs.get('models_root', './available_models')
for i, conf in enumerate(self.confs["models"]):
if "model" not in conf:
raise ValueError("""Incorrect config file: missing 'model'
parameter for model #%d""" % i)
kwargs = {'timeout': conf.get('timeout', None),
'load': conf.get('load', None),
'tokenizer_opt': conf.get('tokenizer', None),
'on_timeout': conf.get('on_timeout', None),
'model_root': conf.get('model_root', self.models_root)
}
kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
model_id = conf.get("id", None)
opt = conf["opt"]
opt["model"] = conf["model"]
self.preload_model(opt, model_id=model_id, **kwargs)
def clone_model(self, model_id, opt, timeout=-1):
"""Clone a model `model_id`.
Different options may be passed. If `opt` is None, it will use the
same set of options
"""
if model_id in self.models:
if opt is None:
opt = self.models[model_id].user_opt
opt["model"] = self.models[model_id].opt.model
return self.load_model(opt, timeout)
else:
raise ServerModelError("No such model '%s'" % str(model_id))
def load_model(self, opt, model_id=None, **model_kwargs):
"""Loading a model given a set of options
"""
model_id = self.preload_model(opt, model_id=model_id, **model_kwargs)
load_time = self.models[model_id].load_time
return model_id, load_time
def preload_model(self, opt, model_id=None, **model_kwargs):
"""Preloading the model: updating internal datastructure
It will effectively load the model if `load` is set
"""
if model_id is not None:
if model_id in self.models.keys():
raise ValueError("Model ID %d already exists" % model_id)
else:
model_id = self.next_id
while model_id in self.models.keys():
model_id += 1
self.next_id = model_id + 1
print("Pre-loading model %d" % model_id)
model = ServerModel(opt, model_id, **model_kwargs)
self.models[model_id] = model
return model_id
def run(self, inputs):
"""Translate `inputs`
We keep the same format as the Lua version i.e.
[{"id": model_id, "src": "sequence to translate"},{ ...}]
We use inputs[0]["id"] as the model id
"""
model_id = inputs[0].get("id", 0)
if model_id in self.models and self.models[model_id] is not None:
return self.models[model_id].run(inputs)
else:
print("Error No such model '%s'" % str(model_id))
raise ServerModelError("No such model '%s'" % str(model_id))
def unload_model(self, model_id):
"""Manually unload a model.
It will free the memory and cancel the timer
"""
if model_id in self.models and self.models[model_id] is not None:
self.models[model_id].unload()
else:
raise ServerModelError("No such model '%s'" % str(model_id))
def list_models(self):
"""Return the list of available models
"""
models = []
for i, model in self.models.items():
models += [model.to_dict()]
return models
class ServerModel:
def __init__(self, opt, model_id, tokenizer_opt=None, load=False,
timeout=-1, on_timeout="to_cpu", model_root="./"):
"""
Args:
opt: (dict) options for the Translator
model_id: (int) model id
tokenizer_opt: (dict) options for the tokenizer or None
load: (bool) whether to load the model during __init__
timeout: (int) seconds before running `do_timeout`
                Negative values mean no timeout
on_timeout: (str) in ["to_cpu", "unload"] set what to do on
timeout (see function `do_timeout`)
model_root: (str) path to the model directory
                it must contain the model and tokenizer files
"""
self.model_root = model_root
self.opt = self.parse_opt(opt)
if self.opt.n_best > 1:
raise ValueError("Values of n_best > 1 are not supported")
self.model_id = model_id
self.tokenizer_opt = tokenizer_opt
self.timeout = timeout
self.on_timeout = on_timeout
self.unload_timer = None
self.user_opt = opt
self.tokenizer = None
if load:
self.load()
def parse_opt(self, opt):
"""Parse the option set passed by the user using `onmt.opts`
Args:
opt: (dict) options passed by the user
Returns:
opt: (Namespace) full set of options for the Translator
"""
prec_argv = sys.argv
sys.argv = sys.argv[:1]
parser = argparse.ArgumentParser()
onmt.opts.translate_opts(parser)
opt['model'] = os.path.join(self.model_root, opt['model'])
opt['src'] = "dummy_src"
for (k, v) in opt.items():
sys.argv += ['-%s' % k, str(v)]
opt = parser.parse_args()
opt.cuda = opt.gpu > -1
sys.argv = prec_argv
return opt
@property
def loaded(self):
return hasattr(self, 'translator')
def load(self):
timer = Timer()
print("Loading model %d" % self.model_id)
timer.start()
self.out_file = io.StringIO()
try:
self.translator = make_translator(self.opt,
report_score=False,
out_file=self.out_file)
except RuntimeError as e:
raise ServerModelError("Runtime Error: %s" % str(e))
timer.tick("model_loading")
if self.tokenizer_opt is not None:
print("Loading tokenizer")
mandatory = ["type", "model"]
for m in mandatory:
if m not in self.tokenizer_opt:
raise ValueError("Missing mandatory tokenizer option '%s'"
% m)
if self.tokenizer_opt['type'] == 'sentencepiece':
import sentencepiece as spm
sp = spm.SentencePieceProcessor()
model_path = os.path.join(self.model_root,
self.tokenizer_opt['model'])
sp.Load(model_path)
self.tokenizer = sp
else:
raise ValueError("Invalid value for tokenizer type")
self.load_time = timer.tick()
self.reset_unload_timer()
def run(self, inputs):
"""Translate `inputs` using this model
Args:
inputs: [{"src": "..."},{"src": ...}]
Returns:
result: (list) translations
times: (dict) containing times
"""
timer = Timer()
print("\nRunning translation using %d" % self.model_id)
timer.start()
if not self.loaded:
self.load()
timer.tick(name="load")
elif self.opt.cuda:
self.to_gpu()
timer.tick(name="to_gpu")
        # NOTE: the translator expects a file path as a parameter,
        # so we write the data to a temporary file.
tmp_root = "/tmp/onmt_server"
if not os.path.exists(tmp_root):
os.makedirs(tmp_root)
src_path = os.path.join(tmp_root, "tmp_src")
with codecs.open(src_path, 'w', 'utf-8') as f:
            # NOTE: If an input contains a line separator \n, we split it
            # into subsegments that we translate independently,
            # then merge the translations back together with the same
            # line breaks.
subsegment = {}
sscount = 0
sslength = []
for (i, inp) in enumerate(inputs):
src = inp['src']
lines = src.split("\n")
subsegment[i] = slice(sscount, sscount + len(lines))
sscount += len(lines)
for line in lines:
tok = self.maybe_tokenize(line)
f.write(tok + "\n")
sslength += [len(tok.split())]
timer.tick(name="writing")
try:
scores = self.translator.translate(None, src_path, None,
self.opt.batch_size)
except RuntimeError as e:
raise ServerModelError("Runtime Error: %s" % str(e))
timer.tick(name="translation")
print("""Using model #%d\t%d inputs (%d subsegment)
\ttranslation time: %f""" % (self.model_id, len(subsegment),
sscount,
timer.times['translation']))
self.reset_unload_timer()
results = self.out_file.getvalue().split("\n")
print("Results: ", len(results))
results = ['\n'.join([self.maybe_detokenize(_)
for _ in results[subsegment[i]]
if len(_) > 0])
for i in sorted(subsegment.keys())]
avg_scores = [sum([s * l for s, l in zip(scores[sub], sslength[sub])])
/ sum(sslength[sub])
for k, sub
in sorted(subsegment.items(), key=lambda x: x[0])]
self.clear_out_file()
return results, avg_scores, self.opt.n_best, timer.times
def do_timeout(self):
"""Timeout function that free GPU memory by moving the model to CPU
or unloading it; depending on `self.on_timemout` value
"""
if self.on_timeout == "unload":
print("Timeout: unloading model %d" % self.model_id)
self.unload()
if self.on_timeout == "to_cpu":
print("Timeout: sending model %d to CPU" % self.model_id)
self.to_cpu()
def unload(self):
print("Unloading model %d" % self.model_id)
del self.translator
if self.opt.cuda:
torch.cuda.empty_cache()
self.unload_timer = None
def reset_unload_timer(self):
if self.timeout < 0:
return
if self.unload_timer is not None:
self.unload_timer.cancel()
self.unload_timer = threading.Timer(self.timeout, self.do_timeout)
self.unload_timer.start()
def to_dict(self):
hide_opt = ["model", "src"]
d = {"model_id": self.model_id,
"opt": {k: self.user_opt[k] for k in self.user_opt.keys()
if k not in hide_opt},
"model": self.user_opt["model"],
"loaded": self.loaded,
"timeout": self.timeout,
}
if self.tokenizer_opt is not None:
d["tokenizer"] = self.tokenizer_opt
return d
def to_cpu(self):
"""Move the model to CPU and clear CUDA cache
"""
self.translator.model.cpu()
if self.opt.cuda:
torch.cuda.empty_cache()
def to_gpu(self):
"""Move the model to GPU
"""
torch.cuda.set_device(self.opt.gpu)
self.translator.model.cuda()
def clear_out_file(self):
# Creating a new object is faster
self.out_file = io.StringIO()
self.translator.out_file = self.out_file
def maybe_tokenize(self, sequence):
"""Tokenize the sequence (or not)
Same args/returns as `tokenize`
"""
if self.tokenizer_opt is not None:
return self.tokenize(sequence)
return sequence
def tokenize(self, sequence):
"""Tokenize a single sequence
Args:
sequence: (str) the sequence to tokenize
Returns:
tok: (str) the tokenized sequence
"""
if self.tokenizer is None:
raise ValueError("No tokenizer loaded")
if self.tokenizer_opt["type"] == "sentencepiece":
tok = self.tokenizer.EncodeAsPieces(sequence)
tok = " ".join(tok)
return tok
def maybe_detokenize(self, sequence):
"""De-tokenize the sequence (or not)
Same args/returns as `tokenize`
"""
if self.tokenizer_opt is not None:
return self.detokenize(sequence)
return sequence
def detokenize(self, sequence):
"""Detokenize a single sequence
Same args/returns as `tokenize`
"""
if self.tokenizer is None:
raise ValueError("No tokenizer loaded")
if self.tokenizer_opt["type"] == "sentencepiece":
detok = self.tokenizer.DecodePieces(sequence.split())
return detok
package getter
import (
"fmt"
"net/url"
"strings"
)
// GCSDetector implements Detector to detect GCS URLs and turn
// them into URLs that the GCSGetter can understand.
type GCSDetector struct{}
func (d *GCSDetector) Detect(src, _ string) (string, bool, error) {
if len(src) == 0 {
return "", false, nil
}
if strings.Contains(src, "googleapis.com/") {
return d.detectHTTP(src)
}
return "", false, nil
}
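// For illustration (added note, not in the original source): a source string such as
// "www.googleapis.com/storage/v1/my-bucket/path/to/obj" is rewritten by detectHTTP
// below to "gcs::https://www.googleapis.com/storage/v1/my-bucket/path/to/obj".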
func (d *GCSDetector) detectHTTP(src string) (string, bool, error) {
parts := strings.Split(src, "/")
if len(parts) < 5 {
return "", false, fmt.Errorf(
"URL is not a valid GCS URL")
}
version := parts[2]
bucket := parts[3]
object := strings.Join(parts[4:], "/")
url, err := url.Parse(fmt.Sprintf("https://www.googleapis.com/storage/%s/%s/%s",
version, bucket, object))
if err != nil {
return "", false, fmt.Errorf("error parsing GCS URL: %s", err)
}
return "gcs::" + url.String(), true, nil
}
polygon
1
1.445535E+01 5.075361E+01
1.445552E+01 5.075326E+01
1.445567E+01 5.075283E+01
1.445593E+01 5.075252E+01
1.445617E+01 5.075226E+01
1.445673E+01 5.075183E+01
1.445699E+01 5.075124E+01
1.445698E+01 5.075096E+01
1.445703E+01 5.075067E+01
1.445694E+01 5.075024E+01
1.445688E+01 5.075006E+01
1.445688E+01 5.074978E+01
1.445692E+01 5.074957E+01
1.445709E+01 5.074930E+01
1.445748E+01 5.074896E+01
1.445788E+01 5.074836E+01
1.445801E+01 5.074780E+01
1.445789E+01 5.074745E+01
1.445834E+01 5.074703E+01
1.445860E+01 5.074685E+01
1.445908E+01 5.074655E+01
1.445987E+01 5.074614E+01
1.446019E+01 5.074560E+01
1.446049E+01 5.074532E+01
1.446062E+01 5.074521E+01
1.446073E+01 5.074516E+01
1.446094E+01 5.074512E+01
1.446135E+01 5.074497E+01
1.446164E+01 5.074485E+01
1.446237E+01 5.074499E+01
1.446281E+01 5.074513E+01
1.446311E+01 5.074502E+01
1.446303E+01 5.074469E+01
1.446345E+01 5.074461E+01
1.446409E+01 5.074437E+01
1.446441E+01 5.074434E+01
1.446528E+01 5.074435E+01
1.446652E+01 5.074409E+01
1.446675E+01 5.074451E+01
1.446704E+01 5.074480E+01
1.446734E+01 5.074502E+01
1.446759E+01 5.074524E+01
1.446792E+01 5.074560E+01
1.446852E+01 5.074588E+01
1.446861E+01 5.074591E+01
1.446872E+01 5.074589E+01
1.446886E+01 5.074586E+01
1.446912E+01 5.074577E+01
1.446925E+01 5.074574E+01
1.446983E+01 5.074529E+01
1.446973E+01 5.074506E+01
1.446970E+01 5.074460E+01
1.446999E+01 5.074416E+01
1.447016E+01 5.074395E+01
1.447054E+01 5.074359E+01
1.447076E+01 5.074344E+01
1.447129E+01 5.074401E+01
1.447147E+01 5.074370E+01
1.447157E+01 5.074360E+01
1.447194E+01 5.074333E+01
1.447227E+01 5.074313E+01
1.447232E+01 5.074309E+01
1.447260E+01 5.074262E+01
1.447323E+01 5.074217E+01
1.447360E+01 5.074196E+01
1.447362E+01 5.074193E+01
1.447394E+01 5.074154E+01
1.447426E+01 5.074132E+01
1.447439E+01 5.074120E+01
1.447355E+01 5.074039E+01
1.447331E+01 5.074016E+01
1.447273E+01 5.073951E+01
1.447256E+01 5.073918E+01
1.447241E+01 5.073888E+01
1.447228E+01 5.073870E+01
1.447128E+01 5.073814E+01
1.447072E+01 5.073793E+01
1.447042E+01 5.073782E+01
1.446997E+01 5.073748E+01
1.446969E+01 5.073717E+01
1.446941E+01 5.073674E+01
1.446913E+01 5.073614E+01
1.446900E+01 5.073571E+01
1.446877E+01 5.073546E+01
1.446834E+01 5.073498E+01
1.446790E+01 5.073461E+01
1.446723E+01 5.073416E+01
1.446673E+01 5.073381E+01
1.446640E+01 5.073331E+01
1.446601E+01 5.073279E+01
1.446608E+01 5.073277E+01
1.446683E+01 5.073251E+01
1.446709E+01 5.073241E+01
1.446772E+01 5.073223E+01
1.446779E+01 5.073230E+01
1.446849E+01 5.073212E+01
1.446794E+01 5.073191E+01
1.446756E+01 5.073199E+01
1.446747E+01 5.073191E+01
1.446745E+01 5.073183E+01
1.446742E+01 5.073173E+01
1.446717E+01 5.073154E+01
1.446757E+01 5.073150E+01
1.446788E+01 5.073150E+01
1.446820E+01 5.073152E+01
1.446853E+01 5.073156E+01
1.446873E+01 5.073150E+01
1.446889E+01 5.073135E+01
1.446893E+01 5.073138E+01
1.446881E+01 5.073154E+01
1.446888E+01 5.073184E+01
1.446896E+01 5.073193E+01
1.446923E+01 5.073206E+01
1.446933E+01 5.073216E+01
1.446936E+01 5.073221E+01
1.446947E+01 5.073225E+01
1.446949E+01 5.073235E+01
1.446974E+01 5.073232E+01
1.446984E+01 5.073204E+01
1.447014E+01 5.073156E+01
1.447014E+01 5.073147E+01
1.447006E+01 5.073129E+01
1.447021E+01 5.073126E+01
1.447031E+01 5.073138E+01
1.447030E+01 5.073156E+01
1.447034E+01 5.073189E+01
1.447045E+01 5.073205E+01
1.447048E+01 5.073233E+01
1.447039E+01 5.073245E+01
1.447030E+01 5.073258E+01
1.447008E+01 5.073274E+01
1.447030E+01 5.073284E+01
1.447048E+01 5.073287E+01
1.447108E+01 5.073281E+01
1.447141E+01 5.073277E+01
1.447175E+01 5.073271E+01
1.447215E+01 5.073266E+01
1.447245E+01 5.073261E+01
1.447275E+01 5.073259E+01
1.447313E+01 5.073256E+01
1.447411E+01 5.073255E+01
1.447366E+01 5.073231E+01
1.447401E+01 5.073221E+01
1.447425E+01 5.073244E+01
1.447457E+01 5.073257E+01
1.447472E+01 5.073256E+01
1.447497E+01 5.073271E+01
1.447531E+01 5.073264E+01
1.447472E+01 5.073213E+01
1.447451E+01 5.073154E+01
1.447456E+01 5.073133E+01
1.447465E+01 5.073130E+01
1.447513E+01 5.073108E+01
1.447541E+01 5.073097E+01
1.447586E+01 5.073050E+01
1.447536E+01 5.073006E+01
1.447544E+01 5.072999E+01
1.447559E+01 5.072989E+01
1.447583E+01 5.072981E+01
1.447641E+01 5.072974E+01
1.447619E+01 5.073018E+01
1.447607E+01 5.073074E+01
1.447627E+01 5.073098E+01
1.447671E+01 5.073154E+01
1.447705E+01 5.073200E+01
1.447734E+01 5.073250E+01
1.447751E+01 5.073265E+01
1.447790E+01 5.073302E+01
1.447895E+01 5.073360E+01
1.447946E+01 5.073380E+01
1.447995E+01 5.073394E+01
1.448041E+01 5.073401E+01
1.448076E+01 5.073406E+01
1.448143E+01 5.073429E+01
1.448186E+01 5.073453E+01
1.448230E+01 5.073494E+01
1.448277E+01 5.073513E+01
1.448318E+01 5.073534E+01
1.448371E+01 5.073524E+01
1.448427E+01 5.073553E+01
1.448513E+01 5.073509E+01
1.448528E+01 5.073519E+01
1.448542E+01 5.073541E+01
1.448542E+01 5.073549E+01
1.448545E+01 5.073558E+01
1.448547E+01 5.073563E+01
1.448548E+01 5.073565E+01
1.448565E+01 5.073561E+01
1.448585E+01 5.073556E+01
1.448659E+01 5.073538E+01
1.448664E+01 5.073547E+01
1.448668E+01 5.073553E+01
1.448664E+01 5.073556E+01
1.448636E+01 5.073594E+01
1.448599E+01 5.073589E+01
1.448603E+01 5.073603E+01
1.448605E+01 5.073605E+01
1.448610E+01 5.073634E+01
1.448607E+01 5.073641E+01
1.448600E+01 5.073639E+01
1.448592E+01 5.073641E+01
1.448590E+01 5.073652E+01
1.448583E+01 5.073654E+01
1.448579E+01 5.073668E+01
1.448571E+01 5.073676E+01
1.448562E+01 5.073680E+01
1.448553E+01 5.073679E+01
1.448538E+01 5.073680E+01
1.448535E+01 5.073682E+01
1.448528E+01 5.073686E+01
1.448504E+01 5.073688E+01
1.448504E+01 5.073688E+01
1.448490E+01 5.073695E+01
1.448486E+01 5.073701E+01
1.448472E+01 5.073706E+01
1.448454E+01 5.073706E+01
1.448449E+01 5.073707E+01
1.448454E+01 5.073713E+01
1.448457E+01 5.073718E+01
1.448467E+01 5.073737E+01
1.448488E+01 5.073772E+01
1.448498E+01 5.073790E+01
1.448502E+01 5.073796E+01
1.448509E+01 5.073831E+01
1.448511E+01 5.073836E+01
1.448514E+01 5.073846E+01
1.448516E+01 5.073848E+01
1.448529E+01 5.073890E+01
1.448548E+01 5.073957E+01
1.448566E+01 5.073993E+01
1.448572E+01 5.074003E+01
1.448574E+01 5.074005E+01
1.448578E+01 5.074038E+01
1.448581E+01 5.074045E+01
1.448587E+01 5.074060E+01
1.448592E+01 5.074069E+01
1.448604E+01 5.074081E+01
1.448616E+01 5.074095E+01
1.448624E+01 5.074102E+01
1.448667E+01 5.074143E+01
1.448683E+01 5.074156E+01
1.448714E+01 5.074179E+01
1.448721E+01 5.074188E+01
1.448750E+01 5.074207E+01
1.448762E+01 5.074214E+01
1.448803E+01 5.074240E+01
1.448809E+01 5.074245E+01
1.448848E+01 5.074266E+01
1.448879E+01 5.074285E+01
1.448908E+01 5.074305E+01
1.448948E+01 5.074327E+01
1.448963E+01 5.074336E+01
1.448976E+01 5.074340E+01
1.449001E+01 5.074351E+01
1.449012E+01 5.074356E+01
1.449025E+01 5.074364E+01
1.449065E+01 5.074375E+01
1.449086E+01 5.074384E+01
1.449125E+01 5.074401E+01
1.449177E+01 5.074425E+01
1.449180E+01 5.074428E+01
1.449230E+01 5.074461E+01
1.449232E+01 5.074462E+01
1.449236E+01 5.074463E+01
1.449258E+01 5.074469E+01
1.449273E+01 5.074477E+01
1.449296E+01 5.074499E+01
1.449313E+01 5.074521E+01
1.449335E+01 5.074547E+01
1.449355E+01 5.074567E+01
1.449358E+01 5.074570E+01
1.449394E+01 5.074606E+01
1.449410E+01 5.074619E+01
1.449422E+01 5.074627E+01
1.449429E+01 5.074629E+01
1.449447E+01 5.074640E+01
1.449497E+01 5.074670E+01
1.449559E+01 5.074715E+01
1.449571E+01 5.074724E+01
1.449589E+01 5.074749E+01
1.449590E+01 5.074760E+01
1.449592E+01 5.074778E+01
1.449593E+01 5.074790E+01
1.449597E+01 5.074804E+01
1.449606E+01 5.074831E+01
1.449619E+01 5.074854E+01
1.449624E+01 5.074875E+01
1.449624E+01 5.074888E+01
1.449624E+01 5.074899E+01
1.449626E+01 5.074915E+01
1.449630E+01 5.074934E+01
1.449642E+01 5.074954E+01
1.449664E+01 5.074975E+01
1.449707E+01 5.075018E+01
1.449728E+01 5.075036E+01
1.449730E+01 5.075040E+01
1.449763E+01 5.075067E+01
1.449779E+01 5.075079E+01
1.449811E+01 5.075102E+01
1.449834E+01 5.075118E+01
1.449887E+01 5.075149E+01
1.449904E+01 5.075156E+01
1.449920E+01 5.075163E+01
1.449941E+01 5.075173E+01
1.449967E+01 5.075186E+01
1.449972E+01 5.075189E+01
1.449974E+01 5.075192E+01
1.449994E+01 5.075215E+01
1.450038E+01 5.075246E+01
1.450092E+01 5.075290E+01
1.450148E+01 5.075339E+01
1.450189E+01 5.075377E+01
1.450229E+01 5.075410E+01
1.450289E+01 5.075447E+01
1.450329E+01 5.075469E+01
1.450361E+01 5.075503E+01
1.450393E+01 5.075543E+01
1.450406E+01 5.075576E+01
1.450426E+01 5.075601E+01
1.450435E+01 5.075612E+01
1.450468E+01 5.075647E+01
1.450454E+01 5.075662E+01
1.450451E+01 5.075664E+01
1.450447E+01 5.075665E+01
1.450432E+01 5.075682E+01
1.450391E+01 5.075718E+01
1.450344E+01 5.075743E+01
1.450304E+01 5.075763E+01
1.450236E+01 5.075794E+01
1.450127E+01 5.075874E+01
1.450077E+01 5.075889E+01
1.450046E+01 5.075897E+01
1.450016E+01 5.075906E+01
1.449986E+01 5.075918E+01
1.449925E+01 5.075946E+01
1.449878E+01 5.075966E+01
1.449860E+01 5.075985E+01
1.449788E+01 5.076034E+01
1.449715E+01 5.076032E+01
1.449682E+01 5.076026E+01
1.449621E+01 5.076009E+01
1.449558E+01 5.076004E+01
1.449502E+01 5.076022E+01
1.449416E+01 5.076044E+01
1.449368E+01 5.076059E+01
1.449312E+01 5.076074E+01
1.449287E+01 5.076083E+01
1.449247E+01 5.076096E+01
1.449167E+01 5.076118E+01
1.449160E+01 5.076120E+01
1.449127E+01 5.076135E+01
1.449107E+01 5.076134E+01
1.449075E+01 5.076135E+01
1.449047E+01 5.076132E+01
1.448975E+01 5.076115E+01
1.448949E+01 5.076087E+01
1.448928E+01 5.076066E+01
1.448913E+01 5.076055E+01
1.448843E+01 5.076033E+01
1.448783E+01 5.076014E+01
1.448699E+01 5.076003E+01
1.448670E+01 5.076000E+01
1.448606E+01 5.075990E+01
1.448556E+01 5.075986E+01
1.448549E+01 5.075985E+01
1.448507E+01 5.075994E+01
1.448455E+01 5.075993E+01
1.448395E+01 5.076000E+01
1.448355E+01 5.076009E+01
1.448310E+01 5.076018E+01
1.448282E+01 5.076021E+01
1.448231E+01 5.076016E+01
1.448199E+01 5.076013E+01
1.448120E+01 5.076006E+01
1.448076E+01 5.075989E+01
1.448040E+01 5.075975E+01
1.448009E+01 5.075962E+01
1.447993E+01 5.075956E+01
1.447949E+01 5.075938E+01
1.447911E+01 5.075925E+01
1.447879E+01 5.075916E+01
1.447857E+01 5.075903E+01
1.447820E+01 5.075883E+01
1.447781E+01 5.075870E+01
1.447712E+01 5.075833E+01
1.447692E+01 5.075825E+01
1.447659E+01 5.075809E+01
1.447636E+01 5.075796E+01
1.447604E+01 5.075782E+01
1.447530E+01 5.075752E+01
1.447472E+01 5.075725E+01
1.447420E+01 5.075700E+01
1.447393E+01 5.075696E+01
1.447380E+01 5.075695E+01
1.447287E+01 5.075687E+01
1.447271E+01 5.075685E+01
1.447230E+01 5.075681E+01
1.447193E+01 5.075670E+01
1.447115E+01 5.075678E+01
1.447067E+01 5.075701E+01
1.447044E+01 5.075709E+01
1.447030E+01 5.075711E+01
1.446976E+01 5.075722E+01
1.446929E+01 5.075733E+01
1.446925E+01 5.075733E+01
1.446901E+01 5.075738E+01
1.446871E+01 5.075745E+01
1.446832E+01 5.075755E+01
1.446828E+01 5.075756E+01
1.446824E+01 5.075757E+01
1.446793E+01 5.075764E+01
1.446760E+01 5.075775E+01
1.446730E+01 5.075779E+01
1.446683E+01 5.075790E+01
1.446638E+01 5.075810E+01
1.446595E+01 5.075830E+01
1.446540E+01 5.075832E+01
1.446501E+01 5.075836E+01
1.446471E+01 5.075835E+01
1.446417E+01 5.075832E+01
1.446346E+01 5.075840E+01
1.446281E+01 5.075845E+01
1.446222E+01 5.075850E+01
1.446210E+01 5.075852E+01
1.446187E+01 5.075855E+01
1.446114E+01 5.075890E+01
1.446061E+01 5.075915E+01
1.446026E+01 5.075882E+01
1.446002E+01 5.075851E+01
1.445957E+01 5.075829E+01
1.445926E+01 5.075787E+01
1.445894E+01 5.075769E+01
1.445857E+01 5.075734E+01
1.445825E+01 5.075712E+01
1.445816E+01 5.075692E+01
1.445796E+01 5.075663E+01
1.445786E+01 5.075621E+01
1.445774E+01 5.075596E+01
1.445764E+01 5.075557E+01
1.445730E+01 5.075537E+01
1.445720E+01 5.075526E+01
1.445719E+01 5.075524E+01
1.445695E+01 5.075494E+01
1.445664E+01 5.075462E+01
1.445633E+01 5.075426E+01
1.445611E+01 5.075405E+01
1.445567E+01 5.075377E+01
1.445535E+01 5.075361E+01
END
END
|
{
"pile_set_name": "Github"
}
|
How to Build for Windows
========================
You can follow the [r2book](https://radare.gitbooks.io/radare2book/content/first_steps/windows_compilation.html) for a more complete guide.
Native
---------------
You will need:
* Python 3
* Meson (pip3 install meson)
* Visual Studio 2015 (or later)
First, call `vcvarsall.bat` with your architecture (x86, x64, arm) to set up the compilation environment.
cd radare2
python3 sys\meson.py --release --backend vs2019 --shared --install --prefix="%cd%\radare2_dist" --webui
You can change `--backend` to match your VS version (`vs2015`, `vs2017`); the `ninja` build system is also supported.
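For example, a Ninja build is a small variation on the command above (a sketch; it assumes `ninja` is installed and on your PATH, e.g. via `pip3 install ninja`):

    cd radare2
    python3 sys\meson.py --release --backend ninja --shared --install --prefix="%cd%\radare2_dist" --webui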
For XP support, append `--xp` to the command (not compatible with VS2019).
You can then add `radare2_dist` to your PATH to make radare2 accessible from everywhere.
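For instance, from the same developer shell (assuming the binaries end up in `radare2_dist\bin`; adjust the path to your actual install layout):

    set "PATH=%cd%\radare2_dist\bin;%PATH%"
    radare2 -v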
Crosscompilation
----------------
As building with mingw is no longer officially supported in radare2, crosscompilation isn't (easily) possible.
You can check the official [Meson documentation](https://mesonbuild.com/Cross-compilation.html).
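If you still want to experiment, Meson cross builds are driven by a cross file passed via `--cross-file`; a minimal sketch for a mingw-w64 toolchain (the toolchain binary names are assumptions, and radare2 does not officially support this path) would look like:

    [binaries]
    c = 'x86_64-w64-mingw32-gcc'
    cpp = 'x86_64-w64-mingw32-g++'
    ar = 'x86_64-w64-mingw32-ar'
    strip = 'x86_64-w64-mingw32-strip'

    [host_machine]
    system = 'windows'
    cpu_family = 'x86_64'
    cpu = 'x86_64'
    endian = 'little'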
Good luck.
|
{
"pile_set_name": "Github"
}
|
using System;
using Server;
using Server.Items;
namespace Server.Mobiles
{
[CorpseName( "a gore fiend corpse" )]
public class GoreFiend : BaseCreature
{
[Constructable]
public GoreFiend() : base( AIType.AI_Melee, FightMode.Closest, 10, 1, 0.2, 0.4 )
{
Name = "a gore fiend";
Body = 305;
BaseSoundID = 224;
SetStr( 161, 185 );
SetDex( 41, 65 );
SetInt( 46, 70 );
SetHits( 97, 111 );
SetDamage( 15, 21 );
SetDamageType( ResistanceType.Physical, 85 );
SetDamageType( ResistanceType.Poison, 15 );
SetResistance( ResistanceType.Physical, 35, 45 );
SetResistance( ResistanceType.Fire, 25, 35 );
SetResistance( ResistanceType.Cold, 15, 25 );
SetResistance( ResistanceType.Poison, 5, 15 );
SetResistance( ResistanceType.Energy, 30, 40 );
SetSkill( SkillName.MagicResist, 40.1, 55.0 );
SetSkill( SkillName.Tactics, 45.1, 70.0 );
SetSkill( SkillName.Wrestling, 50.1, 70.0 );
Fame = 1500;
Karma = -1500;
VirtualArmor = 24;
}
public override void GenerateLoot()
{
AddLoot( LootPack.Average );
}
public override int GetDeathSound()
{
return 1218;
}
public override bool BleedImmune{ get{ return true; } }
public GoreFiend( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 );
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
}
|
{
"pile_set_name": "Github"
}
|
<?php
namespace AgenDAV\Exception;
/*
* Copyright (C) Jorge López Pérez <[email protected]>
*
* This file is part of AgenDAV.
*
* AgenDAV is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* AgenDAV is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with AgenDAV. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * Exception thrown when the server refuses access to a resource
 * (permission denied)
 */
class PermissionDenied extends \AgenDAV\Exception
{
}
|
{
"pile_set_name": "Github"
}
|
/* inftrees.h -- header to use inftrees.c
* Copyright (C) 1995-2002 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
/* Huffman code lookup table entry--this entry is four bytes for machines
that have 16-bit pointers (e.g. PC's in the small or medium model). */
#ifndef _INFTREES_H
#define _INFTREES_H
typedef struct inflate_huft_s FAR inflate_huft;
struct inflate_huft_s {
union {
struct {
Byte Exop; /* number of extra bits or operation */
Byte Bits; /* number of bits in this code or subcode */
} what;
uInt pad; /* pad structure to a power of 2 (4 bytes for */
} word; /* 16-bit, 8 bytes for 32-bit int's) */
uInt base; /* literal, length base, distance base,
or table offset */
};
/* Maximum size of dynamic tree. The maximum found in a long but non-
exhaustive search was 1004 huft structures (850 for length/literals
and 154 for distances, the latter actually the result of an
exhaustive search). The actual maximum is not known, but the
value below is more than safe. */
#define MANY 1440
local int inflate_trees_bits OF((
uIntf *, /* 19 code lengths */
uIntf *, /* bits tree desired/actual depth */
inflate_huft * FAR *, /* bits tree result */
inflate_huft *, /* space for trees */
z_streamp)); /* for messages */
local int inflate_trees_dynamic OF((
uInt, /* number of literal/length codes */
uInt, /* number of distance codes */
uIntf *, /* that many (total) code lengths */
uIntf *, /* literal desired/actual bit depth */
uIntf *, /* distance desired/actual bit depth */
inflate_huft * FAR *, /* literal/length tree result */
inflate_huft * FAR *, /* distance tree result */
inflate_huft *, /* space for trees */
z_streamp)); /* for messages */
local int inflate_trees_fixed OF((
uIntf *, /* literal desired/actual bit depth */
uIntf *, /* distance desired/actual bit depth */
const inflate_huft * FAR *, /* literal/length tree result */
const inflate_huft * FAR *, /* distance tree result */
z_streamp)); /* for memory allocation */
#endif /* _INFTREES_H */
|
{
"pile_set_name": "Github"
}
|
<?php
namespace PHLAK\Twine\Tests\Methods;
use PHLAK\Twine;
use PHLAK\Twine\Exceptions\EncryptionException;
use PHPUnit\Framework\TestCase;
class EncryptTest extends TestCase
{
public function test_it_can_be_encrypted()
{
$string = new Twine\Str('john pinkerton');
$encrypted = $string->encrypt('secret');
$this->assertInstanceOf(Twine\Str::class, $encrypted);
$this->assertMatchesRegularExpression('/[a-zA-Z0-9=+\/]+/', (string) $encrypted);
return $encrypted;
}
public function test_it_throws_an_exception_when_encrypting_with_an_invalid_cipher()
{
$string = new Twine\Str('john pinkerton');
$this->expectException(EncryptionException::class);
$string->encrypt('secret', 'invalid');
}
public function test_a_multibyte_string_can_be_encrypted()
{
$string = new Twine\Str('宮本 茂');
$encrypted = $string->encrypt('secret');
$this->assertInstanceOf(Twine\Str::class, $encrypted);
$this->assertMatchesRegularExpression('/[a-zA-Z0-9=+\/]+/', (string) $encrypted);
return $encrypted;
}
public function test_it_preserves_encoding()
{
$string = new Twine\Str('john pinkerton', 'ASCII');
$encrypted = $string->encrypt('secret');
$this->assertEquals('ASCII', mb_detect_encoding($encrypted));
}
}
|
{
"pile_set_name": "Github"
}
|
---
# Implement your Workload deployment tasks here
- debug:
msg: "Removing ML Workflows workshop materials from preexisting project {{ project_name }}"
- name: Remove the custom notebook image
k8s:
state: absent
namespace: "{{ project_name }}"
definition: "{{ item }}"
with_items:
- "{{ lookup('template', 'custom-notebook-imagestream.yaml.j2') }}"
- name: Remove the pipeline builder image
k8s:
state: absent
namespace: "{{ project_name }}"
definition: "{{ item }}"
with_items:
- "{{ lookup('template', 'pipeline-s2i-imagestream.yaml.j2') }}"
- name: Remove the pipeline build and app
k8s:
state: absent
namespace: "{{ project_name }}"
definition: "{{ item }}"
with_items:
- "{{ lookup('template', 'pipeline-imagestream.yaml.j2') }}"
- "{{ lookup('template', 'pipeline-buildconfig.yaml.j2') }}"
- "{{ lookup('template', 'pipeline-deploymentconfig.yaml.j2') }}"
- "{{ lookup('template', 'pipeline-service.yaml.j2') }}"
when: instructor_mode
# Leave this as the last task in the playbook.
- name: workload tasks complete
debug:
msg: "Workload Tasks completed successfully."
when: not silent|bool
|
{
"pile_set_name": "Github"
}
|
import simplejson as json
from django.contrib.auth.decorators import permission_required
from django.http import HttpResponse
from sql.engines import get_engine
from common.utils.extend_json_encoder import ExtendJSONEncoder
from sql.utils.resource_group import user_instances
from .models import AliyunRdsConfig, Instance
from .aliyun_rds import process_status as aliyun_process_status, create_kill_session as aliyun_create_kill_session, \
kill_session as aliyun_kill_session, sapce_status as aliyun_sapce_status
# Diagnostics -- process list
@permission_required('sql.process_view', raise_exception=True)
def process(request):
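    """Return the MySQL processlist of an instance, optionally filtered by
    command type; Aliyun RDS instances are queried through the RDS API instead."""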
instance_name = request.POST.get('instance_name')
command_type = request.POST.get('command_type')
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"
    # Check whether this is an Aliyun RDS instance or a regular one
if AliyunRdsConfig.objects.filter(instance=instance, is_enable=True).exists():
result = aliyun_process_status(request)
else:
if command_type == 'All':
sql = base_sql + ";"
elif command_type == 'Not Sleep':
sql = "{} where command<>'Sleep';".format(base_sql)
else:
sql = "{} where command= '{}';".format(base_sql, command_type)
query_engine = get_engine(instance=instance)
query_result = query_engine.query('information_schema', sql)
if not query_result.error:
processlist = query_result.to_dict()
result = {'status': 0, 'msg': 'ok', 'rows': processlist}
else:
result = {'status': 1, 'msg': query_result.error}
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
# Diagnostics -- build kill statements from the selected process IDs
@permission_required('sql.process_kill', raise_exception=True)
def create_kill_session(request):
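    """Build the ``kill <id>;`` statements for the selected processlist
    thread IDs without executing them."""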
instance_name = request.POST.get('instance_name')
thread_ids = request.POST.get('ThreadIDs')
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
result = {'status': 0, 'msg': 'ok', 'data': []}
    # Check whether this is an Aliyun RDS instance or a regular one
if AliyunRdsConfig.objects.filter(instance=instance, is_enable=True).exists():
result = aliyun_create_kill_session(request)
else:
thread_ids = thread_ids.replace('[', '').replace(']', '')
query_engine = get_engine(instance=instance)
sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(thread_ids)
all_kill_sql = query_engine.query('information_schema', sql)
kill_sql = ''
for row in all_kill_sql.rows:
kill_sql = kill_sql + row[0]
result['data'] = kill_sql
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
# Diagnostics -- kill sessions
@permission_required('sql.process_kill', raise_exception=True)
def kill_session(request):
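    """Execute ``kill <id>;`` for the selected processlist thread IDs."""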
instance_name = request.POST.get('instance_name')
thread_ids = request.POST.get('ThreadIDs')
result = {'status': 0, 'msg': 'ok', 'data': []}
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
    # Check whether this is an Aliyun RDS instance or a regular one
if AliyunRdsConfig.objects.filter(instance=instance, is_enable=True).exists():
result = aliyun_kill_session(request)
else:
thread_ids = thread_ids.replace('[', '').replace(']', '')
engine = get_engine(instance=instance)
sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(thread_ids)
all_kill_sql = engine.query('information_schema', sql)
kill_sql = ''
for row in all_kill_sql.rows:
kill_sql = kill_sql + row[0]
engine.execute('information_schema', kill_sql)
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
# Diagnostics -- tablespace information
@permission_required('sql.tablespace_view', raise_exception=True)
def tablesapce(request):
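    """Return per-table space usage statistics for the instance
    (top tables by total size, excluding system schemas)."""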
instance_name = request.POST.get('instance_name')
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
    # Check whether this is an Aliyun RDS instance or a regular one
if AliyunRdsConfig.objects.filter(instance=instance, is_enable=True).exists():
result = aliyun_sapce_status(request)
else:
sql = '''
SELECT
table_schema AS table_schema,
table_name AS table_name,
engine AS engine,
TRUNCATE((data_length+index_length+data_free)/1024/1024,2) AS total_size,
table_rows AS table_rows,
TRUNCATE(data_length/1024/1024,2) AS data_size,
TRUNCATE(index_length/1024/1024,2) AS index_size,
TRUNCATE(data_free/1024/1024,2) AS data_free,
TRUNCATE(data_free/(data_length+index_length+data_free)*100,2) AS pct_free
FROM information_schema.tables
WHERE table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'test', 'sys')
ORDER BY total_size DESC
        LIMIT 14;'''
query_engine = get_engine(instance=instance)
query_result = query_engine.query('information_schema', sql)
if not query_result.error:
table_space = query_result.to_dict()
result = {'status': 0, 'msg': 'ok', 'rows': table_space}
else:
result = {'status': 1, 'msg': query_result.error}
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
# Diagnostics -- lock waits
@permission_required('sql.trxandlocks_view', raise_exception=True)
def trxandlocks(request):
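    """Return InnoDB lock-wait details, reading information_schema before
    MySQL 8.0.1 and performance_schema from 8.0.1 onwards."""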
instance_name = request.POST.get('instance_name')
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
query_engine = get_engine(instance=instance)
server_version = query_engine.server_version
if server_version < (8, 0, 1):
sql = '''
SELECT
rtrx.`trx_state` AS "等待的状态",
rtrx.`trx_started` AS "等待事务开始时间",
rtrx.`trx_wait_started` AS "等待事务等待开始时间",
lw.`requesting_trx_id` AS "等待事务ID",
rtrx.trx_mysql_thread_id AS "等待事务线程ID",
rtrx.`trx_query` AS "等待事务的sql",
CONCAT(rl.`lock_mode`, '-', rl.`lock_table`, '(', rl.`lock_index`, ')') AS "等待的表信息",
rl.`lock_id` AS "等待的锁id",
lw.`blocking_trx_id` AS "运行的事务id",
trx.trx_mysql_thread_id AS "运行的事务线程id",
CONCAT(l.`lock_mode`, '-', l.`lock_table`, '(', l.`lock_index`, ')') AS "运行的表信息",
l.lock_id AS "运行的锁id",
trx.`trx_state` AS "运行事务的状态",
trx.`trx_started` AS "运行事务的时间",
trx.`trx_wait_started` AS "运行事务的等待开始时间",
trx.`trx_query` AS "运行事务的sql"
FROM information_schema.`INNODB_LOCKS` rl
, information_schema.`INNODB_LOCKS` l
, information_schema.`INNODB_LOCK_WAITS` lw
, information_schema.`INNODB_TRX` rtrx
, information_schema.`INNODB_TRX` trx
WHERE rl.`lock_id` = lw.`requested_lock_id`
AND l.`lock_id` = lw.`blocking_lock_id`
AND lw.requesting_trx_id = rtrx.trx_id
AND lw.blocking_trx_id = trx.trx_id;'''
else:
sql = '''
SELECT
rtrx.`trx_state` AS "等待的状态",
rtrx.`trx_started` AS "等待事务开始时间",
rtrx.`trx_wait_started` AS "等待事务等待开始时间",
lw.`REQUESTING_ENGINE_TRANSACTION_ID` AS "等待事务ID",
rtrx.trx_mysql_thread_id AS "等待事务线程ID",
rtrx.`trx_query` AS "等待事务的sql",
CONCAT(rl.`lock_mode`, '-', rl.`OBJECT_SCHEMA`, '(', rl.`INDEX_NAME`, ')') AS "等待的表信息",
rl.`ENGINE_LOCK_ID` AS "等待的锁id",
lw.`BLOCKING_ENGINE_TRANSACTION_ID` AS "运行的事务id",
trx.trx_mysql_thread_id AS "运行的事务线程id",
CONCAT(l.`lock_mode`, '-', l.`OBJECT_SCHEMA`, '(', l.`INDEX_NAME`, ')') AS "运行的表信息",
l.ENGINE_LOCK_ID AS "运行的锁id",
trx.`trx_state` AS "运行事务的状态",
trx.`trx_started` AS "运行事务的时间",
trx.`trx_wait_started` AS "运行事务的等待开始时间",
trx.`trx_query` AS "运行事务的sql"
FROM performance_schema.`data_locks` rl
, performance_schema.`data_locks` l
, performance_schema.`data_lock_waits` lw
, information_schema.`INNODB_TRX` rtrx
, information_schema.`INNODB_TRX` trx
WHERE rl.`ENGINE_LOCK_ID` = lw.`REQUESTING_ENGINE_LOCK_ID`
AND l.`ENGINE_LOCK_ID` = lw.`BLOCKING_ENGINE_LOCK_ID`
AND lw.REQUESTING_ENGINE_TRANSACTION_ID = rtrx.trx_id
AND lw.BLOCKING_ENGINE_TRANSACTION_ID = trx.trx_id;'''
query_result = query_engine.query('information_schema', sql)
if not query_result.error:
trxandlocks = query_result.to_dict()
result = {'status': 0, 'msg': 'ok', 'rows': trxandlocks}
else:
result = {'status': 1, 'msg': query_result.error}
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
# Diagnostics -- long-running transactions
@permission_required('sql.trx_view', raise_exception=True)
def innodb_trx(request):
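    """Return the currently running InnoDB transactions joined with their
    processlist sessions."""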
instance_name = request.POST.get('instance_name')
try:
instance = user_instances(request.user, db_type=['mysql']).get(instance_name=instance_name)
except Instance.DoesNotExist:
result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
return HttpResponse(json.dumps(result), content_type='application/json')
query_engine = get_engine(instance=instance)
sql = '''select trx.trx_started,
trx.trx_state,
trx.trx_operation_state,
trx.trx_mysql_thread_id,
trx.trx_tables_locked,
trx.trx_rows_locked,
trx.trx_rows_modified,
trx.trx_is_read_only,
trx.trx_isolation_level,
p.user,
p.host,
p.db,
to_seconds(now()) - to_seconds(trx.trx_started) trx_idle_time,
p.time thread_time,
ifnull(p.info, '') info
from information_schema.INNODB_TRX trx
join information_schema.processlist p on trx.trx_mysql_thread_id = p.id;'''
query_result = query_engine.query('information_schema', sql)
if not query_result.error:
trx = query_result.to_dict()
result = {'status': 0, 'msg': 'ok', 'rows': trx}
else:
result = {'status': 1, 'msg': query_result.error}
    # Return the query result
return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
content_type='application/json')
|
{
"pile_set_name": "Github"
}
|
#
# $HOME/.config/pulseaudio-ctl/config
#
# The default setting is for pulseaudio-ctl to NOT increase the volume level
# above 100 %, but some users may wish to exceed this level. If this describes
# your use case, uncomment the UPPER_THRESHOLD variable below, setting it to
# the new upper threshold.
#
#UPPER_THRESHOLD=150
# Push output through libnotify. Set to any value to enable this feature
# and note that you must have /usr/bin/notify-send to use this. On Arch
# libnotify provides this. Other distros may not name it as such.
#
#NOTIFY=yes
# Show a graphical progress-bar type visualization of the volume level in
# libnotify. If this is unset or commented out, a simple percentage is shown;
# setting it converts the percentage to a progress bar in libnotify.
#
#BARCHART=yes
# Use KDE OSD notification. Set to any value to enable this feature.
#KDE_OSD=yes
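#
# Example (illustrative values, not shipped defaults): a configuration with
# the volume cap raised and desktop notifications enabled would have the
# variables uncommented, e.g.
#
#   UPPER_THRESHOLD=120
#   NOTIFY=yes
#   BARCHART=yes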
|
{
"pile_set_name": "Github"
}
|
@extends('layouts.base')
@section('title', '会员详情')
@section('content')
<link href="{{static_url('yunshop/css/member.css')}}" media="all" rel="stylesheet" type="text/css"/>
<div class="w1200 m0a">
<div class="rightlist">
        <!-- Newly added three-level menu at the top of the right column -->
<div class="right-titpos">
<ul class="add-snav">
<li class="active"><a href="{{yzWebUrl('member.member.index')}}">会员管理</a></li>
<li><a href="#"> <i class="fa fa-angle-double-right"></i> 会员详情</a></li>
</ul>
</div>
        <!-- End of newly added three-level menu at the top of the right column -->
<form action="{{yzWebUrl('member.member.update', ['id'=> $member['uid']])}}" method='post'
class='form-horizontal'>
<input type="hidden" name="id" value="{{$member['uid']}}">
<input type="hidden" name="op" value="detail">
<input type="hidden" name="c" value="site"/>
<input type="hidden" name="a" value="entry"/>
<input type="hidden" name="m" value="yun_shop"/>
<input type="hidden" name="do" value="member"/>
<div class='panel panel-default'>
<div class='panel-body'>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">粉丝</label>
<div class="col-sm-9 col-xs-12">
<img src='{{$member['avatar']}}'
style='width:100px;height:100px;padding:1px;border:1px solid #ccc'/>
{{$member['nickname']}}
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">会员等级</label>
<div class="col-sm-9 col-xs-12">
<select name='data[level_id]' class='form-control'>
                            <option value="0" @if(empty($member['yz_member']['level_id']))
                                    selected
                                    @endif
                            >
{{$set['level_name']}}
</option>
@foreach ($levels as $level)
<option value='{{$level['id']}}'
@if($member['yz_member']['level_id']==$level['id'])
selected
@endif>{{$level['level_name']}}</option>
@endforeach
</select>
</div>
</div>
@if($set['level_type'] == 2)
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">会员等级期限</label>
<div class="col-sm-6 col-xs-6">
<div class='input-group'>
<input type='text' name='data[validity]' class="form-control"
value="{{$member['yz_member']['validity']}}"/>
<div class='input-group-addon' style="width: auto;">天</div>
</div>
</div>
</div>
@endif
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">会员分组</label>
<div class="col-sm-9 col-xs-12">
<select name='data[group_id]' class='form-control'>
<option value='0' selected>无分组</option>
@foreach($groups as $group)
<option value='{{$group['id']}}'
@if($member['yz_member']['group_id'] == $group['id']) selected @endif >{{ $group['group_name'] }}</option>
@endforeach
</select>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">真实姓名</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="data[realname]" class="form-control"
value="{{$member['realname']}}"/>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">绑定手机</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>{{$member['mobile']}}</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">提现手机</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>{{$member['yz_member']['withdraw_mobile']}}</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">微信号</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="data[wechat]" class="form-control"
value="{{$member['yz_member']['wechat']}}"/>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">支付宝姓名</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="data[alipayname]" class="form-control"
value="{{$member['yz_member']['alipayname']}}"/>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">支付宝账号</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="data[alipay]" class="form-control"
value="{{$member['yz_member']['alipay']}}"/>
</div>
</div>
@if (!empty($myform))
@foreach ($myform as $item)
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">{{$item->name}}</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="myform[{{$item->pinyin}}]" class="form-control"
value="{{$item->value}}"/>
</div>
</div>
@endforeach
@endif
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">积分</label>
<div class="col-sm-3">
<div class='input-group'>
<div class=' input-group-addon'>{{$member['credit1']}}</div>
<div class='input-group-btn'>
<a class='btn btn-success' href="{{yzWebUrl('point.recharge.index',['id'=>$member['uid']])}}">充值</a>
</div>
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">余额</label>
<div class="col-sm-3">
<div class='input-group'>
<div class=' input-group-addon'>{{$member['credit2']}}</div>
<div class='input-group-btn'>
<a class='btn btn-success' href="{{yzWebUrl('balance.recharge.index', ['member_id'=>$member['uid']])}}">充值</a>
</div>
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">成交订单数</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>
@if($member['has_one_order']['total'])
{{$member['has_one_order']['total']}}
@else
0
@endif
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">成交金额</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>
@if($member['has_one_order']['sum'])
{{$member['has_one_order']['sum']}}
@else
0
@endif
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">注册时间</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>{{date('Y-m-d H:i:s', $member['createtime'])}}</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">关注状态</label>
<div class="col-sm-9 col-xs-12">
<div class='form-control-static'>
@if(!$member['has_one_fans']['followed'])
<label class='label label-default'>未关注</label>
@else
<label class='label label-success'>已关注</label>
@endif
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">推广员</label>
<div class="col-sm-9 col-xs-12">
<label class="radio-inline"><input type="radio" name="data[agent]" value="1"
@if($member['agent']==1)
checked
@endif>是</label>
<label class="radio-inline"><input type="radio" name="data[agent]" value="0"
@if($member['agent']==0)
checked
@endif>否</label>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">会员上线</label>
<div class="col-sm-5">
<div class='input-group'>
<input type="hidden" id="parent_id" name="data[parent_id]" value="{{$member['yz_member']['parent_id']}}">
<div class=' input-group-addon' style="border-left: 1px solid #cccccc" id="parent_info">[{{$member['yz_member']['parent_id']}}]{{$parent_name}}</div>
<div class='input-group-btn'><a class='btn btn-success'
href="javascript:;" id="change_relation">修改</a>
</div>
<span class="help-block"> <a href="javascript:;" id="members_record">修改记录</a></span>
</div>
<span class="help-block">手动修改关系链可能会造成会员关系链异常,从而会导致分红、分销问题,请谨慎修改</span>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">会员邀请码</label>
<div class="col-sm-6 col-xs-6">
<div class='input-group'>
<input type='text' name='data[invite_code]' class="form-control"
value="{{$member['yz_member']['invite_code']}}"/>
</div>
<div><span>会员邀请码须8个字符</span></div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">黑名单</label>
<div class="col-sm-9 col-xs-12">
<label class="radio-inline"><input type="radio" name="data[is_black]" value="1"
@if($member['yz_member']['is_black']==1)
checked
@endif>是</label>
<label class="radio-inline"><input type="radio" name="data[is_black]" value="0"
@if($member['yz_member']['is_black']==0)
checked
@endif>否</label>
<span class="help-block">设置黑名单后,此会员无法访问商城</span>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">备注</label>
<div class="col-sm-9 col-xs-12">
<textarea name="data[content]"
class='form-control'>{{$member['yz_member']['content']}}</textarea>
</div>
</div>
</div>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label">{{$set['custom_title']}}</label>
<div class="col-sm-9 col-xs-12">
<input type="text" name="data[custom_value]" class="form-control"
value="{{$member['yz_member']['custom_value']}}"/>
</div>
</div>
<div class='panel-body'>
<div class="form-group">
<label class="col-xs-12 col-sm-3 col-md-2 control-label"></label>
<div class="col-sm-9 col-xs-12">
<input type="submit" name="submit" value="提交" class="btn btn-success"/>
<input type="hidden" name="token" value="{{$var['token']}}"/>
<input type="button" class="btn btn-default" name="submit" onclick="history.go(-1)"
value="返回" style='margin-left:10px;'/>
</div>
</div>
</div>
</div>
</form>
</div>
</div>
<div class="form-group">
<div class="col-sm-9">
<div id="modal-module-menus-members" class="modal fade" tabindex="-1">
<div class="modal-dialog" style='width: 920px;'>
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal"
class="close" type="button">
×
</button>
<h3>选择会员</h3></div>
<div class="modal-body">
<div class="row">
<div class="input-group">
<input type="text" class="form-control"
name="keyword" value=""
id="search-kwd-members"
placeholder="请输入会员ID"/>
<span class='input-group-btn'>
<button type="button" class="btn btn-default"
onclick="search_members();">搜索
</button></span>
</div>
</div>
<div id="module-menus-members"
style="padding-top:5px;"></div>
</div>
<div class="modal-footer"><a href="#"
class="btn btn-default"
data-dismiss="modal"
aria-hidden="true">关闭</a>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="form-group">
<div class="col-sm-9">
<div id="modal-module-members-record" class="modal fade" tabindex="-1">
<div class="modal-dialog" style='width: 920px;'>
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal"
class="close" type="button">
×
</button>
<h3>修改记录</h3></div>
<div class="modal-body">
<div id="module-members-record"
style="padding-top:5px;"></div>
</div>
<div class="modal-footer"><a href="#"
class="btn btn-default"
data-dismiss="modal"
aria-hidden="true">关闭</a>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
$(function () {
$('#change_relation').click(function () {
$('#modal-module-menus-members').modal();
});
$('#members_record').click(function() {
$('#modal-module-members-record').modal();
$.get('{!! yzWebUrl('member.member.member_record') !!}', {
member: '{{$member['yz_member']['member_id']}}'
}, function (dat) {
$('#module-members-record').html(dat);
}
);
});
});
function search_members() {
if ($.trim($('#search-kwd-members').val()) == '') {
Tip.focus('#search-kwd-members', '请输入关键词');
return;
}
$("#module-menus-members").html("正在搜索....");
$.get('{!! yzWebUrl('member.member.search_member') !!}', {
parent: $.trim($('#search-kwd-members').val()),
}, function (dat) {
if (dat != '') {
$('#module-menus-members').html(dat);
} else {
$("#modal-module-menus-members .close").click();
}
}
);
}
function select_member(o) {
$.get('{!! yzWebUrl('member.member.change_relation') !!}', {
parent: $.trim(o.uid),
member: '{{$member['yz_member']['member_id']}}'
}, function (dat) {
if (1 == dat.status) {
$("#parent_info").html("[" + o.uid + "]" + o.nickname);
$('#parent_id').val(o.uid);
}
$("#modal-module-menus-members .close").click();
}
);
}
</script>
@endsection
|
{
"pile_set_name": "Github"
}
|
/* Copyright (c) 2010 People Power Co.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
* - Neither the name of the People Power Corporation nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* PEOPLE POWER CO. OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE
*
*/
#include "PppPrintf.h"
#include <stdio.h>
/** Implement putchar() in a way that transfers the data in packets
* over PPP. */
module PppPrintfP {
provides {
interface PppProtocol;
interface Putchar;
}
uses {
interface Ppp;
}
} implementation {
bool disabled__;
enum {
Protocol = PppProtocol_Printf,
};
#if 255 >= PPP_PRINTF_MAX_BUFFER
typedef uint8_t bufferIndex_t;
#else /* PPP_PRINTF_MAX_BUFFER */
typedef uint16_t bufferIndex_t;
#endif /* PPP_PRINTF_MAX_BUFFER */
char buffer_[PPP_PRINTF_MAX_BUFFER];
bufferIndex_t bufferIndex_;
frame_key_t activeKey_;
task void sendBuffer_task ()
{
const uint8_t* fpe;
frame_key_t key;
uint8_t* fp;
unsigned int tx_length;
error_t rc;
if (activeKey_) {
return;
}
fp = call Ppp.getOutputFrame(Protocol, &fpe, FALSE, &key);
if (fp == 0) {
post sendBuffer_task();
return;
}
atomic {
tx_length = fpe - fp - 1;
if (bufferIndex_ < tx_length) {
tx_length = bufferIndex_;
}
*fp++ = tx_length;
memmove(fp, buffer_, tx_length);
fp += tx_length;
bufferIndex_ -= tx_length;
if (0 < bufferIndex_) {
memcpy(buffer_, buffer_ + tx_length, bufferIndex_);
}
}
rc = call Ppp.fixOutputFrameLength(key, fp);
if (SUCCESS == rc) {
rc = call Ppp.sendOutputFrame(key);
}
if (SUCCESS == rc) {
activeKey_ = key;
}
}
command unsigned int PppProtocol.getProtocol () { return Protocol; }
command error_t PppProtocol.process (const uint8_t* information,
unsigned int information_length)
{
return FAIL;
}
command error_t PppProtocol.rejectedByPeer (const uint8_t* data,
const uint8_t* data_end)
{
/* If we've been fed a rejected message, disable this protocol. */
atomic disabled__ = (0 != data);
return SUCCESS;
}
event void Ppp.outputFrameTransmitted (frame_key_t key,
error_t err)
{
atomic {
if (activeKey_ == key) {
activeKey_ = 0;
if ((! disabled__) && (0 < bufferIndex_)) {
post sendBuffer_task();
}
}
}
}
#undef putchar
command int Putchar.putchar (int c)
{
atomic {
if ((! disabled__) && (bufferIndex_ < sizeof(buffer_))) {
buffer_[bufferIndex_++] = c;
post sendBuffer_task();
}
}
return c;
}
}
|
{
"pile_set_name": "Github"
}
|
{
"author": {
"name": "Mikeal Rogers",
"email": "[email protected]",
"url": "http://www.futurealoof.com"
},
"name": "oauth-sign",
"description": "OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module.",
"version": "0.4.0",
"repository": {
"url": "https://github.com/mikeal/oauth-sign"
},
"main": "index.js",
"dependencies": {},
"devDependencies": {},
"optionalDependencies": {},
"engines": {
"node": "*"
},
"scripts": {
"test": "node test.js"
},
"bugs": {
"url": "https://github.com/mikeal/oauth-sign/issues"
},
"_id": "[email protected]",
"dist": {
"shasum": "f22956f31ea7151a821e5f2fb32c113cad8b9f69",
"tarball": "http://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz"
},
"_from": "oauth-sign@>=0.4.0 <0.5.0",
"_npmVersion": "1.3.2",
"_npmUser": {
"name": "mikeal",
"email": "[email protected]"
},
"maintainers": [
{
"name": "mikeal",
"email": "[email protected]"
}
],
"directories": {},
"_shasum": "f22956f31ea7151a821e5f2fb32c113cad8b9f69",
"_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz",
"readme": "ERROR: No README data found!",
"homepage": "https://github.com/mikeal/oauth-sign"
}
|
{
"pile_set_name": "Github"
}
|
http://cvs.savannah.gnu.org/viewvc/fastjar/jartool.c?root=fastjar&r1=1.60&r2=1.61&view=patch
2010-06-10 Chris Ball <[email protected]>
* jartool.c (add_file_to_jar): Fix write return value check.
--- a/jartool.c 2010/03/01 15:38:43 1.60
+++ b/jartool.c 2010/06/10 08:46:10 1.61
@@ -1258,7 +1258,7 @@
exit_on_error("write");
/* write the file name to the zip file */
- if (1 == write(jfd, fname, file_name_length))
+ if (-1 == write(jfd, fname, file_name_length))
exit_on_error("write");
if(verbose){
|
{
"pile_set_name": "Github"
}
|
#region License
// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project. If not, see
// <http://www.gnu.org/licenses/>.
#endregion
using System;
namespace ClearCanvas.ImageServer.Common.Exceptions
{
/// <summary>
/// Represents the exception thrown when the study is online but the filesystem is missing or not writable.
/// </summary>
public class FilesystemNotWritableException : SopInstanceProcessingException
{
public string Path { get; set; }
public string Reason { get; set; }
public FilesystemNotWritableException()
: base("Study is online but the filesystem is no longer writable.")
{
}
public FilesystemNotWritableException(string path)
: base(String.Format("Filesystem is not writable: {0}", path))
{
Path = path;
}
public override string ToString()
{
return string.Format("{0} : {1}", Path, Reason);
}
}
}
|
{
"pile_set_name": "Github"
}
|
/*
* Please do not edit this file.
* It was generated using rpcgen.
*/
#ifndef _KEY_PROT_H_RPCGEN
#define _KEY_PROT_H_RPCGEN
#include <rpc/rpc.h>
/* Copyright (c) 2010, Oracle America, Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
* * Neither the name of the "Oracle America, Inc." nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Compiled from key_prot.x using rpcgen.
* DO NOT EDIT THIS FILE!
* This is NOT source code!
*/
#define PROOT 3
#define HEXMODULUS "d4a0ba0250b6fd2ec626e7efd637df76c716e22d0944b88b"
#define HEXKEYBYTES 48
#define KEYSIZE 192
#define KEYBYTES 24
#define KEYCHECKSUMSIZE 16
enum keystatus {
KEY_SUCCESS = 0,
KEY_NOSECRET = 1,
KEY_UNKNOWN = 2,
KEY_SYSTEMERR = 3,
};
typedef enum keystatus keystatus;
#ifdef __cplusplus
extern "C" bool_t xdr_keystatus(XDR *, keystatus*);
#elif __STDC__
extern bool_t xdr_keystatus(XDR *, keystatus*);
#else /* Old Style C */
bool_t xdr_keystatus();
#endif /* Old Style C */
typedef char keybuf[HEXKEYBYTES];
#ifdef __cplusplus
extern "C" bool_t xdr_keybuf(XDR *, keybuf);
#elif __STDC__
extern bool_t xdr_keybuf(XDR *, keybuf);
#else /* Old Style C */
bool_t xdr_keybuf();
#endif /* Old Style C */
typedef char *netnamestr;
#ifdef __cplusplus
extern "C" bool_t xdr_netnamestr(XDR *, netnamestr*);
#elif __STDC__
extern bool_t xdr_netnamestr(XDR *, netnamestr*);
#else /* Old Style C */
bool_t xdr_netnamestr();
#endif /* Old Style C */
struct cryptkeyarg {
netnamestr remotename;
des_block deskey;
};
typedef struct cryptkeyarg cryptkeyarg;
#ifdef __cplusplus
extern "C" bool_t xdr_cryptkeyarg(XDR *, cryptkeyarg*);
#elif __STDC__
extern bool_t xdr_cryptkeyarg(XDR *, cryptkeyarg*);
#else /* Old Style C */
bool_t xdr_cryptkeyarg();
#endif /* Old Style C */
struct cryptkeyarg2 {
netnamestr remotename;
netobj remotekey;
des_block deskey;
};
typedef struct cryptkeyarg2 cryptkeyarg2;
#ifdef __cplusplus
extern "C" bool_t xdr_cryptkeyarg2(XDR *, cryptkeyarg2*);
#elif __STDC__
extern bool_t xdr_cryptkeyarg2(XDR *, cryptkeyarg2*);
#else /* Old Style C */
bool_t xdr_cryptkeyarg2();
#endif /* Old Style C */
struct cryptkeyres {
keystatus status;
union {
des_block deskey;
} cryptkeyres_u;
};
typedef struct cryptkeyres cryptkeyres;
#ifdef __cplusplus
extern "C" bool_t xdr_cryptkeyres(XDR *, cryptkeyres*);
#elif __STDC__
extern bool_t xdr_cryptkeyres(XDR *, cryptkeyres*);
#else /* Old Style C */
bool_t xdr_cryptkeyres();
#endif /* Old Style C */
#define MAXGIDS 16
struct unixcred {
u_int uid;
u_int gid;
struct {
u_int gids_len;
u_int *gids_val;
} gids;
};
typedef struct unixcred unixcred;
#ifdef __cplusplus
extern "C" bool_t xdr_unixcred(XDR *, unixcred*);
#elif __STDC__
extern bool_t xdr_unixcred(XDR *, unixcred*);
#else /* Old Style C */
bool_t xdr_unixcred();
#endif /* Old Style C */
struct getcredres {
keystatus status;
union {
unixcred cred;
} getcredres_u;
};
typedef struct getcredres getcredres;
#ifdef __cplusplus
extern "C" bool_t xdr_getcredres(XDR *, getcredres*);
#elif __STDC__
extern bool_t xdr_getcredres(XDR *, getcredres*);
#else /* Old Style C */
bool_t xdr_getcredres();
#endif /* Old Style C */
struct key_netstarg {
keybuf st_priv_key;
keybuf st_pub_key;
netnamestr st_netname;
};
typedef struct key_netstarg key_netstarg;
#ifdef __cplusplus
extern "C" bool_t xdr_key_netstarg(XDR *, key_netstarg*);
#elif __STDC__
extern bool_t xdr_key_netstarg(XDR *, key_netstarg*);
#else /* Old Style C */
bool_t xdr_key_netstarg();
#endif /* Old Style C */
struct key_netstres {
keystatus status;
union {
key_netstarg knet;
} key_netstres_u;
};
typedef struct key_netstres key_netstres;
#ifdef __cplusplus
extern "C" bool_t xdr_key_netstres(XDR *, key_netstres*);
#elif __STDC__
extern bool_t xdr_key_netstres(XDR *, key_netstres*);
#else /* Old Style C */
bool_t xdr_key_netstres();
#endif /* Old Style C */
#ifndef opaque
#define opaque char
#endif
#define KEY_PROG ((u_long)100029)
#define KEY_VERS ((u_long)1)
#ifdef __cplusplus
#define KEY_SET ((u_long)1)
extern "C" keystatus * key_set_1(opaque *, CLIENT *);
extern "C" keystatus * key_set_1_svc(opaque *, struct svc_req *);
#define KEY_ENCRYPT ((u_long)2)
extern "C" cryptkeyres * key_encrypt_1(cryptkeyarg *, CLIENT *);
extern "C" cryptkeyres * key_encrypt_1_svc(cryptkeyarg *, struct svc_req *);
#define KEY_DECRYPT ((u_long)3)
extern "C" cryptkeyres * key_decrypt_1(cryptkeyarg *, CLIENT *);
extern "C" cryptkeyres * key_decrypt_1_svc(cryptkeyarg *, struct svc_req *);
#define KEY_GEN ((u_long)4)
extern "C" des_block * key_gen_1(void *, CLIENT *);
extern "C" des_block * key_gen_1_svc(void *, struct svc_req *);
#define KEY_GETCRED ((u_long)5)
extern "C" getcredres * key_getcred_1(netnamestr *, CLIENT *);
extern "C" getcredres * key_getcred_1_svc(netnamestr *, struct svc_req *);
#elif __STDC__
#define KEY_SET ((u_long)1)
extern keystatus * key_set_1(opaque *, CLIENT *);
extern keystatus * key_set_1_svc(opaque *, struct svc_req *);
#define KEY_ENCRYPT ((u_long)2)
extern cryptkeyres * key_encrypt_1(cryptkeyarg *, CLIENT *);
extern cryptkeyres * key_encrypt_1_svc(cryptkeyarg *, struct svc_req *);
#define KEY_DECRYPT ((u_long)3)
extern cryptkeyres * key_decrypt_1(cryptkeyarg *, CLIENT *);
extern cryptkeyres * key_decrypt_1_svc(cryptkeyarg *, struct svc_req *);
#define KEY_GEN ((u_long)4)
extern des_block * key_gen_1(void *, CLIENT *);
extern des_block * key_gen_1_svc(void *, struct svc_req *);
#define KEY_GETCRED ((u_long)5)
extern getcredres * key_getcred_1(netnamestr *, CLIENT *);
extern getcredres * key_getcred_1_svc(netnamestr *, struct svc_req *);
#else /* Old Style C */
#define KEY_SET ((u_long)1)
extern keystatus * key_set_1();
extern keystatus * key_set_1_svc();
#define KEY_ENCRYPT ((u_long)2)
extern cryptkeyres * key_encrypt_1();
extern cryptkeyres * key_encrypt_1_svc();
#define KEY_DECRYPT ((u_long)3)
extern cryptkeyres * key_decrypt_1();
extern cryptkeyres * key_decrypt_1_svc();
#define KEY_GEN ((u_long)4)
extern des_block * key_gen_1();
extern des_block * key_gen_1_svc();
#define KEY_GETCRED ((u_long)5)
extern getcredres * key_getcred_1();
extern getcredres * key_getcred_1_svc();
#endif /* Old Style C */
#define KEY_VERS2 ((u_long)2)
#ifdef __cplusplus
extern "C" keystatus * key_set_2(opaque *, CLIENT *);
extern "C" keystatus * key_set_2_svc(opaque *, struct svc_req *);
extern "C" cryptkeyres * key_encrypt_2(cryptkeyarg *, CLIENT *);
extern "C" cryptkeyres * key_encrypt_2_svc(cryptkeyarg *, struct svc_req *);
extern "C" cryptkeyres * key_decrypt_2(cryptkeyarg *, CLIENT *);
extern "C" cryptkeyres * key_decrypt_2_svc(cryptkeyarg *, struct svc_req *);
extern "C" des_block * key_gen_2(void *, CLIENT *);
extern "C" des_block * key_gen_2_svc(void *, struct svc_req *);
extern "C" getcredres * key_getcred_2(netnamestr *, CLIENT *);
extern "C" getcredres * key_getcred_2_svc(netnamestr *, struct svc_req *);
#define KEY_ENCRYPT_PK ((u_long)6)
extern "C" cryptkeyres * key_encrypt_pk_2(cryptkeyarg2 *, CLIENT *);
extern "C" cryptkeyres * key_encrypt_pk_2_svc(cryptkeyarg2 *, struct svc_req *);
#define KEY_DECRYPT_PK ((u_long)7)
extern "C" cryptkeyres * key_decrypt_pk_2(cryptkeyarg2 *, CLIENT *);
extern "C" cryptkeyres * key_decrypt_pk_2_svc(cryptkeyarg2 *, struct svc_req *);
#define KEY_NET_PUT ((u_long)8)
extern "C" keystatus * key_net_put_2(key_netstarg *, CLIENT *);
extern "C" keystatus * key_net_put_2_svc(key_netstarg *, struct svc_req *);
#define KEY_NET_GET ((u_long)9)
extern "C" key_netstres * key_net_get_2(void *, CLIENT *);
extern "C" key_netstres * key_net_get_2_svc(void *, struct svc_req *);
#define KEY_GET_CONV ((u_long)10)
extern "C" cryptkeyres * key_get_conv_2(opaque *, CLIENT *);
extern "C" cryptkeyres * key_get_conv_2_svc(opaque *, struct svc_req *);
#elif __STDC__
extern keystatus * key_set_2(opaque *, CLIENT *);
extern keystatus * key_set_2_svc(opaque *, struct svc_req *);
extern cryptkeyres * key_encrypt_2(cryptkeyarg *, CLIENT *);
extern cryptkeyres * key_encrypt_2_svc(cryptkeyarg *, struct svc_req *);
extern cryptkeyres * key_decrypt_2(cryptkeyarg *, CLIENT *);
extern cryptkeyres * key_decrypt_2_svc(cryptkeyarg *, struct svc_req *);
extern des_block * key_gen_2(void *, CLIENT *);
extern des_block * key_gen_2_svc(void *, struct svc_req *);
extern getcredres * key_getcred_2(netnamestr *, CLIENT *);
extern getcredres * key_getcred_2_svc(netnamestr *, struct svc_req *);
#define KEY_ENCRYPT_PK ((u_long)6)
extern cryptkeyres * key_encrypt_pk_2(cryptkeyarg2 *, CLIENT *);
extern cryptkeyres * key_encrypt_pk_2_svc(cryptkeyarg2 *, struct svc_req *);
#define KEY_DECRYPT_PK ((u_long)7)
extern cryptkeyres * key_decrypt_pk_2(cryptkeyarg2 *, CLIENT *);
extern cryptkeyres * key_decrypt_pk_2_svc(cryptkeyarg2 *, struct svc_req *);
#define KEY_NET_PUT ((u_long)8)
extern keystatus * key_net_put_2(key_netstarg *, CLIENT *);
extern keystatus * key_net_put_2_svc(key_netstarg *, struct svc_req *);
#define KEY_NET_GET ((u_long)9)
extern key_netstres * key_net_get_2(void *, CLIENT *);
extern key_netstres * key_net_get_2_svc(void *, struct svc_req *);
#define KEY_GET_CONV ((u_long)10)
extern cryptkeyres * key_get_conv_2(opaque *, CLIENT *);
extern cryptkeyres * key_get_conv_2_svc(opaque *, struct svc_req *);
#else /* Old Style C */
extern keystatus * key_set_2();
extern keystatus * key_set_2_svc();
extern cryptkeyres * key_encrypt_2();
extern cryptkeyres * key_encrypt_2_svc();
extern cryptkeyres * key_decrypt_2();
extern cryptkeyres * key_decrypt_2_svc();
extern des_block * key_gen_2();
extern des_block * key_gen_2_svc();
extern getcredres * key_getcred_2();
extern getcredres * key_getcred_2_svc();
#define KEY_ENCRYPT_PK ((u_long)6)
extern cryptkeyres * key_encrypt_pk_2();
extern cryptkeyres * key_encrypt_pk_2_svc();
#define KEY_DECRYPT_PK ((u_long)7)
extern cryptkeyres * key_decrypt_pk_2();
extern cryptkeyres * key_decrypt_pk_2_svc();
#define KEY_NET_PUT ((u_long)8)
extern keystatus * key_net_put_2();
extern keystatus * key_net_put_2_svc();
#define KEY_NET_GET ((u_long)9)
extern key_netstres * key_net_get_2();
extern key_netstres * key_net_get_2_svc();
#define KEY_GET_CONV ((u_long)10)
extern cryptkeyres * key_get_conv_2();
extern cryptkeyres * key_get_conv_2_svc();
#endif /* Old Style C */
#endif /* !_KEY_PROT_H_RPCGEN */
|
{
"pile_set_name": "Github"
}
|
/*!
* Bootstrap Grunt task for Glyphicons data generation
* http://getbootstrap.com
* Copyright 2014-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
*/
'use strict';
var fs = require('fs');
module.exports = function generateGlyphiconsData(grunt) {
// Pass encoding, utf8, so `readFileSync` will return a string instead of a
// buffer
var glyphiconsFile = fs.readFileSync('less/glyphicons.less', 'utf8');
var glyphiconsLines = glyphiconsFile.split('\n');
// Use any line that starts with ".glyphicon-" and capture the class name
var iconClassName = /^\.(glyphicon-[a-zA-Z0-9-]+)/;
var glyphiconsData = '# This file is generated via Grunt task. **Do not edit directly.**\n' +
'# See the \'build-glyphicons-data\' task in Gruntfile.js.\n\n';
var glyphiconsYml = 'docs/_data/glyphicons.yml';
for (var i = 0, len = glyphiconsLines.length; i < len; i++) {
var match = glyphiconsLines[i].match(iconClassName);
if (match !== null) {
glyphiconsData += '- ' + match[1] + '\n';
}
}
// Create the `_data` directory if it doesn't already exist
if (!fs.existsSync('docs/_data')) {
fs.mkdirSync('docs/_data');
}
try {
fs.writeFileSync(glyphiconsYml, glyphiconsData);
} catch (err) {
grunt.fail.warn(err);
}
grunt.log.writeln('File ' + glyphiconsYml.cyan + ' created.');
};
|
{
"pile_set_name": "Github"
}
|
# Source library for shell script tests
#
# Copyright (C) 2016 Alexander Larsson <[email protected]>
# Copyright (C) 2011 Colin Walters <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
if [ -n "${G_TEST_SRCDIR:-}" ]; then
test_srcdir="${G_TEST_SRCDIR}"
else
test_srcdir=$(dirname $0)
fi
if [ -n "${G_TEST_BUILDDIR:-}" ]; then
test_builddir="${G_TEST_BUILDDIR}"
else
test_builddir=$(dirname $0)
fi
assert_not_reached () {
echo $@ 1>&2; exit 1
}
test_tmpdir=$(pwd)
# Sanity check that we're in a tmpdir that has
# just .testtmp (created by tap-driver for `make check`),
# or nothing at all (as ginstest-runner does)
if ! test -f .testtmp; then
files=$(ls)
if test -n "${files}"; then
ls -l
assert_not_reached "test tmpdir=${test_tmpdir} is not empty; run this test via \`make check TESTS=\`, not directly"
fi
# Remember that this is an acceptable test $(pwd), for the benefit of
# C and JS tests which may source this file again
touch .testtmp
fi
export G_DEBUG=fatal-warnings
# Also, unbreak `tar` inside `make check`...Automake will inject
# TAR_OPTIONS: --owner=0 --group=0 --numeric-owner presumably so that
# tarballs are predictable, except we don't want this in our tests.
unset TAR_OPTIONS
if test -n "${OT_TESTS_DEBUG:-}"; then
set -x
fi
if test -n "${OT_TESTS_VALGRIND:-}"; then
CMD_PREFIX="env G_SLICE=always-malloc valgrind -q --leak-check=full --num-callers=30 --suppressions=${test_srcdir}/flatpak-valgrind.supp"
else
CMD_PREFIX=""
fi
# We need this to be in /var/tmp because /tmp has no xattr support
TEST_DATA_DIR=`mktemp -d /var/tmp/test-flatpak-XXXXXX`
mkdir -p ${TEST_DATA_DIR}/home
mkdir -p ${TEST_DATA_DIR}/system
export FLATPAK_SYSTEM_DIR=${TEST_DATA_DIR}/system
export FLATPAK_SYSTEM_HELPER_ON_SESSION=1
export XDG_DATA_HOME=${TEST_DATA_DIR}/home/share
export USERDIR=${TEST_DATA_DIR}/home/share/flatpak
export SYSTEMDIR=${TEST_DATA_DIR}/system
export ARCH=`flatpak --default-arch`
if [ x${USE_SYSTEMDIR-} == xyes ] ; then
export FL_DIR=${SYSTEMDIR}
export U=
else
export FL_DIR=${USERDIR}
export U="--user"
fi
export FLATPAK="${CMD_PREFIX} flatpak"
assert_streq () {
test "$1" = "$2" || (echo 1>&2 "$1 != $2"; exit 1)
}
assert_not_streq () {
(! test "$1" = "$2") || (echo 1>&2 "$1 == $2"; exit 1)
}
assert_has_file () {
test -f "$1" || (echo 1>&2 "Couldn't find '$1'"; exit 1)
}
assert_has_symlink () {
test -L "$1" || (echo 1>&2 "Couldn't find '$1'"; exit 1)
}
assert_has_dir () {
test -d "$1" || (echo 1>&2 "Couldn't find '$1'"; exit 1)
}
assert_not_has_file () {
if test -f "$1"; then
sed -e 's/^/# /' < "$1" >&2
echo 1>&2 "File '$1' exists"
exit 1
fi
}
assert_not_file_has_content () {
if grep -q -e "$2" "$1"; then
sed -e 's/^/# /' < "$1" >&2
echo 1>&2 "File '$1' incorrectly matches regexp '$2'"
exit 1
fi
}
assert_not_has_dir () {
if test -d "$1"; then
echo 1>&2 "Directory '$1' exists"; exit 1
fi
}
assert_file_has_content () {
if ! grep -q -e "$2" "$1"; then
sed -e 's/^/# /' < "$1" >&2
echo 1>&2 "File '$1' doesn't match regexp '$2'"
exit 1
fi
}
assert_symlink_has_content () {
if ! readlink "$1" | grep -q -e "$2"; then
readlink "$1" |sed -e 's/^/# /' >&2
echo 1>&2 "Symlink '$1' doesn't match regexp '$2'"
exit 1
fi
}
assert_file_empty() {
if test -s "$1"; then
sed -e 's/^/# /' < "$1" >&2
echo 1>&2 "File '$1' is not empty"
exit 1
fi
}
export FL_GPG_HOMEDIR=${TEST_DATA_DIR}/gpghome
mkdir -p ${FL_GPG_HOMEDIR}
# This needs to be writable, so copy the keys
cp $(dirname $0)/test-keyring/*.gpg ${FL_GPG_HOMEDIR}/
export FL_GPG_ID=7B0961FD
export FL_GPGARGS="--gpg-homedir=${FL_GPG_HOMEDIR} --gpg-sign=${FL_GPG_ID}"
setup_repo () {
GPGARGS="$FL_GPGARGS" . $(dirname $0)/make-test-runtime.sh org.test.Platform bash ls cat echo readlink > /dev/null
GPGARGS="$FL_GPGARGS" . $(dirname $0)/make-test-app.sh > /dev/null
flatpak remote-add ${U} --gpg-import=${FL_GPG_HOMEDIR}/pubring.gpg test-repo repo
}
make_updated_app () {
GPGARGS="$FL_GPGARGS" . $(dirname $0)/make-test-app.sh UPDATED > /dev/null
}
setup_sdk_repo () {
GPGARGS="$FL_GPGARGS" . $(dirname $0)/make-test-runtime.sh org.test.Sdk bash ls cat echo readlink make mkdir cp touch > /dev/null
}
install_repo () {
${FLATPAK} ${U} install test-repo org.test.Platform master
${FLATPAK} ${U} install test-repo org.test.Hello master
}
install_sdk_repo () {
${FLATPAK} ${U} install test-repo org.test.Sdk master
}
run () {
${CMD_PREFIX} flatpak run "$@"
}
run_sh () {
${CMD_PREFIX} flatpak run --command=bash ${ARGS-} org.test.Hello -c "$*"
}
skip_without_bwrap () {
if [ -z "${FLATPAK_BWRAP:-}" ]; then
# running installed-tests: assume we know what we're doing
:
elif ! "$FLATPAK_BWRAP" --ro-bind / / /bin/true > bwrap-result 2>&1; then
sed -e 's/^/# /' < bwrap-result
echo "1..0 # SKIP Cannot run bwrap"
exit 0
fi
}
sed s#@testdir@#${test_builddir}# ${test_srcdir}/session.conf.in > session.conf
eval `dbus-launch --config-file=session.conf --sh-syntax`
trap "rm -rf $TEST_DATA_DIR; /bin/kill $DBUS_SESSION_BUS_PID" EXIT
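# Illustrative sketch (not part of this library): a minimal test that sources
# these helpers. The file name, command and expected output below are
# assumptions for illustration, not taken from the real test suite.
#
#   #!/bin/bash
#   . $(dirname $0)/libtest.sh
#
#   echo "1..1"
#
#   setup_repo
#   install_repo
#
#   run_sh 'echo hello-from-sandbox' > run-output
#   assert_file_has_content run-output hello-from-sandbox
#   assert_has_dir ${FL_DIR}/app/org.test.Hello
#
#   echo "ok install and run"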
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2011 Joakim Sindholt <[email protected]>
* Copyright 2015 Patrick Rudolph <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* on the rights to use, copy, modify, merge, publish, distribute, sub
* license, and/or sell copies of the Software, and to permit persons to whom
* the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
* THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
* USE OR OTHER DEALINGS IN THE SOFTWARE. */
#include "buffer9.h"
#include "device9.h"
#include "nine_buffer_upload.h"
#include "nine_helpers.h"
#include "nine_pipe.h"
#include "pipe/p_screen.h"
#include "pipe/p_context.h"
#include "pipe/p_state.h"
#include "pipe/p_defines.h"
#include "pipe/p_format.h"
#include "util/u_box.h"
#include "util/u_inlines.h"
#define DBG_CHANNEL (DBG_INDEXBUFFER|DBG_VERTEXBUFFER)
HRESULT
NineBuffer9_ctor( struct NineBuffer9 *This,
struct NineUnknownParams *pParams,
D3DRESOURCETYPE Type,
DWORD Usage,
UINT Size,
D3DPOOL Pool )
{
struct pipe_resource *info = &This->base.info;
HRESULT hr;
DBG("This=%p Size=0x%x Usage=%x Pool=%u\n", This, Size, Usage, Pool);
user_assert(Pool != D3DPOOL_SCRATCH, D3DERR_INVALIDCALL);
This->maps = MALLOC(sizeof(struct NineTransfer));
if (!This->maps)
return E_OUTOFMEMORY;
This->nmaps = 0;
This->maxmaps = 1;
This->size = Size;
info->screen = pParams->device->screen;
info->target = PIPE_BUFFER;
info->format = PIPE_FORMAT_R8_UNORM;
info->width0 = Size;
info->flags = 0;
/* Note: WRITEONLY is just a hint for resource placement, the resource
* can still be read (but slower). */
info->bind = PIPE_BIND_VERTEX_BUFFER;
/* It is hard to find clear information on where to place the buffer in
* memory depending on the flag.
* MSDN: resources are static, except for those with DYNAMIC, which is why you
* can only use DISCARD on them.
* ATI doc: The driver has the liberty it wants for having things static
* or not.
* MANAGED: Ram + uploads to Vram copy at unlock (msdn and nvidia doc say
* at first draw call using the buffer)
* DEFAULT + Usage = 0 => System memory backing for easy read access
* (That doc is very unclear on the details, like whether some copies to
* vram copy are involved or not).
* DEFAULT + WRITEONLY => Vram
* DEFAULT + WRITEONLY + DYNAMIC => Either Vram buffer or GTT_WC, depending on what the driver wants.
*/
if (Pool == D3DPOOL_SYSTEMMEM)
info->usage = PIPE_USAGE_STAGING;
else if (Pool == D3DPOOL_MANAGED)
info->usage = PIPE_USAGE_DEFAULT;
else if (Usage & D3DUSAGE_DYNAMIC && Usage & D3DUSAGE_WRITEONLY)
info->usage = PIPE_USAGE_STREAM;
else if (Usage & D3DUSAGE_WRITEONLY)
info->usage = PIPE_USAGE_DEFAULT;
/* For the remaining two, PIPE_USAGE_STAGING would probably be
* a good fit according to the doc. However it seems rather a mistake
* from apps to use these (mistakes that do really happen). Try
* to put the flags that are the best compromise between the real
* behaviour and what buggy apps should get for better performance. */
else if (Usage & D3DUSAGE_DYNAMIC)
info->usage = PIPE_USAGE_STREAM;
else
info->usage = PIPE_USAGE_DYNAMIC;
/* When Writeonly is not set, we don't want to enable the
* optimizations */
This->discard_nooverwrite_only = !!(Usage & D3DUSAGE_WRITEONLY) &&
pParams->device->buffer_upload;
/* if (pDesc->Usage & D3DUSAGE_DONOTCLIP) { } */
/* if (pDesc->Usage & D3DUSAGE_NONSECURE) { } */
/* if (pDesc->Usage & D3DUSAGE_NPATCHES) { } */
/* if (pDesc->Usage & D3DUSAGE_POINTS) { } */
/* if (pDesc->Usage & D3DUSAGE_RTPATCHES) { } */
/* The buffer must be usable with both sw and hw
* vertex processing. It is expected to be slower with hw. */
if (Usage & D3DUSAGE_SOFTWAREPROCESSING)
info->usage = PIPE_USAGE_STAGING;
/* if (pDesc->Usage & D3DUSAGE_TEXTAPI) { } */
info->height0 = 1;
info->depth0 = 1;
info->array_size = 1;
info->last_level = 0;
info->nr_samples = 0;
hr = NineResource9_ctor(&This->base, pParams, NULL, TRUE,
Type, Pool, Usage);
if (FAILED(hr))
return hr;
if (Pool == D3DPOOL_MANAGED) {
This->managed.data = align_calloc(
nine_format_get_level_alloc_size(This->base.info.format,
Size, 1, 0), 32);
if (!This->managed.data)
return E_OUTOFMEMORY;
memset(This->managed.data, 0, Size);
This->managed.dirty = TRUE;
u_box_1d(0, Size, &This->managed.dirty_box);
list_inithead(&This->managed.list);
list_inithead(&This->managed.list2);
list_add(&This->managed.list2, &pParams->device->managed_buffers);
}
return D3D_OK;
}
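/* Illustrative sketch only (not used by the driver): the D3DPOOL/Usage to
 * pipe usage mapping chosen in the constructor above, restated as a
 * standalone helper. The SOFTWAREPROCESSING case is checked first here, which
 * is equivalent to the unconditional override applied last above. */
static unsigned
nine_buffer_pipe_usage_sketch( D3DPOOL Pool, DWORD Usage )
{
    if (Usage & D3DUSAGE_SOFTWAREPROCESSING)
        return PIPE_USAGE_STAGING;
    if (Pool == D3DPOOL_SYSTEMMEM)
        return PIPE_USAGE_STAGING;
    if (Pool == D3DPOOL_MANAGED)
        return PIPE_USAGE_DEFAULT;
    if ((Usage & D3DUSAGE_DYNAMIC) && (Usage & D3DUSAGE_WRITEONLY))
        return PIPE_USAGE_STREAM;
    if (Usage & D3DUSAGE_WRITEONLY)
        return PIPE_USAGE_DEFAULT;
    if (Usage & D3DUSAGE_DYNAMIC)
        return PIPE_USAGE_STREAM;
    return PIPE_USAGE_DYNAMIC;
}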
void
NineBuffer9_dtor( struct NineBuffer9 *This )
{
DBG("This=%p\n", This);
if (This->maps) {
while (This->nmaps) {
NineBuffer9_Unlock(This);
}
FREE(This->maps);
}
if (This->base.pool == D3DPOOL_MANAGED) {
if (This->managed.data)
align_free(This->managed.data);
if (This->managed.list.prev != NULL && This->managed.list.next != NULL)
list_del(&This->managed.list);
if (This->managed.list2.prev != NULL && This->managed.list2.next != NULL)
list_del(&This->managed.list2);
}
if (This->buf)
nine_upload_release_buffer(This->base.base.device->buffer_upload, This->buf);
NineResource9_dtor(&This->base);
}
struct pipe_resource *
NineBuffer9_GetResource( struct NineBuffer9 *This, unsigned *offset )
{
if (This->buf)
return nine_upload_buffer_resource_and_offset(This->buf, offset);
*offset = 0;
return NineResource9_GetResource(&This->base);
}
static void
NineBuffer9_RebindIfRequired( struct NineBuffer9 *This,
struct NineDevice9 *device )
{
int i;
if (!This->bind_count)
return;
for (i = 0; i < device->caps.MaxStreams; i++) {
if (device->state.stream[i] == (struct NineVertexBuffer9 *)This)
nine_context_set_stream_source(device, i,
(struct NineVertexBuffer9 *)This,
device->state.vtxbuf[i].buffer_offset,
device->state.vtxbuf[i].stride);
}
if (device->state.idxbuf == (struct NineIndexBuffer9 *)This)
nine_context_set_indices(device, (struct NineIndexBuffer9 *)This);
}
HRESULT NINE_WINAPI
NineBuffer9_Lock( struct NineBuffer9 *This,
UINT OffsetToLock,
UINT SizeToLock,
void **ppbData,
DWORD Flags )
{
struct NineDevice9 *device = This->base.base.device;
struct pipe_box box;
struct pipe_context *pipe;
void *data;
unsigned usage;
DBG("This=%p(pipe=%p) OffsetToLock=0x%x, SizeToLock=0x%x, Flags=0x%x\n",
This, This->base.resource,
OffsetToLock, SizeToLock, Flags);
user_assert(ppbData, E_POINTER);
user_assert(!(Flags & ~(D3DLOCK_DISCARD |
D3DLOCK_DONOTWAIT |
D3DLOCK_NO_DIRTY_UPDATE |
D3DLOCK_NOSYSLOCK |
D3DLOCK_READONLY |
D3DLOCK_NOOVERWRITE)), D3DERR_INVALIDCALL);
if (SizeToLock == 0) {
SizeToLock = This->size - OffsetToLock;
user_warn(OffsetToLock != 0);
}
u_box_1d(OffsetToLock, SizeToLock, &box);
if (This->base.pool == D3DPOOL_MANAGED) {
/* READONLY doesn't dirty the buffer */
/* Tests on Win: READONLY doesn't wait for the upload */
if (!(Flags & D3DLOCK_READONLY)) {
if (!This->managed.dirty) {
assert(LIST_IS_EMPTY(&This->managed.list));
This->managed.dirty = TRUE;
This->managed.dirty_box = box;
if (p_atomic_read(&This->managed.pending_upload))
nine_csmt_process(This->base.base.device);
} else
u_box_union_2d(&This->managed.dirty_box, &This->managed.dirty_box, &box);
/* Tests trying to draw while the buffer is locked show that
* MANAGED buffers are made dirty at Lock time */
BASEBUF_REGISTER_UPDATE(This);
}
*ppbData = (char *)This->managed.data + OffsetToLock;
DBG("returning pointer %p\n", *ppbData);
This->nmaps++;
return D3D_OK;
}
/* Driver ddi doc: READONLY is never passed to the device. So it can only
* have effect on things handled by the driver (MANAGED pool for example).
* Msdn doc: DISCARD and NOOVERWRITE are only for DYNAMIC.
* ATI doc: You can use DISCARD and NOOVERWRITE without DYNAMIC.
* Msdn doc: D3DLOCK_DONOTWAIT is not among the valid flags for buffers.
* Our tests: On win 7 nvidia, D3DLOCK_DONOTWAIT does return
* D3DERR_WASSTILLDRAWING if the resource is in use, except for DYNAMIC.
* Our tests: some apps do use both DISCARD and NOOVERWRITE at the same
* time. On windows it seems to return different pointer, thus indicating
* DISCARD is taken into account.
* Our tests: SYSTEMMEM doesn't DISCARD */
if (This->base.pool == D3DPOOL_SYSTEMMEM)
Flags &= ~D3DLOCK_DISCARD;
if (Flags & D3DLOCK_DISCARD)
usage = PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_WHOLE_RESOURCE;
else if (Flags & D3DLOCK_NOOVERWRITE)
usage = PIPE_TRANSFER_WRITE | PIPE_TRANSFER_UNSYNCHRONIZED;
else
usage = PIPE_TRANSFER_READ_WRITE;
if (Flags & D3DLOCK_DONOTWAIT && !(This->base.usage & D3DUSAGE_DYNAMIC))
usage |= PIPE_TRANSFER_DONTBLOCK;
This->discard_nooverwrite_only &= !!(Flags & (D3DLOCK_DISCARD | D3DLOCK_NOOVERWRITE));
if (This->nmaps == This->maxmaps) {
struct NineTransfer *newmaps =
REALLOC(This->maps, sizeof(struct NineTransfer)*This->maxmaps,
sizeof(struct NineTransfer)*(This->maxmaps << 1));
if (newmaps == NULL)
return E_OUTOFMEMORY;
This->maxmaps <<= 1;
This->maps = newmaps;
}
if (This->buf && !This->discard_nooverwrite_only) {
struct pipe_box src_box;
unsigned offset;
struct pipe_resource *src_res;
DBG("Disabling nine_subbuffer for a buffer having "
    "used a nine_subbuffer buffer\n");
/* Copy buffer content to the buffer resource, which
* we will now use.
* Note: The behaviour may be different from what is expected
* with double lock. However applications can't really make expectations
* about double locks, and don't really use them, so that's ok. */
src_res = nine_upload_buffer_resource_and_offset(This->buf, &offset);
u_box_1d(offset, This->size, &src_box);
pipe = NineDevice9_GetPipe(device);
pipe->resource_copy_region(pipe, This->base.resource, 0, 0, 0, 0,
src_res, 0, &src_box);
/* Release previous resource */
if (This->nmaps >= 1)
This->maps[This->nmaps-1].should_destroy_buf = true;
else
nine_upload_release_buffer(device->buffer_upload, This->buf);
This->buf = NULL;
/* Rebind buffer */
NineBuffer9_RebindIfRequired(This, device);
}
This->maps[This->nmaps].transfer = NULL;
This->maps[This->nmaps].is_pipe_secondary = false;
This->maps[This->nmaps].buf = NULL;
This->maps[This->nmaps].should_destroy_buf = false;
if (This->discard_nooverwrite_only) {
if (This->buf && (Flags & D3DLOCK_DISCARD)) {
/* Release previous buffer */
if (This->nmaps >= 1)
This->maps[This->nmaps-1].should_destroy_buf = true;
else
nine_upload_release_buffer(device->buffer_upload, This->buf);
This->buf = NULL;
}
if (!This->buf) {
This->buf = nine_upload_create_buffer(device->buffer_upload, This->base.info.width0);
NineBuffer9_RebindIfRequired(This, device);
}
if (This->buf) {
This->maps[This->nmaps].buf = This->buf;
This->nmaps++;
*ppbData = nine_upload_buffer_get_map(This->buf) + OffsetToLock;
return D3D_OK;
} else {
/* Fallback to normal path, and don't try again */
This->discard_nooverwrite_only = false;
}
}
/* When csmt is active, we want to avoid stalls as much as possible,
* and thus we want to create a new resource on discard and map it
* with the secondary pipe, instead of waiting on the main pipe. */
if (Flags & D3DLOCK_DISCARD && device->csmt_active) {
struct pipe_screen *screen = NineDevice9_GetScreen(device);
struct pipe_resource *new_res = screen->resource_create(screen, &This->base.info);
if (new_res) {
/* Use the new resource */
pipe_resource_reference(&This->base.resource, new_res);
pipe_resource_reference(&new_res, NULL);
usage = PIPE_TRANSFER_WRITE | PIPE_TRANSFER_UNSYNCHRONIZED;
NineBuffer9_RebindIfRequired(This, device);
This->maps[This->nmaps].is_pipe_secondary = TRUE;
}
} else if (Flags & D3DLOCK_NOOVERWRITE && device->csmt_active)
This->maps[This->nmaps].is_pipe_secondary = TRUE;
if (This->maps[This->nmaps].is_pipe_secondary)
pipe = device->pipe_secondary;
else
pipe = NineDevice9_GetPipe(device);
data = pipe->transfer_map(pipe, This->base.resource, 0,
usage, &box, &This->maps[This->nmaps].transfer);
if (!data) {
DBG("pipe::transfer_map failed\n"
" usage = %x\n"
" box.x = %u\n"
" box.width = %u\n",
usage, box.x, box.width);
if (Flags & D3DLOCK_DONOTWAIT)
return D3DERR_WASSTILLDRAWING;
return D3DERR_INVALIDCALL;
}
DBG("returning pointer %p\n", data);
This->nmaps++;
*ppbData = data;
return D3D_OK;
}
HRESULT NINE_WINAPI
NineBuffer9_Unlock( struct NineBuffer9 *This )
{
struct NineDevice9 *device = This->base.base.device;
struct pipe_context *pipe;
DBG("This=%p\n", This);
user_assert(This->nmaps > 0, D3DERR_INVALIDCALL);
This->nmaps--;
if (This->base.pool != D3DPOOL_MANAGED) {
if (!This->maps[This->nmaps].buf) {
pipe = This->maps[This->nmaps].is_pipe_secondary ?
device->pipe_secondary :
nine_context_get_pipe_acquire(device);
pipe->transfer_unmap(pipe, This->maps[This->nmaps].transfer);
/* We need to flush in case the driver does implicit copies */
if (This->maps[This->nmaps].is_pipe_secondary)
pipe->flush(pipe, NULL, 0);
else
nine_context_get_pipe_release(device);
} else if (This->maps[This->nmaps].should_destroy_buf)
nine_upload_release_buffer(device->buffer_upload, This->maps[This->nmaps].buf);
}
return D3D_OK;
}
void
NineBuffer9_SetDirty( struct NineBuffer9 *This )
{
assert(This->base.pool == D3DPOOL_MANAGED);
This->managed.dirty = TRUE;
u_box_1d(0, This->size, &This->managed.dirty_box);
BASEBUF_REGISTER_UPDATE(This);
}
|
{
"pile_set_name": "Github"
}
|
about=JabRef-icon-16.png
jabrefIcon16=JabRef-icon-16.png
jabrefIcon20=JabRef-icon-20.png
jabrefIcon32=JabRef-icon-32.png
jabrefIcon40=JabRef-icon-40.png
jabrefIcon48=JabRef-icon-48.png
jabrefIcon64=JabRef-icon-64.png
jabrefIcon128=JabRef-icon-128.png
#external apps
texmaker=texmaker.png
texstudio=texstudio.png
winedt=winedt.png
vim=vim.png
lyx=lyx2.png
openoffice=openoffice.png
citeseer=wwwciteseer.png
arxiv=arxiv_32.png
emacs=emacs.png
mdl=mdl-icon.png
mdlloading=mdlloading.gif
mdlListIcon=mdlListIcon.png
|
{
"pile_set_name": "Github"
}
|
//
// BugsnagSessionTrackerStopTest.m
// Tests
//
// Created by Jamie Lynch on 15/02/2019.
// Copyright © 2019 Bugsnag. All rights reserved.
//
#import <XCTest/XCTest.h>
#import "BugsnagSessionTracker.h"
#import "BugsnagTestConstants.h"
@interface BugsnagSession ()
@property NSUInteger unhandledCount;
@property NSUInteger handledCount;
@end
@interface BugsnagSessionTrackerStopTest : XCTestCase
@property BugsnagConfiguration *configuration;
@property BugsnagSessionTracker *tracker;
@end
@implementation BugsnagSessionTrackerStopTest
- (void)setUp {
[super setUp];
self.configuration = [[BugsnagConfiguration alloc] initWithApiKey:DUMMY_APIKEY_32CHAR_1];
self.configuration.autoTrackSessions = NO;
self.tracker = [[BugsnagSessionTracker alloc] initWithConfig:self.configuration
client:nil
postRecordCallback:nil];
}
/**
* Verifies that a session can be resumed after it is stopped
*/
- (void)testResumeFromStoppedSession {
[self.tracker startNewSession];
BugsnagSession *original = self.tracker.runningSession;
XCTAssertNotNil(original);
[self.tracker pauseSession];
XCTAssertNil(self.tracker.runningSession);
XCTAssertTrue([self.tracker resumeSession]);
XCTAssertEqual(original, self.tracker.runningSession);
}
/**
* Verifies that a new session is started when calling resumeSession,
* if there is no stopped session
*/
- (void)testResumeWithNoStoppedSession {
XCTAssertNil(self.tracker.runningSession);
XCTAssertFalse([self.tracker resumeSession]);
XCTAssertNotNil(self.tracker.runningSession);
}
/**
* Verifies that a new session can be created after the previous one is stopped
*/
- (void)testStartNewAfterStoppedSession {
[self.tracker startNewSession];
BugsnagSession *originalSession = self.tracker.runningSession;
[self.tracker pauseSession];
[self.tracker startNewSession];
XCTAssertNotEqual(originalSession, self.tracker.runningSession);
}
/**
* Verifies that calling resumeSession multiple times only starts one session
*/
- (void)testMultipleResumesHaveNoEffect {
[self.tracker startNewSession];
BugsnagSession *original = self.tracker.runningSession;
[self.tracker pauseSession];
XCTAssertTrue([self.tracker resumeSession]);
XCTAssertEqual(original, self.tracker.runningSession);
XCTAssertFalse([self.tracker resumeSession]);
XCTAssertEqual(original, self.tracker.runningSession);
}
/**
* Verifies that calling pauseSession multiple times only stops one session
*/
- (void)testMultipleStopsHaveNoEffect {
[self.tracker startNewSession];
XCTAssertNotNil(self.tracker.runningSession);
[self.tracker pauseSession];
XCTAssertNil(self.tracker.runningSession);
[self.tracker pauseSession];
XCTAssertNil(self.tracker.runningSession);
}
/**
* Verifies that if a handled or unhandled error occurs when a session is stopped, the
* error count is not updated
*/
- (void)testStoppedSessionDoesNotIncrement {
[self.tracker startNewSession];
self.tracker.runningSession.handledCount++;
self.tracker.runningSession.unhandledCount++;
XCTAssertEqual(1, self.tracker.runningSession.handledCount);
XCTAssertEqual(1, self.tracker.runningSession.unhandledCount);
[self.tracker pauseSession];
self.tracker.runningSession.handledCount++;
self.tracker.runningSession.unhandledCount++;
[self.tracker resumeSession];
XCTAssertEqual(1, self.tracker.runningSession.handledCount);
XCTAssertEqual(1, self.tracker.runningSession.unhandledCount);
self.tracker.runningSession.handledCount++;
self.tracker.runningSession.unhandledCount++;
XCTAssertEqual(2, self.tracker.runningSession.handledCount);
XCTAssertEqual(2, self.tracker.runningSession.unhandledCount);
}
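/**
 * Illustrative sketch (an assumption, not part of the original suite): pausing
 * when no session was ever started should leave the tracker without a running
 * session, mirroring the behaviour shown in testMultipleStopsHaveNoEffect
 */
- (void)testPauseWithoutSessionSketch {
    XCTAssertNil(self.tracker.runningSession);
    [self.tracker pauseSession];
    XCTAssertNil(self.tracker.runningSession);
}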
@end
|
{
"pile_set_name": "Github"
}
|
/*-------------------------------------------------------------------------
HtmlShortColorNames provides a template class for access to the short name table
Written by Michael C. Miller.
I invest time and resources providing this open source code,
please support me by donating (see https://github.com/Makuna/NeoPixelBus)
-------------------------------------------------------------------------
This file is part of the Makuna/NeoPixelBus library.
NeoPixelBus is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
NeoPixelBus is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with NeoPixel. If not, see
<http://www.gnu.org/licenses/>.
-------------------------------------------------------------------------*/
#include "HtmlColor.h"
#include "HtmlColorNameStrings.h"
static const HtmlColorPair c_ShortColorNames[] PROGMEM = {
{ c_HtmlNameAqua, 0xffff },
{ c_HtmlNameBlack, 0x0 },
{ c_HtmlNameBlue, 0xff },
{ c_HtmlNameFuchsia, 0xff00ff },
{ c_HtmlNameGray, 0x808080 },
{ c_HtmlNameGreen, 0x8000 },
{ c_HtmlNameLime, 0xff00 },
{ c_HtmlNameMaroon, 0x800000 },
{ c_HtmlNameNavy, 0x80 },
{ c_HtmlNameOlive, 0x808000 },
{ c_HtmlNameOrange, 0xffa500 },
{ c_HtmlNamePurple, 0x800080 },
{ c_HtmlNameRed, 0xff0000 },
{ c_HtmlNameSilver, 0xc0c0c0 },
{ c_HtmlNameTeal, 0x8080 },
{ c_HtmlNameWhite, 0xffffff },
{ c_HtmlNameYellow, 0xffff00 },
};
const HtmlColorPair* HtmlShortColorNames::Pair(uint8_t index)
{
return &c_ShortColorNames[index];
}
uint8_t HtmlShortColorNames::Count()
{
return countof(c_ShortColorNames);
}
|
{
"pile_set_name": "Github"
}
|
.gradient-mixin(@color) {
background: svg-gradient(to bottom,
fade(@color, 0%) 0%,
fade(@color, 5%) 60%,
fade(@color, 10%) 70%,
fade(@color, 15%) 73%,
fade(@color, 20%) 75%,
fade(@color, 25%) 80%,
fade(@color, 30%) 85%,
fade(@color, 35%) 88%,
fade(@color, 40%) 90%,
fade(@color, 45%) 95%,
fade(@color, 50%) 100%
);
}
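// Illustrative usage sketch (the selector and colour are assumptions): applying
// the mixin so a panel's background fades in towards its bottom edge.
.example-panel {
    .gradient-mixin(#2b2b2b);
}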
|
{
"pile_set_name": "Github"
}
|
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "commonjs",
"sourceMap": true
}
}
|
{
"pile_set_name": "Github"
}
|
{
"name": "kendryte/sdcard-file-bmp-standalone-driver",
"version": "develop",
"type": "library",
"dependency": {
"kendryte/sdcard-standalone-driver": "develop"
},
"source": [
"src/*.c"
],
"include": [
"include"
],
"header": [
"src"
],
"homepage": "https://github.com/GongT/monorepo-kendryte-driver-collection"
}
|
{
"pile_set_name": "Github"
}
|
/* crypto/dh/dh.h */
/* Copyright (C) 1995-1998 Eric Young ([email protected])
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young ([email protected]).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson ([email protected]).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young ([email protected])"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson ([email protected])"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/
#ifndef HEADER_DH_H
# define HEADER_DH_H
# include <openssl/e_os2.h>
# ifdef OPENSSL_NO_DH
# error DH is disabled.
# endif
# ifndef OPENSSL_NO_BIO
# include <openssl/bio.h>
# endif
# include <openssl/ossl_typ.h>
# ifndef OPENSSL_NO_DEPRECATED
# include <openssl/bn.h>
# endif
# ifndef OPENSSL_DH_MAX_MODULUS_BITS
# define OPENSSL_DH_MAX_MODULUS_BITS 10000
# endif
# define DH_FLAG_CACHE_MONT_P 0x01
/*
* new with 0.9.7h; the built-in DH
* implementation now uses constant time
* modular exponentiation for secret exponents
* by default. This flag causes the
* faster variable sliding window method to
* be used for all exponents.
*/
# define DH_FLAG_NO_EXP_CONSTTIME 0x02
/*
* If this flag is set the DH method is FIPS compliant and can be used in
* FIPS mode. This is set in the validated module method. If an application
* sets this flag in its own methods it is its responsibility to ensure the
* result is compliant.
*/
# define DH_FLAG_FIPS_METHOD 0x0400
/*
* If this flag is set the operations normally disabled in FIPS mode are
* permitted; it is then the application's responsibility to ensure that the
* usage is compliant.
*/
# define DH_FLAG_NON_FIPS_ALLOW 0x0400
#ifdef __cplusplus
extern "C" {
#endif
/* Already defined in ossl_typ.h */
/* typedef struct dh_st DH; */
/* typedef struct dh_method DH_METHOD; */
struct dh_method {
const char *name;
/* Methods here */
int (*generate_key) (DH *dh);
int (*compute_key) (unsigned char *key, const BIGNUM *pub_key, DH *dh);
/* Can be null */
int (*bn_mod_exp) (const DH *dh, BIGNUM *r, const BIGNUM *a,
const BIGNUM *p, const BIGNUM *m, BN_CTX *ctx,
BN_MONT_CTX *m_ctx);
int (*init) (DH *dh);
int (*finish) (DH *dh);
int flags;
char *app_data;
/* If this is non-NULL, it will be used to generate parameters */
int (*generate_params) (DH *dh, int prime_len, int generator,
BN_GENCB *cb);
};
struct dh_st {
/*
* This first argument is used to pick up errors when a DH is passed
* instead of a EVP_PKEY
*/
int pad;
int version;
BIGNUM *p;
BIGNUM *g;
long length; /* optional */
BIGNUM *pub_key; /* g^x % p */
BIGNUM *priv_key; /* x */
int flags;
BN_MONT_CTX *method_mont_p;
/* Placeholders if we want to do X9.42 DH */
BIGNUM *q;
BIGNUM *j;
unsigned char *seed;
int seedlen;
BIGNUM *counter;
int references;
CRYPTO_EX_DATA ex_data;
const DH_METHOD *meth;
ENGINE *engine;
};
# define DH_GENERATOR_2 2
/* #define DH_GENERATOR_3 3 */
# define DH_GENERATOR_5 5
/* DH_check error codes */
# define DH_CHECK_P_NOT_PRIME 0x01
# define DH_CHECK_P_NOT_SAFE_PRIME 0x02
# define DH_UNABLE_TO_CHECK_GENERATOR 0x04
# define DH_NOT_SUITABLE_GENERATOR 0x08
# define DH_CHECK_Q_NOT_PRIME 0x10
# define DH_CHECK_INVALID_Q_VALUE 0x20
# define DH_CHECK_INVALID_J_VALUE 0x40
/* DH_check_pub_key error codes */
# define DH_CHECK_PUBKEY_TOO_SMALL 0x01
# define DH_CHECK_PUBKEY_TOO_LARGE 0x02
# define DH_CHECK_PUBKEY_INVALID 0x04
/*
* primes p where (p-1)/2 is prime too are called "safe"; we define this for
* backward compatibility:
*/
# define DH_CHECK_P_NOT_STRONG_PRIME DH_CHECK_P_NOT_SAFE_PRIME
# define d2i_DHparams_fp(fp,x) (DH *)ASN1_d2i_fp((char *(*)())DH_new, \
(char *(*)())d2i_DHparams,(fp),(unsigned char **)(x))
# define i2d_DHparams_fp(fp,x) ASN1_i2d_fp(i2d_DHparams,(fp), \
(unsigned char *)(x))
# define d2i_DHparams_bio(bp,x) ASN1_d2i_bio_of(DH,DH_new,d2i_DHparams,bp,x)
# define i2d_DHparams_bio(bp,x) ASN1_i2d_bio_of_const(DH,i2d_DHparams,bp,x)
DH *DHparams_dup(DH *);
const DH_METHOD *DH_OpenSSL(void);
void DH_set_default_method(const DH_METHOD *meth);
const DH_METHOD *DH_get_default_method(void);
int DH_set_method(DH *dh, const DH_METHOD *meth);
DH *DH_new_method(ENGINE *engine);
DH *DH_new(void);
void DH_free(DH *dh);
int DH_up_ref(DH *dh);
int DH_size(const DH *dh);
int DH_get_ex_new_index(long argl, void *argp, CRYPTO_EX_new *new_func,
CRYPTO_EX_dup *dup_func, CRYPTO_EX_free *free_func);
int DH_set_ex_data(DH *d, int idx, void *arg);
void *DH_get_ex_data(DH *d, int idx);
/* Deprecated version */
# ifndef OPENSSL_NO_DEPRECATED
DH *DH_generate_parameters(int prime_len, int generator,
void (*callback) (int, int, void *), void *cb_arg);
# endif /* !defined(OPENSSL_NO_DEPRECATED) */
/* New version */
int DH_generate_parameters_ex(DH *dh, int prime_len, int generator,
BN_GENCB *cb);
int DH_check(const DH *dh, int *codes);
int DH_check_pub_key(const DH *dh, const BIGNUM *pub_key, int *codes);
int DH_generate_key(DH *dh);
int DH_compute_key(unsigned char *key, const BIGNUM *pub_key, DH *dh);
int DH_compute_key_padded(unsigned char *key, const BIGNUM *pub_key, DH *dh);
DH *d2i_DHparams(DH **a, const unsigned char **pp, long length);
int i2d_DHparams(const DH *a, unsigned char **pp);
DH *d2i_DHxparams(DH **a, const unsigned char **pp, long length);
int i2d_DHxparams(const DH *a, unsigned char **pp);
# ifndef OPENSSL_NO_FP_API
int DHparams_print_fp(FILE *fp, const DH *x);
# endif
# ifndef OPENSSL_NO_BIO
int DHparams_print(BIO *bp, const DH *x);
# else
int DHparams_print(char *bp, const DH *x);
# endif
/* RFC 5114 parameters */
DH *DH_get_1024_160(void);
DH *DH_get_2048_224(void);
DH *DH_get_2048_256(void);
/* RFC2631 KDF */
int DH_KDF_X9_42(unsigned char *out, size_t outlen,
const unsigned char *Z, size_t Zlen,
ASN1_OBJECT *key_oid,
const unsigned char *ukm, size_t ukmlen, const EVP_MD *md);
# define EVP_PKEY_CTX_set_dh_paramgen_prime_len(ctx, len) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DH, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_PARAMGEN_PRIME_LEN, len, NULL)
# define EVP_PKEY_CTX_set_dh_paramgen_subprime_len(ctx, len) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DH, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_PARAMGEN_SUBPRIME_LEN, len, NULL)
# define EVP_PKEY_CTX_set_dh_paramgen_type(ctx, typ) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DH, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_PARAMGEN_TYPE, typ, NULL)
# define EVP_PKEY_CTX_set_dh_paramgen_generator(ctx, gen) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DH, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_PARAMGEN_GENERATOR, gen, NULL)
# define EVP_PKEY_CTX_set_dh_rfc5114(ctx, gen) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_RFC5114, gen, NULL)
# define EVP_PKEY_CTX_set_dhx_rfc5114(ctx, gen) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, EVP_PKEY_OP_PARAMGEN, \
EVP_PKEY_CTRL_DH_RFC5114, gen, NULL)
# define EVP_PKEY_CTX_set_dh_kdf_type(ctx, kdf) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_TYPE, kdf, NULL)
# define EVP_PKEY_CTX_get_dh_kdf_type(ctx) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_TYPE, -2, NULL)
# define EVP_PKEY_CTX_set0_dh_kdf_oid(ctx, oid) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_OID, 0, (void *)oid)
# define EVP_PKEY_CTX_get0_dh_kdf_oid(ctx, poid) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_GET_DH_KDF_OID, 0, (void *)poid)
# define EVP_PKEY_CTX_set_dh_kdf_md(ctx, md) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_MD, 0, (void *)md)
# define EVP_PKEY_CTX_get_dh_kdf_md(ctx, pmd) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_GET_DH_KDF_MD, 0, (void *)pmd)
# define EVP_PKEY_CTX_set_dh_kdf_outlen(ctx, len) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_OUTLEN, len, NULL)
# define EVP_PKEY_CTX_get_dh_kdf_outlen(ctx, plen) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_GET_DH_KDF_OUTLEN, 0, (void *)plen)
# define EVP_PKEY_CTX_set0_dh_kdf_ukm(ctx, p, plen) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_DH_KDF_UKM, plen, (void *)p)
# define EVP_PKEY_CTX_get0_dh_kdf_ukm(ctx, p) \
EVP_PKEY_CTX_ctrl(ctx, EVP_PKEY_DHX, \
EVP_PKEY_OP_DERIVE, \
EVP_PKEY_CTRL_GET_DH_KDF_UKM, 0, (void *)p)
# define EVP_PKEY_CTRL_DH_PARAMGEN_PRIME_LEN (EVP_PKEY_ALG_CTRL + 1)
# define EVP_PKEY_CTRL_DH_PARAMGEN_GENERATOR (EVP_PKEY_ALG_CTRL + 2)
# define EVP_PKEY_CTRL_DH_RFC5114 (EVP_PKEY_ALG_CTRL + 3)
# define EVP_PKEY_CTRL_DH_PARAMGEN_SUBPRIME_LEN (EVP_PKEY_ALG_CTRL + 4)
# define EVP_PKEY_CTRL_DH_PARAMGEN_TYPE (EVP_PKEY_ALG_CTRL + 5)
# define EVP_PKEY_CTRL_DH_KDF_TYPE (EVP_PKEY_ALG_CTRL + 6)
# define EVP_PKEY_CTRL_DH_KDF_MD (EVP_PKEY_ALG_CTRL + 7)
# define EVP_PKEY_CTRL_GET_DH_KDF_MD (EVP_PKEY_ALG_CTRL + 8)
# define EVP_PKEY_CTRL_DH_KDF_OUTLEN (EVP_PKEY_ALG_CTRL + 9)
# define EVP_PKEY_CTRL_GET_DH_KDF_OUTLEN (EVP_PKEY_ALG_CTRL + 10)
# define EVP_PKEY_CTRL_DH_KDF_UKM (EVP_PKEY_ALG_CTRL + 11)
# define EVP_PKEY_CTRL_GET_DH_KDF_UKM (EVP_PKEY_ALG_CTRL + 12)
# define EVP_PKEY_CTRL_DH_KDF_OID (EVP_PKEY_ALG_CTRL + 13)
# define EVP_PKEY_CTRL_GET_DH_KDF_OID (EVP_PKEY_ALG_CTRL + 14)
/* KDF types */
# define EVP_PKEY_DH_KDF_NONE 1
# define EVP_PKEY_DH_KDF_X9_42 2
/* BEGIN ERROR CODES */
/*
* The following lines are auto generated by the script mkerr.pl. Any changes
* made after this point may be overwritten when the script is next run.
*/
void ERR_load_DH_strings(void);
/* Error codes for the DH functions. */
/* Function codes. */
# define DH_F_COMPUTE_KEY 102
# define DH_F_DHPARAMS_PRINT_FP 101
# define DH_F_DH_BUILTIN_GENPARAMS 106
# define DH_F_DH_CMS_DECRYPT 117
# define DH_F_DH_CMS_SET_PEERKEY 118
# define DH_F_DH_CMS_SET_SHARED_INFO 119
# define DH_F_DH_COMPUTE_KEY 114
# define DH_F_DH_GENERATE_KEY 115
# define DH_F_DH_GENERATE_PARAMETERS_EX 116
# define DH_F_DH_NEW_METHOD 105
# define DH_F_DH_PARAM_DECODE 107
# define DH_F_DH_PRIV_DECODE 110
# define DH_F_DH_PRIV_ENCODE 111
# define DH_F_DH_PUB_DECODE 108
# define DH_F_DH_PUB_ENCODE 109
# define DH_F_DO_DH_PRINT 100
# define DH_F_GENERATE_KEY 103
# define DH_F_GENERATE_PARAMETERS 104
# define DH_F_PKEY_DH_DERIVE 112
# define DH_F_PKEY_DH_KEYGEN 113
/* Reason codes. */
# define DH_R_BAD_GENERATOR 101
# define DH_R_BN_DECODE_ERROR 109
# define DH_R_BN_ERROR 106
# define DH_R_DECODE_ERROR 104
# define DH_R_INVALID_PUBKEY 102
# define DH_R_KDF_PARAMETER_ERROR 112
# define DH_R_KEYS_NOT_SET 108
# define DH_R_KEY_SIZE_TOO_SMALL 110
# define DH_R_MODULUS_TOO_LARGE 103
# define DH_R_NON_FIPS_METHOD 111
# define DH_R_NO_PARAMETERS_SET 107
# define DH_R_NO_PRIVATE_VALUE 100
# define DH_R_PARAMETER_ENCODING_ERROR 105
# define DH_R_PEER_KEY_ERROR 113
# define DH_R_SHARED_INFO_ERROR 114
#ifdef __cplusplus
}
#endif
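/*
 * Illustrative usage sketch (not part of the OpenSSL API): computing a shared
 * secret with the functions declared above. Both peers must agree on the same
 * group; the RFC 5114 2048/256 group is used here purely for illustration, and
 * a real program would keep this in its own source file and inspect the error
 * queue on failure.
 */
static int DH_shared_secret_sketch(const BIGNUM *peer_pub_key,
                                   unsigned char *secret, int secret_len)
{
    DH *dh = DH_get_2048_256();
    int len = -1;

    if (dh == NULL)
        return -1;
    if (DH_generate_key(dh) && DH_size(dh) <= secret_len)
        len = DH_compute_key(secret, peer_pub_key, dh);
    DH_free(dh);
    return len;
}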
#endif
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: 7589a9e1a090a45b88350dfc54ba13ac
DefaultImporter:
userData:
|
{
"pile_set_name": "Github"
}
|
/*! \file AffineRegister.h
\author Diogo Nunes Sampaio <[email protected]>
\date Wednesday February 15, 2012
\brief The file for the AffineRegister class.
*/
#ifndef AFFINE_REGISTER_CPP_
#define AFFINE_REGISTER_CPP_
// Ocelot Includes
#include <ocelot/transforms/interface/AffineRegister.h>
// Hydrazine Includes
#include <hydrazine/interface/debug.h>
#include <hydrazine/interface/SystemCompatibility.h>
#ifdef REPORT_BASE
#undef REPORT_BASE
#endif
#define REPORT_BASE 1
#if AFFINE_REGISTER_PROFILE_H_
#include <fstream>
namespace affineProfiler
{
unsigned divSpills = 0;
unsigned affSpills = 0;
unsigned caffSpills = 0;
unsigned unifSpills = 0;
unsigned cstSpills = 0;
unsigned divLoads = 0;
unsigned affLoads = 0;
unsigned caffLoads = 0;
unsigned unifLoads = 0;
unsigned cstLoads = 0;
unsigned divStores = 0;
unsigned affStores = 0;
unsigned caffStores = 0;
unsigned unifStores = 0;
unsigned cstStores = 0;
void resetSpillData()
{
divSpills = affSpills = caffSpills = unifSpills = cstSpills = 0;
divLoads = affLoads = caffLoads = unifLoads = cstLoads = 0;
divStores = affStores = caffStores = unifStores = cstStores = 0;
}
void printSpillResults(const std::string kernelName)
{
std::string k = hydrazine::demangleCXXString(kernelName);
//Remove parameter from function name
k = k.substr(0, k.find("("));
//Remove data type from templated kernel
if(k.find('<') != std::string::npos)
{
k = k.substr(0, k.find("<"));
}
//Remove function namespace from templated kernel
if(k.find(':') != std::string::npos)
{
k.replace(0, 1 + k.find_last_of(':'), "");
}
//Remove function type from templated kernel
if(k.find(' ') != std::string::npos)
{
k.replace(0, 1 + k.find_last_of(' '), "");
}
std::ofstream out(k + ".AffSpill.csv");
if(!out.is_open()) return;
out << "div;aff;C.aff;unif;cst;divLD;affLD;caffLD;uniLD;CstLD;"
"divST;affST;C.affST;uniST;ConstST"
<< std::endl << divSpills << ';' << affSpills << ';' << caffSpills
<< ';' << unifSpills << ';' << cstSpills << ';'
<< divLoads << ';' << affLoads << ';' << caffLoads << ';' << unifLoads
<< ';' << cstLoads << ';' << divStores << ';'
<< affStores << ';' << caffStores << ';' << unifStores << ';'
<< cstStores;
out.close();
}
}
#endif
namespace transforms
{
AffineRegister::RegisterId AffineRegister::warpPosition = (unsigned) (-1);
std::map<ir::PTXOperand::DataType, AffineRegister::RegisterId>
AffineRegister::tempRegisters;
bool AffineRegister::bottomBase() const
{
return _state.base == analysis::ConstantAbstractState::bottom;
}
bool AffineRegister::baseZero() const
{
return _state.base == analysis::ConstantAbstractState::zero;
}
bool AffineRegister::strideOne() const
{
return _state.stride[0] == analysis::ConstantAbstractState::one;
}
bool AffineRegister::doNotRequireMemory() const
{
return _state.known();
}
bool AffineRegister::requireSharedMemory() const
{
return (!(requireLocalMemory() || doNotRequireMemory()));
}
bool AffineRegister::requireLocalMemory() const
{
return (_state.undefined() || _state.divergent() || _state.hardAffine());
}
AffineRegister::AffineRegister(RegisterId ireg, Type itype,
MemoryArray* localArray, MemoryArray* affineStack,
const analysis::AffineAbstractState state)
: CoalescedRegister(ireg, itype, localArray), _shared(affineStack), _regs(0)
{
}
void AffineRegister::combineState(const analysis::AffineAbstractState state)
{
_state &= state;
}
void AffineRegister::load(DataflowGraph & dfg, InstructionList &il,
RegisterId &dreg)
{
report("Load coalesced variable " << _reg << ", SSA variable " << dreg);
if(requireLocalMemory())
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::divLoads++;
#endif
report("\tGoes into local memory");
CoalescedRegister::load(dfg, il, dreg);
return;
}
if(predicate())
{
report("\tIs an affine or constant predicate");
loadPred(dfg, il, dreg);
return;
}
if(doNotRequireMemory())
{
report("\tIs an affine or constant variable with known indexes");
recomputeKnownValue(il, dreg);
return;
}
report("\tKnown stride, unknown base");
if(_state.isUniform())
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::unifLoads++;
#endif
/* 0 + 0 + B */
std::stringstream a;
a << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
{
a << " + " << _shared->getVarOffset(_reg);
}
ir::PTXInstruction load(ir::PTXInstruction::Ld);
load.addressSpace = _shared->addressSpace();
load.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
load.a = ir::PTXOperand(a.str());
load.type = _type;
il.push_back(load);
return;
}
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::affLoads++;
#endif
/* 0 + C + B */
std::stringstream a;
a << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
a << " + " << _shared->getVarOffset(_reg);
ir::PTXInstruction load(ir::PTXInstruction::Ld);
load.addressSpace = _shared->addressSpace();
load.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
load.a = ir::PTXOperand(a.str());
load.type = _type;
il.push_back(load);
report("\t\tIs not constant");
ir::PTXInstruction::Opcode mvOpc = ir::PTXInstruction::Opcode::Mov;
if(_type != ir::PTXOperand::DataType::u32)
{
mvOpc = ir::PTXInstruction::Opcode::Cvt;
}
if(tempRegisters.find(_type) == tempRegisters.end())
{
tempRegisters[_type] = dfg.newRegister();
}
ir::PTXInstruction mv(mvOpc);
mv.type = _type;
mv.a = ir::PTXOperand(ir::PTXOperand::SpecialRegister::tid,
ir::PTXOperand::VectorIndex::ix);
mv.d = ir::PTXOperand(ir::PTXOperand::Register, _type,
tempRegisters[_type]);
il.push_back(mv);
if(strideOne())
{
report("\t\tHas stride 1");
ir::PTXInstruction add(ir::PTXInstruction::Opcode::Add);
add.type = _type;
add.a = mv.d;
add.b = load.d;
add.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
il.push_back(add);
}
else
{
report("\t\tHas stride != 1: " << _state.stride[0].value);
ir::PTXInstruction mad(ir::PTXInstruction::Opcode::Mad);
mad.a = mv.d;
mad.b = ir::PTXOperand(_state.stride[0].value, _type);
mad.c = load.d;
mad.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
mad.type = _type;
mad.modifier = ir::PTXInstruction::Modifier::lo;
il.push_back(mad);
}
return;
}
void AffineRegister::loadPred(DataflowGraph &dfg, InstructionList &il,
RegisterId &dreg)
{
if(tempRegisters.find(predType) == tempRegisters.end())
{
tempRegisters[predType] = dfg.newRegister();
}
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::affLoads++;
#endif
std::stringstream s;
s << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
{
s << " + " << _shared->getVarOffset(_reg);
}
ir::PTXInstruction ld(ir::PTXInstruction::Opcode::Ld);
ld.type = predType;
ld.addressSpace = _shared->addressSpace();
ld.a = ir::PTXOperand(s.str());
ld.d = ir::PTXOperand(ir::PTXOperand::AddressMode::Register,
predType, tempRegisters[predType]);
il.push_back(ld);
report("ld: " << ld.toString());
ir::PTXInstruction setp(ir::PTXInstruction::Opcode::SetP);
setp.type = predType;
setp.a = ir::PTXOperand(ir::PTXOperand::AddressMode::Register,
predType, tempRegisters[predType]);
setp.d = ir::PTXOperand(ir::PTXOperand::AddressMode::Register, _type, dreg);
setp.b = ir::PTXOperand(1, predType);
setp.comparisonOperator = ir::PTXInstruction::CmpOp::Eq;
report("setp: " << setp.toString());
il.push_back(setp);
}
void AffineRegister::storePred(DataflowGraph & dfg,
InstructionList &il, RegisterId &sreg)
{
if(tempRegisters.find(predType) == tempRegisters.end())
{
tempRegisters[predType] = dfg.newRegister();
}
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::affStores++;
#endif
std::stringstream s;
s << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
{
s << " + " << _shared->getVarOffset(_reg);
}
ir::PTXInstruction selp(ir::PTXInstruction::Opcode::SelP);
selp.type = predType;
selp.a = ir::PTXOperand(1, predType);
selp.b = ir::PTXOperand(0, predType);
selp.c = ir::PTXOperand(ir::PTXOperand::AddressMode::Register, _type, sreg);
selp.d = ir::PTXOperand(ir::PTXOperand::AddressMode::Register,
predType, tempRegisters[predType]);
report("selp: " << selp.toString());
il.push_back(selp);
ir::PTXInstruction st(ir::PTXInstruction::Opcode::St);
st.type = predType;
st.addressSpace = _shared->addressSpace();
st.d = ir::PTXOperand(ir::PTXOperand::AddressMode::Address, predType,
_shared->name(), _shared->getVarOffset(_reg));
st.a = ir::PTXOperand(ir::PTXOperand::AddressMode::Register,
predType, tempRegisters[predType]);
report("st: " << st.toString());
il.push_back(st);
}
void AffineRegister::recomputeKnownValue(InstructionList &il, RegisterId &dreg)
{
/* 0 + C + C */
report("Recomputing known value: " << _state);
if(!_state.affine())
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::cstLoads++;
#endif
report("\tIs constant");
/* 0 + 0 + C*/
ir::PTXInstruction mv(ir::PTXInstruction::Opcode::Mov);
mv.type = _type;
mv.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
mv.a = ir::PTXOperand(_state.base.value, _type);
il.push_back(mv);
return;
}
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::caffLoads++;
#endif
/* 0 + C + K, First we load the constant C, or stride */
report("\tIs not constant");
ir::PTXInstruction::Opcode mvOpc = ir::PTXInstruction::Opcode::Mov;
if(_type != ir::PTXOperand::DataType::u32)
{
mvOpc = ir::PTXInstruction::Opcode::Cvt;
}
ir::PTXInstruction mv(mvOpc);
mv.d = ir::PTXOperand(ir::PTXOperand::Register, _type, dreg);
mv.a = ir::PTXOperand(ir::PTXOperand::SpecialRegister::tid,
ir::PTXOperand::VectorIndex::ix);
mv.type = _type;
il.push_back(mv);
if(strideOne())
{
if(!baseZero())
{
/* 0 + 1 + K, Just add K to the loaded value */
report("\t\t\tHas stride = 1 and base != 0");
ir::PTXInstruction add(ir::PTXInstruction::Opcode::Add);
add.d = mv.d;
add.a = mv.d;
add.b = ir::PTXOperand(_state.stride[0].value, _type);
add.type = _type;
il.push_back(add);
}
return;
}
if(baseZero())
{
report("\t\t\tHas stride != 1 and base = 0");
ir::PTXInstruction mul(ir::PTXInstruction::Opcode::Mul);
mul.d = mv.d;
mul.a = mv.d;
mul.b = ir::PTXOperand(_state.stride[0].value, _type);
mul.type = _type;
mul.modifier = ir::PTXInstruction::Modifier::lo;
il.push_back(mul);
return;
}
report("\t\t\tHas stride != 1 and base != 0");
/* 0 + C != 1 + C */
ir::PTXInstruction mad(ir::PTXInstruction::Opcode::Mad);
mad.d = mv.d;
mad.a = mv.d;
mad.b = ir::PTXOperand(_state.stride[0].value, _type);
mad.c = ir::PTXOperand(_state.base.value, _type);
mad.type = _type;
mad.modifier = ir::PTXInstruction::Modifier::lo;
il.push_back(mad);
}
void AffineRegister::store(DataflowGraph &dfg,
InstructionList &il, RegisterId &sreg)
{
report("Affine store for variable " << sreg
<< ", coalesced to " << _reg << ", state: " << _state);
if(requireLocalMemory())
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::divStores++;
#endif
CoalescedRegister::store(dfg, il, sreg);
return;
}
if(predicate())
{
storePred(dfg, il, sreg);
return;
}
if(doNotRequireMemory())
{
#if AFFINE_REGISTER_PROFILE_H_
if(_state.affine())
{
affineProfiler::caffStores++;
}
else
{
affineProfiler::cstStores++;
}
#endif
report("No store required, needs to recreate data");
return;
}
report("Has bottom base");
RegisterId store = sreg;
if(_state.isUniform())
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::unifStores++;
#endif
std::stringstream s;
s << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
s << " + " << _shared->getVarOffset(_reg);
ir::PTXInstruction st(ir::PTXInstruction::St);
st.d = ir::PTXOperand(s.str());
st.a = ir::PTXOperand(ir::PTXOperand::Register, _type, store);
st.addressSpace = _shared->addressSpace();
st.type = _type;
il.push_back(st);
return;
}
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::affStores++;
#endif
report("Has stride != 0");
ir::PTXInstruction::Opcode mvOpc = ir::PTXInstruction::Opcode::Mov;
if(_type != ir::PTXOperand::DataType::u32)
{
mvOpc = ir::PTXInstruction::Opcode::Cvt;
}
if(tempRegisters.find(_type) == tempRegisters.end())
{
tempRegisters[_type] = dfg.newRegister();
}
store = tempRegisters[_type];
ir::PTXInstruction mv(mvOpc);
mv.d = ir::PTXOperand(ir::PTXOperand::Register, _type, store);
mv.a = ir::PTXOperand(ir::PTXOperand::SpecialRegister::tid,
ir::PTXOperand::VectorIndex::ix);
mv.type = _type;
il.push_back(mv);
if(strideOne())
{
report("Has stride == 1");
ir::PTXInstruction sub(ir::PTXInstruction::Opcode::Sub);
sub.d = mv.d;
sub.a = ir::PTXOperand(ir::PTXOperand::Register, _type, sreg);
sub.b = mv.d;
sub.type = _type;
il.push_back(sub);
}
else
{
report("Has stride != 1");
ir::PTXInstruction mad(ir::PTXInstruction::Opcode::Mad);
mad.d = mv.d;
mad.a = mv.d;
mad.b = ir::PTXOperand(-_state.stride[0].value, _type);
mad.c = ir::PTXOperand(ir::PTXOperand::Register, _type, sreg);
mad.type = _type;
mad.modifier = ir::PTXInstruction::Modifier::lo;
il.push_back(mad);
}
std::stringstream s;
s << "%r" << warpPosition;
if(_shared->getVarOffset(_reg) != 0)
{
s << " + " << _shared->getVarOffset(_reg);
}
ir::PTXInstruction st(ir::PTXInstruction::St);
st.d = ir::PTXOperand(s.str());
st.a = ir::PTXOperand(ir::PTXOperand::Register, _type, store);
st.addressSpace = _shared->addressSpace();
st.type = _type;
il.push_back(st);
}
void AffineRegister::spill()
{
if(_spilled) return;
if(requireLocalMemory())
{
#if AFFINE_REGISTER_PROFILE_H_
if(!_spilled) affineProfiler::divSpills++;
#endif
CoalescedRegister::spill();
return;
}
_spilled = true;
if(doNotRequireMemory())
{
#if AFFINE_REGISTER_PROFILE_H_
if(_state.affine())
{
affineProfiler::caffSpills++;
}
else
{
affineProfiler::cstSpills++;
}
#endif
return;
}
if(_type == ir::PTXOperand::DataType::pred)
{
#if AFFINE_REGISTER_PROFILE_H_
affineProfiler::affSpills++;
#endif
_shared->insertVar(_reg, predType);
}
else
{
_shared->insertVar(_reg, _type);
#if AFFINE_REGISTER_PROFILE_H_
if(_state.affine())
affineProfiler::affSpills++;
else
affineProfiler::unifSpills++;
#endif
}
}
unsigned AffineRegister::additionalRegs() const
{
return _regs;
}
}
#endif
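/* Illustrative sketch only (not part of the transform): the value an affine
 * register reconstructs per thread is value(tid) = base + stride * tid, which
 * is exactly what recomputeKnownValue() above emits as mov/add/mul/mad.
 * Constant values (known base and stride) need no memory, uniform and affine
 * values with an unknown base are spilled to a per-warp shared-memory slot,
 * and everything else falls back to per-thread local memory. */
namespace
{
	long long affineValueSketch(long long base, long long stride, unsigned tid)
	{
		return base + stride * static_cast<long long>(tid);
	}
}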
|
{
"pile_set_name": "Github"
}
|
#include <queue>
using std::priority_queue;
typedef struct Pair {
vector<int> *a1, *a2;
int i1, i2;
bool operator < (const Pair &p) const {
return (*a1)[i1] + (*a2)[i2] < (*p.a1)[p.i1] + (*p.a2)[p.i2];
}
bool operator > (const Pair &p) const {
return (*a1)[i1] + (*a2)[i2] > (*p.a1)[p.i1] + (*p.a2)[p.i2];
}
bool operator == (const Pair &p) const {
return (*a1)[i1] + (*a2)[i2] == (*p.a1)[p.i1] + (*p.a2)[p.i2];
}
} Pair;
class Solution {
public:
vector<pair<int, int>> kSmallestPairs(vector<int>& nums1, vector<int>& nums2, int k) {
vector<pair<int, int>> res;
vector<int> &a1 = nums1;
vector<int> &a2 = nums2;
int n1 = a1.size();
int n2 = a2.size();
if (n1 == 0 || n2 == 0) {
return res;
}
priority_queue<Pair, vector<Pair>, greater<Pair>> pq;
int i;
Pair p;
p.a1 = &a1;
p.a2 = &a2;
for (i = 0; i < n1; ++i) {
p.i1 = i;
p.i2 = 0;
pq.push(p);
}
for (i = 0; i < k && !pq.empty(); ++i) {
p = pq.top();
pq.pop();
res.push_back(make_pair((*p.a1)[p.i1], (*p.a2)[p.i2]));
++p.i2;
if (p.i2 < (*p.a2).size()) {
pq.push(p);
}
}
while (!pq.empty()) {
pq.pop();
}
return res;
}
};
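// Illustrative usage sketch (the driver code is an assumption, not part of the
// original solution): feeding two sorted arrays to kSmallestPairs.
//
//   Solution sol;
//   vector<int> nums1 = {1, 7, 11};
//   vector<int> nums2 = {2, 4, 6};
//   vector<pair<int, int>> pairs = sol.kSmallestPairs(nums1, nums2, 3);
//   // pairs == {{1, 2}, {1, 4}, {1, 6}}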
|
{
"pile_set_name": "Github"
}
|
import torch
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.nn.modules.utils import _pair
from atss_core import _C
class DeformConvFunction(Function):
@staticmethod
def forward(
ctx,
input,
offset,
weight,
stride=1,
padding=0,
dilation=1,
groups=1,
deformable_groups=1,
im2col_step=64
):
if input is not None and input.dim() != 4:
raise ValueError(
"Expected 4D tensor as input, got {}D tensor instead.".format(
input.dim()))
ctx.stride = _pair(stride)
ctx.padding = _pair(padding)
ctx.dilation = _pair(dilation)
ctx.groups = groups
ctx.deformable_groups = deformable_groups
ctx.im2col_step = im2col_step
ctx.save_for_backward(input, offset, weight)
output = input.new_empty(
DeformConvFunction._output_size(input, weight, ctx.padding,
ctx.dilation, ctx.stride))
ctx.bufs_ = [input.new_empty(0), input.new_empty(0)] # columns, ones
if not input.is_cuda:
raise NotImplementedError
else:
cur_im2col_step = min(ctx.im2col_step, input.shape[0])
assert (input.shape[0] %
cur_im2col_step) == 0, 'im2col step must divide batchsize'
_C.deform_conv_forward(
input,
weight,
offset,
output,
ctx.bufs_[0],
ctx.bufs_[1],
weight.size(3),
weight.size(2),
ctx.stride[1],
ctx.stride[0],
ctx.padding[1],
ctx.padding[0],
ctx.dilation[1],
ctx.dilation[0],
ctx.groups,
ctx.deformable_groups,
cur_im2col_step
)
return output
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
input, offset, weight = ctx.saved_tensors
grad_input = grad_offset = grad_weight = None
if not grad_output.is_cuda:
raise NotImplementedError
else:
cur_im2col_step = min(ctx.im2col_step, input.shape[0])
assert (input.shape[0] %
cur_im2col_step) == 0, 'im2col step must divide batchsize'
if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]:
grad_input = torch.zeros_like(input)
grad_offset = torch.zeros_like(offset)
_C.deform_conv_backward_input(
input,
offset,
grad_output,
grad_input,
grad_offset,
weight,
ctx.bufs_[0],
weight.size(3),
weight.size(2),
ctx.stride[1],
ctx.stride[0],
ctx.padding[1],
ctx.padding[0],
ctx.dilation[1],
ctx.dilation[0],
ctx.groups,
ctx.deformable_groups,
cur_im2col_step
)
if ctx.needs_input_grad[2]:
grad_weight = torch.zeros_like(weight)
_C.deform_conv_backward_parameters(
input,
offset,
grad_output,
grad_weight,
ctx.bufs_[0],
ctx.bufs_[1],
weight.size(3),
weight.size(2),
ctx.stride[1],
ctx.stride[0],
ctx.padding[1],
ctx.padding[0],
ctx.dilation[1],
ctx.dilation[0],
ctx.groups,
ctx.deformable_groups,
1,
cur_im2col_step
)
return (grad_input, grad_offset, grad_weight, None, None, None, None, None)
@staticmethod
def _output_size(input, weight, padding, dilation, stride):
channels = weight.size(0)
output_size = (input.size(0), channels)
for d in range(input.dim() - 2):
in_size = input.size(d + 2)
pad = padding[d]
kernel = dilation[d] * (weight.size(d + 2) - 1) + 1
stride_ = stride[d]
output_size += ((in_size + (2 * pad) - kernel) // stride_ + 1, )
if not all(map(lambda s: s > 0, output_size)):
raise ValueError(
"convolution input is too small (output would be {})".format(
'x'.join(map(str, output_size))))
return output_size
class ModulatedDeformConvFunction(Function):
@staticmethod
def forward(
ctx,
input,
offset,
mask,
weight,
bias=None,
stride=1,
padding=0,
dilation=1,
groups=1,
deformable_groups=1
):
ctx.stride = stride
ctx.padding = padding
ctx.dilation = dilation
ctx.groups = groups
ctx.deformable_groups = deformable_groups
ctx.with_bias = bias is not None
if not ctx.with_bias:
bias = input.new_empty(1) # fake tensor
if not input.is_cuda:
raise NotImplementedError
if weight.requires_grad or mask.requires_grad or offset.requires_grad \
or input.requires_grad:
ctx.save_for_backward(input, offset, mask, weight, bias)
output = input.new_empty(
ModulatedDeformConvFunction._infer_shape(ctx, input, weight))
ctx._bufs = [input.new_empty(0), input.new_empty(0)]
_C.modulated_deform_conv_forward(
input,
weight,
bias,
ctx._bufs[0],
offset,
mask,
output,
ctx._bufs[1],
weight.shape[2],
weight.shape[3],
ctx.stride,
ctx.stride,
ctx.padding,
ctx.padding,
ctx.dilation,
ctx.dilation,
ctx.groups,
ctx.deformable_groups,
ctx.with_bias
)
return output
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
if not grad_output.is_cuda:
raise NotImplementedError
input, offset, mask, weight, bias = ctx.saved_tensors
grad_input = torch.zeros_like(input)
grad_offset = torch.zeros_like(offset)
grad_mask = torch.zeros_like(mask)
grad_weight = torch.zeros_like(weight)
grad_bias = torch.zeros_like(bias)
_C.modulated_deform_conv_backward(
input,
weight,
bias,
ctx._bufs[0],
offset,
mask,
ctx._bufs[1],
grad_input,
grad_weight,
grad_bias,
grad_offset,
grad_mask,
grad_output,
weight.shape[2],
weight.shape[3],
ctx.stride,
ctx.stride,
ctx.padding,
ctx.padding,
ctx.dilation,
ctx.dilation,
ctx.groups,
ctx.deformable_groups,
ctx.with_bias
)
if not ctx.with_bias:
grad_bias = None
return (grad_input, grad_offset, grad_mask, grad_weight, grad_bias,
None, None, None, None, None)
@staticmethod
def _infer_shape(ctx, input, weight):
n = input.size(0)
channels_out = weight.size(0)
height, width = input.shape[2:4]
kernel_h, kernel_w = weight.shape[2:4]
height_out = (height + 2 * ctx.padding -
(ctx.dilation * (kernel_h - 1) + 1)) // ctx.stride + 1
width_out = (width + 2 * ctx.padding -
(ctx.dilation * (kernel_w - 1) + 1)) // ctx.stride + 1
return n, channels_out, height_out, width_out
deform_conv = DeformConvFunction.apply
modulated_deform_conv = ModulatedDeformConvFunction.apply
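# Illustrative usage sketch (shapes and values are assumptions, not part of this
# module): the offset tensor carries 2 * deformable_groups * kH * kW channels
# and matches the output's spatial size. The custom _C kernels require CUDA, so
# this only runs on a GPU build.
if __name__ == "__main__":
    if torch.cuda.is_available():
        x = torch.randn(2, 16, 32, 32, device="cuda")
        weight = torch.randn(32, 16, 3, 3, device="cuda", requires_grad=True)
        offset = torch.randn(2, 2 * 1 * 3 * 3, 32, 32, device="cuda")
        # stride=1, padding=1, dilation=1, groups=1, deformable_groups=1
        out = deform_conv(x, offset, weight, 1, 1, 1, 1, 1)
        out.sum().backward()
        print(out.shape, weight.grad.shape)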
|
{
"pile_set_name": "Github"
}
|
package s3manager
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/awstesting/unit"
"github.com/aws/aws-sdk-go/service/s3"
)
func testSetupGetBucketRegionServer(region string, statusCode int, incHeader bool) *httptest.Server {
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if incHeader {
w.Header().Set(bucketRegionHeader, region)
}
w.WriteHeader(statusCode)
}))
}
var testGetBucketRegionCases = []struct {
RespRegion string
StatusCode int
HintRegion string
ExpectReqRegion string
}{
{"bucket-region", 301, "hint-region", ""},
{"bucket-region", 403, "hint-region", ""},
{"bucket-region", 200, "hint-region", ""},
{"bucket-region", 200, "", "default-region"},
}
func TestGetBucketRegion_Exists(t *testing.T) {
for i, c := range testGetBucketRegionCases {
server := testSetupGetBucketRegionServer(c.RespRegion, c.StatusCode, true)
defer server.Close()
sess := unit.Session.Copy()
sess.Config.Region = aws.String("default-region")
sess.Config.Endpoint = aws.String(server.URL)
sess.Config.DisableSSL = aws.Bool(true)
ctx := aws.BackgroundContext()
region, err := GetBucketRegion(ctx, sess, "bucket", c.HintRegion)
if err != nil {
t.Fatalf("%d, expect no error, got %v", i, err)
}
if e, a := c.RespRegion, region; e != a {
t.Errorf("%d, expect %q region, got %q", i, e, a)
}
}
}
func TestGetBucketRegion_NotExists(t *testing.T) {
server := testSetupGetBucketRegionServer("ignore-region", 404, false)
defer server.Close()
sess := unit.Session.Copy()
sess.Config.Endpoint = aws.String(server.URL)
sess.Config.DisableSSL = aws.Bool(true)
ctx := aws.BackgroundContext()
region, err := GetBucketRegion(ctx, sess, "bucket", "hint-region")
if err == nil {
t.Fatalf("expect error, but did not get one")
}
aerr := err.(awserr.Error)
if e, a := "NotFound", aerr.Code(); e != a {
t.Errorf("expect %s error code, got %s", e, a)
}
if len(region) != 0 {
t.Errorf("expect region not to be set, got %q", region)
}
}
func TestGetBucketRegionWithClient(t *testing.T) {
for i, c := range testGetBucketRegionCases {
server := testSetupGetBucketRegionServer(c.RespRegion, c.StatusCode, true)
defer server.Close()
svc := s3.New(unit.Session, &aws.Config{
Region: aws.String("hint-region"),
Endpoint: aws.String(server.URL),
DisableSSL: aws.Bool(true),
})
ctx := aws.BackgroundContext()
region, err := GetBucketRegionWithClient(ctx, svc, "bucket")
if err != nil {
t.Fatalf("%d, expect no error, got %v", i, err)
}
if e, a := c.RespRegion, region; e != a {
t.Errorf("%d, expect %q region, got %q", i, e, a)
}
}
}
func TestGetBucketRegionWithClientWithoutRegion(t *testing.T) {
for i, c := range testGetBucketRegionCases {
server := testSetupGetBucketRegionServer(c.RespRegion, c.StatusCode, true)
defer server.Close()
svc := s3.New(unit.Session, &aws.Config{
Endpoint: aws.String(server.URL),
DisableSSL: aws.Bool(true),
})
ctx := aws.BackgroundContext()
region, err := GetBucketRegionWithClient(ctx, svc, "bucket")
if err != nil {
t.Fatalf("%d, expect no error, got %v", i, err)
}
if e, a := c.RespRegion, region; e != a {
t.Errorf("%d, expect %q region, got %q", i, e, a)
}
}
}
|
{
"pile_set_name": "Github"
}
|
signature DEVELOPMENT_AST =
sig
type label = Label.t
structure Syntax : ABT_UTIL
where type Operator.t = UniversalOperator.t
structure Tactic : TACTIC
where type label = label
where type term = Syntax.t
datatype command =
PRINT of Syntax.Operator.t
| EVAL of Syntax.t * int option
| SEARCH of Syntax.Operator.t
| ADD_RESOURCE of Resource.t * Tactic.t
| NEW_RESOURCE of Resource.t
datatype t =
THEOREM of label * Syntax.Operator.t * Syntax.t * Tactic.t
| OPERATOR of label * Syntax.Operator.t
| TACTIC of label * Tactic.t
| DEFINITION of Syntax.t * Syntax.t
| NOTATION of Notation.t * Syntax.Operator.t
| COMMAND of command
end
|
{
"pile_set_name": "Github"
}
|
/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package etcdraft
import (
"encoding/pem"
"github.com/hyperledger/fabric-protos-go/common"
"github.com/hyperledger/fabric/bccsp"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/orderer/common/cluster"
"github.com/hyperledger/fabric/orderer/common/localconfig"
"github.com/hyperledger/fabric/orderer/consensus"
"github.com/pkg/errors"
)
// LedgerBlockPuller pulls blocks upon demand, or fetches them from the ledger
type LedgerBlockPuller struct {
BlockPuller
BlockRetriever cluster.BlockRetriever
Height func() uint64
}
func (lp *LedgerBlockPuller) PullBlock(seq uint64) *common.Block {
lastSeq := lp.Height() - 1
if lastSeq >= seq {
return lp.BlockRetriever.Block(seq)
}
return lp.BlockPuller.PullBlock(seq)
}
// EndpointconfigFromSupport extracts TLS CA certificates and endpoints from the ConsenterSupport
func EndpointconfigFromSupport(support consensus.ConsenterSupport, bccsp bccsp.BCCSP) ([]cluster.EndpointCriteria, error) {
lastConfigBlock, err := lastConfigBlockFromSupport(support)
if err != nil {
return nil, err
}
endpointconf, err := cluster.EndpointconfigFromConfigBlock(lastConfigBlock, bccsp)
if err != nil {
return nil, err
}
return endpointconf, nil
}
func lastConfigBlockFromSupport(support consensus.ConsenterSupport) (*common.Block, error) {
lastBlockSeq := support.Height() - 1
lastBlock := support.Block(lastBlockSeq)
if lastBlock == nil {
return nil, errors.Errorf("unable to retrieve block [%d]", lastBlockSeq)
}
lastConfigBlock, err := cluster.LastConfigBlock(lastBlock, support)
if err != nil {
return nil, err
}
return lastConfigBlock, nil
}
// NewBlockPuller creates a new block puller
func NewBlockPuller(support consensus.ConsenterSupport,
baseDialer *cluster.PredicateDialer,
clusterConfig localconfig.Cluster,
bccsp bccsp.BCCSP,
) (BlockPuller, error) {
verifyBlockSequence := func(blocks []*common.Block, _ string) error {
return cluster.VerifyBlocks(blocks, support)
}
stdDialer := &cluster.StandardDialer{
Config: baseDialer.Config.Clone(),
}
stdDialer.Config.AsyncConnect = false
stdDialer.Config.SecOpts.VerifyCertificate = nil
// Extract the TLS CA certs and endpoints from the configuration.
endpoints, err := EndpointconfigFromSupport(support, bccsp)
if err != nil {
return nil, err
}
der, _ := pem.Decode(stdDialer.Config.SecOpts.Certificate)
if der == nil {
return nil, errors.Errorf("client certificate isn't in PEM format: %v",
string(stdDialer.Config.SecOpts.Certificate))
}
bp := &cluster.BlockPuller{
VerifyBlockSequence: verifyBlockSequence,
Logger: flogging.MustGetLogger("orderer.common.cluster.puller").With("channel", support.ChannelID()),
RetryTimeout: clusterConfig.ReplicationRetryTimeout,
MaxTotalBufferBytes: clusterConfig.ReplicationBufferSize,
FetchTimeout: clusterConfig.ReplicationPullTimeout,
Endpoints: endpoints,
Signer: support,
TLSCert: der.Bytes,
Channel: support.ChannelID(),
Dialer: stdDialer,
}
return &LedgerBlockPuller{
Height: support.Height,
BlockRetriever: support,
BlockPuller: bp,
}, nil
}
|
{
"pile_set_name": "Github"
}
|
//===--- FormatInternal.h - Format C++ code ---------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file declares Format APIs to be used internally by the
/// formatting library implementation.
///
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_LIB_FORMAT_FORMATINTERNAL_H
#define LLVM_CLANG_LIB_FORMAT_FORMATINTERNAL_H
#include "BreakableToken.h"
#include "clang/Tooling/Core/Lookup.h"
#include <utility>
namespace clang {
namespace format {
namespace internal {
/// Reformats the given \p Ranges in the code fragment \p Code.
///
/// A fragment of code could conceptually be surrounded by other code that might
/// constrain how that fragment is laid out.
/// For example, consider the fragment of code between 'R"(' and ')"',
/// exclusive, in the following code:
///
/// void outer(int x) {
/// string inner = R"(name: data
/// ^ FirstStartColumn
/// value: {
/// x: 1
/// ^ NextStartColumn
/// }
/// )";
/// ^ LastStartColumn
/// }
///
/// The outer code can influence the inner fragment as follows:
/// * \p FirstStartColumn specifies the column at which \p Code starts.
/// * \p NextStartColumn specifies the additional indent dictated by the
/// surrounding code. It is applied to the rest of the lines of \p Code.
/// * \p LastStartColumn specifies the column at which the last line of
/// \p Code should end, in case the last line is an empty line.
///
/// In the case where the last line of the fragment contains content,
/// the fragment ends at the end of that content and \p LastStartColumn is
/// not taken into account, for example in:
///
/// void block() {
/// string inner = R"(name: value)";
/// }
///
/// Each range is extended on either end to its next bigger logic unit, i.e.
/// everything that might influence its formatting or might be influenced by its
/// formatting.
///
/// Returns a pair P, where:
/// * P.first are the ``Replacements`` necessary to make all \p Ranges comply
/// with \p Style.
/// * P.second is the penalty induced by formatting the fragment \p Code.
/// If the formatting of the fragment doesn't have a notion of penalty,
/// returns 0.
///
/// If ``Status`` is non-null, its value will be populated with the status of
/// this formatting attempt. See \c FormattingAttemptStatus.
std::pair<tooling::Replacements, unsigned>
reformat(const FormatStyle &Style, StringRef Code,
ArrayRef<tooling::Range> Ranges, unsigned FirstStartColumn,
unsigned NextStartColumn, unsigned LastStartColumn, StringRef FileName,
FormattingAttemptStatus *Status);
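// A minimal usage sketch (illustrative only; the style, code fragment and
// range values below are hypothetical and not taken from this header):
//
//   FormatStyle Style = getLLVMStyle();
//   StringRef Code = "int  x=1;";
//   tooling::Range Ranges[] = {tooling::Range(0, Code.size())};
//   std::pair<tooling::Replacements, unsigned> Result =
//       internal::reformat(Style, Code, Ranges,
//                          /*FirstStartColumn=*/0, /*NextStartColumn=*/0,
//                          /*LastStartColumn=*/0, "<stdin>",
//                          /*Status=*/nullptr);
//   // Result.first holds the Replacements, Result.second the penalty.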
} // namespace internal
} // namespace format
} // namespace clang
#endif
|
{
"pile_set_name": "Github"
}
|
#include helpers.inc;
#include classMatches.inc;
// **************************************************************************************
function alert(fname, matchedText)
{
var ri = new TReportItem();
ri.LoadFromFile(fname);
ri.affects = "Web Server";
ri.alertPath = "Scripts/" + fname;
ri.setHttpInfo(lastJob);
if (matchedText)
ri.Details = ri.Details + "[break]Pattern found: [pre][blue]" + matchedText + "[/blue][/pre]";
AddReportItem(ri);
}
// **************************************************************************************
function request(uri)
{
lastJob = new THTTPJob();
lastJob.url = scanURL;
lastJob.verb = 'GET';
lastJob.URI = uri;
lastJob.addCookies = false;
lastJob.execute();
if (!lastJob.wasError && lastJob.response.msg2 == 200)
{
return true;
}
return false;
}
// **************************************************************************************
function Test1()
{
var urls = [
'/configprops'
];
matches.plainArray = [
'"management.health.status.CONFIGURATION_PROPERTIES"'
];
matches.regexArray = [
];
for (var i=0;i<urls.length;i++)
{
if (request(urls[i]))
{
var matchedText = matches.searchOnText(lastJob.response.body);
if (matchedText)
alert("Spring_Boot_Actuator.xml", matchedText);
}
}
}
// **************************************************************************************
function startTesting()
{
Test1();
}
/***********************************************************************************/
/* main entry point */
var matches = new classMatches();
var lastJob = null;
startTesting();
|
{
"pile_set_name": "Github"
}
|
#!/usr/bin/env python
# Copyright 2017 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Wrapper script for verify-predictable mode. D8 is expected to be compiled with
v8_enable_verify_predictable.
The actual test command is expected to be passed to this wrapper as is. E.g.:
predictable_wrapper.py path/to/d8 --test --predictable --flag1 --flag2
The command is run up to three times and the printed allocation hash is
compared. Differences are reported as errors.
"""
# for py2/py3 compatibility
from __future__ import print_function
import sys
from testrunner.local import command
from testrunner.local import utils
MAX_TRIES = 3
TIMEOUT = 120
# Predictable mode works only when run on the host os.
command.setup(utils.GuessOS(), None)
def main(args):
def allocation_str(stdout):
for line in reversed((stdout or '').splitlines()):
if line.startswith('### Allocations = '):
return line
return None
cmd = command.Command(args[0], args[1:], timeout=TIMEOUT)
previous_allocations = None
for run in range(1, MAX_TRIES + 1):
print('### Predictable run #%d' % run)
output = cmd.execute()
if output.stdout:
print('### Stdout:')
print(output.stdout)
if output.stderr:
print('### Stderr:')
print(output.stderr)
print('### Return code: %s' % output.exit_code)
if output.HasTimedOut():
# If we get a timeout in any run, we are in an unpredictable state. Just
# report it as a failure and don't rerun.
print('### Test timed out')
return 1
allocations = allocation_str(output.stdout)
if not allocations:
print ('### Test had no allocation output. Ensure this is built '
'with v8_enable_verify_predictable and that '
'--verify-predictable is passed at the cmd line.')
return 2
if previous_allocations and previous_allocations != allocations:
print('### Allocations differ')
return 3
if run >= MAX_TRIES:
# No difference on the last run -> report a success.
return 0
previous_allocations = allocations
# Unreachable.
assert False
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#ifdef _ALLBSD_SOURCE
#include <stdint.h>
#define THRTYPE intptr_t
#else
#define THRTYPE int
#endif
#include <sys/types.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>
#include <errno.h>
#include <limits.h>
#include <time.h>
#if defined(unix) && !defined(PRODUCT)
#include "pthread.h"
#define THREAD_SELF ((THRTYPE)pthread_self())
#endif
#include "defines.h"
#include "bytes.h"
#include "utils.h"
#include "coding.h"
#include "bands.h"
#include "constants.h"
#include "zip.h"
#include "unpack.h"
int main(int argc, char **argv) {
return unpacker::run(argc, argv);
}
// Single-threaded implementation, not reentrant.
// Includes a weak error check against MT access.
#ifndef THREAD_SELF
#define THREAD_SELF ((THRTYPE) 0)
#endif
NOT_PRODUCT(static THRTYPE uThread = -1;)
unpacker* unpacker::non_mt_current = null;
unpacker* unpacker::current() {
//assert(uThread == THREAD_SELF);
return non_mt_current;
}
static void set_current_unpacker(unpacker* u) {
unpacker::non_mt_current = u;
assert(((uThread = (u == null) ? (THRTYPE) -1 : THREAD_SELF),
true));
}
// Callback for fetching data, Unix style.
static jlong read_input_via_stdio(unpacker* u,
void* buf, jlong minlen, jlong maxlen) {
assert(minlen <= maxlen); // don't talk nonsense
jlong numread = 0;
char* bufptr = (char*) buf;
while (numread < minlen) {
// read available input, up to buf.length or maxlen
int readlen = (1<<16);
if (readlen > (maxlen - numread))
readlen = (int)(maxlen - numread);
int nr = 0;
if (u->infileptr != null) {
nr = (int)fread(bufptr, 1, readlen, u->infileptr);
} else {
#ifndef WIN32
// we prefer unbuffered inputs
nr = (int)read(u->infileno, bufptr, readlen);
#else
nr = (int)fread(bufptr, 1, readlen, stdin);
#endif
}
if (nr <= 0) {
if (errno != EINTR)
break;
nr = 0;
}
numread += nr;
bufptr += nr;
assert(numread <= maxlen);
}
//fprintf(u->errstrm, "readInputFn(%d,%d) => %d\n",
// (int)minlen, (int)maxlen, (int)numread);
return numread;
}
enum { EOF_MAGIC = 0, BAD_MAGIC = -1 };
static int read_magic(unpacker* u, char peek[], int peeklen) {
assert(peeklen == 4); // magic numbers are always 4 bytes
jlong nr = (u->read_input_fn)(u, peek, peeklen, peeklen);
if (nr != peeklen) {
return (nr == 0) ? EOF_MAGIC : BAD_MAGIC;
}
int magic = 0;
for (int i = 0; i < peeklen; i++) {
magic <<= 8;
magic += peek[i] & 0xFF;
}
return magic;
}
static void setup_gzin(unpacker* u) {
gunzip* gzin = NEW(gunzip, 1);
gzin->init(u);
}
static const char* nbasename(const char* progname) {
const char* slash = strrchr(progname, '/');
if (slash != null) progname = ++slash;
return progname;
}
static const char* usage_lines[] = {
"Usage: %s [-opt... | --option=value]... x.pack[.gz] y.jar\n",
"\n",
"Unpacking Options\n",
" -H{h}, --deflate-hint={h} override transmitted deflate hint: true, false, or keep (default)\n",
" -r, --remove-pack-file remove input file after unpacking\n",
" -v, --verbose increase program verbosity\n",
" -q, --quiet set verbosity to lowest level\n",
" -l{F}, --log-file={F} output to the given log file, or '-' for standard output (default)\n",
" -?, -h, --help print this message\n",
" -V, --version print program version\n",
" -J{X} Java VM argument (ignored)\n",
null
};
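// For reference, a typical standalone invocation (file names here are only
// illustrative) would be:
//   unpack200 --remove-pack-file archive.pack.gz archive.jar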
static void usage(unpacker* u, const char* progname, bool full = false) {
// WinMain does not set argv[0] to the program name
progname = (progname != null) ? nbasename(progname) : "unpack200";
for (int i = 0; usage_lines[i] != null; i++) {
fprintf(u->errstrm, usage_lines[i], progname);
if (!full) {
fprintf(u->errstrm,
"(For more information, run %s --help .)\n", progname);
break;
}
}
}
// argument parsing
static char** init_args(int argc, char** argv, int &envargc) {
const char* env = getenv("UNPACK200_FLAGS");
ptrlist envargs;
envargs.init();
if (env != null) {
char* buf = (char*) strdup(env);
const char* delim = "\n\t ";
for (char* p = strtok(buf, delim); p != null; p = strtok(null, delim)) {
envargs.add(p);
}
}
// allocate extra margin at both head and tail
char** argp = NEW(char*, envargs.length()+argc+1);
char** argp0 = argp;
int i;
for (i = 0; i < envargs.length(); i++) {
*argp++ = (char*) envargs.get(i);
}
for (i = 1; i < argc; i++) {
// note: skip argv[0] (program name)
*argp++ = (char*) strdup(argv[i]); // make a scratch copy
}
*argp = null; // sentinel
envargc = envargs.length(); // report this count to next_arg
envargs.free();
return argp0;
}
static int strpcmp(const char* str, const char* pfx) {
return strncmp(str, pfx, strlen(pfx));
}
static const char flag_opts[] = "vqrVh?";
static const char string_opts[] = "HlJ";
static int next_arg(char** &argp) {
char* arg = *argp;
if (arg == null || arg[0] != '-') { // end of option list
return 0;
}
//printf("opt: %s\n", arg);
char ach = arg[1];
if (ach == '\0') {
// ++argp; // do not pop this arg
return 0; // bare "-" is stdin/stdout
} else if (arg[1] == '-') { // --foo option
static const char* keys[] = {
"Hdeflate-hint=",
"vverbose",
"qquiet",
"rremove-pack-file",
"llog-file=",
"Vversion",
"hhelp",
null };
if (arg[2] == '\0') { // end of option list
++argp; // pop the "--"
return 0;
}
for (int i = 0; keys[i] != null; i++) {
const char* key = keys[i];
char kch = *key++;
if (strchr(key, '=') == null) {
if (!strcmp(arg+2, key)) {
++argp; // pop option arg
return kch;
}
} else {
if (!strpcmp(arg+2, key)) {
*argp += 2 + strlen(key); // remove "--"+key from arg
return kch;
}
}
}
} else if (strchr(flag_opts, ach) != null) { // plain option
if (arg[2] == '\0') {
++argp;
} else {
// in-place edit of "-vxyz" to "-xyz"
arg += 1; // skip original '-'
arg[0] = '-';
*argp = arg;
}
//printf(" key => %c\n", ach);
return ach;
} else if (strchr(string_opts, ach) != null) { // argument-bearing option
if (arg[2] == '\0') {
if (argp[1] == null) return -1; // no next arg
++argp; // leave the argument in place
} else {
// in-place edit of "-Hxyz" to "xyz"
arg += 2; // skip original '-H'
*argp = arg;
}
//printf(" key => %c\n", ach);
return ach;
}
return -1; // bad argument
}
static const char sccsver[] = "1.30, 07/05/05";
// Usage: unpack200 input.pack output.jar
int unpacker::run(int argc, char **argv) {
unpacker u;
u.init(read_input_via_stdio);
set_current_unpacker(&u);
jar jarout;
jarout.init(&u);
int envargc = 0;
char** argbuf = init_args(argc, argv, envargc);
char** arg0 = argbuf+envargc;
char** argp = argbuf;
int verbose = 0;
char* logfile = null;
for (;;) {
const char* arg = (*argp == null)? "": u.saveStr(*argp);
bool isenvarg = (argp < arg0);
int ach = next_arg(argp);
bool hasoptarg = (ach != 0 && strchr(string_opts, ach) != null);
if (ach == 0 && argp >= arg0) break;
if (isenvarg && argp == arg0 && hasoptarg) ach = 0; // don't pull from cmdline
switch (ach) {
case 'H': u.set_option(UNPACK_DEFLATE_HINT,*argp++); break;
case 'v': ++verbose; break;
case 'q': verbose = 0; break;
case 'r': u.set_option(UNPACK_REMOVE_PACKFILE,"1"); break;
case 'l': logfile = *argp++; break;
case 'J': argp += 1; break; // skip ignored -Jxxx parameter
case 'V':
fprintf(u.errstrm, VERSION_STRING, nbasename(argv[0]), sccsver);
exit(0);
case 'h':
case '?':
usage(&u, argv[0], true);
exit(1);
default:
const char* inenv = isenvarg? " in ${UNPACK200_FLAGS}": "";
if (hasoptarg)
fprintf(u.errstrm, "Missing option string%s: %s\n", inenv, arg);
else
fprintf(u.errstrm, "Unrecognized argument%s: %s\n", inenv, arg);
usage(&u, argv[0]);
exit(2);
}
}
if (verbose != 0) {
u.set_option(DEBUG_VERBOSE, u.saveIntStr(verbose));
}
if (logfile != null) {
u.set_option(UNPACK_LOG_FILE, logfile);
}
u.redirect_stdio();
const char* source_file = *argp++;
const char* destination_file = *argp++;
if (source_file == null || destination_file == null || *argp != null) {
usage(&u, argv[0]);
exit(2);
}
if (verbose != 0) {
fprintf(u.errstrm,
"Unpacking from %s to %s\n", source_file, destination_file);
}
bool& remove_source = u.remove_packfile;
if (strcmp(source_file, "-") == 0) {
remove_source = false;
u.infileno = fileno(stdin);
} else {
u.infileptr = fopen(source_file, "rb");
if (u.infileptr == null) {
fprintf(u.errstrm,
"Error: Could not open input file: %s\n", source_file);
exit(3); // Called only from the native standalone unpacker
}
}
if (strcmp(destination_file, "-") == 0) {
jarout.jarfp = stdout;
if (u.errstrm == stdout) // do not mix output
u.set_option(UNPACK_LOG_FILE, LOGFILE_STDERR);
} else {
jarout.openJarFile(destination_file);
assert(jarout.jarfp != null);
}
if (verbose != 0)
u.dump_options();
char peek[4];
int magic;
// check for GZIP input
magic = read_magic(&u, peek, (int)sizeof(peek));
if ((magic & GZIP_MAGIC_MASK) == GZIP_MAGIC) {
// Oops; must slap an input filter on this data.
setup_gzin(&u);
u.gzin->start(magic);
if (!u.aborting()) {
u.start();
}
} else {
u.start(peek, sizeof(peek));
}
// Note: The checks to u.aborting() are necessary to gracefully
// terminate processing when the first segment throws an error.
for (;;) {
if (u.aborting()) break;
// Each trip through this loop unpacks one segment
// and then resets the unpacker.
for (unpacker::file* filep; (filep = u.get_next_file()) != null; ) {
if (u.aborting()) break;
u.write_file_to_jar(filep);
}
if (u.aborting()) break;
// Peek ahead for more data.
magic = read_magic(&u, peek, (int)sizeof(peek));
if (magic != (int)JAVA_PACKAGE_MAGIC) {
if (magic != EOF_MAGIC)
u.abort("garbage after end of pack archive");
break; // all done
}
// Release all storage from parsing the old segment.
u.reset();
// Restart, beginning with the peek-ahead.
u.start(peek, sizeof(peek));
}
int status = 0;
if (u.aborting()) {
fprintf(u.errstrm, "Error: %s\n", u.get_abort_message());
status = 1;
}
if (u.infileptr != null) {
fclose(u.infileptr);
u.infileptr = null;
}
if (!u.aborting() && remove_source)
remove(source_file);
if (verbose != 0) {
fprintf(u.errstrm, "unpacker completed with status=%d\n", status);
}
u.finish();
u.free(); // tidy up malloc blocks
set_current_unpacker(null); // clean up global pointer
return status;
}
|
{
"pile_set_name": "Github"
}
|
var CombinedStream = require('combined-stream');
var util = require('util');
var path = require('path');
var http = require('http');
var https = require('https');
var parseUrl = require('url').parse;
var fs = require('fs');
var mime = require('mime-types');
var asynckit = require('asynckit');
var populate = require('./populate.js');
// Public API
module.exports = FormData;
// make it a Stream
util.inherits(FormData, CombinedStream);
/**
* Create readable "multipart/form-data" streams.
* Can be used to submit forms
* and file uploads to other web applications.
*
* @constructor
*/
function FormData() {
if (!(this instanceof FormData)) {
return new FormData();
}
this._overheadLength = 0;
this._valueLength = 0;
this._valuesToMeasure = [];
CombinedStream.call(this);
}
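// A minimal usage sketch for the constructor documented above (the field
// names, file path and URL are purely illustrative):
//
//   var form = new FormData();
//   form.append('my_field', 'my value');
//   form.append('my_file', fs.createReadStream('/tmp/photo.jpg'));
//   form.submit('http://example.org/upload', function(err, res) {
//     if (err) throw err;
//     res.resume();
//   });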
FormData.LINE_BREAK = '\r\n';
FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';
FormData.prototype.append = function(field, value, options) {
options = options || {};
// allow filename as single option
if (typeof options == 'string') {
options = {filename: options};
}
var append = CombinedStream.prototype.append.bind(this);
// all that streamy business can't handle numbers
if (typeof value == 'number') {
value = '' + value;
}
// https://github.com/felixge/node-form-data/issues/38
if (util.isArray(value)) {
// Please convert your array into string
// the way web server expects it
this._error(new Error('Arrays are not supported.'));
return;
}
var header = this._multiPartHeader(field, value, options);
var footer = this._multiPartFooter();
append(header);
append(value);
append(footer);
// pass along options.knownLength
this._trackLength(header, value, options);
};
FormData.prototype._trackLength = function(header, value, options) {
var valueLength = 0;
// used w/ getLengthSync(), when length is known.
// e.g. for streaming directly from a remote server,
// w/ a known file size, and not wanting to wait for
// incoming file to finish to get its size.
if (options.knownLength != null) {
valueLength += +options.knownLength;
} else if (Buffer.isBuffer(value)) {
valueLength = value.length;
} else if (typeof value === 'string') {
valueLength = Buffer.byteLength(value);
}
this._valueLength += valueLength;
// @check why add CRLF? does this account for custom/multiple CRLFs?
this._overheadLength +=
Buffer.byteLength(header) +
FormData.LINE_BREAK.length;
// skip if value is empty, or if it neither has a path nor is an http response
if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) {
return;
}
// no need to bother with the length
if (!options.knownLength) {
this._valuesToMeasure.push(value);
}
};
FormData.prototype._lengthRetriever = function(value, callback) {
if (value.hasOwnProperty('fd')) {
// take read range into a account
// `end` = Infinity -> read the file till the end
//
// TODO: Looks like there is bug in Node fs.createReadStream
// it doesn't respect `end` options without `start` options
// Fix it when node fixes it.
// https://github.com/joyent/node/issues/7819
if (value.end != undefined && value.end != Infinity && value.start != undefined) {
// when end specified
// no need to calculate range
// inclusive, starts with 0
callback(null, value.end + 1 - (value.start ? value.start : 0));
// not that fast snoopy
} else {
// still need to fetch file size from fs
fs.stat(value.path, function(err, stat) {
var fileSize;
if (err) {
callback(err);
return;
}
// update final size based on the range options
fileSize = stat.size - (value.start ? value.start : 0);
callback(null, fileSize);
});
}
// or http response
} else if (value.hasOwnProperty('httpVersion')) {
callback(null, +value.headers['content-length']);
// or request stream http://github.com/mikeal/request
} else if (value.hasOwnProperty('httpModule')) {
// wait till response come back
value.on('response', function(response) {
value.pause();
callback(null, +response.headers['content-length']);
});
value.resume();
// something else
} else {
callback('Unknown stream');
}
};
FormData.prototype._multiPartHeader = function(field, value, options) {
// custom header specified (as string)?
// it becomes responsible for boundary
// (e.g. to handle extra CRLFs on .NET servers)
if (typeof options.header == 'string') {
return options.header;
}
var contentDisposition = this._getContentDisposition(value, options);
var contentType = this._getContentType(value, options);
var contents = '';
var headers = {
// add the custom disposition as a third element, or keep just two elements if there is none
'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []),
// if there is no content type, allow it to be an empty array
'Content-Type': [].concat(contentType || [])
};
// allow custom headers.
if (typeof options.header == 'object') {
populate(headers, options.header);
}
var header;
for (var prop in headers) {
header = headers[prop];
// skip nullish headers.
if (header == null) {
continue;
}
// convert all headers to arrays.
if (!Array.isArray(header)) {
header = [header];
}
// add non-empty headers.
if (header.length) {
contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;
}
}
return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;
};
FormData.prototype._getContentDisposition = function(value, options) {
var contentDisposition;
// custom filename takes precedence
// fs- and request- streams have path property
// formidable and the browser add a name property.
var filename = options.filename || value.name || value.path;
// or try http response
if (!filename && value.readable && value.hasOwnProperty('httpVersion')) {
filename = value.client._httpMessage.path;
}
if (filename) {
contentDisposition = 'filename="' + path.basename(filename) + '"';
}
return contentDisposition;
};
FormData.prototype._getContentType = function(value, options) {
// use custom content-type above all
var contentType = options.contentType;
// or try `name` from formidable, browser
if (!contentType && value.name) {
contentType = mime.lookup(value.name);
}
// or try `path` from fs-, request- streams
if (!contentType && value.path) {
contentType = mime.lookup(value.path);
}
// or if it's an http response
if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {
contentType = value.headers['content-type'];
}
// or guess it from the filename
if (!contentType && options.filename) {
contentType = mime.lookup(options.filename);
}
// fallback to the default content type if `value` is not simple value
if (!contentType && typeof value == 'object') {
contentType = FormData.DEFAULT_CONTENT_TYPE;
}
return contentType;
};
FormData.prototype._multiPartFooter = function() {
return function(next) {
var footer = FormData.LINE_BREAK;
var lastPart = (this._streams.length === 0);
if (lastPart) {
footer += this._lastBoundary();
}
next(footer);
}.bind(this);
};
FormData.prototype._lastBoundary = function() {
return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;
};
FormData.prototype.getHeaders = function(userHeaders) {
var header;
var formHeaders = {
'content-type': 'multipart/form-data; boundary=' + this.getBoundary()
};
for (header in userHeaders) {
if (userHeaders.hasOwnProperty(header)) {
formHeaders[header.toLowerCase()] = userHeaders[header];
}
}
return formHeaders;
};
FormData.prototype.getBoundary = function() {
if (!this._boundary) {
this._generateBoundary();
}
return this._boundary;
};
FormData.prototype._generateBoundary = function() {
// This generates a 50 character boundary similar to those used by Firefox.
// They are optimized for Boyer-Moore parsing.
var boundary = '--------------------------';
for (var i = 0; i < 24; i++) {
boundary += Math.floor(Math.random() * 10).toString(16);
}
this._boundary = boundary;
};
// Note: getLengthSync DOESN'T calculate streams length
// As workaround one can calculate file size manually
// and add it as knownLength option
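// For example (illustrative file path; fs.statSync is used here only to show
// how a known length could be obtained up front):
//   form.append('upload', fs.createReadStream('/tmp/data.bin'),
//               {knownLength: fs.statSync('/tmp/data.bin').size});
//   var length = form.getLengthSync();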
FormData.prototype.getLengthSync = function() {
var knownLength = this._overheadLength + this._valueLength;
// Don't get confused, there are 3 "internal" streams for each keyval pair
// so it basically checks if there is any value added to the form
if (this._streams.length) {
knownLength += this._lastBoundary().length;
}
// https://github.com/form-data/form-data/issues/40
if (!this.hasKnownLength()) {
// Some async length retrievers are present
// therefore synchronous length calculation is false.
// Please use getLength(callback) to get proper length
this._error(new Error('Cannot calculate proper length in synchronous way.'));
}
return knownLength;
};
// Public API to check if length of added values is known
// https://github.com/form-data/form-data/issues/196
// https://github.com/form-data/form-data/issues/262
FormData.prototype.hasKnownLength = function() {
var hasKnownLength = true;
if (this._valuesToMeasure.length) {
hasKnownLength = false;
}
return hasKnownLength;
};
FormData.prototype.getLength = function(cb) {
var knownLength = this._overheadLength + this._valueLength;
if (this._streams.length) {
knownLength += this._lastBoundary().length;
}
if (!this._valuesToMeasure.length) {
process.nextTick(cb.bind(this, null, knownLength));
return;
}
asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) {
if (err) {
cb(err);
return;
}
values.forEach(function(length) {
knownLength += length;
});
cb(null, knownLength);
});
};
FormData.prototype.submit = function(params, cb) {
var request
, options
, defaults = {method: 'post'}
;
// parse provided url if it's string
// or treat it as options object
if (typeof params == 'string') {
params = parseUrl(params);
options = populate({
port: params.port,
path: params.pathname,
host: params.hostname
}, defaults);
// use custom params
} else {
options = populate(params, defaults);
// if no port provided use default one
if (!options.port) {
options.port = options.protocol == 'https:' ? 443 : 80;
}
}
// put that good code in getHeaders to some use
options.headers = this.getHeaders(params.headers);
// https if specified, fallback to http in any other case
if (options.protocol == 'https:') {
request = https.request(options);
} else {
request = http.request(options);
}
// get content length and fire away
this.getLength(function(err, length) {
if (err) {
this._error(err);
return;
}
// add content length
request.setHeader('Content-Length', length);
this.pipe(request);
if (cb) {
request.on('error', cb);
request.on('response', cb.bind(this, null));
}
}.bind(this));
return request;
};
FormData.prototype._error = function(err) {
if (!this.error) {
this.error = err;
this.pause();
this.emit('error', err);
}
};
FormData.prototype.toString = function () {
return '[object FormData]';
};
|
{
"pile_set_name": "Github"
}
|
package com.xxmassdeveloper.mpchartexample;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.WindowManager;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import android.widget.Toast;
import com.github.mikephil.charting.animation.Easing;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.Legend.LegendForm;
import com.github.mikephil.charting.components.LimitLine;
import com.github.mikephil.charting.components.LimitLine.LimitLabelPosition;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.data.DataSet;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.github.mikephil.charting.data.filter.Approximator;
import com.github.mikephil.charting.data.filter.Approximator.ApproximatorType;
import com.github.mikephil.charting.listener.OnChartGestureListener;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import com.github.mikephil.charting.utils.Highlight;
import com.xxmassdeveloper.mpchartexample.custom.MyMarkerView;
import com.xxmassdeveloper.mpchartexample.notimportant.DemoBase;
import java.util.ArrayList;
public class LineChartActivity1 extends DemoBase implements OnSeekBarChangeListener,
OnChartGestureListener, OnChartValueSelectedListener {
private LineChart mChart;
private SeekBar mSeekBarX, mSeekBarY;
private TextView tvX, tvY;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_linechart);
tvX = (TextView) findViewById(R.id.tvXMax);
tvY = (TextView) findViewById(R.id.tvYMax);
mSeekBarX = (SeekBar) findViewById(R.id.seekBar1);
mSeekBarY = (SeekBar) findViewById(R.id.seekBar2);
mSeekBarX.setProgress(45);
mSeekBarY.setProgress(100);
mSeekBarY.setOnSeekBarChangeListener(this);
mSeekBarX.setOnSeekBarChangeListener(this);
mChart = (LineChart) findViewById(R.id.chart1);
mChart.setOnChartGestureListener(this);
mChart.setOnChartValueSelectedListener(this);
// no description text
mChart.setDescription("");
mChart.setNoDataTextDescription("You need to provide data for the chart.");
// enable value highlighting
mChart.setHighlightEnabled(true);
// enable touch gestures
mChart.setTouchEnabled(true);
// enable scaling and dragging
mChart.setDragEnabled(true);
mChart.setScaleEnabled(true);
// mChart.setScaleXEnabled(true);
// mChart.setScaleYEnabled(true);
// if disabled, scaling can be done on x- and y-axis separately
mChart.setPinchZoom(true);
// set an alternative background color
// mChart.setBackgroundColor(Color.GRAY);
// create a custom MarkerView (extend MarkerView) and specify the layout
// to use for it
MyMarkerView mv = new MyMarkerView(this, R.layout.custom_marker_view);
// set the marker to the chart
mChart.setMarkerView(mv);
// enable/disable highlight indicators (the lines that indicate the
// highlighted Entry)
mChart.setHighlightIndicatorEnabled(false);
// x-axis limit line
// LimitLine llXAxis = new LimitLine(10f, "Index 10");
// llXAxis.setLineWidth(4f);
// llXAxis.enableDashedLine(10f, 10f, 0f);
// llXAxis.setLabelPosition(LimitLabelPosition.POS_RIGHT);
// llXAxis.setTextSize(10f);
//
// XAxis xAxis = mChart.getXAxis();
// xAxis.addLimitLine(llXAxis);
LimitLine ll1 = new LimitLine(130f, "Upper Limit");
ll1.setLineWidth(4f);
ll1.enableDashedLine(10f, 10f, 0f);
ll1.setLabelPosition(LimitLabelPosition.POS_RIGHT);
ll1.setTextSize(10f);
LimitLine ll2 = new LimitLine(-30f, "Lower Limit");
ll2.setLineWidth(4f);
ll2.enableDashedLine(10f, 10f, 0f);
ll2.setLabelPosition(LimitLabelPosition.POS_RIGHT);
ll2.setTextSize(10f);
YAxis leftAxis = mChart.getAxisLeft();
leftAxis.removeAllLimitLines(); // reset all limit lines to avoid overlapping lines
leftAxis.addLimitLine(ll1);
leftAxis.addLimitLine(ll2);
leftAxis.setAxisMaxValue(220f);
leftAxis.setAxisMinValue(-50f);
leftAxis.setStartAtZero(false);
leftAxis.enableGridDashedLine(10f, 10f, 0f);
// limit lines are drawn behind data (and not on top)
leftAxis.setDrawLimitLinesBehindData(true);
mChart.getAxisRight().setEnabled(false);
// add data
setData(45, 100);
// mChart.setVisibleXRange(20);
// mChart.setVisibleYRange(20f, AxisDependency.LEFT);
// mChart.centerViewTo(20, 50, AxisDependency.LEFT);
mChart.animateX(2500, Easing.EasingOption.EaseInOutQuart);
// mChart.invalidate();
// get the legend (only possible after setting data)
Legend l = mChart.getLegend();
// modify the legend ...
// l.setPosition(LegendPosition.LEFT_OF_CHART);
l.setForm(LegendForm.LINE);
// // dont forget to refresh the drawing
// mChart.invalidate();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.line, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.actionToggleValues: {
for (DataSet<?> set : mChart.getData().getDataSets())
set.setDrawValues(!set.isDrawValuesEnabled());
mChart.invalidate();
break;
}
case R.id.actionToggleHighlight: {
if (mChart.isHighlightEnabled())
mChart.setHighlightEnabled(false);
else
mChart.setHighlightEnabled(true);
mChart.invalidate();
break;
}
case R.id.actionToggleFilled: {
ArrayList<LineDataSet> sets = (ArrayList<LineDataSet>) mChart.getData()
.getDataSets();
for (LineDataSet set : sets) {
if (set.isDrawFilledEnabled())
set.setDrawFilled(false);
else
set.setDrawFilled(true);
}
mChart.invalidate();
break;
}
case R.id.actionToggleCircles: {
ArrayList<LineDataSet> sets = (ArrayList<LineDataSet>) mChart.getData()
.getDataSets();
for (LineDataSet set : sets) {
if (set.isDrawCirclesEnabled())
set.setDrawCircles(false);
else
set.setDrawCircles(true);
}
mChart.invalidate();
break;
}
case R.id.actionToggleCubic: {
ArrayList<LineDataSet> sets = (ArrayList<LineDataSet>) mChart.getData()
.getDataSets();
for (LineDataSet set : sets) {
if (set.isDrawCubicEnabled())
set.setDrawCubic(false);
else
set.setDrawCubic(true);
}
mChart.invalidate();
break;
}
case R.id.actionToggleStartzero: {
mChart.getAxisLeft().setStartAtZero(!mChart.getAxisLeft().isStartAtZeroEnabled());
mChart.getAxisRight().setStartAtZero(!mChart.getAxisRight().isStartAtZeroEnabled());
mChart.invalidate();
break;
}
case R.id.actionTogglePinch: {
if (mChart.isPinchZoomEnabled())
mChart.setPinchZoom(false);
else
mChart.setPinchZoom(true);
mChart.invalidate();
break;
}
case R.id.animateX: {
mChart.animateX(3000);
break;
}
case R.id.animateY: {
mChart.animateY(3000, Easing.EasingOption.EaseInCubic);
break;
}
case R.id.animateXY: {
mChart.animateXY(3000, 3000);
break;
}
case R.id.actionToggleFilter: {
// the angle of filtering is 35°
Approximator a = new Approximator(ApproximatorType.DOUGLAS_PEUCKER, 35);
if (!mChart.isFilteringEnabled()) {
mChart.enableFiltering(a);
} else {
mChart.disableFiltering();
}
mChart.invalidate();
//
// for(int i = 0; i < 10; i++) {
// mChart.addEntry(new Entry((float) (Math.random() * 100),
// i+2), 0);
// mChart.invalidate();
// }
//
// Toast.makeText(getApplicationContext(), "valcount: " +
// mChart.getDataOriginal().getYValCount() + ", valsum: " +
// mChart.getDataOriginal().getYValueSum(),
// Toast.LENGTH_SHORT).show();
//
break;
}
case R.id.actionSave: {
if (mChart.saveToPath("title" + System.currentTimeMillis(), "")) {
Toast.makeText(getApplicationContext(), "Saving SUCCESSFUL!",
Toast.LENGTH_SHORT).show();
} else
Toast.makeText(getApplicationContext(), "Saving FAILED!", Toast.LENGTH_SHORT)
.show();
// mChart.saveToGallery("title"+System.currentTimeMillis())
break;
}
}
return true;
}
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
tvX.setText("" + (mSeekBarX.getProgress() + 1));
tvY.setText("" + (mSeekBarY.getProgress()));
setData(mSeekBarX.getProgress() + 1, mSeekBarY.getProgress());
// redraw
mChart.invalidate();
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// TODO Auto-generated method stub
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
// TODO Auto-generated method stub
}
private void setData(int count, float range) {
ArrayList<String> xVals = new ArrayList<String>();
for (int i = 0; i < count; i++) {
xVals.add((i) + "");
}
ArrayList<Entry> yVals = new ArrayList<Entry>();
for (int i = 0; i < count; i++) {
float mult = (range + 1);
float val = (float) (Math.random() * mult) + 3;// + (float)
// ((mult *
// 0.1) / 10);
yVals.add(new Entry(val, i));
}
// create a dataset and give it a type
LineDataSet set1 = new LineDataSet(yVals, "DataSet 1");
// set1.setFillAlpha(110);
// set1.setFillColor(Color.RED);
// set the line to be drawn like this "- - - - - -"
set1.enableDashedLine(10f, 5f, 0f);
set1.setColor(Color.BLACK);
set1.setCircleColor(Color.BLACK);
set1.setLineWidth(1f);
set1.setCircleSize(3f);
set1.setDrawCircleHole(false);
set1.setValueTextSize(9f);
set1.setFillAlpha(65);
set1.setFillColor(Color.BLACK);
// set1.setDrawFilled(true);
// set1.setShader(new LinearGradient(0, 0, 0, mChart.getHeight(),
// Color.BLACK, Color.WHITE, Shader.TileMode.MIRROR));
ArrayList<LineDataSet> dataSets = new ArrayList<LineDataSet>();
dataSets.add(set1); // add the datasets
// create a data object with the datasets
LineData data = new LineData(xVals, dataSets);
// set data
mChart.setData(data);
}
@Override
public void onChartLongPressed(MotionEvent me) {
Log.i("LongPress", "Chart longpressed.");
}
@Override
public void onChartDoubleTapped(MotionEvent me) {
Log.i("DoubleTap", "Chart double-tapped.");
}
@Override
public void onChartSingleTapped(MotionEvent me) {
Log.i("SingleTap", "Chart single-tapped.");
}
@Override
public void onChartFling(MotionEvent me1, MotionEvent me2, float velocityX, float velocityY) {
Log.i("Fling", "Chart flinged. VeloX: " + velocityX + ", VeloY: " + velocityY);
}
@Override
public void onChartScale(MotionEvent me, float scaleX, float scaleY) {
Log.i("Scale / Zoom", "ScaleX: " + scaleX + ", ScaleY: " + scaleY);
}
@Override
public void onChartTranslate(MotionEvent me, float dX, float dY) {
Log.i("Translate / Move", "dX: " + dX + ", dY: " + dY);
}
@Override
public void onValueSelected(Entry e, int dataSetIndex, Highlight h) {
Log.i("Entry selected", e.toString());
Log.i("", "low: " + mChart.getLowestVisibleXIndex() + ", high: " + mChart.getHighestVisibleXIndex());
}
@Override
public void onNothingSelected() {
Log.i("Nothing selected", "Nothing selected.");
}
}
|
{
"pile_set_name": "Github"
}
|
<?php
/*
* This file is part of PHPExifTool.
*
* (c) 2012 Romain Neutron <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace PHPExiftool\Driver\Tag\XML;
use JMS\Serializer\Annotation\ExclusionPolicy;
use PHPExiftool\Driver\AbstractTag;
/**
* @ExclusionPolicy("all")
*/
class Project extends AbstractTag
{
protected $Id = 'Project';
protected $Name = 'Project';
protected $FullName = 'OOXML::Main';
protected $GroupName = 'XML';
protected $g0 = 'XML';
protected $g1 = 'XML';
protected $g2 = 'Document';
protected $Type = '?';
protected $Writable = false;
protected $Description = 'Project';
}
|
{
"pile_set_name": "Github"
}
|
/* Code generated by IfcQuery EXPRESS generator, www.ifcquery.com */
#pragma once
#include <vector>
#include <map>
#include <sstream>
#include <string>
#include "ifcpp/model/GlobalDefines.h"
#include "ifcpp/model/BasicTypes.h"
#include "ifcpp/model/BuildingObject.h"
#include "IfcStructuralAction.h"
//ENTITY
class IFCQUERY_EXPORT IfcStructuralPointAction : public IfcStructuralAction
{
public:
IfcStructuralPointAction() = default;
IfcStructuralPointAction( int id );
~IfcStructuralPointAction() = default;
virtual shared_ptr<BuildingObject> getDeepCopy( BuildingCopyOptions& options );
virtual void getStepLine( std::stringstream& stream ) const;
virtual void getStepParameter( std::stringstream& stream, bool is_select_type = false ) const;
virtual void readStepArguments( const std::vector<std::wstring>& args, const std::map<int,shared_ptr<BuildingEntity> >& map );
virtual void setInverseCounterparts( shared_ptr<BuildingEntity> ptr_self );
virtual size_t getNumAttributes() { return 10; }
virtual void getAttributes( std::vector<std::pair<std::string, shared_ptr<BuildingObject> > >& vec_attributes ) const;
virtual void getAttributesInverse( std::vector<std::pair<std::string, shared_ptr<BuildingObject> > >& vec_attributes ) const;
virtual void unlinkFromInverseCounterparts();
virtual const char* className() const { return "IfcStructuralPointAction"; }
virtual const std::wstring toString() const;
// IfcRoot -----------------------------------------------------------
// attributes:
// shared_ptr<IfcGloballyUniqueId> m_GlobalId;
// shared_ptr<IfcOwnerHistory> m_OwnerHistory; //optional
// shared_ptr<IfcLabel> m_Name; //optional
// shared_ptr<IfcText> m_Description; //optional
// IfcObjectDefinition -----------------------------------------------------------
// inverse attributes:
// std::vector<weak_ptr<IfcRelAssigns> > m_HasAssignments_inverse;
// std::vector<weak_ptr<IfcRelNests> > m_Nests_inverse;
// std::vector<weak_ptr<IfcRelNests> > m_IsNestedBy_inverse;
// std::vector<weak_ptr<IfcRelDeclares> > m_HasContext_inverse;
// std::vector<weak_ptr<IfcRelAggregates> > m_IsDecomposedBy_inverse;
// std::vector<weak_ptr<IfcRelAggregates> > m_Decomposes_inverse;
// std::vector<weak_ptr<IfcRelAssociates> > m_HasAssociations_inverse;
// IfcObject -----------------------------------------------------------
// attributes:
// shared_ptr<IfcLabel> m_ObjectType; //optional
// inverse attributes:
// std::vector<weak_ptr<IfcRelDefinesByObject> > m_IsDeclaredBy_inverse;
// std::vector<weak_ptr<IfcRelDefinesByObject> > m_Declares_inverse;
// std::vector<weak_ptr<IfcRelDefinesByType> > m_IsTypedBy_inverse;
// std::vector<weak_ptr<IfcRelDefinesByProperties> > m_IsDefinedBy_inverse;
// IfcProduct -----------------------------------------------------------
// attributes:
// shared_ptr<IfcObjectPlacement> m_ObjectPlacement; //optional
// shared_ptr<IfcProductRepresentation> m_Representation; //optional
// inverse attributes:
// std::vector<weak_ptr<IfcRelAssignsToProduct> > m_ReferencedBy_inverse;
// IfcStructuralActivity -----------------------------------------------------------
// attributes:
// shared_ptr<IfcStructuralLoad> m_AppliedLoad;
// shared_ptr<IfcGlobalOrLocalEnum> m_GlobalOrLocal;
// inverse attributes:
// std::vector<weak_ptr<IfcRelConnectsStructuralActivity> > m_AssignedToStructuralItem_inverse;
// IfcStructuralAction -----------------------------------------------------------
// attributes:
// shared_ptr<IfcBoolean> m_DestabilizingLoad; //optional
// IfcStructuralPointAction -----------------------------------------------------------
};
|
{
"pile_set_name": "Github"
}
|
#
# This is a project Makefile. It is assumed the directory this Makefile resides in is a
# project subdirectory.
#
PROJECT_NAME := 12_accesspoint
include $(IDF_PATH)/make/project.mk
|
{
"pile_set_name": "Github"
}
|
[comment {-*- tcl -*- doctools manpage}]
[manpage_begin smtp n 1.4.5]
[see_also ftp]
[see_also http]
[see_also mime]
[see_also pop3]
[copyright {1999-2000 Marshall T. Rose and others}]
[moddesc {smtp client}]
[titledesc {Client-side tcl implementation of the smtp protocol}]
[category Networking]
[require Tcl]
[require mime [opt 1.5.4]]
[require smtp [opt 1.4.5]]
[description]
[para]
The [package smtp] library package provides the client side of the
Simple Mail Transfer Protocol (SMTP) (1) (2).
[list_begin definitions]
[call [cmd ::smtp::sendmessage] [arg token] [arg option]...]
This command sends the MIME part (see package [package mime])
represented by [arg token] to an SMTP server. [arg options] is a list
of options and their associated values. The recognized options are:
[list_begin definitions]
[def [option -servers]]
A list of SMTP servers. The default is [const localhost].
[def [option -ports]]
A list of SMTP ports. The default is [const 25].
[def [option -client]]
The name to use as our hostname when connecting to the server. By
default this is either localhost if one of the servers is localhost,
or is set to the string returned by [cmd "info hostname"].
[def [option -queue]]
Indicates that the SMTP server should be asked to queue the message
for later processing. A boolean value.
[def [option -atleastone]]
Indicates that the SMTP server must find at least one recipient
acceptable for the message to be sent. A boolean value.
[def [option -originator]]
A string containing an 822-style address specification. If present, the
header isn't examined for an originator address.
[def [option -recipients]]
A string containing one or more 822-style address specifications. If
present, the header isn't examined for recipient addresses. If the
string contains more than one address, they will be separated by
commas.
[def [option -header]]
A list containing two elements, an smtp header and its associated
value (the -header option may occur zero or more times).
[def [option -usetls]]
This package supports the RFC 3207 TLS extension (3) by default, provided the
tls package is available. You can turn this off with this boolean option.
[def [option -tlspolicy]]
This option lets you specify a command to be called if an error occurs
during TLS setup. The command is called with the SMTP code and diagnostic
message appended. The command should return 'secure' or 'insecure' where
insecure will cause the package to continue on the unencrypted channel.
Returning 'secure' will cause the socket to be closed and the next server
in the [option -servers] list to be tried.
[def [option -username]]
[def [option -password]]
If your SMTP server requires authentication (RFC 2554 (4)) before
accepting mail, you can use [option -username] and [option -password]
to provide your authentication details to the server. Currently this
package supports DIGEST-MD5, CRAM-MD5, LOGIN and PLAIN authentication
methods. The most secure method will be tried first and each method
tried in turn until we are either authorized or we run out of
methods. Note that if the server permits a TLS connection, then the
authorization will occur after we begin using the secure channel.
[para]
Please also read the section on [sectref Authentication]; it details
the necessary prerequisites, i.e. the packages needed to support these
options and authentication.
[list_end]
[para]
If the [option -originator] option is not present, the originator
address is taken from [const From] (or [const Resent-From]);
similarly, if the [option -recipients] option is not present,
recipient addresses are taken from [const To], [const cc], and
[const Bcc] (or [const Resent-To], and so on). Note that the header
key/values supplied by the [option -header] option (not those present
in the MIME part) are consulted. Regardless, header key/values are
added to the outgoing message as necessary to ensure that a valid
822-style message is sent.
[para]
The command returns a list indicating which recipients were
unacceptable to the SMTP server. Each element of the list is another
list, containing the address, an SMTP error code, and a textual
diagnostic. Depending on the [option -atleastone] option and the
intended recipients, a non-empty list may still indicate that the
message was accepted by the server.
[list_end]
[section Authentication]
Beware. SMTP authentication uses [package SASL]. I.e. if the user
has to authenticate a connection, i.e. use the options [option -username]
and [option -password] (see above), it is necessary to have the
[package sasl] package available so that [package smtp] can load it.
[para]
This is a soft dependency because not everybody requires authentication,
and [package sasl] depends on a lot of the cryptographic (secure) hashes,
i.e. all of [package md5], [package otp], [package md4], [package sha1],
and [package ripemd160].
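[para]
As an illustration only (the server name, address, and credentials below are
placeholders, not defaults of this package), an authenticated submission over
TLS might look roughly like this:
[example {
    package require smtp
    package require mime

    set token [mime::initialize -canonical text/plain \\
        -string "Hello, world."]
    smtp::sendmessage $token \\
        -recipients someone@example.com -servers smtp.example.com \\
        -usetls true -username myuser -password mypass
    mime::finalize $token
}]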
[section EXAMPLE]
[example {
proc send_simple_message {recipient email_server subject body} {
package require smtp
package require mime
set token [mime::initialize -canonical text/plain \\
-string $body]
mime::setheader $token Subject $subject
smtp::sendmessage $token \\
-recipients $recipient -servers $email_server
mime::finalize $token
}
send_simple_message [email protected] localhost \\
"This is the subject." "This is the message."
}]
[include ../common-text/tls-security-notes.inc]
[section {REFERENCES}]
[list_begin enumerated]
[enum]
Jonathan B. Postel, "SIMPLE MAIL TRANSFER PROTOCOL", RFC 821, August 1982.
([uri http://www.rfc-editor.org/rfc/rfc821.txt])
[enum]
J. Klensin, "Simple Mail Transfer Protocol", RFC 2821, April 2001.
([uri http://www.rfc-editor.org/rfc/rfc2821.txt])
[enum]
P. Hoffman, "SMTP Service Extension for Secure SMTP over Transport
Layer Security", RFC 3207, February 2002.
([uri http://www.rfc-editor.org/rfc/rfc3207.txt])
[enum]
J. Myers, "SMTP Service Extension for Authentication",
RFC 2554, March 1999.
([uri http://www.rfc-editor.org/rfc/rfc2554.txt])
[list_end]
[vset CATEGORY smtp]
[include ../doctools2base/include/feedback.inc]
[keywords mail mail email smtp mime tls \
{rfc 821} {rfc 822} {rfc 2821} {rfc 3207} {rfc 2554} internet net]
[manpage_end]
|
{
"pile_set_name": "Github"
}
|
uniform mat4 ModelViewProjectionMatrix;
uniform float size;
uniform float outlineWidth;
in vec2 pos;
out vec4 radii;
void main()
{
gl_Position = ModelViewProjectionMatrix * vec4(pos, 0.0, 1.0);
gl_PointSize = size;
// calculate concentric radii in pixels
float radius = 0.5 * size;
// start at the outside and progress toward the center
radii[0] = radius;
radii[1] = radius - 1.0;
radii[2] = radius - outlineWidth;
radii[3] = radius - outlineWidth - 1.0;
// convert to PointCoord units
radii /= size;
}
|
{
"pile_set_name": "Github"
}
|
<h1>Add a Style</h1>
<form method="POST" action="{{ url_for('new_page', site_id=site.key().id()) }}">
{% include "page_form.html" %}
<p>
<input type="submit" value="{% trans %}Create!{% endtrans %}" />
<img class="throbber" src="/static/img/throbber.gif" width="16" height="16" />
</p>
{% include "form_errors.html" %}
</form>
|
{
"pile_set_name": "Github"
}
|
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package config implements the pod configuration readers.
package config // import "k8s.io/kubernetes/pkg/kubelet/config"
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0910"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Profile"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
|
{
"pile_set_name": "Github"
}
|
name: "VGG_ILSVRC_19_layers"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 340
input_dim: 256
layers {
bottom: "data"
top: "conv1_1"
name: "conv1_1"
type: CONVOLUTION
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv1_1"
top: "conv1_1"
name: "relu1_1"
type: RELU
}
layers {
bottom: "conv1_1"
top: "conv1_2"
name: "conv1_2"
type: CONVOLUTION
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv1_2"
top: "conv1_2"
name: "relu1_2"
type: RELU
}
layers {
bottom: "conv1_2"
top: "pool1"
name: "pool1"
type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layers {
bottom: "pool1"
top: "conv2_1"
name: "conv2_1"
type: CONVOLUTION
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv2_1"
top: "conv2_1"
name: "relu2_1"
type: RELU
}
layers {
bottom: "conv2_1"
top: "conv2_2"
name: "conv2_2"
type: CONVOLUTION
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv2_2"
top: "conv2_2"
name: "relu2_2"
type: RELU
}
layers {
bottom: "conv2_2"
top: "pool2"
name: "pool2"
type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layers {
bottom: "pool2"
top: "conv3_1"
name: "conv3_1"
type: CONVOLUTION
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv3_1"
top: "conv3_1"
name: "relu3_1"
type: RELU
}
layers {
bottom: "conv3_1"
top: "conv3_2"
name: "conv3_2"
type: CONVOLUTION
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv3_2"
top: "conv3_2"
name: "relu3_2"
type: RELU
}
layers {
bottom: "conv3_2"
top: "conv3_3"
name: "conv3_3"
type: CONVOLUTION
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv3_3"
top: "conv3_3"
name: "relu3_3"
type: RELU
}
layers {
bottom: "conv3_3"
top: "conv3_4"
name: "conv3_4"
type: CONVOLUTION
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv3_4"
top: "conv3_4"
name: "relu3_4"
type: RELU
}
layers {
bottom: "conv3_4"
top: "pool3"
name: "pool3"
type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layers {
bottom: "pool3"
top: "conv4_1"
name: "conv4_1"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv4_1"
top: "conv4_1"
name: "relu4_1"
type: RELU
}
layers {
bottom: "conv4_1"
top: "conv4_2"
name: "conv4_2"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv4_2"
top: "conv4_2"
name: "relu4_2"
type: RELU
}
layers {
bottom: "conv4_2"
top: "conv4_3"
name: "conv4_3"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv4_3"
top: "conv4_3"
name: "relu4_3"
type: RELU
}
layers {
bottom: "conv4_3"
top: "conv4_4"
name: "conv4_4"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv4_4"
top: "conv4_4"
name: "relu4_4"
type: RELU
}
layers {
bottom: "conv4_4"
top: "pool4"
name: "pool4"
type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layers {
bottom: "pool4"
top: "conv5_1"
name: "conv5_1"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv5_1"
top: "conv5_1"
name: "relu5_1"
type: RELU
}
layers {
bottom: "conv5_1"
top: "conv5_2"
name: "conv5_2"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv5_2"
top: "conv5_2"
name: "relu5_2"
type: RELU
}
layers {
bottom: "conv5_2"
top: "conv5_3"
name: "conv5_3"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv5_3"
top: "conv5_3"
name: "relu5_3"
type: RELU
}
layers {
bottom: "conv5_3"
top: "conv5_4"
name: "conv5_4"
type: CONVOLUTION
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layers {
bottom: "conv5_4"
top: "conv5_4"
name: "relu5_4"
type: RELU
}
layers {
bottom: "conv5_4"
top: "pool5"
name: "pool5"
type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
|
{
"pile_set_name": "Github"
}
|
package ch.renku.acceptancetests.workflows
import ch.renku.acceptancetests.pages._
import ch.renku.acceptancetests.tooling.AcceptanceSpec
import ch.renku.acceptancetests.workflows.LoginType._
import org.openqa.selenium.StaleElementReferenceException
import scala.concurrent.duration._
import scala.language.postfixOps
trait Login {
self: AcceptanceSpec =>
def logIntoRenku: LoginType = {
Given("user is not logged in")
go to LandingPage sleep (1 second)
verify browserAt LandingPage
When("user clicks on the Login button")
// Wait for the page to update
sleep(2 seconds)
click on LandingPage.loginButton
Then("they should get into the Login Page")
verify browserAt LoginPage
val loginType = if (userCredentials.useProvider) {
logIntoRenkuUsingProvider
} else {
logIntoRenkuDirectly
}
Then("they should get into the Welcome page")
verify browserAt WelcomePage
loginType
}
private def logIntoRenkuUsingProvider: LoginType = {
When("user clicks on the provider login page")
val providerLoginPage = LoginPage openProviderLoginPage
And("enters credentials and logs in")
providerLoginPage logInWith userCredentials
val authorizationPage = LoginPage.AuthorizeApplicationPage()
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application")
authorizationPage authorize;
// It may be necessary to authorize twice
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application a second time")
authorizationPage authorize
}
}
// This is a first login, and we need to provide account information
if (currentUrl contains "login-actions/first-broker-login") {
val updateInfoPage = LoginPage.UpdateAccountInfoPage(userCredentials)
And("updates user information")
updateInfoPage.updateInfo sleep (5 seconds)
}
// Authorization may come later
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application")
authorizationPage authorize;
// It may be necessary to authorize twice
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application a second time")
authorizationPage authorize
}
}
LoginWithProvider
}
private def logIntoRenkuDirectly: LoginType = {
When("user enters credentials and logs in")
LoginPage logInWith userCredentials
if (LoginPage loginSucceeded) {
val providerLoginPage = LoginPage.ProviderLoginPage()
val lt = if (currentUrl startsWith providerLoginPage.url) {
And("enters information with the provider")
providerLoginPage logInWith userCredentials
LoginWithProvider
} else LoginWithoutProvider
val authorizationPage = LoginPage.AuthorizeApplicationPage()
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application")
authorizationPage authorize;
// It may be necessary to authorize twice
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application a second time")
authorizationPage authorize
}
}
lt
} else {
if (userCredentials.register) {
And("login fails")
Then("try to register the user")
registerNewUserWithRenku
} else fail("Incorrect user credentials.")
}
}
private def registerNewUserWithRenku: LoginType = {
When("user opens registration form")
val lt = LoginPage openRegistrationForm;
And("registers")
val registerPage = LoginPage.RegisterNewUserPage()
registerPage registerNewUserWith userCredentials
And("logs into provider")
val providerLoginPage = LoginPage.ProviderLoginPage()
providerLoginPage logInWith userCredentials
val authorizationPage = LoginPage.AuthorizeApplicationPage()
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application")
authorizationPage authorize;
// It may be necessary to authorize twice
if (currentUrl startsWith authorizationPage.url) {
And("authorizes the application a second time")
authorizationPage authorize
}
}
lt
}
def logOutOfRenku(implicit loginType: LoginType): Unit = {
When("user clicks the Log out link")
click on WelcomePage.TopBar.topRightDropDown
click on WelcomePage.TopBar.logoutLink
unless(loginType == LoginWithProvider) {
Then("they should get back into the Landing page")
verify browserAt LandingPage
verify userCanSee LandingPage.loginButton sleep (1 second)
}
}
}
sealed trait LoginType
object LoginType {
case object LoginWithProvider extends LoginType
case object LoginWithoutProvider extends LoginType
}
|
{
"pile_set_name": "Github"
}
|
/*
* File : fal.h
* This file is part of FAL (Flash Abstraction Layer) package
* COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Change Logs:
* Date Author Notes
* 2018-05-17 armink the first version
*/
#ifndef _FAL_H_
#define _FAL_H_
#include <rtconfig.h>
#include <fal_cfg.h>
#include "fal_def.h"
/**
* FAL (Flash Abstraction Layer) initialization.
* It will initialize all flash device and all flash partition.
*
* @return >= 0: partitions total number
*/
int fal_init(void);
/* =============== flash device operator API =============== */
/**
* find flash device by name
*
* @param name flash device name
*
* @return != NULL: flash device
* NULL: not found
*/
const struct fal_flash_dev *fal_flash_device_find(const char *name);
/* =============== partition operator API =============== */
/**
* find the partition by name
*
* @param name partition name
*
* @return != NULL: partition
* NULL: not found
*/
const struct fal_partition *fal_partition_find(const char *name);
/**
* get the partition table
*
* @param len return the partition table length
*
* @return partition table
*/
const struct fal_partition *fal_get_partition_table(size_t *len);
/**
* set partition table temporarily
* This setting will modify the partition table temporarily, the setting will be lost after restart.
*
* @param table partition table
* @param len partition table length
*/
void fal_set_partition_table_temp(struct fal_partition *table, size_t len);
/**
* read data from partition
*
* @param part partition
* @param addr relative address for partition
* @param buf read buffer
* @param size read size
*
* @return >= 0: successful read data size
* -1: error
*/
int fal_partition_read(const struct fal_partition *part, uint32_t addr, uint8_t *buf, size_t size);
/**
* write data to partition
*
* @param part partition
* @param addr relative address for partition
* @param buf write buffer
* @param size write size
*
* @return >= 0: successful write data size
* -1: error
*/
int fal_partition_write(const struct fal_partition *part, uint32_t addr, const uint8_t *buf, size_t size);
/**
* erase partition data
*
* @param part partition
* @param addr relative address for partition
* @param size erase size
*
* @return >= 0: successful erased data size
* -1: error
*/
int fal_partition_erase(const struct fal_partition *part, uint32_t addr, size_t size);
/**
* erase partition all data
*
* @param part partition
*
* @return >= 0: successful erased data size
* -1: error
*/
int fal_partition_erase_all(const struct fal_partition *part);
/**
* print the partition table
*/
void fal_show_part_table(void);
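/*
 * Illustrative usage sketch (comments only, not part of the FAL API):
 * the typical call order for the declarations above. The partition
 * name "app" is an assumption made for this example; real names come
 * from the partition table in fal_cfg.h, and erase sizes must respect
 * the block granularity of the underlying flash device.
 *
 *     static uint8_t buf[16];
 *     const struct fal_partition *part;
 *
 *     fal_init();
 *     part = fal_partition_find("app");
 *     if (part != NULL)
 *     {
 *         fal_partition_erase(part, 0, sizeof(buf));
 *         fal_partition_write(part, 0, buf, sizeof(buf));
 *         fal_partition_read(part, 0, buf, sizeof(buf));
 *     }
 */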
/* =============== API provided to RT-Thread =============== */
#if defined(RT_USING_DFS)
/**
* create RT-Thread block device by specified partition
*
* @param partition_name partition name
*
* @return != NULL: created block device
* NULL: created failed
*/
struct rt_device *fal_blk_device_create(const char *partition_name);
#endif /* defined(RT_USING_DFS) */
#endif /* _FAL_H_ */
|
{
"pile_set_name": "Github"
}
|
#include <iostream>
#include <Eigen/Dense>
using namespace Eigen; using namespace std;
int main() {
  Matrix4i m = Matrix4i::Random();
  cout << "Here is the matrix m:" << endl << m << endl;
  cout << "Here is the block:" << endl << m.block<2, Dynamic>(1, 1, 2, 3) << endl;
  m.block<2, Dynamic>(1, 1, 2, 3).setZero();
  cout << "Now the matrix m is:" << endl << m << endl;
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<beast
namespace="beast.evolution.operators:beast.evolution.speciation:beast.evolution.alignment:beast.core:beast.math:beast.evolution:beast.evolution.tree:beast.evolution.tree.coalescent:beast.core.util:beast.evolution.nuc:beast.evolution.operators:beast.evolution.sitemodel:beast.evolution.substitutionmodel:beast.evolution.likelihood"
version="2.0">
<data dataType="nucleotide" id="26" name="taxa">
<sequence taxon="Orthogeomys_heterodus">
ATTCTAGGCAAAAAG-AGCAATGCTGGAGGTATTACAATACCAGACTTCAAACTTTACTATAGAGCCATAATAACAAAAACCGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAATGGAAYAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGTCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCACCATGCACAGGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCAGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGGGAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATAGGAGAAAGACTCAATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAATAATACATCTGACAAATGCTTGAT--ATCCAGT-ATCTACAGGGAACTCAAGAAATTAATC
</sequence>
<sequence taxon="Thomomys_bottae_albatus">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGATCCAGAAATAGAACCAC-ATACCTATAGTCAACTTGTATTTGATAAAGGAGCTAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCATTTCACCATGCACAAGAATTAACTCAAAGTGGATCAAAGACCTCAATGTCAGACCT-------------------------------------CATAGGCACTCATAGGCACAGGCAAAAGATTCCTATACAGGAACCCAGGGTCAACAACAGATTGGAGAGAGACTCAATAAATGGGACTACCTAAAAATAAAA-GTTTCTGCAC?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_alpinus">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCTATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCA??????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_awahnee_a">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACMCAGTCTATTCAATTATTGGKGCTGGCAAAACTGGGCAGCCMCATGCARAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAA?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_awahnee_b">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCM-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGTT-GCAAAAGT????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_bottae">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAATGTCAGACCGGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAAATTCCTGAACAGAATCCCAGGGTCA-CAACAGATKGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGTT-GCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATA?????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_cactophilus">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAAYGTCAGACCGGAAACCGTGAAAYTGTTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAA????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_laticeps">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACARTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAATTGCTACATGATAAWGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCAATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGTT-GCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATAT????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_mewa">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCATTTCACCATGCACAAGAATTAACTCAAAGTGGATCAAAGACCTCAATGTCAGACCGGAAACCGTGAAA???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_riparius">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGT-TGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACA???????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_ruidosae">
????????????????AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCARAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAATGTCAGACCGGAAACCGTGAAAYTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAG??????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_saxatilis">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACARTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAA?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_bottae_xerophilus">
????????????????????ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAAYTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAA?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_idahoensis_pygmaeus_a">
????????CAAAA-G-AGCAATGCTGGTGGTATYACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATMCAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCRACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATRTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_idahoensis_pygmaeus_b">
????GGCGCAAAA-G-AGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATMCAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATRTATCTGACAAATGCTTAAT--ATCCAGCTATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_mazama_mazama">
ATTCTAGGCAAAAAGAAGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATACAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAAYTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATGTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_mazama_nasicus">
ATTCTAGGCAAAAAG-AGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATACAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCSCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAAYTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATGTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTA-TC
</sequence>
<sequence taxon="Thomomys_monticola_a">
??????????????G-AGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATACAATGGAACGAACACAGYCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATRTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_monticola_b">
ATTCTAGGCAAAAAG-AGCAATGCTGGTGGTATYACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAWATAGAACCAC-ACAACTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAATGGAACRAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATATATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_talpoides_bridgeri">
ATTCTAGGCAAAAAG-AGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATCCAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCRACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATGTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_talpoides_ocius">
???CTAGGCAAAA-G-AGC-ATGCTGGTGGTATCACAATACCAGACTTCAAACTTTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATMCAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCRACGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAACTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CKCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATGTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_talpoides_yakimensis">
ATTCTAGGCAAAA-G-AGCAATGCTGGTGGTATCACAATACCAGACTTCAAACTKTACTATAAAGCCATAATAACAAAAACTGC-TTGGTACTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGATATAGAACCAC-ACAACTATAGGCAACTTRTATTTGACAAAGGAGCCAAAGATATACAATGGAACGAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGAAAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAAYGTCAGACCGGAAACCGTGAAACTGCTACATGATAATGTAGGAGAGACACTGGAAYTCATAGGCACAGGAAACAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGACAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGACAAAAACATAGT-CGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATGTATCTGACAAATGCTTAAT--ATCCAGC-ATCTACAGAGAACTCAAGAAACTAATC
</sequence>
<sequence taxon="Thomomys_townsendii_relictus">
??????????????G-AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTAYTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCACTATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACAATGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAATTGCTACATGATAAWGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAATGTTTCTGTACAGATAAAAACATAG???????????????????????????????????????????????????????????????????????????????????????????????????????????????
</sequence>
<sequence taxon="Thomomys_townsendii_townsendii">
ATTCTAGGCAAAAAG-AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACARTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTYGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAACGTCAGACCGGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGTCTGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCCACAATACATCTGACAAATGCTTAAT--ATCCAGT-ATCTACAGG-AACTCAAG?????????
</sequence>
<sequence taxon="Thomomys_umbrinus_atroavarius">
ATTCTAGGCAAAAAG-AGCAATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGCATTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGKCAACTTGTATTTGACAAAGGAGCCAAAGATATACAGTGGAACAAACACAGYCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCCTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAAYGTCAGACCGGAAACCGTGAAATTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGT-TGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCCACAATACATCTGACAAATGCTT-ATATATCCAGT-ATCTACAGAGAACTCAAG?????????
</sequence>
<sequence taxon="Thomomys_umbrinus_chihuahuae">
?????????????AG-AGC-ATGCTGGTGGTATTACAATACCAGACTTCAAACTCTACTATAGAGCCATAATAACAAAAACTGC-TTGGTATTGGCACAAAAACAGACCAGAAGATCAGTGGAACAGAATAGAAGACCCAGAAATAGAACCAC-ATACCTATAGGCAACTTGTATTTGACAAAGGAGCCAAAGATATACARTGGAACAAACACAGTCTATTCAATTATTGGTGCTGGCAAAACTGGGCAGCCACATGCAGAAAACTAAAAATTGACCCTAGCTTTTCGCCATGCACAAGCATTAACTCAAAGTGGATCAAAGACCTCAATGTCAGACCGGAAACCGTGAAACTGCTACATGATAAAGTAGGAGAGACACTGGAACTCATAGGCACAGGCAAAAGATTCCTGAACAGAATCCCAGGGTCA-CAACAGATTGGAGAGAGACTCGATAAATGGGACTACCTAAAAATAAAAAGTTTCTGTACAGATAAAAACATAGT-TGCAAAAGTAGAGAGACAGCCAACCAACTGGGAAAAGATATTCACCAACAATACATCTGACAAATGCTTAAT--ATCCAGT-ATCTACA????????????????????
</sequence>
</data>
<data dataType="nucleotide" id="29" name="taxa">
<sequence taxon="Orthogeomys_heterodus">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCACAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCGTTTCTGGAAAACTGAGATCTTGTTTTGACCAACAGAAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTACTGGTGATGTCTCTGTCAGGATCTAGTCCCATCCTG-AATCCATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTCAGAACTGACTGAATCAGGCCAGTGAAAGTTA-CAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATTGTGCATGGCTTCATTAGCAGGTTTACTGTGCTGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCATCAGATTGTCAGGGTCTTGGATTCTGATAAGTTCTGTGTTCCTGATGAGAGACTGGTTATTCAGTAAGTTGTAGGACAAA
</sequence>
<sequence taxon="Thomomys_bottae_albatus">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGTGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_alpinus">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCC-AGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGTTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCYACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_awahnee_a">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGAGAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_awahnee_b">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGAKAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_bottae">
C-AGCTTGCTG-TCACACYGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTAKGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCRTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_cactophilus">
C-AGCTTGCTG-TCACACTACCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCACTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCASTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_laticeps">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_mewa">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAATTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_riparius">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_ruidosae">
?????????????????????????????C-AGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_saxatilis">
C-AGCTTGCTG-TCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_bottae_xerophilus">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCACTAAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTRTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_idahoensis_pygmaeus_a">
C-AGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTTTAAGTGTTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGTCTGGCTATGAAAGGAAGGACTCTTAAGAACTGACTGAATCAGTCCAGTGAAACTTAACAGCTACCTTGCCAATKGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGG--TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCGGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_idahoensis_pygmaeus_b">
CAAGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTCTAAGTGTTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTAAGAACTGACTGAATCAGTCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGG--TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCGGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_mazama_mazama">
C-AGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTATTGCCTGGATATAGTGTCTATAGACAGAGACAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCACATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCACCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCTGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCTCTGGGTTGTTAG-CGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_mazama_nasicus">
C-AGCTTGCTG-TCACAATGTCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTATTGCCTGGATATAGTGTCTATAGACAGAGACAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCACATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCTGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCTCTGGGTTGTTAG-CGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_monticola_a">
C-AGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGACGCCKTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTAAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAGGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_monticola_b">
C-AGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGACGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTYCCATCCTG-AATTGATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTAAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAGGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_talpoides_bridgeri">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTAAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_talpoides_ocius">
C-AGCTTGCTGGTCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCATATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAAWGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCTCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGACAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_talpoides_yakimensis">
C-AGCTTGCTG-TCACACTGTCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTGCCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCTCCTCAGCTCCTGGATTGCTGGTGCTGGTGACGTCTCTGTCAGGATCCAGTCCCATCCTG-AATTGATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGACTCTTAAGAACTGAMTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGG-TCTTTCCAAGGATTGTGCTTCTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTACTGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTGGACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_townsendii_relictus">
C-AGCTTGCTGGTCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_townsendii_townsendii">
C-AGCTTGCTGGTCACACCGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_umbrinus_atroavarius">
C-AGCTTGCTG-TCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAGGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTG-AATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGATGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
<sequence taxon="Thomomys_umbrinus_chihuahuae">
C-AGCTTGCTGGTCACACTGCCCCTTCTACAAGCCCAGCTGACAGAGGCCTTAAGTACTACCTGGATATAATGTCTATAGACAGAGGCAATTCTGGAAAACTCAGATCTTATTTTGACCAACAAGAATTAATGTCTATGGTAGAATTCCCCTCAGCTCCTGGATTGCTGGTACTGGTGACGTCTCTGTCAGAATCCAGTCCCATCCTGGAATTTATACTTAACAATTTTCTAAGTATTTTGTGGTCTTGCTCTACCTCCAGGGAAGCTGGCCTGGCTATGAAAGGAAGGGCTCTTTAGAACTGACTGAATCAGGCCAGTGAAACTTAACAGCTACCTTGCCAATGGTCTGTGTGCCAGTGACATCGTGCATGGCTTCATTAGCAGATTTACTGTGCCGGGGTCTTTCCAAGGATTGTGCTTGTTGATTTGGTCCCTGGATAAATTTGCAAATGCTAATCTAGTTGTTAATGGAGAGCATCCAAACCTAAGACAGGACTATTCCTCCCTGGGTTGTTAGGCGTCAGATTGGCAGGGTCTTG-ACTCTGATAAGTTCTGTGTTCCTGAGGAGAGGCTGGTTATTCAGTAAGTTGTAGGGCAAA
</sequence>
</data>
<taxonset spec='TaxonSet' id='lineagetaxa'>
<taxon idref='Orthogeomys_heterodus'/>
<taxon idref='Thomomys_bottae_albatus'/>
<taxon idref='Thomomys_bottae_alpinus'/>
<taxon idref='Thomomys_bottae_awahnee_a'/>
<taxon idref='Thomomys_bottae_awahnee_b'/>
<taxon idref='Thomomys_bottae_bottae'/>
<taxon idref='Thomomys_bottae_cactophilus'/>
<taxon idref='Thomomys_bottae_laticeps'/>
<taxon idref='Thomomys_bottae_mewa'/>
<taxon idref='Thomomys_bottae_riparius'/>
<taxon idref='Thomomys_bottae_ruidosae'/>
<taxon idref='Thomomys_bottae_saxatilis'/>
<taxon idref='Thomomys_bottae_xerophilus'/>
<taxon idref='Thomomys_idahoensis_pygmaeus_a'/>
<taxon idref='Thomomys_idahoensis_pygmaeus_b'/>
<taxon idref='Thomomys_mazama_mazama'/>
<taxon idref='Thomomys_mazama_nasicus'/>
<taxon idref='Thomomys_monticola_a'/>
<taxon idref='Thomomys_monticola_b'/>
<taxon idref='Thomomys_talpoides_bridgeri'/>
<taxon idref='Thomomys_talpoides_ocius'/>
<taxon idref='Thomomys_talpoides_yakimensis'/>
<taxon idref='Thomomys_townsendii_relictus'/>
<taxon idref='Thomomys_townsendii_townsendii'/>
<taxon idref='Thomomys_umbrinus_atroavarius'/>
<taxon idref='Thomomys_umbrinus_chihuahuae'/>
</taxonset>
<taxonset spec='TaxonSet' id='taxonsuperset'>
<taxon spec='TaxonSet' id='Orthogeomys_heterodus_species'>
<taxon spec='Taxon' id='Orthogeomys_heterodus'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_bottae'>
<taxon spec='Taxon' id='Thomomys_bottae_awahnee_a'/>
<taxon spec='Taxon' id='Thomomys_bottae_awahnee_b'/>
<taxon spec='Taxon' id='Thomomys_bottae_xerophilus'/>
<taxon spec='Taxon' id='Thomomys_bottae_cactophilus'/>
<taxon spec='Taxon' id='Thomomys_bottae_albatus'/>
<taxon spec='Taxon' id='Thomomys_bottae_ruidosae'/>
<taxon spec='Taxon' id='Thomomys_bottae_bottae'/>
<taxon spec='Taxon' id='Thomomys_bottae_alpinus'/>
<taxon spec='Taxon' id='Thomomys_bottae_riparius'/>
<taxon spec='Taxon' id='Thomomys_bottae_mewa'/>
<taxon spec='Taxon' id='Thomomys_bottae_saxatilis'/>
<taxon spec='Taxon' id='Thomomys_bottae_laticeps'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_idahoensis'>
<taxon spec='Taxon' id='Thomomys_idahoensis_pygmaeus_a'/>
<taxon spec='Taxon' id='Thomomys_idahoensis_pygmaeus_b'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_mazama'>
<taxon spec='Taxon' id='Thomomys_mazama_mazama'/>
<taxon spec='Taxon' id='Thomomys_mazama_nasicus'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_monticola'>
<taxon spec='Taxon' id='Thomomys_monticola_a'/>
<taxon spec='Taxon' id='Thomomys_monticola_b'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_talpoides'>
<taxon spec='Taxon' id='Thomomys_talpoides_yakimensis'/>
<taxon spec='Taxon' id='Thomomys_talpoides_bridgeri'/>
<taxon spec='Taxon' id='Thomomys_talpoides_ocius'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_townsendii'>
<taxon spec='Taxon' id='Thomomys_townsendii_townsendii'/>
<taxon spec='Taxon' id='Thomomys_townsendii_relictus'/>
</taxon>
<taxon spec='TaxonSet' id='Thomomys_umbrinus'>
<taxon spec='Taxon' id='Thomomys_umbrinus_chihuahuae'/>
<taxon spec='Taxon' id='Thomomys_umbrinus_atroavarius'/>
</taxon>
</taxonset>
<distribution id="posterior" spec="util.CompoundDistribution">
<distribution id="prior" spec="util.CompoundDistribution">
<distribution id="speciesCoalescent" spec="util.CompoundDistribution">
<!-- gene tree priors -->
<distribution id="genetree.priors" spec="util.CompoundDistribution">
<distribution id='tree.prior.29' spec='GeneTreeForSpeciesTreeDistribution' tree='@tree29'
speciesTree='@speciesTree'
speciesTreePrior='@SpeciesTreePopSizePrior'>
<treetop spec='TreeTopFinder' id='treeTopFinder'>
<tree idref='tree29'/>
<tree idref='tree26'/>
</treetop>
</distribution>
<distribution id='tree.prior.26' spec='GeneTreeForSpeciesTreeDistribution' tree='@tree26'
speciesTree='@speciesTree'
speciesTreePrior='@SpeciesTreePopSizePrior' treetop='@treeTopFinder'/>
</distribution>
<!-- species tree priors -->
<distribution id='SpeciesTreePopSizePrior' spec='SpeciesTreePrior' tree='@speciesTree'
popFunction='linear'
taxonset='@taxonsuperset'>
<bottomPopSize spec='parameter.RealParameter' id='popSize' value='1'/>
<topPopSize spec='parameter.RealParameter' id='popSizeTop' value='1'/>
<parameter name='gammaParameter' id='popMean' value='0.004'/>
</distribution>
</distribution>
<distribution id="SpeciesTreeDivergenceTimesPrior" spec="speciation.YuleModel" tree='@speciesTree'>
<birthDiffRate spec='parameter.RealParameter' id="birthRate" value='213.2021' lower='0' upper='10000'/>
</distribution>
<!-- hyper priors for assorted parameters -->
<distribution id='birthRatePrior' spec='distributions.Prior' x='@birthRate'>
<distr id='OneOnX' spec='distributions.OneOnX'/>
</distribution>
<distribution id="popMean.prior26" spec="beast.math.distributions.Prior" x="@popMean" distr='@OneOnX'/>
<distribution id="kappa.prior26" spec="beast.math.distributions.Prior" x="@hky.kappa26">
<distr id='logNormal' spec='beast.math.distributions.LogNormalDistributionModel' M="1.0" S="1.25"
offset="0.0"
meanInRealSpace="false"/>
</distribution>
<distribution id="kappa.prior29" spec="beast.math.distributions.Prior" x="@hky.kappa29" distr='@logNormal'/>
</distribution>
<distribution id="likelihood" spec="util.CompoundDistribution">
<distribution data="@26" id="treelikelihood.26" spec="TreeLikelihood" tree="@tree26" useAmbiguities="false">
<siteModel gammaCategoryCount="1" id="siteModel26" mutationRate="@mutationRate"
proportionInvariant="@proportionInvariant"
shape="@gammaShape" spec="SiteModel">
<substModel id="hky26" kappa="@hky.kappa26" spec="HKY">
<frequencies id="freqs26" spec="Frequencies" estimate='true' data='@26'/>
</substModel>
</siteModel>
<branchRateModel clock.rate="@clockRate" id="StrictClockModel26"
spec="beast.evolution.branchratemodel.StrictClockModel"/>
</distribution>
<distribution data="@29" id="treelikelihood.29" spec="TreeLikelihood" tree="@tree29" useAmbiguities="false">
<siteModel gammaCategoryCount="1" id="siteModel29" mutationRate="@mutationRate"
proportionInvariant="@proportionInvariant"
shape="@gammaShape" spec="SiteModel">
<substModel id="hky29" kappa="@hky.kappa29" spec="HKY">
<frequencies id="freqs29" spec="Frequencies" estimate='true' data='@29'/>
</substModel>
</siteModel>
<branchRateModel clock.rate="@clockRate" id="StrictClockModel29"
spec="beast.evolution.branchratemodel.StrictClockModel"/>
</distribution>
</distribution>
</distribution>
<tree spec='RandomTree' id='randomSpeciesTree' initial='@speciesTree' taxonset='@taxonsuperset'>
<populationModel spec='ConstantPopulation'>
<popSize spec='parameter.RealParameter' value='1'/>
</populationModel>
</tree>
<tree spec='beast.util.TreeParser' id='newickSpeciesTree' IsLabelledNewick='true'
newick='(Orthogeomys_heterodus_species:0.02065265792151943,(((Thomomys_talpoides:0.0013609706233106656,Thomomys_mazama:0.0013609706233106656):6.032405317365711E-4,(Thomomys_idahoensis:0.0019116640088644983,Thomomys_monticola:0.0019116640088644983):5.254714618273845E-5):0.01135560317788915,(Thomomys_umbrinus:0.001060205025110908,(Thomomys_townsendii:2.1835631905959876E-4,Thomomys_bottae:2.1835631905959876E-4):8.418487060513092E-4):0.01225960930782548):0.007332843588583044);'
initial='@speciesTree' threshold='1e-12' taxonset='@taxonsuperset'/>
<tree spec='beast.util.TreeParser' id='newickGeneTree1' IsLabelledNewick='true'
newick='((((Thomomys_idahoensis_pygmaeus_a:4.790629004925939E-4,Thomomys_idahoensis_pygmaeus_b:4.790629004925939E-4):0.002807766516703272,(Thomomys_monticola_a:3.1053813362974055E-5,Thomomys_monticola_b:3.1053813362974055E-5):0.0032557756038328915):1.3936218139959985E-4,((Thomomys_talpoides_yakimensis:0.001392151582227928,(Thomomys_talpoides_bridgeri:4.271344427331326E-4,Thomomys_talpoides_ocius:4.271344427331326E-4):9.650171394947955E-4):2.3416363608392133E-4,(Thomomys_mazama_nasicus:5.938379928988402E-6,Thomomys_mazama_mazama:5.938379928988402E-6):0.001620376838382861):0.0017998763802836162):0.01709773249173348,(Orthogeomys_heterodus:0.009652282924590812,(((Thomomys_townsendii_townsendii:0.0026355773641040447,(Thomomys_townsendii_relictus:0.001886298607022084,(Thomomys_bottae_laticeps:0.0011492988922358863,Thomomys_umbrinus_atroavarius:0.0011492988922358863):7.369997147861978E-4):7.492787570819606E-4):0.002003647116192913,((Thomomys_bottae_awahnee_a:7.080639947397304E-4,((Thomomys_bottae_saxatilis:4.7332771620207383E-4,Thomomys_bottae_xerophilus:4.7332771620207383E-4):2.7590796315257195E-5,(Thomomys_bottae_riparius:2.2956067039993388E-4,Thomomys_bottae_awahnee_b:2.2956067039993388E-4):2.7135784211739715E-4):2.0714548222239932E-4):7.737704382886797E-4,(Thomomys_bottae_cactophilus:0.0014394832564564783,(Thomomys_bottae_alpinus:0.0013868072888249166,(Thomomys_umbrinus_chihuahuae:9.025787629100629E-4,(Thomomys_bottae_bottae:7.08841912043247E-4,Thomomys_bottae_ruidosae:7.08841912043247E-4):1.9373685086681593E-4):4.842285259148536E-4):5.267596763156176E-5):4.2351176571931706E-5):0.0031573900472685475):0.0015015284932928617,(Thomomys_bottae_mewa:8.285742263631879E-4,Thomomys_bottae_albatus:8.285742263631879E-4):0.005312178747226631):0.0035115299510009926):0.010871641165738136)'
initial='@tree26' taxonset='@lineagetaxa' threshold='1e-12'/>
<tree spec='beast.util.TreeParser' id='newickGeneTree2' IsLabelledNewick='true'
newick='(Orthogeomys_heterodus:0.02555754747205849,((Thomomys_mazama_nasicus:0.001146519512253189,Thomomys_mazama_mazama:0.001146519512253189):0.01169723135080885,((Thomomys_talpoides_ocius:0.00703974619989265,(((Thomomys_talpoides_yakimensis:0.0014682135377831343,(Thomomys_monticola_a:3.1427207759506006E-5,Thomomys_monticola_b:3.1427207759506006E-5):0.0014367863300236282):3.5111237723683074E-4,(Thomomys_idahoensis_pygmaeus_a:2.52563963076903E-4,Thomomys_idahoensis_pygmaeus_b:2.52563963076903E-4):0.001566761951943062):0.0033770883069903614,Thomomys_talpoides_bridgeri:0.005196414222010327):0.001843331977882323):0.002676969649368072,((((Thomomys_umbrinus_chihuahuae:0.0017048445360954947,Thomomys_umbrinus_atroavarius:0.0017048445360954947):0.00109910693708295,(Thomomys_bottae_awahnee_a:0.0017104792362342403,Thomomys_bottae_awahnee_b:0.0017104792362342403):0.0010934722369442043):2.397726403374303E-4,(Thomomys_bottae_xerophilus:0.0028568232959683758,Thomomys_bottae_cactophilus:0.0028568232959683758):1.8690081754749914E-4):3.8269468803520503E-4,(Thomomys_bottae_mewa:0.0023122370757911325,((Thomomys_bottae_ruidosae:6.038316687617217E-4,Thomomys_bottae_bottae:6.038316687617217E-4):0.001291158181918091,(((Thomomys_bottae_albatus:0.0010882593426384808,Thomomys_bottae_saxatilis:0.0010882593426384808):7.144016389479342E-4,(Thomomys_bottae_riparius:0.0016565761010643102,Thomomys_bottae_laticeps:0.0016565761010643102):1.4608488052210477E-4):1.0681582125047617E-5,((Thomomys_townsendii_relictus:2.4739716804096176E-5,Thomomys_townsendii_townsendii:2.4739716804096176E-5):0.0017656857791623062,Thomomys_bottae_alpinus:0.0017904254959664023):2.2917067745060332E-5):8.164728696835014E-5):4.172472251113197E-4):0.0011141817257599475):0.006290297047709642):0.0031270350138013162):0.012713796608996452)'
initial='@tree29' taxonset='@lineagetaxa' threshold='1e-12'/>
<tree spec='RandomGeneTree' id='randomGeneTree1' initial='@tree26' speciesTree='@speciesTree' taxa='@26'>
<populationModel spec='ConstantPopulation'>
<popSize spec='parameter.RealParameter' value='1'/>
</populationModel>
</tree>
<tree spec='RandomGeneTree' id='randomGeneTree2' initial='@tree29' speciesTree='@speciesTree' taxa='@29'>
<populationModel spec='ConstantPopulation'>
<popSize spec='parameter.RealParameter' value='1'/>
</populationModel>
</tree>
<run chainLength="10000000" id="mcmc" preBurnin="0" spec="MCMC">
<!--init idref='randomSpeciesTree'/ -->
<init idref='newickSpeciesTree'/>
<!--init idref='newickGeneTree1'/> <init idref='newickGeneTree2'/ -->
<init idref='randomGeneTree1'/>
<init idref='randomGeneTree2'/>
<distribution idref="posterior"/>
<state id="State0" storeEvery="100000">
<!--parameter dimension="1" estimate="false" id="popSize" name="stateNode" value="1.0"/ -->
<parameter dimension="1" estimate="false" id="mutationRate" name="stateNode" value="1.0"/>
<parameter dimension="1" estimate="false" id="clockRate" name="stateNode" value="1.0"/>
<parameter dimension="1" estimate="true" id="hky.kappa26" lower="0.0" name="stateNode" value="1.0"/>
<parameter dimension="1" estimate="true" id="hky.kappa29" lower="0.0" name="stateNode" value="1.0"/>
<stateNode idref='popSize'/>
<stateNode idref='popSizeTop'/>
<stateNode idref='popMean'/>
<tree estimate="true" id="tree26" name="stateNode"/>
<tree estimate="true" id="tree29" name="stateNode"/>
<tree estimate="true" id="speciesTree" name="stateNode"/>
<stateNode idref="birthRate"/>
<!--stateNode idref="relativeDeathRate"/ -->
<parameter dimension="1" estimate="false" id="gammaShape" name="stateNode" value="1.0"/>
<parameter dimension="1" estimate="false" id="proportionInvariant" lower="0.0" name="stateNode" upper="1.0"
value="0.0"/>
</state>
<operator degreesOfFreedom="1" id="popSizeScaler" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.5"
spec="ScaleOperator" parameter="@popSize" weight="5.0"/>
<operator degreesOfFreedom="1" id="popSizeTopScaler" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.5" spec="ScaleOperator" parameter="@popSizeTop" weight="5.0"/>
<operator degreesOfFreedom="1" id="birthRateScaler" scaleFactor="0.5" spec="ScaleOperator"
parameter="@birthRate"
weight="3"/>
<!--operator degreesOfFreedom="1" id="relDeathRateScaler" scaleFactor="0.5" spec="ScaleOperator" parameter="@relativeDeathRate"
weight="1.0"/ -->
<operator degreesOfFreedom="1" id="psiScaler" scaleAll="false" scaleAllIndependently="false" scaleFactor="0.5"
spec="ScaleOperator" parameter="@popMean" weight="94"/>
<operator degreesOfFreedom="1" id="kappa26Scaler" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.35"
spec="ScaleOperator" parameter="@hky.kappa26" weight="0.1"/>
<operator degreesOfFreedom="1" id="kappa29Scaler" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.35"
spec="ScaleOperator" parameter="@hky.kappa29" weight="0.1"/>
<operator gaussian="true" id="SubtreeSlide26" size="0.003" spec="SubtreeSlide" tree="@tree26" weight="15.0"/>
<operator degreesOfFreedom="1" id="treeScaler26" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.85"
spec="ScaleOperator" tree="@tree26" weight="6.0"/>
<operator id="wide26" isNarrow="false" spec="Exchange" tree="@tree26" weight="1.0"/>
<operator id="narrow26" isNarrow="true" spec="Exchange" tree="@tree26" weight="15"/>
<operator id="WilsonBalding26" spec="WilsonBalding" tree="@tree26" weight="3"/>
<!-- -->
<operator id="Uniform26" spec="Uniform" tree="@tree26" weight="30.0"/>
<operator gaussian="true" id="SubtreeSlide29" size="0.002" spec="SubtreeSlide" tree="@tree29" weight="15.0"/>
<operator degreesOfFreedom="1" id="treeScaler29" scaleAll="false" scaleAllIndependently="false"
scaleFactor="0.85"
spec="ScaleOperator" tree="@tree29" weight="6.0"/>
<operator id="wide29" isNarrow="false" spec="Exchange" tree="@tree29" weight="1.0"/>
<operator id="narrow29" isNarrow="true" spec="Exchange" tree="@tree29" weight="15"/>
<operator id="WilsonBalding29" spec="WilsonBalding" tree="@tree29" weight="3"/>
<!-- -->
<operator id="Uniform29" spec="Uniform" tree="@tree29" weight="30.0"/>
<operator spec='UpDownOperator' scaleFactor="0.75" weight="30">
<up idref="birthRate"/>
<down idref="popSize"/>
<down idref="popSizeTop"/>
<down idref="popMean"/>
<down idref="speciesTree"/>
<down idref="tree26"/>
<down idref="tree29"/>
</operator>
<!-- <operator id="wideSP" isNarrow="false" spec="Exchange" tree="@speciesTree" weight="1.0"/> <operator gaussian="true"
id="SubtreeSlideSP" size="0.003" spec="SubtreeSlide" tree="@speciesTree" weight="15.0"/> <operator degreesOfFreedom="1" id="treeScalerSP"
scaleAll="false" scaleAllIndependently="false" scaleFactor="0.85" spec="ScaleOperator" tree="@speciesTree" weight="6.0"/>
<operator id="UniformSP" spec="Uniform" tree="@speciesTree" weight="30.0"/> -->
<operator id="Reheight" spec="NodeReheight" tree="@speciesTree" weight="94" taxonset='@taxonsuperset'>
<genetree idref='tree26'/>
<genetree idref='tree29'/>
</operator>
<logger fileName="test.$(seed).log" id="Logger0" logEvery="1000" mode="autodetect" model="@posterior">
<log idref="posterior"/>
<log idref="prior"/>
<log idref="speciesCoalescent"/>
<log idref="genetree.priors"/>
<log idref="SpeciesTreePopSizePrior"/>
<log idref="tree.prior.26"/>
<log idref="tree.prior.29"/>
<log idref="SpeciesTreeDivergenceTimesPrior"/>
<log idref="likelihood"/>
<log idref="popMean"/>
<log idref="birthRate"/>
<!--log idref="relativeDeathRate"/ -->
<log idref="hky.kappa26"/>
<log idref="hky.kappa29"/>
<log idref="popSize"/>
<log idref="popSizeTop"/>
<log id="TreeHeightSP" spec="beast.evolution.tree.TreeHeightLogger" tree="@speciesTree"/>
<log id="TreeHeight26" spec="beast.evolution.tree.TreeHeightLogger" tree="@tree26"/>
<log id="TreeHeight29" spec="beast.evolution.tree.TreeHeightLogger" tree="@tree29"/>
<log idref="treelikelihood.26"/>
<log idref="treelikelihood.29"/>
</logger>
<logger fileName="test.$(seed).sp.trees" id="Logger4" logEvery="1000" mode="tree">
<log spec='SpeciesTreeLogger' popSize='@popSize' popSizeTop="@popSizeTop" tree="@speciesTree"
speciesTreePrior='@SpeciesTreePopSizePrior' treetop="@treeTopFinder">
</log>
</logger>
<logger fileName="test.$(seed).26.trees" id="Logger2" logEvery="1000" mode="autodetect">
<log idref="tree26"/>
</logger>
<logger fileName="test.$(seed).29.trees" id="Logger3" logEvery="1000" mode="autodetect">
<log idref="tree29"/>
</logger>
<logger id="Logger1" logEvery="100000" mode="autodetect">
<log idref="posterior"/>
<log spec='ESS' arg='@posterior'/>
<log idref="prior"/>
<log spec='ESS' arg='@prior'/>
</logger>
</run>
</beast>
|
{
"pile_set_name": "Github"
}
|
/**
* Link plugin spec
*/
import { createElement, detach } from '@syncfusion/ej2-base';
import { MarkdownParser } from '../../../src/markdown-parser/index';
describe('Markdown - Link and Image plugin', () => {
let innerValue: string =
`# Lists are a piece of cake
They even auto continue as you type
A double enter will end them
Tabs and shift-tabs work too`;
let editorObj: MarkdownParser;
let textArea: HTMLTextAreaElement = <HTMLTextAreaElement>createElement('textarea', {
id: 'markdown-editor',
styles: 'width:200px;height:200px'
});
beforeAll(() => {
document.body.appendChild(textArea);
editorObj = new MarkdownParser({ element: textArea });
textArea.value = innerValue;
textArea.focus();
});
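// Editorial note (illustrative): the save()/restore() offsets below index into
// innerValue, e.g. save(2, 7) selects characters 2-6 of the first line,
// i.e. the word 'Lists' in '# Lists are a piece of cake'.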
it(' Check the selection link insert ', () => {
editorObj.markdownSelection.save(2, 7);
editorObj.markdownSelection.restore(textArea);
let isCallBack: boolean = false;
let item: any = { text: 'Lists', url: 'http://' };
editorObj.execCommand("Links", 'Link', null, () => {
isCallBack = true;
}, null, item);
expect(isCallBack).toBe(true);
expect(textArea.value.substr(2, 16)).toBe('[Lists](http://)');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('Lists');
});
it(' Check the selection Image insert ', () => {
editorObj.markdownSelection.save(19, 22);
editorObj.markdownSelection.restore(textArea);
let item: any = { url: 'http://' };
editorObj.execCommand("Images", 'Image', null, null, null, item);
expect(textArea.value.substr(19, 15)).toBe('');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('are');
expect(textArea.selectionStart).toBe(21);
});
it(' Check the cursor link insert ', () => {
textArea.value = innerValue;
textArea.focus();
editorObj.markdownSelection.save(2, 2);
editorObj.markdownSelection.restore(textArea);
let item: any = { url: 'http://', text: '' };
editorObj.execCommand("Links", 'Link', null, null, null, item);
expect(textArea.value.substr(2, 11)).toBe('[](http://)');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('');
expect(textArea.selectionStart).toBe(3);
});
it(' Check the cursor image insert ', () => {
textArea.value = innerValue;
textArea.focus();
editorObj.markdownSelection.save(2, 2);
editorObj.markdownSelection.restore(textArea);
let item: any = { url: 'http://' };
editorObj.execCommand("Images", 'Image', null, null, null, item);
expect(textArea.value.substr(2, 12)).toBe('');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('');
expect(textArea.selectionStart).toBe(4);
});
it(' Check the multiple line link insert ', () => {
textArea.value = innerValue;
textArea.focus();
editorObj.markdownSelection.save(2, 40);
editorObj.markdownSelection.restore(textArea);
let item: any = { text: 'Lists are a piece of cake\n They', url: 'http://' };
editorObj.execCommand("Links", 'Link', null, null, null, item);
expect(textArea.value.substr(2, 49)).toBe('[Lists are a piece of cake\n They](http://)');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('Lists are a piece of cake\n They');
expect(textArea.selectionStart).toBe(3);
expect(textArea.selectionEnd).toBe(41);
});
it(' Check the multiple line image insert ', () => {
textArea.value = innerValue;
textArea.focus();
editorObj.markdownSelection.save(2, 40);
editorObj.markdownSelection.restore(textArea);
let item: any = { url: 'http://' };
editorObj.execCommand("Images", 'Image', null, null, null, item);
expect(textArea.value.substr(2, 50)).toBe('');
expect(editorObj.markdownSelection.getSelectedText(textArea)).toBe('Lists are a piece of cake\n They');
expect(textArea.selectionStart).toBe(4);
});
afterAll(() => {
textArea.value = '';
detach(textArea);
});
});
|
{
"pile_set_name": "Github"
}
|
require "rendertext"
require "keys"
require "graphics"
require "font"
require "commands"
SelectMenu = {
fsize = 22, -- font for displaying item names
tfsize = 25, -- font for page title
ffsize = 16, -- font for paging display
title_H = 40, -- title height
spacing = 36, -- spacing between lines
foot_H = 27, -- foot height
margin_H = 10, -- horizontal margin
current_entry = 0,
menu_title = "No Title",
no_item_msg = "No items found.",
item_array = {},
items = 0,
item_shortcuts = {
"Q", "W", "E", "R", "T", "Y", "U", "I", "O", "P",
"A", "S", "D", "F", "G", "H", "J", "K", "L", "Sym",
"Z", "X", "C", "V", "B", "N", "M", ".", "/", "Ent",
},
last_shortcut = 0,
-- state buffer
page = 1,
current = 1,
oldcurrent = 0,
selected_item = nil,
commands = nil,
expandable = false, -- if true handle Right/Left FW selector keys
deletable = false, -- if true handle Del key as a request to delete item
-- note that currently expandable and deletable are mutually exclusive
-- NuPogodi, 30.08.12: define font to render menu items
own_glyph = 0, -- render menu items with default "cfont"
-- own_glyph = 1 => own glyphs for items like "Droid/DroidSans.ttf"
-- own_glyph = 2 => own glyphs for Font.fontmap._index like "ffont", "tfont", etc.
}
function SelectMenu:new(o)
o = o or {}
setmetatable(o, self)
self.__index = self
o.items = #o.item_array
o.page = 1
o.current = 1
o.oldcurrent = 0
o.selected_item = nil
-- increase spacing for DXG so we don't have more than 30 shortcuts
if fb.bb:getHeight() == 1200 then
o.spacing = 37
end
o:addAllCommands()
return o
end
function SelectMenu:getItemIndexByShortCut(c, perpage)
if c == nil then return end -- unused key
for _k,_v in ipairs(self.item_shortcuts) do
if _v == c and _k <= self.last_shortcut then
return (perpage * (self.page - 1) + _k)
end
end
end
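-- illustrative example (hypothetical values): with perpage = 20 and page = 2,
-- the shortcut "W" (2nd entry of item_shortcuts) resolves to item
-- 20 * (2 - 1) + 2 = 22, provided that shortcut is visible on the current
-- page (_k <= last_shortcut)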
function SelectMenu:addAllCommands()
self.commands = Commands:new{}
local numeric_keydefs, i = {}
for i=1, 10 do numeric_keydefs[i]=Keydef:new(KEY_1+i-1, nil, tostring(i%10)) end
self.commands:addGroup("[1, 2 .. 9, 0]", numeric_keydefs,
"item at position 0%, 10% .. 90%, 100%",
function(sm)
local target_item = math.ceil(sm.items * (keydef.keycode-KEY_1) / 9)
sm.current, sm.page, sm.markerdirty, sm.pagedirty =
gotoTargetItem(target_item, sm.items, sm.current, sm.page, sm.perpage)
end
)
self.commands:add(KEY_FW_UP, nil, "joypad up",
"previous item",
function(sm)
if sm.current == 1 then
if sm.page > 1 then
sm.current = sm.perpage
sm.page = sm.page - 1
sm.pagedirty = true
end
else
sm.current = sm.current - 1
sm.markerdirty = true
end
end
)
self.commands:add(KEY_FW_DOWN, nil, "joypad down",
"next item",
function(sm)
if sm.current == sm.perpage then
if sm.page < (sm.items / sm.perpage) then
sm.current = 1
sm.page = sm.page + 1
sm.pagedirty = true
end
else
if sm.page ~= math.floor(sm.items / sm.perpage) + 1
or sm.current + (sm.page - 1) * sm.perpage < sm.items then
sm.current = sm.current + 1
sm.markerdirty = true
end
end
end
)
self.commands:add({KEY_PGFWD, KEY_LPGFWD}, nil, ">",
"next page",
function(sm)
if sm.page < (sm.items / sm.perpage) then
if sm.current + sm.page * sm.perpage > sm.items then
sm.current = sm.items - sm.page * sm.perpage
end
sm.page = sm.page + 1
sm.pagedirty = true
else
sm.current = sm.items - (sm.page - 1) * sm.perpage
sm.markerdirty = true
end
end
)
self.commands:add({KEY_PGBCK, KEY_LPGBCK}, nil, "<",
"previous page",
function(sm)
if sm.page > 1 then
sm.page = sm.page - 1
sm.pagedirty = true
else
sm.current = 1
sm.markerdirty = true
end
end
)
self.commands:add(KEY_FW_PRESS, nil, "joypad center",
"select item",
function(sm)
if sm.items == 0 then
return "break"
else
self.selected_item = (sm.perpage * (sm.page - 1) + sm.current)
end
end
)
if self.deletable then
self.commands:add(KEY_DEL, nil, "Del",
"delete item",
function(sm)
self.selected_item = (sm.perpage * (sm.page - 1) + sm.current)
return "delete"
end
)
end
if self.expandable then
self.commands:add(KEY_FW_RIGHT, nil, "joypad right",
"expand item",
function(sm)
self.selected_item = (sm.perpage * (sm.page - 1) + sm.current)
return "expand"
end
)
self.commands:add(KEY_FW_LEFT, nil, "joypad left",
"collapse item",
function(sm)
self.selected_item = (sm.perpage * (sm.page - 1) + sm.current)
return "collapse"
end
)
self.commands:add(KEY_FW_RIGHT, MOD_SHIFT, "joypad right",
"expand all subitems",
function(sm)
self.selected_item = (sm.perpage * (sm.page - 1) + sm.current)
return "expand all"
end
)
end
local KEY_Q_to_P = {}
for i = KEY_Q, KEY_P do
table.insert(KEY_Q_to_P, Keydef:new(i, nil, ""))
end
self.commands:addGroup("Q to P", KEY_Q_to_P,
"select item with Q to P key as shortcut",
function(sm, keydef)
sm.selected_item = sm:getItemIndexByShortCut(
sm.item_shortcuts[ keydef.keycode - KEY_Q + 1 ], sm.perpage)
end
)
local KEY_A_to_L = {}
for i = KEY_A, KEY_L do
table.insert(KEY_A_to_L, Keydef:new(i, nil, ""))
end
self.commands:addGroup("A to L", KEY_A_to_L,
"select item with A to L key as shortcut",
function(sm, keydef)
sm.selected_item = sm:getItemIndexByShortCut(
sm.item_shortcuts[ keydef.keycode - KEY_A + 11 ], sm.perpage)
end
)
local KEY_Z_to_M = {}
for i = KEY_Z, KEY_M do
table.insert(KEY_Z_to_M, Keydef:new(i, nil, ""))
end
self.commands:addGroup("Z to M", KEY_Z_to_M,
"select item with Z to M key as shortcut",
function(sm, keydef)
sm.selected_item = sm:getItemIndexByShortCut(
sm.item_shortcuts[ keydef.keycode - KEY_Z + 21 ], sm.perpage)
end
)
self.commands:add(KEY_SLASH, nil, "/",
"select item with / key as shortcut",
function(sm)
sm.selected_item = sm:getItemIndexByShortCut("/", sm.perpage)
end
)
self.commands:add(KEY_DOT, nil, ".",
"select item with dot key as shortcut",
function(sm)
sm.selected_item = sm:getItemIndexByShortCut(".", sm.perpage)
end
)
self.commands:add(KEY_SYM, nil, "Sym",
"select item with Sym key as shortcut",
function(sm)
sm.selected_item = sm:getItemIndexByShortCut("Sym", sm.perpage)
end
)
self.commands:add(KEY_ENTER, nil, "Enter",
"select item with Enter key as shortcut",
function(sm)
sm.selected_item = sm:getItemIndexByShortCut("Ent", sm.perpage)
end
)
self.commands:add(KEY_H,MOD_ALT,"H",
"show help page",
function(sm)
HelpPage:show(0, G_height, sm.commands)
sm.pagedirty = true
end)
self.commands:add(KEY_BACK, nil, "Back",
"exit menu",
function(sm)
return "break"
end
)
end
function SelectMenu:clearCommands()
self.commands = Commands:new{}
self.commands:add(KEY_BACK, nil, "Back",
"exit menu",
function(sm)
return "break"
end)
end
------------------------------------------------
-- return the index of selected item
------------------------------------------------
function SelectMenu:choose(ypos, height)
self.perpage = math.floor(height / self.spacing) - 2
self.pagedirty = true
self.markerdirty = false
self.last_shortcut = 0
self.current_entry = math.min(self.current_entry,self.items)
-- now calculating the page & cursor
self.page = math.floor(self.current_entry / self.perpage) + 1
self.page = math.max(1, self.page)
self.current = self.current_entry - (self.page - 1) * self.perpage + 1
self.current = math.max(1, self.current)
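-- illustrative example (hypothetical numbers): with perpage = 15 and
-- current_entry = 20 this opens page math.floor(20/15) + 1 = 2 with the
-- cursor on line 20 - (2 - 1)*15 + 1 = 6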
local own_face
while true do
local cface = Font:getFace("cfont", 22)
local tface = Font:getFace("tfont", 25)
local fface = Font:getFace("ffont", 16)
local sface = Font:getFace("scfont", 22)
local lx = self.margin_H + 40
local fw = fb.bb:getWidth() - lx - self.margin_H
if self.pagedirty then
fb.bb:paintRect(0, ypos, fb.bb:getWidth(), height, 0)
self.markerdirty = true
-- draw menu title
DrawTitle(self.menu_title,self.margin_H,0,self.title_H,3,tface)
-- draw items
fb.bb:paintRect(0, ypos + self.title_H + self.margin_H, fb.bb:getWidth(), height - self.title_H, 0)
if self.items == 0 then
y = ypos + self.title_H + (self.spacing * 2)
renderUtf8Text(fb.bb, self.margin_H + 20, y, cface,
"Oops... Bad news for you:", true)
y = y + self.spacing
renderUtf8Text(fb.bb, self.margin_H + 20, y, cface,
self.no_item_msg, true)
self.markerdirty = false
self:clearCommands()
else
local c
for c = 1, self.perpage do
local i = (self.page - 1) * self.perpage + c
if i <= self.items then
y = ypos + self.title_H + (self.spacing * c) + 4
-- paint shortcut indications
if c <= 10 or c > 20 then
blitbuffer.paintBorder(fb.bb, self.margin_H, y-22, 29, 29, 2, 15)
else
fb.bb:paintRect(self.margin_H, y-22, 29, 29, 3)
end
if self.item_shortcuts[c] ~= nil and
string.len(self.item_shortcuts[c]) == 3 then
-- debug "Del", "Sym and "Ent"
renderUtf8Text(fb.bb, self.margin_H + 3, y, fface,
self.item_shortcuts[c], true)
else
renderUtf8Text(fb.bb, self.margin_H + 8, y, sface,
self.item_shortcuts[c], true)
end
self.last_shortcut = c
-- NuPogodi, 30.08.12: improved method to use own fontface for each menu item
if self.own_glyph == 1 then -- Font.fontmap[_index], like "Droid/DroidSans.ttf"
own_face = Font:getFace(self.item_array[i], 22)
elseif self.own_glyph == 2 then -- Font.fontmap._index, like "[cfont] description"
own_face = Font:getFace(string.sub(string.match(self.item_array[i],"%b[]"), 2, -2), 22)
else
own_face = cface
end
-- rendering menu items
if sizeUtf8Text(lx,fb.bb:getWidth(),own_face,self.item_array[i],false).x < (fw - 10) then
renderUtf8Text(fb.bb,lx,y,own_face,self.item_array[i],false)
else
local gapx = sizeUtf8Text(0,fb.bb:getWidth(),own_face,"...", true).x
gapx = lx + renderUtf8TextWidth(fb.bb,lx,y,own_face,self.item_array[i],false,fw-gapx-15).x
renderUtf8Text(fb.bb,gapx,y,own_face,"...",true)
end
-- end of changes (NuPogodi)
end -- if i <= self.items
end -- for c=1, self.perpage
end -- if self.items == 0
local footer = "Page "..self.page.." of "..(math.ceil(self.items / self.perpage)).." - Press Alt-H for help"
renderUtf8Text(fb.bb, self.margin_H, height-7, fface, footer, true)
end
if self.markerdirty then
if not self.pagedirty then
if self.oldcurrent > 0 then
y = ypos + self.title_H + (self.spacing * self.oldcurrent) + 12
fb.bb:paintRect( lx, y, fw, 3, 0)
Screen:refresh(1, nil, lx, y, fw, 3)
end
end
-- draw new marker line
y = ypos + self.title_H + (self.spacing * self.current) + 12
fb.bb:paintRect(lx, y, fw, 3, 15)
if not self.pagedirty then
Screen:refresh(1, nil, lx, y, fw, 3)
end
self.oldcurrent = self.current
self.markerdirty = false
end
if self.pagedirty then
Screen:refresh(0, 0, nil, ypos, fb.bb:getWidth(), height)
self.pagedirty = false
end
local ev = input.saveWaitForEvent()
ev.code = adjustKeyEvents(ev)
if ev.type == EV_KEY and ev.value ~= EVENT_VALUE_KEY_RELEASE then
keydef = Keydef:new(ev.code, getKeyModifier())
Debug("key pressed: "..tostring(keydef))
command = self.commands:getByKeydef(keydef)
if command ~= nil then
Debug("command to execute: "..tostring(command))
ret_code = command.func(self, keydef)
else
Debug("command not found: "..tostring(command))
end
if ret_code == "break" then
break
end
if self.selected_item ~= nil then
if self.expandable then
if ret_code == "expand" then
return nil, self.selected_item
elseif ret_code == "collapse" then
return nil, -self.selected_item
elseif ret_code == "expand all" then
return nil, self.selected_item, "all"
end
elseif self.deletable and ret_code == "delete" then
return nil, self.selected_item
end
Debug("# selected "..self.selected_item)
return self.selected_item, self.item_array[self.selected_item]
end
end -- EOF if
end -- EOF while
return nil
end
|
{
"pile_set_name": "Github"
}
|
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package norm
type valueRange struct {
value uint16 // header: value:stride
lo, hi byte // header: lo:n
}
type sparseBlocks struct {
values []valueRange
offset []uint16
}
var nfcSparse = sparseBlocks{
values: nfcSparseValues[:],
offset: nfcSparseOffset[:],
}
var nfkcSparse = sparseBlocks{
values: nfkcSparseValues[:],
offset: nfkcSparseOffset[:],
}
var (
nfcData = newNfcTrie(0)
nfkcData = newNfkcTrie(0)
)
// lookupValue determines the type of block n and looks up the value for b.
// For n < t.cutoff, the block is a simple lookup table. Otherwise, the block
// is a list of ranges with an accompanying value. Given a matching range r,
// the value for b is given by r.value + (b - r.lo) * stride.
func (t *sparseBlocks) lookup(n uint32, b byte) uint16 {
offset := t.offset[n]
header := t.values[offset]
lo := offset + 1
hi := lo + uint16(header.lo)
for lo < hi {
m := lo + (hi-lo)/2
r := t.values[m]
if r.lo <= b && b <= r.hi {
return r.value + uint16(b-r.lo)*header.value
}
if b < r.lo {
hi = m
} else {
lo = m + 1
}
}
return 0
}
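// Illustrative walk-through (the numbers are hypothetical, not taken from the
// generated tables): if t.offset[n] == 10 and the header t.values[10] is
// {value: 2, lo: 3}, then the three ranges t.values[11..13] are binary
// searched. For a matching range r = {value: 100, lo: 0x80, hi: 0x8f} and
// b == 0x82 the result is 100 + uint16(0x82-0x80)*2 == 104.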
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright Andrey Semashev 2007 - 2015.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*/
/*!
* \file formatters.hpp
* \author Andrey Semashev
* \date 10.11.2012
*
* The header includes all template expression formatters.
*/
#ifndef BOOST_LOG_EXPRESSIONS_FORMATTERS_HPP_INCLUDED_
#define BOOST_LOG_EXPRESSIONS_FORMATTERS_HPP_INCLUDED_
#include <boost/log/detail/config.hpp>
#include <boost/log/expressions/formatters/stream.hpp>
#include <boost/log/expressions/formatters/format.hpp>
#include <boost/log/expressions/formatters/date_time.hpp>
#include <boost/log/expressions/formatters/named_scope.hpp>
#include <boost/log/expressions/formatters/char_decorator.hpp>
#include <boost/log/expressions/formatters/xml_decorator.hpp>
#include <boost/log/expressions/formatters/csv_decorator.hpp>
#include <boost/log/expressions/formatters/c_decorator.hpp>
#include <boost/log/expressions/formatters/max_size_decorator.hpp>
#include <boost/log/expressions/formatters/if.hpp>
#include <boost/log/expressions/formatters/wrap_formatter.hpp>
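// Usage sketch (illustrative, not part of this header): with these includes a
// sink formatter can be composed from the expression templates, e.g.
//
//   namespace expr = boost::log::expressions;
//   sink->set_formatter(
//       expr::stream
//           << expr::format_date_time< boost::posix_time::ptime >("TimeStamp", "%Y-%m-%d %H:%M:%S")
//           << " " << expr::smessage);
//
// where "sink" is assumed to be an already configured sink frontend.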
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
#endif // BOOST_LOG_EXPRESSIONS_FORMATTERS_HPP_INCLUDED_
|
{
"pile_set_name": "Github"
}
|
/*
* Description: Class defining the standard paths
*/
#ifndef STANDARD_PATHS_HPP
#define STANDARD_PATHS_HPP
#include <QtCore/QString>
namespace webotsQtUtils {
class StandardPaths {
public:
static const QString &getWebotsHomePath();
static const QString &getCurrentLibraryPath();
static const QString &getControllerPath();
static const QString &getProjectPath();
private:
StandardPaths() {}
};
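// Usage sketch (illustrative): callers read the returned references directly,
// e.g. const QString &project = webotsQtUtils::StandardPaths::getProjectPath();
// no instance is ever created because the constructor is private.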
} // namespace webotsQtUtils
#endif
|
{
"pile_set_name": "Github"
}
|
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
#include "ConversionHelper.hxx"
#include <com/sun/star/table/BorderLine2.hpp>
#include <com/sun/star/lang/Locale.hpp>
#include <com/sun/star/text/HoriOrientation.hpp>
#include <com/sun/star/style/NumberingType.hpp>
#include <editeng/borderline.hxx>
#include <ooxml/resourceids.hxx>
#include <rtl/ustrbuf.hxx>
#include <tools/color.hxx>
#include <tools/mapunit.hxx>
#include <tools/UnitConversion.hxx>
using namespace com::sun::star;
namespace writerfilter::dmapper::ConversionHelper {
/// Convert OOXML border style to WW8 that editeng can handle.
static sal_Int32 lcl_convertBorderStyleFromToken(sal_Int32 nOOXMLType)
{
switch (nOOXMLType)
{
case NS_ooxml::LN_Value_ST_Border_nil: return 255;
case NS_ooxml::LN_Value_ST_Border_none: return 0;
case NS_ooxml::LN_Value_ST_Border_single: return 1;
case NS_ooxml::LN_Value_ST_Border_thick: return 2;
case NS_ooxml::LN_Value_ST_Border_double: return 3;
case NS_ooxml::LN_Value_ST_Border_dotted: return 6;
case NS_ooxml::LN_Value_ST_Border_dashed: return 7;
case NS_ooxml::LN_Value_ST_Border_dotDash: return 8;
case NS_ooxml::LN_Value_ST_Border_dotDotDash: return 9;
case NS_ooxml::LN_Value_ST_Border_triple: return 10;
case NS_ooxml::LN_Value_ST_Border_thinThickSmallGap: return 11;
case NS_ooxml::LN_Value_ST_Border_thickThinSmallGap: return 12;
case NS_ooxml::LN_Value_ST_Border_thinThickThinSmallGap: return 13;
case NS_ooxml::LN_Value_ST_Border_thinThickMediumGap: return 14;
case NS_ooxml::LN_Value_ST_Border_thickThinMediumGap: return 15;
case NS_ooxml::LN_Value_ST_Border_thinThickThinMediumGap: return 16;
case NS_ooxml::LN_Value_ST_Border_thinThickLargeGap: return 17;
case NS_ooxml::LN_Value_ST_Border_thickThinLargeGap: return 18;
case NS_ooxml::LN_Value_ST_Border_thinThickThinLargeGap: return 19;
case NS_ooxml::LN_Value_ST_Border_wave: return 20;
case NS_ooxml::LN_Value_ST_Border_doubleWave: return 21;
case NS_ooxml::LN_Value_ST_Border_dashSmallGap: return 22;
case NS_ooxml::LN_Value_ST_Border_dashDotStroked: return 23;
case NS_ooxml::LN_Value_ST_Border_threeDEmboss: return 24;
case NS_ooxml::LN_Value_ST_Border_threeDEngrave: return 25;
case NS_ooxml::LN_Value_ST_Border_outset: return 26;
case NS_ooxml::LN_Value_ST_Border_inset: return 27;
case NS_ooxml::LN_Value_ST_Border_apples: return 64;
case NS_ooxml::LN_Value_ST_Border_archedScallops: return 65;
case NS_ooxml::LN_Value_ST_Border_babyPacifier: return 66;
case NS_ooxml::LN_Value_ST_Border_babyRattle: return 67;
case NS_ooxml::LN_Value_ST_Border_balloons3Colors: return 68;
case NS_ooxml::LN_Value_ST_Border_balloonsHotAir: return 69;
case NS_ooxml::LN_Value_ST_Border_basicBlackDashes: return 70;
case NS_ooxml::LN_Value_ST_Border_basicBlackDots: return 71;
case NS_ooxml::LN_Value_ST_Border_basicBlackSquares: return 72;
case NS_ooxml::LN_Value_ST_Border_basicThinLines: return 73;
case NS_ooxml::LN_Value_ST_Border_basicWhiteDashes: return 74;
case NS_ooxml::LN_Value_ST_Border_basicWhiteDots: return 75;
case NS_ooxml::LN_Value_ST_Border_basicWhiteSquares: return 76;
case NS_ooxml::LN_Value_ST_Border_basicWideInline: return 77;
case NS_ooxml::LN_Value_ST_Border_basicWideMidline: return 78;
case NS_ooxml::LN_Value_ST_Border_basicWideOutline: return 79;
case NS_ooxml::LN_Value_ST_Border_bats: return 80;
case NS_ooxml::LN_Value_ST_Border_birds: return 81;
case NS_ooxml::LN_Value_ST_Border_birdsFlight: return 82;
case NS_ooxml::LN_Value_ST_Border_cabins: return 83;
case NS_ooxml::LN_Value_ST_Border_cakeSlice: return 84;
case NS_ooxml::LN_Value_ST_Border_candyCorn: return 85;
case NS_ooxml::LN_Value_ST_Border_celticKnotwork: return 86;
case NS_ooxml::LN_Value_ST_Border_certificateBanner: return 87;
case NS_ooxml::LN_Value_ST_Border_chainLink: return 88;
case NS_ooxml::LN_Value_ST_Border_champagneBottle: return 89;
case NS_ooxml::LN_Value_ST_Border_checkedBarBlack: return 90;
case NS_ooxml::LN_Value_ST_Border_checkedBarColor: return 91;
case NS_ooxml::LN_Value_ST_Border_checkered: return 92;
case NS_ooxml::LN_Value_ST_Border_christmasTree: return 93;
case NS_ooxml::LN_Value_ST_Border_circlesLines: return 94;
case NS_ooxml::LN_Value_ST_Border_circlesRectangles: return 95;
case NS_ooxml::LN_Value_ST_Border_classicalWave: return 96;
case NS_ooxml::LN_Value_ST_Border_clocks: return 97;
case NS_ooxml::LN_Value_ST_Border_compass: return 98;
case NS_ooxml::LN_Value_ST_Border_confetti: return 99;
case NS_ooxml::LN_Value_ST_Border_confettiGrays: return 100;
case NS_ooxml::LN_Value_ST_Border_confettiOutline: return 101;
case NS_ooxml::LN_Value_ST_Border_confettiStreamers: return 102;
case NS_ooxml::LN_Value_ST_Border_confettiWhite: return 103;
case NS_ooxml::LN_Value_ST_Border_cornerTriangles: return 104;
case NS_ooxml::LN_Value_ST_Border_couponCutoutDashes: return 105;
case NS_ooxml::LN_Value_ST_Border_couponCutoutDots: return 106;
case NS_ooxml::LN_Value_ST_Border_crazyMaze: return 107;
case NS_ooxml::LN_Value_ST_Border_creaturesButterfly: return 108;
case NS_ooxml::LN_Value_ST_Border_creaturesFish: return 109;
case NS_ooxml::LN_Value_ST_Border_creaturesInsects: return 110;
case NS_ooxml::LN_Value_ST_Border_creaturesLadyBug: return 111;
case NS_ooxml::LN_Value_ST_Border_crossStitch: return 112;
case NS_ooxml::LN_Value_ST_Border_cup: return 113;
case NS_ooxml::LN_Value_ST_Border_decoArch: return 114;
case NS_ooxml::LN_Value_ST_Border_decoArchColor: return 115;
case NS_ooxml::LN_Value_ST_Border_decoBlocks: return 116;
case NS_ooxml::LN_Value_ST_Border_diamondsGray: return 117;
case NS_ooxml::LN_Value_ST_Border_doubleD: return 118;
case NS_ooxml::LN_Value_ST_Border_doubleDiamonds: return 119;
case NS_ooxml::LN_Value_ST_Border_earth1: return 120;
case NS_ooxml::LN_Value_ST_Border_earth2: return 121;
case NS_ooxml::LN_Value_ST_Border_eclipsingSquares1: return 122;
case NS_ooxml::LN_Value_ST_Border_eclipsingSquares2: return 123;
case NS_ooxml::LN_Value_ST_Border_eggsBlack: return 124;
case NS_ooxml::LN_Value_ST_Border_fans: return 125;
case NS_ooxml::LN_Value_ST_Border_film: return 126;
case NS_ooxml::LN_Value_ST_Border_firecrackers: return 127;
case NS_ooxml::LN_Value_ST_Border_flowersBlockPrint: return 128;
case NS_ooxml::LN_Value_ST_Border_flowersDaisies: return 129;
case NS_ooxml::LN_Value_ST_Border_flowersModern1: return 130;
case NS_ooxml::LN_Value_ST_Border_flowersModern2: return 131;
case NS_ooxml::LN_Value_ST_Border_flowersPansy: return 132;
case NS_ooxml::LN_Value_ST_Border_flowersRedRose: return 133;
case NS_ooxml::LN_Value_ST_Border_flowersRoses: return 134;
case NS_ooxml::LN_Value_ST_Border_flowersTeacup: return 135;
case NS_ooxml::LN_Value_ST_Border_flowersTiny: return 136;
case NS_ooxml::LN_Value_ST_Border_gems: return 137;
case NS_ooxml::LN_Value_ST_Border_gingerbreadMan: return 138;
case NS_ooxml::LN_Value_ST_Border_gradient: return 139;
case NS_ooxml::LN_Value_ST_Border_handmade1: return 140;
case NS_ooxml::LN_Value_ST_Border_handmade2: return 141;
case NS_ooxml::LN_Value_ST_Border_heartBalloon: return 142;
case NS_ooxml::LN_Value_ST_Border_heartGray: return 143;
case NS_ooxml::LN_Value_ST_Border_hearts: return 144;
case NS_ooxml::LN_Value_ST_Border_heebieJeebies: return 145;
case NS_ooxml::LN_Value_ST_Border_holly: return 146;
case NS_ooxml::LN_Value_ST_Border_houseFunky: return 147;
case NS_ooxml::LN_Value_ST_Border_hypnotic: return 148;
case NS_ooxml::LN_Value_ST_Border_iceCreamCones: return 149;
case NS_ooxml::LN_Value_ST_Border_lightBulb: return 150;
case NS_ooxml::LN_Value_ST_Border_lightning1: return 151;
case NS_ooxml::LN_Value_ST_Border_lightning2: return 152;
case NS_ooxml::LN_Value_ST_Border_mapPins: return 153;
case NS_ooxml::LN_Value_ST_Border_mapleLeaf: return 154;
case NS_ooxml::LN_Value_ST_Border_mapleMuffins: return 155;
case NS_ooxml::LN_Value_ST_Border_marquee: return 156;
case NS_ooxml::LN_Value_ST_Border_marqueeToothed: return 157;
case NS_ooxml::LN_Value_ST_Border_moons: return 158;
case NS_ooxml::LN_Value_ST_Border_mosaic: return 159;
case NS_ooxml::LN_Value_ST_Border_musicNotes: return 160;
case NS_ooxml::LN_Value_ST_Border_northwest: return 161;
case NS_ooxml::LN_Value_ST_Border_ovals: return 162;
case NS_ooxml::LN_Value_ST_Border_packages: return 163;
case NS_ooxml::LN_Value_ST_Border_palmsBlack: return 164;
case NS_ooxml::LN_Value_ST_Border_palmsColor: return 165;
case NS_ooxml::LN_Value_ST_Border_paperClips: return 166;
case NS_ooxml::LN_Value_ST_Border_papyrus: return 167;
case NS_ooxml::LN_Value_ST_Border_partyFavor: return 168;
case NS_ooxml::LN_Value_ST_Border_partyGlass: return 169;
case NS_ooxml::LN_Value_ST_Border_pencils: return 170;
case NS_ooxml::LN_Value_ST_Border_people: return 171;
case NS_ooxml::LN_Value_ST_Border_peopleWaving: return 172;
case NS_ooxml::LN_Value_ST_Border_peopleHats: return 173;
case NS_ooxml::LN_Value_ST_Border_poinsettias: return 174;
case NS_ooxml::LN_Value_ST_Border_postageStamp: return 175;
case NS_ooxml::LN_Value_ST_Border_pumpkin1: return 176;
case NS_ooxml::LN_Value_ST_Border_pushPinNote2: return 177;
case NS_ooxml::LN_Value_ST_Border_pushPinNote1: return 178;
case NS_ooxml::LN_Value_ST_Border_pyramids: return 179;
case NS_ooxml::LN_Value_ST_Border_pyramidsAbove: return 180;
case NS_ooxml::LN_Value_ST_Border_quadrants: return 181;
case NS_ooxml::LN_Value_ST_Border_rings: return 182;
case NS_ooxml::LN_Value_ST_Border_safari: return 183;
case NS_ooxml::LN_Value_ST_Border_sawtooth: return 184;
case NS_ooxml::LN_Value_ST_Border_sawtoothGray: return 185;
case NS_ooxml::LN_Value_ST_Border_scaredCat: return 186;
case NS_ooxml::LN_Value_ST_Border_seattle: return 187;
case NS_ooxml::LN_Value_ST_Border_shadowedSquares: return 188;
case NS_ooxml::LN_Value_ST_Border_sharksTeeth: return 189;
case NS_ooxml::LN_Value_ST_Border_shorebirdTracks: return 190;
case NS_ooxml::LN_Value_ST_Border_skyrocket: return 191;
case NS_ooxml::LN_Value_ST_Border_snowflakeFancy: return 192;
case NS_ooxml::LN_Value_ST_Border_snowflakes: return 193;
case NS_ooxml::LN_Value_ST_Border_sombrero: return 194;
case NS_ooxml::LN_Value_ST_Border_southwest: return 195;
case NS_ooxml::LN_Value_ST_Border_stars: return 196;
case NS_ooxml::LN_Value_ST_Border_starsTop: return 197;
case NS_ooxml::LN_Value_ST_Border_stars3d: return 198;
case NS_ooxml::LN_Value_ST_Border_starsBlack: return 199;
case NS_ooxml::LN_Value_ST_Border_starsShadowed: return 200;
case NS_ooxml::LN_Value_ST_Border_sun: return 201;
case NS_ooxml::LN_Value_ST_Border_swirligig: return 202;
case NS_ooxml::LN_Value_ST_Border_tornPaper: return 203;
case NS_ooxml::LN_Value_ST_Border_tornPaperBlack: return 204;
case NS_ooxml::LN_Value_ST_Border_trees: return 205;
case NS_ooxml::LN_Value_ST_Border_triangleParty: return 206;
case NS_ooxml::LN_Value_ST_Border_triangles: return 207;
case NS_ooxml::LN_Value_ST_Border_tribal1: return 208;
case NS_ooxml::LN_Value_ST_Border_tribal2: return 209;
case NS_ooxml::LN_Value_ST_Border_tribal3: return 210;
case NS_ooxml::LN_Value_ST_Border_tribal4: return 211;
case NS_ooxml::LN_Value_ST_Border_tribal5: return 212;
case NS_ooxml::LN_Value_ST_Border_tribal6: return 213;
case NS_ooxml::LN_Value_ST_Border_twistedLines1: return 214;
case NS_ooxml::LN_Value_ST_Border_twistedLines2: return 215;
case NS_ooxml::LN_Value_ST_Border_vine: return 216;
case NS_ooxml::LN_Value_ST_Border_waveline: return 217;
case NS_ooxml::LN_Value_ST_Border_weavingAngles: return 218;
case NS_ooxml::LN_Value_ST_Border_weavingBraid: return 219;
case NS_ooxml::LN_Value_ST_Border_weavingRibbon: return 220;
case NS_ooxml::LN_Value_ST_Border_weavingStrips: return 221;
case NS_ooxml::LN_Value_ST_Border_whiteFlowers: return 222;
case NS_ooxml::LN_Value_ST_Border_woodwork: return 223;
case NS_ooxml::LN_Value_ST_Border_xIllusions: return 224;
case NS_ooxml::LN_Value_ST_Border_zanyTriangles: return 225;
case NS_ooxml::LN_Value_ST_Border_zigZag: return 226;
case NS_ooxml::LN_Value_ST_Border_zigZagStitch: return 227;
default: break;
}
return 0;
}
void MakeBorderLine( sal_Int32 nLineThickness, sal_Int32 nLineToken,
sal_Int32 nLineColor,
table::BorderLine2& rToFill, bool bIsOOXML )
{
static const Color aBorderDefColor[] =
{
// The first item means automatic color (COL_AUTO), but we
// do not use it anyway (see the next statement) .-)
// See also GetLineIndex in sw/source/filter/ww8/ww8par6.cxx
COL_AUTO, COL_BLACK, COL_LIGHTBLUE, COL_LIGHTCYAN, COL_LIGHTGREEN,
COL_LIGHTMAGENTA, COL_LIGHTRED, COL_YELLOW, COL_WHITE, COL_BLUE,
COL_CYAN, COL_GREEN, COL_MAGENTA, COL_RED, COL_BROWN, COL_GRAY,
COL_LIGHTGRAY
};
if(!bIsOOXML && sal::static_int_cast<sal_uInt32>(nLineColor) < SAL_N_ELEMENTS(aBorderDefColor))
nLineColor = sal_Int32(aBorderDefColor[nLineColor]);
//no auto color for borders
if (nLineColor == sal_Int32(COL_AUTO))
nLineColor = sal_Int32(COL_BLACK);
sal_Int32 nLineType = lcl_convertBorderStyleFromToken(nLineToken);
// Map to our border types; we should use one of equal line
// thickness, or one of smaller thickness. If too small, we
// can make the deficit up in additional white space or
// object size.
SvxBorderLineStyle const nLineStyle(
::editeng::ConvertBorderStyleFromWord(nLineType));
rToFill.LineStyle = static_cast<sal_Int16>(nLineStyle);
double const fConverted( (SvxBorderLineStyle::NONE == nLineStyle) ? 0.0 :
::editeng::ConvertBorderWidthFromWord(nLineStyle, nLineThickness,
nLineType));
rToFill.LineWidth = convertTwipToMM100(fConverted);
rToFill.Color = nLineColor;
}
namespace {
void lcl_SwapQuotesInField(OUString &rFmt)
{
//Swap unescaped " and ' with ' and "
sal_Int32 nLen = rFmt.getLength();
OUStringBuffer aBuffer( rFmt );
const sal_Unicode* pFmt = rFmt.getStr();
for (sal_Int32 nI = 0; nI < nLen; ++nI)
{
if ((pFmt[nI] == '\"') && (!nI || pFmt[nI-1] != '\\'))
aBuffer[nI] = '\'';
else if ((pFmt[nI] == '\'') && (!nI || pFmt[nI-1] != '\\'))
aBuffer[nI] = '\"';
}
rFmt = aBuffer.makeStringAndClear();
}
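// Illustrative example (hypothetical field format): lcl_SwapQuotesInField turns
//   MM/dd/yyyy 'at' h:mm   into   MM/dd/yyyy "at" h:mm
// while an escaped quote such as \" or \' keeps its original character.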
bool lcl_IsNotAM(OUString const & rFmt, sal_Int32 nPos)
{
return (
(nPos == rFmt.getLength() - 1) ||
(
(rFmt[nPos+1] != 'M') &&
(rFmt[nPos+1] != 'm')
)
);
}
}
OUString ConvertMSFormatStringToSO(
const OUString& rFormat, lang::Locale& rLocale, bool bHijri)
{
OUString sFormat(rFormat);
lcl_SwapQuotesInField(sFormat);
//#102782#, #102815#, #108341# & #111944# have to work at the same time :-)
bool bForceJapanese(false);
bool bForceNatNum(false);
sal_Int32 nLen = sFormat.getLength();
sal_Int32 nI = 0;
// const sal_Unicode* pFormat = sFormat.getStr();
OUStringBuffer aNewFormat( sFormat );
while (nI < nLen)
{
if (aNewFormat[nI] == '\\')
nI++;
else if (aNewFormat[nI] == '\"')
{
++nI;
//While not at the end and not at an unescaped end quote
while ((nI < nLen) && ((aNewFormat[nI] != '\"') && (aNewFormat[nI-1] != '\\')))
++nI;
}
else //normal unquoted section
{
sal_Unicode nChar = aNewFormat[nI];
if (nChar == 'O')
{
aNewFormat[nI] = 'M';
bForceNatNum = true;
}
else if (nChar == 'o')
{
aNewFormat[nI] = 'm';
bForceNatNum = true;
}
else if ((nChar == 'A') && lcl_IsNotAM(sFormat, nI))
{
aNewFormat[nI] = 'D';
bForceNatNum = true;
}
else if ((nChar == 'g') || (nChar == 'G'))
bForceJapanese = true;
else if ((nChar == 'a') && lcl_IsNotAM(sFormat, nI))
bForceJapanese = true;
else if (nChar == 'E')
{
if ((nI != nLen-1) && (aNewFormat[nI+1] == 'E'))
{
//todo: this cannot be the right way to replace a part of the string!
aNewFormat[nI] = 'Y';
aNewFormat[nI + 1] = 'Y';
aNewFormat.insert(nI + 2, "YY");
nLen+=2;
nI+=3;
}
bForceJapanese = true;
}
else if (nChar == 'e')
{
if ((nI != nLen-1) && (aNewFormat[nI+1] == 'e'))
{
//todo: this cannot be the right way to replace a part of the string!
aNewFormat[nI] = 'y';
aNewFormat[nI + 1] = 'y';
aNewFormat.insert(nI + 2, "yy");
nLen+=2;
nI+=3;
}
bForceJapanese = true;
}
else if (nChar == '/')
{
// MM We have to escape '/' in case it's used as a char
//todo: this cannot be the right way to replace a part of the string!
aNewFormat[nI] = '\\';
aNewFormat.insert(nI + 1, "/");
nI++;
nLen++;
}
}
++nI;
}
if (bForceNatNum)
bForceJapanese = true;
if (bForceJapanese)
{
rLocale.Language = "ja";
rLocale.Country = "JP";
}
if (bForceNatNum)
{
aNewFormat.insert( 0, "[NatNum1][$-411]");
}
if (bHijri)
{
aNewFormat.insert( 0, "[~hijri]");
}
return aNewFormat.makeStringAndClear();
}
sal_Int32 convertTwipToMM100(sal_Int32 _t)
{
// It appears that MSO handles large twip values specially, probably legacy 16bit handling,
// anything that's bigger than 32767 appears to be simply ignored.
if( _t >= 0x8000 )
return 0;
return ::convertTwipToMm100( _t );
}
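// Worked example (illustrative): convertTwipToMM100(1440) yields 2540, since
// 1440 twips = 1 inch = 25.40 mm, while convertTwipToMM100(40000) yields 0
// because 40000 >= 0x8000 and MSO ignores such legacy values.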
double convertTwipToMM100Double(sal_Int32 _t)
{
// It appears that MSO handles large twip values specially, probably legacy 16bit handling,
// anything that's bigger than 32767 appears to be simply ignored.
if( _t >= 0x8000 )
return 0.0;
return _t * 254.0 / 144.0;
}
sal_uInt32 convertTwipToMM100Unsigned(sal_Int32 _t)
{
if( _t < 0 )
return 0;
return convertTwipToMM100( _t );
}
text::RubyAdjust convertRubyAlign( sal_Int32 nIntValue )
{
text::RubyAdjust rubyAdjust = text::RubyAdjust_LEFT;
switch( nIntValue )
{
case NS_ooxml::LN_Value_ST_RubyAlign_center:
case NS_ooxml::LN_Value_ST_RubyAlign_rightVertical:
rubyAdjust = text::RubyAdjust_CENTER;
break;
case NS_ooxml::LN_Value_ST_RubyAlign_distributeLetter:
rubyAdjust = text::RubyAdjust_BLOCK;
break;
case NS_ooxml::LN_Value_ST_RubyAlign_distributeSpace:
rubyAdjust = text::RubyAdjust_INDENT_BLOCK;
break;
case NS_ooxml::LN_Value_ST_RubyAlign_left:
rubyAdjust = text::RubyAdjust_LEFT;
break;
case NS_ooxml::LN_Value_ST_RubyAlign_right:
rubyAdjust = text::RubyAdjust_RIGHT;
break;
}
return rubyAdjust;
}
sal_Int16 convertTableJustification( sal_Int32 nIntValue )
{
sal_Int16 nOrient = text::HoriOrientation::LEFT_AND_WIDTH;
switch( nIntValue )
{
case NS_ooxml::LN_Value_ST_Jc_center:
nOrient = text::HoriOrientation::CENTER;
break;
case NS_ooxml::LN_Value_ST_Jc_right:
case NS_ooxml::LN_Value_ST_Jc_end:
nOrient = text::HoriOrientation::RIGHT;
break;
case NS_ooxml::LN_Value_ST_Jc_left:
case NS_ooxml::LN_Value_ST_Jc_start:
//no break
default:;
}
return nOrient;
}
sal_Int16 ConvertNumberingType(sal_Int32 nFmt)
{
sal_Int16 nRet;
switch(nFmt)
{
case NS_ooxml::LN_Value_ST_NumberFormat_decimal:
nRet = style::NumberingType::ARABIC;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_upperRoman:
nRet = style::NumberingType::ROMAN_UPPER;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_lowerRoman:
nRet = style::NumberingType::ROMAN_LOWER;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ordinal:
nRet = style::NumberingType::TEXT_NUMBER;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_bullet:
nRet = style::NumberingType::CHAR_SPECIAL;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_none:
nRet = style::NumberingType::NUMBER_NONE;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_upperLetter:
nRet = style::NumberingType::CHARS_UPPER_LETTER_N;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_lowerLetter:
nRet = style::NumberingType::CHARS_LOWER_LETTER_N;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_iroha:
nRet = style::NumberingType::IROHA_HALFWIDTH_JA;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_irohaFullWidth:
nRet = style::NumberingType::IROHA_FULLWIDTH_JA;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_aiueo:
nRet = style::NumberingType::AIU_HALFWIDTH_JA;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_aiueoFullWidth:
nRet = style::NumberingType::AIU_FULLWIDTH_JA;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_hebrew2:
nRet = style::NumberingType::CHARS_HEBREW;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_thaiLetters:
nRet = style::NumberingType::CHARS_THAI;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_russianLower:
nRet = style::NumberingType::CHARS_CYRILLIC_LOWER_LETTER_RU;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_russianUpper:
nRet = style::NumberingType::CHARS_CYRILLIC_UPPER_LETTER_RU;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_decimalEnclosedCircleChinese:
case NS_ooxml::LN_Value_ST_NumberFormat_decimalEnclosedCircle:
case NS_ooxml::LN_Value_ST_NumberFormat_ideographEnclosedCircle:
nRet = style::NumberingType::CIRCLE_NUMBER;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ideographTraditional:
nRet = style::NumberingType::TIAN_GAN_ZH;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ideographZodiac:
nRet = style::NumberingType::DI_ZI_ZH;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ganada:
nRet = style::NumberingType::HANGUL_SYLLABLE_KO;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_chosung:
nRet = style::NumberingType::HANGUL_JAMO_KO;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_koreanLegal:
case NS_ooxml::LN_Value_ST_NumberFormat_koreanDigital:
case NS_ooxml::LN_Value_ST_NumberFormat_koreanCounting:
case NS_ooxml::LN_Value_ST_NumberFormat_koreanDigital2:
nRet = style::NumberingType::NUMBER_HANGUL_KO;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ideographLegalTraditional:
nRet = style::NumberingType::NUMBER_UPPER_ZH_TW;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_arabicAlpha:
nRet = style::NumberingType::CHARS_ARABIC;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_hindiVowels:
nRet = style::NumberingType::CHARS_NEPALI;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_japaneseLegal:
nRet = style::NumberingType::NUMBER_TRADITIONAL_JA;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_chineseCounting:
case NS_ooxml::LN_Value_ST_NumberFormat_japaneseCounting:
case NS_ooxml::LN_Value_ST_NumberFormat_taiwaneseCounting:
case NS_ooxml::LN_Value_ST_NumberFormat_taiwaneseCountingThousand:
case NS_ooxml::LN_Value_ST_NumberFormat_ideographDigital:
case NS_ooxml::LN_Value_ST_NumberFormat_chineseCountingThousand:
nRet = style::NumberingType::NUMBER_LOWER_ZH;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_chineseLegalSimplified:
nRet = style::NumberingType::NUMBER_UPPER_ZH;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_hebrew1:
//91726
nRet = style::NumberingType::NUMBER_HEBREW;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_decimalFullWidth:
case NS_ooxml::LN_Value_ST_NumberFormat_decimalFullWidth2:
nRet = style::NumberingType::FULLWIDTH_ARABIC;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_cardinalText:
nRet = style::NumberingType::TEXT_CARDINAL;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_ordinalText:
nRet = style::NumberingType::TEXT_ORDINAL;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_chicago:
nRet = style::NumberingType::SYMBOL_CHICAGO;
break;
case NS_ooxml::LN_Value_ST_NumberFormat_decimalZero:
nRet = style::NumberingType::ARABIC_ZERO;
break;
default: nRet = style::NumberingType::ARABIC;
}
/* TODO: Lots of additional values are available - some are supported in the I18 framework
NS_ooxml::LN_Value_ST_NumberFormat_hex = 91685;
NS_ooxml::LN_Value_ST_NumberFormat_decimalFullWidth = 91691;
NS_ooxml::LN_Value_ST_NumberFormat_decimalHalfWidth = 91692;
NS_ooxml::LN_Value_ST_NumberFormat_japaneseDigitalTenThousand = 91694;
NS_ooxml::LN_Value_ST_NumberFormat_decimalEnclosedFullstop = 91703;
NS_ooxml::LN_Value_ST_NumberFormat_decimalEnclosedParen = 91704;
NS_ooxml::LN_Value_ST_NumberFormat_ideographZodiacTraditional = 91709;
NS_ooxml::LN_Value_ST_NumberFormat_taiwaneseDigital = 91713;
NS_ooxml::LN_Value_ST_NumberFormat_chineseLegalSimplified = 91715;
NS_ooxml::LN_Value_ST_NumberFormat_chineseCountingThousand = 91716;
NS_ooxml::LN_Value_ST_NumberFormat_koreanLegal = 91719;
NS_ooxml::LN_Value_ST_NumberFormat_vietnameseCounting = 91721;
NS_ooxml::LN_Value_ST_NumberFormat_numberInDash = 91725;
NS_ooxml::LN_Value_ST_NumberFormat_arabicAbjad:
NS_ooxml::LN_Value_ST_NumberFormat_hindiConsonants = 91731;
NS_ooxml::LN_Value_ST_NumberFormat_hindiNumbers = 91732;
NS_ooxml::LN_Value_ST_NumberFormat_hindiCounting = 91733;
NS_ooxml::LN_Value_ST_NumberFormat_thaiNumbers = 91735;
NS_ooxml::LN_Value_ST_NumberFormat_thaiCounting = 91736;*/
return nRet;
}
sal_Int16 ConvertCustomNumberFormat(const OUString& rFormat)
{
sal_Int16 nRet = -1;
if (rFormat == "001, 002, 003, ...")
{
nRet = style::NumberingType::ARABIC_ZERO3;
}
else if (rFormat == "0001, 0002, 0003, ...")
{
nRet = style::NumberingType::ARABIC_ZERO4;
}
else if (rFormat == "00001, 00002, 00003, ...")
{
nRet = style::NumberingType::ARABIC_ZERO5;
}
return nRet;
}
util::DateTime ConvertDateStringToDateTime( const OUString& rDateTime )
{
util::DateTime aDateTime;
//xsd::DateTime in the format [-]CCYY-MM-DDThh:mm:ss[Z|(+|-)hh:mm] example: 2008-01-21T10:42:00Z
//OUString getToken( sal_Int32 token, sal_Unicode cTok, sal_Int32& index ) const
sal_Int32 nIndex = 0;
OUString sDate = rDateTime.getToken( 0, 'T', nIndex );
// HACK: this is broken according to the spec, but MSOffice always treats the time as local,
// and writes it as Z (=UTC+0)
OUString sTime = rDateTime.getToken( 0, 'Z', nIndex );
nIndex = 0;
aDateTime.Year = sal_uInt16( sDate.getToken( 0, '-', nIndex ).toInt32() );
aDateTime.Month = sal_uInt16( sDate.getToken( 0, '-', nIndex ).toInt32() );
if (nIndex != -1)
aDateTime.Day = sal_uInt16( sDate.copy( nIndex ).toInt32() );
nIndex = 0;
aDateTime.Hours = sal_uInt16( sTime.getToken( 0, ':', nIndex ).toInt32() );
aDateTime.Minutes = sal_uInt16( sTime.getToken( 0, ':', nIndex ).toInt32() );
if (nIndex != -1)
aDateTime.Seconds = sal_uInt16( sTime.copy( nIndex ).toInt32() );
return aDateTime;
}
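// Illustrative example: for the string "2008-01-21T10:42:00Z" quoted above, the
// result is Year 2008, Month 1, Day 21, Hours 10, Minutes 42, Seconds 0; the
// trailing Z is stripped by getToken( 0, 'Z', nIndex ) and, as the HACK comment
// notes, the value is treated as local time.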
} // namespace writerfilter::dmapper::ConversionHelper
/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
{
"pile_set_name": "Github"
}
|
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Copyright (c) 2018 NVIDIA Corporation. All rights reserved.
#include "MultiClientRenderResourceManager.h"
#include "UserRenderVertexBuffer.h"
#include "UserRenderIndexBuffer.h"
#include "UserRenderBoneBuffer.h"
#include "UserRenderInstanceBuffer.h"
#include "UserRenderSpriteBuffer.h"
#include "UserRenderSurfaceBuffer.h"
#include "UserRenderResource.h"
#include "UserRenderResourceDesc.h"
#include "RenderContext.h"
#include <assert.h>
#include <algorithm> // for std::min
MultiClientRenderResourceManager::MultiClientRenderResourceManager()
{
}
MultiClientRenderResourceManager::~MultiClientRenderResourceManager()
{
for (size_t i = 0; i < mChildren.size(); i++)
{
if (mChildren[i].destroyRrm)
{
delete mChildren[i].rrm;
}
mChildren[i].rrm = NULL;
}
}
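// Illustrative usage (hypothetical caller code, not part of this sample):
//
//   MultiClientRenderResourceManager manager;
//   manager.addChild(new GameRenderResourceManager(), true);  // owned, deleted in the destructor
//   manager.addChild(&editorResourceManager, false);          // caller keeps ownership
//
// Every buffer or resource created through 'manager' is then fanned out to each
// registered child manager; addChild ignores duplicate registrations.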
void MultiClientRenderResourceManager::addChild(nvidia::apex::UserRenderResourceManager* rrm, bool destroyAutomatic)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
if (mChildren[i].rrm == rrm)
{
return;
}
}
mChildren.push_back(Child(rrm, destroyAutomatic));
}
bool MultiClientRenderResourceManager::getSpriteLayoutData(uint32_t spriteCount, uint32_t spriteSemanticsBitmap, nvidia::apex::UserRenderSpriteBufferDesc* bufferDesc)
{
PX_UNUSED(spriteCount);
PX_UNUSED(spriteSemanticsBitmap);
PX_UNUSED(bufferDesc);
return false;
}
bool MultiClientRenderResourceManager::getInstanceLayoutData(uint32_t particleCount, uint32_t particleSemanticsBitmap, nvidia::apex::UserRenderInstanceBufferDesc* bufferDesc)
{
PX_UNUSED(particleCount);
PX_UNUSED(particleSemanticsBitmap);
PX_UNUSED(bufferDesc);
return false;
}
template<typename T>
class MultiClientBuffer
{
public:
MultiClientBuffer() {}
~MultiClientBuffer() {}
void addChild(T* vb)
{
mChildren.push_back(vb);
}
T* getChild(size_t index)
{
assert(index < mChildren.size());
return mChildren[index];
}
protected:
std::vector<T*> mChildren;
};
class MultiClientVertexBuffer : public nvidia::apex::UserRenderVertexBuffer, public MultiClientBuffer<nvidia::apex::UserRenderVertexBuffer>
{
public:
MultiClientVertexBuffer() {}
~MultiClientVertexBuffer() {}
virtual void writeBuffer(const nvidia::apex::RenderVertexBufferData& data, unsigned int firstVertex, unsigned int numVertices)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->writeBuffer(data, firstVertex, numVertices);
}
}
};
nvidia::apex::UserRenderVertexBuffer* MultiClientRenderResourceManager::createVertexBuffer(const nvidia::apex::UserRenderVertexBufferDesc& desc)
{
MultiClientVertexBuffer* vb = new MultiClientVertexBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
vb->addChild(mChildren[i].rrm->createVertexBuffer(desc));
}
return vb;
}
void MultiClientRenderResourceManager::releaseVertexBuffer(nvidia::apex::UserRenderVertexBuffer& buffer)
{
MultiClientVertexBuffer* vb = static_cast<MultiClientVertexBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderVertexBuffer* childVb = vb->getChild(i);
mChildren[i].rrm->releaseVertexBuffer(*childVb);
}
delete vb;
}
class MultiClientIndexBuffer : public nvidia::apex::UserRenderIndexBuffer, public MultiClientBuffer<nvidia::apex::UserRenderIndexBuffer>
{
public:
MultiClientIndexBuffer() {}
~MultiClientIndexBuffer() {}
virtual void writeBuffer(const void* srcData, unsigned int srcStride, unsigned int firstDestElement, unsigned int numElements)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->writeBuffer(srcData, srcStride, firstDestElement, numElements);
}
}
};
nvidia::apex::UserRenderIndexBuffer* MultiClientRenderResourceManager::createIndexBuffer(const nvidia::apex::UserRenderIndexBufferDesc& desc)
{
MultiClientIndexBuffer* ib = new MultiClientIndexBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
ib->addChild(mChildren[i].rrm->createIndexBuffer(desc));
}
return ib;
}
void MultiClientRenderResourceManager::releaseIndexBuffer(nvidia::apex::UserRenderIndexBuffer& buffer)
{
MultiClientIndexBuffer* ib = static_cast<MultiClientIndexBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderIndexBuffer* childIb = ib->getChild(i);
mChildren[i].rrm->releaseIndexBuffer(*childIb);
}
delete ib;
}
class MultiClientBoneBuffer : public nvidia::apex::UserRenderBoneBuffer, public MultiClientBuffer<nvidia::apex::UserRenderBoneBuffer>
{
public:
MultiClientBoneBuffer() {}
~MultiClientBoneBuffer() {}
virtual void writeBuffer(const nvidia::apex::RenderBoneBufferData& data, unsigned int firstBone, unsigned int numBones)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->writeBuffer(data, firstBone, numBones);
}
}
};
nvidia::apex::UserRenderBoneBuffer* MultiClientRenderResourceManager::createBoneBuffer(const nvidia::apex::UserRenderBoneBufferDesc& desc)
{
MultiClientBoneBuffer* bb = new MultiClientBoneBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
bb->addChild(mChildren[i].rrm->createBoneBuffer(desc));
}
return bb;
}
void MultiClientRenderResourceManager::releaseBoneBuffer(nvidia::apex::UserRenderBoneBuffer& buffer)
{
MultiClientBoneBuffer* bb = static_cast<MultiClientBoneBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderBoneBuffer* childBb = bb->getChild(i);
mChildren[i].rrm->releaseBoneBuffer(*childBb);
}
delete bb;
}
class MultiClientInstanceBuffer : public nvidia::apex::UserRenderInstanceBuffer, public MultiClientBuffer<nvidia::apex::UserRenderInstanceBuffer>
{
public:
MultiClientInstanceBuffer() {}
~MultiClientInstanceBuffer() {}
virtual void writeBuffer(const void* data, unsigned int firstInstance, unsigned int numInstances)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->writeBuffer(data, firstInstance, numInstances);
}
}
};
nvidia::apex::UserRenderInstanceBuffer* MultiClientRenderResourceManager::createInstanceBuffer(const nvidia::apex::UserRenderInstanceBufferDesc& desc)
{
MultiClientInstanceBuffer* ib = new MultiClientInstanceBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
ib->addChild(mChildren[i].rrm->createInstanceBuffer(desc));
}
return ib;
}
void MultiClientRenderResourceManager::releaseInstanceBuffer(nvidia::apex::UserRenderInstanceBuffer& buffer)
{
MultiClientInstanceBuffer* ib = static_cast<MultiClientInstanceBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderInstanceBuffer* childIb = ib->getChild(i);
mChildren[i].rrm->releaseInstanceBuffer(*childIb);
}
delete ib;
}
class MultiClientSpriteBuffer : public nvidia::apex::UserRenderSpriteBuffer, public MultiClientBuffer<nvidia::apex::UserRenderSpriteBuffer>
{
public:
MultiClientSpriteBuffer() {}
~MultiClientSpriteBuffer() {}
virtual void writeBuffer(const void* data, unsigned int firstSprite, unsigned int numSprites)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->writeBuffer(data, firstSprite, numSprites);
}
}
};
nvidia::apex::UserRenderSpriteBuffer* MultiClientRenderResourceManager::createSpriteBuffer(const nvidia::apex::UserRenderSpriteBufferDesc& desc)
{
MultiClientSpriteBuffer* sb = new MultiClientSpriteBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
sb->addChild(mChildren[i].rrm->createSpriteBuffer(desc));
}
return sb;
}
void MultiClientRenderResourceManager::releaseSpriteBuffer(nvidia::apex::UserRenderSpriteBuffer& buffer)
{
MultiClientSpriteBuffer* sb = static_cast<MultiClientSpriteBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderSpriteBuffer* childSb = sb->getChild(i);
mChildren[i].rrm->releaseSpriteBuffer(*childSb);
}
delete sb;
}
class MultiClientSurfaceBuffer : public nvidia::apex::UserRenderSurfaceBuffer, public MultiClientBuffer<nvidia::apex::UserRenderSurfaceBuffer>
{
public:
MultiClientSurfaceBuffer() {}
~MultiClientSurfaceBuffer() {}
virtual void writeBuffer(const void* /*srcData*/,
uint32_t /*srcPitch*/,
uint32_t /*srcHeight*/,
uint32_t /*dstX*/,
uint32_t /*dstY*/,
uint32_t /*dstZ*/,
uint32_t /*width*/,
uint32_t /*height*/,
uint32_t /*depth*/)
{
//for (size_t i = 0; i < mChildren.size(); i++)
//{
// mChildren[i]->writeBuffer(data, firstSprite, numSprites);
//}
}
};
nvidia::apex::UserRenderSurfaceBuffer* MultiClientRenderResourceManager::createSurfaceBuffer( const nvidia::apex::UserRenderSurfaceBufferDesc &desc )
{
MultiClientSurfaceBuffer* sb = new MultiClientSurfaceBuffer();
for (size_t i = 0; i < mChildren.size(); i++)
{
sb->addChild(mChildren[i].rrm->createSurfaceBuffer(desc));
}
return sb;
}
void MultiClientRenderResourceManager::releaseSurfaceBuffer( nvidia::apex::UserRenderSurfaceBuffer &buffer )
{
MultiClientSurfaceBuffer* sb = static_cast<MultiClientSurfaceBuffer*>(&buffer);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::UserRenderSurfaceBuffer* childSb = sb->getChild(i);
mChildren[i].rrm->releaseSurfaceBuffer(*childSb);
}
delete sb;
}
class MultiClientRenderResource : public nvidia::apex::UserRenderResource
{
public:
MultiClientRenderResource(const nvidia::apex::UserRenderResourceDesc& desc) : mDescriptor(desc)
{
assert(desc.numVertexBuffers > 0);
mVertexBufferOriginal.resize(desc.numVertexBuffers);
for (size_t i = 0; i < mVertexBufferOriginal.size(); i++)
{
mVertexBufferOriginal[i] = desc.vertexBuffers[i];
}
mDescriptor.vertexBuffers = &mVertexBufferOriginal[0];
}
~MultiClientRenderResource()
{
}
void addChild(nvidia::apex::UserRenderResourceManager* rrm)
{
nvidia::apex::UserRenderResourceDesc newDesc(mDescriptor);
std::vector<nvidia::apex::UserRenderVertexBuffer*> childVertexBuffers(mVertexBufferOriginal.size());
size_t nextChild = mChildren.size();
for (size_t i = 0; i < mVertexBufferOriginal.size(); i++)
{
MultiClientVertexBuffer* vb = static_cast<MultiClientVertexBuffer*>(mVertexBufferOriginal[i]);
childVertexBuffers[i] = vb->getChild(nextChild);
}
newDesc.vertexBuffers = &childVertexBuffers[0];
if (mDescriptor.indexBuffer != NULL)
{
newDesc.indexBuffer = static_cast<MultiClientIndexBuffer*>(mDescriptor.indexBuffer)->getChild(nextChild);
}
if (mDescriptor.boneBuffer != NULL)
{
newDesc.boneBuffer = static_cast<MultiClientBoneBuffer*>(mDescriptor.boneBuffer)->getChild(nextChild);
}
if (mDescriptor.spriteBuffer != NULL)
{
newDesc.spriteBuffer = static_cast<MultiClientSpriteBuffer*>(mDescriptor.spriteBuffer)->getChild(nextChild);
}
if (rrm != NULL)
{
mChildren.push_back(rrm->createResource(newDesc));
}
}
nvidia::apex::UserRenderResource* getChild(size_t index)
{
assert(index < mChildren.size());
return mChildren[index];
}
void setVertexBufferRange(unsigned int firstVertex, unsigned int numVerts)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setVertexBufferRange(firstVertex, numVerts);
}
mDescriptor.firstVertex = firstVertex;
mDescriptor.numVerts = numVerts;
}
void setIndexBufferRange(unsigned int firstIndex, unsigned int numIndices)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setIndexBufferRange(firstIndex, numIndices);
}
mDescriptor.firstIndex = firstIndex;
mDescriptor.numIndices = numIndices;
}
void setBoneBufferRange(unsigned int firstBone, unsigned int numBones)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setBoneBufferRange(firstBone, numBones);
}
mDescriptor.firstBone = firstBone;
mDescriptor.numBones = numBones;
}
void setInstanceBufferRange(unsigned int firstInstance, unsigned int numInstances)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setInstanceBufferRange(firstInstance, numInstances);
}
mDescriptor.firstInstance = firstInstance;
mDescriptor.numInstances = numInstances;
}
void setSpriteBufferRange(unsigned int firstSprite, unsigned int numSprites)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setSpriteBufferRange(firstSprite, numSprites);
}
mDescriptor.firstSprite = firstSprite;
mDescriptor.numSprites = numSprites;
}
void setMaterial(void* material)
{
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i]->setMaterial(material);
}
mDescriptor.material = material;
}
unsigned int getNbVertexBuffers() const
{
return mDescriptor.numVertexBuffers;
}
nvidia::apex::UserRenderVertexBuffer* getVertexBuffer(unsigned int index) const
{
return mDescriptor.vertexBuffers[index];
}
nvidia::apex::UserRenderIndexBuffer* getIndexBuffer() const
{
return mDescriptor.indexBuffer;
}
nvidia::apex::UserRenderBoneBuffer* getBoneBuffer() const
{
return mDescriptor.boneBuffer;
}
nvidia::apex::UserRenderInstanceBuffer* getInstanceBuffer() const
{
return mDescriptor.instanceBuffer;
}
nvidia::apex::UserRenderSpriteBuffer* getSpriteBuffer() const
{
return mDescriptor.spriteBuffer;
}
protected:
std::vector<nvidia::apex::UserRenderVertexBuffer*> mVertexBufferOriginal;
std::vector<nvidia::apex::UserRenderResource*> mChildren;
nvidia::apex::UserRenderResourceDesc mDescriptor;
};
nvidia::apex::UserRenderResource* MultiClientRenderResourceManager::createResource(const nvidia::apex::UserRenderResourceDesc& desc)
{
MultiClientRenderResource* rr = new MultiClientRenderResource(desc);
for (size_t i = 0; i < mChildren.size(); i++)
{
rr->addChild(mChildren[i].rrm);
}
return rr;
}
void MultiClientRenderResourceManager::releaseResource(nvidia::apex::UserRenderResource& resource)
{
MultiClientRenderResource* rr = static_cast<MultiClientRenderResource*>(&resource);
for (size_t i = 0; i < mChildren.size(); i++)
{
mChildren[i].rrm->releaseResource(*rr->getChild(i));
}
delete rr;
}
unsigned int MultiClientRenderResourceManager::getMaxBonesForMaterial(void* material)
{
unsigned int smallestMax = 10000;
for (size_t i = 0; i < mChildren.size(); i++)
{
unsigned int childMax = mChildren[i].rrm->getMaxBonesForMaterial(material);
if (childMax > 0)
{
smallestMax = std::min(smallestMax, childMax);
}
}
return smallestMax;
}
void MultiClientUserRenderer::addChild(nvidia::apex::UserRenderer* child)
{
mChildren.push_back(child);
}
void MultiClientUserRenderer::renderResource(const nvidia::apex::RenderContext& context)
{
MultiClientRenderResource* rr = static_cast<MultiClientRenderResource*>(context.renderResource);
for (size_t i = 0; i < mChildren.size(); i++)
{
nvidia::apex::RenderContext newContext(context);
newContext.renderResource = rr->getChild(i);
mChildren[i]->renderResource(newContext);
}
}
|
{
"pile_set_name": "Github"
}
|
name global
examples Some examples of search terms are 'FOXA2' 'HOXA1' and 'MAP kinase.'
|
{
"pile_set_name": "Github"
}
|
use strict;
use warnings;
package DDGC::Util::Markup;
# ABSTRACT: BBCode, Markdown and HTML renderer for comments and blog posts.
use Text::Markdown;
use Text::Xslate;
use HTML::TreeBuilder::LibXML;
use Hash::Merge::Simple qw/ merge /;
use URI::Escape;
use URI;
use Parse::BBCode;
use String::Util 'trim';
use Moo;
has xslate => (
is => 'ro',
lazy => 1,
builder => '_build_xslate',
);
sub _build_xslate {
Text::Xslate->new(
path => 'views',
);
}
has image_proxy_url => (
is => 'ro',
lazy => 1,
builder => '_build_image_proxy_url',
);
sub _build_image_proxy_url {
'https://images.duckduckgo.com/iu/?u=%s&f=1'
}
has image_proxy_base => (
is => 'ro',
lazy => 1,
builder => '_build_image_proxy_base',
);
sub _build_image_proxy_base {
my ( $self ) = @_;
( my $image_proxy_base = $self->image_proxy_url ) =~
s{(https?://[A-Za-z.]+).*}{$1};
return $image_proxy_base;
}
sub _canonical_uri {
my ( $self, $uri ) = @_;
URI->new($uri)->canonical =~ s/'/%27/gr;
}
has opts => (
is => 'ro',
lazy => 1,
builder => '_build_opts',
);
sub _build_opts {
+{
proxify_images => 1,
highlight_code => 1,
links_new_window => 0,
plain_bbcode => 0,
}
}
has bbcode_tags => (
is => 'ro',
lazy => 1,
builder => '_build_bbcode_tags',
);
sub _build_bbcode_tags {
my ( $self ) = @_;
my $tags;
$tags->{code} = {
parse => 0,
class => 'block',
code => sub { $self->_bbcode_code_block( @_ ) },
};
$tags->{url} = {
code => sub { $self->_bbcode_url( @_ ) },
parse => 0,
class => 'url',
};
return $tags;
}
sub _bbcode_code_block {
my ( $self, $parser, $lang, $content ) = @_;
$lang ||= 'perl';
my $langname = ucfirst($lang);
$self->xslate->render(
'includes/bbcode/code.tx', {
lang => $lang,
content => $$content,
langname => $langname,
}
);
}
# Support for [url href=...]
sub _bbcode_url {
my ( $self, $parser, $attr, $content, $fallback, $tag ) = @_;
my $url = $attr;
$url ||= ( map {
( $_->[0] && $_->[0] eq 'href' ) ? $_->[1] : () ;
} @{ $tag->get_attr } )[0];
$url or return '';
$self->xslate->render(
'includes/bbcode/url.tx', {
url => $self->_canonical_uri($url),
content => $$content || $url,
}
);
}
sub _ddg_bbcode {
my ( $self, $opts ) = @_;
my %defaults = Parse::BBCode::HTML->defaults;
Parse::BBCode->new({
tags => {
%defaults,
( $self->bbcode_tags )
? %{ $self->bbcode_tags }
: (),
},
url_finder => {
max_length => 80,
format => '<a href="%s" rel="nofollow">%s</a>',
},
close_open_tags => 1,
attribute_quote => q/'"/,
});
}
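# Illustrative usage (hypothetical caller code; Moo supplies the default constructor):
#   my $markup = DDGC::Util::Markup->new;
#   my $html   = $markup->bbcode( "[b]bold[/b] and [url=http://example.com]a link[/url]" );
# bbcode() below renders through Parse::BBCode and then post-processes the HTML,
# with proxify_images forced on so embedded images go through the image proxy.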
sub bbcode {
my ( $self, $string, $opts ) = @_;
$opts = merge $self->opts, $opts;
my $bbcode;
if ( $opts->{plain_bbcode} ) {
$bbcode = Parse::BBCode->new();
}
else {
$bbcode = $self->_ddg_bbcode( $opts );
}
my $html = $bbcode->render( $string );
return $self->html( $html, {
%{ $opts },
proxify_images => 1,
}
);
}
sub markdown {
my ( $self, $string, $opts ) = @_;
$opts = merge $self->opts, $opts;
my $markdown = Text::Markdown->new;
my $html = $markdown->markdown( $string );
return $self->html( $html, {
%{ $opts },
proxify_images => 1,
}
);
}
sub html {
my ( $self, $string, $opts ) = @_;
$opts = merge $self->opts, $opts;
my $tree = HTML::TreeBuilder::LibXML->new;
$tree->parse( $string );
$tree->eof;
if ( $opts->{proxify_images} ) {
for my $node ( $tree->findnodes('//img') ) {
my $src = trim($node->attr('src'));
if (index(lc($src), $self->image_proxy_base) != 0 && index(lc($src), 'http') == 0) {
$node->attr(
'src',
sprintf($self->image_proxy_url, uri_escape($src))
);
}
}
}
if ( $opts->{links_new_window} ) {
for my $node ( $tree->findnodes('//a') ) {
next if (!$node->attr('href'));
$node->attr('target', '_blank');
}
}
my $guts = $tree->guts;
if ($guts) {
# as_HTML reliably closes empty tags supplied by BBCode and other
# generators, e.g. [b][/b] -> <b></b>
# as_XML would return a self-closed tag, <b /> which is interpreted
# by renderer as <b>, so we have an open tag hanging.
# as_HTML does *not* currently understand non-nested tags (like img,
# hr, br) and generates closing tags for these. Only </br> is
# actually problematic in our case so we ditch these by hand.
# Is this worth it for a fast performing HTML munger?
return $guts->as_HTML =~ s{</br>}{}gmr;
}
return ' ';
}
1;
|
{
"pile_set_name": "Github"
}
|
//===- Main.cpp - Top-Level TableGen implementation -----------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// TableGen is a tool which can be used to build up a description of something,
// then invoke one or more "tablegen backends" to emit information about the
// description in some predefined format. In practice, this is used by the LLVM
// code generators to automate generation of a code generator through a
// high-level description of the target.
//
//===----------------------------------------------------------------------===//
#include "llvm/TableGen/Main.h"
#include "TGParser.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/ToolOutputFile.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include <algorithm>
#include <cstdio>
#include <system_error>
using namespace llvm;
static cl::opt<std::string>
OutputFilename("o", cl::desc("Output filename"), cl::value_desc("filename"),
cl::init("-"));
static cl::opt<std::string>
DependFilename("d",
cl::desc("Dependency filename"),
cl::value_desc("filename"),
cl::init(""));
static cl::opt<std::string>
InputFilename(cl::Positional, cl::desc("<input file>"), cl::init("-"));
static cl::list<std::string>
IncludeDirs("I", cl::desc("Directory of include files"),
cl::value_desc("directory"), cl::Prefix);
static cl::list<std::string>
MacroNames("D", cl::desc("Name of the macro to be defined"),
cl::value_desc("macro name"), cl::Prefix);
static cl::opt<bool>
WriteIfChanged("write-if-changed", cl::desc("Only write output if it changed"));
static int reportError(const char *ProgName, Twine Msg) {
errs() << ProgName << ": " << Msg;
errs().flush();
return 1;
}
/// Create a dependency file for the `-d` option.
///
/// This functionality is really only for the benefit of the build system.
/// It is similar to GCC's `-M*` family of options.
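///
/// The emitted file uses make-style dependency syntax, for example
/// (illustrative file names):
///
///   GenFoo.inc: Foo.td include/llvm/Target/Target.td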
static int createDependencyFile(const TGParser &Parser, const char *argv0) {
if (OutputFilename == "-")
return reportError(argv0, "the option -d must be used together with -o\n");
std::error_code EC;
ToolOutputFile DepOut(DependFilename, EC, sys::fs::OF_None);
if (EC)
return reportError(argv0, "error opening " + DependFilename + ":" +
EC.message() + "\n");
DepOut.os() << OutputFilename << ":";
for (const auto &Dep : Parser.getDependencies()) {
DepOut.os() << ' ' << Dep;
}
DepOut.os() << "\n";
DepOut.keep();
return 0;
}
int llvm::TableGenMain(char *argv0, TableGenMainFn *MainFn) {
RecordKeeper Records;
// Parse the input file.
ErrorOr<std::unique_ptr<MemoryBuffer>> FileOrErr =
MemoryBuffer::getFileOrSTDIN(InputFilename);
if (std::error_code EC = FileOrErr.getError())
return reportError(argv0, "Could not open input file '" + InputFilename +
"': " + EC.message() + "\n");
// Tell SrcMgr about this buffer, which is what TGParser will pick up.
SrcMgr.AddNewSourceBuffer(std::move(*FileOrErr), SMLoc());
// Record the location of the include directory so that the lexer can find
// it later.
SrcMgr.setIncludeDirs(IncludeDirs);
TGParser Parser(SrcMgr, MacroNames, Records);
if (Parser.ParseFile())
return 1;
// Write output to memory.
std::string OutString;
raw_string_ostream Out(OutString);
if (MainFn(Out, Records))
return 1;
// Always write the depfile, even if the main output hasn't changed.
// If it's missing, Ninja considers the output dirty. If this was below
// the early exit below and someone deleted the .inc.d file but not the .inc
// file, tablegen would never write the depfile.
if (!DependFilename.empty()) {
if (int Ret = createDependencyFile(Parser, argv0))
return Ret;
}
if (WriteIfChanged) {
// Only updates the real output file if there are any differences.
// This prevents recompilation of all the files depending on it if there
// aren't any.
if (auto ExistingOrErr = MemoryBuffer::getFile(OutputFilename))
if (std::move(ExistingOrErr.get())->getBuffer() == Out.str())
return 0;
}
std::error_code EC;
ToolOutputFile OutFile(OutputFilename, EC, sys::fs::OF_None);
if (EC)
return reportError(argv0, "error opening " + OutputFilename + ":" +
EC.message() + "\n");
OutFile.os() << Out.str();
if (ErrorsPrinted > 0)
return reportError(argv0, Twine(ErrorsPrinted) + " errors.\n");
// Declare success.
OutFile.keep();
return 0;
}
|
{
"pile_set_name": "Github"
}
|
// This file is part of ScaViSLAM.
//
// Copyright 2011 Hauke Strasdat (Imperial College London)
//
// ScaViSLAM is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// any later version.
//
// ScaViSLAM is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with ScaViSLAM. If not, see <http://www.gnu.org/licenses/>.
#ifndef SCAVISLAM_TRANSFORMATIONS_H
#define SCAVISLAM_TRANSFORMATIONS_H
#include <list>
#include <sophus/se3.h>
#ifdef MONO
#include <sophus/sim3.h>
#endif
#include <visiontools/linear_camera.h>
#include "maths_utils.h"
#include "stereo_camera.h"
namespace ScaViSLAM
{
using namespace Eigen;
using namespace Sophus;
using namespace VisionTools;
//TODO: clean, hide implementation and remove stuff not needed here
struct AnchoredPoint3d
{
AnchoredPoint3d(const Vector3d & p_a, int frame_id)
: p_a(p_a), frame_id(frame_id)
{
}
Vector3d p_a;
int frame_id;
};
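// The following helper returns the Jacobian of the pinhole projection
//   u = f*x/z,  v = f*y/z
// with respect to the camera-frame point (x, y, z):
//   du/dx = f/z,  du/dz = -f*x/z^2,  dv/dy = f/z,  dv/dz = -f*y/z^2,
// which is exactly the 2x3 matrix assembled in d_proj_d_y.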
inline Matrix<double,2,3>
d_proj_d_y(const double & f, const Vector3d & xyz)
{
double z_sq = xyz[2]*xyz[2];
Matrix<double,2,3> J;
J << f/xyz[2], 0, -(f*xyz[0])/z_sq,
0, f/xyz[2], -(f*xyz[1])/z_sq;
return J;
}
inline Matrix3d
d_stereoproj_d_y(const double & f, double b, const Vector3d & xyz)
{
double z_sq = xyz[2]*xyz[2];
Matrix3d J;
J << f/xyz[2], 0, -(f*xyz[0])/z_sq,
0, f/xyz[2], -(f*xyz[1])/z_sq,
f/xyz[2], 0, -(f*(xyz[0]-b))/z_sq;
return J;
}
inline Matrix<double,3,6>
d_expy_d_y(const Vector3d & y)
{
Matrix<double,3,6> J;
J.topLeftCorner<3,3>().setIdentity();
J.bottomRightCorner<3,3>() = -SO3::hat(y);
return J;
}
inline Matrix3d
d_Tinvpsi_d_psi(const SE3 & T, const Vector3d & psi)
{
Matrix3d R = T.rotation_matrix();
Vector3d x = invert_depth(psi);
Vector3d r1 = R.col(0);
Vector3d r2 = R.col(1);
Matrix3d J;
J.col(0) = r1;
J.col(1) = r2;
J.col(2) = -R*x;
J*=1./psi.z();
return J;
}
inline void
point_jac_xyz2uv(const Vector3d & xyz,
const Matrix3d & R,
const double & focal_length,
Matrix<double,2,3> & point_jac)
{
double x = xyz[0];
double y = xyz[1];
double z = xyz[2];
Matrix<double,2,3> tmp;
tmp(0,0) = focal_length;
tmp(0,1) = 0;
tmp(0,2) = -x/z*focal_length;
tmp(1,0) = 0;
tmp(1,1) = focal_length;
tmp(1,2) = -y/z*focal_length;
point_jac = -1./z * tmp * R;
}
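// frame_jac_xyz2uv below fills a 2x6 Jacobian relating a small se(3)
// perturbation of the camera pose to the change in the projected image point
// (up to the sign convention used by the callers); the translational part
// occupies columns 0-2 and the rotational part columns 3-5.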
inline void
frame_jac_xyz2uv(const Vector3d & xyz,
const double & focal_length,
Matrix<double,2,6> & frame_jac)
{
double x = xyz[0];
double y = xyz[1];
double z = xyz[2];
double z_2 = z*z;
frame_jac(0,0) = -1./z *focal_length;
frame_jac(0,1) = 0;
frame_jac(0,2) = x/z_2 *focal_length;
frame_jac(0,3) = x*y/z_2 * focal_length;
frame_jac(0,4) = -(1+(x*x/z_2)) *focal_length;
frame_jac(0,5) = y/z *focal_length;
frame_jac(1,0) = 0;
frame_jac(1,1) = -1./z *focal_length;
frame_jac(1,2) = y/z_2 *focal_length;
frame_jac(1,3) = (1+y*y/z_2) *focal_length;
frame_jac(1,4) = -x*y/z_2 *focal_length;
frame_jac(1,5) = -x/z *focal_length;
}
inline void
frame_jac_xyz2uvu(const Vector3d & xyz,
const Vector2d & focal_length,
Matrix<double,3,6> & frame_jac)
{
double x = xyz[0];
double y = xyz[1];
double z = xyz[2];
double z_2 = z*z;
frame_jac(0,0) = -1./z *focal_length(0);
frame_jac(0,1) = 0;
frame_jac(0,2) = x/z_2 *focal_length(0);
frame_jac(0,3) = x*y/z_2 * focal_length(0);
frame_jac(0,4) = -(1+(x*x/z_2)) *focal_length(0);
frame_jac(0,5) = y/z *focal_length(0);
frame_jac(1,0) = 0;
frame_jac(1,1) = -1./z *focal_length(1);
frame_jac(1,2) = y/z_2 *focal_length(1);
frame_jac(1,3) = (1+y*y/z_2) *focal_length(1);
frame_jac(1,4) = -x*y/z_2 *focal_length(1);
frame_jac(1,5) = -x/z *focal_length(1);
}
// /**
// * Abstract prediction class
// * Frame: How is the frame/pose represented? (e.g. SE3)
// * FrameDoF: How many DoF does the pose/frame have? (e.g. 6 DoF, that is
// * 3 DoF translation, 3 DoF rotation)
// * PointParNum: number of parameters to represent a point
// * (4 for a 3D homogeneous point)
// * PointDoF: DoF of a point (3 DoF for a 3D homogeneous point)
// * ObsDim: dimensions of observation (2 dim for (u,v) image
// * measurement)
// */
template <typename Frame,
int FrameDoF,
typename Point,
int PointDoF,
int ObsDim>
class AbstractPrediction
{
public:
/** Map a world point x into the camera/sensor coordinate frame T
* and create an observation*/
virtual Matrix<double,ObsDim,1>
map (const Frame & T,
const Point & x) const = 0;
virtual Matrix<double,ObsDim,1>
map_n_bothJac (const Frame & T,
const Point & x,
Matrix<double,ObsDim,FrameDoF> & frame_jac,
Matrix<double,ObsDim,PointDoF> & point_jac) const
{
frame_jac = frameJac(T,x);
point_jac = pointJac(T,x);
return map(T,x);
}
virtual Matrix<double,ObsDim,1>
map_n_frameJac (const Frame & T,
const Point & x,
Matrix<double,ObsDim,FrameDoF> & frame_jac) const
{
frame_jac = frameJac(T,x);
return map(T,x);
}
virtual Matrix<double,ObsDim,1>
map_n_pointJac (const Frame & T,
const Point & x,
Matrix<double,ObsDim,PointDoF> & point_jac) const
{
point_jac = pointJac(T,x);
return map(T,x);
}
/** Jacobian wrt. frame: use numerical Jacobian as default */
virtual Matrix<double,ObsDim,FrameDoF>
frameJac (const Frame & T,
const Point & x) const
{
double h = 0.000000000001;
Matrix<double,ObsDim,FrameDoF> J_pose
= Matrix<double,ObsDim,FrameDoF>::Zero();
Matrix<double,ObsDim,1> fun = -map(T,x);
for (unsigned int i=0; i<FrameDoF; ++i)
{
Matrix<double,FrameDoF,1> eps
= Matrix<double,FrameDoF,1>::Zero();
eps[i] = h;
J_pose.col(i) = (-map(add(T,eps),x) -fun)/h ;
}
return J_pose;
}
/** Jacobian wrt. point: use numerical Jacobian as default */
virtual Matrix<double,ObsDim,PointDoF>
pointJac (const Frame & T,
const Point & x) const
{
double h = 0.000000000001;
Matrix<double,ObsDim,PointDoF> J_x
= Matrix<double,ObsDim,PointDoF>::Zero();
Matrix<double,ObsDim,1> fun = -map(T,x);
for (unsigned int i=0; i<PointDoF; ++i)
{
Matrix<double,PointDoF,1> eps
= Matrix<double,PointDoF,1>::Zero();
eps[i] = h;
J_x.col(i) = (-map(T,add(x,eps)) -fun)/h ;
}
return J_x;
}
/** Add an incremental update delta to pose/frame T*/
virtual Frame
add (const Frame & T,
const Matrix<double,FrameDoF,1> & delta) const = 0;
/** Add an incremental update delta to point x*/
virtual Point
add (const Point & x,
const Matrix<double,PointDoF,1> & delta) const = 0;
};
template <typename Frame,
int FrameDoF,
typename Point,
int PointDoF,
int ObsDim>
class AbstractAnchoredPrediction
{
public:
/** Map a world point x into the camera/sensor coordinate frame T
* and create an observation*/
virtual Matrix<double,ObsDim,1>
map (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a) const = 0;
virtual Matrix<double,ObsDim,1>
map_n_bothJac (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a,
Matrix<double,ObsDim,FrameDoF> & frame_jac,
Matrix<double,ObsDim,PointDoF> & point_jac) const
{
frame_jac = frameJac(T_cw,A_wa,x_a);
point_jac = pointJac(T_cw,A_wa,x_a);
return map(T_cw,A_wa,x_a);
}
virtual Matrix<double,ObsDim,1>
map_n_allJac (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a,
Matrix<double,ObsDim,FrameDoF> & frame_jac,
Matrix<double,ObsDim,FrameDoF> & anchor_jac,
Matrix<double,ObsDim,PointDoF> & point_jac) const
{
frame_jac = frameJac(T_cw,A_wa,x_a);
anchor_jac = anchorJac(T_cw,A_wa,x_a);
point_jac = pointJac(T_cw,A_wa,x_a);
return map(T_cw,A_wa,x_a);
}
/** Jacobian wrt. frame: use numerical Jacobian as default */
virtual Matrix<double,ObsDim,FrameDoF>
frameJac (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a) const
{
double h = 0.000000000001;
Matrix<double,ObsDim,FrameDoF> J_pose
= Matrix<double,ObsDim,FrameDoF>::Zero();
Matrix<double,ObsDim,1> fun = -map(T_cw,A_wa,x_a);
for (unsigned int i=0; i<FrameDoF; ++i)
{
Matrix<double,FrameDoF,1> eps
= Matrix<double,FrameDoF,1>::Zero();
eps[i] = h;
J_pose.col(i) = (-map(add(T_cw,eps),A_wa,x_a) -fun)/h ;
}
return J_pose;
}
/** Jacobian wrt. anchor: use numerical Jacobian as default */
virtual Matrix<double,ObsDim,FrameDoF>
anchorJac (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a) const
{
double h = 0.000000000001;
Matrix<double,ObsDim,FrameDoF> J_pose
= Matrix<double,ObsDim,FrameDoF>::Zero();
Matrix<double,ObsDim,1> fun = -map(T_cw,A_wa,x_a);
for (unsigned int i=0; i<FrameDoF; ++i)
{
Matrix<double,FrameDoF,1> eps
= Matrix<double,FrameDoF,1>::Zero();
eps[i] = h;
J_pose.col(i) = (-map(T_cw,add(A_wa,eps),x_a) -fun)/h ;
}
return J_pose;
}
/** Jacobian wrt. point: use numerical Jacobian as default */
virtual Matrix<double,ObsDim,PointDoF>
pointJac (const Frame & T_cw,
const Frame & A_wa,
const Point & x_a) const
{
double h = 0.000000000001;
Matrix<double,ObsDim,PointDoF> J_x
= Matrix<double,ObsDim,PointDoF>::Zero();
Matrix<double,ObsDim,1> fun = -map(T_cw,A_wa,x_a);
for (unsigned int i=0; i<PointDoF; ++i)
{
Matrix<double,PointDoF,1> eps
= Matrix<double,PointDoF,1>::Zero();
eps[i] = h;
J_x.col(i) = (-map(T_cw,A_wa,add(x_a,eps)) -fun)/h ;
}
return J_x;
}
/** Add an incremental update delta to pose/frame T*/
virtual Frame
add (const Frame & T,
const Matrix<double,FrameDoF,1> & delta
) const = 0;
/** Add an incremental update delta to point x*/
virtual Point
add (const Point & x,
const Matrix<double,PointDoF,1> & delta
) const = 0;
};
/** abstract prediction class depending on
* 3D rigid body transformations SE3 */
template <int PointParNum, int PointDoF, int ObsDim>
class SE3_AbstractPoint
: public AbstractPrediction
<SE3,6,Matrix<double, PointParNum,1>,PointDoF,ObsDim>
{
public:
SE3 add(const SE3 &T, const Matrix<double,6,1> & delta) const
{
return SE3::exp(delta)*T;
}
};
class SE3XYZ_STEREO: public SE3_AbstractPoint<3, 3, 3>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3XYZ_STEREO (const StereoCamera & cam)
: _cam(cam)
{
}
Matrix<double,3,6>
frameJac(const SE3 & se3,
const Vector3d & xyz)const
{
const Vector3d & xyz_trans = se3*xyz;
double x = xyz_trans[0];
double y = xyz_trans[1];
double z = xyz_trans[2];
double f = _cam.focal_length();
double one_b_z = 1./z;
double one_b_z_sq = 1./(z*z);
double A = -f*one_b_z;
double B = -f*one_b_z;
double C = f*x*one_b_z_sq;
double D = f*y*one_b_z_sq;
double E = f*(x-_cam.baseline())*one_b_z_sq;
Matrix<double, 3, 6> jac;
jac << A, 0, C, y*C, z*A-x*C, -y*A,
0, B, D,-z*B+y*D, -x*D, x*B,
A, 0, E, y*E, z*A-x*E, -y*A;
return jac;
}
Vector3d map(const SE3 & T,
const Vector3d& xyz) const
{
return _cam.map_uvu(T*xyz);
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
private:
StereoCamera _cam;
};
#ifdef MONO
class Sim3XYZ : public AbstractPrediction<Sim3,6,Vector3d,3,2>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
Sim3XYZ(const LinearCamera & cam)
{
this->cam = cam;
}
inline Vector2d map(const Sim3 & T,
const Vector3d& x) const
{
return cam.map(project2d(T*x));
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
Sim3 add(const Sim3 &T, const Matrix<double,6,1> & delta) const
{
Matrix<double,7,1> delta7;
delta7.head<6>() = delta;
delta7[6] = 0;
return Sim3::exp(delta7)*T;
}
private:
LinearCamera cam;
};
class Sim3XYZ_STEREO : public AbstractPrediction<Sim3,7,Vector3d,3,3>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
Sim3XYZ_STEREO(const StereoCamera & cam)
{
this->cam = cam;
}
inline Vector3d map(const Sim3 & T,
const Vector3d& x) const
{
return cam.map_uvu(T*x);
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
Sim3 add(const Sim3 &T, const Matrix<double,7,1> & delta) const
{
return Sim3::exp(delta)*T;
}
private:
StereoCamera cam;
};
class AbsoluteOrient : public AbstractPrediction<Sim3,7,Vector3d,3,3>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
AbsoluteOrient()
{
}
inline Vector3d map(const Sim3 & T,
const Vector3d& x) const
{
return T*x;
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
Sim3 add(const Sim3 &T, const Matrix<double,7,1> & delta) const
{
return Sim3::exp(delta)*T;
}
};
#endif
/** 3D Euclidean point class */
class SE3XYZ: public SE3_AbstractPoint<3, 3, 2>{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3XYZ(const LinearCamera & cam)
{
this->cam = cam;
}
inline Vector2d map(const SE3 & T,
const Vector3d& x) const
{
return cam.map(project2d(T*x));
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
private:
LinearCamera cam;
};
/** 3D inverse depth point class*/
class SE3UVQ : public SE3_AbstractPoint<3, 3, 2>{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3UVQ ()
{
}
SE3UVQ (const LinearCamera & cam_pars)
{
this->cam = cam_pars;
}
inline Vector2d map(const SE3 & T,
const Vector3d& uvq_w) const
{
Vector3d xyz_w = invert_depth(uvq_w);
return cam.map(project2d(T*xyz_w));
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
private:
LinearCamera cam;
};
/** 3D inverse depth point class*/
class SE3AnchordUVQ : public AbstractAnchoredPrediction<SE3,6,Vector3d,3,2>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3AnchordUVQ ()
{
}
SE3AnchordUVQ (const LinearCamera & cam_pars)
{
this->cam = cam_pars;
}
inline Vector2d map(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d& uvq_a) const
{
Vector3d xyz_w = A_aw.inverse()*invert_depth(uvq_a);
return cam.map(project2d(T_cw*xyz_w));
}
Vector3d add(const Vector3d & point,
const Vector3d & delta) const
{
return point+delta;
}
Matrix<double,2,3>
pointJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d y = T_ca*invert_depth(psi_a);
Matrix<double,2,3> J1
= d_proj_d_y(cam.focal_length(),y);
Matrix3d J2 = d_Tinvpsi_d_psi(T_ca, psi_a);
return -J1*J2;
}
Matrix<double,2,6>
frameJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d y = T_ca*invert_depth(psi_a);
Matrix<double,2,3> J1 = d_proj_d_y(cam.focal_length(),y);
Matrix<double,3,6> J2 = d_expy_d_y(y);
return -J1*J2;
}
Matrix<double,2,6>
anchorJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d x = invert_depth(psi_a);
Vector3d y = T_ca*x;
Matrix<double,2,3> J1
= d_proj_d_y(cam.focal_length(),y);
Matrix<double,3,6> d_invexpx_dx
= -d_expy_d_y(x);
return -J1*T_ca.rotation_matrix()*d_invexpx_dx;
}
SE3 add(const SE3 &T, const Matrix<double,6,1> & delta) const
{
return SE3::exp(delta)*T;
}
private:
LinearCamera cam;
};
/** 3D inverse depth point class*/
class SE3NormUVQ : public AbstractPrediction<SE3,5,Vector3d,3,2>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3NormUVQ ()
{
}
SE3NormUVQ (const LinearCamera & cam_pars)
{
this->cam = cam_pars;
}
inline Vector2d map(const SE3 & T_cw,
const Vector3d& uvq_w) const
{
Vector3d xyz_w = invert_depth(uvq_w);
return cam.map(project2d(T_cw*xyz_w));
}
Vector3d add(const Vector3d & point,
const Vector3d & delta) const
{
return point+delta;
}
SE3 add(const SE3 &T, const Matrix<double,5,1> & delta) const
{
Vector6d delta6;
delta6[0] = delta[0];
delta6[1] = delta[1];
delta6[2] = 0;
delta6.tail<3>() = delta.tail<3>();
SE3 new_T = SE3::exp(delta6)*T;
double length = new_T.translation().norm();
assert(fabs(length)>0.00001);
new_T.translation() *= 1./length;
assert(fabs(new_T.translation().norm()-1) < 0.00001);
return new_T;
}
private:
LinearCamera cam;
};
/** 3D inverse depth point class*/
class SE3AnchordUVQ_STEREO
: public AbstractAnchoredPrediction<SE3,6,Vector3d,3,3>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3AnchordUVQ_STEREO ()
{
}
SE3AnchordUVQ_STEREO (const StereoCamera & cam_pars)
{
this->cam = cam_pars;
}
inline Vector3d map(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d& uvq_a) const
{
Vector3d xyz_w = A_aw.inverse()*invert_depth(uvq_a);
return cam.map_uvu(T_cw*xyz_w);
}
Matrix3d
pointJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d y = T_ca*invert_depth(psi_a);
Matrix3d J1
= d_stereoproj_d_y(cam.focal_length(),
cam.baseline(),
y);
Matrix3d J2
= d_Tinvpsi_d_psi(T_ca,
psi_a);
return -J1*J2;
}
Matrix<double,3,6>
frameJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d y = T_ca*invert_depth(psi_a);
Matrix3d J1
= d_stereoproj_d_y(cam.focal_length(),
cam.baseline(),
y);
Matrix<double,3,6> J2
= d_expy_d_y(y);
return -J1*J2;
}
Matrix<double,3,6>
anchorJac(const SE3 & T_cw,
const SE3 & A_aw,
const Vector3d & psi_a) const
{
SE3 T_ca = T_cw*A_aw.inverse();
Vector3d x = invert_depth(psi_a);
Vector3d y = T_ca*x;
Matrix3d J1
= d_stereoproj_d_y(cam.focal_length(),
cam.baseline(),
y);
Matrix<double,3,6> d_invexpx_dx
= -d_expy_d_y(x);
return -J1*T_ca.rotation_matrix()*d_invexpx_dx;
}
Vector3d add(const Vector3d & point,
const Vector3d & delta) const
{
return point+delta;
}
SE3 add(const SE3 &T, const Matrix<double,6,1> & delta) const
{
return SE3::exp(delta)*T;
}
private:
StereoCamera cam;
};
/** 3D point class parameterized by a stereo (u,v,u') measurement */
class SE3UVU_STEREO : public SE3_AbstractPoint<3, 3, 3>{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3UVU_STEREO ()
{
}
SE3UVU_STEREO (const StereoCamera & cam)
{
this->cam = cam;
}
inline Vector3d map(const SE3 & T,
const Vector3d& uvu) const
{
Vector3d x = cam.unmap_uvu(uvu);
return cam.map_uvu(T*x);
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
private:
StereoCamera cam;
};
/** 3D inverse depth point class*/
class SE3UVQ_STEREO : public SE3_AbstractPoint<3, 3, 3>{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
SE3UVQ_STEREO ()
{
}
SE3UVQ_STEREO (const StereoCamera & cam)
{
this->cam = cam;
}
inline Vector3d map(const SE3 & T,
const Vector3d& uvq) const
{
Vector3d x = invert_depth(uvq);
return cam.map_uvu(T*x);
}
Vector3d add(const Vector3d & x,
const Vector3d & delta) const
{
return x+delta;
}
private:
StereoCamera cam;
};
/** observation class */
template <int ObsDim>
class IdObs
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
IdObs(){}
IdObs(int point_id, int frame_id, const Matrix<double,ObsDim,1> & obs)
: frame_id(frame_id), point_id(point_id), obs(obs)
{
}
int frame_id;
int point_id;
Matrix<double,ObsDim,1> obs;
};
/** observation class with inverse uncertainty*/
template <int ObsDim>
class IdObsLambda : public IdObs<ObsDim>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
IdObsLambda(){}
IdObsLambda(int point_id,
int frame_id,
const Matrix<double,ObsDim,1> & obs,
const Matrix<double,ObsDim,ObsDim> & lambda)
: IdObs<ObsDim>(point_id, frame_id, obs) , lambda(lambda)
{
}
Matrix<double,ObsDim,ObsDim> lambda;
};
}
#endif
|
{
"pile_set_name": "Github"
}
|
var addon = require('bindings')('hello');
console.log(addon.hello()); // 'world'
|
{
"pile_set_name": "Github"
}
|
#!/bin/bash -eux
mkdir -p ../../tmp/{object,html}
cd ../../tmp
exec ../venv/bin/python -m http.server 5001
|
{
"pile_set_name": "Github"
}
|
package main
import (
"fmt"
"time"
"go.uber.org/ratelimit"
)
func main() {
// Allow 100 requests per second, i.e. roughly one request every 10ms
rl := ratelimit.New(100)
prev := time.Now()
for i := 0; i < 10; i++ {
now := rl.Take()
fmt.Println(i, now.Sub(prev))
prev = now
}
}
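// Expected behaviour (illustrative, actual timings vary): after the first
// iteration the printed deltas settle at roughly 10ms, because the limiter
// spaces Take() calls to keep the rate at 100 per second.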
|
{
"pile_set_name": "Github"
}
|
// { dg-do run { target c++11 } }
// { dg-require-cstdint "" }
//
// 2010-03-16 Paolo Carlini <[email protected]>
//
// Copyright (C) 2010-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// 26.5.3.1 class template linear_congruential_engine [rand.eng.lcong]
#include <random>
#include <testsuite_hooks.h>
void
test01()
{
std::minstd_rand0 a;
std::minstd_rand0 b;
std::minstd_rand0 c(120);
VERIFY( a != c );
VERIFY( !(a != b) );
}
int main()
{
test01();
return 0;
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.opendistroforelasticsearch.alerting.action
import org.elasticsearch.test.ESTestCase
class GetMonitorActionTests : ESTestCase() {
fun `test get monitor action name`() {
assertNotNull(GetMonitorAction.INSTANCE.name())
assertEquals(GetMonitorAction.INSTANCE.name(), GetMonitorAction.NAME)
}
}
|
{
"pile_set_name": "Github"
}
|
/*!
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project.
*/
/* eslint-disable @typescript-eslint/no-var-requires */
// Bar Chart Example, captured from https://vega.github.io/vega/examples/bar-chart/
import { parseScene } from '@chart-parts/scenegraph'
const data = require('../resources/barley_trellis.json')
export const scenegraph = parseScene(data)
export const title = 'Barley Trellis'
export const dimensions = {
height: 1000,
width: 520,
origin: [30, 19] as [number, number],
}
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.cms.transform.v20190101;
import java.util.ArrayList;
import java.util.List;
import com.aliyuncs.cms.model.v20190101.DescribeEventRuleListResponse;
import com.aliyuncs.cms.model.v20190101.DescribeEventRuleListResponse.EventRule;
import com.aliyuncs.cms.model.v20190101.DescribeEventRuleListResponse.EventRule.EventPatternItem;
import com.aliyuncs.transform.UnmarshallerContext;
public class DescribeEventRuleListResponseUnmarshaller {
public static DescribeEventRuleListResponse unmarshall(DescribeEventRuleListResponse describeEventRuleListResponse, UnmarshallerContext _ctx) {
describeEventRuleListResponse.setRequestId(_ctx.stringValue("DescribeEventRuleListResponse.RequestId"));
describeEventRuleListResponse.setSuccess(_ctx.booleanValue("DescribeEventRuleListResponse.Success"));
describeEventRuleListResponse.setCode(_ctx.stringValue("DescribeEventRuleListResponse.Code"));
describeEventRuleListResponse.setMessage(_ctx.stringValue("DescribeEventRuleListResponse.Message"));
describeEventRuleListResponse.setTotal(_ctx.integerValue("DescribeEventRuleListResponse.Total"));
List<EventRule> eventRules = new ArrayList<EventRule>();
for (int i = 0; i < _ctx.lengthValue("DescribeEventRuleListResponse.EventRules.Length"); i++) {
EventRule eventRule = new EventRule();
eventRule.setName(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].Name"));
eventRule.setGroupId(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].GroupId"));
eventRule.setEventType(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventType"));
eventRule.setState(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].State"));
eventRule.setDescription(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].Description"));
List<EventPatternItem> eventPattern = new ArrayList<EventPatternItem>();
for (int j = 0; j < _ctx.lengthValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern.Length"); j++) {
EventPatternItem eventPatternItem = new EventPatternItem();
eventPatternItem.setProduct(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].Product"));
List<String> nameList = new ArrayList<String>();
for (int k = 0; k < _ctx.lengthValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].NameList.Length"); k++) {
nameList.add(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].NameList["+ k +"]"));
}
eventPatternItem.setNameList(nameList);
List<String> levelList = new ArrayList<String>();
for (int k = 0; k < _ctx.lengthValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].LevelList.Length"); k++) {
levelList.add(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].LevelList["+ k +"]"));
}
eventPatternItem.setLevelList(levelList);
List<String> eventTypeList = new ArrayList<String>();
for (int k = 0; k < _ctx.lengthValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].EventTypeList.Length"); k++) {
eventTypeList.add(_ctx.stringValue("DescribeEventRuleListResponse.EventRules["+ i +"].EventPattern["+ j +"].EventTypeList["+ k +"]"));
}
eventPatternItem.setEventTypeList(eventTypeList);
eventPattern.add(eventPatternItem);
}
eventRule.setEventPattern(eventPattern);
eventRules.add(eventRule);
}
describeEventRuleListResponse.setEventRules(eventRules);
return describeEventRuleListResponse;
}
}
|
{
"pile_set_name": "Github"
}
|
/* Bidirectional version of popen() */
#ifndef _Ibipopen
#define _Ibipopen 1
#include "config.h"
typedef struct bip BIP;
struct bip
{
int infd; /* Input file descriptor */
int outfd; /* Output file descriptor */
int pid; /* Sub-process id */
};
BIP *bipopen();
void bipclose();
void bipputs();
char *bipgets();
#endif
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2014 Igalia S.L.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "WaylandSurface.h"
#if PLATFORM(WAYLAND)
#include "GLContextEGL.h"
#include "IntSize.h"
#include "PlatformDisplayWayland.h"
#include <EGL/egl.h>
namespace WebCore {
void frameCallback(void*, struct wl_callback* callback, uint32_t)
{
if (callback)
wl_callback_destroy(callback);
}
static const struct wl_callback_listener frameListener = {
frameCallback
};
WaylandSurface::WaylandSurface(struct wl_surface* wlSurface, EGLNativeWindowType nativeWindow)
: m_wlSurface(wlSurface)
, m_nativeWindow(nativeWindow)
{
}
WaylandSurface::~WaylandSurface()
{
// The surface couldn't have been created in the first place if WaylandDisplay wasn't properly initialized.
const PlatformDisplayWayland& waylandDisplay = downcast<PlatformDisplayWayland>(PlatformDisplay::sharedDisplay());
ASSERT(waylandDisplay.native());
eglMakeCurrent(waylandDisplay.native(), EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
wl_egl_window_destroy(m_nativeWindow);
wl_surface_destroy(m_wlSurface);
}
void WaylandSurface::resize(const IntSize& size)
{
wl_egl_window_resize(m_nativeWindow, size.width(), size.height(), 0, 0);
}
std::unique_ptr<GLContextEGL> WaylandSurface::createGLContext()
{
return GLContextEGL::createWindowContext(m_nativeWindow, GLContext::sharingContext());
}
void WaylandSurface::requestFrame()
{
struct wl_callback* frameCallback = wl_surface_frame(m_wlSurface);
wl_callback_add_listener(frameCallback, &frameListener, this);
}
} // namespace WebCore
#endif // PLATFORM(WAYLAND)
|
{
"pile_set_name": "Github"
}
|
/*
==============================================================================
This file is part of the Water library.
Copyright (c) 2016 ROLI Ltd.
Copyright (C) 2017 Filipe Coelho <[email protected]>
Permission is granted to use this software under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license/
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD
TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
OF THIS SOFTWARE.
==============================================================================
*/
#ifndef WATER_MIDIBUFFER_H_INCLUDED
#define WATER_MIDIBUFFER_H_INCLUDED
#include "../containers/Array.h"
namespace water {
//==============================================================================
/**
Holds a sequence of time-stamped midi events.
Analogous to the AudioSampleBuffer, this holds a set of midi events with
integer time-stamps. The buffer is kept sorted in order of the time-stamps.
If you're working with a sequence of midi events that may need to be manipulated
or read/written to a midi file, then MidiMessageSequence is probably a more
appropriate container. MidiBuffer is designed for lower-level streams of raw
midi data.
@see MidiMessage
*/
class MidiBuffer
{
public:
//==============================================================================
/** Creates an empty MidiBuffer. */
MidiBuffer() noexcept;
/** Creates a MidiBuffer containing a single midi message. */
explicit MidiBuffer (const MidiMessage& message) noexcept;
/** Creates a copy of another MidiBuffer. */
MidiBuffer (const MidiBuffer&) noexcept;
/** Makes a copy of another MidiBuffer. */
MidiBuffer& operator= (const MidiBuffer&) noexcept;
/** Destructor */
~MidiBuffer();
//==============================================================================
/** Removes all events from the buffer. */
void clear() noexcept;
/** Removes all events between two times from the buffer.
All events for which (start <= event position < start + numSamples) will
be removed.
*/
void clear (int start, int numSamples);
/** Returns true if the buffer is empty.
To actually retrieve the events, use a MidiBuffer::Iterator object
*/
bool isEmpty() const noexcept;
/** Counts the number of events in the buffer.
This is actually quite a slow operation, as it has to iterate through all
the events, so you might prefer to call isEmpty() if that's all you need
to know.
*/
int getNumEvents() const noexcept;
/** Adds an event to the buffer.
The sample number will be used to determine the position of the event in
the buffer, which is always kept sorted. The MidiMessage's timestamp is
ignored.
If an event is added whose sample position is the same as one or more events
already in the buffer, the new event will be placed after the existing ones.
To retrieve events, use a MidiBuffer::Iterator object
*/
void addEvent (const MidiMessage& midiMessage, int sampleNumber);
/** Adds an event to the buffer from raw midi data.
The sample number will be used to determine the position of the event in
the buffer, which is always kept sorted.
If an event is added whose sample position is the same as one or more events
already in the buffer, the new event will be placed after the existing ones.
The event data will be inspected to calculate the number of bytes in length that
the midi event really takes up, so maxBytesOfMidiData may be longer than the data
that actually gets stored. E.g. if you pass in a note-on and a length of 4 bytes,
it'll actually only store 3 bytes. If the midi data is invalid, it might not
add an event at all.
To retrieve events, use a MidiBuffer::Iterator object
*/
void addEvent (const void* rawMidiData,
int maxBytesOfMidiData,
int sampleNumber);
/** Adds some events from another buffer to this one.
@param otherBuffer the buffer containing the events you want to add
@param startSample the lowest sample number in the source buffer for which
events should be added. Any source events whose timestamp is
less than this will be ignored
@param numSamples the valid range of samples from the source buffer for which
events should be added - i.e. events in the source buffer whose
timestamp is greater than or equal to (startSample + numSamples)
will be ignored. If this value is less than 0, all events after
startSample will be taken.
@param sampleDeltaToAdd a value which will be added to the source timestamps of the events
that are added to this buffer
*/
void addEvents (const MidiBuffer& otherBuffer,
int startSample,
int numSamples,
int sampleDeltaToAdd);
/** Returns the sample number of the first event in the buffer.
If the buffer's empty, this will just return 0.
*/
int getFirstEventTime() const noexcept;
/** Returns the sample number of the last event in the buffer.
If the buffer's empty, this will just return 0.
*/
int getLastEventTime() const noexcept;
//==============================================================================
/** Exchanges the contents of this buffer with another one.
This is a quick operation, because no memory allocation or copying is done; it
just swaps the internal state of the two buffers.
*/
void swapWith (MidiBuffer&) noexcept;
/** Preallocates some memory for the buffer to use.
This helps to avoid needing to reallocate space when the buffer has messages
added to it.
*/
void ensureSize (size_t minimumNumBytes);
//==============================================================================
/**
Used to iterate through the events in a MidiBuffer.
Note that altering the buffer while an iterator is using it isn't a
safe operation.
@see MidiBuffer
*/
class Iterator
{
public:
//==============================================================================
/** Creates an Iterator for this MidiBuffer. */
Iterator (const MidiBuffer&) noexcept;
/** Destructor. */
~Iterator() noexcept;
//==============================================================================
/** Repositions the iterator so that the next event retrieved will be the first
one whose sample position is greater than or equal to the given position.
*/
void setNextSamplePosition (int samplePosition) noexcept;
/** Retrieves a copy of the next event from the buffer.
@param result on return, this will be the message. The MidiMessage's timestamp
is set to the same value as samplePosition.
@param samplePosition on return, this will be the position of the event, as a
sample index in the buffer
@returns true if an event was found, or false if the iterator has reached
the end of the buffer
*/
bool getNextEvent (MidiMessage& result,
int& samplePosition) noexcept;
/** Retrieves the next event from the buffer.
@param midiData on return, this pointer will be set to a block of data containing
the midi message. Note that to make it fast, this is a pointer
directly into the MidiBuffer's internal data, so is only valid
temporarily until the MidiBuffer is altered.
@param numBytesOfMidiData on return, this is the number of bytes of data used by the
midi message
@param samplePosition on return, this will be the position of the event, as a
sample index in the buffer
@returns true if an event was found, or false if the iterator has reached
the end of the buffer
*/
bool getNextEvent (const uint8* &midiData,
int& numBytesOfMidiData,
int& samplePosition) noexcept;
private:
//==============================================================================
const MidiBuffer& buffer;
const uint8* data;
CARLA_DECLARE_NON_COPY_CLASS (Iterator)
};
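/* Illustrative usage sketch (not part of the original header): pulling the events
   back out of a buffer with MidiBuffer::Iterator. 'buffer' and 'handleEvent' are
   hypothetical names standing in for your own buffer and callback.

       MidiBuffer::Iterator it (buffer);
       MidiMessage message;
       int samplePosition;

       while (it.getNextEvent (message, samplePosition))
           handleEvent (message, samplePosition);
*/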
/** The raw data holding this buffer.
Obviously access to this data is provided at your own risk. Its internal format could
change in future, so don't write code that relies on it!
*/
Array<uint8> data;
};
}
#endif // WATER_MIDIBUFFER_H_INCLUDED
|
{
"pile_set_name": "Github"
}
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceTapConfigurationsOperations(object):
"""NetworkInterfaceTapConfigurationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
tap_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_interface_name, # type: str
tap_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified tap configuration from the NetworkInterface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param tap_configuration_name: The name of the tap configuration.
:type tap_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
tap_configuration_name=tap_configuration_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
network_interface_name, # type: str
tap_configuration_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterfaceTapConfiguration"
"""Get the specified tap configuration on a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param tap_configuration_name: The name of the tap configuration.
:type tap_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceTapConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceTapConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
tap_configuration_name, # type: str
tap_configuration_parameters, # type: "models.NetworkInterfaceTapConfiguration"
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterfaceTapConfiguration"
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceTapConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'tapConfigurationName': self._serialize.url("tap_configuration_name", tap_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(tap_configuration_parameters, 'NetworkInterfaceTapConfiguration')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
network_interface_name, # type: str
tap_configuration_name, # type: str
tap_configuration_parameters, # type: "models.NetworkInterfaceTapConfiguration"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.NetworkInterfaceTapConfiguration"]
"""Creates or updates a Tap configuration in the specified NetworkInterface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param tap_configuration_name: The name of the tap configuration.
:type tap_configuration_name: str
:param tap_configuration_parameters: Parameters supplied to the create or update tap
configuration operation.
:type tap_configuration_parameters: ~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkInterfaceTapConfiguration or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfiguration]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceTapConfiguration"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
tap_configuration_name=tap_configuration_name,
tap_configuration_parameters=tap_configuration_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceTapConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations/{tapConfigurationName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceTapConfigurationListResult"]
"""Get all Tap configurations in a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkInterfaceTapConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_06_01.models.NetworkInterfaceTapConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceTapConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceTapConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/tapConfigurations'} # type: ignore
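# Illustrative call pattern only (not generated code): how these operations are
# typically invoked. 'network_client' is assumed to be an already-constructed and
# authenticated NetworkManagementClient; the operation-group attribute name and
# the resource names below are placeholders.
#
#     poller = network_client.network_interface_tap_configurations.begin_delete(
#         resource_group_name="example-rg",
#         network_interface_name="example-nic",
#         tap_configuration_name="example-tap",
#     )
#     poller.result()  # block until the long-running delete finishes
#
#     for tap_config in network_client.network_interface_tap_configurations.list(
#             "example-rg", "example-nic"):
#         print(tap_config.name)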
|
{
"pile_set_name": "Github"
}
|
//
// NSObject+GICEvent.m
// GICXMLLayout
//
// Created by gonghaiwei on 2018/7/8.
//
#import "NSObject+GICEvent.h"
@implementation NSObject (GICEvent)
-(GICEvent *)gic_event_addEvent:(GICEvent *)event{
if(event.onlyExistOne){
for(GICBehavior *b in self.gic_Behaviors.behaviors){
if(b.class == event.class){
return (GICEvent *)b;
}
}
}
[self gic_addBehavior:event];
return event;
}
-(GICEvent *)gic_event_findFirstWithEventClass:(Class)eventType{
GICBehavior *b = [self.gic_Behaviors findFirstWithBehaviorClass:eventType];
if([b isKindOfClass:[GICEvent class]]){
return (GICEvent *)b;
}
return nil;
}
-(GICEvent *)gic_event_findFirstWithEventClassOrCreate:(Class)eventType{
GICEvent *e = [self gic_event_findFirstWithEventClass:eventType];
if(e==nil){
e = [[eventType alloc] initWithExpresion:nil withEventName:nil];
[self gic_event_addEvent:e];
if(e.target==nil){
GICPerformBlockOnElementQueue(^{
[e attachTo:self];
});
}
}
return e;
}
-(GICEvent *)gic_event_findWithEventName:(NSString *)eventName{
GICBehavior *b = [self.gic_Behaviors findWithBehaviorName:eventName];
if([b isKindOfClass:[GICEvent class]]){
return (GICEvent *)b;
}
return nil;
}
-(GICEvent *)gic_event_findFirstWithEventNameOrCreate:(NSString *)eventName{
GICEvent *e = [self gic_event_findWithEventName:eventName];
if(e==nil){
GICAttributeValueConverter *p = [GICElementsCache classAttributs:[self class]][eventName];
if(p){
p.propertySetter(self, nil);
e = [self gic_event_findWithEventName:eventName];
[self gic_event_addEvent:e];
if(e.target==nil){
GICPerformBlockOnElementQueue(^{
[e attachTo:self];
});
}
}
}
return e;
}
@end
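// Illustrative sketch (not part of the original category): looking up an event by
// class and creating it if it does not exist yet. 'GICTapEvent' is a hypothetical
// GICEvent subclass and 'someView' a hypothetical target object.
//
//     GICEvent *tap = [someView gic_event_findFirstWithEventClassOrCreate:[GICTapEvent class]];
//     NSLog(@"attached event: %@", tap);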
|
{
"pile_set_name": "Github"
}
|
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!1 &100000
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 3
m_Component:
- 4: {fileID: 400000}
- 114: {fileID: 11400000}
m_Layer: 8
m_Name: SciFi Font - Normal
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 0
--- !u!1002 &100001
EditorExtensionImpl:
serializedVersion: 6
--- !u!4 &400000
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 100000}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
--- !u!1002 &400001
EditorExtensionImpl:
serializedVersion: 6
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 100000}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 0b4eb3a400afab046abb8471a9d746d6, type: 1}
m_Name:
mMat: {fileID: 2100000, guid: 7b4d21284f230674c8f3d4d6f45b3dde, type: 2}
mUVRect:
serializedVersion: 2
x: .5
y: .5
width: .5
height: .5
mFont:
mSize: 20
mBase: 15
mWidth: 256
mHeight: 256
mSpriteName: Font - Normal
mSaved:
- index: 32
x: 192
y: 235
width: 7
height: 7
offsetX: -2
offsetY: 13
advance: 6
channel: 0
kerning:
- index: 33
x: 156
y: 186
width: 9
height: 17
offsetX: -1
offsetY: 2
advance: 5
channel: 0
kerning:
- index: 34
x: 0
y: 240
width: 13
height: 11
offsetX: -1
offsetY: 2
advance: 9
channel: 0
kerning:
- index: 35
x: 176
y: 147
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 36
x: 56
y: 24
width: 16
height: 21
offsetX: -1
offsetY: 0
advance: 12
channel: 0
kerning:
- index: 37
x: 47
y: 130
width: 20
height: 17
offsetX: -1
offsetY: 2
advance: 16
channel: 0
kerning:
- index: 38
x: 195
y: 146
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 39
x: 29
y: 240
width: 9
height: 11
offsetX: -1
offsetY: 2
advance: 6
channel: 0
kerning:
- index: 40
x: 30
y: 0
width: 12
height: 22
offsetX: -1
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 41
x: 44
y: 0
width: 12
height: 22
offsetX: -2
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 42
x: 153
y: 222
width: 14
height: 12
offsetX: -1
offsetY: 2
advance: 10
channel: 0
kerning:
- index: 43
x: 19
y: 207
width: 17
height: 15
offsetX: -1
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 44
x: 53
y: 240
width: 9
height: 10
offsetX: -1
offsetY: 11
advance: 6
channel: 0
kerning:
- index: 45
x: 123
y: 238
width: 11
height: 8
offsetX: -1
offsetY: 7
advance: 7
channel: 0
kerning:
- index: 46
x: 147
y: 238
width: 9
height: 8
offsetX: -1
offsetY: 11
advance: 6
channel: 0
kerning:
- index: 47
x: 83
y: 69
width: 14
height: 19
offsetX: -2
offsetY: 1
advance: 8
channel: 0
kerning:
- index: 48
x: 214
y: 146
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 49
x: 242
y: 107
width: 11
height: 17
offsetX: -2
offsetY: 2
advance: 7
channel: 0
kerning:
- index: 50
x: 149
y: 167
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 51
x: 36
y: 188
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 52
x: 233
y: 145
width: 17
height: 17
offsetX: -2
offsetY: 2
advance: 11
channel: 0
kerning:
- index: 53
x: 113
y: 168
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 11
channel: 0
kerning:
- index: 54
x: 167
y: 166
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 55
x: 157
y: 147
width: 17
height: 17
offsetX: -2
offsetY: 2
advance: 10
channel: 0
kerning:
- index: 56
x: 0
y: 188
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 57
x: 18
y: 188
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 58
x: 244
y: 183
width: 9
height: 14
offsetX: -1
offsetY: 5
advance: 6
channel: 0
kerning:
- index: 59
x: 189
y: 185
width: 9
height: 16
offsetX: -1
offsetY: 5
advance: 6
channel: 0
kerning:
- index: 60
x: 38
y: 207
width: 17
height: 15
offsetX: -2
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 61
x: 234
y: 218
width: 16
height: 11
offsetX: -1
offsetY: 6
advance: 12
channel: 0
kerning:
- index: 62
x: 75
y: 206
width: 16
height: 15
offsetX: -1
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 63
x: 140
y: 186
width: 14
height: 17
offsetX: -1
offsetY: 2
advance: 10
channel: 0
kerning:
- index: 64
x: 98
y: 0
width: 22
height: 21
offsetX: -1
offsetY: 2
advance: 18
channel: 0
kerning:
- index: 65
x: 215
y: 126
width: 18
height: 17
offsetX: -2
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 66
x: 221
y: 165
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 67
x: 0
y: 169
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 68
x: 19
y: 169
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 69
x: 203
y: 165
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 70
x: 185
y: 166
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 11
channel: 0
kerning:
- index: 71
x: 38
y: 169
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 72
x: 195
y: 127
width: 18
height: 17
offsetX: -1
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 73
x: 167
y: 185
width: 9
height: 17
offsetX: -1
offsetY: 2
advance: 5
channel: 0
kerning:
- index: 74
x: 90
y: 187
width: 15
height: 17
offsetX: -1
offsetY: 2
advance: 11
channel: 0
kerning:
- index: 75
x: 57
y: 168
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 76
x: 239
y: 164
width: 15
height: 17
offsetX: -1
offsetY: 2
advance: 10
channel: 0
kerning:
- index: 77
x: 69
y: 130
width: 20
height: 17
offsetX: -1
offsetY: 2
advance: 16
channel: 0
kerning:
- index: 78
x: 235
y: 126
width: 18
height: 17
offsetX: -1
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 79
x: 76
y: 168
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 80
x: 131
y: 167
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 81
x: 14
y: 69
width: 17
height: 19
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 82
x: 100
y: 149
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 83
x: 95
y: 168
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 84
x: 119
y: 148
width: 17
height: 17
offsetX: -2
offsetY: 2
advance: 11
channel: 0
kerning:
- index: 85
x: 138
y: 148
width: 17
height: 17
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 86
x: 0
y: 150
width: 18
height: 17
offsetX: -2
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 87
x: 0
y: 131
width: 23
height: 17
offsetX: -2
offsetY: 2
advance: 17
channel: 0
kerning:
- index: 88
x: 60
y: 149
width: 18
height: 17
offsetX: -2
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 89
x: 154
y: 128
width: 19
height: 17
offsetX: -3
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 90
x: 72
y: 187
width: 16
height: 17
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 91
x: 58
y: 0
width: 12
height: 22
offsetX: -1
offsetY: 1
advance: 8
channel: 0
kerning:
- index: 92
x: 67
y: 69
width: 14
height: 19
offsetX: -2
offsetY: 1
advance: 8
channel: 0
kerning:
- index: 93
x: 72
y: 0
width: 12
height: 22
offsetX: -2
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 94
x: 216
y: 219
width: 16
height: 11
offsetX: -2
offsetY: 2
advance: 10
channel: 0
kerning:
- index: 95
x: 158
y: 236
width: 17
height: 7
offsetX: -2
offsetY: 16
advance: 11
channel: 0
kerning:
- index: 96
x: 95
y: 239
width: 12
height: 8
offsetX: -1
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 97
x: 162
y: 205
width: 15
height: 15
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 98
x: 189
y: 67
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 99
x: 145
y: 205
width: 15
height: 15
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 100
x: 206
y: 67
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 101
x: 179
y: 204
width: 15
height: 15
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 102
x: 102
y: 109
width: 14
height: 18
offsetX: -1
offsetY: 1
advance: 9
channel: 0
kerning:
- index: 103
x: 50
y: 69
width: 15
height: 19
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 104
x: 223
y: 67
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 105
x: 192
y: 107
width: 9
height: 18
offsetX: -1
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 106
x: 86
y: 0
width: 10
height: 22
offsetX: -2
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 107
x: 135
y: 68
width: 16
height: 18
offsetX: -1
offsetY: 1
advance: 10
channel: 0
kerning:
- index: 108
x: 203
y: 107
width: 9
height: 18
offsetX: -1
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 109
x: 223
y: 184
width: 19
height: 15
offsetX: -1
offsetY: 4
advance: 15
channel: 0
kerning:
- index: 110
x: 111
y: 206
width: 15
height: 15
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 111
x: 128
y: 206
width: 15
height: 15
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 112
x: 51
y: 90
width: 15
height: 18
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 113
x: 68
y: 90
width: 15
height: 18
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 114
x: 228
y: 201
width: 13
height: 15
offsetX: -1
offsetY: 4
advance: 9
channel: 0
kerning:
- index: 115
x: 196
y: 203
width: 14
height: 15
offsetX: -1
offsetY: 4
advance: 10
channel: 0
kerning:
- index: 116
x: 124
y: 187
width: 14
height: 17
offsetX: -1
offsetY: 2
advance: 9
channel: 0
kerning:
- index: 117
x: 58
y: 224
width: 15
height: 14
offsetX: -1
offsetY: 5
advance: 11
channel: 0
kerning:
- index: 118
x: 22
y: 224
width: 16
height: 14
offsetX: -2
offsetY: 5
advance: 10
channel: 0
kerning:
- index: 119
x: 0
y: 224
width: 20
height: 14
offsetX: -2
offsetY: 5
advance: 14
channel: 0
kerning:
- index: 120
x: 40
y: 224
width: 16
height: 14
offsetX: -2
offsetY: 5
advance: 10
channel: 0
kerning:
- index: 121
x: 54
y: 188
width: 16
height: 17
offsetX: -2
offsetY: 5
advance: 10
channel: 0
kerning:
- index: 122
x: 212
y: 202
width: 14
height: 15
offsetX: -1
offsetY: 4
advance: 10
channel: 0
kerning:
- index: 123
x: 0
y: 0
width: 13
height: 22
offsetX: -2
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 124
x: 155
y: 23
width: 9
height: 21
offsetX: -1
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 125
x: 15
y: 0
width: 13
height: 22
offsetX: -1
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 126
x: 64
y: 240
width: 15
height: 8
offsetX: -1
offsetY: 7
advance: 10
channel: 0
kerning:
- index: 160
x: 247
y: 23
width: 7
height: 7
offsetX: -2
offsetY: 13
advance: 6
channel: 0
kerning:
- index: 161
x: 178
y: 185
width: 9
height: 17
offsetX: -1
offsetY: 5
advance: 5
channel: 0
kerning:
- index: 162
x: 85
y: 110
width: 15
height: 18
offsetX: -1
offsetY: 3
advance: 11
channel: 0
kerning:
- index: 163
x: 175
y: 128
width: 18
height: 17
offsetX: -2
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 164
x: 75
y: 223
width: 15
height: 14
offsetX: -1
offsetY: 3
advance: 11
channel: 0
kerning:
- index: 165
x: 133
y: 129
width: 19
height: 17
offsetX: -3
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 166
x: 144
y: 23
width: 9
height: 21
offsetX: -1
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 167
x: 110
y: 23
width: 16
height: 21
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 168
x: 81
y: 239
width: 12
height: 8
offsetX: -1
offsetY: 1
advance: 8
channel: 0
kerning:
- index: 169
x: 20
y: 150
width: 18
height: 17
offsetX: -1
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 170
x: 92
y: 223
width: 12
height: 14
offsetX: -1
offsetY: 2
advance: 8
channel: 0
kerning:
- index: 171
x: 120
y: 223
width: 15
height: 13
offsetX: -2
offsetY: 6
advance: 10
channel: 0
kerning:
- index: 172
x: 197
y: 220
width: 17
height: 11
offsetX: -2
offsetY: 6
advance: 12
channel: 0
kerning:
- index: 174
x: 40
y: 149
width: 18
height: 17
offsetX: -1
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 175
x: 177
y: 236
width: 13
height: 7
offsetX: -1
offsetY: 2
advance: 9
channel: 0
kerning:
- index: 176
x: 15
y: 240
width: 12
height: 11
offsetX: -1
offsetY: 2
advance: 8
channel: 0
kerning:
- index: 177
x: 0
y: 207
width: 17
height: 15
offsetX: -1
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 178
x: 169
y: 222
width: 12
height: 12
offsetX: -1
offsetY: 2
advance: 8
channel: 0
kerning:
- index: 179
x: 183
y: 221
width: 12
height: 12
offsetX: -1
offsetY: 2
advance: 8
channel: 0
kerning:
- index: 180
x: 109
y: 239
width: 12
height: 8
offsetX: -1
offsetY: 1
advance: 7
channel: 0
kerning:
- index: 181
x: 107
y: 187
width: 15
height: 17
offsetX: -1
offsetY: 5
advance: 11
channel: 0
kerning:
- index: 182
x: 188
y: 45
width: 16
height: 20
offsetX: -1
offsetY: 2
advance: 12
channel: 0
kerning:
- index: 183
x: 136
y: 238
width: 9
height: 8
offsetX: -1
offsetY: 6
advance: 6
channel: 0
kerning:
- index: 184
x: 40
y: 240
width: 11
height: 10
offsetX: -1
offsetY: 13
advance: 9
channel: 0
kerning:
- index: 185
x: 243
y: 201
width: 9
height: 12
offsetX: -1
offsetY: 2
advance: 5
channel: 0
kerning:
- index: 186
x: 106
y: 223
width: 12
height: 14
offsetX: -1
offsetY: 2
advance: 8
channel: 0
kerning:
- index: 187
x: 137
y: 223
width: 14
height: 13
offsetX: -1
offsetY: 6
advance: 10
channel: 0
kerning:
- index: 188
x: 80
y: 149
width: 18
height: 17
offsetX: -1
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 189
x: 112
y: 129
width: 19
height: 17
offsetX: -1
offsetY: 2
advance: 15
channel: 0
kerning:
- index: 190
x: 25
y: 130
width: 20
height: 17
offsetX: -1
offsetY: 2
advance: 16
channel: 0
kerning:
- index: 191
x: 118
y: 109
width: 14
height: 18
offsetX: -1
offsetY: 5
advance: 10
channel: 0
kerning:
- index: 192
x: 0
y: 47
width: 18
height: 20
offsetX: -2
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 193
x: 227
y: 23
width: 18
height: 20
offsetX: -2
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 194
x: 162
y: 0
width: 18
height: 21
offsetX: -2
offsetY: -2
advance: 12
channel: 0
kerning:
- index: 195
x: 142
y: 0
width: 18
height: 21
offsetX: -2
offsetY: -2
advance: 12
channel: 0
kerning:
- index: 196
x: 187
y: 23
width: 18
height: 20
offsetX: -2
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 197
x: 207
y: 23
width: 18
height: 20
offsetX: -2
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 198
x: 214
y: 107
width: 26
height: 17
offsetX: -3
offsetY: 2
advance: 19
channel: 0
kerning:
- index: 199
x: 19
y: 24
width: 17
height: 21
offsetX: -1
offsetY: 2
advance: 13
channel: 0
kerning:
- index: 200
x: 170
y: 45
width: 16
height: 20
offsetX: -1
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 201
x: 134
y: 46
width: 16
height: 20
offsetX: -1
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 202
x: 92
y: 24
width: 16
height: 21
offsetX: -1
offsetY: -2
advance: 12
channel: 0
kerning:
- index: 203
x: 152
y: 46
width: 16
height: 20
offsetX: -1
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 204
x: 238
y: 45
width: 12
height: 20
offsetX: -3
offsetY: -1
advance: 5
channel: 0
kerning:
- index: 205
x: 0
y: 69
width: 12
height: 20
offsetX: -2
offsetY: -1
advance: 5
channel: 0
kerning:
- index: 206
x: 128
y: 23
width: 14
height: 21
offsetX: -3
offsetY: -2
advance: 5
channel: 0
kerning:
- index: 207
x: 223
y: 45
width: 13
height: 20
offsetX: -2
offsetY: -1
advance: 5
channel: 0
kerning:
- index: 208
x: 91
y: 130
width: 19
height: 17
offsetX: -2
offsetY: 2
advance: 14
channel: 0
kerning:
- index: 209
x: 122
y: 0
width: 18
height: 21
offsetX: -1
offsetY: -2
advance: 14
channel: 0
kerning:
- index: 210
x: 58
y: 47
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 211
x: 39
y: 47
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 212
x: 0
y: 24
width: 17
height: 21
offsetX: -1
offsetY: -2
advance: 13
channel: 0
kerning:
- index: 213
x: 201
y: 0
width: 17
height: 21
offsetX: -1
offsetY: -2
advance: 13
channel: 0
kerning:
- index: 214
x: 20
y: 47
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 215
x: 93
y: 206
width: 16
height: 15
offsetX: -1
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 216
x: 220
y: 0
width: 17
height: 21
offsetX: -1
offsetY: 0
advance: 13
channel: 0
kerning:
- index: 217
x: 77
y: 47
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 218
x: 96
y: 47
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 219
x: 182
y: 0
width: 17
height: 21
offsetX: -1
offsetY: -2
advance: 13
channel: 0
kerning:
- index: 220
x: 115
y: 46
width: 17
height: 20
offsetX: -1
offsetY: -1
advance: 13
channel: 0
kerning:
- index: 221
x: 166
y: 23
width: 19
height: 20
offsetX: -3
offsetY: -1
advance: 12
channel: 0
kerning:
- index: 222
x: 99
y: 69
width: 16
height: 18
offsetX: -1
offsetY: 1
advance: 12
channel: 0
kerning:
- index: 223
x: 117
y: 68
width: 16
height: 18
offsetX: -1
offsetY: 1
advance: 12
channel: 0
kerning:
- index: 224
x: 68
y: 110
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 225
x: 51
y: 110
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 226
x: 221
y: 87
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 227
x: 17
y: 90
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 228
x: 0
y: 91
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 229
x: 206
y: 45
width: 15
height: 20
offsetX: -1
offsetY: -1
advance: 11
channel: 0
kerning:
- index: 230
x: 200
y: 185
width: 21
height: 15
offsetX: -1
offsetY: 4
advance: 17
channel: 0
kerning:
- index: 231
x: 33
y: 69
width: 15
height: 19
offsetX: -1
offsetY: 4
advance: 11
channel: 0
kerning:
- index: 232
x: 136
y: 88
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 233
x: 85
y: 90
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 234
x: 102
y: 89
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 235
x: 240
y: 67
width: 14
height: 18
offsetX: -1
offsetY: 1
advance: 10
channel: 0
kerning:
- index: 236
x: 164
y: 108
width: 12
height: 18
offsetX: -3
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 237
x: 178
y: 108
width: 12
height: 18
offsetX: -2
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 238
x: 134
y: 108
width: 13
height: 18
offsetX: -3
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 239
x: 149
y: 108
width: 13
height: 18
offsetX: -2
offsetY: 1
advance: 5
channel: 0
kerning:
- index: 240
x: 119
y: 88
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 241
x: 34
y: 90
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 242
x: 153
y: 88
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 243
x: 170
y: 88
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 244
x: 187
y: 87
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 245
x: 204
y: 87
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 246
x: 153
y: 68
width: 16
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 247
x: 57
y: 207
width: 16
height: 15
offsetX: -1
offsetY: 4
advance: 12
channel: 0
kerning:
- index: 248
x: 238
y: 87
width: 15
height: 18
offsetX: -1
offsetY: 3
advance: 11
channel: 0
kerning:
- index: 249
x: 0
y: 111
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 250
x: 17
y: 110
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 251
x: 34
y: 110
width: 15
height: 18
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 252
x: 171
y: 67
width: 16
height: 18
offsetX: -1
offsetY: 1
advance: 12
channel: 0
kerning:
- index: 253
x: 74
y: 24
width: 16
height: 21
offsetX: -2
offsetY: 1
advance: 10
channel: 0
kerning:
- index: 254
x: 239
y: 0
width: 15
height: 21
offsetX: -1
offsetY: 1
advance: 11
channel: 0
kerning:
- index: 255
x: 38
y: 24
width: 16
height: 21
offsetX: -2
offsetY: 1
advance: 10
channel: 0
kerning:
mAtlas: {fileID: 11400000, guid: 2f2473f73b9ca724f9080d6d76009ea2, type: 2}
mReplacement: {fileID: 0}
mPixelSize: 1
mSymbols: []
mDynamicFont: {fileID: 0}
mDynamicFontSize: 16
mDynamicFontStyle: 0
--- !u!1002 &11400001
EditorExtensionImpl:
serializedVersion: 6
--- !u!1001 &100100000
Prefab:
m_ObjectHideFlags: 1
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications: []
m_RemovedComponents: []
m_ParentPrefab: {fileID: 0}
m_RootGameObject: {fileID: 100000}
m_IsPrefabParent: 1
m_IsExploded: 1
--- !u!1002 &100100001
EditorExtensionImpl:
serializedVersion: 6
|
{
"pile_set_name": "Github"
}
|
---
title: Pansexual
slug: pansexual
speech: adj
defined: true
excerpt: someone who is sexually attracted to people regardless of their gender, or to people of any gender.
reading:
- text: 'Pansexuality and Being Pansexual: Everything You Need to Know'
href: https://www.teenvogue.com/story/what-is-pansexuality
---
someone who is sexually attracted to people regardless of their gender, or to people of any gender.
## Other Languages
[Français](/definitions/fr_FR/pansexuel)
|
{
"pile_set_name": "Github"
}
|
define( function() {
"use strict";
return [];
} );
|
{
"pile_set_name": "Github"
}
|
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package globalaccelerator
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol"
"github.com/aws/aws-sdk-go/private/protocol/jsonrpc"
)
// GlobalAccelerator provides the API operation methods for making requests to
// AWS Global Accelerator. See this package's package overview docs
// for details on the service.
//
// GlobalAccelerator methods are safe to use concurrently. It is not safe to
// mutate any of the struct's properties though.
type GlobalAccelerator struct {
*client.Client
}
// Used for custom client initialization logic
var initClient func(*client.Client)
// Used for custom request initialization logic
var initRequest func(*request.Request)
// Service information constants
const (
ServiceName = "Global Accelerator" // Name of service.
EndpointsID = "globalaccelerator" // ID to lookup a service endpoint with.
ServiceID = "Global Accelerator" // ServiceID is a unique identifier of a specific service.
)
// New creates a new instance of the GlobalAccelerator client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
// mySession := session.Must(session.NewSession())
//
// // Create a GlobalAccelerator client from just a session.
// svc := globalaccelerator.New(mySession)
//
// // Create a GlobalAccelerator client with additional configuration
// svc := globalaccelerator.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *GlobalAccelerator {
c := p.ClientConfig(EndpointsID, cfgs...)
if c.SigningNameDerived || len(c.SigningName) == 0 {
c.SigningName = "globalaccelerator"
}
return newClient(*c.Config, c.Handlers, c.PartitionID, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, partitionID, endpoint, signingRegion, signingName string) *GlobalAccelerator {
svc := &GlobalAccelerator{
Client: client.New(
cfg,
metadata.ClientInfo{
ServiceName: ServiceName,
ServiceID: ServiceID,
SigningName: signingName,
SigningRegion: signingRegion,
PartitionID: partitionID,
Endpoint: endpoint,
APIVersion: "2018-08-08",
JSONVersion: "1.1",
TargetPrefix: "GlobalAccelerator_V20180706",
},
handlers,
),
}
// Handlers
svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
svc.Handlers.Build.PushBackNamed(jsonrpc.BuildHandler)
svc.Handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler)
svc.Handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler)
svc.Handlers.UnmarshalError.PushBackNamed(
protocol.NewUnmarshalErrorHandler(jsonrpc.NewUnmarshalTypedError(exceptionFromCode)).NamedHandler(),
)
// Run custom client initialization if present
if initClient != nil {
initClient(svc.Client)
}
return svc
}
// newRequest creates a new request for a GlobalAccelerator operation and runs any
// custom request initialization.
func (c *GlobalAccelerator) newRequest(op *request.Operation, params, data interface{}) *request.Request {
req := c.NewRequest(op, params, data)
// Run custom request initialization if present
if initRequest != nil {
initRequest(req)
}
return req
}
|
{
"pile_set_name": "Github"
}
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "../gameSource/objectBank.h"
#include "../gameSource/transitionBank.h"
#include "../gameSource/categoryBank.h"
#include "../gameSource/animationBank.h"
void usage() {
printf( "Usage:\n\n"
"printObjectName id\n\n" );
exit( 1 );
}
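// Example invocation (illustrative only; assumes the object/animation/category/
// transition data folders are present in the working directory, as the
// init*Bank calls below expect, and that 42 is a placeholder object id):
//
//     ./printObjectName 42
//
// which prints the description of object id 42, or "Empty space" if it is unused.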
int main( int inNumArgs, char **inArgs ) {
if( inNumArgs != 2 ) {
usage();
}
char rebuilding;
initAnimationBankStart( &rebuilding );
while( initAnimationBankStep() < 1.0 );
initAnimationBankFinish();
initObjectBankStart( &rebuilding, true, true );
while( initObjectBankStep() < 1.0 );
initObjectBankFinish();
initCategoryBankStart( &rebuilding );
while( initCategoryBankStep() < 1.0 );
initCategoryBankFinish();
// auto-generate category-based transitions
initTransBankStart( &rebuilding, true, true, true, true );
while( initTransBankStep() < 1.0 );
initTransBankFinish();
int id = 0;
sscanf( inArgs[1], "%d", &id );
ObjectRecord *o = getObject( id );
if( o != NULL ) {
printf( "%s\n", o->description );
}
else {
printf( "Empty space\n" );
}
freeTransBank();
freeCategoryBank();
freeObjectBank();
freeAnimationBank();
return 1;
}
void *getSprite( int ) {
return NULL;
}
char *getSpriteTag( int ) {
return NULL;
}
char isSpriteBankLoaded() {
return false;
}
char markSpriteLive( int ) {
return false;
}
void stepSpriteBank() {
}
void drawSprite( void*, doublePair, double, double, char ) {
}
void setDrawColor( float inR, float inG, float inB, float inA ) {
}
void setDrawFade( float ) {
}
float getTotalGlobalFade() {
return 1.0f;
}
void toggleAdditiveTextureColoring( char inAdditive ) {
}
void toggleAdditiveBlend( char ) {
}
void drawSquare( doublePair, double ) {
}
void startAddingToStencil( char, char, float ) {
}
void startDrawingThroughStencil( char ) {
}
void stopStencil() {
}
// dummy implementations of these functions, which are used in editor
// and client, but not server
#include "../gameSource/spriteBank.h"
SpriteRecord *getSpriteRecord( int inSpriteID ) {
return NULL;
}
#include "../gameSource/soundBank.h"
void checkIfSoundStillNeeded( int inID ) {
}
char getSpriteHit( int inID, int inXCenterOffset, int inYCenterOffset ) {
return false;
}
char getUsesMultiplicativeBlending( int inID ) {
return false;
}
void toggleMultiplicativeBlend( char inMultiplicative ) {
}
void countLiveUse( SoundUsage inUsage ) {
}
void unCountLiveUse( SoundUsage inUsage ) {
}
// animation bank calls these only if lip sync hack is enabled, which
// it never is for server
void *loadSpriteBase( const char*, char ) {
return NULL;
}
void freeSprite( void* ) {
}
void startOutputAllFrames() {
}
void stopOutputAllFrames() {
}
|
{
"pile_set_name": "Github"
}
|
//=-- CoverageMappingReader.cpp - Code coverage mapping reader ----*- C++ -*-=//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains support for reading coverage mapping data for
// instrumentation based coverage.
//
//===----------------------------------------------------------------------===//
#include "llvm/ProfileData/CoverageMappingReader.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/Object/MachOUniversal.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/LEB128.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
using namespace coverage;
using namespace object;
#define DEBUG_TYPE "coverage-mapping"
void CoverageMappingIterator::increment() {
// Check if all the records were read or if an error occurred while reading
// the next record.
if (Reader->readNextRecord(Record))
*this = CoverageMappingIterator();
}
std::error_code RawCoverageReader::readULEB128(uint64_t &Result) {
if (Data.size() < 1)
return coveragemap_error::truncated;
unsigned N = 0;
Result = decodeULEB128(reinterpret_cast<const uint8_t *>(Data.data()), &N);
if (N > Data.size())
return coveragemap_error::malformed;
Data = Data.substr(N);
return std::error_code();
}
std::error_code RawCoverageReader::readIntMax(uint64_t &Result,
uint64_t MaxPlus1) {
if (auto Err = readULEB128(Result))
return Err;
if (Result >= MaxPlus1)
return coveragemap_error::malformed;
return std::error_code();
}
std::error_code RawCoverageReader::readSize(uint64_t &Result) {
if (auto Err = readULEB128(Result))
return Err;
// Sanity check the number.
if (Result > Data.size())
return coveragemap_error::malformed;
return std::error_code();
}
std::error_code RawCoverageReader::readString(StringRef &Result) {
uint64_t Length;
if (auto Err = readSize(Length))
return Err;
Result = Data.substr(0, Length);
Data = Data.substr(Length);
return std::error_code();
}
std::error_code RawCoverageFilenamesReader::read() {
uint64_t NumFilenames;
if (auto Err = readSize(NumFilenames))
return Err;
for (size_t I = 0; I < NumFilenames; ++I) {
StringRef Filename;
if (auto Err = readString(Filename))
return Err;
Filenames.push_back(Filename);
}
return std::error_code();
}
std::error_code RawCoverageMappingReader::decodeCounter(unsigned Value,
Counter &C) {
auto Tag = Value & Counter::EncodingTagMask;
switch (Tag) {
case Counter::Zero:
C = Counter::getZero();
return std::error_code();
case Counter::CounterValueReference:
C = Counter::getCounter(Value >> Counter::EncodingTagBits);
return std::error_code();
default:
break;
}
Tag -= Counter::Expression;
switch (Tag) {
case CounterExpression::Subtract:
case CounterExpression::Add: {
auto ID = Value >> Counter::EncodingTagBits;
if (ID >= Expressions.size())
return coveragemap_error::malformed;
Expressions[ID].Kind = CounterExpression::ExprKind(Tag);
C = Counter::getExpression(ID);
break;
}
default:
return coveragemap_error::malformed;
}
return std::error_code();
}
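// Informal illustration of the encoding handled by decodeCounter above (for
// reference only; the values are hypothetical): a raw value whose low tag bits
// equal Counter::CounterValueReference decodes as
//     C = Counter::getCounter(Value >> Counter::EncodingTagBits);
// while a value tagged Counter::Expression + CounterExpression::Add (or Subtract)
// decodes as
//     C = Counter::getExpression(Value >> Counter::EncodingTagBits);
// with the expression's kind recorded in Expressions[ID].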
std::error_code RawCoverageMappingReader::readCounter(Counter &C) {
uint64_t EncodedCounter;
if (auto Err =
readIntMax(EncodedCounter, std::numeric_limits<unsigned>::max()))
return Err;
if (auto Err = decodeCounter(EncodedCounter, C))
return Err;
return std::error_code();
}
static const unsigned EncodingExpansionRegionBit = 1
<< Counter::EncodingTagBits;
/// \brief Read the sub-array of regions for the given inferred file id.
/// \param NumFileIDs the number of file ids that are defined for this
/// function.
std::error_code RawCoverageMappingReader::readMappingRegionsSubArray(
std::vector<CounterMappingRegion> &MappingRegions, unsigned InferredFileID,
size_t NumFileIDs) {
uint64_t NumRegions;
if (auto Err = readSize(NumRegions))
return Err;
unsigned LineStart = 0;
for (size_t I = 0; I < NumRegions; ++I) {
Counter C;
CounterMappingRegion::RegionKind Kind = CounterMappingRegion::CodeRegion;
// Read the combined counter + region kind.
uint64_t EncodedCounterAndRegion;
if (auto Err = readIntMax(EncodedCounterAndRegion,
std::numeric_limits<unsigned>::max()))
return Err;
unsigned Tag = EncodedCounterAndRegion & Counter::EncodingTagMask;
uint64_t ExpandedFileID = 0;
if (Tag != Counter::Zero) {
if (auto Err = decodeCounter(EncodedCounterAndRegion, C))
return Err;
} else {
// Is it an expansion region?
if (EncodedCounterAndRegion & EncodingExpansionRegionBit) {
Kind = CounterMappingRegion::ExpansionRegion;
ExpandedFileID = EncodedCounterAndRegion >>
Counter::EncodingCounterTagAndExpansionRegionTagBits;
if (ExpandedFileID >= NumFileIDs)
return coveragemap_error::malformed;
} else {
switch (EncodedCounterAndRegion >>
Counter::EncodingCounterTagAndExpansionRegionTagBits) {
case CounterMappingRegion::CodeRegion:
// Don't do anything when we have a code region with a zero counter.
break;
case CounterMappingRegion::SkippedRegion:
Kind = CounterMappingRegion::SkippedRegion;
break;
default:
return coveragemap_error::malformed;
}
}
}
// Read the source range.
uint64_t LineStartDelta, ColumnStart, NumLines, ColumnEnd;
if (auto Err =
readIntMax(LineStartDelta, std::numeric_limits<unsigned>::max()))
return Err;
if (auto Err = readULEB128(ColumnStart))
return Err;
if (ColumnStart > std::numeric_limits<unsigned>::max())
return coveragemap_error::malformed;
if (auto Err = readIntMax(NumLines, std::numeric_limits<unsigned>::max()))
return Err;
if (auto Err = readIntMax(ColumnEnd, std::numeric_limits<unsigned>::max()))
return Err;
LineStart += LineStartDelta;
// Adjust the column locations for the empty regions that are supposed to
// cover whole lines. Those regions should be encoded with the
// column range (1 -> std::numeric_limits<unsigned>::max()), but because
// the encoded std::numeric_limits<unsigned>::max() is several bytes long,
// we set the column range to (0 -> 0) to ensure that the column start and
// column end take up one byte each.
// The std::numeric_limits<unsigned>::max() is used to represent a column
// position at the end of the line without knowing the length of that line.
if (ColumnStart == 0 && ColumnEnd == 0) {
ColumnStart = 1;
ColumnEnd = std::numeric_limits<unsigned>::max();
}
DEBUG({
dbgs() << "Counter in file " << InferredFileID << " " << LineStart << ":"
<< ColumnStart << " -> " << (LineStart + NumLines) << ":"
<< ColumnEnd << ", ";
if (Kind == CounterMappingRegion::ExpansionRegion)
dbgs() << "Expands to file " << ExpandedFileID;
else
CounterMappingContext(Expressions).dump(C, dbgs());
dbgs() << "\n";
});
MappingRegions.push_back(CounterMappingRegion(
C, InferredFileID, ExpandedFileID, LineStart, ColumnStart,
LineStart + NumLines, ColumnEnd, Kind));
}
return std::error_code();
}
std::error_code RawCoverageMappingReader::read() {
// Read the virtual file mapping.
llvm::SmallVector<unsigned, 8> VirtualFileMapping;
uint64_t NumFileMappings;
if (auto Err = readSize(NumFileMappings))
return Err;
for (size_t I = 0; I < NumFileMappings; ++I) {
uint64_t FilenameIndex;
if (auto Err = readIntMax(FilenameIndex, TranslationUnitFilenames.size()))
return Err;
VirtualFileMapping.push_back(FilenameIndex);
}
// Construct the files using unique filenames and virtual file mapping.
for (auto I : VirtualFileMapping) {
Filenames.push_back(TranslationUnitFilenames[I]);
}
// Read the expressions.
uint64_t NumExpressions;
if (auto Err = readSize(NumExpressions))
return Err;
// Create an array of dummy expressions that get the proper counters
// when the expressions are read, and the proper kinds when the counters
// are decoded.
Expressions.resize(
NumExpressions,
CounterExpression(CounterExpression::Subtract, Counter(), Counter()));
for (size_t I = 0; I < NumExpressions; ++I) {
if (auto Err = readCounter(Expressions[I].LHS))
return Err;
if (auto Err = readCounter(Expressions[I].RHS))
return Err;
}
// Read the mapping regions sub-arrays.
for (unsigned InferredFileID = 0, S = VirtualFileMapping.size();
InferredFileID < S; ++InferredFileID) {
if (auto Err = readMappingRegionsSubArray(MappingRegions, InferredFileID,
VirtualFileMapping.size()))
return Err;
}
// Set the counters for the expansion regions.
// i.e. Counter of expansion region = counter of the first region
// from the expanded file.
// Perform multiple passes to correctly propagate the counters through
// all the nested expansion regions.
SmallVector<CounterMappingRegion *, 8> FileIDExpansionRegionMapping;
FileIDExpansionRegionMapping.resize(VirtualFileMapping.size(), nullptr);
for (unsigned Pass = 1, S = VirtualFileMapping.size(); Pass < S; ++Pass) {
for (auto &R : MappingRegions) {
if (R.Kind != CounterMappingRegion::ExpansionRegion)
continue;
assert(!FileIDExpansionRegionMapping[R.ExpandedFileID]);
FileIDExpansionRegionMapping[R.ExpandedFileID] = &R;
}
for (auto &R : MappingRegions) {
if (FileIDExpansionRegionMapping[R.FileID]) {
FileIDExpansionRegionMapping[R.FileID]->Count = R.Count;
FileIDExpansionRegionMapping[R.FileID] = nullptr;
}
}
}
return std::error_code();
}
namespace {
/// \brief A helper structure to access the data from a section
/// in an object file.
struct SectionData {
StringRef Data;
uint64_t Address;
std::error_code load(SectionRef &Section) {
if (auto Err = Section.getContents(Data))
return Err;
Address = Section.getAddress();
return std::error_code();
}
std::error_code get(uint64_t Pointer, size_t Size, StringRef &Result) {
if (Pointer < Address)
return coveragemap_error::malformed;
auto Offset = Pointer - Address;
if (Offset + Size > Data.size())
return coveragemap_error::malformed;
Result = Data.substr(Pointer - Address, Size);
return std::error_code();
}
};
}
template <typename T, support::endianness Endian>
std::error_code readCoverageMappingData(
SectionData &ProfileNames, StringRef Data,
std::vector<BinaryCoverageReader::ProfileMappingRecord> &Records,
std::vector<StringRef> &Filenames) {
using namespace support;
llvm::DenseSet<T> UniqueFunctionMappingData;
// Read the records in the coverage data section.
for (const char *Buf = Data.data(), *End = Buf + Data.size(); Buf < End;) {
if (Buf + 4 * sizeof(uint32_t) > End)
return coveragemap_error::malformed;
uint32_t NRecords = endian::readNext<uint32_t, Endian, unaligned>(Buf);
uint32_t FilenamesSize = endian::readNext<uint32_t, Endian, unaligned>(Buf);
uint32_t CoverageSize = endian::readNext<uint32_t, Endian, unaligned>(Buf);
uint32_t Version = endian::readNext<uint32_t, Endian, unaligned>(Buf);
switch (Version) {
case CoverageMappingVersion1:
break;
default:
return coveragemap_error::unsupported_version;
}
// Skip past the function records, saving the start and end for later.
const char *FunBuf = Buf;
Buf += NRecords * (sizeof(T) + 2 * sizeof(uint32_t) + sizeof(uint64_t));
const char *FunEnd = Buf;
// Get the filenames.
if (Buf + FilenamesSize > End)
return coveragemap_error::malformed;
size_t FilenamesBegin = Filenames.size();
RawCoverageFilenamesReader Reader(StringRef(Buf, FilenamesSize), Filenames);
if (auto Err = Reader.read())
return Err;
Buf += FilenamesSize;
// We'll read the coverage mapping records in the loop below.
const char *CovBuf = Buf;
Buf += CoverageSize;
const char *CovEnd = Buf;
if (Buf > End)
return coveragemap_error::malformed;
// Each coverage map has an alignment of 8, so we need to adjust alignment
// before reading the next map.
Buf += alignmentAdjustment(Buf, 8);
while (FunBuf < FunEnd) {
// Read the function information
T NamePtr = endian::readNext<T, Endian, unaligned>(FunBuf);
uint32_t NameSize = endian::readNext<uint32_t, Endian, unaligned>(FunBuf);
uint32_t DataSize = endian::readNext<uint32_t, Endian, unaligned>(FunBuf);
uint64_t FuncHash = endian::readNext<uint64_t, Endian, unaligned>(FunBuf);
// Now use that to read the coverage data.
if (CovBuf + DataSize > CovEnd)
return coveragemap_error::malformed;
auto Mapping = StringRef(CovBuf, DataSize);
CovBuf += DataSize;
// Ignore this record if we already have a record that points to the same
// function name. This is useful to ignore the redundant records for the
// functions with ODR linkage.
if (!UniqueFunctionMappingData.insert(NamePtr).second)
continue;
// Finally, grab the name and create a record.
StringRef FuncName;
if (std::error_code EC = ProfileNames.get(NamePtr, NameSize, FuncName))
return EC;
Records.push_back(BinaryCoverageReader::ProfileMappingRecord(
CoverageMappingVersion(Version), FuncName, FuncHash, Mapping,
FilenamesBegin, Filenames.size() - FilenamesBegin));
}
}
return std::error_code();
}
static const char *TestingFormatMagic = "llvmcovmtestdata";
static std::error_code loadTestingFormat(StringRef Data,
SectionData &ProfileNames,
StringRef &CoverageMapping,
uint8_t &BytesInAddress,
support::endianness &Endian) {
BytesInAddress = 8;
Endian = support::endianness::little;
Data = Data.substr(StringRef(TestingFormatMagic).size());
if (Data.size() < 1)
return coveragemap_error::truncated;
unsigned N = 0;
auto ProfileNamesSize =
decodeULEB128(reinterpret_cast<const uint8_t *>(Data.data()), &N);
if (N > Data.size())
return coveragemap_error::malformed;
Data = Data.substr(N);
if (Data.size() < 1)
return coveragemap_error::truncated;
N = 0;
ProfileNames.Address =
decodeULEB128(reinterpret_cast<const uint8_t *>(Data.data()), &N);
if (N > Data.size())
return coveragemap_error::malformed;
Data = Data.substr(N);
if (Data.size() < ProfileNamesSize)
return coveragemap_error::malformed;
ProfileNames.Data = Data.substr(0, ProfileNamesSize);
CoverageMapping = Data.substr(ProfileNamesSize);
return std::error_code();
}
#if 0 // HLSL Change Starts - remove support for object files
static ErrorOr<SectionRef> lookupSection(ObjectFile &OF, StringRef Name) {
StringRef FoundName;
for (const auto &Section : OF.sections()) {
if (auto EC = Section.getName(FoundName))
return EC;
if (FoundName == Name)
return Section;
}
return coveragemap_error::no_data_found;
}
#endif // HLSL Change Ends - remove support for object files
static std::error_code loadBinaryFormat(MemoryBufferRef ObjectBuffer,
SectionData &ProfileNames,
StringRef &CoverageMapping,
uint8_t &BytesInAddress,
support::endianness &Endian,
StringRef Arch) {
#if 1 // HLSL Change Starts - remove support for object files
return std::error_code();
#else
auto BinOrErr = object::createBinary(ObjectBuffer);
if (std::error_code EC = BinOrErr.getError())
return EC;
auto Bin = std::move(BinOrErr.get());
std::unique_ptr<ObjectFile> OF;
if (auto *Universal = dyn_cast<object::MachOUniversalBinary>(Bin.get())) {
// If we have a universal binary, try to look up the object for the
// appropriate architecture.
auto ObjectFileOrErr = Universal->getObjectForArch(Arch);
if (std::error_code EC = ObjectFileOrErr.getError())
return EC;
OF = std::move(ObjectFileOrErr.get());
  } else if (isa<object::ObjectFile>(Bin.get())) {
// For any other object file, upcast and take ownership.
OF.reset(cast<object::ObjectFile>(Bin.release()));
// If we've asked for a particular arch, make sure they match.
if (!Arch.empty() && OF->getArch() != Triple(Arch).getArch())
return object_error::arch_not_found;
} else
// We can only handle object files.
return coveragemap_error::malformed;
// The coverage uses native pointer sizes for the object it's written in.
BytesInAddress = OF->getBytesInAddress();
Endian = OF->isLittleEndian() ? support::endianness::little
: support::endianness::big;
// Look for the sections that we are interested in.
auto NamesSection = lookupSection(*OF, "__llvm_prf_names");
if (auto EC = NamesSection.getError())
return EC;
auto CoverageSection = lookupSection(*OF, "__llvm_covmap");
if (auto EC = CoverageSection.getError())
return EC;
// Get the contents of the given sections.
if (std::error_code EC = CoverageSection->getContents(CoverageMapping))
return EC;
if (std::error_code EC = ProfileNames.load(*NamesSection))
return EC;
return std::error_code();
#endif // HLSL Change Ends - remove support for object files
}
ErrorOr<std::unique_ptr<BinaryCoverageReader>>
BinaryCoverageReader::create(std::unique_ptr<MemoryBuffer> &ObjectBuffer,
StringRef Arch) {
std::unique_ptr<BinaryCoverageReader> Reader(new BinaryCoverageReader());
SectionData Profile;
StringRef Coverage;
uint8_t BytesInAddress;
support::endianness Endian;
std::error_code EC;
if (ObjectBuffer->getBuffer().startswith(TestingFormatMagic))
// This is a special format used for testing.
EC = loadTestingFormat(ObjectBuffer->getBuffer(), Profile, Coverage,
BytesInAddress, Endian);
else
EC = loadBinaryFormat(ObjectBuffer->getMemBufferRef(), Profile, Coverage,
BytesInAddress, Endian, Arch);
if (EC)
return EC;
if (BytesInAddress == 4 && Endian == support::endianness::little)
EC = readCoverageMappingData<uint32_t, support::endianness::little>(
Profile, Coverage, Reader->MappingRecords, Reader->Filenames);
else if (BytesInAddress == 4 && Endian == support::endianness::big)
EC = readCoverageMappingData<uint32_t, support::endianness::big>(
Profile, Coverage, Reader->MappingRecords, Reader->Filenames);
else if (BytesInAddress == 8 && Endian == support::endianness::little)
EC = readCoverageMappingData<uint64_t, support::endianness::little>(
Profile, Coverage, Reader->MappingRecords, Reader->Filenames);
else if (BytesInAddress == 8 && Endian == support::endianness::big)
EC = readCoverageMappingData<uint64_t, support::endianness::big>(
Profile, Coverage, Reader->MappingRecords, Reader->Filenames);
else
return coveragemap_error::malformed;
if (EC)
return EC;
return std::move(Reader);
}
std::error_code
BinaryCoverageReader::readNextRecord(CoverageMappingRecord &Record) {
if (CurrentRecord >= MappingRecords.size())
return coveragemap_error::eof;
FunctionsFilenames.clear();
Expressions.clear();
MappingRegions.clear();
auto &R = MappingRecords[CurrentRecord];
RawCoverageMappingReader Reader(
R.CoverageMapping,
makeArrayRef(Filenames).slice(R.FilenamesBegin, R.FilenamesSize),
FunctionsFilenames, Expressions, MappingRegions);
if (auto Err = Reader.read())
return Err;
Record.FunctionName = R.FunctionName;
Record.FunctionHash = R.FunctionHash;
Record.Filenames = FunctionsFilenames;
Record.Expressions = Expressions;
Record.MappingRegions = MappingRegions;
++CurrentRecord;
return std::error_code();
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2016 lizhaotailang
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marktony.zhihudaily.database
import android.arch.persistence.room.Room
import android.support.test.InstrumentationRegistry
import android.support.test.runner.AndroidJUnit4
import com.marktony.zhihudaily.data.ZhihuDailyContent
import org.hamcrest.CoreMatchers.*
import org.hamcrest.MatcherAssert.assertThat
import org.junit.After
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
@RunWith(AndroidJUnit4::class)
class ZhihuDailyContentDaoTest {
private lateinit var database: AppDatabase
companion object {
private val DEFAULT_BODY = "<div class=\\\"main-wrap content-wrap\\\">\\n<div class=\\\"headline\\\">\\n\\n..."
private val DEFAULT_IMAGE_SOURCE = "《安妮 · 霍尔》"
private val DEFAULT_TITLE = "小事 · 直到麻木了,就到了要走的时候"
private val DEFAULT_IMAGE = "https:\\/\\/pic1.zhimg.com\\/v2-85aa2c5a36962571b329ea65cff8bf74.jpg"
private val DEFAULT_SHARE_URL = "http:\\/\\/daily.zhihu.com\\/story\\/9683773"
private val DEFAULT_JS = listOf<String>()
private val DEFAULT_ID = 9683773
private val DEFAULT_GA_PREFIX = "052422"
private val DEFAULT_IMAGES = listOf("https:\\/\\/pic1.zhimg.com\\/v2-5c5f75baa911c58f2476334180f5cda0.jpg")
private val DEFAULT_TYPE = 0
private val DEFAULT_CSS = listOf("http:\\/\\/news-at.zhihu.com\\/css\\/news_qa.auto.css?v=4b3e3")
private val DEFAULT_ZHIHU_CONTENT = ZhihuDailyContent(DEFAULT_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_GA_PREFIX, DEFAULT_IMAGES, DEFAULT_TYPE, DEFAULT_ID, DEFAULT_CSS)
private val NEW_BODY = "NEW BODY"
}
@Before
fun initDb() {
// using an in-memory database because the information stored here disappears when the
// process is killed
database = Room.inMemoryDatabaseBuilder(InstrumentationRegistry.getContext(), AppDatabase::class.java).build()
}
@After
fun closeDb() = database.close()
@Test
fun insertZhihuDailyContentAndGetById() {
// When inserting a piece of zhihu daily content
database.zhihuDailyContentDao().insert(DEFAULT_ZHIHU_CONTENT)
// When getting the zhihu daily news story by id from the database
val loaded = database.zhihuDailyContentDao().queryContentById(DEFAULT_ID)
// The loaded data contains the expected values
assertContent(loaded, DEFAULT_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_ID, DEFAULT_GA_PREFIX, DEFAULT_TYPE, DEFAULT_CSS)
}
@Test
fun insertContentIgnoredOnConflict() {
// Given that a piece of zhihu daily content is inserted
database.zhihuDailyContentDao().insert(DEFAULT_ZHIHU_CONTENT)
// When a piece of zhihu daily content with the same id is inserted
val newContent = ZhihuDailyContent(NEW_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_GA_PREFIX, DEFAULT_IMAGES, DEFAULT_TYPE, DEFAULT_ID, DEFAULT_CSS)
database.zhihuDailyContentDao().insert(newContent)
// When getting the zhihu daily news story by id from the database
val loaded = database.zhihuDailyContentDao().queryContentById(DEFAULT_ID)
// The loaded data contains the expected values
// The insertion of a piece of zhihu daily content whose id already exists in the database
// is ignored.
assertContent(loaded, DEFAULT_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_ID, DEFAULT_GA_PREFIX, DEFAULT_TYPE, DEFAULT_CSS)
}
@Test
fun updateContentAndGetById() {
// When inserting a piece of zhihu daily content
database.zhihuDailyContentDao().insert(DEFAULT_ZHIHU_CONTENT)
// When the zhihu daily news content is updated
val updatedContent = ZhihuDailyContent(NEW_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_GA_PREFIX, DEFAULT_IMAGES, DEFAULT_TYPE, DEFAULT_ID, DEFAULT_CSS)
database.zhihuDailyContentDao().update(updatedContent)
val loaded = database.zhihuDailyContentDao().queryContentById(DEFAULT_ID)
// The loaded data contains the expected values
assertContent(loaded, NEW_BODY, DEFAULT_IMAGE_SOURCE, DEFAULT_TITLE, DEFAULT_IMAGE, DEFAULT_SHARE_URL, DEFAULT_JS, DEFAULT_ID, DEFAULT_GA_PREFIX, DEFAULT_TYPE, DEFAULT_CSS)
}
@Test
fun deleteContentAndGettingAllContent() {
// Given a piece of zhihu daily content inserted
database.zhihuDailyContentDao().insert(DEFAULT_ZHIHU_CONTENT)
// When deleting a piece of zhihu daily content by id
database.zhihuDailyContentDao().delete(DEFAULT_ZHIHU_CONTENT)
// When getting the zhihu daily content
val content = database.zhihuDailyContentDao().queryContentById(DEFAULT_ID)
// The content is null
assertThat(content, `is`(nullValue()))
}
private fun assertContent(content: ZhihuDailyContent?,
body: String,
imageSource: String,
title: String,
image: String,
shareUrl: String,
js: List<String>,
id: Int,
gaPrefix: String,
type: Int,
css: List<String>) {
assertThat<ZhihuDailyContent>(content as ZhihuDailyContent, notNullValue())
assertThat(content.body, `is`(body))
assertThat(content.imageSource, `is`(imageSource))
assertThat(content.title, `is`(title))
assertThat(content.image, `is`(image))
assertThat(content.shareUrl, `is`(shareUrl))
assertThat(content.js, `is`(js))
assertThat(content.id, `is`(id))
assertThat(content.gaPrefix, `is`(gaPrefix))
assertThat(content.type, `is`(type))
assertThat(content.css, `is`(css))
}
}
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: 3ac02e7e783571c468f9c086d2384ba7
timeCreated: 1485107928
licenseType: Store
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 0
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
filterMode: 0
aniso: -1
mipBias: -1
wrapMode: 0
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 2
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 10
textureShape: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 64
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
- buildTarget: Standalone
maxTextureSize: 64
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html>
<html>
<head>
<title id='Description'>Angular FormattedInput NegativeNumbers</title>
<base href="./../dist/formattedinput/negativenumbers/">
<meta name="description" content="This is an example of negative numbers in Angular FormattedInput." />
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Styles -->
<link rel="stylesheet" href="/node_modules/jqwidgets-ng/jqwidgets/styles/jqx.base.css" type="text/css" />
<!-- jQWidgets -->
<script src="./../../../scripts/demos.js"></script>
</head>
<!-- Display the application -->
<body>
<div class="example-description">
Angular FormattedInput can also display negative numbers in the four supported numeral systems. Negative binary, octal and hexadecimal numbers are represented as 64-bit numbers using the two's complement method.
</div>
<app-root>Loading...</app-root>
<script src="runtime-es2015.js" type="module"></script><script src="polyfills-es2015.js" type="module"></script><script src="runtime-es5.js" nomodule></script><script src="polyfills-es5.js" nomodule></script><script src="main-es2015.js" type="module"></script><script src="main-es5.js" nomodule></script>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
// <copyright file="BaseNavigator.cs" company="Automate The Planet Ltd.">
// Copyright 2016 Automate The Planet Ltd.
// Licensed under the Apache License, Version 2.0 (the "License");
// You may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// <author>Anton Angelov</author>
// <site>http://automatetheplanet.com/</site>
namespace AAngelov.Utilities.UI.Core
{
using System;
using System.Windows;
using FirstFloor.ModernUI.Windows.Navigation;
/// <summary>
/// Contains methods which navigate to different views with option to set different parameters
/// </summary>
public class BaseNavigator
{
/// <summary>
/// Navigates the specified source.
/// </summary>
/// <param name="source">The source.</param>
/// <param name="url">The URL.</param>
public void Navigate(FrameworkElement source, string url)
{
DefaultLinkNavigator navigator = new DefaultLinkNavigator();
navigator.Navigate(new Uri(url, UriKind.Relative), source, null);
}
/// <summary>
/// Navigates the back.
/// </summary>
/// <param name="source">The source.</param>
public void NavigateBack(FrameworkElement source)
{
string url = "cmd://browseback";
DefaultLinkNavigator navigator = new DefaultLinkNavigator();
navigator.Navigate(new Uri(url, UriKind.Absolute), source, "_self");
}
}
}
|
{
"pile_set_name": "Github"
}
|
mysql> CREATE TABLE pet (name VARCHAR(20), owner VARCHAR(20),
-> species VARCHAR(20), sex CHAR(1), birth DATE, death DATE);
After creating your table, you need to populate it. The LOAD DATA and INSERT statements are useful for this.
You could create a text file pet.txt containing one record per line, with values separated by tabs, and given in the order in which the columns were listed in the CREATE TABLE statement.
For missing values (such as unknown sexes or death dates for animals that are still living), you can use NULL values. To represent these in your text file, use \N (backslash, capital-N).
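For example, a line in pet.txt for a bird whose sex and death date are unknown might look like this (fields are tab-separated; the particular values are only illustrative):
Whistler        Gwen    bird    \N      1997-12-09      \N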
mysql> LOAD DATA LOCAL INFILE '/path/pet.txt' INTO TABLE pet;
When you want to add new records one at a time, the INSERT statement is useful.
mysql> INSERT INTO pet
-> VALUES ('Puffball','Diane','hamster','f','1999-03-30',NULL);
The simplest form of SELECT retrieves everything from a table:
mysql> SELECT * FROM pet;
Fix only the erroneous record with an UPDATE statement:
mysql> UPDATE pet SET birth = '1989-08-31' WHERE name = 'Bowser';
You can select only particular rows from your table.
mysql> SELECT * FROM pet WHERE name = 'Bowser';
mysql> SELECT * FROM pet WHERE birth >= '1998-1-1';
mysql> SELECT * FROM pet WHERE species = 'dog' AND sex = 'f';
mysql> SELECT * FROM pet WHERE species = 'snake' OR species = 'bird';
If you do not want to see entire rows, name just the columns you are interested in, separated by commas. For example, to see when your animals were born:
mysql> SELECT name, birth FROM pet;
To find out who owns pets, use this query:
mysql> SELECT owner FROM pet;
However, notice that the query simply retrieves the owner field from each record, and some of them appear more than once. To minimize the output, retrieve each unique output record just once by adding the keyword DISTINCT:
mysql> SELECT DISTINCT owner FROM pet;
Here are animal birthdays, sorted by date:
mysql> SELECT name, birth FROM pet ORDER BY birth;
You can force a case-sensitive sort for a column by using the BINARY cast: ORDER BY BINARY col_name.
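For example, to make the sort of pet names case-sensitive (a minimal illustration using the pet table from above):
mysql> SELECT name, birth FROM pet ORDER BY BINARY name;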
The default sort order is ascending, with smallest values first. To sort in reverse (descending) order, add the DESC keyword to the name of the column you are sorting by:
mysql> SELECT name, birth FROM pet ORDER BY birth DESC;
You can sort on multiple columns, and you can sort columns in different directions. For example, to sort by type of animal in ascending order, then by birth date within animal type in descending order (youngest animals first), use the following query:
mysql> SELECT name, species, birth FROM pet
-> ORDER BY species, birth DESC;
To determine how many years old each of your pets is, compute the difference in the year part of the current date and the birth date, then subtract one if the current date occurs earlier in the calendar year than the birth date. The following query shows, for each pet, the birth date, the current date, and the age in years.
mysql> SELECT name, birth, CURDATE(),
-> (YEAR(CURDATE())-YEAR(birth))
-> - (RIGHT(CURDATE(),5)<RIGHT(birth,5))
-> AS age
-> FROM pet;
Here, YEAR() pulls out the year part of a date and RIGHT() pulls off the rightmost five characters that represent the MM-DD (calendar year) part of the date. The part of the expression that compares the MM-DD values evaluates to 1 or 0, which adjusts the year difference down a year if CURDATE() occurs earlier in the year than birth. The full expression is somewhat ungainly, so an alias (age) is used to make the output column label more meaningful.
The query works, but the result could be scanned more easily if the rows were presented in some order. This can be done by adding an ORDER BY name clause to sort the output by name:
mysql> SELECT name, birth, CURDATE(),
-> (YEAR(CURDATE())-YEAR(birth))
-> - (RIGHT(CURDATE(),5)<RIGHT(birth,5))
-> AS age
-> FROM pet ORDER BY name;
To sort the output by age rather than name, just use a different ORDER BY clause:
mysql> SELECT name, birth, CURDATE(),
-> (YEAR(CURDATE())-YEAR(birth))
-> - (RIGHT(CURDATE(),5)<RIGHT(birth,5))
-> AS age
-> FROM pet ORDER BY age;
A similar query can be used to determine age at death for animals that have died. You determine which animals these are by checking whether the death value is NULL. Then, for those with non-NULL values, compute the difference between the death and birth values:
mysql> SELECT name, birth, death,
-> (YEAR(death)-YEAR(birth)) - (RIGHT(death,5)<RIGHT(birth,5))
-> AS age
-> FROM pet WHERE death IS NOT NULL ORDER BY age;
The query uses death IS NOT NULL rather than death <> NULL because NULL is a special value that cannot be compared using the usual comparison operators.
Finding animals with birthdays in the upcoming month is easy, too. Suppose that the current month is April. Then the month value is 4 and you look for animals born in May (month 5) like this:
mysql> SELECT name, birth FROM pet WHERE MONTH(birth) = 5;
You can even write the query so that it works no matter what the current month is. That way you don't have to use a particular month number in the query. DATE_ADD() allows you to add a time interval to a given date. If you add a month to the value of CURDATE(), then extract the month part with MONTH(), the result produces the month in which to look for birthdays:
mysql> SELECT name, birth FROM pet
-> WHERE MONTH(birth) = MONTH(DATE_ADD(CURDATE(),INTERVAL 1 MONTH));
A different way to accomplish the same task is to add 1 to get the next month after the current one (after using the modulo function (MOD) to wrap around the month value to 0 if it is currently 12):
mysql> SELECT name, birth FROM pet
-> WHERE MONTH(birth) = MOD(MONTH(CURDATE()), 12) + 1;
Note that MONTH() returns a number between 1 and 12 and MOD(something,12) returns a number between 0 and 11, so the addition has to come after the MOD(); otherwise we would go from November (11) to January (1).
Because NULL cannot be tested with the ordinary comparison operators, use the IS NULL and IS NOT NULL operators instead:
mysql> SELECT 1 IS NULL, 1 IS NOT NULL;
Note that in MySQL, 0 or NULL means false and anything else means true. The default truth value from a boolean operation is 1.
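As a quick illustration of these truth values (1 for true, 0 for false):
mysql> SELECT 1 = 1, 1 = 2, 1 IS NULL;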
SQL pattern matching allows you to use '_' to match any single character and '%' to match an arbitrary number of characters (including zero characters). In MySQL, SQL patterns are case-insensitive by default. Some examples are shown here. Note that you do not use = or <> when you use SQL patterns; use the LIKE or NOT LIKE comparison operators instead.
To find names beginning with 'b':
mysql> SELECT * FROM pet WHERE name LIKE 'b%';
To find names ending with 'fy':
mysql> SELECT * FROM pet WHERE name LIKE '%fy';
To find names containing a 'w':
mysql> SELECT * FROM pet WHERE name LIKE '%w%';
To find names containing exactly five characters, use five instances of the '_' pattern character:
mysql> SELECT * FROM pet WHERE name LIKE '_____';
The other type of pattern matching provided by MySQL uses extended regular expressions. When you test for a match for this type of pattern, use the REGEXP and NOT REGEXP operators (or RLIKE and NOT RLIKE, which are synonyms).
To demonstrate how extended regular expressions work, the LIKE queries shown previously are rewritten here to use REGEXP.
To find names beginning with 'b', use '^' to match the beginning of the name:
mysql> SELECT * FROM pet WHERE name REGEXP '^b';
To find names ending with 'fy', use '$' to match the end of the name:
mysql> SELECT * FROM pet WHERE name REGEXP 'fy$';
To find names containing a 'w', use this query:
mysql> SELECT * FROM pet WHERE name REGEXP 'w';
To find names containing exactly five characters, use '^' and '$' to match the beginning and end of the name, and five instances of '.' in between:
mysql> SELECT * FROM pet WHERE name REGEXP '^.....$';
You could also write the previous query using the '{n}' "repeat-n-times" operator:
mysql> SELECT * FROM pet WHERE name REGEXP '^.{5}$';
You can also store patterns in your database columns and use syntax like this:
SELECT country FROM ip_list WHERE '127.0.0.1' like ip
where ip is the column that holds the patterns.
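For instance, a table such as the following (created here purely for illustration) would let the query above return 'local' for the address 127.0.0.1, because each stored value is used as the LIKE pattern:
mysql> CREATE TABLE ip_list (ip VARCHAR(20), country VARCHAR(20));
mysql> INSERT INTO ip_list VALUES ('127.0.%', 'local');
mysql> SELECT country FROM ip_list WHERE '127.0.0.1' LIKE ip;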
Counting the total number of animals you have is the same question as "How many rows are in the pet table?" because there is one record per pet. COUNT(*) counts the number of rows, so the query to count your animals looks like this:
mysql> SELECT COUNT(*) FROM pet;
Earlier, you retrieved the names of the people who owned pets. You can use COUNT() if you want to find out how many pets each owner has:
mysql> SELECT owner, COUNT(*) FROM pet GROUP BY owner;
Note the use of GROUP BY to group together all records for each owner.
COUNT() and GROUP BY are useful for characterizing your data in various ways. The following examples show different ways to perform animal census operations.
Number of animals per species:
mysql> SELECT species, COUNT(*) FROM pet GROUP BY species;
Number of animals per sex:
mysql> SELECT sex, COUNT(*) FROM pet GROUP BY sex;
Number of animals per combination of species and sex:
mysql> SELECT species, sex, COUNT(*) FROM pet GROUP BY species, sex;
+---------+------+----------+
| species | sex | COUNT(*) |
+---------+------+----------+
| bird | NULL | 1 |
| bird | f | 1 |
| cat | f | 1 |
| cat | m | 1 |
| dog | f | 1 |
| dog | m | 2 |
| hamster | f | 1 |
| snake | m | 1 |
+---------+------+----------+
You need not retrieve an entire table when you use COUNT(). For example, the previous query, when performed just on dogs and cats, looks like this:
mysql> SELECT species, sex, COUNT(*) FROM pet
-> WHERE species = 'dog' OR species = 'cat'
-> GROUP BY species, sex;
+---------+------+----------+
| species | sex | COUNT(*) |
+---------+------+----------+
| cat | f | 1 |
| cat | m | 1 |
| dog | f | 1 |
| dog | m | 2 |
+---------+------+----------+
Or, if you wanted the number of animals per sex only for known-sex animals:
mysql> SELECT species, sex, COUNT(*) FROM pet
-> WHERE sex IS NOT NULL
-> GROUP BY species, sex;
the CREATE TABLE statement for the event table might look like this:
mysql> CREATE TABLE event (name VARCHAR(20), date DATE,
-> type VARCHAR(15), remark VARCHAR(255));
Suppose that you want to find out the ages at which each pet had its litters. We saw earlier how to calculate ages from two dates. The litter date of the mother is in the event table, but to calculate her age on that date you need her birth date, which is stored in the pet table. This means the query requires both tables:
mysql> SELECT pet.name,
-> (YEAR(date)-YEAR(birth)) - (RIGHT(date,5)<RIGHT(birth,5)) AS age,
-> remark
-> FROM pet, event
-> WHERE pet.name = event.name AND type = 'litter';
+--------+------+-----------------------------+
| name | age | remark |
+--------+------+-----------------------------+
| Fluffy | 2 | 4 kittens, 3 female, 1 male |
| Buffy | 4 | 5 puppies, 2 female, 3 male |
| Buffy | 5 | 3 puppies, 3 female |
+--------+------+-----------------------------+
Sometimes it is useful to join a table to itself, if you want to compare records in a table to other records in that same table. For example, to find breeding pairs among your pets, you can join the pet table with itself to produce candidate pairs of males and females of like species:
mysql> SELECT p1.name, p1.sex, p2.name, p2.sex, p1.species
-> FROM pet AS p1, pet AS p2
-> WHERE p1.species = p2.species AND p1.sex = 'f' AND p2.sex = 'm';
+--------+------+--------+------+---------+
| name | sex | name | sex | species |
+--------+------+--------+------+---------+
| Fluffy | f | Claws | m | cat |
| Buffy | f | Fang | m | dog |
| Buffy | f | Bowser | m | dog |
+--------+------+--------+------+---------+
In this query, we specify aliases for the table name in order to refer to the columns and keep straight which instance of the table each column reference is associated with.
3.4. Getting Information About Databases and Tables
What if you forget the name of a database or table, or what the structure of a given table is (for example, what its columns are called)? MySQL addresses this problem through several statements that provide information about the databases and tables it supports.
You have previously seen SHOW DATABASES, which lists the databases managed by the server. To find out which database is currently selected, use the DATABASE() function:
mysql> SELECT DATABASE();
+------------+
| DATABASE() |
+------------+
| menagerie |
+------------+
If you haven't selected any database yet, the result is NULL (or the empty string before MySQL 4.1.1).
To find out what tables the current database contains (for example, when you're not sure about the name of a table), use this command:
mysql> SHOW TABLES;
+---------------------+
| Tables in menagerie |
+---------------------+
| event |
| pet |
+---------------------+
If you want to find out about the structure of a table, the DESCRIBE command is useful; it displays information about each of a table's columns:
mysql> DESCRIBE pet;
+---------+-------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+---------+-------------+------+-----+---------+-------+
| name | varchar(20) | YES | | NULL | |
| owner | varchar(20) | YES | | NULL | |
| species | varchar(20) | YES | | NULL | |
| sex | char(1) | YES | | NULL | |
| birth | date | YES | | NULL | |
| death | date | YES | | NULL | |
+---------+-------------+------+-----+---------+-------+
Field indicates the column name, Type is the data type for the column, NULL indicates whether the column can contain NULL values, Key indicates whether the column is indexed, and Default specifies the column's default value.
If you have indexes on a table, SHOW INDEX FROM tbl_name produces information about them.
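For example, assuming an index had been created on the pet table, you could inspect it like this:
mysql> SHOW INDEX FROM pet;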
|
{
"pile_set_name": "Github"
}
|
import assert from 'assert'
import { Given, When, Then } from '@cucumber/fake-cucumber'
Given('there are {int} cucumbers', function (initialCount) {
this.count = initialCount
})
When('I eat {int} cucumbers', function (eatCount) {
this.count -= eatCount
})
Then('I should have {int} cucumbers', function (expectedCount) {
assert.strictEqual(this.count, expectedCount)
})
|
{
"pile_set_name": "Github"
}
|
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
class Google_Service_HangoutsChat_User extends Google_Model
{
public $displayName;
public $name;
public $type;
public function setDisplayName($displayName)
{
$this->displayName = $displayName;
}
public function getDisplayName()
{
return $this->displayName;
}
public function setName($name)
{
$this->name = $name;
}
public function getName()
{
return $this->name;
}
public function setType($type)
{
$this->type = $type;
}
public function getType()
{
return $this->type;
}
}
|
{
"pile_set_name": "Github"
}
|
package huawei.android.widget;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.database.ContentObserver;
import android.graphics.Rect;
import android.os.Handler;
import android.provider.Settings;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
import android.view.animation.TranslateAnimation;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import huawei.android.widget.loader.ResLoader;
import huawei.android.widget.loader.ResLoaderUtil;
public class HwFloatingLayout extends FrameLayout {
private static final long ANIMATION_DURATION = 300;
private static final int ARRAY_SIZE = 2;
private static final int HEIGHT_QUARTER_NUM = 4;
private static final String KEY_NAVIGATION_BAR_STATUS = "navigationbar_is_min";
private static final String TAG = "HwFloatingLayout";
private Activity mActivity;
private FrameLayout mContentLayout;
private ContentObserver mContentObserver = new ContentObserver(new Handler()) {
/* class huawei.android.widget.HwFloatingLayout.AnonymousClass1 */
public void onChange(boolean isSelfChange) {
boolean isExist = HwFloatingLayout.this.isNavigationBarExist();
if (isExist != HwFloatingLayout.this.mIsNavigationBarExist) {
HwFloatingLayout.this.mIsNavigationBarExist = isExist;
HwFloatingLayout.this.updateSystemWidgetsLayout();
}
}
};
private final Context mContext;
private View mFab;
private int mFabHeight;
private View mHwBottomNavigationView;
private int mHwBottomNavigationViewHeight;
private HwFloatingLayout mHwFloatingView;
private View mHwToolbar;
private int mHwToolbarHeight;
private final Rect mInsetsRect = new Rect();
private Interpolator mInterpolator;
private boolean mIsInMultiWindowOrPictureInPictureMode;
private boolean mIsNavigationBarExist;
private boolean mIsSplitViewListenerAdded;
private int mLastKeyBoardHeight;
private LinearLayout mLinearLayout;
private int mNavigationBarHeight;
private ViewTreeObserver.OnGlobalLayoutListener mOnGlobalLayoutListener = new ViewTreeObserver.OnGlobalLayoutListener() {
/* class huawei.android.widget.HwFloatingLayout.AnonymousClass3 */
public void onGlobalLayout() {
int screenHeigh;
int keyboardHeight;
View view;
Rect rect = new Rect();
HwFloatingLayout.this.mHwFloatingView.getWindowVisibleDisplayFrame(rect);
int usableHeight = rect.bottom;
if (usableHeight != HwFloatingLayout.this.mPreviousUsableHeight && (keyboardHeight = (screenHeigh = HwFloatingLayout.this.mHwFloatingView.getRootView().getHeight()) - usableHeight) != HwFloatingLayout.this.mLastKeyBoardHeight) {
HwFloatingLayout.this.mLastKeyBoardHeight = keyboardHeight;
if (HwFloatingLayout.this.mLinearLayout != null) {
HwFloatingLayout.this.mPreviousUsableHeight = usableHeight;
if (keyboardHeight <= screenHeigh / 4) {
HwFloatingLayout.this.mLinearLayout.setPadding(0, 0, 0, 0);
} else if (HwFloatingLayout.this.mActivity != null && (view = HwFloatingLayout.this.mActivity.getCurrentFocus()) != null && (view instanceof TextView)) {
int[] location = new int[2];
view.getLocationOnScreen(location);
int bottomPadding = keyboardHeight - ((screenHeigh - location[1]) - view.getHeight());
if (bottomPadding > 0) {
HwFloatingLayout.this.mLinearLayout.setPadding(0, -bottomPadding, 0, 0);
}
}
}
}
}
};
private int mOrientation;
private int mPreviousUsableHeight;
private RelativeLayout mRelativeLayoutBottom;
private RelativeLayout mRelativeLayoutTop;
private ScrollView mScrollView;
private View mSplitView;
private int mSplitViewHeight;
private View.OnLayoutChangeListener mSplitViewOnLayoutChangeListener = new View.OnLayoutChangeListener() {
/* class huawei.android.widget.HwFloatingLayout.AnonymousClass2 */
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
int splitViewHeight = 0;
if (HwFloatingLayout.this.mSplitView != null) {
splitViewHeight = HwFloatingLayout.this.mSplitView.getHeight();
}
if (splitViewHeight != HwFloatingLayout.this.mSplitViewHeight) {
HwFloatingLayout.this.mSplitViewHeight = splitViewHeight;
HwFloatingLayout.this.mViewHolder.requestLayout();
HwFloatingLayout.this.mViewHolder.invalidate();
}
}
};
private View mViewHolder;
private int mViewHolderHeight;
public HwFloatingLayout(Context context) {
super(context, null);
this.mContext = context;
Context context2 = this.mContext;
if (context2 instanceof Activity) {
this.mActivity = (Activity) context2;
}
this.mIsNavigationBarExist = isNavigationBarExist();
this.mOrientation = this.mContext.getResources().getConfiguration().orientation;
this.mInterpolator = AnimationUtils.loadInterpolator(this.mContext, 34078890);
}
/* access modifiers changed from: protected */
public void onAttachedToWindow() {
super.onAttachedToWindow();
this.mContext.getContentResolver().registerContentObserver(Settings.Global.getUriFor(KEY_NAVIGATION_BAR_STATUS), false, this.mContentObserver);
HwFloatingLayout hwFloatingLayout = this.mHwFloatingView;
if (hwFloatingLayout != null) {
hwFloatingLayout.getViewTreeObserver().addOnGlobalLayoutListener(this.mOnGlobalLayoutListener);
}
}
/* access modifiers changed from: protected */
public void onDetachedFromWindow() {
super.onDetachedFromWindow();
this.mContext.getContentResolver().unregisterContentObserver(this.mContentObserver);
HwFloatingLayout hwFloatingLayout = this.mHwFloatingView;
if (hwFloatingLayout != null) {
hwFloatingLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this.mOnGlobalLayoutListener);
}
View view = this.mSplitView;
if (view != null) {
view.removeOnLayoutChangeListener(this.mSplitViewOnLayoutChangeListener);
}
}
/* access modifiers changed from: protected */
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
this.mOrientation = newConfig.orientation;
}
public void setContentView(int layoutResId) {
this.mHwFloatingView = (HwFloatingLayout) LayoutInflater.from(this.mContext).inflate(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.LAYOUT, "hw_immersive_mode_layout"), this);
this.mRelativeLayoutTop = (RelativeLayout) findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "relative_layout_top"));
this.mRelativeLayoutBottom = (RelativeLayout) findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "relative_layout_bottom"));
this.mContentLayout = (FrameLayout) findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "user_content"));
LayoutInflater.from(this.mContext).inflate(layoutResId, this.mContentLayout);
this.mLinearLayout = (LinearLayout) findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "linear_layout"));
this.mScrollView = (ScrollView) findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "scroll_view"));
this.mScrollView.setVerticalScrollBarEnabled(false);
this.mViewHolder = findViewById(ResLoader.getInstance().getIdentifier(this.mContext, ResLoaderUtil.ID, "view_holder"));
this.mViewHolder.setBackgroundColor(0);
Activity activity = this.mActivity;
if (activity != null) {
activity.setContentView(this.mHwFloatingView);
}
}
public void addHwToolbar(View hwToolbar) {
if (hwToolbar != null) {
this.mHwToolbar = hwToolbar;
this.mRelativeLayoutTop.addView(this.mHwToolbar, new RelativeLayout.LayoutParams(-1, -2));
}
}
public void addHwBottomNavigationView(View hwBottomNavigationView) {
if (hwBottomNavigationView != null) {
this.mHwBottomNavigationView = hwBottomNavigationView;
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(-1, -2);
params.addRule(12, -1);
this.mRelativeLayoutBottom.addView(this.mHwBottomNavigationView, params);
}
}
public void addFAB(View fab) {
if (fab != null) {
this.mFab = fab;
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(-2, -2);
params.addRule(12, -1);
params.addRule(11, -1);
this.mRelativeLayoutBottom.addView(this.mFab, params);
}
}
/* access modifiers changed from: protected */
public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
boolean isInMultiWindowOrPictureInPictureMode;
int navigationbarHeight;
View rootView;
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int hwToolbarHeight = 0;
int hwBottomNavigationViewHeight = 0;
int fabHeight = 0;
View view = this.mHwToolbar;
if (view != null) {
hwToolbarHeight = view.getMeasuredHeight();
}
View view2 = this.mHwBottomNavigationView;
if (view2 != null) {
hwBottomNavigationViewHeight = view2.getMeasuredHeight();
}
View view3 = this.mFab;
if (view3 != null) {
fabHeight = view3.getMeasuredHeight();
}
HwFloatingLayout hwFloatingLayout = this.mHwFloatingView;
if (!(hwFloatingLayout == null || (rootView = hwFloatingLayout.getRootView()) == null)) {
this.mSplitView = rootView.findViewById(16909427);
}
View rootView2 = this.mSplitView;
if (rootView2 != null && !this.mIsSplitViewListenerAdded) {
this.mIsSplitViewListenerAdded = true;
rootView2.addOnLayoutChangeListener(this.mSplitViewOnLayoutChangeListener);
}
Activity activity = this.mActivity;
if (activity != null && (activity.isInMultiWindowMode() || this.mActivity.isInPictureInPictureMode())) {
isInMultiWindowOrPictureInPictureMode = true;
} else {
isInMultiWindowOrPictureInPictureMode = false;
}
if (!isNavigationBarExist() || this.mOrientation != 1) {
navigationbarHeight = 0;
} else {
navigationbarHeight = getNavigationHeight();
}
this.mViewHolderHeight = this.mHwBottomNavigationViewHeight + navigationbarHeight + this.mSplitViewHeight + this.mFabHeight;
int linearLayoutMeasureSpec = View.MeasureSpec.makeSafeMeasureSpec(((View.MeasureSpec.getSize(heightMeasureSpec) - hwToolbarHeight) - getStatusBarHeight()) + this.mViewHolderHeight, View.MeasureSpec.getMode(heightMeasureSpec));
this.mContentLayout.getLayoutParams().height = (View.MeasureSpec.getSize(heightMeasureSpec) - hwToolbarHeight) - getStatusBarHeight();
this.mViewHolder.getLayoutParams().height = this.mViewHolderHeight;
LinearLayout linearLayout = this.mLinearLayout;
if (linearLayout != null) {
linearLayout.measure(widthMeasureSpec, linearLayoutMeasureSpec);
}
if (!(hwToolbarHeight == this.mHwToolbarHeight && hwBottomNavigationViewHeight == this.mHwBottomNavigationViewHeight && fabHeight == this.mFabHeight && isInMultiWindowOrPictureInPictureMode == this.mIsInMultiWindowOrPictureInPictureMode)) {
this.mHwToolbarHeight = hwToolbarHeight;
this.mHwBottomNavigationViewHeight = hwBottomNavigationViewHeight;
this.mFabHeight = fabHeight;
this.mIsInMultiWindowOrPictureInPictureMode = isInMultiWindowOrPictureInPictureMode;
updateSystemWidgetsLayout();
}
ScrollView scrollView = this.mScrollView;
if (scrollView != null) {
scrollView.setPadding(0, getStatusBarHeight() + this.mHwToolbarHeight, 0, 0);
}
}
private void moveView(View view, int toDeltaY) {
if (view.getLayoutParams() instanceof ViewGroup.MarginLayoutParams) {
ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams) view.getLayoutParams();
int lastBottomMargin = params.bottomMargin;
if (this.mIsInMultiWindowOrPictureInPictureMode) {
params.bottomMargin = toDeltaY;
view.setLayoutParams(params);
return;
}
Animation animation = new TranslateAnimation(0.0f, 0.0f, 0.0f, (float) (lastBottomMargin - toDeltaY));
animation.setDuration(ANIMATION_DURATION);
animation.setInterpolator(this.mInterpolator);
animation.setAnimationListener(new MoveViewAnimationListener(view, params, toDeltaY));
view.startAnimation(animation);
}
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private void updateSystemWidgetsLayout() {
int bottomMargin = 0;
if (this.mHwToolbar != null) {
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(-1, -2);
params.topMargin = getStatusBarHeight();
this.mHwToolbar.setLayoutParams(params);
}
if (this.mIsInMultiWindowOrPictureInPictureMode || !isNavigationBarExist()) {
this.mNavigationBarHeight = 0;
} else {
this.mNavigationBarHeight = getNavigationHeight();
}
int i = this.mOrientation;
if (i == 1) {
bottomMargin = this.mNavigationBarHeight;
} else if (i == 2) {
bottomMargin = 0;
}
View view = this.mHwBottomNavigationView;
if (view != null) {
moveView(view, bottomMargin);
}
View view2 = this.mSplitView;
if (view2 != null) {
moveView(view2, this.mHwBottomNavigationViewHeight + bottomMargin);
}
View view3 = this.mFab;
if (view3 != null) {
moveView(view3, this.mHwBottomNavigationViewHeight + bottomMargin + this.mSplitViewHeight);
}
setPadding();
}
private void setPadding() {
if (this.mOrientation == 2) {
this.mRelativeLayoutBottom.setPadding(0, 0, this.mNavigationBarHeight, 0);
this.mRelativeLayoutTop.setPadding(0, 0, this.mNavigationBarHeight, 0);
this.mLinearLayout.setPadding(0, 0, this.mNavigationBarHeight, 0);
}
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private boolean isNavigationBarExist() {
boolean isExist = false;
if (Settings.Global.getInt(this.mContext.getContentResolver(), KEY_NAVIGATION_BAR_STATUS, 0) == 0) {
isExist = true;
}
return isExist;
}
private int getStatusBarHeight() {
return this.mContext.getResources().getDimensionPixelSize(this.mContext.getResources().getIdentifier("status_bar_height", ResLoaderUtil.DIMEN, "android"));
}
private int getNavigationHeight() {
return this.mContext.getResources().getDimensionPixelSize(this.mContext.getResources().getIdentifier("navigation_bar_height", ResLoaderUtil.DIMEN, "android"));
}
public static class CustomScrollView extends ScrollView {
public CustomScrollView(Context context) {
super(context);
}
public CustomScrollView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public CustomScrollView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/* access modifiers changed from: protected */
public void measureChild(View child, int parentWidthMeasureSpec, int parentHeightMeasureSpec) {
ViewGroup.LayoutParams lp = child.getLayoutParams();
child.measure(getChildMeasureSpec(parentWidthMeasureSpec, this.mPaddingLeft + this.mPaddingRight, lp.width), getChildMeasureSpec(parentHeightMeasureSpec, this.mPaddingTop + this.mPaddingBottom, lp.height));
}
/* access modifiers changed from: protected */
public void measureChildWithMargins(View child, int parentWidthMeasureSpec, int widthUsed, int parentHeightMeasureSpec, int heightUsed) {
ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) child.getLayoutParams();
child.measure(getChildMeasureSpec(parentWidthMeasureSpec, this.mPaddingLeft + this.mPaddingRight + lp.leftMargin + lp.rightMargin + widthUsed, lp.width), getChildMeasureSpec(parentHeightMeasureSpec, this.mPaddingTop + this.mPaddingBottom + lp.topMargin + lp.bottomMargin + heightUsed, lp.height));
}
public boolean onStartNestedScroll(View child, View target, int nestedScrollAxes) {
return (nestedScrollAxes & 2) != 0;
}
public void onNestedPreScroll(View target, int dx, int dy, int[] consumed) {
if (dy > 0) {
if (!canScrollVerticallyDown(target) && canScrollVerticallyDown(this)) {
smoothScrollBy(0, dy);
consumed[1] = dy;
}
} else if (!canScrollVerticallyUp(target) && canScrollVerticallyUp(this)) {
smoothScrollBy(0, dy);
consumed[1] = dy;
}
}
private boolean canScrollVerticallyDown(View view) {
return canScrollVertically(view, 1);
}
private boolean canScrollVerticallyUp(View view) {
return canScrollVertically(view, -1);
}
private boolean canScrollVertically(View view, int direction) {
if (view instanceof ViewGroup) {
if (view.canScrollVertically(direction)) {
return true;
}
ViewGroup viewGroup = (ViewGroup) view;
int count = viewGroup.getChildCount();
for (int i = 0; i < count; i++) {
if (canScrollVertically(viewGroup.getChildAt(i), direction)) {
return true;
}
}
}
if (view.canScrollVertically(direction)) {
return true;
}
return false;
}
}
/* access modifiers changed from: package-private */
public class MoveViewAnimationListener implements Animation.AnimationListener {
private ViewGroup.MarginLayoutParams mParams;
private int mToDeltaY;
private View mView;
MoveViewAnimationListener(View view, ViewGroup.MarginLayoutParams params, int toDeltaY) {
this.mView = view;
this.mParams = params;
this.mToDeltaY = toDeltaY;
}
public void onAnimationEnd(Animation animation) {
this.mView.clearAnimation();
ViewGroup.MarginLayoutParams marginLayoutParams = this.mParams;
marginLayoutParams.bottomMargin = this.mToDeltaY;
this.mView.setLayoutParams(marginLayoutParams);
}
public void onAnimationRepeat(Animation animation) {
}
public void onAnimationStart(Animation animation) {
}
}
}
|
{
"pile_set_name": "Github"
}
|