code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
3
942
language
stringclasses
30 values
license
stringclasses
15 values
size
int32
3
1.05M
import { Component } from '@angular/core'; @Component({ selector: 'uxd-landing-page-feature-list', template: '<ng-content></ng-content>', styles: [':host { display: block; }'], host: { 'class': 'row' } }) export class LandingPageFeatureListComponent { }
UXAspects/UXAspects
docs/app/components/landing-page-feature-list/landing-page-feature-list.component.ts
TypeScript
apache-2.0
284
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_151) on Wed Jul 17 13:50:51 MST 2019 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Interface org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer (BOM: * : All 2.5.0.Final API)</title> <meta name="date" content="2019-07-17"> <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer (BOM: * : All 2.5.0.Final API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> <div 
class="aboutLanguage">Thorntail API, 2.5.0.Final</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/wildfly/swarm/config/infinispan/cache_container/class-use/BackupForComponentConsumer.html" target="_top">Frames</a></li> <li><a href="BackupForComponentConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer" class="title">Uses of Interface<br>org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.wildfly.swarm.config.infinispan.cache_container">org.wildfly.swarm.config.infinispan.cache_container</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> 
<li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.wildfly.swarm.config.infinispan.cache_container"> <!-- --> </a> <h3>Uses of <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a> in <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/package-summary.html">org.wildfly.swarm.config.infinispan.cache_container</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/package-summary.html">org.wildfly.swarm.config.infinispan.cache_container</a> that return <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&lt;<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="type parameter in BackupForComponentConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">BackupForComponentConsumer.</span><code><span class="memberNameLink"><a 
href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html#andThen-org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer-">andThen</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&lt;<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="type parameter in BackupForComponentConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/package-summary.html">org.wildfly.swarm.config.infinispan.cache_container</a> with parameters of type <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&lt;<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="type parameter in BackupForComponentConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">BackupForComponentConsumer.</span><code><span 
class="memberNameLink"><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html#andThen-org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer-">andThen</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&lt;<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="type parameter in BackupForComponentConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/ScatteredCache.html" title="type parameter in ScatteredCache">T</a></code></td> <td class="colLast"><span class="typeNameLabel">ScatteredCache.</span><code><span class="memberNameLink"><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/ScatteredCache.html#backupForComponent-org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer-">backupForComponent</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&nbsp;consumer)</code> <div class="block">A cache for which this cache acts as a backup (for use with cross site replication).</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/DistributedCache.html" title="type parameter in DistributedCache">T</a></code></td> <td class="colLast"><span class="typeNameLabel">DistributedCache.</span><code><span class="memberNameLink"><a 
href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/DistributedCache.html#backupForComponent-org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer-">backupForComponent</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&nbsp;consumer)</code> <div class="block">A cache for which this cache acts as a backup (for use with cross site replication).</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/ReplicatedCache.html" title="type parameter in ReplicatedCache">T</a></code></td> <td class="colLast"><span class="typeNameLabel">ReplicatedCache.</span><code><span class="memberNameLink"><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/ReplicatedCache.html#backupForComponent-org.wildfly.swarm.config.infinispan.cache_container.BackupForComponentConsumer-">backupForComponent</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">BackupForComponentConsumer</a>&nbsp;consumer)</code> <div class="block">A cache for which this cache acts as a backup (for use with cross site replication).</div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> 
<li><a href="../../../../../../../org/wildfly/swarm/config/infinispan/cache_container/BackupForComponentConsumer.html" title="interface in org.wildfly.swarm.config.infinispan.cache_container">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">Thorntail API, 2.5.0.Final</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/wildfly/swarm/config/infinispan/cache_container/class-use/BackupForComponentConsumer.html" target="_top">Frames</a></li> <li><a href="BackupForComponentConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2019 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
wildfly-swarm/wildfly-swarm-javadocs
2.5.0.Final/apidocs/org/wildfly/swarm/config/infinispan/cache_container/class-use/BackupForComponentConsumer.html
HTML
apache-2.0
13,410
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_60-ea) on Wed Jan 04 17:08:18 EST 2017 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Interface org.wildfly.swarm.config.NamingConsumer (Public javadocs 2017.1.1 API)</title> <meta name="date" content="2017-01-04"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.wildfly.swarm.config.NamingConsumer (Public javadocs 2017.1.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2017.1.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/wildfly/swarm/config/class-use/NamingConsumer.html" target="_top">Frames</a></li> <li><a href="NamingConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.wildfly.swarm.config.NamingConsumer" class="title">Uses of Interface<br>org.wildfly.swarm.config.NamingConsumer</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.wildfly.swarm.config">org.wildfly.swarm.config</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.wildfly.swarm.config"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a> in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" 
summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a> that return <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="type parameter in NamingConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">NamingConsumer.</span><code><span class="memberNameLink"><a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html#andThen-org.wildfly.swarm.config.NamingConsumer-">andThen</a></span>(<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="type parameter in NamingConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a> with parameters of type <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> 
<tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="type parameter in NamingConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">NamingConsumer.</span><code><span class="memberNameLink"><a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html#andThen-org.wildfly.swarm.config.NamingConsumer-">andThen</a></span>(<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">NamingConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="type parameter in NamingConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/config/NamingConsumer.html" title="interface in org.wildfly.swarm.config">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2017.1.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/wildfly/swarm/config/class-use/NamingConsumer.html" target="_top">Frames</a></li> <li><a href="NamingConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2017 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
wildfly-swarm/wildfly-swarm-javadocs
2017.1.1/apidocs/org/wildfly/swarm/config/class-use/NamingConsumer.html
HTML
apache-2.0
8,813
// Copyright 2015 The oauth2 Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package odnoklassniki provides constants for using OAuth2 to access Odnoklassniki. package odnoklassniki import ( "github.com/coreos/mantle/Godeps/_workspace/src/golang.org/x/oauth2" ) // Endpoint is Odnoklassniki's OAuth 2.0 endpoint. var Endpoint = oauth2.Endpoint{ AuthURL: "https://www.odnoklassniki.ru/oauth/authorize", TokenURL: "https://api.odnoklassniki.ru/oauth/token.do", }
mischief/mantle
Godeps/_workspace/src/golang.org/x/oauth2/odnoklassniki/odnoklassniki.go
GO
apache-2.0
557
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.attribute; import io.undertow.server.HttpServerExchange; /** * The thread name * * @author Stuart Douglas */ public class ThreadNameAttribute implements ExchangeAttribute { public static final String THREAD_NAME_SHORT = "%I"; public static final String THREAD_NAME = "%{THREAD_NAME}"; public static final ExchangeAttribute INSTANCE = new ThreadNameAttribute(); private ThreadNameAttribute() { } @Override public String readAttribute(final HttpServerExchange exchange) { return Thread.currentThread().getName(); } @Override public void writeAttribute(final HttpServerExchange exchange, final String newValue) throws ReadOnlyAttributeException { throw new ReadOnlyAttributeException("Thread name", newValue); } public static final class Builder implements ExchangeAttributeBuilder { @Override public String name() { return "Thread name"; } @Override public ExchangeAttribute build(final String token) { if (token.equals(THREAD_NAME) || token.equals(THREAD_NAME_SHORT)) { return ThreadNameAttribute.INSTANCE; } return null; } } }
emag/codereading-undertow
core/src/main/java/io/undertow/attribute/ThreadNameAttribute.java
Java
apache-2.0
1,955
/** * Copyright [2013-2014] [OHsystem] * * We spent a lot of time writing this code, so show some respect: * - Do not remove this copyright notice anywhere (bot, website etc.) * - We do not provide support to those who removed copyright notice * * OHSystem is free software: You can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * You can contact the developers on: [email protected] * or join us directly here: http://forum.ohsystem.net/ * * Visit us also on http://ohsystem.net/ and keep track always of the latest * features and changes. * * * This is modified from GHOST++: http://ohbotplusplus.googlecode.com/ */ #include "../ohbot.h" #include "stats.h" // // CStats // CStats :: CStats( CBaseGame *nGame ) : m_Game( nGame ), m_Locked( false ) { } CStats :: ~CStats( ) { } bool CStats :: ProcessAction( CIncomingAction *Action ) { return false; } void CStats :: Save( COHBot *GHost, COHBotDB *DB, uint32_t GameID ) { }
m-unkel/OHSystem
ghost/src/stats/stats.cpp
C++
apache-2.0
1,101
package org.techniche.technothlon.katana.tcd;

import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Looper;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.techniche.technothlon.katana.R;
import org.techniche.technothlon.katana.db.TCDDatabase;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * In-memory store and sync pipeline for "Techno Coup D'oeil" (TCD) questions.
 * <p/>
 * {@link #download(Context)} fetches new questions from the gateway and persists
 * them via {@link TCDDatabase}; {@link #load(Context)} re-reads the database into
 * the static {@link #ITEMS} list and {@link #ITEM_MAP}. {@link TCDLoader} wraps
 * both steps in an {@link AsyncTask}.
 */
public class TCDContent {

    // Ordered list of lightweight question handles backing list UIs.
    // After an incremental load(), new items are inserted at the front.
    public static List<TCDQuestionMini> ITEMS = new ArrayList<TCDQuestionMini>();

    // Full question objects keyed by unique id; shared with ITEMS by id.
    public static Map<String, TCDQuestion> ITEM_MAP = new HashMap<String, TCDQuestion>();

    // Gateway endpoint; the server returns questions newer than lastFetchId.
    // NOTE(review): points at localhost — presumably a development URL; confirm
    // the production endpoint before release.
    private static String url = "http://localhost/technothlon/technocoupdoeil_app_gateway/android/?technocoupdoeil=fjalkfq2045rudacnavsofu0aswd988q29ra&lastFetchId=";

    /**
     * Fetches questions newer than the stored fetch id, inserts them into the
     * local database and advances the fetch id.
     *
     * Status codes returned: 0 = success, 1 = no network, 2 = I/O failure,
     * 3 = malformed JSON. A server status of "reset" wipes the local database,
     * zeroes the fetch id and recurses to re-download from scratch.
     *
     * NOTE(review): each Toast is shown from a throwaway thread whose
     * Looper.loop() never returns, so those threads are leaked — verify intent.
     */
    private static int download(Context context) {
        SharedPreferences sharedPref = context.getSharedPreferences(
                context.getString(R.string.preference_file_key), Context.MODE_PRIVATE);
        long lastFetchID = sharedPref.getLong(context.getString(R.string.tcd_fetch_id), 0);
        Log.d("Pref - log", lastFetchID + " from shared pref");
        ConnectivityManager connMgr = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
        if (networkInfo != null && networkInfo.isConnected()) {
            try {
                JSONObject json = new JSONObject(downloadUrl(url + lastFetchID));
                if (json.getString("status").equals("success")) {
                    TCDDatabase db = new TCDDatabase(context);
                    JSONArray questions = json.getJSONArray("questions");
                    lastFetchID = json.getLong("lastFetchId");
                    int count = json.getInt("questions_count"), lastID;
                    for (int i = 0; i < count; i++) {
                        JSONObject q = questions.getJSONObject(i);
                        JSONObject links = q.getJSONObject("links");
                        lastID = q.getInt("uniqueId");
                        db.insert(
                                lastID,
                                q.getString("id"),
                                q.getString("color"),
                                q.getString("title"),
                                q.getString("question"),
                                links.getString("facebook"),
                                links.getString("google"),
                                links.getString("tumblr"),
                                links.getString("answer"),
                                q.getString("by"),
                                q.getString("time"),
                                q.getString("answer")
                        );
                        Log.d("Database - log", lastID + " loaded in database");
                    }
                    db.close();
                    // Persist the new high-water mark only after all inserts succeeded.
                    SharedPreferences.Editor edit = sharedPref.edit();
                    edit.putLong(context.getString(R.string.tcd_fetch_id), lastFetchID);
                    edit.commit();
                } else if (json.getString("status").equals("reset")) {
                    // Server asked for a full resync: clear local data and refetch.
                    TCDDatabase db = new TCDDatabase(context);
                    db.reset();
                    db.close();
                    SharedPreferences.Editor edit = sharedPref.edit();
                    edit.putLong(context.getString(R.string.tcd_fetch_id), 0);
                    edit.commit();
                    download(context);
                }
                final Context ct = context;
                new Thread() {
                    @Override
                    public void run() {
                        Looper.prepare();
                        Toast.makeText(ct, "Sync Completed.", Toast.LENGTH_SHORT).show();
                        Looper.loop();
                    }
                }.start();
                return 0;
            } catch (JSONException e) {
                e.printStackTrace();
                final Context ct = context;
                new Thread() {
                    @Override
                    public void run() {
                        Looper.prepare();
                        Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
                        Looper.loop();
                    }
                }.start();
                return 3;
            } catch (IOException e) {
                e.printStackTrace();
                final Context ct = context;
                new Thread() {
                    @Override
                    public void run() {
                        Looper.prepare();
                        Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
                        Looper.loop();
                    }
                }.start();
                return 2;
            }
        } else {
            final Context ct = context;
            new Thread() {
                @Override
                public void run() {
                    Looper.prepare();
                    Toast.makeText(ct, "No network connection available.", Toast.LENGTH_SHORT).show();
                    Looper.loop();
                }
            }.start();
            return 1;
        }
    }

    /**
     * Performs a blocking HTTP GET and returns the full response body as a
     * String. Throws IOException on connection or read failure.
     */
    private static String downloadUrl(String myurl) throws IOException {
        InputStream is = null;
        try {
            URL url = new URL(myurl);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setReadTimeout(10000 /* milliseconds */);
            conn.setConnectTimeout(15000 /* milliseconds */);
            conn.setRequestMethod("GET");
            conn.setDoInput(true);
            // Starts the query
            conn.connect();
            int response = conn.getResponseCode();
            Log.d("TCD latest downloads", "The response is: " + response);
            int size = conn.getContentLength();
            Log.d("TCD latest downloads", "The content-length is: " + size);
            is = conn.getInputStream();
            // Convert the InputStream into a string
            return readTextResponse(is);
        } finally {
            // Ensure the stream is closed even when reading throws.
            if (is != null) {
                is.close();
            }
        }
    }

    /**
     * Reads the stream line by line into a single String.
     * Note: line separators are dropped by readLine() and not re-added.
     */
    private static String readTextResponse(InputStream inputStream) throws IOException {
        Reader in = new InputStreamReader(inputStream);
        BufferedReader bufferedreader = new BufferedReader(in);
        StringBuilder stringBuilder = new StringBuilder();
        String stringReadLine;
        while ((stringReadLine = bufferedreader.readLine()) != null) {
            stringBuilder.append(stringReadLine);
        }
        return stringBuilder.toString();
    }

    /**
     * Re-reads every question from the database (newest first) into ITEMS and
     * ITEM_MAP. When ITEMS is already populated, this is an incremental update
     * and new items are prepended; otherwise they are appended in query order.
     */
    public static void load(Context context) {
        boolean update = ITEMS.isEmpty() ? false : true;
        TCDDatabase helper = new TCDDatabase(context);
        SQLiteDatabase db = helper.getReadableDatabase();
        assert db != null;
        Cursor c = db.rawQuery("SELECT * FROM " + TCDDatabase.Contracts.NAME
                + " ORDER BY " + TCDDatabase.Contracts.FIELD_TIME + " DESC, "
                + TCDDatabase.Contracts.FIELD_ID + " DESC", null);
        Log.d("DB", c.getCount() + " object in database");
        c.moveToFirst();
        while (!c.isAfterLast()) {
            addItem(new TCDQuestion(
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ID)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_DISPLAY_ID)),
                    c.getInt(c.getColumnIndex(TCDDatabase.Contracts.FIELD_COLOR)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TITLE)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_QUESTION)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_FACEBOOK)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_GOOGLE)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TUMBLR)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER_URL)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_BY)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER)),
                    c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TIME))
            ), update);
            c.moveToNext();
        }
        c.close();
        db.close();
    }

    // Inserts the item unless its unique id is already known. On incremental
    // updates the newest items are prepended so they surface at the top.
    private static void addItem(TCDQuestion item, boolean update) {
        if (!ITEM_MAP.containsKey(item.uniqueId)) {
            if (update)
                ITEMS.add(0, (new TCDQuestionMini(item.uniqueId)));
            else
                ITEMS.add((new TCDQuestionMini(item.uniqueId)));
            ITEM_MAP.put(item.uniqueId, item);
        }
    }

    /**
     * AsyncTask wrapper: downloads in the background (params[0] must be a
     * Context), always reloads the in-memory lists, then reports the download
     * status code to {@link #finished(int)} on the UI thread.
     * Status 4 means the download itself threw before returning a code.
     */
    public abstract static class TCDLoader extends AsyncTask<Object, Integer, Integer> {
        @Override
        protected Integer doInBackground(Object[] params) {
            int d = 4;
            try {
                d = download((Context) params[0]);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Reload even when the download failed so cached rows still show.
                load((Context) params[0]);
            }
            return d;
        }

        @Override
        protected void onPostExecute(Integer o) {
            finished(o);
        }

        /** Callback with the download status code (see download()). */
        public abstract void finished(int result);
    }

    /**
     * Full question record as read from the database row.
     */
    public static class TCDQuestion {
        public String id;
        public String question;
        public String facebook;
        public String google;
        public String tumblr;
        public String answer_url;
        public String by;
        public String answer;
        public String title;
        public java.util.Date date = null;
        public String dateString = "";
        // Background drawable resource chosen from the stored color code.
        public int color = R.drawable.tcd_background_1;
        public String uniqueId;
        // Human-readable relative age ("5 minutes ago", ...).
        private String status;
        // Once true, status is frozen to the absolute date string.
        private boolean ret = false;

        // NOTE(review): the last constructor parameter, 'status', actually
        // carries the question timestamp ("yyyy-MM-dd HH:mm:ss") — consider
        // renaming it at the call sites' convenience.
        public TCDQuestion(String uniqueId, String id, int color, String title, String question,
                           String facebook, String google, String tumblr, String answer_url,
                           String by, String answer, String status) {
            this.uniqueId = uniqueId;
            this.id = id;
            this.title = title;
            this.question = question;
            this.facebook = facebook;
            this.google = google;
            this.tumblr = tumblr;
            this.answer_url = answer_url;
            this.by = by;
            this.color = getBackground(color);
            this.answer = answer;
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            try {
                this.date = sdf.parse(status);
            } catch (ParseException e) {
                e.printStackTrace();
            }
            sdf = new SimpleDateFormat("yyyy-MM-dd");
            assert this.date != null;
            this.dateString = sdf.format(this.date);
            this.status = getStatus();
        }

        // Maps the stored color code (10/20/.../50) to a drawable; anything
        // else falls back to background 1.
        private int getBackground(int color) {
            switch (color) {
                case 10:
                    return R.drawable.tcd_background_2;
                case 20:
                    return R.drawable.tcd_background_3;
                case 30:
                    return R.drawable.tcd_background_4;
                case 40:
                    return R.drawable.tcd_background_5;
                case 50:
                    return R.drawable.tcd_background_6;
                default:
                    return R.drawable.tcd_background_1;
            }
        }

        /**
         * Relative age of the question ("about N minutes ago", "yesterday",
         * "N days ago"). After roughly 4 days the value is frozen to the
         * absolute date string and never recomputed.
         */
        public String getStatus() {
            if (ret) return status;
            long seconds = Math.abs(((new Date()).getTime() - date.getTime()) / 1000);
            if (seconds < 60) status = "about " + seconds + " seconds ago";
            else if (seconds < 3600) status = "about " + (seconds / 60) + " minutes ago";
            else if (seconds < 86400) status = "about " + (seconds / 3600) + " hours ago";
            else if (seconds < 172800) status = "yesterday";
            else if (seconds < 345600) status = (seconds / 86400) + " days ago";
            else {
                ret = true;
                status = dateString;
            }
            return status;
        }
    }

    /** View-holder for list rows (used by adapters elsewhere). */
    public static class TCDHolder {
        public TextView id, title, question, status;
    }

    /** Lightweight handle holding only a question's unique id. */
    public static class TCDQuestionMini {
        public String id;

        public TCDQuestionMini(String id) {
            this.id = id;
        }
    }
}
znck/technothlon-android-app
katana/src/main/java/org/techniche/technothlon/katana/tcd/TCDContent.java
Java
apache-2.0
13,524
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.spring.scan;

import java.lang.annotation.Annotation;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashSet;
import java.util.Set;

import org.apache.camel.impl.DefaultPackageScanClassResolver;
import org.apache.camel.spring.scan.a.ScanTargetOne;
import org.apache.camel.spring.scan.b.ScanTargetTwo;
import org.apache.camel.spring.scan.c.ScanTargetThree;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for DefaultPackageScanClassResolver: scanning by annotation, by
 * implementation and by filter, with include/exclude package patterns, and
 * scanning classes packaged inside jar URLs.
 *
 * The 'filter' field and validateMatchingSetContains(..) are inherited from
 * ScanTestSupport — presumably a fresh filter per test; confirm in the base class.
 */
public class DefaultPackageScanClassResolverTest extends org.apache.camel.spring.scan.ScanTestSupport {

    private DefaultPackageScanClassResolver resolver;
    private Set<Class<? extends Annotation>> annotations = new HashSet<>();
    private String scanPackage = "org.apache.camel.spring.scan";

    @Before
    public void setUp() throws Exception {
        super.setUp();
        resolver = new DefaultPackageScanClassResolver();
        // The two marker annotations used by the "find by annotations" tests.
        annotations.add(org.apache.camel.spring.scan.ScannableOne.class);
        annotations.add(org.apache.camel.spring.scan.ScannableTwo.class);
    }

    // Custom URL schemes must be explicitly whitelisted before they are scanned.
    @Test
    public void testAccepableSchema() {
        assertFalse("We should not accept the test by default!", resolver.isAcceptableScheme("test://test"));
        resolver.setAcceptableSchemes("test:;test2:");
        assertTrue("We should accept the test:!", resolver.isAcceptableScheme("test://test"));
        assertTrue("We should accept the test2:!", resolver.isAcceptableScheme("test2://test"));
    }

    // Without filters, annotation scanning finds every annotated class in the package tree.
    @Test
    public void testFindByAnnotationWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);

        scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetThree.class);
    }

    // Scanning for multiple annotations at once unions the results.
    @Test
    public void testFindByAnnotationsWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findAnnotated(annotations, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
    }

    // findImplementations includes the queried class itself plus its subclasses.
    @Test
    public void testFindImplementationsWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);
    }

    // An include pattern restricts matches to sub-package 'b' only.
    @Test
    public void testFindByAnnotationWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);

        // ScannableTwo's only target lives in sub-package 'c', so nothing matches.
        scanned = resolver.findAnnotated(ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned);
    }

    @Test
    public void testFindByAnnotationsWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        filter.addIncludePattern(scanPackage + ".c.*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findAnnotated(annotations, "org.apache.camel.spring.scan");
        validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
    }

    // Excluding sub-packages 'b' and 'c' leaves only the target in 'a'.
    @Test
    public void testFindByAnnotationWithExcludePackageFilter() {
        filter.addExcludePattern(scanPackage + ".b.*");
        filter.addExcludePattern(scanPackage + ".c.*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findAnnotated(ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class);

        scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned);
    }

    @Test
    public void testFindByAnnotationsWithExcludePackageFilter() {
        // NOTE(review): unlike the other filter tests, the filter is never
        // added to the resolver here (no resolver.addFilter(filter)) — verify
        // whether that call was intended, or whether the assertion only passes
        // because validateMatchingSetContains checks containment.
        filter.addExcludePattern(scanPackage + ".a.*");

        Set<Class<?>> scanned = resolver.findAnnotated(annotations, "org.apache.camel.spring.scan");
        validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
    }

    // Ant-style '**' pattern matching on the class name itself.
    @Test
    public void testFindByFilterWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".**.ScanTarget*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findByFilter(filter, "org.apache.camel.spring.scan");
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
    }

    @Test
    public void testFindImplementationsWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);
    }

    @Test
    public void testFindImplementationsWithExcludePackageFilter() {
        filter.addExcludePattern(scanPackage + ".a.*");
        resolver.addFilter(filter);

        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);
    }

    @Test
    // Need to run the mvn clean install to create the jar file when running it from IDE
    public void testFindByFilterPackageInJarUrl() throws Exception {
        ClassLoader savedClassLoader = null;
        try {
            savedClassLoader = Thread.currentThread().getContextClassLoader();
            // build a mock URLClassLoader
            URL url = getClass().getResource("/package_scan_test.jar");
            URL urls[] = {new URL("jar:" + url.toString() + "!/")};
            URLClassLoader classLoader = new URLClassLoader(urls, savedClassLoader);

            Thread.currentThread().setContextClassLoader(classLoader);

            // recreate resolver since we mess with context class loader
            resolver = new DefaultPackageScanClassResolver();
            filter.addIncludePattern("a.*.c.*");
            resolver.addFilter(filter);
            Set<Class<?>> scanned = resolver.findByFilter(filter, "a.b.c");
            assertEquals(1, scanned.size());
            assertEquals("class a.b.c.Test", scanned.iterator().next().toString());
        } finally {
            // Always restore the original context class loader.
            if (savedClassLoader != null) {
                Thread.currentThread().setContextClassLoader(savedClassLoader);
            }
        }
    }

    @Test
    // Need to run the mvn clean install to create the test jar file when running it from IDE
    public void testFindByFilterPackageInJarUrlWithPlusChars() throws Exception {
        ClassLoader savedClassLoader = null;
        try {
            savedClassLoader = Thread.currentThread().getContextClassLoader();
            // '+' in the jar path exercises URL-encoding handling in the scanner.
            URL url = getClass().getResource("/package+scan+test.jar");
            URL urls[] = {new URL("jar:" + url.toString() + "!/")};
            URLClassLoader classLoader = new URLClassLoader(urls, savedClassLoader);

            Thread.currentThread().setContextClassLoader(classLoader);

            // recreate resolver since we mess with context class loader
            resolver = new DefaultPackageScanClassResolver();
            filter.addIncludePattern("a.*.c.*");
            resolver.addFilter(filter);
            Set<Class<?>> scanned = resolver.findByFilter(filter, "a.b.c");
            assertEquals(1, scanned.size());
            assertEquals("class a.b.c.Test", scanned.iterator().next().toString());
        } finally {
            if (savedClassLoader != null) {
                Thread.currentThread().setContextClassLoader(savedClassLoader);
            }
        }
    }
}
punkhorn/camel-upstream
components/camel-spring/src/test/java/org/apache/camel/spring/scan/DefaultPackageScanClassResolverTest.java
Java
apache-2.0
8,636
import {Component} from '@angular/core';
import {NgbActiveModal} from '@ng-bootstrap/ng-bootstrap';
import {Category} from 'idai-field-core';
import {ProjectConfiguration} from '../../../core/configuration/project-configuration';


@Component({
    selector: 'link-modal',
    templateUrl: './link-modal.html',
    host: {
        '(window:keydown)': 'onKeyDown($event)'
    }
})
/**
 * Modal dialog for picking a link target. Offers the categories allowed as
 * relation domain for 'isDepictedIn' on 'Image' as filter options, and can be
 * dismissed with the Escape key.
 */
export class LinkModalComponent {

    public filterOptions: Category[] = [];


    constructor(public activeModal: NgbActiveModal,
                private projectConfiguration: ProjectConfiguration) {}


    /** Dismisses the modal when Escape is pressed anywhere in the window. */
    public onKeyDown(event: KeyboardEvent) {

        if (event.key !== 'Escape') return;
        this.activeModal.dismiss('cancel');
    }


    /** Populates filterOptions from the project configuration. */
    public initializeFilterOptions() {

        this.filterOptions = this.projectConfiguration
            .getAllowedRelationDomainCategories('isDepictedIn', 'Image');
    }
}
codarchlab/idai-field-client
desktop/src/app/components/image/overview/link-modal.component.ts
TypeScript
apache-2.0
902
Get up to speed on [pandas](http://pandas.pydata.org/) and [TensorFlow](https://www.tensorflow.org/) basics with these Datalab notebooks (click [here](https://github.com/google/eng-edu/blob/master/ml/cc/README.md) for installation instructions): * **Hello World:** An introduction to the Datalab notebook environment that shows how to code ["Hello World"](https://en.wikipedia.org/wiki/%22Hello,_World!%22_program) in TensorFlow. * **TensorFlow Programming Concepts:** A walkthrough of the fundamental components of a TensorFlow application: tensors, operations, graphs, and sessions. * **Creating and Manipulating Tensors:** A quick primer on tensors: the central abstraction in TensorFlow programming. Also provides a refresher on matrix addition and multiplication in linear algebra. * **Quick Introduction to pandas:** A tutorial on basic data manipulation with pandas.
google/eng-edu
ml/cc/prework/README.md
Markdown
apache-2.0
918
---
name: (Maintainers Only) Good First Issue
about: For maintainers to create an issue that is good for new contributors

---

<!-- Issue text below -->

<!-- End issue text, leave the following intact -->

---

**Good First Issue**: This issue is good for first-time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is no corresponding pull request for this issue, it is up for grabs. For directions on getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, in [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or on the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).

**Screenshot Required**: *If your pull request makes a visual change*, include a screenshot of your update. This helps our team give you feedback faster.
dstufft/warehouse
.github/ISSUE_TEMPLATE/~good-first-issue.md
Markdown
apache-2.0
1,092
/*
 * Copyright 2016-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.snmp.ctl;

import com.btisystems.pronx.ems.core.snmp.ISnmpConfiguration;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfigurationFactory;
import com.btisystems.pronx.ems.core.snmp.ISnmpSession;
import com.btisystems.pronx.ems.core.snmp.ISnmpSessionFactory;
import com.google.common.collect.Maps;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.alarm.Alarm;
import org.onosproject.alarm.AlarmId;
import org.onosproject.alarm.DefaultAlarm;

import java.io.IOException;

import static org.junit.Assert.*;

/**
 * Unit tests for DefaultSnmpController: device registration/removal, session
 * creation and caching, and the alarm produced when an SNMP walk fails.
 * The controller's session factory is replaced by MockISnmpSessionFactory so
 * every lookup yields the shared ISnmpSessionAdapter stub.
 */
public class DefaultSnmpControllerTest {

    // Stub factory installed into the controller's factoryMap in setUp().
    ISnmpSessionFactory mockSnmpSessionFactory = new MockISnmpSessionFactory();

    DefaultSnmpController snmpController = new DefaultSnmpController();

    DefaultSnmpDevice device = new DefaultSnmpDevice("1.1.1.1", 1, "test", "test");

    // Single session instance returned for every createSession(..) call.
    ISnmpSession snmpSession = new ISnmpSessionAdapter();

    long time = System.currentTimeMillis();

    // Expected alarm when an SNMP walk fails for 'device'.
    DefaultAlarm alarm = new DefaultAlarm.Builder(
            AlarmId.alarmId(device.deviceId(), Long.toString(time)),
            device.deviceId(), "SNMP alarm retrieval failed",
            Alarm.SeverityLevel.CRITICAL, time).build();

    @Before
    public void setUp() {
        // Replace the real factory map with one containing only the mock,
        // keyed by SNMP version 1.
        snmpController.factoryMap = Maps.newHashMap();
        snmpController.factoryMap.put(1, mockSnmpSessionFactory);
    }

    @Test
    public void testActivate() {
        snmpController.activate(null);
        assertTrue("Snmp session factory map should contain atleast one factory object",
                   snmpController.factoryMap.size() > 0);
    }

    @Test
    public void testDeactivate() {
        snmpController.deactivate();
        assertEquals("Device map should be clear", 0, snmpController.getDevices().size());
        assertEquals("Session map should be clear", 0, snmpController.sessionMap.size());
    }

    @Test
    public void addDevice() {
        snmpController.addDevice(device);
        assertEquals("Controller should contain device", device,
                     snmpController.getDevice(device.deviceId()));
    }

    /**
     * Tests session creation and that a second lookup returns the cached
     * session rather than creating a new one.
     */
    @Test
    public void getNotExistingSession() throws Exception {
        addDevice();
        assertEquals("Session should be created", snmpSession,
                     snmpController.getSession(device.deviceId()));
        assertEquals("Map should contain session", 1, snmpController.snmpDeviceMap.size());
        assertEquals("Session should be fetched from map", snmpSession,
                     snmpController.getSession(device.deviceId()));
    }

    @Test
    public void removeDevice() {
        addDevice();
        snmpController.removeDevice(device.deviceId());
        assertNull("Device shoudl not be present", snmpController.getDevice(device.deviceId()));
    }

    @Test
    public void walkFailedAlarm() {
        assertEquals("Alarms should be equals", alarm,
                     snmpController.buildWalkFailedAlarm(device.deviceId()));
    }

    /** Factory stub: every overload hands back the shared snmpSession. */
    public class MockISnmpSessionFactory implements ISnmpSessionFactory {

        @Override
        public ISnmpSession createSession(ISnmpConfiguration configuration, String ipAddress)
                throws IOException {
            // NOTE(review): this adapter instance is created and immediately
            // discarded — looks like dead code; confirm it can be removed.
            new ISnmpSessionAdapter();
            return snmpSession;
        }

        @Override
        public ISnmpSession createSession(String ipAddress, String community)
                throws IOException {
            return snmpSession;
        }

        @Override
        public ISnmpSession createSession(String ipAddress, String community,
                                          String factoryName,
                                          ISnmpConfigurationFactory.AccessType accessType)
                throws IOException {
            return snmpSession;
        }
    }
}
opennetworkinglab/onos
protocols/snmp/ctl/src/test/java/org/onosproject/snmp/ctl/DefaultSnmpControllerTest.java
Java
apache-2.0
4,414
# Neonaviculopsis spinosa (D. Bukry) P. Prema & T. V. Desikachary SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Chromista/Ochrophyta/Chrysophyceae/Dictyochales/Dictyochaceae/Neonaviculopsis/Neonaviculopsis spinosa/README.md
Markdown
apache-2.0
221
package com.zaaach.citypicker.db; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.os.Environment; import com.zaaach.citypicker.model.City; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * author Bro0cL on 2016/1/26. */ public class DBManager { private static final String ASSETS_NAME = "china_cities.db"; private static final String DB_NAME = "china_cities.db"; private static final String TABLE_NAME = "city"; private static final String NAME = "name"; private static final String PINYIN = "pinyin"; private static final int BUFFER_SIZE = 1024; private String DB_PATH; private Context mContext; public DBManager(Context context) { this.mContext = context; DB_PATH = File.separator + "data" + Environment.getDataDirectory().getAbsolutePath() + File.separator + context.getPackageName() + File.separator + "databases" + File.separator; } @SuppressWarnings("ResultOfMethodCallIgnored") public void copyDBFile(){ File dir = new File(DB_PATH); if (!dir.exists()){ dir.mkdirs(); } File dbFile = new File(DB_PATH + DB_NAME); if (!dbFile.exists()){ InputStream is; OutputStream os; try { is = mContext.getResources().getAssets().open(ASSETS_NAME); os = new FileOutputStream(dbFile); byte[] buffer = new byte[BUFFER_SIZE]; int length; while ((length = is.read(buffer, 0, buffer.length)) > 0){ os.write(buffer, 0, length); } os.flush(); os.close(); is.close(); } catch (IOException e) { e.printStackTrace(); } } } public List<City> getAllCities(){ SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + DB_NAME, null); Cursor cursor = db.rawQuery("select * from " + TABLE_NAME, null); List<City> result = new ArrayList<>(); City city; while (cursor.moveToNext()){ String name = 
cursor.getString(cursor.getColumnIndex(NAME)); String pinyin = cursor.getString(cursor.getColumnIndex(PINYIN)); city = new City(name, pinyin); result.add(city); } cursor.close(); db.close(); Collections.sort(result, new CityComparator()); return result; } public List<City> searchCity(final String keyword){ SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + DB_NAME, null); Cursor cursor = db.rawQuery("select * from " + TABLE_NAME +" where name like \"%" + keyword + "%\" or pinyin like \"%" + keyword + "%\"", null); List<City> result = new ArrayList<>(); City city; while (cursor.moveToNext()){ String name = cursor.getString(cursor.getColumnIndex(NAME)); String pinyin = cursor.getString(cursor.getColumnIndex(PINYIN)); city = new City(name, pinyin); result.add(city); } cursor.close(); db.close(); Collections.sort(result, new CityComparator()); return result; } /** * sort by a-z */ private class CityComparator implements Comparator<City>{ @Override public int compare(City lhs, City rhs) { String a = lhs.getPinyin().substring(0, 1); String b = rhs.getPinyin().substring(0, 1); return a.compareTo(b); } } }
weiwenqiang/GitHub
SelectWidget/city/CityPicker/citypicker/src/main/java/com/zaaach/citypicker/db/DBManager.java
Java
apache-2.0
3,876
// Copyright (C) 2012-2018 Leap Motion, Inc. All rights reserved. #pragma once #include "AutoFilterDescriptor.h" namespace autowiring { /// <summary> /// A single subscription counter entry /// </summary> struct SatCounter: AutoFilterDescriptor { SatCounter(void) = default; SatCounter(const AutoFilterDescriptor& source): AutoFilterDescriptor(source), remaining(m_requiredCount) {} SatCounter(const SatCounter& source): AutoFilterDescriptor(static_cast<const AutoFilterDescriptor&>(source)), remaining(source.remaining) {} // Forward and backward linked list pointers SatCounter* flink = nullptr; SatCounter* blink = nullptr; // The number of inputs remaining to this counter: size_t remaining = 0; /// <summary> /// Conditionally decrements AutoFilter argument satisfaction. /// </summary> /// <returns>True if this decrement yielded satisfaction of all arguments</returns> bool Decrement(void) { return remaining && !--remaining; } /// <summary> /// Conditionally increments AutoFilter argument satisfaction. /// </summary> void Increment(void) { ++remaining; } }; } namespace std { template<> struct hash<autowiring::SatCounter> { size_t operator()(const autowiring::SatCounter& satCounter) const { return (size_t)satCounter.GetAutoFilter().ptr(); } }; }
leapmotion/autowiring
src/autowiring/SatCounter.h
C
apache-2.0
1,359
using Hyperstore.CodeAnalysis.Syntax;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Hyperstore.CodeAnalysis.Compilation
{
    /// <summary>
    /// Drives compilation of Hyperstore domain files: parses the input files,
    /// builds a <see cref="HyperstoreCompilation"/> and writes the generated
    /// C# source to <c>Domains.g.cs</c> in the output directory.
    /// </summary>
    public class HyperstoreCompiler
    {
        private readonly string _basePath;
        private readonly string _outputDirectory;
        private HyperstoreCompilation _compilation;
        private const string OutputFileName = "Domains.g.cs";

        /// <summary>
        /// Diagnostics of the last compilation.
        /// NOTE(review): throws NullReferenceException if accessed before
        /// <see cref="Run"/> has created the compilation — confirm callers.
        /// </summary>
        public IEnumerable<Diagnostic> Diagnostics
        {
            get
            {
                return _compilation.GetDiagnostics();
            }
        }

        /// <param name="outputDirectory">Directory receiving Domains.g.cs.</param>
        /// <param name="basePath">Optional base path resolved against each input file.</param>
        public HyperstoreCompiler(string outputDirectory, string basePath = null)
        {
            _basePath = basePath;
            _outputDirectory = outputDirectory;
        }

        /// <summary>
        /// Compiles the given files. Returns true on success; on any failure
        /// the previously generated output file is deleted.
        /// </summary>
        public bool Run(string[] inputFiles)
        {
            if (inputFiles == null || inputFiles.Length == 0)
            {
                ClearOutputFile();
                return false;
            }

            try
            {
                // Use Length directly instead of LINQ Count() on an array.
                var trees = new HyperstoreSyntaxTree[inputFiles.Length];

                var i = 0;
                foreach (var inputFile in inputFiles)
                {
                    string content;
                    string normalizedPath;
                    if (OpenFile(inputFile, out content, out normalizedPath))
                    {
                        trees[i++] = HyperstoreSyntaxTree.ParseText(content, normalizedPath);
                    }
                }

                // Unparseable files leave null slots; filter them out.
                _compilation = HyperstoreCompilation.Create("C#", trees.Where(t => t != null));
                if (_compilation.HasErrors)
                {
                    ClearOutputFile();
                    return false;
                }

                var output = _compilation.Generate();
                WriteOutputFile(output);
                return true;
            }
            catch (Exception)
            {
                ClearOutputFile();
                // BUGFIX: was 'throw ex;', which resets the stack trace.
                // A bare rethrow preserves the original exception context.
                throw;
            }
        }

        /// <summary>Deletes any previously generated output file and resets OutputFilePath.</summary>
        private void ClearOutputFile()
        {
            var tmp = MakeOutputFilePath();
            OutputFilePath = null;
            if (File.Exists(tmp))
                File.Delete(tmp);
        }

        /// <summary>Writes the generated source, creating the output directory if needed.</summary>
        private void WriteOutputFile(string output)
        {
            OutputFilePath = MakeOutputFilePath();
            Directory.CreateDirectory(Path.GetDirectoryName(OutputFilePath));
            File.WriteAllText(OutputFilePath, output);
            // Normalize to an absolute path for consumers of OutputFilePath.
            OutputFilePath = new FileInfo(OutputFilePath).FullName;
        }

        private string MakeOutputFilePath()
        {
            return Path.Combine(_outputDirectory, OutputFileName);
        }

        /// <summary>
        /// Reads an input file, resolving it against the base path when one
        /// was supplied. Returns false (and records a diagnostic) when the
        /// file does not exist.
        /// </summary>
        /// <param name="inputFile">Relative or absolute input path.</param>
        /// <param name="content">Receives the file text on success.</param>
        /// <param name="normalizedPath">Receives the resolved absolute path.</param>
        public bool OpenFile(string inputFile, out string content, out string normalizedPath)
        {
            content = null;
            normalizedPath = _basePath != null ? Path.Combine(_basePath, inputFile) : inputFile;
            if (!File.Exists(normalizedPath))
            {
                AddDiagnostic("File {0} not found.", normalizedPath);
                return false;
            }

            using (var stream = File.OpenRead(normalizedPath))
            {
                using (var reader = new StreamReader(stream))
                {
                    content = reader.ReadToEnd();
                    normalizedPath = stream.Name;
                }
            }
            return true;
        }

        private void AddDiagnostic(string msg, params string[] args)
        {
            // NOTE(review): the created diagnostic is never stored or surfaced —
            // the Diagnostics property only reflects _compilation.GetDiagnostics(),
            // so "file not found" errors are silently lost. Presumably this was
            // meant to be collected somewhere; confirm the intended behavior
            // before changing it.
            var diag = Diagnostic.Create(
                            args.Length == 0 ? msg : String.Format(msg, args),
                            DiagnosticSeverity.Error);
        }

        /// <summary>Absolute path of the generated file, or null if generation failed.</summary>
        public string OutputFilePath { get; private set; }
    }
}
Hyperstore/Hyperstore.CodeAnalysis
Hyperstore.CodeAnalysis/Compilation/HyperstoreCompiler.cs
C#
apache-2.0
4,432
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import automl from '@google-cloud/automl'; import * as dayjs from 'dayjs'; import * as express from 'express'; import { auth } from 'google-auth-library'; import * as morgan from 'morgan'; import { AUTOML_API_SCOPE, AUTOML_API_URL, AUTOML_BUCKET_URL, LOCATION, PROJECT_ID, } from './constants'; import { OperationMetadata } from './types'; export const app = express(); app.use(express.json()); app.use(morgan('combined')); const client = new automl.v1beta1.AutoMlClient(); // Controls model type. 
For more options, see: // https://cloud.google.com/vision/automl/alpha/docs/reference/rest/v1beta1/projects.locations.models#imageclassificationmodelmetadata const DEFAULT_MODEL_TYPE = 'mobile-high-accuracy-1'; const DEFAULT_TRAIN_BUDGET = 1; const DATASET_NAME_REGEX = new RegExp('^[a-zA-Z_0-9]+$'); const MODEL_VERSION_FORMAT = 'vYYYYMMDDHHmmss'; const parent = client.locationPath(PROJECT_ID, LOCATION); // A model as returned by AutoML /models response interface Model { name: string; datasetId: string; displayName: string; createTime: string; updateTime: string; imageClassificationModelMetadata: { trainBudget: string; trainCost: string; stopReason: string; modelType: string; }; } interface ModelResp { model: Model[]; } /// create a new dataset function createDataset(displayName: String): Promise<any> { const dataset = { name: displayName, displayName, imageClassificationDatasetMetadata: { classificationType: 'MULTICLASS', }, }; return client.createDataset({ parent, dataset }); } const extractIdFromName = (datasetName: string): string => { const parts = datasetName.split('/'); return parts[parts.length - 1]; }; /// returns the ID of a dataset of the format ICN** or null if not found function getDatasetName(automlId: string): Promise<string | null> { return client.listDatasets({ parent }).then((responses: any[]) => { const datasets = responses[0]; for (const dataset of datasets) { if (extractIdFromName(dataset['name']) === automlId) { return dataset['name']; } } return null; }); } /// initiates an operation on automl to start importing data for a dataset async function importDataset( name: string, displayName: string, labels: string ): Promise<OperationMetadata> { const inputConfig = { gcsSource: { inputUris: [`${AUTOML_BUCKET_URL}/${displayName}/${labels}`], }, }; return client .importData({ name, inputConfig }) .then((responses: any[]) => responses[1]); // initial api response with operation metadata } /** * List all datasets */ app.get('/datasets', async (req, 
res, next) => { try { const authClient = await auth.getClient({ scopes: [AUTOML_API_SCOPE] }); const url = `${AUTOML_API_URL}/datasets`; const resp = await authClient.request({ url }); res.json(resp.data); } catch (err) { console.error(err); next(err); } }); /** * Endpoint to create a new dataset in automl. Requires a name parameter */ app.post('/datasets', async (req, res, next) => { try { const { displayName } = req.body; if (displayName === undefined) { res.status(400).send('Expected a dataset `displayName`'); return; } if (!displayName.match(DATASET_NAME_REGEX)) { res .status(400) .send( 'The displayName contains a not allowed character, the' + ' only allowed ones are ASCII Latin letters A-Z and a-z, an underscore (_),' + ' and ASCII digits 0-9' ); return; } console.info(`Attempting to create dataset: ${displayName}`); const [response] = await createDataset(displayName); res.json(response); } catch (err) { res.status(500); res.json({message: err.message}); console.error(err); } }); /** * Endpoint to delete dataset from automl */ app.delete('/datasets/:datasetId', async (req, res, next) => { try { const { datasetId } = req.params; if (!datasetId) { res.status(400).send(`Expected datasetId: ${datasetId}`); return; } const name = await getDatasetName(datasetId); if (name === null) { res.status(404).send(`No dataset found for id: ${datasetId}`); return; } const resp = await client.deleteDataset({ name }); console.log(resp); res.json(); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** * Endpoint to initiate importing data for a dataset in automl. * * Inputs: * - datasetId: string - automl ID of the dataset * - name: string - display name of the dataset * - labels: string - file name containing the labels information. 
e.g * labels.csv */ app.post('/import', async (req, res, next) => { const { name, labels, datasetId } = req.body; if (!name) { res.status(400).json({ error: 'Need a dataset name' }); return; } if (!datasetId) { res.status(400).json({ error: 'Need a dataset Id' }); return; } if (!labels) { res.status(400).json({ error: 'Need a path for labels file' }); return; } try { const datasetName = await getDatasetName(datasetId); if (datasetName === null) { res.status(400).json({ error: 'Dataset not found' }); return; } const operationMetadata = await importDataset(datasetName, name, labels); res.json(operationMetadata); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** * Endpoint to initiate creation of a new model for the provided dataset * * Inputs * - datasetId: string - automl ID of the dataset * - trainBudget (optional) * - modelType (optional) * Calls the create model api on AutoML * https://cloud.google.com/vision/automl/alpha/docs/reference/rest/v1beta1/projects.locations.models/create * * Uses the rest API */ app.post('/train', async (req, res, next) => { const { datasetId } = req.body; if (!datasetId) { res.status(400).json({ error: 'Need a dataset Id' }); return; } let { trainBudget, modelType } = req.body; trainBudget = trainBudget === undefined ? DEFAULT_TRAIN_BUDGET : trainBudget; modelType = modelType === undefined ? 
DEFAULT_MODEL_TYPE : modelType; console.log( `Using train budget: ${trainBudget}, and model type: ${modelType}` ); try { const datasetName = await getDatasetName(datasetId); if (datasetName === null) { res.status(400).json({ error: 'Dataset not found' }); return; } const authClient = await auth.getClient({ scopes: [AUTOML_API_SCOPE] }); const url = `${AUTOML_API_URL}/models`; const resp = await authClient.request({ method: 'POST', data: { displayName: `${dayjs().format(MODEL_VERSION_FORMAT)}`, dataset_id: datasetId, imageClassificationModelMetadata: { trainBudget, modelType }, }, url, }); const operationMetadata = resp.data as OperationMetadata; res.json(operationMetadata); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** * Exports a model in tflite format to a gcspath * * modelId - AutoML model ID: "ICN1119584480450950787", * gcsPath - Path to which model is exported * "gs://${AUTOML_BUCKET}/models/on-device/<folder_name>" * * Note the model will be generated in a folder with timestamp as name. 
For * more, refer to * https://cloud.google.com/vision/automl/alpha/docs/deploy#deployment_on_mobile_models_not_core_ml */ app.post('/export', async (req, res, next) => { const { modelId, gcsPath } = req.body; if (!modelId) { res.status(400).send('need a model id: modelId'); return; } if (!gcsPath) { res.status(400).send('need a gcs path: gcsPath'); return; } const authClient = await auth.getClient({ scopes: [AUTOML_API_SCOPE] }); const url = `${AUTOML_API_URL}/models/${modelId}:export`; try { const operationMetadata = await authClient .request({ method: 'POST', url, data: { output_config: { model_format: 'tflite', gcs_destination: { output_uri_prefix: gcsPath, }, }, }, }) .then(resp => resp.data as OperationMetadata); res.json(operationMetadata); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** * Exports the latest generated model for the dataset */ app.post('/exportlatestmodel', async (req, res, next) => { const { datasetId, gcsPath } = req.body; if (!datasetId) { res.status(400).send('need a dataset id: datasetId'); return; } if (!gcsPath) { res.status(400).send('need a gcs path: gcsPath'); return; } try { // 1. Get all the models const modelsResp = (await getAllModels()).data as ModelResp; // 2. Filter the models for the provided dataset and get the latest model const datasetModels = modelsResp.model.filter( m => m.datasetId === datasetId && m.imageClassificationModelMetadata.modelType.startsWith('mobile-') ); if (datasetModels === undefined) { throw new Error('No models found for this dataset'); } // 3. Find the latest (based on createTime) model const latestModel = datasetModels.sort( (m1, m2) => new Date(m2.createTime).getTime() - new Date(m1.createTime).getTime() )[0]; // 3. 
Initiate its export console.log('Initiating export for the latest model', latestModel); const modelId = extractIdFromName(latestModel.name); const authClient = await auth.getClient({ scopes: [AUTOML_API_SCOPE] }); const url = `${AUTOML_API_URL}/models/${modelId}:export`; const operationMetadata = await authClient .request({ method: 'POST', url, data: { output_config: { model_format: 'tflite', gcs_destination: { output_uri_prefix: gcsPath, }, }, }, }) .then(resp => resp.data as OperationMetadata); res.json(operationMetadata); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** * List all models - trying out the REST API */ app.get('/models', async (req, res, next) => { try { const resp = await getAllModels(); res.json(resp.data); } catch (err) { console.error(err); res.status(500); res.json({message: err.message}); } }); /** Queries all models from AutoML */ async function getAllModels() { const authClient = await auth.getClient({ scopes: [AUTOML_API_SCOPE] }); const url = `${AUTOML_API_URL}/models`; return authClient.request({ url }); }
FirebaseExtended/mlkit-custom-image-classifier
functions/src/automlapi.ts
TypeScript
apache-2.0
11,363
//===-- ARMISelDAGToDAG.cpp - A dag to dag inst selector for ARM ----------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This file defines an instruction selector for the ARM target. // //===----------------------------------------------------------------------===// #include "ARM.h" #include "ARMBaseInstrInfo.h" #include "ARMTargetMachine.h" #include "MCTargetDesc/ARMAddressingModes.h" #include "Utils/ARMBaseInfo.h" #include "llvm/ADT/StringSwitch.h" #include "llvm/CodeGen/MachineFrameInfo.h" #include "llvm/CodeGen/MachineFunction.h" #include "llvm/CodeGen/MachineInstrBuilder.h" #include "llvm/CodeGen/MachineRegisterInfo.h" #include "llvm/CodeGen/SelectionDAG.h" #include "llvm/CodeGen/SelectionDAGISel.h" #include "llvm/CodeGen/TargetLowering.h" #include "llvm/IR/CallingConv.h" #include "llvm/IR/Constants.h" #include "llvm/IR/DerivedTypes.h" #include "llvm/IR/Function.h" #include "llvm/IR/Intrinsics.h" #include "llvm/IR/LLVMContext.h" #include "llvm/Support/CommandLine.h" #include "llvm/Support/Debug.h" #include "llvm/Support/ErrorHandling.h" #include "llvm/Target/TargetOptions.h" using namespace llvm; #define DEBUG_TYPE "arm-isel" static cl::opt<bool> DisableShifterOp("disable-shifter-op", cl::Hidden, cl::desc("Disable isel of shifter-op"), cl::init(false)); //===--------------------------------------------------------------------===// /// ARMDAGToDAGISel - ARM specific code to select ARM machine /// instructions for SelectionDAG operations. /// namespace { class ARMDAGToDAGISel : public SelectionDAGISel { /// Subtarget - Keep a pointer to the ARMSubtarget around so that we can /// make the right decision when generating code for different targets. 
const ARMSubtarget *Subtarget; public: explicit ARMDAGToDAGISel(ARMBaseTargetMachine &tm, CodeGenOpt::Level OptLevel) : SelectionDAGISel(tm, OptLevel) {} bool runOnMachineFunction(MachineFunction &MF) override { // Reset the subtarget each time through. Subtarget = &MF.getSubtarget<ARMSubtarget>(); SelectionDAGISel::runOnMachineFunction(MF); return true; } StringRef getPassName() const override { return "ARM Instruction Selection"; } void PreprocessISelDAG() override; /// getI32Imm - Return a target constant of type i32 with the specified /// value. inline SDValue getI32Imm(unsigned Imm, const SDLoc &dl) { return CurDAG->getTargetConstant(Imm, dl, MVT::i32); } void Select(SDNode *N) override; bool hasNoVMLxHazardUse(SDNode *N) const; bool isShifterOpProfitable(const SDValue &Shift, ARM_AM::ShiftOpc ShOpcVal, unsigned ShAmt); bool SelectRegShifterOperand(SDValue N, SDValue &A, SDValue &B, SDValue &C, bool CheckProfitability = true); bool SelectImmShifterOperand(SDValue N, SDValue &A, SDValue &B, bool CheckProfitability = true); bool SelectShiftRegShifterOperand(SDValue N, SDValue &A, SDValue &B, SDValue &C) { // Don't apply the profitability check return SelectRegShifterOperand(N, A, B, C, false); } bool SelectShiftImmShifterOperand(SDValue N, SDValue &A, SDValue &B) { // Don't apply the profitability check return SelectImmShifterOperand(N, A, B, false); } bool SelectAddLikeOr(SDNode *Parent, SDValue N, SDValue &Out); bool SelectAddrModeImm12(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectLdStSOReg(SDValue N, SDValue &Base, SDValue &Offset, SDValue &Opc); bool SelectCMOVPred(SDValue N, SDValue &Pred, SDValue &Reg) { const ConstantSDNode *CN = cast<ConstantSDNode>(N); Pred = CurDAG->getTargetConstant(CN->getZExtValue(), SDLoc(N), MVT::i32); Reg = CurDAG->getRegister(ARM::CPSR, MVT::i32); return true; } bool SelectAddrMode2OffsetReg(SDNode *Op, SDValue N, SDValue &Offset, SDValue &Opc); bool SelectAddrMode2OffsetImm(SDNode *Op, SDValue N, SDValue &Offset, 
SDValue &Opc); bool SelectAddrMode2OffsetImmPre(SDNode *Op, SDValue N, SDValue &Offset, SDValue &Opc); bool SelectAddrOffsetNone(SDValue N, SDValue &Base); bool SelectAddrMode3(SDValue N, SDValue &Base, SDValue &Offset, SDValue &Opc); bool SelectAddrMode3Offset(SDNode *Op, SDValue N, SDValue &Offset, SDValue &Opc); bool IsAddressingMode5(SDValue N, SDValue &Base, SDValue &Offset, bool FP16); bool SelectAddrMode5(SDValue N, SDValue &Base, SDValue &Offset); bool SelectAddrMode5FP16(SDValue N, SDValue &Base, SDValue &Offset); bool SelectAddrMode6(SDNode *Parent, SDValue N, SDValue &Addr,SDValue &Align); bool SelectAddrMode6Offset(SDNode *Op, SDValue N, SDValue &Offset); bool SelectAddrModePC(SDValue N, SDValue &Offset, SDValue &Label); // Thumb Addressing Modes: bool SelectThumbAddrModeRR(SDValue N, SDValue &Base, SDValue &Offset); bool SelectThumbAddrModeRRSext(SDValue N, SDValue &Base, SDValue &Offset); bool SelectThumbAddrModeImm5S(SDValue N, unsigned Scale, SDValue &Base, SDValue &OffImm); bool SelectThumbAddrModeImm5S1(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectThumbAddrModeImm5S2(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectThumbAddrModeImm5S4(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectThumbAddrModeSP(SDValue N, SDValue &Base, SDValue &OffImm); // Thumb 2 Addressing Modes: bool SelectT2AddrModeImm12(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectT2AddrModeImm8(SDValue N, SDValue &Base, SDValue &OffImm); bool SelectT2AddrModeImm8Offset(SDNode *Op, SDValue N, SDValue &OffImm); bool SelectT2AddrModeSoReg(SDValue N, SDValue &Base, SDValue &OffReg, SDValue &ShImm); bool SelectT2AddrModeExclusive(SDValue N, SDValue &Base, SDValue &OffImm); inline bool is_so_imm(unsigned Imm) const { return ARM_AM::getSOImmVal(Imm) != -1; } inline bool is_so_imm_not(unsigned Imm) const { return ARM_AM::getSOImmVal(~Imm) != -1; } inline bool is_t2_so_imm(unsigned Imm) const { return ARM_AM::getT2SOImmVal(Imm) != -1; } inline bool 
is_t2_so_imm_not(unsigned Imm) const { return ARM_AM::getT2SOImmVal(~Imm) != -1; } // Include the pieces autogenerated from the target description. #include "ARMGenDAGISel.inc" private: void transferMemOperands(SDNode *Src, SDNode *Dst); /// Indexed (pre/post inc/dec) load matching code for ARM. bool tryARMIndexedLoad(SDNode *N); bool tryT1IndexedLoad(SDNode *N); bool tryT2IndexedLoad(SDNode *N); /// SelectVLD - Select NEON load intrinsics. NumVecs should be /// 1, 2, 3 or 4. The opcode arrays specify the instructions used for /// loads of D registers and even subregs and odd subregs of Q registers. /// For NumVecs <= 2, QOpcodes1 is not used. void SelectVLD(SDNode *N, bool isUpdating, unsigned NumVecs, const uint16_t *DOpcodes, const uint16_t *QOpcodes0, const uint16_t *QOpcodes1); /// SelectVST - Select NEON store intrinsics. NumVecs should /// be 1, 2, 3 or 4. The opcode arrays specify the instructions used for /// stores of D registers and even subregs and odd subregs of Q registers. /// For NumVecs <= 2, QOpcodes1 is not used. void SelectVST(SDNode *N, bool isUpdating, unsigned NumVecs, const uint16_t *DOpcodes, const uint16_t *QOpcodes0, const uint16_t *QOpcodes1); /// SelectVLDSTLane - Select NEON load/store lane intrinsics. NumVecs should /// be 2, 3 or 4. The opcode arrays specify the instructions used for /// load/store of D registers and Q registers. void SelectVLDSTLane(SDNode *N, bool IsLoad, bool isUpdating, unsigned NumVecs, const uint16_t *DOpcodes, const uint16_t *QOpcodes); /// SelectVLDDup - Select NEON load-duplicate intrinsics. NumVecs /// should be 1, 2, 3 or 4. The opcode array specifies the instructions used /// for loading D registers. void SelectVLDDup(SDNode *N, bool IsIntrinsic, bool isUpdating, unsigned NumVecs, const uint16_t *DOpcodes, const uint16_t *QOpcodes0 = nullptr, const uint16_t *QOpcodes1 = nullptr); /// Try to select SBFX/UBFX instructions for ARM. 
bool tryV6T2BitfieldExtractOp(SDNode *N, bool isSigned); // Select special operations if node forms integer ABS pattern bool tryABSOp(SDNode *N); bool tryReadRegister(SDNode *N); bool tryWriteRegister(SDNode *N); bool tryInlineAsm(SDNode *N); void SelectCMPZ(SDNode *N, bool &SwitchEQNEToPLMI); void SelectCMP_SWAP(SDNode *N); /// SelectInlineAsmMemoryOperand - Implement addressing mode selection for /// inline asm expressions. bool SelectInlineAsmMemoryOperand(const SDValue &Op, unsigned ConstraintID, std::vector<SDValue> &OutOps) override; // Form pairs of consecutive R, S, D, or Q registers. SDNode *createGPRPairNode(EVT VT, SDValue V0, SDValue V1); SDNode *createSRegPairNode(EVT VT, SDValue V0, SDValue V1); SDNode *createDRegPairNode(EVT VT, SDValue V0, SDValue V1); SDNode *createQRegPairNode(EVT VT, SDValue V0, SDValue V1); // Form sequences of 4 consecutive S, D, or Q registers. SDNode *createQuadSRegsNode(EVT VT, SDValue V0, SDValue V1, SDValue V2, SDValue V3); SDNode *createQuadDRegsNode(EVT VT, SDValue V0, SDValue V1, SDValue V2, SDValue V3); SDNode *createQuadQRegsNode(EVT VT, SDValue V0, SDValue V1, SDValue V2, SDValue V3); // Get the alignment operand for a NEON VLD or VST instruction. SDValue GetVLDSTAlign(SDValue Align, const SDLoc &dl, unsigned NumVecs, bool is64BitVector); /// Returns the number of instructions required to materialize the given /// constant in a register, or 3 if a literal pool load is needed. unsigned ConstantMaterializationCost(unsigned Val) const; /// Checks if N is a multiplication by a constant where we can extract out a /// power of two from the constant so that it can be used in a shift, but only /// if it simplifies the materialization of the constant. Returns true if it /// is, and assigns to PowerOfTwo the power of two that should be extracted /// out and to NewMulConst the new constant to be multiplied by. 
bool canExtractShiftFromMul(const SDValue &N, unsigned MaxShift, unsigned &PowerOfTwo, SDValue &NewMulConst) const; /// Replace N with M in CurDAG, in a way that also ensures that M gets /// selected when N would have been selected. void replaceDAGValue(const SDValue &N, SDValue M); }; } /// isInt32Immediate - This method tests to see if the node is a 32-bit constant /// operand. If so Imm will receive the 32-bit value. static bool isInt32Immediate(SDNode *N, unsigned &Imm) { if (N->getOpcode() == ISD::Constant && N->getValueType(0) == MVT::i32) { Imm = cast<ConstantSDNode>(N)->getZExtValue(); return true; } return false; } // isInt32Immediate - This method tests to see if a constant operand. // If so Imm will receive the 32 bit value. static bool isInt32Immediate(SDValue N, unsigned &Imm) { return isInt32Immediate(N.getNode(), Imm); } // isOpcWithIntImmediate - This method tests to see if the node is a specific // opcode and that it has a immediate integer right operand. // If so Imm will receive the 32 bit value. static bool isOpcWithIntImmediate(SDNode *N, unsigned Opc, unsigned& Imm) { return N->getOpcode() == Opc && isInt32Immediate(N->getOperand(1).getNode(), Imm); } /// Check whether a particular node is a constant value representable as /// (N * Scale) where (N in [\p RangeMin, \p RangeMax). /// /// \param ScaledConstant [out] - On success, the pre-scaled constant value. static bool isScaledConstantInRange(SDValue Node, int Scale, int RangeMin, int RangeMax, int &ScaledConstant) { assert(Scale > 0 && "Invalid scale!"); // Check that this is a constant. 
const ConstantSDNode *C = dyn_cast<ConstantSDNode>(Node); if (!C) return false; ScaledConstant = (int) C->getZExtValue(); if ((ScaledConstant % Scale) != 0) return false; ScaledConstant /= Scale; return ScaledConstant >= RangeMin && ScaledConstant < RangeMax; } void ARMDAGToDAGISel::PreprocessISelDAG() { if (!Subtarget->hasV6T2Ops()) return; bool isThumb2 = Subtarget->isThumb(); for (SelectionDAG::allnodes_iterator I = CurDAG->allnodes_begin(), E = CurDAG->allnodes_end(); I != E; ) { SDNode *N = &*I++; // Preincrement iterator to avoid invalidation issues. if (N->getOpcode() != ISD::ADD) continue; // Look for (add X1, (and (srl X2, c1), c2)) where c2 is constant with // leading zeros, followed by consecutive set bits, followed by 1 or 2 // trailing zeros, e.g. 1020. // Transform the expression to // (add X1, (shl (and (srl X2, c1), (c2>>tz)), tz)) where tz is the number // of trailing zeros of c2. The left shift would be folded as an shifter // operand of 'add' and the 'and' and 'srl' would become a bits extraction // node (UBFX). SDValue N0 = N->getOperand(0); SDValue N1 = N->getOperand(1); unsigned And_imm = 0; if (!isOpcWithIntImmediate(N1.getNode(), ISD::AND, And_imm)) { if (isOpcWithIntImmediate(N0.getNode(), ISD::AND, And_imm)) std::swap(N0, N1); } if (!And_imm) continue; // Check if the AND mask is an immediate of the form: 000.....1111111100 unsigned TZ = countTrailingZeros(And_imm); if (TZ != 1 && TZ != 2) // Be conservative here. Shifter operands aren't always free. e.g. On // Swift, left shifter operand of 1 / 2 for free but others are not. // e.g. // ubfx r3, r1, #16, #8 // ldr.w r3, [r0, r3, lsl #2] // vs. // mov.w r9, #1020 // and.w r2, r9, r1, lsr #14 // ldr r2, [r0, r2] continue; And_imm >>= TZ; if (And_imm & (And_imm + 1)) continue; // Look for (and (srl X, c1), c2). 
SDValue Srl = N1.getOperand(0); unsigned Srl_imm = 0; if (!isOpcWithIntImmediate(Srl.getNode(), ISD::SRL, Srl_imm) || (Srl_imm <= 2)) continue; // Make sure first operand is not a shifter operand which would prevent // folding of the left shift. SDValue CPTmp0; SDValue CPTmp1; SDValue CPTmp2; if (isThumb2) { if (SelectImmShifterOperand(N0, CPTmp0, CPTmp1)) continue; } else { if (SelectImmShifterOperand(N0, CPTmp0, CPTmp1) || SelectRegShifterOperand(N0, CPTmp0, CPTmp1, CPTmp2)) continue; } // Now make the transformation. Srl = CurDAG->getNode(ISD::SRL, SDLoc(Srl), MVT::i32, Srl.getOperand(0), CurDAG->getConstant(Srl_imm + TZ, SDLoc(Srl), MVT::i32)); N1 = CurDAG->getNode(ISD::AND, SDLoc(N1), MVT::i32, Srl, CurDAG->getConstant(And_imm, SDLoc(Srl), MVT::i32)); N1 = CurDAG->getNode(ISD::SHL, SDLoc(N1), MVT::i32, N1, CurDAG->getConstant(TZ, SDLoc(Srl), MVT::i32)); CurDAG->UpdateNodeOperands(N, N0, N1); } } /// hasNoVMLxHazardUse - Return true if it's desirable to select a FP MLA / MLS /// node. VFP / NEON fp VMLA / VMLS instructions have special RAW hazards (at /// least on current ARM implementations) which should be avoidded. bool ARMDAGToDAGISel::hasNoVMLxHazardUse(SDNode *N) const { if (OptLevel == CodeGenOpt::None) return true; if (!Subtarget->hasVMLxHazards()) return true; if (!N->hasOneUse()) return false; SDNode *Use = *N->use_begin(); if (Use->getOpcode() == ISD::CopyToReg) return true; if (Use->isMachineOpcode()) { const ARMBaseInstrInfo *TII = static_cast<const ARMBaseInstrInfo *>( CurDAG->getSubtarget().getInstrInfo()); const MCInstrDesc &MCID = TII->get(Use->getMachineOpcode()); if (MCID.mayStore()) return true; unsigned Opcode = MCID.getOpcode(); if (Opcode == ARM::VMOVRS || Opcode == ARM::VMOVRRD) return true; // vmlx feeding into another vmlx. We actually want to unfold // the use later in the MLxExpansion pass. e.g. // vmla // vmla (stall 8 cycles) // // vmul (5 cycles) // vadd (5 cycles) // vmla // This adds up to about 18 - 19 cycles. 
// // vmla // vmul (stall 4 cycles) // vadd adds up to about 14 cycles. return TII->isFpMLxInstruction(Opcode); } return false; } bool ARMDAGToDAGISel::isShifterOpProfitable(const SDValue &Shift, ARM_AM::ShiftOpc ShOpcVal, unsigned ShAmt) { if (!Subtarget->isLikeA9() && !Subtarget->isSwift()) return true; if (Shift.hasOneUse()) return true; // R << 2 is free. return ShOpcVal == ARM_AM::lsl && (ShAmt == 2 || (Subtarget->isSwift() && ShAmt == 1)); } unsigned ARMDAGToDAGISel::ConstantMaterializationCost(unsigned Val) const { if (Subtarget->isThumb()) { if (Val <= 255) return 1; // MOV if (Subtarget->hasV6T2Ops() && (Val <= 0xffff || // MOV ARM_AM::getT2SOImmVal(Val) != -1 || // MOVW ARM_AM::getT2SOImmVal(~Val) != -1)) // MVN return 1; if (Val <= 510) return 2; // MOV + ADDi8 if (~Val <= 255) return 2; // MOV + MVN if (ARM_AM::isThumbImmShiftedVal(Val)) return 2; // MOV + LSL } else { if (ARM_AM::getSOImmVal(Val) != -1) return 1; // MOV if (ARM_AM::getSOImmVal(~Val) != -1) return 1; // MVN if (Subtarget->hasV6T2Ops() && Val <= 0xffff) return 1; // MOVW if (ARM_AM::isSOImmTwoPartVal(Val)) return 2; // two instrs } if (Subtarget->useMovt()) return 2; // MOVW + MOVT return 3; // Literal pool load } bool ARMDAGToDAGISel::canExtractShiftFromMul(const SDValue &N, unsigned MaxShift, unsigned &PowerOfTwo, SDValue &NewMulConst) const { assert(N.getOpcode() == ISD::MUL); assert(MaxShift > 0); // If the multiply is used in more than one place then changing the constant // will make other uses incorrect, so don't. if (!N.hasOneUse()) return false; // Check if the multiply is by a constant ConstantSDNode *MulConst = dyn_cast<ConstantSDNode>(N.getOperand(1)); if (!MulConst) return false; // If the constant is used in more than one place then modifying it will mean // we need to materialize two constants instead of one, which is a bad idea. 
// (Continuation of canExtractShiftFromMul — its signature is above this chunk.)
  if (!MulConst->hasOneUse())
    return false;
  unsigned MulConstVal = MulConst->getZExtValue();
  if (MulConstVal == 0)
    return false;

  // Find the largest power of 2 that MulConstVal is a multiple of
  PowerOfTwo = MaxShift;
  while ((MulConstVal % (1 << PowerOfTwo)) != 0) {
    --PowerOfTwo;
    if (PowerOfTwo == 0)
      return false;
  }

  // Only optimise if the new cost is better
  unsigned NewMulConstVal = MulConstVal / (1 << PowerOfTwo);
  NewMulConst = CurDAG->getConstant(NewMulConstVal, SDLoc(N), MVT::i32);
  unsigned OldCost = ConstantMaterializationCost(MulConstVal);
  unsigned NewCost = ConstantMaterializationCost(NewMulConstVal);
  return NewCost < OldCost;
}

/// Replace N with M in the DAG: reposition M at N's place in the node
/// ordering, then redirect all uses of N to M.
void ARMDAGToDAGISel::replaceDAGValue(const SDValue &N, SDValue M) {
  CurDAG->RepositionNode(N.getNode()->getIterator(), M.getNode());
  ReplaceUses(N, M);
}

/// Match a shift-by-immediate shifter operand (so_reg_imm): produces the
/// shifted base register in BaseReg and the encoded shift opcode/amount
/// in Opc.
bool ARMDAGToDAGISel::SelectImmShifterOperand(SDValue N,
                                              SDValue &BaseReg,
                                              SDValue &Opc,
                                              bool CheckProfitability) {
  if (DisableShifterOp)
    return false;

  // If N is a multiply-by-constant and it's profitable to extract a shift and
  // use it in a shifted operand do so.
  if (N.getOpcode() == ISD::MUL) {
    unsigned PowerOfTwo = 0;
    SDValue NewMulConst;
    if (canExtractShiftFromMul(N, 31, PowerOfTwo, NewMulConst)) {
      HandleSDNode Handle(N);
      SDLoc Loc(N);
      replaceDAGValue(N.getOperand(1), NewMulConst);
      BaseReg = Handle.getValue();
      Opc = CurDAG->getTargetConstant(
          ARM_AM::getSORegOpc(ARM_AM::lsl, PowerOfTwo), Loc, MVT::i32);
      return true;
    }
  }

  ARM_AM::ShiftOpc ShOpcVal = ARM_AM::getShiftOpcForNode(N.getOpcode());

  // Don't match base register only case. That is matched to a separate
  // lower complexity pattern with explicit register operand.
  if (ShOpcVal == ARM_AM::no_shift) return false;

  BaseReg = N.getOperand(0);
  unsigned ShImmVal = 0;
  // The shift amount must be an immediate for this form.
  ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1));
  if (!RHS) return false;
  ShImmVal = RHS->getZExtValue() & 31;
  Opc = CurDAG->getTargetConstant(ARM_AM::getSORegOpc(ShOpcVal, ShImmVal),
                                  SDLoc(N), MVT::i32);
  return true;
}

/// Match a shift-by-register shifter operand (so_reg_reg): BaseReg is the
/// value being shifted, ShReg the register holding the shift amount, and Opc
/// the encoded shift opcode.
bool ARMDAGToDAGISel::SelectRegShifterOperand(SDValue N,
                                              SDValue &BaseReg,
                                              SDValue &ShReg,
                                              SDValue &Opc,
                                              bool CheckProfitability) {
  if (DisableShifterOp)
    return false;

  ARM_AM::ShiftOpc ShOpcVal = ARM_AM::getShiftOpcForNode(N.getOpcode());

  // Don't match base register only case. That is matched to a separate
  // lower complexity pattern with explicit register operand.
  if (ShOpcVal == ARM_AM::no_shift) return false;

  BaseReg = N.getOperand(0);
  unsigned ShImmVal = 0;
  // A constant shift amount belongs to the immediate form, not this one.
  ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1));
  if (RHS) return false;

  ShReg = N.getOperand(1);
  if (CheckProfitability && !isShifterOpProfitable(N, ShOpcVal, ShImmVal))
    return false;
  Opc = CurDAG->getTargetConstant(ARM_AM::getSORegOpc(ShOpcVal, ShImmVal),
                                  SDLoc(N), MVT::i32);
  return true;
}

// Determine whether an ISD::OR's operands are suitable to turn the operation
// into an addition, which often has more compact encodings.
bool ARMDAGToDAGISel::SelectAddLikeOr(SDNode *Parent, SDValue N, SDValue &Out) {
  assert(Parent->getOpcode() == ISD::OR && "unexpected parent");
  Out = N;
  return CurDAG->haveNoCommonBitsSet(N, Parent->getOperand(1));
}

/// Match an ARM addrmode_imm12 address: base register plus a signed 12-bit
/// immediate offset. Always succeeds, falling back to "base only" with a
/// zero offset.
bool ARMDAGToDAGISel::SelectAddrModeImm12(SDValue N,
                                          SDValue &Base,
                                          SDValue &OffImm) {
  // Match simple R + imm12 operands.

  // Base only.
  if (N.getOpcode() != ISD::ADD && N.getOpcode() != ISD::SUB &&
      !CurDAG->isBaseWithConstantOffset(N)) {
    if (N.getOpcode() == ISD::FrameIndex) {
      // Match frame index.
      int FI = cast<FrameIndexSDNode>(N)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
      return true;
    }

    if (N.getOpcode() == ARMISD::Wrapper &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalAddress &&
        N.getOperand(0).getOpcode() != ISD::TargetExternalSymbol &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalTLSAddress) {
      Base = N.getOperand(0);
    } else
      Base = N;
    OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
    return true;
  }

  if (ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
    int RHSC = (int)RHS->getSExtValue();
    if (N.getOpcode() == ISD::SUB)
      RHSC = -RHSC;

    if (RHSC > -0x1000 && RHSC < 0x1000) { // 12 bits
      Base   = N.getOperand(0);
      if (Base.getOpcode() == ISD::FrameIndex) {
        int FI = cast<FrameIndexSDNode>(Base)->getIndex();
        Base = CurDAG->getTargetFrameIndex(
            FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      }
      OffImm = CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32);
      return true;
    }
  }

  // Base only.
  Base = N;
  OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
  return true;
}

/// Match an ARM addrmode2 register-offset address: base register plus a
/// (possibly shifted) offset register, for loads/stores that cannot use a
/// simple imm12 offset.
bool ARMDAGToDAGISel::SelectLdStSOReg(SDValue N, SDValue &Base, SDValue &Offset,
                                      SDValue &Opc) {
  if (N.getOpcode() == ISD::MUL &&
      ((!Subtarget->isLikeA9() && !Subtarget->isSwift()) || N.hasOneUse())) {
    if (ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
      // X * [3,5,9] -> X + X * [2,4,8] etc.
      int RHSC = (int)RHS->getZExtValue();
      if (RHSC & 1) {
        RHSC = RHSC & ~1;
        ARM_AM::AddrOpc AddSub = ARM_AM::add;
        if (RHSC < 0) {
          AddSub = ARM_AM::sub;
          RHSC = - RHSC;
        }
        if (isPowerOf2_32(RHSC)) {
          unsigned ShAmt = Log2_32(RHSC);
          Base = Offset = N.getOperand(0);
          Opc = CurDAG->getTargetConstant(ARM_AM::getAM2Opc(AddSub, ShAmt,
                                                            ARM_AM::lsl),
                                          SDLoc(N), MVT::i32);
          return true;
        }
      }
    }
  }

  if (N.getOpcode() != ISD::ADD && N.getOpcode() != ISD::SUB &&
      // ISD::OR that is equivalent to an ISD::ADD.
      !CurDAG->isBaseWithConstantOffset(N))
    return false;

  // Leave simple R +/- imm12 operands for LDRi12
  if (N.getOpcode() == ISD::ADD || N.getOpcode() == ISD::OR) {
    int RHSC;
    if (isScaledConstantInRange(N.getOperand(1), /*Scale=*/1,
                                -0x1000+1, 0x1000, RHSC)) // 12 bits.
      return false;
  }

  // Otherwise this is R +/- [possibly shifted] R.
  ARM_AM::AddrOpc AddSub = N.getOpcode() == ISD::SUB ? ARM_AM::sub:ARM_AM::add;
  ARM_AM::ShiftOpc ShOpcVal =
    ARM_AM::getShiftOpcForNode(N.getOperand(1).getOpcode());
  unsigned ShAmt = 0;

  Base   = N.getOperand(0);
  Offset = N.getOperand(1);

  if (ShOpcVal != ARM_AM::no_shift) {
    // Check to see if the RHS of the shift is a constant, if not, we can't fold
    // it.
    if (ConstantSDNode *Sh =
           dyn_cast<ConstantSDNode>(N.getOperand(1).getOperand(1))) {
      ShAmt = Sh->getZExtValue();
      if (isShifterOpProfitable(Offset, ShOpcVal, ShAmt))
        Offset = N.getOperand(1).getOperand(0);
      else {
        ShAmt = 0;
        ShOpcVal = ARM_AM::no_shift;
      }
    } else {
      ShOpcVal = ARM_AM::no_shift;
    }
  }

  // Try matching (R shl C) + (R).
  if (N.getOpcode() != ISD::SUB && ShOpcVal == ARM_AM::no_shift &&
      !(Subtarget->isLikeA9() || Subtarget->isSwift() ||
        N.getOperand(0).hasOneUse())) {
    ShOpcVal = ARM_AM::getShiftOpcForNode(N.getOperand(0).getOpcode());
    if (ShOpcVal != ARM_AM::no_shift) {
      // Check to see if the RHS of the shift is a constant, if not, we can't
      // fold it.
      if (ConstantSDNode *Sh =
          dyn_cast<ConstantSDNode>(N.getOperand(0).getOperand(1))) {
        ShAmt = Sh->getZExtValue();
        if (isShifterOpProfitable(N.getOperand(0), ShOpcVal, ShAmt)) {
          Offset = N.getOperand(0).getOperand(0);
          Base = N.getOperand(1);
        } else {
          ShAmt = 0;
          ShOpcVal = ARM_AM::no_shift;
        }
      } else {
        ShOpcVal = ARM_AM::no_shift;
      }
    }
  }

  // If Offset is a multiply-by-constant and it's profitable to extract a shift
  // and use it in a shifted operand do so.
  if (Offset.getOpcode() == ISD::MUL && N.hasOneUse()) {
    unsigned PowerOfTwo = 0;
    SDValue NewMulConst;
    if (canExtractShiftFromMul(Offset, 31, PowerOfTwo, NewMulConst)) {
      HandleSDNode Handle(Offset);
      replaceDAGValue(Offset.getOperand(1), NewMulConst);
      Offset = Handle.getValue();
      ShAmt = PowerOfTwo;
      ShOpcVal = ARM_AM::lsl;
    }
  }

  Opc = CurDAG->getTargetConstant(ARM_AM::getAM2Opc(AddSub, ShAmt, ShOpcVal),
                                  SDLoc(N), MVT::i32);
  return true;
}

/// Match a register (possibly shifted) post/pre-indexed offset for an
/// addrmode2 load/store. Rejects offsets that fit the imm12 forms.
bool ARMDAGToDAGISel::SelectAddrMode2OffsetReg(SDNode *Op, SDValue N,
                                            SDValue &Offset, SDValue &Opc) {
  unsigned Opcode = Op->getOpcode();
  ISD::MemIndexedMode AM = (Opcode == ISD::LOAD)
    ? cast<LoadSDNode>(Op)->getAddressingMode()
    : cast<StoreSDNode>(Op)->getAddressingMode();
  ARM_AM::AddrOpc AddSub = (AM == ISD::PRE_INC || AM == ISD::POST_INC)
    ? ARM_AM::add : ARM_AM::sub;
  int Val;
  if (isScaledConstantInRange(N, /*Scale=*/1, 0, 0x1000, Val))
    return false;

  Offset = N;
  ARM_AM::ShiftOpc ShOpcVal = ARM_AM::getShiftOpcForNode(N.getOpcode());
  unsigned ShAmt = 0;
  if (ShOpcVal != ARM_AM::no_shift) {
    // Check to see if the RHS of the shift is a constant, if not, we can't fold
    // it.
    if (ConstantSDNode *Sh = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
      ShAmt = Sh->getZExtValue();
      if (isShifterOpProfitable(N, ShOpcVal, ShAmt))
        Offset = N.getOperand(0);
      else {
        ShAmt = 0;
        ShOpcVal = ARM_AM::no_shift;
      }
    } else {
      ShOpcVal = ARM_AM::no_shift;
    }
  }

  Opc = CurDAG->getTargetConstant(ARM_AM::getAM2Opc(AddSub, ShAmt, ShOpcVal),
                                  SDLoc(N), MVT::i32);
  return true;
}

/// Match an immediate offset for a pre-indexed addrmode2 load/store; the
/// offset is emitted signed (negated for the sub form).
bool ARMDAGToDAGISel::SelectAddrMode2OffsetImmPre(SDNode *Op, SDValue N,
                                            SDValue &Offset, SDValue &Opc) {
  unsigned Opcode = Op->getOpcode();
  ISD::MemIndexedMode AM = (Opcode == ISD::LOAD)
    ? cast<LoadSDNode>(Op)->getAddressingMode()
    : cast<StoreSDNode>(Op)->getAddressingMode();
  ARM_AM::AddrOpc AddSub = (AM == ISD::PRE_INC || AM == ISD::POST_INC)
    ? ARM_AM::add : ARM_AM::sub;
  int Val;
  if (isScaledConstantInRange(N, /*Scale=*/1, 0, 0x1000, Val)) { // 12 bits.
    if (AddSub == ARM_AM::sub) Val *= -1;
    Offset = CurDAG->getRegister(0, MVT::i32);
    Opc = CurDAG->getTargetConstant(Val, SDLoc(Op), MVT::i32);
    return true;
  }

  return false;
}

/// Match an immediate offset for a post-indexed addrmode2 load/store; the
/// offset is encoded via getAM2Opc together with the add/sub direction.
bool ARMDAGToDAGISel::SelectAddrMode2OffsetImm(SDNode *Op, SDValue N,
                                            SDValue &Offset, SDValue &Opc) {
  unsigned Opcode = Op->getOpcode();
  ISD::MemIndexedMode AM = (Opcode == ISD::LOAD)
    ? cast<LoadSDNode>(Op)->getAddressingMode()
    : cast<StoreSDNode>(Op)->getAddressingMode();
  ARM_AM::AddrOpc AddSub = (AM == ISD::PRE_INC || AM == ISD::POST_INC)
    ? ARM_AM::add : ARM_AM::sub;
  int Val;
  if (isScaledConstantInRange(N, /*Scale=*/1, 0, 0x1000, Val)) { // 12 bits.
    Offset = CurDAG->getRegister(0, MVT::i32);
    Opc = CurDAG->getTargetConstant(ARM_AM::getAM2Opc(AddSub, Val,
                                                      ARM_AM::no_shift),
                                    SDLoc(Op), MVT::i32);
    return true;
  }

  return false;
}

/// Match any address as a bare base register with no offset.
bool ARMDAGToDAGISel::SelectAddrOffsetNone(SDValue N, SDValue &Base) {
  Base = N;
  return true;
}

/// Match an ARM addrmode3 address (halfword/signed-byte/doubleword forms):
/// base register plus either an offset register or an 8-bit immediate.
bool ARMDAGToDAGISel::SelectAddrMode3(SDValue N,
                                      SDValue &Base, SDValue &Offset,
                                      SDValue &Opc) {
  if (N.getOpcode() == ISD::SUB) {
    // X - C  is canonicalize to X + -C, no need to handle it here.
    Base = N.getOperand(0);
    Offset = N.getOperand(1);
    Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(ARM_AM::sub, 0), SDLoc(N),
                                    MVT::i32);
    return true;
  }

  if (!CurDAG->isBaseWithConstantOffset(N)) {
    Base = N;
    if (N.getOpcode() == ISD::FrameIndex) {
      int FI = cast<FrameIndexSDNode>(N)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
    }
    Offset = CurDAG->getRegister(0, MVT::i32);
    Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(ARM_AM::add, 0), SDLoc(N),
                                    MVT::i32);
    return true;
  }

  // If the RHS is +/- imm8, fold into addr mode.
  int RHSC;
  if (isScaledConstantInRange(N.getOperand(1), /*Scale=*/1,
                              -256 + 1, 256, RHSC)) { // 8 bits.
    Base = N.getOperand(0);
    if (Base.getOpcode() == ISD::FrameIndex) {
      int FI = cast<FrameIndexSDNode>(Base)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
    }
    Offset = CurDAG->getRegister(0, MVT::i32);

    ARM_AM::AddrOpc AddSub = ARM_AM::add;
    if (RHSC < 0) {
      AddSub = ARM_AM::sub;
      RHSC = -RHSC;
    }
    Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(AddSub, RHSC), SDLoc(N),
                                    MVT::i32);
    return true;
  }

  Base = N.getOperand(0);
  Offset = N.getOperand(1);
  Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(ARM_AM::add, 0), SDLoc(N),
                                  MVT::i32);
  return true;
}

/// Match a pre/post-indexed offset for an addrmode3 load/store: an 8-bit
/// immediate if it fits, otherwise an offset register.
bool ARMDAGToDAGISel::SelectAddrMode3Offset(SDNode *Op, SDValue N,
                                            SDValue &Offset, SDValue &Opc) {
  unsigned Opcode = Op->getOpcode();
  ISD::MemIndexedMode AM = (Opcode == ISD::LOAD)
    ? cast<LoadSDNode>(Op)->getAddressingMode()
    : cast<StoreSDNode>(Op)->getAddressingMode();
  ARM_AM::AddrOpc AddSub = (AM == ISD::PRE_INC || AM == ISD::POST_INC)
    ? ARM_AM::add : ARM_AM::sub;
  int Val;
  if (isScaledConstantInRange(N, /*Scale=*/1, 0, 256, Val)) { // 8 bits.
    Offset = CurDAG->getRegister(0, MVT::i32);
    Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(AddSub, Val), SDLoc(Op),
                                    MVT::i32);
    return true;
  }

  Offset = N;
  Opc = CurDAG->getTargetConstant(ARM_AM::getAM3Opc(AddSub, 0), SDLoc(Op),
                                  MVT::i32);
  return true;
}

/// Shared matcher for VFP addrmode5 addresses: base register plus a scaled
/// 8-bit immediate (scale 2 for FP16 forms, 4 otherwise).
bool ARMDAGToDAGISel::IsAddressingMode5(SDValue N, SDValue &Base, SDValue &Offset,
                                        bool FP16) {
  if (!CurDAG->isBaseWithConstantOffset(N)) {
    Base = N;
    if (N.getOpcode() == ISD::FrameIndex) {
      int FI = cast<FrameIndexSDNode>(N)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
    } else if (N.getOpcode() == ARMISD::Wrapper &&
               N.getOperand(0).getOpcode() != ISD::TargetGlobalAddress &&
               N.getOperand(0).getOpcode() != ISD::TargetExternalSymbol &&
               N.getOperand(0).getOpcode() != ISD::TargetGlobalTLSAddress) {
      Base = N.getOperand(0);
    }
    Offset = CurDAG->getTargetConstant(ARM_AM::getAM5Opc(ARM_AM::add, 0),
                                       SDLoc(N), MVT::i32);
    return true;
  }

  // If the RHS is +/- imm8, fold into addr mode.
  int RHSC;
  const int Scale = FP16 ? 2 : 4;

  if (isScaledConstantInRange(N.getOperand(1), Scale, -255, 256, RHSC)) {
    Base = N.getOperand(0);
    if (Base.getOpcode() == ISD::FrameIndex) {
      int FI = cast<FrameIndexSDNode>(Base)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
    }

    ARM_AM::AddrOpc AddSub = ARM_AM::add;
    if (RHSC < 0) {
      AddSub = ARM_AM::sub;
      RHSC = -RHSC;
    }

    if (FP16)
      Offset = CurDAG->getTargetConstant(ARM_AM::getAM5FP16Opc(AddSub, RHSC),
                                         SDLoc(N), MVT::i32);
    else
      Offset = CurDAG->getTargetConstant(ARM_AM::getAM5Opc(AddSub, RHSC),
                                         SDLoc(N), MVT::i32);

    return true;
  }

  Base = N;

  if (FP16)
    Offset = CurDAG->getTargetConstant(ARM_AM::getAM5FP16Opc(ARM_AM::add, 0),
                                       SDLoc(N), MVT::i32);
  else
    Offset = CurDAG->getTargetConstant(ARM_AM::getAM5Opc(ARM_AM::add, 0),
                                       SDLoc(N), MVT::i32);

  return true;
}

bool ARMDAGToDAGISel::SelectAddrMode5(SDValue N,
                                      SDValue &Base, SDValue &Offset) {
  return IsAddressingMode5(N, Base, Offset, /*FP16=*/ false);
}

bool ARMDAGToDAGISel::SelectAddrMode5FP16(SDValue N,
                                          SDValue &Base, SDValue &Offset) {
  return IsAddressingMode5(N, Base, Offset, /*FP16=*/ true);
}

/// Match a NEON addrmode6 address: a base register plus an alignment
/// operand derived from the memory operand's alignment.
bool ARMDAGToDAGISel::SelectAddrMode6(SDNode *Parent, SDValue N, SDValue &Addr,
                                      SDValue &Align) {
  Addr = N;

  unsigned Alignment = 0;

  MemSDNode *MemN = cast<MemSDNode>(Parent);

  if (isa<LSBaseSDNode>(MemN) ||
      ((MemN->getOpcode() == ARMISD::VST1_UPD ||
        MemN->getOpcode() == ARMISD::VLD1_UPD) &&
       MemN->getConstantOperandVal(MemN->getNumOperands() - 1) == 1)) {
    // This case occurs only for VLD1-lane/dup and VST1-lane instructions.
    // The maximum alignment is equal to the memory size being referenced.
    unsigned MMOAlign = MemN->getAlignment();
    unsigned MemSize = MemN->getMemoryVT().getSizeInBits() / 8;
    if (MMOAlign >= MemSize && MemSize > 1)
      Alignment = MemSize;
  } else {
    // All other uses of addrmode6 are for intrinsics.  For now just record
    // the raw alignment value; it will be refined later based on the legal
    // alignment operands for the intrinsic.
    Alignment = MemN->getAlignment();
  }

  Align = CurDAG->getTargetConstant(Alignment, SDLoc(N), MVT::i32);
  return true;
}

/// Match the post-increment offset of an addrmode6 load/store. When the
/// increment equals the access size, reg0 is used to select the fixed-stride
/// writeback form.
bool ARMDAGToDAGISel::SelectAddrMode6Offset(SDNode *Op, SDValue N,
                                            SDValue &Offset) {
  LSBaseSDNode *LdSt = cast<LSBaseSDNode>(Op);
  ISD::MemIndexedMode AM = LdSt->getAddressingMode();
  if (AM != ISD::POST_INC)
    return false;
  Offset = N;
  if (ConstantSDNode *NC = dyn_cast<ConstantSDNode>(N)) {
    if (NC->getZExtValue() * 8 == LdSt->getMemoryVT().getSizeInBits())
      Offset = CurDAG->getRegister(0, MVT::i32);
  }
  return true;
}

/// Match a PC-relative address formed by ARMISD::PIC_ADD, splitting it into
/// the offset value and the constant-pool label id.
bool ARMDAGToDAGISel::SelectAddrModePC(SDValue N,
                                       SDValue &Offset, SDValue &Label) {
  if (N.getOpcode() == ARMISD::PIC_ADD && N.hasOneUse()) {
    Offset = N.getOperand(0);
    SDValue N1 = N.getOperand(1);
    Label = CurDAG->getTargetConstant(cast<ConstantSDNode>(N1)->getZExtValue(),
                                      SDLoc(N), MVT::i32);
    return true;
  }

  return false;
}


//===----------------------------------------------------------------------===//
//                         Thumb Addressing Modes
//===----------------------------------------------------------------------===//

static bool shouldUseZeroOffsetLdSt(SDValue N) {
  // Negative numbers are difficult to materialise in thumb1. If we are
  // selecting the add of a negative, instead try to select ri with a zero
  // offset, so create the add node directly which will become a sub.
  if (N.getOpcode() != ISD::ADD)
    return false;

  // Look for an imm which is not legal for ld/st, but is legal for sub.
  if (auto C = dyn_cast<ConstantSDNode>(N.getOperand(1)))
    return C->getSExtValue() < 0 && C->getSExtValue() >= -255;

  return false;
}

/// Match a Thumb1 register+register address; also accepts a bare constant 0
/// (used as both base and offset).
bool ARMDAGToDAGISel::SelectThumbAddrModeRRSext(SDValue N, SDValue &Base,
                                                SDValue &Offset) {
  if (N.getOpcode() != ISD::ADD && !CurDAG->isBaseWithConstantOffset(N)) {
    ConstantSDNode *NC = dyn_cast<ConstantSDNode>(N);
    if (!NC || !NC->isNullValue())
      return false;

    Base = Offset = N;
    return true;
  }

  Base = N.getOperand(0);
  Offset = N.getOperand(1);
  return true;
}

bool ARMDAGToDAGISel::SelectThumbAddrModeRR(SDValue N, SDValue &Base,
                                            SDValue &Offset) {
  if (shouldUseZeroOffsetLdSt(N))
    return false; // Select ri instead
  return SelectThumbAddrModeRRSext(N, Base, Offset);
}

/// Match a Thumb1 base + scaled 5-bit unsigned immediate address.
bool
ARMDAGToDAGISel::SelectThumbAddrModeImm5S(SDValue N, unsigned Scale,
                                          SDValue &Base, SDValue &OffImm) {
  if (shouldUseZeroOffsetLdSt(N)) {
    Base = N;
    OffImm = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
    return true;
  }

  if (!CurDAG->isBaseWithConstantOffset(N)) {
    if (N.getOpcode() == ISD::ADD) {
      return false; // We want to select register offset instead
    } else if (N.getOpcode() == ARMISD::Wrapper &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalAddress &&
        N.getOperand(0).getOpcode() != ISD::TargetExternalSymbol &&
        N.getOperand(0).getOpcode() != ISD::TargetConstantPool &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalTLSAddress) {
      Base = N.getOperand(0);
    } else {
      Base = N;
    }

    OffImm = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
    return true;
  }

  // If the RHS is + imm5 * scale, fold into addr mode.
  int RHSC;
  if (isScaledConstantInRange(N.getOperand(1), Scale, 0, 32, RHSC)) {
    Base = N.getOperand(0);
    OffImm = CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32);
    return true;
  }

  // Offset is too large, so use register offset instead.
  return false;
}

bool
ARMDAGToDAGISel::SelectThumbAddrModeImm5S4(SDValue N, SDValue &Base,
                                           SDValue &OffImm) {
  return SelectThumbAddrModeImm5S(N, 4, Base, OffImm);
}

bool
ARMDAGToDAGISel::SelectThumbAddrModeImm5S2(SDValue N, SDValue &Base,
                                           SDValue &OffImm) {
  return SelectThumbAddrModeImm5S(N, 2, Base, OffImm);
}

bool
ARMDAGToDAGISel::SelectThumbAddrModeImm5S1(SDValue N, SDValue &Base,
                                           SDValue &OffImm) {
  return SelectThumbAddrModeImm5S(N, 1, Base, OffImm);
}

/// Match a Thumb1 SP-relative address: frame index (or FI + scaled imm8),
/// forcing at least 4-byte alignment on the frame object so the offset stays
/// a multiple of 4.
bool ARMDAGToDAGISel::SelectThumbAddrModeSP(SDValue N,
                                            SDValue &Base, SDValue &OffImm) {
  if (N.getOpcode() == ISD::FrameIndex) {
    int FI = cast<FrameIndexSDNode>(N)->getIndex();
    // Only multiples of 4 are allowed for the offset, so the frame object
    // alignment must be at least 4.
    MachineFrameInfo &MFI = MF->getFrameInfo();
    if (MFI.getObjectAlignment(FI) < 4)
      MFI.setObjectAlignment(FI, 4);
    Base = CurDAG->getTargetFrameIndex(
        FI, TLI->getPointerTy(CurDAG->getDataLayout()));
    OffImm = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
    return true;
  }

  if (!CurDAG->isBaseWithConstantOffset(N))
    return false;

  if (N.getOperand(0).getOpcode() == ISD::FrameIndex) {
    // If the RHS is + imm8 * scale, fold into addr mode.
    int RHSC;
    if (isScaledConstantInRange(N.getOperand(1), /*Scale=*/4, 0, 256, RHSC)) {
      Base = N.getOperand(0);
      int FI = cast<FrameIndexSDNode>(Base)->getIndex();
      // For LHS+RHS to result in an offset that's a multiple of 4 the object
      // indexed by the LHS must be 4-byte aligned.
      MachineFrameInfo &MFI = MF->getFrameInfo();
      if (MFI.getObjectAlignment(FI) < 4)
        MFI.setObjectAlignment(FI, 4);
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      OffImm = CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32);
      return true;
    }
  }

  return false;
}


//===----------------------------------------------------------------------===//
//                        Thumb 2 Addressing Modes
//===----------------------------------------------------------------------===//

/// Match a Thumb2 base + unsigned 12-bit immediate address (t2LDRi12-style).
/// Negative imm8 offsets are deliberately left for SelectT2AddrModeImm8.
bool ARMDAGToDAGISel::SelectT2AddrModeImm12(SDValue N,
                                            SDValue &Base, SDValue &OffImm) {
  // Match simple R + imm12 operands.

  // Base only.
  if (N.getOpcode() != ISD::ADD && N.getOpcode() != ISD::SUB &&
      !CurDAG->isBaseWithConstantOffset(N)) {
    if (N.getOpcode() == ISD::FrameIndex) {
      // Match frame index.
      int FI = cast<FrameIndexSDNode>(N)->getIndex();
      Base = CurDAG->getTargetFrameIndex(
          FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
      return true;
    }

    if (N.getOpcode() == ARMISD::Wrapper &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalAddress &&
        N.getOperand(0).getOpcode() != ISD::TargetExternalSymbol &&
        N.getOperand(0).getOpcode() != ISD::TargetGlobalTLSAddress) {
      Base = N.getOperand(0);
      if (Base.getOpcode() == ISD::TargetConstantPool)
        return false;  // We want to select t2LDRpci instead.
    } else
      Base = N;
    OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
    return true;
  }

  if (ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
    if (SelectT2AddrModeImm8(N, Base, OffImm))
      // Let t2LDRi8 handle (R - imm8).
      return false;

    int RHSC = (int)RHS->getZExtValue();
    if (N.getOpcode() == ISD::SUB)
      RHSC = -RHSC;

    if (RHSC >= 0 && RHSC < 0x1000) { // 12 bits (unsigned)
      Base   = N.getOperand(0);
      if (Base.getOpcode() == ISD::FrameIndex) {
        int FI = cast<FrameIndexSDNode>(Base)->getIndex();
        Base = CurDAG->getTargetFrameIndex(
            FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      }
      OffImm = CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32);
      return true;
    }
  }

  // Base only.
  Base = N;
  OffImm  = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);
  return true;
}

/// Match a Thumb2 base + negative 8-bit immediate address (t2LDRi8-style).
bool ARMDAGToDAGISel::SelectT2AddrModeImm8(SDValue N,
                                           SDValue &Base, SDValue &OffImm) {
  // Match simple R - imm8 operands.
  if (N.getOpcode() != ISD::ADD && N.getOpcode() != ISD::SUB &&
      !CurDAG->isBaseWithConstantOffset(N))
    return false;

  if (ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
    int RHSC = (int)RHS->getSExtValue();
    if (N.getOpcode() == ISD::SUB)
      RHSC = -RHSC;

    if ((RHSC >= -255) && (RHSC < 0)) { // 8 bits (always negative)
      Base = N.getOperand(0);
      if (Base.getOpcode() == ISD::FrameIndex) {
        int FI = cast<FrameIndexSDNode>(Base)->getIndex();
        Base = CurDAG->getTargetFrameIndex(
            FI, TLI->getPointerTy(CurDAG->getDataLayout()));
      }
      OffImm = CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32);
      return true;
    }
  }

  return false;
}

/// Match an 8-bit immediate pre/post-indexed offset for a Thumb2 load/store;
/// the value is negated for the decrementing modes.
bool ARMDAGToDAGISel::SelectT2AddrModeImm8Offset(SDNode *Op, SDValue N,
                                                 SDValue &OffImm){
  unsigned Opcode = Op->getOpcode();
  ISD::MemIndexedMode AM = (Opcode == ISD::LOAD)
    ? cast<LoadSDNode>(Op)->getAddressingMode()
    : cast<StoreSDNode>(Op)->getAddressingMode();
  int RHSC;
  if (isScaledConstantInRange(N, /*Scale=*/1, 0, 0x100, RHSC)) { // 8 bits.
    OffImm = ((AM == ISD::PRE_INC) || (AM == ISD::POST_INC))
      ? CurDAG->getTargetConstant(RHSC, SDLoc(N), MVT::i32)
      : CurDAG->getTargetConstant(-RHSC, SDLoc(N), MVT::i32);
    return true;
  }

  return false;
}

/// Match a Thumb2 register + (register << [0..3]) address (t2 so_reg).
bool ARMDAGToDAGISel::SelectT2AddrModeSoReg(SDValue N,
                                            SDValue &Base,
                                            SDValue &OffReg, SDValue &ShImm) {
  // (R - imm8) should be handled by t2LDRi8. The rest are handled by t2LDRi12.
  if (N.getOpcode() != ISD::ADD && !CurDAG->isBaseWithConstantOffset(N))
    return false;

  // Leave (R + imm12) for t2LDRi12, (R - imm8) for t2LDRi8.
  if (ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1))) {
    int RHSC = (int)RHS->getZExtValue();
    if (RHSC >= 0 && RHSC < 0x1000) // 12 bits (unsigned)
      return false;
    else if (RHSC < 0 && RHSC >= -255) // 8 bits
      return false;
  }

  // Look for (R + R) or (R + (R << [1,2,3])).
  unsigned ShAmt = 0;
  Base   = N.getOperand(0);
  OffReg = N.getOperand(1);

  // Swap if it is ((R << c) + R).
  ARM_AM::ShiftOpc ShOpcVal = ARM_AM::getShiftOpcForNode(OffReg.getOpcode());
  if (ShOpcVal != ARM_AM::lsl) {
    ShOpcVal = ARM_AM::getShiftOpcForNode(Base.getOpcode());
    if (ShOpcVal == ARM_AM::lsl)
      std::swap(Base, OffReg);
  }

  if (ShOpcVal == ARM_AM::lsl) {
    // Check to see if the RHS of the shift is a constant, if not, we can't fold
    // it.
    if (ConstantSDNode *Sh = dyn_cast<ConstantSDNode>(OffReg.getOperand(1))) {
      ShAmt = Sh->getZExtValue();
      if (ShAmt < 4 && isShifterOpProfitable(OffReg, ShOpcVal, ShAmt))
        OffReg = OffReg.getOperand(0);
      else {
        ShAmt = 0;
      }
    }
  }

  // If OffReg is a multiply-by-constant and it's profitable to extract a shift
  // and use it in a shifted operand do so.
  if (OffReg.getOpcode() == ISD::MUL && N.hasOneUse()) {
    unsigned PowerOfTwo = 0;
    SDValue NewMulConst;
    if (canExtractShiftFromMul(OffReg, 3, PowerOfTwo, NewMulConst)) {
      HandleSDNode Handle(OffReg);
      replaceDAGValue(OffReg.getOperand(1), NewMulConst);
      OffReg = Handle.getValue();
      ShAmt = PowerOfTwo;
    }
  }

  ShImm = CurDAG->getTargetConstant(ShAmt, SDLoc(N), MVT::i32);

  return true;
}

/// Match an address for ldrex/strex: base plus an imm8 offset scaled by 4.
/// Always succeeds (worst case: base only, offset 0), since these
/// instructions have no other addressing alternative.
bool ARMDAGToDAGISel::SelectT2AddrModeExclusive(SDValue N, SDValue &Base,
                                                SDValue &OffImm) {
  // This *must* succeed since it's used for the irreplaceable ldrex and strex
  // instructions.
  Base = N;
  OffImm = CurDAG->getTargetConstant(0, SDLoc(N), MVT::i32);

  if (N.getOpcode() != ISD::ADD || !CurDAG->isBaseWithConstantOffset(N))
    return true;

  ConstantSDNode *RHS = dyn_cast<ConstantSDNode>(N.getOperand(1));
  if (!RHS)
    return true;

  uint32_t RHSC = (int)RHS->getZExtValue();
  if (RHSC > 1020 || RHSC % 4 != 0)
    return true;

  Base = N.getOperand(0);
  if (Base.getOpcode() == ISD::FrameIndex) {
    int FI = cast<FrameIndexSDNode>(Base)->getIndex();
    Base = CurDAG->getTargetFrameIndex(
        FI, TLI->getPointerTy(CurDAG->getDataLayout()));
  }

  OffImm = CurDAG->getTargetConstant(RHSC/4, SDLoc(N), MVT::i32);
  return true;
}

//===--------------------------------------------------------------------===//

/// getAL - Returns a ARMCC::AL immediate node.
static inline SDValue getAL(SelectionDAG *CurDAG, const SDLoc &dl) {
  return CurDAG->getTargetConstant((uint64_t)ARMCC::AL, dl, MVT::i32);
}

/// Copy the memory operand of N onto the newly created machine node Result.
void ARMDAGToDAGISel::transferMemOperands(SDNode *N, SDNode *Result) {
  MachineMemOperand *MemOp = cast<MemSDNode>(N)->getMemOperand();
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(Result), {MemOp});
}

/// Try to select an ARM pre/post-indexed load; on success the load node is
/// replaced with the matching LDR*_PRE/LDR*_POST machine node.
bool ARMDAGToDAGISel::tryARMIndexedLoad(SDNode *N) {
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::MemIndexedMode AM = LD->getAddressingMode();
  if (AM == ISD::UNINDEXED)
    return false;

  EVT LoadedVT = LD->getMemoryVT();
  SDValue Offset, AMOpc;
  bool isPre = (AM == ISD::PRE_INC) || (AM == ISD::PRE_DEC);
  unsigned Opcode = 0;
  bool Match = false;
  if (LoadedVT == MVT::i32 && isPre &&
      SelectAddrMode2OffsetImmPre(N, LD->getOffset(), Offset, AMOpc)) {
    Opcode = ARM::LDR_PRE_IMM;
    Match = true;
  } else if (LoadedVT == MVT::i32 && !isPre &&
      SelectAddrMode2OffsetImm(N, LD->getOffset(), Offset, AMOpc)) {
    Opcode = ARM::LDR_POST_IMM;
    Match = true;
  } else if (LoadedVT == MVT::i32 &&
      SelectAddrMode2OffsetReg(N, LD->getOffset(), Offset, AMOpc)) {
    Opcode = isPre ? ARM::LDR_PRE_REG : ARM::LDR_POST_REG;
    Match = true;

  } else if (LoadedVT == MVT::i16 &&
             SelectAddrMode3Offset(N, LD->getOffset(), Offset, AMOpc)) {
    Match = true;
    Opcode = (LD->getExtensionType() == ISD::SEXTLOAD)
      ? (isPre ? ARM::LDRSH_PRE : ARM::LDRSH_POST)
      : (isPre ? ARM::LDRH_PRE : ARM::LDRH_POST);
  } else if (LoadedVT == MVT::i8 || LoadedVT == MVT::i1) {
    if (LD->getExtensionType() == ISD::SEXTLOAD) {
      if (SelectAddrMode3Offset(N, LD->getOffset(), Offset, AMOpc)) {
        Match = true;
        Opcode = isPre ? ARM::LDRSB_PRE : ARM::LDRSB_POST;
      }
    } else {
      if (isPre &&
          SelectAddrMode2OffsetImmPre(N, LD->getOffset(), Offset, AMOpc)) {
        Match = true;
        Opcode = ARM::LDRB_PRE_IMM;
      } else if (!isPre &&
                  SelectAddrMode2OffsetImm(N, LD->getOffset(), Offset, AMOpc)) {
        Match = true;
        Opcode = ARM::LDRB_POST_IMM;
      } else if (SelectAddrMode2OffsetReg(N, LD->getOffset(), Offset, AMOpc)) {
        Match = true;
        Opcode = isPre ? ARM::LDRB_PRE_REG : ARM::LDRB_POST_REG;
      }
    }
  }

  if (Match) {
    if (Opcode == ARM::LDR_PRE_IMM || Opcode == ARM::LDRB_PRE_IMM) {
      SDValue Chain = LD->getChain();
      SDValue Base = LD->getBasePtr();
      SDValue Ops[]= { Base, AMOpc, getAL(CurDAG, SDLoc(N)),
                       CurDAG->getRegister(0, MVT::i32), Chain };
      SDNode *New = CurDAG->getMachineNode(Opcode, SDLoc(N), MVT::i32,
                                           MVT::i32, MVT::Other, Ops);
      transferMemOperands(N, New);
      ReplaceNode(N, New);
      return true;
    } else {
      SDValue Chain = LD->getChain();
      SDValue Base = LD->getBasePtr();
      SDValue Ops[]= { Base, Offset, AMOpc, getAL(CurDAG, SDLoc(N)),
                       CurDAG->getRegister(0, MVT::i32), Chain };
      SDNode *New = CurDAG->getMachineNode(Opcode, SDLoc(N), MVT::i32,
                                           MVT::i32, MVT::Other, Ops);
      transferMemOperands(N, New);
      ReplaceNode(N, New);
      return true;
    }
  }

  return false;
}

/// Try to select a Thumb1 post-incremented i32 load with increment 4; emitted
/// as the tLDR_postidx pseudo (later becomes tLDMIA_UPD).
bool ARMDAGToDAGISel::tryT1IndexedLoad(SDNode *N) {
  LoadSDNode *LD = cast<LoadSDNode>(N);
  EVT LoadedVT = LD->getMemoryVT();
  ISD::MemIndexedMode AM = LD->getAddressingMode();
  if (AM != ISD::POST_INC || LD->getExtensionType() != ISD::NON_EXTLOAD ||
      LoadedVT.getSimpleVT().SimpleTy != MVT::i32)
    return false;

  auto *COffs = dyn_cast<ConstantSDNode>(LD->getOffset());
  if (!COffs || COffs->getZExtValue() != 4)
    return false;

  // A T1 post-indexed load is just a single register LDM: LDM r0!, {r1}.
  // The encoding of LDM is not how the rest of ISel expects a post-inc load to
  // look however, so we use a pseudo here and switch it for a tLDMIA_UPD after
  // ISel.
  SDValue Chain = LD->getChain();
  SDValue Base = LD->getBasePtr();
  SDValue Ops[]= { Base, getAL(CurDAG, SDLoc(N)),
                   CurDAG->getRegister(0, MVT::i32), Chain };
  SDNode *New = CurDAG->getMachineNode(ARM::tLDR_postidx, SDLoc(N), MVT::i32,
                                       MVT::i32, MVT::Other, Ops);
  transferMemOperands(N, New);
  ReplaceNode(N, New);
  return true;
}

/// Try to select a Thumb2 pre/post-indexed load using the imm8 offset forms.
bool ARMDAGToDAGISel::tryT2IndexedLoad(SDNode *N) {
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::MemIndexedMode AM = LD->getAddressingMode();
  if (AM == ISD::UNINDEXED)
    return false;

  EVT LoadedVT = LD->getMemoryVT();
  bool isSExtLd = LD->getExtensionType() == ISD::SEXTLOAD;
  SDValue Offset;
  bool isPre = (AM == ISD::PRE_INC) || (AM == ISD::PRE_DEC);
  unsigned Opcode = 0;
  bool Match = false;
  if (SelectT2AddrModeImm8Offset(N, LD->getOffset(), Offset)) {
    switch (LoadedVT.getSimpleVT().SimpleTy) {
    case MVT::i32:
      Opcode = isPre ? ARM::t2LDR_PRE : ARM::t2LDR_POST;
      break;
    case MVT::i16:
      if (isSExtLd)
        Opcode = isPre ? ARM::t2LDRSH_PRE : ARM::t2LDRSH_POST;
      else
        Opcode = isPre ? ARM::t2LDRH_PRE : ARM::t2LDRH_POST;
      break;
    case MVT::i8:
    case MVT::i1:
      if (isSExtLd)
        Opcode = isPre ? ARM::t2LDRSB_PRE : ARM::t2LDRSB_POST;
      else
        Opcode = isPre ? ARM::t2LDRB_PRE : ARM::t2LDRB_POST;
      break;
    default:
      return false;
    }
    Match = true;
  }

  if (Match) {
    SDValue Chain = LD->getChain();
    SDValue Base = LD->getBasePtr();
    SDValue Ops[]= { Base, Offset, getAL(CurDAG, SDLoc(N)),
                     CurDAG->getRegister(0, MVT::i32), Chain };
    SDNode *New = CurDAG->getMachineNode(Opcode, SDLoc(N), MVT::i32, MVT::i32,
                                         MVT::Other, Ops);
    transferMemOperands(N, New);
    ReplaceNode(N, New);
    return true;
  }

  return false;
}

/// Form a GPRPair pseudo register from a pair of GPR regs.
SDNode *ARMDAGToDAGISel::createGPRPairNode(EVT VT, SDValue V0, SDValue V1) {
  SDLoc dl(V0.getNode());
  SDValue RegClass =
    CurDAG->getTargetConstant(ARM::GPRPairRegClassID, dl, MVT::i32);
  SDValue SubReg0 = CurDAG->getTargetConstant(ARM::gsub_0, dl, MVT::i32);
  SDValue SubReg1 = CurDAG->getTargetConstant(ARM::gsub_1, dl, MVT::i32);
  const SDValue Ops[] = { RegClass, V0, SubReg0, V1, SubReg1 };
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, dl, VT, Ops);
}

/// Form a D register from a pair of S registers.
SDNode *ARMDAGToDAGISel::createSRegPairNode(EVT VT, SDValue V0, SDValue V1) {
  SDLoc dl(V0.getNode());
  SDValue RegClass =
    CurDAG->getTargetConstant(ARM::DPR_VFP2RegClassID, dl, MVT::i32);
  SDValue SubReg0 = CurDAG->getTargetConstant(ARM::ssub_0, dl, MVT::i32);
  SDValue SubReg1 = CurDAG->getTargetConstant(ARM::ssub_1, dl, MVT::i32);
  const SDValue Ops[] = { RegClass, V0, SubReg0, V1, SubReg1 };
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, dl, VT, Ops);
}

/// Form a quad register from a pair of D registers.
SDNode *ARMDAGToDAGISel::createDRegPairNode(EVT VT, SDValue V0, SDValue V1) {
  SDLoc DL(V0.getNode());
  // Wrap the two D registers in a REG_SEQUENCE constrained to QPR.
  SDValue RC = CurDAG->getTargetConstant(ARM::QPRRegClassID, DL, MVT::i32);
  SDValue Sub0 = CurDAG->getTargetConstant(ARM::dsub_0, DL, MVT::i32);
  SDValue Sub1 = CurDAG->getTargetConstant(ARM::dsub_1, DL, MVT::i32);
  const SDValue Ops[] = {RC, V0, Sub0, V1, Sub1};
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, DL, VT, Ops);
}

/// Form 4 consecutive D registers from a pair of Q registers.
SDNode *ARMDAGToDAGISel::createQRegPairNode(EVT VT, SDValue V0, SDValue V1) {
  SDLoc DL(V0.getNode());
  // Wrap the two Q registers in a REG_SEQUENCE constrained to QQPR.
  SDValue RC = CurDAG->getTargetConstant(ARM::QQPRRegClassID, DL, MVT::i32);
  SDValue Sub0 = CurDAG->getTargetConstant(ARM::qsub_0, DL, MVT::i32);
  SDValue Sub1 = CurDAG->getTargetConstant(ARM::qsub_1, DL, MVT::i32);
  const SDValue Ops[] = {RC, V0, Sub0, V1, Sub1};
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, DL, VT, Ops);
}

/// Form 4 consecutive S registers.
SDNode *ARMDAGToDAGISel::createQuadSRegsNode(EVT VT, SDValue V0, SDValue V1,
                                             SDValue V2, SDValue V3) {
  SDLoc DL(V0.getNode());
  // Wrap the four S registers in a REG_SEQUENCE constrained to QPR_VFP2.
  SDValue RC =
      CurDAG->getTargetConstant(ARM::QPR_VFP2RegClassID, DL, MVT::i32);
  SDValue Sub0 = CurDAG->getTargetConstant(ARM::ssub_0, DL, MVT::i32);
  SDValue Sub1 = CurDAG->getTargetConstant(ARM::ssub_1, DL, MVT::i32);
  SDValue Sub2 = CurDAG->getTargetConstant(ARM::ssub_2, DL, MVT::i32);
  SDValue Sub3 = CurDAG->getTargetConstant(ARM::ssub_3, DL, MVT::i32);
  const SDValue Ops[] = {RC, V0, Sub0, V1, Sub1, V2, Sub2, V3, Sub3};
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, DL, VT, Ops);
}

/// Form 4 consecutive D registers.
SDNode *ARMDAGToDAGISel::createQuadDRegsNode(EVT VT, SDValue V0, SDValue V1,
                                             SDValue V2, SDValue V3) {
  SDLoc DL(V0.getNode());
  // Wrap the four D registers in a REG_SEQUENCE constrained to QQPR.
  SDValue RC = CurDAG->getTargetConstant(ARM::QQPRRegClassID, DL, MVT::i32);
  SDValue Sub0 = CurDAG->getTargetConstant(ARM::dsub_0, DL, MVT::i32);
  SDValue Sub1 = CurDAG->getTargetConstant(ARM::dsub_1, DL, MVT::i32);
  SDValue Sub2 = CurDAG->getTargetConstant(ARM::dsub_2, DL, MVT::i32);
  SDValue Sub3 = CurDAG->getTargetConstant(ARM::dsub_3, DL, MVT::i32);
  const SDValue Ops[] = {RC, V0, Sub0, V1, Sub1, V2, Sub2, V3, Sub3};
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, DL, VT, Ops);
}

/// Form 4 consecutive Q registers.
SDNode *ARMDAGToDAGISel::createQuadQRegsNode(EVT VT, SDValue V0, SDValue V1,
                                             SDValue V2, SDValue V3) {
  SDLoc DL(V0.getNode());
  // Wrap the four Q registers in a REG_SEQUENCE constrained to QQQQPR.
  SDValue RC = CurDAG->getTargetConstant(ARM::QQQQPRRegClassID, DL, MVT::i32);
  SDValue Sub0 = CurDAG->getTargetConstant(ARM::qsub_0, DL, MVT::i32);
  SDValue Sub1 = CurDAG->getTargetConstant(ARM::qsub_1, DL, MVT::i32);
  SDValue Sub2 = CurDAG->getTargetConstant(ARM::qsub_2, DL, MVT::i32);
  SDValue Sub3 = CurDAG->getTargetConstant(ARM::qsub_3, DL, MVT::i32);
  const SDValue Ops[] = {RC, V0, Sub0, V1, Sub1, V2, Sub2, V3, Sub3};
  return CurDAG->getMachineNode(TargetOpcode::REG_SEQUENCE, DL, VT, Ops);
}

/// GetVLDSTAlign - Get the alignment (in bytes) for the alignment operand
/// of a NEON VLD or VST instruction.  The supported values depend on the
/// number of registers being loaded.
/// Clamp the alignment hint of a NEON VLD/VST to the largest value the
/// instruction encoding supports for the given total transfer size, and
/// return it as an i32 target constant (0 means "no alignment specified").
SDValue ARMDAGToDAGISel::GetVLDSTAlign(SDValue Align, const SDLoc &dl,
                                       unsigned NumVecs, bool is64BitVector) {
  // Total number of D registers moved: Q-register VLD1/VLD2 move 2 D regs
  // per vector; VLD3/VLD4 of Q registers are split into two D-reg halves
  // elsewhere, so they are not doubled here.
  unsigned NumRegs = NumVecs;
  if (!is64BitVector && NumVecs < 3)
    NumRegs *= 2;

  // Round the requested alignment down to the nearest encodable value.
  unsigned Alignment = cast<ConstantSDNode>(Align)->getZExtValue();
  if (Alignment >= 32 && NumRegs == 4)
    Alignment = 32;
  else if (Alignment >= 16 && (NumRegs == 2 || NumRegs == 4))
    Alignment = 16;
  else if (Alignment >= 8)
    Alignment = 8;
  else
    Alignment = 0;

  return CurDAG->getTargetConstant(Alignment, dl, MVT::i32);
}

/// Return true if \p Opc is a fixed-stride writeback VLD opcode, i.e. one
/// whose post-increment amount is implied by the opcode rather than taken
/// from a register operand.
static bool isVLDfixed(unsigned Opc)
{
  switch (Opc) {
  default: return false;
  case ARM::VLD1d8wb_fixed : return true;
  case ARM::VLD1d16wb_fixed : return true;
  case ARM::VLD1d64Qwb_fixed : return true;
  case ARM::VLD1d32wb_fixed : return true;
  case ARM::VLD1d64wb_fixed : return true;
  case ARM::VLD1d64TPseudoWB_fixed : return true;
  case ARM::VLD1d64QPseudoWB_fixed : return true;
  case ARM::VLD1q8wb_fixed : return true;
  case ARM::VLD1q16wb_fixed : return true;
  case ARM::VLD1q32wb_fixed : return true;
  case ARM::VLD1q64wb_fixed : return true;
  case ARM::VLD1DUPd8wb_fixed : return true;
  case ARM::VLD1DUPd16wb_fixed : return true;
  case ARM::VLD1DUPd32wb_fixed : return true;
  case ARM::VLD1DUPq8wb_fixed : return true;
  case ARM::VLD1DUPq16wb_fixed : return true;
  case ARM::VLD1DUPq32wb_fixed : return true;
  case ARM::VLD2d8wb_fixed : return true;
  case ARM::VLD2d16wb_fixed : return true;
  case ARM::VLD2d32wb_fixed : return true;
  case ARM::VLD2q8PseudoWB_fixed : return true;
  case ARM::VLD2q16PseudoWB_fixed : return true;
  case ARM::VLD2q32PseudoWB_fixed : return true;
  case ARM::VLD2DUPd8wb_fixed : return true;
  case ARM::VLD2DUPd16wb_fixed : return true;
  case ARM::VLD2DUPd32wb_fixed : return true;
  }
}

/// Return true if \p Opc is a fixed-stride writeback VST opcode (see
/// isVLDfixed above for the store counterpart of the same property).
static bool isVSTfixed(unsigned Opc)
{
  switch (Opc) {
  default: return false;
  case ARM::VST1d8wb_fixed : return true;
  case ARM::VST1d16wb_fixed : return true;
  case ARM::VST1d32wb_fixed : return true;
  case ARM::VST1d64wb_fixed : return true;
  case ARM::VST1q8wb_fixed : return true;
  case ARM::VST1q16wb_fixed : return true;
  case ARM::VST1q32wb_fixed : return true;
  case ARM::VST1q64wb_fixed : return true;
  case ARM::VST1d64TPseudoWB_fixed : return true;
  case ARM::VST1d64QPseudoWB_fixed : return true;
  case ARM::VST2d8wb_fixed : return true;
  case ARM::VST2d16wb_fixed : return true;
  case ARM::VST2d32wb_fixed : return true;
  case ARM::VST2q8PseudoWB_fixed : return true;
  case ARM::VST2q16PseudoWB_fixed : return true;
  case ARM::VST2q32PseudoWB_fixed : return true;
  }
}

// Get the register stride update opcode of a VLD/VST instruction that
// is otherwise equivalent to the given fixed stride updating instruction.
static unsigned getVLDSTRegisterUpdateOpcode(unsigned Opc) {
  assert((isVLDfixed(Opc) || isVSTfixed(Opc))
         && "Incorrect fixed stride updating instruction.");
  switch (Opc) {
  default: break;
  case ARM::VLD1d8wb_fixed: return ARM::VLD1d8wb_register;
  case ARM::VLD1d16wb_fixed: return ARM::VLD1d16wb_register;
  case ARM::VLD1d32wb_fixed: return ARM::VLD1d32wb_register;
  case ARM::VLD1d64wb_fixed: return ARM::VLD1d64wb_register;
  case ARM::VLD1q8wb_fixed: return ARM::VLD1q8wb_register;
  case ARM::VLD1q16wb_fixed: return ARM::VLD1q16wb_register;
  case ARM::VLD1q32wb_fixed: return ARM::VLD1q32wb_register;
  case ARM::VLD1q64wb_fixed: return ARM::VLD1q64wb_register;
  case ARM::VLD1d64Twb_fixed: return ARM::VLD1d64Twb_register;
  case ARM::VLD1d64Qwb_fixed: return ARM::VLD1d64Qwb_register;
  case ARM::VLD1d64TPseudoWB_fixed: return ARM::VLD1d64TPseudoWB_register;
  case ARM::VLD1d64QPseudoWB_fixed: return ARM::VLD1d64QPseudoWB_register;
  case ARM::VLD1DUPd8wb_fixed : return ARM::VLD1DUPd8wb_register;
  case ARM::VLD1DUPd16wb_fixed : return ARM::VLD1DUPd16wb_register;
  case ARM::VLD1DUPd32wb_fixed : return ARM::VLD1DUPd32wb_register;
  case ARM::VLD1DUPq8wb_fixed : return ARM::VLD1DUPq8wb_register;
  case ARM::VLD1DUPq16wb_fixed : return ARM::VLD1DUPq16wb_register;
  case ARM::VLD1DUPq32wb_fixed : return ARM::VLD1DUPq32wb_register;

  case ARM::VST1d8wb_fixed: return ARM::VST1d8wb_register;
  case ARM::VST1d16wb_fixed: return ARM::VST1d16wb_register;
  case ARM::VST1d32wb_fixed: return ARM::VST1d32wb_register;
  case ARM::VST1d64wb_fixed: return ARM::VST1d64wb_register;
  case ARM::VST1q8wb_fixed: return ARM::VST1q8wb_register;
  case ARM::VST1q16wb_fixed: return ARM::VST1q16wb_register;
  case ARM::VST1q32wb_fixed: return ARM::VST1q32wb_register;
  case ARM::VST1q64wb_fixed: return ARM::VST1q64wb_register;
  case ARM::VST1d64TPseudoWB_fixed: return ARM::VST1d64TPseudoWB_register;
  case ARM::VST1d64QPseudoWB_fixed: return ARM::VST1d64QPseudoWB_register;

  case ARM::VLD2d8wb_fixed: return ARM::VLD2d8wb_register;
  case ARM::VLD2d16wb_fixed: return ARM::VLD2d16wb_register;
  case ARM::VLD2d32wb_fixed: return ARM::VLD2d32wb_register;
  case ARM::VLD2q8PseudoWB_fixed: return ARM::VLD2q8PseudoWB_register;
  case ARM::VLD2q16PseudoWB_fixed: return ARM::VLD2q16PseudoWB_register;
  case ARM::VLD2q32PseudoWB_fixed: return ARM::VLD2q32PseudoWB_register;

  case ARM::VST2d8wb_fixed: return ARM::VST2d8wb_register;
  case ARM::VST2d16wb_fixed: return ARM::VST2d16wb_register;
  case ARM::VST2d32wb_fixed: return ARM::VST2d32wb_register;
  case ARM::VST2q8PseudoWB_fixed: return ARM::VST2q8PseudoWB_register;
  case ARM::VST2q16PseudoWB_fixed: return ARM::VST2q16PseudoWB_register;
  case ARM::VST2q32PseudoWB_fixed: return ARM::VST2q32PseudoWB_register;

  case ARM::VLD2DUPd8wb_fixed: return ARM::VLD2DUPd8wb_register;
  case ARM::VLD2DUPd16wb_fixed: return ARM::VLD2DUPd16wb_register;
  case ARM::VLD2DUPd32wb_fixed: return ARM::VLD2DUPd32wb_register;
  }
  return Opc; // If not one we handle, return it unchanged.
}

/// Returns true if the given increment is a Constant known to be equal to the
/// access size performed by a NEON load/store. This means the "[rN]!" form can
/// be used.
static bool isPerfectIncrement(SDValue Inc, EVT VecTy, unsigned NumVecs) {
  auto C = dyn_cast<ConstantSDNode>(Inc);
  // Access size in bytes = (vector size in bytes) * number of vectors.
  return C && C->getZExtValue() == VecTy.getSizeInBits() / 8 * NumVecs;
}

/// Select a NEON structured load (VLD1-VLD4, plain or post-incrementing).
/// \p DOpcodes indexes by element size for D-register forms; \p QOpcodes0 /
/// \p QOpcodes1 are the Q-register forms (for VLD3/VLD4 these are the even-
/// and odd-half instructions the operation is split into).
void ARMDAGToDAGISel::SelectVLD(SDNode *N, bool isUpdating, unsigned NumVecs,
                                const uint16_t *DOpcodes,
                                const uint16_t *QOpcodes0,
                                const uint16_t *QOpcodes1) {
  assert(NumVecs >= 1 && NumVecs <= 4 && "VLD NumVecs out-of-range");
  SDLoc dl(N);

  SDValue MemAddr, Align;
  bool IsIntrinsic = !isUpdating; // By coincidence, all supported updating
                                  // nodes are not intrinsics.
  unsigned AddrOpIdx = IsIntrinsic ? 2 : 1;
  if (!SelectAddrMode6(N, N->getOperand(AddrOpIdx), MemAddr, Align))
    return;

  SDValue Chain = N->getOperand(0);
  EVT VT = N->getValueType(0);
  bool is64BitVector = VT.is64BitVector();
  Align = GetVLDSTAlign(Align, dl, NumVecs, is64BitVector);

  // Map the element type onto an index into the opcode tables.
  unsigned OpcodeIndex;
  switch (VT.getSimpleVT().SimpleTy) {
  default: llvm_unreachable("unhandled vld type");
    // Double-register operations:
  case MVT::v8i8:  OpcodeIndex = 0; break;
  case MVT::v4f16:
  case MVT::v4i16: OpcodeIndex = 1; break;
  case MVT::v2f32:
  case MVT::v2i32: OpcodeIndex = 2; break;
  case MVT::v1i64: OpcodeIndex = 3; break;
    // Quad-register operations:
  case MVT::v16i8: OpcodeIndex = 0; break;
  case MVT::v8f16:
  case MVT::v8i16: OpcodeIndex = 1; break;
  case MVT::v4f32:
  case MVT::v4i32: OpcodeIndex = 2; break;
  case MVT::v2f64:
  case MVT::v2i64: OpcodeIndex = 3; break;
  }

  // Multi-vector results are produced as one wide i64 super-register vector
  // and split into subregisters afterwards.  VLD3 rounds up to 4 elements
  // because there is no 3-element register tuple type here.
  EVT ResTy;
  if (NumVecs == 1)
    ResTy = VT;
  else {
    unsigned ResTyElts = (NumVecs == 3) ? 4 : NumVecs;
    if (!is64BitVector)
      ResTyElts *= 2;
    ResTy = EVT::getVectorVT(*CurDAG->getContext(), MVT::i64, ResTyElts);
  }
  std::vector<EVT> ResTys;
  ResTys.push_back(ResTy);
  if (isUpdating)
    ResTys.push_back(MVT::i32);
  ResTys.push_back(MVT::Other);

  SDValue Pred = getAL(CurDAG, dl);
  SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);
  SDNode *VLd;
  SmallVector<SDValue, 7> Ops;

  // Double registers and VLD1/VLD2 quad registers are directly supported.
  if (is64BitVector || NumVecs <= 2) {
    unsigned Opc = (is64BitVector ? DOpcodes[OpcodeIndex] :
                    QOpcodes0[OpcodeIndex]);
    Ops.push_back(MemAddr);
    Ops.push_back(Align);
    if (isUpdating) {
      SDValue Inc = N->getOperand(AddrOpIdx + 1);
      bool IsImmUpdate = isPerfectIncrement(Inc, VT, NumVecs);
      if (!IsImmUpdate) {
        // We use a VLD1 for v1i64 even if the pseudo says vld2/3/4, so
        // check for the opcode rather than the number of vector elements.
        if (isVLDfixed(Opc))
          Opc = getVLDSTRegisterUpdateOpcode(Opc);
        Ops.push_back(Inc);
      // VLD1/VLD2 fixed increment does not need Reg0 so only include it in
      // the operands if not such an opcode.
      } else if (!isVLDfixed(Opc))
        Ops.push_back(Reg0);
    }
    Ops.push_back(Pred);
    Ops.push_back(Reg0);
    Ops.push_back(Chain);
    VLd = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);
  } else {
    // Otherwise, quad registers are loaded with two separate instructions,
    // where one loads the even registers and the other loads the odd registers.
    EVT AddrTy = MemAddr.getValueType();

    // Load the even subregs.  This is always an updating load, so that it
    // provides the address to the second load for the odd subregs.
    SDValue ImplDef =
      SDValue(CurDAG->getMachineNode(TargetOpcode::IMPLICIT_DEF, dl, ResTy), 0);
    const SDValue OpsA[] = { MemAddr, Align, Reg0, ImplDef, Pred, Reg0, Chain };
    SDNode *VLdA = CurDAG->getMachineNode(QOpcodes0[OpcodeIndex], dl,
                                          ResTy, AddrTy, MVT::Other, OpsA);
    Chain = SDValue(VLdA, 2);

    // Load the odd subregs.
    Ops.push_back(SDValue(VLdA, 1));
    Ops.push_back(Align);
    if (isUpdating) {
      SDValue Inc = N->getOperand(AddrOpIdx + 1);
      assert(isa<ConstantSDNode>(Inc.getNode()) &&
             "only constant post-increment update allowed for VLD3/4");
      (void)Inc;
      Ops.push_back(Reg0);
    }
    Ops.push_back(SDValue(VLdA, 0));
    Ops.push_back(Pred);
    Ops.push_back(Reg0);
    Ops.push_back(Chain);
    VLd = CurDAG->getMachineNode(QOpcodes1[OpcodeIndex], dl, ResTys, Ops);
  }

  // Transfer memoperands.
  MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand();
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(VLd), {MemOp});

  if (NumVecs == 1) {
    ReplaceNode(N, VLd);
    return;
  }

  // Extract out the subregisters.
  SDValue SuperReg = SDValue(VLd, 0);
  static_assert(ARM::dsub_7 == ARM::dsub_0 + 7 &&
                    ARM::qsub_3 == ARM::qsub_0 + 3,
                "Unexpected subreg numbering");
  unsigned Sub0 = (is64BitVector ? ARM::dsub_0 : ARM::qsub_0);
  for (unsigned Vec = 0; Vec < NumVecs; ++Vec)
    ReplaceUses(SDValue(N, Vec),
                CurDAG->getTargetExtractSubreg(Sub0 + Vec, dl, VT, SuperReg));
  ReplaceUses(SDValue(N, NumVecs), SDValue(VLd, 1));
  if (isUpdating)
    ReplaceUses(SDValue(N, NumVecs + 1), SDValue(VLd, 2));
  CurDAG->RemoveDeadNode(N);
}

/// Select a NEON structured store (VST1-VST4).  Mirrors SelectVLD: the
/// source vectors are first glued into one super-register via REG_SEQUENCE,
/// and Q-register VST3/VST4 are split into an even-half and an odd-half
/// instruction (QOpcodes0 / QOpcodes1).
void ARMDAGToDAGISel::SelectVST(SDNode *N, bool isUpdating, unsigned NumVecs,
                                const uint16_t *DOpcodes,
                                const uint16_t *QOpcodes0,
                                const uint16_t *QOpcodes1) {
  assert(NumVecs >= 1 && NumVecs <= 4 && "VST NumVecs out-of-range");
  SDLoc dl(N);

  SDValue MemAddr, Align;
  bool IsIntrinsic = !isUpdating; // By coincidence, all supported updating
                                  // nodes are not intrinsics.
  unsigned AddrOpIdx = IsIntrinsic ? 2 : 1;
  unsigned Vec0Idx = 3; // AddrOpIdx + (isUpdating ? 2 : 1)
  if (!SelectAddrMode6(N, N->getOperand(AddrOpIdx), MemAddr, Align))
    return;

  MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand();

  SDValue Chain = N->getOperand(0);
  EVT VT = N->getOperand(Vec0Idx).getValueType();
  bool is64BitVector = VT.is64BitVector();
  Align = GetVLDSTAlign(Align, dl, NumVecs, is64BitVector);

  // Map the element type onto an index into the opcode tables.
  unsigned OpcodeIndex;
  switch (VT.getSimpleVT().SimpleTy) {
  default: llvm_unreachable("unhandled vst type");
    // Double-register operations:
  case MVT::v8i8:  OpcodeIndex = 0; break;
  case MVT::v4f16:
  case MVT::v4i16: OpcodeIndex = 1; break;
  case MVT::v2f32:
  case MVT::v2i32: OpcodeIndex = 2; break;
  case MVT::v1i64: OpcodeIndex = 3; break;
    // Quad-register operations:
  case MVT::v16i8: OpcodeIndex = 0; break;
  case MVT::v8f16:
  case MVT::v8i16: OpcodeIndex = 1; break;
  case MVT::v4f32:
  case MVT::v4i32: OpcodeIndex = 2; break;
  case MVT::v2f64:
  case MVT::v2i64: OpcodeIndex = 3; break;
  }

  std::vector<EVT> ResTys;
  if (isUpdating)
    ResTys.push_back(MVT::i32);
  ResTys.push_back(MVT::Other);

  SDValue Pred = getAL(CurDAG, dl);
  SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);
  SmallVector<SDValue, 7> Ops;

  // Double registers and VST1/VST2 quad registers are directly supported.
  if (is64BitVector || NumVecs <= 2) {
    SDValue SrcReg;
    if (NumVecs == 1) {
      SrcReg = N->getOperand(Vec0Idx);
    } else if (is64BitVector) {
      // Form a REG_SEQUENCE to force register allocation.
      SDValue V0 = N->getOperand(Vec0Idx + 0);
      SDValue V1 = N->getOperand(Vec0Idx + 1);
      if (NumVecs == 2)
        SrcReg = SDValue(createDRegPairNode(MVT::v2i64, V0, V1), 0);
      else {
        SDValue V2 = N->getOperand(Vec0Idx + 2);
        // If it's a vst3, form a quad D-register and leave the last part as
        // an undef.
        SDValue V3 = (NumVecs == 3)
          ? SDValue(CurDAG->getMachineNode(TargetOpcode::IMPLICIT_DEF,dl,VT), 0)
          : N->getOperand(Vec0Idx + 3);
        SrcReg = SDValue(createQuadDRegsNode(MVT::v4i64, V0, V1, V2, V3), 0);
      }
    } else {
      // Form a QQ register.
      SDValue Q0 = N->getOperand(Vec0Idx);
      SDValue Q1 = N->getOperand(Vec0Idx + 1);
      SrcReg = SDValue(createQRegPairNode(MVT::v4i64, Q0, Q1), 0);
    }

    unsigned Opc = (is64BitVector ? DOpcodes[OpcodeIndex] :
                    QOpcodes0[OpcodeIndex]);
    Ops.push_back(MemAddr);
    Ops.push_back(Align);
    if (isUpdating) {
      SDValue Inc = N->getOperand(AddrOpIdx + 1);
      bool IsImmUpdate = isPerfectIncrement(Inc, VT, NumVecs);
      if (!IsImmUpdate) {
        // We use a VST1 for v1i64 even if the pseudo says VST2/3/4, so
        // check for the opcode rather than the number of vector elements.
        if (isVSTfixed(Opc))
          Opc = getVLDSTRegisterUpdateOpcode(Opc);
        Ops.push_back(Inc);
      }
      // VST1/VST2 fixed increment does not need Reg0 so only include it in
      // the operands if not such an opcode.
      else if (!isVSTfixed(Opc))
        Ops.push_back(Reg0);
    }
    Ops.push_back(SrcReg);
    Ops.push_back(Pred);
    Ops.push_back(Reg0);
    Ops.push_back(Chain);
    SDNode *VSt = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);

    // Transfer memoperands.
    CurDAG->setNodeMemRefs(cast<MachineSDNode>(VSt), {MemOp});

    ReplaceNode(N, VSt);
    return;
  }

  // Otherwise, quad registers are stored with two separate instructions,
  // where one stores the even registers and the other stores the odd registers.

  // Form the QQQQ REG_SEQUENCE.
  SDValue V0 = N->getOperand(Vec0Idx + 0);
  SDValue V1 = N->getOperand(Vec0Idx + 1);
  SDValue V2 = N->getOperand(Vec0Idx + 2);
  SDValue V3 = (NumVecs == 3)
    ? SDValue(CurDAG->getMachineNode(TargetOpcode::IMPLICIT_DEF, dl, VT), 0)
    : N->getOperand(Vec0Idx + 3);
  SDValue RegSeq = SDValue(createQuadQRegsNode(MVT::v8i64, V0, V1, V2, V3), 0);

  // Store the even D registers.  This is always an updating store, so that it
  // provides the address to the second store for the odd subregs.
  const SDValue OpsA[] = { MemAddr, Align, Reg0, RegSeq, Pred, Reg0, Chain };
  SDNode *VStA = CurDAG->getMachineNode(QOpcodes0[OpcodeIndex], dl,
                                        MemAddr.getValueType(),
                                        MVT::Other, OpsA);
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(VStA), {MemOp});
  Chain = SDValue(VStA, 1);

  // Store the odd D registers.
  Ops.push_back(SDValue(VStA, 0));
  Ops.push_back(Align);
  if (isUpdating) {
    SDValue Inc = N->getOperand(AddrOpIdx + 1);
    assert(isa<ConstantSDNode>(Inc.getNode()) &&
           "only constant post-increment update allowed for VST3/4");
    (void)Inc;
    Ops.push_back(Reg0);
  }
  Ops.push_back(RegSeq);
  Ops.push_back(Pred);
  Ops.push_back(Reg0);
  Ops.push_back(Chain);
  SDNode *VStB = CurDAG->getMachineNode(QOpcodes1[OpcodeIndex], dl, ResTys,
                                        Ops);
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(VStB), {MemOp});
  ReplaceNode(N, VStB);
}

/// Select a single-lane NEON structured load or store (VLD2LN-VLD4LN /
/// VST2LN-VST4LN), plain or post-incrementing.
void ARMDAGToDAGISel::SelectVLDSTLane(SDNode *N, bool IsLoad, bool isUpdating,
                                      unsigned NumVecs,
                                      const uint16_t *DOpcodes,
                                      const uint16_t *QOpcodes) {
  assert(NumVecs >=2 && NumVecs <= 4 && "VLDSTLane NumVecs out-of-range");
  SDLoc dl(N);

  SDValue MemAddr, Align;
  bool IsIntrinsic = !isUpdating; // By coincidence, all supported updating
                                  // nodes are not intrinsics.
  unsigned AddrOpIdx = IsIntrinsic ? 2 : 1;
  unsigned Vec0Idx = 3; // AddrOpIdx + (isUpdating ? 2 : 1)
  if (!SelectAddrMode6(N, N->getOperand(AddrOpIdx), MemAddr, Align))
    return;

  MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand();

  SDValue Chain = N->getOperand(0);
  unsigned Lane =
    cast<ConstantSDNode>(N->getOperand(Vec0Idx + NumVecs))->getZExtValue();
  EVT VT = N->getOperand(Vec0Idx).getValueType();
  bool is64BitVector = VT.is64BitVector();

  // Clamp the alignment to what the lane access can actually use
  // (VLD3/VST3 lane forms take no alignment operand, hence NumVecs != 3).
  unsigned Alignment = 0;
  if (NumVecs != 3) {
    Alignment = cast<ConstantSDNode>(Align)->getZExtValue();
    unsigned NumBytes = NumVecs * VT.getScalarSizeInBits() / 8;
    if (Alignment > NumBytes)
      Alignment = NumBytes;
    if (Alignment < 8 && Alignment < NumBytes)
      Alignment = 0;
    // Alignment must be a power of two; make sure of that.
    Alignment = (Alignment & -Alignment);
    if (Alignment == 1)
      Alignment = 0;
  }
  Align = CurDAG->getTargetConstant(Alignment, dl, MVT::i32);

  unsigned OpcodeIndex;
  switch (VT.getSimpleVT().SimpleTy) {
  default: llvm_unreachable("unhandled vld/vst lane type");
    // Double-register operations:
  case MVT::v8i8:  OpcodeIndex = 0; break;
  case MVT::v4f16:
  case MVT::v4i16: OpcodeIndex = 1; break;
  case MVT::v2f32:
  case MVT::v2i32: OpcodeIndex = 2; break;
    // Quad-register operations:
  case MVT::v8f16:
  case MVT::v8i16: OpcodeIndex = 0; break;
  case MVT::v4f32:
  case MVT::v4i32: OpcodeIndex = 1; break;
  }

  std::vector<EVT> ResTys;
  if (IsLoad) {
    unsigned ResTyElts = (NumVecs == 3) ? 4 : NumVecs;
    if (!is64BitVector)
      ResTyElts *= 2;
    ResTys.push_back(EVT::getVectorVT(*CurDAG->getContext(),
                                      MVT::i64, ResTyElts));
  }
  if (isUpdating)
    ResTys.push_back(MVT::i32);
  ResTys.push_back(MVT::Other);

  SDValue Pred = getAL(CurDAG, dl);
  SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);

  SmallVector<SDValue, 8> Ops;
  Ops.push_back(MemAddr);
  Ops.push_back(Align);
  if (isUpdating) {
    SDValue Inc = N->getOperand(AddrOpIdx + 1);
    bool IsImmUpdate =
        isPerfectIncrement(Inc, VT.getVectorElementType(), NumVecs);
    Ops.push_back(IsImmUpdate ? Reg0 : Inc);
  }

  // Glue the input vectors into one super-register operand.
  SDValue SuperReg;
  SDValue V0 = N->getOperand(Vec0Idx + 0);
  SDValue V1 = N->getOperand(Vec0Idx + 1);
  if (NumVecs == 2) {
    if (is64BitVector)
      SuperReg = SDValue(createDRegPairNode(MVT::v2i64, V0, V1), 0);
    else
      SuperReg = SDValue(createQRegPairNode(MVT::v4i64, V0, V1), 0);
  } else {
    SDValue V2 = N->getOperand(Vec0Idx + 2);
    SDValue V3 = (NumVecs == 3)
      ? SDValue(CurDAG->getMachineNode(TargetOpcode::IMPLICIT_DEF, dl, VT), 0)
      : N->getOperand(Vec0Idx + 3);
    if (is64BitVector)
      SuperReg = SDValue(createQuadDRegsNode(MVT::v4i64, V0, V1, V2, V3), 0);
    else
      SuperReg = SDValue(createQuadQRegsNode(MVT::v8i64, V0, V1, V2, V3), 0);
  }
  Ops.push_back(SuperReg);
  Ops.push_back(getI32Imm(Lane, dl));
  Ops.push_back(Pred);
  Ops.push_back(Reg0);
  Ops.push_back(Chain);

  unsigned Opc = (is64BitVector ? DOpcodes[OpcodeIndex] :
                                  QOpcodes[OpcodeIndex]);
  SDNode *VLdLn = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(VLdLn), {MemOp});
  if (!IsLoad) {
    ReplaceNode(N, VLdLn);
    return;
  }

  // Extract the subregisters.
  SuperReg = SDValue(VLdLn, 0);
  static_assert(ARM::dsub_7 == ARM::dsub_0 + 7 &&
                    ARM::qsub_3 == ARM::qsub_0 + 3,
                "Unexpected subreg numbering");
  unsigned Sub0 = is64BitVector ? ARM::dsub_0 : ARM::qsub_0;
  for (unsigned Vec = 0; Vec < NumVecs; ++Vec)
    ReplaceUses(SDValue(N, Vec),
                CurDAG->getTargetExtractSubreg(Sub0 + Vec, dl, VT, SuperReg));
  ReplaceUses(SDValue(N, NumVecs), SDValue(VLdLn, 1));
  if (isUpdating)
    ReplaceUses(SDValue(N, NumVecs + 1), SDValue(VLdLn, 2));
  CurDAG->RemoveDeadNode(N);
}

/// Select a NEON load-and-duplicate (VLD1DUP-VLD4DUP), which loads one
/// element per vector and replicates it across all lanes.
void ARMDAGToDAGISel::SelectVLDDup(SDNode *N, bool IsIntrinsic,
                                   bool isUpdating, unsigned NumVecs,
                                   const uint16_t *DOpcodes,
                                   const uint16_t *QOpcodes0,
                                   const uint16_t *QOpcodes1) {
  assert(NumVecs >= 1 && NumVecs <= 4 && "VLDDup NumVecs out-of-range");
  SDLoc dl(N);

  SDValue MemAddr, Align;
  unsigned AddrOpIdx = IsIntrinsic ? 2 : 1;
  if (!SelectAddrMode6(N, N->getOperand(AddrOpIdx), MemAddr, Align))
    return;

  SDValue Chain = N->getOperand(0);
  EVT VT = N->getValueType(0);
  bool is64BitVector = VT.is64BitVector();

  // Clamp the alignment (same scheme as SelectVLDSTLane; VLD3DUP takes no
  // alignment operand, hence NumVecs != 3).
  unsigned Alignment = 0;
  if (NumVecs != 3) {
    Alignment = cast<ConstantSDNode>(Align)->getZExtValue();
    unsigned NumBytes = NumVecs * VT.getScalarSizeInBits() / 8;
    if (Alignment > NumBytes)
      Alignment = NumBytes;
    if (Alignment < 8 && Alignment < NumBytes)
      Alignment = 0;
    // Alignment must be a power of two; make sure of that.
    Alignment = (Alignment & -Alignment);
    if (Alignment == 1)
      Alignment = 0;
  }
  Align = CurDAG->getTargetConstant(Alignment, dl, MVT::i32);

  unsigned OpcodeIndex;
  switch (VT.getSimpleVT().SimpleTy) {
  default: llvm_unreachable("unhandled vld-dup type");
  case MVT::v8i8:
  case MVT::v16i8: OpcodeIndex = 0; break;
  case MVT::v4i16:
  case MVT::v8i16:
  case MVT::v4f16:
  case MVT::v8f16: OpcodeIndex = 1; break;
  case MVT::v2f32:
  case MVT::v2i32:
  case MVT::v4f32:
  case MVT::v4i32: OpcodeIndex = 2; break;
  case MVT::v1f64:
  case MVT::v1i64: OpcodeIndex = 3; break;
  }

  unsigned ResTyElts = (NumVecs == 3) ? 4 : NumVecs;
  if (!is64BitVector)
    ResTyElts *= 2;
  EVT ResTy = EVT::getVectorVT(*CurDAG->getContext(), MVT::i64, ResTyElts);

  std::vector<EVT> ResTys;
  ResTys.push_back(ResTy);
  if (isUpdating)
    ResTys.push_back(MVT::i32);
  ResTys.push_back(MVT::Other);

  SDValue Pred = getAL(CurDAG, dl);
  SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);

  SDNode *VLdDup;
  if (is64BitVector || NumVecs == 1) {
    SmallVector<SDValue, 6> Ops;
    Ops.push_back(MemAddr);
    Ops.push_back(Align);
    unsigned Opc = is64BitVector ? DOpcodes[OpcodeIndex] :
                                   QOpcodes0[OpcodeIndex];
    if (isUpdating) {
      // fixed-stride update instructions don't have an explicit writeback
      // operand. It's implicit in the opcode itself.
      SDValue Inc = N->getOperand(2);
      bool IsImmUpdate =
          isPerfectIncrement(Inc, VT.getVectorElementType(), NumVecs);
      if (NumVecs <= 2 && !IsImmUpdate)
        Opc = getVLDSTRegisterUpdateOpcode(Opc);
      if (!IsImmUpdate)
        Ops.push_back(Inc);
      // FIXME: VLD3 and VLD4 haven't been updated to that form yet.
      else if (NumVecs > 2)
        Ops.push_back(Reg0);
    }
    Ops.push_back(Pred);
    Ops.push_back(Reg0);
    Ops.push_back(Chain);
    VLdDup = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);
  } else if (NumVecs == 2) {
    // Q-register VLD2DUP: two instructions, second consumes the first's chain.
    const SDValue OpsA[] = { MemAddr, Align, Pred, Reg0, Chain };
    SDNode *VLdA = CurDAG->getMachineNode(QOpcodes0[OpcodeIndex],
                                          dl, ResTys, OpsA);

    Chain = SDValue(VLdA, 1);
    const SDValue OpsB[] = { MemAddr, Align, Pred, Reg0, Chain };
    VLdDup = CurDAG->getMachineNode(QOpcodes1[OpcodeIndex], dl, ResTys, OpsB);
  } else {
    // Q-register VLD3/4DUP: the second instruction refines the super-register
    // produced by the first (seeded with an IMPLICIT_DEF).
    SDValue ImplDef =
      SDValue(CurDAG->getMachineNode(TargetOpcode::IMPLICIT_DEF, dl, ResTy), 0);
    const SDValue OpsA[] = { MemAddr, Align, ImplDef, Pred, Reg0, Chain };
    SDNode *VLdA = CurDAG->getMachineNode(QOpcodes0[OpcodeIndex],
                                          dl, ResTys, OpsA);

    SDValue SuperReg = SDValue(VLdA, 0);
    Chain = SDValue(VLdA, 1);
    const SDValue OpsB[] = { MemAddr, Align, SuperReg, Pred, Reg0, Chain };
    VLdDup = CurDAG->getMachineNode(QOpcodes1[OpcodeIndex], dl, ResTys, OpsB);
  }

  // Transfer memoperands.
  MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand();
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(VLdDup), {MemOp});

  // Extract the subregisters.
  if (NumVecs == 1) {
    ReplaceUses(SDValue(N, 0), SDValue(VLdDup, 0));
  } else {
    SDValue SuperReg = SDValue(VLdDup, 0);
    static_assert(ARM::dsub_7 == ARM::dsub_0 + 7, "Unexpected subreg numbering");
    unsigned SubIdx = is64BitVector ? ARM::dsub_0 : ARM::qsub_0;
    for (unsigned Vec = 0; Vec != NumVecs; ++Vec) {
      ReplaceUses(SDValue(N, Vec),
                  CurDAG->getTargetExtractSubreg(SubIdx+Vec, dl, VT, SuperReg));
    }
  }
  ReplaceUses(SDValue(N, NumVecs), SDValue(VLdDup, 1));
  if (isUpdating)
    ReplaceUses(SDValue(N, NumVecs + 1), SDValue(VLdDup, 2));
  CurDAG->RemoveDeadNode(N);
}

/// Try to match a bitfield-extract pattern (AND+SRL, SHL+SRL, SRL of AND,
/// or SIGN_EXTEND_INREG of a shift) to UBFX/SBFX (or a plain shift when the
/// field reaches the MSB).  Returns true if N was replaced.
bool ARMDAGToDAGISel::tryV6T2BitfieldExtractOp(SDNode *N, bool isSigned) {
  if (!Subtarget->hasV6T2Ops())
    return false;

  unsigned Opc = isSigned
    ? (Subtarget->isThumb() ? ARM::t2SBFX : ARM::SBFX)
    : (Subtarget->isThumb() ? ARM::t2UBFX : ARM::UBFX);
  SDLoc dl(N);

  // For unsigned extracts, check for a shift right and mask
  unsigned And_imm = 0;
  if (N->getOpcode() == ISD::AND) {
    if (isOpcWithIntImmediate(N, ISD::AND, And_imm)) {

      // The immediate is a mask of the low bits iff imm & (imm+1) == 0
      if (And_imm & (And_imm + 1))
        return false;

      unsigned Srl_imm = 0;
      if (isOpcWithIntImmediate(N->getOperand(0).getNode(), ISD::SRL,
                                Srl_imm)) {
        assert(Srl_imm > 0 && Srl_imm < 32 && "bad amount in shift node!");

        // Mask off the unnecessary bits of the AND immediate; normally
        // DAGCombine will do this, but that might not happen if
        // targetShrinkDemandedConstant chooses a different immediate.
        And_imm &= -1U >> Srl_imm;

        // Note: The width operand is encoded as width-1.
        unsigned Width = countTrailingOnes(And_imm) - 1;
        unsigned LSB = Srl_imm;

        SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);

        if ((LSB + Width + 1) == N->getValueType(0).getSizeInBits()) {
          // It's cheaper to use a right shift to extract the top bits.
          if (Subtarget->isThumb()) {
            Opc = isSigned ? ARM::t2ASRri : ARM::t2LSRri;
            SDValue Ops[] = { N->getOperand(0).getOperand(0),
                              CurDAG->getTargetConstant(LSB, dl, MVT::i32),
                              getAL(CurDAG, dl), Reg0, Reg0 };
            CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops);
            return true;
          }

          // ARM models shift instructions as MOVsi with shifter operand.
          ARM_AM::ShiftOpc ShOpcVal = ARM_AM::getShiftOpcForNode(ISD::SRL);
          SDValue ShOpc =
            CurDAG->getTargetConstant(ARM_AM::getSORegOpc(ShOpcVal, LSB), dl,
                                      MVT::i32);
          SDValue Ops[] = { N->getOperand(0).getOperand(0), ShOpc,
                            getAL(CurDAG, dl), Reg0, Reg0 };
          CurDAG->SelectNodeTo(N, ARM::MOVsi, MVT::i32, Ops);
          return true;
        }

        assert(LSB + Width + 1 <= 32 && "Shouldn't create an invalid ubfx");
        SDValue Ops[] = { N->getOperand(0).getOperand(0),
                          CurDAG->getTargetConstant(LSB, dl, MVT::i32),
                          CurDAG->getTargetConstant(Width, dl, MVT::i32),
                          getAL(CurDAG, dl), Reg0 };
        CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops);
        return true;
      }
    }
    return false;
  }

  // Otherwise, we're looking for a shift of a shift
  unsigned Shl_imm = 0;
  if (isOpcWithIntImmediate(N->getOperand(0).getNode(), ISD::SHL, Shl_imm)) {
    assert(Shl_imm > 0 && Shl_imm < 32 && "bad amount in shift node!");
    unsigned Srl_imm = 0;
    if (isInt32Immediate(N->getOperand(1), Srl_imm)) {
      assert(Srl_imm > 0 && Srl_imm < 32 && "bad amount in shift node!");
      // Note: The width operand is encoded as width-1.
      unsigned Width = 32 - Srl_imm - 1;
      int LSB = Srl_imm - Shl_imm;
      if (LSB < 0)
        return false;
      SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);
      assert(LSB + Width + 1 <= 32 && "Shouldn't create an invalid ubfx");
      SDValue Ops[] = { N->getOperand(0).getOperand(0),
                        CurDAG->getTargetConstant(LSB, dl, MVT::i32),
                        CurDAG->getTargetConstant(Width, dl, MVT::i32),
                        getAL(CurDAG, dl), Reg0 };
      CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops);
      return true;
    }
  }

  // Or we are looking for a shift of an and, with a mask operand
  if (isOpcWithIntImmediate(N->getOperand(0).getNode(), ISD::AND, And_imm) &&
      isShiftedMask_32(And_imm)) {
    unsigned Srl_imm = 0;
    unsigned LSB = countTrailingZeros(And_imm);
    // Shift must be the same as the ands lsb
    if (isInt32Immediate(N->getOperand(1), Srl_imm) && Srl_imm == LSB) {
      assert(Srl_imm > 0 && Srl_imm < 32 && "bad amount in shift node!");
      unsigned MSB = 31 - countLeadingZeros(And_imm);
      // Note: The width operand is encoded as width-1.
      unsigned Width = MSB - LSB;
      SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);
      assert(Srl_imm + Width + 1 <= 32 && "Shouldn't create an invalid ubfx");
      SDValue Ops[] = { N->getOperand(0).getOperand(0),
                        CurDAG->getTargetConstant(Srl_imm, dl, MVT::i32),
                        CurDAG->getTargetConstant(Width, dl, MVT::i32),
                        getAL(CurDAG, dl), Reg0 };
      CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops);
      return true;
    }
  }

  if (N->getOpcode() == ISD::SIGN_EXTEND_INREG) {
    unsigned Width = cast<VTSDNode>(N->getOperand(1))->getVT().getSizeInBits();
    unsigned LSB = 0;
    if (!isOpcWithIntImmediate(N->getOperand(0).getNode(), ISD::SRL, LSB) &&
        !isOpcWithIntImmediate(N->getOperand(0).getNode(), ISD::SRA, LSB))
      return false;

    if (LSB + Width > 32)
      return false;

    SDValue Reg0 = CurDAG->getRegister(0, MVT::i32);
    assert(LSB + Width <= 32 && "Shouldn't create an invalid ubfx");
    SDValue Ops[] = { N->getOperand(0).getOperand(0),
                      CurDAG->getTargetConstant(LSB, dl, MVT::i32),
                      CurDAG->getTargetConstant(Width - 1, dl, MVT::i32),
                      getAL(CurDAG, dl), Reg0 };
    CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops);
    return true;
  }

  return false;
}

/// Target-specific DAG combining for ISD::XOR.
/// Target-independent combining lowers SELECT_CC nodes of the form
/// select_cc setg[ge] X,  0,  X, -X
/// select_cc setgt    X, -1,  X, -X
/// select_cc setl[te] X,  0, -X,  X
/// select_cc setlt    X,  1, -X,  X
/// which represent Integer ABS into:
/// Y = sra (X, size(X)-1); xor (add (X, Y), Y)
/// ARM instruction selection detects the latter and matches it to
/// ARM::ABS or ARM::t2ABS machine node.
bool ARMDAGToDAGISel::tryABSOp(SDNode *N){
  SDValue XORSrc0 = N->getOperand(0);
  SDValue XORSrc1 = N->getOperand(1);
  EVT VT = N->getValueType(0);

  // Neither ARM::ABS nor ARM::t2ABS is used for Thumb1-only targets.
  if (Subtarget->isThumb1Only())
    return false;

  if (XORSrc0.getOpcode() != ISD::ADD || XORSrc1.getOpcode() != ISD::SRA)
    return false;

  SDValue ADDSrc0 = XORSrc0.getOperand(0);
  SDValue ADDSrc1 = XORSrc0.getOperand(1);
  SDValue SRASrc0 = XORSrc1.getOperand(0);
  SDValue SRASrc1 = XORSrc1.getOperand(1);
  ConstantSDNode *SRAConstant =  dyn_cast<ConstantSDNode>(SRASrc1);
  EVT XType = SRASrc0.getValueType();
  unsigned Size = XType.getSizeInBits() - 1;

  // Match xor (add X, (sra X, size-1)), (sra X, size-1), i.e. the canonical
  // integer-ABS expansion described in the comment block above.
  if (ADDSrc1 == XORSrc1 && ADDSrc0 == SRASrc0 &&
      XType.isInteger() && SRAConstant != nullptr &&
      Size == SRAConstant->getZExtValue()) {
    unsigned Opcode = Subtarget->isThumb2() ? ARM::t2ABS : ARM::ABS;
    CurDAG->SelectNodeTo(N, Opcode, VT, ADDSrc0);
    return true;
  }

  return false;
}

/// We've got special pseudo-instructions for these
void ARMDAGToDAGISel::SelectCMP_SWAP(SDNode *N) {
  unsigned Opcode;
  EVT MemTy = cast<MemSDNode>(N)->getMemoryVT();
  if (MemTy == MVT::i8)
    Opcode = ARM::CMP_SWAP_8;
  else if (MemTy == MVT::i16)
    Opcode = ARM::CMP_SWAP_16;
  else if (MemTy == MVT::i32)
    Opcode = ARM::CMP_SWAP_32;
  else
    llvm_unreachable("Unknown AtomicCmpSwap type");

  // Operand order for the pseudo: address, expected, new value, chain.
  SDValue Ops[] = {N->getOperand(1), N->getOperand(2), N->getOperand(3),
                   N->getOperand(0)};
  SDNode *CmpSwap = CurDAG->getMachineNode(
      Opcode, SDLoc(N),
      CurDAG->getVTList(MVT::i32, MVT::i32, MVT::Other), Ops);

  MachineMemOperand *MemOp = cast<MemSDNode>(N)->getMemOperand();
  CurDAG->setNodeMemRefs(cast<MachineSDNode>(CmpSwap), {MemOp});

  // Result 1 of the pseudo is skipped here; only the loaded value (0) and
  // the chain (2) are forwarded to the original node's users.
  ReplaceUses(SDValue(N, 0), SDValue(CmpSwap, 0));
  ReplaceUses(SDValue(N, 1), SDValue(CmpSwap, 2));
  CurDAG->RemoveDeadNode(N);
}

/// If A's set bits form one contiguous run, return (index of highest set bit,
/// index of lowest set bit); otherwise return an empty Optional.
static Optional<std::pair<unsigned, unsigned>>
getContiguousRangeOfSetBits(const APInt &A) {
  unsigned FirstOne = A.getBitWidth() - A.countLeadingZeros() - 1;
  unsigned LastOne = A.countTrailingZeros();
  // Popcount equals the run length iff the set bits are contiguous.
  if (A.countPopulation() != (FirstOne - LastOne + 1))
    return Optional<std::pair<unsigned,unsigned>>();
  return std::make_pair(FirstOne, LastOne);
}

/// Try to replace (cmpz (and X, contiguous-mask), #0) with flag-setting
/// shifts; sets \p SwitchEQNEToPLMI when the caller must flip EQ/NE to PL/MI
/// (case 3 below, where the tested bit lands in the sign bit).
void ARMDAGToDAGISel::SelectCMPZ(SDNode *N, bool &SwitchEQNEToPLMI) {
  assert(N->getOpcode() == ARMISD::CMPZ);
  SwitchEQNEToPLMI = false;

  if (!Subtarget->isThumb())
    // FIXME: Work out whether it is profitable to do this in A32 mode - LSL and
    // LSR don't exist as standalone instructions - they need the barrel shifter.
    return;

  // select (cmpz (and X, C), #0) -> (LSLS X) or (LSRS X) or (LSRS (LSLS X))
  SDValue And = N->getOperand(0);
  if (!And->hasOneUse())
    return;

  SDValue Zero = N->getOperand(1);
  if (!isa<ConstantSDNode>(Zero) || !cast<ConstantSDNode>(Zero)->isNullValue() ||
      And->getOpcode() != ISD::AND)
    return;
  SDValue X = And.getOperand(0);
  auto C = dyn_cast<ConstantSDNode>(And.getOperand(1));

  if (!C)
    return;
  auto Range = getContiguousRangeOfSetBits(C->getAPIntValue());
  if (!Range)
    return;

  // There are several ways to lower this:
  SDNode *NewN;
  SDLoc dl(N);

  // Emit a flag-setting immediate shift; tLSLri/tLSRri are promoted to the
  // Thumb2 forms when available (Thumb2 shifts carry explicit pred/CC ops).
  auto EmitShift = [&](unsigned Opc, SDValue Src, unsigned Imm) -> SDNode* {
    if (Subtarget->isThumb2()) {
      Opc = (Opc == ARM::tLSLri) ? ARM::t2LSLri : ARM::t2LSRri;
      SDValue Ops[] = { Src, CurDAG->getTargetConstant(Imm, dl, MVT::i32),
                        getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32),
                        CurDAG->getRegister(0, MVT::i32) };
      return CurDAG->getMachineNode(Opc, dl, MVT::i32, Ops);
    } else {
      SDValue Ops[] = {CurDAG->getRegister(ARM::CPSR, MVT::i32), Src,
                       CurDAG->getTargetConstant(Imm, dl, MVT::i32),
                       getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32)};
      return CurDAG->getMachineNode(Opc, dl, MVT::i32, Ops);
    }
  };

  if (Range->second == 0) {
    //  1. Mask includes the LSB -> Simply shift the top N bits off
    NewN = EmitShift(ARM::tLSLri, X, 31 - Range->first);
    ReplaceNode(And.getNode(), NewN);
  } else if (Range->first == 31) {
    //  2. Mask includes the MSB -> Simply shift the bottom N bits off
    NewN = EmitShift(ARM::tLSRri, X, Range->second);
    ReplaceNode(And.getNode(), NewN);
  } else if (Range->first == Range->second) {
    //  3. Only one bit is set. We can shift this into the sign bit and use a
    //     PL/MI comparison.
    NewN = EmitShift(ARM::tLSLri, X, 31 - Range->first);
    ReplaceNode(And.getNode(), NewN);

    SwitchEQNEToPLMI = true;
  } else if (!Subtarget->hasV6T2Ops()) {
    //  4. Do a double shift to clear bottom and top bits, but only in
    //     thumb-1 mode as in thumb-2 we can use UBFX.
    NewN = EmitShift(ARM::tLSLri, X, 31 - Range->first);
    NewN = EmitShift(ARM::tLSRri, SDValue(NewN, 0),
                     Range->second + (31 - Range->first));
    ReplaceNode(And.getNode(), NewN);
  }
}

/// Main instruction-selection entry point; dispatches on the node opcode.
/// (Continues beyond this chunk.)
void ARMDAGToDAGISel::Select(SDNode *N) {
  SDLoc dl(N);

  if (N->isMachineOpcode()) {
    N->setNodeId(-1);
    return;   // Already selected.
  }

  switch (N->getOpcode()) {
  default: break;
  case ISD::STORE: {
    // For Thumb1, match an sp-relative store in C++. This is a little
    // unfortunate, but I don't think I can make the chain check work
    // otherwise.  (The chain of the store has to be the same as the chain
    // of the CopyFromReg, or else we can't replace the CopyFromReg with
    // a direct reference to "SP".)
    //
    // This is only necessary on Thumb1 because Thumb1 sp-relative stores use
    // a different addressing mode from other four-byte stores.
    //
    // This pattern usually comes up with call arguments.
StoreSDNode *ST = cast<StoreSDNode>(N); SDValue Ptr = ST->getBasePtr(); if (Subtarget->isThumb1Only() && ST->isUnindexed()) { int RHSC = 0; if (Ptr.getOpcode() == ISD::ADD && isScaledConstantInRange(Ptr.getOperand(1), /*Scale=*/4, 0, 256, RHSC)) Ptr = Ptr.getOperand(0); if (Ptr.getOpcode() == ISD::CopyFromReg && cast<RegisterSDNode>(Ptr.getOperand(1))->getReg() == ARM::SP && Ptr.getOperand(0) == ST->getChain()) { SDValue Ops[] = {ST->getValue(), CurDAG->getRegister(ARM::SP, MVT::i32), CurDAG->getTargetConstant(RHSC, dl, MVT::i32), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), ST->getChain()}; MachineSDNode *ResNode = CurDAG->getMachineNode(ARM::tSTRspi, dl, MVT::Other, Ops); MachineMemOperand *MemOp = ST->getMemOperand(); CurDAG->setNodeMemRefs(cast<MachineSDNode>(ResNode), {MemOp}); ReplaceNode(N, ResNode); return; } } break; } case ISD::WRITE_REGISTER: if (tryWriteRegister(N)) return; break; case ISD::READ_REGISTER: if (tryReadRegister(N)) return; break; case ISD::INLINEASM: case ISD::INLINEASM_BR: if (tryInlineAsm(N)) return; break; case ISD::XOR: // Select special operations if XOR node forms integer ABS pattern if (tryABSOp(N)) return; // Other cases are autogenerated. 
break; case ISD::Constant: { unsigned Val = cast<ConstantSDNode>(N)->getZExtValue(); // If we can't materialize the constant we need to use a literal pool if (ConstantMaterializationCost(Val) > 2) { SDValue CPIdx = CurDAG->getTargetConstantPool( ConstantInt::get(Type::getInt32Ty(*CurDAG->getContext()), Val), TLI->getPointerTy(CurDAG->getDataLayout())); SDNode *ResNode; if (Subtarget->isThumb()) { SDValue Ops[] = { CPIdx, getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getEntryNode() }; ResNode = CurDAG->getMachineNode(ARM::tLDRpci, dl, MVT::i32, MVT::Other, Ops); } else { SDValue Ops[] = { CPIdx, CurDAG->getTargetConstant(0, dl, MVT::i32), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getEntryNode() }; ResNode = CurDAG->getMachineNode(ARM::LDRcp, dl, MVT::i32, MVT::Other, Ops); } // Annotate the Node with memory operand information so that MachineInstr // queries work properly. This e.g. gives the register allocation the // required information for rematerialization. MachineFunction& MF = CurDAG->getMachineFunction(); MachineMemOperand *MemOp = MF.getMachineMemOperand(MachinePointerInfo::getConstantPool(MF), MachineMemOperand::MOLoad, 4, 4); CurDAG->setNodeMemRefs(cast<MachineSDNode>(ResNode), {MemOp}); ReplaceNode(N, ResNode); return; } // Other cases are autogenerated. break; } case ISD::FrameIndex: { // Selects to ADDri FI, 0 which in turn will become ADDri SP, imm. int FI = cast<FrameIndexSDNode>(N)->getIndex(); SDValue TFI = CurDAG->getTargetFrameIndex( FI, TLI->getPointerTy(CurDAG->getDataLayout())); if (Subtarget->isThumb1Only()) { // Set the alignment of the frame object to 4, to avoid having to generate // more than one ADD MachineFrameInfo &MFI = MF->getFrameInfo(); if (MFI.getObjectAlignment(FI) < 4) MFI.setObjectAlignment(FI, 4); CurDAG->SelectNodeTo(N, ARM::tADDframe, MVT::i32, TFI, CurDAG->getTargetConstant(0, dl, MVT::i32)); return; } else { unsigned Opc = ((Subtarget->isThumb() && Subtarget->hasThumb2()) ? 
ARM::t2ADDri : ARM::ADDri); SDValue Ops[] = { TFI, CurDAG->getTargetConstant(0, dl, MVT::i32), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getRegister(0, MVT::i32) }; CurDAG->SelectNodeTo(N, Opc, MVT::i32, Ops); return; } } case ISD::SRL: if (tryV6T2BitfieldExtractOp(N, false)) return; break; case ISD::SIGN_EXTEND_INREG: case ISD::SRA: if (tryV6T2BitfieldExtractOp(N, true)) return; break; case ISD::MUL: if (Subtarget->isThumb1Only()) break; if (ConstantSDNode *C = dyn_cast<ConstantSDNode>(N->getOperand(1))) { unsigned RHSV = C->getZExtValue(); if (!RHSV) break; if (isPowerOf2_32(RHSV-1)) { // 2^n+1? unsigned ShImm = Log2_32(RHSV-1); if (ShImm >= 32) break; SDValue V = N->getOperand(0); ShImm = ARM_AM::getSORegOpc(ARM_AM::lsl, ShImm); SDValue ShImmOp = CurDAG->getTargetConstant(ShImm, dl, MVT::i32); SDValue Reg0 = CurDAG->getRegister(0, MVT::i32); if (Subtarget->isThumb()) { SDValue Ops[] = { V, V, ShImmOp, getAL(CurDAG, dl), Reg0, Reg0 }; CurDAG->SelectNodeTo(N, ARM::t2ADDrs, MVT::i32, Ops); return; } else { SDValue Ops[] = { V, V, Reg0, ShImmOp, getAL(CurDAG, dl), Reg0, Reg0 }; CurDAG->SelectNodeTo(N, ARM::ADDrsi, MVT::i32, Ops); return; } } if (isPowerOf2_32(RHSV+1)) { // 2^n-1? 
unsigned ShImm = Log2_32(RHSV+1); if (ShImm >= 32) break; SDValue V = N->getOperand(0); ShImm = ARM_AM::getSORegOpc(ARM_AM::lsl, ShImm); SDValue ShImmOp = CurDAG->getTargetConstant(ShImm, dl, MVT::i32); SDValue Reg0 = CurDAG->getRegister(0, MVT::i32); if (Subtarget->isThumb()) { SDValue Ops[] = { V, V, ShImmOp, getAL(CurDAG, dl), Reg0, Reg0 }; CurDAG->SelectNodeTo(N, ARM::t2RSBrs, MVT::i32, Ops); return; } else { SDValue Ops[] = { V, V, Reg0, ShImmOp, getAL(CurDAG, dl), Reg0, Reg0 }; CurDAG->SelectNodeTo(N, ARM::RSBrsi, MVT::i32, Ops); return; } } } break; case ISD::AND: { // Check for unsigned bitfield extract if (tryV6T2BitfieldExtractOp(N, false)) return; // If an immediate is used in an AND node, it is possible that the immediate // can be more optimally materialized when negated. If this is the case we // can negate the immediate and use a BIC instead. auto *N1C = dyn_cast<ConstantSDNode>(N->getOperand(1)); if (N1C && N1C->hasOneUse() && Subtarget->isThumb()) { uint32_t Imm = (uint32_t) N1C->getZExtValue(); // In Thumb2 mode, an AND can take a 12-bit immediate. If this // immediate can be negated and fit in the immediate operand of // a t2BIC, don't do any manual transform here as this can be // handled by the generic ISel machinery. bool PreferImmediateEncoding = Subtarget->hasThumb2() && (is_t2_so_imm(Imm) || is_t2_so_imm_not(Imm)); if (!PreferImmediateEncoding && ConstantMaterializationCost(Imm) > ConstantMaterializationCost(~Imm)) { // The current immediate costs more to materialize than a negated // immediate, so negate the immediate and use a BIC. SDValue NewImm = CurDAG->getConstant(~N1C->getZExtValue(), dl, MVT::i32); // If the new constant didn't exist before, reposition it in the topological // ordering so it is just before N. Otherwise, don't touch its location. 
if (NewImm->getNodeId() == -1) CurDAG->RepositionNode(N->getIterator(), NewImm.getNode()); if (!Subtarget->hasThumb2()) { SDValue Ops[] = {CurDAG->getRegister(ARM::CPSR, MVT::i32), N->getOperand(0), NewImm, getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32)}; ReplaceNode(N, CurDAG->getMachineNode(ARM::tBIC, dl, MVT::i32, Ops)); return; } else { SDValue Ops[] = {N->getOperand(0), NewImm, getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getRegister(0, MVT::i32)}; ReplaceNode(N, CurDAG->getMachineNode(ARM::t2BICrr, dl, MVT::i32, Ops)); return; } } } // (and (or x, c2), c1) and top 16-bits of c1 and c2 match, lower 16-bits // of c1 are 0xffff, and lower 16-bit of c2 are 0. That is, the top 16-bits // are entirely contributed by c2 and lower 16-bits are entirely contributed // by x. That's equal to (or (and x, 0xffff), (and c1, 0xffff0000)). // Select it to: "movt x, ((c1 & 0xffff) >> 16) EVT VT = N->getValueType(0); if (VT != MVT::i32) break; unsigned Opc = (Subtarget->isThumb() && Subtarget->hasThumb2()) ? ARM::t2MOVTi16 : (Subtarget->hasV6T2Ops() ? ARM::MOVTi16 : 0); if (!Opc) break; SDValue N0 = N->getOperand(0), N1 = N->getOperand(1); N1C = dyn_cast<ConstantSDNode>(N1); if (!N1C) break; if (N0.getOpcode() == ISD::OR && N0.getNode()->hasOneUse()) { SDValue N2 = N0.getOperand(1); ConstantSDNode *N2C = dyn_cast<ConstantSDNode>(N2); if (!N2C) break; unsigned N1CVal = N1C->getZExtValue(); unsigned N2CVal = N2C->getZExtValue(); if ((N1CVal & 0xffff0000U) == (N2CVal & 0xffff0000U) && (N1CVal & 0xffffU) == 0xffffU && (N2CVal & 0xffffU) == 0x0U) { SDValue Imm16 = CurDAG->getTargetConstant((N2CVal & 0xFFFF0000U) >> 16, dl, MVT::i32); SDValue Ops[] = { N0.getOperand(0), Imm16, getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32) }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, VT, Ops)); return; } } break; } case ARMISD::UMAAL: { unsigned Opc = Subtarget->isThumb() ? 
ARM::t2UMAAL : ARM::UMAAL; SDValue Ops[] = { N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32) }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, MVT::i32, MVT::i32, Ops)); return; } case ARMISD::UMLAL:{ if (Subtarget->isThumb()) { SDValue Ops[] = { N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32)}; ReplaceNode( N, CurDAG->getMachineNode(ARM::t2UMLAL, dl, MVT::i32, MVT::i32, Ops)); return; }else{ SDValue Ops[] = { N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getRegister(0, MVT::i32) }; ReplaceNode(N, CurDAG->getMachineNode( Subtarget->hasV6Ops() ? ARM::UMLAL : ARM::UMLALv5, dl, MVT::i32, MVT::i32, Ops)); return; } } case ARMISD::SMLAL:{ if (Subtarget->isThumb()) { SDValue Ops[] = { N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32)}; ReplaceNode( N, CurDAG->getMachineNode(ARM::t2SMLAL, dl, MVT::i32, MVT::i32, Ops)); return; }else{ SDValue Ops[] = { N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getRegister(0, MVT::i32) }; ReplaceNode(N, CurDAG->getMachineNode( Subtarget->hasV6Ops() ? 
ARM::SMLAL : ARM::SMLALv5, dl, MVT::i32, MVT::i32, Ops)); return; } } case ARMISD::SUBE: { if (!Subtarget->hasV6Ops() || !Subtarget->hasDSP()) break; // Look for a pattern to match SMMLS // (sube a, (smul_loHi a, b), (subc 0, (smul_LOhi(a, b)))) if (N->getOperand(1).getOpcode() != ISD::SMUL_LOHI || N->getOperand(2).getOpcode() != ARMISD::SUBC || !SDValue(N, 1).use_empty()) break; if (Subtarget->isThumb()) assert(Subtarget->hasThumb2() && "This pattern should not be generated for Thumb"); SDValue SmulLoHi = N->getOperand(1); SDValue Subc = N->getOperand(2); auto *Zero = dyn_cast<ConstantSDNode>(Subc.getOperand(0)); if (!Zero || Zero->getZExtValue() != 0 || Subc.getOperand(1) != SmulLoHi.getValue(0) || N->getOperand(1) != SmulLoHi.getValue(1) || N->getOperand(2) != Subc.getValue(1)) break; unsigned Opc = Subtarget->isThumb2() ? ARM::t2SMMLS : ARM::SMMLS; SDValue Ops[] = { SmulLoHi.getOperand(0), SmulLoHi.getOperand(1), N->getOperand(0), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32) }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, MVT::i32, Ops)); return; } case ISD::LOAD: { if (Subtarget->isThumb() && Subtarget->hasThumb2()) { if (tryT2IndexedLoad(N)) return; } else if (Subtarget->isThumb()) { if (tryT1IndexedLoad(N)) return; } else if (tryARMIndexedLoad(N)) return; // Other cases are autogenerated. break; } case ARMISD::BRCOND: { // Pattern: (ARMbrcond:void (bb:Other):$dst, (imm:i32):$cc) // Emits: (Bcc:void (bb:Other):$dst, (imm:i32):$cc) // Pattern complexity = 6 cost = 1 size = 0 // Pattern: (ARMbrcond:void (bb:Other):$dst, (imm:i32):$cc) // Emits: (tBcc:void (bb:Other):$dst, (imm:i32):$cc) // Pattern complexity = 6 cost = 1 size = 0 // Pattern: (ARMbrcond:void (bb:Other):$dst, (imm:i32):$cc) // Emits: (t2Bcc:void (bb:Other):$dst, (imm:i32):$cc) // Pattern complexity = 6 cost = 1 size = 0 unsigned Opc = Subtarget->isThumb() ? ((Subtarget->hasThumb2()) ? 
ARM::t2Bcc : ARM::tBcc) : ARM::Bcc; SDValue Chain = N->getOperand(0); SDValue N1 = N->getOperand(1); SDValue N2 = N->getOperand(2); SDValue N3 = N->getOperand(3); SDValue InFlag = N->getOperand(4); assert(N1.getOpcode() == ISD::BasicBlock); assert(N2.getOpcode() == ISD::Constant); assert(N3.getOpcode() == ISD::Register); unsigned CC = (unsigned) cast<ConstantSDNode>(N2)->getZExtValue(); if (InFlag.getOpcode() == ARMISD::CMPZ) { bool SwitchEQNEToPLMI; SelectCMPZ(InFlag.getNode(), SwitchEQNEToPLMI); InFlag = N->getOperand(4); if (SwitchEQNEToPLMI) { switch ((ARMCC::CondCodes)CC) { default: llvm_unreachable("CMPZ must be either NE or EQ!"); case ARMCC::NE: CC = (unsigned)ARMCC::MI; break; case ARMCC::EQ: CC = (unsigned)ARMCC::PL; break; } } } SDValue Tmp2 = CurDAG->getTargetConstant(CC, dl, MVT::i32); SDValue Ops[] = { N1, Tmp2, N3, Chain, InFlag }; SDNode *ResNode = CurDAG->getMachineNode(Opc, dl, MVT::Other, MVT::Glue, Ops); Chain = SDValue(ResNode, 0); if (N->getNumValues() == 2) { InFlag = SDValue(ResNode, 1); ReplaceUses(SDValue(N, 1), InFlag); } ReplaceUses(SDValue(N, 0), SDValue(Chain.getNode(), Chain.getResNo())); CurDAG->RemoveDeadNode(N); return; } case ARMISD::CMPZ: { // select (CMPZ X, #-C) -> (CMPZ (ADDS X, #C), #0) // This allows us to avoid materializing the expensive negative constant. // The CMPZ #0 is useless and will be peepholed away but we need to keep it // for its glue output. SDValue X = N->getOperand(0); auto *C = dyn_cast<ConstantSDNode>(N->getOperand(1).getNode()); if (C && C->getSExtValue() < 0 && Subtarget->isThumb()) { int64_t Addend = -C->getSExtValue(); SDNode *Add = nullptr; // ADDS can be better than CMN if the immediate fits in a // 16-bit ADDS, which means either [0,256) for tADDi8 or [0,8) for tADDi3. // Outside that range we can just use a CMN which is 32-bit but has a // 12-bit immediate range. 
if (Addend < 1<<8) { if (Subtarget->isThumb2()) { SDValue Ops[] = { X, CurDAG->getTargetConstant(Addend, dl, MVT::i32), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), CurDAG->getRegister(0, MVT::i32) }; Add = CurDAG->getMachineNode(ARM::t2ADDri, dl, MVT::i32, Ops); } else { unsigned Opc = (Addend < 1<<3) ? ARM::tADDi3 : ARM::tADDi8; SDValue Ops[] = {CurDAG->getRegister(ARM::CPSR, MVT::i32), X, CurDAG->getTargetConstant(Addend, dl, MVT::i32), getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32)}; Add = CurDAG->getMachineNode(Opc, dl, MVT::i32, Ops); } } if (Add) { SDValue Ops2[] = {SDValue(Add, 0), CurDAG->getConstant(0, dl, MVT::i32)}; CurDAG->MorphNodeTo(N, ARMISD::CMPZ, CurDAG->getVTList(MVT::Glue), Ops2); } } // Other cases are autogenerated. break; } case ARMISD::CMOV: { SDValue InFlag = N->getOperand(4); if (InFlag.getOpcode() == ARMISD::CMPZ) { bool SwitchEQNEToPLMI; SelectCMPZ(InFlag.getNode(), SwitchEQNEToPLMI); if (SwitchEQNEToPLMI) { SDValue ARMcc = N->getOperand(2); ARMCC::CondCodes CC = (ARMCC::CondCodes)cast<ConstantSDNode>(ARMcc)->getZExtValue(); switch (CC) { default: llvm_unreachable("CMPZ must be either NE or EQ!"); case ARMCC::NE: CC = ARMCC::MI; break; case ARMCC::EQ: CC = ARMCC::PL; break; } SDValue NewARMcc = CurDAG->getConstant((unsigned)CC, dl, MVT::i32); SDValue Ops[] = {N->getOperand(0), N->getOperand(1), NewARMcc, N->getOperand(3), N->getOperand(4)}; CurDAG->MorphNodeTo(N, ARMISD::CMOV, N->getVTList(), Ops); } } // Other cases are autogenerated. break; } case ARMISD::VZIP: { unsigned Opc = 0; EVT VT = N->getValueType(0); switch (VT.getSimpleVT().SimpleTy) { default: return; case MVT::v8i8: Opc = ARM::VZIPd8; break; case MVT::v4f16: case MVT::v4i16: Opc = ARM::VZIPd16; break; case MVT::v2f32: // vzip.32 Dd, Dm is a pseudo-instruction expanded to vtrn.32 Dd, Dm. 
case MVT::v2i32: Opc = ARM::VTRNd32; break; case MVT::v16i8: Opc = ARM::VZIPq8; break; case MVT::v8f16: case MVT::v8i16: Opc = ARM::VZIPq16; break; case MVT::v4f32: case MVT::v4i32: Opc = ARM::VZIPq32; break; } SDValue Pred = getAL(CurDAG, dl); SDValue PredReg = CurDAG->getRegister(0, MVT::i32); SDValue Ops[] = { N->getOperand(0), N->getOperand(1), Pred, PredReg }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, VT, VT, Ops)); return; } case ARMISD::VUZP: { unsigned Opc = 0; EVT VT = N->getValueType(0); switch (VT.getSimpleVT().SimpleTy) { default: return; case MVT::v8i8: Opc = ARM::VUZPd8; break; case MVT::v4f16: case MVT::v4i16: Opc = ARM::VUZPd16; break; case MVT::v2f32: // vuzp.32 Dd, Dm is a pseudo-instruction expanded to vtrn.32 Dd, Dm. case MVT::v2i32: Opc = ARM::VTRNd32; break; case MVT::v16i8: Opc = ARM::VUZPq8; break; case MVT::v8f16: case MVT::v8i16: Opc = ARM::VUZPq16; break; case MVT::v4f32: case MVT::v4i32: Opc = ARM::VUZPq32; break; } SDValue Pred = getAL(CurDAG, dl); SDValue PredReg = CurDAG->getRegister(0, MVT::i32); SDValue Ops[] = { N->getOperand(0), N->getOperand(1), Pred, PredReg }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, VT, VT, Ops)); return; } case ARMISD::VTRN: { unsigned Opc = 0; EVT VT = N->getValueType(0); switch (VT.getSimpleVT().SimpleTy) { default: return; case MVT::v8i8: Opc = ARM::VTRNd8; break; case MVT::v4f16: case MVT::v4i16: Opc = ARM::VTRNd16; break; case MVT::v2f32: case MVT::v2i32: Opc = ARM::VTRNd32; break; case MVT::v16i8: Opc = ARM::VTRNq8; break; case MVT::v8f16: case MVT::v8i16: Opc = ARM::VTRNq16; break; case MVT::v4f32: case MVT::v4i32: Opc = ARM::VTRNq32; break; } SDValue Pred = getAL(CurDAG, dl); SDValue PredReg = CurDAG->getRegister(0, MVT::i32); SDValue Ops[] = { N->getOperand(0), N->getOperand(1), Pred, PredReg }; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, VT, VT, Ops)); return; } case ARMISD::BUILD_VECTOR: { EVT VecVT = N->getValueType(0); EVT EltVT = VecVT.getVectorElementType(); unsigned NumElts = 
VecVT.getVectorNumElements(); if (EltVT == MVT::f64) { assert(NumElts == 2 && "unexpected type for BUILD_VECTOR"); ReplaceNode( N, createDRegPairNode(VecVT, N->getOperand(0), N->getOperand(1))); return; } assert(EltVT == MVT::f32 && "unexpected type for BUILD_VECTOR"); if (NumElts == 2) { ReplaceNode( N, createSRegPairNode(VecVT, N->getOperand(0), N->getOperand(1))); return; } assert(NumElts == 4 && "unexpected type for BUILD_VECTOR"); ReplaceNode(N, createQuadSRegsNode(VecVT, N->getOperand(0), N->getOperand(1), N->getOperand(2), N->getOperand(3))); return; } case ARMISD::VLD1DUP: { static const uint16_t DOpcodes[] = { ARM::VLD1DUPd8, ARM::VLD1DUPd16, ARM::VLD1DUPd32 }; static const uint16_t QOpcodes[] = { ARM::VLD1DUPq8, ARM::VLD1DUPq16, ARM::VLD1DUPq32 }; SelectVLDDup(N, /* IsIntrinsic= */ false, false, 1, DOpcodes, QOpcodes); return; } case ARMISD::VLD2DUP: { static const uint16_t Opcodes[] = { ARM::VLD2DUPd8, ARM::VLD2DUPd16, ARM::VLD2DUPd32 }; SelectVLDDup(N, /* IsIntrinsic= */ false, false, 2, Opcodes); return; } case ARMISD::VLD3DUP: { static const uint16_t Opcodes[] = { ARM::VLD3DUPd8Pseudo, ARM::VLD3DUPd16Pseudo, ARM::VLD3DUPd32Pseudo }; SelectVLDDup(N, /* IsIntrinsic= */ false, false, 3, Opcodes); return; } case ARMISD::VLD4DUP: { static const uint16_t Opcodes[] = { ARM::VLD4DUPd8Pseudo, ARM::VLD4DUPd16Pseudo, ARM::VLD4DUPd32Pseudo }; SelectVLDDup(N, /* IsIntrinsic= */ false, false, 4, Opcodes); return; } case ARMISD::VLD1DUP_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD1DUPd8wb_fixed, ARM::VLD1DUPd16wb_fixed, ARM::VLD1DUPd32wb_fixed }; static const uint16_t QOpcodes[] = { ARM::VLD1DUPq8wb_fixed, ARM::VLD1DUPq16wb_fixed, ARM::VLD1DUPq32wb_fixed }; SelectVLDDup(N, /* IsIntrinsic= */ false, true, 1, DOpcodes, QOpcodes); return; } case ARMISD::VLD2DUP_UPD: { static const uint16_t Opcodes[] = { ARM::VLD2DUPd8wb_fixed, ARM::VLD2DUPd16wb_fixed, ARM::VLD2DUPd32wb_fixed }; SelectVLDDup(N, /* IsIntrinsic= */ false, true, 2, Opcodes); return; } case 
ARMISD::VLD3DUP_UPD: { static const uint16_t Opcodes[] = { ARM::VLD3DUPd8Pseudo_UPD, ARM::VLD3DUPd16Pseudo_UPD, ARM::VLD3DUPd32Pseudo_UPD }; SelectVLDDup(N, /* IsIntrinsic= */ false, true, 3, Opcodes); return; } case ARMISD::VLD4DUP_UPD: { static const uint16_t Opcodes[] = { ARM::VLD4DUPd8Pseudo_UPD, ARM::VLD4DUPd16Pseudo_UPD, ARM::VLD4DUPd32Pseudo_UPD }; SelectVLDDup(N, /* IsIntrinsic= */ false, true, 4, Opcodes); return; } case ARMISD::VLD1_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD1d8wb_fixed, ARM::VLD1d16wb_fixed, ARM::VLD1d32wb_fixed, ARM::VLD1d64wb_fixed }; static const uint16_t QOpcodes[] = { ARM::VLD1q8wb_fixed, ARM::VLD1q16wb_fixed, ARM::VLD1q32wb_fixed, ARM::VLD1q64wb_fixed }; SelectVLD(N, true, 1, DOpcodes, QOpcodes, nullptr); return; } case ARMISD::VLD2_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD2d8wb_fixed, ARM::VLD2d16wb_fixed, ARM::VLD2d32wb_fixed, ARM::VLD1q64wb_fixed}; static const uint16_t QOpcodes[] = { ARM::VLD2q8PseudoWB_fixed, ARM::VLD2q16PseudoWB_fixed, ARM::VLD2q32PseudoWB_fixed }; SelectVLD(N, true, 2, DOpcodes, QOpcodes, nullptr); return; } case ARMISD::VLD3_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD3d8Pseudo_UPD, ARM::VLD3d16Pseudo_UPD, ARM::VLD3d32Pseudo_UPD, ARM::VLD1d64TPseudoWB_fixed}; static const uint16_t QOpcodes0[] = { ARM::VLD3q8Pseudo_UPD, ARM::VLD3q16Pseudo_UPD, ARM::VLD3q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD3q8oddPseudo_UPD, ARM::VLD3q16oddPseudo_UPD, ARM::VLD3q32oddPseudo_UPD }; SelectVLD(N, true, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case ARMISD::VLD4_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD4d8Pseudo_UPD, ARM::VLD4d16Pseudo_UPD, ARM::VLD4d32Pseudo_UPD, ARM::VLD1d64QPseudoWB_fixed}; static const uint16_t QOpcodes0[] = { ARM::VLD4q8Pseudo_UPD, ARM::VLD4q16Pseudo_UPD, ARM::VLD4q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD4q8oddPseudo_UPD, ARM::VLD4q16oddPseudo_UPD, ARM::VLD4q32oddPseudo_UPD }; SelectVLD(N, true, 4, DOpcodes, QOpcodes0, 
QOpcodes1); return; } case ARMISD::VLD2LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD2LNd8Pseudo_UPD, ARM::VLD2LNd16Pseudo_UPD, ARM::VLD2LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VLD2LNq16Pseudo_UPD, ARM::VLD2LNq32Pseudo_UPD }; SelectVLDSTLane(N, true, true, 2, DOpcodes, QOpcodes); return; } case ARMISD::VLD3LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD3LNd8Pseudo_UPD, ARM::VLD3LNd16Pseudo_UPD, ARM::VLD3LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VLD3LNq16Pseudo_UPD, ARM::VLD3LNq32Pseudo_UPD }; SelectVLDSTLane(N, true, true, 3, DOpcodes, QOpcodes); return; } case ARMISD::VLD4LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VLD4LNd8Pseudo_UPD, ARM::VLD4LNd16Pseudo_UPD, ARM::VLD4LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VLD4LNq16Pseudo_UPD, ARM::VLD4LNq32Pseudo_UPD }; SelectVLDSTLane(N, true, true, 4, DOpcodes, QOpcodes); return; } case ARMISD::VST1_UPD: { static const uint16_t DOpcodes[] = { ARM::VST1d8wb_fixed, ARM::VST1d16wb_fixed, ARM::VST1d32wb_fixed, ARM::VST1d64wb_fixed }; static const uint16_t QOpcodes[] = { ARM::VST1q8wb_fixed, ARM::VST1q16wb_fixed, ARM::VST1q32wb_fixed, ARM::VST1q64wb_fixed }; SelectVST(N, true, 1, DOpcodes, QOpcodes, nullptr); return; } case ARMISD::VST2_UPD: { static const uint16_t DOpcodes[] = { ARM::VST2d8wb_fixed, ARM::VST2d16wb_fixed, ARM::VST2d32wb_fixed, ARM::VST1q64wb_fixed}; static const uint16_t QOpcodes[] = { ARM::VST2q8PseudoWB_fixed, ARM::VST2q16PseudoWB_fixed, ARM::VST2q32PseudoWB_fixed }; SelectVST(N, true, 2, DOpcodes, QOpcodes, nullptr); return; } case ARMISD::VST3_UPD: { static const uint16_t DOpcodes[] = { ARM::VST3d8Pseudo_UPD, ARM::VST3d16Pseudo_UPD, ARM::VST3d32Pseudo_UPD, ARM::VST1d64TPseudoWB_fixed}; static const uint16_t QOpcodes0[] = { ARM::VST3q8Pseudo_UPD, ARM::VST3q16Pseudo_UPD, ARM::VST3q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST3q8oddPseudo_UPD, ARM::VST3q16oddPseudo_UPD, ARM::VST3q32oddPseudo_UPD }; SelectVST(N, 
true, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case ARMISD::VST4_UPD: { static const uint16_t DOpcodes[] = { ARM::VST4d8Pseudo_UPD, ARM::VST4d16Pseudo_UPD, ARM::VST4d32Pseudo_UPD, ARM::VST1d64QPseudoWB_fixed}; static const uint16_t QOpcodes0[] = { ARM::VST4q8Pseudo_UPD, ARM::VST4q16Pseudo_UPD, ARM::VST4q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST4q8oddPseudo_UPD, ARM::VST4q16oddPseudo_UPD, ARM::VST4q32oddPseudo_UPD }; SelectVST(N, true, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case ARMISD::VST2LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VST2LNd8Pseudo_UPD, ARM::VST2LNd16Pseudo_UPD, ARM::VST2LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VST2LNq16Pseudo_UPD, ARM::VST2LNq32Pseudo_UPD }; SelectVLDSTLane(N, false, true, 2, DOpcodes, QOpcodes); return; } case ARMISD::VST3LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VST3LNd8Pseudo_UPD, ARM::VST3LNd16Pseudo_UPD, ARM::VST3LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VST3LNq16Pseudo_UPD, ARM::VST3LNq32Pseudo_UPD }; SelectVLDSTLane(N, false, true, 3, DOpcodes, QOpcodes); return; } case ARMISD::VST4LN_UPD: { static const uint16_t DOpcodes[] = { ARM::VST4LNd8Pseudo_UPD, ARM::VST4LNd16Pseudo_UPD, ARM::VST4LNd32Pseudo_UPD }; static const uint16_t QOpcodes[] = { ARM::VST4LNq16Pseudo_UPD, ARM::VST4LNq32Pseudo_UPD }; SelectVLDSTLane(N, false, true, 4, DOpcodes, QOpcodes); return; } case ISD::INTRINSIC_VOID: case ISD::INTRINSIC_W_CHAIN: { unsigned IntNo = cast<ConstantSDNode>(N->getOperand(1))->getZExtValue(); switch (IntNo) { default: break; case Intrinsic::arm_mrrc: case Intrinsic::arm_mrrc2: { SDLoc dl(N); SDValue Chain = N->getOperand(0); unsigned Opc; if (Subtarget->isThumb()) Opc = (IntNo == Intrinsic::arm_mrrc ? ARM::t2MRRC : ARM::t2MRRC2); else Opc = (IntNo == Intrinsic::arm_mrrc ? 
ARM::MRRC : ARM::MRRC2); SmallVector<SDValue, 5> Ops; Ops.push_back(getI32Imm(cast<ConstantSDNode>(N->getOperand(2))->getZExtValue(), dl)); /* coproc */ Ops.push_back(getI32Imm(cast<ConstantSDNode>(N->getOperand(3))->getZExtValue(), dl)); /* opc */ Ops.push_back(getI32Imm(cast<ConstantSDNode>(N->getOperand(4))->getZExtValue(), dl)); /* CRm */ // The mrrc2 instruction in ARM doesn't allow predicates, the top 4 bits of the encoded // instruction will always be '1111' but it is possible in assembly language to specify // AL as a predicate to mrrc2 but it doesn't make any difference to the encoded instruction. if (Opc != ARM::MRRC2) { Ops.push_back(getAL(CurDAG, dl)); Ops.push_back(CurDAG->getRegister(0, MVT::i32)); } Ops.push_back(Chain); // Writes to two registers. const EVT RetType[] = {MVT::i32, MVT::i32, MVT::Other}; ReplaceNode(N, CurDAG->getMachineNode(Opc, dl, RetType, Ops)); return; } case Intrinsic::arm_ldaexd: case Intrinsic::arm_ldrexd: { SDLoc dl(N); SDValue Chain = N->getOperand(0); SDValue MemAddr = N->getOperand(2); bool isThumb = Subtarget->isThumb() && Subtarget->hasV8MBaselineOps(); bool IsAcquire = IntNo == Intrinsic::arm_ldaexd; unsigned NewOpc = isThumb ? (IsAcquire ? ARM::t2LDAEXD : ARM::t2LDREXD) : (IsAcquire ? ARM::LDAEXD : ARM::LDREXD); // arm_ldrexd returns a i64 value in {i32, i32} std::vector<EVT> ResTys; if (isThumb) { ResTys.push_back(MVT::i32); ResTys.push_back(MVT::i32); } else ResTys.push_back(MVT::Untyped); ResTys.push_back(MVT::Other); // Place arguments in the right order. SDValue Ops[] = {MemAddr, getAL(CurDAG, dl), CurDAG->getRegister(0, MVT::i32), Chain}; SDNode *Ld = CurDAG->getMachineNode(NewOpc, dl, ResTys, Ops); // Transfer memoperands. MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand(); CurDAG->setNodeMemRefs(cast<MachineSDNode>(Ld), {MemOp}); // Remap uses. SDValue OutChain = isThumb ? 
SDValue(Ld, 2) : SDValue(Ld, 1); if (!SDValue(N, 0).use_empty()) { SDValue Result; if (isThumb) Result = SDValue(Ld, 0); else { SDValue SubRegIdx = CurDAG->getTargetConstant(ARM::gsub_0, dl, MVT::i32); SDNode *ResNode = CurDAG->getMachineNode(TargetOpcode::EXTRACT_SUBREG, dl, MVT::i32, SDValue(Ld, 0), SubRegIdx); Result = SDValue(ResNode,0); } ReplaceUses(SDValue(N, 0), Result); } if (!SDValue(N, 1).use_empty()) { SDValue Result; if (isThumb) Result = SDValue(Ld, 1); else { SDValue SubRegIdx = CurDAG->getTargetConstant(ARM::gsub_1, dl, MVT::i32); SDNode *ResNode = CurDAG->getMachineNode(TargetOpcode::EXTRACT_SUBREG, dl, MVT::i32, SDValue(Ld, 0), SubRegIdx); Result = SDValue(ResNode,0); } ReplaceUses(SDValue(N, 1), Result); } ReplaceUses(SDValue(N, 2), OutChain); CurDAG->RemoveDeadNode(N); return; } case Intrinsic::arm_stlexd: case Intrinsic::arm_strexd: { SDLoc dl(N); SDValue Chain = N->getOperand(0); SDValue Val0 = N->getOperand(2); SDValue Val1 = N->getOperand(3); SDValue MemAddr = N->getOperand(4); // Store exclusive double return a i32 value which is the return status // of the issued store. const EVT ResTys[] = {MVT::i32, MVT::Other}; bool isThumb = Subtarget->isThumb() && Subtarget->hasThumb2(); // Place arguments in the right order. SmallVector<SDValue, 7> Ops; if (isThumb) { Ops.push_back(Val0); Ops.push_back(Val1); } else // arm_strexd uses GPRPair. Ops.push_back(SDValue(createGPRPairNode(MVT::Untyped, Val0, Val1), 0)); Ops.push_back(MemAddr); Ops.push_back(getAL(CurDAG, dl)); Ops.push_back(CurDAG->getRegister(0, MVT::i32)); Ops.push_back(Chain); bool IsRelease = IntNo == Intrinsic::arm_stlexd; unsigned NewOpc = isThumb ? (IsRelease ? ARM::t2STLEXD : ARM::t2STREXD) : (IsRelease ? ARM::STLEXD : ARM::STREXD); SDNode *St = CurDAG->getMachineNode(NewOpc, dl, ResTys, Ops); // Transfer memoperands. 
MachineMemOperand *MemOp = cast<MemIntrinsicSDNode>(N)->getMemOperand(); CurDAG->setNodeMemRefs(cast<MachineSDNode>(St), {MemOp}); ReplaceNode(N, St); return; } case Intrinsic::arm_neon_vld1: { static const uint16_t DOpcodes[] = { ARM::VLD1d8, ARM::VLD1d16, ARM::VLD1d32, ARM::VLD1d64 }; static const uint16_t QOpcodes[] = { ARM::VLD1q8, ARM::VLD1q16, ARM::VLD1q32, ARM::VLD1q64}; SelectVLD(N, false, 1, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vld1x2: { static const uint16_t DOpcodes[] = { ARM::VLD1q8, ARM::VLD1q16, ARM::VLD1q32, ARM::VLD1q64 }; static const uint16_t QOpcodes[] = { ARM::VLD1d8QPseudo, ARM::VLD1d16QPseudo, ARM::VLD1d32QPseudo, ARM::VLD1d64QPseudo }; SelectVLD(N, false, 2, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vld1x3: { static const uint16_t DOpcodes[] = { ARM::VLD1d8TPseudo, ARM::VLD1d16TPseudo, ARM::VLD1d32TPseudo, ARM::VLD1d64TPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD1q8LowTPseudo_UPD, ARM::VLD1q16LowTPseudo_UPD, ARM::VLD1q32LowTPseudo_UPD, ARM::VLD1q64LowTPseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD1q8HighTPseudo, ARM::VLD1q16HighTPseudo, ARM::VLD1q32HighTPseudo, ARM::VLD1q64HighTPseudo }; SelectVLD(N, false, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld1x4: { static const uint16_t DOpcodes[] = { ARM::VLD1d8QPseudo, ARM::VLD1d16QPseudo, ARM::VLD1d32QPseudo, ARM::VLD1d64QPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD1q8LowQPseudo_UPD, ARM::VLD1q16LowQPseudo_UPD, ARM::VLD1q32LowQPseudo_UPD, ARM::VLD1q64LowQPseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD1q8HighQPseudo, ARM::VLD1q16HighQPseudo, ARM::VLD1q32HighQPseudo, ARM::VLD1q64HighQPseudo }; SelectVLD(N, false, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld2: { static const uint16_t DOpcodes[] = { ARM::VLD2d8, ARM::VLD2d16, ARM::VLD2d32, ARM::VLD1q64 }; static const uint16_t QOpcodes[] = { ARM::VLD2q8Pseudo, ARM::VLD2q16Pseudo, 
ARM::VLD2q32Pseudo }; SelectVLD(N, false, 2, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vld3: { static const uint16_t DOpcodes[] = { ARM::VLD3d8Pseudo, ARM::VLD3d16Pseudo, ARM::VLD3d32Pseudo, ARM::VLD1d64TPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD3q8Pseudo_UPD, ARM::VLD3q16Pseudo_UPD, ARM::VLD3q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD3q8oddPseudo, ARM::VLD3q16oddPseudo, ARM::VLD3q32oddPseudo }; SelectVLD(N, false, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld4: { static const uint16_t DOpcodes[] = { ARM::VLD4d8Pseudo, ARM::VLD4d16Pseudo, ARM::VLD4d32Pseudo, ARM::VLD1d64QPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD4q8Pseudo_UPD, ARM::VLD4q16Pseudo_UPD, ARM::VLD4q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VLD4q8oddPseudo, ARM::VLD4q16oddPseudo, ARM::VLD4q32oddPseudo }; SelectVLD(N, false, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld2dup: { static const uint16_t DOpcodes[] = { ARM::VLD2DUPd8, ARM::VLD2DUPd16, ARM::VLD2DUPd32, ARM::VLD1q64 }; static const uint16_t QOpcodes0[] = { ARM::VLD2DUPq8EvenPseudo, ARM::VLD2DUPq16EvenPseudo, ARM::VLD2DUPq32EvenPseudo }; static const uint16_t QOpcodes1[] = { ARM::VLD2DUPq8OddPseudo, ARM::VLD2DUPq16OddPseudo, ARM::VLD2DUPq32OddPseudo }; SelectVLDDup(N, /* IsIntrinsic= */ true, false, 2, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld3dup: { static const uint16_t DOpcodes[] = { ARM::VLD3DUPd8Pseudo, ARM::VLD3DUPd16Pseudo, ARM::VLD3DUPd32Pseudo, ARM::VLD1d64TPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD3DUPq8EvenPseudo, ARM::VLD3DUPq16EvenPseudo, ARM::VLD3DUPq32EvenPseudo }; static const uint16_t QOpcodes1[] = { ARM::VLD3DUPq8OddPseudo, ARM::VLD3DUPq16OddPseudo, ARM::VLD3DUPq32OddPseudo }; SelectVLDDup(N, /* IsIntrinsic= */ true, false, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld4dup: { static const uint16_t DOpcodes[] = { 
ARM::VLD4DUPd8Pseudo, ARM::VLD4DUPd16Pseudo, ARM::VLD4DUPd32Pseudo, ARM::VLD1d64QPseudo }; static const uint16_t QOpcodes0[] = { ARM::VLD4DUPq8EvenPseudo, ARM::VLD4DUPq16EvenPseudo, ARM::VLD4DUPq32EvenPseudo }; static const uint16_t QOpcodes1[] = { ARM::VLD4DUPq8OddPseudo, ARM::VLD4DUPq16OddPseudo, ARM::VLD4DUPq32OddPseudo }; SelectVLDDup(N, /* IsIntrinsic= */ true, false, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vld2lane: { static const uint16_t DOpcodes[] = { ARM::VLD2LNd8Pseudo, ARM::VLD2LNd16Pseudo, ARM::VLD2LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VLD2LNq16Pseudo, ARM::VLD2LNq32Pseudo }; SelectVLDSTLane(N, true, false, 2, DOpcodes, QOpcodes); return; } case Intrinsic::arm_neon_vld3lane: { static const uint16_t DOpcodes[] = { ARM::VLD3LNd8Pseudo, ARM::VLD3LNd16Pseudo, ARM::VLD3LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VLD3LNq16Pseudo, ARM::VLD3LNq32Pseudo }; SelectVLDSTLane(N, true, false, 3, DOpcodes, QOpcodes); return; } case Intrinsic::arm_neon_vld4lane: { static const uint16_t DOpcodes[] = { ARM::VLD4LNd8Pseudo, ARM::VLD4LNd16Pseudo, ARM::VLD4LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VLD4LNq16Pseudo, ARM::VLD4LNq32Pseudo }; SelectVLDSTLane(N, true, false, 4, DOpcodes, QOpcodes); return; } case Intrinsic::arm_neon_vst1: { static const uint16_t DOpcodes[] = { ARM::VST1d8, ARM::VST1d16, ARM::VST1d32, ARM::VST1d64 }; static const uint16_t QOpcodes[] = { ARM::VST1q8, ARM::VST1q16, ARM::VST1q32, ARM::VST1q64 }; SelectVST(N, false, 1, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vst1x2: { static const uint16_t DOpcodes[] = { ARM::VST1q8, ARM::VST1q16, ARM::VST1q32, ARM::VST1q64 }; static const uint16_t QOpcodes[] = { ARM::VST1d8QPseudo, ARM::VST1d16QPseudo, ARM::VST1d32QPseudo, ARM::VST1d64QPseudo }; SelectVST(N, false, 2, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vst1x3: { static const uint16_t DOpcodes[] = { ARM::VST1d8TPseudo, 
ARM::VST1d16TPseudo, ARM::VST1d32TPseudo, ARM::VST1d64TPseudo }; static const uint16_t QOpcodes0[] = { ARM::VST1q8LowTPseudo_UPD, ARM::VST1q16LowTPseudo_UPD, ARM::VST1q32LowTPseudo_UPD, ARM::VST1q64LowTPseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST1q8HighTPseudo, ARM::VST1q16HighTPseudo, ARM::VST1q32HighTPseudo, ARM::VST1q64HighTPseudo }; SelectVST(N, false, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vst1x4: { static const uint16_t DOpcodes[] = { ARM::VST1d8QPseudo, ARM::VST1d16QPseudo, ARM::VST1d32QPseudo, ARM::VST1d64QPseudo }; static const uint16_t QOpcodes0[] = { ARM::VST1q8LowQPseudo_UPD, ARM::VST1q16LowQPseudo_UPD, ARM::VST1q32LowQPseudo_UPD, ARM::VST1q64LowQPseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST1q8HighQPseudo, ARM::VST1q16HighQPseudo, ARM::VST1q32HighQPseudo, ARM::VST1q64HighQPseudo }; SelectVST(N, false, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vst2: { static const uint16_t DOpcodes[] = { ARM::VST2d8, ARM::VST2d16, ARM::VST2d32, ARM::VST1q64 }; static const uint16_t QOpcodes[] = { ARM::VST2q8Pseudo, ARM::VST2q16Pseudo, ARM::VST2q32Pseudo }; SelectVST(N, false, 2, DOpcodes, QOpcodes, nullptr); return; } case Intrinsic::arm_neon_vst3: { static const uint16_t DOpcodes[] = { ARM::VST3d8Pseudo, ARM::VST3d16Pseudo, ARM::VST3d32Pseudo, ARM::VST1d64TPseudo }; static const uint16_t QOpcodes0[] = { ARM::VST3q8Pseudo_UPD, ARM::VST3q16Pseudo_UPD, ARM::VST3q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST3q8oddPseudo, ARM::VST3q16oddPseudo, ARM::VST3q32oddPseudo }; SelectVST(N, false, 3, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vst4: { static const uint16_t DOpcodes[] = { ARM::VST4d8Pseudo, ARM::VST4d16Pseudo, ARM::VST4d32Pseudo, ARM::VST1d64QPseudo }; static const uint16_t QOpcodes0[] = { ARM::VST4q8Pseudo_UPD, ARM::VST4q16Pseudo_UPD, ARM::VST4q32Pseudo_UPD }; static const uint16_t QOpcodes1[] = { ARM::VST4q8oddPseudo, 
ARM::VST4q16oddPseudo, ARM::VST4q32oddPseudo }; SelectVST(N, false, 4, DOpcodes, QOpcodes0, QOpcodes1); return; } case Intrinsic::arm_neon_vst2lane: { static const uint16_t DOpcodes[] = { ARM::VST2LNd8Pseudo, ARM::VST2LNd16Pseudo, ARM::VST2LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VST2LNq16Pseudo, ARM::VST2LNq32Pseudo }; SelectVLDSTLane(N, false, false, 2, DOpcodes, QOpcodes); return; } case Intrinsic::arm_neon_vst3lane: { static const uint16_t DOpcodes[] = { ARM::VST3LNd8Pseudo, ARM::VST3LNd16Pseudo, ARM::VST3LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VST3LNq16Pseudo, ARM::VST3LNq32Pseudo }; SelectVLDSTLane(N, false, false, 3, DOpcodes, QOpcodes); return; } case Intrinsic::arm_neon_vst4lane: { static const uint16_t DOpcodes[] = { ARM::VST4LNd8Pseudo, ARM::VST4LNd16Pseudo, ARM::VST4LNd32Pseudo }; static const uint16_t QOpcodes[] = { ARM::VST4LNq16Pseudo, ARM::VST4LNq32Pseudo }; SelectVLDSTLane(N, false, false, 4, DOpcodes, QOpcodes); return; } } break; } case ISD::ATOMIC_CMP_SWAP: SelectCMP_SWAP(N); return; } SelectCode(N); } // Inspect a register string of the form // cp<coprocessor>:<opc1>:c<CRn>:c<CRm>:<opc2> (32bit) or // cp<coprocessor>:<opc1>:c<CRm> (64bit) inspect the fields of the string // and obtain the integer operands from them, adding these operands to the // provided vector. static void getIntOperandsFromRegisterString(StringRef RegString, SelectionDAG *CurDAG, const SDLoc &DL, std::vector<SDValue> &Ops) { SmallVector<StringRef, 5> Fields; RegString.split(Fields, ':'); if (Fields.size() > 1) { bool AllIntFields = true; for (StringRef Field : Fields) { // Need to trim out leading 'cp' characters and get the integer field. unsigned IntField; AllIntFields &= !Field.trim("CPcp").getAsInteger(10, IntField); Ops.push_back(CurDAG->getTargetConstant(IntField, DL, MVT::i32)); } assert(AllIntFields && "Unexpected non-integer value in special register string."); } } // Maps a Banked Register string to its mask value. 
The mask value returned is // for use in the MRSbanked / MSRbanked instruction nodes as the Banked Register // mask operand, which expresses which register is to be used, e.g. r8, and in // which mode it is to be used, e.g. usr. Returns -1 to signify that the string // was invalid. static inline int getBankedRegisterMask(StringRef RegString) { auto TheReg = ARMBankedReg::lookupBankedRegByName(RegString.lower()); if (!TheReg) return -1; return TheReg->Encoding; } // The flags here are common to those allowed for apsr in the A class cores and // those allowed for the special registers in the M class cores. Returns a // value representing which flags were present, -1 if invalid. static inline int getMClassFlagsMask(StringRef Flags) { return StringSwitch<int>(Flags) .Case("", 0x2) // no flags means nzcvq for psr registers, and 0x2 is // correct when flags are not permitted .Case("g", 0x1) .Case("nzcvq", 0x2) .Case("nzcvqg", 0x3) .Default(-1); } // Maps MClass special registers string to its value for use in the // t2MRS_M/t2MSR_M instruction nodes as the SYSm value operand. // Returns -1 to signify that the string was invalid. static int getMClassRegisterMask(StringRef Reg, const ARMSubtarget *Subtarget) { auto TheReg = ARMSysReg::lookupMClassSysRegByName(Reg); const FeatureBitset &FeatureBits = Subtarget->getFeatureBits(); if (!TheReg || !TheReg->hasRequiredFeatures(FeatureBits)) return -1; return (int)(TheReg->Encoding & 0xFFF); // SYSm value } static int getARClassRegisterMask(StringRef Reg, StringRef Flags) { // The mask operand contains the special register (R Bit) in bit 4, whether // the register is spsr (R bit is 1) or one of cpsr/apsr (R bit is 0), and // bits 3-0 contains the fields to be accessed in the special register, set by // the flags provided with the register. int Mask = 0; if (Reg == "apsr") { // The flags permitted for apsr are the same flags that are allowed in // M class registers. 
We get the flag value and then shift the flags into // the correct place to combine with the mask. Mask = getMClassFlagsMask(Flags); if (Mask == -1) return -1; return Mask << 2; } if (Reg != "cpsr" && Reg != "spsr") { return -1; } // This is the same as if the flags were "fc" if (Flags.empty() || Flags == "all") return Mask | 0x9; // Inspect the supplied flags string and set the bits in the mask for // the relevant and valid flags allowed for cpsr and spsr. for (char Flag : Flags) { int FlagVal; switch (Flag) { case 'c': FlagVal = 0x1; break; case 'x': FlagVal = 0x2; break; case 's': FlagVal = 0x4; break; case 'f': FlagVal = 0x8; break; default: FlagVal = 0; } // This avoids allowing strings where the same flag bit appears twice. if (!FlagVal || (Mask & FlagVal)) return -1; Mask |= FlagVal; } // If the register is spsr then we need to set the R bit. if (Reg == "spsr") Mask |= 0x10; return Mask; } // Lower the read_register intrinsic to ARM specific DAG nodes // using the supplied metadata string to select the instruction node to use // and the registers/masks to construct as operands for the node. bool ARMDAGToDAGISel::tryReadRegister(SDNode *N){ const MDNodeSDNode *MD = dyn_cast<MDNodeSDNode>(N->getOperand(1)); const MDString *RegString = dyn_cast<MDString>(MD->getMD()->getOperand(0)); bool IsThumb2 = Subtarget->isThumb2(); SDLoc DL(N); std::vector<SDValue> Ops; getIntOperandsFromRegisterString(RegString->getString(), CurDAG, DL, Ops); if (!Ops.empty()) { // If the special register string was constructed of fields (as defined // in the ACLE) then need to lower to MRC node (32 bit) or // MRRC node(64 bit), we can make the distinction based on the number of // operands we have. unsigned Opcode; SmallVector<EVT, 3> ResTypes; if (Ops.size() == 5){ Opcode = IsThumb2 ? ARM::t2MRC : ARM::MRC; ResTypes.append({ MVT::i32, MVT::Other }); } else { assert(Ops.size() == 3 && "Invalid number of fields in special register string."); Opcode = IsThumb2 ? 
ARM::t2MRRC : ARM::MRRC; ResTypes.append({ MVT::i32, MVT::i32, MVT::Other }); } Ops.push_back(getAL(CurDAG, DL)); Ops.push_back(CurDAG->getRegister(0, MVT::i32)); Ops.push_back(N->getOperand(0)); ReplaceNode(N, CurDAG->getMachineNode(Opcode, DL, ResTypes, Ops)); return true; } std::string SpecialReg = RegString->getString().lower(); int BankedReg = getBankedRegisterMask(SpecialReg); if (BankedReg != -1) { Ops = { CurDAG->getTargetConstant(BankedReg, DL, MVT::i32), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode( N, CurDAG->getMachineNode(IsThumb2 ? ARM::t2MRSbanked : ARM::MRSbanked, DL, MVT::i32, MVT::Other, Ops)); return true; } // The VFP registers are read by creating SelectionDAG nodes with opcodes // corresponding to the register that is being read from. So we switch on the // string to find which opcode we need to use. unsigned Opcode = StringSwitch<unsigned>(SpecialReg) .Case("fpscr", ARM::VMRS) .Case("fpexc", ARM::VMRS_FPEXC) .Case("fpsid", ARM::VMRS_FPSID) .Case("mvfr0", ARM::VMRS_MVFR0) .Case("mvfr1", ARM::VMRS_MVFR1) .Case("mvfr2", ARM::VMRS_MVFR2) .Case("fpinst", ARM::VMRS_FPINST) .Case("fpinst2", ARM::VMRS_FPINST2) .Default(0); // If an opcode was found then we can lower the read to a VFP instruction. if (Opcode) { if (!Subtarget->hasVFP2Base()) return false; if (Opcode == ARM::VMRS_MVFR2 && !Subtarget->hasFPARMv8Base()) return false; Ops = { getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode(N, CurDAG->getMachineNode(Opcode, DL, MVT::i32, MVT::Other, Ops)); return true; } // If the target is M Class then need to validate that the register string // is an acceptable value, so check that a mask can be constructed from the // string. 
if (Subtarget->isMClass()) { int SYSmValue = getMClassRegisterMask(SpecialReg, Subtarget); if (SYSmValue == -1) return false; SDValue Ops[] = { CurDAG->getTargetConstant(SYSmValue, DL, MVT::i32), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode( N, CurDAG->getMachineNode(ARM::t2MRS_M, DL, MVT::i32, MVT::Other, Ops)); return true; } // Here we know the target is not M Class so we need to check if it is one // of the remaining possible values which are apsr, cpsr or spsr. if (SpecialReg == "apsr" || SpecialReg == "cpsr") { Ops = { getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode(N, CurDAG->getMachineNode(IsThumb2 ? ARM::t2MRS_AR : ARM::MRS, DL, MVT::i32, MVT::Other, Ops)); return true; } if (SpecialReg == "spsr") { Ops = { getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode( N, CurDAG->getMachineNode(IsThumb2 ? ARM::t2MRSsys_AR : ARM::MRSsys, DL, MVT::i32, MVT::Other, Ops)); return true; } return false; } // Lower the write_register intrinsic to ARM specific DAG nodes // using the supplied metadata string to select the instruction node to use // and the registers/masks to use in the nodes bool ARMDAGToDAGISel::tryWriteRegister(SDNode *N){ const MDNodeSDNode *MD = dyn_cast<MDNodeSDNode>(N->getOperand(1)); const MDString *RegString = dyn_cast<MDString>(MD->getMD()->getOperand(0)); bool IsThumb2 = Subtarget->isThumb2(); SDLoc DL(N); std::vector<SDValue> Ops; getIntOperandsFromRegisterString(RegString->getString(), CurDAG, DL, Ops); if (!Ops.empty()) { // If the special register string was constructed of fields (as defined // in the ACLE) then need to lower to MCR node (32 bit) or // MCRR node(64 bit), we can make the distinction based on the number of // operands we have. unsigned Opcode; if (Ops.size() == 5) { Opcode = IsThumb2 ? 
ARM::t2MCR : ARM::MCR; Ops.insert(Ops.begin()+2, N->getOperand(2)); } else { assert(Ops.size() == 3 && "Invalid number of fields in special register string."); Opcode = IsThumb2 ? ARM::t2MCRR : ARM::MCRR; SDValue WriteValue[] = { N->getOperand(2), N->getOperand(3) }; Ops.insert(Ops.begin()+2, WriteValue, WriteValue+2); } Ops.push_back(getAL(CurDAG, DL)); Ops.push_back(CurDAG->getRegister(0, MVT::i32)); Ops.push_back(N->getOperand(0)); ReplaceNode(N, CurDAG->getMachineNode(Opcode, DL, MVT::Other, Ops)); return true; } std::string SpecialReg = RegString->getString().lower(); int BankedReg = getBankedRegisterMask(SpecialReg); if (BankedReg != -1) { Ops = { CurDAG->getTargetConstant(BankedReg, DL, MVT::i32), N->getOperand(2), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode( N, CurDAG->getMachineNode(IsThumb2 ? ARM::t2MSRbanked : ARM::MSRbanked, DL, MVT::Other, Ops)); return true; } // The VFP registers are written to by creating SelectionDAG nodes with // opcodes corresponding to the register that is being written. So we switch // on the string to find which opcode we need to use. unsigned Opcode = StringSwitch<unsigned>(SpecialReg) .Case("fpscr", ARM::VMSR) .Case("fpexc", ARM::VMSR_FPEXC) .Case("fpsid", ARM::VMSR_FPSID) .Case("fpinst", ARM::VMSR_FPINST) .Case("fpinst2", ARM::VMSR_FPINST2) .Default(0); if (Opcode) { if (!Subtarget->hasVFP2Base()) return false; Ops = { N->getOperand(2), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode(N, CurDAG->getMachineNode(Opcode, DL, MVT::Other, Ops)); return true; } std::pair<StringRef, StringRef> Fields; Fields = StringRef(SpecialReg).rsplit('_'); std::string Reg = Fields.first.str(); StringRef Flags = Fields.second; // If the target was M Class then need to validate the special register value // and retrieve the mask for use in the instruction node. 
if (Subtarget->isMClass()) { int SYSmValue = getMClassRegisterMask(SpecialReg, Subtarget); if (SYSmValue == -1) return false; SDValue Ops[] = { CurDAG->getTargetConstant(SYSmValue, DL, MVT::i32), N->getOperand(2), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode(N, CurDAG->getMachineNode(ARM::t2MSR_M, DL, MVT::Other, Ops)); return true; } // We then check to see if a valid mask can be constructed for one of the // register string values permitted for the A and R class cores. These values // are apsr, spsr and cpsr; these are also valid on older cores. int Mask = getARClassRegisterMask(Reg, Flags); if (Mask != -1) { Ops = { CurDAG->getTargetConstant(Mask, DL, MVT::i32), N->getOperand(2), getAL(CurDAG, DL), CurDAG->getRegister(0, MVT::i32), N->getOperand(0) }; ReplaceNode(N, CurDAG->getMachineNode(IsThumb2 ? ARM::t2MSR_AR : ARM::MSR, DL, MVT::Other, Ops)); return true; } return false; } bool ARMDAGToDAGISel::tryInlineAsm(SDNode *N){ std::vector<SDValue> AsmNodeOperands; unsigned Flag, Kind; bool Changed = false; unsigned NumOps = N->getNumOperands(); // Normally, i64 data is bounded to two arbitrary GRPs for "%r" constraint. // However, some instrstions (e.g. ldrexd/strexd in ARM mode) require // (even/even+1) GPRs and use %n and %Hn to refer to the individual regs // respectively. Since there is no constraint to explicitly specify a // reg pair, we use GPRPair reg class for "%r" for 64-bit data. For Thumb, // the 64-bit data may be referred by H, Q, R modifiers, so we still pack // them into a GPRPair. SDLoc dl(N); SDValue Glue = N->getGluedNode() ? N->getOperand(NumOps-1) : SDValue(nullptr,0); SmallVector<bool, 8> OpChanged; // Glue node will be appended late. for(unsigned i = 0, e = N->getGluedNode() ? 
NumOps - 1 : NumOps; i < e; ++i) { SDValue op = N->getOperand(i); AsmNodeOperands.push_back(op); if (i < InlineAsm::Op_FirstOperand) continue; if (ConstantSDNode *C = dyn_cast<ConstantSDNode>(N->getOperand(i))) { Flag = C->getZExtValue(); Kind = InlineAsm::getKind(Flag); } else continue; // Immediate operands to inline asm in the SelectionDAG are modeled with // two operands. The first is a constant of value InlineAsm::Kind_Imm, and // the second is a constant with the value of the immediate. If we get here // and we have a Kind_Imm, skip the next operand, and continue. if (Kind == InlineAsm::Kind_Imm) { SDValue op = N->getOperand(++i); AsmNodeOperands.push_back(op); continue; } unsigned NumRegs = InlineAsm::getNumOperandRegisters(Flag); if (NumRegs) OpChanged.push_back(false); unsigned DefIdx = 0; bool IsTiedToChangedOp = false; // If it's a use that is tied with a previous def, it has no // reg class constraint. if (Changed && InlineAsm::isUseOperandTiedToDef(Flag, DefIdx)) IsTiedToChangedOp = OpChanged[DefIdx]; // Memory operands to inline asm in the SelectionDAG are modeled with two // operands: a constant of value InlineAsm::Kind_Mem followed by the input // operand. If we get here and we have a Kind_Mem, skip the next operand (so // it doesn't get misinterpreted), and continue. We do this here because // it's important to update the OpChanged array correctly before moving on. 
if (Kind == InlineAsm::Kind_Mem) { SDValue op = N->getOperand(++i); AsmNodeOperands.push_back(op); continue; } if (Kind != InlineAsm::Kind_RegUse && Kind != InlineAsm::Kind_RegDef && Kind != InlineAsm::Kind_RegDefEarlyClobber) continue; unsigned RC; bool HasRC = InlineAsm::hasRegClassConstraint(Flag, RC); if ((!IsTiedToChangedOp && (!HasRC || RC != ARM::GPRRegClassID)) || NumRegs != 2) continue; assert((i+2 < NumOps) && "Invalid number of operands in inline asm"); SDValue V0 = N->getOperand(i+1); SDValue V1 = N->getOperand(i+2); unsigned Reg0 = cast<RegisterSDNode>(V0)->getReg(); unsigned Reg1 = cast<RegisterSDNode>(V1)->getReg(); SDValue PairedReg; MachineRegisterInfo &MRI = MF->getRegInfo(); if (Kind == InlineAsm::Kind_RegDef || Kind == InlineAsm::Kind_RegDefEarlyClobber) { // Replace the two GPRs with 1 GPRPair and copy values from GPRPair to // the original GPRs. unsigned GPVR = MRI.createVirtualRegister(&ARM::GPRPairRegClass); PairedReg = CurDAG->getRegister(GPVR, MVT::Untyped); SDValue Chain = SDValue(N,0); SDNode *GU = N->getGluedUser(); SDValue RegCopy = CurDAG->getCopyFromReg(Chain, dl, GPVR, MVT::Untyped, Chain.getValue(1)); // Extract values from a GPRPair reg and copy to the original GPR reg. SDValue Sub0 = CurDAG->getTargetExtractSubreg(ARM::gsub_0, dl, MVT::i32, RegCopy); SDValue Sub1 = CurDAG->getTargetExtractSubreg(ARM::gsub_1, dl, MVT::i32, RegCopy); SDValue T0 = CurDAG->getCopyToReg(Sub0, dl, Reg0, Sub0, RegCopy.getValue(1)); SDValue T1 = CurDAG->getCopyToReg(Sub1, dl, Reg1, Sub1, T0.getValue(1)); // Update the original glue user. std::vector<SDValue> Ops(GU->op_begin(), GU->op_end()-1); Ops.push_back(T1.getValue(1)); CurDAG->UpdateNodeOperands(GU, Ops); } else { // For Kind == InlineAsm::Kind_RegUse, we first copy two GPRs into a // GPRPair and then pass the GPRPair to the inline asm. SDValue Chain = AsmNodeOperands[InlineAsm::Op_InputChain]; // As REG_SEQ doesn't take RegisterSDNode, we copy them first. 
SDValue T0 = CurDAG->getCopyFromReg(Chain, dl, Reg0, MVT::i32, Chain.getValue(1)); SDValue T1 = CurDAG->getCopyFromReg(Chain, dl, Reg1, MVT::i32, T0.getValue(1)); SDValue Pair = SDValue(createGPRPairNode(MVT::Untyped, T0, T1), 0); // Copy REG_SEQ into a GPRPair-typed VR and replace the original two // i32 VRs of inline asm with it. unsigned GPVR = MRI.createVirtualRegister(&ARM::GPRPairRegClass); PairedReg = CurDAG->getRegister(GPVR, MVT::Untyped); Chain = CurDAG->getCopyToReg(T1, dl, GPVR, Pair, T1.getValue(1)); AsmNodeOperands[InlineAsm::Op_InputChain] = Chain; Glue = Chain.getValue(1); } Changed = true; if(PairedReg.getNode()) { OpChanged[OpChanged.size() -1 ] = true; Flag = InlineAsm::getFlagWord(Kind, 1 /* RegNum*/); if (IsTiedToChangedOp) Flag = InlineAsm::getFlagWordForMatchingOp(Flag, DefIdx); else Flag = InlineAsm::getFlagWordForRegClass(Flag, ARM::GPRPairRegClassID); // Replace the current flag. AsmNodeOperands[AsmNodeOperands.size() -1] = CurDAG->getTargetConstant( Flag, dl, MVT::i32); // Add the new register node and skip the original two GPRs. AsmNodeOperands.push_back(PairedReg); // Skip the next two GPRs. i += 2; } } if (Glue.getNode()) AsmNodeOperands.push_back(Glue); if (!Changed) return false; SDValue New = CurDAG->getNode(N->getOpcode(), SDLoc(N), CurDAG->getVTList(MVT::Other, MVT::Glue), AsmNodeOperands); New->setNodeId(-1); ReplaceNode(N, New.getNode()); return true; } bool ARMDAGToDAGISel:: SelectInlineAsmMemoryOperand(const SDValue &Op, unsigned ConstraintID, std::vector<SDValue> &OutOps) { switch(ConstraintID) { default: llvm_unreachable("Unexpected asm memory constraint"); case InlineAsm::Constraint_i: // FIXME: It seems strange that 'i' is needed here since it's supposed to // be an immediate and not a memory constraint. 
LLVM_FALLTHROUGH; case InlineAsm::Constraint_m: case InlineAsm::Constraint_o: case InlineAsm::Constraint_Q: case InlineAsm::Constraint_Um: case InlineAsm::Constraint_Un: case InlineAsm::Constraint_Uq: case InlineAsm::Constraint_Us: case InlineAsm::Constraint_Ut: case InlineAsm::Constraint_Uv: case InlineAsm::Constraint_Uy: // Require the address to be in a register. That is safe for all ARM // variants and it is hard to do anything much smarter without knowing // how the operand is used. OutOps.push_back(Op); return false; } return true; } /// createARMISelDag - This pass converts a legalized DAG into a /// ARM-specific DAG, ready for instruction scheduling. /// FunctionPass *llvm::createARMISelDag(ARMBaseTargetMachine &TM, CodeGenOpt::Level OptLevel) { return new ARMDAGToDAGISel(TM, OptLevel); }
apple/swift-llvm
lib/Target/ARM/ARMISelDAGToDAG.cpp
C++
apache-2.0
172,333
package com.huawei.esdk.sms.north.http.common; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.log4j.Logger; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import com.huawei.esdk.platform.common.utils.ESDKIOUtils; import com.huawei.esdk.platform.common.utils.help.DocumentBuilderFactories; import com.huawei.esdk.sms.north.http.bean.PlaceHolderBean; public abstract class AbstractXMLProcessor implements IXMLProcessor { private static Logger LOGGER = Logger.getLogger(AbstractXMLProcessor.class); @Override public List<PlaceHolderBean> processClasspathXMLFile(String fileName) throws ParserConfigurationException, SAXException, IOException { String xmlContent = ESDKIOUtils.getClasspathFileContent(fileName); return parseXML(xmlContent); } @Override public List<PlaceHolderBean> processXML(String xmlContent) throws ParserConfigurationException, SAXException, IOException { return parseXML(xmlContent); } protected List<PlaceHolderBean> parseXML(String xmlAsString) throws ParserConfigurationException, SAXException, IOException { DocumentBuilderFactory dbFactory = DocumentBuilderFactories.newSecurityInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); Document doc = dBuilder.parse(new InputSource(new ByteArrayInputStream(xmlAsString.getBytes("utf-8")))); doc.getDocumentElement().normalize(); Element rootElement = doc.getDocumentElement(); List<PlaceHolderBean> result = new ArrayList<PlaceHolderBean>(); return parseNode(rootElement, result); } protected List<PlaceHolderBean> parseNode(Node nNode, List<PlaceHolderBean> placerHolders) { StringBuilder sb = new StringBuilder(); if (LOGGER.isDebugEnabled()) { 
sb.append("Current Node :").append(nNode.getNodeName()); sb.append("|Node Type:").append(nNode.getNodeType()); sb.append("|Node Value:").append(nNode.getNodeValue()); sb.append("|Text Value:" + nNode.getTextContent()); LOGGER.debug(sb.toString()); } if (nNode.getNodeType() == Node.ELEMENT_NODE) { Element eElement = (Element)nNode; if (hasSubElement(nNode)) { NodeList nList = nNode.getChildNodes(); Node nodeItem; for (int temp = 0; temp < nList.getLength(); temp++) { nodeItem = nList.item(temp); parseNode(nodeItem, placerHolders); } } else { if (LOGGER.isDebugEnabled()) { sb.delete(0, sb.length()); sb.append("Tag Name:").append(eElement.getTagName()); sb.append("|Node Name:").append(eElement.getNodeName()); sb.append("|Node Value:").append(eElement.getNodeValue()); sb.append("|Text Content:").append(eElement.getTextContent()); LOGGER.debug(sb.toString()); } //It's the element which hasn't child element and should be processed PlaceHolderBean placeHolder = processElement(eElement); if (null != placeHolder) { placerHolders.add(placeHolder); } } } return placerHolders; } private boolean hasSubElement(Node node) { if (null == node || Node.ELEMENT_NODE != node.getNodeType()) { return false; } NodeList nList = node.getChildNodes(); Node nodeItem; for (int temp = 0; temp < nList.getLength(); temp++) { nodeItem = nList.item(temp); if (Node.ELEMENT_NODE == nodeItem.getNodeType()) { return true; } } return false; } protected abstract PlaceHolderBean processElement(Element element); }
eSDK/esdk_sms
source/esdk_sms_neadp_http/src/main/java/com/huawei/esdk/sms/north/http/common/AbstractXMLProcessor.java
Java
apache-2.0
4,493
<!DOCTYPE html > <html > <head> <title> {gooraye:$f_siteTitle} {gooraye:$f_siteName}</title> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <meta name="keywords" content="{gooraye:$f_metaKeyword}" /> <meta name="description" content="{gooraye:$f_metaDes}" /> <meta http-equiv="MSThemeCompatible" content="Yes" /> <!-- <link rel="stylesheet" type="text/css" href="{gooraye::RES}/css/style_2_common.css" /> --> <link href="{gooraye::STATICS}/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css" /> <script>var SITEURL='';</script> <!-- <script src="{gooraye::RES}/js/common.js" type="text/javascript"></script> --> <script src="{gooraye::STATICS}/jquery-1.9.1.js" type="text/javascript"></script> <script src="{gooraye::RES}/js/index.js" type="text/javascript"></script> <link href="{gooraye::RES}/css/style.css" rel="stylesheet" type="text/css" /> <link href="{gooraye::RES}/css/user.css" rel="stylesheet" type="text/css" /> <script type="text/javascript"> $(function(){ initComponent(); }) </script> <style type="text/css"> .mask{ width: 99999px; height: 99999px; background: rgba(85, 85, 85, 0.55); position: absolute; z-index: 10; top: 0px; } .goorayealert{ display: none; background: #f8f8f8; padding: 15px; top:100px; width:460px; position: absolute; left: 50%; z-index: 15; margin-left: -230px; } .alertcontent{ background-color: #fff; } .close:hover{ color:#000; } .close{ float: right; font-size: 21px; font-weight: bold; line-height: 1; color: #000; top:-6px; text-shadow: 0 1px 0 #fff; opacity: .2; filter: alpha(opacity=20); cursor: pointer; position: relative; } </style> </head> <body id="nv_member" class="pg_CURMODULE"> <div class="topbg"> <!-- top START --> <div class="top"> <!-- toplink START --> <div class="toplink"> <div class="memberinfo" id="destoon_member"> <a href="{gooraye::U('User/Index/index')}"> <img class="logo" src="{gooraye::RES}/images/logo.png"> </a> <!-- <img src="{gooraye:$wecha.headerpic}" width="60" height="60"> 
<strong>{gooraye:$wecha.wxname}</strong><a href="#" target="_blank" class="vipimg vip-icon<php>echo $userinfo['taxisid']-1;</php>" title=""></a> --> <if condition="$_SESSION[uid]==false"> <else/> 你好,<a href="{gooraye::U('User/Index/index')}" hidefocus="true" ><span style="color:#f40">{gooraye:$Think.session.uname}</span></a>(uid:{gooraye:$Think.session.uid}) <a class="btn btn-small btn-inverse" href="{gooraye::U('System/Admin/logout')}" ><i class="fa fa-power-off " title="退出系统"></i></a> </if> </div> <!-- memberinfo END --> </div> <!-- toplink END --> </div> <!-- top END --> <!-- wp 块 START --> <div id="wp" class="wp"> <!-- contentmanage 块 START --> <div class="contentmanage"> <!-- developer 块 START --> <div class="developer">
gooraye/GoorayeWeixin
tpl/User/default/Index_header.html
HTML
apache-2.0
3,074
<!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" /> <link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/> <link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/> <!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]--> <style type="text/css" media="all"> @import url('../../../../../style.css'); @import url('../../../../../tree.css'); </style> <script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script> <script src="../../../../../package-nodes-tree.js" type="text/javascript"></script> <script src="../../../../../clover-tree.js" type="text/javascript"></script> <script src="../../../../../clover.js" type="text/javascript"></script> <script src="../../../../../clover-descriptions.js" type="text/javascript"></script> <script src="../../../../../cloud.js" type="text/javascript"></script> <title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title> </head> <body> <div id="page"> <header id="header" role="banner"> <nav class="aui-header aui-dropdown2-trigger-group" role="navigation"> <div class="aui-header-inner"> <div class="aui-header-primary"> <h1 id="logo" class="aui-header-logo aui-header-logo-clover"> <a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a> </h1> </div> <div class="aui-header-secondary"> <ul class="aui-nav"> <li id="system-help-menu"> <a class="aui-nav-link" title="Open online documentation" target="_blank" 
href="http://openclover.org/documentation"> <span class="aui-icon aui-icon-small aui-iconfont-help">&#160;Help</span> </a> </li> </ul> </div> </div> </nav> </header> <div class="aui-page-panel"> <div class="aui-page-panel-inner"> <div class="aui-page-panel-nav aui-page-panel-nav-clover"> <div class="aui-page-header-inner" style="margin-bottom: 20px;"> <div class="aui-page-header-image"> <a href="http://cardatechnologies.com" target="_top"> <div class="aui-avatar aui-avatar-large aui-avatar-project"> <div class="aui-avatar-inner"> <img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/> </div> </div> </a> </div> <div class="aui-page-header-main" > <h1> <a href="http://cardatechnologies.com" target="_top"> ABA Route Transit Number Validator 1.0.1-SNAPSHOT </a> </h1> </div> </div> <nav class="aui-navgroup aui-navgroup-vertical"> <div class="aui-navgroup-inner"> <ul class="aui-nav"> <li class=""> <a href="../../../../../dashboard.html">Project overview</a> </li> </ul> <div class="aui-nav-heading packages-nav-heading"> <strong>Packages</strong> </div> <div class="aui-nav project-packages"> <form method="get" action="#" class="aui package-filter-container"> <input type="text" autocomplete="off" class="package-filter text" placeholder="Type to filter packages..." name="package-filter" id="package-filter" title="Start typing package name (or part of the name) to search through the tree. 
Use arrow keys and the Enter key to navigate."/> </form> <p class="package-filter-no-results-message hidden"> <small>No results found.</small> </p> <div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator"> <div class="packages-tree-container"></div> <div class="clover-packages-lozenges"></div> </div> </div> </div> </nav> </div> <section class="aui-page-panel-content"> <div class="aui-page-panel-content-clover"> <div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs"> <li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li> <li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li> <li><a href="test-Test_AbaRouteValidator_03.html">Class Test_AbaRouteValidator_03</a></li> </ol></div> <h1 class="aui-h2-clover"> Test testAbaNumberCheck_4359_good </h1> <table class="aui"> <thead> <tr> <th>Test</th> <th><label title="The test result. 
Either a Pass, Fail or Error.">Status</label></th> <th><label title="When the test execution was started">Start time</label></th> <th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th> <th><label title="A failure or error message if the test is not successful.">Message</label></th> </tr> </thead> <tbody> <tr> <td> <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_03.html?line=44681#src-44681" >testAbaNumberCheck_4359_good</a> </td> <td> <span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span> </td> <td> 7 Aug 12:33:07 </td> <td> 0.0 </td> <td> <div></div> <div class="errorMessage"></div> </td> </tr> </tbody> </table> <div>&#160;</div> <table class="aui aui-table-sortable"> <thead> <tr> <th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th> <th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_4359_good</th> </tr> </thead> <tbody> <tr> <td> <span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span> &#160;&#160;<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=40441#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a> </td> <td> <span class="sortValue">0.7352941</span>73.5% </td> <td class="align-middle" style="width: 100%" colspan="3"> <div> <div title="73.5% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:73.5%"></div></div></div> </td> </tr> </tbody> </table> </div> <!-- class="aui-page-panel-content-clover" --> <footer id="footer" role="contentinfo"> <section class="footer-body"> <ul> 
<li> Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1 on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT. </li> </ul> <ul> <li>OpenClover is free and open-source software. </li> </ul> </section> </footer> </section> <!-- class="aui-page-panel-content" --> </div> <!-- class="aui-page-panel-inner" --> </div> <!-- class="aui-page-panel" --> </div> <!-- id="page" --> </body> </html>
dcarda/aba.route.validator
target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_03_testAbaNumberCheck_4359_good_v7d.html
HTML
apache-2.0
9,178
/* * Copyright [1999-2017] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef __INTRONSUPPORTINGEVIDENCE_H__ #define __INTRONSUPPORTINGEVIDENCE_H__ #include "DataModelTypes.h" #include "SeqFeature.h" #define INTRONSUPPORTINGEVIDENCEFUNCS_DATA(CLASSTYPE) \ SEQFEATUREFUNCS_DATA(CLASSTYPE) SEQFEATUREFUNC_TYPES(IntronSupportingEvidence) typedef struct IntronSupportingEvidenceFuncsStruct { INTRONSUPPORTINGEVIDENCEFUNCS_DATA(IntronSupportingEvidence) } IntronSupportingEvidenceFuncs; #define INTRONSUPPORTINGEVIDENCE_DATA \ SEQFEATURE_DATA \ char isSpliceCanonical; \ char * hitName; \ ECOSTRING scoreType; #define FUNCSTRUCTTYPE IntronSupportingEvidenceFuncs struct IntronSupportingEvidenceStruct { INTRONSUPPORTINGEVIDENCE_DATA }; #undef FUNCSTRUCTTYPE IntronSupportingEvidence *IntronSupportingEvidence_new(void); Intron *IntronSupportingEvidence_getIntron(IntronSupportingEvidence *ise, Transcript *transcript); void IntronSupportingEvidence_setValuesFromIntron(IntronSupportingEvidence *ise, Intron *intron); int IntronSupportingEvidence_hasLinkedTranscripts(IntronSupportingEvidence *ise); Exon *IntronSupportingEvidence_findPreviousExon(IntronSupportingEvidence *ise, Transcript *transcript); Exon *IntronSupportingEvidence_findNextExon(IntronSupportingEvidence *ise, Transcript *transcript); #define IntronSupportingEvidence_isStored(ise, db) Storable_isStored(&((ise)->st), (db)) ECOSTRING 
IntronSupportingEvidence_setScoreType(IntronSupportingEvidence *ise, char *scoreType); #define IntronSupportingEvidence_getScoreType(ise) (ise)->scoreType char *IntronSupportingEvidence_setHitName(IntronSupportingEvidence *ise, char *str); #define IntronSupportingEvidence_getHitName(ise) (ise)->hitName #define IntronSupportingEvidence_setIsSpliceCanonical(ise,flag) (ise)->isSpliceCanonical = (flag) #define IntronSupportingEvidence_getIsSpliceCanonical(ise) (ise)->isSpliceCanonical #define IntronSupportingEvidence_setStart(ise,start) SeqFeature_setStart((ise),(start)) #define IntronSupportingEvidence_getStart(ise) SeqFeature_getStart((ise)) #define IntronSupportingEvidence_setEnd(ise,end) SeqFeature_setEnd((ise),(end)) #define IntronSupportingEvidence_getEnd(ise) SeqFeature_getEnd((ise)) #define IntronSupportingEvidence_setStrand(ise,strand) SeqFeature_setStrand((ise),(strand)) #define IntronSupportingEvidence_getStrand(ise) SeqFeature_getStrand((ise)) #define IntronSupportingEvidence_setDbID(ise,dbID) SeqFeature_setDbID((ise),(dbID)) #define IntronSupportingEvidence_getDbID(ise) SeqFeature_getDbID((ise)) #define IntronSupportingEvidence_setAnalysis(ise,anal) SeqFeature_setAnalysis((ise),(anal)) #define IntronSupportingEvidence_getAnalysis(ise) SeqFeature_getAnalysis((ise)) #define IntronSupportingEvidence_setAdaptor(ise,adaptor) SeqFeature_setAdaptor((ise),(adaptor)) #define IntronSupportingEvidence_getAdaptor(ise) SeqFeature_getAdaptor((ise)) #define IntronSupportingEvidence_setSlice(ise,contig) SeqFeature_setSlice((ise),(contig)) #define IntronSupportingEvidence_getSlice(ise) SeqFeature_getSlice((ise)) #define IntronSupportingEvidence_setScore(ise,score) SeqFeature_setScore((ise),(score)) #define IntronSupportingEvidence_getScore(ise) SeqFeature_getScore((ise)) #define IntronSupportingEvidence_free(ise) SeqFeature_free((ise)) void IntronSupportingEvidence_freeImpl(IntronSupportingEvidence *ise); #define IntronSupportingEvidence_getSeqRegionStart(ise) 
SeqFeature_getSeqRegionStart((SeqFeature *)(ise)) #define IntronSupportingEvidence_getSeqRegionEnd(ise) SeqFeature_getSeqRegionEnd((SeqFeature *)(ise)) #define IntronSupportingEvidence_getSeqRegionStrand(ise) SeqFeature_getSeqRegionStrand((SeqFeature *)(ise)) IntronSupportingEvidence *IntronSupportingEvidence_shallowCopyImpl(IntronSupportingEvidence *ise); #define IntronSupportingEvidence_shallowCopy(ise) SeqFeature_shallowCopy((ise)) #ifdef __INTRONSUPPORTINGEVIDENCE_MAIN__ IntronSupportingEvidenceFuncs intronSupportingEvidenceFuncs = { IntronSupportingEvidence_freeImpl, // free IntronSupportingEvidence_shallowCopyImpl, // shallowCopy NULL, // deepCopy NULL, // getStart NULL, // setStart NULL, // getEnd NULL // setEnd }; #else extern IntronSupportingEvidenceFuncs intronSupportingEvidenceFuncs; #endif #endif
Ensembl/ensc-core
src/DataModel/IntronSupportingEvidence.h
C
apache-2.0
5,064
# Copyright © 2014, Microsoft Corporation. All rights reserved. @{ # Version number of this module. ModuleVersion = '0.3.0.0' # ID used to uniquely identify this module GUID = '1088cfb5-36e8-4e9f-b7e4-d49e8032dde6' # Author of this module Author = 'Microsoft Corporation' # Company or vendor of this module CompanyName = 'Microsoft Corporation' # Copyright statement for this module Copyright = '(c) 2014 Microsoft Corporation. All rights reserved.' # Description of the functionality provided by this module Description = 'Module with DSC Resources for Just Enough Admin (JEA). Jea makes it simple to create custom RBAC solutions using PowerShell.' # Minimum version of the Windows PowerShell engine required by this module PowerShellVersion = '4.0' NestedModules = ".\xjea.psm1" # Minimum version of the common language runtime (CLR) required by this module CLRVersion = '4.0' # Functions to export from this module FunctionsToExport = '*' # Cmdlets to export from this module CmdletsToExport = '*' }
t3377/MyPuppet
modules/dsc/lib/puppet_x/dsc_resources/xJea/xJea.psd1
PowerShell
apache-2.0
1,015
package com.example; /** * Created by Nish on 2/21/15. */ public interface Movable { public void moveLeft(); public void moveRight(); }
nishtahir/Mektory-BeginnersAndroid
Week2/mygame/src/main/java/com/example/Movable.java
Java
apache-2.0
147
# AUTOGENERATED FILE FROM balenalib/orange-pi-zero-alpine:3.13-run ENV GO_VERSION 1.16.14 # set up nsswitch.conf for Go's "netgo" implementation # - https://github.com/golang/go/blob/go1.9.1/src/net/conf.go#L194-L275 # - docker run --rm debian:stretch grep '^hosts:' /etc/nsswitch.conf RUN [ ! -e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf # gcc for cgo RUN apk add --no-cache git gcc ca-certificates RUN fetchDeps='curl' \ && set -x \ && apk add --no-cache $fetchDeps \ && mkdir -p /usr/local/go \ && curl -SLO "http://resin-packages.s3.amazonaws.com/golang/v$GO_VERSION/go$GO_VERSION.linux-alpine-armv7hf.tar.gz" \ && echo "39f009c69b763f83d7e885f305d3f505710fc9bee56ffc29cc6472d05bbbcbe0 go$GO_VERSION.linux-alpine-armv7hf.tar.gz" | sha256sum -c - \ && tar -xzf "go$GO_VERSION.linux-alpine-armv7hf.tar.gz" -C /usr/local/go --strip-components=1 \ && rm -f go$GO_VERSION.linux-alpine-armv7hf.tar.gz ENV GOROOT /usr/local/go ENV GOPATH /go ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" WORKDIR $GOPATH CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/613d8e9ca8540f29a43fddf658db56a8d826fffe/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@golang" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! 
-d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux 3.13 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.16.14 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && ln -f /bin/sh /bin/sh.real \ && ln -f /bin/sh-shim /bin/sh
resin-io-library/base-images
balena-base-images/golang/orange-pi-zero/alpine/3.13/1.16.14/run/Dockerfile
Dockerfile
apache-2.0
2,473
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Python front-end supports for functions. NOTE: functions are currently experimental and subject to change! """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import hashlib from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import function_pb2 from tensorflow.python import pywrap_tensorflow as c_api from tensorflow.python.eager import context from tensorflow.python.framework import c_api_util from tensorflow.python.framework import dtypes from tensorflow.python.framework import graph_to_function_def from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.util import compat from tensorflow.python.util import function_utils from tensorflow.python.util import tf_contextlib from tensorflow.python.util import tf_inspect class Defun(object): """Decorator used to define TensorFlow functions. Use this decorator to make a Python function usable directly as a TensorFlow function. The decorated function must add ops to the default graph and return zero or more `Tensor` objects. 
Call the decorator with named arguments, one for each argument of the function to decorate, with the expected type of the argument as value. For example if the function to decorate accepts two `tf.float32` arguments named `x` and `y`, call the decorator with: @Defun(tf.float32, tf.float32) def foo(x, y): ... When you call the decorated function it will add `call` ops to the default graph and adds the definition of the function into the default graph. Because the addition of the function into the graph is deferred, the decorator can be used anywhere in the program. Any variables created inside of the function are hoisted into the outer graph. Note that the variables are created in the variable scope that was active during the first call to the function. Subsequent function calls will refer to the same set of variables. Definitions of functions in a graph are frozen as soon as the graph is used to create a session. However, new functions and new calls to existing functions may be added to the graph, with the new functions themselves becoming immediately frozen. Example, but also see the [How To on functions](link_needed). ```python # Defining the function. @tf.Defun(tf.float32, tf.float32) def MyFunc(x, y): return x + y, x - y # Building the graph. a = tf.constant([1.0]) b = tf.constant([2.0]) c, d = MyFunc(a, b, name='mycall') ``` """ def __init__(self, *input_types, **kwargs): """Create a `Defun` decorator. Args: *input_types: A list of `tf.DType` **kwargs: Optional keyword arguments, including func_name - (optional). A python string, the name to use to declare this `Function` in the graph. grad_func - (optional). A function implementing the gradient of the function-to-register. This is must be a `_DefinedFunction` object. The gradient function must satisfy the criterion defined in function.proto:GradientDef. python_grad_func - (optional). A function implementing the gradient of the function python-side. 
This function must take the current op and the gradients w.r.t. its outputs, and return the gradients w.r.t. the inputs. That is it must implement the interface expected by `tf.RegisterGradient`). This will be called by tf.gradients to add the gradient ops to the graph. At most one of grad_func and python_grad_func can be specified. out_names = (optional). A list of strings, one per output tensor. shape_func - (optional). A function taking the op and returning a list of static shapes to set for the function's outputs. """ self._input_types = input_types self._func_name = kwargs.pop("func_name", None) self._grad_func = kwargs.pop("grad_func", None) self._python_grad_func = kwargs.pop("python_grad_func", None) self._out_names = kwargs.pop("out_names", None) self._extra_kwargs = kwargs def __call__(self, func): # Various sanity checks on the callable func. if not callable(func): raise ValueError("func %s must be callable" % func) # Func should not use kwargs and defaults. argspec = tf_inspect.getargspec(func) if argspec.keywords or argspec.defaults: raise ValueError("Functions with argument defaults or keyword " "arguments are not supported.") # Computes how many arguments 'func' has. min_args = len(argspec.args) max_args = min_args if argspec.varargs: max_args = 1000000 argnames = argspec.args if tf_inspect.ismethod(func): # 1st argument is the "class" type. min_args -= 1 argnames = argnames[1:] if self._input_types: # If Defun is given a list of types for the inputs, the number # of input types should be compatible with 'func'. num = len(self._input_types) if num < min_args or num > max_args: raise ValueError( "The function has fewer arguments than the number of specified " "input types.") return _DefinedFunction( func, argnames, self._input_types, self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) # 'func' expects no arguments and input types is an empty list. 
if min_args == 0 and max_args == 0: return _DefinedFunction( func, [], [], self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) # Input types are unknown. It's an overloaded function and hence # its definition needs to be deferred until it's called. return _OverloadedFunction( func, argnames, self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) class _DefinedFunction(object): """_DefinedFunction encapsulates a function definition and its properties. Attributes: name: The function name. definition: The definition of this function. A FunctionDef proto. grad_func_name: If not None, the name of this function's gradient function. python_grad_func: A python callable implementing the gradient of the function python-side. """ def __init__(self, func, argnames, input_types, func_name=None, grad_func=None, python_grad_func=None, out_names=None, shape_func=None, capture_by_value=False, **kwargs): """Creates _DefinedFunction. Args: func: A python callable which constructs a tf function body. argnames: A list of strings for function argument names. input_types: The function's argument types. Can be a tuple, list of tf data types. func_name: The function name. Defaults to None, in which derives from 'func'. grad_func: This function's gradient function, if not None. Defaults to None. python_grad_func: A python callable implementing the gradient of the function python-side. out_names: An optional list of strings for the function return value names. shape_func: An optional function mapping an op to a list of static output shapes. capture_by_value: Boolean (defaults to False). If True, captured values will be copied into the function body. **kwargs: The keyword arguments. **kwargs is passed to every call site of this function. Raises: ValueError: The function definition is invalid. 
""" self._func = func self._input_types = input_types self._func_name = func_name self._grad_func = grad_func self._python_grad_func = python_grad_func self._out_names = out_names self._shape_func = shape_func self._capture_by_value = capture_by_value self._extra_kwargs = kwargs # Constructed only when C API is disabled, lazily self._definition = None # Constructed only when C API is enabled, lazily self._c_func = None self._sub_functions = dict() # Constructed with _definition or _c_func # pylint: disable=protected-access device_funcs = ops.get_default_graph()._device_functions_outer_to_inner # pylint: enable=protected-access # Get the innermost device if possbile. self._caller_device = device_funcs[-1] if device_funcs else None # Cached OpDef for this function. When C API is enabled, this is # the only part of FunctionDef that we cache in Python. When C API # is disabled the whole _definition is available and this is simply # another reference to _definition.signature self._op_def = None assert isinstance(input_types, (list, tuple)) self._arg_types = input_types self._arg_names = [argnames[i] if i < len(argnames) else ("arg%d" % i) for i in range(len(input_types))] @property def name(self): """Function name.""" self._create_definition_if_needed() return self._func_name @property def definition(self): """Function definition proto.""" self._create_definition_if_needed() if self._c_func: with c_api_util.tf_buffer() as buf: c_api.TF_FunctionToFunctionDef(self._c_func.func, buf) fdef = function_pb2.FunctionDef() proto_data = c_api.TF_GetBuffer(buf) fdef.ParseFromString(compat.as_bytes(proto_data)) return fdef return self._definition @property def _signature(self): self._create_definition_if_needed() return self._op_def def set_grad_func(self, grad_func): """Specifies the gradient function of this function.""" assert not self._grad_func assert isinstance(grad_func, _DefinedFunction) self._grad_func = grad_func @property def grad_func_name(self): """Its gradient 
function's name.""" return self._grad_func.name if self._grad_func else None @property def python_grad_func(self): """Python gradient function callable.""" return self._python_grad_func @property def declared_input_types(self): """Returns the list of data types of explicit declared inputs.""" return self._input_types @property def captured_inputs(self): """Returns the list of implicitly captured inputs.""" self._create_definition_if_needed() return self._extra_inputs @property def stateful_ops(self): """Returns the list of stateful ops in function definition. Returns: A list of (op.name, op.type) pairs. """ self._create_definition_if_needed() return self._stateful_ops def _create_definition_if_needed(self): """Creates the function definition if it's not created yet.""" with context.graph_mode(): self._create_definition_if_needed_impl() def _create_definition_if_needed_impl(self): """This is not what you want, see _create_definition_if_needed.""" if self._definition is not None or self._c_func is not None: return temp_graph = func_graph_from_py_func( self._func, self._arg_names, self._arg_types, self._func_name, self._capture_by_value, self._caller_device) self._extra_inputs = temp_graph.extra_inputs # pylint: disable=protected-access self._sub_functions = temp_graph._functions # pylint: enable=protected-access # Extra kwargs are treated as attrs on the function def. 
if self._func_name: base_func_name = self._func_name else: base_func_name = function_utils.get_func_name(self._func) if self._grad_func: base_func_name += ("_%s" % self._grad_func.name) kwargs_attr = _parse_kwargs_as_attrs(base_func_name, **self._extra_kwargs) if not temp_graph._c_graph: # pylint: disable=protected-access # Build the FunctionDef self._definition = graph_to_function_def.graph_to_function_def( temp_graph, temp_graph.get_operations(), temp_graph.inputs, temp_graph.outputs, out_names=self._out_names) for k in kwargs_attr: self._definition.attr[k].CopyFrom(kwargs_attr[k]) # Hash the definition and its dependencies. self._hash_str = self._create_hash_str( self._definition.signature.input_arg, self._definition.signature.output_arg, self._definition.node_def) # Finally, we decide the function name to use. If not specified, # make up something which is almost certainly unique (but deterministic). if not self._func_name: self._func_name = "_".join([base_func_name, self._hash_str]) self._definition.signature.name = self._func_name if self._func.__doc__: self._definition.signature.description = self._func.__doc__ self._op_def = self._definition.signature else: # C API is enabled output_names = ([compat.as_bytes(x) for x in self._out_names] if self._out_names else []) description = self._func.__doc__ or None # pylint: disable=protected-access c_func = c_api.TF_GraphToFunction_wrapper( temp_graph._c_graph, base_func_name, self._func_name is None, # append_hash_to_fn_name None, # opers [t._as_tf_output() for t in temp_graph.inputs], [t._as_tf_output() for t in temp_graph.outputs], output_names, None, # opts description) self._c_func = c_api_util.ScopedTFFunction(c_func) # pylint: enable=protected-access self._set_c_attrs(kwargs_attr) # Set cached fields: _op_def and _func_name (if not already set) self._op_def = self.definition.signature if self._func_name: assert self._func_name == self._op_def.name else: self._func_name = compat.as_str(self._op_def.name) 
self._stateful_ops = [(op.name, op.type) for op in temp_graph.get_operations() if op.op_def.is_stateful] def _set_c_attrs(self, attrs): """Sets `attrs` as attributes of self._c_func. Requires that self._c_func is not None. Args: attrs: a dictionary from attribute name to attribute proto value """ for name, attr_value in attrs.items(): serialized = attr_value.SerializeToString() # TODO(skyewm): this creates and deletes a new TF_Status for every attr. # It might be worth creating a convenient way to re-use the same status. c_api.TF_FunctionSetAttrValueProto(self._c_func.func, compat.as_str(name), serialized) def _create_hash_str(self, input_arg, output_arg, node_def): """Creates an 8-character string unique to this input. Args: input_arg: the input_arg field of an OpDef (e.g. self._definition.signature.input_arg) output_arg: the output_arg field of an OpDef (e.g. self._definition.signature.output_arg) node_def: the node_def field of a FunctionDef (e.g. self._definition.node_def) Returns: The unique string for this input """ hasher = hashlib.sha1() def update_num(n): hasher.update(compat.as_bytes("%x" % n)) def update_str(s): update_num(len(s)) hasher.update(compat.as_bytes(s)) def update_strs(slist): update_num(len(slist)) for s in slist: update_str(s) for adef in input_arg: update_str(adef.SerializeToString()) for adef in output_arg: update_str(adef.SerializeToString()) for n in sorted(node_def, key=lambda n: n.name): update_str(n.name) update_str(n.op) update_strs(n.input) update_num(len(n.attr)) # NOTE: protobuf map serialization does not guarantee ordering. for k in sorted(n.attr): update_str(k) update_str(n.attr[k].SerializeToString()) return hasher.hexdigest()[:8] def add_to_graph(self, g): """Adds this function into the graph g.""" self._create_definition_if_needed() # Adds this function into 'g'. 
# pylint: disable=protected-access if context.executing_eagerly(): context.context().add_function_def(self.definition) else: g._add_function(self) # pylint: enable=protected-access # Ensures related sub-routines are defined in 'g', too. for f in self._sub_functions.values(): f.add_to_graph(g) # Adds its gradient function, too. if self._grad_func: self._grad_func.add_to_graph(g) def __call__(self, *args, **kwargs): self.add_to_graph(ops.get_default_graph()) args = [ops.convert_to_tensor(_) for _ in args] + self._extra_inputs ret, op = _call(self._signature, *args, **kwargs) # Set a hidden attr in 'op' so that gradients_impl can refer back # to this _DefinedFunction instance to access python_grad_func. assert isinstance(op, ops.Operation) setattr(op, "__defun", self) if self._shape_func is not None: shapes = self._shape_func(op) if len(shapes) != len(op.outputs): raise ValueError("shape_func produced %d shapes for %d outputs" % (len(shapes), len(op.outputs))) for (t, shape) in zip(op.outputs, shapes): t.set_shape(shape) return ret class _OverloadedFunction(object): """_OverloadedFunction encapsulates an overloaded function. _OverloadedFunction maintains a mapping from input types to instantiated _DefinedFunction in self._overload. """ def __init__(self, func, argnames, func_name=None, grad_func=None, python_grad_func=None, out_names=None, **kwargs): """Creates _DefinedFunction. Args: func: A python callable which constructs a tf function body. argnames: A list of strings for function argument names. func_name: The function name. Defaults to None, in which derives from 'func'. grad_func: This function's gradient function, if not None. Defaults to None. python_grad_func: A python callable implementing the gradient of the function python-side. out_names: A list of strings for the function return value names. **kwargs: The keyword arguments. **kwargs is passed to every call site of this function. Raises: ValueError: The function definition is invalid. 
""" self._func = func self._argnames = argnames self._func_name = func_name assert grad_func is None or isinstance(grad_func, _OverloadedFunction) self._grad_func = grad_func self._python_grad_func = python_grad_func self._out_names = out_names self._extra_kwargs = kwargs self._overload = {} def instantiate(self, input_types): """Instantiate this function given input argument types. Args: input_types: A list of data types for the inputs. Returns: _DefinedFunction for the given input types. """ # Stringify the type list. key = _type_list_to_str(input_types) defined = self._overload.get(key) if not defined: # If not defined yet, define the function given the input types. name = self._func_name if name is not None: name = "_".join([name, key]) defined = _DefinedFunction( self._func, self._argnames, input_types, name, None, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) _ = defined.name # Fully instantiate the function definition. if self._grad_func: # If _grad_func is given, it is another # _OverloadedFunction. We need to instantiate it with the # right input types. output_types = [ dtypes.DType(_.type) for _ in defined._signature.output_arg # pylint: disable=protected-access ] # pylint: disable=protected-access defined._grad_func = self._grad_func.instantiate(input_types + output_types) # pylint: enable=protected-access self._overload[key] = defined return defined def __call__(self, *args, **kwargs): input_types = [] args = list(args) for (i, x) in enumerate(args): x = ops.convert_to_tensor(x) if not isinstance(x, ops.Tensor): raise ValueError("Expect a Tensor but get ", x) input_types.append(x.dtype) args[i] = x return self.instantiate(input_types)(*args, **kwargs) class _FuncGraph(ops.Graph): """A helper for constructing a function. _FuncGraph overrides ops.Graph's create_op() so that we can keep track of all inputs into every op created inside the function. 
If any input is from other graphs, we keep track of it in self.capture and substitute the input with a place holder. Each captured input's corresponding place holder is converted into a function argument and the caller passes in the captured tensor. """ def __init__(self, name, capture_by_value, *args, **kwargs): super(_FuncGraph, self).__init__(*args, **kwargs) self._capture_by_value = capture_by_value self._building_function = True self._outer_graph = ops.get_default_graph() self._vscope = vs.get_variable_scope() self._old_custom_getter = self._vscope.custom_getter # The name of the function. self.name = name # Placeholder tensors representing the inputs to this function. The tensors # are in this _FuncGraph. self.inputs = [] # Tensors that will be returned this function. The tensors are in this # _FuncGraph. self.outputs = [] # Maps external tensor -> internal tensor (e.g. input placeholder). self._captured = {} # The external tensors that have been captured as inputs and must be passed # to this function (empty if capturing by value, otherwise these are the # keys of _captured). self.extra_inputs = [] # Input placeholders that been added for captured values (empty if capturing # by value). self.extra_args = [] # Captured variables. # TODO(skyewm): is this needed? self.extra_vars = [] # pylint: disable=g-doc-return-or-yield @tf_contextlib.contextmanager def container(self, container_name): """Returns a context manager that specifies the resource container to use. Overridden from `tf.Graph` to update both the init_scope container and the present inner container. This is necessary to make sure setting containers applies correctly both to created variables and to stateful ops. Args: container_name: container name string. Returns: A context manager for defining resource containers for stateful ops, yields the container name. 
""" original_container = self._container # pylint: disable=protected-access with ops.init_scope(): original_init_container = ops.get_default_graph()._container try: self._container = container_name with ops.init_scope(): ops.get_default_graph()._container = container_name yield self._container finally: self._container = original_container with ops.init_scope(): ops.get_default_graph()._container = original_init_container # pylint: enable=protected-access # pylint: enable=g-doc-return-or-yield def getvar( self, getter, name, shape=None, dtype=None, initializer=None, reuse=None, trainable=True, collections=None, # pylint: disable=redefined-outer-name use_resource=None, **kwargs): """A custom variable getter.""" # Here, we switch the default graph to the outer graph and ask the # variable scope in which the function is defined to give us the # variable. The variable is stashed in extra_vars and returned to # the caller. # # We capture these variables so that the variable definition is # hoisted upward to the outer most graph. with self._outer_graph.as_default(): # pylint: disable=protected-access var = self._vscope.get_variable( vs._get_default_variable_store(), name, shape=shape, dtype=dtype, initializer=initializer, reuse=reuse, trainable=trainable, collections=collections, use_resource=use_resource) self.extra_vars.append(var) if isinstance(var, resource_variable_ops.ResourceVariable): # For resource-based variables read the variable outside the function # and pass in the value. This ensures that the function is pure and # differentiable. TODO(apassos) this may have performance problems if # the function will only do embedding lookups on the variable. 
return var.value() return var def create_op(self, op_type, inputs, data_types, **kwargs): for i, x in enumerate(inputs): if isinstance(x, ops.EagerTensor) or x.graph is not self: inputs[i] = self.capture(x) return super(_FuncGraph, self).create_op(op_type, inputs, data_types, **kwargs) def capture(self, tensor, name=None): """Adds the given tensor to this graph and returns the captured tensor.""" if tensor in self._captured: # Captured already. return self._captured[tensor] elif self._capture_by_value: return self._add_tensor_and_parents(tensor) else: return self._capture_tensor_as_extra_input(tensor, name) def _capture_tensor_as_extra_input(self, tensor, name=None): # Substitute with a placeholder. self.extra_inputs.append(tensor) # Hoist the new input placeholder out of any control flow context # we're currently in. with ops.control_dependencies(None): ph = array_ops.placeholder( tensor.dtype, shape=tensor.get_shape(), name=name) # pylint: disable=protected-access if ops._USE_C_SHAPES: if isinstance(tensor, ops.EagerTensor): handle_data = tensor._handle_data if handle_data: handle_data = handle_data.SerializeToString() else: handle_data = c_api.GetHandleShapeAndType(tensor.graph._c_graph, tensor._as_tf_output()) if handle_data: c_api.SetHandleShapeAndType(ph.graph._c_graph, ph._as_tf_output(), compat.as_bytes(handle_data)) else: ph._handle_data = tensor._handle_data # pylint: enable=protected-access self.inputs.append(ph) self._captured[tensor] = ph self.extra_args.append(ph) if _is_guaranteed_const(tensor): with ops.control_dependencies(None): return array_ops.guarantee_const(ph) else: return ph def _add_tensor_and_parents(self, tensor): op = self._add_op_and_parents(tensor.op) return op.outputs[tensor.value_index] def _add_op_and_parents(self, op): # pylint: disable=protected-access op_def = graph_to_function_def._get_op_def(op) # pylint: enable=protected-access if op_def.is_stateful: raise ValueError("Cannot capture a stateful node (name:%s, type:%s) " "by 
value." % (op.name, op.type)) elif op.type in ("Placeholder", "PlaceholderV2"): raise ValueError("Cannot capture a placeholder (name:%s, type:%s) " "by value." % (op.name, op.type)) captured_inputs = [self._add_tensor_and_parents(x) for x in op.inputs] captured_op = self.create_op( op.type, captured_inputs, [o.dtype for o in op.outputs], name=op.name, attrs=op.node_def.attr, op_def=op_def) for t, captured_t in zip(op.outputs, captured_op.outputs): self._captured[t] = captured_t return captured_op def func_graph_from_py_func(func, arg_names, arg_types, name=None, capture_by_value=False, device=None, colocation_stack=None, container=None, collections_ref=None, arg_shapes=None): """Returns a _FuncGraph generated from `func`. Args: func: A Python callable which constructs a TF function body. The arguments must correspond to `arg_types`. Returns a value or list/tuple of values. No returned value can be None. arg_names: A sequence of strings for the function argument names. arg_types: A sequence of the function's argument types. name: The function name. If None, the name is derived from `func`. capture_by_value: boolean. If True, captured values will be copied into the function body. device: device name or function. colocation_stack: A colocation stack (list) the _FuncGraph should use. container: A container name the _FuncGraph should start with. collections_ref: A reference to a collections dict the _FuncGraph should use internally. arg_shapes: A sequence of the function's argument shapes. Returns: A _FuncGraph. Raises: ValueError: if func returns None. 
""" if not name: name = function_utils.get_func_name(func) func_graph = _FuncGraph(name, capture_by_value) with func_graph.as_default(), ops.device(device): # pylint: disable=protected-access if collections_ref is not None: func_graph._collections = collections_ref if container is not None: func_graph._container = container if colocation_stack is not None: func_graph._colocation_stack = colocation_stack # pylint: enable=protected-access if arg_shapes is None: arg_shapes = [None] * len(arg_types) # Create placeholders for the function arguments. for (argname, argtype, argshape) in zip(arg_names, arg_types, arg_shapes): argholder = array_ops.placeholder(argtype, shape=argshape, name=argname) func_graph.inputs.append(argholder) # Call func and gather the output tensors. with vs.variable_scope("", custom_getter=func_graph.getvar): outputs = func(*func_graph.inputs) # There is no way of distinguishing between a function not returning # anything and a function returning None in Python. # We need to allow the former and ideally want to forbid the latter as # it is most likely user error. # TODO(iga): Consider adding a @NoOutput decorator on top of @Defun to # allow users to explicitly mark the function as not returning anything. # For now, we allow a single None return and interpret it as a function # with no output. if outputs is None: outputs = [] else: # If func only returned one value, make it a tuple. if not isinstance(outputs, (list, tuple)): outputs = (outputs,) if any([_ is None for _ in outputs]): raise ValueError("Function can not return None.") # Ensures each output is a Tensor in the function graph. outputs = [ops.convert_to_tensor(t) for t in outputs] outputs = [func_graph.capture(t) if t.graph is not func_graph else t for t in outputs] func_graph.outputs = outputs return func_graph def _is_guaranteed_const(tensor): """Determines whether `tensor` is guaranteed to be a constant. 
A tensor is guaranteed to be a constant if either it was produced by a `GuaranteeConst` op or if all of its children are guaranteed to be constants. Args: tensor: The tensor for which to determine const-ness. Returns: True if `tensor` is guaranteed to be a constant, False otherwise. """ if isinstance(tensor, ops.EagerTensor): return False class Work(object): def __init__(self, op, leaving): self.op = op self.leaving = leaving is_guaranteed_const = lambda op: op.node_def.op == "GuaranteeConst" constants = set([]) def all_inputs_const(op): # If all inputs of an op are guaranteed constants, then we can infer that # the op produces a constant as well. return op.inputs and all(inp.op in constants for inp in op.inputs) visited = set([]) stack = [Work(tensor.op, leaving=False)] while stack: work = stack.pop() if work.leaving: if all_inputs_const(work.op): constants.add(work.op) continue visited.add(work.op) if is_guaranteed_const(work.op): constants.add(work.op) continue # This op will be revisited after all its inputs are checked for const-ness. stack.append(Work(work.op, leaving=True)) for inp in work.op.inputs: if inp.op not in visited: stack.append(Work(inp.op, leaving=False)) return tensor.op in constants def _call(sig, *inputs, **kwargs): """Adds a node calling a function. This adds a `call` op to the default graph that calls the function of signature `sig`, passing the tensors in `inputs` as arguments. It returns the outputs of the call, which are one or more tensors. `sig` is OpDefArg.a `_DefinedFunction` object. You can pass an optional keyword parameter `name=string` to name the added operation. You can pass an optional keyword parameter `noinline=True|False` to instruct the runtime not to inline the function body into the call site. Args: sig: OpDefArg. The signature of the function. *inputs: arguments to the function. **kwargs: Optional keyword arguments. Can only contain 'name' or 'noinline'. Returns: A 2-element tuple. 
First element: a Tensor if the function returns a single value; a list of Tensors if the function returns multiple value; the Operation if the function returns no values. Second element: the Operation. Raises: ValueError: if the arguments are invalid. """ if len(inputs) != len(sig.input_arg): raise ValueError("Expected number of arguments: %d, received: %d" % (len( sig.input_arg), len(inputs))) name = kwargs.pop("name", None) g = ops.get_default_graph() func_name = sig.name attrs = _parse_kwargs_as_attrs(func_name, **kwargs) output_types = [dtypes.DType(x.type) for x in sig.output_arg] with ops.name_scope(name, func_name, inputs) as name: op = g.create_op( func_name, list(inputs), output_types, name=name, attrs=attrs, op_def=sig, compute_shapes=False) if op.outputs: if len(op.outputs) == 1: ret = op.outputs[0] else: ret = tuple(op.outputs) else: ret = op return ret, op def _from_definition(fdef, grad_func=None): """Creates a _DefinedFunction initialized from a FunctionDef proto. Args: fdef: a FunctionDef grad_func: a _DefinedFunction or None Returns: A _DefinedFunction representing fdef """ # TODO(iga): This method does major surgery on _DefinedFunction. # Make it a named constructor using @classmethod of _DefinedFunction. # The Python callable is only needed to create a FunctionDef. Since we have # the FunctionDef here, we don't need to set _DefinedFunction._func (nor do we # have access to such a callable here). func = None argnames = [arg.name for arg in fdef.signature.input_arg] input_types = tuple( dtypes.as_dtype(arg.type) for arg in fdef.signature.input_arg) func_name = fdef.signature.name # Note: FunctionDefs do not include python gradient functions, so if the # original _DefinedFunction included one it will not be reflected here. 
python_grad_func = None out_names = [arg.name for arg in fdef.signature.output_arg] result = _DefinedFunction(func, argnames, input_types, func_name, grad_func, python_grad_func, out_names) # pylint: disable=protected-access serialized = fdef.SerializeToString() c_func = c_api.TF_FunctionImportFunctionDef(serialized) result._c_func = c_api_util.ScopedTFFunction(c_func) result._extra_inputs = [] # pylint: enable=protected-access return result def _from_library(lib): """Creates _DefinedFunctions initialized from a FunctionDefLibrary proto. This method handles assigning the correct gradient functions to each function. Args: lib: a FunctionDefLibrary Returns: A list of _DefinedFunctions Raises: ValueError: `lib` is invalid """ if not lib.function and not lib.gradient: return [] # function name -> FunctionDef proto funcs = {fdef.signature.name: fdef for fdef in lib.function} # Validate that all references function names have function defs for g in lib.gradient: if g.function_name not in funcs: raise ValueError("FunctionDefLibrary missing '%s' FunctionDef\n%s" % (g.function_name, str(lib))) if g.gradient_func not in funcs: raise ValueError("FunctionDefLibrary missing '%s' FunctionDef\n%s" % (g.gradient_func, str(lib))) # function name -> gradient function name func_to_grad = collections.defaultdict(lambda: None) # gradient function name -> names of functions having that grad function grad_to_funcs = collections.defaultdict(list) for gdef in lib.gradient: func_to_grad[gdef.function_name] = gdef.gradient_func grad_to_funcs[gdef.gradient_func].append(gdef.function_name) # Start with functions without gradients ready = [ fdef for fdef in lib.function if func_to_grad[fdef.signature.name] is None ] if not ready: raise ValueError( "FunctionDefLibrary contains cyclic gradient functions!\n" + str(lib)) # function name -> _DefinedFunction initialized = {} while ready: fdef = ready.pop() name = fdef.signature.name grad = initialized.get(func_to_grad[name]) if func_to_grad[name]: 
assert grad defined_func = _from_definition(fdef, grad_func=grad) initialized[name] = defined_func ready.extend(funcs[f] for f in grad_to_funcs[name]) return initialized.values() def _get_experimental_kwarg_as_attr(attr_name, value): """Creates an AttrValue for a python object.""" if isinstance(value, bool): return attr_value_pb2.AttrValue(b=value) elif isinstance(value, int): return attr_value_pb2.AttrValue(i=value) elif isinstance(value, float): return attr_value_pb2.AttrValue(f=value) elif isinstance(value, str): return attr_value_pb2.AttrValue(s=compat.as_bytes(value)) else: raise ValueError("Unsupported attribute type for %s with type %s" % (attr_name, type(value))) def _parse_kwargs_as_attrs(func_name, **kwargs): """Parses **kwargs into a node's attributes.""" attrs = {} noinline = kwargs.pop("noinline", None) if noinline is not None: attrs["_noinline"] = attr_value_pb2.AttrValue(b=bool(noinline)) compiled = kwargs.pop("compiled", None) separate_compiled_gradients = kwargs.pop("separate_compiled_gradients", None) if compiled is not None: attrs["_XlaCompile"] = attr_value_pb2.AttrValue(b=bool(compiled)) attrs["_XlaSeparateCompiledGradients"] = attr_value_pb2.AttrValue( b=bool(separate_compiled_gradients)) # Forward _XlaScope from enclosing context (if set), otherwise create new. # pylint: disable=protected-access if "_XlaScope" in ops.get_default_graph()._attr_scope_map: attrs["_XlaScope"] = ops.get_default_graph()._attr_scope_map["_XlaScope"] else: attrs["_XlaScope"] = attr_value_pb2.AttrValue( s=("function_%s" % func_name).encode()) # pylint: enable=protected-access kwargs_keys = list(kwargs.keys()) for key in kwargs_keys: if key.startswith("experimental_"): attrs[key] = _get_experimental_kwarg_as_attr(key, kwargs[key]) del kwargs[key] if kwargs: raise ValueError("Unknown keyword arguments: %s" % kwargs.keys()) return attrs def get_extra_vars(): """Returns the captured variables by the function. 
Returns: If the default graph is being used to define a function, the returned list of variables are those created inside the function body so far. Otherwise, returns an empty list. """ g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_vars else: return [] def get_extra_inputs(): """Returns the captured input tensors by the function. Returns: If the default graph is being used to define a function, the returned list of tensors are those accessed inside the function body but defined outside the function body so far. Otherwise, returns an empty list. """ g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_inputs else: return [] def get_extra_args(): """Returns the corresponding function arguments for the captured inputs. Returns: If the default graph is being used to define a function, the returned list of place holders are those used inside the function body corresponding those returned by get_extra_inputs(). Otherwise, returns an empty list. """ g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_args else: return [] def _type_list_to_str(types): if any([_ not in _DTYPE_TO_STR for _ in types]): raise ValueError("Unsupported dtypes: %s" % types) return "".join([_DTYPE_TO_STR[_] for _ in types]) # NOTE: The list needs to be extended when more data types are added. 
_DTYPE_TO_STR = { dtypes.float16: "f16", dtypes.float32: "f32", dtypes.float64: "f64", dtypes.int32: "i32", dtypes.uint8: "i8", dtypes.uint16: "u16", dtypes.uint32: "u32", dtypes.uint64: "u64", dtypes.int16: "i16", dtypes.int8: "i8", dtypes.string: "s", dtypes.complex64: "c64", dtypes.complex128: "c128", dtypes.int64: "i64", dtypes.bool: "b", dtypes.qint8: "qi8", dtypes.quint8: "qu8", dtypes.qint16: "qi16", dtypes.quint16: "qu16", dtypes.qint32: "qi32", dtypes.bfloat16: "b16" } def function_def_from_tf_function(c_func): """Converts a SWIG-wrapped TF_Function* to a FunctionDef proto.""" with c_api_util.tf_buffer() as buf: c_api.TF_FunctionToFunctionDef(c_func, buf) data = c_api.TF_GetBuffer(buf) fdef = function_pb2.FunctionDef() fdef.ParseFromString(compat.as_bytes(data)) return fdef
kobejean/tensorflow
tensorflow/python/framework/function.py
Python
apache-2.0
43,277
//************************************************************* // Filename: socket.js // // Author: Jake Higgins <[email protected]> //************************************************************* var Socket; function addSocketListeners() { Socket = new io(); Socket.on('sync objects', function(objects, room, caller) { console.log(objects); if(CallerID == caller) { console.log(objects); $.each(objects, function(key, object) { createStroke(object); }); CanvasManager.render(); } }); Socket.on('add object', function(object, room, caller) { if(CallerID != caller && RoomID == room) { createStroke(object); CanvasManager.clearCanvas(); } }); Socket.on('move object', function(object, room, caller) { console.log('move object'); if(CallerID != caller && RoomID == room) { var targetObj = ObjectManager.findObject(object.objectID); console.log(targetObj); if(targetObj != null) { targetObj.max = object.max; targetObj.min = object.min; $(targetObj.container).css({ top: targetObj.min.y, left: targetObj.min.x }); } } }); Socket.on('delete object', function(object, room, caller) { if(CallerID != caller && RoomID == room) { ObjectManager.deleteObject(object.objectID); } }); Socket.on('clear objects', function(room, caller) { console.log('clear'); if(CallerID != caller && RoomID == room) { CanvasManager.clear(true); } }); Socket.on('draw', function(drawData, room, caller) { if(CallerID != caller && RoomID == room) { Drawing.draw(drawData, true); } }); // ======== Chat =============/ // Comes in the format message/roomID/caller // if(roomID == this.roomID) // pseudocode for now // add chat to chat thingy Socket.on('receiveMessage', function(message, room, caller) { if ( RoomID == room ) { // Proceed Chat.write(message, caller); } }); } function createStroke(stroke) { console.log(stroke); var obj = new object("stroke"); obj.initialize(); obj.imageData = stroke.imageData; obj.layer = stroke.layer; obj.max = stroke.max; obj.min = stroke.min; obj.objectID = stroke.objectID; obj.type = 
"stroke"; obj.objectData = { imageData: obj.imageData, layer: obj.layer, max: obj.max, min: obj.min, objectID: obj.objectID, objectType: obj.type, }; obj.createImage(); ObjectManager.addObject(obj); }
IGME-Production-Studio/driftwoodrp
update/public/static/scripts/socket.js
JavaScript
apache-2.0
2,530
package ch.unibe.scg.regex; import static java.util.Collections.singleton; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import ch.unibe.scg.regex.ParserProvider.Node; import ch.unibe.scg.regex.ParserProvider.Node.Basic; import ch.unibe.scg.regex.ParserProvider.Node.Group; import ch.unibe.scg.regex.ParserProvider.Node.NonGreedyStar; import ch.unibe.scg.regex.ParserProvider.Node.Optional; import ch.unibe.scg.regex.ParserProvider.Node.Plus; import ch.unibe.scg.regex.ParserProvider.Node.PositiveSet; import ch.unibe.scg.regex.ParserProvider.Node.SetItem; import ch.unibe.scg.regex.ParserProvider.Node.Simple; import ch.unibe.scg.regex.ParserProvider.Node.Star; import ch.unibe.scg.regex.ParserProvider.Node.Union; import ch.unibe.scg.regex.TNFA.Builder; import ch.unibe.scg.regex.Transition.Priority; /** * Not thread-safe! Use only from one thread at a time! * * @author nes */ class RegexToNFA { final InputRangeCleanup inputRangeCleanup = new InputRangeCleanup(); TNFA convert(final Node node) { Collection<InputRange> allInputRanges = new ArrayList<>(); allInputRanges.add(InputRange.ANY); // All regexes contain this implicitly. 
findRanges(node, allInputRanges); final Builder builder = Builder.make(allInputRanges); builder.registerCaptureGroup(builder.captureGroupMaker.entireMatch); final MiniAutomaton m = makeInitialMiniAutomaton(builder, builder.captureGroupMaker.entireMatch); final MiniAutomaton a = make(m, builder, node, builder.captureGroupMaker.entireMatch); final State endTagger = builder.makeState(); builder.addEndTagTransition(a.finishing, endTagger, builder.captureGroupMaker.entireMatch, Priority.NORMAL); builder.setAsAccepting(endTagger); return builder.build(); } private void findRanges(Node n, Collection<InputRange> out) { if (n instanceof Node.SetItem) { out.add(((SetItem) n).inputRange); } for (Node c : n.getChildren()) { findRanges(c, out); } } static class MiniAutomaton { final Collection<State> finishing; final Collection<State> initial; MiniAutomaton(final Collection<State> initial, final Collection<State> finishing) { if (initial.iterator().next() == null) { assert false; } this.initial = initial; this.finishing = finishing; } MiniAutomaton(final Collection<State> initial, final State finishing) { this(initial, singleton(finishing)); } @Override public String toString() { return "" + initial + " -> " + finishing; } } MiniAutomaton make(final MiniAutomaton last, final Builder builder, final Node node, CaptureGroup captureGroup) { MiniAutomaton ret; if (node instanceof Node.Any) { ret = makeAny(last, builder); } else if (node instanceof Node.Char) { ret = makeChar(last, builder, (Node.Char) node); } else if (node instanceof Node.Simple) { ret = makeSimple(last, builder, (Node.Simple) node, captureGroup); } else if (node instanceof Node.Optional) { ret = makeOptional(last, builder, (Node.Optional) node, captureGroup); } else if (node instanceof Node.NonGreedyStar) { ret = makeNonGreedyStar(last, builder, (Node.NonGreedyStar) node, captureGroup); } else if (node instanceof Node.Star) { ret = makeStar(last, builder, (Star) node, captureGroup); } else if (node instanceof 
Node.Plus) { ret = makePlus(last, builder, (Node.Plus) node, captureGroup); } else if (node instanceof Node.Group) { ret = makeGroup(last, builder, (Node.Group) node, captureGroup); } else if (node instanceof Node.Eos) { ret = makeEos(last, builder); } else if (node instanceof Node.Char) { ret = makeChar(last, builder, (Node.Char) node); } else if (node instanceof Node.PositiveSet) { ret = makePositiveSet(last, builder, (Node.PositiveSet) node); } else if (node instanceof Node.Union) { ret = makeUnion(last, builder, (Node.Union) node, captureGroup); } else { throw new AssertionError("Unknown node type: " + node); } assert !ret.initial.contains(null); assert !ret.finishing.contains(null); return ret; } MiniAutomaton makeAny(final MiniAutomaton last, final Builder builder) { final State a = builder.makeState(); builder.addUntaggedTransition(InputRange.ANY, last.finishing, a); return new MiniAutomaton(last.finishing, a); } MiniAutomaton makeChar(final MiniAutomaton last, final Builder b, final Node.Char character) { final State a = b.makeState(); final MiniAutomaton ret = new MiniAutomaton(last.finishing, a); b.addUntaggedTransition(character.inputRange, ret.initial, a); return ret; } MiniAutomaton makeEos(final MiniAutomaton last, final Builder builder) { final State a = builder.makeState(); builder.addUntaggedTransition(InputRange.EOS, last.finishing, a); return new MiniAutomaton(last.finishing, a); } MiniAutomaton makeGroup(final MiniAutomaton last, final Builder builder, final Group group, CaptureGroup parentCaptureGroup) { final CaptureGroup cg = builder.makeCaptureGroup(parentCaptureGroup); builder.registerCaptureGroup(cg); final State startGroup = builder.makeState(); builder.addStartTagTransition(last.finishing, startGroup, cg, Priority.NORMAL); final MiniAutomaton startGroupAutomaton = new MiniAutomaton(singleton(startGroup), singleton(startGroup)); final MiniAutomaton body = make(startGroupAutomaton, builder, group.body, cg); final State endTag = 
builder.makeState(); builder.addEndTagTransition(body.finishing, endTag, cg, Priority.NORMAL); return new MiniAutomaton(last.finishing, endTag); } MiniAutomaton makeInitialMiniAutomaton(final Builder builder, CaptureGroup entireMatch) { final State init = builder.makeInitialState(); final State startTagger = builder.makeState(); builder.addStartTagTransition(singleton(init), startTagger, entireMatch, Priority.NORMAL); return new MiniAutomaton(singleton(init), singleton(startTagger)); } MiniAutomaton makeOptional(final MiniAutomaton last, final Builder builder, final Optional optional, CaptureGroup captureGroup) { final MiniAutomaton ma = make(last, builder, optional.elementary, captureGroup); final List<State> f = new ArrayList<>(last.finishing); f.addAll(ma.finishing); return new MiniAutomaton(last.finishing, f); } MiniAutomaton makePlus(final MiniAutomaton last, final Builder builder, final Plus plus, CaptureGroup captureGroup) { final MiniAutomaton inner = make(last, builder, plus.elementary, captureGroup); Collection<State> out = singleton(builder.makeState()); builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, out, Priority.LOW); final MiniAutomaton ret = new MiniAutomaton(last.finishing, out); builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, inner.initial, Priority.NORMAL); return ret; } MiniAutomaton makeUnion(MiniAutomaton last, Builder builder, Union union, CaptureGroup captureGroup) { MiniAutomaton left = make(last, builder, union.left, captureGroup); MiniAutomaton right = make(last, builder, union.right, captureGroup); Collection<State> out = singleton(builder.makeState()); builder.makeUntaggedEpsilonTransitionFromTo(left.finishing, out, Priority.NORMAL); builder.makeUntaggedEpsilonTransitionFromTo(right.finishing, out, Priority.LOW); return new MiniAutomaton(last.finishing, out); } MiniAutomaton makePositiveSet(final MiniAutomaton last, final Builder builder, final PositiveSet set) { final List<SetItem> is = set.items; final 
SortedSet<InputRange> ranges = new TreeSet<>(); for (final SetItem i : is) { ranges.add(i.inputRange); } final List<InputRange> rangesList = new ArrayList<>(ranges); final List<InputRange> cleanedRanges = inputRangeCleanup.cleanUp(rangesList); final State a = builder.makeState(); for (InputRange range : cleanedRanges) { builder.addUntaggedTransition(range, last.finishing, a); } return new MiniAutomaton(last.finishing, a); } MiniAutomaton makeSimple(final MiniAutomaton last, final Builder b, final Simple simple, CaptureGroup captureGroup) { final List<? extends Basic> bs = simple.basics; MiniAutomaton lm = last; for (final Basic e : bs) { lm = make(lm, b, e, captureGroup); } return new MiniAutomaton(last.finishing, lm.finishing); } MiniAutomaton makeNonGreedyStar(MiniAutomaton last, Builder builder, NonGreedyStar nonGreedyStar, CaptureGroup captureGroup) { // Make start state and connect. State start = builder.makeState(); builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL); // Make inner machine. MiniAutomaton innerLast = new MiniAutomaton(last.finishing, start); final MiniAutomaton inner = make(innerLast, builder, nonGreedyStar.elementary, captureGroup); // Connect inner machine back to start. builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.LOW); // Make and connect `out` state. State out = builder.makeState(); builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.NORMAL); return new MiniAutomaton(last.finishing, out); } MiniAutomaton makeStar(final MiniAutomaton last, final Builder builder, final Star star, CaptureGroup captureGroup) { // Make start state and connect. State start = builder.makeState(); builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL); // Make inner machine. 
MiniAutomaton innerLast = new MiniAutomaton(singleton(start), start); final MiniAutomaton inner = make(innerLast, builder, star.elementary, captureGroup); // Connect inner machine back to start. builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.NORMAL); // Make and connect `out` state. State out = builder.makeState(); builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.LOW); return new MiniAutomaton(last.finishing, out); } }
nes1983/tree-regex
src/ch/unibe/scg/regex/RegexToNFA.java
Java
apache-2.0
10,402
/* * Copyright (c) 2015 TextGlass * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ public class TransformerIsNumber implements Transformer { @Override public String transform(String input) throws Exception { try { Double.parseDouble(input); } catch(NumberFormatException nfe) { throw new Exception(nfe.toString()); } return input; } @Override public String toString() { return "TransformerIsNumber"; } }
TextGlass/reference
client/src/TransformerIsNumber.java
Java
apache-2.0
972
/* * Copyright (c) 2017 Martin Pfeffer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pepperonas.materialdialog.adapter; import android.content.Context; import android.content.Intent; import android.content.pm.ResolveInfo; import android.graphics.Typeface; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.pepperonas.materialdialog.R; import com.pepperonas.materialdialog.utils.Utils; import java.util.List; /** * @author Martin Pfeffer (pepperonas) */ public class ShareAdapter extends BaseAdapter { private Object[] items; private LayoutInflater mInflater; private Context mCtx; private Typeface mTypeface; public ShareAdapter(@NonNull Context context) { this.mInflater = LayoutInflater.from(context); Intent sendIntent = new Intent(android.content.Intent.ACTION_SEND); sendIntent.setType("text/plain"); List activities = context.getPackageManager().queryIntentActivities(sendIntent, 0); items = activities.toArray(); mCtx = context; } public ShareAdapter(@NonNull Context context, Typeface typeface) { this.mInflater = LayoutInflater.from(context); Intent sendIntent = new Intent(android.content.Intent.ACTION_SEND); sendIntent.setType("text/plain"); List activities = context.getPackageManager().queryIntentActivities(sendIntent, 0); items = 
activities.toArray(); mCtx = context; mTypeface = typeface; } public int getCount() { return items.length; } public Object getItem(int position) { return items[position]; } public long getItemId(int position) { return position; } public View getView(int position, View convertView, ViewGroup parent) { ViewHolder holder; if (convertView == null) { convertView = mInflater.inflate(R.layout.custom_list_item_share_app, null); holder = new ViewHolder(); holder.logo = (ImageView) convertView.findViewById(R.id.iv_simple_list_item_share_app); holder.name = (TextView) convertView.findViewById(R.id.tv_simple_list_item_share_app); if (mTypeface != null) { holder.name.setTypeface(mTypeface); } convertView.setTag(holder); } else { holder = (ViewHolder) convertView.getTag(); } holder.name.setText(((ResolveInfo) items[position]).activityInfo .applicationInfo.loadLabel(mCtx.getPackageManager()).toString()); holder.logo.setImageDrawable(((ResolveInfo) items[position]).activityInfo .applicationInfo.loadIcon(mCtx.getPackageManager())); LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams( LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT); layoutParams.setMargins( Utils.dp2px(mCtx, 16), Utils.dp2px(mCtx, 4), Utils.dp2px(mCtx, 4), Utils.dp2px(mCtx, 4)); holder.logo.setLayoutParams(layoutParams); return convertView; } static class ViewHolder { TextView name; ImageView logo; } }
pepperonas/MaterialDialog
library/src/main/java/com/pepperonas/materialdialog/adapter/ShareAdapter.java
Java
apache-2.0
3,990
# Rhexia reticulata Humb. & Bonpl. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Melastomataceae/Rhexia/Rhexia reticulata/README.md
Markdown
apache-2.0
182
// Copyright Istio Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package forwarder import ( "bytes" "fmt" "net/http" "istio.io/pkg/log" ) const ( hostHeader = "Host" ) var fwLog = log.RegisterScope("forwarder", "echo clientside", 0) func writeHeaders(requestID int, header http.Header, outBuffer bytes.Buffer, addFn func(string, string)) { for key, values := range header { for _, v := range values { addFn(key, v) if key == hostHeader { outBuffer.WriteString(fmt.Sprintf("[%d] Host=%s\n", requestID, v)) } else { outBuffer.WriteString(fmt.Sprintf("[%d] Header=%s:%s\n", requestID, key, v)) } } } }
istio/istio
pkg/test/echo/server/forwarder/util.go
GO
apache-2.0
1,156
package com.siqisoft.stone.admin.dict.controller; import java.util.List; import org.siqisource.stone.dict.model.Dict; import org.siqisource.stone.dict.service.DictService; import org.siqisource.stone.orm.condition.Condition; import org.siqisource.stone.ui.AjaxResponse; import org.siqisource.stone.ui.Notify; import org.siqisource.stone.ui.easyui.PagedRows; import org.siqisource.stone.ui.easyui.Paging; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import com.siqisoft.stone.admin.dict.service.DictConditionBuilder; @Controller public class DictController { @Autowired DictService service; @RequestMapping("/dict/DictList.do") public String list(Model model) { return "dict/DictList"; } @RequestMapping("/dict/dictListData.do") @ResponseBody public PagedRows<Dict> listData(DictQueryForm dictQueryForm, Paging paging) { Condition condition = DictConditionBuilder.listCondition(dictQueryForm); int count = service.count(condition); List<Dict> dictList = service.list(condition, paging.getRowBounds()); return new PagedRows<Dict>(count, dictList); } @RequestMapping("/dict/DictRead.do") public String read(String code, Model model) { Dict dict = service.read(code); model.addAttribute("dict", dict); return "dict/DictRead"; } @RequestMapping("/dict/DictAddInit.do") public String addInit(Dict dict, Model model) { return "dict/DictAdd"; } @RequestMapping("/dict/DictAdd.do") public String add(Dict dict, Model model) { service.insert(dict); return this.read(dict.getCode(), model); } @RequestMapping("/dict/dictDelete.do") @ResponseBody public AjaxResponse delete(String[] codeList, Model model) { // 判断是否被关联 if (codeList != null) { service.deleteBatch(codeList); } return new Notify("成功删除"+codeList.length+"条记录"); } @RequestMapping("/dict/DictEditInit.do") public String 
editInit(String code, Model model) { Dict dict = service.read(code); model.addAttribute("dict", dict); return "dict/DictEdit"; } @RequestMapping("/dict/DictEdit.do") public String edit(Dict dict, Model model) { service.update(dict); return this.read(dict.getCode(), model); } }
ylyxf/stone-sdk
src/main/java/com/siqisoft/stone/admin/dict/controller/DictController.java
Java
apache-2.0
2,471
# Schizaea palmata Hombr. & Jacq. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Pteridophyta/Polypodiopsida/Schizaeales/Schizaeaceae/Schizaea/Schizaea palmata/README.md
Markdown
apache-2.0
181
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.listeners;

import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiElement;

/**
 * Extension point through which refactorings obtain element listeners.
 * Before a particular element is subjected to a refactoring, the
 * refactoring invokes {@link #getListener(com.intellij.psi.PsiElement)} on
 * every registered {@linkplain RefactoringElementListenerProvider}.
 *
 * @author dsl
 */
public interface RefactoringElementListenerProvider {
  /** Extension point name under which providers are registered. */
  ExtensionPointName<RefactoringElementListenerProvider> EP_NAME = ExtensionPointName.create("com.intellij.refactoring.elementListenerProvider");

  /**
   * Returns a listener for the given element, or {@code null} if this
   * provider is not interested in it. Invoked inside a read action.
   *
   * @param element the element about to be refactored
   */
  @javax.annotation.Nullable
  RefactoringElementListener getListener(PsiElement element);
}
consulo/consulo
modules/base/lang-api/src/main/java/com/intellij/refactoring/listeners/RefactoringElementListenerProvider.java
Java
apache-2.0
1,347
/*
 * Copyright 2016 Shredder121.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.shredder121.gh_event_api.handler.pull_request;

/**
 * The handler interface for receiving {@code pull_request} events.
 *
 * <p>Being a {@link FunctionalInterface}, it can be implemented with a
 * lambda taking the event payload.
 *
 * @author Shredder121
 */
@FunctionalInterface
public interface PullRequestHandler {

    /**
     * Handles one delivered {@code pull_request} event.
     *
     * @param payload the deserialized event payload
     */
    void handle(PullRequestPayload payload);
}
johnktims/gh-event-api
src/main/java/com/github/shredder121/gh_event_api/handler/pull_request/PullRequestHandler.java
Java
apache-2.0
873
<?php if (!defined('BASEPATH')) exit('No direct script access allowed');

/**
 * Generic CodeIgniter base model providing CRUD helpers.
 *
 * Concrete models set $table to the database table they manage.
 *
 * Improvement: GetAll / GetAllFace / GetAllSite / GetAllMail were four
 * near-identical copies of the same query; they now share one protected
 * helper (GetAllFiltered) with identical behavior.
 */
class MY_Model extends CI_Model {

    // Name of the table this model operates on; set by subclasses.
    var $table = "";

    /**
     * Constructor: delegates to CI_Model.
     */
    function __construct()
    {
        parent::__construct();
    }

    /**
     * Inserts a record.
     *
     * @param array $data column => value pairs
     *
     * @return boolean FALSE when $data is not set, otherwise the insert result
     */
    function Inserir($data)
    {
        if (!isset($data))
            return false;

        return $this->db->insert($this->table, $data);
    }

    /**
     * Fetches one record by its id.
     *
     * @param integer $id primary key of the record
     *
     * @return array|null the row, null when not found (FALSE when $id is null)
     */
    function GetById($id)
    {
        if (is_null($id))
            return false;

        $this->db->where('id', $id);
        $query = $this->db->get($this->table);

        if ($query->num_rows() > 0) {
            return $query->row_array();
        } else {
            return null;
        }
    }

    /**
     * Lists every record.
     *
     * @param string $sort  column used for ordering
     * @param string $order 'asc' or 'desc'
     *
     * @return array|null
     */
    function GetAll($sort = 'id', $order = 'asc')
    {
        return $this->GetAllFiltered(null, $sort, $order);
    }

    /** Lists records whose "servico" column mentions Facebook. */
    function GetAllFace($sort = 'id', $order = 'asc')
    {
        return $this->GetAllFiltered("servico like '%Facebook%'", $sort, $order);
    }

    /** Lists records whose "servico" column mentions Website. */
    function GetAllSite($sort = 'id', $order = 'asc')
    {
        return $this->GetAllFiltered("servico like '%Website%'", $sort, $order);
    }

    /** Lists records whose "servico" column mentions Mail. */
    function GetAllMail($sort = 'id', $order = 'asc')
    {
        return $this->GetAllFiltered("servico like '%Mail%'", $sort, $order);
    }

    /**
     * Shared implementation behind the GetAll* family.
     *
     * @param string|null $where raw WHERE clause, or null for no filter
     * @param string      $sort  column used for ordering
     * @param string      $order 'asc' or 'desc'
     *
     * @return array|null result rows, or null when the query matched nothing
     */
    protected function GetAllFiltered($where, $sort, $order)
    {
        if (!is_null($where)) {
            $this->db->where($where);
        }
        $this->db->order_by($sort, $order);
        $query = $this->db->get($this->table);

        if ($query->num_rows() > 0) {
            return $query->result_array();
        } else {
            return null;
        }
    }

    /**
     * Updates a record by its id.
     *
     * @param integer $id   primary key of the record
     * @param array   $data column => value pairs to write
     *
     * @return boolean
     */
    function Atualizar($id, $data)
    {
        if (is_null($id) || !isset($data))
            return false;

        $this->db->where('id', $id);
        return $this->db->update($this->table, $data);
    }

    /**
     * Deletes a record by its id.
     *
     * @param integer $id primary key of the record
     *
     * @return boolean
     */
    function Excluir($id)
    {
        if (is_null($id))
            return false;

        $this->db->where('id', $id);
        return $this->db->delete($this->table);
    }
}
/* End of file */
ChristianHerber/UebiControl
application/core/MY_Model.php
PHP
apache-2.0
2,979
package com.nguyenmanhtuan.benhandientu; import android.app.Activity; import android.content.Intent; import android.content.res.Configuration; import android.content.res.Resources; import android.os.Bundle; import android.util.DisplayMetrics; import android.view.View; import android.widget.Button; import android.widget.TextView; import java.util.HashMap; import java.util.Locale; import com.nguyenmanhtuan.utils.DatabaseHandler; public class RegisteredActivity extends Activity { private Locale myLocale; /** * Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_registered); DatabaseHandler db = new DatabaseHandler(getApplicationContext()); HashMap<String, String> user = new HashMap<String, String>(); user = db.getUserDetails(); /** * Displays the registration details in Text view **/ final TextView fname = (TextView) findViewById(R.id.fname); final TextView lname = (TextView) findViewById(R.id.lname); final TextView uname = (TextView) findViewById(R.id.uname); final TextView email = (TextView) findViewById(R.id.email); final TextView address = (TextView) findViewById(R.id.tvadd); final TextView phonenumber = (TextView) findViewById(R.id.tvphone); final TextView birthyear = (TextView) findViewById(R.id.tvBirthyear); final TextView created_at = (TextView) findViewById(R.id.regat); fname.setText(user.get("fname")); lname.setText(user.get("lname")); uname.setText(user.get("uname")); email.setText(user.get("email")); address.setText(user.get("address")); phonenumber.setText(user.get("phonenumber")); birthyear.setText(user.get("birthyear")); created_at.setText(user.get("created_at")); Button login = (Button) findViewById(R.id.login); login.setOnClickListener(new View.OnClickListener() { public void onClick(View view) { Intent myIntent = new Intent(view.getContext(), LoginActivity.class); startActivityForResult(myIntent, 0); finish(); } }); } public void 
setLocale(String lang) { myLocale = new Locale(lang); Resources res = getResources(); DisplayMetrics dm = res.getDisplayMetrics(); Configuration conf = res.getConfiguration(); conf.locale = myLocale; res.updateConfiguration(conf, dm); Intent refresh = new Intent(this, RegisteredActivity.class); startActivity(refresh); } }
techmaster-prj/BenhAnDienTu
BenhAnDienTu/src/com/nguyenmanhtuan/benhandientu/RegisteredActivity.java
Java
apache-2.0
2,788
package com.vertabelo.mobileorm.myplaces.orm.gen; public class AddressViewDAOImpl extends com.vertabelo.mobileorm.myplaces.orm.runtime.dao.BaseDAO<AddressView> implements AddressViewDAO { public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource) { super(dataSource); } public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource, com.vertabelo.mobileorm.myplaces.orm.runtime.util.DAOMonitor daoMonitor) { super(dataSource, daoMonitor); } @Override public Class<AddressView> getPojoClass() { return POJO_CLASS; } @Override public com.vertabelo.mobileorm.myplaces.orm.runtime.query.TableExpression getTableExpression() { return TABLE_EXPRESSION; } @Override public com.vertabelo.mobileorm.myplaces.orm.runtime.util.ResultSetHandler getResultSetHandler() { return RESULT_SET_HANDLER; } @Override public java.util.List<AddressView> getAddressViewList() { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); query.orderBy(orderBy); com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection 
asc) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); query.orderBy(orderBy, asc); com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); query.setWhere(where); com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where, com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); query.setWhere(where); query.orderBy(orderBy); com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where, com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection asc) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION); query.setWhere(where); query.orderBy(orderBy, asc); 
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView> selectObjectListResult = select(query, RESULT_SET_HANDLER); return selectObjectListResult.getObjectList(); } @Override public Long getCount() { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION, com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT", com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK)); java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList(); if (list.size() > 1) { throw new RuntimeException("More than one object returned"); } else if (list.size() == 1) { return list.get(0); } else { throw new RuntimeException("Cannot retrieve count() method result"); } } @Override public Long getCount(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) { com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query = new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION, com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT", com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK)); query.setWhere(where); java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList(); if (list.size() > 1) { throw new RuntimeException("More than one object returned"); } else if (list.size() == 1) { return list.get(0); } else { throw new RuntimeException("Cannot retrieve count() method result"); } } }
Vertabelo/mobiorm-demo-android
app/src/main/java/com/vertabelo/mobileorm/myplaces/orm/gen/AddressViewDAOImpl.java
Java
apache-2.0
6,557
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.cep.operator; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.cep.Event; import org.apache.flink.cep.SubEvent; import org.apache.flink.cep.nfa.NFA; import org.apache.flink.cep.nfa.compiler.NFACompiler; import org.apache.flink.cep.pattern.Pattern; import org.apache.flink.cep.pattern.conditions.SimpleCondition; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.api.windowing.time.Time; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.OperatorSnapshotUtil; import org.apache.flink.streaming.util.migration.MigrationTestUtil; import org.apache.flink.streaming.util.migration.MigrationVersion; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import java.util.Collection; import 
java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; import static org.apache.flink.cep.operator.CepOperatorTestUtilities.getKeyedCepOpearator; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Tests for checking whether CEP operator can restore from snapshots that were done * using previous Flink versions. * * <p>For regenerating the binary snapshot file of previous versions you have to run the * {@code write*()} method on the corresponding Flink release-* branch. */ @RunWith(Parameterized.class) public class CEPMigrationTest { /** * TODO change this to the corresponding savepoint version to be written (e.g. {@link MigrationVersion#v1_3} for 1.3) * TODO and remove all @Ignore annotations on write*Snapshot() methods to generate savepoints */ private final MigrationVersion flinkGenerateSavepointVersion = null; private final MigrationVersion migrateVersion; @Parameterized.Parameters(name = "Migration Savepoint: {0}") public static Collection<MigrationVersion> parameters () { return Arrays.asList(MigrationVersion.v1_3, MigrationVersion.v1_4, MigrationVersion.v1_5); } public CEPMigrationTest(MigrationVersion migrateVersion) { this.migrateVersion = migrateVersion; } /** * Manually run this to write binary snapshot data. 
*/ @Ignore @Test public void writeAfterBranchingPatternSnapshot() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent = new Event(42, "start", 1.0); final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0); final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); harness.open(); harness.processElement(new StreamRecord<Event>(startEvent, 1)); harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2)); harness .processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3)); harness.processElement(new StreamRecord<Event>(middleEvent1, 2)); harness.processElement(new StreamRecord<Event>(middleEvent2, 3)); harness.processWatermark(new Watermark(5)); // do snapshot and save to file OperatorSubtaskState snapshot = harness.snapshot(0L, 0L); OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/cep-migration-after-branching-flink" + flinkGenerateSavepointVersion + "-snapshot"); } finally { harness.close(); } } @Test public void testRestoreAfterBranchingPattern() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent = new Event(42, "start", 1.0); final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0); final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0); final Event endEvent = new Event(42, "end", 1.0); 
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); MigrationTestUtil.restoreFromSnapshot( harness, OperatorSnapshotUtil.getResourceFilename("cep-migration-after-branching-flink" + migrateVersion + "-snapshot"), migrateVersion); harness.open(); harness.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4)); harness.processElement(new StreamRecord<>(endEvent, 5)); harness.processWatermark(new Watermark(20)); ConcurrentLinkedQueue<Object> result = harness.getOutput(); // watermark and 2 results assertEquals(3, result.size()); Object resultObject1 = result.poll(); assertTrue(resultObject1 instanceof StreamRecord); StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1; assertTrue(resultRecord1.getValue() instanceof Map); Object resultObject2 = result.poll(); assertTrue(resultObject2 instanceof StreamRecord); StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2; assertTrue(resultRecord2.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap1 = (Map<String, List<Event>>) resultRecord1.getValue(); assertEquals(startEvent, patternMap1.get("start").get(0)); assertEquals(middleEvent1, patternMap1.get("middle").get(0)); assertEquals(endEvent, patternMap1.get("end").get(0)); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap2 = (Map<String, List<Event>>) resultRecord2.getValue(); assertEquals(startEvent, patternMap2.get("start").get(0)); assertEquals(middleEvent2, patternMap2.get("middle").get(0)); assertEquals(endEvent, patternMap2.get("end").get(0)); // and now go for a checkpoint with the new serializers final Event startEvent1 = new Event(42, "start", 2.0); final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0); final Event endEvent1 = new Event(42, "end", 2.0); harness.processElement(new 
StreamRecord<Event>(startEvent1, 21)); harness.processElement(new StreamRecord<Event>(middleEvent3, 23)); // simulate snapshot/restore with some elements in internal sorting queue OperatorSubtaskState snapshot = harness.snapshot(1L, 1L); harness.close(); harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); harness.setup(); harness.initializeState(snapshot); harness.open(); harness.processElement(new StreamRecord<>(endEvent1, 25)); harness.processWatermark(new Watermark(50)); result = harness.getOutput(); // watermark and the result assertEquals(2, result.size()); Object resultObject3 = result.poll(); assertTrue(resultObject3 instanceof StreamRecord); StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3; assertTrue(resultRecord3.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap3 = (Map<String, List<Event>>) resultRecord3.getValue(); assertEquals(startEvent1, patternMap3.get("start").get(0)); assertEquals(middleEvent3, patternMap3.get("middle").get(0)); assertEquals(endEvent1, patternMap3.get("end").get(0)); } finally { harness.close(); } } /** * Manually run this to write binary snapshot data. 
*/ @Ignore @Test public void writeStartingNewPatternAfterMigrationSnapshot() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new Event(42, "start", 1.0); final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); harness.open(); harness.processElement(new StreamRecord<Event>(startEvent1, 1)); harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2)); harness .processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3)); harness.processElement(new StreamRecord<Event>(middleEvent1, 2)); harness.processWatermark(new Watermark(5)); // do snapshot and save to file OperatorSubtaskState snapshot = harness.snapshot(0L, 0L); OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/cep-migration-starting-new-pattern-flink" + flinkGenerateSavepointVersion + "-snapshot"); } finally { harness.close(); } } @Test public void testRestoreStartingNewPatternAfterMigration() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new Event(42, "start", 1.0); final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0); final Event startEvent2 = new Event(42, "start", 5.0); final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0); final Event endEvent = new Event(42, "end", 1.0); OneInputStreamOperatorTestHarness<Event, 
Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); MigrationTestUtil.restoreFromSnapshot( harness, OperatorSnapshotUtil.getResourceFilename("cep-migration-starting-new-pattern-flink" + migrateVersion + "-snapshot"), migrateVersion); harness.open(); harness.processElement(new StreamRecord<>(startEvent2, 5)); harness.processElement(new StreamRecord<Event>(middleEvent2, 6)); harness.processElement(new StreamRecord<>(endEvent, 7)); harness.processWatermark(new Watermark(20)); ConcurrentLinkedQueue<Object> result = harness.getOutput(); // watermark and 3 results assertEquals(4, result.size()); Object resultObject1 = result.poll(); assertTrue(resultObject1 instanceof StreamRecord); StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1; assertTrue(resultRecord1.getValue() instanceof Map); Object resultObject2 = result.poll(); assertTrue(resultObject2 instanceof StreamRecord); StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2; assertTrue(resultRecord2.getValue() instanceof Map); Object resultObject3 = result.poll(); assertTrue(resultObject3 instanceof StreamRecord); StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3; assertTrue(resultRecord3.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap1 = (Map<String, List<Event>>) resultRecord1.getValue(); assertEquals(startEvent1, patternMap1.get("start").get(0)); assertEquals(middleEvent1, patternMap1.get("middle").get(0)); assertEquals(endEvent, patternMap1.get("end").get(0)); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap2 = (Map<String, List<Event>>) resultRecord2.getValue(); assertEquals(startEvent1, patternMap2.get("start").get(0)); assertEquals(middleEvent2, patternMap2.get("middle").get(0)); assertEquals(endEvent, patternMap2.get("end").get(0)); @SuppressWarnings("unchecked") 
Map<String, List<Event>> patternMap3 = (Map<String, List<Event>>) resultRecord3.getValue(); assertEquals(startEvent2, patternMap3.get("start").get(0)); assertEquals(middleEvent2, patternMap3.get("middle").get(0)); assertEquals(endEvent, patternMap3.get("end").get(0)); // and now go for a checkpoint with the new serializers final Event startEvent3 = new Event(42, "start", 2.0); final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0); final Event endEvent1 = new Event(42, "end", 2.0); harness.processElement(new StreamRecord<Event>(startEvent3, 21)); harness.processElement(new StreamRecord<Event>(middleEvent3, 23)); // simulate snapshot/restore with some elements in internal sorting queue OperatorSubtaskState snapshot = harness.snapshot(1L, 1L); harness.close(); harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); harness.setup(); harness.initializeState(snapshot); harness.open(); harness.processElement(new StreamRecord<>(endEvent1, 25)); harness.processWatermark(new Watermark(50)); result = harness.getOutput(); // watermark and the result assertEquals(2, result.size()); Object resultObject4 = result.poll(); assertTrue(resultObject4 instanceof StreamRecord); StreamRecord<?> resultRecord4 = (StreamRecord<?>) resultObject4; assertTrue(resultRecord4.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap4 = (Map<String, List<Event>>) resultRecord4.getValue(); assertEquals(startEvent3, patternMap4.get("start").get(0)); assertEquals(middleEvent3, patternMap4.get("middle").get(0)); assertEquals(endEvent1, patternMap4.get("end").get(0)); } finally { harness.close(); } } /** * Manually run this to write binary snapshot data. 
*/ @Ignore @Test public void writeSinglePatternAfterMigrationSnapshot() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new Event(42, "start", 1.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new SinglePatternNFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); harness.open(); harness.processWatermark(new Watermark(5)); // do snapshot and save to file OperatorSubtaskState snapshot = harness.snapshot(0L, 0L); OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/cep-migration-single-pattern-afterwards-flink" + flinkGenerateSavepointVersion + "-snapshot"); } finally { harness.close(); } } @Test public void testSinglePatternAfterMigration() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new Event(42, "start", 1.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new SinglePatternNFAFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); MigrationTestUtil.restoreFromSnapshot( harness, OperatorSnapshotUtil.getResourceFilename("cep-migration-single-pattern-afterwards-flink" + migrateVersion + "-snapshot"), migrateVersion); harness.open(); harness.processElement(new StreamRecord<>(startEvent1, 5)); harness.processWatermark(new Watermark(20)); ConcurrentLinkedQueue<Object> result = harness.getOutput(); // watermark and the result 
assertEquals(2, result.size()); Object resultObject = result.poll(); assertTrue(resultObject instanceof StreamRecord); StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject; assertTrue(resultRecord.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue(); assertEquals(startEvent1, patternMap.get("start").get(0)); } finally { harness.close(); } } /** * Manually run this to write binary snapshot data. */ @Ignore @Test public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAComplexConditionsFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); harness.open(); harness.processElement(new StreamRecord<>(startEvent1, 5)); harness.processWatermark(new Watermark(6)); // do snapshot and save to file OperatorSubtaskState snapshot = harness.snapshot(0L, 0L); OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot"); } finally { harness.close(); } } @Test public void testAndOrSubtypeConditionsAfterMigration() throws Exception { KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() { private static final long serialVersionUID = -4873366487571254798L; @Override public Integer getKey(Event value) throws Exception { return value.getId(); } }; final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0); OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> 
harness = new KeyedOneInputStreamOperatorTestHarness<>( getKeyedCepOpearator(false, new NFAComplexConditionsFactory()), keySelector, BasicTypeInfo.INT_TYPE_INFO); try { harness.setup(); MigrationTestUtil.restoreFromSnapshot( harness, OperatorSnapshotUtil.getResourceFilename("cep-migration-conditions-flink" + migrateVersion + "-snapshot"), migrateVersion); harness.open(); final Event endEvent = new SubEvent(42, "end", 1.0, 2.0); harness.processElement(new StreamRecord<>(endEvent, 9)); harness.processWatermark(new Watermark(20)); ConcurrentLinkedQueue<Object> result = harness.getOutput(); // watermark and the result assertEquals(2, result.size()); Object resultObject = result.poll(); assertTrue(resultObject instanceof StreamRecord); StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject; assertTrue(resultRecord.getValue() instanceof Map); @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue(); assertEquals(startEvent1, patternMap.get("start").get(0)); assertEquals(endEvent, patternMap.get("start").get(1)); } finally { harness.close(); } } private static class SinglePatternNFAFactory implements NFACompiler.NFAFactory<Event> { private static final long serialVersionUID = 1173020762472766713L; private final boolean handleTimeout; private SinglePatternNFAFactory() { this(false); } private SinglePatternNFAFactory(boolean handleTimeout) { this.handleTimeout = handleTimeout; } @Override public NFA<Event> createNFA() { Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter()) .within(Time.milliseconds(10L)); return NFACompiler.compileFactory(pattern, handleTimeout).createNFA(); } } private static class NFAComplexConditionsFactory implements NFACompiler.NFAFactory<Event> { private static final long serialVersionUID = 1173020762472766713L; private final boolean handleTimeout; private NFAComplexConditionsFactory() { this(false); } private NFAComplexConditionsFactory(boolean 
handleTimeout) { this.handleTimeout = handleTimeout; } @Override public NFA<Event> createNFA() { Pattern<Event, ?> pattern = Pattern.<Event>begin("start") .subtype(SubEvent.class) .where(new MiddleFilter()) .or(new SubEventEndFilter()) .times(2) .within(Time.milliseconds(10L)); return NFACompiler.compileFactory(pattern, handleTimeout).createNFA(); } } private static class NFAFactory implements NFACompiler.NFAFactory<Event> { private static final long serialVersionUID = 1173020762472766713L; private final boolean handleTimeout; private NFAFactory() { this(false); } private NFAFactory(boolean handleTimeout) { this.handleTimeout = handleTimeout; } @Override public NFA<Event> createNFA() { Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter()) .followedByAny("middle") .subtype(SubEvent.class) .where(new MiddleFilter()) .followedByAny("end") .where(new EndFilter()) // add a window timeout to test whether timestamps of elements in the // priority queue in CEP operator are correctly checkpointed/restored .within(Time.milliseconds(10L)); return NFACompiler.compileFactory(pattern, handleTimeout).createNFA(); } } private static class StartFilter extends SimpleCondition<Event> { private static final long serialVersionUID = 5726188262756267490L; @Override public boolean filter(Event value) throws Exception { return value.getName().equals("start"); } } private static class MiddleFilter extends SimpleCondition<SubEvent> { private static final long serialVersionUID = 6215754202506583964L; @Override public boolean filter(SubEvent value) throws Exception { return value.getVolume() > 5.0; } } private static class EndFilter extends SimpleCondition<Event> { private static final long serialVersionUID = 7056763917392056548L; @Override public boolean filter(Event value) throws Exception { return value.getName().equals("end"); } } private static class SubEventEndFilter extends SimpleCondition<SubEvent> { private static final long serialVersionUID = 
7056763917392056548L; @Override public boolean filter(SubEvent value) throws Exception { return value.getName().equals("end"); } } }
zhangminglei/flink
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/operator/CEPMigrationTest.java
Java
apache-2.0
24,021
package misc // TrackedRepo identifies a git remote repository. type TrackedRepo string
kubernetes-sigs/kustomize
cmd/gorepomod/internal/misc/trackedrepo.go
GO
apache-2.0
89
# -*- coding: utf-8 -*- # # Armstrong Platform Documentation documentation build configuration file, created by # sphinx-quickstart on Mon Sep 26 13:38:48 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Armstrong Platform' copyright = u'2011, Bay Citizen and Texas Tribune' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '12.03.1' # The full version, including alpha/beta/rc tags. release = '12.03.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'armstrong' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['_themes', ] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. html_additional_pages = { 'index': 'index.html', } # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'ArmstrongPlatformDocumentationdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). 
#latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'ArmstrongPlatformDocumentation.tex', u'Armstrong Platform Documentation Documentation', u'Bay Citizen and Texas Tribune', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'armstrongplatformdocumentation', u'Armstrong Platform Documentation Documentation', [u'Bay Citizen and Texas Tribune'], 1) ] # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'Armstrong Platform Documentation' epub_author = u'Bay Citizen and Texas Tribune' epub_publisher = u'Bay Citizen and Texas Tribune' epub_copyright = u'2011, Bay Citizen and Texas Tribune' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. 
#epub_identifier = '' # A unique identification for the text. #epub_uid = '' # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None}
armstrong/docs.armstrongcms.org
source/conf.py
Python
apache-2.0
8,620
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.generation;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.intention.AddAnnotationPsiFix;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import static com.intellij.codeInsight.AnnotationUtil.CHECK_EXTERNAL;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_TYPE;

/**
 * Extension point contract for copying annotations (e.g. nullability markers) from a
 * super method onto a newly generated override/implementation.
 *
 * @author anna
 */
public interface OverrideImplementsAnnotationsHandler {
  // All registered handlers; iterated by repeatAnnotationsFromSource below.
  ExtensionPointName<OverrideImplementsAnnotationsHandler> EP_NAME = ExtensionPointName.create("com.intellij.overrideImplementsAnnotationsHandler");

  /**
   * Returns annotations which should be copied from a source to an implementation (by default, no annotations are copied).
   */
  default String[] getAnnotations(@NotNull PsiFile file) {
    //noinspection deprecation
    return getAnnotations(file.getProject());
  }

  /**
   * @deprecated Use {@link #getAnnotations(PsiFile)}
   */
  @Deprecated
  String[] getAnnotations(Project project);

  /**
   * @deprecated no longer consulted; always returns an empty array.
   */
  @Deprecated
  @NotNull
  default String[] annotationsToRemove(Project project, @NotNull String fqName) {
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }

  /** Perform post processing on the annotations, such as deleting or renaming or otherwise updating annotations in the override */
  default void cleanup(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
  }

  /**
   * Copies every handler-declared annotation that is present on {@code source} but
   * missing on {@code target}, then lets each handler post-process the result.
   *
   * @param source      the member the annotations are read from (e.g. the super method)
   * @param targetClass the class the override is generated into; may be null, in which
   *                    case the module is resolved from {@code target} itself
   * @param target      the generated member receiving the annotations
   */
  static void repeatAnnotationsFromSource(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
    Module module = ModuleUtilCore.findModuleForPsiElement(targetClass != null ? targetClass : target);
    GlobalSearchScope moduleScope = module != null ? GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module) : null;
    Project project = target.getProject();
    JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
      for (String annotation : each.getAnnotations(target.getContainingFile())) {
        // Skip annotations whose class is not resolvable in the target module.
        if (moduleScope != null && facade.findClass(annotation, moduleScope) == null) continue;
        int flags = CHECK_EXTERNAL | CHECK_TYPE;
        // Copy only when present on the source and not already on the target.
        if (AnnotationUtil.isAnnotated(source, annotation, flags) && !AnnotationUtil.isAnnotated(target, annotation, flags)) {
          each.transferToTarget(annotation, source, target);
        }
      }
    }
    // Second pass: give every handler a chance to clean up, regardless of
    // whether it transferred anything in the first pass.
    for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
      each.cleanup(source, targetClass, target);
    }
  }

  /**
   * Physically adds {@code annotation} to {@code target}, carrying over the
   * attribute values from the source's annotation when one is present.
   */
  default void transferToTarget(String annotation, PsiModifierListOwner source, PsiModifierListOwner target) {
    PsiModifierList modifierList = target.getModifierList();
    assert modifierList != null : target;
    PsiAnnotation srcAnnotation = AnnotationUtil.findAnnotation(source, annotation);
    PsiNameValuePair[] valuePairs = srcAnnotation != null ? srcAnnotation.getParameterList().getAttributes() : PsiNameValuePair.EMPTY_ARRAY;
    AddAnnotationPsiFix.addPhysicalAnnotation(annotation, valuePairs, modifierList);
  }
}
paplorinc/intellij-community
java/java-impl/src/com/intellij/codeInsight/generation/OverrideImplementsAnnotationsHandler.java
Java
apache-2.0
3,542
package com.chisw.work.addressbook.test; import com.chisw.work.addressbook.Data.GroupData; import com.chisw.work.addressbook.Data.Groups; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; public class TestGroupModification extends TestBase { @BeforeMethod public void checkPreconditions() { if (app.db().groups().size() == 0) { app.goTo().groupPage(); app.groups().createGroupInBeforeMethod(); } } @Test public void checkGroupModification() { Groups before = app.db().groups(); GroupData modifiedGroup = before.iterator().next(); GroupData group = new GroupData() .withId(modifiedGroup.getId()).withGroupName("test 258").withGroupLogo("Logo 123").withGroupComment("Comment 12345"); app.goTo().groupPage(); app.groups().modifyGroup(group); assertThat(app.groups().count(),equalTo(before.size())); Groups after = app.db().groups(); assertThat(after, equalTo(before.withoutAdded(modifiedGroup).withAdded(group))); verifyGroupsListInUi(); } }
Tarrest/java_home
addressbook/src/test/java/com/chisw/work/addressbook/test/TestGroupModification.java
Java
apache-2.0
1,230
package io.omengye.common.utils.constants; public class Constants { private Constants(){} public static final String RESULT_FLAG = "flag"; }
omengye/ws
common/src/main/java/io/omengye/common/utils/constants/Constants.java
Java
apache-2.0
153
/* * Created on Mar 29, 2009 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright @2013 the original author or authors. */ package org.fest.assertions.api; import static org.fest.test.ExpectedException.none; import org.fest.test.ExpectedException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; /** * Tests for {@link LongAssert#isNull()}. * * @author Yvonne Wang */ public class LongAssert_isNull_Test { @Rule public ExpectedException thrown = none(); private LongAssert assertions; private Long actual; @Before public void setUp() { actual = null; assertions = new LongAssert(actual); } @Test public void should_pass_if_actual_is_null() { assertions.isNull(); } @Test public void should_fail_if_actual_is_not_null() { thrown.expect(AssertionError.class); actual = new Long(6l); assertions = new LongAssert(actual); assertions.isNull(); } }
alexruiz/fest-assert-2.x
src/test/java/org/fest/assertions/api/LongAssert_isNull_Test.java
Java
apache-2.0
1,440
#ifndef LAYER_H_INCLUDED
#define LAYER_H_INCLUDED

#include "basicresource.h"
#include "Drawable.h"

// Drawable subclass with no state of its own; the three drawing hooks are
// declared here and defined out of line (see the corresponding .cpp).
class Layer : public Drawable
{
private:
    // 'override' guarantees each of these overrides a virtual declared in
    // Drawable's draw pipeline.
    void transformation() override;
    void onDraw() override;
    void postDraw() override;

public:
    // Delegates to Drawable's default constructor; nothing else to set up.
    Layer(): Drawable()
    {
        // intentionally empty
    }
};

#endif // LAYER_H_INCLUDED
pibomb/crowbot
Crowbot/Crowbot/Layer.h
C
apache-2.0
330
/** * Copyright 2017 dialog LLC <[email protected]> * @flow */ import type { PeerInfo } from '@dlghq/dialog-types'; import type { AvatarSize } from '../Avatar/getAvatarSize'; import type { Gradient } from '../Avatar/getAvatarColor'; import React, { PureComponent } from 'react'; import classNames from 'classnames'; import getAvatarSize from '../Avatar/getAvatarSize'; import getAvatarText from '../Avatar/getAvatarText'; import getAvatarColor from '../Avatar/getAvatarColor'; import createSequence from '../../utils/createSequence'; import styles from '../PeerAvatar/PeerAvatar.css'; export type Props = { className?: string, peerBig: PeerInfo, peerSmall: PeerInfo, size: AvatarSize, onClick?: (event: SyntheticMouseEvent) => any }; type DefaultProps = { size: AvatarSize }; const seq = createSequence(); class DoublePeerAvatar extends PureComponent<DefaultProps, Props, void> { id: string; ids: { big: string, clip: string, small: string }; static defaultProps = { size: 'medium' }; constructor(props: Props) { super(props); this.id = 'double_peer_avatar_' + seq.next(); this.ids = { big: `${this.id}_big`, clip: `${this.id}_big_clip`, small: `${this.id}_small` }; } getAvatarSize(): number { return getAvatarSize(this.props.size); } renderDefsBig(): React.Element<any> { if (this.props.peerBig.avatar) { return ( <pattern id={this.ids.big} width="100%" height="100%" patternUnits="userSpaceOnUse"> <image x="0" y="0" width="100px" height="100px" xlinkHref={this.props.peerBig.avatar} /> </pattern> ); } const colors: Gradient = getAvatarColor(this.props.peerBig.placeholder); return ( <linearGradient id={this.ids.big} gradientUnits="userSpaceOnUse" x1="6.79%" y1="105.31%" x2="93.21%" y2="-5.31%" > <stop stopColor={colors.payload.from} /> <stop offset="1" stopColor={colors.payload.to} /> </linearGradient> ); } renderClipMaskBig(): React.Element<any> { return ( <clipPath id={this.ids.clip}> <path // eslint-disable-next-line d="M58.2070074,99.3297063 C55.5367715,99.7706374 52.795171,100 
50,100 C22.3857625,100 0,77.6142375 0,50 C0,22.3857625 22.3857625,0 50,0 C77.6142375,0 100,22.3857625 100,50 C100,52.795171 99.7706374,55.5367715 99.3297063,58.2070074 C94.8434182,55.5348957 89.6009561,54 84,54 C67.4314575,54 54,67.4314575 54,84 C54,89.6009561 55.5348957,94.8434182 58.2070074,99.3297063 Z" /> </clipPath> ); } renderDefsSmall(): React.Element<any> { if (this.props.peerSmall.avatar) { return ( <pattern id={this.ids.small} width="100%" height="100%" x="58" y="58" patternUnits="userSpaceOnUse" > <image x="0" y="0" width="100px" height="100px" xlinkHref={this.props.peerSmall.avatar} transform="scale(0.507046569,0.507046569)" /> </pattern> ); } const colors: Gradient = getAvatarColor(this.props.peerSmall.placeholder); return ( <linearGradient id={this.ids.small} gradientUnits="userSpaceOnUse" x1="6.79%" y1="105.31%" x2="93.21%" y2="-5.31%" > <stop stopColor={colors.payload.from} /> <stop offset="1" stopColor={colors.payload.to} /> </linearGradient> ); } renderSmallAvatar(): React.Element<any> { return ( <circle cx="84" cy="84" r="25" fill={`url(#${this.ids.small})`} /> ); } renderBigAvatar(): React.Element<any> { return ( <path // eslint-disable-next-line d="M58.2070074,99.3297063 C55.5367715,99.7706374 52.795171,100 50,100 C22.3857625,100 0,77.6142375 0,50 C0,22.3857625 22.3857625,0 50,0 C77.6142375,0 100,22.3857625 100,50 C100,52.795171 99.7706374,55.5367715 99.3297063,58.2070074 C94.8434182,55.5348957 89.6009561,54 84,54 C67.4314575,54 54,67.4314575 54,84 C54,89.6009561 55.5348957,94.8434182 58.2070074,99.3297063 Z" fill={`url(#${this.ids.big})`} /> ); } renderPeerSmallText(): ?React.Element<any> { if (this.props.peerSmall.avatar) { return null; } const size = this.getAvatarSize(); const text = size >= 20 ? getAvatarText(this.props.peerSmall.title) : null; const twoChars = Boolean(text && text.length !== 1); const textStyles = { fontSize: twoChars ? 
20 : 24 }; return ( <text className={styles.text} x="84" y="84" textAnchor="middle" alignmentBaseline="central" dominantBaseline="central" style={textStyles} > {text} </text> ); } renderPeerBigText(): ?React.Element<any> { if (this.props.peerBig.avatar) { return null; } const size = this.getAvatarSize(); const text = size >= 20 ? getAvatarText(this.props.peerBig.title) : null; const twoChars = Boolean(text && text.length !== 1); const textStyles = { fontSize: twoChars ? 38 : 48 }; return ( <text className={styles.text} x="50" y="50" textAnchor="middle" alignmentBaseline="central" dominantBaseline="central" style={textStyles} clipPath={`url(#${this.ids.clip})`} > {text} </text> ); } render(): React.Element<any> { const className = classNames(styles.container, { [styles.clickable]: this.props.onClick }, this.props.className); const size = this.getAvatarSize(); return ( <svg viewBox="0 0 109 109" width={size} height={size} className={className} onClick={this.props.onClick} > <defs> {this.renderDefsBig()} {this.renderClipMaskBig()} {this.renderDefsSmall()} </defs> {this.renderBigAvatar()} {this.renderSmallAvatar()} {this.renderPeerBigText()} {this.renderPeerSmallText()} </svg> ); } } export default DoublePeerAvatar;
nolawi/champs-dialog-sg
src/components/DoublePeerAvatar/DoublePeerAvatar.js
JavaScript
apache-2.0
6,308
# This file describes the standard way to build Docker, using docker # # Usage: # # # Assemble the full dev environment. This is slow the first time. # docker build -t docker . # # # Mount your source in an interactive container for quick testing: # docker run -v `pwd`:/go/src/github.com/dotcloud/docker -privileged -i -t docker bash # # # Run the test suite: # docker run -privileged docker hack/make.sh test # # # Publish a release: # docker run -privileged \ # -e AWS_S3_BUCKET=baz \ # -e AWS_ACCESS_KEY=foo \ # -e AWS_SECRET_KEY=bar \ # -e GPG_PASSPHRASE=gloubiboulga \ # docker hack/release.sh # # Note: Apparmor used to mess with privileged mode, but this is no longer # the case. Therefore, you don't have to disable it anymore. # docker-version 0.6.1 from ubuntu:12.04 maintainer Solomon Hykes <[email protected]> # Build dependencies run echo 'deb http://archive.ubuntu.com/ubuntu precise main universe' > /etc/apt/sources.list run apt-get update run apt-get install -y -q curl run apt-get install -y -q git run apt-get install -y -q mercurial run apt-get install -y -q build-essential libsqlite3-dev # Install Go run curl -s https://go.googlecode.com/files/go1.2rc2.src.tar.gz | tar -v -C /usr/local -xz env PATH /usr/local/go/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin env GOPATH /go:/go/src/github.com/dotcloud/docker/vendor run cd /usr/local/go/src && ./make.bash && go install -ldflags '-w -linkmode external -extldflags "-static -Wl,--unresolved-symbols=ignore-in-shared-libs"' -tags netgo -a std # Ubuntu stuff run apt-get install -y -q ruby1.9.3 rubygems libffi-dev run gem install --no-rdoc --no-ri fpm run apt-get install -y -q reprepro dpkg-sig # Install s3cmd 1.0.1 (earlier versions don't support env variables in the config) run apt-get install -y -q python-pip run pip install s3cmd run pip install python-magic run /bin/echo -e '[default]\naccess_key=$AWS_ACCESS_KEY\nsecret_key=$AWS_SECRET_KEY\n' > /.s3cfg # Runtime dependencies run apt-get install 
-y -q iptables run apt-get install -y -q lxc run apt-get install -y -q aufs-tools volume /var/lib/docker workdir /go/src/github.com/dotcloud/docker # Wrap all commands in the "docker-in-docker" script to allow nested containers entrypoint ["hack/dind"] # Upload docker source add . /go/src/github.com/dotcloud/docker
ecnahc515/docker
Dockerfile
Dockerfile
apache-2.0
2,339
/*
** Copyright (c) 2007, DNA Pty Ltd and contributors
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
** copies of the Software, and to permit persons to whom the Software is
** furnished to do so, subject to the following conditions:
**
** The above copyright notice and this permission notice shall be included in
** all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
** OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
** THE SOFTWARE.
*/
#include "libxatmi.h"

#include <unistd.h> /* write(2); made explicit instead of relying on libxatmi.h */

/*
 * tpdiscon(3) stub: conversational disconnect is not supported by this
 * implementation.  Reports the fact on stderr, records a protocol error
 * in tperrno and fails.
 *
 * cd: conversation descriptor (ignored -- nothing to disconnect).
 * Returns -1 with tperrno set to TPEPROTO.
 */
int tpdiscon(int cd)
{
    static const char msg[] = "tpdiscon not supported\n";

    (void)cd; /* unused: the call always fails */

    /* sizeof - 1 keeps the byte count in sync with the literal instead of
       the original hard-coded 23.  Best-effort diagnostic: the write result
       is deliberately ignored, as in the original. */
    (void)write(2, msg, sizeof(msg) - 1);

    tperrno = TPEPROTO;
    return -1;
}
openhoodie/hoodie
xatmi/src/main/cxx/tpdiscon.c
C
apache-2.0
1,261
# ormbad.version
# Helper module for ORMBad version information
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Thu Aug 13 12:38:42 2015 -0400
#
# Copyright (C) 2015 Tipsy Bear Studios
# For license information, see LICENSE.txt
#
# ID: version.py [] [email protected] $

"""
Helper module for ORMBad version information.
"""

##########################################################################
## Versioning
##########################################################################

__version_info__ = {
    'major': 0,
    'minor': 1,
    'micro': 0,
    'releaselevel': 'final',
    'serial': 0,
}


def get_version(short=False):
    """
    Build the dotted version string from ``__version_info__``.

    A non-zero micro number is appended as ``.micro``; for alpha/beta
    pre-releases the level initial and serial (e.g. ``a0``) are appended
    unless ``short`` is True.
    """
    info = __version_info__
    assert info['releaselevel'] in ('alpha', 'beta', 'final')

    parts = ['{major}.{minor}'.format(**info)]
    if info['micro']:
        parts.append('.{micro}'.format(**info))
    if info['releaselevel'] != 'final' and not short:
        parts.append('{0}{1}'.format(info['releaselevel'][0], info['serial']))
    return ''.join(parts)
tipsybear/ormbad
ormbad/version.py
Python
apache-2.0
1,161
/**
 * Created by raj on 19/8/14.
 *
 * PhantomJS scraper: reads episode metadata from animeEpisode.json and
 * collects episode links from gogoanime into links.json.
 *
 * NOTE(review): this script has several defects left as-is below (flagged
 * inline): undefined identifiers `fail`/`success` and `m`, an `evaluate`
 * callback whose `data` argument is never supplied, and `links` entries
 * that are indexed before being initialized.
 */
var fs = require('fs');
var content = fs.read ('animeEpisode.json');
console.log(JSON.stringify(JSON.parse(content)[1][0].title));
videolinks=JSON.parse(content); // NOTE(review): implicit global; parsed but never used below
links=[];                       // implicit global accumulator, indexed as links[k][m]
function pages(k) {             // NOTE(review): declared with one param, but recursive calls pass (k, m) — `m` is undefined here
    var page = new WebPage();   // PhantomJS page object
    page.open('http://www.gogoanime.com/', function (status) {
        console.log('opened gogoanime :++++ ', status);
        // NOTE(review): `fail`/`success` are undefined identifiers; PhantomJS
        // reports status as the *strings* 'fail'/'success' — these comparisons
        // will throw a ReferenceError. TODO confirm intent and quote them.
        if (status==fail){
            page.close();
            pages(k); // retry the same page on failure
        }
        if (status == success) {
            // Inject jQuery so the selectors below work inside the page.
            page.includeJs('http://ajax.googleapis.com/ajax/libs/jquery/1.8.2/jquery.min.js', function () {
                console.log('jq included')
                // NOTE(review): the callback names a `data` parameter but
                // page.evaluate is never given a value for it, so `data` is
                // undefined inside and `data.links.push(...)` will throw.
                var data = page.evaluate(function (data) {
                    var tempdata=[]; // NOTE(review): declared but never used
                    // Scrape anchor hrefs out of the post's first table column.
                    for (var i = 0; i <$('.post div:eq(1) table tbody tr td:eq(0) ul').length; i = i + 1) {
                        data.links.push($('.post div:eq(1) table tbody tr td:eq(0) ul li a').attr('href'));
                    }
                    return JSON.stringify(data);
                });
                // NOTE(review): links[k] is never initialized to an array/object
                // before being indexed, and `m` is not defined in this scope.
                links[k][m] = JSON.parse(data);
                console.log(data);
                if (m < links[k].length - 1) {
                    page.close();
                    console.log('next episoide called');
                    pages(k, m + 1); // advance to the next episode
                }
                ;
                if (m == links[k].length - 1) {
                    page.close();
                    console.log('next anime called');
                    // Persist this anime's links before moving to the next one.
                    var path = 'links.json';
                    fs.write(path, links[k], 'w');
                    pages(k + 1, 1);
                }
                if (k == links.length - 1) {
                    // Final dump of everything collected so far.
                    var path = 'links.json';
                    fs.write(path, links, 'w');
                }
            });
        }
    });
}
pages(1,1);
msandeepraj211/phantom
recent.js
JavaScript
apache-2.0
1,995
#!/usr/bin/python # # Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This example creates a bidder-level filter set. A bidder-level filter set can be used to retrieve aggregated data for all Authorized Buyers accounts under the given bidder account, including the bidder account itself. """ import argparse from datetime import date from datetime import datetime from datetime import timedelta import os import pprint import sys import uuid sys.path.insert(0, os.path.abspath('..')) from googleapiclient.errors import HttpError import samples_util _DATE_FORMAT = '%Y%m%d' _FILTER_SET_NAME_TEMPLATE = ('bidders/{bidders_resource_id}/' 'filterSets/{filtersets_resource_id}') _OWNER_NAME_TEMPLATE = 'bidders/{bidders_resource_id}' _TODAY = date.today() _VALID_ENVIRONMENTS = ('WEB', 'APP') _VALID_FORMATS = ('DISPLAY', 'VIDEO') _VALID_PLATFORMS = ('DESKTOP', 'TABLET', 'MOBILE') _VALID_TIME_SERIES_GRANULARITIES = ('HOURLY', 'DAILY') DEFAULT_BIDDER_RESOURCE_ID = 'ENTER_BIDDER_RESOURCE_ID_HERE' DEFAULT_FILTER_SET_RESOURCE_ID = f'FilterSet_{uuid.uuid4()}' DEFAULT_END_DATE = _TODAY.strftime(_DATE_FORMAT) DEFAULT_START_DATE = (_TODAY - timedelta(days=7)).strftime( _DATE_FORMAT) def main(ad_exchange_buyer, owner_name, body, is_transient): try: # Construct and execute the request. 
filter_set = ad_exchange_buyer.bidders().filterSets().create( ownerName=owner_name, isTransient=is_transient, body=body).execute() print(f'FilterSet created for bidder: "{owner_name}".') pprint.pprint(filter_set) except HttpError as e: print(e) if __name__ == '__main__': def time_series_granularity_type(s): if s not in _VALID_TIME_SERIES_GRANULARITIES: raise argparse.ArgumentTypeError('Invalid TimeSeriesGranularity ' f'specified: "{s}".') return s def environment_type(s): if s not in _VALID_ENVIRONMENTS: raise argparse.ArgumentTypeError( f'Invalid Environment specified: "{s}".') return s def format_type(s): if s not in _VALID_FORMATS: raise argparse.ArgumentTypeError(f'Invalid Format specified: "{s}".') return s def platform_type(s): if s not in _VALID_PLATFORMS: raise argparse.ArgumentTypeError(f'Invalid Platform specified: "{s}".') return s def valid_date(s): try: return datetime.strptime(s, _DATE_FORMAT).date() except ValueError: raise argparse.ArgumentTypeError(f'Invalid date specified: "{s}".') parser = argparse.ArgumentParser( description=('Creates a bidder-level filter set with the specified ' 'options.')) # Required fields. parser.add_argument( '-b', '--bidder_resource_id', default=DEFAULT_BIDDER_RESOURCE_ID, help=('The resource ID of the bidders resource for which the filter set ' 'is being created. This will be used to construct the ownerName ' 'used as a path parameter for filter set requests. For additional ' 'information on how to configure the ownerName path parameter, ' 'see: https://developers.google.com/authorized-buyers/apis/' 'reference/rest/v2beta1/bidders.filterSets/create' '#body.PATH_PARAMETERS.owner_name')) parser.add_argument( '-r', '--resource_id', default=DEFAULT_FILTER_SET_RESOURCE_ID, help=('The resource ID of the filter set. Note that this must be ' 'unique. This will be used to construct the filter set\'s name. 
' 'For additional information on how to configure a filter set\'s ' 'name, see: https://developers.google.com/authorized-buyers/apis/' 'reference/rest/v2beta1/bidders.filterSets#FilterSet.FIELDS.name')) parser.add_argument( '--end_date', default=DEFAULT_END_DATE, type=valid_date, help=('The end date for the filter set\'s absoluteDateRange field, which ' 'will be accepted in this example in YYYYMMDD format.')) parser.add_argument( '--start_date', default=DEFAULT_START_DATE, type=valid_date, help=('The start date for the filter set\'s time_range field, which ' 'will be accepted in this example in YYYYMMDD format.')) # Optional fields. parser.add_argument( '-e', '--environment', required=False, type=environment_type, help=('The environment on which to filter.')) parser.add_argument( '-f', '--format', required=False, type=format_type, help=('The format on which to filter.')) parser.add_argument( '-p', '--platforms', required=False, nargs='*', type=platform_type, help=('The platforms on which to filter. The filters represented by ' 'multiple platforms are ORed together. Note that you may specify ' 'more than one using a space as a delimiter.')) parser.add_argument( '-s', '--seller_network_ids', required=False, nargs='*', type=int, help=('The list of IDs for seller networks on which to filter. The ' 'filters represented by multiple seller network IDs are ORed ' 'together. Note that you may specify more than one using a space ' 'as a delimiter.')) parser.add_argument( '-t', '--time_series_granularity', required=False, type=time_series_granularity_type, help=('The granularity of time intervals if a time series breakdown is ' 'desired.')) parser.add_argument( '--is_transient', required=False, default=True, type=bool, help=('Whether the filter set is transient, or should be persisted ' 'indefinitely. In this example, this will default to True.')) args = parser.parse_args() # Build the time_range as an AbsoluteDateRange. 
time_range = { 'startDate': { 'year': args.start_date.year, 'month': args.start_date.month, 'day': args.start_date.day }, 'endDate': { 'year': args.end_date.year, 'month': args.end_date.month, 'day': args.end_date.day } } # Create a body containing the required fields. BODY = { 'name': _FILTER_SET_NAME_TEMPLATE.format( bidders_resource_id=args.bidder_resource_id, filtersets_resource_id=args.resource_id), # Note: You may alternatively specify relativeDateRange or # realtimeTimeRange. 'absoluteDateRange': time_range } # Add optional fields to body if specified. if args.environment: BODY['environment'] = args.environment if args.format: BODY['format'] = args.format if args.platforms: BODY['platforms'] = args.platforms if args.seller_network_ids: BODY['sellerNetworkIds'] = args.seller_network_ids if args.time_series_granularity: BODY['timeSeriesGranularity'] = args.time_series_granularity try: service = samples_util.GetService('v2beta1') except IOError as ex: print(f'Unable to create adexchangebuyer service - {ex}') print('Did you specify the key file in samples_util.py?') sys.exit(1) main(service, _OWNER_NAME_TEMPLATE.format( bidders_resource_id=args.bidder_resource_id), BODY, args.is_transient)
googleads/googleads-adxbuyer-examples
python/samples/v2_x/create_bidder_level_filter_set.py
Python
apache-2.0
7,717
/******************************************************************************* * Copyright 2006 - 2012 Vienna University of Technology, * Department of Software Technology and Interactive Systems, IFS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * This work originates from the Planets project, co-funded by the European Union under the Sixth Framework Programme. ******************************************************************************/ package eu.scape_project.planning.model.transform; import java.io.Serializable; import java.util.List; import javax.persistence.CascadeType; import javax.persistence.DiscriminatorColumn; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.ManyToOne; import eu.scape_project.planning.model.ChangeLog; import eu.scape_project.planning.model.IChangesHandler; import eu.scape_project.planning.model.ITouchable; import eu.scape_project.planning.model.Values; import eu.scape_project.planning.model.values.INumericValue; import eu.scape_project.planning.model.values.IOrdinalValue; import eu.scape_project.planning.model.values.TargetValues; import eu.scape_project.planning.model.values.Value; import eu.scape_project.planning.validation.ValidationError; /** * Implements basic transformation functionality, i.e. aggregation over {@link Values} and * common properties of transformers. 
* @author Hannes Kulovits */ @Entity @Inheritance @DiscriminatorColumn(name = "type") public abstract class Transformer implements ITransformer, Serializable, ITouchable { private static final long serialVersionUID = -3708795251848706848L; @Id @GeneratedValue protected int id; public int getId() { return id; } public void setId(int id) { this.id = id; } @ManyToOne(cascade=CascadeType.ALL) private ChangeLog changeLog = new ChangeLog(); /** * Transforms all the values in the list of the provided {@link Values}. * According to the type of each {@link Value}, either * {@link ITransformer#transform(INumericValue)} or {@link ITransformer#transform(IOrdinalValue)} * is called. * @param values List of values to be transformed * @return {@link TargetValues}, which contains a list of all transformed values corresponding to the provided input */ public TargetValues transformValues(Values values) { TargetValues result = new TargetValues(); for (Value v : values.getList()) { if (v instanceof INumericValue) { result.add(transform((INumericValue) v)); } else { result.add(transform((IOrdinalValue) v)); } } return result; } public ChangeLog getChangeLog() { return this.changeLog; } public void setChangeLog(ChangeLog value) { changeLog = value; } public boolean isChanged() { return changeLog.isAltered(); } public void touch(String username) { getChangeLog().touch(username); } public void touch() { getChangeLog().touch(); } /** * @see ITouchable#handleChanges(IChangesHandler) */ public void handleChanges(IChangesHandler h){ h.visit(this); } /** * If this Transformer is not correctly configured, this method adds * an appropriate error-message to the given list and returns false. * * @return true if this transformer is correctly configured */ public abstract boolean isTransformable(List<ValidationError> errors); public abstract Transformer clone(); }
openpreserve/plato
plato-model/src/main/java/eu/scape_project/planning/model/transform/Transformer.java
Java
apache-2.0
4,334
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_67) on Fri Nov 14 18:25:20 PST 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Class org.apache.hadoop.hbase.thrift.ThriftServerRunner (HBase 0.98.8-hadoop2 API)</title> <meta name="date" content="2014-11-14"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.hadoop.hbase.thrift.ThriftServerRunner (HBase 0.98.8-hadoop2 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.html" title="class in org.apache.hadoop.hbase.thrift">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.html" target="_top">Frames</a></li> <li><a href="ThriftServerRunner.html" target="_top">No Frames</a></li> </ul> <ul class="navList" 
id="allclasses_navbar_top"> <li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.hadoop.hbase.thrift.ThriftServerRunner" class="title">Uses of Class<br>org.apache.hadoop.hbase.thrift.ThriftServerRunner</h2> </div> <div class="classUseContainer">No usage of org.apache.hadoop.hbase.thrift.ThriftServerRunner</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.html" title="class in org.apache.hadoop.hbase.thrift">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.html" target="_top">Frames</a></li> <li><a href="ThriftServerRunner.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All 
Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2014 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </body> </html>
devansh2015/hbase-0.98.8
docs/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.html
HTML
apache-2.0
4,584
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for Keras core layers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python import keras from tensorflow.python.eager import context from tensorflow.python.framework import ops from tensorflow.python.keras import keras_parameterized from tensorflow.python.keras import testing_utils from tensorflow.python.keras.mixed_precision.experimental import policy from tensorflow.python.ops import math_ops from tensorflow.python.platform import test @keras_parameterized.run_all_keras_modes class DropoutLayersTest(keras_parameterized.TestCase): def test_dropout(self): testing_utils.layer_test( keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2)) testing_utils.layer_test( keras.layers.Dropout, kwargs={'rate': 0.5, 'noise_shape': [3, 1]}, input_shape=(3, 2)) def test_dropout_supports_masking(self): dropout = keras.layers.Dropout(0.5) self.assertEqual(True, dropout.supports_masking) def test_spatial_dropout_1d(self): testing_utils.layer_test( keras.layers.SpatialDropout1D, kwargs={'rate': 0.5}, input_shape=(2, 3, 4)) def test_spatial_dropout_2d(self): testing_utils.layer_test( keras.layers.SpatialDropout2D, kwargs={'rate': 0.5}, input_shape=(2, 3, 4, 5)) testing_utils.layer_test( 
keras.layers.SpatialDropout2D, kwargs={'rate': 0.5, 'data_format': 'channels_first'}, input_shape=(2, 3, 4, 5)) def test_spatial_dropout_3d(self): testing_utils.layer_test( keras.layers.SpatialDropout3D, kwargs={'rate': 0.5}, input_shape=(2, 3, 4, 4, 5)) testing_utils.layer_test( keras.layers.SpatialDropout3D, kwargs={'rate': 0.5, 'data_format': 'channels_first'}, input_shape=(2, 3, 4, 4, 5)) @keras_parameterized.run_all_keras_modes class LambdaLayerTest(keras_parameterized.TestCase): def test_lambda(self): testing_utils.layer_test( keras.layers.Lambda, kwargs={'function': lambda x: x + 1}, input_shape=(3, 2)) testing_utils.layer_test( keras.layers.Lambda, kwargs={ 'function': lambda x, a, b: x * a + b, 'arguments': { 'a': 0.6, 'b': 0.4 } }, input_shape=(3, 2)) # test serialization with function def f(x): return x + 1 ld = keras.layers.Lambda(f) config = ld.get_config() ld = keras.layers.deserialize({ 'class_name': 'Lambda', 'config': config }) # test with lambda ld = keras.layers.Lambda( lambda x: keras.backend.concatenate([math_ops.square(x), x])) config = ld.get_config() ld = keras.layers.Lambda.from_config(config) def test_lambda_multiple_inputs(self): ld = keras.layers.Lambda(lambda x: x[0], output_shape=lambda x: x[0]) x1 = np.ones([3, 2], np.float32) x2 = np.ones([3, 5], np.float32) out = ld([x1, x2]) self.assertAllEqual(out.shape, [3, 2]) def test_lambda_output_shape(self): l = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1)) l(keras.backend.variable(np.ones((1, 1)))) self.assertEqual((1, 1), l.get_config()['output_shape']) def test_lambda_output_shape_function(self): def get_output_shape(input_shape): return 1 * input_shape l = keras.layers.Lambda(lambda x: x + 1, output_shape=get_output_shape) l(keras.backend.variable(np.ones((1, 1)))) self.assertEqual('lambda', l.get_config()['output_shape_type']) def test_lambda_output_shape_autocalculate_multiple_inputs(self): def lambda_fn(x): return math_ops.matmul(x[0], x[1]) l = 
keras.layers.Lambda(lambda_fn) output_shape = l.compute_output_shape([(10, 10), (10, 20)]) self.assertAllEqual((10, 20), output_shape) def test_lambda_output_shape_list_multiple_outputs(self): def lambda_fn(x): return x l = keras.layers.Lambda(lambda_fn, output_shape=[(10,), (20,)]) output_shape = l.compute_output_shape([(10, 10), (10, 20)]) self.assertAllEqual([(10, 10), (10, 20)], output_shape) def test_lambda_output_shape_tuple_with_none(self): def lambda_fn(x): return x l = keras.layers.Lambda(lambda_fn, output_shape=(None, 10)) output_shape = l.compute_output_shape((5, 10, 20)) self.assertAllEqual([5, None, 10], output_shape.as_list()) def test_lambda_output_shape_function_multiple_outputs(self): def lambda_fn(x): return x def output_shape_fn(input_shape): return input_shape l = keras.layers.Lambda(lambda_fn, output_shape=output_shape_fn) output_shape = l.compute_output_shape([(10, 10), (10, 20)]) self.assertAllEqual([(10, 10), (10, 20)], output_shape) def test_lambda_config_serialization(self): # Test serialization with output_shape and output_shape_type layer = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1)) layer(keras.backend.variable(np.ones((1, 1)))) config = layer.get_config() layer = keras.layers.deserialize({ 'class_name': 'Lambda', 'config': config }) layer = keras.layers.Lambda.from_config(config) @keras_parameterized.run_all_keras_modes class CoreLayersTest(keras_parameterized.TestCase): def test_masking(self): testing_utils.layer_test( keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3)) def test_keras_mask(self): x = np.ones((10, 10)) y = keras.layers.Masking(1.)(x) self.assertTrue(hasattr(y, '_keras_mask')) self.assertTrue(y._keras_mask is not None) self.assertAllClose(self.evaluate(y._keras_mask), np.zeros((10,))) def test_activation(self): # with string argument testing_utils.layer_test( keras.layers.Activation, kwargs={'activation': 'relu'}, input_shape=(3, 2)) # with function argument testing_utils.layer_test( 
keras.layers.Activation, kwargs={'activation': keras.backend.relu}, input_shape=(3, 2)) def test_reshape(self): testing_utils.layer_test( keras.layers.Reshape, kwargs={'target_shape': (8, 1)}, input_shape=(3, 2, 4)) testing_utils.layer_test( keras.layers.Reshape, kwargs={'target_shape': (-1, 1)}, input_shape=(3, 2, 4)) testing_utils.layer_test( keras.layers.Reshape, kwargs={'target_shape': (1, -1)}, input_shape=(3, 2, 4)) testing_utils.layer_test( keras.layers.Reshape, kwargs={'target_shape': (-1, 1)}, input_shape=(None, None, 2)) def test_permute(self): testing_utils.layer_test( keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4)) def test_permute_errors_on_invalid_starting_dims_index(self): with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'): testing_utils.layer_test( keras.layers.Permute, kwargs={'dims': (0, 1, 2)}, input_shape=(3, 2, 4)) def test_permute_errors_on_invalid_set_of_dims_indices(self): with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'): testing_utils.layer_test( keras.layers.Permute, kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4)) def test_flatten(self): testing_utils.layer_test( keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4)) # Test channels_first inputs = np.random.random((10, 3, 5, 5)).astype('float32') outputs = testing_utils.layer_test( keras.layers.Flatten, kwargs={'data_format': 'channels_first'}, input_data=inputs) target_outputs = np.reshape( np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3)) self.assertAllClose(outputs, target_outputs) def test_flatten_scalar_channels(self): testing_utils.layer_test( keras.layers.Flatten, kwargs={}, input_shape=(3,)) # Test channels_first inputs = np.random.random((10,)).astype('float32') outputs = testing_utils.layer_test( keras.layers.Flatten, kwargs={'data_format': 'channels_first'}, input_data=inputs) target_outputs = np.expand_dims(inputs, -1) self.assertAllClose(outputs, target_outputs) def test_repeat_vector(self): 
testing_utils.layer_test( keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2)) def test_dense(self): testing_utils.layer_test( keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 2)) testing_utils.layer_test( keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 2)) testing_utils.layer_test( keras.layers.Dense, kwargs={'units': 3}, input_shape=(None, None, 2)) testing_utils.layer_test( keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 5, 2)) def test_dense_dtype(self): inputs = ops.convert_to_tensor( np.random.randint(low=0, high=7, size=(2, 2))) layer = keras.layers.Dense(5, dtype='float32') outputs = layer(inputs) self.assertEqual(outputs.dtype, 'float32') def test_dense_with_policy(self): inputs = ops.convert_to_tensor( np.random.randint(low=0, high=7, size=(2, 2)), dtype='float16') layer = keras.layers.Dense(5, dtype=policy.Policy('infer_float32_vars')) outputs = layer(inputs) self.assertEqual(outputs.dtype, 'float16') self.assertEqual(layer.kernel.dtype, 'float32') def test_dense_regularization(self): layer = keras.layers.Dense( 3, kernel_regularizer=keras.regularizers.l1(0.01), bias_regularizer='l1', activity_regularizer='l2', name='dense_reg') layer(keras.backend.variable(np.ones((2, 4)))) self.assertEqual(3, len(layer.losses)) def test_dense_constraints(self): k_constraint = keras.constraints.max_norm(0.01) b_constraint = keras.constraints.max_norm(0.01) layer = keras.layers.Dense( 3, kernel_constraint=k_constraint, bias_constraint=b_constraint) layer(keras.backend.variable(np.ones((2, 4)))) self.assertEqual(layer.kernel.constraint, k_constraint) self.assertEqual(layer.bias.constraint, b_constraint) def test_activity_regularization(self): layer = keras.layers.ActivityRegularization(l1=0.1) layer(keras.backend.variable(np.ones((2, 4)))) self.assertEqual(1, len(layer.losses)) config = layer.get_config() self.assertEqual(config.pop('l1'), 0.1) def test_numpy_inputs(self): if context.executing_eagerly(): layer = 
keras.layers.RepeatVector(2) x = np.ones((10, 10)) self.assertAllEqual(np.ones((10, 2, 10)), layer(x)) layer = keras.layers.Concatenate() x, y = np.ones((10, 10)), np.ones((10, 10)) self.assertAllEqual(np.ones((10, 20)), layer([x, y])) if __name__ == '__main__': test.main()
ageron/tensorflow
tensorflow/python/keras/layers/core_test.py
Python
apache-2.0
11,584
/* * Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dtolabs.rundeck.core.execution.workflow; /* * StepFirstWorkflowStrategyTests.java * * User: Greg Schueler <a href="mailto:[email protected]">[email protected]</a> * Created: 3/25/11 9:30 AM * */ import java.io.File; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.dtolabs.rundeck.core.common.*; import com.dtolabs.rundeck.core.execution.*; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.tools.ant.BuildListener; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import com.dtolabs.rundeck.core.execution.dispatch.Dispatchable; import com.dtolabs.rundeck.core.execution.dispatch.DispatcherResult; import com.dtolabs.rundeck.core.execution.service.NodeExecutorResult; import com.dtolabs.rundeck.core.execution.workflow.steps.FailureReason; import com.dtolabs.rundeck.core.execution.workflow.steps.NodeDispatchStepExecutor; import com.dtolabs.rundeck.core.execution.workflow.steps.StepExecutionResult; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepException; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionItem; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionService; import 
com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutor; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResult; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResultImpl; import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandBase; import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandExecutionItem; import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandBase; import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandExecutionItem; import com.dtolabs.rundeck.core.tools.AbstractBaseTest; import com.dtolabs.rundeck.core.utils.FileUtils; import com.dtolabs.rundeck.core.utils.NodeSet; public class TestStepFirstWorkflowStrategy extends AbstractBaseTest { Framework testFramework; String testnode; private static final String TEST_PROJECT = "StepFirstWorkflowStrategyTests"; public TestStepFirstWorkflowStrategy(String name) { super(name); } public static Test suite() { return new TestSuite(TestStepFirstWorkflowStrategy.class); } protected void setUp() { super.setUp(); testFramework = getFrameworkInstance(); testnode=testFramework.getFrameworkNodeName(); final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().createFrameworkProject( TEST_PROJECT, generateProjectResourcesFile( new File("src/test/resources/com/dtolabs/rundeck/core/common/test-nodes1.xml") ) ); } protected void tearDown() throws Exception { super.tearDown(); File projectdir = new File(getFrameworkProjectsBase(), TEST_PROJECT); FileUtils.deleteDir(projectdir); } public static void main(String args[]) { junit.textui.TestRunner.run(suite()); } static class testWorkflowCmdItem extends BaseExecutionItem implements NodeStepExecutionItem { private String type; int flag=-1; @Override public String toString() { return "testWorkflowCmdItem{" + "type='" + type + '\'' + ", flag=" + flag + '}'; } @Override public String getNodeStepType() { 
return type; } public String getType() { return "NodeDispatch"; } } /*static class testWorkflowJobCmdItem extends testWorkflowCmdItem implements IWorkflowJobItem { private String jobIdentifier; public String getJobIdentifier() { return jobIdentifier; } }*/ static class testListener implements ExecutionListenerOverride { public boolean isTerse() { return false; } public String getLogFormat() { return null; } public void log(int i, String s) { } @Override public void event(String eventType, String message, Map eventMeta) { } public FailedNodesListener getFailedNodesListener() { return null; } public void beginStepExecution(ExecutionContext context, StepExecutionItem item) { } public void finishStepExecution(StatusResult result, ExecutionContext context, StepExecutionItem item) { } public void beginNodeExecution(ExecutionContext context, String[] command, INodeEntry node) { } public void finishNodeExecution(NodeExecutorResult result, ExecutionContext context, String[] command, INodeEntry node) { } public void beginNodeDispatch(ExecutionContext context, StepExecutionItem item) { } public void beginNodeDispatch(ExecutionContext context, Dispatchable item) { } public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, StepExecutionItem item) { } public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, Dispatchable item) { } public void beginFileCopyFileStream(ExecutionContext context, InputStream input, INodeEntry node) { } public void beginFileCopyFile(ExecutionContext context, File input, INodeEntry node) { } public void beginFileCopyScriptContent(ExecutionContext context, String input, INodeEntry node) { } public void finishFileCopy(String result, ExecutionContext context, INodeEntry node) { } public void beginExecuteNodeStep(ExecutionContext context, NodeStepExecutionItem item, INodeEntry node) { } public void finishExecuteNodeStep(NodeStepResult result, ExecutionContext context, StepExecutionItem item, INodeEntry node) 
{ } public BuildListener getBuildListener() { return null; } public ExecutionListenerOverride createOverride() { return this; } public void setTerse(boolean terse) { } public void setLogFormat(String format) { } public void setFailedNodesListener(FailedNodesListener listener) { } } static class testInterpreter implements NodeStepExecutor { List<StepExecutionItem> executionItemList = new ArrayList<StepExecutionItem>(); List<ExecutionContext> executionContextList = new ArrayList<ExecutionContext>(); List<INodeEntry> nodeEntryList = new ArrayList<INodeEntry>(); int index = 0; List<NodeStepResult> resultList = new ArrayList<NodeStepResult>(); boolean shouldThrowException = false; public NodeStepResult executeNodeStep(StepExecutionContext executionContext, NodeStepExecutionItem executionItem, INodeEntry iNodeEntry) throws NodeStepException { executionItemList.add(executionItem); executionContextList.add(executionContext); nodeEntryList.add(iNodeEntry); if (shouldThrowException) { throw new NodeStepException("testInterpreter test exception",null,iNodeEntry.getNodename()); } // System.out.println("return index: (" + index + ") in size: " + resultList.size()); return resultList.get(index++); } } static enum Reason implements FailureReason{ Test } static class testResult extends NodeStepResultImpl { boolean success; int flag; INodeEntry node; testResult(boolean success, int flag) { super(null,success?null: TestStepFirstWorkflowStrategy.Reason.Test,success?null:"test failure",null); this.success = success; this.flag = flag; } @Override public Exception getException() { return null; } public boolean isSuccess() { return success; } @Override public String toString() { return "testResult{" + "success=" + success + ", flag=" + flag + '}'; } public INodeEntry getNode() { return node; } } public void testExecuteWorkflow() throws Exception { final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().getFrameworkProject( TEST_PROJECT); final INodeSet nodes = 
frameworkProject.getNodeSet(); assertNotNull(nodes); assertEquals(2, nodes.getNodes().size()); } public void testExecuteWorkflow_empty() throws Exception { //test empty workflow final NodeSet nodeset = new NodeSet(); final WorkflowImpl workflow = new WorkflowImpl(new ArrayList<StepExecutionItem>(), 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(0, interpreterMock.executionItemList.size()); } public void testExecuteWorkflow_undefined_item() throws Exception { //test undefined workflow item final NodeSet nodeset = new 
NodeSet(); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); commands.add(new testWorkflowCmdItem()); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset.nodeSelectorWithDefaultAll()) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes(nodeset.nodeSelectorWithDefaultAll(), testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet())) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.out); } assertFalse(result.isSuccess()); assertEquals(0, interpreterMock.executionItemList.size()); assertNotNull("threw exception: " + result.getException(), result.getException()); assertTrue("threw exception: " + result.getException(), result.getException() instanceof NullPointerException); assertEquals("threw exception: " + 
result.getException(), "provider name was null for Service: WorkflowNodeStep", result.getException().getMessage()); } public void testExecuteWorkflow_scriptExec() throws Exception { //test script exec item final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testWorkflowCmdItem = new ScriptFileCommandBase(){ @Override public String getScript() { return "a command"; } }; commands.add(testWorkflowCmdItem); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("exec", failMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock); //set resturn result interpreterMock.resultList.add(new NodeStepResultImpl(null)); final WorkflowExecutionResult result = 
strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) executionItem1; assertEquals("a command", scriptItem.getScript()); assertNull(scriptItem.getScriptAsStream()); assertNull(scriptItem.getServerScriptFilePath()); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals("expected " + nodeset + ", but was " + executionContext.getNodeSelector(), nodeset, executionContext.getNodeSelector()); } public void testExecuteWorkflow_commandexec() throws Exception { //test command exec item final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "command"}; } }; commands.add(testWorkflowCmdItem); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new 
WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock); //set resturn result interpreterMock.resultList.add(new NodeStepResultImpl(null)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(2, 
execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("command", execItem.getCommand()[1]); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } public void testExecuteWorkflowThreeItems() throws Exception{ { //test workflow of three successful items final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "2","command"}; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[]{"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() { @Override public String getServerScriptFilePath() { return "/some/file/path"; } @Override public String[] getArgs() { return new String[]{"-testargs", "2"}; } }; commands.add(testWorkflowCmdItemScript2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() 
.frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results interpreterMock.resultList.add(new testResult(true, 0)); interpreterMock.resultList.add(new testResult(true, 1)); interpreterMock.resultList.add(new testResult(true, 2)); final WorkflowExecutionResult result = strategy.executeWorkflow(context,executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); assertEquals(3, test1.size()); for (final int i : new int[]{0, 1, 2}) { final StepExecutionResult interpreterResult = test1.get(i); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + 
nrs.getClass(), nrs instanceof testResult);
    testResult val = (testResult) nrs;
    assertTrue(val.isSuccess());
    assertEquals(i, val.flag);
}
// Verify each of the three executed items in order: exec command, inline
// script, server-side script file.
assertEquals(3, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
           executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
           item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
// FIX: the original asserted "item2 instanceof ..." here (copy-paste error)
// while the message and the cast below both refer to item3; the third item's
// type was never actually checked.
assertTrue("wrong class: " + item3.getClass().getName(),
           item3 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem2 = (ScriptFileCommandExecutionItem) item3;
assertNull(scriptItem2.getScript());
assertNull(scriptItem2.getScriptAsStream());
assertEquals("/some/file/path", scriptItem2.getServerScriptFilePath());
assertNotNull(scriptItem2.getArgs());
assertEquals(2, scriptItem2.getArgs().length);
assertEquals("-testargs", scriptItem2.getArgs()[0]);
assertEquals("2", scriptItem2.getArgs()[1]);
// Each of the three steps ran with the same project/user/selector context.
assertEquals(3, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1, 2}) {
    final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
    assertEquals("item "+i,TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull("item " + i, executionContext.getDataContext()); assertNotNull("item " + i, executionContext.getDataContext().get("node")); assertEquals("item " + i,0, executionContext.getLoglevel()); assertEquals("item " + i,"user1", executionContext.getUser()); assertEquals("item " + i,nodeset, executionContext.getNodeSelector()); } } } public void testWorkflowFailNoKeepgoing() throws Exception{ { //test a workflow with a failing item (1), with keepgoing=false final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "2", "command"}; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[]{"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() { @Override public String getServerScriptFilePath() { return "/some/file/path"; } @Override public String[] getArgs() { return new String[]{"-testargs", "2"}; } }; commands.add(testWorkflowCmdItemScript2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); workflow.setKeepgoing(false); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, 
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results, fail on second item interpreterMock.resultList.add(new testResult(true, 0)); interpreterMock.resultList.add(new testResult(false, 1)); interpreterMock.resultList.add(new testResult(true, 2)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (null != result.getException()) { result.getException().printStackTrace(System.out); } assertFalse(result.isSuccess()); assertNull("threw exception: " + result.getException(), result.getException()); StepExecutionResult result1 = result.getResultSet().get(1); final DispatcherResult executionResult = NodeDispatchStepExecutor.extractDispatcherResult(result1); assertNotNull(executionResult.getResults()); assertEquals(1, executionResult.getResults().size()); assertNotNull(executionResult.getResults().get(testnode)); final StatusResult testnode1 = executionResult.getResults().get(testnode); assertNotNull(testnode1); assertTrue(testnode1 instanceof testResult); testResult failResult = (testResult) testnode1; assertEquals(1, failResult.flag); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); 
assertEquals(2, test1.size()); for (final int i : new int[]{0, 1}) { final StepExecutionResult interpreterResult = test1.get(i); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(i, val.flag); if(0==i){ assertTrue(val.isSuccess()); }else{ assertFalse(val.isSuccess()); } } assertEquals(2, interpreterMock.executionItemList.size()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("2", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final StepExecutionItem item2 = interpreterMock.executionItemList.get(1); assertTrue("wrong class: " + item2.getClass().getName(), item2 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2; assertEquals("a command", scriptItem.getScript()); assertNull(scriptItem.getScriptAsStream()); assertNull(scriptItem.getServerScriptFilePath()); assertNotNull(scriptItem.getArgs()); assertEquals(2, scriptItem.getArgs().length); assertEquals("-testargs", scriptItem.getArgs()[0]); assertEquals("1",scriptItem.getArgs()[1]); assertEquals(2, interpreterMock.executionContextList.size()); for (final int i : new int[]{0, 1}) { final ExecutionContext executionContext = interpreterMock.executionContextList.get(i); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); 
assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } } } public void testWorkflowFailYesKeepgoing() throws Exception{ { //test a workflow with a failing item (1), with keepgoing=true final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "2", "command"}; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[]{"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() { @Override public String getServerScriptFilePath() { return "/some/file/path"; } @Override public String[] getArgs() { return new String[]{"-testargs", "2"}; } }; commands.add(testWorkflowCmdItemScript2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); workflow.setKeepgoing(true); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter 
for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results, fail on second item interpreterMock.resultList.add(new testResult(true, 0)); interpreterMock.resultList.add(new testResult(false, 1)); interpreterMock.resultList.add(new testResult(true, 2)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertFalse(result.isSuccess()); assertNull("threw exception: " + result.getException(), result.getException()); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); assertEquals(3, test1.size()); for (final int i : new int[]{0, 1, 2}) { final StepExecutionResult interpreterResult = test1.get(i); assertTrue(NodeDispatchStepExecutor.isWrappedDispatcherResult(interpreterResult)); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(i, val.flag); if (1 == i) { assertFalse(val.isSuccess()); } else { assertTrue(val.isSuccess()); } } 
assertEquals(3, interpreterMock.executionItemList.size());
// Verify each executed item in order: exec command, inline script, server
// script file (all three ran because keepgoing=true).
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
           executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
           item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
assertNotNull(scriptItem.getArgs());
assertEquals(2, scriptItem.getArgs().length);
assertEquals("-testargs", scriptItem.getArgs()[0]);
assertEquals("1",scriptItem.getArgs()[1]);
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
// FIX: the original used item2 in both the message and the instanceof here
// (copy-paste error); item3 is the element fetched and cast below, so it is
// the one whose type must be asserted.
assertTrue("wrong class: " + item3.getClass().getName(),
           item3 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem3 = (ScriptFileCommandExecutionItem) item3;
assertEquals("/some/file/path", scriptItem3.getServerScriptFilePath());
assertNull(scriptItem3.getScript());
assertNull(scriptItem3.getScriptAsStream());
assertNotNull(scriptItem3.getArgs());
assertEquals(2, scriptItem3.getArgs().length);
assertEquals("-testargs", scriptItem3.getArgs()[0]);
assertEquals("2", scriptItem3.getArgs()[1]);
assertEquals(3, interpreterMock.executionContextList.size());
// NOTE(review): only contexts 0 and 1 are inspected although three were
// recorded — presumably intentional spot-checking; confirm before extending.
for (final int i : new int[]{0, 1}) {
    final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
    assertEquals(TEST_PROJECT,
executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } } } public void testFailureHandlerItemNoKeepgoing() throws Exception{ { //test a workflow with a failing item (1), with keepgoing=false, and a failureHandler final boolean KEEPGOING_TEST = false; final boolean STEP_0_RESULT = false; final boolean STEP_1_RESULT = true; final boolean HANDLER_RESULT = true; final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() { @Override public String getScript() { return "failure handler script"; } @Override public String[] getArgs() { return new String[]{"failure","script","args"}; } }; final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "2", "command"}; } @Override public StepExecutionItem getFailureHandler() { return testHandlerItem; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[]{"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); workflow.setKeepgoing(KEEPGOING_TEST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) 
.user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter handlerInterpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", handlerInterpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results, fail on second item interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0)); interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1)); handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertFalse(result.isSuccess()); assertNull("threw exception: " + result.getException(), result.getException()); StepExecutionResult result1 = result.getResultSet().get(0); final DispatcherResult executionResult = NodeDispatchStepExecutor.extractDispatcherResult(result1); assertNotNull(executionResult.getResults()); assertEquals(1, executionResult.getResults().size()); assertNotNull(executionResult.getResults().get(testnode)); final StatusResult testnode1 = 
executionResult.getResults().get(testnode); assertNotNull(testnode1); assertTrue(testnode1 instanceof testResult); testResult failResult = (testResult) testnode1; assertEquals(0, failResult.flag); assertEquals(1, result.getResultSet().size()); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); assertEquals(1, test1.size()); final int i =0; final StepExecutionResult interpreterResult = test1.get(i); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(i, val.flag); assertFalse(val.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("2", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(i); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); //check handler item was executed assertEquals(1, handlerInterpreterMock.executionItemList.size()); final 
StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItemX.getClass().getName(), executionItemX instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX; assertNotNull(execItemX.getScript()); assertNotNull(execItemX.getArgs()); assertEquals("failure handler script", execItemX.getScript()); assertEquals(3, execItemX.getArgs().length); assertEquals("failure", execItemX.getArgs()[0]); assertEquals("script", execItemX.getArgs()[1]); assertEquals("args", execItemX.getArgs()[2]); assertEquals(1, handlerInterpreterMock.executionContextList.size()); final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(i); assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject()); assertNotNull(executionContextX.getDataContext()); assertNotNull(executionContextX.getDataContext().get("node")); assertEquals(0, executionContextX.getLoglevel()); assertEquals("user1", executionContextX.getUser()); assertEquals(nodeset, executionContextX.getNodeSelector()); } } public void testFailureHandlerItemYesKeepgoing() throws Exception{ { //test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that fails final boolean KEEPGOING_TEST = true; final boolean STEP_0_RESULT = false; final boolean STEP_1_RESULT = true; final boolean HANDLER_RESULT = false; final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() { @Override public String getScript() { return "failure handler script"; } @Override public String[] getArgs() { return new String[]{"failure","script","args"}; } @Override public String toString() { return "testHandlerItem"; } }; final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { 
@Override public String[] getCommand() { return new String[]{"a", "2", "command"}; } @Override public StepExecutionItem getFailureHandler() { return testHandlerItem; } @Override public String toString() { return "testWorkflowCmdItem"; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "3", "command"}; } @Override public StepExecutionItem getFailureHandler() { return testHandlerItem; } @Override public String toString() { return "testWorkflowCmdItem2"; } }; commands.add(testWorkflowCmdItem2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); workflow.setKeepgoing(KEEPGOING_TEST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter handlerInterpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", handlerInterpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); 
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0)); interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1)); handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertFalse(result.isSuccess()); assertNull("threw exception: " + result.getException(), result.getException()); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); System.out.println("results: "+test1); assertEquals(2, interpreterMock.executionItemList.size()); assertEquals(2, interpreterMock.executionContextList.size()); //check handler item was executed assertEquals(1, handlerInterpreterMock.executionItemList.size()); assertEquals(1, handlerInterpreterMock.executionContextList.size()); assertEquals(2, test1.size()); int resultIndex =0; int stepNum=0; { //first step result final StepExecutionResult interpreterResult = test1.get(resultIndex); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(0, val.flag); assertFalse(val.isSuccess()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); 
assertEquals("a", execItem.getCommand()[0]); assertEquals("2", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } resultIndex=1; // // { // //failure handler result // final StepExecutionResult interpreterResult = test1.get(resultIndex); // final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); // assertEquals(1, dr.getResults().size()); // final NodeStepResult nrs = dr.getResults().values().iterator().next(); // assertTrue("unexpected class: " + nrs.getClass(), // nrs instanceof testResult); // testResult val = (testResult) nrs; // assertEquals(0, val.flag); // assertFalse(val.isSuccess()); // // final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum); // assertTrue("wrong class: " + executionItemX.getClass().getName(), // executionItemX instanceof ScriptFileCommandExecutionItem); // ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX; // assertNotNull(execItemX.getScript()); // assertNotNull(execItemX.getArgs()); // assertEquals("failure handler script", execItemX.getScript()); // assertEquals(3, execItemX.getArgs().length); // assertEquals("failure", execItemX.getArgs()[0]); // assertEquals("script", execItemX.getArgs()[1]); // assertEquals("args", execItemX.getArgs()[2]); // // // final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum); // assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject()); // 
assertNull(executionContextX.getDataContext()); // assertEquals(0, executionContextX.getLoglevel()); // assertEquals("user1", executionContextX.getUser()); // assertEquals(nodeset, executionContextX.getNodeSelector()); // assertNull(executionContextX.getArgs()); // } // resultIndex=2; stepNum = 1; { //second step result final StepExecutionResult interpreterResult = test1.get(resultIndex); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(1, val.flag); assertTrue(val.isSuccess()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("3", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } } } public void testFailureHandlerItemYesKeepgoingHandlerSuccess() throws Exception { { //test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that succeeds final boolean KEEPGOING_TEST = true; final boolean STEP_0_RESULT = false; final boolean 
STEP_1_RESULT = true; final boolean HANDLER_RESULT = true; final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() { @Override public String getScript() { return "failure handler script"; } @Override public String[] getArgs() { return new String[]{"failure","script","args"}; } @Override public String toString() { return "testHandlerItem"; } }; final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "2", "command"}; } @Override public StepExecutionItem getFailureHandler() { return testHandlerItem; } @Override public String toString() { return "testWorkflowCmdItem"; } }; commands.add(testWorkflowCmdItem); final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() { @Override public String[] getCommand() { return new String[]{"a", "3", "command"}; } @Override public StepExecutionItem getFailureHandler() { return testHandlerItem; } @Override public String toString() { return "testWorkflowCmdItem2"; } }; commands.add(testWorkflowCmdItem2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); workflow.setKeepgoing(KEEPGOING_TEST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = 
NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter handlerInterpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", handlerInterpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn results interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0)); interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1)); handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertTrue(result.isSuccess()); assertNull("threw exception: " + result.getException(), result.getException()); assertNotNull(result.getResultSet()); final List<StepExecutionResult> test1 = result.getResultSet(); System.err.println("results: "+test1); assertEquals(2, test1.size()); assertEquals(2, interpreterMock.executionItemList.size()); assertEquals(2, interpreterMock.executionContextList.size()); //check handler item was executed assertEquals(1, handlerInterpreterMock.executionItemList.size()); assertEquals(1, handlerInterpreterMock.executionContextList.size()); int resultIndex =0; int stepNum=0; { //failure handler result final StepExecutionResult interpreterResult = test1.get(resultIndex); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final 
NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(0, val.flag); assertTrue(val.isSuccess()); final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum); assertTrue("wrong class: " + executionItemX.getClass().getName(), executionItemX instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX; assertNotNull(execItemX.getScript()); assertNotNull(execItemX.getArgs()); assertEquals("failure handler script", execItemX.getScript()); assertEquals(3, execItemX.getArgs().length); assertEquals("failure", execItemX.getArgs()[0]); assertEquals("script", execItemX.getArgs()[1]); assertEquals("args", execItemX.getArgs()[2]); final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum); assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject()); assertNotNull(executionContextX.getDataContext()); assertNotNull(executionContextX.getDataContext().get("node")); assertEquals(0, executionContextX.getLoglevel()); assertEquals("user1", executionContextX.getUser()); assertEquals(nodeset, executionContextX.getNodeSelector()); } resultIndex=1; stepNum = 1; { //second step result final StepExecutionResult interpreterResult = test1.get(resultIndex); final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult); assertEquals(1, dr.getResults().size()); final NodeStepResult nrs = dr.getResults().values().iterator().next(); assertTrue("unexpected class: " + nrs.getClass(), nrs instanceof testResult); testResult val = (testResult) nrs; assertEquals(1, val.flag); assertTrue(val.isSuccess()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof 
ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("3", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } } } public void testGenericItem() throws Exception{ { //test jobref item final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final testWorkflowCmdItem item = new testWorkflowCmdItem(); item.type = "my-type"; commands.add(item); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new 
testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("my-type", interpreterMock); interpreterService.registerInstance("exec", failMock); interpreterService.registerInstance("script", failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn result interpreterMock.resultList.add(new NodeStepResultImpl(null)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } } public void testMultipleNodes() throws Exception{ { //test jobref item final NodeSet nodeset = new NodeSet(); nodeset.createInclude().setName(".*"); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final 
testWorkflowCmdItem item = new testWorkflowCmdItem(); item.type = "my-type"; commands.add(item); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("my-type", interpreterMock); interpreterService.registerInstance("exec", failMock); interpreterService.registerInstance("script", failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn result node 1 interpreterMock.resultList.add(new NodeStepResultImpl(null)); //set resturn result node 2 interpreterMock.resultList.add(new NodeStepResultImpl(null)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(2, interpreterMock.executionItemList.size()); assertEquals(2, 
interpreterMock.executionContextList.size()); { final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector()); } { final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(1); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector()); } } } public void testMultipleItemsAndNodes() throws Exception{ { //test jobref item final NodeSet nodeset = new NodeSet(); nodeset.createInclude().setName(".*"); final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>(); final testWorkflowCmdItem item = new testWorkflowCmdItem(); 
item.flag=0; item.type = "my-type"; commands.add(item); final testWorkflowCmdItem item2 = new testWorkflowCmdItem(); item2.flag = 1; item2.type = "my-type"; commands.add(item2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); final StepExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .nodes(NodeFilter.filterNodes( nodeset, testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet() )) .build(); //setup testInterpreter for all command types final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework( testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("my-type", interpreterMock); interpreterService.registerInstance("exec", failMock); interpreterService.registerInstance("script", failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); //set resturn result node 1 step 1 interpreterMock.resultList.add(new NodeStepResultImpl(null)); //set resturn result node 2 step 1 interpreterMock.resultList.add(new NodeStepResultImpl(null)); //set resturn result node 1 step 2 interpreterMock.resultList.add(new NodeStepResultImpl(null)); //set resturn result node 2 step 2 interpreterMock.resultList.add(new NodeStepResultImpl(null)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && 
null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(4, interpreterMock.executionItemList.size()); assertEquals(4, interpreterMock.executionContextList.size()); {//node 1 step 1 final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); assertEquals(0, execItem.flag); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector()); } {//node 2 step 1 final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); assertEquals(0, execItem.flag); final ExecutionContext executionContext = interpreterMock.executionContextList.get(1); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); 
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector()); } {//node 1 step 2 final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(2); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); assertEquals(1, execItem.flag); final ExecutionContext executionContext = interpreterMock.executionContextList.get(2); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector()); } {//node 2 step 2 final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(3); assertTrue("wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof testWorkflowCmdItem); testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1; assertNotNull(execItem.getNodeStepType()); assertEquals("my-type", execItem.getNodeStepType()); assertEquals(1, execItem.flag); final ExecutionContext executionContext = interpreterMock.executionContextList.get(3); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNotNull(executionContext.getDataContext()); assertNotNull(executionContext.getDataContext().get("node")); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector()); } } } public void testCreatePrintableDataContext() { Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>(); String otherKey = "other"; 
Map<String, String> otherData = new HashMap<String, String>(); dataContext.put(otherKey, otherData); Map<String, String> secureData = new HashMap<String, String>(); String secureKey = "secureKey"; secureData.put(secureKey, "secureValue"); dataContext.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureData); Map<String, String> regularData = new HashMap<String, String>(); String insecureKey = "insecureKey"; regularData.put(insecureKey, "insecureValue"); regularData.put(secureKey, "secureValue"); dataContext.put(StepFirstWorkflowExecutor.OPTION_KEY, regularData); StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext); Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey)); Map<String, String> resultSecureData = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY); Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultSecureData.get(secureKey)); Map<String, String> resultRegularData = result.get(StepFirstWorkflowExecutor.OPTION_KEY); Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultRegularData.get(secureKey)); Assert.assertEquals("Expected insecure value to be untouched", regularData.get(insecureKey), resultRegularData.get(insecureKey)); } public void testCreatePrintableDataContextNoDataContext() { StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); Map<String, Map<String, String>> result = strategy.createPrintableDataContext(null); Assert.assertTrue("Expected empty data context", result.isEmpty()); } public void testCreatePrintableDataContextEmptyDataContext() { Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>(); StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); Map<String, Map<String, 
String>> result = strategy.createPrintableDataContext(dataContext); Assert.assertTrue("Expected empty data context", result.isEmpty()); } public void testCreatePrintableDataContextNoSecureData() { Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>(); String otherKey = "other"; Map<String, String> otherData = new HashMap<String, String>(); dataContext.put(otherKey, otherData); Map<String, String> regularData = new HashMap<String, String>(); String insecureKey = "insecureKey"; regularData.put(insecureKey, "insecureValue"); dataContext.put(StepFirstWorkflowExecutor.OPTION_KEY, regularData); StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext); Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey)); Map<String, String> resultRegularData = result.get(StepFirstWorkflowExecutor.OPTION_KEY); Assert.assertEquals("Expected insecure value to be untouched", regularData.get(insecureKey), resultRegularData.get(insecureKey)); } public void testCreatePrintableDataContextNoRegularData() { Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>(); String otherKey = "other"; Map<String, String> otherData = new HashMap<String, String>(); dataContext.put(otherKey, otherData); Map<String, String> secureData = new HashMap<String, String>(); String secureKey = "secureKey"; secureData.put(secureKey, "secureValue"); dataContext.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureData); StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext); Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey)); Map<String, String> resultSecureData = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY); Assert.assertEquals("Expected 
secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultSecureData.get(secureKey)); } @SuppressWarnings("unchecked") public void testExecuteWorkflowUsesPrintableDataContext() { ExecutionListener listener = Mockito.mock(ExecutionListener.class); StepExecutionContext context = Mockito.mock(StepExecutionContext.class); Mockito.when(context.getExecutionListener()).thenReturn(listener); String printableContextToString = "this is hopefully some string that won't appear elsewhere"; Map<String, Map<String, String>> printableContext = Mockito.mock(Map.class); Mockito.when(printableContext.toString()).thenReturn(printableContextToString); String dataContextToString = "this is another magic string that hopefully won't appear elsewhere"; Map<String, Map<String, String>> dataContext = Mockito.mock(Map.class); Mockito.when(dataContext.toString()).thenReturn(dataContextToString); Mockito.when(context.getDataContext()).thenReturn(dataContext); StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework); strategy = Mockito.spy(strategy); Mockito.doReturn(printableContext).when(strategy).createPrintableDataContext(Mockito.same(dataContext)); WorkflowExecutionItem item = Mockito.mock(WorkflowExecutionItem.class); IWorkflow workflow = Mockito.mock(IWorkflow.class); Mockito.doReturn(workflow).when(item).getWorkflow(); strategy.executeWorkflowImpl(context, item); ArgumentCaptor<String> logLineCaptor = ArgumentCaptor.forClass(String.class); Mockito.verify(listener, Mockito.atLeastOnce()).log(Mockito.anyInt(), logLineCaptor.capture()); for (String line : logLineCaptor.getAllValues()) { if (line.startsWith(StepFirstWorkflowExecutor.DATA_CONTEXT_PREFIX)) { Assert.assertTrue("Expected printable data context string.", line.contains(printableContextToString)); Assert.assertFalse("Not expecting raw data context string.", line.contains(dataContextToString)); } } } }
jgpacker/rundeck
core/src/test/java/com/dtolabs/rundeck/core/execution/workflow/TestStepFirstWorkflowStrategy.java
Java
apache-2.0
100,809
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.googlesource.gerrit.plugins.hooks.rtc.network; import java.net.URI; import org.apache.http.client.methods.HttpPost; public class HttpPatch extends HttpPost { public HttpPatch() { super(); } public HttpPatch(String uri) { super(uri); } public HttpPatch(URI uri) { super(uri); } @Override public String getMethod() { return "PATCH"; } }
GerritCodeReview/plugins_hooks-rtc
src/main/java/com/googlesource/gerrit/plugins/hooks/rtc/network/HttpPatch.java
Java
apache-2.0
999
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.test.runtime;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.io.IOReadableWritable;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.io.network.api.reader.RecordReader;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.util.TestLogger;

import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;

/**
 * Manually test the throughput of the network stack.
 *
 * <p>Marked {@code @Ignore} so it is not picked up by regular test runs; it is
 * executed by hand through {@link #main(String[])}, which runs every parameter
 * combination in {@link #testThroughput()} and logs the measured MBit/s.
 */
@Ignore
public class NetworkStackThroughputITCase extends TestLogger {

	private static final Logger LOG = LoggerFactory.getLogger(NetworkStackThroughputITCase.class);

	// Keys used to pass the test parameters through Flink Configuration objects
	// (both the cluster config and the per-vertex task configuration).
	private static final String DATA_VOLUME_GB_CONFIG_KEY = "data.volume.gb";

	private static final String USE_FORWARDER_CONFIG_KEY = "use.forwarder";

	private static final String PARALLELISM_CONFIG_KEY = "num.subtasks";

	private static final String NUM_SLOTS_PER_TM_CONFIG_KEY = "num.slots.per.tm";

	private static final String IS_SLOW_SENDER_CONFIG_KEY = "is.slow.sender";

	private static final String IS_SLOW_RECEIVER_CONFIG_KEY = "is.slow.receiver";

	// Artificial delay injected by "slow" senders/receivers, and how often:
	// one sleep per (2 * 32 KiB) worth of records.
	private static final int IS_SLOW_SLEEP_MS = 10;

	private static final int IS_SLOW_EVERY_NUM_RECORDS = (2 * 32 * 1024) / SpeedTestRecord.RECORD_SIZE;

	// ------------------------------------------------------------------------

	// wrapper to reuse JavaProgramTestBase code in runs via main()
	private static class TestBaseWrapper extends JavaProgramTestBase {

		// Parameters extracted from the Configuration passed to the constructor.
		private int dataVolumeGb;
		private boolean useForwarder;
		private boolean isSlowSender;
		private boolean isSlowReceiver;
		private int parallelism;

		/**
		 * Reads the test parameters from {@code config} and sizes the mini
		 * cluster (number of task managers = parallelism / slots per TM).
		 *
		 * @throws RuntimeException if the parallelism is not a multiple of the
		 *                          configured slots per task manager
		 */
		public TestBaseWrapper(Configuration config) {
			super(config);

			dataVolumeGb = config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);
			useForwarder = config.getBoolean(USE_FORWARDER_CONFIG_KEY, true);
			isSlowSender = config.getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);
			isSlowReceiver = config.getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);
			parallelism = config.getInteger(PARALLELISM_CONFIG_KEY, 1);

			int numSlots = config.getInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, 1);

			if (parallelism % numSlots != 0) {
				throw new RuntimeException("The test case defines a parallelism that is not a multiple of the slots per task manager.");
			}

			setNumTaskManagers(parallelism / numSlots);
			setTaskManagerNumSlots(numSlots);
		}

		/** Builds the job graph for the configured parameter combination. */
		protected JobGraph getJobGraph() throws Exception {
			return createJobGraph(dataVolumeGb, useForwarder, isSlowSender, isSlowReceiver, parallelism);
		}

		/**
		 * Assembles a pipelined producer (-> optional forwarder) -> consumer job,
		 * all vertices in one slot sharing group and connected ALL_TO_ALL.
		 * The data volume and slow-sender/receiver flags are passed to the tasks
		 * via their task configurations.
		 */
		private JobGraph createJobGraph(int dataVolumeGb, boolean useForwarder, boolean isSlowSender,
										boolean isSlowReceiver, int numSubtasks) {
			JobGraph jobGraph = new JobGraph("Speed Test");
			SlotSharingGroup sharingGroup = new SlotSharingGroup();

			JobVertex producer = new JobVertex("Speed Test Producer");
			jobGraph.addVertex(producer);
			producer.setSlotSharingGroup(sharingGroup);

			producer.setInvokableClass(SpeedTestProducer.class);
			producer.setParallelism(numSubtasks);
			producer.getConfiguration().setInteger(DATA_VOLUME_GB_CONFIG_KEY, dataVolumeGb);
			producer.getConfiguration().setBoolean(IS_SLOW_SENDER_CONFIG_KEY, isSlowSender);

			// Optional intermediate vertex that just echoes records through.
			JobVertex forwarder = null;
			if (useForwarder) {
				forwarder = new JobVertex("Speed Test Forwarder");
				jobGraph.addVertex(forwarder);
				forwarder.setSlotSharingGroup(sharingGroup);

				forwarder.setInvokableClass(SpeedTestForwarder.class);
				forwarder.setParallelism(numSubtasks);
			}

			JobVertex consumer = new JobVertex("Speed Test Consumer");
			jobGraph.addVertex(consumer);
			consumer.setSlotSharingGroup(sharingGroup);

			consumer.setInvokableClass(SpeedTestConsumer.class);
			consumer.setParallelism(numSubtasks);
			consumer.getConfiguration().setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, isSlowReceiver);

			if (useForwarder) {
				forwarder.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
				consumer.connectNewDataSetAsInput(forwarder, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
			}
			else {
				consumer.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
			}

			return jobGraph;
		}

		/**
		 * Submits the job and logs the resulting throughput.
		 * {@code executor} and {@code config} are inherited from the test base
		 * class. 1 GB is converted as 8192 MBit (8 * 1024).
		 */
		@Override
		protected void testProgram() throws Exception {
			JobExecutionResult jer = executor.submitJobAndWait(getJobGraph(), false);

			int dataVolumeGb = this.config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);

			long dataVolumeMbit = dataVolumeGb * 8192;
			long runtimeSecs = jer.getNetRuntime(TimeUnit.SECONDS);

			int mbitPerSecond = (int) (((double) dataVolumeMbit) / runtimeSecs);

			LOG.info(String.format("Test finished with throughput of %d MBit/s (runtime [secs]: %d, " +
					"data volume [gb/mbits]: %d/%d)", mbitPerSecond, runtimeSecs, dataVolumeGb, dataVolumeMbit));
		}
	}

	// ------------------------------------------------------------------------

	/**
	 * Source task: emits the configured data volume (split evenly across
	 * subtasks) as fixed-size {@link SpeedTestRecord}s, optionally sleeping
	 * periodically to simulate a slow sender.
	 */
	private static class SpeedTestProducer extends AbstractInvokable {

		@Override
		public void invoke() throws Exception {
			RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));

			try {
				// Determine the amount of data to send per subtask
				int dataVolumeGb = getTaskConfiguration().getInteger(NetworkStackThroughputITCase.DATA_VOLUME_GB_CONFIG_KEY, 1);

				long dataMbPerSubtask = (dataVolumeGb * 1024) / getCurrentNumberOfSubtasks();
				long numRecordsToEmit = (dataMbPerSubtask * 1024 * 1024) / SpeedTestRecord.RECORD_SIZE;

				LOG.info(String.format("%d/%d: Producing %d records (each record: %d bytes, total: %.2f GB)",
						getIndexInSubtaskGroup() + 1, getCurrentNumberOfSubtasks(), numRecordsToEmit,
						SpeedTestRecord.RECORD_SIZE, dataMbPerSubtask / 1024.0));

				boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);

				int numRecords = 0;
				// The same record instance is reused for every emit.
				SpeedTestRecord record = new SpeedTestRecord();
				for (long i = 0; i < numRecordsToEmit; i++) {
					if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
						Thread.sleep(IS_SLOW_SLEEP_MS);
					}

					writer.emit(record);
				}
			}
			finally {
				writer.flush();
			}
		}
	}

	/** Intermediate task: reads records from its input and re-emits them unchanged. */
	private static class SpeedTestForwarder extends AbstractInvokable {

		@Override
		public void invoke() throws Exception {
			RecordReader<SpeedTestRecord> reader = new RecordReader<>(
					getEnvironment().getInputGate(0),
					SpeedTestRecord.class,
					getEnvironment().getTaskManagerInfo().getTmpDirectories());

			RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));

			try {
				SpeedTestRecord record;
				while ((record = reader.next()) != null) {
					writer.emit(record);
				}
			}
			finally {
				reader.clearBuffers();
				writer.flush();
			}
		}
	}

	/**
	 * Sink task: drains its input, optionally sleeping periodically to
	 * simulate a slow receiver. The records themselves are discarded.
	 */
	private static class SpeedTestConsumer extends AbstractInvokable {

		@Override
		public void invoke() throws Exception {
			RecordReader<SpeedTestRecord> reader = new RecordReader<>(
					getEnvironment().getInputGate(0),
					SpeedTestRecord.class,
					getEnvironment().getTaskManagerInfo().getTmpDirectories());

			try {
				boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);

				int numRecords = 0;
				while (reader.next() != null) {
					if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
						Thread.sleep(IS_SLOW_SLEEP_MS);
					}
				}
			}
			finally {
				reader.clearBuffers();
			}
		}
	}

	/**
	 * Fixed-size (128 byte) payload record. The content is a constant byte
	 * pattern; only the volume of data matters for this test.
	 */
	private static class SpeedTestRecord implements IOReadableWritable {

		private static final int RECORD_SIZE = 128;

		private final byte[] buf = new byte[RECORD_SIZE];

		public SpeedTestRecord() {
			for (int i = 0; i < RECORD_SIZE; ++i) {
				this.buf[i] = (byte) (i % 128);
			}
		}

		@Override
		public void write(DataOutputView out) throws IOException {
			out.write(this.buf);
		}

		@Override
		public void read(DataInputView in) throws IOException {
			in.readFully(this.buf);
		}
	}

	// ------------------------------------------------------------------------

	/**
	 * Runs the throughput measurement for each parameter combination:
	 * {data volume GB, use forwarder, slow sender, slow receiver,
	 * parallelism, slots per task manager}. A fresh mini cluster is started
	 * and stopped for every combination.
	 */
	public void testThroughput() throws Exception {
		Object[][] configParams = new Object[][]{
				new Object[]{1, false, false, false, 4, 2},
				new Object[]{1, true, false, false, 4, 2},
				new Object[]{1, true, true, false, 4, 2},
				new Object[]{1, true, false, true, 4, 2},
				new Object[]{2, true, false, false, 4, 2},
				new Object[]{4, true, false, false, 4, 2},
				new Object[]{4, true, false, false, 8, 4},
		};

		for (Object[] p : configParams) {
			Configuration config = new Configuration();
			config.setInteger(DATA_VOLUME_GB_CONFIG_KEY, (Integer) p[0]);
			config.setBoolean(USE_FORWARDER_CONFIG_KEY, (Boolean) p[1]);
			config.setBoolean(IS_SLOW_SENDER_CONFIG_KEY, (Boolean) p[2]);
			config.setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, (Boolean) p[3]);
			config.setInteger(PARALLELISM_CONFIG_KEY, (Integer) p[4]);
			config.setInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, (Integer) p[5]);

			TestBaseWrapper test = new TestBaseWrapper(config);
			test.startCluster();

			System.out.println(Arrays.toString(p));
			test.testProgram();
			test.stopCluster();
		}
	}

	/** Entry point for manual runs (the class is @Ignore'd for JUnit). */
	private void runAllTests() throws Exception {
		testThroughput();

		System.out.println("Done.");
	}

	public static void main(String[] args) throws Exception {
		new NetworkStackThroughputITCase().runAllTests();
	}
}
mtunique/flink
flink-tests/src/test/java/org/apache/flink/test/runtime/NetworkStackThroughputITCase.java
Java
apache-2.0
10,771
   /*******************************************************/
   /*      "C" Language Integrated Production System      */
   /*                                                     */
   /*             CLIPS Version 6.20  01/31/02            */
   /*                                                     */
   /*              DEFFACTS PARSER HEADER FILE            */
   /*******************************************************/

/*************************************************************/
/* Purpose: Declares the parsing entry point for the         */
/*   deffacts construct (see the matching .c source file).   */
/*                                                           */
/* Principal Programmer(s):                                  */
/*      Gary D. Riley                                        */
/*                                                           */
/* Contributing Programmer(s):                               */
/*      Brian L. Donnell                                     */
/*                                                           */
/* Revision History:                                         */
/*                                                           */
/*************************************************************/

#ifndef _H_dffctpsr

#define _H_dffctpsr

/* LOCALE expands to nothing when this header is included by the
   implementation file (which defines _DFFCTPSR_SOURCE_ before
   inclusion) and to `extern` for every other includer. This is the
   standard CLIPS idiom for single-definition declarations. */
#ifdef LOCALE
#undef LOCALE
#endif

#ifdef _DFFCTPSR_SOURCE_
#define LOCALE
#else
#define LOCALE extern
#endif

/* Parses a deffacts construct from the named logical input source.
   First argument is the CLIPS environment pointer; second is the
   logical name to read from. Returns an int status code. */
LOCALE int                            ParseDeffacts(void *,char *);

#endif
jarcec/rbclips
src/clips/dffctpsr.h
C
apache-2.0
1,468
/* * ************************************************************************* * Copyright (C) FRS Belgium NV ("FRSGlobal"). All rights reserved. * * This computer program is protected by copyright law and international * treaties. Unauthorized reproduction or distribution of this program, * or any portion of it, may result in severe civil and criminal penalties, * and will be prosecuted to the maximum extent possible under the law. * ************************************************************************* */ package org.cluj.bus.servlet; import com.google.gson.Gson; import org.cluj.bus.model.BusSchedule; import org.cluj.bus.model.BusScheduleDTO; import org.cluj.bus.model.CategorySchedule; import org.cluj.bus.services.JPARepository; import org.cluj.bus.util.ScheduleUtilities; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.text.ParseException; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; public class BusScheduleServlet extends HttpServlet { private static final Logger LOGGER = Logger.getLogger(BusScheduleServlet.class.getName()); @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { doPost(req, resp); } @Override protected void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { String busId = httpServletRequest.getParameter(ServletUtils.BUS_ID_PARAMETER_KEY); ServletUtils.sendResponse(httpServletResponse, getResponseString(busId)); } private String getResponseString(String busId) { List<BusSchedule> busSchedules = new JPARepository<>(BusSchedule.class).findAll("busId", busId); Map<String, CategorySchedule> categorySchedules = new HashMap<>(); for (BusSchedule busSchedule : busSchedules) { String days = busSchedule.getDays(); 
CategorySchedule categorySchedule = categorySchedules.get(days); if (categorySchedule == null) { categorySchedule = new CategorySchedule(); categorySchedules.put(days, categorySchedule); categorySchedule.setDisplayName(busSchedule.getCategory()); categorySchedule.setApplicableDays(getApplicableDays(days)); } Collection<Date> startTimes = categorySchedule.getStartTimes(); if (startTimes == null) { startTimes = new ArrayList<>(); categorySchedule.setStartTimes(startTimes); } try { startTimes.add(ScheduleUtilities.getStartTime(busSchedule.getStartTime())); } catch (ParseException e) { LOGGER.log(Level.SEVERE, "Error parsing start time", e); } } BusScheduleDTO schedule = new BusScheduleDTO(); schedule.setSchedules(categorySchedules.values()); return new Gson().toJson(schedule); } private Collection<Integer> getApplicableDays(String days) { List<Integer> applicableDays = new ArrayList<>(); for (char aChar : days.toCharArray()) { int day = Integer.parseInt(String.valueOf(aChar)); applicableDays.add(day); } return applicableDays; } }
abotos/ClujLiveTransit
Java/appengine-code/appengine-web-ui/src/java/org/cluj/bus/servlet/BusScheduleServlet.java
Java
apache-2.0
3,589
# Rhodocybe fallax (Quél.) Singer, 1946 SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in Farlowia 2: 549 (1946) #### Original name Omphalia fallax Quél., 1896 ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Agaricomycetes/Agaricales/Entolomataceae/Rhodocybe/Rhodocybe fallax/README.md
Markdown
apache-2.0
238
# XML Data Source for Apache Spark

[![Build Status](https://travis-ci.org/databricks/spark-xml.svg?branch=master)](https://travis-ci.org/databricks/spark-xml) [![codecov.io](http://codecov.io/github/databricks/spark-xml/coverage.svg?branch=master)](http://codecov.io/github/databricks/spark-xml?branch=master)

- A library for parsing and querying XML data with Apache Spark, for Spark SQL and DataFrames. The structure and test tools are mostly copied from [CSV Data Source for Spark](https://github.com/databricks/spark-csv).
- This package supports processing format-free XML files in a distributed way, unlike the JSON data source in Spark, which is restricted to in-line JSON format.

## Requirements

This library requires Spark 1.3+

## Linking

You can link against this library in your program at the following coordinates:

### Scala 2.10
```
groupId: com.databricks
artifactId: spark-xml_2.10
version: 0.3.3
```

### Scala 2.11
```
groupId: com.databricks
artifactId: spark-xml_2.11
version: 0.3.3
```

## Using with Spark shell

This package can be added to Spark using the `--packages` command line option. For example, to include it when starting the spark shell:

### Spark compiled with Scala 2.10
```
$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.10:0.3.3
```

### Spark compiled with Scala 2.11
```
$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.11:0.3.3
```

## Features

This package allows reading XML files in local or distributed filesystem as [Spark DataFrames](https://spark.apache.org/docs/1.6.0/sql-programming-guide.html). When reading files the API accepts several options:

* `path`: Location of files. As with Spark, this can accept standard Hadoop globbing expressions.
* `rowTag`: The row tag of your xml files to treat as a row. For example, in this xml `<books> <book></book> ...</books>`, the appropriate value would be `book`. Default is `ROW`.
* `samplingRatio`: Sampling ratio for inferring schema (0.0 ~ 1). Default is 1.
Possible types are `StructType`, `ArrayType`, `StringType`, `LongType`, `DoubleType`, `BooleanType`, `TimestampType` and `NullType`, unless user provides a schema for this. * `excludeAttribute` : Whether you want to exclude attributes in elements or not. Default is false. * `treatEmptyValuesAsNulls` : Whether you want to treat whitespaces as a null value. Default is false. * `failFast` : Whether you want to fail when it fails to parse malformed rows in XML files, instead of dropping the rows. Default is false. * `attributePrefix`: The prefix for attributes so that we can differentiate attributes and elements. This will be the prefix for field names. Default is `@`. * `valueTag`: The tag used for the value when there are attributes in the element having no child. Default is `#VALUE`. * `charset`: Defaults to 'UTF-8' but can be set to other valid charset names When writing files the API accepts several options: * `path`: Location to write files. * `rowTag`: The row tag of your xml files to treat as a row. For example, in this xml `<books> <book><book> ...</books>`, the appropriate value would be `book`. Default is `ROW`. * `rootTag`: The root tag of your xml files to treat as the root. For example, in this xml `<books> <book><book> ...</books>`, the appropriate value would be `books`. Default is `ROWS`. * `nullValue`: The value to write `null` value. Default is string `null`. When this is `null`, it does not write attributes and elements for fields. * `attributePrefix`: The prefix for attributes so that we can differentiating attributes and elements. This will be the prefix for field names. Default is `@`. * `valueTag`: The tag used for the value when there are attributes in the element having no child. Default is `#VALUE`. * `codec`: compression codec to use when saving to file. Should be the fully qualified name of a class implementing `org.apache.hadoop.io.compress.CompressionCodec` or one of case-insensitive shorten names (`bzip2`, `gzip`, `lz4`, and `snappy`). 
Defaults to no compression when a codec is not specified. Currently it supports the shortened name usage. You can use just `xml` instead of `com.databricks.spark.xml` from Spark 1.5.0+ ## Structure Conversion Due to the structure differences between `DataFrame` and XML, there are some conversion rules from XML data to `DataFrame` and from `DataFrame` to XML data. Note that handling attributes can be disabled with the option `excludeAttribute`. ### Conversion from XML to `DataFrame` - __Attributes__: Attributes are converted as fields with the heading prefix, `attributePrefix`. ```xml ... <one myOneAttrib="AAAA"> <two>two</two> <three>three</three> </one> ... ``` produces a schema below: ``` root |-- @myOneAttrib: string (nullable = true) |-- two: string (nullable = true) |-- three: string (nullable = true) ``` - __Value in an element that has no child elements but attributes__: The value is put in a separate field, `valueTag`. ```xml ... <one> <two myTwoAttrib="BBBBB">two</two> <three>three</three> </one> ... ``` produces a schema below: ``` root |-- two: struct (nullable = true) | |-- #VALUE: string (nullable = true) | |-- @myTwoAttrib: string (nullable = true) |-- three: string (nullable = true) ``` ### Conversion from `DataFrame` to XML - __Element as an array in an array__: Writing a XML file from `DataFrame` having a field `ArrayType` with its element as `ArrayType` would have an additional nested field for the element. This would not happen in reading and writing XML data but writing a `DataFrame` read from other sources. Therefore, roundtrip in reading and writing XML files has the same structure but writing a `DataFrame` read from other sources is possible to have a different structure. 
`DataFrame` with a schema below: ``` |-- a: array (nullable = true) | |-- element: array (containsNull = true) | | |-- element: string (containsNull = true) ``` with data below: ``` +------------------------------------+ | a| +------------------------------------+ |[WrappedArray(aa), WrappedArray(bb)]| +------------------------------------+ ``` produces a XML file below: ```xml ... <a> <item>aa</item> </a> <a> <item>bb</item> </a> ... ``` ## Examples These examples use a XML file available for download [here](https://github.com/databricks/spark-xml/raw/master/src/test/resources/books.xml): ``` $ wget https://github.com/databricks/spark-xml/raw/master/src/test/resources/books.xml ``` ### SQL API XML data source for Spark can infer data types: ```sql CREATE TABLE books USING com.databricks.spark.xml OPTIONS (path "books.xml", rowTag "book") ``` You can also specify column names and types in DDL. In this case, we do not infer schema. ```sql CREATE TABLE books (author string, description string, genre string, @id string, price double, publish_date string, title string) USING com.databricks.spark.xml OPTIONS (path "books.xml", rowTag "book") ``` ### Scala API __Spark 1.4+:__ ```scala import org.apache.spark.sql.SQLContext val sqlContext = new SQLContext(sc) val df = sqlContext.read .format("com.databricks.spark.xml") .option("rowTag", "book") .load("books.xml") val selectedData = df.select("author", "@id") selectedData.write .format("com.databricks.spark.xml") .option("rootTag", "books") .option("rowTag", "book") .save("newbooks.xml") ``` You can manually specify the schema when reading data: ```scala import org.apache.spark.sql.SQLContext import org.apache.spark.sql.types.{StructType, StructField, StringType, DoubleType}; val sqlContext = new SQLContext(sc) val customSchema = StructType(Array( StructField("@id", StringType, nullable = true), StructField("author", StringType, nullable = true), StructField("description", StringType, nullable = true), StructField("genre", 
StringType ,nullable = true), StructField("price", DoubleType, nullable = true), StructField("publish_date", StringType, nullable = true), StructField("title", StringType, nullable = true))) val df = sqlContext.read .format("com.databricks.spark.xml") .option("rowTag", "book") .schema(customSchema) .load("books.xml") val selectedData = df.select("author", "@id") selectedData.write .format("com.databricks.spark.xml") .option("rootTag", "books") .option("rowTag", "book") .save("newbooks.xml") ``` __Spark 1.3:__ ```scala import org.apache.spark.sql.SQLContext val sqlContext = new SQLContext(sc) val df = sqlContext.load( "com.databricks.spark.xml", Map("path" -> "books.xml", "rowTag" -> "book")) val selectedData = df.select("author", "@id") selectedData.save("com.databricks.spark.xml", SaveMode.ErrorIfExists, Map("path" -> "newbooks.xml", "rootTag" -> "books", "rowTag" -> "book")) ``` You can manually specify the schema when reading data: ```scala import org.apache.spark.sql.SQLContext import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType}; val sqlContext = new SQLContext(sc) val customSchema = StructType(Array( StructField("@id", StringType, nullable = true), StructField("author", StringType, nullable = true), StructField("description", StringType, nullable = true), StructField("genre", StringType ,nullable = true), StructField("price", DoubleType, nullable = true), StructField("publish_date", StringType, nullable = true), StructField("title", StringType, nullable = true))) val df = sqlContext.load( "com.databricks.spark.xml", schema = customSchema, Map("path" -> "books.xml", "rowTag" -> "book")) val selectedData = df.select("author", "@id") selectedData.save("com.databricks.spark.xml", SaveMode.ErrorIfExists, Map("path" -> "newbooks.xml", "rootTag" -> "books", "rowTag" -> "book")) ``` ### Java API __Spark 1.4+:__ ```java import org.apache.spark.sql.SQLContext SQLContext sqlContext = new SQLContext(sc); DataFrame df = sqlContext.read() 
.format("com.databricks.spark.xml") .option("rowTag", "book") .load("books.xml"); df.select("author", "@id").write() .format("com.databricks.spark.xml") .option("rootTag", "books") .option("rowTag", "book") .save("newbooks.xml"); ``` You can manually specify schema: ```java import org.apache.spark.sql.SQLContext; import org.apache.spark.sql.types.*; SQLContext sqlContext = new SQLContext(sc); StructType customSchema = new StructType(new StructField[] { new StructField("@id", DataTypes.StringType, true, Metadata.empty()), new StructField("author", DataTypes.StringType, true, Metadata.empty()), new StructField("description", DataTypes.StringType, true, Metadata.empty()), new StructField("genre", DataTypes.StringType, true, Metadata.empty()), new StructField("price", DataTypes.DoubleType, true, Metadata.empty()), new StructField("publish_date", DataTypes.StringType, true, Metadata.empty()), new StructField("title", DataTypes.StringType, true, Metadata.empty()) }); DataFrame df = sqlContext.read() .format("com.databricks.spark.xml") .option("rowTag", "book") .schema(customSchema) .load("books.xml"); df.select("author", "@id").write() .format("com.databricks.spark.xml") .option("rootTag", "books") .option("rowTag", "book") .save("newbooks.xml"); ``` __Spark 1.3:__ ```java import org.apache.spark.sql.SQLContext SQLContext sqlContext = new SQLContext(sc); HashMap<String, String> options = new HashMap<String, String>(); options.put("rowTag", "book"); options.put("path", "books.xml"); DataFrame df = sqlContext.load("com.databricks.spark.xml", options); HashMap<String, String> options = new HashMap<String, String>(); options.put("rowTag", "book"); options.put("rootTag", "books"); options.put("path", "newbooks.xml"); df.select("author", "@id").save("com.databricks.spark.xml", SaveMode.ErrorIfExists, options) ``` You can manually specify schema: ```java import org.apache.spark.sql.SQLContext; import org.apache.spark.sql.types.*; SQLContext sqlContext = new SQLContext(sc); 
StructType customSchema = new StructType(new StructField[] { new StructField("@id", DataTypes.StringType, true, Metadata.empty()), new StructField("author", DataTypes.StringType, true, Metadata.empty()), new StructField("description", DataTypes.StringType, true, Metadata.empty()), new StructField("genre", DataTypes.StringType, true, Metadata.empty()), new StructField("price", DataTypes.DoubleType, true, Metadata.empty()), new StructField("publish_date", DataTypes.StringType, true, Metadata.empty()), new StructField("title", DataTypes.StringType, true, Metadata.empty()) }); HashMap<String, String> options = new HashMap<String, String>(); options.put("rowTag", "book"); options.put("path", "books.xml"); DataFrame df = sqlContext.load("com.databricks.spark.xml", customSchema, options); HashMap<String, String> options = new HashMap<String, String>(); options.put("rowTag", "book"); options.put("rootTag", "books"); options.put("path", "newbooks.xml"); df.select("author", "@id").save("com.databricks.spark.xml", SaveMode.ErrorIfExists, options) ``` ### Python API __Spark 1.4+:__ ```python from pyspark.sql import SQLContext sqlContext = SQLContext(sc) df = sqlContext.read.format('com.databricks.spark.xml').options(rowTag='book').load('books.xml') df.select("author", "@id").write \ .format('com.databricks.spark.xml') \ .options(rowTag='book', rootTag='books') \ .save('newbooks.xml') ``` You can manually specify schema: ```python from pyspark.sql import SQLContext from pyspark.sql.types import * sqlContext = SQLContext(sc) customSchema = StructType([ \ StructField("@id", StringType(), True), \ StructField("author", StringType(), True), \ StructField("description", StringType(), True), \ StructField("genre", StringType(), True), \ StructField("price", DoubleType(), True), \ StructField("publish_date", StringType(), True), \ StructField("title", StringType(), True)]) df = sqlContext.read \ .format('com.databricks.spark.xml') \ .options(rowTag='book') \ .load('books.xml', schema 
= customSchema) df.select("author", "@id").write \ .format('com.databricks.spark.xml') \ .options(rowTag='book', rootTag='books') \ .save('newbooks.xml') ``` __Spark 1.3:__ ```python from pyspark.sql import SQLContext sqlContext = SQLContext(sc) df = sqlContext.load(source="com.databricks.spark.xml", rowTag = 'book', path = 'books.xml') df.select("author", "@id").save('newbooks.xml', rootTag = 'books', rowTag = 'book', path = 'newbooks.xml') ``` You can manually specify schema: ```python from pyspark.sql import SQLContext from pyspark.sql.types import * sqlContext = SQLContext(sc) customSchema = StructType([ \ StructField("@id", StringType(), True), \ StructField("author", StringType(), True), \ StructField("description", StringType(), True), \ StructField("genre", StringType(), True), \ StructField("price", DoubleType(), True), \ StructField("publish_date", StringType(), True), \ StructField("title", StringType(), True)]) df = sqlContext.load(source="com.databricks.spark.xml", rowTag = 'book', schema = customSchema, path = 'books.xml') df.select("author", "@id").save('newbooks.xml', rootTag = 'books', rowTag = 'book', path = 'newbooks.xml') ``` ### R API __Spark 1.4+:__ Automatically infer schema (data types) ```R library(SparkR) Sys.setenv('SPARKR_SUBMIT_ARGS'='"--packages" "com.databricks:spark-xml_2.10:0.3.3" "sparkr-shell"') sqlContext <- sparkRSQL.init(sc) df <- read.df(sqlContext, "books.xml", source = "com.databricks.spark.xml", rowTag = "book") # In this case, `rootTag` is set to "ROWS" and `rowTag` is set to "ROW". 
write.df(df, "newbooks.xml", "com.databricks.spark.xml", "overwrite")
```

You can manually specify schema:
```R
library(SparkR)

Sys.setenv('SPARKR_SUBMIT_ARGS'='"--packages" "com.databricks:spark-xml_2.10:0.3.3" "sparkr-shell"')
sqlContext <- sparkRSQL.init(sc)
customSchema <- structType(
  structField("@id", "string"),
  structField("author", "string"),
  structField("description", "string"),
  structField("genre", "string"),
  structField("price", "double"),
  structField("publish_date", "string"),
  structField("title", "string"))

df <- read.df(sqlContext, "books.xml", source = "com.databricks.spark.xml", schema = customSchema, rowTag = "book")

# In this case, `rootTag` is set to "ROWS" and `rowTag` is set to "ROW".
write.df(df, "newbooks.xml", "com.databricks.spark.xml", "overwrite")
```

## Hadoop InputFormat

The library contains a Hadoop input format for reading XML files by a start tag and an end tag. This is similar to [XmlInputFormat.java](https://github.com/apache/mahout/blob/9d14053c80a1244bdf7157ab02748a492ae9868a/integration/src/main/java/org/apache/mahout/text/wikipedia/XmlInputFormat.java) in [Mahout](http://mahout.apache.org) but supports reading compressed files, different encodings and reading elements including attributes, which you may make direct use of as follows:

```scala
import com.databricks.spark.xml.XmlInputFormat

// This will detect the tags including attributes
sc.hadoopConfiguration.set(XmlInputFormat.START_TAG_KEY, "<book>")
sc.hadoopConfiguration.set(XmlInputFormat.END_TAG_KEY, "</book>")
sc.hadoopConfiguration.set(XmlInputFormat.ENCODING_KEY, "utf-8")

val records = sc.newAPIHadoopFile(
  path,
  classOf[XmlInputFormat],
  classOf[LongWritable],
  classOf[Text])
```

## Building From Source

This library is built with [SBT](http://www.scala-sbt.org/0.13/docs/Command-Line-Reference.html), which is automatically downloaded by the included shell script. To build a JAR file simply run `sbt/sbt package` from the project root.
The build configuration includes support for both Scala 2.10 and 2.11. ## Acknowledgements This project was initially created by [HyukjinKwon](https://github.com/HyukjinKwon) and donated to [Databricks](https://databricks.com).
lokm01/spark-xml
README.md
Markdown
apache-2.0
18,194
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.netty4;

import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadFactory;

import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;

/**
 * Camel component that creates Netty 4 based endpoints.
 * <p>
 * Holds an optional component-level {@link NettyConfiguration} that is copied
 * for each created endpoint, and lazily manages a shared
 * {@link EventExecutorGroup} used by the endpoints.
 */
public class NettyComponent extends UriEndpointComponent {
    private NettyConfiguration configuration;
    private volatile EventExecutorGroup executorService;

    public NettyComponent() {
        super(NettyEndpoint.class);
    }

    public NettyComponent(Class<? extends Endpoint> endpointClass) {
        super(endpointClass);
    }

    public NettyComponent(CamelContext context) {
        super(context, NettyEndpoint.class);
    }

    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // start from a copy of the component-level configuration (if any) so
        // individual endpoints never share mutable configuration state
        NettyConfiguration endpointConfig = configuration != null ? configuration.copy() : new NettyConfiguration();
        endpointConfig = parseConfiguration(endpointConfig, remaining, parameters);

        // merge any custom bootstrap configuration on the config
        NettyServerBootstrapConfiguration bootstrapConfig
                = resolveAndRemoveReferenceParameter(parameters, "bootstrapConfiguration", NettyServerBootstrapConfiguration.class);
        if (bootstrapConfig != null) {
            Map<String, Object> bootstrapOptions = new HashMap<String, Object>();
            if (IntrospectionSupport.getProperties(bootstrapConfig, bootstrapOptions, null, false)) {
                IntrospectionSupport.setProperties(getCamelContext().getTypeConverter(), endpointConfig, bootstrapOptions);
            }
        }

        // fail fast on an invalid endpoint configuration
        endpointConfig.validateConfiguration();

        NettyEndpoint answer = new NettyEndpoint(remaining, this, endpointConfig);
        setProperties(answer.getConfiguration(), parameters);
        return answer;
    }

    /**
     * Parses the configuration
     *
     * @return the parsed and valid configuration to use
     */
    protected NettyConfiguration parseConfiguration(NettyConfiguration configuration, String remaining, Map<String, Object> parameters) throws Exception {
        configuration.parseURI(new URI(remaining), parameters, this, "tcp", "udp");
        return configuration;
    }

    public NettyConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(NettyConfiguration configuration) {
        this.configuration = configuration;
    }

    public void setExecutorService(EventExecutorGroup executorService) {
        this.executorService = executorService;
    }

    /** Lazily creates the shared executor group on first access. */
    public synchronized EventExecutorGroup getExecutorService() {
        if (executorService == null) {
            executorService = createExecutorService();
        }
        return executorService;
    }

    @Override
    protected void doStart() throws Exception {
        if (configuration == null) {
            configuration = new NettyConfiguration();
        }
        // eagerly create the executor group when the configuration asks for one
        if (configuration.isUsingExecutorService() && executorService == null) {
            executorService = createExecutorService();
        }
        super.doStart();
    }

    protected EventExecutorGroup createExecutorService() {
        // Provide the executor service for the application
        // and use a Camel thread factory so we have consistent thread namings
        // we should use a shared thread pool as recommended by Netty
        String threadNamePattern = getCamelContext().getExecutorServiceManager().getThreadNamePattern();
        ThreadFactory threadFactory = new CamelThreadFactory(threadNamePattern, "NettyEventExecutorGroup", true);
        return new DefaultEventExecutorGroup(configuration.getMaximumPoolSize(), threadFactory);
    }

    @Override
    protected void doStop() throws Exception {
        // shut down the shared executor group so threads are not leaked
        if (executorService != null) {
            getCamelContext().getExecutorServiceManager().shutdownNow(executorService);
            executorService = null;
        }
        super.doStop();
    }
}
logzio/camel
components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyComponent.java
Java
apache-2.0
5,124
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.valves.rewrite; import java.nio.charset.Charset; /** * Resolver abstract class. */ public abstract class Resolver { public abstract String resolve(String key); public String resolveEnv(String key) { return System.getProperty(key); } public abstract String resolveSsl(String key); public abstract String resolveHttp(String key); public abstract boolean resolveResource(int type, String name); /** * @return The name of the encoding to use to %nn encode URIs * * @deprecated This will be removed in Tomcat 9.0.x */ @Deprecated public abstract String getUriEncoding(); public abstract Charset getUriCharset(); }
IAMTJW/Tomcat-8.5.20
tomcat-8.5.20/java/org/apache/catalina/valves/rewrite/Resolver.java
Java
apache-2.0
1,568
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <meta httpEquiv="Content-Type" content="text/html; charset=utf-8"/> <title>Test results - Class carlosgsouza.vinylshop.functional.v1.SummaryFunctionalSpec</title> <link href="base-style.css" rel="stylesheet" type="text/css"/> <link href="style.css" rel="stylesheet" type="text/css"/> <script src="report.js" type="text/javascript"></script> </head> <body> <div id="content"> <h1>Class carlosgsouza.vinylshop.functional.v1.SummaryFunctionalSpec</h1> <div class="breadcrumbs"> <a href="index.html">all</a> &gt; <a href="carlosgsouza.vinylshop.functional.v1.html">carlosgsouza.vinylshop.functional.v1</a> &gt; SummaryFunctionalSpec</div> <div id="summary"> <table> <tr> <td> <div class="summaryGroup"> <table> <tr> <td> <div class="infoBox" id="tests"> <div class="counter">1</div> <p>tests</p> </div> </td> <td> <div class="infoBox" id="failures"> <div class="counter">0</div> <p>failures</p> </div> </td> <td> <div class="infoBox" id="duration"> <div class="counter">0.054s</div> <p>duration</p> </div> </td> </tr> </table> </div> </td> <td> <div class="infoBox success" id="successRate"> <div class="percent">100%</div> <p>successful</p> </div> </td> </tr> </table> </div> <div id="tabs"> <ul class="tabLinks"> <li> <a href="#tab0">Tests</a> </li> </ul> <div id="tab0" class="tab"> <h2>Tests</h2> <table> <thead> <tr> <th>Test</th> <th>Duration</th> <th>Result</th> </tr> </thead> <tr> <td class="success">should show a summary of the vinyls</td> <td>0.054s</td> <td class="success">passed</td> </tr> </table> </div> </div> <div id="footer"> <p>Generated by <a href="http://www.gradle.org">Gradle 1.8</a> at Dec 14, 2013 2:32:16 PM</p> </div> </div> </body>
carlosgsouza/types-and-quality
experiments/1_vinyl_collection/a0/analysis/data/pedro/snapshots/1386956683120/build/reports/tests/carlosgsouza.vinylshop.functional.v1.SummaryFunctionalSpec.html
HTML
apache-2.0
1,764
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.droids.impl;

import java.util.Date;
import java.util.Queue;
import java.util.concurrent.TimeUnit;

import org.apache.droids.api.DelayTimer;
import org.apache.droids.api.Droid;
import org.apache.droids.api.Task;
import org.apache.droids.api.TaskExceptionHandler;
import org.apache.droids.api.TaskExceptionResult;
import org.apache.droids.api.TaskMaster;
import org.apache.droids.api.Worker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link TaskMaster} that drains the task queue sequentially on the calling
 * thread: one task at a time, one worker per task, with an optional
 * inter-task delay and pluggable exception handling.
 */
public class SequentialTaskMaster<T extends Task> implements TaskMaster<T> {

    private static final Logger LOG = LoggerFactory.getLogger(SequentialTaskMaster.class);

    // Guards 'completed' and is notified when the run finishes, so
    // awaitTermination() can block until the queue is drained.
    private final Object mutex;

    private volatile boolean completed;
    private volatile Date startedWorking = null;
    private volatile Date finishedWorking = null;
    private volatile int completedTask = 0;
    private volatile T lastCompletedTask = null;
    private volatile ExecutionState state = ExecutionState.INITIALIZED;

    private DelayTimer delayTimer = null;
    private TaskExceptionHandler exHandler = null;

    public SequentialTaskMaster() {
        super();
        this.mutex = new Object();
    }

    /**
     * The queue has been initialized
     */
    @Override
    public synchronized void start(final Queue<T> queue, final Droid<T> droid) {
        this.completed = false;
        this.startedWorking = new Date();
        this.finishedWorking = null;
        this.completedTask = 0;
        this.state = ExecutionState.RUNNING;

        boolean terminated = false;
        while (!terminated) {
            T task = queue.poll();
            if (task == null) {
                // queue drained: normal completion
                break;
            }
            if (delayTimer != null) {
                long delay = delayTimer.getDelayMillis();
                if (delay > 0) {
                    try {
                        Thread.sleep(delay);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt status instead of silently
                        // swallowing it, so callers/workers can observe the
                        // interruption request.
                        Thread.currentThread().interrupt();
                    }
                }
            }
            Worker<T> worker = droid.getNewWorker();
            try {
                if (!task.isAborted()) {
                    worker.execute(task);
                }
                completedTask++;
                lastCompletedTask = task;
            } catch (Exception ex) {
                // default to a warning when no handler is configured
                TaskExceptionResult result = TaskExceptionResult.WARN;
                if (exHandler != null) {
                    result = exHandler.handleException(ex);
                }
                switch (result) {
                case WARN:
                    LOG.warn(ex.toString() + " " + task.getId());
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(ex.toString(), ex);
                    }
                    break;
                case FATAL:
                    // a fatal result aborts the whole run
                    LOG.error(ex.getMessage(), ex);
                    terminated = true;
                    break;
                }
            }
        }

        finishedWorking = new Date();
        this.state = ExecutionState.STOPPED;
        droid.finished();
        // wake up any threads blocked in awaitTermination()
        synchronized (mutex) {
            completed = true;
            mutex.notifyAll();
        }
    }

    @Override
    public final void setExceptionHandler(TaskExceptionHandler exHandler) {
        this.exHandler = exHandler;
    }

    @Override
    public final void setDelayTimer(DelayTimer delayTimer) {
        this.delayTimer = delayTimer;
    }

    /** @return true while a run has started but not yet finished */
    public boolean isWorking() {
        return startedWorking != null && finishedWorking == null;
    }

    @Override
    public Date getStartTime() {
        return startedWorking;
    }

    @Override
    public Date getFinishedWorking() {
        return finishedWorking;
    }

    @Override
    public long getCompletedTasks() {
        return completedTask;
    }

    @Override
    public T getLastCompletedTask() {
        return lastCompletedTask;
    }

    /**
     * Blocks until the run completes or the timeout elapses.
     *
     * @return true if the run completed, false on timeout
     */
    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
        if (timeout < 0) {
            timeout = 0;
        }
        synchronized (this.mutex) {
            long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
            long remaining = timeout;
            while (!completed) {
                this.mutex.wait(remaining);
                if (timeout >= 0) {
                    remaining = deadline - System.currentTimeMillis();
                    if (remaining <= 0) {
                        return false; // Reach if timeout is over and no finish.
                    }
                }
            }
        }
        return true;
    }

    @Override
    public ExecutionState getExecutionState() {
        return state;
    }
}
fogbeam/Heceta_droids
droids-core/src/main/java/org/apache/droids/impl/SequentialTaskMaster.java
Java
apache-2.0
4,864
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Immutable;
using System.Composition;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Completion;
using Microsoft.CodeAnalysis.Completion.Providers;
using Microsoft.CodeAnalysis.CSharp.Completion.KeywordRecommenders;
using Microsoft.CodeAnalysis.CSharp.Extensions.ContextQuery;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;

namespace Microsoft.CodeAnalysis.CSharp.Completion.Providers
{
    /// <summary>
    /// Completion provider that offers C# keywords, delegating the decision of
    /// which keywords are valid at the caret to a fixed set of per-keyword
    /// <see cref="IKeywordRecommender{TContext}"/> instances.
    /// </summary>
    [ExportCompletionProvider(nameof(KeywordCompletionProvider), LanguageNames.CSharp)]
    [ExtensionOrder(After = nameof(NamedParameterCompletionProvider))]
    [Shared]
    internal class KeywordCompletionProvider : AbstractKeywordCompletionProvider<CSharpSyntaxContext>
    {
        [ImportingConstructor]
        [Obsolete(MefConstruction.ImportingConstructorMessage, error: true)]
        public KeywordCompletionProvider()
            : base(GetKeywordRecommenders())
        {
        }

        // One recommender per keyword/directive; each decides independently
        // whether its keyword applies in a given syntactic context.
        private static ImmutableArray<IKeywordRecommender<CSharpSyntaxContext>> GetKeywordRecommenders()
        {
            return new IKeywordRecommender<CSharpSyntaxContext>[]
                {
                    new AbstractKeywordRecommender(),
                    new AddKeywordRecommender(),
                    new AliasKeywordRecommender(),
                    new AnnotationsKeywordRecommender(),
                    new AscendingKeywordRecommender(),
                    new AsKeywordRecommender(),
                    new AssemblyKeywordRecommender(),
                    new AsyncKeywordRecommender(),
                    new AwaitKeywordRecommender(),
                    new BaseKeywordRecommender(),
                    new BoolKeywordRecommender(),
                    new BreakKeywordRecommender(),
                    new ByKeywordRecommender(),
                    new ByteKeywordRecommender(),
                    new CaseKeywordRecommender(),
                    new CatchKeywordRecommender(),
                    new CharKeywordRecommender(),
                    new CheckedKeywordRecommender(),
                    new ChecksumKeywordRecommender(),
                    new ClassKeywordRecommender(),
                    new ConstKeywordRecommender(),
                    new ContinueKeywordRecommender(),
                    new DecimalKeywordRecommender(),
                    new DefaultKeywordRecommender(),
                    new DefineKeywordRecommender(),
                    new DelegateKeywordRecommender(),
                    new DescendingKeywordRecommender(),
                    new DisableKeywordRecommender(),
                    new DoKeywordRecommender(),
                    new DoubleKeywordRecommender(),
                    new DynamicKeywordRecommender(),
                    new ElifKeywordRecommender(),
                    new ElseKeywordRecommender(),
                    new EnableKeywordRecommender(),
                    new EndIfKeywordRecommender(),
                    new EndRegionKeywordRecommender(),
                    new EnumKeywordRecommender(),
                    new EqualsKeywordRecommender(),
                    new ErrorKeywordRecommender(),
                    new EventKeywordRecommender(),
                    new ExplicitKeywordRecommender(),
                    new ExternKeywordRecommender(),
                    new FalseKeywordRecommender(),
                    new FieldKeywordRecommender(),
                    new FinallyKeywordRecommender(),
                    new FixedKeywordRecommender(),
                    new FloatKeywordRecommender(),
                    new ForEachKeywordRecommender(),
                    new ForKeywordRecommender(),
                    new FromKeywordRecommender(),
                    new GetKeywordRecommender(),
                    new GlobalKeywordRecommender(),
                    new GotoKeywordRecommender(),
                    new GroupKeywordRecommender(),
                    new HiddenKeywordRecommender(),
                    new IfKeywordRecommender(),
                    new ImplicitKeywordRecommender(),
                    new InKeywordRecommender(),
                    new InterfaceKeywordRecommender(),
                    new InternalKeywordRecommender(),
                    new IntKeywordRecommender(),
                    new IntoKeywordRecommender(),
                    new IsKeywordRecommender(),
                    new JoinKeywordRecommender(),
                    new LetKeywordRecommender(),
                    new LineKeywordRecommender(),
                    new LoadKeywordRecommender(),
                    new LockKeywordRecommender(),
                    new LongKeywordRecommender(),
                    new MethodKeywordRecommender(),
                    new ModuleKeywordRecommender(),
                    new NameOfKeywordRecommender(),
                    new NamespaceKeywordRecommender(),
                    new NewKeywordRecommender(),
                    new NintKeywordRecommender(),
                    new NotNullKeywordRecommender(),
                    new NuintKeywordRecommender(),
                    new NullableKeywordRecommender(),
                    new NullKeywordRecommender(),
                    new ObjectKeywordRecommender(),
                    new OnKeywordRecommender(),
                    new OperatorKeywordRecommender(),
                    new OrderByKeywordRecommender(),
                    new OutKeywordRecommender(),
                    new OverrideKeywordRecommender(),
                    new ParamKeywordRecommender(),
                    new ParamsKeywordRecommender(),
                    new PartialKeywordRecommender(),
                    new PragmaKeywordRecommender(),
                    new PrivateKeywordRecommender(),
                    new PropertyKeywordRecommender(),
                    new ProtectedKeywordRecommender(),
                    new PublicKeywordRecommender(),
                    new ReadOnlyKeywordRecommender(),
                    new ReferenceKeywordRecommender(),
                    new RefKeywordRecommender(),
                    new RegionKeywordRecommender(),
                    new RemoveKeywordRecommender(),
                    new RestoreKeywordRecommender(),
                    new ReturnKeywordRecommender(),
                    new SByteKeywordRecommender(),
                    new SealedKeywordRecommender(),
                    new SelectKeywordRecommender(),
                    new SetKeywordRecommender(),
                    new ShortKeywordRecommender(),
                    new SizeOfKeywordRecommender(),
                    new StackAllocKeywordRecommender(),
                    new StaticKeywordRecommender(),
                    new StringKeywordRecommender(),
                    new StructKeywordRecommender(),
                    new SwitchKeywordRecommender(),
                    new ThisKeywordRecommender(),
                    new ThrowKeywordRecommender(),
                    new TrueKeywordRecommender(),
                    new TryKeywordRecommender(),
                    new TypeKeywordRecommender(),
                    new TypeOfKeywordRecommender(),
                    new TypeVarKeywordRecommender(),
                    new UIntKeywordRecommender(),
                    new ULongKeywordRecommender(),
                    new UncheckedKeywordRecommender(),
                    new UndefKeywordRecommender(),
                    new UnmanagedKeywordRecommender(),
                    new UnsafeKeywordRecommender(),
                    new UShortKeywordRecommender(),
                    new UsingKeywordRecommender(),
                    new VarKeywordRecommender(),
                    new VirtualKeywordRecommender(),
                    new VoidKeywordRecommender(),
                    new VolatileKeywordRecommender(),
                    new WarningKeywordRecommender(),
                    new WarningsKeywordRecommender(),
                    new WhenKeywordRecommender(),
                    new WhereKeywordRecommender(),
                    new WhileKeywordRecommender(),
                    new YieldKeywordRecommender(),
                }.ToImmutableArray();
        }

        // Typing one of the common trigger characters brings up keyword completion.
        internal override bool IsInsertionTrigger(SourceText text, int characterPosition, OptionSet options)
            => CompletionUtilities.IsTriggerCharacter(text, characterPosition, options);

        internal override ImmutableHashSet<char> TriggerCharacters { get; } = CompletionUtilities.CommonTriggerCharacters;

        // Builds the syntactic/semantic context at the caret that each
        // recommender inspects.
        protected override async Task<CSharpSyntaxContext> CreateContextAsync(Document document, int position, CancellationToken cancellationToken)
        {
            var span = new TextSpan(position, length: 0);
            var semanticModel = await document.GetSemanticModelForSpanAsync(span, cancellationToken).ConfigureAwait(false);
            return CSharpSyntaxContext.CreateContext(document.Project.Solution.Workspace, semanticModel, position, cancellationToken);
        }

        // Inside a tuple, ':' is a valid name/value separator, so it must not
        // commit the completion item.
        private static readonly CompletionItemRules s_tupleRules = CompletionItemRules.Default.
            WithCommitCharacterRule(CharacterSetModificationRule.Create(CharacterSetModificationKind.Remove, ':'));

        // Converts a recommended keyword into the completion item shown in the list.
        protected override CompletionItem CreateItem(RecommendedKeyword keyword, CSharpSyntaxContext context)
        {
            var rules = context.IsPossibleTupleContext ? s_tupleRules : CompletionItemRules.Default;

            return CommonCompletionItem.Create(
                displayText: keyword.Keyword,
                displayTextSuffix: "",
                description: keyword.DescriptionFactory(CancellationToken.None),
                glyph: Glyph.Keyword,
                rules: rules.WithMatchPriority(keyword.MatchPriority)
                            .WithFormatOnCommit(keyword.ShouldFormatOnCommit));
        }

        internal override TextSpan GetCurrentSpan(TextSpan span, SourceText text)
            => CompletionUtilities.GetCompletionItemSpan(text, span.End);
    }
}
reaction1989/roslyn
src/Features/CSharp/Portable/Completion/CompletionProviders/KeywordCompletionProvider.cs
C#
apache-2.0
9,814
# Cymbosetaria sagittifolia (A.Rich.) Schweick. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Hook. F. , Icon. Pl. 34 (T. 3320):1. 1936 #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Setaria/Setaria sagittifolia/ Syn. Cymbosetaria sagittifolia/README.md
Markdown
apache-2.0
239
# Psoralea harveyana Meisn. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Psoralea/Psoralea harveyana/README.md
Markdown
apache-2.0
175
package com.wjyup.coolq.util;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.util.DigestUtils;

import java.nio.charset.StandardCharsets;

/**
 * Message sending utility (original header: 发送消息工具类).
 * @author WJY
 */
public class SendMessageUtil {

    private static Logger log = LogManager.getLogger(SendMessageUtil.class);

    /**
     * Sends a JSON payload to the configured endpoint and returns the raw response.
     *
     * @param message the JSON message to send
     * @return the response body, or {@code null} when the configured transport is
     *         not HTTP or any error occurs (errors are logged, never thrown)
     */
    public static String sendSocketData(String message) {
        try {
            ConfigCache configCache = SpringContext.getConfigCache();
            // only the HTTP transport is handled here; anything else yields null
            if (!StaticConf.MSG_SEND_TYPE_HTTP.equalsIgnoreCase(configCache.getMSG_SEND_TYPE())) {
                return null;
            }

            String url = String.format("http://%s:%s", configCache.getHTTP_HOST(), configCache.getHTTP_PORT());
            if (configCache.isUSE_TOKEN()) {
                message = withAuthToken(configCache, message);
            }
            log.debug("发送的json文本:" + message);

            try {
                String result = WebUtil.post(url, message);
                log.debug("返回结果:" + result);
                return result;
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Adds {@code authTime} (epoch seconds) and {@code authToken}
     * (MD5 of "key:authTime") fields to the JSON message.
     */
    private static String withAuthToken(ConfigCache configCache, String message) {
        long authTime = System.currentTimeMillis() / 1000;
        String authToken = DigestUtils.md5DigestAsHex(
                (configCache.getKEY() + ":" + authTime).getBytes(StandardCharsets.UTF_8));
        JSONObject payload = JSON.parseObject(message);
        payload.put("authTime", authTime);
        payload.put("authToken", authToken);
        return payload.toJSONString();
    }
}
ForeverWJY/CoolQ_Java_Plugin
src/main/java/com/wjyup/coolq/util/SendMessageUtil.java
Java
apache-2.0
1,815
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_222) on Thu Jan 16 21:49:29 PST 2020 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class org.apache.datasketches.quantiles.DoublesUnionBuilder (datasketches-java 1.2.0-incubating API)</title> <meta name="date" content="2020-01-16"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.datasketches.quantiles.DoublesUnionBuilder (datasketches-java 1.2.0-incubating API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/apache/datasketches/quantiles/class-use/DoublesUnionBuilder.html" target="_top">Frames</a></li> <li><a href="DoublesUnionBuilder.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.datasketches.quantiles.DoublesUnionBuilder" class="title">Uses of Class<br>org.apache.datasketches.quantiles.DoublesUnionBuilder</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">DoublesUnionBuilder</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.apache.datasketches.quantiles">org.apache.datasketches.quantiles</a></td> <td class="colLast"> <div class="block">The quantiles package contains stochastic streaming algorithms that enable single-pass analysis of the distribution of a stream of real (double) values or generic items.</div> </td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.apache.datasketches.quantiles"> <!-- --> </a> <h3>Uses of <a 
href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">DoublesUnionBuilder</a> in <a href="../../../../../org/apache/datasketches/quantiles/package-summary.html">org.apache.datasketches.quantiles</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/apache/datasketches/quantiles/package-summary.html">org.apache.datasketches.quantiles</a> that return <a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">DoublesUnionBuilder</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>static <a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">DoublesUnionBuilder</a></code></td> <td class="colLast"><span class="typeNameLabel">DoublesUnion.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/datasketches/quantiles/DoublesUnion.html#builder--">builder</a></span>()</code> <div class="block">Returns a new UnionBuilder</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">DoublesUnionBuilder</a></code></td> <td class="colLast"><span class="typeNameLabel">DoublesUnionBuilder.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html#setMaxK-int-">setMaxK</a></span>(int&nbsp;maxK)</code> <div class="block">Sets the parameter <i>masK</i> that determines the maximum size of the sketch that results 
from a union and its accuracy.</div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/apache/datasketches/quantiles/DoublesUnionBuilder.html" title="class in org.apache.datasketches.quantiles">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/apache/datasketches/quantiles/class-use/DoublesUnionBuilder.html" target="_top">Frames</a></li> <li><a href="DoublesUnionBuilder.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2015&#x2013;2020 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </body> </html>
DataSketches/DataSketches.github.io
api/java/snapshot/apidocs/org/apache/datasketches/quantiles/class-use/DoublesUnionBuilder.html
HTML
apache-2.0
7,944
/* * Copyright 2014–2017 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.physical.mongodb import slamdata.Predef.{ Eq => _, _ } import quasar._ import quasar.javascript.Js import quasar.jscore._ import java.time.Instant final case class javascript[R](embed: JsCoreF[R] => R) { val js = jscore.fixpoint[R](embed) import js._ /** Convert a `Bson.Date` to a JavaScript `Date`. */ def toJsDate(value: Bson.Date): R = New(Name("Date"), List( Literal(Js.Str(Instant.ofEpochMilli(value.millis).toString)))) /** Convert a `Bson.ObjectId` to a JavaScript `ObjectId`. 
*/ def toJsObjectId(value: Bson.ObjectId): R = New(Name("ObjectId"), List(Literal(Js.Str(value.str)))) def isNull(expr: R): R = BinOp(Eq, Literal(Js.Null), expr) def isAnyNumber(expr: R): R = BinOp(Or, isDec(expr), isInt(expr)) def isInt[A](expr: R): R = BinOp(Or, BinOp(Instance, expr, ident("NumberInt")), BinOp(Instance, expr, ident("NumberLong"))) def isDec(expr: R): R = Call(ident("isNumber"), List(expr)) def isString(expr: R): R = Call(ident("isString"), List(expr)) def isObjectOrArray(expr: R): R = Call(ident("isObject"), List(expr)) def isArray(expr: R): R = Call(select(ident("Array"), "isArray"), List(expr)) def isObject(expr: R): R = BinOp(And, isObjectOrArray(expr), UnOp(Not, isArray(expr))) def isArrayOrString(expr: R): R = BinOp(Or, isArray(expr), isString(expr)) def isBoolean(expr: R): R = BinOp(Eq, UnOp(TypeOf, expr), Literal(Js.Str("boolean"))) def isTimestamp(expr: R): R = BinOp(Instance, expr, ident("Timestamp")) def isDate(expr: R): R = BinOp(Instance, expr, ident("Date")) def isBinary(expr: R): R = BinOp(Instance, expr, ident("Binary")) def isObjectId(expr: R): R = BinOp(Instance, expr, ident("ObjectId")) }
drostron/quasar
mongodb/src/main/scala/quasar/physical/mongodb/javascript/package.scala
Scala
apache-2.0
2,413
# Lepiota olivaceomammosa var. irritans Raithelh. VARIETY #### Status ACCEPTED #### According to Index Fungorum #### Published in Metrodiana 16(1-3): 19 (1988) #### Original name Lepiota olivaceomammosa var. irritans Raithelh. ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Agaricomycetes/Agaricales/Agaricaceae/Lepiota/Lepiota olivaceomammosa/Lepiota olivaceomammosa irritans/README.md
Markdown
apache-2.0
248
# vim: set et sw=4 sts=4 fileencoding=utf-8: # # Python header conversion # Copyright (c) 2013,2014 Dave Hughes <[email protected]> # # Original headers # Copyright (c) 2012, Broadcom Europe Ltd # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
from __future__ import ( unicode_literals, print_function, division, absolute_import, ) # Make Py2's str equivalent to Py3's str = type('') import ctypes as ct import warnings _lib = ct.CDLL('libbcm_host.so') # bcm_host.h ################################################################# bcm_host_init = _lib.bcm_host_init bcm_host_init.argtypes = [] bcm_host_init.restype = None bcm_host_deinit = _lib.bcm_host_deinit bcm_host_deinit.argtypes = [] bcm_host_deinit.restype = None graphics_get_display_size = _lib.graphics_get_display_size graphics_get_display_size.argtypes = [ct.c_uint16, ct.POINTER(ct.c_uint32), ct.POINTER(ct.c_uint32)] graphics_get_display_size.restype = ct.c_int32
naziris/HomeSecPi
picamera/bcm_host.py
Python
apache-2.0
2,448
# quick demo of some python image filters # using raspberry pi camera import Tkinter as tk from picamera import PiCamera from time import sleep from PIL import Image,ImageFilter,ImageChops,ImageTk imagefile = "image.jpg" w = 320 h = 240 lastfilter = "none" camera = PiCamera() def takephoto(): camera.capture(imagefile) image1 = Image.open(imagefile) return image1 def photoloop(): count = 0 while (count < 9): sleep(0.5) image1 = newphoto() if lastfilter is not "none": dofilter(lastfilter,image1) count = count + 1 def newphoto(): global image1 image1 = takephoto() tkimage1 = ImageTk.PhotoImage(image1) panel1.configure(image=tkimage1) panel1.image = tkimage1 def invert(): global image1 image1= ImageChops.invert(image1) tkimage1 = ImageTk.PhotoImage(image1) panel1.configure(image=tkimage1) panel1.image = tkimage1 def grayscale(): global image1 r, g, b = image1.split() image1 = Image.merge("RGB", (g,g,g)) tkimage1 = ImageTk.PhotoImage(image1) panel1.configure(image=tkimage1) panel1.image = tkimage1 def dofilter (theimage,thefilter): lastfilter = thefilter global image1 image1 = image1.filter(thefilter) tkimage1 = ImageTk.PhotoImage(image1) panel1.configure(image=tkimage1) panel1.image = tkimage1 # Setup a window root = tk.Tk() root.title('Image') image1 = takephoto() tkimage1 = ImageTk.PhotoImage(image1) w = tkimage1.width() h = tkimage1.height() root.geometry("%dx%d+%d+%d" % (w, h, 0, 0)) # root has no image argument, so use a label as a panel panel1 = tk.Label(root, image=tkimage1) panel1.pack(side='top', fill='both', expand='yes') # save the panel's image from 'garbage collection' panel1.image = tkimage1 # Add some buttons buttonrow = tk.Frame(root) buttonrow.place(y=0,x=0) button = tk.Button(buttonrow, text='CAMERA',command = lambda: newphoto()) button.pack(side='left',) button = tk.Button(buttonrow, text='LOOP',command = lambda: photoloop()) button.pack(side='left',) button = tk.Button(buttonrow, text='INVERT',command = lambda: invert()) button.pack(side='left',) 
button = tk.Button(buttonrow, text='GRAY',command = lambda: grayscale()) button.pack(side='left',) # add some filter buttons button = tk.Button(buttonrow, text='BLUR',command = lambda: dofilter(image1,ImageFilter.BLUR)) button.pack(side='left') button = tk.Button(buttonrow, text='CONTOUR',command = lambda: dofilter(image1,ImageFilter.CONTOUR)) button.pack(side='left') button = tk.Button(buttonrow, text='FIND_EDGES',command = lambda: dofilter(image1,ImageFilter.FIND_EDGES)) button.pack(side='left') button = tk.Button(buttonrow, text='EMBOSS',command = lambda: dofilter(image1,ImageFilter.EMBOSS)) button.pack(side='left') button = tk.Button(buttonrow, text='EDGE_ENHANCE',command = lambda: dofilter(image1,ImageFilter.EDGE_ENHANCE)) button.pack(side='left') button = tk.Button(buttonrow, text='CLOSE',command = lambda: root.destroy()) button.pack(side='left') root.mainloop()
emschimmel/CameraPi
camera_try.py
Python
apache-2.0
2,985
/** * Copyright (c) 2008-2010 Andrey Somov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.yaml.snakeyaml.tokens; import java.util.List; import org.yaml.snakeyaml.error.Mark; import org.yaml.snakeyaml.error.YAMLException; /** * @see <a href="http://pyyaml.org/wiki/PyYAML">PyYAML</a> for more information */ public final class DirectiveToken<T> extends Token { private final String name; private final List<T> value; public DirectiveToken(String name, List<T> value, Mark startMark, Mark endMark) { super(startMark, endMark); this.name = name; if (value != null && value.size() != 2) { throw new YAMLException("Two strings must be provided instead of " + String.valueOf(value.size())); } this.value = value; } public String getName() { return this.name; } public List<T> getValue() { return this.value; } @Override protected String getArguments() { if (value != null) { return "name=" + name + ", value=[" + value.get(0) + ", " + value.get(1) + "]"; } else { return "name=" + name; } } @Override public Token.ID getTokenId() { return ID.Directive; } }
spariev/snakeyaml
src/main/java/org/yaml/snakeyaml/tokens/DirectiveToken.java
Java
apache-2.0
1,789
var a02307 = [ [ "GenericVector", "a02307.html#a60d42eebf02708482a8b506edd417990", null ], [ "GenericVector", "a02307.html#a28a69767bcadb6058a2a9df4afecd5fc", null ], [ "GenericVector", "a02307.html#a2b61cd1cd756770518f5ac30f817a9bf", null ], [ "~GenericVector", "a02307.html#a49840c8743a063b87839baef7e19b968", null ], [ "back", "a02307.html#a48b82547ebbaa5fedecfdebe7e2f155a", null ], [ "binary_search", "a02307.html#ad561e19e75a0fb30f0118774d7fa5621", null ], [ "bool_binary_search", "a02307.html#a8c261f66a24da67aac1acca7aa8f650a", null ], [ "choose_nth_item", "a02307.html#a5c4218ef833d0fe5db9b9749abd81ea5", null ], [ "choose_nth_item", "a02307.html#ae1e555b0cdded2c36dd6cf15345f659f", null ], [ "clear", "a02307.html#a9cdbff49b186574b83e43afba606fdd9", null ], [ "compact", "a02307.html#a080f7786e007523bcaa3f69913a82882", null ], [ "compact_sorted", "a02307.html#a8cb22ff55d6dd125d93cd03fd73bf8ad", null ], [ "contains", "a02307.html#a997e0fcaaa6b6533401dc54c0691e2e5", null ], [ "contains_index", "a02307.html#ac1aae0b1c22248f264dad02481123398", null ], [ "delete_data_pointers", "a02307.html#a98f62dccd75224a60437c2761bd215cd", null ], [ "DeSerialize", "a02307.html#aa4f5b1bc0d044fbd1fc77363b798c39c", null ], [ "DeSerialize", "a02307.html#a2e4fca9599eff590b76affc0a0aa0a2b", null ], [ "DeSerializeClasses", "a02307.html#a698ebd328d22f1edc114053ca2eba48e", null ], [ "DeSerializeClasses", "a02307.html#ade729c7d5429fbd5be304e3493a8a95f", null ], [ "dot_product", "a02307.html#a6f6dfbc607499173e7809656c6c505bc", null ], [ "double_the_size", "a02307.html#af0214c8c21da9eb57dfebc78611d0cd6", null ], [ "empty", "a02307.html#a172c4aa23ba397e24319ae095281cbcc", null ], [ "get", "a02307.html#abd0a875f98a1d78613ed3521d96e5300", null ], [ "get_index", "a02307.html#a6dee574daf4a3d4f0fc7048964f8f252", null ], [ "init", "a02307.html#a5b010723588fe15f303e4f3474d8479e", null ], [ "init_to_size", "a02307.html#a6751521fd3eb461d81fc83ef93a0def3", null ], [ "insert", 
"a02307.html#a57ca5259541548a97bcfd4d0925a27ff", null ], [ "length", "a02307.html#a6af4e0a2a30dda267d19bf783ae22eb7", null ], [ "move", "a02307.html#abae057ce589be25aae9b80958f84e34c", null ], [ "operator+=", "a02307.html#af73fadcdb08f0a12a5615f2bcf6fa6a8", null ], [ "operator+=", "a02307.html#acc7df2256174b32632e4d5b6c8d05d29", null ], [ "operator=", "a02307.html#af6fd5b3891b276c10add96f9411bec05", null ], [ "operator[]", "a02307.html#afd51f3f981284adb20bdf3b0bfd1c1f7", null ], [ "pop_back", "a02307.html#a0621dd57ce58dae3cb5f3d61e76bd233", null ], [ "push_back", "a02307.html#a0dc89fe2a365b04a61017f9d78c1a303", null ], [ "push_back_new", "a02307.html#a393f9f8dcc55ad759a5c7fbdc4840a89", null ], [ "push_front", "a02307.html#ae08e7cece0097ad356b5e565cbb2cf0b", null ], [ "read", "a02307.html#a10a273cab07e56c1654b2167f8aa9408", null ], [ "remove", "a02307.html#a3fd37a240a42f1c3052e8d28614d3702", null ], [ "reserve", "a02307.html#aa225ea3fc9374961482bc804028317eb", null ], [ "resize_no_init", "a02307.html#a09005e8f2b51d033d60eb5690aa5d112", null ], [ "reverse", "a02307.html#a58f6d73009cc3c56d0efb0d96ad35b5b", null ], [ "Serialize", "a02307.html#a206a6fe71c3780d862d97ef7c5fc9546", null ], [ "Serialize", "a02307.html#a3e994fd938468ff4fc4b4a902e970876", null ], [ "SerializeClasses", "a02307.html#ad0e8164e4c5c82e9e367c8a6d9b755b1", null ], [ "SerializeClasses", "a02307.html#a7d0060c687429049a0ea5cf21d067b8e", null ], [ "set", "a02307.html#a067b7833ee66238b7b5e230404525fcb", null ], [ "set_clear_callback", "a02307.html#af2bbca5b3258035a333b62679835a253", null ], [ "set_compare_callback", "a02307.html#aa3ec670c7f68a95f84641a0ded8cb61f", null ], [ "size", "a02307.html#a20cfad5c58c50cb85a9529d8ddbd96af", null ], [ "size_reserved", "a02307.html#a1c273622446ec7b5a6669fa9c9fdd8e5", null ], [ "sort", "a02307.html#a999bbd8ff336c81fe1198ea714c7936d", null ], [ "sort", "a02307.html#a461142d4ff7c61f22119552b7c0b2755", null ], [ "swap", "a02307.html#ac10b1de04fdfd4f5e4b90ac6d03f35b9", 
null ], [ "truncate", "a02307.html#a980882b5ebc3e72fdedbdbe345196f21", null ], [ "unsigned_size", "a02307.html#a47bd2385b28d536e8b6e87b689d61ede", null ], [ "WithinBounds", "a02307.html#a367914d03777eef59176d48155d06b72", null ], [ "write", "a02307.html#a8745d1d8394e852d12398d0458684dee", null ], [ "clear_cb_", "a02307.html#a57a833bdcc07a53e9a7b57d07cac2131", null ], [ "compare_cb_", "a02307.html#acd69761952fb39cbe7d7b43a6b06a432", null ], [ "data_", "a02307.html#ab88657a46d06c175dcfc76c0fcdaac7d", null ], [ "size_reserved_", "a02307.html#a4a02eb2a4ed31e8454cd8ae06eb8d7c5", null ], [ "size_used_", "a02307.html#a99185b084a6ace7536818ce2f17b11fb", null ] ];
stweil/tesseract-ocr.github.io
4.0.0/a02307.js
JavaScript
apache-2.0
4,880
/* Copyright 2018 Nationale-Nederlanden Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package nl.nn.adapterframework.http.cxf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeThat; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Iterator; import java.util.Properties; import javax.activation.DataHandler; import javax.xml.soap.AttachmentPart; import javax.xml.soap.MessageFactory; import javax.xml.soap.MimeHeader; import javax.xml.soap.SOAPConstants; import javax.xml.soap.SOAPException; import javax.xml.soap.SOAPMessage; import javax.xml.transform.stream.StreamSource; import javax.xml.ws.WebServiceContext; import org.apache.soap.util.mime.ByteArrayDataSource; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; import org.w3c.dom.Element; import nl.nn.adapterframework.core.PipeLineSession; import nl.nn.adapterframework.stream.Message; import nl.nn.adapterframework.util.DomBuilderException; import nl.nn.adapterframework.util.Misc; import nl.nn.adapterframework.util.XmlUtils; @RunWith(MockitoJUnitRunner.class) public class 
SoapProviderTest { @BeforeClass public static void setUp() { Properties prop = System.getProperties(); String vendor = prop.getProperty("java.vendor"); System.out.println("JVM Vendor : " + vendor); assumeThat(vendor, not(equalTo("IBM Corporation"))); /* * The above exclusion of IBM JDK to work around the below error, seen when executing these tests with an IBM JDK: * java.lang.VerifyError: JVMVRFY012 stack shape inconsistent; class=com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl, method=createDocumentFragment()Lorg/w3c/dom/DocumentFragment;, pc=5; Type Mismatch, argument 0 in signature com/sun/xml/messaging/saaj/soap/SOAPDocumentFragment.<init>:(Lcom/sun/org/apache/xerces/internal/dom/CoreDocumentImpl;)V does not match Exception Details: Location: com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl.createDocumentFragment()Lorg/w3c/dom/DocumentFragment; @5: JBinvokespecial Reason: Type 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' (current frame, stack[2]) is not assignable to 'com/sun/org/apache/xerces/internal/dom/CoreDocumentImpl' Current Frame: bci: @5 flags: { } locals: { 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' } stack: { 'uninitialized', 'uninitialized', 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' } at com.sun.xml.messaging.saaj.soap.SOAPPartImpl.<init>(SOAPPartImpl.java:106) at com.sun.xml.messaging.saaj.soap.ver1_1.SOAPPart1_1Impl.<init>(SOAPPart1_1Impl.java:70) at com.sun.xml.messaging.saaj.soap.ver1_1.Message1_1Impl.getSOAPPart(Message1_1Impl.java:90) at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:109) at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:98) at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:94) at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.sendMessageWithInputStreamAttachmentsTest(SoapProviderTest.java:228) */ } @Spy WebServiceContext webServiceContext = new 
WebServiceContextStub(); @InjectMocks private SoapProviderStub SOAPProvider = new SoapProviderStub(); private final String ATTACHMENT_CONTENT = "<dummy/>"; private final String ATTACHMENT_MIMETYPE = "plain/text"; private final String ATTACHMENT2_CONTENT = "<I'm a pdf file/>"; private final String ATTACHMENT2_NAME = "document.pdf"; private final String ATTACHMENT2_MIMETYPE = "application/pdf"; private final String MULTIPART_XML = "<parts><part type=\"file\" name=\""+ATTACHMENT2_NAME+"\" " + "sessionKey=\"part_file\" size=\"72833\" " + "mimeType=\""+ATTACHMENT2_MIMETYPE+"\"/></parts>"; private final String BASEDIR = "/Soap/"; protected InputStream getFile(String file) throws IOException { URL url = this.getClass().getResource(BASEDIR+file); if (url == null) { throw new IOException("file not found"); } return url.openStream(); } private SOAPMessage createMessage(String filename) throws IOException, SOAPException { return createMessage(filename, false, false); } private SOAPMessage createMessage(String filename, boolean addAttachment, boolean isSoap1_1) throws IOException, SOAPException { MessageFactory factory = MessageFactory.newInstance(isSoap1_1 ? 
SOAPConstants.SOAP_1_1_PROTOCOL : SOAPConstants.SOAP_1_2_PROTOCOL); SOAPMessage soapMessage = factory.createMessage(); StreamSource streamSource = new StreamSource(getFile(filename)); soapMessage.getSOAPPart().setContent(streamSource); if(addAttachment) { InputStream fis = new ByteArrayInputStream(ATTACHMENT_CONTENT.getBytes()); DataHandler dataHander = new DataHandler(new ByteArrayDataSource(fis, ATTACHMENT_MIMETYPE)); AttachmentPart part = soapMessage.createAttachmentPart(dataHander); soapMessage.addAttachmentPart(part); } return soapMessage; } private void assertAttachmentInSession(PipeLineSession session) throws DomBuilderException, IOException { assertNotNull(session.get("mimeHeaders")); assertNotNull(session.get("attachments")); Element xml = XmlUtils.buildElement((String) session.get("attachments")); Element attachment = XmlUtils.getFirstChildTag(xml, "attachment"); assertNotNull(attachment); //Retrieve sessionkey the attachment was stored in String sessionKey = XmlUtils.getChildTagAsString(attachment, "sessionKey"); assertNotNull(sessionKey); Message attachmentMessage = session.getMessage(sessionKey); //Verify that the attachment sent, was received properly assertEquals(ATTACHMENT_CONTENT, attachmentMessage.asString()); //Verify the content type Element mimeTypes = XmlUtils.getFirstChildTag(attachment, "mimeHeaders"); mimeTypes.getElementsByTagName("mimeHeader"); //TODO check what happens when multiple attachments are returned... 
String mimeType = XmlUtils.getChildTagAsString(mimeTypes, "mimeHeader"); assertEquals(ATTACHMENT_MIMETYPE, mimeType); } private void assertAttachmentInReceivedMessage(SOAPMessage message) throws SOAPException, IOException { assertEquals(1, message.countAttachments()); Iterator<?> attachmentParts = message.getAttachments(); while (attachmentParts.hasNext()) { AttachmentPart soapAttachmentPart = (AttachmentPart)attachmentParts.next(); String attachment = Misc.streamToString(soapAttachmentPart.getRawContent()); //ContentID should be equal to the filename assertEquals(ATTACHMENT2_NAME, soapAttachmentPart.getContentId()); //Validate the attachment's content assertEquals(ATTACHMENT2_CONTENT, attachment); //Make sure at least the content-type header has been set Iterator<?> headers = soapAttachmentPart.getAllMimeHeaders(); String contentType = null; while (headers.hasNext()) { MimeHeader header = (MimeHeader) headers.next(); if("Content-Type".equalsIgnoreCase(header.getName())) contentType = header.getValue(); } assertEquals(ATTACHMENT2_MIMETYPE, contentType); } } @Test /** * Receive SOAP message without attachment * Reply SOAP message without attachment * @throws Throwable */ public void simpleMessageTest() throws Throwable { SOAPMessage request = createMessage("correct-soapmsg.xml"); SOAPMessage message = SOAPProvider.invoke(request); String result = XmlUtils.nodeToString(message.getSOAPPart()); String expected = Misc.streamToString(getFile("correct-soapmsg.xml")); assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", "")); PipeLineSession session = SOAPProvider.getSession(); assertNotNull(session.get("mimeHeaders")); assertNotNull(session.get("attachments")); assertEquals("<attachments/>", session.get("attachments").toString().trim()); } @Test /** * Receive faulty message without attachment * @throws Throwable */ public void errorMessageTest() throws Throwable { SOAPMessage message = SOAPProvider.invoke(null); String result = 
XmlUtils.nodeToString(message.getSOAPPart()); assertTrue(result.indexOf("SOAPMessage is null") > 0); } @Test /** * Receive SOAP message with MTOM attachment * Reply SOAP message without attachment * @throws Throwable */ public void receiveMessageWithAttachmentsTest() throws Throwable { SOAPMessage request = createMessage("correct-soapmsg.xml", true, false); SOAPMessage message = SOAPProvider.invoke(request); String result = XmlUtils.nodeToString(message.getSOAPPart()); String expected = Misc.streamToString(getFile("correct-soapmsg.xml")); assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", "")); PipeLineSession session = SOAPProvider.getSession(); assertAttachmentInSession(session); } @Test /** * Receive SOAP message without attachment * Reply SOAP message with (InputStream) attachment * @throws Throwable */ public void sendMessageWithInputStreamAttachmentsTest() throws Throwable { SOAPMessage request = createMessage("correct-soapmsg.xml"); PipeLineSession session = new PipeLineSession(); session.put("attachmentXmlSessionKey", MULTIPART_XML); session.put("part_file", new ByteArrayInputStream(ATTACHMENT2_CONTENT.getBytes())); SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey"); SOAPProvider.setSession(session); SOAPMessage message = SOAPProvider.invoke(request); String result = XmlUtils.nodeToString(message.getSOAPPart()); String expected = Misc.streamToString(getFile("correct-soapmsg.xml")); assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", "")); assertAttachmentInReceivedMessage(message); } @Test /** * Receive SOAP message without attachment * Reply SOAP message with (String) attachment * @throws Throwable */ public void sendMessageWithStringAttachmentsTest() throws Throwable { SOAPMessage request = createMessage("correct-soapmsg.xml"); PipeLineSession session = new PipeLineSession(); session.put("attachmentXmlSessionKey", MULTIPART_XML); session.put("part_file", ATTACHMENT2_CONTENT); 
SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey"); SOAPProvider.setSession(session); SOAPMessage message = SOAPProvider.invoke(request); String result = XmlUtils.nodeToString(message.getSOAPPart()); String expected = Misc.streamToString(getFile("correct-soapmsg.xml")); assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", "")); assertAttachmentInReceivedMessage(message); } @Test /** * Receive SOAP message with attachment * Reply SOAP message with attachment * @throws Throwable */ public void receiveAndSendMessageWithAttachmentsTest() throws Throwable { SOAPMessage request = createMessage("correct-soapmsg.xml", true, false); PipeLineSession session = new PipeLineSession(); session.put("attachmentXmlSessionKey", MULTIPART_XML); session.put("part_file", ATTACHMENT2_CONTENT); SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey"); SOAPProvider.setSession(session); SOAPMessage message = SOAPProvider.invoke(request); String result = XmlUtils.nodeToString(message.getSOAPPart()); String expected = Misc.streamToString(getFile("correct-soapmsg.xml")); assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", "")); //Validate an attachment was sent to the listener assertAttachmentInSession(SOAPProvider.getSession()); //Validate the listener returned an attachment back assertAttachmentInReceivedMessage(message); } @Test public void soapActionInSessionKeySOAP1_1() throws Throwable { // Soap protocol 1.1 SOAPMessage request = createMessage("soapmsg1_1.xml", false, true); String value = "1.1-SoapAction"; webServiceContext.getMessageContext().put("SOAPAction", value); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertEquals(value, SOAPProvider.getSession().get("SOAPAction")); } @Test public void noSoapActionInSessionKeySOAP1_1() throws Throwable { // Soap protocol 1.1 SOAPMessage request = createMessage("soapmsg1_1.xml", false, true); SOAPProvider.invoke(request); 
assertNull(SOAPProvider.getSession().get("SOAPAction")); } @Test public void soap1_1MessageWithActionInContentTypeHeader() throws Throwable { // Soap protocol 1.1 SOAPMessage request = createMessage("soapmsg1_1.xml", false, true); String value = "ActionInContentTypeHeader"; webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertNull(SOAPProvider.getSession().get("SOAPAction")); } @Test public void soapActionInSessionKeySOAP1_2ActionIsTheLastItem() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); String value = "SOAP1_2ActionIsTheLastItem"; webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertEquals(value, SOAPProvider.getSession().get("SOAPAction")); } @Test public void soapActionInSessionKeySOAP1_2ActionIsInMiddle() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); String value = "SOAP1_2ActionIsInMiddle"; webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value+";somethingelse"); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertEquals(value, SOAPProvider.getSession().get("SOAPAction")); } @Test public void soapActionInSessionKeySOAP1_2ActionIsAtTheBeginning() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); String value = "SOAP1_2ActionIsAtTheBeginning"; webServiceContext.getMessageContext().put("Content-Type", "action="+value+";application/soap+xml; somethingelse"); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertEquals(value, SOAPProvider.getSession().get("SOAPAction")); } @Test public void noSoapActionInSessionKey1_2() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); 
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; somethingelse"); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertNull(SOAPProvider.getSession().get("SOAPAction")); } @Test public void emptySoapActionInSessionKey1_2() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action=; somethingelse"); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertNull(SOAPProvider.getSession().get("SOAPAction")); } @Test public void soap1_2MessageWithSOAPActionHeader() throws Throwable { SOAPMessage request = createMessage("soapmsg1_2.xml"); webServiceContext.getMessageContext().put("SOAPAction", "action"); SOAPProvider.invoke(request); webServiceContext.getMessageContext().clear(); assertNull(SOAPProvider.getSession().get("SOAPAction")); } }
ibissource/iaf
core/src/test/java/nl/nn/adapterframework/http/cxf/SoapProviderTest.java
Java
apache-2.0
16,059
### note

* this is a reusable container for a given project/volume

### initial steps

* ensure the current directory contains `resources/jdk-9+175_linux-x64_bin.tar.gz`
* run: `docker build -t="jdk9/b175" .`
* run: `docker run -i -t -v $(pwd):/data jdk9/b175`
* inside the container, run: `/data/resources/install.sh`
* inside the container, run: `. /data/resources/setvars.sh`
* confirm: `java --version`

### subsequent steps

* `docker start [container name]`
* `docker attach [container name]`
* run: `. /data/resources/setvars.sh`
* confirm: `java --version`
codetojoy/gists
docker/jdk9_jun_2017/NOTES.md
Markdown
apache-2.0
556
package template import ( "bytes" "io" "io/ioutil" "log" "os" "testing" "text/template" "github.com/k8sp/sextant/cloud-config-server/certgen" "github.com/k8sp/sextant/clusterdesc" "github.com/stretchr/testify/assert" "github.com/topicai/candy" "gopkg.in/yaml.v2" ) func TestExecute(t *testing.T) { out, err := ioutil.TempDir("", "") candy.Must(err) defer func() { if e := os.RemoveAll(out); e != nil { log.Printf("Generator.Gen failed deleting %s", out) } }() caKey, caCrt := certgen.GenerateRootCA(out) config := candy.WithOpened("./cluster-desc.sample.yaml", func(r io.Reader) interface{} { b, e := ioutil.ReadAll(r) candy.Must(e) c := &clusterdesc.Cluster{} assert.Nil(t, yaml.Unmarshal(b, &c)) return c }).(*clusterdesc.Cluster) tmpl, e := template.ParseFiles("cloud-config.template") candy.Must(e) var ccTmpl bytes.Buffer Execute(tmpl, config, "00:25:90:c0:f7:80", caKey, caCrt, &ccTmpl) yml := make(map[interface{}]interface{}) candy.Must(yaml.Unmarshal(ccTmpl.Bytes(), yml)) initialEtcdCluster := yml["coreos"].(map[interface{}]interface{})["etcd2"].(map[interface{}]interface{})["initial-cluster-token"] assert.Equal(t, initialEtcdCluster, "etcd-cluster-1") }
JunfeiYang/sextant
cloud-config-server/template/template_test.go
GO
apache-2.0
1,218
package mx.emite.sdk.scot.request;

import java.util.List;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

import org.hibernate.validator.constraints.NotEmpty;

import lombok.Builder;
import lombok.Data;
import lombok.Singular;
import mx.emite.sdk.cfdi32.anotaciones.Rfc;
import mx.emite.sdk.scot.request.extra.SucursalInfo;

/**
 * Request payload for registering ("alta") a list of branch offices
 * (sucursales) in Scot&copy;. Accessors and a builder are generated by Lombok.
 */
@Data
@Builder
public class SucursalesAltaRequest {

	/**
	 * Token of the <b>Integrator</b> obtained from the Token service.
	 * -- SETTER --
	 *
	 * @param token
	 *            Token of the <b>Integrator</b> obtained from Scot&copy;
	 *
	 */
	@NotNull
	private String token;

	/**
	 * @param rfc RFC of the issuer; if left blank, all issuers are queried
	 */
	@Rfc
	private String rfc;

	/**
	 * @param sucursales list of branch offices to register
	 */
	@Valid
	@NotEmpty
	@Singular("sucursal")
	private List<SucursalInfo> sucursales;

	/**
	 * Whether to update the branch office if it is already registered.
	 */
	@NotNull
	public Boolean modificar;
}
emite-mx/ef-sdk-java
ef-sdk-java/src/main/java/mx/emite/sdk/scot/request/SucursalesAltaRequest.java
Java
apache-2.0
1,030
/* * Copyright 2016-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rust; import com.facebook.buck.cxx.CxxPlatform; import com.facebook.buck.cxx.CxxPlatforms; import com.facebook.buck.cxx.Linker; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.Flavored; import com.facebook.buck.model.InternalFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.AbstractDescriptionArg; import com.facebook.buck.rules.BinaryWrapperRule; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.CellPathResolver; import com.facebook.buck.rules.Description; import com.facebook.buck.rules.ImplicitDepsInferringDescription; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.ToolProvider; import com.facebook.buck.versions.VersionRoot; import com.facebook.infer.annotation.SuppressFieldNotInitialized; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import java.util.Map; import 
java.util.Optional; import java.util.stream.Stream; public class RustTestDescription implements Description<RustTestDescription.Arg>, ImplicitDepsInferringDescription<RustTestDescription.Arg>, Flavored, VersionRoot<RustTestDescription.Arg> { private final RustBuckConfig rustBuckConfig; private final FlavorDomain<CxxPlatform> cxxPlatforms; private final CxxPlatform defaultCxxPlatform; public RustTestDescription( RustBuckConfig rustBuckConfig, FlavorDomain<CxxPlatform> cxxPlatforms, CxxPlatform defaultCxxPlatform) { this.rustBuckConfig = rustBuckConfig; this.cxxPlatforms = cxxPlatforms; this.defaultCxxPlatform = defaultCxxPlatform; } @Override public Arg createUnpopulatedConstructorArg() { return new Arg(); } @Override public <A extends Arg> BuildRule createBuildRule( TargetGraph targetGraph, BuildRuleParams params, BuildRuleResolver resolver, CellPathResolver cellRoots, A args) throws NoSuchBuildTargetException { final BuildTarget buildTarget = params.getBuildTarget(); BuildTarget exeTarget = params.getBuildTarget() .withAppendedFlavors(InternalFlavor.of("unittest")); Optional<Map.Entry<Flavor, RustBinaryDescription.Type>> type = RustBinaryDescription.BINARY_TYPE.getFlavorAndValue(buildTarget); boolean isCheck = type.map(t -> t.getValue().isCheck()).orElse(false); BinaryWrapperRule testExeBuild = resolver.addToIndex( RustCompileUtils.createBinaryBuildRule( params.withBuildTarget(exeTarget), resolver, rustBuckConfig, cxxPlatforms, defaultCxxPlatform, args.crate, args.features, Stream.of( args.framework ? 
Stream.of("--test") : Stream.<String>empty(), rustBuckConfig.getRustTestFlags().stream(), args.rustcFlags.stream()) .flatMap(x -> x).iterator(), args.linkerFlags.iterator(), RustCompileUtils.getLinkStyle(params.getBuildTarget(), args.linkStyle), args.rpath, args.srcs, args.crateRoot, ImmutableSet.of("lib.rs", "main.rs"), isCheck )); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver); Tool testExe = testExeBuild.getExecutableCommand(); BuildRuleParams testParams = params.copyAppendingExtraDeps( testExe.getDeps(ruleFinder)); return new RustTest( testParams, ruleFinder, testExeBuild, args.labels, args.contacts); } @Override public void findDepsForTargetFromConstructorArgs( BuildTarget buildTarget, CellPathResolver cellRoots, Arg constructorArg, ImmutableCollection.Builder<BuildTarget> extraDepsBuilder, ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) { ToolProvider compiler = rustBuckConfig.getRustCompiler(); extraDepsBuilder.addAll(compiler.getParseTimeDeps()); extraDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatforms.getValues())); } @Override public boolean hasFlavors(ImmutableSet<Flavor> flavors) { if (cxxPlatforms.containsAnyOf(flavors)) { return true; } for (RustBinaryDescription.Type type : RustBinaryDescription.Type.values()) { if (flavors.contains(type.getFlavor())) { return true; } } return false; } @Override public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains() { return Optional.of(ImmutableSet.of(cxxPlatforms, RustBinaryDescription.BINARY_TYPE)); } @Override public boolean isVersionRoot(ImmutableSet<Flavor> flavors) { return true; } @SuppressFieldNotInitialized public static class Arg extends AbstractDescriptionArg { public ImmutableSortedSet<SourcePath> srcs = ImmutableSortedSet.of(); public ImmutableSet<String> contacts = ImmutableSet.of(); public ImmutableSortedSet<String> features = ImmutableSortedSet.of(); public ImmutableList<String> rustcFlags = ImmutableList.of(); public ImmutableList<String> 
linkerFlags = ImmutableList.of(); public ImmutableSortedSet<BuildTarget> deps = ImmutableSortedSet.of(); public Optional<Linker.LinkableDepType> linkStyle; public boolean rpath = true; public boolean framework = true; public Optional<String> crate; public Optional<SourcePath> crateRoot; } }
vschs007/buck
src/com/facebook/buck/rust/RustTestDescription.java
Java
apache-2.0
6,463
// Assembly-level metadata for the MassTransit.Publisher sample project.
using System.Reflection;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("MassTransit.Publisher")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("MassTransit.Publisher")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("918f334e-cfe9-4000-bd5d-8154d3f88163")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
shibukraj/Masstransit.PubSubSample
MassTransit.Publisher/Properties/AssemblyInfo.cs
C#
apache-2.0
1,382
/* Copyright IBM Corp. 2016 All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package attributes import ( "bytes" "crypto/x509" "encoding/asn1" "errors" "fmt" "strconv" "strings" pb "github.com/fabric_sdk_golang/core/crypto/attributes/proto" "github.com/fabric_sdk_golang/core/crypto/primitives" "github.com/golang/protobuf/proto" ) var ( // TCertEncAttributesBase is the base ASN1 object identifier for attributes. // When generating an extension to include the attribute an index will be // appended to this Object Identifier. TCertEncAttributesBase = asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6} // TCertAttributesHeaders is the ASN1 object identifier of attributes header. TCertAttributesHeaders = asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6, 9} padding = []byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255} //headerPrefix is the prefix used in the header exteion of the certificate. headerPrefix = "00HEAD" //HeaderAttributeName is the name used to derivate the K used to encrypt/decrypt the header. HeaderAttributeName = "attributeHeader" ) //ParseAttributesHeader parses a string and returns a map with the attributes. 
func ParseAttributesHeader(header string) (map[string]int, error) { if !strings.HasPrefix(header, headerPrefix) { return nil, errors.New("Invalid header") } headerBody := strings.Replace(header, headerPrefix, "", 1) tokens := strings.Split(headerBody, "#") result := make(map[string]int) for _, token := range tokens { pair := strings.Split(token, "->") if len(pair) == 2 { key := pair[0] valueStr := pair[1] value, err := strconv.Atoi(valueStr) if err != nil { return nil, err } result[key] = value } } return result, nil } //ReadAttributeHeader read the header of the attributes. func ReadAttributeHeader(tcert *x509.Certificate, headerKey []byte) (map[string]int, bool, error) { var err error var headerRaw []byte encrypted := false if headerRaw, err = primitives.GetCriticalExtension(tcert, TCertAttributesHeaders); err != nil { return nil, encrypted, err } headerStr := string(headerRaw) var header map[string]int header, err = ParseAttributesHeader(headerStr) if err != nil { if headerKey == nil { return nil, false, errors.New("Is not possible read an attribute encrypted without the headerKey") } headerRaw, err = DecryptAttributeValue(headerKey, headerRaw) if err != nil { return nil, encrypted, errors.New("error decrypting header value '" + err.Error() + "''") } headerStr = string(headerRaw) header, err = ParseAttributesHeader(headerStr) if err != nil { return nil, encrypted, err } encrypted = true } return header, encrypted, nil } //ReadTCertAttributeByPosition read the attribute stored in the position "position" of the tcert. func ReadTCertAttributeByPosition(tcert *x509.Certificate, position int) ([]byte, error) { if position <= 0 { return nil, fmt.Errorf("Invalid attribute position. 
Received [%v]", position) } oid := asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6, 9 + position} value, err := primitives.GetCriticalExtension(tcert, oid) if err != nil { return nil, err } return value, nil } //ReadTCertAttribute read the attribute with name "attributeName" and returns the value and a boolean indicating if the returned value is encrypted or not. func ReadTCertAttribute(tcert *x509.Certificate, attributeName string, headerKey []byte) ([]byte, bool, error) { header, encrypted, err := ReadAttributeHeader(tcert, headerKey) if err != nil { return nil, false, err } position := header[attributeName] if position == 0 { return nil, encrypted, errors.New("Failed attribute '" + attributeName + "' doesn't exists in the TCert.") } value, err := ReadTCertAttributeByPosition(tcert, position) if err != nil { return nil, encrypted, err } return value, encrypted, nil } //EncryptAttributeValue encrypts "attributeValue" using "attributeKey" func EncryptAttributeValue(attributeKey []byte, attributeValue []byte) ([]byte, error) { value := append(attributeValue, padding...) return primitives.CBCPKCS7Encrypt(attributeKey, value) } //getAttributeKey returns the attributeKey derived from the preK0 to the attributeName. func getAttributeKey(preK0 []byte, attributeName string) []byte { return primitives.HMACTruncated(preK0, []byte(attributeName), 32) } //EncryptAttributeValuePK0 encrypts "attributeValue" using a key derived from preK0. func EncryptAttributeValuePK0(preK0 []byte, attributeName string, attributeValue []byte) ([]byte, error) { attributeKey := getAttributeKey(preK0, attributeName) return EncryptAttributeValue(attributeKey, attributeValue) } //DecryptAttributeValue decrypts "encryptedValue" using "attributeKey" and return the decrypted value. 
func DecryptAttributeValue(attributeKey []byte, encryptedValue []byte) ([]byte, error) { value, err := primitives.CBCPKCS7Decrypt(attributeKey, encryptedValue) if err != nil { return nil, err } lenPadding := len(padding) lenValue := len(value) if lenValue < lenPadding { return nil, errors.New("Error invalid value. Decryption verification failed.") } lenWithoutPadding := lenValue - lenPadding if bytes.Compare(padding[0:lenPadding], value[lenWithoutPadding:lenValue]) != 0 { return nil, errors.New("Error generating decryption key for value. Decryption verification failed.") } value = value[0:lenWithoutPadding] return value, nil } //getKAndValueForAttribute derives K for the attribute "attributeName", checks the value padding and returns both key and decrypted value func getKAndValueForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, []byte, error) { headerKey := getAttributeKey(preK0, HeaderAttributeName) value, encrypted, err := ReadTCertAttribute(cert, attributeName, headerKey) if err != nil { return nil, nil, err } attributeKey := getAttributeKey(preK0, attributeName) if encrypted { value, err = DecryptAttributeValue(attributeKey, value) if err != nil { return nil, nil, err } } return attributeKey, value, nil } //GetKForAttribute derives the K for the attribute "attributeName" and returns the key func GetKForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, error) { key, _, err := getKAndValueForAttribute(attributeName, preK0, cert) return key, err } //GetValueForAttribute derives the K for the attribute "attributeName" and returns the value func GetValueForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, error) { _, value, err := getKAndValueForAttribute(attributeName, preK0, cert) return value, err } func createAttributesHeaderEntry(preK0 []byte) *pb.AttributesMetadataEntry { attKey := getAttributeKey(preK0, HeaderAttributeName) return 
&pb.AttributesMetadataEntry{AttributeName: HeaderAttributeName, AttributeKey: attKey} } func createAttributesMetadataEntry(attributeName string, preK0 []byte) *pb.AttributesMetadataEntry { attKey := getAttributeKey(preK0, attributeName) return &pb.AttributesMetadataEntry{AttributeName: attributeName, AttributeKey: attKey} } //CreateAttributesMetadataObjectFromCert creates an AttributesMetadata object from certificate "cert", metadata and the attributes keys. func CreateAttributesMetadataObjectFromCert(cert *x509.Certificate, metadata []byte, preK0 []byte, attributeKeys []string) *pb.AttributesMetadata { var entries []*pb.AttributesMetadataEntry for _, key := range attributeKeys { if len(key) == 0 { continue } entry := createAttributesMetadataEntry(key, preK0) entries = append(entries, entry) } headerEntry := createAttributesHeaderEntry(preK0) entries = append(entries, headerEntry) return &pb.AttributesMetadata{Metadata: metadata, Entries: entries} } //CreateAttributesMetadataFromCert creates the AttributesMetadata from the original metadata and certificate "cert". func CreateAttributesMetadataFromCert(cert *x509.Certificate, metadata []byte, preK0 []byte, attributeKeys []string) ([]byte, error) { attributesMetadata := CreateAttributesMetadataObjectFromCert(cert, metadata, preK0, attributeKeys) return proto.Marshal(attributesMetadata) } //CreateAttributesMetadata create the AttributesMetadata from the original metadata func CreateAttributesMetadata(raw []byte, metadata []byte, preK0 []byte, attributeKeys []string) ([]byte, error) { cert, err := primitives.DERToX509Certificate(raw) if err != nil { return nil, err } return CreateAttributesMetadataFromCert(cert, metadata, preK0, attributeKeys) } //GetAttributesMetadata object from the original metadata "metadata". 
func GetAttributesMetadata(metadata []byte) (*pb.AttributesMetadata, error) { attributesMetadata := &pb.AttributesMetadata{} err := proto.Unmarshal(metadata, attributesMetadata) return attributesMetadata, err } //BuildAttributesHeader builds a header attribute from a map of attribute names and positions. func BuildAttributesHeader(attributesHeader map[string]int) ([]byte, error) { var header []byte var headerString string var positions = make(map[int]bool) for k, v := range attributesHeader { if positions[v] { return nil, errors.New("Duplicated position found in attributes header") } positions[v] = true vStr := strconv.Itoa(v) headerString = headerString + k + "->" + vStr + "#" } header = []byte(headerPrefix + headerString) return header, nil }
shangsony/fabric_sdk_golang
core/crypto/attributes/attributes.go
GO
apache-2.0
9,892
/* bme680.c - Driver for Bosch Sensortec's BME680 temperature, pressure, * humidity and gas sensor * * https://www.bosch-sensortec.com/bst/products/all_products/bme680 */ /* * Copyright (c) 2018 Bosch Sensortec GmbH * * SPDX-License-Identifier: Apache-2.0 */ #include "bme680.h" #include <gpio.h> #include <i2c.h> #include <init.h> #include <kernel.h> #include <misc/byteorder.h> #include <misc/__assert.h> #include <sensor.h> #include <logging/log.h> LOG_MODULE_REGISTER(bme680, CONFIG_SENSOR_LOG_LEVEL); static int bme680_reg_read(struct bme680_data *data, u8_t start, u8_t *buf, int size) { return i2c_burst_read(data->i2c_master, data->i2c_slave_addr, start, buf, size); return 0; } static int bme680_reg_write(struct bme680_data *data, u8_t reg, u8_t val) { return i2c_reg_write_byte(data->i2c_master, data->i2c_slave_addr, reg, val); return 0; } static void bme680_calc_temp(struct bme680_data *data, u32_t adc_temp) { s64_t var1, var2, var3; var1 = ((s32_t)adc_temp >> 3) - ((s32_t)data->par_t1 << 1); var2 = (var1 * (s32_t)data->par_t2) >> 11; var3 = ((var1 >> 1) * (var1 >> 1)) >> 12; var3 = ((var3) * ((s32_t)data->par_t3 << 4)) >> 14; data->t_fine = var2 + var3; data->calc_temp = ((data->t_fine * 5) + 128) >> 8; } static void bme680_calc_press(struct bme680_data *data, u32_t adc_press) { s32_t var1, var2, var3, calc_press; var1 = (((s32_t)data->t_fine) >> 1) - 64000; var2 = ((((var1 >> 2) * (var1 >> 2)) >> 11) * (s32_t)data->par_p6) >> 2; var2 = var2 + ((var1 * (s32_t)data->par_p5) << 1); var2 = (var2 >> 2) + ((s32_t)data->par_p4 << 16); var1 = (((((var1 >> 2) * (var1 >> 2)) >> 13) * ((s32_t)data->par_p3 << 5)) >> 3) + (((s32_t)data->par_p2 * var1) >> 1); var1 = var1 >> 18; var1 = ((32768 + var1) * (s32_t)data->par_p1) >> 15; calc_press = 1048576 - adc_press; calc_press = (calc_press - (var2 >> 12)) * ((u32_t)3125); /* This max value is used to provide precedence to multiplication or * division in the pressure calculation equation to achieve least * loss of precision and 
avoiding overflows. * i.e Comparing value, signed int 32bit (1 << 30) */ if (calc_press >= (s32_t)0x40000000) { calc_press = ((calc_press / var1) << 1); } else { calc_press = ((calc_press << 1) / var1); } var1 = ((s32_t)data->par_p9 * (s32_t)(((calc_press >> 3) * (calc_press >> 3)) >> 13)) >> 12; var2 = ((s32_t)(calc_press >> 2) * (s32_t)data->par_p8) >> 13; var3 = ((s32_t)(calc_press >> 8) * (s32_t)(calc_press >> 8) * (s32_t)(calc_press >> 8) * (s32_t)data->par_p10) >> 17; data->calc_press = calc_press + ((var1 + var2 + var3 + ((s32_t)data->par_p7 << 7)) >> 4); } static void bme680_calc_humidity(struct bme680_data *data, u16_t adc_humidity) { s32_t var1, var2_1, var2_2, var2, var3, var4, var5, var6; s32_t temp_scaled, calc_hum; temp_scaled = (((s32_t)data->t_fine * 5) + 128) >> 8; var1 = (s32_t)(adc_humidity - ((s32_t)((s32_t)data->par_h1 * 16))) - (((temp_scaled * (s32_t)data->par_h3) / ((s32_t)100)) >> 1); var2_1 = (s32_t)data->par_h2; var2_2 = ((temp_scaled * (s32_t)data->par_h4) / (s32_t)100) + (((temp_scaled * ((temp_scaled * (s32_t)data->par_h5) / ((s32_t)100))) >> 6) / ((s32_t)100)) + (s32_t)(1 << 14); var2 = (var2_1 * var2_2) >> 10; var3 = var1 * var2; var4 = (s32_t)data->par_h6 << 7; var4 = ((var4) + ((temp_scaled * (s32_t)data->par_h7) / ((s32_t)100))) >> 4; var5 = ((var3 >> 14) * (var3 >> 14)) >> 10; var6 = (var4 * var5) >> 1; calc_hum = (((var3 + var6) >> 10) * ((s32_t)1000)) >> 12; if (calc_hum > 100000) { /* Cap at 100%rH */ calc_hum = 100000; } else if (calc_hum < 0) { calc_hum = 0; } data->calc_humidity = calc_hum; } static void bme680_calc_gas_resistance(struct bme680_data *data, u8_t gas_range, u16_t adc_gas_res) { s64_t var1, var3; u64_t var2; static const u32_t look_up1[16] = { 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2126008810, 2147483647, 2130303777, 2147483647, 2147483647, 2143188679, 2136746228, 2147483647, 2126008810, 2147483647, 2147483647 }; static const u32_t look_up2[16] = { 4096000000, 2048000000, 1024000000, 
512000000, 255744255, 127110228, 64000000, 32258064, 16016016, 8000000, 4000000, 2000000, 1000000, 500000, 250000, 125000 }; var1 = (s64_t)((1340 + (5 * (s64_t)data->range_sw_err)) * ((s64_t)look_up1[gas_range])) >> 16; var2 = (((s64_t)((s64_t)adc_gas_res << 15) - (s64_t)(16777216)) + var1); var3 = (((s64_t)look_up2[gas_range] * (s64_t)var1) >> 9); data->calc_gas_resistance = (u32_t)((var3 + ((s64_t)var2 >> 1)) / (s64_t)var2); } static u8_t bme680_calc_res_heat(struct bme680_data *data, u16_t heatr_temp) { u8_t heatr_res; s32_t var1, var2, var3, var4, var5; s32_t heatr_res_x100; s32_t amb_temp = 25; /* Assume ambient temperature to be 25 deg C */ if (heatr_temp > 400) { /* Cap temperature */ heatr_temp = 400; } var1 = ((amb_temp * data->par_gh3) / 1000) * 256; var2 = (data->par_gh1 + 784) * (((((data->par_gh2 + 154009) * heatr_temp * 5) / 100) + 3276800) / 10); var3 = var1 + (var2 / 2); var4 = (var3 / (data->res_heat_range + 4)); var5 = (131 * data->res_heat_val) + 65536; heatr_res_x100 = ((var4 / var5) - 250) * 34; heatr_res = (heatr_res_x100 + 50) / 100; return heatr_res; } static u8_t bme680_calc_gas_wait(u16_t dur) { u8_t factor = 0, durval; if (dur >= 0xfc0) { durval = 0xff; /* Max duration*/ } else { while (dur > 0x3F) { dur = dur / 4; factor += 1; } durval = dur + (factor * 64); } return durval; } static int bme680_sample_fetch(struct device *dev, enum sensor_channel chan) { struct bme680_data *data = dev->driver_data; u8_t buff[BME680_LEN_FIELD] = { 0 }; u8_t gas_range; u32_t adc_temp, adc_press; u16_t adc_hum, adc_gas_res; int size = BME680_LEN_FIELD; int ret; __ASSERT_NO_MSG(chan == SENSOR_CHAN_ALL); ret = bme680_reg_read(data, BME680_REG_FIELD0, buff, size); if (ret < 0) { return ret; } data->new_data = buff[0] & BME680_MSK_NEW_DATA; data->heatr_stab = buff[14] & BME680_MSK_HEATR_STAB; adc_press = (u32_t)(((u32_t)buff[2] << 12) | ((u32_t)buff[3] << 4) | ((u32_t)buff[4] >> 4)); adc_temp = (u32_t)(((u32_t)buff[5] << 12) | ((u32_t)buff[6] << 4) | 
((u32_t)buff[7] >> 4)); adc_hum = (u16_t)(((u32_t)buff[8] << 8) | (u32_t)buff[9]); adc_gas_res = (u16_t)((u32_t)buff[13] << 2 | (((u32_t)buff[14]) >> 6)); gas_range = buff[14] & BME680_MSK_GAS_RANGE; if (data->new_data) { bme680_calc_temp(data, adc_temp); bme680_calc_press(data, adc_press); bme680_calc_humidity(data, adc_hum); bme680_calc_gas_resistance(data, gas_range, adc_gas_res); } /* Trigger the next measurement */ ret = bme680_reg_write(data, BME680_REG_CTRL_MEAS, BME680_CTRL_MEAS_VAL); if (ret < 0) { return ret; } return 0; } static int bme680_channel_get(struct device *dev, enum sensor_channel chan, struct sensor_value *val) { struct bme680_data *data = dev->driver_data; switch (chan) { case SENSOR_CHAN_AMBIENT_TEMP: /* * data->calc_temp has a resolution of 0.01 degC. * So 5123 equals 51.23 degC. */ val->val1 = data->calc_temp / 100; val->val2 = data->calc_temp % 100 * 10000; break; case SENSOR_CHAN_PRESS: /* * data->calc_press has a resolution of 1 Pa. * So 96321 equals 96.321 kPa. */ val->val1 = data->calc_press / 1000; val->val2 = (data->calc_press % 1000) * 1000; break; case SENSOR_CHAN_HUMIDITY: /* * data->calc_humidity has a resolution of 0.001 %RH. * So 46333 equals 46.333 %RH. */ val->val1 = data->calc_humidity / 1000; val->val2 = (data->calc_humidity % 1000) * 1000; break; case SENSOR_CHAN_GAS_RES: /* * data->calc_gas_resistance has a resolution of 1 ohm. * So 100000 equals 100000 ohms. 
*/ val->val1 = data->calc_gas_resistance; val->val2 = 0; break; default: return -EINVAL; } return 0; } static int bme680_read_compensation(struct bme680_data *data) { u8_t buff[BME680_LEN_COEFF_ALL]; int err = 0; err = bme680_reg_read(data, BME680_REG_COEFF1, buff, BME680_LEN_COEFF1); if (err < 0) { return err; } err = bme680_reg_read(data, BME680_REG_COEFF2, &buff[BME680_LEN_COEFF1], 16); if (err < 0) { return err; } err = bme680_reg_read(data, BME680_REG_COEFF3, &buff[BME680_LEN_COEFF1 + BME680_LEN_COEFF2], BME680_LEN_COEFF3); if (err < 0) { return err; } /* Temperature related coefficients */ data->par_t1 = (u16_t)(BME680_CONCAT_BYTES(buff[32], buff[31])); data->par_t2 = (s16_t)(BME680_CONCAT_BYTES(buff[1], buff[0])); data->par_t3 = (u8_t)(buff[2]); /* Pressure related coefficients */ data->par_p1 = (u16_t)(BME680_CONCAT_BYTES(buff[5], buff[4])); data->par_p2 = (s16_t)(BME680_CONCAT_BYTES(buff[7], buff[6])); data->par_p3 = (s8_t)buff[8]; data->par_p4 = (s16_t)(BME680_CONCAT_BYTES(buff[11], buff[10])); data->par_p5 = (s16_t)(BME680_CONCAT_BYTES(buff[13], buff[12])); data->par_p6 = (s8_t)(buff[15]); data->par_p7 = (s8_t)(buff[14]); data->par_p8 = (s16_t)(BME680_CONCAT_BYTES(buff[19], buff[18])); data->par_p9 = (s16_t)(BME680_CONCAT_BYTES(buff[21], buff[20])); data->par_p10 = (u8_t)(buff[22]); /* Humidity related coefficients */ data->par_h1 = (u16_t)(((u16_t)buff[25] << 4) | (buff[24] & 0x0f)); data->par_h2 = (u16_t)(((u16_t)buff[23] << 4) | ((buff[24]) >> 4)); data->par_h3 = (s8_t)buff[26]; data->par_h4 = (s8_t)buff[27]; data->par_h5 = (s8_t)buff[28]; data->par_h6 = (u8_t)buff[29]; data->par_h7 = (s8_t)buff[30]; /* Gas heater related coefficients */ data->par_gh1 = (s8_t)buff[35]; data->par_gh2 = (s16_t)(BME680_CONCAT_BYTES(buff[34], buff[33])); data->par_gh3 = (s8_t)buff[36]; data->res_heat_val = (s8_t)buff[37]; data->res_heat_range = ((buff[39] & BME680_MSK_RH_RANGE) >> 4); data->range_sw_err = ((s8_t)(buff[41] & BME680_MSK_RANGE_SW_ERR)) / 16; return 0; } 
static int bme680_chip_init(struct device *dev) { struct bme680_data *data = (struct bme680_data *)dev->driver_data; int err; err = bme680_reg_read(data, BME680_REG_CHIP_ID, &data->chip_id, 1); if (err < 0) { return err; } if (data->chip_id == BME680_CHIP_ID) { LOG_ERR("BME680 chip detected"); } else { LOG_ERR("Bad BME680 chip id 0x%x", data->chip_id); return -ENOTSUP; } err = bme680_read_compensation(data); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_CTRL_HUM, BME680_HUMIDITY_OVER); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_CONFIG, BME680_CONFIG_VAL); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_CTRL_GAS_1, BME680_CTRL_GAS_1_VAL); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_RES_HEAT0, bme680_calc_res_heat(data, BME680_HEATR_TEMP)); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_GAS_WAIT0, bme680_calc_gas_wait(BME680_HEATR_DUR_MS)); if (err < 0) { return err; } err = bme680_reg_write(data, BME680_REG_CTRL_MEAS, BME680_CTRL_MEAS_VAL); if (err < 0) { return err; } return 0; } static int bme680_init(struct device *dev) { struct bme680_data *data = dev->driver_data; data->i2c_master = device_get_binding( DT_INST_0_BOSCH_BME680_BUS_NAME); if (!data->i2c_master) { LOG_ERR("I2C master not found: %s", DT_INST_0_BOSCH_BME680_BUS_NAME); return -EINVAL; } data->i2c_slave_addr = DT_INST_0_BOSCH_BME680_BASE_ADDRESS; if (bme680_chip_init(dev) < 0) { return -EINVAL; } return 0; } static const struct sensor_driver_api bme680_api_funcs = { .sample_fetch = bme680_sample_fetch, .channel_get = bme680_channel_get, }; static struct bme680_data bme680_data; DEVICE_AND_API_INIT(bme680, DT_INST_0_BOSCH_BME680_LABEL, bme680_init, &bme680_data, NULL, POST_KERNEL, CONFIG_SENSOR_INIT_PRIORITY, &bme680_api_funcs);
GiulianoFranchetto/zephyr
drivers/sensor/bme680/bme680.c
C
apache-2.0
11,979
var crypto = require("crypto"), Request = require("./../request"), Response = require("./../response"); module.exports = sessionCookie; /** * A middleware for storing and retrieving session data using HTTP cookies. * The `options` may be any of the following: * * - secret A secret string to use to verify the cookie's contents, * defaults to `null`. If this is set the session's contents * will be cleared if the cookie has been tampered with * - name The name of the cookie, defaults to "strata.session" * - path The path of the cookie, defaults to "/" * - domain The cookie's domain, defaults to `null` * - expireAfter A number of seconds after which this cookie will expire, * defaults to `null` * - secure True to only send this cookie over HTTPS, defaults to `false` * - httpOnly True to only send this cookie over HTTP, defaults to `true` */ function sessionCookie(app, options) { var readSession = sessionCookieReader(options); var writeSession = sessionCookieWriter(options); return function (env, callback) { if (env.session) { app(env, callback); return; } readSession(env, function (err, session) { if (err) { env.session = {}; } else { env.session = session; } app(env, function (status, headers, body) { var res = new Response(body, headers, status); writeSession(env, res); res.send(callback); }); }); } } function sessionCookieReader(options) { options = sessionCookieOptions(options); return function readSessionCookie(env, callback) { var req = new Request(env); req.cookies(function (err, cookies) { if (err) { callback(err, cookies); return; } var cookie = cookies[options.name]; if (cookie) { cookie = new Buffer(cookie, "base64").toString("utf8"); var parts = cookie.split("--"), data = parts[0], digest = parts[1]; if (digest === sessionDigest(data, options.secret)) { try { callback(null, JSON.parse(data)); return; } catch (e) { // The cookie does not contain valid JSON. 
callback(e, {}); return; } } } callback(null, {}); }); } } function sessionCookieWriter(options) { options = sessionCookieOptions(options); return function writeSessionCookie(env, res) { var session = env.session; if (session) { var data = JSON.stringify(session); var digest = sessionDigest(data, options.secret); var cookie = new Buffer(data + "--" + digest, "utf8").toString("base64"); if (cookie.length > 4096) { env.error.write("Session cookie data size exceeds 4k; content dropped\n"); return; } var cookieOptions = { value: cookie, path: options.path, domain: options.domain, secure: options.secure, httpOnly: options.httpOnly }; if (options.expireAfter) { // expireAfter is given in seconds. var expires = new Date().getTime() + (options.expireAfter * 1000); cookieOptions.expires = new Date(expires); } res.setCookie(options.name, cookieOptions); } } } function sessionDigest(data, secret) { var shasum = crypto.createHash("sha1"); shasum.update(data); if (secret) { shasum.update(secret); } return shasum.digest("hex"); } /** * Creates a new options object from the given session cookie `options` with * sane defaults. */ function sessionCookieOptions(options) { options = options || {}; var opts = { secret: options.secret || null, name: options.name || "strata.session", path: options.path || "/", domain: options.domain || null, expireAfter: options.expireAfter || null, secure: options.secure || false }; if ("httpOnly" in options) { opts.httpOnly = options.httpOnly || false; } else { opts.httpOnly = true; } return opts; }
mbutler/nfn
node_modules/hem/node_modules/strata/lib/session/cookie.js
JavaScript
apache-2.0
4,665
use inkwell::context::Context; use inkwell::values::{BasicValue, InstructionOpcode::*}; use inkwell::{AddressSpace, AtomicOrdering, AtomicRMWBinOp, FloatPredicate, IntPredicate}; #[test] fn test_operands() { let context = Context::create(); let module = context.create_module("ivs"); let builder = context.create_builder(); let void_type = context.void_type(); let f32_type = context.f32_type(); let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[f32_ptr_type.into()], false); let function = module.add_function("take_f32_ptr", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let f32_val = f32_type.const_float(::std::f64::consts::PI); let store_instruction = builder.build_store(arg1, f32_val); let free_instruction = builder.build_free(arg1); let return_instruction = builder.build_return(None); assert_eq!(store_instruction.get_opcode(), Store); assert_eq!(free_instruction.get_opcode(), Call); assert_eq!(return_instruction.get_opcode(), Return); assert!(arg1.as_instruction_value().is_none()); // Test operands assert_eq!(store_instruction.get_num_operands(), 2); assert_eq!(free_instruction.get_num_operands(), 2); let store_operand0 = store_instruction.get_operand(0).unwrap(); let store_operand1 = store_instruction.get_operand(1).unwrap(); assert_eq!(store_operand0.left().unwrap(), f32_val); // f32 const assert_eq!(store_operand1.left().unwrap(), arg1); // f32* arg1 assert!(store_instruction.get_operand(2).is_none()); assert!(store_instruction.get_operand(3).is_none()); assert!(store_instruction.get_operand(4).is_none()); let free_operand0 = free_instruction.get_operand(0).unwrap().left().unwrap(); let free_operand1 = free_instruction.get_operand(1).unwrap().left().unwrap(); let free_operand0_instruction = free_operand0.as_instruction_value().unwrap(); assert!(free_operand0.is_pointer_value()); 
// (implictly casted) i8* arg1 assert!(free_operand1.is_pointer_value()); // Free function ptr assert_eq!(free_operand0_instruction.get_opcode(), BitCast); assert_eq!(free_operand0_instruction.get_operand(0).unwrap().left().unwrap(), arg1); assert!(free_operand0_instruction.get_operand(1).is_none()); assert!(free_operand0_instruction.get_operand(2).is_none()); assert!(free_instruction.get_operand(2).is_none()); assert!(free_instruction.get_operand(3).is_none()); assert!(free_instruction.get_operand(4).is_none()); assert!(module.verify().is_ok()); assert!(free_instruction.set_operand(0, arg1)); // Module is no longer valid because free takes an i8* not f32* assert!(module.verify().is_err()); assert!(free_instruction.set_operand(0, free_operand0)); assert!(module.verify().is_ok()); // No-op, free only has two (0-1) operands assert!(!free_instruction.set_operand(2, free_operand0)); assert!(module.verify().is_ok()); assert_eq!(return_instruction.get_num_operands(), 0); assert!(return_instruction.get_operand(0).is_none()); assert!(return_instruction.get_operand(1).is_none()); assert!(return_instruction.get_operand(2).is_none()); // Test Uses let bitcast_use_value = free_operand0_instruction .get_first_use() .unwrap() .get_used_value() .left() .unwrap(); let free_call_param = free_instruction.get_operand(0).unwrap().left().unwrap(); assert_eq!(bitcast_use_value, free_call_param); // These instructions/calls don't return any ir value so they aren't used anywhere assert!(store_instruction.get_first_use().is_none()); assert!(free_instruction.get_first_use().is_none()); assert!(return_instruction.get_first_use().is_none()); // arg1 (%0) has two uses: // store float 0x400921FB60000000, float* %0 // %1 = bitcast float* %0 to i8* let arg1_first_use = arg1.get_first_use().unwrap(); let arg1_second_use = arg1_first_use.get_next_use().unwrap(); // However their operands are used let store_operand_use0 = store_instruction.get_operand_use(0).unwrap(); let store_operand_use1 = 
store_instruction.get_operand_use(1).unwrap(); assert!(store_operand_use0.get_next_use().is_none()); assert!(store_operand_use1.get_next_use().is_none()); assert_eq!(store_operand_use1, arg1_second_use); assert_eq!(store_operand_use0.get_user().into_instruction_value(), store_instruction); assert_eq!(store_operand_use1.get_user().into_instruction_value(), store_instruction); assert_eq!(store_operand_use0.get_used_value().left().unwrap(), f32_val); assert_eq!(store_operand_use1.get_used_value().left().unwrap(), arg1); assert!(store_instruction.get_operand_use(2).is_none()); assert!(store_instruction.get_operand_use(3).is_none()); assert!(store_instruction.get_operand_use(4).is_none()); assert!(store_instruction.get_operand_use(5).is_none()); assert!(store_instruction.get_operand_use(6).is_none()); let free_operand_use0 = free_instruction.get_operand_use(0).unwrap(); let free_operand_use1 = free_instruction.get_operand_use(1).unwrap(); assert!(free_operand_use0.get_next_use().is_none()); assert!(free_operand_use1.get_next_use().is_none()); assert!(free_instruction.get_operand_use(2).is_none()); assert!(free_instruction.get_operand_use(3).is_none()); assert!(free_instruction.get_operand_use(4).is_none()); assert!(free_instruction.get_operand_use(5).is_none()); assert!(free_instruction.get_operand_use(6).is_none()); assert!(module.verify().is_ok()); } #[test] fn test_basic_block_operand() { let context = Context::create(); let module = context.create_module("ivs"); let builder = context.create_builder(); let void_type = context.void_type(); let fn_type = void_type.fn_type(&[], false); let function = module.add_function("bb_op", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); let basic_block2 = context.append_basic_block(function, "exit"); builder.position_at_end(basic_block); let branch_instruction = builder.build_unconditional_branch(basic_block2); let bb_operand = branch_instruction.get_operand(0).unwrap().right().unwrap(); 
assert_eq!(bb_operand, basic_block2); let bb_operand_use = branch_instruction.get_operand_use(0).unwrap(); assert_eq!(bb_operand_use.get_used_value().right().unwrap(), basic_block2); builder.position_at_end(basic_block2); builder.build_return(None); assert!(module.verify().is_ok()); } #[test] fn test_get_next_use() { let context = Context::create(); let module = context.create_module("ivs"); let builder = context.create_builder(); let f32_type = context.f32_type(); let fn_type = f32_type.fn_type(&[f32_type.into()], false); let function = module.add_function("take_f32", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_float_value(); let f32_val = f32_type.const_float(::std::f64::consts::PI); let add_pi0 = builder.build_float_add(arg1, f32_val, "add_pi"); let add_pi1 = builder.build_float_add(add_pi0, f32_val, "add_pi"); builder.build_return(Some(&add_pi1)); // f32_val constant appears twice, so there are two uses (first, next) let first_use = f32_val.get_first_use().unwrap(); assert_eq!(first_use.get_user(), add_pi1.as_instruction_value().unwrap()); assert_eq!(first_use.get_next_use().map(|x| x.get_user().into_float_value()), Some(add_pi0)); assert!(arg1.get_first_use().is_some()); assert!(module.verify().is_ok()); } #[test] fn test_instructions() { let context = Context::create(); let module = context.create_module("testing"); let builder = context.create_builder(); let void_type = context.void_type(); let i64_type = context.i64_type(); let f32_type = context.f32_type(); let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[f32_ptr_type.into(), f32_type.into()], false); let function = module.add_function("free_f32", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let 
arg2 = function.get_nth_param(1).unwrap().into_float_value(); assert!(arg1.get_first_use().is_none()); assert!(arg2.get_first_use().is_none()); let f32_val = f32_type.const_float(::std::f64::consts::PI); let store_instruction = builder.build_store(arg1, f32_val); let ptr_val = builder.build_ptr_to_int(arg1, i64_type, "ptr_val"); let ptr = builder.build_int_to_ptr(ptr_val, f32_ptr_type, "ptr"); let icmp = builder.build_int_compare(IntPredicate::EQ, ptr_val, ptr_val, "icmp"); let f32_sum = builder.build_float_add(arg2, f32_val, "f32_sum"); let fcmp = builder.build_float_compare(FloatPredicate::OEQ, f32_sum, arg2, "fcmp"); let free_instruction = builder.build_free(arg1); let return_instruction = builder.build_return(None); assert_eq!(store_instruction.get_opcode(), Store); assert_eq!(ptr_val.as_instruction().unwrap().get_opcode(), PtrToInt); assert_eq!(ptr.as_instruction().unwrap().get_opcode(), IntToPtr); assert_eq!(icmp.as_instruction().unwrap().get_opcode(), ICmp); assert_eq!(ptr.as_instruction().unwrap().get_icmp_predicate(), None); assert_eq!(icmp.as_instruction().unwrap().get_icmp_predicate().unwrap(), IntPredicate::EQ); assert_eq!(f32_sum.as_instruction().unwrap().get_opcode(), FAdd); assert_eq!(fcmp.as_instruction().unwrap().get_opcode(), FCmp); assert_eq!(f32_sum.as_instruction().unwrap().get_fcmp_predicate(), None); assert_eq!(icmp.as_instruction().unwrap().get_fcmp_predicate(), None); assert_eq!(fcmp.as_instruction().unwrap().get_fcmp_predicate().unwrap(), FloatPredicate::OEQ); assert_eq!(free_instruction.get_opcode(), Call); assert_eq!(return_instruction.get_opcode(), Return); // test instruction cloning let instruction_clone = return_instruction.clone(); assert_eq!(instruction_clone.get_opcode(), return_instruction.get_opcode()); assert_ne!(instruction_clone, return_instruction); // test copying let instruction_clone_copy = instruction_clone; assert_eq!(instruction_clone, instruction_clone_copy); } #[llvm_versions(10.0..=latest)] #[test] fn 
test_volatile_atomicrmw_cmpxchg() { let context = Context::create(); let module = context.create_module("testing"); let builder = context.create_builder(); let void_type = context.void_type(); let i32_type = context.i32_type(); let i32_ptr_type = i32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[i32_ptr_type.into(), i32_type.into()], false); let function = module.add_function("mem_inst", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let arg2 = function.get_nth_param(1).unwrap().into_int_value(); assert!(arg1.get_first_use().is_none()); assert!(arg2.get_first_use().is_none()); let i32_val = i32_type.const_int(7, false); let atomicrmw = builder .build_atomicrmw(AtomicRMWBinOp::Add, arg1, arg2, AtomicOrdering::Unordered) .unwrap() .as_instruction_value() .unwrap(); let cmpxchg = builder .build_cmpxchg( arg1, arg2, i32_val, AtomicOrdering::Monotonic, AtomicOrdering::Monotonic, ) .unwrap() .as_instruction_value() .unwrap(); assert_eq!(atomicrmw.get_volatile().unwrap(), false); assert_eq!(cmpxchg.get_volatile().unwrap(), false); atomicrmw.set_volatile(true).unwrap(); cmpxchg.set_volatile(true).unwrap(); assert_eq!(atomicrmw.get_volatile().unwrap(), true); assert_eq!(cmpxchg.get_volatile().unwrap(), true); atomicrmw.set_volatile(false).unwrap(); cmpxchg.set_volatile(false).unwrap(); assert_eq!(atomicrmw.get_volatile().unwrap(), false); assert_eq!(cmpxchg.get_volatile().unwrap(), false); } #[llvm_versions(3.6..=10.0)] #[test] fn test_mem_instructions() { let context = Context::create(); let module = context.create_module("testing"); let builder = context.create_builder(); let void_type = context.void_type(); let f32_type = context.f32_type(); let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[f32_ptr_type.into(), f32_type.into()], false); let function = 
module.add_function("mem_inst", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let arg2 = function.get_nth_param(1).unwrap().into_float_value(); assert!(arg1.get_first_use().is_none()); assert!(arg2.get_first_use().is_none()); let f32_val = f32_type.const_float(::std::f64::consts::PI); let store_instruction = builder.build_store(arg1, f32_val); let load = builder.build_load(arg1, ""); let load_instruction = load.as_instruction_value().unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), false); assert_eq!(load_instruction.get_volatile().unwrap(), false); store_instruction.set_volatile(true).unwrap(); load_instruction.set_volatile(true).unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), true); assert_eq!(load_instruction.get_volatile().unwrap(), true); store_instruction.set_volatile(false).unwrap(); load_instruction.set_volatile(false).unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), false); assert_eq!(load_instruction.get_volatile().unwrap(), false); assert_eq!(store_instruction.get_alignment().unwrap(), 0); assert_eq!(load_instruction.get_alignment().unwrap(), 0); assert!(store_instruction.set_alignment(16).is_ok()); assert!(load_instruction.set_alignment(16).is_ok()); assert_eq!(store_instruction.get_alignment().unwrap(), 16); assert_eq!(load_instruction.get_alignment().unwrap(), 16); assert!(store_instruction.set_alignment(0).is_ok()); assert!(load_instruction.set_alignment(0).is_ok()); assert_eq!(store_instruction.get_alignment().unwrap(), 0); assert_eq!(load_instruction.get_alignment().unwrap(), 0); assert!(store_instruction.set_alignment(14).is_err()); assert_eq!(store_instruction.get_alignment().unwrap(), 0); let fadd_instruction = builder.build_float_add(load.into_float_value(), f32_val, "").as_instruction_value().unwrap(); assert!(fadd_instruction.get_volatile().is_err()); 
assert!(fadd_instruction.set_volatile(false).is_err()); assert!(fadd_instruction.get_alignment().is_err()); assert!(fadd_instruction.set_alignment(16).is_err()); } #[llvm_versions(11.0..=latest)] #[test] fn test_mem_instructions() { let context = Context::create(); let module = context.create_module("testing"); let builder = context.create_builder(); let void_type = context.void_type(); let f32_type = context.f32_type(); let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[f32_ptr_type.into(), f32_type.into()], false); let function = module.add_function("mem_inst", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let arg2 = function.get_nth_param(1).unwrap().into_float_value(); assert!(arg1.get_first_use().is_none()); assert!(arg2.get_first_use().is_none()); let f32_val = f32_type.const_float(::std::f64::consts::PI); let store_instruction = builder.build_store(arg1, f32_val); let load = builder.build_load(arg1, ""); let load_instruction = load.as_instruction_value().unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), false); assert_eq!(load_instruction.get_volatile().unwrap(), false); store_instruction.set_volatile(true).unwrap(); load_instruction.set_volatile(true).unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), true); assert_eq!(load_instruction.get_volatile().unwrap(), true); store_instruction.set_volatile(false).unwrap(); load_instruction.set_volatile(false).unwrap(); assert_eq!(store_instruction.get_volatile().unwrap(), false); assert_eq!(load_instruction.get_volatile().unwrap(), false); assert_eq!(store_instruction.get_alignment().unwrap(), 4); assert_eq!(load_instruction.get_alignment().unwrap(), 4); assert!(store_instruction.set_alignment(16).is_ok()); assert!(load_instruction.set_alignment(16).is_ok()); 
assert_eq!(store_instruction.get_alignment().unwrap(), 16); assert_eq!(load_instruction.get_alignment().unwrap(), 16); assert!(store_instruction.set_alignment(4).is_ok()); assert!(load_instruction.set_alignment(4).is_ok()); assert_eq!(store_instruction.get_alignment().unwrap(), 4); assert_eq!(load_instruction.get_alignment().unwrap(), 4); assert!(store_instruction.set_alignment(14).is_err()); assert_eq!(store_instruction.get_alignment().unwrap(), 4); let fadd_instruction = builder.build_float_add(load.into_float_value(), f32_val, "").as_instruction_value().unwrap(); assert!(fadd_instruction.get_volatile().is_err()); assert!(fadd_instruction.set_volatile(false).is_err()); assert!(fadd_instruction.get_alignment().is_err()); assert!(fadd_instruction.set_alignment(16).is_err()); } #[llvm_versions(3.8..=latest)] #[test] fn test_atomic_ordering_mem_instructions() { let context = Context::create(); let module = context.create_module("testing"); let builder = context.create_builder(); let void_type = context.void_type(); let f32_type = context.f32_type(); let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic); let fn_type = void_type.fn_type(&[f32_ptr_type.into(), f32_type.into()], false); let function = module.add_function("mem_inst", fn_type, None); let basic_block = context.append_basic_block(function, "entry"); builder.position_at_end(basic_block); let arg1 = function.get_first_param().unwrap().into_pointer_value(); let arg2 = function.get_nth_param(1).unwrap().into_float_value(); assert!(arg1.get_first_use().is_none()); assert!(arg2.get_first_use().is_none()); let f32_val = f32_type.const_float(::std::f64::consts::PI); let store_instruction = builder.build_store(arg1, f32_val); let load = builder.build_load(arg1, ""); let load_instruction = load.as_instruction_value().unwrap(); assert_eq!(store_instruction.get_atomic_ordering().unwrap(), AtomicOrdering::NotAtomic); assert_eq!(load_instruction.get_atomic_ordering().unwrap(), AtomicOrdering::NotAtomic); 
assert!(store_instruction.set_atomic_ordering(AtomicOrdering::Monotonic).is_ok()); assert_eq!(store_instruction.get_atomic_ordering().unwrap(), AtomicOrdering::Monotonic); assert!(store_instruction.set_atomic_ordering(AtomicOrdering::Release).is_ok()); assert!(load_instruction.set_atomic_ordering(AtomicOrdering::Acquire).is_ok()); assert!(store_instruction.set_atomic_ordering(AtomicOrdering::Acquire).is_err()); assert!(store_instruction.set_atomic_ordering(AtomicOrdering::AcquireRelease).is_err()); assert!(load_instruction.set_atomic_ordering(AtomicOrdering::AcquireRelease).is_err()); assert!(load_instruction.set_atomic_ordering(AtomicOrdering::Release).is_err()); let fadd_instruction = builder.build_float_add(load.into_float_value(), f32_val, "").as_instruction_value().unwrap(); assert!(fadd_instruction.get_atomic_ordering().is_err()); assert!(fadd_instruction.set_atomic_ordering(AtomicOrdering::NotAtomic).is_err()); } #[test] fn test_metadata_kinds() { let context = Context::create(); let i8_type = context.i8_type(); let f32_type = context.f32_type(); let ptr_type = i8_type.ptr_type(AddressSpace::Generic); let struct_type = context.struct_type(&[i8_type.into(), f32_type.into()], false); let vector_type = i8_type.vec_type(2); let i8_value = i8_type.const_zero(); let i8_array_value = i8_type.const_array(&[i8_value]); let f32_value = f32_type.const_zero(); let ptr_value = ptr_type.const_null(); let struct_value = struct_type.get_undef(); let vector_value = vector_type.const_zero(); let md_string = context.metadata_string("lots of metadata here"); context.metadata_node(&[ i8_array_value.into(), i8_value.into(), f32_value.into(), ptr_value.into(), struct_value.into(), vector_value.into(), md_string.into(), ]); }
TheDan64/inkwell
tests/all/test_instruction_values.rs
Rust
apache-2.0
21,302
package org.efix.util.buffer; import org.efix.util.ByteSequenceWrapper; import org.efix.util.StringUtil; public class BufferUtil { public static UnsafeBuffer fromString(String string) { return new UnsafeBuffer(StringUtil.asciiBytes(string)); } public static String toString(Buffer buffer) { return toString(buffer, 0, buffer.capacity()); } public static String toString(Buffer buffer, int offset, int length) { return new ByteSequenceWrapper(buffer, offset, length).toString(); } }
artyomkorzun/efix
src/main/java/org/efix/util/buffer/BufferUtil.java
Java
apache-2.0
536
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.repository; import org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.entity.Address; import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.LinkedList; import java.util.List; public final class AddressRepository { private final DataSource dataSource; public AddressRepository(final DataSource dataSource) { this.dataSource = dataSource; } public void createTableIfNotExists() throws SQLException { String sql = "CREATE TABLE IF NOT EXISTS t_address " + "(address_id BIGINT NOT NULL, address_name VARCHAR(100) NOT NULL, PRIMARY KEY (address_id))"; try (Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.executeUpdate(sql); } } public void dropTable() throws SQLException { String sql = "DROP TABLE t_address"; try (Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.executeUpdate(sql); } } public void truncateTable() throws SQLException { String sql = 
"TRUNCATE TABLE t_address"; try (Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.executeUpdate(sql); } } public Long insert(final Address entity) throws SQLException { String sql = "INSERT INTO t_address (address_id, address_name) VALUES (?, ?)"; try (Connection connection = dataSource.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.setLong(1, entity.getAddressId()); preparedStatement.setString(2, entity.getAddressName()); preparedStatement.executeUpdate(); } return entity.getAddressId(); } public void delete(final Long primaryKey) throws SQLException { String sql = "DELETE FROM t_address WHERE address_id=?"; try (Connection connection = dataSource.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.setLong(1, primaryKey); preparedStatement.executeUpdate(); } } public List<Address> selectAll() throws SQLException { String sql = "SELECT * FROM t_address"; return getAddress(sql); } private List<Address> getAddress(final String sql) throws SQLException { List<Address> result = new LinkedList<>(); try (Connection connection = dataSource.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sql); ResultSet resultSet = preparedStatement.executeQuery()) { while (resultSet.next()) { Address address = new Address(); address.setAddressId(resultSet.getLong(1)); address.setAddressName(resultSet.getString(2)); result.add(address); } } return result; } }
apache/incubator-shardingsphere
examples/shardingsphere-sample/shardingsphere-example-generated/shardingsphere-jdbc-sample/shardingsphere-jdbc-memory-local-db-discovery-spring-namespace-jdbc-example/src/main/java/org/apache/shardingsphere/example/db/discovery/spring/namespace/jdbc/repository/AddressRepository.java
Java
apache-2.0
4,185
package dao import ( "go-common/app/admin/ep/merlin/model" "testing" . "github.com/smartystreets/goconvey/convey" ) var ( username = "[email protected]" ) func Test_Mail_Log(t *testing.T) { Convey("test add mail log", t, func() { ml := &model.MailLog{ ReceiverName: username, MailType: 1, SendContext: "test add mail log", } err := d.InsertMailLog(ml) So(err, ShouldBeNil) }) Convey("test find mail log", t, func() { mailLogs, err := d.FindMailLog(username) So(len(mailLogs), ShouldBeGreaterThan, 0) So(err, ShouldBeNil) }) Convey("test delete mail log", t, func() { err := d.DelMailLog(username) So(err, ShouldBeNil) }) Convey("test find mail log", t, func() { mailLogs, err := d.FindMailLog(username) So(len(mailLogs), ShouldEqual, 0) So(err, ShouldBeNil) }) }
LQJJ/demo
126-go-common-master/app/admin/ep/merlin/dao/mysql_mail_log_test.go
GO
apache-2.0
832
<!DOCTYPE html> <html layout:decorate="~{layouts/adminlte}"> <head> <title>FormKiQ Server - Setup</title> </head> <body> <div layout:fragment="content"> <!-- Main content --> <section class="content"> <div class="row"> <form method="post"> <div class="box-body"> <th:block th:with="form=${flow.currentState.data},fielderrors=${flow.currentState.fielderrors}" th:if="${!flow.currentState.end}" th:include="fragments/component/form" /> <th:block th:with="form=${T(com.formkiq.forms.JSONService).instance().loadForm('com.formkiq.core.service.dto.Setupcomplete.form')}" th:if="${flow.currentState.end}" th:include="fragments/component/form" /> </div> </form> </div> </section> </div> </body> </html>
formkiq/formkiq-server
core/src/main/resources/templates/setup.html
HTML
apache-2.0
890
# Pseudocercospora membranaceae H.S.G. Rao & S. Narayan SPECIES #### Status ACCEPTED #### According to Index Fungorum #### Published in Journal of Living World 6(2): 14 (1999) #### Original name Pseudocercospora membranaceae H.S.G. Rao & S. Narayan ### Remarks null
mdoering/backbone
life/Fungi/Ascomycota/Dothideomycetes/Capnodiales/Mycosphaerellaceae/Pseudocercospora/Pseudocercospora membranaceae/README.md
Markdown
apache-2.0
270
/** * @file eigen-tools.h * @brief header file containing all header with eigen API and tools functions * * .. invisible: * _ _ _____ _ _____ _____ * * | | | | ___| | | ___/ ___| * * | | | | |__ | | | |__ \ `--. * * | | | | __|| | | __| `--. \ * * \ \_/ / |___| |___| |___/\__/ / * * \___/\____/\_____|____/\____/ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License * */ #ifndef EIGEN_TOOLS #define EIGEN_TOOLS #include "eigen-data-types.h" #include <sstream> #include <iostream> #include <fstream> #include "time.h" using namespace Eigen; using std::vector; using std::ios; namespace eigentools { /** *Splitts sparse matrix (columns of i_mat) to mat1 and mat2 in portion (0.8 by default) */ template<typename Derived> void splitSparseMatrix(const SparseMatrix<Derived> &i_mat, SparseMatrix<Derived> &o_mat1,SparseMatrix<Derived> &o_mat2, double portion = 0.8) { int num = (int)(portion * (double)i_mat.cols()); o_mat1 = i_mat.leftCols(num); o_mat2 = i_mat.rightCols(i_mat.cols()-num); } template <typename Derived> Matrix<Derived, Dynamic, Dynamic> shuffleByIndexes(const Matrix<Derived, Dynamic, Dynamic>& mat, const std::vector<int>& indexes) { if (indexes.empty()) return mat; PermutationMatrix<Dynamic,Dynamic> perm(mat.cols()); for (int i = 0; i < perm.indices().size(); 
i++) { *(perm.indices().data()+i) = indexes[i]; } return mat * perm; } template <typename Derived> SparseMatrix<Derived> shuffleByIndexes(const SparseMatrix<Derived>& mat, const std::vector<int>& indexes) { if (indexes.empty()) return mat; PermutationMatrix<Dynamic,Dynamic> perm(mat.cols()); for (int i = 0; i < perm.indices().size(); i++) { *(perm.indices().data()+i) = indexes[i]; } return mat * perm; } template <class M> M randomShuffleMatrix(const M& mat, std::vector<int>& indexes) { indexes.clear(); indexes = vector<int>(mat.cols(),0); for (size_t i = 0; i < indexes.size();i++) indexes[i] = i; std::random_shuffle(indexes.begin(), indexes.end()); return eigentools::shuffleByIndexes(mat,indexes); } template <typename Derived> void randomStohasticMatrix(Matrix<Derived, Dynamic, Dynamic>& m, int rows, int cols, bool NormByRows = true) { srand(time(NULL)); Matrix<Derived, Dynamic, Dynamic> tmp; if (NormByRows) { tmp = (Matrix<Derived, Dynamic, Dynamic>::Random(rows, cols).array()+1).matrix()/2; } else { tmp = (Matrix<Derived, Dynamic, Dynamic>::Random(cols,rows).array()+1).matrix()/2; } Array<Derived, Dynamic, 1> c = tmp.rowwise().sum().array().pow(-1); tmp = (tmp.array().colwise() * c).matrix(); if (!NormByRows) m = tmp.transpose(); else m = tmp; } template <typename Derived> void makeStohastic(Matrix<Derived, Dynamic, Dynamic>& m, bool NormByRows = true) { srand(time(NULL)); if (NormByRows) { Array<Derived, Dynamic, 1> c = m.rowwise().sum().array(); c = (c.array() ==0).select(-1,c); m = m.array().colwise() / c; } else if (!NormByRows) { Array<Derived, Dynamic, 1> c = m.colwise().sum().array(); c = (c.array() ==0).select(-1,c); m = m.array().rowwise() / c.transpose(); } } template <typename Derived> void makeStohastic(const Matrix<Derived, Dynamic, Dynamic>& m, Matrix<Derived, Dynamic, Dynamic>& res, bool NormByRows = true) { srand(time(NULL)); if (NormByRows) { Array<Derived, Dynamic, 1> c = m.rowwise().sum().array(); c = (c.array() ==0).select(-1,c); res = 
m.array().colwise() / c; } else if (!NormByRows) { Array<Derived, Dynamic, 1> c = m.colwise().sum().array(); c = (c.array() ==0).select(-1,c); res = m.array().rowwise() / c.transpose(); } } template <typename Derived> ScalarType density(const Matrix<Derived,Dynamic,Dynamic>& i_mat, ScalarType i_precision = 0) { Matrix<Derived,Dynamic,Dynamic> a(i_mat.rows(),i_mat.cols()); a = (i_mat.array() > i_precision).select(1,i_mat); a = (i_mat.array() <= i_precision).select(0,a); return a.sum()/(i_mat.cols()*i_mat.rows()); } template <typename Derived> ScalarType density(const SparseMatrix<Derived>& i_mat) { return (ScalarType)i_mat.nonZeros()/(i_mat.cols()*i_mat.rows()); } template <typename Derived, typename T> void eigenVec2StlVecOfPair(const DenseBase<Derived> &i_mat, vector<std::pair<int,T> >& result) { if (i_mat.cols()==1) { result = vector<std::pair<int,T> >(i_mat.rows(),std::make_pair(0,(T)0)); for (int i = 0; i < i_mat.rows(); i++) result[i] = std::make_pair(i,(T)i_mat.coeff(i,0)); } else if (i_mat.rows()==1) { result = vector<std::pair<int,T> >(i_mat.cols(),std::make_pair(0,(T)0)); for (int i = 0; i < i_mat.cols(); i++) result[i] = std::make_pair(i,(T)i_mat.coeff(0,i)); } else { result = vector<std::pair<int,T> >(); } } template <typename Derived, typename T> void eigenVec2StlVec(const DenseBase<Derived> &i_mat, vector<T>& result) { if (i_mat.cols()==1) { result = vector<T>(i_mat.rows(),(T)0); for (int i = 0; i < i_mat.rows(); i++) result[i] = (T)i_mat.coeff(i,0); } else if (i_mat.rows()==1) { result = vector<T>(i_mat.cols(),(T)0); for (int i = 0; i < i_mat.cols(); i++) result[i] = i_mat.coeff(0,i); } else { result = vector<T>(); } } template <class M> void removeColumn(M &io_mat, int col) { int rows = io_mat.rows(); int cols = io_mat.cols(); if (col < 0 || col>=cols) return; io_mat.block(0,col,rows,cols-col-1) = io_mat.block(0,col+1,rows,cols-col-1); io_mat.conservativeResize(rows,cols-1); } template <class M> void removeRow(M &io_mat, int row) { int rows = 
io_mat.rows(); int cols = io_mat.cols(); if (row < 0 || row>=rows) return; io_mat.block(row,0,rows-row-1,cols) =io_mat.block(row+1,0,rows-row-1,cols); io_mat.conservativeResize(rows-1,cols); } template <typename Derived> void saveMatrix(const DenseBase<Derived> &i_mat, const std::string& filename) { std::ofstream oStream(filename); if (!oStream) return; oStream<<i_mat.rows()<<std::endl; oStream<<i_mat.cols()<<std::endl; oStream<<i_mat; oStream.close(); } /** *template function for loading from file matrix * ATTENTION: correspondence of scalar types in loading and saving matrix is obligation */ template <typename Derived> void loadMatrix(DenseBase<Derived> &i_mat, const std::string &filename) { typedef typename DenseBase<Derived>::Scalar Scalar; std::ifstream iStream(filename); if (!iStream) return; // std::cerr<<"Can't open file for writing " << filename << "\n"; int rows,cols; Scalar val; std::string line; int count = -1; char* pEnd; while( std::getline(iStream,line) ) { count++; if (count == 0) { rows = (int)strtof(line.c_str(),&pEnd); continue; } if (count == 1) { cols = (int)strtof(line.c_str(),&pEnd); i_mat.derived().resize(rows,cols); i_mat.setZero(); continue; } pEnd = (char*)line.c_str(); for (int i = 0; i < cols; i++) { val = (Scalar)strtod(pEnd,&pEnd); i_mat(count-2,i) = val; } } iStream.close(); } template<typename Derived> void saveSparseMatrix(SparseMatrix<Derived> &i_mat, const std::string &filename) { std::ofstream oStream(filename); if (!oStream) return; oStream<<i_mat.rows()<<std::endl; oStream<<i_mat.cols()<<std::endl; for (int k=0; k<i_mat.outerSize(); ++k) { int count = 0; std::string line; std::stringstream sstream(line); for (typename SparseMatrix<Derived>::InnerIterator it(i_mat,k); it; ++it) { sstream<<"\t"<<it.row()<<"\t"<<it.value(); count++; } oStream<<k<<"\t"<<count<<sstream.str(); oStream<<std::endl; } oStream.close(); } /** *template function for loading from file sparse matrix * ATTENTION: correspondence of scalar types in loading and 
saving matrix is obligation */ template<typename Derived> void loadSparseMatrix(SparseMatrix<Derived> &i_mat, const std::string &filename) { typedef typename SparseMatrix<Derived>::Scalar Scalar; std::ifstream iStream(filename); if (!iStream) return; int rows,cols,col,row; vector<Eigen::Triplet<Scalar> > t; std::string line; int count = -1,num; Scalar value; char* pEnd; while( std::getline(iStream,line) ) { count++; if (count == 0) { rows = (int)strtof(line.c_str(),&pEnd); continue; } if (count == 1) { cols = (int)strtof(line.c_str(),&pEnd); i_mat = SparseMatrix<Scalar>(rows,cols); if (rows == 0 || cols == 0) { return; } continue; } col = (int)strtof(line.c_str(),&pEnd); num = (int)strtof(pEnd, &pEnd); for (size_t i = 0; i < num; i++) { row = (int)strtof(pEnd, &pEnd); value = (Scalar)strtod(pEnd, &pEnd); t.push_back(Eigen::Triplet<Scalar>(row, col, value)); } } i_mat.setFromTriplets(t.begin(),t.end()); iStream.close(); } template<typename Derived> void concatenateMatrix(const DenseBase<Derived> &i_mat1, const DenseBase<Derived> &i_mat2, DenseBase<Derived>& res) { res.derived().resize(i_mat1.rows(),i_mat1.cols()+i_mat2.cols()); res.block(0,0,i_mat1.rows(),i_mat1.cols()) = i_mat1; res.block(0,i_mat1.cols(),i_mat2.rows(),i_mat2.cols()) = i_mat2; } template<typename Derived> void removeFirstNCols(const DenseBase<Derived> &i_mat, DenseBase<Derived> &o_mat, int N) { if (N < 0) { o_mat = i_mat; return; } if (N > i_mat.cols()) { o_mat = i_mat; return; } o_mat.derived().resize(i_mat.rows(),i_mat.cols()-N); o_mat = i_mat.block(0,N,i_mat.rows(),i_mat.cols()-N); } //template<typename Derived> //ScalarType cosineMeasure(const MatrixBase<Derived> &a, const MatrixBase<Derived> &b) template<typename Derived> void saveSparseMatrixBinary(SparseMatrix<Derived> &i_mat, const std::string &filename) { std::ofstream oStream(filename,ios::out|ios::binary); if (!oStream) return; saveSparseMatrixBinary(i_mat,oStream); oStream.close(); } template<typename Derived> void 
saveSparseMatrixBinary(SparseMatrix<Derived> &i_mat, std::ofstream& i_oStream) { if (!i_oStream) return; int num = i_mat.rows(); i_oStream.write((char*)& num,4); num =i_mat.cols(); i_oStream.write((char*)& num,4); int SizeOfScalarType = (int)sizeof(typename SparseMatrix<Derived>::Scalar); i_oStream.write((char*)& SizeOfScalarType,4); for (int k=0; k<i_mat.outerSize(); ++k) { i_oStream.write((char*)& k,4); for (typename SparseMatrix<Derived>::InnerIterator it(i_mat,k); it; ++it) { num = it.row(); i_oStream.write((char*)& num,4); i_oStream.write((char*)& it.value(),SizeOfScalarType); } num = -1; i_oStream.write((char*)& num,4); } } template<typename Derived> void loadSparseMatrixBinary(SparseMatrix<Derived> &o_mat, const std::string &filename) { std::ifstream iStream(filename,ios::in|ios::binary); if (!iStream) return; loadSparseMatrixBinary(o_mat,iStream); iStream.close(); } template<typename Derived> void loadSparseMatrixBinary(SparseMatrix<Derived> &o_mat, std::ifstream& i_iStream) { if (!i_iStream) return; typedef typename SparseMatrix<Derived>::Scalar Scalar; Scalar value; vector<Eigen::Triplet<Scalar> > t; int rows,cols, SizeOfScalarType, col,row; i_iStream.read((char*)& rows, 4); i_iStream.read((char*)& cols, 4); i_iStream.read((char*)& SizeOfScalarType, 4); if (SizeOfScalarType != (int)sizeof(Scalar)) { std::cerr<<"Sizes of data type stored in matreces are not equal"<<std::endl; } o_mat.derived().resize(rows,cols); while (1) { i_iStream.read((char*)& col, 4); while (1) { i_iStream.read((char*)& row, 4); if (row == -1) break; i_iStream.read((char*)& value, SizeOfScalarType); t.push_back(Eigen::Triplet<Scalar>(row,col,(Scalar)value)); } if (col == cols - 1) break; } o_mat.setFromTriplets(t.begin(),t.end()); } ScalarType cosineMeasure(DenseMat &a,DenseMat &b); } #endif // EIGEN_TOOLS
zy4kamu/Coda
src/utils/Eigen/eigen-tools.h
C
apache-2.0
13,529
import styled, { css as styledCss, keyframes } from 'styled-components' import type { TTestable } from '@/spec' import Img from '@/Img' import { theme } from '@/utils/themes' import css from '@/utils/css' const DURATION = '2.5s' const load = keyframes` 0% { top: 24px; } 70% { top: 10px; } 90% { top: 0; } 95% { top: 0; } 100% { top: 24px; } ` const liquid1 = keyframes` 0% { height: 0; opacity: 0; top: -5px; } 22% { height: 2.8125px; top: 3.75px; opacity: 1; } 25% { top: -2.5px; } 35% { height: 11.25px; top: -5px; } 55% { height: 3px; top: -1.25px; } 60% { height: 6px; opacity: 1; top: -3px; } 96% { height: 8.4375px; opacity: 0; top: 5px; } 100% { height: 0; opacity: 0; } ` const liquid2 = keyframes` 0% { height: 0; opacity: 0; top: -0.5rem; } 17.5% { height: 3px; top: 2px; opacity: 1; } 20% { top: -2.5px; } 25% { height: 15px; top: -6px; } 45% { height: 3px; top: -1px; } 60% { opacity: 1; height: 15px; top: -5px; } 96% { opacity: 0; height: 8px; top: 5px; } 100% { height: 0; opacity: 0; } ` const loadRule = styledCss` ${load} ${DURATION} infinite; ` const liquid1Rule = styledCss` ${liquid1} ${DURATION} infinite; ` const liquid2Rule = styledCss` ${liquid2} ${DURATION} infinite; ` export const Wrapper = styled.div.attrs(({ testid }: TTestable) => ({ 'data-test-id': testid, }))<TTestable>` text-align: center; position: relative; height: 28px; margin-bottom: 6px; cursor: pointer; ` export const Battery = styled.div` display: inline-block; position: relative; width: 16px; height: 26px; box-shadow: 0 0 0 2px #155e76; border-radius: 2px; &:before { content: ''; position: absolute; left: 5px; top: -4px; height: 3px; width: 6px; background: #155e76; border-radius: 2px; } ${Wrapper}:hover & { &:after { content: ''; position: absolute; top: 0; bottom: 0; left: 0; right: 0; border-right: 16px solid transparent; border-bottom: 22px solid rgba(255, 255, 255, 0.25); } } ` export const Liquid = styled.div` position: absolute; top: 23px; bottom: 0; left: 0; right: 0; width: 16px; 
background: ${theme('baseColor.green')}; ${Wrapper}:hover & { top: 0; animation: ${loadRule}; &:before { left: 0; animation: ${liquid2Rule}; content: ''; position: absolute; top: -5px; height: 11.25px; width: 14.625px; background: ${theme('baseColor.green')}; border-radius: 50%; opacity: 0; } &:after { right: 0; animation: ${liquid1Rule}; content: ''; position: absolute; top: -5px; height: 11.25px; width: 14.625px; background: ${theme('baseColor.green')}; border-radius: 50%; opacity: 0; } } ` export const MoneySign = styled(Img)` position: absolute; top: 6px; left: 3px; ${css.size(10)}; fill: #327faf; transition: opacity 0.25s; ${Wrapper}:hover & { fill: #ecbcb3; top: 8px; left: 2px; ${css.size(12)}; } transition: all 0.2s; `
mydearxym/mastani
src/widgets/Charger/styles/index.ts
TypeScript
apache-2.0
3,323
/* * Copyright (c) 2017 Antony Esik * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ae.camunda.dispatcher.mapper.xml; import com.ae.camunda.dispatcher.api.mapper.TaskMapper; import com.ae.camunda.dispatcher.exception.CamundaMappingException; import org.eclipse.persistence.jaxb.JAXBContextFactory; import org.springframework.stereotype.Component; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import java.io.StringReader; import java.io.StringWriter; import java.util.Collections; /** * @author AEsik * Date 09.10.2017 */ @Component public class XmlTaskMapper implements TaskMapper { @Override public String map(Object task) { try { JAXBContext context = JAXBContextFactory.createContext(new Class[]{task.getClass()}, Collections.emptyMap()); StringWriter sw = new StringWriter(); context.createMarshaller().marshal(task, sw); return sw.toString(); } catch (JAXBException e) { throw new CamundaMappingException(e); } } @Override public Object map(String body, Class<?> clazz) { try { JAXBContext context = JAXBContextFactory.createContext(new Class[]{clazz}, Collections.emptyMap()); StringReader sr = new StringReader(body); return context.createUnmarshaller().unmarshal(sr); } catch (JAXBException e) { throw new CamundaMappingException(e); } } }
EsikAntony/camunda-task-dispatcher
camunda-task-dispatcher-mapper-xml/src/main/java/com/ae/camunda/dispatcher/mapper/xml/XmlTaskMapper.java
Java
apache-2.0
1,991
/* Copyright 2022 The Vitess Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package physical import ( "strings" "vitess.io/vitess/go/mysql/collations" "vitess.io/vitess/go/sqltypes" vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/vterrors" "vitess.io/vitess/go/vt/vtgate/evalengine" ) func (rp *Route) findSysInfoRoutingPredicatesGen4(predicates []sqlparser.Expr, reservedVars *sqlparser.ReservedVars) error { for _, pred := range predicates { isTableSchema, bvName, out, err := extractInfoSchemaRoutingPredicate(pred, reservedVars) if err != nil { return err } if out == nil { // we didn't find a predicate to use for routing, continue to look for next predicate continue } if isTableSchema { rp.SysTableTableSchema = append(rp.SysTableTableSchema, out) } else { if rp.SysTableTableName == nil { rp.SysTableTableName = map[string]evalengine.Expr{} } rp.SysTableTableName[bvName] = out } } return nil } func extractInfoSchemaRoutingPredicate(in sqlparser.Expr, reservedVars *sqlparser.ReservedVars) (bool, string, evalengine.Expr, error) { switch cmp := in.(type) { case *sqlparser.ComparisonExpr: if cmp.Operator == sqlparser.EqualOp { isSchemaName, col, other, replaceOther := findOtherComparator(cmp) if col != nil && shouldRewrite(other) { evalExpr, err := evalengine.Translate(other, &notImplementedSchemaInfoConverter{}) if err != nil { if strings.Contains(err.Error(), evalengine.ErrTranslateExprNotSupported) { // This just means we can't rewrite this 
particular expression, // not that we have to exit altogether return false, "", nil, nil } return false, "", nil, err } var name string if isSchemaName { name = sqltypes.BvSchemaName } else { name = reservedVars.ReserveColName(col.(*sqlparser.ColName)) } replaceOther(sqlparser.NewArgument(name)) return isSchemaName, name, evalExpr, nil } } } return false, "", nil, nil } func findOtherComparator(cmp *sqlparser.ComparisonExpr) (bool, sqlparser.Expr, sqlparser.Expr, func(arg sqlparser.Argument)) { if schema, table := isTableSchemaOrName(cmp.Left); schema || table { return schema, cmp.Left, cmp.Right, func(arg sqlparser.Argument) { cmp.Right = arg } } if schema, table := isTableSchemaOrName(cmp.Right); schema || table { return schema, cmp.Right, cmp.Left, func(arg sqlparser.Argument) { cmp.Left = arg } } return false, nil, nil, nil } func shouldRewrite(e sqlparser.Expr) bool { switch node := e.(type) { case *sqlparser.FuncExpr: // we should not rewrite database() calls against information_schema return !(node.Name.EqualString("database") || node.Name.EqualString("schema")) } return true } func isTableSchemaOrName(e sqlparser.Expr) (isTableSchema bool, isTableName bool) { col, ok := e.(*sqlparser.ColName) if !ok { return false, false } return isDbNameCol(col), isTableNameCol(col) } func isDbNameCol(col *sqlparser.ColName) bool { return col.Name.EqualString("table_schema") || col.Name.EqualString("constraint_schema") || col.Name.EqualString("schema_name") || col.Name.EqualString("routine_schema") } func isTableNameCol(col *sqlparser.ColName) bool { return col.Name.EqualString("table_name") } type notImplementedSchemaInfoConverter struct{} func (f *notImplementedSchemaInfoConverter) ColumnLookup(*sqlparser.ColName) (int, error) { return 0, vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "Comparing table schema name with a column name not yet supported") } func (f *notImplementedSchemaInfoConverter) CollationForExpr(sqlparser.Expr) collations.ID { return collations.Unknown } 
func (f *notImplementedSchemaInfoConverter) DefaultCollation() collations.ID { return collations.Default() }
vitessio/vitess
go/vt/vtgate/planbuilder/physical/system_tables.go
GO
apache-2.0
4,314
"""Support for switches which integrates with other components.""" import logging import voluptuous as vol from homeassistant.components.switch import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, SwitchEntity, ) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, CONF_ENTITY_PICTURE_TEMPLATE, CONF_ICON_TEMPLATE, CONF_SWITCHES, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, STATE_OFF, STATE_ON, ) from homeassistant.core import callback from homeassistant.exceptions import TemplateError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.script import Script from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS from .template_entity import TemplateEntity _LOGGER = logging.getLogger(__name__) _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] ON_ACTION = "turn_on" OFF_ACTION = "turn_off" SWITCH_SCHEMA = vol.Schema( { vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_ICON_TEMPLATE): cv.template, vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template, vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template, vol.Required(ON_ACTION): cv.SCRIPT_SCHEMA, vol.Required(OFF_ACTION): cv.SCRIPT_SCHEMA, vol.Optional(ATTR_FRIENDLY_NAME): cv.string, vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(CONF_UNIQUE_ID): cv.string, } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} ) async def _async_create_entities(hass, config): """Create the Template switches.""" switches = [] for device, device_config in config[CONF_SWITCHES].items(): friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device) state_template = device_config.get(CONF_VALUE_TEMPLATE) icon_template = device_config.get(CONF_ICON_TEMPLATE) entity_picture_template = 
device_config.get(CONF_ENTITY_PICTURE_TEMPLATE) availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE) on_action = device_config[ON_ACTION] off_action = device_config[OFF_ACTION] unique_id = device_config.get(CONF_UNIQUE_ID) switches.append( SwitchTemplate( hass, device, friendly_name, state_template, icon_template, entity_picture_template, availability_template, on_action, off_action, unique_id, ) ) return switches async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the template switches.""" await async_setup_reload_service(hass, DOMAIN, PLATFORMS) async_add_entities(await _async_create_entities(hass, config)) class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): """Representation of a Template switch.""" def __init__( self, hass, device_id, friendly_name, state_template, icon_template, entity_picture_template, availability_template, on_action, off_action, unique_id, ): """Initialize the Template switch.""" super().__init__( availability_template=availability_template, icon_template=icon_template, entity_picture_template=entity_picture_template, ) self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, device_id, hass=hass ) self._name = friendly_name self._template = state_template domain = __name__.split(".")[-2] self._on_script = Script(hass, on_action, friendly_name, domain) self._off_script = Script(hass, off_action, friendly_name, domain) self._state = False self._unique_id = unique_id @callback def _update_state(self, result): super()._update_state(result) if isinstance(result, TemplateError): self._state = None return self._state = result.lower() in ("true", STATE_ON) async def async_added_to_hass(self): """Register callbacks.""" if self._template is None: # restore state after startup await super().async_added_to_hass() state = await self.async_get_last_state() if state: self._state = state.state == STATE_ON # no need to listen for events else: self.add_template_attribute( 
"_state", self._template, None, self._update_state ) await super().async_added_to_hass() @property def name(self): """Return the name of the switch.""" return self._name @property def unique_id(self): """Return the unique id of this switch.""" return self._unique_id @property def is_on(self): """Return true if device is on.""" return self._state @property def should_poll(self): """Return the polling state.""" return False async def async_turn_on(self, **kwargs): """Fire the on action.""" await self._on_script.async_run(context=self._context) if self._template is None: self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Fire the off action.""" await self._off_script.async_run(context=self._context) if self._template is None: self._state = False self.async_write_ha_state() @property def assumed_state(self): """State is assumed, if no template given.""" return self._template is None
titilambert/home-assistant
homeassistant/components/template/switch.py
Python
apache-2.0
6,023
package org.whale.ext.domain; import java.util.ArrayList; import java.util.List; import org.whale.system.annotation.jdbc.Column; import org.whale.system.annotation.jdbc.Id; import org.whale.system.annotation.jdbc.Table; import org.whale.system.annotation.jdbc.Validate; import org.whale.system.base.BaseEntry; import org.whale.system.common.util.PropertiesUtil; /** * 实体对象 * * @author wjs * 2014年9月10日-上午10:12:48 */ @Table(value="sys_domian", cnName="实体对象") public class Domain extends BaseEntry{ private static final long serialVersionUID = -23042834921L; @Id @Column(cnName="id") private Long id; @Validate(required=true) @Column(cnName="实体名") private String domainName; @Validate(required=true) @Column(cnName="中文名") private String domainCnName; @Validate(required=true) @Column(cnName="数据库", unique=true) private String domainSqlName; @Column(cnName="基础包路径") private String pkgName = "org.whale.system"; //树模型 private Integer treeModel; private String treeId; private String treePid; private String treeName; //模板类型 private Integer ftlType; //代码路径 private String codePath; private String author = PropertiesUtil.getValue("author", "wjs"); //主键 private Attr idAttr; private List<Attr> attrs; private List<Attr> listAttrs = new ArrayList<Attr>(); private List<Attr> formAttrs = new ArrayList<Attr>(); private List<Attr> queryAttrs = new ArrayList<Attr>(); public Long getId() { return id; } public String getDomainName() { return domainName; } public void setDomainName(String domainName) { this.domainName = domainName; } public String getDomainCnName() { return domainCnName; } public void setDomainCnName(String domainCnName) { this.domainCnName = domainCnName; } public String getDomainSqlName() { return domainSqlName; } public void setDomainSqlName(String domainSqlName) { this.domainSqlName = domainSqlName; } public String getPkgName() { return pkgName; } public void setPkgName(String pkgName) { this.pkgName = pkgName; } public Attr getIdAttr() { return idAttr; } public void 
setIdAttr(Attr idAttr) { this.idAttr = idAttr; } public List<Attr> getAttrs() { return attrs; } public void setAttrs(List<Attr> attrs) { this.attrs = attrs; } public List<Attr> getListAttrs() { return listAttrs; } public void setListAttrs(List<Attr> listAttrs) { this.listAttrs = listAttrs; } public List<Attr> getFormAttrs() { return formAttrs; } public void setFormAttrs(List<Attr> formAttrs) { this.formAttrs = formAttrs; } public List<Attr> getQueryAttrs() { return queryAttrs; } public void setQueryAttrs(List<Attr> queryAttrs) { this.queryAttrs = queryAttrs; } public void setId(Long id) { this.id = id; } public Integer getFtlType() { return ftlType; } public void setFtlType(Integer ftlType) { this.ftlType = ftlType; } public String getCodePath() { return codePath; } public void setCodePath(String codePath) { this.codePath = codePath; } public Integer getTreeModel() { return treeModel; } public void setTreeModel(Integer treeModel) { this.treeModel = treeModel; } public String getTreeId() { return treeId; } public void setTreeId(String treeId) { this.treeId = treeId; } public String getTreePid() { return treePid; } public void setTreePid(String treePid) { this.treePid = treePid; } public String getTreeName() { return treeName; } public void setTreeName(String treeName) { this.treeName = treeName; } public String getAuthor() { return author; } public void setAuthor(String author) { this.author = author; } }
fywxin/base
system-parent/ext-code/src/main/java/org/whale/ext/domain/Domain.java
Java
apache-2.0
3,741
/* * syscall.cpp * * Created on: Jun 3, 2017 * Author: warlo */ #include "syscall.hpp" #include <diag\Trace.h> namespace os { #if 0 static void dispatch_syscall(void) naked_function; static void dispatch_syscall(void) { __asm__ __volatile__ ( " sub sp, sp, #16\n" /* Create a stack frame to hold 3 parms + lr */ " str r4, [sp, #0]\n" /* Move parameter 4 (if any) into position */ " str r5, [sp, #4]\n" /* Move parameter 5 (if any) into position */ " str r6, [sp, #8]\n" /* Move parameter 6 (if any) into position */ " str lr, [sp, #12]\n" /* Save lr in the stack frame */ " ldr ip, =g_stublookup\n" /* R12=The base of the stub lookup table */ " ldr ip, [ip, r0, lsl #2]\n" /* R12=The address of the stub for this syscall */ " blx ip\n" /* Call the stub (modifies lr) */ " ldr lr, [sp, #12]\n" /* Restore lr */ " add sp, sp, #16\n" /* Destroy the stack frame */ " mov r2, r0\n" /* R2=Save return value in R2 */ " mov r0, #3\n" /* R0=SYS_syscall_return */ " svc 0" /* Return from the syscall */ ); } #endif } #if 0 enum register_stack_t { /* Saved by hardware */ REG_R0, REG_R1, REG_R2, REG_R3, REG_R12, REG_LR, REG_PC, REG_xPSR }; #define RESERVED_STACK \ (8 * sizeof(uint32_t)) static void dispatch_syscall() __attribute((naked)); static void dispatch_syscall(uint32_t* caller) __attribute((naked)){ uint32_t svc_num = ((char *) caller[REG_PC])[-2]; } void syscall_init(uint8_t nbr, uintptr_t call){ assert(nbr < MAX_SYSCALLS); caller = call; } } template<uintptr_t FROM, uintptr_t TO> static inline void copy_stack(){ __asm volatile( "ldr r12, [sp, %0]\n" "str r12, [sp, %1]\n" : "i"(FROM), "i"(TO) ::"r12"); } __attribute((always_inline) )static inline void copy_memory(uintptr from, uintptr_t to) __attribute((always_inline) )static inline void copy_stack() { __asm__ __volatile__ ("push {r12 }sub sp, #(8*4)\n"); copy_stack<REG_R0+8, REG_R0>(); } #endif //extern "C" void SVC_Handler() __attribute((naked)) ; #if 0 extern "C" void SVC_Handler() { assert(0); } #endif
WarlockD/arm-cortex-v7-unix
f9_os/inc/os/syscall.cpp
C++
apache-2.0
2,217