code (stringlengths 3 to 1.05M) | repo_name (stringlengths 4 to 116) | path (stringlengths 3 to 942) | language (stringclasses, 30 values) | license (stringclasses, 15 values) | size (int32, 3 to 1.05M) |
---|---|---|---|---|---|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_27) on Fri Mar 02 14:51:57 CET 2018 -->
<title>MaapiXPathEvalResult (CONF API Version 6.6)</title>
<meta name="date" content="2018-03-02">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="MaapiXPathEvalResult (CONF API Version 6.6)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/MaapiXPathEvalResult.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../com/tailf/maapi/MaapiWarningException.html" title="class in com.tailf.maapi"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../com/tailf/maapi/MaapiXPathEvalTrace.html" title="interface in com.tailf.maapi"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?com/tailf/maapi/MaapiXPathEvalResult.html" target="_top">Frames</a></li>
<li><a href="MaapiXPathEvalResult.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">com.tailf.maapi</div>
<h2 title="Interface MaapiXPathEvalResult" class="title">Interface MaapiXPathEvalResult</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<hr>
<br>
<pre>public interface <span class="strong">MaapiXPathEvalResult</span></pre>
<div class="block">This interface is used with <code>xpathEval</code>
method in <code>Maapi</code>. It allows a way
to iterate through a set of resulting nodes from evaluating xpath expression.</div>
<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../com/tailf/maapi/Maapi.html#xpathEval(int, com.tailf.maapi.MaapiXPathEvalResult, com.tailf.maapi.MaapiXPathEvalTrace, java.lang.String, java.lang.Object, java.lang.String, java.lang.Object...)"><code>Maapi.xpathEval(int, com.tailf.maapi.MaapiXPathEvalResult, com.tailf.maapi.MaapiXPathEvalTrace, java.lang.String, java.lang.Object, java.lang.String, java.lang.Object...)</code></a></dd></dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../com/tailf/maapi/XPathNodeIterateResultFlag.html" title="enum in com.tailf.maapi">XPathNodeIterateResultFlag</a></code></td>
<td class="colLast"><code><strong><a href="../../../com/tailf/maapi/MaapiXPathEvalResult.html#result(com.tailf.conf.ConfObject[], com.tailf.conf.ConfValue, java.lang.Object)">result</a></strong>(<a href="../../../com/tailf/conf/ConfObject.html" title="class in com.tailf.conf">ConfObject</a>[] kp,
<a href="../../../com/tailf/conf/ConfValue.html" title="class in com.tailf.conf">ConfValue</a> value,
<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> state)</code>
<div class="block">
For each node in the resulting node set evaluated by the xpath
this method will be called.</div>
</td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="result(com.tailf.conf.ConfObject[], com.tailf.conf.ConfValue, java.lang.Object)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>result</h4>
<pre><a href="../../../com/tailf/maapi/XPathNodeIterateResultFlag.html" title="enum in com.tailf.maapi">XPathNodeIterateResultFlag</a> result(<a href="../../../com/tailf/conf/ConfObject.html" title="class in com.tailf.conf">ConfObject</a>[] kp,
<a href="../../../com/tailf/conf/ConfValue.html" title="class in com.tailf.conf">ConfValue</a> value,
<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> state)</pre>
<div class="block"><p>
For each node in the resulting node set evaluated by the xpath
this method will be called.</p>
<p>
For each node in the resulting node set
</i>is called with keypath (as <code>ConfObject[]</code>) to
the resulting node as the first argument, and, if the node is a
leaf and has a value (as <code>ConfValue</code>), the value of that node
as the second argument otherwise it will return the string "undefined".
</p>
<p>
After each invocation this method (done
by <a href="../../../com/tailf/maapi/Maapi.html#xpathEval(int, com.tailf.maapi.MaapiXPathEvalResult, com.tailf.maapi.MaapiXPathEvalTrace, java.lang.String, java.lang.Object, java.lang.String, java.lang.Object...)"><code>xpathEval</code></a> )
this method should return either
<a href="../../../com/tailf/maapi/XPathNodeIterateResultFlag.html" title="enum in com.tailf.maapi"><code>ITER_CONTINUE</code></a> to
tell the xpath evaluator to continue with the
next resulting node or stop <a href="../../../com/tailf/maapi/XPathNodeIterateResultFlag.html" title="enum in com.tailf.maapi"><code>ITER_STOP</code></a>
to stop the iteration.
</p></div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>kp</code> - Keypath</dd><dd><code>value</code> - Value (if leaf) or string "undefined" if the node
is not a leaf</dd><dd><code>state</code> - User suplied opaque</dd></dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/MaapiXPathEvalResult.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../com/tailf/maapi/MaapiWarningException.html" title="class in com.tailf.maapi"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../com/tailf/maapi/MaapiXPathEvalTrace.html" title="interface in com.tailf.maapi"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?com/tailf/maapi/MaapiXPathEvalResult.html" target="_top">Frames</a></li>
<li><a href="MaapiXPathEvalResult.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
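The Javadoc above describes a callback contract: result() is invoked once per node in the XPath result set, and its return value steers the iteration. The following is a minimal sketch of an implementing class based only on the documented signature; the class name and the use of System.out are illustrative assumptions, not part of the CONF API.

```java
import java.util.Arrays;

import com.tailf.conf.ConfObject;
import com.tailf.conf.ConfValue;
import com.tailf.maapi.MaapiXPathEvalResult;
import com.tailf.maapi.XPathNodeIterateResultFlag;

// Hypothetical callback that prints every result node and never aborts the iteration.
public class PrintingXPathResult implements MaapiXPathEvalResult {

    @Override
    public XPathNodeIterateResultFlag result(ConfObject[] kp, ConfValue value, Object state) {
        // Per the documentation, 'value' carries the leaf value, or the string
        // "undefined" when the node is not a leaf.
        System.out.println(Arrays.toString(kp) + " = " + value);
        // Return ITER_STOP instead to stop after the current node.
        return XPathNodeIterateResultFlag.ITER_CONTINUE;
    }
}
```

An instance of such a class would be passed as the MaapiXPathEvalResult argument to Maapi.xpathEval, alongside an optional MaapiXPathEvalTrace callback and the XPath expression itself.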
| kimjinyong/i2nsf-framework | Hackathon-111/SecurityController/confd-6.6/doc/api/java/com/tailf/maapi/MaapiXPathEvalResult.html | HTML | apache-2.0 | 9,956 |
/*
* Copyright 2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.request;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import javax.servlet.http.HttpServletRequest;
public class HTTPRequest extends Request {
public static final String X_MM_CLIENT_IP = "X-MM-Client-IP";
public static final String FAKE_IP = "fakeClientIpAddress";
private String requestedUrl;
private String path;
private String uri;
private String queryString;
private Map<String, String> headers;
private boolean secure = false;
public HTTPRequest() { }
@SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
public HTTPRequest(final HttpServletRequest request) {
applyRequest(request);
}
@SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
public HTTPRequest(final HttpServletRequest request, final URL url) {
applyRequest(request);
applyUrl(url);
}
@SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
public HTTPRequest(final URL url) {
applyUrl(url);
}
public void applyRequest(final HttpServletRequest request) {
setClientIP(request.getRemoteAddr());
setPath(request.getPathInfo());
setQueryString(request.getQueryString());
setHostname(request.getServerName());
setRequestedUrl(request.getRequestURL().toString());
setUri(request.getRequestURI());
final String xmm = request.getHeader(X_MM_CLIENT_IP);
final String fip = request.getParameter(FAKE_IP);
if (xmm != null) {
setClientIP(xmm);
} else if (fip != null) {
setClientIP(fip);
}
final Map<String, String> headers = new HashMap<String, String>();
final Enumeration<?> headerNames = request.getHeaderNames();
while (headerNames.hasMoreElements()) {
final String name = (String) headerNames.nextElement();
final String value = request.getHeader(name);
headers.put(name, value);
}
setHeaders(headers);
secure = request.isSecure();
}
public void applyUrl(final URL url) {
setPath(url.getPath());
setQueryString(url.getQuery());
setHostname(url.getHost());
setRequestedUrl(url.toString());
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof HTTPRequest) {
final HTTPRequest rhs = (HTTPRequest) obj;
return new EqualsBuilder()
.appendSuper(super.equals(obj))
.append(getHeaders(), rhs.getHeaders())
.append(getPath(), rhs.getPath())
.append(getQueryString(), rhs.getQueryString())
.append(getUri(), rhs.getUri())
.isEquals();
} else {
return false;
}
}
public Map<String, String> getHeaders() {
return headers;
}
public String getPath() {
return path;
}
public String getQueryString() {
return queryString;
}
/**
* Gets the requested URL. This URL will not include the query string if the client provided
* one.
*
* @return the requestedUrl
*/
public String getRequestedUrl() {
return requestedUrl;
}
@Override
public int hashCode() {
return new HashCodeBuilder(1, 31)
.appendSuper(super.hashCode())
.append(getHeaders())
.append(getPath())
.append(getQueryString())
.append(getUri())
.toHashCode();
}
public void setHeaders(final Map<String, String> headers) {
this.headers = headers;
}
public void setPath(final String path) {
this.path = path;
}
public void setQueryString(final String queryString) {
this.queryString = queryString;
}
/**
* Sets the requested URL. This URL SHOULD NOT include the query string if the client provided
* one.
*
* @param requestedUrl
* the requestedUrl to set
*/
public void setRequestedUrl(final String requestedUrl) {
this.requestedUrl = requestedUrl;
}
@Override
public String getType() {
return "http";
}
public String getUri() {
return uri;
}
public void setUri(final String uri) {
this.uri = uri;
}
public String getHeaderValue(final String name) {
if (headers != null && headers.containsKey(name)) {
return headers.get(name);
}
return null;
}
public boolean isSecure() {
return secure;
}
}
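As a usage note for the class above: besides wrapping an HttpServletRequest, an HTTPRequest can be populated from a plain java.net.URL, which is handy outside a servlet container (for example in tests). A small sketch under that assumption follows; the URL and header values are invented for illustration.

```java
import java.net.URL;
import java.util.Collections;

import com.comcast.cdn.traffic_control.traffic_router.core.request.HTTPRequest;

public class HTTPRequestExample {
    public static void main(String[] args) throws Exception {
        final URL url = new URL("http://cdn.example.com/content/video.m3u8?format=hls");
        // The URL-based constructor copies path, query string, hostname and requested URL.
        final HTTPRequest request = new HTTPRequest(url);
        request.setHeaders(Collections.singletonMap("Host", "cdn.example.com"));

        System.out.println(request.getPath());           // /content/video.m3u8
        System.out.println(request.getQueryString());    // format=hls
        System.out.println(request.getHeaderValue("Host"));
    }
}
```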
| dneuman64/traffic_control | traffic_router/core/src/main/java/com/comcast/cdn/traffic_control/traffic_router/core/request/HTTPRequest.java | Java | apache-2.0 | 5,542 |
using System.ComponentModel.DataAnnotations;
namespace MixERP.Social.ViewModels
{
public sealed class Attachment
{
[Required]
public string UploadedFileName { get; set; }
[Required]
public string Base64 { get; set; }
public string FileName { get; set; }
}
} | mban94/frapid | src/Frapid.Web/Areas/MixERP.Social/ViewModels/Attachment.cs | C# | apache-2.0 | 315 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.history;
import static com.jayway.restassured.RestAssured.given;
import static org.camunda.bpm.engine.rest.util.JsonPathUtil.from;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response.Status;
import org.camunda.bpm.engine.batch.history.HistoricBatch;
import org.camunda.bpm.engine.batch.history.HistoricBatchQuery;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.rest.AbstractRestServiceTest;
import org.camunda.bpm.engine.rest.dto.history.batch.HistoricBatchDto;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.response.Response;
public class HistoricBatchRestServiceQueryTest extends AbstractRestServiceTest {
@ClassRule
public static TestContainerRule rule = new TestContainerRule();
protected static final String HISTORIC_BATCH_RESOURCE_URL = TEST_RESOURCE_ROOT_PATH + "/history/batch";
protected static final String HISTORIC_BATCH_QUERY_COUNT_URL = HISTORIC_BATCH_RESOURCE_URL + "/count";
protected HistoricBatchQuery queryMock;
@Before
public void setUpHistoricBatchQueryMock() {
List<HistoricBatch> mockHistoricBatches = MockProvider.createMockHistoricBatches();
queryMock = mock(HistoricBatchQuery.class);
when(queryMock.list()).thenReturn(mockHistoricBatches);
when(queryMock.count()).thenReturn((long) mockHistoricBatches.size());
when(processEngine.getHistoryService().createHistoricBatchQuery()).thenReturn(queryMock);
}
@Test
public void testNoParametersQuery() {
Response response = given()
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
verify(queryMock).list();
verifyNoMoreInteractions(queryMock);
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testUnknownQueryParameter() {
Response response = given()
.queryParam("unknown", "unknown")
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
verify(queryMock, never()).batchId(anyString());
verify(queryMock).list();
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testSortByParameterOnly() {
given()
.queryParam("sortBy", "batchId")
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.contentType(ContentType.JSON)
.body("type",
equalTo(InvalidRequestException.class.getSimpleName()))
.body("message",
equalTo("Only a single sorting parameter specified. sortBy and sortOrder required"))
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
}
@Test
public void testSortOrderParameterOnly() {
given()
.queryParam("sortOrder", "asc")
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.contentType(ContentType.JSON)
.body("type",
equalTo(InvalidRequestException.class.getSimpleName()))
.body("message",
equalTo("Only a single sorting parameter specified. sortBy and sortOrder required"))
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
}
@Test
public void testHistoricBatchQueryByBatchId() {
Response response = given()
.queryParam("batchId", MockProvider.EXAMPLE_BATCH_ID)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
InOrder inOrder = inOrder(queryMock);
inOrder.verify(queryMock).batchId(MockProvider.EXAMPLE_BATCH_ID);
inOrder.verify(queryMock).list();
inOrder.verifyNoMoreInteractions();
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testHistoricBatchQueryByCompleted() {
Response response = given()
.queryParam("completed", true)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
InOrder inOrder = inOrder(queryMock);
inOrder.verify(queryMock).completed(true);
inOrder.verify(queryMock).list();
inOrder.verifyNoMoreInteractions();
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testHistoricBatchQueryByNotCompleted() {
Response response = given()
.queryParam("completed", false)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
InOrder inOrder = inOrder(queryMock);
inOrder.verify(queryMock).completed(false);
inOrder.verify(queryMock).list();
inOrder.verifyNoMoreInteractions();
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testFullHistoricBatchQuery() {
Response response = given()
.queryParams(getCompleteQueryParameters())
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
verifyQueryParameterInvocations();
verify(queryMock).list();
verifyNoMoreInteractions(queryMock);
verifyHistoricBatchListJson(response.asString());
}
@Test
public void testQueryCount() {
given()
.then().expect()
.statusCode(Status.OK.getStatusCode())
.body("count", equalTo(1))
.when()
.get(HISTORIC_BATCH_QUERY_COUNT_URL);
verify(queryMock).count();
verifyNoMoreInteractions(queryMock);
}
@Test
public void testFullQueryCount() {
given()
.params(getCompleteQueryParameters())
.then().expect()
.statusCode(Status.OK.getStatusCode())
.body("count", equalTo(1))
.when()
.get(HISTORIC_BATCH_QUERY_COUNT_URL);
verifyQueryParameterInvocations();
verify(queryMock).count();
verifyNoMoreInteractions(queryMock);
}
@Test
public void testSortingParameters() {
InOrder inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("batchId", "desc", Status.OK);
inOrder.verify(queryMock).orderById();
inOrder.verify(queryMock).desc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("batchId", "asc", Status.OK);
inOrder.verify(queryMock).orderById();
inOrder.verify(queryMock).asc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("startTime", "desc", Status.OK);
inOrder.verify(queryMock).orderByStartTime();
inOrder.verify(queryMock).desc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("startTime", "asc", Status.OK);
inOrder.verify(queryMock).orderByStartTime();
inOrder.verify(queryMock).asc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("endTime", "desc", Status.OK);
inOrder.verify(queryMock).orderByEndTime();
inOrder.verify(queryMock).desc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("endTime", "asc", Status.OK);
inOrder.verify(queryMock).orderByEndTime();
inOrder.verify(queryMock).asc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("tenantId", "desc", Status.OK);
inOrder.verify(queryMock).orderByTenantId();
inOrder.verify(queryMock).desc();
inOrder = Mockito.inOrder(queryMock);
executeAndVerifySorting("tenantId", "asc", Status.OK);
inOrder.verify(queryMock).orderByTenantId();
inOrder.verify(queryMock).asc();
}
private void executeAndVerifySorting(String sortBy, String sortOrder, Status expectedStatus) {
given()
.queryParam("sortBy", sortBy)
.queryParam("sortOrder", sortOrder)
.then().expect()
.statusCode(expectedStatus.getStatusCode())
.when()
.get(HISTORIC_BATCH_RESOURCE_URL);
}
protected Map<String, Object> getCompleteQueryParameters() {
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("batchId", MockProvider.EXAMPLE_BATCH_ID);
parameters.put("type", MockProvider.EXAMPLE_BATCH_TYPE);
parameters.put("tenantIdIn", MockProvider.EXAMPLE_TENANT_ID + "," + MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
parameters.put("withoutTenantId", true);
return parameters;
}
protected void verifyQueryParameterInvocations() {
verify(queryMock).batchId(MockProvider.EXAMPLE_BATCH_ID);
verify(queryMock).type(MockProvider.EXAMPLE_BATCH_TYPE);
verify(queryMock).tenantIdIn(MockProvider.EXAMPLE_TENANT_ID, MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
verify(queryMock).withoutTenantId();
}
protected void verifyHistoricBatchListJson(String historicBatchListJson) {
List<Object> batches = from(historicBatchListJson).get();
assertEquals("There should be one historic batch returned.", 1, batches.size());
HistoricBatchDto historicBatch = from(historicBatchListJson).getObject("[0]", HistoricBatchDto.class);
assertNotNull("The returned historic batch should not be null.", historicBatch);
assertEquals(MockProvider.EXAMPLE_BATCH_ID, historicBatch.getId());
assertEquals(MockProvider.EXAMPLE_BATCH_TYPE, historicBatch.getType());
assertEquals(MockProvider.EXAMPLE_BATCH_TOTAL_JOBS, historicBatch.getTotalJobs());
assertEquals(MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED, historicBatch.getBatchJobsPerSeed());
assertEquals(MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB, historicBatch.getInvocationsPerBatchJob());
assertEquals(MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID, historicBatch.getSeedJobDefinitionId());
assertEquals(MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID, historicBatch.getMonitorJobDefinitionId());
assertEquals(MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID, historicBatch.getBatchJobDefinitionId());
assertEquals(MockProvider.EXAMPLE_TENANT_ID, historicBatch.getTenantId());
assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_BATCH_START_TIME), historicBatch.getStartTime());
assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_BATCH_END_TIME), historicBatch.getEndTime());
}
}
| subhrajyotim/camunda-bpm-platform | engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/history/HistoricBatchRestServiceQueryTest.java | Java | apache-2.0 | 11,247 |
/** @file
A brief file description
@section license License
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/****************************************************************************
Regression.cc
****************************************************************************/
#include "ts/Regression.h"
#include "ts/I_Version.h"
#include "ts/ink_platform.h"
#include "ts/ink_assert.h"
#include "ts/ink_args.h"
static RegressionTest *test = nullptr;
static RegressionTest *exclusive_test = nullptr;
RegressionTest *RegressionTest::current = nullptr;
int RegressionTest::ran_tests = 0;
DFA RegressionTest::dfa;
int RegressionTest::final_status = REGRESSION_TEST_PASSED;
static const char *
progname(const char *path)
{
const char *slash = strrchr(path, '/');
return slash ? slash + 1 : path;
}
char *
regression_status_string(int status)
{
return (
char *)(status == REGRESSION_TEST_NOT_RUN ?
"NOT_RUN" :
(status == REGRESSION_TEST_PASSED ? "PASSED" : (status == REGRESSION_TEST_INPROGRESS ? "INPROGRESS" : "FAILED")));
}
RegressionTest::RegressionTest(const char *_n, const SourceLocation &_l, TestFunction *_f, int _o)
: name(_n), location(_l), function(_f), next(nullptr), status(REGRESSION_TEST_NOT_RUN), printed(false), opt(_o)
{
if (opt == REGRESSION_OPT_EXCLUSIVE) {
if (exclusive_test) {
this->next = exclusive_test;
}
exclusive_test = this;
} else {
if (test) {
this->next = test;
}
test = this;
}
}
static inline int
start_test(RegressionTest *t, int regression_level)
{
ink_assert(t->status == REGRESSION_TEST_NOT_RUN);
t->status = REGRESSION_TEST_INPROGRESS;
fprintf(stderr, "REGRESSION TEST %s started\n", t->name);
(*t->function)(t, regression_level, &t->status);
int tresult = t->status;
if (tresult != REGRESSION_TEST_INPROGRESS) {
fprintf(stderr, " REGRESSION_RESULT %s:%*s %s\n", t->name, 40 - (int)strlen(t->name), " ",
regression_status_string(tresult));
t->printed = true;
}
return tresult;
}
int
RegressionTest::run(const char *atest, int regression_level)
{
if (atest) {
dfa.compile(atest);
} else {
dfa.compile(".*");
}
fprintf(stderr, "REGRESSION_TEST initialization begun\n");
// start the non exclusive tests
for (RegressionTest *t = test; t; t = t->next) {
if ((dfa.match(t->name) >= 0)) {
int res = start_test(t, regression_level);
if (res == REGRESSION_TEST_FAILED) {
final_status = REGRESSION_TEST_FAILED;
}
}
}
current = exclusive_test;
return run_some(regression_level);
}
void
RegressionTest::list()
{
char buf[128];
const char *bold = "\x1b[1m";
const char *unbold = "\x1b[0m";
if (!isatty(fileno(stdout))) {
bold = unbold = "";
}
for (RegressionTest *t = test; t; t = t->next) {
fprintf(stdout, "%s%s%s %s\n", bold, t->name, unbold, t->location.str(buf, sizeof(buf)));
}
for (RegressionTest *t = exclusive_test; t; t = t->next) {
fprintf(stdout, "%s%s%s %s\n", bold, t->name, unbold, t->location.str(buf, sizeof(buf)));
}
}
int
RegressionTest::run_some(int regression_level)
{
if (current) {
if (current->status == REGRESSION_TEST_INPROGRESS) {
return REGRESSION_TEST_INPROGRESS;
}
if (current->status != REGRESSION_TEST_NOT_RUN) {
if (!current->printed) {
current->printed = true;
fprintf(stderr, " REGRESSION_RESULT %s:%*s %s\n", current->name, 40 - (int)strlen(current->name), " ",
regression_status_string(current->status));
}
current = current->next;
}
}
for (; current; current = current->next) {
if ((dfa.match(current->name) >= 0)) {
int res = start_test(current, regression_level);
if (res == REGRESSION_TEST_INPROGRESS) {
return res;
}
if (res == REGRESSION_TEST_FAILED) {
final_status = REGRESSION_TEST_FAILED;
}
}
}
return REGRESSION_TEST_INPROGRESS;
}
int
RegressionTest::check_status(int regression_level)
{
int status = REGRESSION_TEST_PASSED;
if (current) {
status = run_some(regression_level);
if (!current) {
return status;
}
}
RegressionTest *t = test;
int exclusive = 0;
check_test_list:
while (t) {
if ((t->status == REGRESSION_TEST_PASSED || t->status == REGRESSION_TEST_FAILED) && !t->printed) {
t->printed = true;
fprintf(stderr, " REGRESSION_RESULT %s:%*s %s\n", t->name, 40 - (int)strlen(t->name), " ",
regression_status_string(t->status));
}
switch (t->status) {
case REGRESSION_TEST_FAILED:
final_status = REGRESSION_TEST_FAILED;
break;
case REGRESSION_TEST_INPROGRESS:
printf("Regression test(%s) still in progress\n", t->name);
status = REGRESSION_TEST_INPROGRESS;
break;
default:
break;
}
t = t->next;
}
if (!exclusive) {
exclusive = 1;
t = exclusive_test;
goto check_test_list;
}
return (status == REGRESSION_TEST_INPROGRESS) ? REGRESSION_TEST_INPROGRESS : final_status;
}
int
RegressionTest::main(int /* argc */, const char **argv, int level)
{
char regression_test[1024] = "";
int regression_list = 0;
int regression_level = level;
const ArgumentDescription argument_descriptions[] = {
{"regression", 'R', "Regression Level (quick:1..long:3)", "I", ®ression_level, "PROXY_REGRESSION", nullptr},
{"regression_test", 'r', "Run Specific Regression Test", "S512", regression_test, "PROXY_REGRESSION_TEST", nullptr},
{"regression_list", 'l', "List Regression Tests", "T", ®ression_list, "PROXY_REGRESSION_LIST", nullptr},
};
AppVersionInfo version;
version.setup(PACKAGE_NAME, progname(argv[0]), PACKAGE_VERSION, __DATE__, __TIME__, BUILD_MACHINE, BUILD_PERSON, "");
process_args(&version, argument_descriptions, countof(argument_descriptions), argv);
if (regression_list) {
RegressionTest::list();
} else {
RegressionTest::run(*regression_test == '\0' ? nullptr : regression_test, regression_level);
}
return RegressionTest::final_status == REGRESSION_TEST_PASSED ? 0 : 1;
}
int
rprintf(RegressionTest *t, const char *format, ...)
{
int l;
char buffer[8192];
snprintf(buffer, sizeof(buffer), "RPRINT %s: ", t->name);
fputs(buffer, stderr);
va_list ap;
va_start(ap, format);
l = vsnprintf(buffer, sizeof(buffer), format, ap);
va_end(ap);
fputs(buffer, stderr);
return (l);
}
int
rperf(RegressionTest *t, const char *tag, double val)
{
int l;
char format2[8192];
l = snprintf(format2, sizeof(format2), "RPERF %s.%s %f\n", t->name, tag, val);
fputs(format2, stderr);
return (l);
}
REGRESSION_TEST(Regression)(RegressionTest *t, int atype, int *status)
{
(void)t;
(void)atype;
rprintf(t, "regression test\n");
rperf(t, "speed", 100.0);
if (!test) {
*status = REGRESSION_TEST_FAILED;
} else {
*status = REGRESSION_TEST_PASSED;
}
}
| clearswift/trafficserver | lib/ts/Regression.cc | C++ | apache-2.0 | 7,725 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.common.CheckedRunnable;
import java.io.IOException;
import java.util.Objects;
/**
* Base class for all individual search phases like collecting distributed frequencies, fetching documents, querying shards.
*/
abstract class SearchPhase implements CheckedRunnable<IOException> {
private final String name;
protected SearchPhase(String name) {
this.name = Objects.requireNonNull(name, "name must not be null");
}
/**
* Returns the phase's name.
*/
public String getName() {
return name;
}
}
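SearchPhase above only fixes a name and inherits the CheckedRunnable<IOException> contract, so concrete phases supply their work in run(). Below is a hypothetical subclass (not one of Elasticsearch's real phases), assuming it sits in the same org.elasticsearch.action.search package because the base class is package-private.

```java
import java.io.IOException;

// Hypothetical phase that only logs; real phases fan out shard-level requests
// and hand over to the next phase once their work completes.
class LoggingSearchPhase extends SearchPhase {

    LoggingSearchPhase() {
        super("logging");
    }

    @Override
    public void run() throws IOException {
        System.out.println("executing search phase [" + getName() + "]");
    }
}
```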
| robin13/elasticsearch | server/src/main/java/org/elasticsearch/action/search/SearchPhase.java | Java | apache-2.0 | 965 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package policy
import (
"strings"
"k8s.io/apiserver/pkg/apis/audit"
"k8s.io/apiserver/pkg/authorization/authorizer"
)
const (
// DefaultAuditLevel is the default level to audit at, if no policy rules are matched.
DefaultAuditLevel = audit.LevelNone
)
// Checker exposes methods for checking the policy rules.
type Checker interface {
// Check the audit level for a request with the given authorizer attributes.
Level(authorizer.Attributes) audit.Level
}
// NewChecker creates a new policy checker.
func NewChecker(policy *audit.Policy) Checker {
return &policyChecker{*policy}
}
// FakeChecker creates a checker that returns a constant level for all requests (for testing).
func FakeChecker(level audit.Level) Checker {
return &fakeChecker{level}
}
type policyChecker struct {
audit.Policy
}
func (p *policyChecker) Level(attrs authorizer.Attributes) audit.Level {
for _, rule := range p.Rules {
if ruleMatches(&rule, attrs) {
return rule.Level
}
}
return DefaultAuditLevel
}
// Check whether the rule matches the request attrs.
func ruleMatches(r *audit.PolicyRule, attrs authorizer.Attributes) bool {
if len(r.Users) > 0 && attrs.GetUser() != nil {
if !hasString(r.Users, attrs.GetUser().GetName()) {
return false
}
}
if len(r.UserGroups) > 0 && attrs.GetUser() != nil {
matched := false
for _, group := range attrs.GetUser().GetGroups() {
if hasString(r.UserGroups, group) {
matched = true
break
}
}
if !matched {
return false
}
}
if len(r.Verbs) > 0 {
if !hasString(r.Verbs, attrs.GetVerb()) {
return false
}
}
if len(r.Namespaces) > 0 || len(r.Resources) > 0 {
return ruleMatchesResource(r, attrs)
}
if len(r.NonResourceURLs) > 0 {
return ruleMatchesNonResource(r, attrs)
}
return true
}
// Check whether the rule's non-resource URLs match the request attrs.
func ruleMatchesNonResource(r *audit.PolicyRule, attrs authorizer.Attributes) bool {
if attrs.IsResourceRequest() {
return false
}
path := attrs.GetPath()
for _, spec := range r.NonResourceURLs {
if pathMatches(path, spec) {
return true
}
}
return false
}
// Check whether the path matches the path specification.
func pathMatches(path, spec string) bool {
// Allow wildcard match
if spec == "*" {
return true
}
// Allow exact match
if spec == path {
return true
}
// Allow a trailing * subpath match
if strings.HasSuffix(spec, "*") && strings.HasPrefix(path, strings.TrimRight(spec, "*")) {
return true
}
return false
}
// Check whether the rule's resource fields match the request attrs.
func ruleMatchesResource(r *audit.PolicyRule, attrs authorizer.Attributes) bool {
if !attrs.IsResourceRequest() {
return false
}
if len(r.Namespaces) > 0 {
if !hasString(r.Namespaces, attrs.GetNamespace()) { // Non-namespaced resources use the empty string.
return false
}
}
if len(r.Resources) == 0 {
return true
}
apiGroup := attrs.GetAPIGroup()
resource := attrs.GetResource()
for _, gr := range r.Resources {
if gr.Group == apiGroup {
if len(gr.Resources) == 0 {
return true
}
for _, res := range gr.Resources {
if res == resource {
return true
}
}
}
}
return false
}
// Utility function to check whether a string slice contains a string.
func hasString(slice []string, value string) bool {
for _, s := range slice {
if s == value {
return true
}
}
return false
}
type fakeChecker struct {
level audit.Level
}
func (f *fakeChecker) Level(_ authorizer.Attributes) audit.Level {
return f.level
}
| pmorie/origin | vendor/k8s.io/kubernetes/staging/src/k8s.io/apiserver/pkg/audit/policy/checker.go | GO | apache-2.0 | 4,119 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rabbitmq.integration.qpid;
import org.apache.camel.component.rabbitmq.integration.RabbitMQReConnectionIT;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
@EnabledIfSystemProperty(named = "rabbitmq.instance.type", matches = "qpid")
public class RabbitMQReConnectionQpidIT extends RabbitMQReConnectionIT {
}
| nikhilvibhav/camel | components/camel-rabbitmq/src/test/java/org/apache/camel/component/rabbitmq/integration/qpid/RabbitMQReConnectionQpidIT.java | Java | apache-2.0 | 1,163 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.stress;
import java.util.Random;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assumptions.assumeFalse;
@Disabled("Manual test")
public class FileAsyncStressManually extends ContextTestSupport {
@Test
public void testAsyncStress() throws Exception {
// do not test on windows
assumeFalse(isPlatform("windows"));
// test by starting the unit test FileAsyncStressFileDropper in another
// JVM
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(250);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from(fileUri("?readLock=markerFile&maxMessagesPerPoll=25&move=backup")).threads(10)
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
// simulate some work with random time to complete
Random ran = new Random();
int delay = ran.nextInt(500) + 10;
Thread.sleep(delay);
}
}).to("mock:result");
}
};
}
}
| nikhilvibhav/camel | core/camel-core/src/test/java/org/apache/camel/component/file/stress/FileAsyncStressManually.java | Java | apache-2.0 | 2,515 |
#!/bin/bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script assumes the standard setup on tensorflow Jenkins windows machines.
# It is NOT guaranteed to work on any other machine. Use at your own risk!
#
# REQUIREMENTS:
# * All installed in standard locations:
# - JDK8, and JAVA_HOME set.
# - Microsoft Visual Studio 2015 Community Edition
# - Msys2
# - Anaconda3
# * Bazel windows executable copied as "bazel.exe" and included in PATH.
# Use a temporary directory with a short name.
export TMPDIR=${TMPDIR:-"C:/tmp"}
export TMPDIR=$(cygpath -m "$TMPDIR")
mkdir -p "$TMPDIR"
# Add timestamps before each command.
export PS4='+ $(date) + '
# Set bash path
export BAZEL_SH=${BAZEL_SH:-"C:/tools/msys64/usr/bin/bash"}
export PYTHON_BASE_PATH="${PYTHON_DIRECTORY:-Program Files/Anaconda3}"
# Set the path to find bazel.
export PATH="/c/tools/bazel/:$PATH"
# Set Python path for ./configure
export PYTHON_BIN_PATH="C:/${PYTHON_BASE_PATH}/python.exe"
export PYTHON_LIB_PATH="C:/${PYTHON_BASE_PATH}/lib/site-packages"
# Add python into PATH, it's needed because gen_git_source.py uses
# '/usr/bin/env python' as a shebang
export PATH="/c/${PYTHON_BASE_PATH}:$PATH"
# Add git into PATH needed for gen_git_source.py
export PATH="/c/Program Files/Git/cmd:$PATH"
# Make sure we have pip in PATH
export PATH="/c/${PYTHON_BASE_PATH}/Scripts:$PATH"
# Setting default values to CUDA related environment variables
export TF_CUDA_VERSION=${TF_CUDA_VERSION:-11.0}
export TF_CUDNN_VERSION=${TF_CUDNN_VERSION:-8}
export TF_CUDA_COMPUTE_CAPABILITIES=${TF_CUDA_COMPUTE_CAPABILITIES:-6.0}
export CUDA_TOOLKIT_PATH=${CUDA_TOOLKIT_PATH:-"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v${TF_CUDA_VERSION}"}
export CUDNN_INSTALL_PATH=${CUDNN_INSTALL_PATH:-"C:/tools/cuda"}
# Add Cuda and Cudnn dll directories into PATH
export PATH="$(cygpath -u "${CUDA_TOOLKIT_PATH}")/bin:$PATH"
export PATH="$(cygpath -u "${CUDA_TOOLKIT_PATH}")/extras/CUPTI/libx64:$PATH"
export PATH="$(cygpath -u "${CUDNN_INSTALL_PATH}")/bin:$PATH"
| annarev/tensorflow | tensorflow/tools/ci_build/windows/bazel/common_env.sh | Shell | apache-2.0 | 2,674 |
/* globals describe, it, expect, spyOn */
var Rx = require('../../dist/cjs/Rx');
var Observable = Rx.Observable;
var immediate = Rx.Scheduler.immediate;
var Observer = Rx.Observer;
describe('Observable.interval', function () {
it('should next 5 times then complete', function (done) {
var start = Date.now();
var expected = [0, 1, 2, 3, 4];
var i = 0;
var nextSpy = jasmine.createSpy('nextSpy');
Observable.interval(10).take(5)
.subscribe(nextSpy, null,
function () {
var now = Date.now();
expect(now - start >= 50).toBe(true, 'interval ended in ' + (now - start) + 'ms');
expect(nextSpy.calls.count()).toBe(5);
expected.forEach(function (v) {
expect(nextSpy.calls.argsFor(v)[0]).toBe(v);
});
done();
});
});
}); | SekibOmazic/RxJS | spec/observables/interval-spec.js | JavaScript | apache-2.0 | 832 |
/*
* Copyright 2015 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.wires.bpmn.beliefs.graph;
import java.util.List;
public interface GraphNode<C, T extends Edge> {
public int getId();
public void setId( int id );
public List<T> getInEdges();
public List<T> getOutEdges();
public C getContent();
public void setContent( C content );
}
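A minimal, hypothetical bean-style implementation of the interface above, assumed to live in the same package so that Edge resolves; the class name and the use of ArrayList are illustrative choices rather than part of the uberfire API.

```java
import java.util.ArrayList;
import java.util.List;

public class SimpleGraphNode<C, T extends Edge> implements GraphNode<C, T> {

    private int id;
    private C content;
    private final List<T> inEdges = new ArrayList<T>();
    private final List<T> outEdges = new ArrayList<T>();

    @Override
    public int getId() {
        return id;
    }

    @Override
    public void setId( int id ) {
        this.id = id;
    }

    @Override
    public List<T> getInEdges() {
        return inEdges;
    }

    @Override
    public List<T> getOutEdges() {
        return outEdges;
    }

    @Override
    public C getContent() {
        return content;
    }

    @Override
    public void setContent( C content ) {
        this.content = content;
    }
}
```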
| dgutierr/uberfire-extensions | uberfire-wires/uberfire-wires-bpmn/uberfire-wires-bpmn-api/src/main/java/org/uberfire/ext/wires/bpmn/beliefs/graph/GraphNode.java | Java | apache-2.0 | 933 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.kafka;
import java.util.Map;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.kafka.cluster.EmbeddedKafkaCluster;
import org.apache.drill.exec.store.kafka.decoders.JsonMessageReader;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.ClusterTest;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import static org.apache.drill.exec.store.kafka.KafkaMessageGenerator.SCHEMA_REGISTRY_URL;
public class KafkaTestBase extends ClusterTest {
protected static KafkaStoragePluginConfig storagePluginConfig;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Make sure this test is only running as part of the suite
Assume.assumeTrue(TestKafkaSuit.isRunningSuite());
ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
startCluster(builder);
TestKafkaSuit.initKafka();
initKafkaStoragePlugin(TestKafkaSuit.embeddedKafkaCluster);
}
public static void initKafkaStoragePlugin(EmbeddedKafkaCluster embeddedKafkaCluster) throws Exception {
final StoragePluginRegistry pluginRegistry = cluster.drillbit().getContext().getStorage();
Map<String, String> kafkaConsumerProps = Maps.newHashMap();
kafkaConsumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafkaCluster.getKafkaBrokerList());
kafkaConsumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "drill-test-consumer");
kafkaConsumerProps.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
storagePluginConfig = new KafkaStoragePluginConfig(kafkaConsumerProps);
storagePluginConfig.setEnabled(true);
pluginRegistry.put(KafkaStoragePluginConfig.NAME, storagePluginConfig);
client.alterSession(ExecConstants.KAFKA_RECORD_READER, JsonMessageReader.class.getName());
client.alterSession(ExecConstants.KAFKA_POLL_TIMEOUT, 5000);
}
public void runKafkaSQLVerifyCount(String sql, int expectedRowCount) {
long rowCount = queryBuilder().sql(sql).log();
if (expectedRowCount != -1) {
Assert.assertEquals(expectedRowCount, rowCount);
}
}
public static long testSql(String sql) {
return client.queryBuilder().sql(sql).log();
}
@AfterClass
public static void tearDownKafkaTestBase() {
if (TestKafkaSuit.isRunningSuite()) {
TestKafkaSuit.tearDownCluster();
}
}
} | apache/drill | contrib/storage-kafka/src/test/java/org/apache/drill/exec/store/kafka/KafkaTestBase.java | Java | apache-2.0 | 3,560 |
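For context on how the KafkaTestBase class above is meant to be used: a concrete integration test could extend it roughly as sketched below. This is only an illustration; the topic name, the expected row count, and the storage-plugin table prefix are assumptions, and the real tests in the Drill repository differ.

```java
import org.junit.Test;

// Hypothetical subclass test: the suite and KafkaTestBase have already started the
// embedded Kafka cluster and registered the Kafka storage plugin.
public class KafkaQuerySmokeTest extends KafkaTestBase {

  @Test
  public void countMessagesInTopic() {
    // Assumes a topic named "drill-test-topic" was populated with 10 JSON messages.
    runKafkaSQLVerifyCount("SELECT * FROM kafka.`drill-test-topic`", 10);
  }
}
```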
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.videoeditor;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlSerializer;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.media.videoeditor.MediaImageItem;
import android.media.videoeditor.MediaItem;
import android.media.MediaMetadataRetriever;
import android.util.Log;
import android.util.Xml;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.os.Debug;
import android.os.SystemProperties;
import android.os.Environment;
/**
* The VideoEditor implementation {@hide}
*/
public class VideoEditorImpl implements VideoEditor {
/*
* Logging
*/
private static final String TAG = "VideoEditorImpl";
/*
* The project filename
*/
private static final String PROJECT_FILENAME = "videoeditor.xml";
/*
* XML tags
*/
private static final String TAG_PROJECT = "project";
private static final String TAG_MEDIA_ITEMS = "media_items";
private static final String TAG_MEDIA_ITEM = "media_item";
private static final String TAG_TRANSITIONS = "transitions";
private static final String TAG_TRANSITION = "transition";
private static final String TAG_OVERLAYS = "overlays";
private static final String TAG_OVERLAY = "overlay";
private static final String TAG_OVERLAY_USER_ATTRIBUTES = "overlay_user_attributes";
private static final String TAG_EFFECTS = "effects";
private static final String TAG_EFFECT = "effect";
private static final String TAG_AUDIO_TRACKS = "audio_tracks";
private static final String TAG_AUDIO_TRACK = "audio_track";
private static final String ATTR_ID = "id";
private static final String ATTR_FILENAME = "filename";
private static final String ATTR_AUDIO_WAVEFORM_FILENAME = "waveform";
private static final String ATTR_RENDERING_MODE = "rendering_mode";
private static final String ATTR_ASPECT_RATIO = "aspect_ratio";
private static final String ATTR_REGENERATE_PCM = "regeneratePCMFlag";
private static final String ATTR_TYPE = "type";
private static final String ATTR_DURATION = "duration";
private static final String ATTR_START_TIME = "start_time";
private static final String ATTR_BEGIN_TIME = "begin_time";
private static final String ATTR_END_TIME = "end_time";
private static final String ATTR_VOLUME = "volume";
private static final String ATTR_BEHAVIOR = "behavior";
private static final String ATTR_DIRECTION = "direction";
private static final String ATTR_BLENDING = "blending";
private static final String ATTR_INVERT = "invert";
private static final String ATTR_MASK = "mask";
private static final String ATTR_BEFORE_MEDIA_ITEM_ID = "before_media_item";
private static final String ATTR_AFTER_MEDIA_ITEM_ID = "after_media_item";
private static final String ATTR_COLOR_EFFECT_TYPE = "color_type";
private static final String ATTR_COLOR_EFFECT_VALUE = "color_value";
private static final String ATTR_START_RECT_LEFT = "start_l";
private static final String ATTR_START_RECT_TOP = "start_t";
private static final String ATTR_START_RECT_RIGHT = "start_r";
private static final String ATTR_START_RECT_BOTTOM = "start_b";
private static final String ATTR_END_RECT_LEFT = "end_l";
private static final String ATTR_END_RECT_TOP = "end_t";
private static final String ATTR_END_RECT_RIGHT = "end_r";
private static final String ATTR_END_RECT_BOTTOM = "end_b";
private static final String ATTR_LOOP = "loop";
private static final String ATTR_MUTED = "muted";
private static final String ATTR_DUCK_ENABLED = "ducking_enabled";
private static final String ATTR_DUCK_THRESHOLD = "ducking_threshold";
private static final String ATTR_DUCKED_TRACK_VOLUME = "ducking_volume";
private static final String ATTR_GENERATED_IMAGE_CLIP = "generated_image_clip";
private static final String ATTR_IS_IMAGE_CLIP_GENERATED = "is_image_clip_generated";
private static final String ATTR_GENERATED_TRANSITION_CLIP = "generated_transition_clip";
private static final String ATTR_IS_TRANSITION_GENERATED = "is_transition_generated";
private static final String ATTR_OVERLAY_RGB_FILENAME = "overlay_rgb_filename";
private static final String ATTR_OVERLAY_FRAME_WIDTH = "overlay_frame_width";
private static final String ATTR_OVERLAY_FRAME_HEIGHT = "overlay_frame_height";
private static final String ATTR_OVERLAY_RESIZED_RGB_FRAME_WIDTH = "resized_RGBframe_width";
private static final String ATTR_OVERLAY_RESIZED_RGB_FRAME_HEIGHT = "resized_RGBframe_height";
private static final int ENGINE_ACCESS_MAX_TIMEOUT_MS = 500;
/*
* Instance variables
*/
private final Semaphore mLock;
private final String mProjectPath;
private final List<MediaItem> mMediaItems = new ArrayList<MediaItem>();
private final List<AudioTrack> mAudioTracks = new ArrayList<AudioTrack>();
private final List<Transition> mTransitions = new ArrayList<Transition>();
private long mDurationMs;
private int mAspectRatio;
/*
* Private Object for calling native Methods via MediaArtistNativeHelper
*/
private MediaArtistNativeHelper mMANativeHelper;
private boolean mPreviewInProgress = false;
private final boolean mMallocDebug;
/**
* Constructor
*
* @param projectPath - The path where the VideoEditor stores all files
* related to the project
*/
public VideoEditorImpl(String projectPath) throws IOException {
String s;
s = SystemProperties.get("libc.debug.malloc");
if (s.equals("1")) {
mMallocDebug = true;
try {
dumpHeap("HeapAtStart");
} catch (Exception ex) {
Log.e(TAG, "dumpHeap returned error in constructor");
}
} else {
mMallocDebug = false;
}
mLock = new Semaphore(1, true);
mMANativeHelper = new MediaArtistNativeHelper(projectPath, mLock, this);
mProjectPath = projectPath;
final File projectXml = new File(projectPath, PROJECT_FILENAME);
if (projectXml.exists()) {
try {
load();
} catch (Exception ex) {
ex.printStackTrace();
throw new IOException(ex.toString());
}
} else {
mAspectRatio = MediaProperties.ASPECT_RATIO_16_9;
mDurationMs = 0;
}
}
/*
* @return The MediaArtistNativeHelper object
*/
MediaArtistNativeHelper getNativeContext() {
return mMANativeHelper;
}
/*
* {@inheritDoc}
*/
public synchronized void addAudioTrack(AudioTrack audioTrack) {
if (audioTrack == null) {
throw new IllegalArgumentException("Audio Track is null");
}
if (mAudioTracks.size() == 1) {
throw new IllegalArgumentException("No more tracks can be added");
}
mMANativeHelper.setGeneratePreview(true);
/*
* Add the audio track to AudioTrack list
*/
mAudioTracks.add(audioTrack);
/*
* Form the audio PCM file path
*/
final String audioTrackPCMFilePath = String.format(mProjectPath + "/"
+ "AudioPcm" + audioTrack.getId() + ".pcm");
/*
* Create PCM only if not generated in previous session
*/
if (new File(audioTrackPCMFilePath).exists()) {
mMANativeHelper.setAudioflag(false);
}
}
/*
* {@inheritDoc}
*/
public synchronized void addMediaItem(MediaItem mediaItem) {
/*
* Validate Media Item
*/
if (mediaItem == null) {
throw new IllegalArgumentException("Media item is null");
}
/*
* Add the Media item to MediaItem list
*/
if (mMediaItems.contains(mediaItem)) {
throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
}
mMANativeHelper.setGeneratePreview(true);
/*
* Invalidate the end transition if necessary
*/
final int mediaItemsCount = mMediaItems.size();
if (mediaItemsCount > 0) {
removeTransitionAfter(mediaItemsCount - 1);
}
/*
* Add the new media item
*/
mMediaItems.add(mediaItem);
computeTimelineDuration();
/*
* Generate project thumbnail only from first media Item on storyboard
*/
if (mMediaItems.size() == 1) {
generateProjectThumbnail();
}
}
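/*
* Illustrative sketch (hypothetical id and path): adding a video clip to the
* storyboard through this method. RENDERING_MODE_BLACK_BORDER is one of the
* MediaItem rendering-mode constants.
*
*   MediaVideoItem clip = new MediaVideoItem(editor, "clip1",
*       "/sdcard/DCIM/video.mp4", MediaItem.RENDERING_MODE_BLACK_BORDER);
*   editor.addMediaItem(clip);
*/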
/*
* {@inheritDoc}
*/
public synchronized void addTransition(Transition transition) {
if (transition == null) {
throw new IllegalArgumentException("Null Transition");
}
final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
final MediaItem afterMediaItem = transition.getAfterMediaItem();
/*
* Check if the MediaItems are in sequence
*/
if (mMediaItems == null) {
throw new IllegalArgumentException("No media items are added");
}
if ((afterMediaItem != null) && (beforeMediaItem != null)) {
final int afterMediaItemIndex = mMediaItems.indexOf(afterMediaItem);
final int beforeMediaItemIndex = mMediaItems.indexOf(beforeMediaItem);
if ((afterMediaItemIndex == -1) || (beforeMediaItemIndex == -1)) {
throw new IllegalArgumentException(
"One of the media items was not found in the list");
}
if (afterMediaItemIndex != (beforeMediaItemIndex - 1) ) {
throw new IllegalArgumentException("MediaItems are not in sequence");
}
}
mMANativeHelper.setGeneratePreview(true);
mTransitions.add(transition);
/*
* Cross reference the transitions
*/
if (afterMediaItem != null) {
/*
* If a transition already exists at the specified position then
* invalidate it.
*/
if (afterMediaItem.getEndTransition() != null) {
afterMediaItem.getEndTransition().invalidate();
mTransitions.remove(afterMediaItem.getEndTransition());
}
afterMediaItem.setEndTransition(transition);
}
if (beforeMediaItem != null) {
/*
* If a transition already exists at the specified position then
* invalidate it.
*/
if (beforeMediaItem.getBeginTransition() != null) {
beforeMediaItem.getBeginTransition().invalidate();
mTransitions.remove(beforeMediaItem.getBeginTransition());
}
beforeMediaItem.setBeginTransition(transition);
}
computeTimelineDuration();
}
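/*
* Illustrative sketch (hypothetical id and variables): attaching a 1500 ms
* cross-fade between two media items that were previously added in sequence,
* which is the ordering this method validates above.
*
*   Transition crossfade = new TransitionCrossfade("tr1", firstItem,
*       secondItem, 1500, Transition.BEHAVIOR_LINEAR);
*   editor.addTransition(crossfade);
*/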
/*
* {@inheritDoc}
*/
public void cancelExport(String filename) {
if (mMANativeHelper != null && filename != null) {
mMANativeHelper.stop(filename);
}
}
/*
* {@inheritDoc}
*/
public void export(String filename, int height, int bitrate,
int audioCodec, int videoCodec,
ExportProgressListener listener)
throws IOException {
int audcodec = 0;
int vidcodec = 0;
if (filename == null) {
throw new IllegalArgumentException("export: filename is null");
}
final File tempPathFile = new File(filename);
if (tempPathFile == null) {
throw new IOException(filename + "can not be created");
}
if (mMediaItems.size() == 0) {
throw new IllegalStateException("No MediaItems added");
}
switch (height) {
case MediaProperties.HEIGHT_144:
break;
case MediaProperties.HEIGHT_288:
break;
case MediaProperties.HEIGHT_360:
break;
case MediaProperties.HEIGHT_480:
break;
case MediaProperties.HEIGHT_720:
break;
case MediaProperties.HEIGHT_1080:
break;
default: {
String message = "Unsupported height value " + height;
throw new IllegalArgumentException(message);
}
}
switch (bitrate) {
case MediaProperties.BITRATE_28K:
break;
case MediaProperties.BITRATE_40K:
break;
case MediaProperties.BITRATE_64K:
break;
case MediaProperties.BITRATE_96K:
break;
case MediaProperties.BITRATE_128K:
break;
case MediaProperties.BITRATE_192K:
break;
case MediaProperties.BITRATE_256K:
break;
case MediaProperties.BITRATE_384K:
break;
case MediaProperties.BITRATE_512K:
break;
case MediaProperties.BITRATE_800K:
break;
case MediaProperties.BITRATE_2M:
break;
case MediaProperties.BITRATE_5M:
break;
case MediaProperties.BITRATE_8M:
break;
default: {
final String message = "Unsupported bitrate value " + bitrate;
throw new IllegalArgumentException(message);
}
}
computeTimelineDuration();
final long audioBitrate = MediaArtistNativeHelper.Bitrate.BR_96_KBPS;
final long fileSize = (mDurationMs * (bitrate + audioBitrate)) / 8000;
if (MAX_SUPPORTED_FILE_SIZE <= fileSize) {
throw new IllegalStateException("Export Size is more than 2GB");
}
switch (audioCodec) {
case MediaProperties.ACODEC_AAC_LC:
audcodec = MediaArtistNativeHelper.AudioFormat.AAC;
break;
case MediaProperties.ACODEC_AMRNB:
audcodec = MediaArtistNativeHelper.AudioFormat.AMR_NB;
break;
default: {
String message = "Unsupported audio codec type " + audioCodec;
throw new IllegalArgumentException(message);
}
}
switch (videoCodec) {
case MediaProperties.VCODEC_H263:
vidcodec = MediaArtistNativeHelper.VideoFormat.H263;
break;
case MediaProperties.VCODEC_H264:
vidcodec = MediaArtistNativeHelper.VideoFormat.H264;
break;
case MediaProperties.VCODEC_MPEG4:
vidcodec = MediaArtistNativeHelper.VideoFormat.MPEG4;
break;
default: {
String message = "Unsupported video codec type " + videoCodec;
throw new IllegalArgumentException(message);
}
}
boolean semAcquireDone = false;
try {
lock();
semAcquireDone = true;
if (mMANativeHelper == null) {
throw new IllegalStateException("The video editor is not initialized");
}
mMANativeHelper.setAudioCodec(audcodec);
mMANativeHelper.setVideoCodec(vidcodec);
mMANativeHelper.export(filename, mProjectPath, height,bitrate,
mMediaItems, mTransitions, mAudioTracks, listener);
} catch (InterruptedException ex) {
Log.e(TAG, "Sem acquire NOT successful in export");
} finally {
if (semAcquireDone) {
unlock();
}
}
}
/*
* {@inheritDoc}
*/
public void export(String filename, int height, int bitrate,
ExportProgressListener listener)
throws IOException {
int defaultAudiocodec = MediaArtistNativeHelper.AudioFormat.AAC;
int defaultVideocodec = MediaArtistNativeHelper.VideoFormat.H264;
export(filename, height, bitrate, defaultAudiocodec,
defaultVideocodec, listener);
}
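/*
* Illustrative sketch (not part of the original class): exporting the
* storyboard with the convenience overload above. The output path and the
* listener body are hypothetical; height and bitrate must be among the
* MediaProperties constants validated in export().
*
*   editor.export("/sdcard/MyProject/out.mp4", MediaProperties.HEIGHT_480,
*       MediaProperties.BITRATE_2M,
*       new VideoEditor.ExportProgressListener() {
*         public void onProgress(VideoEditor videoEditor, String filename,
*             int progress) {
*           // update a progress indicator with the reported progress value
*         }
*       });
*/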
/*
* {@inheritDoc}
*/
public void generatePreview(MediaProcessingProgressListener listener) {
boolean semAcquireDone = false;
try {
lock();
semAcquireDone = true;
if (mMANativeHelper == null) {
throw new IllegalStateException("The video editor is not initialized");
}
if ((mMediaItems.size() > 0) || (mAudioTracks.size() > 0)) {
mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions, mAudioTracks,
listener);
}
} catch (InterruptedException ex) {
Log.e(TAG, "Sem acquire NOT successful in previewStoryBoard");
} finally {
if (semAcquireDone) {
unlock();
}
}
}
/*
* {@inheritDoc}
*/
public List<AudioTrack> getAllAudioTracks() {
return mAudioTracks;
}
/*
* {@inheritDoc}
*/
public List<MediaItem> getAllMediaItems() {
return mMediaItems;
}
/*
* {@inheritDoc}
*/
public List<Transition> getAllTransitions() {
return mTransitions;
}
/*
* {@inheritDoc}
*/
public int getAspectRatio() {
return mAspectRatio;
}
/*
* {@inheritDoc}
*/
public AudioTrack getAudioTrack(String audioTrackId) {
for (AudioTrack at : mAudioTracks) {
if (at.getId().equals(audioTrackId)) {
return at;
}
}
return null;
}
/*
* {@inheritDoc}
*/
public long getDuration() {
/**
* Since MediaImageItem can change duration we need to compute the
* duration here
*/
computeTimelineDuration();
return mDurationMs;
}
/*
* Force updates the timeline duration
*/
void updateTimelineDuration() {
computeTimelineDuration();
}
/*
* {@inheritDoc}
*/
public synchronized MediaItem getMediaItem(String mediaItemId) {
for (MediaItem mediaItem : mMediaItems) {
if (mediaItem.getId().equals(mediaItemId)) {
return mediaItem;
}
}
return null;
}
/*
* {@inheritDoc}
*/
public String getPath() {
return mProjectPath;
}
/*
* {@inheritDoc}
*/
public Transition getTransition(String transitionId) {
for (Transition transition : mTransitions) {
if (transition.getId().equals(transitionId)) {
return transition;
}
}
return null;
}
/*
* {@inheritDoc}
*/
public synchronized void insertAudioTrack(AudioTrack audioTrack,
String afterAudioTrackId) {
if (mAudioTracks.size() == 1) {
throw new IllegalArgumentException("No more tracks can be added");
}
if (afterAudioTrackId == null) {
mMANativeHelper.setGeneratePreview(true);
mAudioTracks.add(0, audioTrack);
} else {
final int audioTrackCount = mAudioTracks.size();
for (int i = 0; i < audioTrackCount; i++) {
AudioTrack at = mAudioTracks.get(i);
if (at.getId().equals(afterAudioTrackId)) {
mMANativeHelper.setGeneratePreview(true);
mAudioTracks.add(i + 1, audioTrack);
return;
}
}
throw new IllegalArgumentException("AudioTrack not found: " + afterAudioTrackId);
}
}
/*
* {@inheritDoc}
*/
public synchronized void insertMediaItem(MediaItem mediaItem, String afterMediaItemId) {
if (mMediaItems.contains(mediaItem)) {
throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
}
if (afterMediaItemId == null) {
mMANativeHelper.setGeneratePreview(true);
if (mMediaItems.size() > 0) {
/**
* Invalidate the transition at the beginning of the timeline
*/
removeTransitionBefore(0);
}
mMediaItems.add(0, mediaItem);
computeTimelineDuration();
generateProjectThumbnail();
} else {
final int mediaItemCount = mMediaItems.size();
for (int i = 0; i < mediaItemCount; i++) {
final MediaItem mi = mMediaItems.get(i);
if (mi.getId().equals(afterMediaItemId)) {
mMANativeHelper.setGeneratePreview(true);
/**
* Invalidate the transition at this position
*/
removeTransitionAfter(i);
/**
* Insert the new media item
*/
mMediaItems.add(i + 1, mediaItem);
computeTimelineDuration();
return;
}
}
throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
}
}
/*
* {@inheritDoc}
*/
public synchronized void moveAudioTrack(String audioTrackId, String afterAudioTrackId) {
throw new IllegalStateException("Not supported");
}
/*
* {@inheritDoc}
*/
public synchronized void moveMediaItem(String mediaItemId, String afterMediaItemId) {
final MediaItem moveMediaItem = removeMediaItem(mediaItemId,true);
if (moveMediaItem == null) {
throw new IllegalArgumentException("Target MediaItem not found: " + mediaItemId);
}
if (afterMediaItemId == null) {
if (mMediaItems.size() > 0) {
mMANativeHelper.setGeneratePreview(true);
/**
* Invalidate adjacent transitions at the insertion point
*/
removeTransitionBefore(0);
/**
* Insert the media item at the new position
*/
mMediaItems.add(0, moveMediaItem);
computeTimelineDuration();
generateProjectThumbnail();
} else {
throw new IllegalStateException("Cannot move media item (it is the only item)");
}
} else {
final int mediaItemCount = mMediaItems.size();
for (int i = 0; i < mediaItemCount; i++) {
final MediaItem mi = mMediaItems.get(i);
if (mi.getId().equals(afterMediaItemId)) {
mMANativeHelper.setGeneratePreview(true);
/**
* Invalidate adjacent transitions at the insertion point
*/
removeTransitionAfter(i);
/**
* Insert the media item at the new position
*/
mMediaItems.add(i + 1, moveMediaItem);
computeTimelineDuration();
return;
}
}
throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
}
}
/*
* {@inheritDoc}
*/
public void release() {
stopPreview();
boolean semAcquireDone = false;
try {
lock();
semAcquireDone = true;
if (mMANativeHelper != null) {
mMediaItems.clear();
mAudioTracks.clear();
mTransitions.clear();
mMANativeHelper.releaseNativeHelper();
mMANativeHelper = null;
}
} catch (Exception ex) {
Log.e(TAG, "Sem acquire NOT successful in export", ex);
} finally {
if (semAcquireDone) {
unlock();
}
}
if (mMallocDebug) {
try {
dumpHeap("HeapAtEnd");
} catch (Exception ex) {
Log.e(TAG, "dumpHeap returned error in release");
}
}
}
/*
* {@inheritDoc}
*/
public synchronized void removeAllMediaItems() {
mMANativeHelper.setGeneratePreview(true);
mMediaItems.clear();
/**
* Invalidate all transitions
*/
for (Transition transition : mTransitions) {
transition.invalidate();
}
mTransitions.clear();
mDurationMs = 0;
/**
* If a thumbnail already exists, then delete it
*/
if ((new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).exists()) {
(new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).delete();
}
}
/*
* {@inheritDoc}
*/
public synchronized AudioTrack removeAudioTrack(String audioTrackId) {
final AudioTrack audioTrack = getAudioTrack(audioTrackId);
if (audioTrack != null) {
mMANativeHelper.setGeneratePreview(true);
mAudioTracks.remove(audioTrack);
audioTrack.invalidate();
mMANativeHelper.invalidatePcmFile();
mMANativeHelper.setAudioflag(true);
} else {
throw new IllegalArgumentException(" No more audio tracks");
}
return audioTrack;
}
/*
* {@inheritDoc}
*/
public synchronized MediaItem removeMediaItem(String mediaItemId) {
final String firstItemString = mMediaItems.get(0).getId();
final MediaItem mediaItem = getMediaItem(mediaItemId);
if (mediaItem != null) {
mMANativeHelper.setGeneratePreview(true);
/**
* Remove the media item
*/
mMediaItems.remove(mediaItem);
if (mediaItem instanceof MediaImageItem) {
((MediaImageItem)mediaItem).invalidate();
}
final List<Overlay> overlays = mediaItem.getAllOverlays();
if (overlays.size() > 0) {
for (Overlay overlay : overlays) {
if (overlay instanceof OverlayFrame) {
final OverlayFrame overlayFrame = (OverlayFrame)overlay;
overlayFrame.invalidate();
}
}
}
/**
* Remove the adjacent transitions
*/
removeAdjacentTransitions(mediaItem);
computeTimelineDuration();
}
/**
* If the removed item was the first media item on the storyboard,
* regenerate the project thumbnail
*/
if (firstItemString.equals(mediaItemId)) {
generateProjectThumbnail();
}
if (mediaItem instanceof MediaVideoItem) {
/**
* Delete the graph file
*/
((MediaVideoItem)mediaItem).invalidate();
}
return mediaItem;
}
private synchronized MediaItem removeMediaItem(String mediaItemId, boolean flag) {
final String firstItemString = mMediaItems.get(0).getId();
final MediaItem mediaItem = getMediaItem(mediaItemId);
if (mediaItem != null) {
mMANativeHelper.setGeneratePreview(true);
/**
* Remove the media item
*/
mMediaItems.remove(mediaItem);
/**
* Remove the adjacent transitions
*/
removeAdjacentTransitions(mediaItem);
computeTimelineDuration();
}
/**
* If the removed item was the first media item on the storyboard,
* regenerate the project thumbnail
*/
if (firstItemString.equals(mediaItemId)) {
generateProjectThumbnail();
}
return mediaItem;
}
/*
* {@inheritDoc}
*/
public synchronized Transition removeTransition(String transitionId) {
final Transition transition = getTransition(transitionId);
if (transition == null) {
throw new IllegalStateException("Transition not found: " + transitionId);
}
mMANativeHelper.setGeneratePreview(true);
/**
* Remove the transition references
*/
final MediaItem afterMediaItem = transition.getAfterMediaItem();
if (afterMediaItem != null) {
afterMediaItem.setEndTransition(null);
}
final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
if (beforeMediaItem != null) {
beforeMediaItem.setBeginTransition(null);
}
mTransitions.remove(transition);
transition.invalidate();
computeTimelineDuration();
return transition;
}
/*
* {@inheritDoc}
*/
public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs,
OverlayData overlayData) {
if (surfaceHolder == null) {
throw new IllegalArgumentException("Surface Holder is null");
}
final Surface surface = surfaceHolder.getSurface();
if (surface == null) {
throw new IllegalArgumentException("Surface could not be retrieved from Surface holder");
}
if (surface.isValid() == false) {
throw new IllegalStateException("Surface is not valid");
}
if (timeMs < 0) {
throw new IllegalArgumentException("Requested time is negative");
} else if (timeMs > mDurationMs) {
throw new IllegalArgumentException("Requested time exceeds the timeline duration");
}
long result = 0;
boolean semAcquireDone = false;
try {
semAcquireDone = lock(ENGINE_ACCESS_MAX_TIMEOUT_MS);
if (semAcquireDone == false) {
throw new IllegalStateException("Timeout waiting for semaphore");
}
if (mMANativeHelper == null) {
throw new IllegalStateException("The video editor is not initialized");
}
if (mMediaItems.size() > 0) {
final Rect frame = surfaceHolder.getSurfaceFrame();
result = mMANativeHelper.renderPreviewFrame(surface,
timeMs, frame.width(), frame.height(), overlayData);
} else {
result = 0;
}
} catch (InterruptedException ex) {
Log.w(TAG, "The thread was interrupted", new Throwable());
throw new IllegalStateException("The thread was interrupted");
} finally {
if (semAcquireDone) {
unlock();
}
}
return result;
}
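/*
* Illustrative sketch (not part of the original class): rendering a single
* frame while the user scrubs a timeline. The surfaceView and seekPositionMs
* names are hypothetical, and OverlayData is assumed to be created with its
* public no-argument constructor.
*
*   VideoEditor.OverlayData overlayData = new VideoEditor.OverlayData();
*   long renderedTimeMs = editor.renderPreviewFrame(
*       surfaceView.getHolder(), seekPositionMs, overlayData);
*/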
/**
* Load the project from the XML metadata file
*/
private void load() throws FileNotFoundException, XmlPullParserException, IOException {
final File file = new File(mProjectPath, PROJECT_FILENAME);
/**
* Load the metadata
*/
final FileInputStream fis = new FileInputStream(file);
try {
final List<String> ignoredMediaItems = new ArrayList<String>();
final XmlPullParser parser = Xml.newPullParser();
parser.setInput(fis, "UTF-8");
int eventType = parser.getEventType();
String name;
MediaItem currentMediaItem = null;
Overlay currentOverlay = null;
boolean regenerateProjectThumbnail = false;
while (eventType != XmlPullParser.END_DOCUMENT) {
switch (eventType) {
case XmlPullParser.START_TAG: {
name = parser.getName();
if (TAG_PROJECT.equals(name)) {
mAspectRatio = Integer.parseInt(parser.getAttributeValue("",
ATTR_ASPECT_RATIO));
final boolean mRegenPCM =
Boolean.parseBoolean(parser.getAttributeValue("",
ATTR_REGENERATE_PCM));
mMANativeHelper.setAudioflag(mRegenPCM);
} else if (TAG_MEDIA_ITEM.equals(name)) {
final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
try {
currentMediaItem = parseMediaItem(parser);
mMediaItems.add(currentMediaItem);
} catch (Exception ex) {
Log.w(TAG, "Cannot load media item: " + mediaItemId, ex);
currentMediaItem = null;
// The first media item is invalid; mark the project thumbnail for regeneration
if (mMediaItems.size() == 0) {
regenerateProjectThumbnail = true;
}
// Ignore the media item
ignoredMediaItems.add(mediaItemId);
}
} else if (TAG_TRANSITION.equals(name)) {
try {
final Transition transition = parseTransition(parser,
ignoredMediaItems);
// The transition will be null if the bounding
// media items are ignored
if (transition != null) {
mTransitions.add(transition);
}
} catch (Exception ex) {
Log.w(TAG, "Cannot load transition", ex);
}
} else if (TAG_OVERLAY.equals(name)) {
if (currentMediaItem != null) {
try {
currentOverlay = parseOverlay(parser, currentMediaItem);
currentMediaItem.addOverlay(currentOverlay);
} catch (Exception ex) {
Log.w(TAG, "Cannot load overlay", ex);
}
}
} else if (TAG_OVERLAY_USER_ATTRIBUTES.equals(name)) {
if (currentOverlay != null) {
final int attributesCount = parser.getAttributeCount();
for (int i = 0; i < attributesCount; i++) {
currentOverlay.setUserAttribute(parser.getAttributeName(i),
parser.getAttributeValue(i));
}
}
} else if (TAG_EFFECT.equals(name)) {
if (currentMediaItem != null) {
try {
final Effect effect = parseEffect(parser, currentMediaItem);
currentMediaItem.addEffect(effect);
if (effect instanceof EffectKenBurns) {
final boolean isImageClipGenerated =
Boolean.parseBoolean(parser.getAttributeValue("",
ATTR_IS_IMAGE_CLIP_GENERATED));
if(isImageClipGenerated) {
final String filename = parser.getAttributeValue("",
ATTR_GENERATED_IMAGE_CLIP);
if (new File(filename).exists() == true) {
((MediaImageItem)currentMediaItem).
setGeneratedImageClip(filename);
((MediaImageItem)currentMediaItem).
setRegenerateClip(false);
} else {
((MediaImageItem)currentMediaItem).
setGeneratedImageClip(null);
((MediaImageItem)currentMediaItem).
setRegenerateClip(true);
}
} else {
((MediaImageItem)currentMediaItem).
setGeneratedImageClip(null);
((MediaImageItem)currentMediaItem).
setRegenerateClip(true);
}
}
} catch (Exception ex) {
Log.w(TAG, "Cannot load effect", ex);
}
}
} else if (TAG_AUDIO_TRACK.equals(name)) {
try {
final AudioTrack audioTrack = parseAudioTrack(parser);
addAudioTrack(audioTrack);
} catch (Exception ex) {
Log.w(TAG, "Cannot load audio track", ex);
}
}
break;
}
case XmlPullParser.END_TAG: {
name = parser.getName();
if (TAG_MEDIA_ITEM.equals(name)) {
currentMediaItem = null;
} else if (TAG_OVERLAY.equals(name)) {
currentOverlay = null;
}
break;
}
default: {
break;
}
}
eventType = parser.next();
}
computeTimelineDuration();
// Regenerate project thumbnail
if (regenerateProjectThumbnail) {
generateProjectThumbnail();
regenerateProjectThumbnail = false;
}
} finally {
if (fis != null) {
fis.close();
}
}
}
/**
* Parse the media item
*
* @param parser The parser
* @return The media item
*/
private MediaItem parseMediaItem(XmlPullParser parser) throws IOException {
final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
final int renderingMode = Integer.parseInt(parser.getAttributeValue("",
ATTR_RENDERING_MODE));
final MediaItem currentMediaItem;
if (MediaImageItem.class.getSimpleName().equals(type)) {
final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
currentMediaItem = new MediaImageItem(this, mediaItemId, filename,
durationMs, renderingMode);
} else if (MediaVideoItem.class.getSimpleName().equals(type)) {
final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
final String audioWaveformFilename = parser.getAttributeValue("",
ATTR_AUDIO_WAVEFORM_FILENAME);
currentMediaItem = new MediaVideoItem(this, mediaItemId, filename,
renderingMode, beginMs, endMs, volume, muted, audioWaveformFilename);
final long beginTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final long endTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
((MediaVideoItem)currentMediaItem).setExtractBoundaries(beginTimeMs, endTimeMs);
final int volumePercent = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
((MediaVideoItem)currentMediaItem).setVolume(volumePercent);
} else {
throw new IllegalArgumentException("Unknown media item type: " + type);
}
return currentMediaItem;
}
/**
* Parse the transition
*
* @param parser The parser
* @param ignoredMediaItems The list of ignored media items
*
* @return The transition
*/
private Transition parseTransition(XmlPullParser parser, List<String> ignoredMediaItems) {
final String transitionId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
final int behavior = Integer.parseInt(parser.getAttributeValue("", ATTR_BEHAVIOR));
final String beforeMediaItemId = parser.getAttributeValue("", ATTR_BEFORE_MEDIA_ITEM_ID);
final MediaItem beforeMediaItem;
if (beforeMediaItemId != null) {
if (ignoredMediaItems.contains(beforeMediaItemId)) {
// This transition is ignored
return null;
}
beforeMediaItem = getMediaItem(beforeMediaItemId);
} else {
beforeMediaItem = null;
}
final String afterMediaItemId = parser.getAttributeValue("", ATTR_AFTER_MEDIA_ITEM_ID);
final MediaItem afterMediaItem;
if (afterMediaItemId != null) {
if (ignoredMediaItems.contains(afterMediaItemId)) {
// This transition is ignored
return null;
}
afterMediaItem = getMediaItem(afterMediaItemId);
} else {
afterMediaItem = null;
}
final Transition transition;
if (TransitionAlpha.class.getSimpleName().equals(type)) {
final int blending = Integer.parseInt(parser.getAttributeValue("", ATTR_BLENDING));
final String maskFilename = parser.getAttributeValue("", ATTR_MASK);
final boolean invert = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_INVERT));
transition = new TransitionAlpha(transitionId, afterMediaItem, beforeMediaItem,
durationMs, behavior, maskFilename, blending, invert);
} else if (TransitionCrossfade.class.getSimpleName().equals(type)) {
transition = new TransitionCrossfade(transitionId, afterMediaItem, beforeMediaItem,
durationMs, behavior);
} else if (TransitionSliding.class.getSimpleName().equals(type)) {
final int direction = Integer.parseInt(parser.getAttributeValue("", ATTR_DIRECTION));
transition = new TransitionSliding(transitionId, afterMediaItem, beforeMediaItem,
durationMs, behavior, direction);
} else if (TransitionFadeBlack.class.getSimpleName().equals(type)) {
transition = new TransitionFadeBlack(transitionId, afterMediaItem, beforeMediaItem,
durationMs, behavior);
} else {
throw new IllegalArgumentException("Invalid transition type: " + type);
}
final boolean isTransitionGenerated = Boolean.parseBoolean(parser.getAttributeValue("",
ATTR_IS_TRANSITION_GENERATED));
if (isTransitionGenerated == true) {
final String transitionFile = parser.getAttributeValue("",
ATTR_GENERATED_TRANSITION_CLIP);
if (new File(transitionFile).exists()) {
transition.setFilename(transitionFile);
} else {
transition.setFilename(null);
}
}
// Use the transition
if (beforeMediaItem != null) {
beforeMediaItem.setBeginTransition(transition);
}
if (afterMediaItem != null) {
afterMediaItem.setEndTransition(transition);
}
return transition;
}
/**
* Parse the overlay
*
* @param parser The parser
* @param mediaItem The media item owner
*
* @return The overlay
*/
private Overlay parseOverlay(XmlPullParser parser, MediaItem mediaItem) {
final String overlayId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final Overlay overlay;
if (OverlayFrame.class.getSimpleName().equals(type)) {
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
overlay = new OverlayFrame(mediaItem, overlayId, filename, startTimeMs, durationMs);
} else {
throw new IllegalArgumentException("Invalid overlay type: " + type);
}
final String overlayRgbFileName = parser.getAttributeValue("", ATTR_OVERLAY_RGB_FILENAME);
if (overlayRgbFileName != null) {
((OverlayFrame)overlay).setFilename(overlayRgbFileName);
final int overlayFrameWidth = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_FRAME_WIDTH));
final int overlayFrameHeight = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_FRAME_HEIGHT));
((OverlayFrame)overlay).setOverlayFrameWidth(overlayFrameWidth);
((OverlayFrame)overlay).setOverlayFrameHeight(overlayFrameHeight);
final int resizedRGBFrameWidth = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_RESIZED_RGB_FRAME_WIDTH));
final int resizedRGBFrameHeight = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_RESIZED_RGB_FRAME_HEIGHT));
((OverlayFrame)overlay).setResizedRGBSize(resizedRGBFrameWidth, resizedRGBFrameHeight);
}
return overlay;
}
/**
* Parse the effect
*
* @param parser The parser
* @param mediaItem The media item owner
*
* @return The effect
*/
private Effect parseEffect(XmlPullParser parser, MediaItem mediaItem) {
final String effectId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final Effect effect;
if (EffectColor.class.getSimpleName().equals(type)) {
final int colorEffectType = Integer.parseInt(parser.getAttributeValue("",
ATTR_COLOR_EFFECT_TYPE));
final int color;
if (colorEffectType == EffectColor.TYPE_COLOR
|| colorEffectType == EffectColor.TYPE_GRADIENT) {
color = Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_VALUE));
} else {
color = 0;
}
effect = new EffectColor(mediaItem, effectId, startTimeMs,
durationMs, colorEffectType, color);
} else if (EffectKenBurns.class.getSimpleName().equals(type)) {
final Rect startRect = new Rect(
Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_LEFT)),
Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_TOP)),
Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_RIGHT)),
Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_BOTTOM)));
final Rect endRect = new Rect(
Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_LEFT)),
Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_TOP)),
Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_RIGHT)),
Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_BOTTOM)));
effect = new EffectKenBurns(mediaItem, effectId, startRect, endRect,
startTimeMs, durationMs);
} else {
throw new IllegalArgumentException("Invalid effect type: " + type);
}
return effect;
}
/**
* Parse the audio track
*
* @param parser The parser
*
* @return The audio track
*/
private AudioTrack parseAudioTrack(XmlPullParser parser) throws IOException {
final String audioTrackId = parser.getAttributeValue("", ATTR_ID);
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_START_TIME));
final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_LOOP));
final boolean duckingEnabled = Boolean.parseBoolean(
parser.getAttributeValue("", ATTR_DUCK_ENABLED));
final int duckThreshold = Integer.parseInt(
parser.getAttributeValue("", ATTR_DUCK_THRESHOLD));
final int duckedTrackVolume = Integer.parseInt(parser.getAttributeValue("",
ATTR_DUCKED_TRACK_VOLUME));
final String waveformFilename = parser.getAttributeValue("", ATTR_AUDIO_WAVEFORM_FILENAME);
final AudioTrack audioTrack = new AudioTrack(this, audioTrackId,
filename, startTimeMs,
beginMs, endMs, loop,
volume, muted,
duckingEnabled,
duckThreshold,
duckedTrackVolume,
waveformFilename);
return audioTrack;
}
/*
* {@inheritDoc}
*/
public void save() throws IOException {
final XmlSerializer serializer = Xml.newSerializer();
final StringWriter writer = new StringWriter();
serializer.setOutput(writer);
serializer.startDocument("UTF-8", true);
serializer.startTag("", TAG_PROJECT);
serializer.attribute("",
ATTR_ASPECT_RATIO, Integer.toString(mAspectRatio));
serializer.attribute("", ATTR_REGENERATE_PCM,
Boolean.toString(mMANativeHelper.getAudioflag()));
serializer.startTag("", TAG_MEDIA_ITEMS);
for (MediaItem mediaItem : mMediaItems) {
serializer.startTag("", TAG_MEDIA_ITEM);
serializer.attribute("", ATTR_ID, mediaItem.getId());
serializer.attribute("", ATTR_TYPE,
mediaItem.getClass().getSimpleName());
serializer.attribute("", ATTR_FILENAME, mediaItem.getFilename());
serializer.attribute("", ATTR_RENDERING_MODE, Integer.toString(
mediaItem.getRenderingMode()));
if (mediaItem instanceof MediaVideoItem) {
final MediaVideoItem mvi = (MediaVideoItem)mediaItem;
serializer
.attribute("", ATTR_BEGIN_TIME,
Long.toString(mvi.getBoundaryBeginTime()));
serializer.attribute("", ATTR_END_TIME,
Long.toString(mvi.getBoundaryEndTime()));
serializer.attribute("", ATTR_VOLUME,
Integer.toString(mvi.getVolume()));
serializer.attribute("", ATTR_MUTED,
Boolean.toString(mvi.isMuted()));
if (mvi.getAudioWaveformFilename() != null) {
serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
mvi.getAudioWaveformFilename());
}
} else if (mediaItem instanceof MediaImageItem) {
serializer.attribute("", ATTR_DURATION,
Long.toString(mediaItem.getTimelineDuration()));
}
final List<Overlay> overlays = mediaItem.getAllOverlays();
if (overlays.size() > 0) {
serializer.startTag("", TAG_OVERLAYS);
for (Overlay overlay : overlays) {
serializer.startTag("", TAG_OVERLAY);
serializer.attribute("", ATTR_ID, overlay.getId());
serializer.attribute("",
ATTR_TYPE, overlay.getClass().getSimpleName());
serializer.attribute("", ATTR_BEGIN_TIME,
Long.toString(overlay.getStartTime()));
serializer.attribute("", ATTR_DURATION,
Long.toString(overlay.getDuration()));
if (overlay instanceof OverlayFrame) {
final OverlayFrame overlayFrame = (OverlayFrame)overlay;
overlayFrame.save(getPath());
if (overlayFrame.getBitmapImageFileName() != null) {
serializer.attribute("", ATTR_FILENAME,
overlayFrame.getBitmapImageFileName());
}
if (overlayFrame.getFilename() != null) {
serializer.attribute("",
ATTR_OVERLAY_RGB_FILENAME,
overlayFrame.getFilename());
serializer.attribute("", ATTR_OVERLAY_FRAME_WIDTH,
Integer.toString(overlayFrame.getOverlayFrameWidth()));
serializer.attribute("", ATTR_OVERLAY_FRAME_HEIGHT,
Integer.toString(overlayFrame.getOverlayFrameHeight()));
serializer.attribute("", ATTR_OVERLAY_RESIZED_RGB_FRAME_WIDTH,
Integer.toString(overlayFrame.getResizedRGBSizeWidth()));
serializer.attribute("", ATTR_OVERLAY_RESIZED_RGB_FRAME_HEIGHT,
Integer.toString(overlayFrame.getResizedRGBSizeHeight()));
}
}
/**
* Save the user attributes
*/
serializer.startTag("", TAG_OVERLAY_USER_ATTRIBUTES);
final Map<String, String> userAttributes = overlay.getUserAttributes();
for (String name : userAttributes.keySet()) {
final String value = userAttributes.get(name);
if (value != null) {
serializer.attribute("", name, value);
}
}
serializer.endTag("", TAG_OVERLAY_USER_ATTRIBUTES);
serializer.endTag("", TAG_OVERLAY);
}
serializer.endTag("", TAG_OVERLAYS);
}
final List<Effect> effects = mediaItem.getAllEffects();
if (effects.size() > 0) {
serializer.startTag("", TAG_EFFECTS);
for (Effect effect : effects) {
serializer.startTag("", TAG_EFFECT);
serializer.attribute("", ATTR_ID, effect.getId());
serializer.attribute("",
ATTR_TYPE, effect.getClass().getSimpleName());
serializer.attribute("", ATTR_BEGIN_TIME,
Long.toString(effect.getStartTime()));
serializer.attribute("", ATTR_DURATION,
Long.toString(effect.getDuration()));
if (effect instanceof EffectColor) {
final EffectColor colorEffect = (EffectColor)effect;
serializer.attribute("", ATTR_COLOR_EFFECT_TYPE,
Integer.toString(colorEffect.getType()));
if (colorEffect.getType() == EffectColor.TYPE_COLOR ||
colorEffect.getType() == EffectColor.TYPE_GRADIENT) {
serializer.attribute("", ATTR_COLOR_EFFECT_VALUE,
Integer.toString(colorEffect.getColor()));
}
} else if (effect instanceof EffectKenBurns) {
final Rect startRect = ((EffectKenBurns)effect).getStartRect();
serializer.attribute("", ATTR_START_RECT_LEFT,
Integer.toString(startRect.left));
serializer.attribute("", ATTR_START_RECT_TOP,
Integer.toString(startRect.top));
serializer.attribute("", ATTR_START_RECT_RIGHT,
Integer.toString(startRect.right));
serializer.attribute("", ATTR_START_RECT_BOTTOM,
Integer.toString(startRect.bottom));
final Rect endRect = ((EffectKenBurns)effect).getEndRect();
serializer.attribute("", ATTR_END_RECT_LEFT,
Integer.toString(endRect.left));
serializer.attribute("", ATTR_END_RECT_TOP,
Integer.toString(endRect.top));
serializer.attribute("", ATTR_END_RECT_RIGHT,
Integer.toString(endRect.right));
serializer.attribute("", ATTR_END_RECT_BOTTOM,
Integer.toString(endRect.bottom));
final MediaItem mItem = effect.getMediaItem();
if(((MediaImageItem)mItem).getGeneratedImageClip() != null) {
serializer.attribute("", ATTR_IS_IMAGE_CLIP_GENERATED,
Boolean.toString(true));
serializer.attribute("", ATTR_GENERATED_IMAGE_CLIP,
((MediaImageItem)mItem).getGeneratedImageClip());
} else {
serializer.attribute("", ATTR_IS_IMAGE_CLIP_GENERATED,
Boolean.toString(false));
}
}
serializer.endTag("", TAG_EFFECT);
}
serializer.endTag("", TAG_EFFECTS);
}
serializer.endTag("", TAG_MEDIA_ITEM);
}
serializer.endTag("", TAG_MEDIA_ITEMS);
serializer.startTag("", TAG_TRANSITIONS);
for (Transition transition : mTransitions) {
serializer.startTag("", TAG_TRANSITION);
serializer.attribute("", ATTR_ID, transition.getId());
serializer.attribute("", ATTR_TYPE, transition.getClass().getSimpleName());
serializer.attribute("", ATTR_DURATION, Long.toString(transition.getDuration()));
serializer.attribute("", ATTR_BEHAVIOR, Integer.toString(transition.getBehavior()));
serializer.attribute("", ATTR_IS_TRANSITION_GENERATED,
Boolean.toString(transition.isGenerated()));
if (transition.isGenerated() == true) {
serializer.attribute("", ATTR_GENERATED_TRANSITION_CLIP, transition.mFilename);
}
final MediaItem afterMediaItem = transition.getAfterMediaItem();
if (afterMediaItem != null) {
serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID, afterMediaItem.getId());
}
final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
if (beforeMediaItem != null) {
serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID, beforeMediaItem.getId());
}
if (transition instanceof TransitionSliding) {
serializer.attribute("", ATTR_DIRECTION,
Integer.toString(((TransitionSliding)transition).getDirection()));
} else if (transition instanceof TransitionAlpha) {
TransitionAlpha ta = (TransitionAlpha)transition;
serializer.attribute("", ATTR_BLENDING,
Integer.toString(ta.getBlendingPercent()));
serializer.attribute("", ATTR_INVERT,
Boolean.toString(ta.isInvert()));
if (ta.getMaskFilename() != null) {
serializer.attribute("", ATTR_MASK, ta.getMaskFilename());
}
}
serializer.endTag("", TAG_TRANSITION);
}
serializer.endTag("", TAG_TRANSITIONS);
serializer.startTag("", TAG_AUDIO_TRACKS);
for (AudioTrack at : mAudioTracks) {
serializer.startTag("", TAG_AUDIO_TRACK);
serializer.attribute("", ATTR_ID, at.getId());
serializer.attribute("", ATTR_FILENAME, at.getFilename());
serializer.attribute("", ATTR_START_TIME, Long.toString(at.getStartTime()));
serializer.attribute("", ATTR_BEGIN_TIME, Long.toString(at.getBoundaryBeginTime()));
serializer.attribute("", ATTR_END_TIME, Long.toString(at.getBoundaryEndTime()));
serializer.attribute("", ATTR_VOLUME, Integer.toString(at.getVolume()));
serializer.attribute("", ATTR_DUCK_ENABLED,
Boolean.toString(at.isDuckingEnabled()));
serializer.attribute("", ATTR_DUCKED_TRACK_VOLUME,
Integer.toString(at.getDuckedTrackVolume()));
serializer.attribute("", ATTR_DUCK_THRESHOLD,
Integer.toString(at.getDuckingThreshhold()));
serializer.attribute("", ATTR_MUTED, Boolean.toString(at.isMuted()));
serializer.attribute("", ATTR_LOOP, Boolean.toString(at.isLooping()));
if (at.getAudioWaveformFilename() != null) {
serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
at.getAudioWaveformFilename());
}
serializer.endTag("", TAG_AUDIO_TRACK);
}
serializer.endTag("", TAG_AUDIO_TRACKS);
serializer.endTag("", TAG_PROJECT);
serializer.endDocument();
/**
* Save the metadata XML file
*/
final FileOutputStream out = new FileOutputStream(new File(getPath(),
PROJECT_FILENAME));
out.write(writer.toString().getBytes());
out.flush();
out.close();
}
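/*
* Illustrative outline (not normative): the document produced by save() above
* has roughly the following shape, where the element and attribute names stand
* for the TAG_* and ATTR_* constants used in the serializer calls; their
* literal string values are defined earlier in this file.
*
*   <TAG_PROJECT ATTR_ASPECT_RATIO ATTR_REGENERATE_PCM>
*     <TAG_MEDIA_ITEMS>
*       <TAG_MEDIA_ITEM ...>
*         <TAG_OVERLAYS> <TAG_OVERLAY .../> </TAG_OVERLAYS>
*         <TAG_EFFECTS> <TAG_EFFECT .../> </TAG_EFFECTS>
*       </TAG_MEDIA_ITEM>
*     </TAG_MEDIA_ITEMS>
*     <TAG_TRANSITIONS> <TAG_TRANSITION .../> </TAG_TRANSITIONS>
*     <TAG_AUDIO_TRACKS> <TAG_AUDIO_TRACK .../> </TAG_AUDIO_TRACKS>
*   </TAG_PROJECT>
*/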
/*
* {@inheritDoc}
*/
public void setAspectRatio(int aspectRatio) {
mAspectRatio = aspectRatio;
/**
* Invalidate all transitions
*/
mMANativeHelper.setGeneratePreview(true);
for (Transition transition : mTransitions) {
transition.invalidate();
}
final Iterator<MediaItem> it = mMediaItems.iterator();
while (it.hasNext()) {
final MediaItem t = it.next();
List<Overlay> overlayList = t.getAllOverlays();
for (Overlay overlay : overlayList) {
((OverlayFrame)overlay).invalidateGeneratedFiles();
}
}
}
/*
* {@inheritDoc}
*/
public void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
boolean loop, int callbackAfterFrameCount,
PreviewProgressListener listener) {
if (surfaceHolder == null) {
throw new IllegalArgumentException("Surface holder is null");
}
final Surface surface = surfaceHolder.getSurface();
if (surface == null) {
throw new IllegalArgumentException("Surface could not be retrieved from surface holder");
}
if (surface.isValid() == false) {
throw new IllegalStateException("Surface is not valid");
}
if (listener == null) {
throw new IllegalArgumentException("Listener is null");
}
if (fromMs >= mDurationMs) {
throw new IllegalArgumentException("Requested start time exceeds the timeline duration");
}
if (fromMs < 0) {
throw new IllegalArgumentException("Requested start time is negative");
}
boolean semAcquireDone = false;
if (!mPreviewInProgress) {
try{
semAcquireDone = lock(ENGINE_ACCESS_MAX_TIMEOUT_MS);
if (semAcquireDone == false) {
throw new IllegalStateException("Timeout waiting for semaphore");
}
if (mMANativeHelper == null) {
throw new IllegalStateException("The video editor is not initialized");
}
if (mMediaItems.size() > 0) {
mPreviewInProgress = true;
mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions,
mAudioTracks, null);
mMANativeHelper.doPreview(surface, fromMs, toMs, loop,
callbackAfterFrameCount, listener);
}
/**
* The lock is released when the preview completes, via stopPreview()
*/
} catch (InterruptedException ex) {
Log.w(TAG, "The thread was interrupted", new Throwable());
throw new IllegalStateException("The thread was interrupted");
}
} else {
throw new IllegalStateException("Preview already in progress");
}
}
/*
* {@inheritDoc}
*/
public long stopPreview() {
long result = 0;
if (mPreviewInProgress) {
try {
result = mMANativeHelper.stopPreview();
/**
* The lock acquired in startPreview() is released in the finally block below
*/
} finally {
mPreviewInProgress = false;
unlock();
}
return result;
} else {
return 0;
}
}
/*
* Remove transitions associated with the specified media item
*
* @param mediaItem The media item
*/
private void removeAdjacentTransitions(MediaItem mediaItem) {
final Transition beginTransition = mediaItem.getBeginTransition();
if (beginTransition != null) {
if (beginTransition.getAfterMediaItem() != null) {
beginTransition.getAfterMediaItem().setEndTransition(null);
}
beginTransition.invalidate();
mTransitions.remove(beginTransition);
}
final Transition endTransition = mediaItem.getEndTransition();
if (endTransition != null) {
if (endTransition.getBeforeMediaItem() != null) {
endTransition.getBeforeMediaItem().setBeginTransition(null);
}
endTransition.invalidate();
mTransitions.remove(endTransition);
}
mediaItem.setBeginTransition(null);
mediaItem.setEndTransition(null);
}
/**
* Remove the transition before this media item
*
* @param index The media item index
*/
private void removeTransitionBefore(int index) {
final MediaItem mediaItem = mMediaItems.get(index);
final Iterator<Transition> it = mTransitions.iterator();
while (it.hasNext()) {
Transition t = it.next();
if (t.getBeforeMediaItem() == mediaItem) {
mMANativeHelper.setGeneratePreview(true);
it.remove();
t.invalidate();
mediaItem.setBeginTransition(null);
if (index > 0) {
mMediaItems.get(index - 1).setEndTransition(null);
}
break;
}
}
}
/**
* Remove the transition after the media item at the specified index
*
* @param index The media item index
*/
private void removeTransitionAfter(int index) {
final MediaItem mediaItem = mMediaItems.get(index);
final Iterator<Transition> it = mTransitions.iterator();
while (it.hasNext()) {
Transition t = it.next();
if (t.getAfterMediaItem() == mediaItem) {
mMANativeHelper.setGeneratePreview(true);
it.remove();
t.invalidate();
mediaItem.setEndTransition(null);
/**
* Invalidate the reference in the next media item
*/
if (index < mMediaItems.size() - 1) {
mMediaItems.get(index + 1).setBeginTransition(null);
}
break;
}
}
}
/**
* Compute the duration
*/
private void computeTimelineDuration() {
mDurationMs = 0;
final int mediaItemsCount = mMediaItems.size();
for (int i = 0; i < mediaItemsCount; i++) {
final MediaItem mediaItem = mMediaItems.get(i);
mDurationMs += mediaItem.getTimelineDuration();
if (mediaItem.getEndTransition() != null) {
if (i < mediaItemsCount - 1) {
mDurationMs -= mediaItem.getEndTransition().getDuration();
}
}
}
}
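/*
* Worked example (illustrative): with three media items of 5000 ms, 4000 ms
* and 3000 ms on the storyboard and a 1000 ms transition attached as the end
* transition of the first item, the loop above yields
* (5000 - 1000) + 4000 + 3000 = 11000 ms, because a transition overlaps the
* two clips it joins. An end transition on the last item is not subtracted.
*/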
/*
* Generate the project thumbnail
*/
private void generateProjectThumbnail() {
/*
* If a thumbnail already exists, then delete it first
*/
if ((new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).exists()) {
(new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).delete();
}
/*
* Generate a new thumbnail for the project from first media Item
*/
if (mMediaItems.size() > 0) {
MediaItem mI = mMediaItems.get(0);
/*
* Keep aspect ratio of the image
*/
int height = 480;
int width = mI.getWidth() * height / mI.getHeight();
Bitmap projectBitmap = null;
String filename = mI.getFilename();
if (mI instanceof MediaVideoItem) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(filename);
Bitmap bitmap = retriever.getFrameAtTime();
retriever.release();
retriever = null;
if (bitmap == null) {
String msg = "Thumbnail extraction from " +
filename + " failed";
throw new IllegalArgumentException(msg);
}
// Resize the thumbnail to the target size
projectBitmap =
Bitmap.createScaledBitmap(bitmap, width, height, true);
} else {
try {
projectBitmap = mI.getThumbnail(width, height, 500);
} catch (IllegalArgumentException e) {
String msg = "Project thumbnail extraction from " +
filename + " failed";
throw new IllegalArgumentException(msg);
} catch (IOException e) {
String msg = "IO Error creating project thumbnail";
throw new IllegalArgumentException(msg);
}
}
try {
FileOutputStream stream = new FileOutputStream(mProjectPath + "/"
+ THUMBNAIL_FILENAME);
projectBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
stream.flush();
stream.close();
} catch (IOException e) {
throw new IllegalArgumentException ("Error creating project thumbnail");
} finally {
projectBitmap.recycle();
}
}
}
/**
* Clears the preview surface
*
* @param surfaceHolder SurfaceHolder where the preview is rendered
* and needs to be cleared.
*/
public void clearSurface(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalArgumentException("Invalid surface holder");
}
final Surface surface = surfaceHolder.getSurface();
if (surface == null) {
throw new IllegalArgumentException("Surface could not be retrieved from surface holder");
}
if (surface.isValid() == false) {
throw new IllegalStateException("Surface is not valid");
}
if (mMANativeHelper != null) {
mMANativeHelper.clearPreviewSurface(surface);
} else {
Log.w(TAG, "Native helper was not ready!");
}
}
/**
* Grab the semaphore which arbitrates access to the editor
*
* @throws InterruptedException
*/
private void lock() throws InterruptedException {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "lock: grabbing semaphore", new Throwable());
}
mLock.acquire();
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "lock: grabbed semaphore");
}
}
/**
* Tries to grab, within the specified timeout, the semaphore which arbitrates access to the editor
*
* @param timeoutMs timeout in milliseconds
*
* @return true if the semaphore was acquired, false otherwise
* @throws InterruptedException
*/
private boolean lock(long timeoutMs) throws InterruptedException {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "lock: grabbing semaphore with timeout " + timeoutMs, new Throwable());
}
boolean acquireSem = mLock.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS);
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "lock: grabbed semaphore status " + acquireSem);
}
return acquireSem;
}
/**
* Release the semaphore which arbitrates access to the editor
*/
private void unlock() {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "unlock: releasing semaphore");
}
mLock.release();
}
/**
* Dumps the heap memory usage information to file
*/
private static void dumpHeap (String filename) throws Exception {
/* Cleanup as much as possible before dump
*/
System.gc();
System.runFinalization();
Thread.sleep(1000);
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
String extDir =
Environment.getExternalStorageDirectory().toString();
/* If dump file already exists, then delete it first
*/
if ((new File(extDir + "/" + filename + ".dump")).exists()) {
(new File(extDir + "/" + filename + ".dump")).delete();
}
/* Dump native heap
*/
FileOutputStream ost =
new FileOutputStream(extDir + "/" + filename + ".dump");
Debug.dumpNativeHeap(ost.getFD());
ost.close();
}
}
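/*
* Note (assumption, not part of the original class): the constructor enables
* this heap dumping only when the system property "libc.debug.malloc" is set
* to "1", which on engineering builds of this Android generation would
* typically be done with something like:
*
*   adb shell setprop libc.debug.malloc 1
*
* followed by restarting the process. The dumps written by dumpHeap() end up
* as HeapAtStart.dump and HeapAtEnd.dump on external storage.
*/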
}
| haikuowuya/android_system_code | src/android/media/videoeditor/VideoEditorImpl.java | Java | apache-2.0 | 78,397 |
package io.pivotal.gemfire.spark.connector.javaapi
import org.apache.spark.api.java.{JavaPairRDD, JavaRDD}
import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStream}
import scala.reflect.ClassTag
import scala.collection.JavaConversions._
/**
* A helper class to make it possible to access components written in Scala from Java code.
*/
private[connector] object JavaAPIHelper {
/** Returns a `ClassTag` of a given runtime class. */
def getClassTag[T](clazz: Class[T]): ClassTag[T] = ClassTag(clazz)
/**
* Produces a ClassTag[T], which is actually just a casted ClassTag[AnyRef].
* see JavaSparkContext.fakeClassTag in Spark for more info.
*/
def fakeClassTag[T]: ClassTag[T] = ClassTag.AnyRef.asInstanceOf[ClassTag[T]]
/** Converts a Java `Properties` to a Scala immutable `Map[String, String]`. */
def propertiesToScalaMap[K, V](props: java.util.Properties): Map[String, String] =
Map(props.toSeq: _*)
/** convert a JavaRDD[(K,V)] to JavaPairRDD[K,V] */
def toJavaPairRDD[K, V](rdd: JavaRDD[(K, V)]): JavaPairRDD[K, V] =
JavaPairRDD.fromJavaRDD(rdd)
/** convert a JavaDStream[(K,V)] to JavaPairDStream[K,V] */
def toJavaPairDStream[K, V](ds: JavaDStream[(K, V)]): JavaPairDStream[K, V] =
JavaPairDStream.fromJavaDStream(ds)
/** An empty Map[String, String] used as the default opConf. */
val emptyStrStrMap: Map[String, String] = Map.empty
}
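/*
* Illustrative sketch (not part of this object): the Java-facing wrapper
* classes in this javaapi package are the intended callers. Thanks to the
* static forwarders generated for the object, a wrapper written in Java could
* use the helper roughly as below (the wrapper method name is hypothetical):
*
*   // Java
*   public static <K, V> JavaPairRDD<K, V> asPairRDD(
*       JavaRDD<scala.Tuple2<K, V>> rdd) {
*     return JavaAPIHelper.toJavaPairRDD(rdd);
*   }
*/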
| nchandrappa/incubator-geode | gemfire-spark-connector/gemfire-spark-connector/src/main/scala/io/pivotal/gemfire/spark/connector/javaapi/JavaAPIHelper.scala | Scala | apache-2.0 | 1,407 |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.examples.snapshot;
import com.gemstone.gemfire.pdx.PdxReader;
import com.gemstone.gemfire.pdx.PdxSerializable;
import com.gemstone.gemfire.pdx.PdxWriter;
public class MyObjectPdxSerializable extends MyObject implements PdxSerializable {
public MyObjectPdxSerializable() {
}
public MyObjectPdxSerializable(long number, String s) {
super(number, s);
}
@Override
public void toData(PdxWriter writer) {
writer.writeLong("f1", f1);
writer.writeString("f2", f2);
}
@Override
public void fromData(PdxReader reader) {
f1 = reader.readLong("f1");
f2 = reader.readString("f2");
}
}
| ysung-pivotal/incubator-geode | gemfire-core/src/test/java/com/examples/snapshot/MyObjectPdxSerializable.java | Java | apache-2.0 | 1,052 |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.httpd.rpc.patch;
import com.google.gerrit.common.data.ApprovalSummary;
import com.google.gerrit.common.data.ApprovalSummarySet;
import com.google.gerrit.common.data.ApprovalTypes;
import com.google.gerrit.common.data.ChangeDetail;
import com.google.gerrit.common.data.PatchDetailService;
import com.google.gerrit.common.data.PatchScript;
import com.google.gerrit.common.data.ReviewResult;
import com.google.gerrit.common.data.ReviewerResult;
import com.google.gerrit.common.errors.NoSuchEntityException;
import com.google.gerrit.httpd.rpc.BaseServiceImplementation;
import com.google.gerrit.httpd.rpc.Handler;
import com.google.gerrit.httpd.rpc.changedetail.ChangeDetailFactory;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountDiffPreference;
import com.google.gerrit.reviewdb.client.AccountPatchReview;
import com.google.gerrit.reviewdb.client.ApprovalCategory;
import com.google.gerrit.reviewdb.client.ApprovalCategoryValue;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Patch;
import com.google.gerrit.reviewdb.client.PatchLineComment;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Patch.Key;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.account.AccountInfoCacheFactory;
import com.google.gerrit.server.changedetail.DeleteDraftPatchSet;
import com.google.gerrit.server.patch.PatchSetInfoNotAvailableException;
import com.google.gerrit.server.patch.PublishComments;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.NoSuchChangeException;
import com.google.gerrit.server.workflow.FunctionState;
import com.google.gwtjsonrpc.common.AsyncCallback;
import com.google.gwtjsonrpc.common.VoidResult;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
class PatchDetailServiceImpl extends BaseServiceImplementation implements
PatchDetailService {
private final ApprovalTypes approvalTypes;
private final AccountInfoCacheFactory.Factory accountInfoCacheFactory;
private final AddReviewerHandler.Factory addReviewerHandlerFactory;
private final ChangeControl.Factory changeControlFactory;
private final DeleteDraftPatchSet.Factory deleteDraftPatchSetFactory;
private final RemoveReviewerHandler.Factory removeReviewerHandlerFactory;
private final FunctionState.Factory functionStateFactory;
private final PublishComments.Factory publishCommentsFactory;
private final PatchScriptFactory.Factory patchScriptFactoryFactory;
private final SaveDraft.Factory saveDraftFactory;
private final ChangeDetailFactory.Factory changeDetailFactory;
@Inject
PatchDetailServiceImpl(final Provider<ReviewDb> schema,
final Provider<CurrentUser> currentUser,
final ApprovalTypes approvalTypes,
final AccountInfoCacheFactory.Factory accountInfoCacheFactory,
final AddReviewerHandler.Factory addReviewerHandlerFactory,
final RemoveReviewerHandler.Factory removeReviewerHandlerFactory,
final ChangeControl.Factory changeControlFactory,
final DeleteDraftPatchSet.Factory deleteDraftPatchSetFactory,
final FunctionState.Factory functionStateFactory,
final PatchScriptFactory.Factory patchScriptFactoryFactory,
final PublishComments.Factory publishCommentsFactory,
final SaveDraft.Factory saveDraftFactory,
final ChangeDetailFactory.Factory changeDetailFactory) {
super(schema, currentUser);
this.approvalTypes = approvalTypes;
this.accountInfoCacheFactory = accountInfoCacheFactory;
this.addReviewerHandlerFactory = addReviewerHandlerFactory;
this.removeReviewerHandlerFactory = removeReviewerHandlerFactory;
this.changeControlFactory = changeControlFactory;
this.deleteDraftPatchSetFactory = deleteDraftPatchSetFactory;
this.functionStateFactory = functionStateFactory;
this.patchScriptFactoryFactory = patchScriptFactoryFactory;
this.publishCommentsFactory = publishCommentsFactory;
this.saveDraftFactory = saveDraftFactory;
this.changeDetailFactory = changeDetailFactory;
}
public void patchScript(final Patch.Key patchKey, final PatchSet.Id psa,
final PatchSet.Id psb, final AccountDiffPreference dp,
final AsyncCallback<PatchScript> callback) {
if (psb == null) {
callback.onFailure(new NoSuchEntityException());
return;
}
patchScriptFactoryFactory.create(patchKey, psa, psb, dp).to(callback);
}
public void saveDraft(final PatchLineComment comment,
final AsyncCallback<PatchLineComment> callback) {
saveDraftFactory.create(comment).to(callback);
}
public void deleteDraft(final PatchLineComment.Key commentKey,
final AsyncCallback<VoidResult> callback) {
run(callback, new Action<VoidResult>() {
public VoidResult run(ReviewDb db) throws OrmException, Failure {
Change.Id id = commentKey.getParentKey().getParentKey().getParentKey();
db.changes().beginTransaction(id);
try {
final PatchLineComment comment = db.patchComments().get(commentKey);
if (comment == null) {
throw new Failure(new NoSuchEntityException());
}
if (!getAccountId().equals(comment.getAuthor())) {
throw new Failure(new NoSuchEntityException());
}
if (comment.getStatus() != PatchLineComment.Status.DRAFT) {
throw new Failure(new IllegalStateException("Comment published"));
}
db.patchComments().delete(Collections.singleton(comment));
db.commit();
return VoidResult.INSTANCE;
} finally {
db.rollback();
}
}
});
}
public void deleteDraftPatchSet(final PatchSet.Id psid,
final AsyncCallback<ChangeDetail> callback) {
run(callback, new Action<ChangeDetail>() {
public ChangeDetail run(ReviewDb db) throws OrmException, Failure {
ReviewResult result = null;
try {
result = deleteDraftPatchSetFactory.create(psid).call();
if (result.getErrors().size() > 0) {
throw new Failure(new NoSuchEntityException());
}
if (result.getChangeId() == null) {
// the change was deleted because the draft patch set that was
// deleted was the only patch set in the change
return null;
}
return changeDetailFactory.create(result.getChangeId()).call();
} catch (NoSuchChangeException e) {
throw new Failure(new NoSuchChangeException(result.getChangeId()));
} catch (NoSuchEntityException e) {
throw new Failure(e);
} catch (PatchSetInfoNotAvailableException e) {
throw new Failure(e);
} catch (RepositoryNotFoundException e) {
throw new Failure(e);
} catch (IOException e) {
throw new Failure(e);
}
}
});
}
public void publishComments(final PatchSet.Id psid, final String msg,
final Set<ApprovalCategoryValue.Id> tags,
final AsyncCallback<VoidResult> cb) {
Handler.wrap(publishCommentsFactory.create(psid, msg, tags, false)).to(cb);
}
/**
* Update the reviewed status for the file by user {@code account}.
*/
public void setReviewedByCurrentUser(final Key patchKey,
final boolean reviewed, AsyncCallback<VoidResult> callback) {
run(callback, new Action<VoidResult>() {
public VoidResult run(ReviewDb db) throws OrmException {
Account.Id account = getAccountId();
AccountPatchReview.Key key =
new AccountPatchReview.Key(patchKey, account);
db.accounts().beginTransaction(account);
try {
AccountPatchReview apr = db.accountPatchReviews().get(key);
if (apr == null && reviewed) {
db.accountPatchReviews().insert(
Collections.singleton(new AccountPatchReview(patchKey, account)));
} else if (apr != null && !reviewed) {
db.accountPatchReviews().delete(Collections.singleton(apr));
}
db.commit();
return VoidResult.INSTANCE;
} finally {
db.rollback();
}
}
});
}
public void addReviewers(final Change.Id id, final List<String> reviewers,
final boolean confirmed, final AsyncCallback<ReviewerResult> callback) {
addReviewerHandlerFactory.create(id, reviewers, confirmed).to(callback);
}
public void removeReviewer(final Change.Id id, final Account.Id reviewerId,
final AsyncCallback<ReviewerResult> callback) {
removeReviewerHandlerFactory.create(id, reviewerId).to(callback);
}
public void userApprovals(final Set<Change.Id> cids, final Account.Id aid,
final AsyncCallback<ApprovalSummarySet> callback) {
run(callback, new Action<ApprovalSummarySet>() {
public ApprovalSummarySet run(ReviewDb db) throws OrmException {
final Map<Change.Id, ApprovalSummary> approvals =
new HashMap<Change.Id, ApprovalSummary>();
final AccountInfoCacheFactory aicFactory =
accountInfoCacheFactory.create();
aicFactory.want(aid);
for (final Change.Id id : cids) {
try {
final ChangeControl cc = changeControlFactory.validateFor(id);
final Change change = cc.getChange();
final PatchSet.Id ps_id = change.currentPatchSetId();
final Map<ApprovalCategory.Id, PatchSetApproval> psas =
new HashMap<ApprovalCategory.Id, PatchSetApproval>();
final FunctionState fs =
functionStateFactory.create(cc, ps_id, psas.values());
for (final PatchSetApproval ca : db.patchSetApprovals()
.byPatchSetUser(ps_id, aid)) {
final ApprovalCategory.Id category = ca.getCategoryId();
if (ApprovalCategory.SUBMIT.equals(category)) {
continue;
}
if (change.getStatus().isOpen()) {
fs.normalize(approvalTypes.byId(category), ca);
}
if (ca.getValue() == 0) {
continue;
}
psas.put(category, ca);
}
approvals.put(id, new ApprovalSummary(psas.values()));
} catch (NoSuchChangeException nsce) {
/*
* The user has no access to see this change, so we simply do not
* provide any details about it.
*/
}
}
return new ApprovalSummarySet(aicFactory.create(), approvals);
}
});
}
public void strongestApprovals(final Set<Change.Id> cids,
final AsyncCallback<ApprovalSummarySet> callback) {
run(callback, new Action<ApprovalSummarySet>() {
public ApprovalSummarySet run(ReviewDb db) throws OrmException {
final Map<Change.Id, ApprovalSummary> approvals =
new HashMap<Change.Id, ApprovalSummary>();
final AccountInfoCacheFactory aicFactory =
accountInfoCacheFactory.create();
for (final Change.Id id : cids) {
try {
final ChangeControl cc = changeControlFactory.validateFor(id);
final Change change = cc.getChange();
final PatchSet.Id ps_id = change.currentPatchSetId();
final Map<ApprovalCategory.Id, PatchSetApproval> psas =
new HashMap<ApprovalCategory.Id, PatchSetApproval>();
final FunctionState fs =
functionStateFactory.create(cc, ps_id, psas.values());
for (PatchSetApproval ca : db.patchSetApprovals().byPatchSet(ps_id)) {
final ApprovalCategory.Id category = ca.getCategoryId();
if (ApprovalCategory.SUBMIT.equals(category)) {
continue;
}
if (change.getStatus().isOpen()) {
fs.normalize(approvalTypes.byId(category), ca);
}
if (ca.getValue() == 0) {
continue;
}
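// Keep only the strongest approval per category: a larger absolute value wins,
// and on a tie the more negative (blocking) value wins.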
boolean keep = true;
if (psas.containsKey(category)) {
final short oldValue = psas.get(category).getValue();
final short newValue = ca.getValue();
keep =
(Math.abs(oldValue) < Math.abs(newValue))
|| ((Math.abs(oldValue) == Math.abs(newValue) && (newValue < oldValue)));
}
if (keep) {
aicFactory.want(ca.getAccountId());
psas.put(category, ca);
}
}
approvals.put(id, new ApprovalSummary(psas.values()));
} catch (NoSuchChangeException nsce) {
/*
* The user has no access to see this change, so we simply do not
* provide any details about it.
*/
}
}
return new ApprovalSummarySet(aicFactory.create(), approvals);
}
});
}
}
| m1kah/gerrit-contributions | gerrit-httpd/src/main/java/com/google/gerrit/httpd/rpc/patch/PatchDetailServiceImpl.java | Java | apache-2.0 | 13,967 |
package fixtures.genericresponse;
import java.util.Collections;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import com.sun.jersey.api.JResponse;
/**
* The GenericResponseResource represents a test class for generic responses
* @version $Id$
* @author conor.roche
*/
@SuppressWarnings("javadoc")
@Path("/genericresponse")
public class GenericResponseResource {
@GET
public Parameterized<String, Integer> getParameterized() {
return new Parameterized<String, Integer>();
}
@GET
public JResponse<String> getJResponse() {
return new JResponse<String>(200, null, "");
}
@GET
public Response getOptional(@QueryParam("name") com.google.common.base.Optional<String> name) {
return null;
}
@GET
public Response getOptional2(@QueryParam("name") jersey.repackaged.com.google.common.base.Optional<Integer> name) {
return null;
}
@GET
public Map<String, Integer> getIntMap() {
return Collections.emptyMap();
}
/**
* @returnType fixtures.genericresponse.Parameterized2<java.lang.Integer>
*/
@GET
public Response getParameterized2() {
return null;
}
@GET
public Batch<Item> getBatch() {
return null;
}
/**
* @returnType fixtures.genericresponse.Batch<fixtures.genericresponse.Item>
*/
@GET
public Response getBatch2() {
return null;
}
}
| mhardorf/swagger-jaxrs-doclet | swagger-doclet/src/test/resources/fixtures/genericresponse/GenericResponseResource.java | Java | apache-2.0 | 1,380 |
-- create-db-security.sql: Security Master
-- design has one document
-- security and associated identity key
-- bitemporal versioning exists at the document level
-- each time a document is changed, a new row is written
-- with only the end instant being changed on the old row
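-- Illustrative sketch only (not part of the schema): a change to a document is
-- recorded by closing the current row's version end instant and inserting a
-- replacement row; the ids and instants below are placeholders:
--   UPDATE sec_security SET ver_to_instant = :now WHERE id = :current_row_id;
--   INSERT INTO sec_security (id, oid, ver_from_instant, ver_to_instant, corr_from_instant, corr_to_instant, name, sec_type, detail_type)
--     VALUES (:new_row_id, :oid, :now, :max_instant, :now, :max_instant, :name, :sec_type, :detail_type);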
CREATE TABLE sec_schema_version (
version_key VARCHAR(32) NOT NULL,
version_value VARCHAR(255) NOT NULL
);
INSERT INTO sec_schema_version (version_key, version_value) VALUES ('schema_patch', '56');
CREATE SEQUENCE sec_hibernate_sequence
START WITH 1 INCREMENT BY 1;
CREATE SEQUENCE sec_security_seq
START WITH 1000 INCREMENT BY 1 NO CYCLE;
CREATE SEQUENCE sec_idkey_seq
START WITH 1000 INCREMENT BY 1 NO CYCLE;
-- "as bigint" required by Derby/HSQL, not accepted by Postgresql
CREATE TABLE sec_security (
id bigint NOT NULL,
oid bigint NOT NULL,
ver_from_instant timestamp without time zone NOT NULL,
ver_to_instant timestamp without time zone NOT NULL,
corr_from_instant timestamp without time zone NOT NULL,
corr_to_instant timestamp without time zone NOT NULL,
name varchar(255) NOT NULL,
sec_type varchar(255) NOT NULL,
detail_type char NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_sec2sec FOREIGN KEY (oid) REFERENCES sec_security (id),
CONSTRAINT sec_chk_sec_ver_order CHECK (ver_from_instant <= ver_to_instant),
CONSTRAINT sec_chk_sec_corr_order CHECK (corr_from_instant <= corr_to_instant),
CONSTRAINT sec_chk_detail_type CHECK (detail_type IN ('D', 'M', 'R'))
);
CREATE INDEX ix_sec_security_oid ON sec_security(oid);
CREATE INDEX ix_sec_security_ver_from_instant ON sec_security(ver_from_instant);
CREATE INDEX ix_sec_security_ver_to_instant ON sec_security(ver_to_instant);
CREATE INDEX ix_sec_security_corr_from_instant ON sec_security(corr_from_instant);
CREATE INDEX ix_sec_security_corr_to_instant ON sec_security(corr_to_instant);
CREATE INDEX ix_sec_security_name ON sec_security(name);
CREATE INDEX ix_sec_security_nameu ON sec_security(UPPER(name));
CREATE INDEX ix_sec_security_sec_type ON sec_security(sec_type);
CREATE INDEX ix_sec_security_sec_typeu ON sec_security(UPPER(sec_type));
CREATE TABLE sec_idkey (
id bigint NOT NULL,
key_scheme varchar(255) NOT NULL,
key_value varchar(255) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_chk_idkey UNIQUE (key_scheme, key_value)
);
CREATE TABLE sec_security2idkey (
security_id bigint NOT NULL,
idkey_id bigint NOT NULL,
PRIMARY KEY (security_id, idkey_id),
CONSTRAINT sec_fk_secidkey2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_secidkey2idkey FOREIGN KEY (idkey_id) REFERENCES sec_idkey (id)
);
CREATE INDEX ix_sec_sec2idkey_idkey ON sec_security2idkey(idkey_id);
-- sec_security2idkey is fully dependent on sec_security
-- Hibernate controlled tables
CREATE TABLE sec_currency (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_cashrate (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_unit (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_exchange (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
description varchar(255),
PRIMARY KEY (id)
);
CREATE TABLE sec_gics (
id bigint NOT NULL,
name varchar(8) NOT NULL UNIQUE,
description varchar(255),
PRIMARY KEY (id)
);
CREATE TABLE sec_equity (
id bigint NOT NULL,
security_id bigint NOT NULL,
shortName varchar(255),
exchange_id bigint NOT NULL,
companyName varchar(255) NOT NULL,
currency_id bigint NOT NULL,
gicscode_id bigint,
PRIMARY KEY (id),
CONSTRAINT sec_fk_equity2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_equity2currency FOREIGN KEY (currency_id) REFERENCES sec_currency(id),
CONSTRAINT sec_fk_equity2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange(id),
CONSTRAINT sec_fk_equity2gics FOREIGN KEY (gicscode_id) REFERENCES sec_gics(id)
);
CREATE INDEX ix_sec_equity_security_id ON sec_equity(security_id);
CREATE TABLE sec_equityindexoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
exchange_id bigint,
pointValue double precision,
PRIMARY KEY (id),
CONSTRAINT sec_fk_equityindexoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_equityindexoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_equityindexoption2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange (id)
);
CREATE TABLE sec_equityoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
exchange_id bigint,
pointValue double precision,
PRIMARY KEY (id),
CONSTRAINT sec_fk_equityoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_equityoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_equityoption2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange (id)
);
CREATE INDEX ix_sec_equityoption_security_id ON sec_equityoption(security_id);
CREATE TABLE sec_equitybarrieroption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
exchange_id bigint,
pointValue double precision,
barrier_type varchar(32) NOT NULL,
barrier_direction varchar(32) NOT NULL,
barrier_level double precision NOT NULL,
monitoring_type varchar(32) NOT NULL,
sampling_frequency varchar(32),
PRIMARY KEY (id),
CONSTRAINT sec_fk_equitybarrieroption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_equitybarrieroption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_equitybarrieroption2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange (id)
);
CREATE INDEX ix_sec_equitybarrieroption_security_id ON sec_equitybarrieroption(security_id);
CREATE TABLE sec_fxoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
put_amount double precision NOT NULL,
call_amount double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
put_currency_id bigint NOT NULL,
call_currency_id bigint NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
is_long boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_fxoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_fxoption2putcurrency FOREIGN KEY (put_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_fxoption2callcurrency FOREIGN KEY (call_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_fxoption_security_id ON sec_fxoption(security_id);
CREATE TABLE sec_nondeliverablefxoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
put_amount double precision NOT NULL,
call_amount double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
put_currency_id bigint NOT NULL,
call_currency_id bigint NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
is_long boolean NOT NULL,
is_delivery_in_call_currency boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_nondeliverablefxoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_nondeliverablefxoption2putcurrency FOREIGN KEY (put_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_nondeliverablefxoption2callcurrency FOREIGN KEY (call_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_nondeliverablefxoption_security_id ON sec_nondeliverablefxoption(security_id);
CREATE TABLE sec_fxdigitaloption (
id bigint NOT NULL,
security_id bigint NOT NULL,
put_amount double precision NOT NULL,
call_amount double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
put_currency_id bigint NOT NULL,
call_currency_id bigint NOT NULL,
payment_currency_id bigint NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
is_long boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_fxdigitaloption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_fxdigitaloption2putcurrency FOREIGN KEY (put_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_fxdigitaloption2callcurrency FOREIGN KEY (call_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_fxdigitaloption2paymentcurrency FOREIGN KEY (payment_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_fxdigitaloption_security_id ON sec_fxdigitaloption(security_id);
CREATE TABLE sec_ndffxdigitaloption (
id bigint NOT NULL,
security_id bigint NOT NULL,
put_amount double precision NOT NULL,
call_amount double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
put_currency_id bigint NOT NULL,
call_currency_id bigint NOT NULL,
payment_currency_id bigint NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
is_long boolean NOT NULL,
is_delivery_in_call_currency boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_ndffxdigitaloption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_ndffxdigitaloption2putcurrency FOREIGN KEY (put_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_ndffxdigitaloption2callcurrency FOREIGN KEY (call_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_ndffxdigitaloption2paymentcurrency FOREIGN KEY (payment_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_ndffxdigitaloption_security_id ON sec_ndffxdigitaloption(security_id);
CREATE TABLE sec_swaption (
id bigint NOT NULL,
security_id bigint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
cash_settled boolean NOT NULL,
is_long boolean NOT NULL,
is_payer boolean NOT NULL,
currency_id bigint NOT NULL,
option_exercise_type VARCHAR(32),
settlement_date TIMESTAMP,
settlement_zone VARCHAR(50),
notional double precision,
PRIMARY KEY (id),
CONSTRAINT sec_fk_swaption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency(id),
CONSTRAINT sec_fk_swaption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id)
);
CREATE TABLE sec_irfutureoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
exchange_id bigint NOT NULL,
margined boolean NOT NULL,
pointValue double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_irfutureoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_irfutureoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_irfutureoption2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange (id)
);
CREATE TABLE sec_commodityfutureoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
trading_exchange_id bigint NOT NULL,
settlement_exchange_id bigint NOT NULL,
pointValue double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_commodityfutureoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_commodityfutureoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_commodityfutureoption2trading_exchange FOREIGN KEY (trading_exchange_id) REFERENCES sec_exchange (id),
CONSTRAINT sec_fk_commodityfutureoption2settlement_exchange FOREIGN KEY (settlement_exchange_id) REFERENCES sec_exchange (id)
);
CREATE TABLE sec_bondfutureoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
trading_exchange_id bigint NOT NULL,
settlement_exchange_id bigint NOT NULL,
pointValue double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_bondfutureoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_bondfutureoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_bondfutureoption2trading_exchange FOREIGN KEY (trading_exchange_id) REFERENCES sec_exchange (id),
CONSTRAINT sec_fk_bondfutureoption2settlement_exchange FOREIGN KEY (settlement_exchange_id) REFERENCES sec_exchange (id)
);
CREATE TABLE sec_equity_index_dividend_futureoption (
id bigint NOT NULL,
security_id bigint NOT NULL,
option_exercise_type varchar(32) NOT NULL,
option_type varchar(32) NOT NULL,
strike double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
currency_id bigint NOT NULL,
exchange_id bigint NOT NULL,
margined boolean NOT NULL,
pointValue double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_equity_index_dividend_futureoption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_equity_index_dividend_futureoption2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_equity_index_dividend_futureoption2exchange FOREIGN KEY (exchange_id) REFERENCES sec_exchange (id)
);
CREATE INDEX ix_sec_equity_index_dividend_futureoption_security_id ON sec_equity_index_dividend_futureoption(security_id);
CREATE TABLE sec_fxbarrieroption (
id bigint NOT NULL,
security_id bigint NOT NULL,
put_amount double precision NOT NULL,
call_amount double precision NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
put_currency_id bigint NOT NULL,
call_currency_id bigint NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
barrier_type varchar(32) NOT NULL,
barrier_direction varchar(32) NOT NULL,
barrier_level double precision NOT NULL,
monitoring_type varchar(32) NOT NULL,
sampling_frequency varchar(32),
is_long boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_fxbarrieroption2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_fxbarrieroption2putcurrency FOREIGN KEY (put_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_fxbarrieroption2callcurrency FOREIGN KEY (call_currency_id) REFERENCES sec_currency (id)
);
CREATE TABLE sec_frequency (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_daycount (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_businessdayconvention (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_issuertype (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_market (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_yieldconvention (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_guaranteetype (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_coupontype (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_stubtype (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_bond (
id bigint NOT NULL,
security_id bigint NOT NULL,
bond_type varchar(32) NOT NULL,
issuername varchar(255) NOT NULL,
issuertype_id bigint NOT NULL,
issuerdomicile varchar(255) NOT NULL,
market_id bigint NOT NULL,
currency_id bigint NOT NULL,
yieldconvention_id bigint NOT NULL,
guaranteetype_id bigint,
maturity_date timestamp without time zone NOT NULL,
maturity_zone varchar(50) NOT NULL,
maturity_accuracy smallint NOT NULL,
coupontype_id bigint NOT NULL,
couponrate double precision NOT NULL,
couponfrequency_id bigint NOT NULL,
daycountconvention_id bigint NOT NULL,
businessdayconvention_id bigint,
announcement_date timestamp without time zone,
announcement_zone varchar(50),
interestaccrual_date timestamp without time zone,
interestaccrual_zone varchar(50),
settlement_date timestamp without time zone,
settlement_zone varchar(50),
firstcoupon_date timestamp without time zone,
firstcoupon_zone varchar(50),
issuanceprice double precision,
totalamountissued double precision NOT NULL,
minimumamount double precision NOT NULL,
minimumincrement double precision NOT NULL,
paramount double precision NOT NULL,
redemptionvalue double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_bond2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_bond2issuertype FOREIGN KEY (issuertype_id) REFERENCES sec_issuertype (id),
CONSTRAINT sec_fk_bond2market FOREIGN KEY (market_id) REFERENCES sec_market (id),
CONSTRAINT sec_fk_bond2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_bond2yieldconvention FOREIGN KEY (yieldconvention_id) REFERENCES sec_yieldconvention (id),
CONSTRAINT sec_fk_bond2guaranteetype FOREIGN KEY (guaranteetype_id) REFERENCES sec_guaranteetype (id),
CONSTRAINT sec_fk_bond2coupontype FOREIGN KEY (coupontype_id) REFERENCES sec_coupontype (id),
CONSTRAINT sec_fk_bond2frequency FOREIGN KEY (couponfrequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_bond2daycount FOREIGN KEY (daycountconvention_id) REFERENCES sec_daycount (id),
CONSTRAINT sec_fk_bond2businessdayconvention FOREIGN KEY (businessdayconvention_id) REFERENCES sec_businessdayconvention (id)
);
CREATE INDEX ix_sec_bond_security_id ON sec_bond(security_id);
CREATE TABLE sec_contract_category (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
description varchar(255),
PRIMARY KEY (id)
);
CREATE TABLE sec_future (
id bigint NOT NULL,
security_id bigint NOT NULL,
future_type varchar(32) NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
tradingexchange_id bigint NOT NULL,
settlementexchange_id bigint NOT NULL,
currency1_id bigint,
currency2_id bigint,
currency3_id bigint,
unitname_id bigint,
unitnumber double precision,
unit_amount double precision,
underlying_scheme varchar(255),
underlying_identifier varchar(255),
bondFutureFirstDeliveryDate timestamp without time zone,
bondFutureFirstDeliveryDate_zone varchar(50),
bondFutureLastDeliveryDate timestamp without time zone,
bondFutureLastDeliveryDate_zone varchar(50),
contract_category_id bigint, -- most current futures have no category defined, so this column must stay nullable
PRIMARY KEY (id),
CONSTRAINT sec_fk_future2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_future2exchange1 FOREIGN KEY (tradingexchange_id) REFERENCES sec_exchange (id),
CONSTRAINT sec_fk_future2exchange2 FOREIGN KEY (settlementexchange_id) REFERENCES sec_exchange (id),
CONSTRAINT sec_fk_future2currency1 FOREIGN KEY (currency1_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_future2currency2 FOREIGN KEY (currency2_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_future2currency3 FOREIGN KEY (currency3_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_future2unit FOREIGN KEY (unitname_id) REFERENCES sec_unit (id),
CONSTRAINT sec_fk_future2contract_category FOREIGN KEY (contract_category_id) REFERENCES sec_contract_category (id)
);
CREATE INDEX ix_sec_future_security_id ON sec_future(security_id);
CREATE TABLE sec_futurebundle (
id bigint NOT NULL,
future_id bigint NOT NULL,
startDate timestamp without time zone,
endDate timestamp without time zone,
conversionFactor double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_futurebundle2future FOREIGN KEY (future_id) REFERENCES sec_future (id)
);
CREATE TABLE sec_futurebundleidentifier (
bundle_id bigint NOT NULL,
scheme varchar(255) NOT NULL,
identifier varchar(255) NOT NULL,
PRIMARY KEY (bundle_id, scheme, identifier),
CONSTRAINT sec_fk_futurebundleidentifier2futurebundle FOREIGN KEY (bundle_id) REFERENCES sec_futurebundle (id)
);
CREATE TABLE sec_commodity_forward (
id bigint NOT NULL,
security_id bigint NOT NULL,
forward_type varchar(32) NOT NULL,
expiry_date timestamp without time zone NOT NULL,
expiry_zone varchar(50) NOT NULL,
expiry_accuracy smallint NOT NULL,
currency_id bigint,
unitname_id bigint,
unitnumber double precision,
unit_amount double precision,
underlying_scheme varchar(255),
underlying_identifier varchar(255),
contract_category_id bigint NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_commodity_forward2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_commodity_forward2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_commodity_forward2unit FOREIGN KEY (unitname_id) REFERENCES sec_unit (id),
CONSTRAINT sec_fk_commodity_forward2contract_category FOREIGN KEY (contract_category_id) REFERENCES sec_contract_category (id)
);
CREATE INDEX ix_sec_commodity_forward_security_id ON sec_commodity_forward(security_id);
CREATE TABLE sec_cash (
id bigint NOT NULL,
security_id bigint NOT NULL,
currency_id bigint NOT NULL,
region_scheme varchar(255) NOT NULL,
region_identifier varchar(255) NOT NULL,
start_date timestamp without time zone NOT NULL,
start_zone varchar(50) NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_zone varchar(50) NOT NULL,
daycount_id bigint NOT NULL,
rate double precision NOT NULL,
amount double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_cash2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_cash2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_cash2daycount FOREIGN KEY (daycount_id) REFERENCES sec_daycount (id)
);
CREATE TABLE sec_fra (
id bigint NOT NULL,
security_id bigint NOT NULL,
currency_id bigint NOT NULL,
region_scheme varchar(255) NOT NULL,
region_identifier varchar(255) NOT NULL,
start_date timestamp without time zone NOT NULL,
start_zone varchar(50) NOT NULL,
end_date timestamp without time zone NOT NULL,
end_zone varchar(50) NOT NULL,
rate double precision NOT NULL,
amount double precision NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
fixing_date timestamp without time zone NOT NULL,
fixing_zone varchar(50) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_fra2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_fra2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id)
);
CREATE TABLE sec_swap (
id bigint NOT NULL,
security_id bigint NOT NULL,
swaptype varchar(32) NOT NULL,
trade_date timestamp without time zone NOT NULL,
trade_zone varchar(50) NOT NULL,
effective_date timestamp without time zone NOT NULL,
effective_zone varchar(50) NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_zone varchar(50) NOT NULL,
forwardstart_date timestamp without time zone,
forwardstart_zone varchar(50),
counterparty varchar(255) NOT NULL,
pay_legtype varchar(32) NOT NULL,
pay_daycount_id bigint NOT NULL,
pay_frequency_id bigint NOT NULL,
pay_regionscheme varchar(255) NOT NULL,
pay_regionid varchar(255) NOT NULL,
pay_businessdayconvention_id bigint NOT NULL,
pay_notionaltype varchar(32) NOT NULL,
pay_notionalcurrency_id bigint,
pay_notionalamount double precision,
pay_notionalscheme varchar(255),
pay_notionalid varchar(255),
pay_rate double precision,
pay_iseom boolean NOT NULL,
pay_spread double precision,
pay_rateidentifierscheme varchar(255),
pay_rateidentifierid varchar(255),
pay_floating_rate_type varchar(32),
pay_settlement_days INTEGER,
pay_gearing DOUBLE precision,
pay_offset_fixing_id bigint,
pay_strike double precision,
pay_variance_swap_type varchar(32),
pay_underlying_identifier varchar(255),
pay_underlying_scheme varchar(255),
pay_monitoring_frequency_id bigint,
pay_annualization_factor double precision,
receive_legtype varchar(32) NOT NULL,
receive_daycount_id bigint NOT NULL,
receive_frequency_id bigint NOT NULL,
receive_regionscheme varchar(255) NOT NULL,
receive_regionid varchar(255) NOT NULL,
receive_businessdayconvention_id bigint NOT NULL,
receive_notionaltype varchar(32) NOT NULL,
receive_notionalcurrency_id bigint,
receive_notionalamount double precision,
receive_notionalscheme varchar(255),
receive_notionalid varchar(255),
receive_rate double precision,
receive_iseom boolean NOT NULL,
receive_spread double precision,
receive_rateidentifierscheme varchar(255),
receive_rateidentifierid varchar(255),
receive_floating_rate_type varchar(32),
receive_settlement_days INTEGER,
receive_gearing DOUBLE precision,
receive_offset_fixing_id bigint,
receive_strike double precision,
receive_variance_swap_type varchar(32),
receive_underlying_identifier varchar(255),
receive_underlying_scheme varchar(255),
receive_monitoring_frequency_id bigint,
receive_annualization_factor double precision,
PRIMARY KEY (id),
CONSTRAINT sec_fk_swap2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_payfreq2frequency FOREIGN KEY (pay_frequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_receivefreq2frequency FOREIGN KEY (receive_frequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_payoffset2frequency FOREIGN KEY (pay_offset_fixing_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_recvoffset2frequency FOREIGN KEY (receive_offset_fixing_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_paymonitorfreq2frequency FOREIGN KEY (pay_monitoring_frequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_recvmonitorfreq2frequency FOREIGN KEY (receive_monitoring_frequency_id) REFERENCES sec_frequency (id)
);
CREATE INDEX ix_sec_swap_security_id ON sec_swap(security_id);
CREATE TABLE sec_raw (
security_id bigint NOT NULL,
raw_data bytea NOT NULL,
CONSTRAINT sec_fk_raw2sec FOREIGN KEY (security_id) REFERENCES sec_security (id)
);
CREATE TABLE sec_fxforward (
id bigint NOT NULL,
security_id bigint NOT NULL,
region_scheme varchar(255) NOT NULL,
region_identifier varchar(255) NOT NULL,
pay_currency_id bigint NOT NULL,
receive_currency_id bigint NOT NULL,
pay_amount DOUBLE PRECISION NOT NULL,
receive_amount DOUBLE PRECISION NOT NULL,
forward_date timestamp without time zone NOT NULL,
forward_zone varchar(50) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_fxforward2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_fxforward_pay2currency FOREIGN KEY (pay_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_fxforward_rcv2currency FOREIGN KEY (receive_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_fxforward_security_id ON sec_fxforward(security_id);
CREATE TABLE sec_nondeliverablefxforward (
id bigint NOT NULL,
security_id bigint NOT NULL,
region_scheme varchar(255) NOT NULL,
region_identifier varchar(255) NOT NULL,
pay_currency_id bigint NOT NULL,
receive_currency_id bigint NOT NULL,
pay_amount DOUBLE PRECISION NOT NULL,
receive_amount DOUBLE PRECISION NOT NULL,
forward_date timestamp without time zone NOT NULL,
forward_zone varchar(50) NOT NULL,
is_delivery_in_receive_currency boolean NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_nondeliverablefxforward2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_nondeliverablefxforward_pay2currency FOREIGN KEY (pay_currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_nondeliverablefxforward_rcv2currency FOREIGN KEY (receive_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_nondeliverablefxforward_security_id ON sec_nondeliverablefxforward(security_id);
CREATE TABLE sec_capfloor (
id bigint NOT NULL,
security_id bigint NOT NULL,
currency_id bigint NOT NULL,
daycountconvention_id bigint NOT NULL,
frequency_id bigint NOT NULL,
is_cap boolean NOT NULL,
is_ibor boolean NOT NULL,
is_payer boolean NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_zone varchar(50) NOT NULL,
notional double precision NOT NULL,
start_date timestamp without time zone NOT NULL,
start_zone varchar(50) NOT NULL,
strike double precision NOT NULL,
underlying_scheme varchar(255) NOT NULL,
underlying_identifier varchar(255) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_capfloor2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_capfloor2currency FOREIGN KEY (currency_id) REFERENCES sec_currency(id),
CONSTRAINT sec_fk_capfloor2daycount FOREIGN KEY (daycountconvention_id) REFERENCES sec_daycount (id),
CONSTRAINT sec_fk_capfloor2frequency FOREIGN KEY (frequency_id) REFERENCES sec_frequency (id)
);
CREATE TABLE sec_capfloorcmsspread (
id bigint NOT NULL,
security_id bigint NOT NULL,
currency_id bigint NOT NULL,
daycountconvention_id bigint NOT NULL,
frequency_id bigint NOT NULL,
is_cap boolean NOT NULL,
is_payer boolean NOT NULL,
long_scheme varchar(255) NOT NULL,
long_identifier varchar(255) NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_zone varchar(50) NOT NULL,
notional double precision NOT NULL,
short_scheme varchar(255) NOT NULL,
short_identifier varchar(255) NOT NULL,
start_date timestamp without time zone NOT NULL,
start_zone varchar(50) NOT NULL,
strike double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_capfloorcmsspread2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_capfloorcmsspread2currency FOREIGN KEY (currency_id) REFERENCES sec_currency(id),
CONSTRAINT sec_fk_capfloorcmsspread2daycount FOREIGN KEY (daycountconvention_id) REFERENCES sec_daycount (id),
CONSTRAINT sec_fk_capfloorcmsspread2frequency FOREIGN KEY (frequency_id) REFERENCES sec_frequency (id)
);
CREATE TABLE sec_equity_variance_swap (
id bigint NOT NULL,
security_id bigint NOT NULL,
annualization_factor double precision NOT NULL,
currency_id bigint NOT NULL,
first_observation_date timestamp without time zone NOT NULL,
first_observation_zone varchar(50) NOT NULL,
last_observation_date timestamp without time zone NOT NULL,
last_observation_zone varchar(50) NOT NULL,
notional double precision NOT NULL,
observation_frequency_id bigint NOT NULL,
parameterised_as_variance boolean NOT NULL,
region_scheme varchar(255) NOT NULL,
region_id varchar(255) NOT NULL,
settlement_date timestamp without time zone NOT NULL,
settlement_zone varchar(50) NOT NULL,
spot_scheme varchar(255) NOT NULL,
spot_id varchar(255) NOT NULL,
strike double precision NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_equityvarianceswap2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_equityvarianceswap2currency FOREIGN KEY (currency_id) REFERENCES sec_currency(id),
CONSTRAINT sec_fk_equityvarianceswap2frequency FOREIGN KEY (observation_frequency_id) REFERENCES sec_frequency (id)
);
CREATE SEQUENCE sec_security_attr_seq
start with 1000 increment by 1 no cycle;
CREATE TABLE sec_security_attribute (
id bigint not null,
security_id bigint not null,
security_oid bigint not null,
attr_key varchar(255) not null,
attr_value varchar(255) not null,
primary key (id),
constraint sec_fk_securityattr2security foreign key (security_id) references sec_security (id),
constraint sec_chk_uq_security_attribute unique (security_id, attr_key, attr_value)
);
-- security_oid is an optimization
-- sec_security_attribute is fully dependent on sec_security
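-- e.g. (illustrative) it lets all attributes for every version of a security be
-- fetched by object id without joining to sec_security:
--   SELECT attr_key, attr_value FROM sec_security_attribute WHERE security_oid = :oid;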
CREATE INDEX ix_sec_security_attr_security_oid ON sec_security_attribute(security_oid);
CREATE INDEX ix_sec_security_attr_key ON sec_security_attribute(attr_key);
CREATE TABLE sec_cds (
id bigint NOT NULL,
security_id bigint NOT NULL,
notional double precision NOT NULL,
recovery_rate double precision NOT NULL,
spread double precision NOT NULL,
currency_id bigint NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_date_zone varchar(50) NOT NULL,
start_date timestamp without time zone NOT NULL,
start_date_zone varchar(50) NOT NULL,
premium_frequency_id bigint NOT NULL,
daycountconvention_id bigint NOT NULL,
businessdayconvention_id bigint NOT NULL,
stubtype_id bigint NOT NULL,
settlement_days int NOT NULL,
underlying_issuer varchar(255) NOT NULL,
underlying_currency_id bigint NOT NULL,
underlying_seniority varchar(255) NOT NULL,
restructuring_clause varchar(255) NOT NULL,
PRIMARY KEY (id),
CONSTRAINT sec_fk_cds2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_cds2currency FOREIGN KEY (currency_id) REFERENCES sec_currency (id),
CONSTRAINT sec_fk_cds2daycount FOREIGN KEY (daycountconvention_id) REFERENCES sec_daycount (id),
CONSTRAINT sec_fk_cds2businessdayconvention FOREIGN KEY (businessdayconvention_id) REFERENCES sec_businessdayconvention (id),
CONSTRAINT sec_fk_cds2frequency FOREIGN KEY (premium_frequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_cds2stubtype FOREIGN KEY (stubtype_id) REFERENCES sec_stubtype (id),
CONSTRAINT sec_fk_cds_underlying2currency FOREIGN KEY (underlying_currency_id) REFERENCES sec_currency (id)
);
CREATE TABLE sec_debt_seniority (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_restructuring_clause (
id bigint NOT NULL,
name varchar(255) NOT NULL UNIQUE,
PRIMARY KEY (id)
);
CREATE TABLE sec_credit_default_swap (
id bigint NOT NULL,
security_id bigint NOT NULL,
cds_type varchar(255) NOT NULL,
buy boolean NOT NULL,
buyer_scheme varchar(255) NOT NULL,
buyer_identifier varchar(255) NOT NULL,
seller_scheme varchar(255) NOT NULL,
seller_identifier varchar(255) NOT NULL,
entity_scheme varchar(255) NOT NULL,
entity_identifier varchar(255) NOT NULL,
debt_seniority_id bigint NOT NULL,
restructuring_clause_id bigint NOT NULL,
region_scheme varchar(255) NOT NULL,
region_identifier varchar(255) NOT NULL,
start_date timestamp without time zone NOT NULL,
start_date_zone varchar(50) NOT NULL,
effective_date timestamp without time zone NOT NULL,
effective_date_zone varchar(50) NOT NULL,
maturity_date timestamp without time zone NOT NULL,
maturity_date_zone varchar(50) NOT NULL,
stub_type_id bigint NOT NULL,
frequency_id bigint NOT NULL,
daycount_convention_id bigint NOT NULL,
businessday_convention_id bigint NOT NULL,
imm_adjust_maturity_date boolean NOT NULL,
adjust_effective_date boolean NOT NULL,
adjust_maturity_date boolean NOT NULL,
notional_type varchar(32) NOT NULL,
notional_currency_id bigint NOT NULL,
notional_amount double precision,
notional_scheme varchar(255),
notional_id varchar(255),
recovery_rate double precision NOT NULL,
include_accrued_premium boolean NOT NULL,
protection_start boolean NOT NULL,
legacy_par_spread double precision,
std_quoted_spread double precision,
std_upfrontamt_notional_type varchar(32),
std_upfrontamt_notional_currency_id bigint,
std_upfrontamt_notional_amount double precision,
std_upfrontamt_notional_scheme varchar(255),
std_upfrontamt_notional_id varchar(255),
stdvanilla_coupon double precision,
stdvanilla_cashsettlement_date timestamp without time zone,
stdvanilla_ashsettlement_date_zone varchar(50),
stdvanilla_adjust_cashsettlement_date boolean,
PRIMARY KEY (id),
CONSTRAINT sec_fk_creditdefaultswap2sec FOREIGN KEY (security_id) REFERENCES sec_security (id),
CONSTRAINT sec_fk_creditdefaultswap2debtseniority FOREIGN KEY (debt_seniority_id) REFERENCES sec_debt_seniority(id),
CONSTRAINT sec_fk_creditdefaultswap2restructuringclause FOREIGN KEY (restructuring_clause_id) REFERENCES sec_restructuring_clause(id),
CONSTRAINT sec_fk_creditdefaultswap2stubtype FOREIGN KEY (stub_type_id) REFERENCES sec_stubtype (id),
CONSTRAINT sec_fk_creditdefaultswap2frequency FOREIGN KEY (frequency_id) REFERENCES sec_frequency (id),
CONSTRAINT sec_fk_creditdefaultswap2daycount FOREIGN KEY (daycount_convention_id) REFERENCES sec_daycount (id),
CONSTRAINT sec_fk_creditdefaultswap2businessdayconvention FOREIGN KEY (businessday_convention_id) REFERENCES sec_businessdayconvention (id),
CONSTRAINT sec_fk_creditdefaultswap2currency FOREIGN KEY (notional_currency_id) REFERENCES sec_currency (id)
);
CREATE INDEX ix_sec_creditdefaultswap_security_id ON sec_credit_default_swap(security_id);
| McLeodMoores/starling | projects/master-db/src/main/resources/db/create/postgres/sec/V_56__create_sec.sql | SQL | apache-2.0 | 40,490 |
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from behave import given
@given('a RelationshipType "{name}" forward phrase "{fwd}"')
def create_relationship_type(context, name, fwd):
from ggrc import db
from ggrc.models.relationship import RelationshipType
t = RelationshipType(
relationship_type=name,
forward_phrase=fwd,
symmetric=False,
)
db.session.add(t)
db.session.commit()
@given('a symmetric RelationshipType "{name}" forward phrase "{fwd}" and '
'backward phrase "{back}"')
def create_symmetric_relationship_type(context, name, fwd, back):
from ggrc import db
from ggrc.models.relationship import RelationshipType
t = RelationshipType(
relationship_type=name,
forward_phrase=fwd,
backward_phrase=back,
symmetric=True,
)
db.session.add(t)
db.session.commit()
| prasannav7/ggrc-core | src/service_specs/steps/relationships.py | Python | apache-2.0 | 1,006 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.*;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.composites.CellNames;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.transport.Event;
public abstract class AlterTypeStatement extends SchemaAlteringStatement
{
protected final UTName name;
protected AlterTypeStatement(UTName name)
{
super();
this.name = name;
}
@Override
public void prepareKeyspace(ClientState state) throws InvalidRequestException
{
if (!name.hasKeyspace())
name.setKeyspace(state.getKeyspace());
if (name.getKeyspace() == null)
throw new InvalidRequestException("You need to be logged in a keyspace or use a fully qualified user type name");
}
protected abstract UserType makeUpdatedType(UserType toUpdate) throws InvalidRequestException;
public static AlterTypeStatement addition(UTName name, ColumnIdentifier fieldName, CQL3Type.Raw type)
{
return new AddOrAlter(name, true, fieldName, type);
}
public static AlterTypeStatement alter(UTName name, ColumnIdentifier fieldName, CQL3Type.Raw type)
{
return new AddOrAlter(name, false, fieldName, type);
}
public static AlterTypeStatement renames(UTName name, Map<ColumnIdentifier, ColumnIdentifier> renames)
{
return new Renames(name, renames);
}
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasKeyspaceAccess(keyspace(), Permission.ALTER);
}
public void validate(ClientState state) throws RequestValidationException
{
// Validation is left to announceMigration as it's easier to do it while constructing the updated type.
// It doesn't really change anything anyway.
}
public Event.SchemaChange changeEvent()
{
return new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.TYPE, keyspace(), name.getStringTypeName());
}
@Override
public String keyspace()
{
return name.getKeyspace();
}
public boolean announceMigration(boolean isLocalOnly) throws InvalidRequestException, ConfigurationException
{
KSMetaData ksm = Schema.instance.getKSMetaData(name.getKeyspace());
if (ksm == null)
throw new InvalidRequestException(String.format("Cannot alter type in unknown keyspace %s", name.getKeyspace()));
UserType toUpdate = ksm.userTypes.getType(name.getUserTypeName());
// Shouldn't happen, unless we race with a drop
if (toUpdate == null)
throw new InvalidRequestException(String.format("No user type named %s exists.", name));
UserType updated = makeUpdatedType(toUpdate);
// Announce the type update so that new tables using this type pick up the new definition,
// but we also need to find all existing user types and column families using it and update them.
MigrationManager.announceTypeUpdate(updated, isLocalOnly);
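// Scan every keyspace for tables and other user types that embed the updated type.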
for (KSMetaData ksm2 : Schema.instance.getKeyspaceDefinitions())
{
for (CFMetaData cfm : ksm2.cfMetaData().values())
{
CFMetaData copy = cfm.copy();
boolean modified = false;
for (ColumnDefinition def : copy.allColumns())
modified |= updateDefinition(copy, def, toUpdate.keyspace, toUpdate.name, updated);
if (modified)
MigrationManager.announceColumnFamilyUpdate(copy, false, isLocalOnly);
}
// Other user types potentially using the updated type
for (UserType ut : ksm2.userTypes.getAllTypes().values())
{
// Re-updating the type we've just updated would be harmless but useless so we avoid it.
// Besides, we use the occasion to drop the old version of the type if it's a type rename
if (ut.keyspace.equals(toUpdate.keyspace) && ut.name.equals(toUpdate.name))
{
if (!ut.keyspace.equals(updated.keyspace) || !ut.name.equals(updated.name))
MigrationManager.announceTypeDrop(ut);
continue;
}
AbstractType<?> upd = updateWith(ut, toUpdate.keyspace, toUpdate.name, updated);
if (upd != null)
MigrationManager.announceTypeUpdate((UserType)upd, isLocalOnly);
}
}
return true;
}
private static int getIdxOfField(UserType type, ColumnIdentifier field)
{
for (int i = 0; i < type.size(); i++)
if (field.bytes.equals(type.fieldName(i)))
return i;
return -1;
}
private boolean updateDefinition(CFMetaData cfm, ColumnDefinition def, String keyspace, ByteBuffer toReplace, UserType updated)
{
AbstractType<?> t = updateWith(def.type, keyspace, toReplace, updated);
if (t == null)
return false;
// We need to update this validator ...
cfm.addOrReplaceColumnDefinition(def.withNewType(t));
// ... but if it's part of the comparator or key validator, we need to go update those too.
switch (def.kind)
{
case PARTITION_KEY:
cfm.keyValidator(updateWith(cfm.getKeyValidator(), keyspace, toReplace, updated));
break;
case CLUSTERING_COLUMN:
cfm.comparator = CellNames.fromAbstractType(updateWith(cfm.comparator.asAbstractType(), keyspace, toReplace, updated), cfm.comparator.isDense());
break;
default:
// If it's a collection, we still want to modify the comparator because the collection is aliased in it
if (def.type instanceof CollectionType)
cfm.comparator = CellNames.fromAbstractType(updateWith(cfm.comparator.asAbstractType(), keyspace, toReplace, updated), cfm.comparator.isDense());
}
return true;
}
// Update the provided type where all instances of a given user type are replaced by a new version.
// Note that this method reaches inside other UserType, CompositeType and CollectionType instances.
private static AbstractType<?> updateWith(AbstractType<?> type, String keyspace, ByteBuffer toReplace, UserType updated)
{
if (type instanceof UserType)
{
UserType ut = (UserType)type;
// If it's directly the type we've updated, then just use the new one.
if (keyspace.equals(ut.keyspace) && toReplace.equals(ut.name))
return updated;
// Otherwise, check for nesting
List<AbstractType<?>> updatedTypes = updateTypes(ut.fieldTypes(), keyspace, toReplace, updated);
return updatedTypes == null ? null : new UserType(ut.keyspace, ut.name, new ArrayList<>(ut.fieldNames()), updatedTypes);
}
else if (type instanceof CompositeType)
{
CompositeType ct = (CompositeType)type;
List<AbstractType<?>> updatedTypes = updateTypes(ct.types, keyspace, toReplace, updated);
return updatedTypes == null ? null : CompositeType.getInstance(updatedTypes);
}
else if (type instanceof ColumnToCollectionType)
{
ColumnToCollectionType ctct = (ColumnToCollectionType)type;
Map<ByteBuffer, CollectionType> updatedTypes = null;
for (Map.Entry<ByteBuffer, CollectionType> entry : ctct.defined.entrySet())
{
AbstractType<?> t = updateWith(entry.getValue(), keyspace, toReplace, updated);
if (t == null)
continue;
if (updatedTypes == null)
updatedTypes = new HashMap<>(ctct.defined);
updatedTypes.put(entry.getKey(), (CollectionType)t);
}
return updatedTypes == null ? null : ColumnToCollectionType.getInstance(updatedTypes);
}
else if (type instanceof CollectionType)
{
if (type instanceof ListType)
{
AbstractType<?> t = updateWith(((ListType)type).elements, keyspace, toReplace, updated);
return t == null ? null : ListType.getInstance(t);
}
else if (type instanceof SetType)
{
AbstractType<?> t = updateWith(((SetType)type).elements, keyspace, toReplace, updated);
return t == null ? null : SetType.getInstance(t);
}
else
{
assert type instanceof MapType;
MapType mt = (MapType)type;
AbstractType<?> k = updateWith(mt.keys, keyspace, toReplace, updated);
AbstractType<?> v = updateWith(mt.values, keyspace, toReplace, updated);
if (k == null && v == null)
return null;
return MapType.getInstance(k == null ? mt.keys : k, v == null ? mt.values : v);
}
}
else
{
return null;
}
}
private static List<AbstractType<?>> updateTypes(List<AbstractType<?>> toUpdate, String keyspace, ByteBuffer toReplace, UserType updated)
{
// Each component type may itself nest the type being replaced, so check them all.
List<AbstractType<?>> updatedTypes = null;
for (int i = 0; i < toUpdate.size(); i++)
{
AbstractType<?> t = updateWith(toUpdate.get(i), keyspace, toReplace, updated);
if (t == null)
continue;
if (updatedTypes == null)
updatedTypes = new ArrayList<>(toUpdate);
updatedTypes.set(i, t);
}
return updatedTypes;
}
private static class AddOrAlter extends AlterTypeStatement
{
private final boolean isAdd;
private final ColumnIdentifier fieldName;
private final CQL3Type.Raw type;
public AddOrAlter(UTName name, boolean isAdd, ColumnIdentifier fieldName, CQL3Type.Raw type)
{
super(name);
this.isAdd = isAdd;
this.fieldName = fieldName;
this.type = type;
}
private UserType doAdd(UserType toUpdate) throws InvalidRequestException
{
if (getIdxOfField(toUpdate, fieldName) >= 0)
throw new InvalidRequestException(String.format("Cannot add new field %s to type %s: a field of the same name already exists", fieldName, name));
List<ByteBuffer> newNames = new ArrayList<>(toUpdate.size() + 1);
newNames.addAll(toUpdate.fieldNames());
newNames.add(fieldName.bytes);
List<AbstractType<?>> newTypes = new ArrayList<>(toUpdate.size() + 1);
newTypes.addAll(toUpdate.fieldTypes());
newTypes.add(type.prepare(keyspace()).getType());
return new UserType(toUpdate.keyspace, toUpdate.name, newNames, newTypes);
}
private UserType doAlter(UserType toUpdate) throws InvalidRequestException
{
int idx = getIdxOfField(toUpdate, fieldName);
if (idx < 0)
throw new InvalidRequestException(String.format("Unknown field %s in type %s", fieldName, name));
AbstractType<?> previous = toUpdate.fieldType(idx);
if (!type.prepare(keyspace()).getType().isCompatibleWith(previous))
throw new InvalidRequestException(String.format("Type %s is incompatible with previous type %s of field %s in user type %s", type, previous.asCQL3Type(), fieldName, name));
List<ByteBuffer> newNames = new ArrayList<>(toUpdate.fieldNames());
List<AbstractType<?>> newTypes = new ArrayList<>(toUpdate.fieldTypes());
newTypes.set(idx, type.prepare(keyspace()).getType());
return new UserType(toUpdate.keyspace, toUpdate.name, newNames, newTypes);
}
protected UserType makeUpdatedType(UserType toUpdate) throws InvalidRequestException
{
return isAdd ? doAdd(toUpdate) : doAlter(toUpdate);
}
}
private static class Renames extends AlterTypeStatement
{
private final Map<ColumnIdentifier, ColumnIdentifier> renames;
public Renames(UTName name, Map<ColumnIdentifier, ColumnIdentifier> renames)
{
super(name);
this.renames = renames;
}
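// For example (hypothetical names), "ALTER TYPE ks.address RENAME street TO road"
// yields a renames map containing street -> road, applied below.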
protected UserType makeUpdatedType(UserType toUpdate) throws InvalidRequestException
{
List<ByteBuffer> newNames = new ArrayList<>(toUpdate.fieldNames());
List<AbstractType<?>> newTypes = new ArrayList<>(toUpdate.fieldTypes());
for (Map.Entry<ColumnIdentifier, ColumnIdentifier> entry : renames.entrySet())
{
ColumnIdentifier from = entry.getKey();
ColumnIdentifier to = entry.getValue();
int idx = getIdxOfField(toUpdate, from);
if (idx < 0)
throw new InvalidRequestException(String.format("Unknown field %s in type %s", from, name));
newNames.set(idx, to.bytes);
}
UserType updated = new UserType(toUpdate.keyspace, toUpdate.name, newNames, newTypes);
CreateTypeStatement.checkForDuplicateNames(updated);
return updated;
}
}
}
| daidong/GraphTrek | src/java/org/apache/cassandra/cql3/statements/AlterTypeStatement.java | Java | apache-2.0 | 14,513 |
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Small CIFAR10-based MLP with fully connected layers.
"""
from neon.backends import gen_backend
from neon.data import DataIterator, load_cifar10
from neon.initializers import Uniform
from neon.layers import GeneralizedCost, Affine
from neon.models import Model
from neon.optimizers import GradientDescentMomentum
from neon.transforms import Rectlin, Logistic, CrossEntropyBinary, Misclassification
from neon.callbacks.callbacks import Callbacks
from neon.util.argparser import NeonArgparser
# parse the command line arguments
parser = NeonArgparser(__doc__)
args = parser.parse_args()
# hyperparameters
batch_size = 128
num_epochs = args.epochs
# setup backend
be = gen_backend(backend=args.backend,
batch_size=batch_size,
rng_seed=args.rng_seed,
device_id=args.device_id,
default_dtype=args.datatype)
(X_train, y_train), (X_test, y_test), nclass = load_cifar10(path=args.data_dir)
train = DataIterator(X_train, y_train, nclass=nclass)
test = DataIterator(X_test, y_test, nclass=nclass)
init_uni = Uniform(low=-0.1, high=0.1)
opt_gdm = GradientDescentMomentum(learning_rate=0.01, momentum_coef=0.9)
# set up the model layers
layers = []
layers.append(Affine(nout=200, init=init_uni, activation=Rectlin()))
layers.append(Affine(nout=10, init=init_uni, activation=Logistic(shortcut=True)))
cost = GeneralizedCost(costfunc=CrossEntropyBinary())
mlp = Model(layers=layers)
# configure callbacks
callbacks = Callbacks(mlp, train, output_file=args.output_file,
valid_set=test, valid_freq=args.validation_freq,
progress_bar=args.progress_bar)
mlp.fit(train, optimizer=opt_gdm, num_epochs=num_epochs, cost=cost, callbacks=callbacks)
print(mlp.eval(test, metric=Misclassification()))
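# Example invocation (assumes the standard NeonArgparser options backing
# args.backend and args.epochs used above; adjust flags/paths to your install):
#   python cifar10.py --backend cpu --epochs 5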
| chetan51/neon | examples/cifar10.py | Python | apache-2.0 | 2,510 |
/*
* Copyright (C) 2016 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.functional.subcomponent;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import javax.inject.Qualifier;
/**
* A qualifier representing an unbound type, to verify that the compiler does not attempt to
* generate code depending on it.
*/
@Documented
@Retention(RUNTIME)
@Qualifier
@interface Unbound {}
| ronshapiro/dagger | javatests/dagger/functional/subcomponent/Unbound.java | Java | apache-2.0 | 1,017 |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2; -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is js-ctypes.
*
* The Initial Developer of the Original Code is
* The Mozilla Foundation <http://www.mozilla.org/>.
* Portions created by the Initial Developer are Copyright (C) 2009
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Dan Witte <[email protected]>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "CTypes.h"
#include "Library.h"
#include "jsnum.h"
#include <limits>
#include <math.h>
#if defined(XP_WIN) || defined(XP_OS2)
#include <float.h>
#endif
#if defined(SOLARIS)
#include <ieeefp.h>
#endif
#ifdef HAVE_SSIZE_T
#include <sys/types.h>
#endif
using namespace std;
namespace js {
namespace ctypes {
/*******************************************************************************
** Helper classes
*******************************************************************************/
class ScopedContextThread
{
public:
ScopedContextThread(JSContext* cx) : mCx(cx) { JS_SetContextThread(cx); }
~ScopedContextThread() { JS_ClearContextThread(mCx); }
private:
JSContext* mCx;
};
/*******************************************************************************
** JSAPI function prototypes
*******************************************************************************/
static JSBool ConstructAbstract(JSContext* cx, uintN argc, jsval* vp);
namespace CType {
static JSBool ConstructData(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructBasic(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static void Trace(JSTracer* trc, JSObject* obj);
static void Finalize(JSContext* cx, JSObject* obj);
static void FinalizeProtoClass(JSContext* cx, JSObject* obj);
static JSBool PrototypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool NameGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool SizeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool PtrGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp);
static JSBool CreateArray(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
static JSBool HasInstance(JSContext* cx, JSObject* obj, const jsval* v, JSBool* bp);
}
namespace PointerType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool TargetTypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool ContentsGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool ContentsSetter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict,
jsval* vp);
static JSBool IsNull(JSContext* cx, uintN argc, jsval* vp);
}
namespace ArrayType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool ElementTypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool LengthGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool Getter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp);
static JSBool Setter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict, jsval* vp);
static JSBool AddressOfElement(JSContext* cx, uintN argc, jsval* vp);
}
namespace StructType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool FieldsArrayGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool FieldGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool FieldSetter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict,
jsval* vp);
static JSBool AddressOfField(JSContext* cx, uintN argc, jsval* vp);
static JSBool Define(JSContext* cx, uintN argc, jsval* vp);
}
namespace FunctionType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* typeObj,
JSObject* dataObj, JSObject* fnObj, JSObject* thisObj);
static JSBool Call(JSContext* cx, uintN argc, jsval* vp);
static JSBool ArgTypesGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool ReturnTypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool ABIGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp);
static JSBool IsVariadicGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
}
namespace CClosure {
static void Trace(JSTracer* trc, JSObject* obj);
static void Finalize(JSContext* cx, JSObject* obj);
// libffi callback
static void ClosureStub(ffi_cif* cif, void* result, void** args,
void* userData);
}
namespace CData {
static void Finalize(JSContext* cx, JSObject* obj);
static JSBool ValueGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
static JSBool ValueSetter(JSContext* cx, JSObject* obj, jsid idval,
JSBool strict, jsval* vp);
static JSBool Address(JSContext* cx, uintN argc, jsval* vp);
static JSBool ReadString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
}
// Int64Base provides functions common to Int64 and UInt64.
namespace Int64Base {
JSObject* Construct(JSContext* cx, JSObject* proto, JSUint64 data,
bool isUnsigned);
JSUint64 GetInt(JSContext* cx, JSObject* obj);
JSBool ToString(JSContext* cx, JSObject* obj, uintN argc, jsval* vp,
bool isUnsigned);
JSBool ToSource(JSContext* cx, JSObject* obj, uintN argc, jsval* vp,
bool isUnsigned);
static void Finalize(JSContext* cx, JSObject* obj);
}
namespace Int64 {
static JSBool Construct(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
static JSBool Compare(JSContext* cx, uintN argc, jsval* vp);
static JSBool Lo(JSContext* cx, uintN argc, jsval* vp);
static JSBool Hi(JSContext* cx, uintN argc, jsval* vp);
static JSBool Join(JSContext* cx, uintN argc, jsval* vp);
}
namespace UInt64 {
static JSBool Construct(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
static JSBool Compare(JSContext* cx, uintN argc, jsval* vp);
static JSBool Lo(JSContext* cx, uintN argc, jsval* vp);
static JSBool Hi(JSContext* cx, uintN argc, jsval* vp);
static JSBool Join(JSContext* cx, uintN argc, jsval* vp);
}
/*******************************************************************************
** JSClass definitions and initialization functions
*******************************************************************************/
// Class representing the 'ctypes' object itself. This exists to contain the
// JSCTypesCallbacks set of function pointers.
static JSClass sCTypesGlobalClass = {
"ctypes",
JSCLASS_HAS_RESERVED_SLOTS(CTYPESGLOBAL_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSClass sCABIClass = {
"CABI",
JSCLASS_HAS_RESERVED_SLOTS(CABI_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
JSCLASS_NO_OPTIONAL_MEMBERS
};
// Class representing ctypes.{C,Pointer,Array,Struct,Function}Type.prototype.
// This exists to give said prototypes a class of "CType", and to provide
// reserved slots for stashing various other prototype objects.
static JSClass sCTypeProtoClass = {
"CType",
JSCLASS_HAS_RESERVED_SLOTS(CTYPEPROTO_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::FinalizeProtoClass,
NULL, NULL, ConstructAbstract, ConstructAbstract, NULL, NULL, NULL, NULL
};
// Class representing ctypes.CData.prototype and the 'prototype' properties
// of CTypes. This exists to give said prototypes a class of "CData".
static JSClass sCDataProtoClass = {
"CData",
0,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSClass sCTypeClass = {
"CType",
JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS) | JSCLASS_MARK_IS_TRACE,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize,
NULL, NULL, CType::ConstructData, CType::ConstructData, NULL,
CType::HasInstance, JS_CLASS_TRACE(CType::Trace), NULL
};
static JSClass sCDataClass = {
"CData",
JSCLASS_HAS_RESERVED_SLOTS(CDATA_SLOTS),
JS_PropertyStub, JS_PropertyStub, ArrayType::Getter, ArrayType::Setter,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CData::Finalize,
NULL, NULL, FunctionType::Call, FunctionType::Call, NULL, NULL, NULL, NULL
};
static JSClass sCClosureClass = {
"CClosure",
JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS) | JSCLASS_MARK_IS_TRACE,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, JS_CLASS_TRACE(CClosure::Trace), NULL
};
#define CTYPESFN_FLAGS \
(JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
#define CTYPESCTOR_FLAGS \
(CTYPESFN_FLAGS | JSFUN_CONSTRUCTOR)
#define CTYPESPROP_FLAGS \
(JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
#define CDATAFN_FLAGS \
(JSPROP_READONLY | JSPROP_PERMANENT)
static JSPropertySpec sCTypeProps[] = {
{ "name", 0, CTYPESPROP_FLAGS, CType::NameGetter, NULL },
{ "size", 0, CTYPESPROP_FLAGS, CType::SizeGetter, NULL },
{ "ptr", 0, CTYPESPROP_FLAGS, CType::PtrGetter, NULL },
{ "prototype", 0, CTYPESPROP_FLAGS, CType::PrototypeGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sCTypeFunctions[] = {
JS_FN("array", CType::CreateArray, 0, CTYPESFN_FLAGS),
JS_FN("toString", CType::ToString, 0, CTYPESFN_FLAGS),
JS_FN("toSource", CType::ToSource, 0, CTYPESFN_FLAGS),
JS_FS_END
};
static JSPropertySpec sCDataProps[] = {
{ "value", 0, JSPROP_SHARED | JSPROP_PERMANENT,
CData::ValueGetter, CData::ValueSetter },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sCDataFunctions[] = {
JS_FN("address", CData::Address, 0, CDATAFN_FLAGS),
JS_FN("readString", CData::ReadString, 0, CDATAFN_FLAGS),
JS_FN("toSource", CData::ToSource, 0, CDATAFN_FLAGS),
JS_FN("toString", CData::ToSource, 0, CDATAFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sPointerFunction =
JS_FN("PointerType", PointerType::Create, 1, CTYPESCTOR_FLAGS);
static JSPropertySpec sPointerProps[] = {
{ "targetType", 0, CTYPESPROP_FLAGS, PointerType::TargetTypeGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sPointerInstanceFunctions[] = {
JS_FN("isNull", PointerType::IsNull, 0, CTYPESFN_FLAGS),
JS_FS_END
};
static JSPropertySpec sPointerInstanceProps[] = {
{ "contents", 0, JSPROP_SHARED | JSPROP_PERMANENT,
PointerType::ContentsGetter, PointerType::ContentsSetter },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sArrayFunction =
JS_FN("ArrayType", ArrayType::Create, 1, CTYPESCTOR_FLAGS);
static JSPropertySpec sArrayProps[] = {
{ "elementType", 0, CTYPESPROP_FLAGS, ArrayType::ElementTypeGetter, NULL },
{ "length", 0, CTYPESPROP_FLAGS, ArrayType::LengthGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sArrayInstanceFunctions[] = {
JS_FN("addressOfElement", ArrayType::AddressOfElement, 1, CDATAFN_FLAGS),
JS_FS_END
};
static JSPropertySpec sArrayInstanceProps[] = {
{ "length", 0, JSPROP_SHARED | JSPROP_READONLY | JSPROP_PERMANENT,
ArrayType::LengthGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sStructFunction =
JS_FN("StructType", StructType::Create, 2, CTYPESCTOR_FLAGS);
static JSPropertySpec sStructProps[] = {
{ "fields", 0, CTYPESPROP_FLAGS, StructType::FieldsArrayGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSFunctionSpec sStructFunctions[] = {
JS_FN("define", StructType::Define, 1, CDATAFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sStructInstanceFunctions[] = {
JS_FN("addressOfField", StructType::AddressOfField, 1, CDATAFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sFunctionFunction =
JS_FN("FunctionType", FunctionType::Create, 2, CTYPESCTOR_FLAGS);
static JSPropertySpec sFunctionProps[] = {
{ "argTypes", 0, CTYPESPROP_FLAGS, FunctionType::ArgTypesGetter, NULL },
{ "returnType", 0, CTYPESPROP_FLAGS, FunctionType::ReturnTypeGetter, NULL },
{ "abi", 0, CTYPESPROP_FLAGS, FunctionType::ABIGetter, NULL },
{ "isVariadic", 0, CTYPESPROP_FLAGS, FunctionType::IsVariadicGetter, NULL },
{ 0, 0, 0, NULL, NULL }
};
static JSClass sInt64ProtoClass = {
"Int64",
0,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSClass sUInt64ProtoClass = {
"UInt64",
0,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSClass sInt64Class = {
"Int64",
JSCLASS_HAS_RESERVED_SLOTS(INT64_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Int64Base::Finalize,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSClass sUInt64Class = {
"UInt64",
JSCLASS_HAS_RESERVED_SLOTS(INT64_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Int64Base::Finalize,
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSFunctionSpec sInt64StaticFunctions[] = {
JS_FN("compare", Int64::Compare, 2, CTYPESFN_FLAGS),
JS_FN("lo", Int64::Lo, 1, CTYPESFN_FLAGS),
JS_FN("hi", Int64::Hi, 1, CTYPESFN_FLAGS),
JS_FN("join", Int64::Join, 2, CTYPESFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sUInt64StaticFunctions[] = {
JS_FN("compare", UInt64::Compare, 2, CTYPESFN_FLAGS),
JS_FN("lo", UInt64::Lo, 1, CTYPESFN_FLAGS),
JS_FN("hi", UInt64::Hi, 1, CTYPESFN_FLAGS),
JS_FN("join", UInt64::Join, 2, CTYPESFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sInt64Functions[] = {
JS_FN("toString", Int64::ToString, 0, CTYPESFN_FLAGS),
JS_FN("toSource", Int64::ToSource, 0, CTYPESFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sUInt64Functions[] = {
JS_FN("toString", UInt64::ToString, 0, CTYPESFN_FLAGS),
JS_FN("toSource", UInt64::ToSource, 0, CTYPESFN_FLAGS),
JS_FS_END
};
static JSFunctionSpec sModuleFunctions[] = {
JS_FN("open", Library::Open, 1, CTYPESFN_FLAGS),
JS_FN("cast", CData::Cast, 2, CTYPESFN_FLAGS),
JS_FN("libraryName", Library::Name, 1, CTYPESFN_FLAGS),
JS_FS_END
};
static inline bool FloatIsFinite(jsdouble f) {
#ifdef WIN32
return _finite(f) != 0;
#else
return finite(f);
#endif
}
JS_ALWAYS_INLINE JSString*
NewUCString(JSContext* cx, const AutoString& from)
{
return JS_NewUCStringCopyN(cx, from.begin(), from.length());
}
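// Round 'val' up to the next multiple of 'align'. The bit trick below assumes
// 'align' is a power of two (as ffi type alignments are); e.g. Align(5, 4) == 8
// and Align(8, 4) == 8.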
JS_ALWAYS_INLINE size_t
Align(size_t val, size_t align)
{
return ((val - 1) | (align - 1)) + 1;
}
static ABICode
GetABICode(JSContext* cx, JSObject* obj)
{
// make sure we have an object representing a CABI class,
// and extract the enumerated class type from the reserved slot.
if (JS_GET_CLASS(cx, obj) != &sCABIClass)
return INVALID_ABI;
jsval result;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_ABICODE, &result));
return ABICode(JSVAL_TO_INT(result));
}
JSErrorFormatString ErrorFormatString[CTYPESERR_LIMIT] = {
#define MSG_DEF(name, number, count, exception, format) \
{ format, count, exception } ,
#include "ctypes.msg"
#undef MSG_DEF
};
const JSErrorFormatString*
GetErrorMessage(void* userRef, const char* locale, const uintN errorNumber)
{
if (0 < errorNumber && errorNumber < CTYPESERR_LIMIT)
return &ErrorFormatString[errorNumber];
return NULL;
}
JSBool
TypeError(JSContext* cx, const char* expected, jsval actual)
{
JSString* str = JS_ValueToSource(cx, actual);
JSAutoByteString bytes;
const char* src;
if (str) {
src = bytes.encode(cx, str);
if (!src)
return false;
} else {
JS_ClearPendingException(cx);
src = "<<error converting value to string>>";
}
JS_ReportErrorNumber(cx, GetErrorMessage, NULL,
CTYPESMSG_TYPE_ERROR, expected, src);
return false;
}
static JSObject*
InitCTypeClass(JSContext* cx, JSObject* parent)
{
JSFunction* fun = JS_DefineFunction(cx, parent, "CType", ConstructAbstract, 0,
CTYPESCTOR_FLAGS);
if (!fun)
return NULL;
JSObject* ctor = JS_GetFunctionObject(fun);
JSObject* fnproto = JS_GetPrototype(cx, ctor);
JS_ASSERT(ctor);
JS_ASSERT(fnproto);
// Set up ctypes.CType.prototype.
JSObject* prototype = JS_NewObject(cx, &sCTypeProtoClass, fnproto, parent);
if (!prototype)
return NULL;
if (!JS_DefineProperty(cx, ctor, "prototype", OBJECT_TO_JSVAL(prototype),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
if (!JS_DefineProperty(cx, prototype, "constructor", OBJECT_TO_JSVAL(ctor),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
// Define properties and functions common to all CTypes.
if (!JS_DefineProperties(cx, prototype, sCTypeProps) ||
!JS_DefineFunctions(cx, prototype, sCTypeFunctions))
return NULL;
if (!JS_FreezeObject(cx, ctor) || !JS_FreezeObject(cx, prototype))
return NULL;
return prototype;
}
static JSObject*
InitCDataClass(JSContext* cx, JSObject* parent, JSObject* CTypeProto)
{
JSFunction* fun = JS_DefineFunction(cx, parent, "CData", ConstructAbstract, 0,
CTYPESCTOR_FLAGS);
if (!fun)
return NULL;
JSObject* ctor = JS_GetFunctionObject(fun);
JS_ASSERT(ctor);
// Set up ctypes.CData.__proto__ === ctypes.CType.prototype.
// (Note that 'ctypes.CData instanceof Function' is still true, thanks to the
// prototype chain.)
if (!JS_SetPrototype(cx, ctor, CTypeProto))
return NULL;
// Set up ctypes.CData.prototype.
JSObject* prototype = JS_NewObject(cx, &sCDataProtoClass, NULL, parent);
if (!prototype)
return NULL;
if (!JS_DefineProperty(cx, ctor, "prototype", OBJECT_TO_JSVAL(prototype),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
if (!JS_DefineProperty(cx, prototype, "constructor", OBJECT_TO_JSVAL(ctor),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
// Define properties and functions common to all CDatas.
if (!JS_DefineProperties(cx, prototype, sCDataProps) ||
!JS_DefineFunctions(cx, prototype, sCDataFunctions))
return NULL;
if (//!JS_FreezeObject(cx, prototype) || // XXX fixme - see bug 541212!
!JS_FreezeObject(cx, ctor))
return NULL;
return prototype;
}
static JSBool
DefineABIConstant(JSContext* cx,
JSObject* parent,
const char* name,
ABICode code)
{
JSObject* obj = JS_DefineObject(cx, parent, name, &sCABIClass, NULL,
JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT);
if (!obj)
return false;
if (!JS_SetReservedSlot(cx, obj, SLOT_ABICODE, INT_TO_JSVAL(code)))
return false;
return JS_FreezeObject(cx, obj);
}
// Set up a single type constructor for
// ctypes.{Pointer,Array,Struct,Function}Type.
static JSBool
InitTypeConstructor(JSContext* cx,
JSObject* parent,
JSObject* CTypeProto,
JSObject* CDataProto,
JSFunctionSpec spec,
JSFunctionSpec* fns,
JSPropertySpec* props,
JSFunctionSpec* instanceFns,
JSPropertySpec* instanceProps,
JSObject*& typeProto,
JSObject*& dataProto)
{
JSFunction* fun = JS_DefineFunction(cx, parent, spec.name, spec.call,
spec.nargs, spec.flags);
if (!fun)
return false;
JSObject* obj = JS_GetFunctionObject(fun);
if (!obj)
return false;
// Set up the .prototype and .prototype.constructor properties.
typeProto = JS_NewObject(cx, &sCTypeProtoClass, CTypeProto, parent);
if (!typeProto)
return false;
// Define property before proceeding, for GC safety.
if (!JS_DefineProperty(cx, obj, "prototype", OBJECT_TO_JSVAL(typeProto),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return false;
if (fns && !JS_DefineFunctions(cx, typeProto, fns))
return false;
if (!JS_DefineProperties(cx, typeProto, props))
return false;
if (!JS_DefineProperty(cx, typeProto, "constructor", OBJECT_TO_JSVAL(obj),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return false;
// Stash ctypes.{Pointer,Array,Struct}Type.prototype on a reserved slot of
// the type constructor, for faster lookup.
if (!JS_SetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, OBJECT_TO_JSVAL(typeProto)))
return false;
// Create an object to serve as the common ancestor for all CData objects
// created from the given type constructor. This has ctypes.CData.prototype
// as its prototype, such that it inherits the properties and functions
// common to all CDatas.
dataProto = JS_NewObject(cx, &sCDataProtoClass, CDataProto, parent);
if (!dataProto)
return false;
js::AutoObjectRooter protoroot(cx, dataProto);
// Define functions and properties on the 'dataProto' object that are common
// to all CData objects created from this type constructor. (These will
// become functions and properties on CData objects created from this type.)
if (instanceFns && !JS_DefineFunctions(cx, dataProto, instanceFns))
return false;
if (instanceProps && !JS_DefineProperties(cx, dataProto, instanceProps))
return false;
if (!JS_FreezeObject(cx, obj) ||
//!JS_FreezeObject(cx, dataProto) || // XXX fixme - see bug 541212!
!JS_FreezeObject(cx, typeProto))
return false;
return true;
}
JSObject*
InitInt64Class(JSContext* cx,
JSObject* parent,
JSClass* clasp,
JSNative construct,
JSFunctionSpec* fs,
JSFunctionSpec* static_fs)
{
// Init type class and constructor
JSObject* prototype = JS_InitClass(cx, parent, NULL, clasp, construct,
0, NULL, fs, NULL, static_fs);
if (!prototype)
return NULL;
JSObject* ctor = JS_GetConstructor(cx, prototype);
if (!ctor)
return NULL;
if (!JS_FreezeObject(cx, ctor))
return NULL;
// Stash ctypes.{Int64,UInt64}.prototype on a reserved slot of the 'join'
// function.
jsval join;
ASSERT_OK(JS_GetProperty(cx, ctor, "join", &join));
if (!JS_SetReservedSlot(cx, JSVAL_TO_OBJECT(join), SLOT_FN_INT64PROTO,
OBJECT_TO_JSVAL(prototype)))
return NULL;
if (!JS_FreezeObject(cx, prototype))
return NULL;
return prototype;
}
static JSBool
AttachProtos(JSContext* cx, JSObject* proto, JSObject** protos)
{
// For a given 'proto' of [[Class]] "CTypeProto", attach each of the 'protos'
// to the appropriate CTypeProtoSlot. (SLOT_UINT64PROTO is the last slot
// of [[Class]] "CTypeProto".)
for (JSUint32 i = 0; i <= SLOT_UINT64PROTO; ++i) {
if (!JS_SetReservedSlot(cx, proto, i, OBJECT_TO_JSVAL(protos[i])))
return false;
}
return true;
}
JSBool
InitTypeClasses(JSContext* cx, JSObject* parent)
{
// Initialize the ctypes.CType class. This acts as an abstract base class for
// the various types, and provides the common API functions. It has:
// * [[Class]] "Function"
// * __proto__ === Function.prototype
// * A constructor that throws a TypeError. (You can't construct an
// abstract type!)
// * 'prototype' property:
// * [[Class]] "CTypeProto"
// * __proto__ === Function.prototype
// * A constructor that throws a TypeError. (You can't construct an
// abstract type instance!)
// * 'constructor' property === ctypes.CType
// * Provides properties and functions common to all CTypes.
JSObject* CTypeProto = InitCTypeClass(cx, parent);
if (!CTypeProto)
return false;
// Initialize the ctypes.CData class. This acts as an abstract base class for
// instances of the various types, and provides the common API functions.
// It has:
// * [[Class]] "Function"
// * __proto__ === Function.prototype
// * A constructor that throws a TypeError. (You can't construct an
// abstract type instance!)
// * 'prototype' property:
// * [[Class]] "CDataProto"
// * 'constructor' property === ctypes.CData
// * Provides properties and functions common to all CDatas.
JSObject* CDataProto = InitCDataClass(cx, parent, CTypeProto);
if (!CDataProto)
return false;
// Create and attach the special class constructors: ctypes.PointerType,
// ctypes.ArrayType, ctypes.StructType, and ctypes.FunctionType.
// Each of these constructors 'c' has, respectively:
// * [[Class]] "Function"
// * __proto__ === Function.prototype
// * A constructor that creates a user-defined type.
// * 'prototype' property:
// * [[Class]] "CTypeProto"
// * __proto__ === ctypes.CType.prototype
// * 'constructor' property === 'c'
// We also construct an object 'p' to serve, given a type object 't'
// constructed from one of these type constructors, as
// 't.prototype.__proto__'. This object has:
// * [[Class]] "CDataProto"
// * __proto__ === ctypes.CData.prototype
// * Properties and functions common to all CDatas.
// Therefore an instance 't' of ctypes.{Pointer,Array,Struct,Function}Type
// will have, resp.:
// * [[Class]] "CType"
// * __proto__ === ctypes.{Pointer,Array,Struct,Function}Type.prototype
// * A constructor which creates and returns a CData object, containing
// binary data of the given type.
// * 'prototype' property:
// * [[Class]] "CDataProto"
// * __proto__ === 'p', the prototype object from above
// * 'constructor' property === 't'
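// To make the layering concrete, a small sketch of how this surfaces to
// (privileged) JavaScript once everything below is wired up -- illustrative
// only, using the builtins and instance functions defined in this file:
//   let t = ctypes.unsigned_int.array(4);  // CType::CreateArray -> ArrayType
//   let d = t();                           // constructing a CType yields a CData
//   d.addressOfElement(0);                 // sArrayInstanceFunctions
//   d.address().isNull();                  // sCDataFunctions / sPointerInstanceFunctions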
JSObject* protos[CTYPEPROTO_SLOTS];
if (!InitTypeConstructor(cx, parent, CTypeProto, CDataProto,
sPointerFunction, NULL, sPointerProps,
sPointerInstanceFunctions, sPointerInstanceProps,
protos[SLOT_POINTERPROTO], protos[SLOT_POINTERDATAPROTO]))
return false;
js::AutoObjectRooter proot(cx, protos[SLOT_POINTERDATAPROTO]);
if (!InitTypeConstructor(cx, parent, CTypeProto, CDataProto,
sArrayFunction, NULL, sArrayProps,
sArrayInstanceFunctions, sArrayInstanceProps,
protos[SLOT_ARRAYPROTO], protos[SLOT_ARRAYDATAPROTO]))
return false;
js::AutoObjectRooter aroot(cx, protos[SLOT_ARRAYDATAPROTO]);
if (!InitTypeConstructor(cx, parent, CTypeProto, CDataProto,
sStructFunction, sStructFunctions, sStructProps,
sStructInstanceFunctions, NULL,
protos[SLOT_STRUCTPROTO], protos[SLOT_STRUCTDATAPROTO]))
return false;
js::AutoObjectRooter sroot(cx, protos[SLOT_STRUCTDATAPROTO]);
if (!InitTypeConstructor(cx, parent, CTypeProto, CDataProto,
sFunctionFunction, NULL, sFunctionProps, NULL, NULL,
protos[SLOT_FUNCTIONPROTO], protos[SLOT_FUNCTIONDATAPROTO]))
return false;
js::AutoObjectRooter froot(cx, protos[SLOT_FUNCTIONDATAPROTO]);
protos[SLOT_CDATAPROTO] = CDataProto;
// Create and attach the ctypes.{Int64,UInt64} constructors.
// Each of these has, respectively:
// * [[Class]] "Function"
// * __proto__ === Function.prototype
// * A constructor that creates a ctypes.{Int64,UInt64} object, respectively.
// * 'prototype' property:
// * [[Class]] {"Int64Proto","UInt64Proto"}
// * 'constructor' property === ctypes.{Int64,UInt64}
protos[SLOT_INT64PROTO] = InitInt64Class(cx, parent, &sInt64ProtoClass,
Int64::Construct, sInt64Functions, sInt64StaticFunctions);
if (!protos[SLOT_INT64PROTO])
return false;
protos[SLOT_UINT64PROTO] = InitInt64Class(cx, parent, &sUInt64ProtoClass,
UInt64::Construct, sUInt64Functions, sUInt64StaticFunctions);
if (!protos[SLOT_UINT64PROTO])
return false;
// Attach the prototypes just created to each of ctypes.CType.prototype,
// and the special type constructors, so we can access them when constructing
// instances of those types.
if (!AttachProtos(cx, CTypeProto, protos) ||
!AttachProtos(cx, protos[SLOT_POINTERPROTO], protos) ||
!AttachProtos(cx, protos[SLOT_ARRAYPROTO], protos) ||
!AttachProtos(cx, protos[SLOT_STRUCTPROTO], protos) ||
!AttachProtos(cx, protos[SLOT_FUNCTIONPROTO], protos))
return false;
// Attach objects representing ABI constants.
if (!DefineABIConstant(cx, parent, "default_abi", ABI_DEFAULT) ||
!DefineABIConstant(cx, parent, "stdcall_abi", ABI_STDCALL) ||
!DefineABIConstant(cx, parent, "winapi_abi", ABI_WINAPI))
return false;
// Create objects representing the builtin types, and attach them to the
// ctypes object. Each type object 't' has:
// * [[Class]] "CType"
// * __proto__ === ctypes.CType.prototype
// * A constructor which creates and returns a CData object, containing
// binary data of the given type.
// * 'prototype' property:
// * [[Class]] "CDataProto"
// * __proto__ === ctypes.CData.prototype
// * 'constructor' property === 't'
#define DEFINE_TYPE(name, type, ffiType) \
JSObject* typeObj_##name = \
CType::DefineBuiltin(cx, parent, #name, CTypeProto, CDataProto, #name, \
TYPE_##name, INT_TO_JSVAL(sizeof(type)), \
INT_TO_JSVAL(ffiType.alignment), &ffiType); \
if (!typeObj_##name) \
return false;
#include "typedefs.h"
// Alias 'ctypes.unsigned' as 'ctypes.unsigned_int', since they represent
// the same type in C.
if (!JS_DefineProperty(cx, parent, "unsigned",
OBJECT_TO_JSVAL(typeObj_unsigned_int), NULL, NULL,
JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return false;
// Create objects representing the special types void_t and voidptr_t.
JSObject* typeObj =
CType::DefineBuiltin(cx, parent, "void_t", CTypeProto, CDataProto, "void",
TYPE_void_t, JSVAL_VOID, JSVAL_VOID, &ffi_type_void);
if (!typeObj)
return false;
typeObj = PointerType::CreateInternal(cx, typeObj);
if (!typeObj)
return false;
if (!JS_DefineProperty(cx, parent, "voidptr_t", OBJECT_TO_JSVAL(typeObj),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return false;
return true;
}
bool
IsCTypesGlobal(JSContext* cx, JSObject* obj)
{
return JS_GET_CLASS(cx, obj) == &sCTypesGlobalClass;
}
// Get the JSCTypesCallbacks struct from the 'ctypes' object 'obj'.
JSCTypesCallbacks*
GetCallbacks(JSContext* cx, JSObject* obj)
{
JS_ASSERT(IsCTypesGlobal(cx, obj));
jsval result;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_CALLBACKS, &result));
if (JSVAL_IS_VOID(result))
return NULL;
return static_cast<JSCTypesCallbacks*>(JSVAL_TO_PRIVATE(result));
}
JS_BEGIN_EXTERN_C
JS_PUBLIC_API(JSBool)
JS_InitCTypesClass(JSContext* cx, JSObject* global)
{
// attach ctypes property to global object
JSObject* ctypes = JS_NewObject(cx, &sCTypesGlobalClass, NULL, NULL);
if (!ctypes)
return false;
if (!JS_DefineProperty(cx, global, "ctypes", OBJECT_TO_JSVAL(ctypes),
JS_PropertyStub, JS_StrictPropertyStub, JSPROP_READONLY | JSPROP_PERMANENT)) {
return false;
}
if (!InitTypeClasses(cx, ctypes))
return false;
// attach API functions
if (!JS_DefineFunctions(cx, ctypes, sModuleFunctions))
return false;
// Seal the ctypes object, to prevent modification.
return JS_FreezeObject(cx, ctypes);
}
JS_PUBLIC_API(JSBool)
JS_SetCTypesCallbacks(JSContext* cx,
JSObject* ctypesObj,
JSCTypesCallbacks* callbacks)
{
JS_ASSERT(callbacks);
JS_ASSERT(IsCTypesGlobal(cx, ctypesObj));
// Set the callbacks on a reserved slot.
return JS_SetReservedSlot(cx, ctypesObj, SLOT_CALLBACKS,
PRIVATE_TO_JSVAL(callbacks));
}
JS_END_EXTERN_C
/*******************************************************************************
** Type conversion functions
*******************************************************************************/
// Enforce some sanity checks on type widths and properties.
// Where the architecture is 64-bit, make sure it's LP64 or LLP64. (ctypes.int
// autoconverts to a primitive JS number; to support ILP64 architectures, it
// would need to autoconvert to an Int64 object instead. Therefore we enforce
// this invariant here.)
JS_STATIC_ASSERT(sizeof(bool) == 1 || sizeof(bool) == 4);
JS_STATIC_ASSERT(sizeof(char) == 1);
JS_STATIC_ASSERT(sizeof(short) == 2);
JS_STATIC_ASSERT(sizeof(int) == 4);
JS_STATIC_ASSERT(sizeof(unsigned) == 4);
JS_STATIC_ASSERT(sizeof(long) == 4 || sizeof(long) == 8);
JS_STATIC_ASSERT(sizeof(long long) == 8);
JS_STATIC_ASSERT(sizeof(size_t) == sizeof(uintptr_t));
JS_STATIC_ASSERT(sizeof(float) == 4);
JS_STATIC_ASSERT(sizeof(PRFuncPtr) == sizeof(void*));
JS_STATIC_ASSERT(numeric_limits<double>::is_signed);
// Templated helper to convert FromType to TargetType, for the default case
// where the trivial POD constructor will do.
template<class TargetType, class FromType>
struct ConvertImpl {
static JS_ALWAYS_INLINE TargetType Convert(FromType d) {
return TargetType(d);
}
};
#ifdef _MSC_VER
// MSVC can't perform double to unsigned __int64 conversion when the
// double is greater than 2^63 - 1. Help it along a little.
template<>
struct ConvertImpl<JSUint64, jsdouble> {
static JS_ALWAYS_INLINE JSUint64 Convert(jsdouble d) {
return d > 0x7fffffffffffffffui64 ?
JSUint64(d - 0x8000000000000000ui64) + 0x8000000000000000ui64 :
JSUint64(d);
}
};
#endif
template<class TargetType, class FromType>
static JS_ALWAYS_INLINE TargetType Convert(FromType d)
{
return ConvertImpl<TargetType, FromType>::Convert(d);
}
template<class TargetType, class FromType>
static JS_ALWAYS_INLINE bool IsAlwaysExact()
{
// Return 'true' if TargetType can always exactly represent FromType.
// This means that:
// 1) TargetType must be the same or more bits wide as FromType. For integers
// represented in 'n' bits, unsigned variants will have 'n' digits while
// signed will have 'n - 1'. For floating point types, 'digits' is the
// mantissa width.
// 2) If FromType is signed, TargetType must also be signed. (Floating point
// types are always signed.)
// 3) If TargetType is an exact integral type, FromType must be also.
if (numeric_limits<TargetType>::digits < numeric_limits<FromType>::digits)
return false;
if (numeric_limits<FromType>::is_signed &&
!numeric_limits<TargetType>::is_signed)
return false;
if (!numeric_limits<FromType>::is_exact &&
numeric_limits<TargetType>::is_exact)
return false;
return true;
}
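// Illustration: IsAlwaysExact<jsdouble, jsint>() is true (53 mantissa digits
// cover every 32-bit int), while IsAlwaysExact<jsint, jsdouble>() and
// IsAlwaysExact<float, jsint>() are false.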
// Templated helper to determine if FromType 'i' converts losslessly to
// TargetType 'j'. Default case where both types are the same signedness.
template<class TargetType, class FromType, bool TargetSigned, bool FromSigned>
struct IsExactImpl {
static JS_ALWAYS_INLINE bool Test(FromType i, TargetType j) {
JS_STATIC_ASSERT(numeric_limits<TargetType>::is_exact);
return FromType(j) == i;
}
};
// Specialization where TargetType is unsigned, FromType is signed.
template<class TargetType, class FromType>
struct IsExactImpl<TargetType, FromType, false, true> {
static JS_ALWAYS_INLINE bool Test(FromType i, TargetType j) {
JS_STATIC_ASSERT(numeric_limits<TargetType>::is_exact);
return i >= 0 && FromType(j) == i;
}
};
// Specialization where TargetType is signed, FromType is unsigned.
template<class TargetType, class FromType>
struct IsExactImpl<TargetType, FromType, true, false> {
static JS_ALWAYS_INLINE bool Test(FromType i, TargetType j) {
JS_STATIC_ASSERT(numeric_limits<TargetType>::is_exact);
return TargetType(i) >= 0 && FromType(j) == i;
}
};
// Convert FromType 'i' to TargetType 'result', returning true iff 'result'
// is an exact representation of 'i'.
template<class TargetType, class FromType>
static JS_ALWAYS_INLINE bool ConvertExact(FromType i, TargetType* result)
{
// Require that TargetType is integral, to simplify conversion.
JS_STATIC_ASSERT(numeric_limits<TargetType>::is_exact);
*result = Convert<TargetType>(i);
// See if we can avoid a dynamic check.
if (IsAlwaysExact<TargetType, FromType>())
return true;
// Return 'true' if 'i' is exactly representable in 'TargetType'.
return IsExactImpl<TargetType,
FromType,
numeric_limits<TargetType>::is_signed,
numeric_limits<FromType>::is_signed>::Test(i, *result);
}
// Templated helper to determine if Type 'i' is negative. Default case
// where IntegerType is unsigned.
template<class Type, bool IsSigned>
struct IsNegativeImpl {
static JS_ALWAYS_INLINE bool Test(Type i) {
return false;
}
};
// Specialization where Type is signed.
template<class Type>
struct IsNegativeImpl<Type, true> {
static JS_ALWAYS_INLINE bool Test(Type i) {
return i < 0;
}
};
// Determine whether Type 'i' is negative.
template<class Type>
static JS_ALWAYS_INLINE bool IsNegative(Type i)
{
return IsNegativeImpl<Type, numeric_limits<Type>::is_signed>::Test(i);
}
// Implicitly convert val to bool, allowing JSBool, jsint, and jsdouble
// arguments numerically equal to 0 or 1.
static bool
jsvalToBool(JSContext* cx, jsval val, bool* result)
{
if (JSVAL_IS_BOOLEAN(val)) {
*result = JSVAL_TO_BOOLEAN(val) != JS_FALSE;
return true;
}
if (JSVAL_IS_INT(val)) {
jsint i = JSVAL_TO_INT(val);
*result = i != 0;
return i == 0 || i == 1;
}
if (JSVAL_IS_DOUBLE(val)) {
jsdouble d = JSVAL_TO_DOUBLE(val);
*result = d != 0;
// Allow -0.
return d == 1 || d == 0;
}
// Don't silently convert null to bool. It's probably a mistake.
return false;
}
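// (So JS 'true', 0, 1 and 1.0 all convert, while 2, "1" and null are rejected
// and become a TypeError in ImplicitConvert's TYPE_bool case.)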
// Implicitly convert val to IntegerType, allowing JSBool, jsint, jsdouble,
// Int64, UInt64, and CData integer types 't' where all values of 't' are
// representable by IntegerType.
template<class IntegerType>
static bool
jsvalToInteger(JSContext* cx, jsval val, IntegerType* result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
if (JSVAL_IS_INT(val)) {
// Make sure the integer fits in the allotted precision, and has the right
// sign.
jsint i = JSVAL_TO_INT(val);
return ConvertExact(i, result);
}
if (JSVAL_IS_DOUBLE(val)) {
// Don't silently lose bits here -- check that val really is an
// integer value, and has the right sign.
jsdouble d = JSVAL_TO_DOUBLE(val);
return ConvertExact(d, result);
}
if (!JSVAL_IS_PRIMITIVE(val)) {
JSObject* obj = JSVAL_TO_OBJECT(val);
if (CData::IsCData(cx, obj)) {
JSObject* typeObj = CData::GetCType(cx, obj);
void* data = CData::GetData(cx, obj);
// Check whether the source type is always representable, with exact
// precision, by the target type. If it is, convert the value.
switch (CType::GetTypeCode(cx, typeObj)) {
#define DEFINE_INT_TYPE(name, fromType, ffiType) \
case TYPE_##name: \
if (!IsAlwaysExact<IntegerType, fromType>()) \
return false; \
*result = IntegerType(*static_cast<fromType*>(data)); \
return true;
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#include "typedefs.h"
case TYPE_void_t:
case TYPE_bool:
case TYPE_float:
case TYPE_double:
case TYPE_float32_t:
case TYPE_float64_t:
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char:
case TYPE_jschar:
case TYPE_pointer:
case TYPE_function:
case TYPE_array:
case TYPE_struct:
// Not a compatible number type.
return false;
}
}
if (Int64::IsInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSInt64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
if (UInt64::IsUInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSUint64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
return false;
}
if (JSVAL_IS_BOOLEAN(val)) {
// Implicitly promote boolean values to 0 or 1, like C.
*result = JSVAL_TO_BOOLEAN(val);
JS_ASSERT(*result == 0 || *result == 1);
return true;
}
// Don't silently convert null to an integer. It's probably a mistake.
return false;
}
// Implicitly convert val to FloatType, allowing jsint, jsdouble,
// Int64, UInt64, and CData numeric types 't' where all values of 't' are
// representable by FloatType.
template<class FloatType>
static bool
jsvalToFloat(JSContext *cx, jsval val, FloatType* result)
{
JS_STATIC_ASSERT(!numeric_limits<FloatType>::is_exact);
// The following casts may silently throw away some bits, but there's
// no good way around it. Sternly requiring that the 64-bit double
// argument be exactly representable as a 32-bit float is
// unrealistic: it would allow 1/2 to pass but not 1/3.
if (JSVAL_IS_INT(val)) {
*result = FloatType(JSVAL_TO_INT(val));
return true;
}
if (JSVAL_IS_DOUBLE(val)) {
*result = FloatType(JSVAL_TO_DOUBLE(val));
return true;
}
if (!JSVAL_IS_PRIMITIVE(val)) {
JSObject* obj = JSVAL_TO_OBJECT(val);
if (CData::IsCData(cx, obj)) {
JSObject* typeObj = CData::GetCType(cx, obj);
void* data = CData::GetData(cx, obj);
// Check whether the source type is always representable, with exact
// precision, by the target type. If it is, convert the value.
switch (CType::GetTypeCode(cx, typeObj)) {
#define DEFINE_FLOAT_TYPE(name, fromType, ffiType) \
case TYPE_##name: \
if (!IsAlwaysExact<FloatType, fromType>()) \
return false; \
*result = FloatType(*static_cast<fromType*>(data)); \
return true;
#define DEFINE_INT_TYPE(x, y, z) DEFINE_FLOAT_TYPE(x, y, z)
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#include "typedefs.h"
case TYPE_void_t:
case TYPE_bool:
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char:
case TYPE_jschar:
case TYPE_pointer:
case TYPE_function:
case TYPE_array:
case TYPE_struct:
// Not a compatible number type.
return false;
}
}
}
// Don't silently convert true to 1.0 or false to 0.0, even though C/C++
// does it. It's likely to be a mistake.
return false;
}
template<class IntegerType>
static bool
StringToInteger(JSContext* cx, JSString* string, IntegerType* result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
const jschar* cp = string->getChars(NULL);
if (!cp)
return false;
const jschar* end = cp + string->length();
if (cp == end)
return false;
IntegerType sign = 1;
if (cp[0] == '-') {
if (!numeric_limits<IntegerType>::is_signed)
return false;
sign = -1;
++cp;
}
// Assume base-10, unless the string begins with '0x' or '0X'.
IntegerType base = 10;
if (end - cp > 2 && cp[0] == '0' && (cp[1] == 'x' || cp[1] == 'X')) {
cp += 2;
base = 16;
}
// Scan the string left to right and build the number,
// checking for valid characters 0 - 9, a - f, A - F and overflow.
IntegerType i = 0;
while (cp != end) {
jschar c = *cp++;
if (c >= '0' && c <= '9')
c -= '0';
else if (base == 16 && c >= 'a' && c <= 'f')
c = c - 'a' + 10;
else if (base == 16 && c >= 'A' && c <= 'F')
c = c - 'A' + 10;
else
return false;
IntegerType ii = i;
i = ii * base + sign * c;
if (i / base != ii) // overflow
return false;
}
*result = i;
return true;
}
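// For example, a string of "0x1f" converts to 31 and "-10" to -10 (for a signed
// IntegerType); a stray character (e.g. "12a") or overflow makes this return false.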
// Implicitly convert val to IntegerType, allowing jsint, jsdouble,
// Int64, UInt64, and optionally a decimal or hexadecimal string argument.
// (This is common code shared by jsvalToSize and the Int64/UInt64 constructors.)
template<class IntegerType>
static bool
jsvalToBigInteger(JSContext* cx,
jsval val,
bool allowString,
IntegerType* result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
if (JSVAL_IS_INT(val)) {
// Make sure the integer fits in the allotted precision, and has the right
// sign.
jsint i = JSVAL_TO_INT(val);
return ConvertExact(i, result);
}
if (JSVAL_IS_DOUBLE(val)) {
// Don't silently lose bits here -- check that val really is an
// integer value, and has the right sign.
jsdouble d = JSVAL_TO_DOUBLE(val);
return ConvertExact(d, result);
}
if (allowString && JSVAL_IS_STRING(val)) {
// Allow conversion from base-10 or base-16 strings, provided the result
// fits in IntegerType. (This allows an Int64 or UInt64 object to be passed
// to the JS array element operator, which will automatically call
// toString() on the object for us.)
return StringToInteger(cx, JSVAL_TO_STRING(val), result);
}
if (!JSVAL_IS_PRIMITIVE(val)) {
// Allow conversion from an Int64 or UInt64 object directly.
JSObject* obj = JSVAL_TO_OBJECT(val);
if (UInt64::IsUInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSUint64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
if (Int64::IsInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSInt64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
}
return false;
}
// Implicitly convert val to a size value, where the size value is represented
// by size_t but must also fit in a jsdouble.
static bool
jsvalToSize(JSContext* cx, jsval val, bool allowString, size_t* result)
{
if (!jsvalToBigInteger(cx, val, allowString, result))
return false;
// Also check that the result fits in a jsdouble.
return Convert<size_t>(jsdouble(*result)) == *result;
}
// Implicitly convert val to IntegerType, allowing jsint, jsdouble,
// Int64, UInt64, and optionally a decimal or hexadecimal string argument.
// (This is common code shared by jsvalToSize and the Int64/UInt64 constructors.)
template<class IntegerType>
static bool
jsidToBigInteger(JSContext* cx,
jsid val,
bool allowString,
IntegerType* result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
if (JSID_IS_INT(val)) {
// Make sure the integer fits in the allotted precision, and has the right
// sign.
jsint i = JSID_TO_INT(val);
return ConvertExact(i, result);
}
if (allowString && JSID_IS_STRING(val)) {
// Allow conversion from base-10 or base-16 strings, provided the result
// fits in IntegerType. (This allows an Int64 or UInt64 object to be passed
// to the JS array element operator, which will automatically call
// toString() on the object for us.)
return StringToInteger(cx, JSID_TO_STRING(val), result);
}
if (JSID_IS_OBJECT(val)) {
// Allow conversion from an Int64 or UInt64 object directly.
JSObject* obj = JSID_TO_OBJECT(val);
if (UInt64::IsUInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSUint64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
if (Int64::IsInt64(cx, obj)) {
// Make sure the integer fits in IntegerType.
JSInt64 i = Int64Base::GetInt(cx, obj);
return ConvertExact(i, result);
}
}
return false;
}
// Implicitly convert val to a size value, where the size value is represented
// by size_t but must also fit in a jsdouble.
static bool
jsidToSize(JSContext* cx, jsid val, bool allowString, size_t* result)
{
if (!jsidToBigInteger(cx, val, allowString, result))
return false;
// Also check that the result fits in a jsdouble.
return Convert<size_t>(jsdouble(*result)) == *result;
}
// Implicitly convert a size value to a jsval, ensuring that the size_t value
// fits in a jsdouble.
static JSBool
SizeTojsval(JSContext* cx, size_t size, jsval* result)
{
if (Convert<size_t>(jsdouble(size)) != size) {
JS_ReportError(cx, "size overflow");
return false;
}
return JS_NewNumberValue(cx, jsdouble(size), result);
}
// Forcefully convert val to IntegerType when explicitly requested.
template<class IntegerType>
static bool
jsvalToIntegerExplicit(JSContext* cx, jsval val, IntegerType* result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
if (JSVAL_IS_DOUBLE(val)) {
// Convert -Inf, Inf, and NaN to 0; otherwise, convert by C-style cast.
jsdouble d = JSVAL_TO_DOUBLE(val);
*result = FloatIsFinite(d) ? IntegerType(d) : 0;
return true;
}
if (!JSVAL_IS_PRIMITIVE(val)) {
// Convert Int64 and UInt64 values by C-style cast.
JSObject* obj = JSVAL_TO_OBJECT(val);
if (Int64::IsInt64(cx, obj)) {
JSInt64 i = Int64Base::GetInt(cx, obj);
*result = IntegerType(i);
return true;
}
if (UInt64::IsUInt64(cx, obj)) {
JSUint64 i = Int64Base::GetInt(cx, obj);
*result = IntegerType(i);
return true;
}
}
return false;
}
// Forcefully convert val to a pointer value when explicitly requested.
static bool
jsvalToPtrExplicit(JSContext* cx, jsval val, uintptr_t* result)
{
if (JSVAL_IS_INT(val)) {
// jsint always fits in intptr_t. If the integer is negative, cast through
// an intptr_t intermediate to sign-extend.
jsint i = JSVAL_TO_INT(val);
*result = i < 0 ? uintptr_t(intptr_t(i)) : uintptr_t(i);
return true;
}
if (JSVAL_IS_DOUBLE(val)) {
jsdouble d = JSVAL_TO_DOUBLE(val);
if (d < 0) {
// Cast through an intptr_t intermediate to sign-extend.
intptr_t i = Convert<intptr_t>(d);
if (jsdouble(i) != d)
return false;
*result = uintptr_t(i);
return true;
}
// Don't silently lose bits here -- check that val really is an
// integer value, and has the right sign.
*result = Convert<uintptr_t>(d);
return jsdouble(*result) == d;
}
if (!JSVAL_IS_PRIMITIVE(val)) {
JSObject* obj = JSVAL_TO_OBJECT(val);
if (Int64::IsInt64(cx, obj)) {
JSInt64 i = Int64Base::GetInt(cx, obj);
intptr_t p = intptr_t(i);
// Make sure the integer fits in the allotted precision.
if (JSInt64(p) != i)
return false;
*result = uintptr_t(p);
return true;
}
if (UInt64::IsUInt64(cx, obj)) {
JSUint64 i = Int64Base::GetInt(cx, obj);
// Make sure the integer fits in the allotted precision.
*result = uintptr_t(i);
return JSUint64(*result) == i;
}
}
return false;
}
template<class IntegerType, class CharType, size_t N, class AP>
void
IntegerToString(IntegerType i, jsuint radix, Vector<CharType, N, AP>& result)
{
JS_STATIC_ASSERT(numeric_limits<IntegerType>::is_exact);
// The buffer must be big enough for all the bits of IntegerType to fit,
// in base-2, including '-'.
CharType buffer[sizeof(IntegerType) * 8 + 1];
CharType* end = buffer + sizeof(buffer) / sizeof(CharType);
CharType* cp = end;
// Build the string in reverse. We use multiplication and subtraction
// instead of modulus because that's much faster.
const bool isNegative = IsNegative(i);
size_t sign = isNegative ? -1 : 1;
do {
IntegerType ii = i / IntegerType(radix);
size_t index = sign * size_t(i - ii * IntegerType(radix));
*--cp = "0123456789abcdefghijklmnopqrstuvwxyz"[index];
i = ii;
} while (i != 0);
if (isNegative)
*--cp = '-';
JS_ASSERT(cp >= buffer);
result.append(cp, end);
}
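// For example, IntegerToString(255, 16, v) appends "ff" and IntegerToString(-255, 16, v)
// appends "-ff"; the size_t 'sign' multiply above relies on unsigned wraparound to
// negate the digit value for negative inputs.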
template<class CharType>
static size_t
strnlen(const CharType* begin, size_t max)
{
for (const CharType* s = begin; s != begin + max; ++s)
if (*s == 0)
return s - begin;
return max;
}
// Convert C binary value 'data' of CType 'typeObj' to a JS primitive, where
// possible; otherwise, construct and return a CData object. The following
// semantics apply when constructing a CData object for return:
// * If 'wantPrimitive' is true, the caller indicates that 'result' must be
// a JS primitive, and ConvertToJS will fail if 'result' would be a CData
// object. Otherwise:
// * If a CData object 'parentObj' is supplied, the new CData object is
// dependent on the given parent and its buffer refers to a slice of the
// parent's buffer.
// * If 'parentObj' is null, the new CData object may or may not own its
// resulting buffer depending on the 'ownResult' argument.
JSBool
ConvertToJS(JSContext* cx,
JSObject* typeObj,
JSObject* parentObj,
void* data,
bool wantPrimitive,
bool ownResult,
jsval* result)
{
JS_ASSERT(!parentObj || CData::IsCData(cx, parentObj));
JS_ASSERT(!parentObj || !ownResult);
JS_ASSERT(!wantPrimitive || !ownResult);
TypeCode typeCode = CType::GetTypeCode(cx, typeObj);
switch (typeCode) {
case TYPE_void_t:
*result = JSVAL_VOID;
break;
case TYPE_bool:
*result = *static_cast<bool*>(data) ? JSVAL_TRUE : JSVAL_FALSE;
break;
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
type value = *static_cast<type*>(data); \
if (sizeof(type) < 4) \
*result = INT_TO_JSVAL(jsint(value)); \
else if (!JS_NewNumberValue(cx, jsdouble(value), result)) \
return false; \
break; \
}
#define DEFINE_WRAPPED_INT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
/* Return an Int64 or UInt64 object - do not convert to a JS number. */ \
JSUint64 value; \
JSObject* proto; \
if (!numeric_limits<type>::is_signed) { \
value = *static_cast<type*>(data); \
/* Get ctypes.UInt64.prototype from ctypes.CType.prototype. */ \
proto = CType::GetProtoFromType(cx, typeObj, SLOT_UINT64PROTO); \
} else { \
value = JSInt64(*static_cast<type*>(data)); \
/* Get ctypes.Int64.prototype from ctypes.CType.prototype. */ \
proto = CType::GetProtoFromType(cx, typeObj, SLOT_INT64PROTO); \
} \
\
JSObject* obj = Int64Base::Construct(cx, proto, value, \
!numeric_limits<type>::is_signed); \
if (!obj) \
return false; \
*result = OBJECT_TO_JSVAL(obj); \
break; \
}
#define DEFINE_FLOAT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
type value = *static_cast<type*>(data); \
if (!JS_NewNumberValue(cx, jsdouble(value), result)) \
return false; \
break; \
}
#define DEFINE_CHAR_TYPE(name, type, ffiType) \
case TYPE_##name: \
/* Convert to an integer. We have no idea what character encoding to */ \
/* use, if any. */ \
*result = INT_TO_JSVAL(*static_cast<type*>(data)); \
break;
#include "typedefs.h"
case TYPE_jschar: {
// Convert the jschar to a 1-character string.
JSString* str = JS_NewUCStringCopyN(cx, static_cast<jschar*>(data), 1);
if (!str)
return false;
*result = STRING_TO_JSVAL(str);
break;
}
case TYPE_pointer:
case TYPE_array:
case TYPE_struct: {
// We're about to create a new CData object to return. If the caller doesn't
// want this, return early.
if (wantPrimitive) {
JS_ReportError(cx, "cannot convert to primitive value");
return false;
}
JSObject* obj = CData::Create(cx, typeObj, parentObj, data, ownResult);
if (!obj)
return false;
*result = OBJECT_TO_JSVAL(obj);
break;
}
case TYPE_function:
JS_NOT_REACHED("cannot return a FunctionType");
}
return true;
}
// Implicitly convert jsval 'val' to a C binary representation of CType
// 'targetType', storing the result in 'buffer'. Adequate space must be
// provided in 'buffer' by the caller. This function generally does minimal
// coercion between types. There are two cases in which this function is used:
// 1) The target buffer is internal to a CData object; we simply write data
// into it.
// 2) We are converting an argument for an ffi call, in which case 'isArgument'
// will be true. This allows us to handle a special case: if necessary,
// we can autoconvert a JS string primitive to a pointer-to-character type.
// In this case, ownership of the allocated string is handed off to the
// caller; 'freePointer' will be set to indicate this.
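// A minimal call sketch (hypothetical; 'int32Type' stands for a rooted CType
// such as the one backing ctypes.int32_t and is not a name defined in this
// file):
//
//   int32_t tmp;
//   if (!ImplicitConvert(cx, val, int32Type, &tmp, false, NULL))
//     return false;  // a TypeError (or a harder failure) has been reported
//
// Callers below, e.g. ArrayType::Setter and PointerType::ContentsSetter, use
// exactly this non-argument form; only the ffi-call path passes
// 'isArgument == true' together with a non-null 'freePointer'.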
JSBool
ImplicitConvert(JSContext* cx,
jsval val,
JSObject* targetType,
void* buffer,
bool isArgument,
bool* freePointer)
{
JS_ASSERT(CType::IsSizeDefined(cx, targetType));
// First, check if val is a CData object of type targetType.
JSObject* sourceData = NULL;
JSObject* sourceType = NULL;
if (!JSVAL_IS_PRIMITIVE(val) &&
CData::IsCData(cx, JSVAL_TO_OBJECT(val))) {
sourceData = JSVAL_TO_OBJECT(val);
sourceType = CData::GetCType(cx, sourceData);
// If the types are equal, copy the buffer contained within the CData.
// (Note that the buffers may overlap partially or completely.)
if (CType::TypesEqual(cx, sourceType, targetType)) {
size_t size = CType::GetSize(cx, sourceType);
memmove(buffer, CData::GetData(cx, sourceData), size);
return true;
}
}
TypeCode targetCode = CType::GetTypeCode(cx, targetType);
switch (targetCode) {
case TYPE_bool: {
// Do not implicitly lose bits, but allow the values 0, 1, and -0.
// Programs can convert explicitly, if needed, using `Boolean(v)` or `!!v`.
bool result;
if (!jsvalToBool(cx, val, &result))
return TypeError(cx, "boolean", val);
*static_cast<bool*>(buffer) = result;
break;
}
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
/* Do not implicitly lose bits. */ \
type result; \
if (!jsvalToInteger(cx, val, &result)) \
return TypeError(cx, #name, val); \
*static_cast<type*>(buffer) = result; \
break; \
}
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_FLOAT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
type result; \
if (!jsvalToFloat(cx, val, &result)) \
return TypeError(cx, #name, val); \
*static_cast<type*>(buffer) = result; \
break; \
}
#define DEFINE_CHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_JSCHAR_TYPE(name, type, ffiType) \
case TYPE_##name: { \
/* Convert from a 1-character string, regardless of encoding, */ \
/* or from an integer, provided the result fits in 'type'. */ \
type result; \
if (JSVAL_IS_STRING(val)) { \
JSString* str = JSVAL_TO_STRING(val); \
if (str->length() != 1) \
return TypeError(cx, #name, val); \
const jschar *chars = str->getChars(cx); \
if (!chars) \
return false; \
result = chars[0]; \
} else if (!jsvalToInteger(cx, val, &result)) { \
return TypeError(cx, #name, val); \
} \
*static_cast<type*>(buffer) = result; \
break; \
}
#include "typedefs.h"
case TYPE_pointer: {
if (JSVAL_IS_NULL(val)) {
// Convert to a null pointer.
*static_cast<void**>(buffer) = NULL;
break;
}
JSObject* baseType = PointerType::GetBaseType(cx, targetType);
if (sourceData) {
// First, determine if the targetType is ctypes.void_t.ptr.
TypeCode sourceCode = CType::GetTypeCode(cx, sourceType);
void* sourceBuffer = CData::GetData(cx, sourceData);
bool voidptrTarget = CType::GetTypeCode(cx, baseType) == TYPE_void_t;
if (sourceCode == TYPE_pointer && voidptrTarget) {
// Autoconvert if targetType is ctypes.voidptr_t.
*static_cast<void**>(buffer) = *static_cast<void**>(sourceBuffer);
break;
}
if (sourceCode == TYPE_array) {
// Autoconvert an array to a ctypes.void_t.ptr or to
// sourceType.elementType.ptr, just like C.
JSObject* elementType = ArrayType::GetBaseType(cx, sourceType);
if (voidptrTarget || CType::TypesEqual(cx, baseType, elementType)) {
*static_cast<void**>(buffer) = sourceBuffer;
break;
}
}
} else if (isArgument && JSVAL_IS_STRING(val)) {
// Convert the string for the ffi call. This requires allocating space
// which the caller assumes ownership of.
// TODO: Extend this so we can safely convert strings at other times also.
JSString* sourceString = JSVAL_TO_STRING(val);
size_t sourceLength = sourceString->length();
const jschar* sourceChars = sourceString->getChars(cx);
if (!sourceChars)
return false;
switch (CType::GetTypeCode(cx, baseType)) {
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char: {
// Convert from UTF-16 to UTF-8.
size_t nbytes =
js_GetDeflatedUTF8StringLength(cx, sourceChars, sourceLength);
if (nbytes == (size_t) -1)
return false;
char** charBuffer = static_cast<char**>(buffer);
*charBuffer = js_array_new<char>(nbytes + 1);
if (!*charBuffer) {
JS_ReportAllocationOverflow(cx);
return false;
}
ASSERT_OK(js_DeflateStringToUTF8Buffer(cx, sourceChars, sourceLength,
*charBuffer, &nbytes));
(*charBuffer)[nbytes] = 0;
*freePointer = true;
break;
}
case TYPE_jschar: {
// Copy the jschar string data. (We could provide direct access to the
// JSString's buffer, but this approach is safer if the caller happens
// to modify the string.)
jschar** jscharBuffer = static_cast<jschar**>(buffer);
*jscharBuffer = js_array_new<jschar>(sourceLength + 1);
if (!*jscharBuffer) {
JS_ReportAllocationOverflow(cx);
return false;
}
*freePointer = true;
memcpy(*jscharBuffer, sourceChars, sourceLength * sizeof(jschar));
(*jscharBuffer)[sourceLength] = 0;
break;
}
default:
return TypeError(cx, "pointer", val);
}
break;
}
return TypeError(cx, "pointer", val);
}
case TYPE_array: {
JSObject* baseType = ArrayType::GetBaseType(cx, targetType);
size_t targetLength = ArrayType::GetLength(cx, targetType);
if (JSVAL_IS_STRING(val)) {
JSString* sourceString = JSVAL_TO_STRING(val);
size_t sourceLength = sourceString->length();
const jschar* sourceChars = sourceString->getChars(cx);
if (!sourceChars)
return false;
switch (CType::GetTypeCode(cx, baseType)) {
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char: {
// Convert from UTF-16 to UTF-8.
size_t nbytes =
js_GetDeflatedUTF8StringLength(cx, sourceChars, sourceLength);
if (nbytes == (size_t) -1)
return false;
if (targetLength < nbytes) {
JS_ReportError(cx, "ArrayType has insufficient length");
return false;
}
char* charBuffer = static_cast<char*>(buffer);
ASSERT_OK(js_DeflateStringToUTF8Buffer(cx, sourceChars, sourceLength,
charBuffer, &nbytes));
if (targetLength > nbytes)
charBuffer[nbytes] = 0;
break;
}
case TYPE_jschar: {
// Copy the string data, jschar for jschar, including the terminator
// if there's space.
if (targetLength < sourceLength) {
JS_ReportError(cx, "ArrayType has insufficient length");
return false;
}
memcpy(buffer, sourceChars, sourceLength * sizeof(jschar));
if (targetLength > sourceLength)
static_cast<jschar*>(buffer)[sourceLength] = 0;
break;
}
default:
return TypeError(cx, "array", val);
}
} else if (!JSVAL_IS_PRIMITIVE(val) &&
JS_IsArrayObject(cx, JSVAL_TO_OBJECT(val))) {
// Convert each element of the array by calling ImplicitConvert.
JSObject* sourceArray = JSVAL_TO_OBJECT(val);
jsuint sourceLength;
if (!JS_GetArrayLength(cx, sourceArray, &sourceLength) ||
targetLength != size_t(sourceLength)) {
JS_ReportError(cx, "ArrayType length does not match source array length");
return false;
}
// Convert into an intermediate, in case of failure.
size_t elementSize = CType::GetSize(cx, baseType);
size_t arraySize = elementSize * targetLength;
AutoPtr<char>::Array intermediate(js_array_new<char>(arraySize));
if (!intermediate) {
JS_ReportAllocationOverflow(cx);
return false;
}
for (jsuint i = 0; i < sourceLength; ++i) {
js::AutoValueRooter item(cx);
if (!JS_GetElement(cx, sourceArray, i, item.jsval_addr()))
return false;
char* data = intermediate.get() + elementSize * i;
if (!ImplicitConvert(cx, item.jsval_value(), baseType, data, false, NULL))
return false;
}
memcpy(buffer, intermediate.get(), arraySize);
} else {
// Don't implicitly convert to a string. Users can convert explicitly
// with `String(x)` or `""+x`.
return TypeError(cx, "array", val);
}
break;
}
case TYPE_struct: {
if (!JSVAL_IS_PRIMITIVE(val) && !sourceData) {
// Enumerate the properties of the object; if they match the struct
// specification, convert the fields.
JSObject* obj = JSVAL_TO_OBJECT(val);
JSObject* iter = JS_NewPropertyIterator(cx, obj);
if (!iter)
return false;
js::AutoObjectRooter iterroot(cx, iter);
// Convert into an intermediate, in case of failure.
size_t structSize = CType::GetSize(cx, targetType);
AutoPtr<char>::Array intermediate(js_array_new<char>(structSize));
if (!intermediate) {
JS_ReportAllocationOverflow(cx);
return false;
}
jsid id;
size_t i = 0;
while (1) {
if (!JS_NextProperty(cx, iter, &id))
return false;
if (JSID_IS_VOID(id))
break;
if (!JSID_IS_STRING(id)) {
JS_ReportError(cx, "property name is not a string");
return false;
}
JSFlatString *name = JSID_TO_FLAT_STRING(id);
const FieldInfo* field = StructType::LookupField(cx, targetType, name);
if (!field)
return false;
js::AutoValueRooter prop(cx);
if (!JS_GetPropertyById(cx, obj, id, prop.jsval_addr()))
return false;
// Convert the field via ImplicitConvert().
char* fieldData = intermediate.get() + field->mOffset;
if (!ImplicitConvert(cx, prop.jsval_value(), field->mType, fieldData, false, NULL))
return false;
++i;
}
const FieldInfoHash* fields = StructType::GetFieldInfo(cx, targetType);
if (i != fields->count()) {
JS_ReportError(cx, "missing fields");
return false;
}
memcpy(buffer, intermediate.get(), structSize);
break;
}
return TypeError(cx, "struct", val);
}
case TYPE_void_t:
case TYPE_function:
JS_NOT_REACHED("invalid type");
return false;
}
return true;
}
// Convert jsval 'val' to a C binary representation of CType 'targetType',
// storing the result in 'buffer'. This function is more forceful than
// ImplicitConvert.
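// For example, constructing a basic type goes through CType::ConstructBasic
// below, which funnels its argument into this function; assuming the usual
// ctypes bindings, `ctypes.int32_t("0x10")` should therefore store 16, since
// the integer cases here also accept a base-10 or base-16 string via
// StringToInteger.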
JSBool
ExplicitConvert(JSContext* cx, jsval val, JSObject* targetType, void* buffer)
{
// If ImplicitConvert succeeds, use that result.
if (ImplicitConvert(cx, val, targetType, buffer, false, NULL))
return true;
// If ImplicitConvert failed, and there is no pending exception, then assume
// hard failure (out of memory, or some other similarly serious condition).
// We store any pending exception in case we need to re-throw it.
js::AutoValueRooter ex(cx);
if (!JS_GetPendingException(cx, ex.jsval_addr()))
return false;
// Otherwise, assume soft failure. Clear the pending exception so that we
// can throw a different one as required.
JS_ClearPendingException(cx);
TypeCode type = CType::GetTypeCode(cx, targetType);
switch (type) {
case TYPE_bool: {
// Convert according to the ECMAScript ToBoolean() function.
JSBool result;
ASSERT_OK(JS_ValueToBoolean(cx, val, &result));
*static_cast<bool*>(buffer) = result != JS_FALSE;
break;
}
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
/* Convert numeric values with a C-style cast, and */ \
/* allow conversion from a base-10 or base-16 string. */ \
type result; \
if (!jsvalToIntegerExplicit(cx, val, &result) && \
(!JSVAL_IS_STRING(val) || \
!StringToInteger(cx, JSVAL_TO_STRING(val), &result))) \
return TypeError(cx, #name, val); \
*static_cast<type*>(buffer) = result; \
break; \
}
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_CHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_JSCHAR_TYPE(x, y, z) DEFINE_CHAR_TYPE(x, y, z)
#include "typedefs.h"
case TYPE_pointer: {
// Convert a number, Int64 object, or UInt64 object to a pointer.
uintptr_t result;
if (!jsvalToPtrExplicit(cx, val, &result))
return TypeError(cx, "pointer", val);
*static_cast<uintptr_t*>(buffer) = result;
break;
}
case TYPE_float32_t:
case TYPE_float64_t:
case TYPE_float:
case TYPE_double:
case TYPE_array:
case TYPE_struct:
// ImplicitConvert is sufficient. Re-throw the exception it generated.
JS_SetPendingException(cx, ex.jsval_value());
return false;
case TYPE_void_t:
case TYPE_function:
JS_NOT_REACHED("invalid type");
return false;
}
return true;
}
// Given a CType 'typeObj', generate a string describing the C type declaration
// corresponding to 'typeObj'. For instance, the CType constructed from
// 'ctypes.int32_t.ptr.array(4).ptr.ptr' will result in the type string
// 'int32_t*(**)[4]'.
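// As a further illustration (assuming the usual ctypes bindings), a function
// pointer type such as
//
//   ctypes.FunctionType(ctypes.default_abi, ctypes.int32_t, [ctypes.char.ptr]).ptr
//
// should be named 'int32_t(*)(char*)': the pointer contributes the '*', the
// function case wraps it in parens and appends the argument list on the
// right, and the return type supplies the base name on the left.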
static JSString*
BuildTypeName(JSContext* cx, JSObject* typeObj)
{
AutoString result;
// Walk the hierarchy of types, outermost to innermost, building up the type
// string. This consists of the base type, which goes on the left.
// Derived type modifiers (* and []) build from the inside outward, with
// pointers on the left and arrays on the right. An excellent description
// of the rules for building C type declarations can be found at:
// http://unixwiz.net/techtips/reading-cdecl.html
TypeCode prevGrouping = CType::GetTypeCode(cx, typeObj), currentGrouping;
while (1) {
currentGrouping = CType::GetTypeCode(cx, typeObj);
switch (currentGrouping) {
case TYPE_pointer: {
// Pointer types go on the left.
PrependString(result, "*");
typeObj = PointerType::GetBaseType(cx, typeObj);
prevGrouping = currentGrouping;
continue;
}
case TYPE_array: {
if (prevGrouping == TYPE_pointer) {
// Outer type is pointer, inner type is array. Grouping is required.
PrependString(result, "(");
AppendString(result, ")");
}
// Array types go on the right.
AppendString(result, "[");
size_t length;
if (ArrayType::GetSafeLength(cx, typeObj, &length))
IntegerToString(length, 10, result);
AppendString(result, "]");
typeObj = ArrayType::GetBaseType(cx, typeObj);
prevGrouping = currentGrouping;
continue;
}
case TYPE_function: {
FunctionInfo* fninfo = FunctionType::GetFunctionInfo(cx, typeObj);
// Add in the calling convention, if it's not cdecl.
ABICode abi = GetABICode(cx, fninfo->mABI);
if (abi == ABI_STDCALL)
PrependString(result, "__stdcall ");
else if (abi == ABI_WINAPI)
PrependString(result, "WINAPI ");
// Wrap the entire expression so far with parens.
PrependString(result, "(");
AppendString(result, ")");
// Argument list goes on the right.
AppendString(result, "(");
for (size_t i = 0; i < fninfo->mArgTypes.length(); ++i) {
JSString* argName = CType::GetName(cx, fninfo->mArgTypes[i]);
AppendString(result, argName);
if (i != fninfo->mArgTypes.length() - 1 ||
fninfo->mIsVariadic)
AppendString(result, ", ");
}
if (fninfo->mIsVariadic)
AppendString(result, "...");
AppendString(result, ")");
// Set 'typeObj' to the return type, and let the loop process it.
// 'prevGrouping' doesn't matter here, because functions cannot return
// arrays -- thus the parenthetical rules don't get tickled.
typeObj = fninfo->mReturnType;
continue;
}
default:
// Either a basic or struct type. Use the type's name as the base type.
break;
}
break;
}
// Stick the base type and derived type parts together.
JSString* baseName = CType::GetName(cx, typeObj);
PrependString(result, baseName);
return NewUCString(cx, result);
}
// Given a CType 'typeObj', generate a string 'result' such that 'eval(result)'
// would construct the same CType. If 'makeShort' is true, assume that any
// StructType 't' is bound to an in-scope variable of name 't.name', and use
// that variable in place of generating a string to construct the type 't'.
// (This means the type comparison function CType::TypesEqual will return true
// when comparing the input and output of BuildTypeSource, since struct
// equality is determined by strict JSObject pointer equality.)
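// For instance (illustrative), ctypes.int32_t.ptr serializes as
// "ctypes.int32_t.ptr", ctypes.voidptr_t as "ctypes.voidptr_t" (it is
// special-cased below), and ctypes.int32_t.array(4) as
// "ctypes.int32_t.array(4)"; evaluating each string should reconstruct a
// CType that compares equal under CType::TypesEqual.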
static void
BuildTypeSource(JSContext* cx,
JSObject* typeObj,
bool makeShort,
AutoString& result)
{
// Walk the types, building up the toSource() string.
switch (CType::GetTypeCode(cx, typeObj)) {
case TYPE_void_t:
#define DEFINE_TYPE(name, type, ffiType) \
case TYPE_##name:
#include "typedefs.h"
{
AppendString(result, "ctypes.");
JSString* nameStr = CType::GetName(cx, typeObj);
AppendString(result, nameStr);
break;
}
case TYPE_pointer: {
JSObject* baseType = PointerType::GetBaseType(cx, typeObj);
// Special-case ctypes.voidptr_t.
if (CType::GetTypeCode(cx, baseType) == TYPE_void_t) {
AppendString(result, "ctypes.voidptr_t");
break;
}
// Recursively build the source string, and append '.ptr'.
BuildTypeSource(cx, baseType, makeShort, result);
AppendString(result, ".ptr");
break;
}
case TYPE_function: {
FunctionInfo* fninfo = FunctionType::GetFunctionInfo(cx, typeObj);
AppendString(result, "ctypes.FunctionType(");
switch (GetABICode(cx, fninfo->mABI)) {
case ABI_DEFAULT:
AppendString(result, "ctypes.default_abi, ");
break;
case ABI_STDCALL:
AppendString(result, "ctypes.stdcall_abi, ");
break;
case ABI_WINAPI:
AppendString(result, "ctypes.winapi_abi, ");
break;
case INVALID_ABI:
JS_NOT_REACHED("invalid abi");
break;
}
// Recursively build the source string describing the function return and
// argument types.
BuildTypeSource(cx, fninfo->mReturnType, true, result);
if (fninfo->mArgTypes.length() > 0) {
AppendString(result, ", [");
for (size_t i = 0; i < fninfo->mArgTypes.length(); ++i) {
BuildTypeSource(cx, fninfo->mArgTypes[i], true, result);
if (i != fninfo->mArgTypes.length() - 1 ||
fninfo->mIsVariadic)
AppendString(result, ", ");
}
if (fninfo->mIsVariadic)
AppendString(result, "\"...\"");
AppendString(result, "]");
}
AppendString(result, ")");
break;
}
case TYPE_array: {
// Recursively build the source string, and append '.array(n)',
// where n is the array length, or the empty string if the array length
// is undefined.
JSObject* baseType = ArrayType::GetBaseType(cx, typeObj);
BuildTypeSource(cx, baseType, makeShort, result);
AppendString(result, ".array(");
size_t length;
if (ArrayType::GetSafeLength(cx, typeObj, &length))
IntegerToString(length, 10, result);
AppendString(result, ")");
break;
}
case TYPE_struct: {
JSString* name = CType::GetName(cx, typeObj);
if (makeShort) {
// Shorten the type declaration by assuming that StructType 't' is bound
// to an in-scope variable of name 't.name'.
AppendString(result, name);
break;
}
// Write the full struct declaration.
AppendString(result, "ctypes.StructType(\"");
AppendString(result, name);
AppendString(result, "\"");
// If it's an opaque struct, we're done.
if (!CType::IsSizeDefined(cx, typeObj)) {
AppendString(result, ")");
break;
}
AppendString(result, ", [");
const FieldInfoHash* fields = StructType::GetFieldInfo(cx, typeObj);
size_t length = fields->count();
Array<const FieldInfoHash::Entry*, 64> fieldsArray;
if (!fieldsArray.resize(length))
break;
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront())
fieldsArray[r.front().value.mIndex] = &r.front();
for (size_t i = 0; i < length; ++i) {
const FieldInfoHash::Entry* entry = fieldsArray[i];
AppendString(result, "{ \"");
AppendString(result, entry->key);
AppendString(result, "\": ");
BuildTypeSource(cx, entry->value.mType, true, result);
AppendString(result, " }");
if (i != length - 1)
AppendString(result, ", ");
}
AppendString(result, "])");
break;
}
}
}
// Given a CData object of CType 'typeObj' with binary value 'data', generate a
// string 'result' such that 'eval(result)' would construct a CData object with
// the same CType and containing the same binary value. This assumes that any
// StructType 't' is bound to an in-scope variable of name 't.name'. (This means
// the type comparison function CType::TypesEqual will return true when
// comparing the types, since struct equality is determined by strict JSObject
// pointer equality.) Further, if 'isImplicit' is true, ensure that the
// resulting string can ImplicitConvert successfully if passed to another data
// constructor. (This is important when called recursively, since fields of
// structs and arrays are converted with ImplicitConvert.)
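// For instance (illustrative), an int32_t CData holding 5 serializes simply
// as "5", an int64_t holding 5 as 'ctypes.Int64("5")', and a jschar as a
// quoted one-character string; pointers are additionally wrapped in their
// type constructor when 'isImplicit' is true so that the result survives a
// later ImplicitConvert.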
static JSBool
BuildDataSource(JSContext* cx,
JSObject* typeObj,
void* data,
bool isImplicit,
AutoString& result)
{
TypeCode type = CType::GetTypeCode(cx, typeObj);
switch (type) {
case TYPE_bool:
if (*static_cast<bool*>(data))
AppendString(result, "true");
else
AppendString(result, "false");
break;
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: \
/* Serialize as a primitive decimal integer. */ \
IntegerToString(*static_cast<type*>(data), 10, result); \
break;
#define DEFINE_WRAPPED_INT_TYPE(name, type, ffiType) \
case TYPE_##name: \
/* Serialize as a wrapped decimal integer. */ \
if (!numeric_limits<type>::is_signed) \
AppendString(result, "ctypes.UInt64(\""); \
else \
AppendString(result, "ctypes.Int64(\""); \
\
IntegerToString(*static_cast<type*>(data), 10, result); \
AppendString(result, "\")"); \
break;
#define DEFINE_FLOAT_TYPE(name, type, ffiType) \
case TYPE_##name: { \
/* Serialize as a primitive double. */ \
double fp = *static_cast<type*>(data); \
ToCStringBuf cbuf; \
char* str = NumberToCString(cx, &cbuf, fp); \
if (!str) { \
JS_ReportOutOfMemory(cx); \
return false; \
} \
\
result.append(str, strlen(str)); \
break; \
}
#define DEFINE_CHAR_TYPE(name, type, ffiType) \
case TYPE_##name: \
/* Serialize as an integer. */ \
IntegerToString(*static_cast<type*>(data), 10, result); \
break;
#include "typedefs.h"
case TYPE_jschar: {
// Serialize as a 1-character JS string.
JSString* str = JS_NewUCStringCopyN(cx, static_cast<jschar*>(data), 1);
if (!str)
return false;
// Escape characters, and quote as necessary.
JSString* src = JS_ValueToSource(cx, STRING_TO_JSVAL(str));
if (!src)
return false;
AppendString(result, src);
break;
}
case TYPE_pointer:
case TYPE_function: {
if (isImplicit) {
// The result must be able to ImplicitConvert successfully.
// Wrap in a type constructor, then serialize for ExplicitConvert.
BuildTypeSource(cx, typeObj, true, result);
AppendString(result, "(");
}
// Serialize the pointer value as a wrapped hexadecimal integer.
uintptr_t ptr = *static_cast<uintptr_t*>(data);
AppendString(result, "ctypes.UInt64(\"0x");
IntegerToString(ptr, 16, result);
AppendString(result, "\")");
if (isImplicit)
AppendString(result, ")");
break;
}
case TYPE_array: {
// Serialize each element of the array recursively. Each element must
// be able to ImplicitConvert successfully.
JSObject* baseType = ArrayType::GetBaseType(cx, typeObj);
AppendString(result, "[");
size_t length = ArrayType::GetLength(cx, typeObj);
size_t elementSize = CType::GetSize(cx, baseType);
for (size_t i = 0; i < length; ++i) {
char* element = static_cast<char*>(data) + elementSize * i;
if (!BuildDataSource(cx, baseType, element, true, result))
return false;
if (i + 1 < length)
AppendString(result, ", ");
}
AppendString(result, "]");
break;
}
case TYPE_struct: {
if (isImplicit) {
// The result must be able to ImplicitConvert successfully.
// Serialize the data as an object with properties, rather than
// a sequence of arguments to the StructType constructor.
AppendString(result, "{");
}
// Serialize each field of the struct recursively. Each field must
// be able to ImplicitConvert successfully.
const FieldInfoHash* fields = StructType::GetFieldInfo(cx, typeObj);
size_t length = fields->count();
Array<const FieldInfoHash::Entry*, 64> fieldsArray;
if (!fieldsArray.resize(length))
return false;
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront())
fieldsArray[r.front().value.mIndex] = &r.front();
for (size_t i = 0; i < length; ++i) {
const FieldInfoHash::Entry* entry = fieldsArray[i];
if (isImplicit) {
AppendString(result, "\"");
AppendString(result, entry->key);
AppendString(result, "\": ");
}
char* fieldData = static_cast<char*>(data) + entry->value.mOffset;
if (!BuildDataSource(cx, entry->value.mType, fieldData, true, result))
return false;
if (i + 1 != length)
AppendString(result, ", ");
}
if (isImplicit)
AppendString(result, "}");
break;
}
case TYPE_void_t:
JS_NOT_REACHED("invalid type");
break;
}
return true;
}
/*******************************************************************************
** JSAPI callback function implementations
*******************************************************************************/
JSBool
ConstructAbstract(JSContext* cx,
uintN argc,
jsval* vp)
{
// Calling an abstract base class constructor is disallowed.
JS_ReportError(cx, "cannot construct from abstract type");
return JS_FALSE;
}
/*******************************************************************************
** CType implementation
*******************************************************************************/
JSBool
CType::ConstructData(JSContext* cx,
uintN argc,
jsval* vp)
{
// get the callee object...
JSObject* obj = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
// How we construct the CData object depends on what type we represent.
// An instance 'd' of a CData object of type 't' has:
// * [[Class]] "CData"
// * __proto__ === t.prototype
switch (GetTypeCode(cx, obj)) {
case TYPE_void_t:
JS_ReportError(cx, "cannot construct from void_t");
return JS_FALSE;
case TYPE_function:
JS_ReportError(cx, "cannot construct from FunctionType; use FunctionType.ptr instead");
return JS_FALSE;
case TYPE_pointer:
return PointerType::ConstructData(cx, obj, argc, vp);
case TYPE_array:
return ArrayType::ConstructData(cx, obj, argc, vp);
case TYPE_struct:
return StructType::ConstructData(cx, obj, argc, vp);
default:
return ConstructBasic(cx, obj, argc, vp);
}
}
JSBool
CType::ConstructBasic(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp)
{
if (argc > 1) {
JS_ReportError(cx, "CType constructor takes zero or one argument");
return JS_FALSE;
}
// construct a CData object
JSObject* result = CData::Create(cx, obj, NULL, NULL, true);
if (!result)
return JS_FALSE;
if (argc == 1) {
if (!ExplicitConvert(cx, JS_ARGV(cx, vp)[0], obj, CData::GetData(cx, result)))
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSObject*
CType::Create(JSContext* cx,
JSObject* typeProto,
JSObject* dataProto,
TypeCode type,
JSString* name,
jsval size,
jsval align,
ffi_type* ffiType)
{
JSObject* parent = JS_GetParent(cx, typeProto);
JS_ASSERT(parent);
// Create a CType object with the properties and slots common to all CTypes.
// Each type object 't' has:
// * [[Class]] "CType"
// * __proto__ === 'typeProto'; one of ctypes.{CType,PointerType,ArrayType,
// StructType}.prototype
// * A constructor which creates and returns a CData object, containing
// binary data of the given type.
// * 'prototype' property:
// * [[Class]] "CDataProto"
// * __proto__ === 'dataProto'; an object containing properties and
// functions common to all CData objects of types derived from
// 'typeProto'. (For instance, this could be ctypes.CData.prototype
// for simple types, or something representing structs for StructTypes.)
// * 'constructor' property === 't'
// * Additional properties specified by 'ps', as appropriate for the
// specific type instance 't'.
JSObject* typeObj = JS_NewObject(cx, &sCTypeClass, typeProto, parent);
if (!typeObj)
return NULL;
js::AutoObjectRooter root(cx, typeObj);
// Set up the reserved slots.
if (!JS_SetReservedSlot(cx, typeObj, SLOT_TYPECODE, INT_TO_JSVAL(type)) ||
(ffiType && !JS_SetReservedSlot(cx, typeObj, SLOT_FFITYPE, PRIVATE_TO_JSVAL(ffiType))) ||
(name && !JS_SetReservedSlot(cx, typeObj, SLOT_NAME, STRING_TO_JSVAL(name))) ||
!JS_SetReservedSlot(cx, typeObj, SLOT_SIZE, size) ||
!JS_SetReservedSlot(cx, typeObj, SLOT_ALIGN, align))
return NULL;
if (dataProto) {
// Set up the 'prototype' and 'prototype.constructor' properties.
JSObject* prototype = JS_NewObject(cx, &sCDataProtoClass, dataProto, parent);
if (!prototype)
return NULL;
js::AutoObjectRooter protoroot(cx, prototype);
if (!JS_DefineProperty(cx, prototype, "constructor", OBJECT_TO_JSVAL(typeObj),
NULL, NULL, JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
// Set the 'prototype' object.
if (//!JS_FreezeObject(cx, prototype) || // XXX fixme - see bug 541212!
!JS_SetReservedSlot(cx, typeObj, SLOT_PROTO, OBJECT_TO_JSVAL(prototype)))
return NULL;
}
if (!JS_FreezeObject(cx, typeObj))
return NULL;
// Sanity-check the size and alignment: size % alignment should always
// be zero.
JS_ASSERT_IF(IsSizeDefined(cx, typeObj),
GetSize(cx, typeObj) % GetAlignment(cx, typeObj) == 0);
return typeObj;
}
JSObject*
CType::DefineBuiltin(JSContext* cx,
JSObject* parent,
const char* propName,
JSObject* typeProto,
JSObject* dataProto,
const char* name,
TypeCode type,
jsval size,
jsval align,
ffi_type* ffiType)
{
JSString* nameStr = JS_NewStringCopyZ(cx, name);
if (!nameStr)
return NULL;
js::AutoStringRooter nameRoot(cx, nameStr);
// Create a new CType object with the common properties and slots.
JSObject* typeObj = Create(cx, typeProto, dataProto, type, nameStr, size,
align, ffiType);
if (!typeObj)
return NULL;
// Define the CType as a 'propName' property on 'parent'.
if (!JS_DefineProperty(cx, parent, propName, OBJECT_TO_JSVAL(typeObj),
NULL, NULL, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return NULL;
return typeObj;
}
void
CType::Finalize(JSContext* cx, JSObject* obj)
{
// Make sure our TypeCode slot is legit. If it's not, bail.
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_TYPECODE, &slot) || JSVAL_IS_VOID(slot))
return;
// The contents of our slots depend on what kind of type we are.
switch (TypeCode(JSVAL_TO_INT(slot))) {
case TYPE_function: {
// Free the FunctionInfo.
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FNINFO, &slot));
if (!JSVAL_IS_VOID(slot))
js_delete(static_cast<FunctionInfo*>(JSVAL_TO_PRIVATE(slot)));
break;
}
case TYPE_struct: {
// Free the FieldInfoHash table.
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FIELDINFO, &slot));
if (!JSVAL_IS_VOID(slot)) {
void* info = JSVAL_TO_PRIVATE(slot);
js_delete(static_cast<FieldInfoHash*>(info));
}
}
// Fall through.
case TYPE_array: {
// Free the ffi_type info.
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FFITYPE, &slot));
if (!JSVAL_IS_VOID(slot)) {
ffi_type* ffiType = static_cast<ffi_type*>(JSVAL_TO_PRIVATE(slot));
js_array_delete(ffiType->elements);
js_delete(ffiType);
}
break;
}
default:
// Nothing to do here.
break;
}
}
void
CType::FinalizeProtoClass(JSContext* cx, JSObject* obj)
{
// Finalize the CTypeProto class. The only important bit here is our
// SLOT_CLOSURECX -- it contains the JSContext that was (lazily) instantiated
// for use with FunctionType closures. And if we're here, in this finalizer,
// we're guaranteed to not need it anymore. Note that this slot will only
// be set for the object (of class CTypeProto) ctypes.FunctionType.prototype.
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_CLOSURECX, &slot) || JSVAL_IS_VOID(slot))
return;
JSContext* closureCx = static_cast<JSContext*>(JSVAL_TO_PRIVATE(slot));
JS_SetContextThread(closureCx);
JS_DestroyContextNoGC(closureCx);
}
void
CType::Trace(JSTracer* trc, JSObject* obj)
{
JSContext* cx = trc->context;
// Make sure our TypeCode slot is legit. If it's not, bail.
jsval slot = js::Jsvalify(obj->getSlot(SLOT_TYPECODE));
if (JSVAL_IS_VOID(slot))
return;
// The contents of our slots depend on what kind of type we are.
switch (TypeCode(JSVAL_TO_INT(slot))) {
case TYPE_struct: {
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FIELDINFO, &slot));
if (JSVAL_IS_VOID(slot))
return;
FieldInfoHash* fields =
static_cast<FieldInfoHash*>(JSVAL_TO_PRIVATE(slot));
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront()) {
JS_CALL_TRACER(trc, r.front().key, JSTRACE_STRING, "fieldName");
JS_CALL_TRACER(trc, r.front().value.mType, JSTRACE_OBJECT, "fieldType");
}
break;
}
case TYPE_function: {
// Check if we have a FunctionInfo.
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FNINFO, &slot));
if (JSVAL_IS_VOID(slot))
return;
FunctionInfo* fninfo = static_cast<FunctionInfo*>(JSVAL_TO_PRIVATE(slot));
JS_ASSERT(fninfo);
// Identify our objects to the tracer.
JS_CALL_TRACER(trc, fninfo->mABI, JSTRACE_OBJECT, "abi");
JS_CALL_TRACER(trc, fninfo->mReturnType, JSTRACE_OBJECT, "returnType");
for (size_t i = 0; i < fninfo->mArgTypes.length(); ++i)
JS_CALL_TRACER(trc, fninfo->mArgTypes[i], JSTRACE_OBJECT, "argType");
break;
}
default:
// Nothing to do here.
break;
}
}
bool
CType::IsCType(JSContext* cx, JSObject* obj)
{
return JS_GET_CLASS(cx, obj) == &sCTypeClass;
}
TypeCode
CType::GetTypeCode(JSContext* cx, JSObject* typeObj)
{
JS_ASSERT(IsCType(cx, typeObj));
jsval result;
ASSERT_OK(JS_GetReservedSlot(cx, typeObj, SLOT_TYPECODE, &result));
return TypeCode(JSVAL_TO_INT(result));
}
bool
CType::TypesEqual(JSContext* cx, JSObject* t1, JSObject* t2)
{
JS_ASSERT(IsCType(cx, t1) && IsCType(cx, t2));
// Fast path: check for object equality.
if (t1 == t2)
return true;
// First, perform shallow comparison.
TypeCode c1 = GetTypeCode(cx, t1);
TypeCode c2 = GetTypeCode(cx, t2);
if (c1 != c2)
return false;
// Determine whether the types require shallow or deep comparison.
switch (c1) {
case TYPE_pointer: {
// Compare base types.
JSObject* b1 = PointerType::GetBaseType(cx, t1);
JSObject* b2 = PointerType::GetBaseType(cx, t2);
return TypesEqual(cx, b1, b2);
}
case TYPE_function: {
FunctionInfo* f1 = FunctionType::GetFunctionInfo(cx, t1);
FunctionInfo* f2 = FunctionType::GetFunctionInfo(cx, t2);
// Compare abi, return type, and argument types.
if (f1->mABI != f2->mABI)
return false;
if (!TypesEqual(cx, f1->mReturnType, f2->mReturnType))
return false;
if (f1->mArgTypes.length() != f2->mArgTypes.length())
return false;
if (f1->mIsVariadic != f2->mIsVariadic)
return false;
for (size_t i = 0; i < f1->mArgTypes.length(); ++i) {
if (!TypesEqual(cx, f1->mArgTypes[i], f2->mArgTypes[i]))
return false;
}
return true;
}
case TYPE_array: {
// Compare length, then base types.
// An undefined length array matches other undefined length arrays.
size_t s1 = 0, s2 = 0;
bool d1 = ArrayType::GetSafeLength(cx, t1, &s1);
bool d2 = ArrayType::GetSafeLength(cx, t2, &s2);
if (d1 != d2 || (d1 && s1 != s2))
return false;
JSObject* b1 = ArrayType::GetBaseType(cx, t1);
JSObject* b2 = ArrayType::GetBaseType(cx, t2);
return TypesEqual(cx, b1, b2);
}
case TYPE_struct:
// Require exact type object equality.
return false;
default:
// Shallow comparison is sufficient.
return true;
}
}
bool
CType::GetSafeSize(JSContext* cx, JSObject* obj, size_t* result)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval size;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_SIZE, &size));
// The "size" property can be a jsint, a jsdouble, or JSVAL_VOID
// (for arrays of undefined length), and must always fit in a size_t.
if (JSVAL_IS_INT(size)) {
*result = JSVAL_TO_INT(size);
return true;
}
if (JSVAL_IS_DOUBLE(size)) {
*result = Convert<size_t>(JSVAL_TO_DOUBLE(size));
return true;
}
JS_ASSERT(JSVAL_IS_VOID(size));
return false;
}
size_t
CType::GetSize(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval size;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_SIZE, &size));
JS_ASSERT(!JSVAL_IS_VOID(size));
// The "size" property can be a jsint, a jsdouble, or JSVAL_VOID
// (for arrays of undefined length), and must always fit in a size_t.
// For callers who know it can never be JSVAL_VOID, return a size_t directly.
if (JSVAL_IS_INT(size))
return JSVAL_TO_INT(size);
return Convert<size_t>(JSVAL_TO_DOUBLE(size));
}
bool
CType::IsSizeDefined(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval size;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_SIZE, &size));
// The "size" property can be a jsint, a jsdouble, or JSVAL_VOID
// (for arrays of undefined length), and must always fit in a size_t.
JS_ASSERT(JSVAL_IS_INT(size) || JSVAL_IS_DOUBLE(size) || JSVAL_IS_VOID(size));
return !JSVAL_IS_VOID(size);
}
size_t
CType::GetAlignment(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_ALIGN, &slot));
return static_cast<size_t>(JSVAL_TO_INT(slot));
}
ffi_type*
CType::GetFFIType(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FFITYPE, &slot));
if (!JSVAL_IS_VOID(slot)) {
return static_cast<ffi_type*>(JSVAL_TO_PRIVATE(slot));
}
AutoPtr<ffi_type> result;
switch (CType::GetTypeCode(cx, obj)) {
case TYPE_array:
result = ArrayType::BuildFFIType(cx, obj);
break;
case TYPE_struct:
result = StructType::BuildFFIType(cx, obj);
break;
default:
JS_NOT_REACHED("simple types must have an ffi_type");
}
if (!result ||
!JS_SetReservedSlot(cx, obj, SLOT_FFITYPE, PRIVATE_TO_JSVAL(result.get())))
return NULL;
return result.forget();
}
JSString*
CType::GetName(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval string;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_NAME, &string));
if (JSVAL_IS_VOID(string)) {
// Build the type name lazily.
JSString* name = BuildTypeName(cx, obj);
if (!name || !JS_SetReservedSlot(cx, obj, SLOT_NAME, STRING_TO_JSVAL(name)))
return NULL;
return name;
}
return JSVAL_TO_STRING(string);
}
JSObject*
CType::GetProtoFromCtor(JSContext* cx, JSObject* obj, CTypeProtoSlot slot)
{
// Get ctypes.{Pointer,Array,Struct}Type.prototype from a reserved slot
// on the type constructor.
jsval protoslot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, &protoslot));
JSObject* proto = JSVAL_TO_OBJECT(protoslot);
JS_ASSERT(proto);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sCTypeProtoClass);
// Get the desired prototype.
jsval result;
ASSERT_OK(JS_GetReservedSlot(cx, proto, slot, &result));
return JSVAL_TO_OBJECT(result);
}
JSObject*
CType::GetProtoFromType(JSContext* cx, JSObject* obj, CTypeProtoSlot slot)
{
JS_ASSERT(IsCType(cx, obj));
// Get the prototype of the type object.
JSObject* proto = JS_GetPrototype(cx, obj);
JS_ASSERT(proto);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sCTypeProtoClass);
// Get the requested ctypes.{Pointer,Array,Struct,Function}Type.prototype.
jsval result;
ASSERT_OK(JS_GetReservedSlot(cx, proto, slot, &result));
return JSVAL_TO_OBJECT(result);
}
JSBool
CType::PrototypeGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_PROTO, vp));
JS_ASSERT(!JSVAL_IS_PRIMITIVE(*vp) || JSVAL_IS_VOID(*vp));
return JS_TRUE;
}
JSBool
CType::NameGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
JSString* name = CType::GetName(cx, obj);
if (!name)
return JS_FALSE;
*vp = STRING_TO_JSVAL(name);
return JS_TRUE;
}
JSBool
CType::SizeGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_SIZE, vp));
JS_ASSERT(JSVAL_IS_NUMBER(*vp) || JSVAL_IS_VOID(*vp));
return JS_TRUE;
}
JSBool
CType::PtrGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
JSObject* pointerType = PointerType::CreateInternal(cx, obj);
if (!pointerType)
return JS_FALSE;
*vp = OBJECT_TO_JSVAL(pointerType);
return JS_TRUE;
}
JSBool
CType::CreateArray(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* baseType = JS_THIS_OBJECT(cx, vp);
if (!baseType || !CType::IsCType(cx, baseType)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
// Construct and return a new ArrayType object.
if (argc > 1) {
JS_ReportError(cx, "array takes zero or one argument");
return JS_FALSE;
}
// Convert the length argument to a size_t.
jsval* argv = JS_ARGV(cx, vp);
size_t length = 0;
if (argc == 1 && !jsvalToSize(cx, argv[0], false, &length)) {
JS_ReportError(cx, "argument must be a nonnegative integer");
return JS_FALSE;
}
JSObject* result = ArrayType::CreateInternal(cx, baseType, length, argc == 1);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
CType::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
AutoString type;
AppendString(type, "type ");
AppendString(type, GetName(cx, obj));
JSString* result = NewUCString(cx, type);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
CType::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
}
AutoString source;
BuildTypeSource(cx, obj, false, source);
JSString* result = NewUCString(cx, source);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
CType::HasInstance(JSContext* cx, JSObject* obj, const jsval* v, JSBool* bp)
{
JS_ASSERT(CType::IsCType(cx, obj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_PROTO, &slot));
JSObject* prototype = JSVAL_TO_OBJECT(slot);
JS_ASSERT(prototype);
JS_ASSERT(JS_GET_CLASS(cx, prototype) == &sCDataProtoClass);
*bp = JS_FALSE;
if (JSVAL_IS_PRIMITIVE(*v))
return JS_TRUE;
JSObject* proto = JSVAL_TO_OBJECT(*v);
while ((proto = JS_GetPrototype(cx, proto))) {
if (proto == prototype) {
*bp = JS_TRUE;
break;
}
}
return JS_TRUE;
}
/*******************************************************************************
** PointerType implementation
*******************************************************************************/
JSBool
PointerType::Create(JSContext* cx, uintN argc, jsval* vp)
{
// Construct and return a new PointerType object.
if (argc != 1) {
JS_ReportError(cx, "PointerType takes one argument");
return JS_FALSE;
}
jsval arg = JS_ARGV(cx, vp)[0];
if (JSVAL_IS_PRIMITIVE(arg) || !CType::IsCType(cx, JSVAL_TO_OBJECT(arg))) {
JS_ReportError(cx, "first argument must be a CType");
return JS_FALSE;
}
JSObject* result = CreateInternal(cx, JSVAL_TO_OBJECT(arg));
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSObject*
PointerType::CreateInternal(JSContext* cx, JSObject* baseType)
{
// check if we have a cached PointerType on our base CType.
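// Because the result is cached here (and stored back into SLOT_PTR at the end
// of this function), repeated accesses should yield the same object; e.g.,
// assuming the usual bindings, `ctypes.int32_t.ptr === ctypes.int32_t.ptr`
// evaluates to true.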
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, baseType, SLOT_PTR, &slot));
if (!JSVAL_IS_VOID(slot))
return JSVAL_TO_OBJECT(slot);
// Get ctypes.PointerType.prototype and the common prototype for CData objects
// of this type.
JSObject* typeProto;
JSObject* dataProto;
typeProto = CType::GetProtoFromType(cx, baseType, SLOT_POINTERPROTO);
dataProto = CType::GetProtoFromType(cx, baseType, SLOT_POINTERDATAPROTO);
// Create a new CType object with the common properties and slots.
JSObject* typeObj = CType::Create(cx, typeProto, dataProto, TYPE_pointer,
NULL, INT_TO_JSVAL(sizeof(void*)),
INT_TO_JSVAL(ffi_type_pointer.alignment),
&ffi_type_pointer);
if (!typeObj)
return NULL;
js::AutoObjectRooter root(cx, typeObj);
// Set the target type. (This will be 'null' for an opaque pointer type.)
if (!JS_SetReservedSlot(cx, typeObj, SLOT_TARGET_T, OBJECT_TO_JSVAL(baseType)))
return NULL;
// Finally, cache our newly-created PointerType on our pointed-to CType.
if (!JS_SetReservedSlot(cx, baseType, SLOT_PTR, OBJECT_TO_JSVAL(typeObj)))
return NULL;
return typeObj;
}
JSBool
PointerType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
return JS_FALSE;
}
if (argc > 2) {
JS_ReportError(cx, "constructor takes 0, 1, or 2 arguments");
return JS_FALSE;
}
JSObject* result = CData::Create(cx, obj, NULL, NULL, true);
if (!result)
return JS_FALSE;
// Set return value early, must not observe *vp after
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (argc == 0) {
// Construct a null pointer.
return JS_TRUE;
}
jsval* argv = JS_ARGV(cx, vp);
if (argc >= 1) {
JSObject* baseObj = PointerType::GetBaseType(cx, obj);
if (CType::GetTypeCode(cx, baseObj) == TYPE_function &&
JSVAL_IS_OBJECT(argv[0]) &&
JS_ObjectIsCallable(cx, JSVAL_TO_OBJECT(argv[0]))) {
// Construct a FunctionType.ptr from a JS function, and allow an
// optional 'this' argument.
JSObject* thisObj = NULL;
if (argc == 2) {
if (JSVAL_IS_OBJECT(argv[1])) {
thisObj = JSVAL_TO_OBJECT(argv[1]);
} else if (!JS_ValueToObject(cx, argv[1], &thisObj)) {
return JS_FALSE;
}
}
JSObject* fnObj = JSVAL_TO_OBJECT(argv[0]);
return FunctionType::ConstructData(cx, baseObj, result, fnObj, thisObj);
}
if (argc == 2) {
JS_ReportError(cx, "first argument must be a function");
return JS_FALSE;
}
}
// Construct from a raw pointer value.
return ExplicitConvert(cx, argv[0], obj, CData::GetData(cx, result));
}
JSObject*
PointerType::GetBaseType(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_pointer);
jsval type;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_TARGET_T, &type));
JS_ASSERT(!JSVAL_IS_NULL(type));
return JSVAL_TO_OBJECT(type);
}
JSBool
PointerType::TargetTypeGetter(JSContext* cx,
JSObject* obj,
jsid idval,
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_TARGET_T, vp));
JS_ASSERT(JSVAL_IS_OBJECT(*vp));
return JS_TRUE;
}
JSBool
PointerType::IsNull(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Get pointer type and base type.
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
return JS_FALSE;
}
void* data = *static_cast<void**>(CData::GetData(cx, obj));
jsval result = BOOLEAN_TO_JSVAL(data == NULL);
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
PointerType::ContentsGetter(JSContext* cx,
JSObject* obj,
jsid idval,
jsval* vp)
{
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Get pointer type and base type.
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, typeObj);
if (!CType::IsSizeDefined(cx, baseType)) {
JS_ReportError(cx, "cannot get contents of undefined size");
return JS_FALSE;
}
void* data = *static_cast<void**>(CData::GetData(cx, obj));
if (data == NULL) {
JS_ReportError(cx, "cannot read contents of null pointer");
return JS_FALSE;
}
jsval result;
if (!ConvertToJS(cx, baseType, NULL, data, false, false, &result))
return JS_FALSE;
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
PointerType::ContentsSetter(JSContext* cx,
JSObject* obj,
jsid idval,
JSBool strict,
jsval* vp)
{
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Get pointer type and base type.
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, typeObj);
if (!CType::IsSizeDefined(cx, baseType)) {
JS_ReportError(cx, "cannot set contents of undefined size");
return JS_FALSE;
}
void* data = *static_cast<void**>(CData::GetData(cx, obj));
if (data == NULL) {
JS_ReportError(cx, "cannot write contents to null pointer");
return JS_FALSE;
}
return ImplicitConvert(cx, *vp, baseType, data, false, NULL);
}
/*******************************************************************************
** ArrayType implementation
*******************************************************************************/
JSBool
ArrayType::Create(JSContext* cx, uintN argc, jsval* vp)
{
// Construct and return a new ArrayType object.
if (argc < 1 || argc > 2) {
JS_ReportError(cx, "ArrayType takes one or two arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
if (JSVAL_IS_PRIMITIVE(argv[0]) ||
!CType::IsCType(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "first argument must be a CType");
return JS_FALSE;
}
// Convert the length argument to a size_t.
size_t length = 0;
if (argc == 2 && !jsvalToSize(cx, argv[1], false, &length)) {
JS_ReportError(cx, "second argument must be a nonnegative integer");
return JS_FALSE;
}
JSObject* baseType = JSVAL_TO_OBJECT(argv[0]);
JSObject* result = CreateInternal(cx, baseType, length, argc == 2);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSObject*
ArrayType::CreateInternal(JSContext* cx,
JSObject* baseType,
size_t length,
bool lengthDefined)
{
// Get ctypes.ArrayType.prototype and the common prototype for CData objects
// of this type, from ctypes.CType.prototype.
JSObject* typeProto = CType::GetProtoFromType(cx, baseType, SLOT_ARRAYPROTO);
JSObject* dataProto = CType::GetProtoFromType(cx, baseType, SLOT_ARRAYDATAPROTO);
// Determine the size of the array from the base type, if possible.
// The size of the base type must be defined.
// If our length is undefined, both our size and length will be undefined.
size_t baseSize;
if (!CType::GetSafeSize(cx, baseType, &baseSize)) {
JS_ReportError(cx, "base size must be defined");
return NULL;
}
jsval sizeVal = JSVAL_VOID;
jsval lengthVal = JSVAL_VOID;
if (lengthDefined) {
// Check for overflow, and convert to a jsint or jsdouble as required.
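// Illustrative check, assuming a 32-bit size_t: length = 0x20000000 with
// baseSize = 8 wraps the product to 0, and 0 / length != baseSize, so the
// overflow is caught below.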
size_t size = length * baseSize;
if (length > 0 && size / length != baseSize) {
JS_ReportError(cx, "size overflow");
return NULL;
}
if (!SizeTojsval(cx, size, &sizeVal) ||
!SizeTojsval(cx, length, &lengthVal))
return NULL;
}
size_t align = CType::GetAlignment(cx, baseType);
// Create a new CType object with the common properties and slots.
JSObject* typeObj = CType::Create(cx, typeProto, dataProto, TYPE_array, NULL,
sizeVal, INT_TO_JSVAL(align), NULL);
if (!typeObj)
return NULL;
js::AutoObjectRooter root(cx, typeObj);
// Set the element type.
if (!JS_SetReservedSlot(cx, typeObj, SLOT_ELEMENT_T, OBJECT_TO_JSVAL(baseType)))
return NULL;
// Set the length.
if (!JS_SetReservedSlot(cx, typeObj, SLOT_LENGTH, lengthVal))
return NULL;
return typeObj;
}
JSBool
ArrayType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_array) {
JS_ReportError(cx, "not an ArrayType");
return JS_FALSE;
}
// Decide whether we have an object to initialize from. We'll override this
// if we get a length argument instead.
bool convertObject = argc == 1;
// Check if we're an array of undefined length. If we are, allow construction
// with a length argument, or with an actual JS array.
if (CType::IsSizeDefined(cx, obj)) {
if (argc > 1) {
JS_ReportError(cx, "constructor takes zero or one argument");
return JS_FALSE;
}
} else {
if (argc != 1) {
JS_ReportError(cx, "constructor takes one argument");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, obj);
jsval* argv = JS_ARGV(cx, vp);
size_t length;
if (jsvalToSize(cx, argv[0], false, &length)) {
// Have a length, rather than an object to initialize from.
convertObject = false;
} else if (!JSVAL_IS_PRIMITIVE(argv[0])) {
// We were given an object with a .length property.
// This could be a JS array, or a CData array.
JSObject* arg = JSVAL_TO_OBJECT(argv[0]);
js::AutoValueRooter lengthVal(cx);
if (!JS_GetProperty(cx, arg, "length", lengthVal.jsval_addr()) ||
!jsvalToSize(cx, lengthVal.jsval_value(), false, &length)) {
JS_ReportError(cx, "argument must be an array object or length");
return JS_FALSE;
}
} else if (JSVAL_IS_STRING(argv[0])) {
// We were given a string. Size the array to the appropriate length,
// including space for the terminator.
JSString* sourceString = JSVAL_TO_STRING(argv[0]);
size_t sourceLength = sourceString->length();
const jschar* sourceChars = sourceString->getChars(cx);
if (!sourceChars)
return false;
switch (CType::GetTypeCode(cx, baseType)) {
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char: {
// Determine the UTF-8 length.
length = js_GetDeflatedUTF8StringLength(cx, sourceChars, sourceLength);
if (length == (size_t) -1)
return false;
++length;
break;
}
case TYPE_jschar:
length = sourceLength + 1;
break;
default:
return TypeError(cx, "array", argv[0]);
}
} else {
JS_ReportError(cx, "argument must be an array object or length");
return JS_FALSE;
}
// Construct a new ArrayType of defined length, for the new CData object.
obj = CreateInternal(cx, baseType, length, true);
if (!obj)
return JS_FALSE;
}
// Root the CType object, in case we created one above.
js::AutoObjectRooter root(cx, obj);
JSObject* result = CData::Create(cx, obj, NULL, NULL, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (convertObject) {
if (!ExplicitConvert(cx, JS_ARGV(cx, vp)[0], obj, CData::GetData(cx, result)))
return JS_FALSE;
}
return JS_TRUE;
}
JSObject*
ArrayType::GetBaseType(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_array);
jsval type;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_ELEMENT_T, &type));
JS_ASSERT(!JSVAL_IS_NULL(type));
return JSVAL_TO_OBJECT(type);
}
bool
ArrayType::GetSafeLength(JSContext* cx, JSObject* obj, size_t* result)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_array);
jsval length;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_LENGTH, &length));
// The "length" property can be a jsint, a jsdouble, or JSVAL_VOID
// (for arrays of undefined length), and must always fit in a size_t.
if (JSVAL_IS_INT(length)) {
*result = JSVAL_TO_INT(length);
return true;
}
if (JSVAL_IS_DOUBLE(length)) {
*result = Convert<size_t>(JSVAL_TO_DOUBLE(length));
return true;
}
JS_ASSERT(JSVAL_IS_VOID(length));
return false;
}
size_t
ArrayType::GetLength(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_array);
jsval length;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_LENGTH, &length));
JS_ASSERT(!JSVAL_IS_VOID(length));
// The "length" property can be a jsint, a jsdouble, or JSVAL_VOID
// (for arrays of undefined length), and must always fit in a size_t.
// For callers who know it can never be JSVAL_VOID, return a size_t directly.
if (JSVAL_IS_INT(length))
return JSVAL_TO_INT(length);
return Convert<size_t>(JSVAL_TO_DOUBLE(length));
}
ffi_type*
ArrayType::BuildFFIType(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_array);
JS_ASSERT(CType::IsSizeDefined(cx, obj));
JSObject* baseType = ArrayType::GetBaseType(cx, obj);
ffi_type* ffiBaseType = CType::GetFFIType(cx, baseType);
if (!ffiBaseType)
return NULL;
size_t length = ArrayType::GetLength(cx, obj);
// Create an ffi_type to represent the array. This is necessary for the case
// where the array is part of a struct. Since libffi has no intrinsic
// support for array types, we approximate it by creating a struct type
// with elements of type 'baseType' and with appropriate size and alignment
// values. It would be nice to not do all the work of setting up 'elements',
// but some libffi platforms currently require that it be meaningful. I'm
// looking at you, x86_64.
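// Sketch of the result (illustrative, assuming ctypes.int32_t.array(4), and
// writing 'elementFFIType' for whatever CType::GetFFIType returns for the
// int32_t base type):
//
//   ffiType->type      = FFI_TYPE_STRUCT;
//   ffiType->size      = 16;   // CType::GetSize(obj)
//   ffiType->alignment = 4;    // CType::GetAlignment(obj)
//   ffiType->elements  = { elementFFIType, elementFFIType, elementFFIType,
//                          elementFFIType, NULL };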
AutoPtr<ffi_type> ffiType(js_new<ffi_type>());
if (!ffiType) {
JS_ReportOutOfMemory(cx);
return NULL;
}
ffiType->type = FFI_TYPE_STRUCT;
ffiType->size = CType::GetSize(cx, obj);
ffiType->alignment = CType::GetAlignment(cx, obj);
ffiType->elements = js_array_new<ffi_type*>(length + 1);
if (!ffiType->elements) {
JS_ReportAllocationOverflow(cx);
return NULL;
}
for (size_t i = 0; i < length; ++i)
ffiType->elements[i] = ffiBaseType;
ffiType->elements[length] = NULL;
return ffiType.forget();
}
JSBool
ArrayType::ElementTypeGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_array) {
JS_ReportError(cx, "not an ArrayType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_ELEMENT_T, vp));
JS_ASSERT(!JSVAL_IS_PRIMITIVE(*vp));
return JS_TRUE;
}
JSBool
ArrayType::LengthGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
// This getter exists for both CTypes and CDatas of the ArrayType persuasion.
// If we're dealing with a CData, get the CType from it.
if (CData::IsCData(cx, obj))
obj = CData::GetCType(cx, obj);
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_array) {
JS_ReportError(cx, "not an ArrayType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_LENGTH, vp));
JS_ASSERT(JSVAL_IS_NUMBER(*vp) || JSVAL_IS_VOID(*vp));
return JS_TRUE;
}
JSBool
ArrayType::Getter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
// This should never happen, but we'll check to be safe.
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Bail early if we're not an ArrayType. (This getter is present for all
// CData, regardless of CType.)
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_array)
return JS_TRUE;
// Convert the index to a size_t and bounds-check it.
size_t index;
size_t length = GetLength(cx, typeObj);
bool ok = jsidToSize(cx, idval, true, &index);
if (!ok && JSID_IS_STRING(idval)) {
// String either isn't a number, or doesn't fit in size_t.
// Chances are it's a regular property lookup, so return.
return JS_TRUE;
}
if (!ok || index >= length) {
JS_ReportError(cx, "invalid index");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, typeObj);
size_t elementSize = CType::GetSize(cx, baseType);
char* data = static_cast<char*>(CData::GetData(cx, obj)) + elementSize * index;
return ConvertToJS(cx, baseType, obj, data, false, false, vp);
}
JSBool
ArrayType::Setter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict, jsval* vp)
{
// This should never happen, but we'll check to be safe.
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Bail early if we're not an ArrayType. (This setter is present for all
// CData, regardless of CType.)
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_array)
return JS_TRUE;
// Convert the index to a size_t and bounds-check it.
size_t index;
size_t length = GetLength(cx, typeObj);
bool ok = jsidToSize(cx, idval, true, &index);
if (!ok && JSID_IS_STRING(idval)) {
// String either isn't a number, or doesn't fit in size_t.
// Chances are it's a regular property lookup, so return.
return JS_TRUE;
}
if (!ok || index >= length) {
JS_ReportError(cx, "invalid index");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, typeObj);
size_t elementSize = CType::GetSize(cx, baseType);
char* data = static_cast<char*>(CData::GetData(cx, obj)) + elementSize * index;
return ImplicitConvert(cx, *vp, baseType, data, false, NULL);
}
JSBool
ArrayType::AddressOfElement(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_array) {
JS_ReportError(cx, "not an ArrayType");
return JS_FALSE;
}
if (argc != 1) {
JS_ReportError(cx, "addressOfElement takes one argument");
return JS_FALSE;
}
JSObject* baseType = GetBaseType(cx, typeObj);
JSObject* pointerType = PointerType::CreateInternal(cx, baseType);
if (!pointerType)
return JS_FALSE;
js::AutoObjectRooter root(cx, pointerType);
// Create a PointerType CData object containing null.
JSObject* result = CData::Create(cx, pointerType, NULL, NULL, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
// Convert the index to a size_t and bounds-check it.
size_t index;
size_t length = GetLength(cx, typeObj);
if (!jsvalToSize(cx, JS_ARGV(cx, vp)[0], false, &index) ||
index >= length) {
JS_ReportError(cx, "invalid index");
return JS_FALSE;
}
// Manually set the pointer inside the object, so we skip the conversion step.
void** data = static_cast<void**>(CData::GetData(cx, result));
size_t elementSize = CType::GetSize(cx, baseType);
*data = static_cast<char*>(CData::GetData(cx, obj)) + elementSize * index;
return JS_TRUE;
}
/*******************************************************************************
** StructType implementation
*******************************************************************************/
// For a struct field descriptor 'val' of the form { name : type }, extract
// 'name' and 'type'.
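// Illustrative example (hypothetical field name "x"):
//   { x: ctypes.int32_t }  =>  name "x", type ctypes.int32_t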
static JSFlatString*
ExtractStructField(JSContext* cx, jsval val, JSObject** typeObj)
{
if (JSVAL_IS_PRIMITIVE(val)) {
JS_ReportError(cx, "struct field descriptors require a valid name and type");
return NULL;
}
JSObject* obj = JSVAL_TO_OBJECT(val);
JSObject* iter = JS_NewPropertyIterator(cx, obj);
if (!iter)
return NULL;
js::AutoObjectRooter iterroot(cx, iter);
jsid nameid;
if (!JS_NextProperty(cx, iter, &nameid))
return NULL;
if (JSID_IS_VOID(nameid)) {
JS_ReportError(cx, "struct field descriptors require a valid name and type");
return NULL;
}
if (!JSID_IS_STRING(nameid)) {
JS_ReportError(cx, "struct field descriptors require a valid name and type");
return NULL;
}
// make sure we have one, and only one, property
jsid id;
if (!JS_NextProperty(cx, iter, &id))
return NULL;
if (!JSID_IS_VOID(id)) {
JS_ReportError(cx, "struct field descriptors must contain one property");
return NULL;
}
js::AutoValueRooter propVal(cx);
if (!JS_GetPropertyById(cx, obj, nameid, propVal.jsval_addr()))
return NULL;
if (propVal.value().isPrimitive() ||
!CType::IsCType(cx, JSVAL_TO_OBJECT(propVal.jsval_value()))) {
JS_ReportError(cx, "struct field descriptors require a valid name and type");
return NULL;
}
// Undefined size or zero size struct members are illegal.
// (Zero-size arrays are legal as struct members in C++, but libffi will
// choke on a zero-size struct, so we disallow them.)
*typeObj = JSVAL_TO_OBJECT(propVal.jsval_value());
size_t size;
if (!CType::GetSafeSize(cx, *typeObj, &size) || size == 0) {
JS_ReportError(cx, "struct field types must have defined and nonzero size");
return NULL;
}
return JSID_TO_FLAT_STRING(nameid);
}
// For a struct field with 'name' and 'type', add an element of the form
// { name : type }.
static JSBool
AddFieldToArray(JSContext* cx,
jsval* element,
JSFlatString* name,
JSObject* typeObj)
{
JSObject* fieldObj = JS_NewObject(cx, NULL, NULL, NULL);
if (!fieldObj)
return false;
*element = OBJECT_TO_JSVAL(fieldObj);
if (!JS_DefineUCProperty(cx, fieldObj,
name->chars(), name->length(),
OBJECT_TO_JSVAL(typeObj), NULL, NULL,
JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT))
return false;
return JS_FreezeObject(cx, fieldObj);
}
JSBool
StructType::Create(JSContext* cx, uintN argc, jsval* vp)
{
// Construct and return a new StructType object.
if (argc < 1 || argc > 2) {
JS_ReportError(cx, "StructType takes one or two arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
jsval name = argv[0];
if (!JSVAL_IS_STRING(name)) {
JS_ReportError(cx, "first argument must be a string");
return JS_FALSE;
}
// Get ctypes.StructType.prototype from the ctypes.StructType constructor.
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
JSObject* typeProto = CType::GetProtoFromCtor(cx, callee, SLOT_STRUCTPROTO);
// Create a simple StructType with no defined fields. The result will be
// non-instantiable as CData, will have no 'prototype' property, and will
// have undefined size and alignment and no ffi_type.
JSObject* result = CType::Create(cx, typeProto, NULL, TYPE_struct,
JSVAL_TO_STRING(name), JSVAL_VOID, JSVAL_VOID, NULL);
if (!result)
return JS_FALSE;
js::AutoObjectRooter root(cx, result);
if (argc == 2) {
if (JSVAL_IS_PRIMITIVE(argv[1]) ||
!JS_IsArrayObject(cx, JSVAL_TO_OBJECT(argv[1]))) {
JS_ReportError(cx, "second argument must be an array");
return JS_FALSE;
}
// Define the struct fields.
if (!DefineInternal(cx, result, JSVAL_TO_OBJECT(argv[1])))
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
StructType::DefineInternal(JSContext* cx, JSObject* typeObj, JSObject* fieldsObj)
{
jsuint len;
ASSERT_OK(JS_GetArrayLength(cx, fieldsObj, &len));
// Get the common prototype for CData objects of this type from
// ctypes.CType.prototype.
JSObject* dataProto =
CType::GetProtoFromType(cx, typeObj, SLOT_STRUCTDATAPROTO);
// Set up the 'prototype' and 'prototype.constructor' properties.
// The prototype will reflect the struct fields as properties on CData objects
// created from this type.
JSObject* prototype = JS_NewObject(cx, &sCDataProtoClass, dataProto, NULL);
if (!prototype)
return JS_FALSE;
js::AutoObjectRooter protoroot(cx, prototype);
if (!JS_DefineProperty(cx, prototype, "constructor", OBJECT_TO_JSVAL(typeObj),
NULL, NULL, JSPROP_READONLY | JSPROP_PERMANENT))
return JS_FALSE;
// Create a FieldInfoHash to stash on the type object, and an array to root
// its constituents. (We cannot simply stash the hash in a reserved slot now
// to get GC safety for free, since if anything in this function fails we
// do not want to mutate 'typeObj'.)
AutoPtr<FieldInfoHash> fields(js_new<FieldInfoHash>());
Array<jsval, 16> fieldRootsArray;
if (!fields || !fields->init(len) || !fieldRootsArray.appendN(JSVAL_VOID, len)) {
JS_ReportOutOfMemory(cx);
return JS_FALSE;
}
js::AutoArrayRooter fieldRoots(cx, fieldRootsArray.length(),
fieldRootsArray.begin());
// Process the field types.
size_t structSize, structAlign;
if (len != 0) {
structSize = 0;
structAlign = 0;
for (jsuint i = 0; i < len; ++i) {
js::AutoValueRooter item(cx);
if (!JS_GetElement(cx, fieldsObj, i, item.jsval_addr()))
return JS_FALSE;
JSObject* fieldType = NULL;
JSFlatString* name = ExtractStructField(cx, item.jsval_value(), &fieldType);
if (!name)
return JS_FALSE;
fieldRootsArray[i] = OBJECT_TO_JSVAL(fieldType);
// Make sure each field name is unique, and add it to the hash.
FieldInfoHash::AddPtr entryPtr = fields->lookupForAdd(name);
if (entryPtr) {
JS_ReportError(cx, "struct fields must have unique names");
return JS_FALSE;
}
ASSERT_OK(fields->add(entryPtr, name, FieldInfo()));
FieldInfo& info = entryPtr->value;
info.mType = fieldType;
info.mIndex = i;
// Add the field to the StructType's 'prototype' property.
if (!JS_DefineUCProperty(cx, prototype,
name->chars(), name->length(), JSVAL_VOID,
StructType::FieldGetter, StructType::FieldSetter,
JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_PERMANENT))
return JS_FALSE;
size_t fieldSize = CType::GetSize(cx, fieldType);
size_t fieldAlign = CType::GetAlignment(cx, fieldType);
size_t fieldOffset = Align(structSize, fieldAlign);
// Check for overflow. Since we maintain the invariant that
// fieldSize % fieldAlign == 0, we can safely check fieldOffset + fieldSize
// without first checking fieldOffset for overflow.
if (fieldOffset + fieldSize < structSize) {
JS_ReportError(cx, "size overflow");
return JS_FALSE;
}
info.mOffset = fieldOffset;
structSize = fieldOffset + fieldSize;
if (fieldAlign > structAlign)
structAlign = fieldAlign;
}
// Pad the struct tail according to struct alignment.
size_t structTail = Align(structSize, structAlign);
if (structTail < structSize) {
JS_ReportError(cx, "size overflow");
return JS_FALSE;
}
structSize = structTail;
} else {
// Empty structs are illegal in C, but are legal and have a size of
// 1 byte in C++. We're going to allow them, and trick libffi into
// believing this by adding a char member. The resulting struct will have
// no getters or setters, and will be initialized to zero.
structSize = 1;
structAlign = 1;
}
jsval sizeVal;
if (!SizeTojsval(cx, structSize, &sizeVal))
return JS_FALSE;
if (!JS_SetReservedSlot(cx, typeObj, SLOT_FIELDINFO,
PRIVATE_TO_JSVAL(fields.get())))
return JS_FALSE;
fields.forget();
if (!JS_SetReservedSlot(cx, typeObj, SLOT_SIZE, sizeVal) ||
!JS_SetReservedSlot(cx, typeObj, SLOT_ALIGN, INT_TO_JSVAL(structAlign)) ||
//!JS_FreezeObject(cx, prototype) || // XXX fixme - see bug 541212!
!JS_SetReservedSlot(cx, typeObj, SLOT_PROTO, OBJECT_TO_JSVAL(prototype)))
return JS_FALSE;
return JS_TRUE;
}
ffi_type*
StructType::BuildFFIType(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_struct);
JS_ASSERT(CType::IsSizeDefined(cx, obj));
const FieldInfoHash* fields = GetFieldInfo(cx, obj);
size_t len = fields->count();
size_t structSize = CType::GetSize(cx, obj);
size_t structAlign = CType::GetAlignment(cx, obj);
AutoPtr<ffi_type> ffiType(js_new<ffi_type>());
if (!ffiType) {
JS_ReportOutOfMemory(cx);
return NULL;
}
ffiType->type = FFI_TYPE_STRUCT;
AutoPtr<ffi_type*>::Array elements;
if (len != 0) {
elements = js_array_new<ffi_type*>(len + 1);
if (!elements) {
JS_ReportOutOfMemory(cx);
return NULL;
}
elements[len] = NULL;
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront()) {
const FieldInfoHash::Entry& entry = r.front();
ffi_type* fieldType = CType::GetFFIType(cx, entry.value.mType);
if (!fieldType)
return NULL;
elements[entry.value.mIndex] = fieldType;
}
} else {
// Represent an empty struct as having a size of 1 byte, just like C++.
JS_ASSERT(structSize == 1);
JS_ASSERT(structAlign == 1);
elements = js_array_new<ffi_type*>(2);
if (!elements) {
JS_ReportOutOfMemory(cx);
return NULL;
}
elements[0] = &ffi_type_uint8;
elements[1] = NULL;
}
ffiType->elements = elements.get();
#ifdef DEBUG
// Perform a sanity check: the result of our struct size and alignment
// calculations should match libffi's. We force it to do this calculation
// by calling ffi_prep_cif.
ffi_cif cif;
ffiType->size = 0;
ffiType->alignment = 0;
ffi_status status = ffi_prep_cif(&cif, FFI_DEFAULT_ABI, 0, ffiType.get(), NULL);
JS_ASSERT(status == FFI_OK);
JS_ASSERT(structSize == ffiType->size);
JS_ASSERT(structAlign == ffiType->alignment);
#else
// Fill in the ffi_type's size and align fields. This makes libffi treat the
// type as initialized; it will not recompute the values. (We assume
// everything agrees; if it doesn't, we really want to know about it, which
// is the purpose of the above debug-only check.)
ffiType->size = structSize;
ffiType->alignment = structAlign;
#endif
elements.forget();
return ffiType.forget();
}
JSBool
StructType::Define(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj ||
!CType::IsCType(cx, obj) ||
CType::GetTypeCode(cx, obj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
if (CType::IsSizeDefined(cx, obj)) {
JS_ReportError(cx, "StructType has already been defined");
return JS_FALSE;
}
if (argc != 1) {
JS_ReportError(cx, "define takes one argument");
return JS_FALSE;
}
jsval arg = JS_ARGV(cx, vp)[0];
if (JSVAL_IS_PRIMITIVE(arg) ||
!JS_IsArrayObject(cx, JSVAL_TO_OBJECT(arg))) {
JS_ReportError(cx, "argument must be an array");
return JS_FALSE;
}
return DefineInternal(cx, obj, JSVAL_TO_OBJECT(arg));
}
JSBool
StructType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
if (!CType::IsSizeDefined(cx, obj)) {
JS_ReportError(cx, "cannot construct an opaque StructType");
return JS_FALSE;
}
JSObject* result = CData::Create(cx, obj, NULL, NULL, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (argc == 0)
return JS_TRUE;
char* buffer = static_cast<char*>(CData::GetData(cx, result));
const FieldInfoHash* fields = GetFieldInfo(cx, obj);
jsval* argv = JS_ARGV(cx, vp);
if (argc == 1) {
// There are two possible interpretations of the argument:
// 1) It may be an object '{ ... }' with properties representing the
// struct fields intended to ExplicitConvert wholesale to our StructType.
// 2) If the struct contains one field, the arg may be intended to
// ImplicitConvert directly to that arg's CType.
// Thankfully, the conditions for these two possibilities to succeed
// are mutually exclusive, so we can pick the right one.
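// Illustrative examples (hypothetical struct names):
//   new point_t({ x: 1, y: 2 })  -- option 1), ExplicitConvert of an object
//   new wrapper_t(5)             -- option 2), only for a one-field struct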
// Try option 1) first.
if (ExplicitConvert(cx, argv[0], obj, buffer))
return JS_TRUE;
if (fields->count() != 1)
return JS_FALSE;
// If ExplicitConvert failed, and there is no pending exception, then assume
// hard failure (out of memory, or some other similarly serious condition).
if (!JS_IsExceptionPending(cx))
return JS_FALSE;
// Otherwise, assume soft failure, and clear the pending exception so that we
// can throw a different one as required.
JS_ClearPendingException(cx);
// Fall through to try option 2).
}
// We have a type constructor of the form 'ctypes.StructType(a, b, c, ...)'.
// ImplicitConvert each field.
if (argc == fields->count()) {
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront()) {
const FieldInfo& field = r.front().value;
STATIC_ASSUME(field.mIndex < fields->count()); /* Quantified invariant */
if (!ImplicitConvert(cx, argv[field.mIndex], field.mType,
buffer + field.mOffset,
false, NULL))
return JS_FALSE;
}
return JS_TRUE;
}
JS_ReportError(cx, "constructor takes 0, 1, or %u arguments",
fields->count());
return JS_FALSE;
}
const FieldInfoHash*
StructType::GetFieldInfo(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_struct);
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FIELDINFO, &slot));
JS_ASSERT(!JSVAL_IS_VOID(slot) && JSVAL_TO_PRIVATE(slot));
return static_cast<const FieldInfoHash*>(JSVAL_TO_PRIVATE(slot));
}
const FieldInfo*
StructType::LookupField(JSContext* cx, JSObject* obj, JSFlatString *name)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_struct);
FieldInfoHash::Ptr ptr = GetFieldInfo(cx, obj)->lookup(name);
if (ptr)
return &ptr->value;
JSAutoByteString bytes(cx, name);
if (!bytes)
return NULL;
JS_ReportError(cx, "%s does not name a field", bytes.ptr());
return NULL;
}
JSObject*
StructType::BuildFieldsArray(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_struct);
JS_ASSERT(CType::IsSizeDefined(cx, obj));
const FieldInfoHash* fields = GetFieldInfo(cx, obj);
size_t len = fields->count();
// Prepare a new array for the 'fields' property of the StructType.
Array<jsval, 16> fieldsVec;
if (!fieldsVec.appendN(JSVAL_VOID, len))
return NULL;
js::AutoArrayRooter root(cx, fieldsVec.length(), fieldsVec.begin());
for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront()) {
const FieldInfoHash::Entry& entry = r.front();
// Add the field descriptor to the array.
if (!AddFieldToArray(cx, &fieldsVec[entry.value.mIndex],
entry.key, entry.value.mType))
return NULL;
}
JSObject* fieldsProp = JS_NewArrayObject(cx, len, fieldsVec.begin());
if (!fieldsProp)
return NULL;
// Seal the fields array.
if (!JS_FreezeObject(cx, fieldsProp))
return NULL;
return fieldsProp;
}
JSBool
StructType::FieldsArrayGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FIELDS, vp));
if (!CType::IsSizeDefined(cx, obj)) {
JS_ASSERT(JSVAL_IS_VOID(*vp));
return JS_TRUE;
}
if (JSVAL_IS_VOID(*vp)) {
// Build the 'fields' array lazily.
JSObject* fields = BuildFieldsArray(cx, obj);
if (!fields ||
!JS_SetReservedSlot(cx, obj, SLOT_FIELDS, OBJECT_TO_JSVAL(fields)))
return JS_FALSE;
*vp = OBJECT_TO_JSVAL(fields);
}
JS_ASSERT(!JSVAL_IS_PRIMITIVE(*vp) &&
JS_IsArrayObject(cx, JSVAL_TO_OBJECT(*vp)));
return JS_TRUE;
}
JSBool
StructType::FieldGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
const FieldInfo* field = LookupField(cx, typeObj, JSID_TO_FLAT_STRING(idval));
if (!field)
return JS_FALSE;
char* data = static_cast<char*>(CData::GetData(cx, obj)) + field->mOffset;
return ConvertToJS(cx, field->mType, obj, data, false, false, vp);
}
JSBool
StructType::FieldSetter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict, jsval* vp)
{
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
const FieldInfo* field = LookupField(cx, typeObj, JSID_TO_FLAT_STRING(idval));
if (!field)
return JS_FALSE;
char* data = static_cast<char*>(CData::GetData(cx, obj)) + field->mOffset;
return ImplicitConvert(cx, *vp, field->mType, data, false, NULL);
}
JSBool
StructType::AddressOfField(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
return JS_FALSE;
}
if (argc != 1) {
JS_ReportError(cx, "addressOfField takes one argument");
return JS_FALSE;
}
JSFlatString *str = JS_FlattenString(cx, JSVAL_TO_STRING(JS_ARGV(cx, vp)[0]));
if (!str)
return JS_FALSE;
const FieldInfo* field = LookupField(cx, typeObj, str);
if (!field)
return JS_FALSE;
JSObject* baseType = field->mType;
JSObject* pointerType = PointerType::CreateInternal(cx, baseType);
if (!pointerType)
return JS_FALSE;
js::AutoObjectRooter root(cx, pointerType);
// Create a PointerType CData object containing null.
JSObject* result = CData::Create(cx, pointerType, NULL, NULL, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
// Manually set the pointer inside the object, so we skip the conversion step.
void** data = static_cast<void**>(CData::GetData(cx, result));
*data = static_cast<char*>(CData::GetData(cx, obj)) + field->mOffset;
return JS_TRUE;
}
/*******************************************************************************
** FunctionType implementation
*******************************************************************************/
// Helper class for handling allocation of function arguments.
struct AutoValue
{
AutoValue() : mData(NULL) { }
~AutoValue()
{
js_array_delete(static_cast<char*>(mData));
}
bool SizeToType(JSContext* cx, JSObject* type)
{
// Allocate a minimum of sizeof(ffi_arg) to handle small integers.
size_t size = Align(CType::GetSize(cx, type), sizeof(ffi_arg));
mData = js_array_new<char>(size);
if (mData)
memset(mData, 0, size);
return mData != NULL;
}
void* mData;
};
static bool
GetABI(JSContext* cx, jsval abiType, ffi_abi* result)
{
if (JSVAL_IS_PRIMITIVE(abiType))
return false;
ABICode abi = GetABICode(cx, JSVAL_TO_OBJECT(abiType));
// determine the ABI from the subset of those available on the
// given platform. ABI_DEFAULT specifies the default
// C calling convention (cdecl) on each platform.
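// For illustration: ABI_DEFAULT (ctypes.default_abi) always maps to
// FFI_DEFAULT_ABI, while ABI_STDCALL and ABI_WINAPI (ctypes.stdcall_abi and
// ctypes.winapi_abi) map to FFI_STDCALL only on 32-bit Windows (or OS/2)
// builds and are rejected elsewhere.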
switch (abi) {
case ABI_DEFAULT:
*result = FFI_DEFAULT_ABI;
return true;
case ABI_STDCALL:
case ABI_WINAPI:
#if (defined(_WIN32) && !defined(_WIN64)) || defined(_OS2)
*result = FFI_STDCALL;
return true;
#endif
case INVALID_ABI:
break;
}
return false;
}
static JSObject*
PrepareType(JSContext* cx, jsval type)
{
if (JSVAL_IS_PRIMITIVE(type) ||
!CType::IsCType(cx, JSVAL_TO_OBJECT(type))) {
JS_ReportError(cx, "not a ctypes type");
return NULL;
}
JSObject* result = JSVAL_TO_OBJECT(type);
TypeCode typeCode = CType::GetTypeCode(cx, result);
if (typeCode == TYPE_array) {
// convert array argument types to pointers, just like C.
// ImplicitConvert will do the same, when passing an array as data.
JSObject* baseType = ArrayType::GetBaseType(cx, result);
result = PointerType::CreateInternal(cx, baseType);
if (!result)
return NULL;
} else if (typeCode == TYPE_void_t || typeCode == TYPE_function) {
// disallow void or function argument types
JS_ReportError(cx, "Cannot have void or function argument type");
return NULL;
}
if (!CType::IsSizeDefined(cx, result)) {
JS_ReportError(cx, "Argument type must have defined size");
return NULL;
}
// libffi cannot pass types of zero size by value.
JS_ASSERT(CType::GetSize(cx, result) != 0);
return result;
}
static JSObject*
PrepareReturnType(JSContext* cx, jsval type)
{
if (JSVAL_IS_PRIMITIVE(type) ||
!CType::IsCType(cx, JSVAL_TO_OBJECT(type))) {
JS_ReportError(cx, "not a ctypes type");
return NULL;
}
JSObject* result = JSVAL_TO_OBJECT(type);
TypeCode typeCode = CType::GetTypeCode(cx, result);
// Arrays and functions can never be return types.
if (typeCode == TYPE_array || typeCode == TYPE_function) {
JS_ReportError(cx, "Return type cannot be an array or function");
return NULL;
}
if (typeCode != TYPE_void_t && !CType::IsSizeDefined(cx, result)) {
JS_ReportError(cx, "Return type must have defined size");
return NULL;
}
// libffi cannot pass types of zero size by value.
JS_ASSERT(typeCode == TYPE_void_t || CType::GetSize(cx, result) != 0);
return result;
}
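// IsEllipsis() detects the literal string "..." used to declare a variadic
// parameter list, e.g. (illustrative):
//   ctypes.FunctionType(ctypes.default_abi, ctypes.int32_t,
//                       [ctypes.char.ptr, "..."])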
static JS_ALWAYS_INLINE JSBool
IsEllipsis(JSContext* cx, jsval v, bool* isEllipsis)
{
*isEllipsis = false;
if (!JSVAL_IS_STRING(v))
return true;
JSString* str = JSVAL_TO_STRING(v);
if (str->length() != 3)
return true;
const jschar* chars = str->getChars(cx);
if (!chars)
return false;
jschar dot = '.';
*isEllipsis = (chars[0] == dot &&
chars[1] == dot &&
chars[2] == dot);
return true;
}
static JSBool
PrepareCIF(JSContext* cx,
FunctionInfo* fninfo)
{
ffi_abi abi;
if (!GetABI(cx, OBJECT_TO_JSVAL(fninfo->mABI), &abi)) {
JS_ReportError(cx, "Invalid ABI specification");
return false;
}
ffi_type* rtype = CType::GetFFIType(cx, fninfo->mReturnType);
if (!rtype)
return false;
ffi_status status =
ffi_prep_cif(&fninfo->mCIF,
abi,
fninfo->mFFITypes.length(),
rtype,
fninfo->mFFITypes.begin());
switch (status) {
case FFI_OK:
return true;
case FFI_BAD_ABI:
JS_ReportError(cx, "Invalid ABI specification");
return false;
case FFI_BAD_TYPEDEF:
JS_ReportError(cx, "Invalid type specification");
return false;
default:
JS_ReportError(cx, "Unknown libffi error");
return false;
}
}
void
FunctionType::BuildSymbolName(JSContext* cx,
JSString* name,
JSObject* typeObj,
AutoCString& result)
{
FunctionInfo* fninfo = GetFunctionInfo(cx, typeObj);
switch (GetABICode(cx, fninfo->mABI)) {
case ABI_DEFAULT:
case ABI_WINAPI:
// For cdecl or WINAPI functions, no mangling is necessary.
AppendString(result, name);
break;
case ABI_STDCALL: {
// On WIN32, stdcall functions look like:
// _foo@40
// where 'foo' is the function name, and '40' is the aligned size of the
// arguments.
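// Illustrative example (x86, where sizeof(ffi_arg) == 4): a stdcall function
// 'foo' taking (int32_t, double) gets the suffix 4 + 8 = 12, i.e. "_foo@12".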
AppendString(result, "_");
AppendString(result, name);
AppendString(result, "@");
// Compute the suffix by aligning each argument to sizeof(ffi_arg).
size_t size = 0;
for (size_t i = 0; i < fninfo->mArgTypes.length(); ++i) {
JSObject* argType = fninfo->mArgTypes[i];
size += Align(CType::GetSize(cx, argType), sizeof(ffi_arg));
}
IntegerToString(size, 10, result);
break;
}
case INVALID_ABI:
JS_NOT_REACHED("invalid abi");
break;
}
}
static FunctionInfo*
NewFunctionInfo(JSContext* cx,
jsval abiType,
jsval returnType,
jsval* argTypes,
uintN argLength)
{
AutoPtr<FunctionInfo> fninfo(js_new<FunctionInfo>());
if (!fninfo) {
JS_ReportOutOfMemory(cx);
return NULL;
}
ffi_abi abi;
if (!GetABI(cx, abiType, &abi)) {
JS_ReportError(cx, "Invalid ABI specification");
return NULL;
}
fninfo->mABI = JSVAL_TO_OBJECT(abiType);
// prepare the result type
fninfo->mReturnType = PrepareReturnType(cx, returnType);
if (!fninfo->mReturnType)
return NULL;
// prepare the argument types
if (!fninfo->mArgTypes.reserve(argLength) ||
!fninfo->mFFITypes.reserve(argLength)) {
JS_ReportOutOfMemory(cx);
return NULL;
}
fninfo->mIsVariadic = false;
for (JSUint32 i = 0; i < argLength; ++i) {
bool isEllipsis;
if (!IsEllipsis(cx, argTypes[i], &isEllipsis))
return NULL;
if (isEllipsis) {
fninfo->mIsVariadic = true;
if (i < 1) {
JS_ReportError(cx, "\"...\" may not be the first and only parameter "
"type of a variadic function declaration");
return NULL;
}
if (i < argLength - 1) {
JS_ReportError(cx, "\"...\" must be the last parameter type of a "
"variadic function declaration");
return NULL;
}
if (GetABICode(cx, fninfo->mABI) != ABI_DEFAULT) {
JS_ReportError(cx, "Variadic functions must use the __cdecl calling "
"convention");
return NULL;
}
break;
}
JSObject* argType = PrepareType(cx, argTypes[i]);
if (!argType)
return NULL;
ffi_type* ffiType = CType::GetFFIType(cx, argType);
if (!ffiType)
return NULL;
fninfo->mArgTypes.append(argType);
fninfo->mFFITypes.append(ffiType);
}
if (fninfo->mIsVariadic)
// wait to PrepareCIF until function is called
return fninfo.forget();
if (!PrepareCIF(cx, fninfo.get()))
return NULL;
return fninfo.forget();
}
JSBool
FunctionType::Create(JSContext* cx, uintN argc, jsval* vp)
{
// Construct and return a new FunctionType object.
if (argc < 2 || argc > 3) {
JS_ReportError(cx, "FunctionType takes two or three arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
Array<jsval, 16> argTypes;
JSObject* arrayObj = NULL;
if (argc == 3) {
// Prepare an array of jsvals for the arguments.
if (JSVAL_IS_PRIMITIVE(argv[2]) ||
!JS_IsArrayObject(cx, JSVAL_TO_OBJECT(argv[2]))) {
JS_ReportError(cx, "third argument must be an array");
return JS_FALSE;
}
arrayObj = JSVAL_TO_OBJECT(argv[2]);
jsuint len;
ASSERT_OK(JS_GetArrayLength(cx, arrayObj, &len));
if (!argTypes.appendN(JSVAL_VOID, len)) {
JS_ReportOutOfMemory(cx);
return JS_FALSE;
}
}
// Pull out the argument types from the array, if any.
JS_ASSERT(!argTypes.length() || arrayObj);
js::AutoArrayRooter items(cx, argTypes.length(), argTypes.begin());
for (jsuint i = 0; i < argTypes.length(); ++i) {
if (!JS_GetElement(cx, arrayObj, i, &argTypes[i]))
return JS_FALSE;
}
JSObject* result = CreateInternal(cx, argv[0], argv[1],
argTypes.begin(), argTypes.length());
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSObject*
FunctionType::CreateInternal(JSContext* cx,
jsval abi,
jsval rtype,
jsval* argtypes,
jsuint arglen)
{
// Determine and check the types, and prepare the function CIF.
AutoPtr<FunctionInfo> fninfo(NewFunctionInfo(cx, abi, rtype, argtypes, arglen));
if (!fninfo)
return NULL;
// Get ctypes.FunctionType.prototype and the common prototype for CData objects
// of this type, from ctypes.CType.prototype.
JSObject* typeProto = CType::GetProtoFromType(cx, fninfo->mReturnType,
SLOT_FUNCTIONPROTO);
JSObject* dataProto = CType::GetProtoFromType(cx, fninfo->mReturnType,
SLOT_FUNCTIONDATAPROTO);
// Create a new CType object with the common properties and slots.
JSObject* typeObj = CType::Create(cx, typeProto, dataProto, TYPE_function,
NULL, JSVAL_VOID, JSVAL_VOID, NULL);
if (!typeObj)
return NULL;
js::AutoObjectRooter root(cx, typeObj);
// Stash the FunctionInfo in a reserved slot.
if (!JS_SetReservedSlot(cx, typeObj, SLOT_FNINFO,
PRIVATE_TO_JSVAL(fninfo.get())))
return NULL;
fninfo.forget();
return typeObj;
}
// Construct a function pointer to a JS function (see CClosure::Create()).
// Regular function pointers are constructed directly in
// PointerType::ConstructData().
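// Illustrative JS usage (hypothetical callback): constructing a
// FunctionType's .ptr type from a JS function takes this path, e.g.
//   ctypes.FunctionType(ctypes.default_abi, ctypes.int32_t,
//                       [ctypes.int32_t]).ptr(function(x) { return x + 1; });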
JSBool
FunctionType::ConstructData(JSContext* cx,
JSObject* typeObj,
JSObject* dataObj,
JSObject* fnObj,
JSObject* thisObj)
{
JS_ASSERT(CType::GetTypeCode(cx, typeObj) == TYPE_function);
PRFuncPtr* data = static_cast<PRFuncPtr*>(CData::GetData(cx, dataObj));
FunctionInfo* fninfo = FunctionType::GetFunctionInfo(cx, typeObj);
if (fninfo->mIsVariadic) {
JS_ReportError(cx, "Can't declare a variadic callback function");
return JS_FALSE;
}
if (GetABICode(cx, fninfo->mABI) == ABI_WINAPI) {
JS_ReportError(cx, "Can't declare a ctypes.winapi_abi callback function, "
"use ctypes.stdcall_abi instead");
return JS_FALSE;
}
JSObject* closureObj = CClosure::Create(cx, typeObj, fnObj, thisObj, data);
if (!closureObj)
return JS_FALSE;
js::AutoObjectRooter root(cx, closureObj);
// Set the closure object as the referent of the new CData object.
if (!JS_SetReservedSlot(cx, dataObj, SLOT_REFERENT,
OBJECT_TO_JSVAL(closureObj)))
return JS_FALSE;
// Seal the CData object, to prevent modification of the function pointer.
// This permanently associates this object with the closure, and avoids
// having to do things like reset SLOT_REFERENT when someone tries to
// change the pointer value.
// XXX This will need to change when bug 541212 is fixed -- CData::ValueSetter
// could be called on a frozen object.
return JS_FreezeObject(cx, dataObj);
}
typedef Array<AutoValue, 16> AutoValueAutoArray;
static JSBool
ConvertArgument(JSContext* cx,
jsval arg,
JSObject* type,
AutoValue* value,
AutoValueAutoArray* strings)
{
if (!value->SizeToType(cx, type)) {
JS_ReportAllocationOverflow(cx);
return false;
}
bool freePointer = false;
if (!ImplicitConvert(cx, arg, type, value->mData, true, &freePointer))
return false;
if (freePointer) {
// ImplicitConvert converted a string for us, which we have to free.
// Keep track of it.
if (!strings->growBy(1)) {
JS_ReportOutOfMemory(cx);
return false;
}
strings->back().mData = *static_cast<char**>(value->mData);
}
return true;
}
JSBool
FunctionType::Call(JSContext* cx,
uintN argc,
jsval* vp)
{
// get the callee object...
JSObject* obj = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return false;
}
JSObject* typeObj = CData::GetCType(cx, obj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_pointer) {
JS_ReportError(cx, "not a FunctionType.ptr");
return false;
}
typeObj = PointerType::GetBaseType(cx, typeObj);
if (CType::GetTypeCode(cx, typeObj) != TYPE_function) {
JS_ReportError(cx, "not a FunctionType.ptr");
return false;
}
FunctionInfo* fninfo = GetFunctionInfo(cx, typeObj);
JSUint32 argcFixed = fninfo->mArgTypes.length();
if ((!fninfo->mIsVariadic && argc != argcFixed) ||
(fninfo->mIsVariadic && argc < argcFixed)) {
JS_ReportError(cx, "Number of arguments does not match declaration");
return false;
}
// Check if we have a Library object. If we do, make sure it's open.
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_REFERENT, &slot));
if (!JSVAL_IS_VOID(slot) && Library::IsLibrary(cx, JSVAL_TO_OBJECT(slot))) {
PRLibrary* library = Library::GetLibrary(cx, JSVAL_TO_OBJECT(slot));
if (!library) {
JS_ReportError(cx, "library is not open");
return false;
}
}
// prepare the values for each argument
AutoValueAutoArray values;
AutoValueAutoArray strings;
if (!values.resize(argc)) {
JS_ReportOutOfMemory(cx);
return false;
}
jsval* argv = JS_ARGV(cx, vp);
for (jsuint i = 0; i < argcFixed; ++i)
if (!ConvertArgument(cx, argv[i], fninfo->mArgTypes[i], &values[i], &strings))
return false;
if (fninfo->mIsVariadic) {
if (!fninfo->mFFITypes.resize(argc)) {
JS_ReportOutOfMemory(cx);
return false;
}
JSObject* obj; // Could reuse obj instead of declaring a second
JSObject* type; // JSObject*, but readability would suffer.
for (JSUint32 i = argcFixed; i < argc; ++i) {
if (JSVAL_IS_PRIMITIVE(argv[i]) ||
!CData::IsCData(cx, obj = JSVAL_TO_OBJECT(argv[i]))) {
// Since we know nothing about the CTypes of the ... arguments,
// they absolutely must be CData objects already.
JS_ReportError(cx, "argument %d of type %s is not a CData object",
i, JS_GetTypeName(cx, JS_TypeOfValue(cx, argv[i])));
return false;
}
if (!(type = CData::GetCType(cx, obj)) ||
!(type = PrepareType(cx, OBJECT_TO_JSVAL(type))) ||
// Relying on ImplicitConvert only for the limited purpose of
// converting one CType to another (e.g., T[] to T*).
!ConvertArgument(cx, argv[i], type, &values[i], &strings) ||
!(fninfo->mFFITypes[i] = CType::GetFFIType(cx, type))) {
// These functions report their own errors.
return false;
}
}
if (!PrepareCIF(cx, fninfo))
return false;
}
// initialize a pointer to an appropriate location, for storing the result
AutoValue returnValue;
TypeCode typeCode = CType::GetTypeCode(cx, fninfo->mReturnType);
if (typeCode != TYPE_void_t &&
!returnValue.SizeToType(cx, fninfo->mReturnType)) {
JS_ReportAllocationOverflow(cx);
return false;
}
uintptr_t fn = *reinterpret_cast<uintptr_t*>(CData::GetData(cx, obj));
// suspend the request before we call into the function, since the call
// may block or otherwise take a long time to return.
{
JSAutoSuspendRequest suspend(cx);
ffi_call(&fninfo->mCIF, FFI_FN(fn), returnValue.mData,
reinterpret_cast<void**>(values.begin()));
}
// Small integer types get returned as a word-sized ffi_arg. Coerce it back
// into the correct size for ConvertToJS.
switch (typeCode) {
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: \
if (sizeof(type) < sizeof(ffi_arg)) { \
ffi_arg data = *static_cast<ffi_arg*>(returnValue.mData); \
*static_cast<type*>(returnValue.mData) = static_cast<type>(data); \
} \
break;
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_BOOL_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_CHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_JSCHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#include "typedefs.h"
default:
break;
}
// prepare a JS object from the result
return ConvertToJS(cx, fninfo->mReturnType, NULL, returnValue.mData,
false, true, vp);
}
FunctionInfo*
FunctionType::GetFunctionInfo(JSContext* cx, JSObject* obj)
{
JS_ASSERT(CType::IsCType(cx, obj));
JS_ASSERT(CType::GetTypeCode(cx, obj) == TYPE_function);
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FNINFO, &slot));
JS_ASSERT(!JSVAL_IS_VOID(slot) && JSVAL_TO_PRIVATE(slot));
return static_cast<FunctionInfo*>(JSVAL_TO_PRIVATE(slot));
}
static JSBool
CheckFunctionType(JSContext* cx, JSObject* obj)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_function) {
JS_ReportError(cx, "not a FunctionType");
return JS_FALSE;
}
return JS_TRUE;
}
JSBool
FunctionType::ArgTypesGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CheckFunctionType(cx, obj))
return JS_FALSE;
// Check if we have a cached argTypes array.
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_ARGS_T, vp));
if (!JSVAL_IS_VOID(*vp))
return JS_TRUE;
FunctionInfo* fninfo = GetFunctionInfo(cx, obj);
size_t len = fninfo->mArgTypes.length();
// Prepare a new array.
Array<jsval, 16> vec;
if (!vec.resize(len))
return JS_FALSE;
for (size_t i = 0; i < len; ++i)
vec[i] = OBJECT_TO_JSVAL(fninfo->mArgTypes[i]);
JSObject* argTypes = JS_NewArrayObject(cx, len, vec.begin());
if (!argTypes)
return JS_FALSE;
// Seal and cache it.
if (!JS_FreezeObject(cx, argTypes) ||
!JS_SetReservedSlot(cx, obj, SLOT_ARGS_T, OBJECT_TO_JSVAL(argTypes)))
return JS_FALSE;
*vp = OBJECT_TO_JSVAL(argTypes);
return JS_TRUE;
}
JSBool
FunctionType::ReturnTypeGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CheckFunctionType(cx, obj))
return JS_FALSE;
// Get the returnType object from the FunctionInfo.
*vp = OBJECT_TO_JSVAL(GetFunctionInfo(cx, obj)->mReturnType);
return JS_TRUE;
}
JSBool
FunctionType::ABIGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CheckFunctionType(cx, obj))
return JS_FALSE;
// Get the abi object from the FunctionInfo.
*vp = OBJECT_TO_JSVAL(GetFunctionInfo(cx, obj)->mABI);
return JS_TRUE;
}
JSBool
FunctionType::IsVariadicGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!CheckFunctionType(cx, obj))
return JS_FALSE;
*vp = BOOLEAN_TO_JSVAL(GetFunctionInfo(cx, obj)->mIsVariadic);
return JS_TRUE;
}
/*******************************************************************************
** CClosure implementation
*******************************************************************************/
JSObject*
CClosure::Create(JSContext* cx,
JSObject* typeObj,
JSObject* fnObj,
JSObject* thisObj,
PRFuncPtr* fnptr)
{
JS_ASSERT(fnObj);
JSObject* result = JS_NewObject(cx, &sCClosureClass, NULL, NULL);
if (!result)
return NULL;
js::AutoObjectRooter root(cx, result);
// Get the FunctionInfo from the FunctionType.
FunctionInfo* fninfo = FunctionType::GetFunctionInfo(cx, typeObj);
JS_ASSERT(!fninfo->mIsVariadic);
JS_ASSERT(GetABICode(cx, fninfo->mABI) != ABI_WINAPI);
AutoPtr<ClosureInfo> cinfo(js_new<ClosureInfo>());
if (!cinfo) {
JS_ReportOutOfMemory(cx);
return NULL;
}
// Get the prototype of the FunctionType object, of class CTypeProto,
// which stores our JSContext for use with the closure.
JSObject* proto = JS_GetPrototype(cx, typeObj);
JS_ASSERT(proto);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sCTypeProtoClass);
// Get a JSContext for use with the closure.
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, proto, SLOT_CLOSURECX, &slot));
if (!JSVAL_IS_VOID(slot)) {
// Use the existing JSContext.
cinfo->cx = static_cast<JSContext*>(JSVAL_TO_PRIVATE(slot));
JS_ASSERT(cinfo->cx);
} else {
// Lazily instantiate a new JSContext, and stash it on
// ctypes.FunctionType.prototype.
JSRuntime* runtime = JS_GetRuntime(cx);
cinfo->cx = JS_NewContext(runtime, 8192);
if (!cinfo->cx) {
JS_ReportOutOfMemory(cx);
return NULL;
}
if (!JS_SetReservedSlot(cx, proto, SLOT_CLOSURECX,
PRIVATE_TO_JSVAL(cinfo->cx))) {
JS_DestroyContextNoGC(cinfo->cx);
return NULL;
}
JS_ClearContextThread(cinfo->cx);
}
#ifdef DEBUG
// We want *this* context's thread here so use cx instead of cinfo->cx.
cinfo->cxThread = JS_GetContextThread(cx);
#endif
cinfo->closureObj = result;
cinfo->typeObj = typeObj;
cinfo->thisObj = thisObj;
cinfo->jsfnObj = fnObj;
// Create an ffi_closure object and initialize it.
void* code;
cinfo->closure =
static_cast<ffi_closure*>(ffi_closure_alloc(sizeof(ffi_closure), &code));
if (!cinfo->closure || !code) {
JS_ReportError(cx, "couldn't create closure - libffi error");
return NULL;
}
ffi_status status = ffi_prep_closure_loc(cinfo->closure, &fninfo->mCIF,
CClosure::ClosureStub, cinfo.get(), code);
if (status != FFI_OK) {
ffi_closure_free(cinfo->closure);
JS_ReportError(cx, "couldn't create closure - libffi error");
return NULL;
}
// Stash the ClosureInfo struct on our new object.
if (!JS_SetReservedSlot(cx, result, SLOT_CLOSUREINFO,
PRIVATE_TO_JSVAL(cinfo.get()))) {
ffi_closure_free(cinfo->closure);
return NULL;
}
cinfo.forget();
// Casting between void* and a function pointer is forbidden in C and C++.
// Do it via an integral type.
*fnptr = reinterpret_cast<PRFuncPtr>(reinterpret_cast<uintptr_t>(code));
return result;
}
void
CClosure::Trace(JSTracer* trc, JSObject* obj)
{
JSContext* cx = trc->context;
// Make sure our ClosureInfo slot is legit. If it's not, bail.
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_CLOSUREINFO, &slot) ||
JSVAL_IS_VOID(slot))
return;
ClosureInfo* cinfo = static_cast<ClosureInfo*>(JSVAL_TO_PRIVATE(slot));
// Identify our objects to the tracer. (There's no need to identify
// 'closureObj', since that's us.)
JS_CALL_OBJECT_TRACER(trc, cinfo->typeObj, "typeObj");
JS_CALL_OBJECT_TRACER(trc, cinfo->jsfnObj, "jsfnObj");
if (cinfo->thisObj)
JS_CALL_OBJECT_TRACER(trc, cinfo->thisObj, "thisObj");
}
void
CClosure::Finalize(JSContext* cx, JSObject* obj)
{
// Make sure our ClosureInfo slot is legit. If it's not, bail.
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_CLOSUREINFO, &slot) ||
JSVAL_IS_VOID(slot))
return;
ClosureInfo* cinfo = static_cast<ClosureInfo*>(JSVAL_TO_PRIVATE(slot));
if (cinfo->closure)
ffi_closure_free(cinfo->closure);
js_delete(cinfo);
}
void
CClosure::ClosureStub(ffi_cif* cif, void* result, void** args, void* userData)
{
JS_ASSERT(cif);
JS_ASSERT(result);
JS_ASSERT(args);
JS_ASSERT(userData);
// Retrieve the essentials from our closure object.
ClosureInfo* cinfo = static_cast<ClosureInfo*>(userData);
JSContext* cx = cinfo->cx;
JSObject* typeObj = cinfo->typeObj;
JSObject* thisObj = cinfo->thisObj;
JSObject* jsfnObj = cinfo->jsfnObj;
ScopedContextThread scopedThread(cx);
// Assert that we're on the thread we were created from.
JS_ASSERT(cinfo->cxThread == JS_GetContextThread(cx));
JSAutoRequest ar(cx);
JSAutoEnterCompartment ac;
if (!ac.enter(cx, jsfnObj))
return;
// Assert that our CIFs agree.
FunctionInfo* fninfo = FunctionType::GetFunctionInfo(cx, typeObj);
JS_ASSERT(cif == &fninfo->mCIF);
TypeCode typeCode = CType::GetTypeCode(cx, fninfo->mReturnType);
// Initialize the result to zero, in case something fails. Small integer types
// are promoted to a word-sized ffi_arg, so we must be careful to zero the
// whole word.
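// For example, a uint8_t return still occupies a full ffi_arg slot here, so
// we zero sizeof(ffi_arg) bytes rather than a single byte.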
if (cif->rtype != &ffi_type_void) {
size_t size = cif->rtype->size;
switch (typeCode) {
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name:
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_BOOL_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_CHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_JSCHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#include "typedefs.h"
size = Align(size, sizeof(ffi_arg));
break;
default:
break;
}
memset(result, 0, size);
}
// Get a death grip on 'closureObj'.
js::AutoObjectRooter root(cx, cinfo->closureObj);
// Set up an array for converted arguments.
Array<jsval, 16> argv;
if (!argv.appendN(JSVAL_VOID, cif->nargs)) {
JS_ReportOutOfMemory(cx);
return;
}
js::AutoArrayRooter roots(cx, argv.length(), argv.begin());
for (JSUint32 i = 0; i < cif->nargs; ++i) {
// Convert each argument, and have any CData objects created depend on
// the existing buffers.
if (!ConvertToJS(cx, fninfo->mArgTypes[i], NULL, args[i], false, false,
&argv[i]))
return;
}
// Call the JS function. 'thisObj' may be NULL, in which case the JS engine
// will find an appropriate object to use.
jsval rval;
if (!JS_CallFunctionValue(cx, thisObj, OBJECT_TO_JSVAL(jsfnObj), cif->nargs,
argv.begin(), &rval))
return;
// Convert the result. Note that we pass 'isArgument = false', such that
// ImplicitConvert will *not* autoconvert a JS string into a pointer-to-char
// type, which would require an allocation that we can't track. The JS
// function must perform this conversion itself and return a PointerType
// CData; thus, the burden of freeing the data is left to the user.
if (!ImplicitConvert(cx, rval, fninfo->mReturnType, result, false, NULL))
return;
// Small integer types must be returned as a word-sized ffi_arg. Coerce it
// back into the size libffi expects.
switch (typeCode) {
#define DEFINE_INT_TYPE(name, type, ffiType) \
case TYPE_##name: \
if (sizeof(type) < sizeof(ffi_arg)) { \
ffi_arg data = *static_cast<type*>(result); \
*static_cast<ffi_arg*>(result) = data; \
} \
break;
#define DEFINE_WRAPPED_INT_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_BOOL_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_CHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#define DEFINE_JSCHAR_TYPE(x, y, z) DEFINE_INT_TYPE(x, y, z)
#include "typedefs.h"
default:
break;
}
}
/*******************************************************************************
** CData implementation
*******************************************************************************/
// Create a new CData object of type 'typeObj' containing binary data supplied
// in 'source', optionally with a referent object 'refObj'.
//
// * 'typeObj' must be a CType of defined (but possibly zero) size.
//
// * If an object 'refObj' is supplied, the new CData object stores the
// referent object in a reserved slot for GC safety, such that 'refObj' will
// be held alive by the resulting CData object. 'refObj' may or may not be
// a CData object; merely an object we want to keep alive.
//   * If 'refObj' is a CData object, 'ownResult' must be false.
//   * Otherwise, 'refObj' is a Library or CClosure object, and 'ownResult'
//     may be true or false.
// * Otherwise 'refObj' is NULL. In this case, 'ownResult' may be true or false.
//
// * If 'ownResult' is true, the CData object will allocate an appropriately
// sized buffer, and free it upon finalization. If 'source' data is
// supplied, the data will be copied from 'source' into the buffer;
// otherwise, the entirety of the new buffer will be initialized to zero.
// * If 'ownResult' is false, the new CData's buffer refers to a slice of
// another buffer kept alive by 'refObj'. 'source' data must be provided,
// and the new CData's buffer will refer to 'source'.
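// For example, CData::Cast() below calls Create() with the source CData as
// 'refObj' and 'ownResult' == false, so the new CData aliases (and keeps
// alive) the source's buffer.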
JSObject*
CData::Create(JSContext* cx,
JSObject* typeObj,
JSObject* refObj,
void* source,
bool ownResult)
{
JS_ASSERT(typeObj);
JS_ASSERT(CType::IsCType(cx, typeObj));
JS_ASSERT(CType::IsSizeDefined(cx, typeObj));
JS_ASSERT(ownResult || source);
JS_ASSERT_IF(refObj && CData::IsCData(cx, refObj), !ownResult);
// Get the 'prototype' property from the type.
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, typeObj, SLOT_PROTO, &slot));
JS_ASSERT(!JSVAL_IS_PRIMITIVE(slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JSObject* parent = JS_GetParent(cx, typeObj);
JS_ASSERT(parent);
JSObject* dataObj = JS_NewObject(cx, &sCDataClass, proto, parent);
if (!dataObj)
return NULL;
js::AutoObjectRooter root(cx, dataObj);
// set the CData's associated type
if (!JS_SetReservedSlot(cx, dataObj, SLOT_CTYPE, OBJECT_TO_JSVAL(typeObj)))
return NULL;
// Stash the referent object, if any, for GC safety.
if (refObj &&
!JS_SetReservedSlot(cx, dataObj, SLOT_REFERENT, OBJECT_TO_JSVAL(refObj)))
return NULL;
// Set our ownership flag.
if (!JS_SetReservedSlot(cx, dataObj, SLOT_OWNS, BOOLEAN_TO_JSVAL(ownResult)))
return NULL;
// Attach the buffer. Since the data pointer itself might not be 2-byte
// aligned (as required for storing it as a private jsval), allocate an
// aligned char** to hold it and stash that instead. :(
char** buffer = js_new<char*>();
if (!buffer) {
JS_ReportOutOfMemory(cx);
return NULL;
}
char* data;
if (!ownResult) {
data = static_cast<char*>(source);
} else {
// Initialize our own buffer.
size_t size = CType::GetSize(cx, typeObj);
data = js_array_new<char>(size);
if (!data) {
// Report a catchable allocation error.
JS_ReportAllocationOverflow(cx);
js_delete(buffer);
return NULL;
}
if (!source)
memset(data, 0, size);
else
memcpy(data, source, size);
}
*buffer = data;
if (!JS_SetReservedSlot(cx, dataObj, SLOT_DATA, PRIVATE_TO_JSVAL(buffer))) {
if (ownResult)
js_array_delete(data);
js_delete(buffer);
return NULL;
}
return dataObj;
}
void
CData::Finalize(JSContext* cx, JSObject* obj)
{
// Delete our buffer, and the data it contains if we own it.
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_OWNS, &slot) || JSVAL_IS_VOID(slot))
return;
JSBool owns = JSVAL_TO_BOOLEAN(slot);
if (!JS_GetReservedSlot(cx, obj, SLOT_DATA, &slot) || JSVAL_IS_VOID(slot))
return;
char** buffer = static_cast<char**>(JSVAL_TO_PRIVATE(slot));
if (owns)
js_array_delete(*buffer);
js_delete(buffer);
}
JSObject*
CData::GetCType(JSContext* cx, JSObject* dataObj)
{
JS_ASSERT(CData::IsCData(cx, dataObj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, dataObj, SLOT_CTYPE, &slot));
JSObject* typeObj = JSVAL_TO_OBJECT(slot);
JS_ASSERT(CType::IsCType(cx, typeObj));
return typeObj;
}
void*
CData::GetData(JSContext* cx, JSObject* dataObj)
{
JS_ASSERT(CData::IsCData(cx, dataObj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, dataObj, SLOT_DATA, &slot));
void** buffer = static_cast<void**>(JSVAL_TO_PRIVATE(slot));
JS_ASSERT(buffer);
JS_ASSERT(*buffer);
return *buffer;
}
bool
CData::IsCData(JSContext* cx, JSObject* obj)
{
return JS_GET_CLASS(cx, obj) == &sCDataClass;
}
JSBool
CData::ValueGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
{
if (!IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Convert the value to a primitive; do not create a new CData object.
if (!ConvertToJS(cx, GetCType(cx, obj), NULL, GetData(cx, obj), true, false, vp))
return JS_FALSE;
return JS_TRUE;
}
JSBool
CData::ValueSetter(JSContext* cx, JSObject* obj, jsid idval, JSBool strict, jsval* vp)
{
if (!IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
return ImplicitConvert(cx, *vp, GetCType(cx, obj), GetData(cx, obj), false, NULL);
}
JSBool
CData::Address(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "address takes zero arguments");
return JS_FALSE;
}
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
JSObject* pointerType = PointerType::CreateInternal(cx, typeObj);
if (!pointerType)
return JS_FALSE;
js::AutoObjectRooter root(cx, pointerType);
// Create a PointerType CData object containing null.
JSObject* result = CData::Create(cx, pointerType, NULL, NULL, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
// Manually set the pointer inside the object, so we skip the conversion step.
void** data = static_cast<void**>(GetData(cx, result));
*data = GetData(cx, obj);
return JS_TRUE;
}
JSBool
CData::Cast(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 2) {
JS_ReportError(cx, "cast takes two arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
if (JSVAL_IS_PRIMITIVE(argv[0]) ||
!CData::IsCData(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "first argument must be a CData");
return JS_FALSE;
}
JSObject* sourceData = JSVAL_TO_OBJECT(argv[0]);
JSObject* sourceType = CData::GetCType(cx, sourceData);
if (JSVAL_IS_PRIMITIVE(argv[1]) ||
!CType::IsCType(cx, JSVAL_TO_OBJECT(argv[1]))) {
JS_ReportError(cx, "second argument must be a CType");
return JS_FALSE;
}
JSObject* targetType = JSVAL_TO_OBJECT(argv[1]);
size_t targetSize;
if (!CType::GetSafeSize(cx, targetType, &targetSize) ||
targetSize > CType::GetSize(cx, sourceType)) {
JS_ReportError(cx,
"target CType has undefined or larger size than source CType");
return JS_FALSE;
}
// Construct a new CData object with a type of 'targetType' and a referent
// of 'sourceData'.
void* data = CData::GetData(cx, sourceData);
JSObject* result = CData::Create(cx, targetType, sourceData, data, false);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
CData::ReadString(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "readString takes zero arguments");
return JS_FALSE;
}
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
// Make sure we are a pointer to, or an array of, an 8-bit or 16-bit
// character or integer type.
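// Illustrative behavior: an 8-bit buffer (e.g. ctypes.char.ptr) is inflated
// from UTF-8, while a 16-bit buffer (e.g. ctypes.jschar.ptr) is copied as-is.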
JSObject* baseType;
JSObject* typeObj = GetCType(cx, obj);
TypeCode typeCode = CType::GetTypeCode(cx, typeObj);
void* data;
size_t maxLength = -1;
switch (typeCode) {
case TYPE_pointer:
baseType = PointerType::GetBaseType(cx, typeObj);
data = *static_cast<void**>(GetData(cx, obj));
if (data == NULL) {
JS_ReportError(cx, "cannot read contents of null pointer");
return JS_FALSE;
}
break;
case TYPE_array:
baseType = ArrayType::GetBaseType(cx, typeObj);
data = GetData(cx, obj);
maxLength = ArrayType::GetLength(cx, typeObj);
break;
default:
JS_ReportError(cx, "not a PointerType or ArrayType");
return JS_FALSE;
}
// Convert the string buffer, taking care to determine the correct string
// length in the case of arrays (which may contain embedded nulls).
JSString* result;
switch (CType::GetTypeCode(cx, baseType)) {
case TYPE_int8_t:
case TYPE_uint8_t:
case TYPE_char:
case TYPE_signed_char:
case TYPE_unsigned_char: {
char* bytes = static_cast<char*>(data);
size_t length = strnlen(bytes, maxLength);
// Determine the length.
size_t dstlen;
if (!js_InflateUTF8StringToBuffer(cx, bytes, length, NULL, &dstlen))
return JS_FALSE;
jschar* dst =
static_cast<jschar*>(JS_malloc(cx, (dstlen + 1) * sizeof(jschar)));
if (!dst)
return JS_FALSE;
ASSERT_OK(js_InflateUTF8StringToBuffer(cx, bytes, length, dst, &dstlen));
dst[dstlen] = 0;
result = JS_NewUCString(cx, dst, dstlen);
break;
}
case TYPE_int16_t:
case TYPE_uint16_t:
case TYPE_short:
case TYPE_unsigned_short:
case TYPE_jschar: {
jschar* chars = static_cast<jschar*>(data);
size_t length = strnlen(chars, maxLength);
result = JS_NewUCStringCopyN(cx, chars, length);
break;
}
default:
JS_ReportError(cx,
"base type is not an 8-bit or 16-bit integer or character type");
return JS_FALSE;
}
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
CData::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "toSource takes zero arguments");
return JS_FALSE;
}
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return JS_FALSE;
}
JSObject* typeObj = CData::GetCType(cx, obj);
void* data = CData::GetData(cx, obj);
// Walk the types, building up the toSource() string.
// First, we build up the type expression:
// 't.ptr' for pointers;
// 't.array([n])' for arrays;
// 'n' for structs, where n = t.name, the struct's name. (We assume this is
// bound to a variable in the current scope.)
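// Illustrative example (primitive case): an int32_t CData holding 5 is
// rendered as the string "ctypes.int32_t(5)".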
AutoString source;
BuildTypeSource(cx, typeObj, true, source);
AppendString(source, "(");
if (!BuildDataSource(cx, typeObj, data, false, source))
return JS_FALSE;
AppendString(source, ")");
JSString* result = NewUCString(cx, source);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
/*******************************************************************************
** Int64 and UInt64 implementation
*******************************************************************************/
JSObject*
Int64Base::Construct(JSContext* cx,
JSObject* proto,
JSUint64 data,
bool isUnsigned)
{
JSClass* clasp = isUnsigned ? &sUInt64Class : &sInt64Class;
JSObject* result = JS_NewObject(cx, clasp, proto, JS_GetParent(cx, proto));
if (!result)
return NULL;
js::AutoObjectRooter root(cx, result);
// attach the Int64's data
JSUint64* buffer = js_new<JSUint64>(data);
if (!buffer) {
JS_ReportOutOfMemory(cx);
return NULL;
}
if (!JS_SetReservedSlot(cx, result, SLOT_INT64, PRIVATE_TO_JSVAL(buffer))) {
js_delete(buffer);
return NULL;
}
if (!JS_FreezeObject(cx, result))
return NULL;
return result;
}
void
Int64Base::Finalize(JSContext* cx, JSObject* obj)
{
jsval slot;
if (!JS_GetReservedSlot(cx, obj, SLOT_INT64, &slot) || JSVAL_IS_VOID(slot))
return;
js_delete(static_cast<JSUint64*>(JSVAL_TO_PRIVATE(slot)));
}
JSUint64
Int64Base::GetInt(JSContext* cx, JSObject* obj) {
JS_ASSERT(Int64::IsInt64(cx, obj) || UInt64::IsUInt64(cx, obj));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_INT64, &slot));
return *static_cast<JSUint64*>(JSVAL_TO_PRIVATE(slot));
}
JSBool
Int64Base::ToString(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp,
bool isUnsigned)
{
if (argc > 1) {
JS_ReportError(cx, "toString takes zero or one argument");
return JS_FALSE;
}
jsuint radix = 10;
if (argc == 1) {
jsval arg = JS_ARGV(cx, vp)[0];
if (JSVAL_IS_INT(arg))
radix = JSVAL_TO_INT(arg);
if (!JSVAL_IS_INT(arg) || radix < 2 || radix > 36) {
JS_ReportError(cx, "radix argument must be an integer between 2 and 36");
return JS_FALSE;
}
}
AutoString intString;
if (isUnsigned) {
IntegerToString(GetInt(cx, obj), radix, intString);
} else {
IntegerToString(static_cast<JSInt64>(GetInt(cx, obj)), radix, intString);
}
JSString *result = NewUCString(cx, intString);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
Int64Base::ToSource(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* vp,
bool isUnsigned)
{
if (argc != 0) {
JS_ReportError(cx, "toSource takes zero arguments");
return JS_FALSE;
}
// Return a decimal string suitable for constructing the number.
AutoString source;
if (isUnsigned) {
AppendString(source, "ctypes.UInt64(\"");
IntegerToString(GetInt(cx, obj), 10, source);
} else {
AppendString(source, "ctypes.Int64(\"");
IntegerToString(static_cast<JSInt64>(GetInt(cx, obj)), 10, source);
}
AppendString(source, "\")");
JSString *result = NewUCString(cx, source);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
Int64::Construct(JSContext* cx,
uintN argc,
jsval* vp)
{
// Construct and return a new Int64 object.
if (argc != 1) {
JS_ReportError(cx, "Int64 takes one argument");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSInt64 i = 0;
if (!jsvalToBigInteger(cx, argv[0], true, &i))
return TypeError(cx, "int64", argv[0]);
// Get ctypes.Int64.prototype from the 'prototype' property of the ctor.
jsval slot;
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)),
"prototype", &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sInt64ProtoClass);
JSObject* result = Int64Base::Construct(cx, proto, i, false);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
bool
Int64::IsInt64(JSContext* cx, JSObject* obj)
{
return JS_GET_CLASS(cx, obj) == &sInt64Class;
}
JSBool
Int64::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !Int64::IsInt64(cx, obj)) {
JS_ReportError(cx, "not an Int64");
return JS_FALSE;
}
return Int64Base::ToString(cx, obj, argc, vp, false);
}
JSBool
Int64::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !Int64::IsInt64(cx, obj)) {
JS_ReportError(cx, "not an Int64");
return JS_FALSE;
}
return Int64Base::ToSource(cx, obj, argc, vp, false);
}
JSBool
Int64::Compare(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 2 ||
JSVAL_IS_PRIMITIVE(argv[0]) ||
JSVAL_IS_PRIMITIVE(argv[1]) ||
!Int64::IsInt64(cx, JSVAL_TO_OBJECT(argv[0])) ||
!Int64::IsInt64(cx, JSVAL_TO_OBJECT(argv[1]))) {
JS_ReportError(cx, "compare takes two Int64 arguments");
return JS_FALSE;
}
JSObject* obj1 = JSVAL_TO_OBJECT(argv[0]);
JSObject* obj2 = JSVAL_TO_OBJECT(argv[1]);
JSInt64 i1 = Int64Base::GetInt(cx, obj1);
JSInt64 i2 = Int64Base::GetInt(cx, obj2);
if (i1 == i2)
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(0));
else if (i1 < i2)
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(-1));
else
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(1));
return JS_TRUE;
}
#define LO_MASK ((JSUint64(1) << 32) - 1)
#define INT64_LO(i) ((i) & LO_MASK)
#define INT64_HI(i) ((i) >> 32)
JSBool
Int64::Lo(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 1 || JSVAL_IS_PRIMITIVE(argv[0]) ||
!Int64::IsInt64(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "lo takes one Int64 argument");
return JS_FALSE;
}
JSObject* obj = JSVAL_TO_OBJECT(argv[0]);
JSInt64 u = Int64Base::GetInt(cx, obj);
jsdouble d = JSUint32(INT64_LO(u));
jsval result;
if (!JS_NewNumberValue(cx, d, &result))
return JS_FALSE;
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
Int64::Hi(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 1 || JSVAL_IS_PRIMITIVE(argv[0]) ||
!Int64::IsInt64(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "hi takes one Int64 argument");
return JS_FALSE;
}
JSObject* obj = JSVAL_TO_OBJECT(argv[0]);
JSInt64 u = Int64Base::GetInt(cx, obj);
jsdouble d = JSInt32(INT64_HI(u));
jsval result;
if (!JS_NewNumberValue(cx, d, &result))
return JS_FALSE;
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
Int64::Join(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 2) {
JS_ReportError(cx, "join takes two arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSInt32 hi;
JSUint32 lo;
if (!jsvalToInteger(cx, argv[0], &hi))
return TypeError(cx, "int32", argv[0]);
if (!jsvalToInteger(cx, argv[1], &lo))
return TypeError(cx, "uint32", argv[1]);
JSInt64 i = (JSInt64(hi) << 32) + JSInt64(lo);
// Get Int64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sInt64ProtoClass);
JSObject* result = Int64Base::Construct(cx, proto, i, false);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
JSBool
UInt64::Construct(JSContext* cx,
uintN argc,
jsval* vp)
{
// Construct and return a new UInt64 object.
if (argc != 1) {
JS_ReportError(cx, "UInt64 takes one argument");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSUint64 u = 0;
if (!jsvalToBigInteger(cx, argv[0], true, &u))
return TypeError(cx, "uint64", argv[0]);
// Get ctypes.UInt64.prototype from the 'prototype' property of the ctor.
jsval slot;
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)),
"prototype", &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sUInt64ProtoClass);
JSObject* result = Int64Base::Construct(cx, proto, u, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
bool
UInt64::IsUInt64(JSContext* cx, JSObject* obj)
{
return JS_GET_CLASS(cx, obj) == &sUInt64Class;
}
JSBool
UInt64::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !UInt64::IsUInt64(cx, obj)) {
JS_ReportError(cx, "not a UInt64");
return JS_FALSE;
}
return Int64Base::ToString(cx, obj, argc, vp, true);
}
JSBool
UInt64::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !UInt64::IsUInt64(cx, obj)) {
JS_ReportError(cx, "not a UInt64");
return JS_FALSE;
}
return Int64Base::ToSource(cx, obj, argc, vp, true);
}
JSBool
UInt64::Compare(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 2 ||
JSVAL_IS_PRIMITIVE(argv[0]) ||
JSVAL_IS_PRIMITIVE(argv[1]) ||
!UInt64::IsUInt64(cx, JSVAL_TO_OBJECT(argv[0])) ||
!UInt64::IsUInt64(cx, JSVAL_TO_OBJECT(argv[1]))) {
JS_ReportError(cx, "compare takes two UInt64 arguments");
return JS_FALSE;
}
JSObject* obj1 = JSVAL_TO_OBJECT(argv[0]);
JSObject* obj2 = JSVAL_TO_OBJECT(argv[1]);
JSUint64 u1 = Int64Base::GetInt(cx, obj1);
JSUint64 u2 = Int64Base::GetInt(cx, obj2);
if (u1 == u2)
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(0));
else if (u1 < u2)
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(-1));
else
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(1));
return JS_TRUE;
}
JSBool
UInt64::Lo(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 1 || JSVAL_IS_PRIMITIVE(argv[0]) ||
!UInt64::IsUInt64(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "lo takes one UInt64 argument");
return JS_FALSE;
}
JSObject* obj = JSVAL_TO_OBJECT(argv[0]);
JSUint64 u = Int64Base::GetInt(cx, obj);
jsdouble d = JSUint32(INT64_LO(u));
jsval result;
if (!JS_NewNumberValue(cx, d, &result))
return JS_FALSE;
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
UInt64::Hi(JSContext* cx, uintN argc, jsval* vp)
{
jsval* argv = JS_ARGV(cx, vp);
if (argc != 1 || JSVAL_IS_PRIMITIVE(argv[0]) ||
!UInt64::IsUInt64(cx, JSVAL_TO_OBJECT(argv[0]))) {
JS_ReportError(cx, "hi takes one UInt64 argument");
return JS_FALSE;
}
JSObject* obj = JSVAL_TO_OBJECT(argv[0]);
JSUint64 u = Int64Base::GetInt(cx, obj);
jsdouble d = JSUint32(INT64_HI(u));
jsval result;
if (!JS_NewNumberValue(cx, d, &result))
return JS_FALSE;
JS_SET_RVAL(cx, vp, result);
return JS_TRUE;
}
JSBool
UInt64::Join(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 2) {
JS_ReportError(cx, "join takes two arguments");
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSUint32 hi;
JSUint32 lo;
if (!jsvalToInteger(cx, argv[0], &hi))
return TypeError(cx, "uint32_t", argv[0]);
if (!jsvalToInteger(cx, argv[1], &lo))
return TypeError(cx, "uint32_t", argv[1]);
JSUint64 u = (JSUint64(hi) << 32) + JSUint64(lo);
// Get UInt64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sUInt64ProtoClass);
JSObject* result = Int64Base::Construct(cx, proto, u, true);
if (!result)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
}
}
| glycerine/vj | src/js-1.8.5/js/src/ctypes/CTypes.cpp | C++ | apache-2.0 | 200,973 |
# alexa-utterances
generate expanded Amazon Alexa utterances from a template string
When building apps for Alexa or Echo, it's important to declare many permutations of text, in order to improve the voice recognition rate.
Manually generating these combinations is tedious. This module allows you to generate many (hundreds or even thousands) of sample utterances using just a few samples that get auto-expanded. Any number of sample utterances may be passed in the utterances array. Below are some sample utterance macros and what they expand to.
### usage
installation:
```
npm install alexa-utterances
```
running tests:
```
npm test
```
### API
```javascript
var result = utterances(template, slots, dictionary, exhaustiveUtterances);
```
**template** a string to generate utterances from
**slots** a hash of slots to fill for the given utterance
**dictionary** a hash of lookup values to expand
**exhaustiveUtterances** if true, builds a full cartesian product of all shortcut values and slot sample values; if false, builds a smaller list of utterances that has the full cartesian product of all shortcut values, with slot sample values filled in; default = false
**result** an array of strings built from the template
#### example
```javascript
var dictionary = { adjustments: [ 'dim', 'brighten' ] };
var slots = { Adjustment: 'LITERAL' };
var template = '{adjustments|Adjustment} the light';
var result = utterances(template, slots, dictionary);
// result:
// [ '{dim|Adjustment} the light', '{brighten|Adjustment} the light' ]
```
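If you need the full cartesian product described for **exhaustiveUtterances** above, pass `true` as the fourth argument. A minimal sketch, reusing the example values (output not shown here):

```javascript
var dictionary = { adjustments: [ 'dim', 'brighten' ] };
var slots = { Adjustment: 'LITERAL' };
var template = '{adjustments|Adjustment} the light';

// default behaviour: exhaustiveUtterances omitted (false), smaller list
var compact = utterances(template, slots, dictionary);

// exhaustiveUtterances = true: full cartesian product, including slot sample values
var exhaustive = utterances(template, slots, dictionary, true);
```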
#### slots
The slots object is a simple Name:Type mapping. The type must be one of Amazon's supported slot types: LITERAL, NUMBER, DATE, TIME, DURATION. You can use custom slot types, but you cannot integrate them with the slots object here and must instead do so with an [alternate syntax](#custom-slot-types).
#### Using a Dictionary
Several intents may use the same list of possible values, so you want to define them in one place, not in each intent schema. Use the app's dictionary.
```javascript
var dictionary = { "colors": [ "red", "green", "blue" ] };
...
"I like {colors|COLOR}"
```
#### Multiple Options mapped to a Slot
```javascript
"my favorite color is {red|green|blue|NAME}"
=>
"my favorite color is {red|NAME}"
"my favorite color is {green|NAME}"
"my favorite color is {blue|NAME}"
```
#### Generate Multiple Versions of Static Text
This lets you define multiple ways to say a phrase, but combined into a single sample utterance
```javascript
"{what is the|what's the|check the} status"
=>
"what is the status"
"what's the status"
"check the status"
```
#### Auto-Generated Number Ranges
When capturing a numeric slot value, it's helpful to generate many sample utterances with different number values
```javascript
"buy {2-5|NUMBER} items"
=>
"buy {two|NUMBER} items"
"buy {three|NUMBER} items"
"buy {four|NUMBER} items"
"buy {five|NUMBER} items"
```
Number ranges can also increment in steps
```javascript
"buy {5-20 by 5|NUMBER} items"
=>
"buy {five|NUMBER} items"
"buy {ten|NUMBER} items"
"buy {fifteen|NUMBER} items"
"buy {twenty|NUMBER} items"
```
#### Optional Words
```javascript
"what is your {favorite |}color"
=>
"what is your color"
"what is your favorite color"
```
#### Custom Slot Types <a name="custom-slot-types"></a>
You may want to work with [Custom Slot Types](https://developer.amazon.com/appsandservices/solutions/alexa/alexa-skills-kit/docs/defining-the-voice-interface#The Speech Input Data) registered in your interaction model. You can use a special syntax to leave a curly-braced slot name unparsed. For example, if you have defined a `FRUIT_TYPE` in your skill with the values `Apple`, `Orange`, and `Lemon` for the slot `Fruit`, you can keep `Fruit` as a curly-braced literal as follows:
```javascript
"{my|your} {favorite|least favorite} snack is {-|Fruit}"
=>
"my favorite snack is {Fruit}"
"your favorite snack is {Fruit}"
"my least favorite snack is {Fruit}"
"your least favorite snack is {Fruit}"
```
| Noora-q/quizbot-alexa | src/node_modules/alexa-utterances/README.md | Markdown | apache-2.0 | 4,039 |
/*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package commands
import (
gocontext "context"
"os"
"os/signal"
"syscall"
"github.com/containerd/containerd"
"github.com/containerd/containerd/errdefs"
"github.com/sirupsen/logrus"
)
type killer interface {
Kill(gocontext.Context, syscall.Signal, ...containerd.KillOpts) error
}
// ForwardAllSignals forwards every signal received by the process to the given
// task, skipping signals that can safely be ignored.
func ForwardAllSignals(ctx gocontext.Context, task killer) chan os.Signal {
sigc := make(chan os.Signal, 128)
signal.Notify(sigc)
go func() {
for s := range sigc {
if canIgnoreSignal(s) {
logrus.Debugf("Ignoring signal %s", s)
continue
}
logrus.Debug("forwarding signal ", s)
if err := task.Kill(ctx, s.(syscall.Signal)); err != nil {
if errdefs.IsNotFound(err) {
logrus.WithError(err).Debugf("Not forwarding signal %s", s)
return
}
logrus.WithError(err).Errorf("forward signal %s", s)
}
}
}()
return sigc
}
// StopCatch stops and closes a channel
func StopCatch(sigc chan os.Signal) {
signal.Stop(sigc)
close(sigc)
}
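// Typical wiring of these two helpers (illustrative sketch only; ctx and task
// are assumed to come from the caller):
//
//	sigc := ForwardAllSignals(ctx, task)
//	defer StopCatch(sigc)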
| rancher/k3s | vendor/github.com/containerd/containerd/cmd/ctr/commands/signals.go | GO | apache-2.0 | 1,599 |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.jvm.java.runner.FileClassPathRunner;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.nio.file.Path;
import java.util.Optional;
import org.immutables.value.Value;
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractAndroidInstrumentationTestJVMArgs {
private static final String INSTRUMENTATION_TEST_RUNNER =
"com.facebook.buck.testrunner.InstrumentationMain";
abstract String getPathToAdbExecutable();
abstract Optional<Path> getDirectoryForTestResults();
abstract String getTestPackage();
abstract String getTestRunner();
abstract String getDdmlibJarPath();
abstract String getKxmlJarPath();
abstract String getGuavaJarPath();
abstract String getAndroidToolsCommonJarPath();
abstract Optional<String> getDeviceSerial();
abstract Optional<Path> getInstrumentationApkPath();
abstract Optional<Path> getApkUnderTestPath();
abstract Optional<String> getTestFilter();
/** @return The filesystem path to the compiled Buck test runner classes. */
abstract Path getTestRunnerClasspath();
public void formatCommandLineArgsToList(ImmutableList.Builder<String> args) {
    // NOTE(agallagher): These probably don't belong here, but buck integration tests need
// to find the test runner classes, so propagate these down via the relevant properties.
args.add(String.format("-Dbuck.testrunner_classes=%s", getTestRunnerClasspath()));
if (getDeviceSerial().isPresent()) {
args.add(String.format("-Dbuck.device.id=%s", getDeviceSerial().get()));
}
args.add(
"-classpath",
getTestRunnerClasspath().toString()
+ File.pathSeparator
+ this.getDdmlibJarPath()
+ File.pathSeparator
+ this.getKxmlJarPath()
+ File.pathSeparator
+ this.getGuavaJarPath()
+ File.pathSeparator
+ this.getAndroidToolsCommonJarPath());
args.add(FileClassPathRunner.class.getName());
args.add(INSTRUMENTATION_TEST_RUNNER);
// The first argument to the test runner is where the test results should be written. It is not
// reliable to write test results to stdout or stderr because there may be output from the unit
// tests written to those file descriptors, as well.
if (getDirectoryForTestResults().isPresent()) {
args.add("--output", getDirectoryForTestResults().get().toString());
}
args.add("--test-package-name", getTestPackage());
args.add("--test-runner", getTestRunner());
args.add("--adb-executable-path", getPathToAdbExecutable());
if (getTestFilter().isPresent()) {
args.add("--extra-instrumentation-argument", "class=" + getTestFilter().get());
}
if (getApkUnderTestPath().isPresent()) {
args.add("--apk-under-test-path", getApkUnderTestPath().get().toFile().getAbsolutePath());
}
if (getInstrumentationApkPath().isPresent()) {
args.add(
"--instrumentation-apk-path",
getInstrumentationApkPath().get().toFile().getAbsolutePath());
}
}
}
| dsyang/buck | src/com/facebook/buck/android/AbstractAndroidInstrumentationTestJVMArgs.java | Java | apache-2.0 | 3,785 |
$:.unshift(File.join(File.dirname(__FILE__), "..", "..", "lib"))
require "minitest/unit"
require "minitest/autorun"
require "filewatch/tail"
class TailTest < MiniTest::Unit::TestCase
def test_quit
require "timeout"
tail = FileWatch::Tail.new
#Thread.new(tail) { |t| sleep(1); t.quit }
#Timeout.timeout(5) do
#tail.subscribe { |e| }
#end
tail.quit
end
end # class TailTest
| baozoumanhua/elk-rtf | logstash/vendor/bundle/jruby/1.9/gems/filewatch-0.6.2/test/filewatch/tail.rb | Ruby | apache-2.0 | 408 |
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
clientset "github.com/openshift/client-go/console/clientset/versioned"
consolev1 "github.com/openshift/client-go/console/clientset/versioned/typed/console/v1"
fakeconsolev1 "github.com/openshift/client-go/console/clientset/versioned/typed/console/v1/fake"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/discovery"
fakediscovery "k8s.io/client-go/discovery/fake"
"k8s.io/client-go/testing"
)
// NewSimpleClientset returns a clientset that will respond with the provided objects.
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is,
// without applying any validations and/or defaults. It shouldn't be considered a replacement
// for a real clientset and is mostly useful in simple unit tests.
func NewSimpleClientset(objects ...runtime.Object) *Clientset {
o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder())
for _, obj := range objects {
if err := o.Add(obj); err != nil {
panic(err)
}
}
cs := &Clientset{tracker: o}
cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake}
cs.AddReactor("*", "*", testing.ObjectReaction(o))
cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) {
gvr := action.GetResource()
ns := action.GetNamespace()
watch, err := o.Watch(gvr, ns)
if err != nil {
return false, nil, err
}
return true, watch, nil
})
return cs
}
// Clientset implements clientset.Interface. Meant to be embedded into a
// struct to get a default implementation. This makes faking out just the method
// you want to test easier.
type Clientset struct {
testing.Fake
discovery *fakediscovery.FakeDiscovery
tracker testing.ObjectTracker
}
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
return c.discovery
}
func (c *Clientset) Tracker() testing.ObjectTracker {
return c.tracker
}
var _ clientset.Interface = &Clientset{}
// ConsoleV1 retrieves the ConsoleV1Client
func (c *Clientset) ConsoleV1() consolev1.ConsoleV1Interface {
return &fakeconsolev1.FakeConsoleV1{Fake: &c.Fake}
}
| pweil-/origin | vendor/github.com/openshift/client-go/console/clientset/versioned/fake/clientset_generated.go | GO | apache-2.0 | 2,165 |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
[assembly: Elmah.Scc("$Id: StaticAssertion.cs 566 2009-05-11 10:37:10Z azizatif $")]
namespace Elmah.Assertions
{
/// <summary>
    /// A static assertion implementation that always evaluates to
/// a preset value.
/// </summary>
public sealed class StaticAssertion : IAssertion
{
public static readonly StaticAssertion True = new StaticAssertion(true);
public static readonly StaticAssertion False = new StaticAssertion(false);
private readonly bool _value;
private StaticAssertion(bool value)
{
_value = value;
}
public bool Test(object context)
{
return _value;
}
}
} | madaboutcode/Elmah | src/Elmah/Assertions/StaticAssertion.cs | C# | apache-2.0 | 1,493 |
---
layout: "docs_api"
version: "1.0.0-rc.5"
versionHref: "/docs"
path: "api/directive/ionCheckbox/"
title: "ion-checkbox"
header_sub_title: "Directive in module ionic"
doc: "ionCheckbox"
docType: "directive"
---
<div class="improve-docs">
<a href='http://github.com/driftyco/ionic/tree/master/js/angular/directive/checkbox.js#L2'>
View Source
</a>
<a href='http://github.com/driftyco/ionic/edit/master/js/angular/directive/checkbox.js#L2'>
Improve this doc
</a>
</div>
<h1 class="api-title">
ion-checkbox
</h1>
{% include codepen.html id="hqcju" %}
The checkbox behaves exactly like the standard HTML checkbox input; the only difference is the styling.
The checkbox behaves like any [AngularJS checkbox](http://docs.angularjs.org/api/ng/input/input[checkbox]).
<h2 id="usage">Usage</h2>
```html
<ion-checkbox ng-model="isChecked">Checkbox Label</ion-checkbox>
```
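Because it behaves like a regular AngularJS checkbox, the usual bindings such as `ng-change` work with it as well. A small illustrative sketch (the model and handler names are placeholders):

```html
<ion-checkbox ng-model="pushNotification.checked"
              ng-change="pushNotificationChange()">
  Enable Push Notifications
</ion-checkbox>
```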
| saimandeper/ionic-site | docs/1.0.0-rc.5/api/directive/ionCheckbox/index.md | Markdown | apache-2.0 | 925 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
public class NumericDVIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData {
public NumericDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType);
}
@Override
public AtomicLongFieldData load(AtomicReaderContext context) {
final AtomicReader reader = context.reader();
final String field = fieldNames.indexName();
return new AtomicLongFieldData(0) {
@Override
public SortedNumericDocValues getLongValues() {
try {
final NumericDocValues values = DocValues.getNumeric(reader, field);
final Bits docsWithField = DocValues.getDocsWithField(reader, field);
return DocValues.singleton(values, docsWithField);
} catch (IOException e) {
throw new ElasticsearchIllegalStateException("Cannot load doc values", e);
}
}
};
}
@Override
public AtomicLongFieldData loadDirect(AtomicReaderContext context) throws Exception {
return load(context);
}
@Override
public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) {
return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
}
@Override
public org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType getNumericType() {
return NumericType.LONG;
}
}
| zuoyebushiwo/elasticsearch1.7-study | src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java | Java | apache-2.0 | 3,031 |
# Ingress
**Table of Contents**
<!-- BEGIN MUNGE: GENERATED_TOC -->
- [Ingress](#ingress)
- [What is Ingress?](#what-is-ingress)
- [Prerequisites](#prerequisites)
- [The Ingress Resource](#the-ingress-resource)
- [Ingress controllers](#ingress-controllers)
- [Types of Ingress](#types-of-ingress)
- [Single Service Ingress](#single-service-ingress)
- [Simple fanout](#simple-fanout)
- [Name based virtual hosting](#name-based-virtual-hosting)
- [Loadbalancing](#loadbalancing)
- [Updating an Ingress](#updating-an-ingress)
- [Future Work](#future-work)
- [Alternatives](#alternatives)
<!-- END MUNGE: GENERATED_TOC -->
__Terminology__
Throughout this doc you will see a few terms that are sometimes used interchangeably elsewhere, which might cause confusion. This section attempts to clarify them.
* Node: A single virtual or physical machine in a Kubernetes cluster.
* Cluster: A group of nodes firewalled from the internet, that are the primary compute resources managed by Kubernetes.
* Edge router: A router that enforces the firewall policy for your cluster. This could be a gateway managed by a cloudprovider or a physical piece of hardware.
* Cluster network: A set of links, logical or physical, that facilitate communication within a cluster according to the [Kubernetes networking model](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/admin/networking.md). Examples of a Cluster network include Overlays such as [flannel](https://github.com/coreos/flannel#flannel) or SDNs such as [OVS](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/admin/ovs-networking.md).
* Service: A Kubernetes [Service](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/user-guide/services.md) that identifies a set of pods using label selectors. Unless mentioned otherwise, Services are assumed to have virtual IPs only routable within the cluster network.
## What is Ingress?
Typically, services and pods have IPs only routable by the cluster network. All traffic that ends up at an edge router is either dropped or forwarded elsewhere. Conceptually, this might look like:
```
internet
|
------------
[ Services ]
```
An Ingress is a collection of rules that allow inbound connections to reach the cluster services.
```
internet
|
[ Ingress ]
--|-----|--
[ Services ]
```
It can be configured to give services externally-reachable urls, load balance traffic, terminate SSL, offer name based virtual hosting etc. Users request ingress by POSTing the Ingress resource to the API server. An [Ingress controller](#ingress-controllers) is responsible for fulfilling the Ingress, usually with a loadbalancer, though it may also configure your edge router or additional frontends to help handle the traffic in an HA manner.
## Prerequisites
Before you start using the Ingress resource, there are a few things you should understand:
* The Ingress resource is not available in any Kubernetes release prior to 1.1
* You need an Ingress controller to satisfy an Ingress. Simply creating the resource will have no effect.
* On GCE/GKE there should be a [L7 cluster addon](../../cluster/addons/cluster-loadbalancing/glbc/README.md#prerequisites), on other platforms you either need to write your own or [deploy an existing controller](https://github.com/kubernetes/contrib/tree/master/Ingress) as a pod.
* The resource currently does not support HTTPS, but will do so before it leaves beta.
## The Ingress Resource
A minimal Ingress might look like:
```yaml
01. apiVersion: extensions/v1beta1
02. kind: Ingress
03. metadata:
04. name: test-ingress
05. spec:
06. rules:
07. - http:
08. paths:
09. - path: /testpath
10. backend:
11. serviceName: test
12. servicePort: 80
```
*POSTing this to the API server will have no effect if you have not configured an [Ingress controller](#ingress-controllers).*
__Lines 1-4__: As with all other Kubernetes config, an Ingress needs `apiVersion`, `kind`, and `metadata` fields. For general information about working with config files, see [deploying applications](deploying-applications.md), [configuring containers](configuring-containers.md), and [working with resources](working-with-resources.md) documents.
__Lines 5-7__: Ingress [spec](../devel/api-conventions.md#spec-and-status) has all the information needed to configure a loadbalancer or proxy server. Most importantly, it contains a list of rules matched against all incoming requests. Currently the Ingress resource only supports http rules.
__Lines 8-9__: Each http rule contains the following information: A host (eg: foo.bar.com, defaults to * in this example), a list of paths (eg: /testpath) each of which has an associated backend (test:80). Both the host and path must match the content of an incoming request before the loadbalancer directs traffic to the backend.
__Lines 10-12__: A backend is a service:port combination as described in the [services doc](services.md). Ingress traffic is typically sent directly to the endpoints matching a backend.
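For context, the `test:80` backend referenced above maps to a plain Kubernetes Service. A minimal illustrative definition (the selector and target port are placeholders, not taken from this guide) might look like:

```yaml
apiVersion: v1
kind: Service
metadata:
  name: test
spec:
  selector:
    app: test
  ports:
  - port: 80
    targetPort: 8080
```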
__Global Parameters__: For the sake of simplicity the example Ingress has no global parameters, see the [api-reference](../../pkg/apis/extensions/v1beta1/types.go) for a full definition of the resource. One can specify a global default backend in the absence of which requests that don't match a path in the spec are sent to the default backend of the Ingress controller. Though the Ingress resource doesn't support HTTPS yet, security configs would also be global.
## Ingress controllers
In order for the Ingress resource to work, the cluster must have an Ingress controller running. This is unlike other types of controllers, which typically run as part of the `kube-controller-manager` binary, and which are typically started automatically as part of cluster creation. You need to choose the ingress controller implementation that is the best fit for your cluster, or implement one. Examples and instructions can be found [here](https://github.com/kubernetes/contrib/tree/master/Ingress).
## Types of Ingress
### Single Service Ingress
There are existing Kubernetes concepts that allow you to expose a single service (see [alternatives](#alternatives)), however you can do so through an Ingress as well, by specifying a *default backend* with no rules.
<!-- BEGIN MUNGE: EXAMPLE ingress.yaml -->
```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: test-ingress
spec:
backend:
serviceName: testsvc
servicePort: 80
```
[Download example](ingress.yaml?raw=true)
<!-- END MUNGE: EXAMPLE ingress.yaml -->
If you create it using `kubectl -f` you should see:
```shell
$ kubectl get ing
NAME RULE BACKEND ADDRESS
test-ingress - testsvc:80 107.178.254.228
```
Where `107.178.254.228` is the IP allocated by the Ingress controller to satisfy this Ingress. The `RULE` column shows that all traffic sent to the IP is directed to the Kubernetes Service listed under `BACKEND`.
### Simple fanout
As described previously, pods within Kubernetes have IPs only visible on the cluster network, so we need something at the edge accepting ingress traffic and proxying it to the right endpoints. This component is usually one or more highly available loadbalancers. An Ingress allows you to keep the number of loadbalancers down to a minimum; for example, a setup like:
```
foo.bar.com -> 178.91.123.132 -> / foo s1:80
/ bar s2:80
```
would require an Ingress such as:
```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: test
spec:
rules:
- host: foo.bar.com
http:
paths:
- path: /foo
backend:
serviceName: s1
servicePort: 80
- path: /bar
backend:
serviceName: s2
servicePort: 80
```
When you create the Ingress with `kubectl create -f`:
```
$ kubectl get ing
NAME RULE BACKEND ADDRESS
test -
foo.bar.com
/foo s1:80
/bar s2:80
```
The Ingress controller will provision an implementation specific loadbalancer that satisfies the Ingress, as long as the services (s1, s2) exist. When it has done so, you will see the address of the loadbalancer under the last column of the Ingress.
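If you want more detail than `kubectl get ing` prints, you can also describe the resource, which should show the rules, backends, and any events recorded by the controller (command shown for illustration; output omitted):

```shell
$ kubectl describe ing test
```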
### Name based virtual hosting
Name-based virtual hosts use multiple host names for the same IP address.
```
foo.bar.com --| |-> foo.bar.com s1:80
| 178.91.123.132 |
bar.foo.com --| |-> bar.foo.com s2:80
```
The following Ingress tells the backing loadbalancer to route requests based on the [Host header](https://tools.ietf.org/html/rfc7230#section-5.4).
```yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: test
spec:
rules:
- host: foo.bar.com
http:
paths:
- backend:
serviceName: s1
servicePort: 80
- host: bar.foo.com
http:
paths:
- backend:
serviceName: s2
servicePort: 80
```
__Default Backends__: An Ingress with no rules, like the one shown in the previous section, sends all traffic to a single default backend. You can use the same technique to tell a loadbalancer where to find your website's 404 page, by specifying a set of rules *and* a default backend. Traffic is routed to your default backend if none of the Hosts in your Ingress match the Host in the request header, and/or none of the paths match the url of the request.
### Loadbalancing
An Ingress controller is bootstrapped with some loadbalancing policy settings that it applies to all Ingress, such as the loadbalancing algorithm, backend weight scheme etc. More advanced loadbalancing concepts (eg: persistent sessions, dynamic weights) are not yet exposed through the Ingress. You can still get these features through the [service loadbalancer](https://github.com/kubernetes/contrib/tree/master/service-loadbalancer). With time, we plan to distil loadbalancing patterns that are applicable cross platform into the Ingress resource.
It's also worth noting that even though health checks are not exposed directly through the Ingress, there exist parallel concepts in Kubernetes such as [readiness probes](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/user-guide/production-pods.md#liveness-and-readiness-probes-aka-health-checks) which allow you to achieve the same end result.
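As a sketch, a readiness probe on one of the backend pods could look like the following (all field values here are illustrative placeholders, not taken from this guide):

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: s1-backend
  labels:
    app: s1
spec:
  containers:
  - name: web
    image: nginx
    ports:
    - containerPort: 80
    readinessProbe:
      httpGet:
        path: /healthz
        port: 80
      initialDelaySeconds: 5
      timeoutSeconds: 1
```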
## Updating an Ingress
Say you'd like to add a new Host to an existing Ingress; you can update it by editing the resource:
```shell
$ kubectl get ing
NAME RULE BACKEND ADDRESS
test - 178.91.123.132
foo.bar.com
/foo s1:80
$ kubectl edit ing test
```
This should pop up an editor with the existing YAML. Modify it to include the new Host:
```yaml
spec:
rules:
- host: foo.bar.com
http:
paths:
- backend:
serviceName: s1
servicePort: 80
path: /foo
- host: bar.baz.com
http:
paths:
- backend:
serviceName: s2
servicePort: 80
path: /foo
..
```
Saving it will update the resource in the API server, which should tell the Ingress controller to reconfigure the loadbalancer.
```shell
$ kubectl get ing
NAME RULE BACKEND ADDRESS
test - 178.91.123.132
foo.bar.com
/foo s1:80
bar.baz.com
/foo s2:80
```
You can achieve the same by invoking `kubectl replace -f` on a modified Ingress yaml file.
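For example, assuming the modified spec was saved to a file (the file name is illustrative):

```shell
$ kubectl replace -f test-ingress.yaml
```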
## Future Work
* Various modes of HTTPS/TLS support (edge termination, SNI, etc.)
* Requesting an IP or Hostname via claims
* Combining L4 and L7 Ingress
* More Ingress controllers
Please track the [L7 and Ingress proposal](https://github.com/kubernetes/kubernetes/pull/12827) for more details on the evolution of the resource, and the [Ingress sub-repository](https://github.com/kubernetes/contrib/tree/master/Ingress) for more details on the evolution of various Ingress controllers.
## Alternatives
You can expose a Service in multiple ways that don't directly involve the Ingress resource:
* Use [Service.Type=LoadBalancer](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/user-guide/services.md#type-loadbalancer)
* Use [Service.Type=NodePort](https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/user-guide/services.md#type-nodeport)
* Use a [Port Proxy](https://github.com/kubernetes/contrib/tree/master/for-demos/proxy-to-service)
* Deploy the [Service loadbalancer](https://github.com/kubernetes/contrib/tree/master/service-loadbalancer). This allows you to share a single IP among multiple Services and achieve more advanced loadbalancing through Service Annotations.
| combk8s/kubernetes | docs/user-guide/ingress.md | Markdown | apache-2.0 | 14,079 |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.TextChange;
import com.intellij.util.text.CharArrayCharSequence;
import com.intellij.util.text.StringFactory;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.List;
/**
 * Encapsulates the logic of merging a set of changes into a particular text.
* <p/>
* Thread-safe.
*
* @author Denis Zhdanov
* @since 12/22/10 12:02 PM
*/
@SuppressWarnings({"MethodMayBeStatic"})
public class BulkChangesMerger {
public static final BulkChangesMerger INSTANCE = new BulkChangesMerger();
private static final Logger LOG = Logger.getInstance("#" + BulkChangesMerger.class.getName());
/**
* Merges given changes within the given text and returns result as a new char sequence.
*
* @param text text to apply given changes for
* @param textLength interested number of symbols from the given text to use
* @param changes changes to apply to the given text. It's assumed that there are no intersections between them and that they
* are sorted by offsets in ascending order
* @return merge result
*/
public CharSequence mergeToCharSequence(@NotNull char[] text, int textLength, @NotNull List<? extends TextChange> changes) {
return StringFactory.createShared(mergeToCharArray(text, textLength, changes));
}
/**
* Merges given changes within the given text and returns result as a new char array.
*
* @param text text to apply given changes for
* @param textLength interested number of symbols from the given text to use
* @param changes changes to apply to the given text. It's assumed that there are no intersections between them and that they
* are sorted by offsets in ascending order
* @return merge result
*/
public char[] mergeToCharArray(@NotNull char[] text, int textLength, @NotNull List<? extends TextChange> changes) {
int newLength = textLength;
for (TextChange change : changes) {
newLength += change.getText().length() - (change.getEnd() - change.getStart());
}
char[] data = new char[newLength];
int oldEndOffset = textLength;
int newEndOffset = data.length;
for (int i = changes.size() - 1; i >= 0; i--) {
TextChange change = changes.get(i);
// Copy all unprocessed symbols from initial text that lay after the changed offset.
int symbolsToMoveNumber = oldEndOffset - change.getEnd();
System.arraycopy(text, change.getEnd(), data, newEndOffset - symbolsToMoveNumber, symbolsToMoveNumber);
newEndOffset -= symbolsToMoveNumber;
// Copy all change symbols.
char[] changeSymbols = change.getChars();
newEndOffset -= changeSymbols.length;
System.arraycopy(changeSymbols, 0, data, newEndOffset, changeSymbols.length);
oldEndOffset = change.getStart();
}
if (oldEndOffset > 0) {
System.arraycopy(text, 0, data, 0, oldEndOffset);
}
return data;
}
/**
* Allows to perform 'in-place' merge of the given changes to the given array.
* <p/>
 * I.e. it's assumed that the given array contains the text in the <code>[0; length)</code> region and that the given
 * changes define offsets against it. It's also assumed that the given array is long enough to contain the resulting text after applying the changes.
* <p/>
* Example: consider that initial text is <code>'12345'</code> and given changes are <code>'remove text at [1; 3) interval'</code>
* and <code>'replace text at [4; 5) interval with 'abcde''</code>. Resulting text is <code>'14abcde'</code> then and given array
* length should be not less than 7.
*
* @param data data array
* @param length initial text length (without changes)
* @param changes change to apply to the target text
* @throws IllegalArgumentException if given array is not big enough to contain the resulting text
*/
public void mergeInPlace(@NotNull char[] data, int length, @NotNull List<? extends TextChangeImpl> changes)
throws IllegalArgumentException
{
// Consider two corner cases:
// 1. Every given change increases text length, i.e. change text length is more than changed region length. We can calculate
// resulting text length and start merging the changes from the right end then;
// 2. Every given change reduces text length, start from the left end then;
// The general idea is to group all of the given changes by 'add text'/ 'remove text' criteria and process them sequentially.
// Example: let's assume we have the following changes:
// 1) replace two symbols with five (diff +3);
// 2) replace two symbols by one (diff -1);
// 3) replace two symbols by one (diff -1);
// 4) replace four symbols by one (diff -3);
// 5) replace one symbol by two (diff +2);
// 6) replace one symbol by three (diff +2);
// Algorithm:
// 1. Define the first group of change. First change diff is '+3', hence, iterate all changes until the resulting diff becomes
// less or equal to the zero. So, the first four changes conduct the first group. Initial change increased text length, hence,
// we process the changes from right to left starting at offset '4-th change start + 1';
// 2. Current diff is '-2' (4-th change diff is '-3' and one slot was necessary for previous group completion), so, that means
// that we should process the 4-th and 5-th changes as the second group. Initial change direction is negative, hence, we
// process them from left to the right;
// 3. Process the remaining change;
if (changes.isEmpty()) {
return;
}
int diff = 0;
for (TextChangeImpl change : changes) {
diff += change.getDiff();
}
if (length + diff > data.length) {
throw new IllegalArgumentException(String.format(
"Can't perform in-place changes merge. Reason: data array is not big enough to hold resulting text. Current size: %d, "
+ "minimum size: %d", data.length, length + diff
));
}
try {
for (Context context = new Context(changes, data, length, length + diff); !context.isComplete();) {
if (!context.startGroup()) {
return;
}
context.endGroup();
}
}
catch (RuntimeException e) {
StringBuilder changesDescription = new StringBuilder();
for (TextChangeImpl change : changes) {
changesDescription.append(change.getText().length()).append(":").append(change.getStart()).append("-").append(change.getEnd())
.append(",");
}
if (changesDescription.length() > 0) {
changesDescription.setLength(changesDescription.length() - 1);
}
LOG.error(String.format(
"Invalid attempt to perform in-place document changes merge detected. Initial text length: %d, data array length: %d, "
+ "changes: [%s], changes diff: %d", length, data.length, changesDescription, diff
), e);
char[] merged = mergeToCharArray(data, length, changes);
System.arraycopy(merged, 0, data, 0, length + diff);
}
}
private static void copy(@NotNull char[] data, int offset, @NotNull CharSequence text) {
for (int i = 0; i < text.length(); i++) {
data[i + offset] = text.charAt(i);
}
}
private static class Context {
private final List<? extends TextChangeImpl> myChanges;
private final char[] myData;
private final int myInputLength;
private final int myOutputLength;
private int myDataStartOffset;
private int myDataEndOffset;
private int myChangeGroupStartIndex;
private int myChangeGroupEndIndex;
private int myDiff;
private int myFirstChangeShift;
private int myLastChangeShift;
Context(@NotNull List<? extends TextChangeImpl> changes, @NotNull char[] data, int inputLength, int outputLength) {
myChanges = changes;
myData = data;
myInputLength = inputLength;
myOutputLength = outputLength;
}
/**
* Asks current context to update its state in order to point to the first change in a group.
*
* @return <code>true</code> if the first change in a group is found; <code>false</code> otherwise
*/
@SuppressWarnings({"ForLoopThatDoesntUseLoopVariable"})
public boolean startGroup() {
// Define first change that increases or reduces text length.
for (boolean first = true; myDiff == 0 && myChangeGroupStartIndex < myChanges.size(); myChangeGroupStartIndex++, first = false) {
TextChangeImpl change = myChanges.get(myChangeGroupStartIndex);
myDiff = change.getDiff();
if (first) {
myDiff += myFirstChangeShift;
}
if (myDiff == 0) {
copy(myData, change.getStart() + (first ? myFirstChangeShift : 0), change.getText());
}
else {
myDataStartOffset = change.getStart();
if (first) {
myDataStartOffset += myFirstChangeShift;
}
break;
}
}
return myDiff != 0;
}
public void endGroup() {
boolean includeEndChange = false;
myLastChangeShift = 0;
for (myChangeGroupEndIndex = myChangeGroupStartIndex + 1; myChangeGroupEndIndex < myChanges.size(); myChangeGroupEndIndex++) {
assert myDiff != 0 : String.format(
"Text: '%s', length: %d, changes: %s, change group indices: %d-%d",
Arrays.toString(myData), myInputLength, myChanges, myChangeGroupStartIndex, myChangeGroupEndIndex);
TextChangeImpl change = myChanges.get(myChangeGroupEndIndex);
int newDiff = myDiff + change.getDiff();
// Changes group results to the zero text length shift.
if (newDiff == 0) {
myDataEndOffset = change.getEnd();
includeEndChange = true;
break;
}
// Changes group is not constructed yet.
if (!(myDiff > 0 ^ newDiff > 0)) {
myDiff = newDiff;
continue;
}
// Current change finishes changes group.
myDataEndOffset = change.getStart() + myDiff;
myLastChangeShift = myDiff;
break;
}
if (myChangeGroupEndIndex >= myChanges.size()) {
if (myDiff > 0) {
processLastPositiveGroup();
}
else {
processLastNegativeGroup();
}
myChangeGroupStartIndex = myChangeGroupEndIndex = myChanges.size();
}
else if (myDiff > 0) {
processPositiveGroup(includeEndChange);
}
else {
processNegativeGroup(includeEndChange);
}
myDiff = 0;
myChangeGroupStartIndex = myChangeGroupEndIndex;
if (includeEndChange) {
myChangeGroupStartIndex++;
}
myFirstChangeShift = myLastChangeShift;
}
/**
* Asks to process changes group identified by [{@link #myChangeGroupStartIndex}; {@link #myChangeGroupEndIndex}) where
* overall group direction is 'positive' (i.e. it starts from the change that increases text length).
*
* @param includeEndChange flag that defines if change defined by {@link #myChangeGroupEndIndex} should be processed
*/
private void processPositiveGroup(boolean includeEndChange) {
int outputOffset = myDataEndOffset;
int prevChangeStart = -1;
for (int i = myChangeGroupEndIndex; i >= myChangeGroupStartIndex; i--) {
TextChangeImpl change = myChanges.get(i);
if (prevChangeStart >= 0) {
int length = prevChangeStart - change.getEnd();
System.arraycopy(myData, change.getEnd(), myData, outputOffset - length, length);
outputOffset -= length;
}
prevChangeStart = change.getStart();
if (i == myChangeGroupEndIndex && !includeEndChange) {
continue;
}
int length = change.getText().length();
if (length > 0) {
copy(myData, outputOffset - length, change.getText());
outputOffset -= length;
}
}
}
private void processLastPositiveGroup() {
int end = myChanges.get(myChanges.size() - 1).getEnd();
int length = myInputLength - end;
myDataEndOffset = myOutputLength - length;
System.arraycopy(myData, end, myData, myDataEndOffset, length);
myChangeGroupEndIndex = myChanges.size() - 1;
processPositiveGroup(true);
}
private void processNegativeGroup(boolean includeEndChange) {
int prevChangeEnd = -1;
for (int i = myChangeGroupStartIndex; i <= myChangeGroupEndIndex; i++) {
TextChangeImpl change = myChanges.get(i);
if (prevChangeEnd >= 0) {
int length = change.getStart() - prevChangeEnd;
System.arraycopy(myData, prevChangeEnd, myData, myDataStartOffset, length);
myDataStartOffset += length;
}
prevChangeEnd = change.getEnd();
if (i == myChangeGroupEndIndex && !includeEndChange) {
return;
}
int length = change.getText().length();
if (length > 0) {
copy(myData, myDataStartOffset, change.getText());
myDataStartOffset += length;
}
}
}
private void processLastNegativeGroup() {
myChangeGroupEndIndex = myChanges.size() - 1;
processNegativeGroup(true);
int end = myChanges.get(myChangeGroupEndIndex).getEnd();
System.arraycopy(myData, end, myData, myDataStartOffset, myInputLength - end);
}
public boolean isComplete() {
return myChangeGroupStartIndex >= myChanges.size();
}
}
}
| IllusionRom-deprecated/android_platform_tools_idea | platform/core-impl/src/com/intellij/openapi/editor/impl/BulkChangesMerger.java | Java | apache-2.0 | 14,657 |
//// [missingTypeArguments2.ts]
class A<T> { }
var x: () => A;
(a: A) => { };
var y: A<A>;
(): A => null;
//// [missingTypeArguments2.js]
var A = (function () {
function A() {
}
return A;
})();
var x;
(function (a) {
});
var y;
(function () { return null; });
| yukulele/TypeScript | tests/baselines/reference/missingTypeArguments2.js | JavaScript | apache-2.0 | 288 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.mapping.put;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateListener;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.MetaDataMappingService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
/**
* Put mapping action.
*/
public class TransportPutMappingAction extends TransportMasterNodeOperationAction<PutMappingRequest, PutMappingResponse> {
private final MetaDataMappingService metaDataMappingService;
@Inject
public TransportPutMappingAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, MetaDataMappingService metaDataMappingService) {
super(settings, transportService, clusterService, threadPool);
this.metaDataMappingService = metaDataMappingService;
}
@Override
protected String executor() {
// we go async right away
return ThreadPool.Names.SAME;
}
@Override
protected String transportAction() {
return PutMappingAction.NAME;
}
@Override
protected PutMappingRequest newRequest() {
return new PutMappingRequest();
}
@Override
protected PutMappingResponse newResponse() {
return new PutMappingResponse();
}
@Override
protected void doExecute(PutMappingRequest request, ActionListener<PutMappingResponse> listener) {
request.indices(clusterService.state().metaData().concreteIndices(request.indices(), request.indicesOptions()));
super.doExecute(request, listener);
}
@Override
protected ClusterBlockException checkBlock(PutMappingRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
}
@Override
protected void masterOperation(final PutMappingRequest request, final ClusterState state, final ActionListener<PutMappingResponse> listener) throws ElasticsearchException {
PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest()
.ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
.indices(request.indices()).type(request.type())
.source(request.source()).ignoreConflicts(request.ignoreConflicts());
metaDataMappingService.putMapping(updateRequest, new ClusterStateUpdateListener() {
@Override
public void onResponse(ClusterStateUpdateResponse response) {
listener.onResponse(new PutMappingResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable t) {
logger.debug("failed to put mappings on indices [{}], type [{}]", t, request.indices(), request.type());
listener.onFailure(t);
}
});
}
}
| alexksikes/elasticsearch | src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java | Java | apache-2.0 | 4,297 |
{{{
"title": "Getting Started with MapR - Blueprint",
"date": "1-26-2016",
"author": "<a href='https://twitter.com/KeithResar'>@KeithResar</a>",
"attachments": [],
"contentIsHTML": false
}}}
### Overview
After reading this article, the reader should feel comfortable deploying MapR Apache Hadoop on CenturyLink Cloud.
### Partner Profile
<img src="../../images/mapr/MapR_Company_Logo.png" style="border:0;float:right;">
https://www.mapr.com/
##### Customer Support
|Sales Contact | Support Contact |
|:- | :- |
|[Contact us](http://info.mapr.com/ContactUs.html)<br>855.669.6277 | [email protected]<br>[Support overview](https://www.mapr.com/support/overview)<br>1.855.669.6277, Option 2 |
### Description
MapR Inc. has integrated their Hadoop Platform with the CenturyLink Cloud platform. The purpose of this KB article is to help the reader take
advantage of this integration to achieve rapid time-to-value for this Hadoop distribution.
The MapR Converged Data Platform integrates the power of Hadoop and Spark with global event streaming, real-time database capabilities, and
enterprise storage for developing and running innovative data applications.
The MapR Platform is powered by the industry’s fastest, most reliable, secure, and open data infrastructure that dramatically lowers TCO and
enables global real-time data applications, now available as part of the CenturyLink Cloud Blueprint Engine.
### Audience
* CenturyLink Cloud Users
* Hadoop Administrators
* Hadoop Users
* Developers interested in Hadoop
### Impact
After reading this article, the user should feel comfortable getting started using the partner technology on CenturyLink Cloud.
After executing the steps in this Getting Started document, the users will have a functioning Hadoop instance upon which they can start developing solutions.
The MapR Converged Data Platform integrates Hadoop and Spark, real-time database capabilities, and global event streaming with big data enterprise
storage, for developing and running innovative data applications. The MapR Platform is powered by the industry’s fastest, most reliable, secure,
and open data infrastructure that dramatically lowers TCO and enables global real-time data applications.
### Deploying MapR Cluster Blueprint
1. **Locate the Blueprint in the Blueprint Library**
Starting from the CenturyLink Control Panel, navigate to the Blueprints Library. Search for "MapR" in the keyword search on the right side of the page.
<img src="../../images/mapr/blueprint_tile.png" style="border:0;max-width:250px;">
2. **Click the Deploy Blueprint button.**
3. **Set Required parameters.**
<img src="../../images/mapr/deploy_cluster_parameters.png" style="max-width:450px;">
    * **MapR User Password** - The password you will use to log on to the MapR Control System, the Installer, or as the mapr Linux user.
    * **Cluster Name** - The name of the cluster used for filesystem names and NFS published volumes.
4. **Set Optional Parameters**
Password/Confirm Password (This is the root password for the server. Keep this in a secure place).
Set DNS to “Manually Specify” and use “8.8.8.8” (or any other public DNS server of your choice).
Optionally set the server name prefix.
The default values are fine for every other option.
5. **Review and Confirm the Blueprint**
6. **Deploy the Blueprint**
Once verified, click on the `deploy blueprint` button. You will see the deployment details stating the Blueprint is queued for execution.
This will kick off the Blueprint deploy process and load a page where you can track the deployment progress. Deployment will typically complete within five minutes.
7. **Enable public access** (optional)
Servers are built using private IPs only, with access via client or IPsec VPN. For access from the Internet at large, add a public IP to your master server.
This service will require access to 8443/TCP and 9443/TCP.
<a href="../../Network/how-to-add-public-ip-to-virtual-machine.md">
<img style="border:0;width:50px;vertical-align:middle;" src="../../images/shared_assets/fw_icon.png">
Adding a public IP to your virtual machine
</a>
8. **Access MapR Cluster Service**
    * Navigate to `https://<ipaddress>:8443`

    * View the MCS dashboard

### Pricing
The costs listed in the above steps are for the infrastructure only.
The default installation will install MapR Community Edition. You can upgrade to the Enterprise Edition through the MCS or by contacting MapR.
[View a feature comparison between the editions](https://www.mapr.com/products/mapr-distribution-editions).
### Frequently Asked Questions
**Who should I contact for support?**
* For issues related to cloud infrastructure, please open a ticket using the [CenturyLink Cloud Support Process](../../Support/how-do-i-report-a-support-issue.md).
* For issues related to deploying the MapR Blueprints and application operation on CenturyLink Cloud,
[contact MapR Support](https://www.mapr.com/support/overview).
| leporaj/PublicKB | Ecosystem Partners/Marketplace Guides/getting-started-with-mapr-blueprint.md | Markdown | apache-2.0 | 5,157 |
#
# Author:: Christopher Brown (<[email protected]>)
# Author:: Christopher Walters (<[email protected]>)
# Copyright:: Copyright (c) 2009, 2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'time'
require 'base64'
require 'digest/sha1'
require 'mixlib/authentication'
require 'mixlib/authentication/digester'
module Mixlib
module Authentication
module SignedHeaderAuth
NULL_ARG = Object.new
SUPPORTED_ALGORITHMS = ['sha1'].freeze
SUPPORTED_VERSIONS = ['1.0', '1.1'].freeze
DEFAULT_SIGN_ALGORITHM = 'sha1'.freeze
DEFAULT_PROTO_VERSION = '1.0'.freeze
# === signing_object
# This is the intended interface for signing requests with the
# Opscode/Chef signed header protocol. This wraps the constructor for a
# Struct that contains the relevant information about your request.
#
# ==== Signature Parameters:
# These parameters are used to generate the canonical representation of
# the request, which is then hashed and encrypted to generate the
# request's signature. These options are all required, with the exception
# of `:body` and `:file`, which are alternate ways to specify the request
# body (you must specify one of these).
# * `:http_method`: HTTP method as a lowercase symbol, e.g., `:get | :put | :post | :delete`
# * `:path`: The path part of the URI, e.g., `URI.parse(uri).path`
# * `:body`: An object representing the body of the request.
# Use an empty String for bodiless requests.
# * `:timestamp`: A String representing the time in any format understood
# by `Time.parse`. The server may reject the request if the timestamp is
# not close to the server's current time.
# * `:user_id`: The user or client name. This is used by the server to
# lookup the public key necessary to verify the signature.
# * `:file`: An IO object (must respond to `:read`) to be used as the
# request body.
# ==== Protocol Versioning Parameters:
# * `:proto_version`: The version of the signing protocol to use.
# Currently defaults to 1.0, but version 1.1 is also available.
# ==== Other Parameters:
# These parameters are accepted but not used in the computation of the signature.
# * `:host`: The host part of the URI
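      # ==== Example:
      # An illustrative call only - the org name, host, and key file below are
      # placeholders, not values required by this library:
      #
      #   signing_obj = Mixlib::Authentication::SignedHeaderAuth.signing_object(
      #     :http_method => :post,
      #     :path        => "/organizations/example/nodes",
      #     :body        => "{}",
      #     :host        => "api.example.com:443",
      #     :timestamp   => Time.now.utc.iso8601,
      #     :user_id     => "example-client"
      #   )
      #   signed_headers = signing_obj.sign(OpenSSL::PKey::RSA.new(File.read("client.pem")))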
def self.signing_object(args={ })
SigningObject.new(args[:http_method], args[:path], args[:body], args[:host], args[:timestamp], args[:user_id], args[:file], args[:proto_version])
end
def algorithm
DEFAULT_SIGN_ALGORITHM
end
def proto_version
DEFAULT_PROTO_VERSION
end
# Build the canonicalized request based on the method, other headers, etc.
# compute the signature from the request, using the looked-up user secret
# ====Parameters
# private_key<OpenSSL::PKey::RSA>:: user's RSA private key.
def sign(private_key, sign_algorithm=algorithm, sign_version=proto_version)
# Our multiline hash for authorization will be encoded in multiple header
# lines - X-Ops-Authorization-1, ... (starts at 1, not 0!)
header_hash = {
"X-Ops-Sign" => "algorithm=#{sign_algorithm};version=#{sign_version};",
"X-Ops-Userid" => user_id,
"X-Ops-Timestamp" => canonical_time,
"X-Ops-Content-Hash" => hashed_body,
}
string_to_sign = canonicalize_request(sign_algorithm, sign_version)
signature = Base64.encode64(private_key.private_encrypt(string_to_sign)).chomp
signature_lines = signature.split(/\n/)
signature_lines.each_index do |idx|
key = "X-Ops-Authorization-#{idx + 1}"
header_hash[key] = signature_lines[idx]
end
Mixlib::Authentication::Log.debug "String to sign: '#{string_to_sign}'\nHeader hash: #{header_hash.inspect}"
header_hash
end
# Build the canonicalized time based on utc & iso8601
#
# ====Parameters
#
def canonical_time
Time.parse(timestamp).utc.iso8601
end
# Build the canonicalized path, which collapses multiple slashes (/) and
# removes a trailing slash unless the path is only "/"
#
# ====Parameters
#
def canonical_path
p = path.gsub(/\/+/,'/')
p.length > 1 ? p.chomp('/') : p
end
def hashed_body
# Hash the file object if it was passed in, otherwise hash based on
# the body.
# TODO: tim 2009-12-28: It'd be nice to just remove this special case,
# always sign the entire request body, using the expanded multipart
        # body in the case of a file being included.
@hashed_body ||= (self.file && self.file.respond_to?(:read)) ? digester.hash_file(self.file) : digester.hash_string(self.body)
end
# Takes HTTP request method & headers and creates a canonical form
# to create the signature
#
# ====Parameters
#
#
def canonicalize_request(sign_algorithm=algorithm, sign_version=proto_version)
unless SUPPORTED_ALGORITHMS.include?(sign_algorithm) && SUPPORTED_VERSIONS.include?(sign_version)
raise AuthenticationError, "Bad algorithm '#{sign_algorithm}' (allowed: #{SUPPORTED_ALGORITHMS.inspect}) or version '#{sign_version}' (allowed: #{SUPPORTED_VERSIONS.inspect})"
end
canonical_x_ops_user_id = canonicalize_user_id(user_id, sign_version)
"Method:#{http_method.to_s.upcase}\nHashed Path:#{digester.hash_string(canonical_path)}\nX-Ops-Content-Hash:#{hashed_body}\nX-Ops-Timestamp:#{canonical_time}\nX-Ops-UserId:#{canonical_x_ops_user_id}"
end
def canonicalize_user_id(user_id, proto_version)
case proto_version
when "1.1"
digester.hash_string(user_id)
when "1.0"
user_id
else
user_id
end
end
# Parses signature version information, algorithm used, etc.
#
# ====Parameters
#
def parse_signing_description
parts = signing_description.strip.split(";").inject({ }) do |memo, part|
field_name, field_value = part.split("=")
memo[field_name.to_sym] = field_value.strip
memo
end
Mixlib::Authentication::Log.debug "Parsed signing description: #{parts.inspect}"
parts
end
def digester
Mixlib::Authentication::Digester
end
private :canonical_time, :canonical_path, :parse_signing_description, :digester, :canonicalize_user_id
end
# === SigningObject
# A Struct-based value object that contains the necessary information to
# generate a request signature. `SignedHeaderAuth.signing_object()`
# provides a more convenient interface to the constructor.
class SigningObject < Struct.new(:http_method, :path, :body, :host, :timestamp, :user_id, :file, :proto_version)
include SignedHeaderAuth
def proto_version
(self[:proto_version] or DEFAULT_PROTO_VERSION).to_s
end
end
end
end
| jreichhold/chef-repo | vendor/ruby/2.0.0/gems/mixlib-authentication-1.3.0/lib/mixlib/authentication/signedheaderauth.rb | Ruby | apache-2.0 | 7,640 |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.bigdata.api.hbase;
/**
* Created by bryan on 1/29/16.
*/
public class ResultFactoryException extends Exception {
public ResultFactoryException( Throwable cause ) {
super( cause );
}
}
| mkambol/big-data-plugin | api/hbase/src/main/java/org/pentaho/bigdata/api/hbase/ResultFactoryException.java | Java | apache-2.0 | 1,119 |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.timeseries.model;
import org.apache.commons.lang.Validate;
import org.threeten.bp.LocalDate;
import com.opengamma.analytics.math.statistics.distribution.ProbabilityDistribution;
import com.opengamma.timeseries.date.localdate.ImmutableLocalDateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.ArgumentChecker;
/**
 * Generates random time series that follow a moving-average MA(q) process: each value is
 * theta[0] plus the current random shock plus the theta[j]-weighted previous shocks
 * (j = 1..q), with the shocks drawn from the supplied probability distribution.
 */
public class MovingAverageTimeSeriesModel {
private final ProbabilityDistribution<Double> _random;
public MovingAverageTimeSeriesModel(final ProbabilityDistribution<Double> random) {
Validate.notNull(random, "random");
_random = random;
}
public LocalDateDoubleTimeSeries getSeries(final double[] theta, final int q, final LocalDate[] dates) {
Validate.notNull(theta, "theta");
if (q < 1) {
throw new IllegalArgumentException("Order must be greater than zero");
}
if (theta.length < q) {
throw new IllegalArgumentException("Coefficient array must contain at least " + q + " elements");
}
Validate.notNull(dates, "dates");
ArgumentChecker.notEmpty(dates, "dates");
final int n = dates.length;
final double[] z = new double[n];
for (int i = 0; i < n; i++) {
z[i] = _random.nextRandom();
}
final double[] data = new double[n];
data[0] = theta[0];
double sum;
for (int i = 1; i < n; i++) {
sum = theta[0] + z[i];
for (int j = 1; j < (i < q ? i : q + 1); j++) {
sum += z[i - j] * theta[j];
}
data[i] = sum;
}
return ImmutableLocalDateDoubleTimeSeries.of(dates, data);
}
}
| McLeodMoores/starling | projects/analytics/src/main/java/com/opengamma/analytics/financial/timeseries/model/MovingAverageTimeSeriesModel.java | Java | apache-2.0 | 1,779 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hedwig;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import com.google.protobuf.ByteString;
import org.apache.hedwig.protocol.PubSubProtocol.Message;
public class HelperMethods {
static Random rand = new Random();
public static List<Message> getRandomPublishedMessages(int numMessages, int size) {
ByteString[] regions = { ByteString.copyFromUtf8("sp1"), ByteString.copyFromUtf8("re1"),
ByteString.copyFromUtf8("sg")
};
return getRandomPublishedMessages(numMessages, size, regions);
}
public static List<Message> getRandomPublishedMessages(int numMessages, int size, ByteString[] regions) {
List<Message> msgs = new ArrayList<Message>();
for (int i = 0; i < numMessages; i++) {
byte[] body = new byte[size];
rand.nextBytes(body);
msgs.add(Message.newBuilder().setBody(ByteString.copyFrom(body)).setSrcRegion(
regions[rand.nextInt(regions.length)]).build());
}
return msgs;
}
public static boolean areEqual(Message m1, Message m2) {
if (m1.hasSrcRegion() != m2.hasSrcRegion()) {
return false;
}
if (m1.hasSrcRegion() && !m1.getSrcRegion().equals(m2.getSrcRegion())) {
return false;
}
return m1.getBody().equals(m2.getBody());
}
}
| rvenkatesh25/bookkeeper | hedwig-server/src/test/java/org/apache/hedwig/HelperMethods.java | Java | apache-2.0 | 2,258 |
#
# Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Unit tests for EC2 error responses.
"""
from lxml import etree
from nova.api import ec2
from nova import context
from nova import test
from nova import wsgi
class TestClientExceptionEC2(Exception):
ec2_code = 'ClientException.Test'
message = "Test Client Exception."
code = 400
class TestServerExceptionEC2(Exception):
ec2_code = 'ServerException.Test'
message = "Test Server Exception."
code = 500
class Ec2ErrorResponseTestCase(test.NoDBTestCase):
"""
Test EC2 error responses.
This deals mostly with api/ec2/__init__.py code, especially
the ec2_error_ex helper.
"""
def setUp(self):
super(Ec2ErrorResponseTestCase, self).setUp()
self.context = context.RequestContext('test_user_id',
'test_project_id')
self.req = wsgi.Request.blank('/test')
self.req.environ['nova.context'] = self.context
def _validate_ec2_error(self, response, http_status, ec2_code, msg=None,
unknown_msg=False):
self.assertEqual(response.status_code, http_status,
'Expected HTTP status %s' % http_status)
root_e = etree.XML(response.body)
self.assertEqual(root_e.tag, 'Response',
"Top element must be Response.")
errors_e = root_e.find('Errors')
self.assertEqual(len(errors_e), 1,
"Expected exactly one Error element in Errors.")
error_e = errors_e[0]
self.assertEqual(error_e.tag, 'Error',
"Expected Error element.")
# Code
code_e = error_e.find('Code')
self.assertIsNotNone(code_e, "Code element must be present.")
self.assertEqual(code_e.text, ec2_code)
# Message
if msg or unknown_msg:
message_e = error_e.find('Message')
self.assertIsNotNone(code_e, "Message element must be present.")
if msg:
self.assertEqual(message_e.text, msg)
elif unknown_msg:
self.assertEqual(message_e.text, "Unknown error occurred.",
"Error message should be anonymous.")
# RequestID
requestid_e = root_e.find('RequestID')
self.assertIsNotNone(requestid_e,
'RequestID element should be present.')
self.assertEqual(requestid_e.text, self.context.request_id)
def test_exception_ec2_4xx(self):
"""
Test response to EC2 exception with code = 400.
"""
msg = "Test client failure."
err = ec2.ec2_error_ex(TestClientExceptionEC2(msg), self.req)
self._validate_ec2_error(err, TestClientExceptionEC2.code,
TestClientExceptionEC2.ec2_code, msg)
def test_exception_ec2_5xx(self):
"""
Test response to EC2 exception with code = 500.
Expected errors are treated as client ones even with 5xx code.
"""
msg = "Test client failure with 5xx error code."
err = ec2.ec2_error_ex(TestServerExceptionEC2(msg), self.req)
self._validate_ec2_error(err, 400, TestServerExceptionEC2.ec2_code,
msg)
def test_unexpected_exception_ec2_4xx(self):
"""
Test response to unexpected EC2 exception with code = 400.
"""
msg = "Test unexpected client failure."
err = ec2.ec2_error_ex(TestClientExceptionEC2(msg), self.req,
unexpected=True)
self._validate_ec2_error(err, TestClientExceptionEC2.code,
TestClientExceptionEC2.ec2_code, msg)
def test_unexpected_exception_ec2_5xx(self):
"""
Test response to unexpected EC2 exception with code = 500.
Server exception messages (with code >= 500 or without code) should
be filtered as they might contain sensitive information.
"""
msg = "Test server failure."
err = ec2.ec2_error_ex(TestServerExceptionEC2(msg), self.req,
unexpected=True)
self._validate_ec2_error(err, TestServerExceptionEC2.code,
TestServerExceptionEC2.ec2_code,
unknown_msg=True)
def test_unexpected_exception_builtin(self):
"""
Test response to builtin unexpected exception.
Server exception messages (with code >= 500 or without code) should
be filtered as they might contain sensitive information.
"""
msg = "Test server failure."
err = ec2.ec2_error_ex(RuntimeError(msg), self.req, unexpected=True)
self._validate_ec2_error(err, 500, 'RuntimeError', unknown_msg=True)
| OpenAcademy-OpenStack/nova-scheduler | nova/tests/api/ec2/test_error_response.py | Python | apache-2.0 | 5,360 |
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Contains configuration values for the Ubuntu cluster
# Define all your cluster nodes, MASTER node comes first
# And separated with blank space like <user_1@ip_1> <user_2@ip_2> <user_3@ip_3>
export nodes=${nodes:-"[email protected] [email protected] [email protected]"}
# Define the role of each node: a(master) or i(minion) or ai(both master and minion); must be in the same order as the nodes above
role=${roles:-"ai i i"}
# It is practically impossible to set an array as an environment variable
# from a script, so assume the variable is a string and then convert it to an array
export roles=($role)
# Define the number of minions
export NUM_NODES=${NUM_NODES:-3}
# define the IP range used for service cluster IPs.
# according to rfc 1918 ref: https://tools.ietf.org/html/rfc1918 choose a private ip range here.
export SERVICE_CLUSTER_IP_RANGE=${SERVICE_CLUSTER_IP_RANGE:-192.168.3.0/24} # formerly PORTAL_NET
# define the IP range used for flannel overlay network, should not conflict with above SERVICE_CLUSTER_IP_RANGE
# The Ubuntu scripting supports two ways of networking: Flannel and
# CNI. To use CNI: (1) put a CNI configuration file, whose basename
# is the configured network type plus ".conf", somewhere on the driver
# machine (the one running `kube-up.sh`) and set CNI_PLUGIN_CONF to a
# pathname of that file, (2) put one or more executable binaries on
# the driver machine and set CNI_PLUGIN_EXES to a space-separated list
# of their pathnames, and (3) set CNI_KUBELET_TRIGGER to identify an
# appropriate service on which to trigger the start and stop of the
# kubelet on non-master machines. For (1) and (2) the pathnames may
# be relative, in which case they are relative to kubernetes/cluster.
# If either of CNI_PLUGIN_CONF or CNI_PLUGIN_EXES is undefined or has
# a zero length value then Flannel will be used instead of CNI.
export CNI_PLUGIN_CONF CNI_PLUGIN_EXES CNI_KUBELET_TRIGGER
CNI_PLUGIN_CONF=${CNI_PLUGIN_CONF:-""}
CNI_PLUGIN_EXES=${CNI_PLUGIN_EXES:-""}
CNI_KUBELET_TRIGGER=${CNI_KUBELET_TRIGGER:-networking}
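# As a rough illustration only (the file names below are hypothetical, not files
# shipped with this script), a bridge-style CNI setup could look like:
#   CNI_PLUGIN_CONF=cni/net.d/10-mynet.conf
#   CNI_PLUGIN_EXES="cni/bin/bridge cni/bin/host-local"
# with both paths taken relative to kubernetes/cluster as described above.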
# Flannel networking is used if CNI networking is not. The following
# variable defines the CIDR block from which cluster addresses are
# drawn.
export FLANNEL_NET=${FLANNEL_NET:-172.16.0.0/16}
# Optionally add other contents to the Flannel configuration JSON
# object normally stored in etcd as /coreos.com/network/config. Use
# JSON syntax suitable for insertion into a JSON object constructor
# after other field name:value pairs. For example:
# FLANNEL_OTHER_NET_CONFIG=', "SubnetMin": "172.16.10.0", "SubnetMax": "172.16.90.0"'
export FLANNEL_OTHER_NET_CONFIG
FLANNEL_OTHER_NET_CONFIG=''
# Admission Controllers to invoke prior to persisting objects in cluster
export ADMISSION_CONTROL=NamespaceLifecycle,LimitRanger,ServiceAccount,ResourceQuota,SecurityContextDeny
# Path to the config file or directory of files of kubelet
export KUBELET_CONFIG=${KUBELET_CONFIG:-""}
# A port range to reserve for services with NodePort visibility
SERVICE_NODE_PORT_RANGE=${SERVICE_NODE_PORT_RANGE:-"30000-32767"}
# Optional: Enable node logging.
ENABLE_NODE_LOGGING=false
LOGGING_DESTINATION=${LOGGING_DESTINATION:-elasticsearch}
# Optional: When set to true, Elasticsearch and Kibana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_LOGGING=false
ELASTICSEARCH_LOGGING_REPLICAS=${ELASTICSEARCH_LOGGING_REPLICAS:-1}
# Optional: When set to true, heapster, Influxdb and Grafana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-true}"
# Extra options to set on the Docker command line. This is useful for setting
# --insecure-registry for local registries.
DOCKER_OPTS=${DOCKER_OPTS:-""}
# Extra options to set on the kube-proxy command line. This is useful
# for selecting the iptables proxy-mode, for example.
KUBE_PROXY_EXTRA_OPTS=${KUBE_PROXY_EXTRA_OPTS:-""}
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS="${KUBE_ENABLE_CLUSTER_DNS:-true}"
# DNS_SERVER_IP must be a IP in SERVICE_CLUSTER_IP_RANGE
DNS_SERVER_IP=${DNS_SERVER_IP:-"192.168.3.10"}
DNS_DOMAIN=${DNS_DOMAIN:-"cluster.local"}
DNS_REPLICAS=${DNS_REPLICAS:-1}
# Optional: Install Kubernetes UI
ENABLE_CLUSTER_UI="${KUBE_ENABLE_CLUSTER_UI:-true}"
# Optional: Enable setting flags for kube-apiserver to turn on behavior in active-dev
#RUNTIME_CONFIG=""
# Optional: Add http or https proxy when download easy-rsa.
# Add environment variable separated with blank space like "http_proxy=http://10.x.x.x:8080 https_proxy=https://10.x.x.x:8443"
PROXY_SETTING=${PROXY_SETTING:-""}
DEBUG=${DEBUG:-"false"}
| hurf/kubernetes | cluster/ubuntu/config-default.sh | Shell | apache-2.0 | 5,190 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.TestFSMainOperationsLocalFileSystem;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
/**
* <p>
* A collection of tests for the {@link FileSystem}. This test should be used
* for testing an instance of FileSystem that has been initialized to a specific
 * default FileSystem such as LocalFileSystem, HDFS, OBS, etc.
* </p>
* <p>
* To test a given {@link FileSystem} implementation create a subclass of this
* test and override {@link #setUp()} to initialize the <code>fSys</code> {@link
* FileSystem} instance variable.
* <p>
 * Since this is a JUnit 4 test you can also do a single setup before the start of any
 * tests.
*
*
* </p>
*/
public class TestOBSFSMainOperations extends
TestFSMainOperationsLocalFileSystem {
@Override
@Before
public void setUp() throws Exception {
skipTestCheck();
Configuration conf = new Configuration();
conf.addResource(OBSContract.CONTRACT_XML);
fSys = OBSTestUtils.createTestFileSystem(conf);
}
@Override
public void testWorkingDirectory() {
Assume.assumeTrue("unspport.", false);
}
@Override
public void testListStatusThrowsExceptionForUnreadableDir() {
Assume.assumeTrue("unspport.", false);
}
@Override
public void testRenameDirectoryToItself() {
Assume.assumeTrue("unspport.", false);
}
@Override
public void testGlobStatusThrowsExceptionForUnreadableDir() {
Assume.assumeTrue("unspport.", false);
}
@Override
public void testRenameFileToItself() {
Assume.assumeTrue("unspport.", false);
}
@Override
@After
public void tearDown() throws Exception {
if(fSys != null) {
super.tearDown();
}
}
public void skipTestCheck() {
Assume.assumeTrue(OBSContract.isContractTestEnabled());
}
}
| JingchengDu/hadoop | hadoop-cloud-storage-project/hadoop-huaweicloud/src/test/java/org/apache/hadoop/fs/obs/TestOBSFSMainOperations.java | Java | apache-2.0 | 2,724 |
#!/usr/bin/env python3
"""Mininet tests for FAUCET.
* must be run as root
* you can run a specific test case only, by adding the class name of the test
case to the command. Eg ./mininet_main.py FaucetUntaggedIPv4RouteTest
It is strongly recommended to run these tests via Docker, to ensure you have
all dependencies correctly installed. See ../docs/.
"""
from clib.clib_mininet_test_main import test_main
import mininet_tests
import mininet_multidp_tests
if __name__ == '__main__':
test_main([mininet_tests.__name__, mininet_multidp_tests.__name__])
| trungdtbk/faucet | tests/integration/mininet_main.py | Python | apache-2.0 | 565 |
//// [superAccessCastedCall.ts]
class Foo {
bar(): void {}
}
class Bar extends Foo {
x: Number;
constructor() {
super();
this.x = 2;
}
bar() {
super.bar();
(super.bar as any)();
}
}
let b = new Bar();
b.bar()
//// [superAccessCastedCall.js]
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var Foo = /** @class */ (function () {
function Foo() {
}
Foo.prototype.bar = function () { };
return Foo;
}());
var Bar = /** @class */ (function (_super) {
__extends(Bar, _super);
function Bar() {
var _this = _super.call(this) || this;
_this.x = 2;
return _this;
}
Bar.prototype.bar = function () {
_super.prototype.bar.call(this);
_super.prototype.bar.call(this);
};
return Bar;
}(Foo));
var b = new Bar();
b.bar();
| donaldpipowitch/TypeScript | tests/baselines/reference/superAccessCastedCall.js | JavaScript | apache-2.0 | 1,426 |
package com.guitar.db.repository;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.springframework.stereotype.Repository;
import com.guitar.db.model.Location;
@Repository
public class LocationRepository {
@PersistenceContext
private EntityManager entityManager;
/**
* Create
*/
public Location create(Location loc) {
entityManager.persist(loc);
entityManager.flush();
return loc;
}
/**
* Update
*/
public Location update(Location loc) {
loc = entityManager.merge(loc);
entityManager.flush();
return loc;
}
/**
* Delete
*/
public void delete(Location loc) {
entityManager.remove(loc);
entityManager.flush();
}
/**
* Find
*/
public Location find(Long id) {
return entityManager.find(Location.class, id);
}
/**
* Custom finder
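	 * Returns locations whose state name starts with the given value; the JPQL
	 * query below appends a trailing "%" to the LIKE parameter.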
*/
public List<Location> getLocationByStateName(String name) {
@SuppressWarnings("unchecked")
List<Location> locs = entityManager
.createQuery("select l from Location l where l.state like :state")
.setParameter("state", name + "%").getResultList();
return locs;
}
}
| renegmedal/spring-data-jpa | src/main/java/com/guitar/db/repository/LocationRepository.java | Java | apache-2.0 | 1,139 |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.grid.internal;
import com.google.gson.JsonObject;
import org.openqa.grid.common.RegistrationRequest;
import org.openqa.grid.common.SeleniumProtocol;
import org.openqa.grid.internal.utils.CapabilityMatcher;
import org.openqa.grid.internal.utils.HtmlRenderer;
import org.openqa.grid.internal.utils.configuration.GridNodeConfiguration;
import org.openqa.selenium.remote.internal.HttpClientFactory;
import java.net.URL;
import java.util.List;
import java.util.Map;
/**
* Proxy to a remote server executing the tests. <p> The proxy keeps a state of what is happening
* on the remote server and knows if a new test can be run on the remote server. There are several
* reasons why a test could not be run on the specified remote server, for instance: if the
* RemoteProxy decides the remote server has reached the maximum number of concurrent sessions, or
* if the client has requested DesiredCapabilities we don't support e.g. asking for Chrome when we
* only support Firefox.
*/
public interface RemoteProxy extends Comparable<RemoteProxy> {
/**
* Create a new TestSlot.
*
* @param protocol a {@link SeleniumProtocol} object that identifies the request flavor.
* @param capabilities the type of test the client is interested in performing.
* @return the entity on a proxy that can host a test session.
*/
default TestSlot createTestSlot(SeleniumProtocol protocol, Map<String, Object> capabilities) {
return new TestSlot(this, protocol, capabilities);
}
/**
* Each test running on the node will occupy a test slot. A test slot can either be in use (have a session) or be
* available for scheduling (no associated session). This method allows retrieving the total state of the node,
* both test slots in use and those unused.
*
* @return the test slots.
*/
List<TestSlot> getTestSlots();
/**
* Retrieves the handle to the registry this remote proxy is registered with.
*
* @return the registry.
*/
Registry getRegistry();
/**
* Returns the capability matcher that will be used to by the remote proxy
* to determine whether its test slots can run a requested test session.
*
* @return the capability matcher.
*/
CapabilityMatcher getCapabilityHelper();
/**
* If the RemoteProxy implementation also implements TimeoutListener, then this method
* will start up the thread used to monitor timeouts and handle cleanup of timed out resources.
*/
void setupTimeoutListener();
/**
* Returns the unique id for the node. The ID should not change throughout the life of the node.
*
* @return the unique node id.
*/
String getId();
/**
* If the RemoteProxy implementation also implements TimeoutListener, then this method
* will stop the thread used to monitor timeouts.
*/
void teardown();
/**
* Returns the configuration the node was initialized with.
*
* @return the node configuration.
*/
GridNodeConfiguration getConfig();
/**
* Returns the request sent from the node to the hub to register the proxy.
*
* @return the original node registration request.
*/
RegistrationRequest getOriginalRegistrationRequest();
/**
* Returns the maximum number of concurrent tests that can run on this node. NB: this number can be less than
* the number of test slots because a test slot only indicates what type of test session can be run on the remote.
* I.e., a node may allow N different <em>types</em> of tests, but only allow M tests to run at once, for M <= N.
*
* @return Maximum number of concurrent tests that can run on this node.
*/
int getMaxNumberOfConcurrentTestSessions();
/**
* Get the host the node is on. This is different from the URL used to communicate with the
* driver. For a local node that support both RC and WebDriver protocols,
* remoteHost=http://localhost:5555, but the underlying server will respond on urls
* http://localhost:5555/wd/hub (proxy.host + slot.path, where slot is a WebDriver slot) and
* http://localhost:5555/selenium-server/driver (proxy.host + slot.path, where slot is an RC slot).
*
* @return the host the node is running on.
*/
URL getRemoteHost();
/**
* Creates and returns a new test session if the current node has the resources and is ready to run the test.
*
* @param requestedCapability the type of test the client is interested in performing.
*
* @return a new TestSession if possible, <code>null</code> otherwise
*/
TestSession getNewSession(Map<String, Object> requestedCapability);
/**
* Returns the total number of test slots used on this node.
*
* @return the total number of test slots in use.
*/
int getTotalUsed();
/**
* Returns the object responsible for rendering any information about the proxy in a Web application.
*
* @return the renderer.
*/
HtmlRenderer getHtmlRender();
/**
* Indicates how long a node should wait for a seemingly non-responsive test session before deciding it has timed out.
*
* @return the timeout in milliseconds.
*/
int getTimeOut();
/**
* Retrieves the global factory for creating HTTP clients.
*
* @return The thread-safe HTTP client factory.
*/
HttpClientFactory getHttpClientFactory();
/**
* Renders the status of the node as JSON. Useful for APIs.
*
* @return the node status.
*
*/
  JsonObject getStatus();
/**
* Checks if the node has the capability requested.
* <br>
* The definition of "has" is defined by {@link CapabilityMatcher#matches(Map, Map)}
* <br>
   * <code>hasCapability = true</code> doesn't mean the test can start just now, only that the proxy will be
* able to run a test requiring that capability at some point.
*
* @param requestedCapability the type of test the client is interested in performing.
*
* @return <code>true</code> if present
*/
boolean hasCapability(Map<String,Object> requestedCapability);
/**
* Indicates whether the node has any test slots in use. The node may still be able to accept more work even
* if it is busy.
*
* @return <code>true</code> if the node has any test slots in use.
*/
boolean isBusy();
/**
   * Returns how much of the proxy's resources are currently used. The default implementation is runningTests / maxTests
* on the proxy. For a proxy with more knowledge about its resources, a finer implementation can also take into
* account CPU usage, RAM usage etc.
* @return the percentage of the available resource used. Can be greater than 100 if the grid is under heavy load.
*/
float getResourceUsageInPercent();
/**
* @return the time the latest session was started on a TestSlot, -1 if no sessions were started.
*/
long getLastSessionStart();
}
| mojwang/selenium | java/server/src/org/openqa/grid/internal/RemoteProxy.java | Java | apache-2.0 | 7,624 |
/*
* Copyright 2012 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.discovery.shared;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import java.io.FileInputStream;
import java.security.KeyStore;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.base.Preconditions;
import com.netflix.discovery.provider.DiscoveryJerseyProvider;
import com.netflix.servo.monitor.BasicCounter;
import com.netflix.servo.monitor.BasicTimer;
import com.netflix.servo.monitor.Counter;
import com.netflix.servo.monitor.MonitorConfig;
import com.netflix.servo.monitor.Monitors;
import com.netflix.servo.monitor.Stopwatch;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.client.apache4.ApacheHttpClient4;
import com.sun.jersey.client.apache4.config.ApacheHttpClient4Config;
import com.sun.jersey.client.apache4.config.DefaultApacheHttpClient4Config;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A wrapper for Jersey Apache Client to set the necessary configurations.
*
* @author Karthik Ranganathan
*
*/
public final class EurekaJerseyClient {
private EurekaJerseyClient() {
}
/**
* Creates a Jersey client with the given configuration parameters.
*
*
* @param clientName
* @param connectionTimeout
* - The connection timeout of the connection in milliseconds
* @param readTimeout
* - The read timeout of the connection in milliseconds
* @param maxConnectionsPerHost
* - The maximum number of connections to a particular host
* @param maxTotalConnections
* - The maximum number of total connections across all hosts
* @param connectionIdleTimeout
* - The idle timeout after which the connections will be cleaned
* up in seconds
* @return - The jersey client object encapsulating the connection
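     * <p>
     * Illustrative use only - the client name and the timeout/pool values below are
     * arbitrary placeholders, not recommended settings:
     * <pre>
     * EurekaJerseyClient.JerseyClient jerseyClient =
     *         EurekaJerseyClient.createJerseyClient("example-client", 5000, 8000, 50, 200, 30);
     * ApacheHttpClient4 httpClient = jerseyClient.getClient();
     * // ... issue requests with httpClient ...
     * jerseyClient.destroyResources();
     * </pre>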
*/
public static JerseyClient createJerseyClient(String clientName, int connectionTimeout,
int readTimeout, int maxConnectionsPerHost,
int maxTotalConnections, int connectionIdleTimeout) {
Preconditions.checkNotNull(clientName, "Client name can not be null.");
try {
ClientConfig jerseyClientConfig = new CustomApacheHttpClientConfig(clientName, maxConnectionsPerHost,
maxTotalConnections);
return new JerseyClient(connectionTimeout, readTimeout,
connectionIdleTimeout, jerseyClientConfig);
} catch (Throwable e) {
throw new RuntimeException("Cannot create Jersey client ", e);
}
}
/**
* Creates a Jersey client with the given configuration parameters.
*
*
* @param clientName
* @param connectionTimeout
* - The connection timeout of the connection in milliseconds
* @param readTimeout
* - The read timeout of the connection in milliseconds
* @param maxConnectionsPerHost
* - The maximum number of connections to a particular host
* @param maxTotalConnections
* - The maximum number of total connections across all hosts
* @param connectionIdleTimeout
* - The idle timeout after which the connections will be cleaned
* up in seconds
* @param proxyHost
* - The hostname of the proxy
* @param proxyPort
* - The port number the proxy is listening on
* @param proxyUserName
* - The username to use to authenticate to the proxy
* @param proxyPassword
* - The password to use to authenticate to the proxy
* @return - The jersey client object encapsulating the connection
*/
public static JerseyClient createProxyJerseyClient(String clientName, int connectionTimeout,
int readTimeout, int maxConnectionsPerHost, int maxTotalConnections, int connectionIdleTimeout,
String proxyHost, String proxyPort, String proxyUserName, String proxyPassword) {
Preconditions.checkNotNull(clientName, "Client name can not be null.");
try {
ClientConfig jerseyClientConfig = new ProxyCustomApacheHttpClientConfig(clientName, maxConnectionsPerHost,
maxTotalConnections, proxyHost, proxyPort, proxyUserName, proxyPassword);
return new JerseyClient(connectionTimeout, readTimeout,
connectionIdleTimeout, jerseyClientConfig);
} catch (Throwable e) {
throw new RuntimeException("Cannot create Jersey client ", e);
}
}
/**
* Creates the SSL based Jersey client with the given configuration
* parameters.
*
*
*
* @param clientName
* @param connectionTimeout
* - The connection timeout of the connection in milliseconds
* @param readTimeout
* - The read timeout of the connection in milliseconds
* @param maxConnectionsPerHost
* - The maximum number of connections to a particular host
* @param maxTotalConnections
* - The maximum number of total connections across all hosts
* @param connectionIdleTimeout
* - The idle timeout after which the connections will be cleaned
* up in seconds
* @param trustStoreFileName
* - The full path to the trust store file
* @param trustStorePassword
* - The password of the trust store file
* @return - The jersey client object encapsulating the connection
*/
public static JerseyClient createSSLJerseyClient(String clientName, int connectionTimeout,
int readTimeout, int maxConnectionsPerHost,
int maxTotalConnections, int connectionIdleTimeout,
String trustStoreFileName, String trustStorePassword) {
Preconditions.checkNotNull(clientName, "Client name can not be null.");
try {
ClientConfig jerseyClientConfig = new SSLCustomApacheHttpClientConfig(
clientName, maxConnectionsPerHost, maxTotalConnections,
trustStoreFileName, trustStorePassword);
return new JerseyClient(connectionTimeout, readTimeout,
connectionIdleTimeout, jerseyClientConfig);
} catch (Throwable e) {
throw new RuntimeException("Cannot create SSL Jersey client ", e);
}
}
/**
* Creates the SSL based Jersey client with the given configuration
* parameters and using a SystemSocketFactory to support standard keystore/truststore
* system properties.
*
* @param clientName
* @param connectionTimeout
* - The connection timeout of the connection in milliseconds
* @param readTimeout
* - The read timeout of the connection in milliseconds
* @param maxConnectionsPerHost
* - The maximum number of connections to a particular host
* @param maxTotalConnections
* - The maximum number of total connections across all hosts
* @param connectionIdleTimeout
* - The idle timeout after which the connections will be cleaned
* up in seconds
* @return - The jersey client object encapsulating the connection
*/
public static JerseyClient createSystemSSLJerseyClient(String clientName, int connectionTimeout,
int readTimeout, int maxConnectionsPerHost,
int maxTotalConnections, int connectionIdleTimeout) {
Preconditions.checkNotNull(clientName, "Client name can not be null.");
try {
ClientConfig jerseyClientConfig = new SystemSSLCustomApacheHttpClientConfig(
clientName, maxConnectionsPerHost, maxTotalConnections);
return new JerseyClient(connectionTimeout, readTimeout,
connectionIdleTimeout, jerseyClientConfig);
} catch (Throwable e) {
throw new RuntimeException("Cannot create System SSL Jersey client ", e);
}
}
private static class CustomApacheHttpClientConfig extends DefaultApacheHttpClient4Config {
public CustomApacheHttpClientConfig(String clientName, int maxConnectionsPerHost, int maxTotalConnections)
throws Throwable {
MonitoredConnectionManager cm = new MonitoredConnectionManager(clientName);
cm.setDefaultMaxPerRoute(maxConnectionsPerHost);
cm.setMaxTotal(maxTotalConnections);
getProperties().put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER, cm);
// To pin a client to specific server in case redirect happens, we handle redirects directly
// (see DiscoveryClient.makeRemoteCall methods).
getProperties().put(PROPERTY_FOLLOW_REDIRECTS, Boolean.FALSE);
getProperties().put(ClientPNames.HANDLE_REDIRECTS, Boolean.FALSE);
}
}
private static class ProxyCustomApacheHttpClientConfig extends DefaultApacheHttpClient4Config {
public ProxyCustomApacheHttpClientConfig(String clientName, int maxConnectionsPerHost, int maxTotalConnections,
String proxyHost, String proxyPort, String proxyUserName, String proxyPassword)
throws Throwable {
MonitoredConnectionManager cm = new MonitoredConnectionManager(clientName);
cm.setDefaultMaxPerRoute(maxConnectionsPerHost);
cm.setMaxTotal(maxTotalConnections);
getProperties().put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER, cm);
// To pin a client to specific server in case redirect happens, we handle redirects directly
// (see DiscoveryClient.makeRemoteCall methods).
getProperties().put(PROPERTY_FOLLOW_REDIRECTS, Boolean.FALSE);
getProperties().put(ClientPNames.HANDLE_REDIRECTS, Boolean.FALSE);
if (proxyUserName != null && proxyPassword != null) {
getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_USERNAME, proxyUserName);
getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_PASSWORD, proxyPassword);
} else {
// Due to bug in apache client, user name/password must always be set.
// Otherwise proxy configuration is ignored.
getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_USERNAME, "guest");
getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_PASSWORD, "guest");
}
getProperties().put(
DefaultApacheHttpClient4Config.PROPERTY_PROXY_URI,
"http://" + proxyHost + ":" + proxyPort);
}
}
private static class SSLCustomApacheHttpClientConfig extends DefaultApacheHttpClient4Config {
private static final String PROTOCOL_SCHEME = "SSL";
private static final int HTTPS_PORT = 443;
private static final String PROTOCOL = "https";
private static final String KEYSTORE_TYPE = "JKS";
public SSLCustomApacheHttpClientConfig(String clientName, int maxConnectionsPerHost,
int maxTotalConnections, String trustStoreFileName,
String trustStorePassword) throws Throwable {
SSLContext sslContext = SSLContext.getInstance(PROTOCOL_SCHEME);
TrustManagerFactory tmf = TrustManagerFactory
.getInstance(TrustManagerFactory.getDefaultAlgorithm());
KeyStore sslKeyStore = KeyStore.getInstance(KEYSTORE_TYPE);
FileInputStream fin = null;
try {
fin = new FileInputStream(trustStoreFileName);
sslKeyStore.load(fin, trustStorePassword.toCharArray());
tmf.init(sslKeyStore);
sslContext.init(null, createTrustManagers(sslKeyStore), null);
SSLSocketFactory sslSocketFactory = new SSLSocketFactory(
sslContext);
sslSocketFactory
.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
SchemeRegistry sslSchemeRegistry = new SchemeRegistry();
sslSchemeRegistry.register(new Scheme(PROTOCOL, HTTPS_PORT, sslSocketFactory));
MonitoredConnectionManager cm = new MonitoredConnectionManager(clientName, sslSchemeRegistry);
cm.setDefaultMaxPerRoute(maxConnectionsPerHost);
cm.setMaxTotal(maxTotalConnections);
// To pin a client to specific server in case redirect happens, we handle redirects directly
// (see DiscoveryClient.makeRemoteCall methods).
getProperties().put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER, cm);
getProperties().put(PROPERTY_FOLLOW_REDIRECTS, Boolean.FALSE);
getProperties().put(ClientPNames.HANDLE_REDIRECTS, Boolean.FALSE);
} finally {
if (fin != null) {
fin.close();
}
}
}
private static TrustManager[] createTrustManagers(KeyStore trustStore) {
TrustManagerFactory factory;
try {
factory = TrustManagerFactory.getInstance(TrustManagerFactory
.getDefaultAlgorithm());
factory.init(trustStore);
} catch (Throwable e) {
throw new RuntimeException(e);
}
final TrustManager[] managers = factory.getTrustManagers();
return managers;
}
}
private static class SystemSSLCustomApacheHttpClientConfig extends DefaultApacheHttpClient4Config {
private static final int HTTPS_PORT = 443;
private static final String PROTOCOL = "https";
public SystemSSLCustomApacheHttpClientConfig(String clientName, int maxConnectionsPerHost,
int maxTotalConnections) throws Throwable {
SSLSocketFactory sslSocketFactory = SSLSocketFactory.getSystemSocketFactory();
SchemeRegistry sslSchemeRegistry = new SchemeRegistry();
sslSchemeRegistry.register(new Scheme(PROTOCOL, HTTPS_PORT, sslSocketFactory));
MonitoredConnectionManager cm = new MonitoredConnectionManager(clientName, sslSchemeRegistry);
cm.setDefaultMaxPerRoute(maxConnectionsPerHost);
cm.setMaxTotal(maxTotalConnections);
getProperties().put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER, cm);
// To pin a client to specific server in case redirect happens, we handle redirects directly
// (see DiscoveryClient.makeRemoteCall methods).
getProperties().put(PROPERTY_FOLLOW_REDIRECTS, Boolean.FALSE);
getProperties().put(ClientPNames.HANDLE_REDIRECTS, Boolean.FALSE);
}
}
public static class JerseyClient {
private static final int HTTP_CONNECTION_CLEANER_INTERVAL_MS = 30 * 1000;
private ApacheHttpClient4 apacheHttpClient;
ClientConfig jerseyClientConfig;
private ScheduledExecutorService eurekaConnCleaner =
Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
private final AtomicInteger threadNumber = new AtomicInteger(1);
@Override
public Thread newThread(Runnable r) {
Thread thread = new Thread(r, "Eureka-JerseyClient-Conn-Cleaner" + threadNumber.incrementAndGet());
thread.setDaemon(true);
return thread;
}
});
private static final Logger s_logger = LoggerFactory.getLogger(JerseyClient.class);
public ApacheHttpClient4 getClient() {
return apacheHttpClient;
}
public ClientConfig getClientconfig() {
return jerseyClientConfig;
}
public JerseyClient(int connectionTimeout, int readTimeout, final int connectionIdleTimeout,
ClientConfig clientConfig) {
try {
jerseyClientConfig = clientConfig;
jerseyClientConfig.getClasses().add(DiscoveryJerseyProvider.class);
apacheHttpClient = ApacheHttpClient4.create(jerseyClientConfig);
HttpParams params = apacheHttpClient.getClientHandler().getHttpClient().getParams();
HttpConnectionParams.setConnectionTimeout(params, connectionTimeout);
HttpConnectionParams.setSoTimeout(params, readTimeout);
eurekaConnCleaner.scheduleWithFixedDelay(
new ConnectionCleanerTask(connectionIdleTimeout), HTTP_CONNECTION_CLEANER_INTERVAL_MS,
HTTP_CONNECTION_CLEANER_INTERVAL_MS,
TimeUnit.MILLISECONDS);
} catch (Throwable e) {
throw new RuntimeException("Cannot create Jersey client", e);
}
}
/**
* Clean up resources.
*/
public void destroyResources() {
if (eurekaConnCleaner != null) {
eurekaConnCleaner.shutdown();
}
if (apacheHttpClient != null) {
apacheHttpClient.destroy();
}
}
private class ConnectionCleanerTask implements Runnable {
private final int connectionIdleTimeout;
private final BasicTimer executionTimeStats;
private final Counter cleanupFailed;
public ConnectionCleanerTask(int connectionIdleTimeout) {
this.connectionIdleTimeout = connectionIdleTimeout;
MonitorConfig.Builder monitorConfigBuilder = MonitorConfig.builder("Eureka-Connection-Cleaner-Time");
executionTimeStats = new BasicTimer(monitorConfigBuilder.build());
cleanupFailed = new BasicCounter(MonitorConfig.builder("Eureka-Connection-Cleaner-Failure").build());
try {
Monitors.registerObject(this);
} catch (Exception e) {
s_logger.error("Unable to register with servo.", e);
}
}
@Override
public void run() {
Stopwatch start = executionTimeStats.start();
try {
apacheHttpClient
.getClientHandler()
.getHttpClient()
.getConnectionManager()
.closeIdleConnections(connectionIdleTimeout, TimeUnit.SECONDS);
} catch (Throwable e) {
s_logger.error("Cannot clean connections", e);
cleanupFailed.increment();
} finally {
if (null != start) {
start.stop();
}
}
}
}
}
}
| bondj/eureka | eureka-client/src/main/java/com/netflix/discovery/shared/EurekaJerseyClient.java | Java | apache-2.0 | 20,627 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# pylint: disable=too-many-lines
"""
Custom filters for use in openshift-ansible
"""
import ast
import json
import os
import pdb
import random
import re
from base64 import b64encode
from collections import Mapping
# pylint no-name-in-module and import-error disabled here because pylint
# fails to properly detect the packages when installed in a virtualenv
from distutils.util import strtobool # pylint:disable=no-name-in-module,import-error
from operator import itemgetter
import yaml
from ansible import errors
from ansible.parsing.yaml.dumper import AnsibleDumper
# pylint: disable=import-error,no-name-in-module
from ansible.module_utils.six import iteritems, string_types, u
# pylint: disable=import-error,no-name-in-module
from ansible.module_utils.six.moves.urllib.parse import urlparse
HAS_OPENSSL = False
try:
import OpenSSL.crypto
HAS_OPENSSL = True
except ImportError:
pass
# pylint: disable=C0103
def lib_utils_oo_pdb(arg):
""" This pops you into a pdb instance where arg is the data passed in
from the filter.
Ex: "{{ hostvars | lib_utils_oo_pdb }}"
"""
pdb.set_trace()
return arg
def get_attr(data, attribute=None):
""" This looks up dictionary attributes of the form a.b.c and returns
the value.
If the key isn't present, None is returned.
Ex: data = {'a': {'b': {'c': 5}}}
attribute = "a.b.c"
returns 5
"""
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
ptr = data
for attr in attribute.split('.'):
if attr in ptr:
ptr = ptr[attr]
else:
ptr = None
break
return ptr
def oo_flatten(data):
""" This filter plugin will flatten a list of lists
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to flatten a List")
return [item for sublist in data for item in sublist]
def lib_utils_oo_collect(data_list, attribute=None, filters=None):
""" This takes a list of dict and collects all attributes specified into a
list. If filter is specified then we will include all items that
match _ALL_ of filters. If a dict entry is missing the key in a
filter it will be excluded from the match.
Ex: data_list = [ {'a':1, 'b':5, 'z': 'z'}, # True, return
{'a':2, 'z': 'z'}, # True, return
{'a':3, 'z': 'z'}, # True, return
                          {'a':4, 'z': 'b'}, # FAILED, obj['z'] != filters['z']
]
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3]
This also deals with lists of lists with dict as elements.
Ex: data_list = [
[ {'a':1, 'b':5, 'z': 'z'}, # True, return
{'a':2, 'b':6, 'z': 'z'} # True, return
],
[ {'a':3, 'z': 'z'}, # True, return
                 {'a':4, 'z': 'b'} # FAILED, obj['z'] != filters['z']
],
{'a':5, 'z': 'z'}, # True, return
]
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3, 5]
"""
if not isinstance(data_list, list):
raise errors.AnsibleFilterError("lib_utils_oo_collect expects to filter on a List")
if not attribute:
raise errors.AnsibleFilterError("lib_utils_oo_collect expects attribute to be set")
data = []
retval = []
for item in data_list:
if isinstance(item, list):
retval.extend(lib_utils_oo_collect(item, attribute, filters))
else:
data.append(item)
if filters is not None:
if not isinstance(filters, dict):
raise errors.AnsibleFilterError(
"lib_utils_oo_collect expects filter to be a dict")
retval.extend([get_attr(d, attribute) for d in data if (
all([get_attr(d, key) == filters[key] for key in filters]))])
else:
retval.extend([get_attr(d, attribute) for d in data])
retval = [val for val in retval if val is not None]
return retval
def lib_utils_oo_select_keys_from_list(data, keys):
""" This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|lib_utils_oo_select_keys_from_list failed expects to filter on a list")
if not isinstance(keys, list):
raise errors.AnsibleFilterError("|lib_utils_oo_select_keys_from_list failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [lib_utils_oo_select_keys(item, keys) for item in data]
return oo_flatten(retval)
def lib_utils_oo_select_keys(data, keys):
""" This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
"""
if not isinstance(data, Mapping):
raise errors.AnsibleFilterError("|lib_utils_oo_select_keys failed expects to filter on a dict or object")
if not isinstance(keys, list):
raise errors.AnsibleFilterError("|lib_utils_oo_select_keys failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [data[key] for key in keys if key in data]
return retval
def lib_utils_oo_prepend_strings_in_list(data, prepend):
""" This takes a list of strings and prepends a string to each item in the
list
Ex: data = ['cart', 'tree']
prepend = 'apple-'
returns ['apple-cart', 'apple-tree']
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not all(isinstance(x, string_types) for x in data):
raise errors.AnsibleFilterError("|failed expects first param is a list"
" of strings")
retval = [prepend + s for s in data]
return retval
def lib_utils_oo_dict_to_list_of_dict(data, key_title='key', value_title='value'):
"""Take a dict and arrange them as a list of dicts
Input data:
{'region': 'infra', 'test_k': 'test_v'}
Return data:
[{'key': 'region', 'value': 'infra'}, {'key': 'test_k', 'value': 'test_v'}]
Written for use of the oc_label module
"""
if not isinstance(data, dict):
# pylint: disable=line-too-long
raise errors.AnsibleFilterError("|failed expects first param is a dict. Got %s. Type: %s" % (str(data), str(type(data))))
rval = []
for label in data.items():
rval.append({key_title: label[0], value_title: label[1]})
return rval
def oo_ami_selector(data, image_name):
""" This takes a list of amis and an image name and attempts to return
the latest ami.
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not data:
return None
else:
if image_name is None or not image_name.endswith('_*'):
ami = sorted(data, key=itemgetter('name'), reverse=True)[0]
return ami['ami_id']
else:
ami_info = [(ami, ami['name'].split('_')[-1]) for ami in data]
ami = sorted(ami_info, key=itemgetter(1), reverse=True)[0][0]
return ami['ami_id']
def lib_utils_oo_split(string, separator=','):
""" This splits the input string into a list. If the input string is
already a list we will return it as is.
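        Ex: lib_utils_oo_split('a,b,c')
        returns ['a', 'b', 'c']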
"""
if isinstance(string, list):
return string
return string.split(separator)
def lib_utils_oo_dict_to_keqv_list(data):
"""Take a dict and return a list of k=v pairs
Input data:
{'a': 1, 'b': 2}
Return data:
['a=1', 'b=2']
"""
if not isinstance(data, dict):
try:
# This will attempt to convert something that looks like a string
# representation of a dictionary (including json) into a dictionary.
data = ast.literal_eval(data)
except ValueError:
msg = "|failed expects first param is a dict. Got {}. Type: {}"
msg = msg.format(str(data), str(type(data)))
raise errors.AnsibleFilterError(msg)
return ['='.join(str(e) for e in x) for x in data.items()]
def lib_utils_oo_list_to_dict(lst, separator='='):
""" This converts a list of ["k=v"] to a dictionary {k: v}.
"""
kvs = [i.split(separator) for i in lst]
return {k: v for k, v in kvs}
def haproxy_backend_masters(hosts, port):
""" This takes an array of dicts and returns an array of dicts
to be used as a backend for the haproxy role
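        Ex (illustrative values): hosts = [{'openshift': {'common': {'ip': '10.0.0.1'}}}]
            port = 8443
        returns [{'name': 'master0', 'address': '10.0.0.1:8443', 'opts': 'check'}]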
"""
servers = []
for idx, host_info in enumerate(hosts):
server = dict(name="master%s" % idx)
server_ip = host_info['openshift']['common']['ip']
server['address'] = "%s:%s" % (server_ip, port)
server['opts'] = 'check'
servers.append(server)
return servers
# pylint: disable=too-many-branches, too-many-nested-blocks
def lib_utils_oo_parse_named_certificates(certificates, named_certs_dir, internal_hostnames):
""" Parses names from list of certificate hashes.
Ex: certificates = [{ "certfile": "/root/custom1.crt",
"keyfile": "/root/custom1.key",
"cafile": "/root/custom-ca1.crt" },
{ "certfile": "custom2.crt",
"keyfile": "custom2.key",
"cafile": "custom-ca2.crt" }]
returns [{ "certfile": "/etc/origin/master/named_certificates/custom1.crt",
"keyfile": "/etc/origin/master/named_certificates/custom1.key",
"cafile": "/etc/origin/master/named_certificates/custom-ca1.crt",
"names": [ "public-master-host.com",
"other-master-host.com" ] },
{ "certfile": "/etc/origin/master/named_certificates/custom2.crt",
"keyfile": "/etc/origin/master/named_certificates/custom2.key",
"cafile": "/etc/origin/master/named_certificates/custom-ca-2.crt",
"names": [ "some-hostname.com" ] }]
"""
if not isinstance(named_certs_dir, string_types):
raise errors.AnsibleFilterError("|failed expects named_certs_dir is str or unicode")
if not isinstance(internal_hostnames, list):
raise errors.AnsibleFilterError("|failed expects internal_hostnames is list")
if not HAS_OPENSSL:
raise errors.AnsibleFilterError("|missing OpenSSL python bindings")
for certificate in certificates:
if 'names' in certificate.keys():
continue
else:
certificate['names'] = []
if not os.path.isfile(certificate['certfile']) or not os.path.isfile(certificate['keyfile']):
raise errors.AnsibleFilterError("|certificate and/or key does not exist '%s', '%s'" %
(certificate['certfile'], certificate['keyfile']))
try:
st_cert = open(certificate['certfile'], 'rt').read()
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, st_cert)
certificate['names'].append(str(cert.get_subject().commonName.decode()))
for i in range(cert.get_extension_count()):
if cert.get_extension(i).get_short_name() == 'subjectAltName':
for name in str(cert.get_extension(i)).split(', '):
if 'DNS:' in name:
certificate['names'].append(name.replace('DNS:', ''))
except Exception:
raise errors.AnsibleFilterError(("|failed to parse certificate '%s', " % certificate['certfile'] +
"please specify certificate names in host inventory"))
certificate['names'] = list(set(certificate['names']))
if 'cafile' not in certificate:
certificate['names'] = [name for name in certificate['names'] if name not in internal_hostnames]
if not certificate['names']:
raise errors.AnsibleFilterError(("|failed to parse certificate '%s' or " % certificate['certfile'] +
"detected a collision with internal hostname, please specify " +
"certificate names in host inventory"))
for certificate in certificates:
# Update paths for configuration
certificate['certfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['certfile']))
certificate['keyfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['keyfile']))
if 'cafile' in certificate:
certificate['cafile'] = os.path.join(named_certs_dir, os.path.basename(certificate['cafile']))
return certificates
def lib_utils_oo_parse_certificate_san(certificate):
""" Parses SubjectAlternativeNames from a PEM certificate.
Ex: certificate = '''-----BEGIN CERTIFICATE-----
MIIEcjCCAlqgAwIBAgIBAzANBgkqhkiG9w0BAQsFADAhMR8wHQYDVQQDDBZldGNk
LXNpZ25lckAxNTE2ODIwNTg1MB4XDTE4MDEyNDE5MDMzM1oXDTIzMDEyMzE5MDMz
M1owHzEdMBsGA1UEAwwUbWFzdGVyMS5hYnV0Y2hlci5jb20wggEiMA0GCSqGSIb3
DQEBAQUAA4IBDwAwggEKAoIBAQD4wBdWXNI3TF1M0b0bEIGyJPvdqKeGwF5XlxWg
NoA1Ain/Xz0N1SW5pXW2CDo9HX+ay8DyhzR532yrBa+RO3ivNCmfnexTQinfSLWG
mBEdiu7HO3puR/GNm74JNyXoEKlMAIRiTGq9HPoTo7tNV5MLodgYirpHrkSutOww
DfFSrNjH/ehqxwQtrIOnTAHigdTOrKVdoYxqXblDEMONTPLI5LMvm4/BqnAVaOyb
9RUzND6lxU/ei3FbUS5IoeASOHx0l1ifxae3OeSNAimm/RIRo9rieFNUFh45TzID
elsdGrLB75LH/gnRVV1xxVbwPN6xW1mEwOceRMuhIArJQ2G5AgMBAAGjgbYwgbMw
UQYDVR0jBEowSIAUXTqN88vCI6E7wONls3QJ4/63unOhJaQjMCExHzAdBgNVBAMM
FmV0Y2Qtc2lnbmVyQDE1MTY4MjA1ODWCCQDMaopfom6OljAMBgNVHRMBAf8EAjAA
MBMGA1UdJQQMMAoGCCsGAQUFBwMBMAsGA1UdDwQEAwIFoDAdBgNVHQ4EFgQU7l05
OYeY3HppL6/0VJSirudj8t0wDwYDVR0RBAgwBocEwKh6ujANBgkqhkiG9w0BAQsF
AAOCAgEAFU8sicE5EeQsUPnFEqDvoJd1cVE+8aCBqkW0++4GsVw2A/JOJ3OBJL6r
BV3b1u8/e8xBNi8hPi42Q+LWBITZZ/COFyhwEAK94hcr7eZLCV2xfUdMJziP4Qkh
/WRN7vXHTtJ6NP/d6A22SPbtnMSt9Y6G8y9qa5HBrqIqmkYbLzDw/SdZbDbuGhRk
xUwg2ahXNblVoE5P6rxPONgXliA94telZ1/61iyrVaiGQb1/GUP/DRfvvR4dOCrA
lMosW6fm37Wdi/8iYW+aDPWGS+yVK/sjSnHNjxqvrzkfGk+COa5riT9hJ7wZY0Hb
YiJS74SZgZt/nnr5PI2zFRUiZLECqCkZnC/sz29i+irLabnq7Cif9Mv+TUcXWvry
TdJuaaYdTSMRSUkDd/c9Ife8tOr1i1xhFzDNKNkZjTVRk1MBquSXndVCDKucdfGi
YoWm+NDFrayw8yxK/KTHo3Db3lu1eIXTHxriodFx898b//hysHr4hs4/tsEFUTZi
705L2ScIFLfnyaPby5GK/3sBIXtuhOFM3QV3JoYKlJB5T6wJioVoUmSLc+UxZMeE
t9gGVQbVxtLvNHUdW7uKQ5pd76nIJqApQf8wg2Pja8oo56fRZX2XLt8nm9cswcC4
Y1mDMvtfxglQATwMTuoKGdREuu1mbdb8QqdyQmZuMa72q+ax2kQ=
-----END CERTIFICATE-----'''
returns ['192.168.122.186']
"""
if not HAS_OPENSSL:
raise errors.AnsibleFilterError("|missing OpenSSL python bindings")
names = []
try:
lcert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, certificate)
for i in range(lcert.get_extension_count()):
if lcert.get_extension(i).get_short_name() == 'subjectAltName':
sanstr = str(lcert.get_extension(i))
sanstr = sanstr.replace('DNS:', '')
sanstr = sanstr.replace('IP Address:', '')
names = sanstr.split(', ')
except Exception:
raise errors.AnsibleFilterError("|failed to parse certificate")
return names
def lib_utils_oo_generate_secret(num_bytes):
""" generate a session secret """
if not isinstance(num_bytes, int):
raise errors.AnsibleFilterError("|failed expects num_bytes is int")
return b64encode(os.urandom(num_bytes)).decode('utf-8')
def lib_utils_to_padded_yaml(data, level=0, indent=2, **kw):
""" returns a yaml snippet padded to match the indent level you specify """
if data in [None, ""]:
return ""
try:
transformed = u(yaml.dump(data, indent=indent, allow_unicode=True,
default_flow_style=False,
Dumper=AnsibleDumper, **kw))
padded = "\n".join([" " * level * indent + line for line in transformed.splitlines()])
return "\n{0}".format(padded)
except Exception as my_e:
raise errors.AnsibleFilterError('Failed to convert: %s' % my_e)
def lib_utils_oo_pods_match_component(pods, deployment_type, component):
""" Filters a list of Pods and returns the ones matching the deployment_type and component
"""
if not isinstance(pods, list):
raise errors.AnsibleFilterError("failed expects to filter on a list")
if not isinstance(deployment_type, string_types):
raise errors.AnsibleFilterError("failed expects deployment_type to be a string")
if not isinstance(component, string_types):
raise errors.AnsibleFilterError("failed expects component to be a string")
image_prefix = 'openshift/origin-'
if deployment_type == 'openshift-enterprise':
image_prefix = 'openshift3/ose-'
matching_pods = []
image_regex = image_prefix + component + r'.*'
for pod in pods:
for container in pod['spec']['containers']:
if re.search(image_regex, container['image']):
matching_pods.append(pod)
break # stop here, don't add a pod more than once
return matching_pods
def lib_utils_oo_image_tag_to_rpm_version(version, include_dash=False):
""" Convert an image tag string to an RPM version if necessary
Empty strings and strings that are already in rpm version format
are ignored. Also remove non semantic version components.
Ex. v3.2.0.10 -> -3.2.0.10
v1.2.0-rc1 -> -1.2.0
"""
if not isinstance(version, string_types):
raise errors.AnsibleFilterError("|failed expects a string or unicode")
if version.startswith("v"):
version = version[1:]
# Strip release from requested version, we no longer support this.
version = version.split('-')[0]
if include_dash and version and not version.startswith("-"):
version = "-" + version
return version
def lib_utils_oo_hostname_from_url(url):
""" Returns the hostname contained in a URL
Ex: https://ose3-master.example.com/v1/api -> ose3-master.example.com
"""
if not isinstance(url, string_types):
raise errors.AnsibleFilterError("|failed expects a string or unicode")
parse_result = urlparse(url)
if parse_result.netloc != '':
return parse_result.netloc
else:
# netloc wasn't parsed, assume url was missing scheme and path
return parse_result.path
# pylint: disable=invalid-name, unused-argument
def lib_utils_oo_loadbalancer_frontends(
api_port, servers_hostvars, use_nuage=False, nuage_rest_port=None):
"""TODO: Document me."""
loadbalancer_frontends = [{'name': 'atomic-openshift-api',
'mode': 'tcp',
'options': ['tcplog'],
'binds': ["*:{0}".format(api_port)],
'default_backend': 'atomic-openshift-api'}]
if bool(strtobool(str(use_nuage))) and nuage_rest_port is not None:
loadbalancer_frontends.append({'name': 'nuage-monitor',
'mode': 'tcp',
'options': ['tcplog'],
'binds': ["*:{0}".format(nuage_rest_port)],
'default_backend': 'nuage-monitor'})
return loadbalancer_frontends
# pylint: disable=invalid-name
def lib_utils_oo_loadbalancer_backends(
api_port, servers_hostvars, use_nuage=False, nuage_rest_port=None):
"""TODO: Document me."""
loadbalancer_backends = [{'name': 'atomic-openshift-api',
'mode': 'tcp',
'option': 'tcplog',
'balance': 'source',
'servers': haproxy_backend_masters(servers_hostvars, api_port)}]
if bool(strtobool(str(use_nuage))) and nuage_rest_port is not None:
# pylint: disable=line-too-long
loadbalancer_backends.append({'name': 'nuage-monitor',
'mode': 'tcp',
'option': 'tcplog',
'balance': 'source',
'servers': haproxy_backend_masters(servers_hostvars, nuage_rest_port)})
return loadbalancer_backends
def lib_utils_oo_chomp_commit_offset(version):
"""Chomp any "+git.foo" commit offset string from the given `version`
and return the modified version string.
Ex:
- chomp_commit_offset(None) => None
- chomp_commit_offset(1337) => "1337"
- chomp_commit_offset("v3.4.0.15+git.derp") => "v3.4.0.15"
- chomp_commit_offset("v3.4.0.15") => "v3.4.0.15"
- chomp_commit_offset("v1.3.0+52492b4") => "v1.3.0"
"""
if version is None:
return version
else:
# Stringify, just in case it's a Number type. Split by '+' and
# return the first split. No concerns about strings without a
# '+', .split() returns an array of the original string.
return str(version).split('+')[0]
def lib_utils_oo_random_word(length, source='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
"""Generates a random string of given length from a set of alphanumeric characters.
The default source uses [a-z][A-Z][0-9]
Ex:
- lib_utils_oo_random_word(3) => aB9
        - lib_utils_oo_random_word(4, source='012') => 0120
"""
return ''.join(random.choice(source) for i in range(length))
def lib_utils_oo_contains_rule(source, apiGroups, resources, verbs):
'''Return true if the specified rule is contained within the provided source'''
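    # Ex: source = {'rules': [{'apiGroups': [''], 'resources': ['pods'], 'verbs': ['get']}]}
    #     lib_utils_oo_contains_rule(source, [''], ['pods'], ['get']) -> True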
rules = source['rules']
if rules:
for rule in rules:
if set(rule['apiGroups']) == set(apiGroups):
if set(rule['resources']) == set(resources):
if set(rule['verbs']) == set(verbs):
return True
return False
def lib_utils_oo_selector_to_string_list(user_dict):
"""Convert a dict of selectors to a key=value list of strings
Given input of {'region': 'infra', 'zone': 'primary'} returns a list
of items as ['region=infra', 'zone=primary']
"""
selectors = []
for key in user_dict:
selectors.append("{}={}".format(key, user_dict[key]))
return selectors
def lib_utils_oo_filter_sa_secrets(sa_secrets, secret_hint='-token-'):
"""Parse the Service Account Secrets list, `sa_secrets`, (as from
oc_serviceaccount_secret:state=list) and return the name of the secret
containing the `secret_hint` string. For example, by default this will
return the name of the secret holding the SA bearer token.
Only provide the 'results' object to this filter. This filter expects
to receive a list like this:
[
{
"name": "management-admin-dockercfg-p31s2"
},
{
"name": "management-admin-token-bnqsh"
}
]
Returns:
* `secret_name` [string] - The name of the secret matching the
`secret_hint` parameter. By default this is the secret holding the
SA's bearer token.
Example playbook usage:
      Register the return value from oc_serviceaccount_secret and pass
that result to this filter plugin.
- name: Get all SA Secrets
oc_serviceaccount_secret:
state: list
service_account: management-admin
namespace: management-infra
register: sa
- name: Save the SA bearer token secret name
set_fact:
management_token: "{{ sa.results | lib_utils_oo_filter_sa_secrets }}"
- name: Get the SA bearer token value
oc_secret:
state: list
name: "{{ management_token }}"
namespace: management-infra
decode: true
register: sa_secret
- name: Print the bearer token value
debug:
var: sa_secret.results.decoded.token
"""
secret_name = None
for secret in sa_secrets:
# each secret is a hash
if secret['name'].find(secret_hint) == -1:
continue
else:
secret_name = secret['name']
break
return secret_name
def lib_utils_oo_l_of_d_to_csv(input_list):
"""Map a list of dictionaries, input_list, into a csv string
of json values.
Example input:
[{'var1': 'val1', 'var2': 'val2'}, {'var1': 'val3', 'var2': 'val4'}]
Example output:
u'{"var1": "val1", "var2": "val2"},{"var1": "val3", "var2": "val4"}'
"""
return ','.join(json.dumps(x) for x in input_list)
def map_from_pairs(source, delim="="):
    ''' Returns a dict built from a comma-separated string of key/value pairs joined by delim '''
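    # Ex: map_from_pairs('a=1,b=2') -> {'a': '1', 'b': '2'}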
if source == '':
return dict()
return dict(item.split(delim) for item in source.split(","))
def map_to_pairs(source, delim="="):
''' Returns a comma separated str given the source as a dict '''
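    # Ex: map_to_pairs({'region': 'infra'}) -> 'region=infra'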
# Some default selectors are empty strings.
if source == {} or source == '':
return str()
return ','.join(["{}{}{}".format(key, delim, value) for key, value in iteritems(source)])
class FilterModule(object):
""" Custom ansible filter mapping """
# pylint: disable=no-self-use, too-few-public-methods
def filters(self):
""" returns a mapping of filters to methods """
return {
"lib_utils_oo_select_keys": lib_utils_oo_select_keys,
"lib_utils_oo_select_keys_from_list": lib_utils_oo_select_keys_from_list,
"lib_utils_oo_chomp_commit_offset": lib_utils_oo_chomp_commit_offset,
"lib_utils_oo_collect": lib_utils_oo_collect,
"lib_utils_oo_pdb": lib_utils_oo_pdb,
"lib_utils_oo_prepend_strings_in_list": lib_utils_oo_prepend_strings_in_list,
"lib_utils_oo_dict_to_list_of_dict": lib_utils_oo_dict_to_list_of_dict,
"lib_utils_oo_split": lib_utils_oo_split,
"lib_utils_oo_dict_to_keqv_list": lib_utils_oo_dict_to_keqv_list,
"lib_utils_oo_list_to_dict": lib_utils_oo_list_to_dict,
"lib_utils_oo_parse_named_certificates": lib_utils_oo_parse_named_certificates,
"lib_utils_oo_parse_certificate_san": lib_utils_oo_parse_certificate_san,
"lib_utils_oo_generate_secret": lib_utils_oo_generate_secret,
"lib_utils_oo_pods_match_component": lib_utils_oo_pods_match_component,
"lib_utils_oo_image_tag_to_rpm_version": lib_utils_oo_image_tag_to_rpm_version,
"lib_utils_oo_hostname_from_url": lib_utils_oo_hostname_from_url,
"lib_utils_oo_loadbalancer_frontends": lib_utils_oo_loadbalancer_frontends,
"lib_utils_oo_loadbalancer_backends": lib_utils_oo_loadbalancer_backends,
"lib_utils_to_padded_yaml": lib_utils_to_padded_yaml,
"lib_utils_oo_random_word": lib_utils_oo_random_word,
"lib_utils_oo_contains_rule": lib_utils_oo_contains_rule,
"lib_utils_oo_selector_to_string_list": lib_utils_oo_selector_to_string_list,
"lib_utils_oo_filter_sa_secrets": lib_utils_oo_filter_sa_secrets,
"lib_utils_oo_l_of_d_to_csv": lib_utils_oo_l_of_d_to_csv,
"map_from_pairs": map_from_pairs,
"map_to_pairs": map_to_pairs,
}
| wbrefvem/openshift-ansible | roles/lib_utils/filter_plugins/oo_filters.py | Python | apache-2.0 | 28,256 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.indexing;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ContentIterator;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import com.intellij.util.SystemProperties;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
* Author: dmitrylomov
*/
public abstract class FileBasedIndex {
public abstract void iterateIndexableFiles(@NotNull ContentIterator processor, @NotNull Project project, ProgressIndicator indicator);
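  /**
   * Note: this base implementation simply delegates to {@link #iterateIndexableFiles};
   * implementations may override it to process indexable files concurrently.
   */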
public void iterateIndexableFilesConcurrently(@NotNull ContentIterator processor, @NotNull Project project, ProgressIndicator indicator) {
iterateIndexableFiles(processor, project, indicator);
}
public abstract void registerIndexableSet(@NotNull IndexableFileSet set, @Nullable Project project);
public abstract void removeIndexableSet(@NotNull IndexableFileSet set);
public static FileBasedIndex getInstance() {
return ApplicationManager.getApplication().getComponent(FileBasedIndex.class);
}
public static int getFileId(@NotNull final VirtualFile file) {
if (file instanceof VirtualFileWithId) {
return ((VirtualFileWithId)file).getId();
}
throw new IllegalArgumentException("Virtual file doesn't support id: " + file + ", implementation class: " + file.getClass().getName());
}
// note: upsource implementation requires access to Project here, please don't remove
public abstract VirtualFile findFileById(Project project, int id);
public void requestRebuild(@NotNull ID<?, ?> indexId) {
requestRebuild(indexId, new Throwable());
}
@NotNull
public abstract <K, V> List<V> getValues(@NotNull ID<K, V> indexId, @NotNull K dataKey, @NotNull GlobalSearchScope filter);
@NotNull
public abstract <K, V> Collection<VirtualFile> getContainingFiles(@NotNull ID<K, V> indexId,
@NotNull K dataKey,
@NotNull GlobalSearchScope filter);
/**
* @return false if ValueProcessor.process() returned false; true otherwise or if ValueProcessor was not called at all
*/
public abstract <K, V> boolean processValues(@NotNull ID<K, V> indexId,
@NotNull K dataKey,
@Nullable VirtualFile inFile,
@NotNull FileBasedIndex.ValueProcessor<V> processor,
@NotNull GlobalSearchScope filter);
/**
* @return false if ValueProcessor.process() returned false; true otherwise or if ValueProcessor was not called at all
*/
public <K, V> boolean processValues(@NotNull ID<K, V> indexId,
@NotNull K dataKey,
@Nullable VirtualFile inFile,
@NotNull FileBasedIndex.ValueProcessor<V> processor,
@NotNull GlobalSearchScope filter,
@Nullable IdFilter idFilter) {
return processValues(indexId, dataKey, inFile, processor, filter);
}
public abstract <K, V> boolean processFilesContainingAllKeys(@NotNull ID<K, V> indexId,
@NotNull Collection<K> dataKeys,
@NotNull GlobalSearchScope filter,
@Nullable Condition<V> valueChecker,
@NotNull Processor<VirtualFile> processor);
/**
   * @param project it is guaranteed to return data which is up-to-date within the project
* Keys obtained from the files which do not belong to the project specified may not be up-to-date or even exist
*/
@NotNull
public abstract <K> Collection<K> getAllKeys(@NotNull ID<K, ?> indexId, @NotNull Project project);
/**
* DO NOT CALL DIRECTLY IN CLIENT CODE
   * The method is internal to the indexing engine and is called internally. The method is public due to implementation details
*/
public abstract <K> void ensureUpToDate(@NotNull ID<K, ?> indexId, @Nullable Project project, @Nullable GlobalSearchScope filter);
public abstract void requestRebuild(@NotNull ID<?, ?> indexId, Throwable throwable);
public abstract <K> void scheduleRebuild(@NotNull ID<K, ?> indexId, @NotNull Throwable e);
public abstract void requestReindex(@NotNull VirtualFile file);
public abstract <K, V> boolean getFilesWithKey(@NotNull ID<K, V> indexId,
@NotNull Set<K> dataKeys,
@NotNull Processor<VirtualFile> processor,
@NotNull GlobalSearchScope filter);
/**
   * @param project it is guaranteed to return data which is up-to-date within the project
* Keys obtained from the files which do not belong to the project specified may not be up-to-date or even exist
*/
public abstract <K> boolean processAllKeys(@NotNull ID<K, ?> indexId, @NotNull Processor<K> processor, @Nullable Project project);
public <K> boolean processAllKeys(@NotNull ID<K, ?> indexId, @NotNull Processor<K> processor, @NotNull GlobalSearchScope scope, @Nullable IdFilter idFilter) {
return processAllKeys(indexId, processor, scope.getProject());
}
public static void iterateRecursively(@Nullable final VirtualFile root,
@NotNull final ContentIterator processor,
@Nullable final ProgressIndicator indicator,
@Nullable final Set<VirtualFile> visitedRoots,
@Nullable final ProjectFileIndex projectFileIndex) {
if (root == null) {
return;
}
VfsUtilCore.visitChildrenRecursively(root, new VirtualFileVisitor() {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
if (!acceptsFile(file)) return false;
if (file.is(VFileProperty.SYMLINK)) {
if(!Registry.is("indexer.follows.symlinks")) return false;
VirtualFile canonicalFile = file.getCanonicalFile();
if (canonicalFile != null) {
if(!acceptsFile(canonicalFile)) return false;
}
}
if (indicator != null) indicator.checkCanceled();
processor.processFile(file);
return true;
}
private boolean acceptsFile(@NotNull VirtualFile file) {
if (visitedRoots != null && !root.equals(file) && file.isDirectory() && !visitedRoots.add(file)) {
return false;
}
if (projectFileIndex != null && projectFileIndex.isExcluded(file)) {
return false;
}
return true;
}
});
}
@FunctionalInterface
public interface ValueProcessor<V> {
/**
* @param value a value to process
* @param file the file the value came from
* @return false if no further processing is needed, true otherwise
*/
boolean process(@NotNull VirtualFile file, V value);
}
@FunctionalInterface
public interface InputFilter {
boolean acceptInput(@NotNull VirtualFile file);
}
public interface FileTypeSpecificInputFilter extends InputFilter {
void registerFileTypesUsedForIndexing(@NotNull Consumer<FileType> fileTypeSink);
}
// TODO: remove once changes becomes permanent
public static final boolean ourEnableTracingOfKeyHashToVirtualFileMapping =
SystemProperties.getBooleanProperty("idea.enable.tracing.keyhash2virtualfile", true);
}
| apixandru/intellij-community | platform/indexing-api/src/com/intellij/util/indexing/FileBasedIndex.java | Java | apache-2.0 | 8,924 |
--- Turbo.lua Unit test
--
-- Copyright 2013 John Abrahamsen
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
local turbo = require "turbo"
local ffi = require "ffi"
describe("turbo.util Namespace", function()
describe("util.str_find", function()
local search_time = 1000000
local sneedle = "\r\n\r\n"
local shaystack = "pkl2''234kokosmv,.-sd,fkwerj234982a9dfj89as9dhrh234"
it("should work", function()
local haystack = turbo.structs.buffer()
for i = 0, search_time do
haystack:append_luastr_right(shaystack)
end
haystack:append_luastr_right(sneedle)
for i = 0, search_time do
haystack:append_luastr_right(shaystack)
end
local needle = turbo.structs.buffer()
needle:append_luastr_right(sneedle)
local h_str, h_len = haystack:get()
local n_str, n_len = needle:get()
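            -- str_find returns a pointer into the haystack; subtracting the base
            -- pointer gives the byte offset where the needle was found.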
assert.equal(turbo.util.str_find(h_str, n_str, h_len, n_len) - h_str, 51000051)
end)
it("should perform on par with string.find by a factor of 1.5", function()
local haystack = turbo.structs.buffer()
for i = 0, search_time do
haystack:append_luastr_right(shaystack)
end
haystack:append_luastr_right(sneedle)
for i = 0, search_time do
haystack:append_luastr_right(shaystack)
end
local needle = turbo.structs.buffer()
needle:append_luastr_right(sneedle)
local h_str, h_len = haystack:get()
local n_str, n_len = needle:get()
local start = turbo.util.gettimeofday()
turbo.util.str_find(h_str, n_str, h_len, n_len)
local ag_time = turbo.util.gettimeofday() - start
-- Lua comparison.
local str = {}
for i = 0, search_time do
str[#str+1] = shaystack
end
str[#str+1] = sneedle
for i = 0, search_time do
str[#str+1] = shaystack
end
str = table.concat(str)
start = turbo.util.gettimeofday()
str:find(sneedle, 1, true)
local find_time = turbo.util.gettimeofday() - start
assert.truthy(ag_time < find_time * 1.5)
end)
end)
end) | YuanPeir-Chen/turbo-support-mipsel | spec/util_spec.lua | Lua | apache-2.0 | 2,883 |
all: repl test
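# %.zip packs the matching .app directory into a zip archive; the % rule then
# builds luvi and concatenates the luvi binary with that zip to produce a
# self-contained executable.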
%.zip: %.app %.app/main.lua
cd $< && zip -r -9 ../$@ . && cd ..
%: %.zip
make -C .. luvi
cat ../luvi $< > $@
chmod +x $@
clean:
rm -f repl repl.zip test test.zip
| zhaozg/luvi | samples/Makefile | Makefile | apache-2.0 | 186 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.solver.event;
import java.util.Iterator;
import org.optaplanner.core.api.domain.solution.Solution;
import org.optaplanner.core.api.solver.event.BestSolutionChangedEvent;
import org.optaplanner.core.api.solver.event.SolverEventListener;
import org.optaplanner.core.impl.solver.DefaultSolver;
/**
* Internal API.
*/
public class SolverEventSupport extends AbstractEventSupport<SolverEventListener> {
private DefaultSolver solver;
public SolverEventSupport(DefaultSolver solver) {
this.solver = solver;
}
public void fireBestSolutionChanged(Solution newBestSolution, int newUninitializedVariableCount) {
final Iterator<SolverEventListener> it = eventListenerSet.iterator();
if (it.hasNext()) {
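            // Construct the event lazily: only when at least one listener is registered.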
final BestSolutionChangedEvent event = new BestSolutionChangedEvent(solver,
solver.getSolverScope().calculateTimeMillisSpent(), newBestSolution, newUninitializedVariableCount);
do {
it.next().bestSolutionChanged(event);
} while (it.hasNext());
}
}
}
| snurkabill/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/solver/event/SolverEventSupport.java | Java | apache-2.0 | 1,703 |
/*-------------------------------------------------------------------------
*
* analyzejoins.c
* Routines for simplifying joins after initial query analysis
*
* While we do a great deal of join simplification in prep/prepjointree.c,
* certain optimizations cannot be performed at that stage for lack of
* detailed information about the query. The routines here are invoked
* after initsplan.c has done its work, and can do additional join removal
* and simplification steps based on the information extracted. The penalty
* is that we have to work harder to clean up after ourselves when we modify
* the query, since the derived data structures have to be updated too.
*
* Portions Copyright (c) 1996-2015, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
*
* IDENTIFICATION
* src/backend/optimizer/plan/analyzejoins.c
*
*-------------------------------------------------------------------------
*/
#include "postgres.h"
#include "nodes/nodeFuncs.h"
#include "optimizer/clauses.h"
#include "optimizer/joininfo.h"
#include "optimizer/pathnode.h"
#include "optimizer/paths.h"
#include "optimizer/planmain.h"
#include "optimizer/tlist.h"
#include "utils/lsyscache.h"
/* local functions */
static bool join_is_removable(PlannerInfo *root, SpecialJoinInfo *sjinfo);
static void remove_rel_from_query(PlannerInfo *root, int relid,
Relids joinrelids);
static List *remove_rel_from_joinlist(List *joinlist, int relid, int *nremoved);
static Oid distinct_col_search(int colno, List *colnos, List *opids);
/*
* remove_useless_joins
* Check for relations that don't actually need to be joined at all,
* and remove them from the query.
*
* We are passed the current joinlist and return the updated list. Other
* data structures that have to be updated are accessible via "root".
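 *
 * Illustrative example: in "SELECT a.* FROM a LEFT JOIN b ON a.b_id = b.id",
 * the join to "b" can be removed when b.id is provably unique and no
 * attribute of "b" is needed above the join.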
*/
List *
remove_useless_joins(PlannerInfo *root, List *joinlist)
{
ListCell *lc;
/*
* We are only interested in relations that are left-joined to, so we can
* scan the join_info_list to find them easily.
*/
restart:
foreach(lc, root->join_info_list)
{
SpecialJoinInfo *sjinfo = (SpecialJoinInfo *) lfirst(lc);
int innerrelid;
int nremoved;
/* Skip if not removable */
if (!join_is_removable(root, sjinfo))
continue;
/*
* Currently, join_is_removable can only succeed when the sjinfo's
* righthand is a single baserel. Remove that rel from the query and
* joinlist.
*/
innerrelid = bms_singleton_member(sjinfo->min_righthand);
remove_rel_from_query(root, innerrelid,
bms_union(sjinfo->min_lefthand,
sjinfo->min_righthand));
/* We verify that exactly one reference gets removed from joinlist */
nremoved = 0;
joinlist = remove_rel_from_joinlist(joinlist, innerrelid, &nremoved);
if (nremoved != 1)
elog(ERROR, "failed to find relation %d in joinlist", innerrelid);
/*
* We can delete this SpecialJoinInfo from the list too, since it's no
* longer of interest.
*/
root->join_info_list = list_delete_ptr(root->join_info_list, sjinfo);
/*
* Restart the scan. This is necessary to ensure we find all
* removable joins independently of ordering of the join_info_list
* (note that removal of attr_needed bits may make a join appear
* removable that did not before). Also, since we just deleted the
* current list cell, we'd have to have some kluge to continue the
* list scan anyway.
*/
goto restart;
}
return joinlist;
}
/*
* clause_sides_match_join
* Determine whether a join clause is of the right form to use in this join.
*
* We already know that the clause is a binary opclause referencing only the
* rels in the current join. The point here is to check whether it has the
* form "outerrel_expr op innerrel_expr" or "innerrel_expr op outerrel_expr",
* rather than mixing outer and inner vars on either side. If it matches,
* we set the transient flag outer_is_left to identify which side is which.
*/
static inline bool
clause_sides_match_join(RestrictInfo *rinfo, Relids outerrelids,
Relids innerrelids)
{
if (bms_is_subset(rinfo->left_relids, outerrelids) &&
bms_is_subset(rinfo->right_relids, innerrelids))
{
/* lefthand side is outer */
rinfo->outer_is_left = true;
return true;
}
else if (bms_is_subset(rinfo->left_relids, innerrelids) &&
bms_is_subset(rinfo->right_relids, outerrelids))
{
/* righthand side is outer */
rinfo->outer_is_left = false;
return true;
}
return false; /* no good for these input relations */
}
/*
* join_is_removable
* Check whether we need not perform this special join at all, because
* it will just duplicate its left input.
*
* This is true for a left join for which the join condition cannot match
* more than one inner-side row. (There are other possibly interesting
* cases, but we don't have the infrastructure to prove them.) We also
* have to check that the inner side doesn't generate any variables needed
* above the join.
*/
static bool
join_is_removable(PlannerInfo *root, SpecialJoinInfo *sjinfo)
{
int innerrelid;
RelOptInfo *innerrel;
Query *subquery = NULL;
Relids joinrelids;
List *clause_list = NIL;
ListCell *l;
int attroff;
/*
* Must be a non-delaying left join to a single baserel, else we aren't
* going to be able to do anything with it.
*/
if (sjinfo->jointype != JOIN_LEFT ||
sjinfo->delay_upper_joins)
return false;
if (!bms_get_singleton_member(sjinfo->min_righthand, &innerrelid))
return false;
innerrel = find_base_rel(root, innerrelid);
if (innerrel->reloptkind != RELOPT_BASEREL)
return false;
/*
* Before we go to the effort of checking whether any innerrel variables
* are needed above the join, make a quick check to eliminate cases in
* which we will surely be unable to prove uniqueness of the innerrel.
*/
if (innerrel->rtekind == RTE_RELATION)
{
/*
* For a plain-relation innerrel, we only know how to prove uniqueness
* by reference to unique indexes. If there are no indexes then
* there's certainly no unique indexes so there's no point in going
* further.
*/
if (innerrel->indexlist == NIL)
return false;
}
else if (innerrel->rtekind == RTE_SUBQUERY)
{
subquery = root->simple_rte_array[innerrelid]->subquery;
/*
* If the subquery has no qualities that support distinctness proofs
* then there's no point in going further.
*/
if (!query_supports_distinctness(subquery))
return false;
}
else
return false; /* unsupported rtekind */
/* Compute the relid set for the join we are considering */
joinrelids = bms_union(sjinfo->min_lefthand, sjinfo->min_righthand);
/*
* We can't remove the join if any inner-rel attributes are used above the
* join.
*
* Note that this test only detects use of inner-rel attributes in higher
* join conditions and the target list. There might be such attributes in
* pushed-down conditions at this join, too. We check that case below.
*
* As a micro-optimization, it seems better to start with max_attr and
* count down rather than starting with min_attr and counting up, on the
* theory that the system attributes are somewhat less likely to be wanted
* and should be tested last.
*/
for (attroff = innerrel->max_attr - innerrel->min_attr;
attroff >= 0;
attroff--)
{
if (!bms_is_subset(innerrel->attr_needed[attroff], joinrelids))
return false;
}
/*
* Similarly check that the inner rel isn't needed by any PlaceHolderVars
* that will be used above the join. We only need to fail if such a PHV
* actually references some inner-rel attributes; but the correct check
* for that is relatively expensive, so we first check against ph_eval_at,
* which must mention the inner rel if the PHV uses any inner-rel attrs as
* non-lateral references. Note that if the PHV's syntactic scope is just
* the inner rel, we can't drop the rel even if the PHV is variable-free.
*/
foreach(l, root->placeholder_list)
{
PlaceHolderInfo *phinfo = (PlaceHolderInfo *) lfirst(l);
if (bms_is_subset(phinfo->ph_needed, joinrelids))
continue; /* PHV is not used above the join */
if (bms_overlap(phinfo->ph_lateral, innerrel->relids))
return false; /* it references innerrel laterally */
if (!bms_overlap(phinfo->ph_eval_at, innerrel->relids))
continue; /* it definitely doesn't reference innerrel */
if (bms_is_subset(phinfo->ph_eval_at, innerrel->relids))
return false; /* there isn't any other place to eval PHV */
if (bms_overlap(pull_varnos((Node *) phinfo->ph_var->phexpr),
innerrel->relids))
return false; /* it does reference innerrel */
}
/*
* Search for mergejoinable clauses that constrain the inner rel against
	 * either the outer rel or a pseudoconstant.  If an operator is
* mergejoinable then it behaves like equality for some btree opclass, so
* it's what we want. The mergejoinability test also eliminates clauses
* containing volatile functions, which we couldn't depend on.
*/
foreach(l, innerrel->joininfo)
{
RestrictInfo *restrictinfo = (RestrictInfo *) lfirst(l);
/*
* If it's not a join clause for this outer join, we can't use it.
* Note that if the clause is pushed-down, then it is logically from
* above the outer join, even if it references no other rels (it might
* be from WHERE, for example).
*/
if (restrictinfo->is_pushed_down ||
!bms_equal(restrictinfo->required_relids, joinrelids))
{
/*
* If such a clause actually references the inner rel then join
* removal has to be disallowed. We have to check this despite
* the previous attr_needed checks because of the possibility of
* pushed-down clauses referencing the rel.
*/
if (bms_is_member(innerrelid, restrictinfo->clause_relids))
return false;
continue; /* else, ignore; not useful here */
}
/* Ignore if it's not a mergejoinable clause */
if (!restrictinfo->can_join ||
restrictinfo->mergeopfamilies == NIL)
continue; /* not mergejoinable */
/*
* Check if clause has the form "outer op inner" or "inner op outer".
*/
if (!clause_sides_match_join(restrictinfo, sjinfo->min_lefthand,
innerrel->relids))
continue; /* no good for these input relations */
/* OK, add to list */
clause_list = lappend(clause_list, restrictinfo);
}
/*
* relation_has_unique_index_for automatically adds any usable restriction
* clauses for the innerrel, so we needn't do that here. (XXX we are not
* considering restriction clauses for subqueries; is that worth doing?)
*/
if (innerrel->rtekind == RTE_RELATION)
{
/* Now examine the indexes to see if we have a matching unique index */
if (relation_has_unique_index_for(root, innerrel, clause_list, NIL, NIL))
return true;
}
else /* innerrel->rtekind == RTE_SUBQUERY */
{
List *colnos = NIL;
List *opids = NIL;
/*
* Build the argument lists for query_is_distinct_for: a list of
* output column numbers that the query needs to be distinct over, and
* a list of equality operators that the output columns need to be
* distinct according to.
*/
foreach(l, clause_list)
{
RestrictInfo *rinfo = (RestrictInfo *) lfirst(l);
Oid op;
Var *var;
/*
			 * Get the equality operator we need uniqueness according to.
			 * (This might be a cross-type operator and thus not exactly the
			 * same operator the subquery would consider; that's all right
* since query_is_distinct_for can resolve such cases.) The
* mergejoinability test above should have selected only OpExprs.
*/
Assert(IsA(rinfo->clause, OpExpr));
op = ((OpExpr *) rinfo->clause)->opno;
/* clause_sides_match_join identified the inner side for us */
if (rinfo->outer_is_left)
var = (Var *) get_rightop(rinfo->clause);
else
var = (Var *) get_leftop(rinfo->clause);
/*
* If inner side isn't a Var referencing a subquery output column,
* this clause doesn't help us.
*/
if (!var || !IsA(var, Var) ||
var->varno != innerrelid || var->varlevelsup != 0)
continue;
colnos = lappend_int(colnos, var->varattno);
opids = lappend_oid(opids, op);
}
if (query_is_distinct_for(subquery, colnos, opids))
return true;
}
/*
* Some day it would be nice to check for other methods of establishing
* distinctness.
*/
return false;
}
/*
* Remove the target relid from the planner's data structures, having
* determined that there is no need to include it in the query.
*
* We are not terribly thorough here. We must make sure that the rel is
* no longer treated as a baserel, and that attributes of other baserels
* are no longer marked as being needed at joins involving this rel.
* Also, join quals involving the rel have to be removed from the joininfo
* lists, but only if they belong to the outer join identified by joinrelids.
*/
static void
remove_rel_from_query(PlannerInfo *root, int relid, Relids joinrelids)
{
RelOptInfo *rel = find_base_rel(root, relid);
List *joininfos;
Index rti;
ListCell *l;
ListCell *nextl;
/*
* Mark the rel as "dead" to show it is no longer part of the join tree.
* (Removing it from the baserel array altogether seems too risky.)
*/
rel->reloptkind = RELOPT_DEADREL;
/*
* Remove references to the rel from other baserels' attr_needed arrays.
*/
for (rti = 1; rti < root->simple_rel_array_size; rti++)
{
RelOptInfo *otherrel = root->simple_rel_array[rti];
int attroff;
/* there may be empty slots corresponding to non-baserel RTEs */
if (otherrel == NULL)
continue;
Assert(otherrel->relid == rti); /* sanity check on array */
/* no point in processing target rel itself */
if (otherrel == rel)
continue;
for (attroff = otherrel->max_attr - otherrel->min_attr;
attroff >= 0;
attroff--)
{
otherrel->attr_needed[attroff] =
bms_del_member(otherrel->attr_needed[attroff], relid);
}
}
/*
* Likewise remove references from SpecialJoinInfo data structures.
*
* This is relevant in case the outer join we're deleting is nested inside
* other outer joins: the upper joins' relid sets have to be adjusted. The
* RHS of the target outer join will be made empty here, but that's OK
* since caller will delete that SpecialJoinInfo entirely.
*/
foreach(l, root->join_info_list)
{
SpecialJoinInfo *sjinfo = (SpecialJoinInfo *) lfirst(l);
sjinfo->min_lefthand = bms_del_member(sjinfo->min_lefthand, relid);
sjinfo->min_righthand = bms_del_member(sjinfo->min_righthand, relid);
sjinfo->syn_lefthand = bms_del_member(sjinfo->syn_lefthand, relid);
sjinfo->syn_righthand = bms_del_member(sjinfo->syn_righthand, relid);
}
/*
* Likewise remove references from LateralJoinInfo data structures.
*
* If we are deleting a LATERAL subquery, we can forget its
* LateralJoinInfos altogether. Otherwise, make sure the target is not
* included in any lateral_lhs set. (It probably can't be, since that
* should have precluded deciding to remove it; but let's cope anyway.)
*/
for (l = list_head(root->lateral_info_list); l != NULL; l = nextl)
{
LateralJoinInfo *ljinfo = (LateralJoinInfo *) lfirst(l);
nextl = lnext(l);
ljinfo->lateral_rhs = bms_del_member(ljinfo->lateral_rhs, relid);
if (bms_is_empty(ljinfo->lateral_rhs))
root->lateral_info_list = list_delete_ptr(root->lateral_info_list,
ljinfo);
else
{
ljinfo->lateral_lhs = bms_del_member(ljinfo->lateral_lhs, relid);
Assert(!bms_is_empty(ljinfo->lateral_lhs));
}
}
/*
* Likewise remove references from PlaceHolderVar data structures.
*/
foreach(l, root->placeholder_list)
{
PlaceHolderInfo *phinfo = (PlaceHolderInfo *) lfirst(l);
phinfo->ph_eval_at = bms_del_member(phinfo->ph_eval_at, relid);
Assert(!bms_is_empty(phinfo->ph_eval_at));
Assert(!bms_is_member(relid, phinfo->ph_lateral));
phinfo->ph_needed = bms_del_member(phinfo->ph_needed, relid);
}
/*
* Remove any joinquals referencing the rel from the joininfo lists.
*
* In some cases, a joinqual has to be put back after deleting its
* reference to the target rel. This can occur for pseudoconstant and
* outerjoin-delayed quals, which can get marked as requiring the rel in
* order to force them to be evaluated at or above the join. We can't
* just discard them, though. Only quals that logically belonged to the
* outer join being discarded should be removed from the query.
*
* We must make a copy of the rel's old joininfo list before starting the
* loop, because otherwise remove_join_clause_from_rels would destroy the
* list while we're scanning it.
*/
joininfos = list_copy(rel->joininfo);
foreach(l, joininfos)
{
RestrictInfo *rinfo = (RestrictInfo *) lfirst(l);
remove_join_clause_from_rels(root, rinfo, rinfo->required_relids);
if (rinfo->is_pushed_down ||
!bms_equal(rinfo->required_relids, joinrelids))
{
/* Recheck that qual doesn't actually reference the target rel */
Assert(!bms_is_member(relid, rinfo->clause_relids));
/*
* The required_relids probably aren't shared with anything else,
* but let's copy them just to be sure.
*/
rinfo->required_relids = bms_copy(rinfo->required_relids);
rinfo->required_relids = bms_del_member(rinfo->required_relids,
relid);
distribute_restrictinfo_to_rels(root, rinfo);
}
}
}
/*
* Remove any occurrences of the target relid from a joinlist structure.
*
 * It's easiest to build a whole new list structure, so we handle it that
* way. Efficiency is not a big deal here.
*
* *nremoved is incremented by the number of occurrences removed (there
* should be exactly one, but the caller checks that).
*/
static List *
remove_rel_from_joinlist(List *joinlist, int relid, int *nremoved)
{
List *result = NIL;
ListCell *jl;
foreach(jl, joinlist)
{
Node *jlnode = (Node *) lfirst(jl);
if (IsA(jlnode, RangeTblRef))
{
int varno = ((RangeTblRef *) jlnode)->rtindex;
if (varno == relid)
(*nremoved)++;
else
result = lappend(result, jlnode);
}
else if (IsA(jlnode, List))
{
/* Recurse to handle subproblem */
List *sublist;
sublist = remove_rel_from_joinlist((List *) jlnode,
relid, nremoved);
/* Avoid including empty sub-lists in the result */
if (sublist)
result = lappend(result, sublist);
}
else
{
elog(ERROR, "unrecognized joinlist node type: %d",
(int) nodeTag(jlnode));
}
}
return result;
}
/*
* query_supports_distinctness - could the query possibly be proven distinct
* on some set of output columns?
*
* This is effectively a pre-checking function for query_is_distinct_for().
* It must return TRUE if query_is_distinct_for() could possibly return TRUE
* with this query, but it should not expend a lot of cycles. The idea is
* that callers can avoid doing possibly-expensive processing to compute
* query_is_distinct_for()'s argument lists if the call could not possibly
* succeed.
*/
bool
query_supports_distinctness(Query *query)
{
if (query->distinctClause != NIL ||
query->groupClause != NIL ||
query->groupingSets != NIL ||
query->hasAggs ||
query->havingQual ||
query->setOperations)
return true;
return false;
}
/*
* query_is_distinct_for - does query never return duplicates of the
* specified columns?
*
* query is a not-yet-planned subquery (in current usage, it's always from
* a subquery RTE, which the planner avoids scribbling on).
*
* colnos is an integer list of output column numbers (resno's). We are
* interested in whether rows consisting of just these columns are certain
* to be distinct. "Distinctness" is defined according to whether the
* corresponding upper-level equality operators listed in opids would think
* the values are distinct. (Note: the opids entries could be cross-type
* operators, and thus not exactly the equality operators that the subquery
* would use itself. We use equality_ops_are_compatible() to check
* compatibility. That looks at btree or hash opfamily membership, and so
* should give trustworthy answers for all operators that we might need
* to deal with here.)
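 *
 * Illustrative example: for a subquery "SELECT x, y FROM t GROUP BY x, y",
 * the result is distinct for colnos = (1, 2) with the corresponding equality
 * operators, since every output column named in colnos is grouped on.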
*/
bool
query_is_distinct_for(Query *query, List *colnos, List *opids)
{
ListCell *l;
Oid opid;
Assert(list_length(colnos) == list_length(opids));
/*
* A set-returning function in the query's targetlist can result in
* returning duplicate rows, if the SRF is evaluated after the
* de-duplication step; so we play it safe and say "no" if there are any
* SRFs. (We could be certain that it's okay if SRFs appear only in the
* specified columns, since those must be evaluated before de-duplication;
* but it doesn't presently seem worth the complication to check that.)
*/
if (expression_returns_set((Node *) query->targetList))
return false;
/*
* DISTINCT (including DISTINCT ON) guarantees uniqueness if all the
	 * columns in the DISTINCT clause appear in colnos and operator semantics
* match.
*/
if (query->distinctClause)
{
foreach(l, query->distinctClause)
{
SortGroupClause *sgc = (SortGroupClause *) lfirst(l);
TargetEntry *tle = get_sortgroupclause_tle(sgc,
query->targetList);
opid = distinct_col_search(tle->resno, colnos, opids);
if (!OidIsValid(opid) ||
!equality_ops_are_compatible(opid, sgc->eqop))
break; /* exit early if no match */
}
if (l == NULL) /* had matches for all? */
return true;
}
/*
* Similarly, GROUP BY without GROUPING SETS guarantees uniqueness if all
	 * the grouped columns appear in colnos and operator semantics match.
*/
if (query->groupClause && !query->groupingSets)
{
foreach(l, query->groupClause)
{
SortGroupClause *sgc = (SortGroupClause *) lfirst(l);
TargetEntry *tle = get_sortgroupclause_tle(sgc,
query->targetList);
opid = distinct_col_search(tle->resno, colnos, opids);
if (!OidIsValid(opid) ||
!equality_ops_are_compatible(opid, sgc->eqop))
break; /* exit early if no match */
}
if (l == NULL) /* had matches for all? */
return true;
}
else if (query->groupingSets)
{
/*
* If we have grouping sets with expressions, we probably
* don't have uniqueness and analysis would be hard. Punt.
*/
if (query->groupClause)
return false;
/*
* If we have no groupClause (therefore no grouping expressions),
* we might have one or many empty grouping sets. If there's just
* one, then we're returning only one row and are certainly unique.
* But otherwise, we know we're certainly not unique.
*/
if (list_length(query->groupingSets) == 1 &&
((GroupingSet *)linitial(query->groupingSets))->kind == GROUPING_SET_EMPTY)
return true;
else
return false;
}
else
{
/*
* If we have no GROUP BY, but do have aggregates or HAVING, then the
* result is at most one row so it's surely unique, for any operators.
*/
if (query->hasAggs || query->havingQual)
return true;
}
/*
* UNION, INTERSECT, EXCEPT guarantee uniqueness of the whole output row,
* except with ALL.
*/
if (query->setOperations)
{
SetOperationStmt *topop = (SetOperationStmt *) query->setOperations;
Assert(IsA(topop, SetOperationStmt));
Assert(topop->op != SETOP_NONE);
if (!topop->all)
{
ListCell *lg;
/* We're good if all the nonjunk output columns are in colnos */
lg = list_head(topop->groupClauses);
foreach(l, query->targetList)
{
TargetEntry *tle = (TargetEntry *) lfirst(l);
SortGroupClause *sgc;
if (tle->resjunk)
continue; /* ignore resjunk columns */
/* non-resjunk columns should have grouping clauses */
Assert(lg != NULL);
sgc = (SortGroupClause *) lfirst(lg);
lg = lnext(lg);
opid = distinct_col_search(tle->resno, colnos, opids);
if (!OidIsValid(opid) ||
!equality_ops_are_compatible(opid, sgc->eqop))
break; /* exit early if no match */
}
if (l == NULL) /* had matches for all? */
return true;
}
}
/*
* XXX Are there any other cases in which we can easily see the result
* must be distinct?
*
* If you do add more smarts to this function, be sure to update
* query_supports_distinctness() to match.
*/
return false;
}
/*
* distinct_col_search - subroutine for query_is_distinct_for
*
* If colno is in colnos, return the corresponding element of opids,
* else return InvalidOid. (Ordinarily colnos would not contain duplicates,
* but if it does, we arbitrarily select the first match.)
*/
static Oid
distinct_col_search(int colno, List *colnos, List *opids)
{
ListCell *lc1,
*lc2;
forboth(lc1, colnos, lc2, opids)
{
if (colno == lfirst_int(lc1))
return lfirst_oid(lc2);
}
return InvalidOid;
}
| larryxiao/peloton | src/postgres/backend/optimizer/plan/analyzejoins.cpp | C++ | apache-2.0 | 24,837 |
cask 'rambox' do
version '0.5.9'
sha256 'cb0d0263a5bfb7f3ded3762f6ccc057e2ac9c1a14efa6152f7ee1aff71039093'
# github.com/saenzramiro/rambox was verified as official when first introduced to the cask
url "https://github.com/saenzramiro/rambox/releases/download/#{version}/Rambox-#{version}.dmg"
appcast 'https://github.com/saenzramiro/rambox/releases.atom',
checkpoint: 'e1e360e15afdef800ae546ff7658e2f8271e2091beb0275e6369b81ba85c8f70'
name 'Rambox'
homepage 'http://rambox.pro/'
app 'Rambox.app'
end
| athrunsun/homebrew-cask | Casks/rambox.rb | Ruby | bsd-2-clause | 528 |
/*
pbrt source code Copyright(c) 1998-2012 Matt Pharr and Greg Humphreys.
This file is part of pbrt.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// cameras/perspective.cpp*
#include "stdafx.h"
#include "cameras/perspective.h"
#include "paramset.h"
#include "sampler.h"
#include "montecarlo.h"
// PerspectiveCamera Method Definitions
PerspectiveCamera::PerspectiveCamera(const AnimatedTransform &cam2world,
const float screenWindow[4], float sopen, float sclose,
float lensr, float focald, float fov, Film *f)
: ProjectiveCamera(cam2world, Perspective(fov, 1e-2f, 1000.f),
screenWindow, sopen, sclose, lensr, focald, f) {
// Compute differential changes in origin for perspective camera rays
dxCamera = RasterToCamera(Point(1,0,0)) - RasterToCamera(Point(0,0,0));
dyCamera = RasterToCamera(Point(0,1,0)) - RasterToCamera(Point(0,0,0));
}
float PerspectiveCamera::GenerateRay(const CameraSample &sample,
Ray *ray) const {
// Generate raster and camera samples
Point Pras(sample.imageX, sample.imageY, 0);
Point Pcamera;
RasterToCamera(Pras, &Pcamera);
*ray = Ray(Point(0,0,0), Normalize(Vector(Pcamera)), 0.f, INFINITY);
// Modify ray for depth of field
if (lensRadius > 0.) {
// Sample point on lens
float lensU, lensV;
ConcentricSampleDisk(sample.lensU, sample.lensV, &lensU, &lensV);
lensU *= lensRadius;
lensV *= lensRadius;
// Compute point on plane of focus
float ft = focalDistance / ray->d.z;
Point Pfocus = (*ray)(ft);
// Update ray for effect of lens
ray->o = Point(lensU, lensV, 0.f);
ray->d = Normalize(Pfocus - ray->o);
}
ray->time = sample.time;
CameraToWorld(*ray, ray);
return 1.f;
}
float PerspectiveCamera::GenerateRayDifferential(const CameraSample &sample,
RayDifferential *ray) const {
// Generate raster and camera samples
Point Pras(sample.imageX, sample.imageY, 0);
Point Pcamera;
RasterToCamera(Pras, &Pcamera);
Vector dir = Normalize(Vector(Pcamera.x, Pcamera.y, Pcamera.z));
*ray = RayDifferential(Point(0,0,0), dir, 0.f, INFINITY);
// Modify ray for depth of field
if (lensRadius > 0.) {
// Sample point on lens
float lensU, lensV;
ConcentricSampleDisk(sample.lensU, sample.lensV, &lensU, &lensV);
lensU *= lensRadius;
lensV *= lensRadius;
// Compute point on plane of focus
float ft = focalDistance / ray->d.z;
Point Pfocus = (*ray)(ft);
// Update ray for effect of lens
ray->o = Point(lensU, lensV, 0.f);
ray->d = Normalize(Pfocus - ray->o);
}
// Compute offset rays for _PerspectiveCamera_ ray differentials
if (lensRadius > 0.) {
// Compute _PerspectiveCamera_ ray differentials with defocus blur
// Sample point on lens
float lensU, lensV;
ConcentricSampleDisk(sample.lensU, sample.lensV, &lensU, &lensV);
lensU *= lensRadius;
lensV *= lensRadius;
Vector dx = Normalize(Vector(Pcamera + dxCamera));
float ft = focalDistance / dx.z;
Point pFocus = Point(0,0,0) + (ft * dx);
ray->rxOrigin = Point(lensU, lensV, 0.f);
ray->rxDirection = Normalize(pFocus - ray->rxOrigin);
Vector dy = Normalize(Vector(Pcamera + dyCamera));
ft = focalDistance / dy.z;
pFocus = Point(0,0,0) + (ft * dy);
ray->ryOrigin = Point(lensU, lensV, 0.f);
ray->ryDirection = Normalize(pFocus - ray->ryOrigin);
}
else {
ray->rxOrigin = ray->ryOrigin = ray->o;
ray->rxDirection = Normalize(Vector(Pcamera) + dxCamera);
ray->ryDirection = Normalize(Vector(Pcamera) + dyCamera);
}
ray->time = sample.time;
CameraToWorld(*ray, ray);
ray->hasDifferentials = true;
return 1.f;
}
PerspectiveCamera *CreatePerspectiveCamera(const ParamSet ¶ms,
const AnimatedTransform &cam2world, Film *film) {
// Extract common camera parameters from _ParamSet_
float shutteropen = params.FindOneFloat("shutteropen", 0.f);
float shutterclose = params.FindOneFloat("shutterclose", 1.f);
if (shutterclose < shutteropen) {
Warning("Shutter close time [%f] < shutter open [%f]. Swapping them.",
shutterclose, shutteropen);
swap(shutterclose, shutteropen);
}
float lensradius = params.FindOneFloat("lensradius", 0.f);
float focaldistance = params.FindOneFloat("focaldistance", 1e30f);
float frame = params.FindOneFloat("frameaspectratio",
float(film->xResolution)/float(film->yResolution));
float screen[4];
if (frame > 1.f) {
screen[0] = -frame;
screen[1] = frame;
screen[2] = -1.f;
screen[3] = 1.f;
}
else {
screen[0] = -1.f;
screen[1] = 1.f;
screen[2] = -1.f / frame;
screen[3] = 1.f / frame;
}
int swi;
const float *sw = params.FindFloat("screenwindow", &swi);
if (sw && swi == 4)
memcpy(screen, sw, 4*sizeof(float));
float fov = params.FindOneFloat("fov", 90.);
float halffov = params.FindOneFloat("halffov", -1.f);
if (halffov > 0.f)
// hack for structure synth, which exports half of the full fov
fov = 2.f * halffov;
return new PerspectiveCamera(cam2world, screen, shutteropen,
shutterclose, lensradius, focaldistance, fov, film);
}
| AnisB/pbrt | src/cameras/perspective.cpp | C++ | bsd-2-clause | 6,835 |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.proxetta.data;
import jodd.util.StringPool;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
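/**
 * Test-fixture annotation with action-mapping style attributes
 * (value, extension, alias, method) used by the Proxetta test data classes.
 */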
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD})
public @interface Action {
String NONE = StringPool.HASH;
String value() default "";
String extension() default "";
String alias() default "";
String method() default "";
} | wjw465150/jodd | jodd-proxetta/src/test/java/jodd/proxetta/data/Action.java | Java | bsd-2-clause | 1,905 |
/*
* Copyright (c) 2014, STMicroelectronics International N.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdlib.h>
#include <string.h>
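/*
 * Duplicate at most n characters of s into a newly malloc()ed,
 * NUL-terminated string; returns NULL if the allocation fails.
 */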
char *strndup(const char *s, size_t n)
{
size_t l = strnlen(s, n) + 1;
char *p = malloc(l);
if (p) {
memcpy(p, s, l - 1);
p[l - 1] = '\0';
}
return p;
}
| cedric-chaumont-st-dev/optee_os | lib/libutils/isoc/strndup.c | C | bsd-2-clause | 1,595 |
/*Daala video codec
Copyright (c) 2002-2013 Daala project contributors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS”
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "../src/odintrin.h"
#include "vidinput.h"
#include "../src/filter.h"
#include "../src/dct.h"
#include "../src/pvq.h"
#if defined(_WIN32)
#include <io.h>
#include <fcntl.h>
#endif
#include "getopt.h"
#include "../src/block_size.h"
#include "../src/block_size_enc.h"
#include <math.h>
static void usage(char **_argv){
fprintf(stderr,"Usage: %s [options] <input> <output>\n"
" <reference> and <input> may be either YUV4MPEG or Ogg Theora files.\n\n"
" Options:\n"
" --intra Intraframes only.\n"
" --limit N Only read N frames from input.\n"
" --ext N Encode the final frame N more times.\n"
" --fps N Override output fps.\n"
" --ref N Reference stream.\n"
" --pvqk N PVQ K parameter.\n",_argv[0]);
}
static const char *CHROMA_TAGS[4]={" C420jpeg",""," C422jpeg"," C444"};
/* Warning, this will fail for images larger than 2024 x 2024 */
#define MAX_VAR_BLOCKS 1024
#define SQUARE(x) ((int)(x)*(int)(x))
/* Actual 2D coding gains of lapped transforms (the 32x32 one is made-up). We divide by 6 to get bits. */
#define CG4 (15.943/6)
#define CG8 (16.7836/6)
#define CG16 (16.9986/6)
#define CG32 (17.1/6)
#define OFF8 (1)
#define OFF16 (2)
#define OFF16_8 (1)
#define OFF32_8 (1)
#define COUNT16_8 (3+2*OFF16_8)
#define COUNT32_8 (7+2*OFF32_8)
#define PSY_LAMBDA .65
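/* Decide, per block, between 4x4, 8x8, 16x16 and 32x32 lapped transforms for
   the luma plane (the hand-rolled variance/psychovisual analysis below is
   compiled out; the active decision comes from od_split_superblock()) and
   overlay the resulting block boundaries on the image by zeroing pixels
   along the block edges. */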
int switch_decision(unsigned char *img, int w, int h, int stride, int ow, int oh)
{
int i,j;
int h8,w8,h32,w32;
static unsigned char dec8[MAX_VAR_BLOCKS>>2][MAX_VAR_BLOCKS>>2];
#if 0
int h4,w4,h16,w16;
static int Sx[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static int Sxx[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static int Sx4[MAX_VAR_BLOCKS>>1][MAX_VAR_BLOCKS>>1];
static int Sxx4[MAX_VAR_BLOCKS>>1][MAX_VAR_BLOCKS>>1];
static int var[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static int var_1[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static int var8[MAX_VAR_BLOCKS>>1][MAX_VAR_BLOCKS>>1];
static int var8_1[MAX_VAR_BLOCKS>>1][MAX_VAR_BLOCKS>>1];
static float dummy[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static float dummy8[MAX_VAR_BLOCKS][MAX_VAR_BLOCKS];
static float nmr4[MAX_VAR_BLOCKS>>1][MAX_VAR_BLOCKS>>1];
static float nmr8[MAX_VAR_BLOCKS>>2][MAX_VAR_BLOCKS>>2];
static float cg8[MAX_VAR_BLOCKS>>2][MAX_VAR_BLOCKS>>2];
static float nmr16[MAX_VAR_BLOCKS>>3][MAX_VAR_BLOCKS>>3];
static float cg16[MAX_VAR_BLOCKS>>3][MAX_VAR_BLOCKS>>3];
static float nmr32[MAX_VAR_BLOCKS>>4][MAX_VAR_BLOCKS>>4];
static float cg32[MAX_VAR_BLOCKS>>4][MAX_VAR_BLOCKS>>4];
const unsigned char *x;
#endif
(void)ow;
(void)oh;
w>>=1;
h>>=1;
w8 = w>>2;
h8 = h>>2;
w32 = w>>4;
h32 = h>>4;
#if 0
w4 = w>>1;
h4 = h>>1;
w16 = w>>3;
h16 = h>>3;
x = img;
for(i=0;i<h;i++){
for(j=0;j<w;j++){
Sx[i][j]=x[2*j]+x[2*j+1]+x[stride+2*j]+x[stride+2*j+1];
Sxx[i][j]=SQUARE(x[2*j])+SQUARE(x[2*j+1])+SQUARE(x[stride+2*j])+SQUARE(x[stride+2*j+1]);
}
x+=2*stride;
}
for(i=0;i<h4;i++){
for(j=0;j<w4;j++){
Sxx4[i][j] = Sxx[2*i][2*j] + Sxx[2*i][2*j+1] + Sxx[2*i+1][2*j] + Sxx[2*i+1][2*j+1];
Sx4[i][j] = Sx [2*i][2*j] + Sx [2*i][2*j+1] + Sx [2*i+1][2*j] + Sx [2*i+1][2*j+1];
}
}
for(i=0;i<h-1;i++){
for(j=0;j<w-1;j++){
int sum_x;
int sum_xx;
int var_floor;
sum_x=Sx[i][j]+Sx[i][j+1]+Sx[i+1][j]+Sx[i+1][j+1];
sum_xx=Sxx[i][j]+Sxx[i][j+1]+Sxx[i+1][j]+Sxx[i+1][j+1];
var[i][j]=(sum_xx-(SQUARE(sum_x)>>4))>>5;
var_floor = 4+(sum_x>>8);
if (var[i][j]<var_floor)var[i][j]=var_floor;
/*printf("%d ", var[i][j]);*/
var_1[i][j] = 16384/var[i][j];
}
/*printf("\n");*/
}
for(i=0;i<h4-1;i++){
for(j=0;j<w4-1;j++){
int sum_x;
int sum_xx;
int var_floor;
sum_x =Sx4 [i][j]+Sx4 [i][j+1]+Sx4 [i+1][j]+Sx4 [i+1][j+1];
sum_xx=Sxx4[i][j]+Sxx4[i][j+1]+Sxx4[i+1][j]+Sxx4[i+1][j+1];
var8[i][j]=(sum_xx-(SQUARE(sum_x)>>6))>>5;
var_floor = 4+(sum_x>>8);
if (var8[i][j]<var_floor)var8[i][j]=var_floor;
/*printf("%d ", var8[i][j]);*/
var8_1[i][j] = 16384/var8[i][j];
}
/*printf("\n");*/
}
for(i=1;i<h4-1;i++){
for(j=1;j<w4-1;j++){
int k,m;
int sum_var=0;
int sum_var_1=0;
float psy=0;
float noise;
for(k=0;k<3;k++){
for(m=0;m<3;m++){
sum_var+=var[2*i-1+k][2*j-1+m];
}
}
noise = sum_var/(3*3);
for(k=0;k<3;k++){
for(m=0;m<3;m++){
psy += OD_LOG2(1+noise*var_1[2*i-1+k][2*j-1+m]/16384.);
}
}
psy /= (3*3);
psy -= 1;
nmr4[i][j] = psy;
/*printf("%f ", nmr4[i][j]);*/
}
/*printf("\n");*/
}
for(i=1;i<h8-1;i++){
for(j=1;j<w8-1;j++){
int k,m;
int sum_var=0;
int sum_var_1=0;
float nmr4_avg;
float cgl, cgs;
float noise;
float psy;
for(k=0;k<COUNT8;k++){
for(m=0;m<COUNT8;m++){
sum_var +=var[4*i-OFF8+k][4*j-OFF8+m];
}
}
noise = sum_var/(COUNT8*COUNT8);
psy=0;
for(k=0;k<COUNT8;k++){
for(m=0;m<COUNT8;m++){
psy += OD_LOG2(1+noise*var_1[4*i-OFF8+k][4*j-OFF8+m]/16384.);
}
}
psy /= (COUNT8*COUNT8);
psy -= 1;
nmr8[i][j] = psy;
nmr4_avg = .25f*(nmr4[2*i][2*j]+nmr4[2*i][2*j+1]+nmr4[2*i+1][2*j]+nmr4[2*i+1][2*j+1]);
cgs = CG4 - PSY_LAMBDA*(nmr4_avg);
cgl = CG8 - PSY_LAMBDA*(nmr8[i][j]);
if (cgl>=cgs)
{
dec8[i][j] = 1;
cg8[i][j] = CG8;
} else {
nmr8[i][j] = nmr4_avg;
dec8[i][j] = 0;
cg8[i][j] = CG4;
}
/*printf("%d ", dec8[i][j]);*/
}
/*printf("\n");*/
}
for(i=1;i<h16-1;i++){
for(j=1;j<w16-1;j++){
int k,m;
int sum_var=0;
int sum_var8=0;
int sum_var_1=0;
float nmr8_avg;
float cgl,cgs;
float noise;
float noise8;
float psy;
float psy8;
for(k=0;k<COUNT16;k++){
for(m=0;m<COUNT16;m++){
sum_var+=var[8*i-OFF16+k][8*j-OFF16+m];
}
}
noise = sum_var/(float)(COUNT16*COUNT16);
for(k=0;k<COUNT16_8;k++){
for(m=0;m<COUNT16_8;m++){
sum_var8+=var8[4*i-OFF16_8+k][4*j-OFF16_8+m];
}
}
noise8 = sum_var8/(float)(COUNT16_8*COUNT16_8);
psy=0;
for(k=0;k<COUNT16;k++){
for(m=0;m<COUNT16;m++){
psy += OD_LOG2(1+noise*var_1[8*i-OFF16+k][8*j-OFF16+m]/16384.);
}
}
psy /= (COUNT16*COUNT16);
psy -= 1;
psy8=0;
for(k=0;k<COUNT16_8;k++){
for(m=0;m<COUNT16_8;m++){
psy8 += OD_LOG2(1+noise8*var8_1[4*i-OFF16_8+k][4*j-OFF16_8+m]/16384.);
}
}
psy8 /= (COUNT16_8*COUNT16_8);
psy8 -= 1;
psy = OD_MAXF(psy, .25*psy8);
/*psy = .5*(psy+psy8);*/
nmr16[i][j] = psy;
nmr8_avg = .25f*(nmr8[2*i][2*j]+nmr8[2*i][2*j+1]+nmr8[2*i+1][2*j]+nmr8[2*i+1][2*j+1]);
cg16[i][j] = .25*(cg8[2*i][2*j] + cg8[2*i][2*j+1] + cg8[2*i+1][2*j] + cg8[2*i+1][2*j+1]);
cgs = cg16[i][j] - PSY_LAMBDA*(nmr8_avg);
cgl = CG16 - PSY_LAMBDA*(nmr16[i][j]);
/*printf("%f ", psy);*/
if (cgl>=cgs)
{
dec8[2*i][2*j] = 2;
dec8[2*i][2*j+1] = 2;
dec8[2*i+1][2*j] = 2;
dec8[2*i+1][2*j+1] = 2;
cg16[i][j] = CG16;
} else {
nmr16[i][j] = nmr8_avg;
}
}
/*printf("\n");*/
}
#if 1
for(i=1;i<h32-1;i++){
for(j=1;j<w32-1;j++){
int k,m;
int sum_var=0;
int sum_var_1=0;
int sum_var8=0;
float nmr16_avg;
float cgl,cgs;
float noise, psy;
float noise8, psy8;
for(k=0;k<COUNT32;k++){
for(m=0;m<COUNT32;m++){
sum_var +=var[16*i-OFF32+k][16*j-OFF32+m];
}
}
noise = sum_var/(float)(COUNT32*COUNT32);
for(k=0;k<COUNT32_8;k++){
for(m=0;m<COUNT32_8;m++){
sum_var8+=var8[8*i-OFF32_8+k][8*j-OFF32_8+m];
}
}
noise8 = sum_var8/(float)(COUNT32_8*COUNT32_8);
psy=0;
for(k=0;k<COUNT32;k++){
for(m=0;m<COUNT32;m++){
psy += OD_LOG2(1.+noise*var_1[16*i-OFF32+k][16*j-OFF32+m]/16384.);
}
}
psy /= (COUNT32*COUNT32);
psy -= 1;
psy8=0;
for(k=0;k<COUNT32_8;k++){
for(m=0;m<COUNT32_8;m++){
psy8 += OD_LOG2(1+noise8*var8_1[8*i-OFF32_8+k][8*j-OFF32_8+m]/16384.);
}
}
psy8 /= (COUNT32_8*COUNT32_8);
psy8 -= 1;
psy = OD_MAXF(psy, .25*psy8);
/*psy = .5*(psy+psy8);*/
/*psy += psy8;*/
nmr32[i][j] = psy;
nmr16_avg = .25f*(nmr16[2*i][2*j]+nmr16[2*i][2*j+1]+nmr16[2*i+1][2*j]+nmr16[2*i+1][2*j+1]);
cg32[i][j] = .25*(cg16[2*i][2*j] + cg16[2*i][2*j+1] + cg16[2*i+1][2*j] + cg16[2*i+1][2*j+1]);
cgs = cg32[i][j] - PSY_LAMBDA*(nmr16_avg);
cgl = CG32 - PSY_LAMBDA*(nmr32[i][j]);
/*printf("%f ", psy8);*/
if (cgl>=cgs)
{
for(k=0;k<4;k++){
for(m=0;m<4;m++){
dec8[4*i+k][4*j+m]=3;
}
}
cg32[i][j] = CG32;
} else {
nmr32[i][j] = nmr16_avg;
}
}
/*printf("\n");*/
}
#endif
#endif
/* Replace decision with the one from `od_split_superblock` */
if (1)
{
od_block_size_comp bs;
for(i=1;i<h32-1;i++){
for(j=1;j<w32-1;j++){
int k,m;
int dec[4][4];
od_split_superblock(&bs, img+32*stride*i+32*j, stride, NULL, 0,
dec, 21 << OD_COEFF_SHIFT);
for(k=0;k<4;k++)
for(m=0;m<4;m++)
dec8[4*i+k][4*j+m]=dec[k][m];
#if 0
for(k=0;k<16;k++)
{
for(m=0;m<16;m++)
{
var[16*i+k][16*j+m]=bs.img_stats.Var4[k+3][m+3];
var_1[16*i+k][16*j+m]=bs.img_stats.invVar4[k+3][m+3];
}
}
for(k=0;k<8;k++)
{
for(m=0;m<8;m++)
{
dummy[8*i+k][8*j+m]=bs.psy4[k][m];
}
}
for(k=0;k<4;k++)
{
for(m=0;m<4;m++)
{
dummy8[4*i+k][4*j+m]=bs.psy8[k][m];
}
}
for(k=0;k<8;k++)
{
for(m=0;m<8;m++)
{
var8[8*i+k][8*j+m]=bs.img_stats.Var8[k+2][m+2];
var8_1[8*i+k][8*j+m]=bs.img_stats.invVar8[k+2][m+2];
}
}
#endif
}
}
}
#if 0
for(i=0;i<h;i++){
for(j=0;j<w;j++){
printf("%d ", var[i][j]);
}
printf("\n");
}
#endif
#if 0
for(i=8;i<h4-8;i++){
for(j=8;j<w4-8;j++){
printf("%f ", dummy[i][j]);
}
printf("\n");
}
#endif
#if 0
for(i=4;i<h8-4;i++){
for(j=4;j<w8-4;j++){
printf("%f ", dummy8[i][j]);
}
printf("\n");
}
#endif
#if 0
for(i=4;i<h8-4;i++){
for(j=4;j<w8-4;j++){
printf("%d ", dec8[i][j]);
}
printf("\n");
}
#endif
#if 0
fprintf(stderr, "size : %dx%d\n", (w<<1), (h<<1));
for(i=0;i<(h<<1);i++){
for(j=0;j<1296;j++){
putc(dec8[i>>3][j>>3], stdout);
}
}
#endif
#if 0
{
/*Raw mode data with offsets to match the 4x8 training tool's padding.*/
int wn=(ow-16)>>2;
int hn=(oh-16)>>2;
fprintf(stderr, "size : %dx%d\n", wn, hn);
for(i=0;i<hn;i++){
for(j=0;j<wn;j++){
int posi=(i>>1)+1;
int posj=(j>>1)+1;
if(posi>=4 && posi<(h8-4) &&posj>=4 && posj<(w8-4))putc(dec8[posi][posj], stdout);
else putc(0, stdout);
}
}
}
#endif
#if 1
for(i=4;i<h8-4;i++){
for(j=4;j<w8-4;j++){
if ((i&3)==0 && (j&3)==0){
int k;
for(k=0;k<32;k++)
img[i*stride*8+j*8+k] = 0;
for(k=0;k<32;k++)
img[(8*i+k)*stride+j*8] = 0;
}
if ((i&1)==0 && (j&1)==0 && dec8[i][j]==2){
int k;
for(k=0;k<16;k++)
img[i*stride*8+j*8+k] = 0;
for(k=0;k<16;k++)
img[(8*i+k)*stride+j*8] = 0;
}
if (dec8[i][j]<=1){
int k;
for(k=0;k<8;k++)
img[i*stride*8+j*8+k] = 0;
for(k=0;k<8;k++)
img[(8*i+k)*stride+j*8] = 0;
if (dec8[i][j]==0){
img[(8*i+4)*stride+j*8+3] = 0;
img[(8*i+4)*stride+j*8+4] = 0;
img[(8*i+4)*stride+j*8+5] = 0;
img[(8*i+3)*stride+j*8+4] = 0;
img[(8*i+5)*stride+j*8+4] = 0;
}
}
}
}
for (i=32;i<(w32-1)*32;i++)
img[(h32-1)*32*stride+i]=0;
for (i=32;i<(h32-1)*32;i++)
img[i*stride+(w32-1)*32]=0;
#endif
#if 0 /* 32x32 decision data */
for(i=2;i<h32-2;i++){
for(j=2;j<w32-2;j++){
int i8, j8;
int k;
i8 = 4*i-1;
j8 = 4*j-1;
for(k=0;k<5;k++)
printf("%d ", dec8[i8+k][j8]);
for(k=1;k<5;k++)
printf("%d ", dec8[i8][j8+k]);
printf("%d\n", dec8[i8+1][j8+1]);
}
}
#endif
#if 0 /* 16x16 decision data */
for(i=4;i<h16-4;i++){
for(j=4;j<w16-4;j++){
int i8, j8;
int k;
i8 = 2*i-1;
j8 = 2*j-1;
if (dec8[i8+1][j8+1]==3)
continue;
if (1)
{
int sum=0;
/*for(k=0;k<3;k++)
printf("%d ", dec8[i8+k][j8]);
for(k=1;k<3;k++)
printf("%d ", dec8[i8][j8+k]);*/
for(k=0;k<3;k++)
sum += dec8[i8+k][j8];
for(k=1;k<3;k++)
sum += dec8[i8][j8+k];
printf("%d ", sum);
} else {
int up, left;
up = (dec8[i8][j8+1]>=2) ? 2+dec8[i8][j8+1] : dec8[i8][j8+1]*2+dec8[i8][j8+2];
left = (dec8[i8+1][j8]>=2) ? 2+dec8[i8+1][j8] : dec8[i8+1][j8]*2+dec8[i8+2][j8];
printf("%d ", dec8[i8][j8]*36+up*6+left);
}
if (dec8[i8+1][j8+1]==2)
printf("16\n");
else
printf("%d\n", 8*dec8[i8+1][j8+1]+4*dec8[i8+1][j8+2]+2*dec8[i8+2][j8+1]+dec8[i8+2][j8+2]);
}
printf("\n");
}
#endif
#if 0 /* 8x8 decision data */
for(i=8;i<h8-8;i++){
for(j=8;j<w8-8;j++){
if (dec8[i][j]<=1)
{
printf("%d %d %d %d\n", dec8[i-1][j-1], dec8[i-1][j], dec8[i][j-1], dec8[i][j]);
}
}
}
#endif
return 0;
}
/*Applies vert then horiz prefilters of size _n.*/
void prefilter_image(od_coeff *_img, int _w, int _h, int _n){
int x,y,j;
/*Pre-filter*/
switch(_n){
case 4:
for(y=0;y<_h;y++){
for(x=2;x<_w-2;x+=4){
od_pre_filter4(&_img[y*_w+x],&_img[y*_w+x]);
}
}
for(y=2;y<_h-2;y+=4){
for(x=0;x<_w;x++){
od_coeff tmp[4];
for(j=0;j<4;j++)tmp[j]=_img[(y+j)*_w+x];
od_pre_filter4(tmp,tmp);
for(j=0;j<4;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
break;
case 8:
for(y=0;y<_h;y++){
for(x=4;x<_w-4;x+=8){
od_pre_filter8(&_img[y*_w+x],&_img[y*_w+x]);
}
}
for(y=4;y<_h-4;y+=8){
for(x=0;x<_w;x++){
od_coeff tmp[16];
for(j=0;j<8;j++)tmp[j]=_img[(y+j)*_w+x];
od_pre_filter8(tmp,tmp);
for(j=0;j<8;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
break;
case 16:
for(y=0;y<_h;y++){
for(x=8;x<_w-8;x+=16){
od_pre_filter16(&_img[y*_w+x],&_img[y*_w+x]);
}
}
for(y=8;y<_h-8;y+=16){
for(x=0;x<_w;x++){
od_coeff tmp[16];
for(j=0;j<16;j++)tmp[j]=_img[(y+j)*_w+x];
od_pre_filter16(tmp,tmp);
for(j=0;j<16;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
break;
default:
break;
}
}
/*Applies horiz then vert postfilters of size _n.*/
void postfilter_image(od_coeff *_img, int _w, int _h, int _n){
int x,y,j;
/*Pre-filter*/
switch(_n){
case 4:
for(y=2;y<_h-2;y+=4){
for(x=0;x<_w;x++){
od_coeff tmp[4];
for(j=0;j<4;j++)tmp[j]=_img[(y+j)*_w+x];
od_post_filter4(tmp,tmp);
for(j=0;j<4;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
for(y=0;y<_h;y++){
for(x=2;x<_w-2;x+=4){
od_post_filter4(&_img[y*_w+x],&_img[y*_w+x]);
}
}
break;
case 8:
for(y=4;y<_h-4;y+=8){
for(x=0;x<_w;x++){
od_coeff tmp[16];
for(j=0;j<8;j++)tmp[j]=_img[(y+j)*_w+x];
od_post_filter8(tmp,tmp);
for(j=0;j<8;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
for(y=0;y<_h;y++){
for(x=4;x<_w-4;x+=8){
od_post_filter8(&_img[y*_w+x],&_img[y*_w+x]);
}
}
break;
case 16:
for(y=8;y<_h-8;y+=16){
for(x=0;x<_w;x++){
od_coeff tmp[16];
for(j=0;j<16;j++)tmp[j]=_img[(y+j)*_w+x];
od_post_filter16(tmp,tmp);
for(j=0;j<16;j++)_img[(y+j)*_w+x]=tmp[j];
}
}
for(y=0;y<_h;y++){
for(x=8;x<_w-8;x+=16){
od_post_filter16(&_img[y*_w+x],&_img[y*_w+x]);
}
}
break;
default:
break;
}
}
/*static const int fzig16[256] = {0,16,1,2,17,32,48,33,18,3,4,19,34,49,64,80,65,50,35,20,5,6,21,36,51,66,81,96,112,97,82,67,52,37,22,7,8,23,38,53,68,83,98,113,128,144,129,114,99,84,69,54,39,24,9,10,25,40,55,70,85,100,115,130,145,160,176,161,146,131,116,101,86,71,56,41,26,11,12,27,42,57,72,87,102,117,132,147,162,177,192,208,193,178,163,148,133,118,103,88,73,58,43,28,13,14,29,44,59,74,89,104,119,134,149,164,179,194,209,224,240,225,210,195,180,165,150,135,120,105,90,75,60,45,30,15,31,46,61,76,91,106,121,136,151,166,181,196,211,226,241,242,227,212,197,182,167,152,137,122,107,92,77,62,47,63,78,93,108,123,138,153,168,183,198,213,228,243,244,229,214,199,184,169,154,139,124,109,94,79,95,110,125,140,155,170,185,200,215,230,245,246,231,216,201,186,171,156,141,126,111,127,142,157,172,187,202,217,232,247,248,233,218,203,188,173,158,143,159,174,189,204,219,234,249,250,235,220,205,190,175,191,206,221,236,251,252,237,222,207,223,238,253,254,239,255};
static const int izig16[256] = {0,2,3,9,10,20,21,35,36,54,55,77,78,104,105,135,1,4,8,11,19,22,34,37,53,56,76,79,103,106,134,136,5,7,12,18,23,33,38,52,57,75,80,102,107,133,137,164,6,13,17,24,32,39,51,58,74,81,101,108,132,138,163,165,14,16,25,31,40,50,59,73,82,100,109,131,139,162,166,189,15,26,30,41,49,60,72,83,99,110,130,140,161,167,188,190,27,29,42,48,61,71,84,98,111,129,141,160,168,187,191,210,28,43,47,62,70,85,97,112,128,142,159,169,186,192,209,211,44,46,63,69,86,96,113,127,143,158,170,185,193,208,212,227,45,64,68,87,95,114,126,144,157,171,184,194,207,213,226,228,65,67,88,94,115,125,145,156,172,183,195,206,214,225,229,240,66,89,93,116,124,146,155,173,182,196,205,215,224,230,239,241,90,92,117,123,147,154,174,181,197,204,216,223,231,238,242,249,91,118,122,148,153,175,180,198,203,217,222,232,237,243,248,250,119,121,149,152,176,179,199,202,218,221,233,236,244,247,251,254,120,150,151,177,178,200,201,219,220,234,235,245,246,252,253,255};
*/
#define ROUNDUP_32(x) (((x)+31)&~31)
#define MAXB 16
#define SQUARE(x) ((int)(x)*(int)(x))
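/* Integer base-2 log: returns the number of bits needed to represent _v
   (0 for _v==0), using a branchless de Bruijn sequence lookup. */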
int oc_ilog32(unsigned _v){
int ret;
static const unsigned char OC_DEBRUIJN_IDX32[32]={
0, 1,28, 2,29,14,24, 3,30,22,20,15,25,17, 4, 8,
31,27,13,23,21,19,16, 7,26,12,18, 6,11, 5,10, 9};
_v|=_v>>1;
_v|=_v>>2;
_v|=_v>>4;
_v|=_v>>8;
_v|=_v>>16;
ret=_v&1;
_v=(_v>>1)+1;
ret+=OC_DEBRUIJN_IDX32[_v*0x77CB531U>>27&0x1F];
return ret;
}
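/* Scalar-quantize _x with step size 15*Q and a bias that pulls values toward
   zero, store the integer codewords in y, then rescale the reconstruction so
   its energy matches that of the unquantized input. */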
void quant_scalar_gain(int32_t *_x,int16_t *_scale,int *y,int N,int Q){
float gain0, gain1;
float Q_1;
int i;
(void)_scale;
Q*=15;
Q_1 = 1.f/Q;
gain0=0;
gain1=0;
for (i=0;i<N;i++)
{
int qx;
float s = _x[i]*Q_1;
float bias = s>0?-.49:.49;
gain0 += s*s;
qx = (int)floor(.5+s+bias);
y[i] = qx;
gain1 += qx*qx;
_x[i] = Q*qx;
}
gain0 = sqrt(gain0/(1e-15+gain1));
for (i=0;i<N;i++)
_x[i] *= gain0;
}
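/* Round-trip one plane through the 16-point lapped transform: prefilter, take
   the 16x16 forward DCT of each block, round the coefficients (the scalar/PVQ
   experiments are compiled out), then inverse transform and postfilter.
   _refi is an optional reference buffer that is allocated locally when NULL. */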
static void process_plane(od_coeff *_img, od_coeff *_refi, int _w, int _h, int _pli, int _pvq_k){
int x;
int y;
int j;
int i;
int free_ref;
static int count=0;
(void)_pvq_k;
_w = ROUNDUP_32(_w);
_h = ROUNDUP_32(_h);
if(!_refi){
_refi = (od_coeff *)calloc(ROUNDUP_32(_w)*ROUNDUP_32(_h),sizeof(*_refi));
free_ref=1;
}else free_ref=0;
prefilter_image(_img,_w,_h,16);
/*for (i=0;i<1000000;i++){
int tmp[16];
for(j=0;j<16;j++)
tmp[j] = rand()%255-127;
od_bin_idct16(tmp, tmp);
for(j=0;j<16;j++)printf("%d ", tmp[j]);
printf("\n");
}
exit(0);*/
/*Block processing.*/
for(y=0;y<_h;y+=16){
for(x=0;x<_w;x+=16){
od_coeff coeffs[256];
int32_t zi[256];
/* int out[256];*/
/* int16_t scale[256];*/
/* for(j=0;j<256;j++)scale[j]=1;*/
od_bin_fdct16x16(coeffs,16,&_img[(y)*_w+x],_w);
for(i=0;i<16;i++){
for(j=0;j<16;j++){
zi[16*i+j]=floor(.5+coeffs[16*i+j]);
}
}
if (_pli==-1){
#if 0
int foo[256];
int32_t x[256];
/*quant_scalar_gain(&zi[1],NULL,foo,255,200);*/
extract(&zi[4], x, 2, 4, 16);
quant_scalar_gain(x,NULL,foo,8,200);
interleave(x, &zi[4], 2, 4, 16);
extract(&zi[64], x, 4, 2, 16);
quant_scalar_gain(x,NULL,foo,8,200);
interleave(x, &zi[64], 4, 2, 16);
extract(&zi[64+2], x, 4, 6, 16);
extract(&zi[32+4], x+24, 2, 4, 16);
quant_scalar_gain(x,NULL,foo,32,600);
interleave(x, &zi[64+2], 4, 6, 16);
interleave(x+24, &zi[32+4], 2, 4, 16);
#endif
#if 0
extract(&zi[8], x, 4, 8, 16);
/*quant_pvq(x, r, scale, out, 32, 1200./f, &qg);*/
quant_scalar_gain(x,NULL,foo,32,600);
interleave(x, &zi[8], 4, 8, 16);
extract(&zi[128], x, 8, 4, 16);
/*quant_pvq(x, r, scale, out, 32, 1200./f, &qg);*/
quant_scalar_gain(x,NULL,foo,32,600);
interleave(x, &zi[128], 8, 4, 16);
extract(&zi[128+4], x, 8, 12, 16);
extract(&zi[64+8], x+96, 4, 8, 16);
/*quant_pvq(x, r, scale, out, 128, 1200./f, &qg);*/
quant_scalar_gain(x,NULL,foo,128,1200);
interleave(x, &zi[128+4], 8, 12, 16);
interleave(x+96, &zi[64+8], 4, 8, 16);
#endif
}
/*for(j=0;j<256;j++)coeffs[j]=zi[j];*/
for(i=0;i<16;i++){
for(j=0;j<16;j++){
coeffs[16*i+j]=floor(.5+zi[16*i+j]);
}
}
od_bin_idct16x16(&_img[(y)*_w+x],_w,coeffs,16);
}
/*printf("\n");*/
}
postfilter_image(_img,_w,_h,16);
if(free_ref)free(_refi);
count++;
}
int main(int _argc,char **_argv){
const char *optstring = "hv?";
const struct option long_options[]={
{"ref",required_argument,NULL,0},
{"limit",required_argument,NULL,0},
{"fps",required_argument,NULL,0},
{"ext",required_argument,NULL,0},
{"pvqk",required_argument,NULL,0},
{"intra",no_argument,NULL,0},
{NULL,0,NULL,0}
};
FILE *fin;
FILE *fout;
video_input vid1;
video_input_info info1;
video_input vid2;
video_input_info info2;
int frameno;
int pli;
unsigned char *outline;
od_coeff *refi[3];
od_coeff *iimg[3];
int xdec[3];
int ydec[3];
int w[3];
int h[3];
int pvq_k;
int fps;
int extend;
int limit;
int intra;
char refname[1024];
int ref_in;
int long_option_index;
int c;
pvq_k=32;
fps=-1;
ref_in=0;
limit=0;
extend=0;
intra=0;
while((c=getopt_long(_argc,_argv,optstring,long_options,&long_option_index))!=EOF){
switch(c){
case 0:
if(strcmp(long_options[long_option_index].name,"ref")==0){
ref_in=1;
strncpy(refname,optarg,1023);
} else if (strcmp(long_options[long_option_index].name,"pvqk")==0){
pvq_k=atoi(optarg);
} else if (strcmp(long_options[long_option_index].name,"limit")==0){
limit=atoi(optarg);
} else if (strcmp(long_options[long_option_index].name,"ext")==0){
extend=atoi(optarg);
} else if (strcmp(long_options[long_option_index].name,"intra")==0){
intra=1;
} else if (strcmp(long_options[long_option_index].name,"fps")==0){
fps=atoi(optarg);
}
break;
case 'v':
case '?':
case 'h':
default:{
usage(_argv);
exit(EXIT_FAILURE);
}break;
}
}
if(optind+2!=_argc){
usage(_argv);
exit(EXIT_FAILURE);
}
fin=strcmp(_argv[optind],"-")==0?stdin:fopen(_argv[optind],"rb");
if(fin==NULL){
fprintf(stderr,"Unable to open '%s' for extraction.\n",_argv[optind]);
exit(EXIT_FAILURE);
}
fprintf(stderr,"Opening %s as input%s...\n",_argv[optind],ref_in?"":" and reference");
if(video_input_open(&vid1,fin)<0)exit(EXIT_FAILURE);
video_input_get_info(&vid1,&info1);
if(ref_in){
fin=fopen(refname,"rb");
if(fin==NULL){
fprintf(stderr,"Unable to open '%s' for extraction.\n",refname);
exit(EXIT_FAILURE);
}
fprintf(stderr,"Opening %s as reference...\n",refname);
if(video_input_open(&vid2,fin)<0)exit(EXIT_FAILURE);
video_input_get_info(&vid2,&info2);
/*Check to make sure these videos are compatible.*/
if(info1.pic_w!=info2.pic_w||info1.pic_h!=info2.pic_h){
fprintf(stderr,"Video resolution does not match.\n");
exit(EXIT_FAILURE);
}
if(info1.pixel_fmt!=info2.pixel_fmt){
fprintf(stderr,"Pixel formats do not match.\n");
exit(EXIT_FAILURE);
}
if((info1.pic_x&!(info1.pixel_fmt&1))!=(info2.pic_x&!(info2.pixel_fmt&1))||
(info1.pic_y&!(info1.pixel_fmt&2))!=(info2.pic_y&!(info2.pixel_fmt&2))){
fprintf(stderr,"Chroma subsampling offsets do not match.\n");
exit(EXIT_FAILURE);
}
if(info1.fps_n*(int64_t)info2.fps_d!=
info2.fps_n*(int64_t)info1.fps_d){
fprintf(stderr,"Warning: framerates do not match.\n");
}
if(info1.par_n*(int64_t)info2.par_d!=
info2.par_n*(int64_t)info1.par_d){
fprintf(stderr,"Warning: aspect ratios do not match.\n");
}
}
for(pli=0;pli<3;pli++){
/*Planes padded up to a multiple of 32px*/
xdec[pli]=pli&&!(info1.pixel_fmt&1);
ydec[pli]=pli&&!(info1.pixel_fmt&2);
h[pli]=ROUNDUP_32(info1.pic_h>>ydec[pli]);
w[pli]=ROUNDUP_32(info1.pic_w>>xdec[pli]);
refi[pli] = (od_coeff *)malloc(w[pli]*h[pli]*sizeof(*refi[pli]));
iimg[pli] = (od_coeff *)malloc(w[pli]*h[pli]*sizeof(*iimg[pli]));
}
outline = (unsigned char *)malloc(sizeof(*outline)*info1.pic_w);
fout=strcmp(_argv[optind+1],"-")==0?stdout:fopen(_argv[optind+1],"wb");
if(fout==NULL){
fprintf(stderr,"Error opening output file \"%s\".\n",_argv[optind+1]);
return 1;
}
fprintf(fout,"YUV4MPEG2 W%i H%i F%i:%i Ip A%i:%i%s\n",
info1.pic_w,info1.pic_h, fps > 0 ? (unsigned) fps : (unsigned) info1.fps_n,
fps > 0 ? 1U : (unsigned) info1.fps_d, info1.par_n, info1.par_d,
CHROMA_TAGS[ydec[1] ? xdec[1] ? 0 : 2 : 3]);
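/*Main loop: fetch frames, draw the block-size decision overlay on luma,
run the 16x16 lapped-transform round trip on every plane and emit Y4M.*/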
for(frameno=0;;frameno++){
video_input_ycbcr in;
video_input_ycbcr ref;
int ret1=0;
int ret2=0;
char tag1[5];
char tag2[5];
if(!limit||frameno<limit){
ret1=video_input_fetch_frame(&vid1,in,tag1);
if(ref_in)ret2=video_input_fetch_frame(&vid2,ref,tag2);
}
if(ret1==0){
if(extend<1)break;
extend--;
/*If we're extending, keep feeding back the output to the reference input.*/
for(pli=0;pli<3;pli++){
int x;
int y;
for(y=0;y<h[pli];y++){
for(x=0;x<w[pli];x++){
refi[pli][y*w[pli]+x]=iimg[pli][y*w[pli]+x];
}
}
}
}
if(ref_in&&ret1!=0&&ret2==0){
fprintf(stderr,"Warning: Reference ended before input!\n");
break;
}
for(pli=0;pli<3;pli++){
int x;
int y;
if (pli==0)
switch_decision(in[pli].data, w[pli], h[pli], in[pli].stride, info1.pic_w, info1.pic_h);
for(y=0;y<h[pli];y++){
for(x=0;x<w[pli];x++){
int cy=OD_MINI(y+(int)(info1.pic_y>>ydec[pli]),(int)info1.pic_h>>ydec[pli]);
int cx=OD_MINI(x+(int)(info1.pic_x>>xdec[pli]),(int)info1.pic_w>>xdec[pli]);
iimg[pli][y*w[pli]+x]=128*(in[pli].data[cy*in[pli].stride+cx]-128);
}
}
if(ref_in&&ret2!=0){
for(y=0;y<h[pli];y++){
for(x=0;x<w[pli];x++){
int cy=OD_MINI(y+(int)(info1.pic_y>>ydec[pli]),(int)info1.pic_h>>ydec[pli]);
int cx=OD_MINI(x+(int)(info1.pic_x>>xdec[pli]),(int)info1.pic_w>>xdec[pli]);
refi[pli][y*w[pli]+x]=128*(ref[pli].data[cy*in[pli].stride+cx]-128);
}
}
}
process_plane(iimg[pli],(ref_in||frameno>0)&&!intra?refi[pli]:NULL,info1.pic_w>>xdec[pli],info1.pic_h>>ydec[pli],pli,pvq_k);
if(!ref_in){
for(y=0;y<h[pli];y++){
for(x=0;x<w[pli];x++){
refi[pli][y*w[pli]+x]=iimg[pli][y*w[pli]+x];
}
}
}
}
fprintf(fout,"FRAME\n");
for(pli=0;pli<3;pli++){
int x;
int y;
for(y=0;y<(int)info1.pic_h>>ydec[pli];y++){
for(x=0;x<(int)info1.pic_w>>xdec[pli];x++)outline[x]=OD_CLAMP255((int)floor(.5+(1./128)*iimg[pli][y*w[pli]+x])+128);
if(fwrite(outline,
(info1.pic_w>>xdec[pli]),1,fout)<1){
fprintf(stderr,"Error writing to output.\n");
return EXIT_FAILURE;
}
}
}
fprintf(stderr, "Completed frame %d.\n",frameno);
}
video_input_close(&vid1);
if(ref_in)video_input_close(&vid2);
if(fout!=stdout)fclose(fout);
free(outline);
for(pli=0;pli<3;pli++)free(refi[pli]);
for(pli=0;pli<3;pli++)free(iimg[pli]);
return EXIT_SUCCESS;
}
| kustom666/daala | tools/block_size_analysis.c | C | bsd-2-clause | 30,664 |
// ==========================================================================
// SeqAn - The Library for Sequence Analysis
// ==========================================================================
// Copyright (c) 2006-2016, Knut Reinert, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
// Author: Rene Rahn <[email protected]>
// ==========================================================================
#ifndef INCLUDE_SEQAN_ALIGN_DP_ALIGN_SIMD_HELPER_H_
#define INCLUDE_SEQAN_ALIGN_DP_ALIGN_SIMD_HELPER_H_
namespace seqan
{
#if SEQAN_ALIGN_SIMD_PROFILE
struct AlignSimdProfile_
{
double preprTimer = 0.0;
double alignTimer = 0.0;
double traceTimer = 0.0;
void clear()
{
preprTimer = 0.0;
alignTimer = 0.0;
traceTimer = 0.0;
}
};
double timer = 0.0;
AlignSimdProfile_ profile;
#endif
// ============================================================================
// Forwards
// ============================================================================
template <unsigned LENGTH>
struct VectorLength_;
// ============================================================================
// Tags, Classes, Enums
// ============================================================================
template <typename TSimdVector_, typename TSeqH_, typename TSeqV_>
struct SimdAlignVariableLengthTraits
{
using TSimdVector = TSimdVector_;
using TSeqH = TSeqH_;
using TSeqV = TSeqV_;
};
// ============================================================================
// Metafunctions
// ============================================================================
// ============================================================================
// Functions
// ============================================================================
// ----------------------------------------------------------------------------
// Function _createSimdRepImpl()
// ----------------------------------------------------------------------------
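// The SEQAN_CREATE_SIMD_REP_IMPL_* macros expand to the SIZE arguments of a
// single fillVector() call, gathering character chrPos from each of the SIZE
// packed sequences so that one SIMD vector holds the same column across all
// sequences.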
#define SEQAN_CREATE_SIMD_REP_IMPL_2(data, strPos, chrPos) data[strPos + 1][chrPos], data[strPos][chrPos]
#define SEQAN_CREATE_SIMD_REP_IMPL_4(data, strPos, chrPos) SEQAN_CREATE_SIMD_REP_IMPL_2(data, strPos + 2, chrPos), SEQAN_CREATE_SIMD_REP_IMPL_2(data, strPos, chrPos)
#define SEQAN_CREATE_SIMD_REP_IMPL_8(data, strPos, chrPos) SEQAN_CREATE_SIMD_REP_IMPL_4(data, strPos + 4, chrPos), SEQAN_CREATE_SIMD_REP_IMPL_4(data, strPos, chrPos)
#define SEQAN_CREATE_SIMD_REP_IMPL_16(data, strPos, chrPos) SEQAN_CREATE_SIMD_REP_IMPL_8(data, strPos + 8, chrPos), SEQAN_CREATE_SIMD_REP_IMPL_8(data, strPos, chrPos)
#define SEQAN_CREATE_SIMD_REP_IMPL_32(data, strPos, chrPos) SEQAN_CREATE_SIMD_REP_IMPL_16(data, strPos + 16, chrPos), SEQAN_CREATE_SIMD_REP_IMPL_16(data, strPos, chrPos)
#define SEQAN_CREATE_SIMD_REP_FILL_IMPL_2(MACRO, data, chrPos) MACRO(data, 0, chrPos)
#define SEQAN_CREATE_SIMD_REP_FILL_IMPL(data, chrPos, SIZE) SEQAN_CREATE_SIMD_REP_FILL_IMPL_2(SEQAN_CREATE_SIMD_REP_IMPL_##SIZE, data, chrPos)
#define SEQAN_CREATE_SIMD_REP_IMPL(SIZE) \
template <typename TSimdVecs, typename TStrings> \
inline void _createSimdRepImpl(TSimdVecs & simdStr, \
TStrings const & strings, \
VectorLength_<SIZE> const & /*size*/) \
{ \
auto itB = begin(simdStr, Standard()); \
auto itE = end(simdStr, Standard()); \
for (auto it = itB; it != itE; ++it) \
fillVector(*it, SEQAN_CREATE_SIMD_REP_FILL_IMPL(strings, it - itB, SIZE)); \
}
SEQAN_CREATE_SIMD_REP_IMPL(2)
SEQAN_CREATE_SIMD_REP_IMPL(4)
SEQAN_CREATE_SIMD_REP_IMPL(8)
SEQAN_CREATE_SIMD_REP_IMPL(16)
SEQAN_CREATE_SIMD_REP_IMPL(32)
template <typename TSimdVecs, typename TStrings>
inline void _createSimdRepImpl(TSimdVecs & simdStr,
TStrings const & strings)
{
_createSimdRepImpl(simdStr, strings, VectorLength_<LENGTH<typename Value<TSimdVecs>::Type>::VALUE>());
}
// Actually precompute value if scoring scheme is score matrix and simd version.
template <typename TSeqValue,
typename TScoreValue, typename TScore>
inline SEQAN_FUNC_ENABLE_IF(And<Is<SimdVectorConcept<TSeqValue> >, IsScoreMatrix_<TScore> >, TSeqValue)
_precomputeScoreMatrixOffset(TSeqValue const & seqVal,
Score<TScoreValue, ScoreSimdWrapper<TScore> > const & /*score*/)
{
return createVector<TSeqValue>(TScore::VALUE_SIZE) * seqVal;
}
// ----------------------------------------------------------------------------
// Function _prepareAndRunSimdAlignment()
// ----------------------------------------------------------------------------
template <typename TResult,
typename TTraces,
typename TSequencesH,
typename TSequencesV,
typename TScore,
typename TAlgo, typename TBand, typename TFreeEndGaps, typename TTraceback,
typename TGapModel>
inline void
_prepareAndRunSimdAlignment(TResult & results,
TTraces & traces,
TSequencesH const & seqH,
TSequencesV const & seqV,
TScore const & scoringScheme,
AlignConfig2<TAlgo, TBand, TFreeEndGaps, TTraceback> const & alignConfig,
TGapModel const & /*gapModel*/,
SimdAlignEqualLength const & /*tag*/)
{
String<TResult, Alloc<OverAligned> > stringSimdH;
String<TResult, Alloc<OverAligned> > stringSimdV;
resize(stringSimdH, length(seqH[0]));
resize(stringSimdV, length(seqV[0]));
_createSimdRepImpl(stringSimdH, seqH);
_createSimdRepImpl(stringSimdV, seqV);
DPScoutState_<SimdAlignEqualLength> state;
results = _setUpAndRunAlignment(traces, state, stringSimdH, stringSimdV, scoringScheme, alignConfig, TGapModel());
}
template <typename TResult,
typename TTraces,
typename TSequencesH,
typename TSequencesV,
typename TScore,
typename TAlgo, typename TBand, typename TFreeEndGaps, typename TTraceback,
typename TGapModel,
typename TTraits>
inline void
_prepareAndRunSimdAlignment(TResult & results,
TTraces & traces,
TSequencesH const & seqH,
TSequencesV const & seqV,
TScore const & scoringScheme,
AlignConfig2<TAlgo, TBand, TFreeEndGaps, TTraceback> const & alignConfig,
TGapModel const & /*gapModel*/,
SimdAlignVariableLength<TTraits> const /*tag*/)
{
SEQAN_ASSERT_EQ(length(seqH), length(seqV));
SEQAN_ASSERT_EQ(static_cast<decltype(length(seqH))>(LENGTH<TResult>::VALUE), length(seqH));
using TPadStringH = ModifiedString<typename Value<TSequencesH const>::Type, ModPadding>;
using TPadStringV = ModifiedString<typename Value<TSequencesV const>::Type, ModPadding>;
String<TResult, Alloc<OverAligned> > stringSimdH;
String<TResult, Alloc<OverAligned> > stringSimdV;
DPScoutState_<SimdAlignVariableLength<SimdAlignVariableLengthTraits<TResult, TSequencesH, TSequencesV> > > state;
String<size_t> lengthsH;
String<size_t> lengthsV;
resize(lengthsH, length(seqH));
resize(lengthsV, length(seqV));
resize(state.endsH, length(seqH));
resize(state.endsV, length(seqV));
for (unsigned i = 0; i < length(seqH); ++i)
{
lengthsH[i] = length(seqH[i]) - 1;
lengthsV[i] = length(seqV[i]) - 1;
state.endsH[i] = i;
state.endsV[i] = i;
}
setHost(state.sortedEndsH, lengthsH);
setHost(state.sortedEndsV, lengthsV);
setCargo(state.sortedEndsH, state.endsH);
setCargo(state.sortedEndsV, state.endsV);
auto maxLengthLambda = [](auto& lengthLhs, auto& lengthRhs) { return lengthLhs < lengthRhs; };
sort(state.sortedEndsH, maxLengthLambda, Serial());
sort(state.sortedEndsV, maxLengthLambda, Serial());
size_t maxH = back(state.sortedEndsH) + 1;
size_t maxV = back(state.sortedEndsV) + 1;
// and we have to prepare the bit masks of the DPScoutState
resize(state.masks, maxV, createVector<TResult>(0));
resize(state.masksV, maxV, createVector<TResult>(0));
resize(state.masksH, maxH, createVector<TResult>(0));
// Create Stringset with padded strings.
StringSet<TPadStringH> paddedH;
StringSet<TPadStringV> paddedV;
resize(paddedH, length(seqH));
resize(paddedV, length(seqV));
for(unsigned i = 0; i < length(seqH); ++i)
{
setHost(paddedH[i], seqH[i]);
setHost(paddedV[i], seqV[i]);
expand(paddedH[i], maxH);
expand(paddedV[i], maxV);
// mark the original end position of the alignment in the masks (with -1, all bits set)
assignValue(state.masksH[lengthsH[i]], i, -1);
assignValue(state.masksV[lengthsV[i]], i, -1);
}
// now create SIMD representation
resize(stringSimdH, maxH);
resize(stringSimdV, maxV);
_createSimdRepImpl(stringSimdH, paddedH);
_createSimdRepImpl(stringSimdV, paddedV);
state.dimV = length(stringSimdV);
state.isLocalAlignment = IsLocalAlignment_<TAlgo>::VALUE;
state.right = IsFreeEndGap_<TFreeEndGaps, DPLastColumn>::VALUE;
state.bottom = IsFreeEndGap_<TFreeEndGaps, DPLastRow>::VALUE;
results = _setUpAndRunAlignment(traces, state, stringSimdH, stringSimdV, scoringScheme, alignConfig, TGapModel());
}
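// Checks whether every sequence pair has the same horizontal and vertical
// lengths and dispatches to the equal-length or variable-length SIMD kernel.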
template <typename TResult,
typename TTraces,
typename TSequencesH,
typename TSequencesV,
typename TScore,
typename TAlgo, typename TBand, typename TFreeEndGaps, typename TTraceback,
typename TGapModel>
inline void
_prepareAndRunSimdAlignment(TResult & results,
TTraces & traces,
TSequencesH const & seqH,
TSequencesV const & seqV,
TScore const & scoringScheme,
AlignConfig2<TAlgo, TBand, TFreeEndGaps, TTraceback> const & alignConfig,
TGapModel const & /*gapModel*/)
{
auto seqLengthH = length(seqH[0]);
auto seqLengthV = length(seqV[0]);
auto zipView = makeZipView(seqH, seqV);
bool allSameLength = std::all_of(begin(zipView, Standard()), end(zipView, Standard()),
[seqLengthH, seqLengthV](auto param)
{
return (length(std::get<0>(param)) == seqLengthH) &&
(length(std::get<1>(param)) == seqLengthV);
});
if(allSameLength)
_prepareAndRunSimdAlignment(results, traces, seqH, seqV, scoringScheme, alignConfig, TGapModel(), SimdAlignEqualLength());
else
_prepareAndRunSimdAlignment(results, traces, seqH, seqV, scoringScheme, alignConfig, TGapModel(),
SimdAlignVariableLength<Nothing>());
}
// ----------------------------------------------------------------------------
// Function _alignWrapperSimd(); Score; StringSet vs. StringSet
// ----------------------------------------------------------------------------
template <typename TString1, typename TSpec1,
typename TString2, typename TSpec2,
typename TScoreValue, typename TScoreSpec,
typename TAlignConfig,
typename TGapModel>
inline auto
_alignWrapperSimd(StringSet<TString1, TSpec1> const & stringsH,
StringSet<TString2, TSpec2> const & stringsV,
Score<TScoreValue, TScoreSpec> const & scoringScheme,
TAlignConfig const & config,
TGapModel const & /*gaps*/)
{
typedef typename SimdVector<int16_t>::Type TSimdAlign;
unsigned const numAlignments = length(stringsV);
unsigned const sizeBatch = LENGTH<TSimdAlign>::VALUE;
unsigned const fullSize = sizeBatch * ((numAlignments + sizeBatch - 1) / sizeBatch);
String<TScoreValue> results;
resize(results, numAlignments);
StringSet<String<Nothing> > trace; // We need to declare it, but it will not be used.
// Create a SIMD scoring scheme.
Score<TSimdAlign, ScoreSimdWrapper<Score<TScoreValue, TScoreSpec> > > simdScoringScheme(scoringScheme);
for (auto pos = 0u; pos < fullSize; pos += sizeBatch)
{
TSimdAlign resultsBatch;
if (SEQAN_UNLIKELY(numAlignments < pos + sizeBatch))
{
StringSet<TString1, Dependent<> > depSetH;
StringSet<TString2, Dependent<> > depSetV;
for (unsigned i = pos; i < fullSize; ++i)
{
if (i >= numAlignments)
{
appendValue(depSetH, back(stringsH));
appendValue(depSetV, back(stringsV));
}
else
{
appendValue(depSetH, stringsH[i]);
appendValue(depSetV, stringsV[i]);
}
}
SEQAN_ASSERT_EQ(length(depSetH), sizeBatch);
SEQAN_ASSERT_EQ(length(depSetV), sizeBatch);
_prepareAndRunSimdAlignment(resultsBatch, trace, depSetH, depSetV, simdScoringScheme, config, TGapModel());
}
else
{
auto infSetH = infixWithLength(stringsH, pos, sizeBatch);
auto infSetV = infixWithLength(stringsV, pos, sizeBatch);
_prepareAndRunSimdAlignment(resultsBatch, trace, infSetH, infSetV, simdScoringScheme, config, TGapModel());
}
// TODO(rrahn): Could be parallelized!
for(auto x = pos; x < pos + sizeBatch && x < numAlignments; ++x)
results[x] = resultsBatch[x - pos];
}
return results;
}
// ----------------------------------------------------------------------------
// Function _alignWrapperSimd(); Score; String vs. StringSet
// ----------------------------------------------------------------------------
template <typename TString1,
typename TString2, typename TSpec,
typename TScoreValue, typename TScoreSpec,
typename TAlignConfig,
typename TGapModel>
inline auto
_alignWrapperSimd(TString1 const & stringH,
StringSet<TString2, TSpec> const & stringsV,
Score<TScoreValue, TScoreSpec> const & scoringScheme,
TAlignConfig const & config,
TGapModel const & /*gaps*/)
{
typedef typename SimdVector<int16_t>::Type TSimdAlign;
unsigned const numAlignments = length(stringsV);
unsigned const sizeBatch = LENGTH<TSimdAlign>::VALUE;
unsigned const fullSize = sizeBatch * ((numAlignments + sizeBatch - 1) / sizeBatch);
String<TScoreValue> results;
resize(results, numAlignments);
// Prepare strings.
StringSet<TString1, Dependent<> > setH;
for (auto i = 0u; i < sizeBatch; ++i)
appendValue(setH, stringH);
StringSet<String<Nothing> > trace; // We need to declare it, but it will not be used.
// Create a SIMD scoring scheme.
Score<TSimdAlign, ScoreSimdWrapper<Score<TScoreValue, TScoreSpec> > > simdScoringScheme(scoringScheme);
for (auto pos = 0u; pos < fullSize; pos += sizeBatch)
{
TSimdAlign resultsBatch;
if (SEQAN_UNLIKELY(numAlignments < pos + sizeBatch))
{
StringSet<TString2, Dependent<> > depSetV;
for (unsigned i = pos; i < fullSize; ++i)
{
if (i >= numAlignments)
appendValue(depSetV, back(stringsV));
else
appendValue(depSetV, stringsV[i]);
}
SEQAN_ASSERT_EQ(length(depSetV), sizeBatch);
_prepareAndRunSimdAlignment(resultsBatch, trace, setH, depSetV, simdScoringScheme, config, TGapModel());
}
else
{
auto infSetV = infixWithLength(stringsV, pos, sizeBatch);
_prepareAndRunSimdAlignment(resultsBatch, trace, setH, infSetV, simdScoringScheme, config, TGapModel());
}
// TODO(rrahn): Could be parallelized!
for(auto x = pos; x < pos + sizeBatch && x < numAlignments; ++x)
results[x] = resultsBatch[x - pos];
}
return results;
}
// ----------------------------------------------------------------------------
// Function _alignWrapperSimd(); Gaps
// ----------------------------------------------------------------------------
template <typename TSequenceH, typename TGapsSpecH, typename TSetSpecH,
typename TSequenceV, typename TGapsSpecV, typename TSetSpecV,
typename TScoreValue, typename TScoreSpec,
typename TAlignConfig,
typename TGapModel>
inline auto
_alignWrapperSimd(StringSet<Gaps<TSequenceH, TGapsSpecH>, TSetSpecH> & gapSeqSetH,
StringSet<Gaps<TSequenceV, TGapsSpecV>, TSetSpecV> & gapSeqSetV,
Score<TScoreValue, TScoreSpec> const & scoringScheme,
TAlignConfig const & config,
TGapModel const & /*gaps*/)
{
typedef Gaps<TSequenceH, TGapsSpecH> TGapSequenceH;
typedef Gaps<TSequenceV, TGapsSpecV> TGapSequenceV;
typedef typename Size<TGapSequenceH>::Type TSize;
typedef typename Position<TGapSequenceH>::Type TPosition;
typedef TraceSegment_<TPosition, TSize> TTraceSegment;
typedef typename SimdVector<int16_t>::Type TSimdAlign;
#if SEQAN_ALIGN_SIMD_PROFILE
timer = sysTime();
#endif
unsigned const numAlignments = length(gapSeqSetH);
unsigned const sizeBatch = LENGTH<TSimdAlign>::VALUE;
unsigned const fullSize = sizeBatch * ((numAlignments + sizeBatch - 1) / sizeBatch);
String<TScoreValue> results;
resize(results, numAlignments);
// Create a SIMD scoring scheme.
Score<TSimdAlign, ScoreSimdWrapper<Score<TScoreValue, TScoreSpec> > > simdScoringScheme(scoringScheme);
// Prepare string sets with sequences.
StringSet<typename Source<TGapSequenceH>::Type, Dependent<> > depSetH;
StringSet<typename Source<TGapSequenceV>::Type, Dependent<> > depSetV;
reserve(depSetH, fullSize);
reserve(depSetV, fullSize);
for (unsigned i = 0; i < fullSize; ++i)
{
if (i >= numAlignments)
{
appendValue(depSetH, source(back(gapSeqSetH)));
appendValue(depSetV, source(back(gapSeqSetV)));
}
else
{
appendValue(depSetH, source(gapSeqSetH[i]));
appendValue(depSetV, source(gapSeqSetV[i]));
}
}
// Run alignments in batches.
for (auto pos = 0u; pos < fullSize; pos += sizeBatch)
{
auto infSetH = infixWithLength(depSetH, pos, sizeBatch);
auto infSetV = infixWithLength(depSetV, pos, sizeBatch);
TSimdAlign resultsBatch;
StringSet<String<TTraceSegment> > trace;
resize(trace, sizeBatch, Exact());
_prepareAndRunSimdAlignment(resultsBatch, trace, infSetH, infSetV, simdScoringScheme, config, TGapModel());
// copy results and finish traceback
// TODO(rrahn): Could be parallelized!
// to for_each call
for(auto x = pos; x < pos + sizeBatch && x < numAlignments; ++x)
{
results[x] = resultsBatch[x - pos];
_adaptTraceSegmentsTo(gapSeqSetH[x], gapSeqSetV[x], trace[x - pos]);
}
#if SEQAN_ALIGN_SIMD_PROFILE
profile.traceTimer += sysTime() - timer;
timer = sysTime();
#endif
}
return results;
}
} // namespace seqan
#endif // #ifndef INCLUDE_SEQAN_ALIGN_DP_ALIGN_SIMD_HELPER_H_
| AngelinaScheck/BachelorBioinfo | statisticslibsvm/seqan-library-2.2.0/include/seqan/align/dp_align_simd_helper.h | C | bsd-3-clause | 21,847 |
// ==========================================================================
// mini_bowtie
// ==========================================================================
#include <iostream>
#include <seqan/basic.h>
#include <seqan/sequence.h>
#include <seqan/file.h>
#include <seqan/index.h>
#include <seqan/store.h>
using namespace seqan;
void search() {}
int main(int argc, char * argv[])
{
// type definitions
typedef String<Dna5> TString;
typedef StringSet<TString> TStringSet;
typedef Index<StringSet<TString>, FMIndex<> > TIndex;
typedef Iterator<TIndex, TopDown<ParentLinks<> > >::Type TIter;
// reading the command line arguments
if (argc < 3)
{
std::cerr << "Invalid number of arguments." << std::endl
<< "USAGE: minimapper GENOME.fasta READS.fasta OUT.sam" << std::endl;
return 1;
}
// declaration and initialization of the fragment store
FragmentStore<> fragStore;
if (!loadContigs(fragStore, argv[1]))
return 1;
if (!loadReads(fragStore, argv[2]))
return 1;
StringSet<TString> text;
for (unsigned i = 0; i < length(fragStore.contigStore); ++i)
appendValue(text, fragStore.contigStore[i].seq);
TIndex fmIndex(text);
TIter it(fmIndex);
search();
clear(fmIndex);
clear(it);
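// Second pass: reverse the contigs and the reads, rebuild the FM index over
// the reversed text and run the (still empty) search stub again.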
reverse(text);
reverse(fragStore.readSeqStore);
fmIndex = TIndex(text);
it = TIter(fmIndex);
search();
clear(fmIndex);
clear(it);
reverse(text);
reverse(fragStore.readSeqStore);
clear(fmIndex);
clear(it);
return 0;
}
| catkira/seqan | demos/tutorial/mini_bowtie/solution1.cpp | C++ | bsd-3-clause | 1,631 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/password_manager/password_store.h"
#include "base/bind.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop.h"
#include "base/stl_util.h"
#include "chrome/browser/password_manager/password_store_consumer.h"
#include "content/public/browser/browser_thread.h"
#include "webkit/forms/password_form.h"
using content::BrowserThread;
using std::vector;
using webkit::forms::PasswordForm;
PasswordStore::GetLoginsRequest::GetLoginsRequest(
const GetLoginsCallback& callback)
: CancelableRequest1<GetLoginsCallback,
std::vector<PasswordForm*> >(callback) {
}
PasswordStore::GetLoginsRequest::~GetLoginsRequest() {
if (canceled()) {
STLDeleteElements(&value);
}
}
PasswordStore::PasswordStore() {
}
bool PasswordStore::Init() {
ReportMetrics();
return true;
}
void PasswordStore::AddLogin(const PasswordForm& form) {
ScheduleTask(base::Bind(&PasswordStore::WrapModificationTask, this,
base::Closure(base::Bind(&PasswordStore::AddLoginImpl, this, form))));
}
void PasswordStore::UpdateLogin(const PasswordForm& form) {
ScheduleTask(base::Bind(&PasswordStore::WrapModificationTask, this,
base::Closure(base::Bind(&PasswordStore::UpdateLoginImpl, this, form))));
}
void PasswordStore::RemoveLogin(const PasswordForm& form) {
ScheduleTask(base::Bind(&PasswordStore::WrapModificationTask, this,
base::Closure(base::Bind(&PasswordStore::RemoveLoginImpl, this, form))));
}
void PasswordStore::RemoveLoginsCreatedBetween(const base::Time& delete_begin,
const base::Time& delete_end) {
ScheduleTask(base::Bind(&PasswordStore::WrapModificationTask, this,
base::Closure(
base::Bind(&PasswordStore::RemoveLoginsCreatedBetweenImpl, this,
delete_begin, delete_end))));
}
CancelableRequestProvider::Handle PasswordStore::GetLogins(
const PasswordForm& form, PasswordStoreConsumer* consumer) {
return Schedule(&PasswordStore::GetLoginsImpl, consumer, form);
}
CancelableRequestProvider::Handle PasswordStore::GetAutofillableLogins(
PasswordStoreConsumer* consumer) {
return Schedule(&PasswordStore::GetAutofillableLoginsImpl, consumer);
}
CancelableRequestProvider::Handle PasswordStore::GetBlacklistLogins(
PasswordStoreConsumer* consumer) {
return Schedule(&PasswordStore::GetBlacklistLoginsImpl, consumer);
}
void PasswordStore::ReportMetrics() {
ScheduleTask(base::Bind(&PasswordStore::ReportMetricsImpl, this));
}
void PasswordStore::AddObserver(Observer* observer) {
observers_.AddObserver(observer);
}
void PasswordStore::RemoveObserver(Observer* observer) {
observers_.RemoveObserver(observer);
}
PasswordStore::~PasswordStore() {}
PasswordStore::GetLoginsRequest* PasswordStore::NewGetLoginsRequest(
const GetLoginsCallback& callback) {
return new GetLoginsRequest(callback);
}
void PasswordStore::ScheduleTask(const base::Closure& task) {
BrowserThread::PostTask(BrowserThread::DB, FROM_HERE, task);
}
void PasswordStore::ForwardLoginsResult(GetLoginsRequest* request) {
request->ForwardResult(request->handle(), request->value);
}
template<typename BackendFunc>
CancelableRequestProvider::Handle PasswordStore::Schedule(
BackendFunc func, PasswordStoreConsumer* consumer) {
scoped_refptr<GetLoginsRequest> request(NewGetLoginsRequest(
base::Bind(&PasswordStoreConsumer::OnPasswordStoreRequestDone,
base::Unretained(consumer))));
AddRequest(request, consumer->cancelable_consumer());
ScheduleTask(base::Bind(func, this, request));
return request->handle();
}
template<typename BackendFunc, typename ArgA>
CancelableRequestProvider::Handle PasswordStore::Schedule(
BackendFunc func, PasswordStoreConsumer* consumer, const ArgA& a) {
scoped_refptr<GetLoginsRequest> request(NewGetLoginsRequest(
base::Bind(&PasswordStoreConsumer::OnPasswordStoreRequestDone,
base::Unretained(consumer))));
AddRequest(request, consumer->cancelable_consumer());
ScheduleTask(base::Bind(func, this, request, a));
return request->handle();
}
void PasswordStore::WrapModificationTask(base::Closure task) {
#if !defined(OS_MACOSX)
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::DB));
#endif // !defined(OS_MACOSX)
task.Run();
PostNotifyLoginsChanged();
}
void PasswordStore::PostNotifyLoginsChanged() {
#if !defined(OS_MACOSX)
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::DB));
#endif // !defined(OS_MACOSX)
BrowserThread::PostTask(
BrowserThread::UI, FROM_HERE,
base::Bind(&PasswordStore::NotifyLoginsChanged, this));
}
void PasswordStore::NotifyLoginsChanged() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
FOR_EACH_OBSERVER(Observer, observers_, OnLoginsChanged());
}
| ropik/chromium | chrome/browser/password_manager/password_store.cc | C++ | bsd-3-clause | 4,970 |
__author__ = ['arkilic', 'tcaswell']
# I am not solely responsible if this cocks up. Tom was here too...
from mongoengine import connect
import mongoengine.connection
from pymongo import MongoClient
import six
from metadatastore.api import insert_beamline_config, insert_run_start, insert_run_stop, insert_event_descriptor, insert_event
import metadatastore.conf as conf
conf.mds_config['database'] = 'datastore2'
import mongoengine
connect(db='datastore2', host='xf23id-broker', port=27017)
assert mongoengine.connection.get_db().name == 'datastore2'
client = MongoClient(host='xf23id-broker', port=27017)
db = client.toBemigrated1
beamline_cfg_mapping = dict()
beamline_configs = db.beamline_config.find()
for bc in beamline_configs:
bcfg_id = bc['_id']
the_bc = insert_beamline_config(config_params=bc['config_params'], time=bc['time'])
beamline_cfg_mapping[bc['_id']] = the_bc
begin_runs = db.begin_run_event.find()
for br in begin_runs:
the_run_start = insert_run_start(time=br['time'], beamline_id=br['beamline_id'], beamline_config=the_bc, owner=br['owner'],
scan_id=br['scan_id'], custom=br.get('custom',{}), uid=br['uid'])
event_descs = db.event_descriptor.find({'begin_run_id': br['_id']})
max_time = 0.0
for e_desc in event_descs:
the_e_desc = insert_event_descriptor(run_start=the_run_start, data_keys=e_desc['data_keys'],
time=e_desc['time'], uid=e_desc['uid'])
events = db.event.find({'descriptor_id': e_desc['_id']})
for ev in events:
if ev['time'] > max_time:
max_time = ev['time']
insert_event(event_descriptor=the_e_desc, time=ev['time'], data=ev['data'], seq_num=ev['seq_num'], uid=ev['uid'])
insert_run_stop(run_start=the_run_start, time=float(max_time), exit_status='success',
reason=None, uid=None)
run_start_mapping = dict()
run_starts = db.run_start.find()
for rs in run_starts:
time = rs.pop('time')
beamline_id = rs.pop('beamline_id')
bcfg_id = beamline_cfg_mapping[rs.pop('beamline_config_id')]
owner = rs.pop('owner')
scan_id = rs.pop('scan_id')
uid = rs.pop('uid')
trashed = rs.pop('time_as_datetime')
my_run_start = insert_run_start(time= time, beamline_id= beamline_id,
beamline_config=bcfg_id, owner=owner,
scan_id=scan_id, uid=uid)
run_start_mapping[rs['_id']] = my_run_start
e_descs = db.event_descriptor.find({'run_start_id': rs['_id'] })
for e_d in e_descs:
my_e_d = insert_event_descriptor(run_start=my_run_start, data_keys=e_d['data_keys'],
time=e_d['time'], uid=e_d['uid'])
ev_s = db.event.find({'descriptor_id': e_d['_id']})
for e in ev_s:
insert_event(event_descriptor=my_e_d, time=e['time'], data=e['data'], seq_num=e['seq_num'], uid=e['uid'])
    end_runs = db.end_run.find({'run_start_id': rs['_id']})
for er in end_runs:
rsta = run_start_mapping.pop(er['run_start_id'])
insert_run_stop(run_start=rsta, time=er['time'], exit_status=er['exit_status'],
reason=er['reason'], uid=er['uid'])
for v in six.itervalues(run_start_mapping):
insert_run_stop(run_start=v, time=v.time, exit_status='success',
reason=None, uid=None)
| ericdill/databroker | migration/schema1_schema2_20150219.py | Python | bsd-3-clause | 3,429 |
<?php
use yupe\components\WebModule;
/**
* Class OrderModule
*/
class OrderModule extends WebModule
{
/**
*
*/
const VERSION = '0.9.9';
/**
* @var
*/
public $notifyEmailFrom;
/**
* @var
*/
public $notifyEmailsTo;
/**
* @var string
*/
public $assetsPath = 'order.views.assets';
/**
* @var int
*/
public $showOrder = 1;
/**
* @var int
*/
public $enableCheck = 1;
/**
* @var int
*/
public $defaultStatus = 1;
/**
* @var int
*/
public $enableComments = 1;
/**
* @return array
*/
public function getDependencies()
{
return ['store', 'payment', 'delivery', 'mail'];
}
/**
* @return array
*/
public function getEditableParams()
{
return [
'notifyEmailFrom',
'notifyEmailsTo',
'showOrder' => $this->getChoice(),
'enableCheck' => $this->getChoice(),
'defaultStatus' => CHtml::listData(OrderStatus::model()->findAll(), 'id', 'name'),
'enableComments' => $this->getChoice(),
];
}
/**
* @return array
*/
public function getParamsLabels()
{
return [
'notifyEmailFrom' => Yii::t('OrderModule.order', 'Notification email'),
'notifyEmailsTo' => Yii::t('OrderModule.order', 'Recipients of notifications (comma separated)'),
'showOrder' => Yii::t('OrderModule.order', 'Public ordering page'),
'enableCheck' => Yii::t('OrderModule.order', 'Allow orders validation by number'),
'defaultStatus' => Yii::t('OrderModule.order', 'Default order status'),
'enableComments' => Yii::t('OrderModule.order', 'Allow order comments in admin panel'),
];
}
/**
* @return array
*/
public function getEditableParamsGroups()
{
return [
'0.main' => [
'label' => Yii::t('OrderModule.order', 'Orders settings'),
'items' => [
'defaultStatus',
'showOrder',
'enableCheck',
'enableComments',
],
],
'1.notify' => [
'label' => Yii::t('OrderModule.order', 'Notifications'),
'items' => [
'notifyEmailFrom',
'notifyEmailsTo',
],
],
];
}
/**
* @return string
*/
public function getCategory()
{
return Yii::t('OrderModule.order', 'Store');
}
/**
* @return array
*/
public function getNavigation()
{
return [
[
'icon' => 'fa fa-fw fa-users',
'label' => Yii::t('OrderModule.order', 'Clients'),
'url' => ['/order/clientBackend/index'],
],
[
'icon' => 'fa fa-fw fa-gift',
'label' => Yii::t('OrderModule.order', 'Orders'),
'url' => ['/order/orderBackend/index'],
],
[
'icon' => 'fa fa-fw fa-list-alt',
'label' => Yii::t('OrderModule.order', 'Statuses'),
'url' => ['/order/statusBackend/index'],
],
];
}
/**
* @return string
*/
public function getAdminPageLink()
{
return '/order/orderBackend/index';
}
/**
* @return string
*/
public function getVersion()
{
return self::VERSION;
}
/**
* @return string
*/
public function getName()
{
return Yii::t('OrderModule.order', 'Orders');
}
/**
* @return string
*/
public function getDescription()
{
return Yii::t('OrderModule.order', 'Orders manage module');
}
/**
* @return string
*/
public function getAuthor()
{
return Yii::t('OrderModule.order', 'amylabs team');
}
/**
* @return string
*/
public function getAuthorEmail()
{
return Yii::t('OrderModule.order', '[email protected]');
}
/**
* @return string
*/
public function getUrl()
{
return 'http://yupe.ru';
}
/**
* @return string
*/
public function getIcon()
{
return 'fa fa-fw fa-gift';
}
/**
*
*/
public function init()
{
parent::init();
$this->setImport(
[
'order.models.*',
'order.forms.*',
]
);
}
/**
* @return array
*/
public function getAuthItems()
{
return [
[
'type' => AuthItem::TYPE_TASK,
'name' => 'Order.OrderBackend.Management',
'description' => Yii::t('OrderModule.order', 'Manage orders'),
'items' => [
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.OrderBackend.Index',
'description' => Yii::t('OrderModule.order', 'View order list'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.OrderBackend.Create',
'description' => Yii::t('OrderModule.order', 'Create order'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.OrderBackend.Update',
'description' => Yii::t('OrderModule.order', 'Update order'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.OrderBackend.View',
'description' => Yii::t('OrderModule.order', 'View order'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.OrderBackend.Delete',
'description' => Yii::t('OrderModule.order', 'Delete order'),
],
],
],
[
'type' => AuthItem::TYPE_TASK,
'name' => 'Order.StatusBackend.Management',
'description' => Yii::t('OrderModule.order', 'Manage statuses'),
'items' => [
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.StatusBackend.Index',
'description' => Yii::t('OrderModule.order', 'View status list'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.StatusBackend.Create',
'description' => Yii::t('OrderModule.order', 'Create status'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.StatusBackend.Update',
'description' => Yii::t('OrderModule.order', 'Update status'),
],
[
'type' => AuthItem::TYPE_OPERATION,
'name' => 'Order.StatusBackend.Delete',
'description' => Yii::t('OrderModule.order', 'Delete status'),
],
],
],
];
}
/**
* @return array
*/
public function getNotifyTo()
{
return explode(',', $this->notifyEmailsTo);
}
}
| elorian/crm.inreserve.kz | protected/modules/order/OrderModule.php | PHP | bsd-3-clause | 7,723 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_GFX_COMPOSITOR_LAYER_ANIMATION_DELEGATE_H_
#define UI_GFX_COMPOSITOR_LAYER_ANIMATION_DELEGATE_H_
#pragma once
#include "ui/gfx/rect.h"
#include "ui/gfx/transform.h"
#include "ui/gfx/compositor/compositor_export.h"
namespace ui {
// Layer animations interact with the layers using this interface.
class COMPOSITOR_EXPORT LayerAnimationDelegate {
public:
virtual void SetBoundsFromAnimation(const gfx::Rect& bounds) = 0;
virtual void SetTransformFromAnimation(const Transform& transform) = 0;
virtual void SetOpacityFromAnimation(float opacity) = 0;
virtual void SetVisibilityFromAnimation(bool visibility) = 0;
virtual void ScheduleDrawForAnimation() = 0;
virtual const gfx::Rect& GetBoundsForAnimation() const = 0;
virtual const Transform& GetTransformForAnimation() const = 0;
virtual float GetOpacityForAnimation() const = 0;
virtual bool GetVisibilityForAnimation() const = 0;
protected:
virtual ~LayerAnimationDelegate() {}
};
} // namespace ui
#endif // UI_GFX_COMPOSITOR_LAYER_ANIMATION_DELEGATE_H_
| ropik/chromium | ui/gfx/compositor/layer_animation_delegate.h | C | bsd-3-clause | 1,216 |
/* Generated by ./xlat/gen.sh from ./xlat/ptrace_setoptions_flags.in; do not edit. */
static const struct xlat ptrace_setoptions_flags[] = {
#if defined(PTRACE_O_TRACESYSGOOD) || (defined(HAVE_DECL_PTRACE_O_TRACESYSGOOD) && HAVE_DECL_PTRACE_O_TRACESYSGOOD)
XLAT(PTRACE_O_TRACESYSGOOD),
#endif
#if defined(PTRACE_O_TRACEFORK) || (defined(HAVE_DECL_PTRACE_O_TRACEFORK) && HAVE_DECL_PTRACE_O_TRACEFORK)
XLAT(PTRACE_O_TRACEFORK),
#endif
#if defined(PTRACE_O_TRACEVFORK) || (defined(HAVE_DECL_PTRACE_O_TRACEVFORK) && HAVE_DECL_PTRACE_O_TRACEVFORK)
XLAT(PTRACE_O_TRACEVFORK),
#endif
#if defined(PTRACE_O_TRACECLONE) || (defined(HAVE_DECL_PTRACE_O_TRACECLONE) && HAVE_DECL_PTRACE_O_TRACECLONE)
XLAT(PTRACE_O_TRACECLONE),
#endif
#if defined(PTRACE_O_TRACEEXEC) || (defined(HAVE_DECL_PTRACE_O_TRACEEXEC) && HAVE_DECL_PTRACE_O_TRACEEXEC)
XLAT(PTRACE_O_TRACEEXEC),
#endif
#if defined(PTRACE_O_TRACEVFORKDONE) || (defined(HAVE_DECL_PTRACE_O_TRACEVFORKDONE) && HAVE_DECL_PTRACE_O_TRACEVFORKDONE)
XLAT(PTRACE_O_TRACEVFORKDONE),
#endif
#if defined(PTRACE_O_TRACEEXIT) || (defined(HAVE_DECL_PTRACE_O_TRACEEXIT) && HAVE_DECL_PTRACE_O_TRACEEXIT)
XLAT(PTRACE_O_TRACEEXIT),
#endif
#if defined(PTRACE_O_TRACESECCOMP) || (defined(HAVE_DECL_PTRACE_O_TRACESECCOMP) && HAVE_DECL_PTRACE_O_TRACESECCOMP)
XLAT(PTRACE_O_TRACESECCOMP),
#endif
#if defined(PTRACE_O_EXITKILL) || (defined(HAVE_DECL_PTRACE_O_EXITKILL) && HAVE_DECL_PTRACE_O_EXITKILL)
XLAT(PTRACE_O_EXITKILL),
#endif
XLAT_END
};
| android-ia/platform_external_strace | xlat/ptrace_setoptions_flags.h | C | bsd-3-clause | 1,472 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/sender/audio_sender.h"
#include <stdint.h>
#include <utility>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/values.h"
#include "media/base/media.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/constants.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/net/cast_transport_sender_impl.h"
#include "media/cast/test/fake_single_thread_task_runner.h"
#include "media/cast/test/utility/audio_utility.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
namespace cast {
namespace {
void SaveOperationalStatus(OperationalStatus* out_status,
OperationalStatus in_status) {
DVLOG(1) << "OperationalStatus transitioning from " << *out_status << " to "
<< in_status;
*out_status = in_status;
}
} // namespace
class TestPacketSender : public PacketSender {
public:
TestPacketSender() : number_of_rtp_packets_(0), number_of_rtcp_packets_(0) {}
bool SendPacket(PacketRef packet, const base::Closure& cb) final {
if (IsRtcpPacket(&packet->data[0], packet->data.size())) {
++number_of_rtcp_packets_;
} else {
// Check that at least one RTCP packet was sent before the first RTP
// packet. This confirms that the receiver will have the necessary lip
// sync info before it has to calculate the playout time of the first
// frame.
if (number_of_rtp_packets_ == 0)
EXPECT_LE(1, number_of_rtcp_packets_);
++number_of_rtp_packets_;
}
return true;
}
int64_t GetBytesSent() final { return 0; }
int number_of_rtp_packets() const { return number_of_rtp_packets_; }
int number_of_rtcp_packets() const { return number_of_rtcp_packets_; }
private:
int number_of_rtp_packets_;
int number_of_rtcp_packets_;
DISALLOW_COPY_AND_ASSIGN(TestPacketSender);
};
class AudioSenderTest : public ::testing::Test {
protected:
AudioSenderTest() {
InitializeMediaLibrary();
testing_clock_ = new base::SimpleTestTickClock();
testing_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks());
task_runner_ = new test::FakeSingleThreadTaskRunner(testing_clock_);
cast_environment_ =
new CastEnvironment(scoped_ptr<base::TickClock>(testing_clock_),
task_runner_, task_runner_, task_runner_);
audio_config_.codec = CODEC_AUDIO_OPUS;
audio_config_.use_external_encoder = false;
audio_config_.frequency = kDefaultAudioSamplingRate;
audio_config_.channels = 2;
audio_config_.bitrate = kDefaultAudioEncoderBitrate;
audio_config_.rtp_payload_type = 127;
net::IPEndPoint dummy_endpoint;
transport_sender_.reset(new CastTransportSenderImpl(
NULL,
testing_clock_,
net::IPEndPoint(),
dummy_endpoint,
make_scoped_ptr(new base::DictionaryValue),
base::Bind(&UpdateCastTransportStatus),
BulkRawEventsCallback(),
base::TimeDelta(),
task_runner_,
PacketReceiverCallback(),
&transport_));
OperationalStatus operational_status = STATUS_UNINITIALIZED;
audio_sender_.reset(new AudioSender(
cast_environment_,
audio_config_,
base::Bind(&SaveOperationalStatus, &operational_status),
transport_sender_.get()));
task_runner_->RunTasks();
CHECK_EQ(STATUS_INITIALIZED, operational_status);
}
~AudioSenderTest() override {}
static void UpdateCastTransportStatus(CastTransportStatus status) {
EXPECT_EQ(TRANSPORT_AUDIO_INITIALIZED, status);
}
base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment.
TestPacketSender transport_;
scoped_ptr<CastTransportSenderImpl> transport_sender_;
scoped_refptr<test::FakeSingleThreadTaskRunner> task_runner_;
scoped_ptr<AudioSender> audio_sender_;
scoped_refptr<CastEnvironment> cast_environment_;
AudioSenderConfig audio_config_;
};
TEST_F(AudioSenderTest, Encode20ms) {
const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(20);
scoped_ptr<AudioBus> bus(
TestAudioBusFactory(audio_config_.channels,
audio_config_.frequency,
TestAudioBusFactory::kMiddleANoteFreq,
0.5f).NextAudioBus(kDuration));
audio_sender_->InsertAudio(std::move(bus), testing_clock_->NowTicks());
task_runner_->RunTasks();
EXPECT_LE(1, transport_.number_of_rtp_packets());
EXPECT_LE(1, transport_.number_of_rtcp_packets());
}
TEST_F(AudioSenderTest, RtcpTimer) {
const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(20);
scoped_ptr<AudioBus> bus(
TestAudioBusFactory(audio_config_.channels,
audio_config_.frequency,
TestAudioBusFactory::kMiddleANoteFreq,
0.5f).NextAudioBus(kDuration));
audio_sender_->InsertAudio(std::move(bus), testing_clock_->NowTicks());
task_runner_->RunTasks();
// Make sure that we send at least one RTCP packet.
base::TimeDelta max_rtcp_timeout =
base::TimeDelta::FromMilliseconds(1 + kRtcpReportIntervalMs * 3 / 2);
testing_clock_->Advance(max_rtcp_timeout);
task_runner_->RunTasks();
EXPECT_LE(1, transport_.number_of_rtp_packets());
EXPECT_LE(1, transport_.number_of_rtcp_packets());
}
} // namespace cast
} // namespace media
| js0701/chromium-crosswalk | media/cast/sender/audio_sender_unittest.cc | C++ | bsd-3-clause | 5,662 |
// Generated by CoffeeScript 1.4.0
/*
TouchController (stick + buttons) for touch devices
Based on the touch demo by Seb Lee-Delisle <http://seb.ly/>
@class bkcore.controllers.TouchController
@author Thibaut 'BKcore' Despoulain <http://bkcore.com>
*/
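/*
  Illustrative usage sketch (not part of the original demo); the element id,
  the 200px stick margin and the logging callback are assumptions for the
  example only:
    if (bkcore.controllers.TouchController.isCompatible()) {
      var controller = new bkcore.controllers.TouchController(
        document.getElementById('game'),   // element that listens for touches
        200,                               // touches left of this margin drive the stick
        function(active, touch, event) {   // any other touch acts as a button
          console.log(active ? 'button down' : 'button up', touch.identifier);
        });
      // Per frame, controller.stickVector holds the current stick drag offset.
    }
*/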
(function() {
var TouchController, Vec2, exports, _base;
TouchController = (function() {
TouchController.isCompatible = function() {
return 'ontouchstart' in document.documentElement;
};
/*
Creates a new TouchController
@param dom DOMElement The element that will listen to touch events
@param stickMargin int The left margin in px for stick detection
@param buttonCallback function Callback for non-stick touches
*/
function TouchController(dom, stickMargin, buttonCallback) {
var _this = this;
this.dom = dom;
this.stickMargin = stickMargin != null ? stickMargin : 200;
this.buttonCallback = buttonCallback != null ? buttonCallback : null;
this.active = true;
this.touches = null;
this.stickID = -1;
this.stickPos = new Vec2(0, 0);
this.stickStartPos = new Vec2(0, 0);
this.stickVector = new Vec2(0, 0);
this.dom.addEventListener('touchstart', (function(e) {
return _this.touchStart(e);
}), false);
this.dom.addEventListener('touchmove', (function(e) {
return _this.touchMove(e);
}), false);
this.dom.addEventListener('touchend', (function(e) {
return _this.touchEnd(e);
}), false);
}
/*
@private
*/
TouchController.prototype.touchStart = function(event) {
var touch, _i, _len, _ref;
if (!this.active) {
return;
}
_ref = event.changedTouches;
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
touch = _ref[_i];
if (this.stickID < 0 && touch.clientX < this.stickMargin) {
this.stickID = touch.identifier;
this.stickStartPos.set(touch.clientX, touch.clientY);
this.stickPos.copy(this.stickStartPos);
this.stickVector.set(0, 0);
continue;
} else {
if (typeof this.buttonCallback === "function") {
this.buttonCallback(true, touch, event);
}
}
}
this.touches = event.touches;
return false;
};
/*
@private
*/
TouchController.prototype.touchMove = function(event) {
var touch, _i, _len, _ref;
event.preventDefault();
if (!this.active) {
return;
}
_ref = event.changedTouches;
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
touch = _ref[_i];
if (this.stickID === touch.identifier) {
this.stickPos.set(touch.clientX, touch.clientY);
this.stickVector.copy(this.stickPos).substract(this.stickStartPos);
break;
}
}
this.touches = event.touches;
return false;
};
/*
@private
*/
TouchController.prototype.touchEnd = function(event) {
var touch, _i, _len, _ref;
if (!this.active) {
return;
}
this.touches = event.touches;
_ref = event.changedTouches;
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
touch = _ref[_i];
if (this.stickID === touch.identifier) {
this.stickID = -1;
this.stickVector.set(0, 0);
break;
} else {
if (typeof this.buttonCallback === "function") {
this.buttonCallback(false, touch, event);
}
}
}
return false;
};
return TouchController;
})();
/*
Internal class used for vector2
@class Vec2
@private
*/
Vec2 = (function() {
function Vec2(x, y) {
this.x = x != null ? x : 0;
this.y = y != null ? y : 0;
}
Vec2.prototype.substract = function(vec) {
this.x -= vec.x;
this.y -= vec.y;
return this;
};
Vec2.prototype.copy = function(vec) {
this.x = vec.x;
this.y = vec.y;
return this;
};
Vec2.prototype.set = function(x, y) {
this.x = x;
this.y = y;
return this;
};
return Vec2;
})();
/*
Exports
@package bkcore
*/
exports = exports != null ? exports : this;
exports.bkcore || (exports.bkcore = {});
(_base = exports.bkcore).controllers || (_base.controllers = {});
exports.bkcore.controllers.TouchController = TouchController;
}).call(this);
| qiuzhong/crosswalk-test-suite | misc/webappmanu-linux-tests/webapp/resources/org.webapps.hexgl/bkcore.coffee/controllers/TouchController.js | JavaScript | bsd-3-clause | 4,501 |
<?php
namespace Doctrine\Tests\ORM\Tools\Pagination;
use Doctrine\Tests\OrmTestCase;
abstract class PaginationTestCase extends OrmTestCase
{
/**
* @var \Doctrine\ORM\EntityManagerInterface
*/
public $entityManager;
public function setUp()
{
$this->entityManager = $this->_getTestEntityManager();
}
}
/**
* @Entity
*/
class MyBlogPost
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/**
* @ManyToOne(targetEntity="Author")
*/
public $author;
/**
* @ManyToOne(targetEntity="Category")
*/
public $category;
/** @column(type="string") */
public $title;
}
/**
* @Entity
*/
class MyAuthor
{
/** @Id @column(type="integer") @generatedValue */
public $id;
}
/**
* @Entity
*/
class MyCategory
{
/** @id @column(type="integer") @generatedValue */
public $id;
}
/**
* @Entity
*/
class BlogPost
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/**
* @ManyToOne(targetEntity="Author")
*/
public $author;
/**
* @ManyToOne(targetEntity="Category")
*/
public $category;
}
/**
* @Entity
*/
class Author
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/** @Column(type="string") */
public $name;
}
/**
* @Entity
*/
class Person
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/** @Column(type="string") */
public $name;
/** @Column(type="string") */
public $biography;
}
/**
* @Entity
*/
class Category
{
/** @id @column(type="integer") @generatedValue */
public $id;
}
/** @Entity @Table(name="groups") */
class Group
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/** @ManyToMany(targetEntity="User", mappedBy="groups") */
public $users;
}
/** @Entity */
class User
{
/** @Id @column(type="integer") @generatedValue */
public $id;
/**
* @ManyToMany(targetEntity="Group", inversedBy="users")
* @JoinTable(
* name="user_group",
* joinColumns = {@JoinColumn(name="user_id", referencedColumnName="id")},
* inverseJoinColumns = {@JoinColumn(name="group_id", referencedColumnName="id")}
* )
*/
public $groups;
}
| exclie/Imagenologia | vendor/doctrine/orm/tests/Doctrine/Tests/ORM/Tools/Pagination/PaginationTestCase.php | PHP | bsd-3-clause | 2,280 |
/**
* @file watchdog.h
*
* Interface to the system watchdog timer. This is a platform-dependent feature
* and may not be available on some platforms.
*/
/* Embedded Xinu, Copyright (C) 2013. All rights reserved. */
#ifndef _WATCHDOG_H_
#define _WATCHDOG_H_
#include <stddef.h>
syscall watchdogset(uint msecs);
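/*
 * Illustrative usage sketch (not from the Xinu sources): a periodic task can
 * re-arm the watchdog so the hardware resets the board only if that task
 * stops running. The 1000 ms timeout is an assumed example value.
 *     watchdogset(1000);    arm: reset occurs unless re-armed within 1000 ms
 *     ...do work and call watchdogset() again before the timeout expires...
 */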
#endif /* _WATCHDOG_H_ */
| davidxyz/xinuPi | include/watchdog.h | C | bsd-3-clause | 347 |
from load_macrodata import dta
import matplotlib.pyplot as plt
import statsmodels.api as sm
# Baxter-King band-pass filter: keep cyclical components with periods between
# 6 and 24 (quarters for this dataset), using K=12 leads/lags.
cycles = sm.tsa.filters.bkfilter(dta[['realinv']], 6, 24, 12)
# Plot the extracted cyclical component of real investment.
fig, ax = plt.subplots()
cycles.plot(ax=ax, style=['r--', 'b-'])
| statsmodels/statsmodels.github.io | v0.13.2/plots/bkf_plot.py | Python | bsd-3-clause | 222 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala
package collection
package generic
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
*/
trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
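// Illustrative note (not part of the original source): `withFilter` returns a
// `FilterMonadic`, which is what guarded for-comprehensions desugar to.
// The values below are an example only:
//   val xs = List(1, 2, 3, 4)
//   val ys = for (x <- xs if x % 2 == 0) yield x * 10   // List(20, 40)
//   val zs = xs.withFilter(_ % 2 == 0).map(_ * 10)      // equivalent to ys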
| felixmulder/scala | src/library/scala/collection/generic/FilterMonadic.scala | Scala | bsd-3-clause | 1,045 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/task/sequence_manager/sequence_manager.h"
namespace base {
namespace sequence_manager {
NativeWorkHandle::~NativeWorkHandle() = default;
SequenceManager::MetricRecordingSettings::MetricRecordingSettings(
double task_thread_time_sampling_rate)
: task_sampling_rate_for_recording_cpu_time(
base::ThreadTicks::IsSupported() ? task_thread_time_sampling_rate
: 0) {}
SequenceManager::Settings::Settings() = default;
SequenceManager::Settings::Settings(Settings&& move_from) noexcept = default;
SequenceManager::Settings::~Settings() = default;
SequenceManager::Settings::Builder::Builder() = default;
SequenceManager::Settings::Builder::~Builder() = default;
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetMessagePumpType(
MessagePumpType message_loop_type_val) {
settings_.message_loop_type = message_loop_type_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetRandomisedSamplingEnabled(
bool randomised_sampling_enabled_val) {
settings_.randomised_sampling_enabled = randomised_sampling_enabled_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetTickClock(const TickClock* clock_val) {
settings_.clock = clock_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetAddQueueTimeToTasks(
bool add_queue_time_to_tasks_val) {
settings_.add_queue_time_to_tasks = add_queue_time_to_tasks_val;
return *this;
}
#if DCHECK_IS_ON()
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetTaskLogging(
TaskLogging task_execution_logging_val) {
settings_.task_execution_logging = task_execution_logging_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetLogPostTask(bool log_post_task_val) {
settings_.log_post_task = log_post_task_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetLogTaskDelayExpiry(
bool log_task_delay_expiry_val) {
settings_.log_task_delay_expiry = log_task_delay_expiry_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetPerPriorityCrossThreadTaskDelay(
std::array<TimeDelta, TaskQueue::kQueuePriorityCount>
per_priority_cross_thread_task_delay_val) {
settings_.per_priority_cross_thread_task_delay =
per_priority_cross_thread_task_delay_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetPerPrioritySameThreadTaskDelay(
std::array<TimeDelta, TaskQueue::kQueuePriorityCount>
per_priority_same_thread_task_delay_val) {
settings_.per_priority_same_thread_task_delay =
per_priority_same_thread_task_delay_val;
return *this;
}
SequenceManager::Settings::Builder&
SequenceManager::Settings::Builder::SetRandomTaskSelectionSeed(
int random_task_selection_seed_val) {
settings_.random_task_selection_seed = random_task_selection_seed_val;
return *this;
}
#endif // DCHECK_IS_ON()
SequenceManager::Settings SequenceManager::Settings::Builder::Build() {
return std::move(settings_);
}
} // namespace sequence_manager
} // namespace base
| ric2b/Vivaldi-browser | chromium/base/task/sequence_manager/sequence_manager.cc | C++ | bsd-3-clause | 3,464 |
/****************************************************************************
* configs/nucleus2g/src/up_leds.c
* arch/arm/src/board/up_leds.c
*
* Copyright (C) 2010, 2013 Gregory Nutt. All rights reserved.
* Author: Gregory Nutt <[email protected]>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name NuttX nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <stdint.h>
#include <stdbool.h>
#include <debug.h>
#include <arch/board/board.h>
#include "chip.h"
#include "up_arch.h"
#include "up_internal.h"
#include "lpc17_gpio.h"
#include "nucleus2g_internal.h"
#ifdef CONFIG_ARCH_LEDS
/****************************************************************************
* Definitions
****************************************************************************/
/* CONFIG_DEBUG_LEDS enables debug output from this file (needs CONFIG_DEBUG
* with CONFIG_DEBUG_VERBOSE too)
*/
#ifdef CONFIG_DEBUG_LEDS
# define leddbg lldbg
# ifdef CONFIG_DEBUG_VERBOSE
# define ledvdbg lldbg
# else
# define ledvdbg(x...)
# endif
#else
# define leddbg(x...)
# define ledvdbg(x...)
#endif
/* Dump GPIO registers */
#if defined(CONFIG_DEBUG_VERBOSE) && defined(CONFIG_DEBUG_LEDS)
# define led_dumpgpio(m) lpc17_dumpgpio(NUCLEUS2G_LED1_A, m)
#else
# define led_dumpgpio(m)
#endif
/****************************************************************************
* Private Data
****************************************************************************/
/* The Nucleus2G has 3 LEDs... two on the Babel CAN board and a "heartbeat" LED.
 * The LEDs on the Babel CAN board are capable of OFF/GREEN/RED/AMBER status.
* In normal usage, the two LEDs on the Babel CAN board would show CAN status, but if
* CONFIG_ARCH_LEDS is defined, these LEDs will be controlled as follows for NuttX
* debug functionality (where NC means "No Change").
*
* LED1 LED2 HEARTBEAT
* +------- ------ -----------------------
* LED_STARTED | OFF OFF OFF
* LED_HEAPALLOCATE | GREEN OFF OFF
* LED_IRQSENABLED | OFF GREEN OFF
* LED_STACKCREATED | OFF OFF OFF
* LED_INIRQ | NC NC ON (momentary)
* LED_SIGNAL | NC NC ON (momentary)
* LED_ASSERTION | NC NC ON (momentary)
* LED_PANIC | NC NC ON (1Hz flashing)
*/
static bool g_initialized;
static int g_nestcount;
/****************************************************************************
* Private Functions
****************************************************************************/
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: up_ledinit
****************************************************************************/
void up_ledinit(void)
{
/* Configure all LED GPIO lines */
led_dumpgpio("up_ledinit() Entry)");
lpc17_configgpio(NUCLEUS2G_LED1_A);
lpc17_configgpio(NUCLEUS2G_LED1_B);
lpc17_configgpio(NUCLEUS2G_LED2_A);
lpc17_configgpio(NUCLEUS2G_LED2_B);
lpc17_configgpio(NUCLEUS2G_HEARTBEAT);
lpc17_configgpio(NUCLEUS2G_EXTRA_LED);
led_dumpgpio("up_ledinit() Exit");
}
/****************************************************************************
* Name: up_ledon
****************************************************************************/
void up_ledon(int led)
{
  /* We will control LED1 and LED2 if we have not yet completed the boot sequence. */
if (!g_initialized)
{
enum lpc17_ledstate_e led1 = LPC17_LEDSTATE_OFF;
enum lpc17_ledstate_e led2 = LPC17_LEDSTATE_OFF;
switch (led)
{
case LED_STACKCREATED:
g_initialized = true;
case LED_STARTED:
default:
break;
case LED_HEAPALLOCATE:
led1 = LPC17_LEDSTATE_GREEN;
break;
case LED_IRQSENABLED:
led2 = LPC17_LEDSTATE_GREEN;
}
lpc17_led1(led1);
lpc17_led2(led2);
}
/* We will always control the HB LED */
switch (led)
{
default:
break;
case LED_INIRQ:
case LED_SIGNAL:
case LED_ASSERTION:
case LED_PANIC:
lpc17_gpiowrite(NUCLEUS2G_HEARTBEAT, false);
g_nestcount++;
}
}
/****************************************************************************
* Name: up_ledoff
****************************************************************************/
void up_ledoff(int led)
{
/* In all states, OFF can only mean turning off the HB LED */
if (g_nestcount <= 1)
{
lpc17_gpiowrite(NUCLEUS2G_HEARTBEAT, true);
g_nestcount = 0;
}
else
{
g_nestcount--;
}
}
/************************************************************************************
* Name: lpc17_led1 and 2
*
* Description:
* Once the system has booted, these functions can be used to control LEDs 1 and 2
*
************************************************************************************/
void lpc17_led1(enum lpc17_ledstate_e state)
{
bool red = (((unsigned int)state & LPC17_LEDSTATE_RED) != 0);
bool green = (((unsigned int)state & LPC17_LEDSTATE_GREEN) != 0);
lpc17_gpiowrite(NUCLEUS2G_LED1_A, red);
lpc17_gpiowrite(NUCLEUS2G_LED1_B, green);
}
void lpc17_led2(enum lpc17_ledstate_e state)
{
bool red = (((unsigned int)state & LPC17_LEDSTATE_RED) != 0);
bool green = (((unsigned int)state & LPC17_LEDSTATE_GREEN) != 0);
lpc17_gpiowrite(NUCLEUS2G_LED2_A, red);
lpc17_gpiowrite(NUCLEUS2G_LED2_B, green);
}
#endif /* CONFIG_ARCH_LEDS */
| gcds/project_xxx | nuttx/configs/nucleus2g/src/up_leds.c | C | bsd-3-clause | 7,349 |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Material sourced from the bluePortal project (http://blueportal.codeplex.com).
// Licensed under the Microsoft Public License (available at http://www.opensource.org/licenses/ms-pl.html).
using System;
using System.Data;
using System.Configuration;
using System.IO;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
namespace CSSFriendly
{
public abstract class CompositeDataBoundControlAdapter : System.Web.UI.WebControls.Adapters.DataBoundControlAdapter
{
private WebControlAdapterExtender _extender = null;
private WebControlAdapterExtender Extender
{
get
{
if (((_extender == null) && (Control != null)) ||
((_extender != null) && (Control != _extender.AdaptedControl)))
{
_extender = new WebControlAdapterExtender(Control);
}
System.Diagnostics.Debug.Assert(_extender != null, "CSS Friendly adapters internal error", "Null extender instance");
return _extender;
}
}
protected string _classMain = "";
protected string _classHeader = "";
protected string _classData = "";
protected string _classFooter = "";
protected string _classPagination = "";
protected string _classOtherPage = "";
protected string _classActivePage = "";
protected CompositeDataBoundControl View
{
get { return Control as CompositeDataBoundControl; }
}
protected DetailsView ControlAsDetailsView
{
get { return Control as DetailsView; }
}
protected bool IsDetailsView
{
get { return ControlAsDetailsView != null; }
}
protected FormView ControlAsFormView
{
get { return Control as FormView; }
}
protected bool IsFormView
{
get { return ControlAsFormView != null; }
}
protected abstract string HeaderText { get; }
protected abstract string FooterText { get; }
protected abstract ITemplate HeaderTemplate { get; }
protected abstract ITemplate FooterTemplate { get; }
protected abstract TableRow HeaderRow { get; }
protected abstract TableRow FooterRow { get; }
protected abstract bool AllowPaging { get; }
protected abstract int DataItemCount { get; }
protected abstract int DataItemIndex { get; }
protected abstract PagerSettings PagerSettings { get; }
/// ///////////////////////////////////////////////////////////////////////////////
/// METHODS
protected override void OnInit(EventArgs e)
{
base.OnInit(e);
if (Extender.AdapterEnabled)
{
RegisterScripts();
}
}
protected override void RenderBeginTag(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
Extender.RenderBeginTag(writer, _classMain);
}
else
{
base.RenderBeginTag(writer);
}
}
protected override void RenderEndTag(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
Extender.RenderEndTag(writer);
}
else
{
base.RenderEndTag(writer);
}
}
protected override void RenderContents(HtmlTextWriter writer)
{
if (Extender.AdapterEnabled)
{
if (View != null)
{
writer.Indent++;
BuildRow(HeaderRow, _classHeader, writer);
BuildItem(writer);
BuildRow(FooterRow, _classFooter, writer);
BuildPaging(writer);
writer.Indent--;
writer.WriteLine();
}
}
else
{
base.RenderContents(writer);
}
}
protected virtual void BuildItem(HtmlTextWriter writer)
{
}
protected virtual void BuildRow(TableRow row, string cssClass, HtmlTextWriter writer)
{
if (row != null)
{
// If there isn't any content, don't render anything.
bool bHasContent = false;
TableCell cell = null;
for (int iCell = 0; iCell < row.Cells.Count; iCell++)
{
cell = row.Cells[iCell];
if ((!String.IsNullOrEmpty(cell.Text)) || (cell.Controls.Count > 0))
{
bHasContent = true;
break;
}
}
if (bHasContent)
{
writer.WriteLine();
writer.WriteBeginTag("div");
writer.WriteAttribute("class", cssClass);
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
writer.WriteLine();
for (int iCell = 0; iCell < row.Cells.Count; iCell++)
{
cell = row.Cells[iCell];
if (!String.IsNullOrEmpty(cell.Text))
{
writer.Write(cell.Text);
}
foreach (Control cellChildControl in cell.Controls)
{
cellChildControl.RenderControl(writer);
}
}
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("div");
}
}
}
protected virtual void BuildPaging(HtmlTextWriter writer)
{
if (AllowPaging && (DataItemCount > 0))
{
writer.WriteLine();
writer.WriteBeginTag("div");
writer.WriteAttribute("class", _classPagination);
writer.Write(HtmlTextWriter.TagRightChar);
writer.Indent++;
int iStart = 0;
int iEnd = DataItemCount;
int nPages = iEnd - iStart + 1;
bool bExceededPageButtonCount = nPages > PagerSettings.PageButtonCount;
if (bExceededPageButtonCount)
{
iStart = (DataItemIndex / PagerSettings.PageButtonCount) * PagerSettings.PageButtonCount;
iEnd = Math.Min(iStart + PagerSettings.PageButtonCount, DataItemCount);
}
writer.WriteLine();
if (bExceededPageButtonCount && (iStart > 0))
{
writer.WriteBeginTag("a");
writer.WriteAttribute("class", _classOtherPage);
writer.WriteAttribute("href", Page.ClientScript.GetPostBackClientHyperlink(Control, "Page$" + iStart.ToString(), true));
writer.Write(HtmlTextWriter.TagRightChar);
writer.Write("...");
writer.WriteEndTag("a");
}
for (int iDataItem = iStart; iDataItem < iEnd; iDataItem++)
{
string strPage = (iDataItem + 1).ToString();
if (DataItemIndex == iDataItem)
{
writer.WriteBeginTag("span");
writer.WriteAttribute("class", _classActivePage);
writer.Write(HtmlTextWriter.TagRightChar);
writer.Write(strPage);
writer.WriteEndTag("span");
}
else
{
writer.WriteBeginTag("a");
writer.WriteAttribute("class", _classOtherPage);
writer.WriteAttribute("href", Page.ClientScript.GetPostBackClientHyperlink(Control, "Page$" + strPage, true));
writer.Write(HtmlTextWriter.TagRightChar);
writer.Write(strPage);
writer.WriteEndTag("a");
}
}
if (bExceededPageButtonCount && (iEnd < DataItemCount))
{
writer.WriteBeginTag("a");
writer.WriteAttribute("class", _classOtherPage);
writer.WriteAttribute("href", Page.ClientScript.GetPostBackClientHyperlink(Control, "Page$" + (iEnd + 1).ToString(), true));
writer.Write(HtmlTextWriter.TagRightChar);
writer.Write("...");
writer.WriteEndTag("a");
}
writer.Indent--;
writer.WriteLine();
writer.WriteEndTag("div");
}
}
protected virtual void RegisterScripts()
{
}
}
}
| ExpertServices/Websitepanel | WebsitePanel/Sources/WebsitePanel.WebPortal/Code/Adapters/CompositeDataBoundControlAdapter.cs | C# | bsd-3-clause | 11,177 |
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// The |Feedback| object keeps track of each instance of user feedback in a map
// |misspellings_|. This is a map from uint32_t hashes to |Misspelling| objects.
//
// Each misspelling should be present in only one renderer process. The
// |Feedback| object keeps track of the misspelling-renderer relationship in the
// |renderers_| map of renderer process identifiers to a set of hashes.
//
// When the user adds a misspelling to their custom dictionary, all of the
// |Misspelling| objects with the same misspelled string are updated. The
// |Feedback| object facilitates efficient access to these misspellings through
// a |text_| map of misspelled strings to a set of hashes.
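//
// An illustrative call sequence (hypothetical values, not taken from real
// Chromium call sites; |misspelling| and |remaining_markers| are placeholders):
//   spellcheck::Feedback feedback(/*max_total_text_size=*/10 * 1024);
//   feedback.AddMisspelling(/*renderer_process_id=*/1, misspelling);
//   // Renderer 1 later reports which markers still exist in its documents:
//   feedback.FinalizeRemovedMisspellings(1, remaining_markers);
//   feedback.EraseFinalizedMisspellings(1);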
#include "chrome/browser/spellchecker/feedback.h"
#include <algorithm>
#include <iterator>
#include <limits>
#include "base/logging.h"
#include "base/stl_util.h"
namespace spellcheck {
Feedback::Feedback(size_t max_total_text_size)
: max_total_text_size_(max_total_text_size), total_text_size_(0) {
DCHECK_GE(max_total_text_size, 1024U);
}
Feedback::~Feedback() {}
Misspelling* Feedback::GetMisspelling(uint32_t hash) {
HashMisspellingMap::iterator misspelling_it = misspellings_.find(hash);
if (misspelling_it == misspellings_.end())
return nullptr;
return &misspelling_it->second;
}
void Feedback::FinalizeRemovedMisspellings(
int renderer_process_id,
const std::vector<uint32_t>& remaining_markers) {
RendererHashesMap::iterator renderer_it =
renderers_.find(renderer_process_id);
if (renderer_it == renderers_.end() || renderer_it->second.empty())
return;
HashCollection& renderer_hashes = renderer_it->second;
HashCollection remaining_hashes(remaining_markers.begin(),
remaining_markers.end());
std::vector<HashCollection::value_type> removed_hashes =
base::STLSetDifference<std::vector<HashCollection::value_type>>(
renderer_hashes, remaining_hashes);
for (auto hash : removed_hashes) {
HashMisspellingMap::iterator misspelling_it = misspellings_.find(hash);
if (misspelling_it != misspellings_.end() &&
!misspelling_it->second.action.IsFinal()) {
misspelling_it->second.action.Finalize();
}
}
}
bool Feedback::RendererHasMisspellings(int renderer_process_id) const {
RendererHashesMap::const_iterator renderer_it =
renderers_.find(renderer_process_id);
return renderer_it != renderers_.end() && !renderer_it->second.empty();
}
std::vector<Misspelling> Feedback::GetMisspellingsInRenderer(
int renderer_process_id) const {
std::vector<Misspelling> misspellings_in_renderer;
RendererHashesMap::const_iterator renderer_it =
renderers_.find(renderer_process_id);
if (renderer_it == renderers_.end() || renderer_it->second.empty())
return misspellings_in_renderer;
const HashCollection& renderer_hashes = renderer_it->second;
for (HashCollection::const_iterator hash_it = renderer_hashes.begin();
hash_it != renderer_hashes.end(); ++hash_it) {
HashMisspellingMap::const_iterator misspelling_it =
misspellings_.find(*hash_it);
if (misspelling_it != misspellings_.end())
misspellings_in_renderer.push_back(misspelling_it->second);
}
return misspellings_in_renderer;
}
void Feedback::EraseFinalizedMisspellings(int renderer_process_id) {
RendererHashesMap::iterator renderer_it =
renderers_.find(renderer_process_id);
if (renderer_it == renderers_.end())
return;
HashCollection& renderer_hashes = renderer_it->second;
for (HashCollection::const_iterator hash_it = renderer_hashes.begin();
hash_it != renderer_hashes.end();) {
HashMisspellingMap::iterator misspelling_it = misspellings_.find(*hash_it);
HashCollection::iterator erasable_hash_it = hash_it;
++hash_it;
if (misspelling_it == misspellings_.end())
continue;
const Misspelling& misspelling = misspelling_it->second;
if (!misspelling.action.IsFinal())
continue;
renderer_hashes.erase(erasable_hash_it);
text_[GetMisspelledString(misspelling)].erase(misspelling.hash);
size_t approximate_size = ApproximateSerializedSize(misspelling_it->second);
    // Prevent underflow.
if (total_text_size_ >= approximate_size)
total_text_size_ -= approximate_size;
else
total_text_size_ = 0;
misspellings_.erase(misspelling_it);
}
if (renderer_hashes.empty())
renderers_.erase(renderer_it);
}
bool Feedback::HasMisspelling(uint32_t hash) const {
return !!misspellings_.count(hash);
}
void Feedback::AddMisspelling(int renderer_process_id,
const Misspelling& misspelling) {
HashMisspellingMap::iterator misspelling_it =
misspellings_.find(misspelling.hash);
if (misspelling_it != misspellings_.end()) {
const Misspelling& existing_misspelling = misspelling_it->second;
text_[GetMisspelledString(existing_misspelling)].erase(misspelling.hash);
for (RendererHashesMap::iterator renderer_it = renderers_.begin();
renderer_it != renderers_.end();) {
HashCollection& renderer_hashes = renderer_it->second;
RendererHashesMap::iterator erasable_renderer_it = renderer_it;
++renderer_it;
renderer_hashes.erase(misspelling.hash);
if (renderer_hashes.empty())
renderers_.erase(erasable_renderer_it);
}
} else {
size_t approximate_size = ApproximateSerializedSize(misspelling);
// Prevent overflow.
if (total_text_size_ <=
std::numeric_limits<size_t>::max() - approximate_size) {
total_text_size_ += approximate_size;
}
if (total_text_size_ >= max_total_text_size_)
return;
}
misspellings_[misspelling.hash] = misspelling;
text_[GetMisspelledString(misspelling)].insert(misspelling.hash);
renderers_[renderer_process_id].insert(misspelling.hash);
}
bool Feedback::Empty() const {
return misspellings_.empty();
}
std::vector<int> Feedback::GetRendersWithMisspellings() const {
std::vector<int> renderers_with_misspellings;
for (const auto& renderer : renderers_) {
if (!renderer.second.empty())
renderers_with_misspellings.push_back(renderer.first);
}
return renderers_with_misspellings;
}
void Feedback::FinalizeAllMisspellings() {
for (auto& misspelling : misspellings_) {
if (!misspelling.second.action.IsFinal())
misspelling.second.action.Finalize();
}
}
std::vector<Misspelling> Feedback::GetAllMisspellings() const {
std::vector<Misspelling> all_misspellings;
for (const auto& misspelling : misspellings_)
all_misspellings.push_back(misspelling.second);
return all_misspellings;
}
void Feedback::Clear() {
total_text_size_ = 0;
misspellings_.clear();
text_.clear();
renderers_.clear();
}
const std::set<uint32_t>& Feedback::FindMisspellings(
const base::string16& misspelled_text) const {
const TextHashesMap::const_iterator text_it = text_.find(misspelled_text);
return text_it == text_.end() ? empty_hash_collection_ : text_it->second;
}
} // namespace spellcheck
| axinging/chromium-crosswalk | chrome/browser/spellchecker/feedback.cc | C++ | bsd-3-clause | 7,184 |
//
// Copyright 2015 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// DrawElementsTest:
// Tests for indexed draws.
//
#include "test_utils/ANGLETest.h"
using namespace angle;
namespace
{
class DrawElementsTest : public ANGLETest
{
protected:
DrawElementsTest() : mProgram(0u)
{
setWindowWidth(64);
setWindowHeight(64);
setConfigRedBits(8);
setConfigGreenBits(8);
}
~DrawElementsTest()
{
for (GLuint indexBuffer : mIndexBuffers)
{
if (indexBuffer != 0)
{
glDeleteBuffers(1, &indexBuffer);
}
}
for (GLuint vertexArray : mVertexArrays)
{
if (vertexArray != 0)
{
glDeleteVertexArrays(1, &vertexArray);
}
}
for (GLuint vertexBuffer : mVertexBuffers)
{
if (vertexBuffer != 0)
{
glDeleteBuffers(1, &vertexBuffer);
}
}
if (mProgram != 0u)
{
glDeleteProgram(mProgram);
}
}
std::vector<GLuint> mIndexBuffers;
std::vector<GLuint> mVertexArrays;
std::vector<GLuint> mVertexBuffers;
GLuint mProgram;
};
// Test a state desync that can occur when using a streaming index buffer in GL in concert with
// deleting the applied index buffer.
TEST_P(DrawElementsTest, DeletingAfterStreamingIndexes)
{
// Init program
const std::string &vertexShader =
"attribute vec2 position;\n"
"attribute vec2 testFlag;\n"
"varying vec2 v_data;\n"
"void main() {\n"
" gl_Position = vec4(position, 0, 1);\n"
" v_data = testFlag;\n"
"}";
const std::string &fragmentShader =
"varying highp vec2 v_data;\n"
"void main() {\n"
" gl_FragColor = vec4(v_data, 0, 1);\n"
"}";
mProgram = CompileProgram(vertexShader, fragmentShader);
ASSERT_NE(0u, mProgram);
glUseProgram(mProgram);
GLint positionLocation = glGetAttribLocation(mProgram, "position");
ASSERT_NE(-1, positionLocation);
GLint testFlagLocation = glGetAttribLocation(mProgram, "testFlag");
ASSERT_NE(-1, testFlagLocation);
mIndexBuffers.resize(3u);
glGenBuffers(3, &mIndexBuffers[0]);
mVertexArrays.resize(2);
glGenVertexArrays(2, &mVertexArrays[0]);
mVertexBuffers.resize(2);
glGenBuffers(2, &mVertexBuffers[0]);
std::vector<GLuint> indexData[2];
indexData[0].push_back(0);
indexData[0].push_back(1);
indexData[0].push_back(2);
indexData[0].push_back(2);
indexData[0].push_back(3);
indexData[0].push_back(0);
indexData[1] = indexData[0];
for (GLuint &item : indexData[1])
{
item += 4u;
}
std::vector<GLfloat> positionData;
// quad verts
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(-1.0f);
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(1.0f);
// Repeat position data
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(-1.0f);
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(-1.0f);
positionData.push_back(1.0f);
positionData.push_back(1.0f);
std::vector<GLfloat> testFlagData;
// red
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
// green
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
testFlagData.push_back(0.0f);
testFlagData.push_back(1.0f);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBuffers[0]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indexData[0].size(), &indexData[0][0],
GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBuffers[2]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indexData[0].size(), &indexData[0][0],
GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBuffers[1]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indexData[1].size(), &indexData[1][0],
GL_STATIC_DRAW);
// Initialize first vertex array with second index buffer
glBindVertexArray(mVertexArrays[0]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBuffers[1]);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBuffers[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * positionData.size(), &positionData[0],
GL_STATIC_DRAW);
glVertexAttribPointer(positionLocation, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, nullptr);
glEnableVertexAttribArray(positionLocation);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBuffers[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * testFlagData.size(), &testFlagData[0],
GL_STATIC_DRAW);
glVertexAttribPointer(testFlagLocation, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, nullptr);
glEnableVertexAttribArray(testFlagLocation);
// Initialize second vertex array with first index buffer
glBindVertexArray(mVertexArrays[1]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBuffers[0]);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBuffers[0]);
glVertexAttribPointer(positionLocation, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, nullptr);
glEnableVertexAttribArray(positionLocation);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBuffers[1]);
glVertexAttribPointer(testFlagLocation, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, nullptr);
glEnableVertexAttribArray(testFlagLocation);
ASSERT_GL_NO_ERROR();
glBindVertexArray(mVertexArrays[0]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
EXPECT_PIXEL_EQ(0, 0, 0, 255, 0, 255);
glBindVertexArray(mVertexArrays[1]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
EXPECT_PIXEL_EQ(0, 0, 255, 0, 0, 255);
glBindVertexArray(mVertexArrays[0]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
EXPECT_PIXEL_EQ(0, 0, 0, 255, 0, 255);
// Trigger the bug here.
glDeleteBuffers(1, &mIndexBuffers[2]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
EXPECT_PIXEL_EQ(0, 0, 0, 255, 0, 255);
ASSERT_GL_NO_ERROR();
}
ANGLE_INSTANTIATE_TEST(DrawElementsTest, ES3_OPENGL());
}
| crezefire/angle | src/tests/gl_tests/DrawElementsTest.cpp | C++ | bsd-3-clause | 6,841 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview Test suite for chrome://firmware-update.
* To run all tests in a single instance (default, faster):
* `browser_tests --gtest_filter=FirmwareUpdateApp*`
*
* To run each test in a new instance:
* `browser_tests --run-manual \
* --gtest_filter=FirmwareUpdateAppBrowserTest.MANUAL_*`
*
* To run a single test suite, such as 'FirmwareUpdateApp':
* `browser_tests --run-manual --gtest_filter= \
* FirmwareUpdateAppBrowserTest.MANUAL_FirmwareUpdateApp`
*/
GEN_INCLUDE(['//chrome/test/data/webui/polymer_browser_test_base.js']);
GEN('#include "ash/constants/ash_features.h"');
GEN('#include "content/public/test/browser_test.h"');
/**
* @constructor
* @extends {PolymerTest}
*/
function FirmwareUpdateAppBrowserTest() {}
FirmwareUpdateAppBrowserTest.prototype = {
__proto__: PolymerTest.prototype,
browsePreload: 'chrome://accessory-update/test_loader.html' +
'?module=chromeos/firmware_update/' +
'firmware_update_unified_test.js',
featureList: {enabled: ['ash::features::kFirmwareUpdaterApp']},
};
// List of names of suites in the unified test to register for individual debugging.
// You must register all suites in the unified test here as well for consistency,
// although technically it is not necessary.
const debug_suites_list = [
'FakeUpdateControllerTest',
'FakeUpdateProviderTest',
'FirmwareUpdateApp',
'FirmwareUpdateDialog',
'PeripheralUpdatesListTest',
'UpdateCardTest',
];
TEST_F('FirmwareUpdateAppBrowserTest', 'All', function() {
assertDeepEquals(
debug_suites_list, test_suites_list,
'List of registered tests suites and debug suites do not match.\n' +
'Did you forget to add your test in debug_suites_list?');
mocha.run();
});
// Register each suite listed as individual tests for debugging purposes.
for (const suiteName of debug_suites_list) {
TEST_F('FirmwareUpdateAppBrowserTest', `MANUAL_${suiteName}`, function() {
runMochaSuite(suiteName);
});
}
| ric2b/Vivaldi-browser | chromium/chrome/test/data/webui/chromeos/firmware_update/firmware_update_browsertest.js | JavaScript | bsd-3-clause | 2,137 |
"use strict";
// Use the fastest means possible to execute a task in its own turn, with
// priority over other events including IO, animation, reflow, and redraw
// events in browsers.
//
// An exception thrown by a task will permanently interrupt the processing of
// subsequent tasks. The higher level `asap` function ensures that if an
// exception is thrown by a task, the task queue will continue flushing as
// soon as possible, but if you use `rawAsap` directly, you are responsible
// for either ensuring that no exceptions are thrown from your task, or for
// manually calling `rawAsap.requestFlush` if an exception is thrown.
module.exports = rawAsap;
function rawAsap(task) {
if (!queue.length) {
requestFlush();
flushing = true;
}
// Equivalent to push, but avoids a function call.
queue[queue.length] = task;
}
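// A minimal usage sketch (illustrative, not part of the library; the names
// `work` and `cleanup` are hypothetical). Because `rawAsap` does not guard
// against exceptions, a caller that cannot guarantee exception-free tasks
// should request another flush itself, as the note above describes:
//
//     rawAsap(function work() {
//         try {
//             cleanup();
//         } catch (error) {
//             rawAsap.requestFlush(); // keep the queue draining after the throw
//             throw error;
//         }
//     });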
var queue = [];
// Once a flush has been requested, no further calls to `requestFlush` are
// necessary until the next `flush` completes.
var flushing = false;
// `requestFlush` is an implementation-specific method that attempts to kick
// off a `flush` event as quickly as possible. `flush` will attempt to exhaust
// the event queue before yielding to the browser's own event loop.
var requestFlush;
// The position of the next task to execute in the task queue. This is
// preserved between calls to `flush` so that it can be resumed if
// a task throws an exception.
var index = 0;
// If a task schedules additional tasks recursively, the task queue can grow
// unbounded. To prevent memory exhaustion, the task queue will periodically
// truncate already-completed tasks.
var capacity = 1024;
// The flush function processes all tasks that have been scheduled with
// `rawAsap` unless and until one of those tasks throws an exception.
// If a task throws an exception, `flush` ensures that its state will remain
// consistent and will resume where it left off when called again.
// However, `flush` does not make any arrangements to be called again if an
// exception is thrown.
function flush() {
while (index < queue.length) {
var currentIndex = index;
// Advance the index before calling the task. This ensures that we will
        // begin flushing on the next task if the task throws an error.
index = index + 1;
queue[currentIndex].call();
// Prevent leaking memory for long chains of recursive calls to `asap`.
// If we call `asap` within tasks scheduled by `asap`, the queue will
// grow, but to avoid an O(n) walk for every task we execute, we don't
// shift tasks off the queue after they have been executed.
// Instead, we periodically shift 1024 tasks off the queue.
if (index > capacity) {
// Manually shift all values starting at the index back to the
// beginning of the queue.
for (var scan = 0, newLength = queue.length - index; scan < newLength; scan++) {
queue[scan] = queue[scan + index];
}
queue.length -= index;
index = 0;
}
}
queue.length = 0;
index = 0;
flushing = false;
}
// `requestFlush` is implemented using a strategy based on data collected from
// every available SauceLabs Selenium web driver worker at time of writing.
// https://docs.google.com/spreadsheets/d/1mG-5UYGup5qxGdEMWkhP6BWCz053NUb2E1QoUTU16uA/edit#gid=783724593
// Safari 6 and 6.1 for desktop, iPad, and iPhone are the only browsers that
// have WebKitMutationObserver but not un-prefixed MutationObserver.
// Must use `global` or `self` instead of `window` to work in both frames and web
// workers. `global` is a provision of Browserify, Mr, Mrs, or Mop.
/* globals self */
var scope = typeof global !== "undefined" ? global : self;
var BrowserMutationObserver = scope.MutationObserver || scope.WebKitMutationObserver;
// MutationObservers are desirable because they have high priority and work
// reliably everywhere they are implemented.
// They are implemented in all modern browsers.
//
// - Android 4-4.3
// - Chrome 26-34
// - Firefox 14-29
// - Internet Explorer 11
// - iPad Safari 6-7.1
// - iPhone Safari 7-7.1
// - Safari 6-7
if (typeof BrowserMutationObserver === "function") {
requestFlush = makeRequestCallFromMutationObserver(flush);
// MessageChannels are desirable because they give direct access to the HTML
// task queue, are implemented in Internet Explorer 10, Safari 5.0-1, and Opera
// 11-12, and in web workers in many engines.
// Although message channels yield to any queued rendering and IO tasks, they
// would be better than imposing the 4ms delay of timers.
// However, they do not work reliably in Internet Explorer or Safari.
// Internet Explorer 10 is the only browser that has setImmediate but does
// not have MutationObservers.
// Although setImmediate yields to the browser's renderer, it would be
// preferable to falling back to setTimeout since it does not have
// the minimum 4ms penalty.
// Unfortunately there appears to be a bug in Internet Explorer 10 Mobile (and
// Desktop to a lesser extent) that renders both setImmediate and
// MessageChannel useless for the purposes of ASAP.
// https://github.com/kriskowal/q/issues/396
// Timers are implemented universally.
// We fall back to timers in workers in most engines, and in foreground
// contexts in the following browsers.
// However, note that even this simple case requires nuances to operate in a
// broad spectrum of browsers.
//
// - Firefox 3-13
// - Internet Explorer 6-9
// - iPad Safari 4.3
// - Lynx 2.8.7
} else {
requestFlush = makeRequestCallFromTimer(flush);
}
// `requestFlush` requests that the high priority event queue be flushed as
// soon as possible.
// This is useful to prevent an error thrown in a task from stalling the event
// queue if the exception is handled by Node.js’s
// `process.on("uncaughtException")` or by a domain.
rawAsap.requestFlush = requestFlush;
// To request a high priority event, we induce a mutation observer by toggling
// the text of a text node between "1" and "-1".
function makeRequestCallFromMutationObserver(callback) {
var toggle = 1;
var observer = new BrowserMutationObserver(callback);
var node = document.createTextNode("");
observer.observe(node, {characterData: true});
return function requestCall() {
toggle = -toggle;
node.data = toggle;
};
}
// The message channel technique was discovered by Malte Ubl and was the
// original foundation for this library.
// http://www.nonblocking.io/2011/06/windownexttick.html
// Safari 6.0.5 (at least) intermittently fails to create message ports on a
// page's first load. Thankfully, this version of Safari supports
// MutationObservers, so we don't need to fall back in that case.
// function makeRequestCallFromMessageChannel(callback) {
// var channel = new MessageChannel();
// channel.port1.onmessage = callback;
// return function requestCall() {
// channel.port2.postMessage(0);
// };
// }
// For reasons explained above, we are also unable to use `setImmediate`
// under any circumstances.
// Even if we were, there is another bug in Internet Explorer 10.
// It is not sufficient to assign `setImmediate` to `requestFlush` because
// `setImmediate` must be called *by name* and therefore must be wrapped in a
// closure.
// Never forget.
// function makeRequestCallFromSetImmediate(callback) {
// return function requestCall() {
// setImmediate(callback);
// };
// }
// Safari 6.0 has a problem where timers will get lost while the user is
// scrolling. This problem does not impact ASAP because Safari 6.0 supports
// mutation observers, so that implementation is used instead.
// However, if we ever elect to use timers in Safari, the prevalent work-around
// is to add a scroll event listener that calls for a flush.
// `setTimeout` does not call the passed callback if the delay is less than
// approximately 7 milliseconds in web workers in Firefox 8 through 18, and sometimes not
// even then.
function makeRequestCallFromTimer(callback) {
return function requestCall() {
// We dispatch a timeout with a specified delay of 0 for engines that
// can reliably accommodate that request. This will usually be snapped
        // to a 4 millisecond delay, but once we're flushing, there's no delay
// between events.
var timeoutHandle = setTimeout(handleTimer, 0);
// However, since this timer gets frequently dropped in Firefox
// workers, we enlist an interval handle that will try to fire
// an event 20 times per second until it succeeds.
var intervalHandle = setInterval(handleTimer, 50);
function handleTimer() {
// Whichever timer succeeds will cancel both timers and
// execute the callback.
clearTimeout(timeoutHandle);
clearInterval(intervalHandle);
callback();
}
};
}
// This is for `asap.js` only.
// Its name will be periodically randomized to break any code that depends on
// its existence.
rawAsap.makeRequestCallFromTimer = makeRequestCallFromTimer;
// ASAP was originally a nextTick shim included in Q. This was factored out
// into this ASAP package. It was later adapted to RSVP which made further
// amendments. These decisions, particularly to marginalize MessageChannel and
// to capture the MutationObserver implementation in a closure, were integrated
// back into ASAP proper.
// https://github.com/tildeio/rsvp.js/blob/cddf7232546a9cf858524b75cde6f9edf72620a7/lib/rsvp/asap.js
| ChrisChenSZ/code | 表单注册验证/node_modules/asap/browser-raw.js | JavaScript | apache-2.0 | 9,614 |
var CenteredSeriesMixin = Highcharts.CenteredSeriesMixin = {
/**
* Get the center of the pie based on the size and center options relative to the
* plot area. Borrowed by the polar and gauge series types.
*/
getCenter: function () {
var options = this.options,
chart = this.chart,
slicingRoom = 2 * (options.slicedOffset || 0),
handleSlicingRoom,
plotWidth = chart.plotWidth - 2 * slicingRoom,
plotHeight = chart.plotHeight - 2 * slicingRoom,
centerOption = options.center,
positions = [pick(centerOption[0], '50%'), pick(centerOption[1], '50%'), options.size || '100%', options.innerSize || 0],
smallestSize = mathMin(plotWidth, plotHeight),
isPercent;
return map(positions, function (length, i) {
isPercent = /%$/.test(length);
handleSlicingRoom = i < 2 || (i === 2 && isPercent);
return (isPercent ?
// i == 0: centerX, relative to width
// i == 1: centerY, relative to height
// i == 2: size, relative to smallestSize
                // i == 3: innerSize, relative to smallestSize
[plotWidth, plotHeight, smallestSize, smallestSize][i] *
pInt(length) / 100 :
length) + (handleSlicingRoom ? slicingRoom : 0);
});
}
};
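// Illustrative sketch (not part of the source): for a hypothetical series whose
// chart has plotWidth 400 and plotHeight 300, and whose options are
// { center: ['50%', '50%'], size: '100%', innerSize: 0 } with no slicedOffset,
// CenteredSeriesMixin.getCenter.call(series) resolves to [200, 150, 300, 0],
// i.e. centerX, centerY, size (capped by the smaller plot dimension) and innerSize.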
| Ecodev/gims | htdocs/lib/highcharts.com/js/parts/CenteredSeriesMixin.js | JavaScript | mit | 1,195 |
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.action_mailer.default_url_options = { host: 'localhost:3000' }
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
| aiw-group8-5c11/finalproject_astronomy | config/environments/development.rb | Ruby | mit | 1,495 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from .operations.paths_operations import PathsOperations
from . import models
class AutoRestParameterizedHostTestClientConfiguration(Configuration):
"""Configuration for AutoRestParameterizedHostTestClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param host: A string value that is used as a global part of the
parameterized host
:type host: str
:param str filepath: Existing config
"""
def __init__(
self, host, filepath=None):
if host is None:
raise ValueError("Parameter 'host' must not be None.")
if not isinstance(host, str):
raise TypeError("Parameter 'host' must be str.")
base_url = 'http://{accountName}{host}'
super(AutoRestParameterizedHostTestClientConfiguration, self).__init__(base_url, filepath)
self.add_user_agent('autorestparameterizedhosttestclient/{}'.format(VERSION))
self.host = host
class AutoRestParameterizedHostTestClient(object):
"""Test Infrastructure for AutoRest
:ivar config: Configuration for client.
:vartype config: AutoRestParameterizedHostTestClientConfiguration
:ivar paths: Paths operations
:vartype paths: .operations.PathsOperations
:param host: A string value that is used as a global part of the
parameterized host
:type host: str
:param str filepath: Existing config
"""
def __init__(
self, host, filepath=None):
self.config = AutoRestParameterizedHostTestClientConfiguration(host, filepath)
self._client = ServiceClient(None, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.paths = PathsOperations(
self._client, self.config, self._serialize, self._deserialize)
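# Example usage (a minimal sketch, not generated code; the host value and the
# 'get_empty' operation name are assumptions for illustration only):
#
#     client = AutoRestParameterizedHostTestClient('host:3000')
#     client.paths.get_empty('local')   # resolves against http://localhost:3000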
| garimakhulbe/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/CustomBaseUri/autorestparameterizedhosttestclient/auto_rest_parameterized_host_test_client.py | Python | mit | 2,563 |
////////////////////////////////////////////////////////////
//
// SFML - Simple and Fast Multimedia Library
// Copyright (C) 2007-2017 Laurent Gomila ([email protected])
//
// This software is provided 'as-is', without any express or implied warranty.
// In no event will the authors be held liable for any damages arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it freely,
// subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented;
// you must not claim that you wrote the original software.
// If you use this software in a product, an acknowledgment
// in the product documentation would be appreciated but is not required.
//
// 2. Altered source versions must be plainly marked as such,
// and must not be misrepresented as being the original software.
//
// 3. This notice may not be removed or altered from any source distribution.
//
////////////////////////////////////////////////////////////
#ifndef SFML_THREADIMPL_HPP
#define SFML_THREADIMPL_HPP
////////////////////////////////////////////////////////////
// Headers
////////////////////////////////////////////////////////////
#include <SFML/Config.hpp>
#include <SFML/System/NonCopyable.hpp>
#include <pthread.h>
namespace sf
{
class Thread;
namespace priv
{
////////////////////////////////////////////////////////////
/// \brief Unix implementation of threads
////////////////////////////////////////////////////////////
class ThreadImpl : NonCopyable
{
public:
////////////////////////////////////////////////////////////
    /// \brief Constructor, launches the thread
///
/// \param owner The Thread instance to run
///
////////////////////////////////////////////////////////////
ThreadImpl(Thread* owner);
////////////////////////////////////////////////////////////
/// \brief Wait until the thread finishes
///
////////////////////////////////////////////////////////////
void wait();
////////////////////////////////////////////////////////////
/// \brief Terminate the thread
///
////////////////////////////////////////////////////////////
void terminate();
private:
////////////////////////////////////////////////////////////
/// \brief Global entry point for all threads
///
/// \param userData User-defined data (contains the Thread instance)
///
    /// \return OS-specific error code
///
////////////////////////////////////////////////////////////
static void* entryPoint(void* userData);
////////////////////////////////////////////////////////////
// Member data
////////////////////////////////////////////////////////////
pthread_t m_thread; ///< pthread thread instance
bool m_isActive; ///< Thread state (active or inactive)
};
} // namespace priv
} // namespace sf
#endif // SFML_THREADIMPL_HPP
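// Usage sketch (illustrative only, not part of SFML): the public sf::Thread
// class owns a ThreadImpl and drives it roughly like this:
//
//     m_impl = new priv::ThreadImpl(this); // launch(): starts the pthread
//     m_impl->wait();                      // wait(): joins the pthread
//     m_impl->terminate();                 // terminate(): cancels the pthread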
| pulpobot/C-SFML-html5-animation | shared-deps/SFML-2.4.2/src/SFML/System/Unix/ThreadImpl.hpp | C++ | mit | 3,033 |
#pragma once
#include <vector>
#include <string>
#include <QStringList>
namespace propertyguizeug
{
namespace util
{
QStringList toQStringList(const std::vector<std::string> & list);
} // namespace util
} // namespace propertyguizeug
| j-o/libzeug | source/propertyguizeug/source/util.h | C | mit | 241 |
/*!
* Angular Material Design
* https://github.com/angular/material
* @license MIT
* v1.0.8
*/
(function( window, angular, undefined ){
"use strict";
/**
* @ngdoc module
* @name material.components.dialog
*/
angular
.module('material.components.dialog', [
'material.core',
'material.components.backdrop'
])
.directive('mdDialog', MdDialogDirective)
.provider('$mdDialog', MdDialogProvider);
/**
* @ngdoc directive
* @name mdDialog
* @module material.components.dialog
*
* @restrict E
*
* @description
* `<md-dialog>` - The dialog's template must be inside this element.
*
* Inside, use an `<md-dialog-content>` element for the dialog's content, and use
* an `<md-dialog-actions>` element for the dialog's actions.
*
* ## CSS
 * - `.md-dialog-content` - class that sets the padding on the content as per the spec
*
* ## Notes
* - If you specify an `id` for the `<md-dialog>`, the `<md-dialog-content>` will have the same `id`
* prefixed with `dialogContent_`.
*
* @usage
* ### Dialog template
* <hljs lang="html">
* <md-dialog aria-label="List dialog">
* <md-dialog-content>
* <md-list>
* <md-list-item ng-repeat="item in items">
* <p>Number {{item}}</p>
* </md-list-item>
* </md-list>
* </md-dialog-content>
* <md-dialog-actions>
* <md-button ng-click="closeDialog()" class="md-primary">Close Dialog</md-button>
* </md-dialog-actions>
* </md-dialog>
* </hljs>
*/
function MdDialogDirective($$rAF, $mdTheming, $mdDialog) {
return {
restrict: 'E',
link: function(scope, element, attr) {
$mdTheming(element);
$$rAF(function() {
var images;
var content = element[0].querySelector('md-dialog-content');
if (content) {
images = content.getElementsByTagName('img');
addOverflowClass();
//-- delayed image loading may impact scroll height, check after images are loaded
angular.element(images).on('load', addOverflowClass);
}
scope.$on('$destroy', function() {
$mdDialog.destroy(element);
});
/**
*
*/
function addOverflowClass() {
element.toggleClass('md-content-overflow', content.scrollHeight > content.clientHeight);
}
});
}
};
}
MdDialogDirective.$inject = ["$$rAF", "$mdTheming", "$mdDialog"];
/**
* @ngdoc service
* @name $mdDialog
* @module material.components.dialog
*
* @description
* `$mdDialog` opens a dialog over the app to inform users about critical information or require
* them to make decisions. There are two approaches for setup: a simple promise API
* and regular object syntax.
*
* ## Restrictions
*
* - The dialog is always given an isolate scope.
* - The dialog's template must have an outer `<md-dialog>` element.
* Inside, use an `<md-dialog-content>` element for the dialog's content, and use
* an `<md-dialog-actions>` element for the dialog's actions.
* - Dialogs must cover the entire application to keep interactions inside of them.
* Use the `parent` option to change where dialogs are appended.
*
* ## Sizing
* - Complex dialogs can be sized with `flex="percentage"`, i.e. `flex="66"`.
* - Default max-width is 80% of the `rootElement` or `parent`.
*
* ## CSS
 * - `.md-dialog-content` - class that sets the padding on the content as per the spec
*
* @usage
* <hljs lang="html">
* <div ng-app="demoApp" ng-controller="EmployeeController">
* <div>
* <md-button ng-click="showAlert()" class="md-raised md-warn">
* Employee Alert!
* </md-button>
* </div>
* <div>
* <md-button ng-click="showDialog($event)" class="md-raised">
* Custom Dialog
* </md-button>
* </div>
* <div>
* <md-button ng-click="closeAlert()" ng-disabled="!hasAlert()" class="md-raised">
* Close Alert
* </md-button>
* </div>
* <div>
* <md-button ng-click="showGreeting($event)" class="md-raised md-primary" >
* Greet Employee
* </md-button>
* </div>
* </div>
* </hljs>
*
* ### JavaScript: object syntax
* <hljs lang="js">
* (function(angular, undefined){
* "use strict";
*
* angular
* .module('demoApp', ['ngMaterial'])
* .controller('AppCtrl', AppController);
*
* function AppController($scope, $mdDialog) {
* var alert;
* $scope.showAlert = showAlert;
* $scope.showDialog = showDialog;
* $scope.items = [1, 2, 3];
*
* // Internal method
* function showAlert() {
* alert = $mdDialog.alert({
* title: 'Attention',
* textContent: 'This is an example of how easy dialogs can be!',
* ok: 'Close'
* });
*
* $mdDialog
* .show( alert )
* .finally(function() {
* alert = undefined;
* });
* }
*
* function showDialog($event) {
* var parentEl = angular.element(document.body);
* $mdDialog.show({
* parent: parentEl,
* targetEvent: $event,
* template:
* '<md-dialog aria-label="List dialog">' +
* ' <md-dialog-content>'+
* ' <md-list>'+
* ' <md-list-item ng-repeat="item in items">'+
* ' <p>Number {{item}}</p>' +
 *          '      </md-list-item>'+
* ' </md-list>'+
* ' </md-dialog-content>' +
* ' <md-dialog-actions>' +
* ' <md-button ng-click="closeDialog()" class="md-primary">' +
* ' Close Dialog' +
* ' </md-button>' +
* ' </md-dialog-actions>' +
* '</md-dialog>',
* locals: {
* items: $scope.items
* },
* controller: DialogController
* });
* function DialogController($scope, $mdDialog, items) {
* $scope.items = items;
* $scope.closeDialog = function() {
* $mdDialog.hide();
* }
* }
* }
* }
* })(angular);
* </hljs>
*
* ### JavaScript: promise API syntax, custom dialog template
* <hljs lang="js">
* (function(angular, undefined){
* "use strict";
*
* angular
* .module('demoApp', ['ngMaterial'])
* .controller('EmployeeController', EmployeeEditor)
* .controller('GreetingController', GreetingController);
*
* // Fictitious Employee Editor to show how to use simple and complex dialogs.
*
* function EmployeeEditor($scope, $mdDialog) {
* var alert;
*
* $scope.showAlert = showAlert;
* $scope.closeAlert = closeAlert;
* $scope.showGreeting = showCustomGreeting;
*
* $scope.hasAlert = function() { return !!alert };
* $scope.userName = $scope.userName || 'Bobby';
*
* // Dialog #1 - Show simple alert dialog and cache
* // reference to dialog instance
*
* function showAlert() {
* alert = $mdDialog.alert()
* .title('Attention, ' + $scope.userName)
* .textContent('This is an example of how easy dialogs can be!')
* .ok('Close');
*
* $mdDialog
* .show( alert )
* .finally(function() {
* alert = undefined;
* });
* }
*
* // Close the specified dialog instance and resolve with 'finished' flag
* // Normally this is not needed, just use '$mdDialog.hide()' to close
* // the most recent dialog popup.
*
* function closeAlert() {
* $mdDialog.hide( alert, "finished" );
* alert = undefined;
* }
*
* // Dialog #2 - Demonstrate more complex dialogs construction and popup.
*
* function showCustomGreeting($event) {
* $mdDialog.show({
* targetEvent: $event,
* template:
* '<md-dialog>' +
*
* ' <md-dialog-content>Hello {{ employee }}!</md-dialog-content>' +
*
* ' <md-dialog-actions>' +
* ' <md-button ng-click="closeDialog()" class="md-primary">' +
* ' Close Greeting' +
* ' </md-button>' +
* ' </md-dialog-actions>' +
* '</md-dialog>',
* controller: 'GreetingController',
* onComplete: afterShowAnimation,
* locals: { employee: $scope.userName }
* });
*
* // When the 'enter' animation finishes...
*
* function afterShowAnimation(scope, element, options) {
* // post-show code here: DOM element focus, etc.
* }
* }
*
* // Dialog #3 - Demonstrate use of ControllerAs and passing $scope to dialog
* // Here we used ng-controller="GreetingController as vm" and
* // $scope.vm === <controller instance>
*
* function showCustomGreeting() {
*
* $mdDialog.show({
* clickOutsideToClose: true,
*
* scope: $scope, // use parent scope in template
* preserveScope: true, // do not forget this if use parent scope
* // Since GreetingController is instantiated with ControllerAs syntax
* // AND we are passing the parent '$scope' to the dialog, we MUST
* // use 'vm.<xxx>' in the template markup
*
* template: '<md-dialog>' +
* ' <md-dialog-content>' +
* ' Hi There {{vm.employee}}' +
* ' </md-dialog-content>' +
* '</md-dialog>',
*
* controller: function DialogController($scope, $mdDialog) {
* $scope.closeDialog = function() {
* $mdDialog.hide();
* }
* }
* });
* }
*
* }
*
* // Greeting controller used with the more complex 'showCustomGreeting()' custom dialog
*
* function GreetingController($scope, $mdDialog, employee) {
* // Assigned from construction <code>locals</code> options...
* $scope.employee = employee;
*
* $scope.closeDialog = function() {
* // Easily hides most recent dialog shown...
* // no specific instance reference is needed.
* $mdDialog.hide();
* };
* }
*
* })(angular);
* </hljs>
*/
/**
* @ngdoc method
* @name $mdDialog#alert
*
* @description
* Builds a preconfigured dialog with the specified message.
*
* @returns {obj} an `$mdDialogPreset` with the chainable configuration methods:
*
* - $mdDialogPreset#title(string) - Sets the alert title.
* - $mdDialogPreset#textContent(string) - Sets the alert message.
* - $mdDialogPreset#htmlContent(string) - Sets the alert message as HTML. Requires ngSanitize
* module to be loaded. HTML is not run through Angular's compiler.
* - $mdDialogPreset#ok(string) - Sets the alert "Okay" button text.
* - $mdDialogPreset#theme(string) - Sets the theme of the alert dialog.
* - $mdDialogPreset#targetEvent(DOMClickEvent=) - A click's event object. When passed in as an option,
* the location of the click will be used as the starting point for the opening animation
 *     of the dialog.
*
*/
/**
* @ngdoc method
* @name $mdDialog#confirm
*
* @description
* Builds a preconfigured dialog with the specified message. You can call show and the promise returned
* will be resolved only if the user clicks the confirm action on the dialog.
*
* @returns {obj} an `$mdDialogPreset` with the chainable configuration methods:
*
* Additionally, it supports the following methods:
*
* - $mdDialogPreset#title(string) - Sets the confirm title.
* - $mdDialogPreset#textContent(string) - Sets the confirm message.
* - $mdDialogPreset#htmlContent(string) - Sets the confirm message as HTML. Requires ngSanitize
* module to be loaded. HTML is not run through Angular's compiler.
* - $mdDialogPreset#ok(string) - Sets the confirm "Okay" button text.
* - $mdDialogPreset#cancel(string) - Sets the confirm "Cancel" button text.
* - $mdDialogPreset#theme(string) - Sets the theme of the confirm dialog.
* - $mdDialogPreset#targetEvent(DOMClickEvent=) - A click's event object. When passed in as an option,
* the location of the click will be used as the starting point for the opening animation
 *     of the dialog.
*
*/
/**
* @ngdoc method
* @name $mdDialog#prompt
*
* @description
* Builds a preconfigured dialog with the specified message and input box. You can call show and the promise returned
* will be resolved only if the user clicks the prompt action on the dialog, passing the input value as the first argument.
*
* @returns {obj} an `$mdDialogPreset` with the chainable configuration methods:
*
* Additionally, it supports the following methods:
*
* - $mdDialogPreset#title(string) - Sets the prompt title.
* - $mdDialogPreset#textContent(string) - Sets the prompt message.
* - $mdDialogPreset#htmlContent(string) - Sets the prompt message as HTML. Requires ngSanitize
* module to be loaded. HTML is not run through Angular's compiler.
* - $mdDialogPreset#placeholder(string) - Sets the placeholder text for the input.
* - $mdDialogPreset#ok(string) - Sets the prompt "Okay" button text.
* - $mdDialogPreset#cancel(string) - Sets the prompt "Cancel" button text.
* - $mdDialogPreset#theme(string) - Sets the theme of the prompt dialog.
* - $mdDialogPreset#targetEvent(DOMClickEvent=) - A click's event object. When passed in as an option,
* the location of the click will be used as the starting point for the opening animation
 *     of the dialog.
*
*/
/**
* @ngdoc method
* @name $mdDialog#show
*
* @description
* Show a dialog with the specified options.
*
* @param {object} optionsOrPreset Either provide an `$mdDialogPreset` returned from `alert()`, and
* `confirm()`, or an options object with the following properties:
* - `templateUrl` - `{string=}`: The url of a template that will be used as the content
* of the dialog.
* - `template` - `{string=}`: HTML template to show in the dialog. This **must** be trusted HTML
* with respect to Angular's [$sce service](https://docs.angularjs.org/api/ng/service/$sce).
* This template should **never** be constructed with any kind of user input or user data.
* - `autoWrap` - `{boolean=}`: Whether or not to automatically wrap the template with a
* `<md-dialog>` tag if one is not provided. Defaults to true. Can be disabled if you provide a
* custom dialog directive.
* - `targetEvent` - `{DOMClickEvent=}`: A click's event object. When passed in as an option,
* the location of the click will be used as the starting point for the opening animation
 *     of the dialog.
* - `openFrom` - `{string|Element|object}`: The query selector, DOM element or the Rect object
* that is used to determine the bounds (top, left, height, width) from which the Dialog will
* originate.
* - `closeTo` - `{string|Element|object}`: The query selector, DOM element or the Rect object
* that is used to determine the bounds (top, left, height, width) to which the Dialog will
* target.
* - `scope` - `{object=}`: the scope to link the template / controller to. If none is specified,
* it will create a new isolate scope.
* This scope will be destroyed when the dialog is removed unless `preserveScope` is set to true.
* - `preserveScope` - `{boolean=}`: whether to preserve the scope when the element is removed. Default is false
* - `disableParentScroll` - `{boolean=}`: Whether to disable scrolling while the dialog is open.
* Default true.
* - `hasBackdrop` - `{boolean=}`: Whether there should be an opaque backdrop behind the dialog.
* Default true.
* - `clickOutsideToClose` - `{boolean=}`: Whether the user can click outside the dialog to
* close it. Default false.
* - `escapeToClose` - `{boolean=}`: Whether the user can press escape to close the dialog.
* Default true.
* - `focusOnOpen` - `{boolean=}`: An option to override focus behavior on open. Only disable if
* focusing some other way, as focus management is required for dialogs to be accessible.
* Defaults to true.
* - `controller` - `{function|string=}`: The controller to associate with the dialog. The controller
* will be injected with the local `$mdDialog`, which passes along a scope for the dialog.
* - `locals` - `{object=}`: An object containing key/value pairs. The keys will be used as names
* of values to inject into the controller. For example, `locals: {three: 3}` would inject
* `three` into the controller, with the value 3. If `bindToController` is true, they will be
* copied to the controller instead.
* - `bindToController` - `bool`: bind the locals to the controller, instead of passing them in.
* - `resolve` - `{object=}`: Similar to locals, except it takes promises as values, and the
* dialog will not open until all of the promises resolve.
* - `controllerAs` - `{string=}`: An alias to assign the controller to on the scope.
* - `parent` - `{element=}`: The element to append the dialog to. Defaults to appending
* to the root element of the application.
 *   - `onShowing` `{function=}`: Callback function used to announce the show() action is
* starting.
* - `onComplete` `{function=}`: Callback function used to announce when the show() action is
* finished.
* - `onRemoving` `{function=}`: Callback function used to announce the close/hide() action is
 *     starting. This allows developers to run custom animations in parallel with the close animations.
* - `fullscreen` `{boolean=}`: An option to apply `.md-dialog-fullscreen` class on open.
* @returns {promise} A promise that can be resolved with `$mdDialog.hide()` or
* rejected with `$mdDialog.cancel()`.
*/
/**
* @ngdoc method
* @name $mdDialog#hide
*
* @description
* Hide an existing dialog and resolve the promise returned from `$mdDialog.show()`.
*
* @param {*=} response An argument for the resolved promise.
*
* @returns {promise} A promise that is resolved when the dialog has been closed.
*/
/**
* @ngdoc method
* @name $mdDialog#cancel
*
* @description
* Hide an existing dialog and reject the promise returned from `$mdDialog.show()`.
*
* @param {*=} response An argument for the rejected promise.
*
* @returns {promise} A promise that is resolved when the dialog has been closed.
*/
function MdDialogProvider($$interimElementProvider) {
// Elements to capture and redirect focus when the user presses tab at the dialog boundary.
var topFocusTrap, bottomFocusTrap;
advancedDialogOptions.$inject = ["$mdDialog", "$mdTheming", "$mdConstant"];
dialogDefaultOptions.$inject = ["$mdDialog", "$mdAria", "$mdUtil", "$mdConstant", "$animate", "$document", "$window", "$rootElement", "$log", "$injector"];
return $$interimElementProvider('$mdDialog')
.setDefaults({
methods: ['disableParentScroll', 'hasBackdrop', 'clickOutsideToClose', 'escapeToClose',
'targetEvent', 'closeTo', 'openFrom', 'parent', 'fullscreen'],
options: dialogDefaultOptions
})
.addPreset('alert', {
methods: ['title', 'htmlContent', 'textContent', 'content', 'ariaLabel', 'ok', 'theme',
'css'],
options: advancedDialogOptions
})
.addPreset('confirm', {
methods: ['title', 'htmlContent', 'textContent', 'content', 'ariaLabel', 'ok', 'cancel',
'theme', 'css'],
options: advancedDialogOptions
})
.addPreset('prompt', {
methods: ['title', 'htmlContent', 'textContent', 'content', 'placeholder', 'ariaLabel',
'ok', 'cancel', 'theme', 'css'],
options: advancedDialogOptions
});
/* ngInject */
function advancedDialogOptions($mdDialog, $mdTheming, $mdConstant) {
return {
template: [
'<md-dialog md-theme="{{ dialog.theme }}" aria-label="{{ dialog.ariaLabel }}" ng-class="dialog.css">',
' <md-dialog-content class="md-dialog-content" role="document" tabIndex="-1">',
' <h2 class="md-title">{{ dialog.title }}</h2>',
' <div ng-if="::dialog.mdHtmlContent" class="md-dialog-content-body" ',
' ng-bind-html="::dialog.mdHtmlContent"></div>',
' <div ng-if="::!dialog.mdHtmlContent" class="md-dialog-content-body">',
' <p>{{::dialog.mdTextContent}}</p>',
' </div>',
' <md-input-container md-no-float ng-if="::dialog.$type == \'prompt\'" class="md-prompt-input-container">',
' <input ng-keypress="dialog.keypress($event)" md-autofocus ng-model="dialog.result" placeholder="{{::dialog.placeholder}}">',
' </md-input-container>',
' </md-dialog-content>',
' <md-dialog-actions>',
' <md-button ng-if="dialog.$type === \'confirm\' || dialog.$type === \'prompt\'"' +
' ng-click="dialog.abort()" class="md-primary">',
' {{ dialog.cancel }}',
' </md-button>',
' <md-button ng-click="dialog.hide()" class="md-primary" md-autofocus="dialog.$type===\'alert\'">',
' {{ dialog.ok }}',
' </md-button>',
' </md-dialog-actions>',
'</md-dialog>'
].join('').replace(/\s\s+/g, ''),
controller: function mdDialogCtrl() {
this.hide = function() {
$mdDialog.hide(this.$type === 'prompt' ? this.result : true);
};
this.abort = function() {
$mdDialog.cancel();
};
this.keypress = function($event) {
if ($event.keyCode === $mdConstant.KEY_CODE.ENTER) {
$mdDialog.hide(this.result)
}
}
},
controllerAs: 'dialog',
bindToController: true,
theme: $mdTheming.defaultTheme()
};
}
/* ngInject */
function dialogDefaultOptions($mdDialog, $mdAria, $mdUtil, $mdConstant, $animate, $document, $window, $rootElement, $log, $injector) {
return {
hasBackdrop: true,
isolateScope: true,
onShow: onShow,
onShowing: beforeShow,
onRemove: onRemove,
clickOutsideToClose: false,
escapeToClose: true,
targetEvent: null,
closeTo: null,
openFrom: null,
focusOnOpen: true,
disableParentScroll: true,
autoWrap: true,
fullscreen: false,
transformTemplate: function(template, options) {
        // Make the dialog container focusable, because otherwise the focus will always be redirected to
        // an element outside of the container, and the focus trap won't work properly.
// Also the tabindex is needed for the `escapeToClose` functionality, because
// the keyDown event can't be triggered when the focus is outside of the container.
return '<div class="md-dialog-container" tabindex="-1">' + validatedTemplate(template) + '</div>';
/**
* The specified template should contain a <md-dialog> wrapper element....
*/
function validatedTemplate(template) {
if (options.autoWrap && !/<\/md-dialog>/g.test(template)) {
return '<md-dialog>' + (template || '') + '</md-dialog>';
} else {
return template || '';
}
}
}
};
function beforeShow(scope, element, options, controller) {
if (controller) {
controller.mdHtmlContent = controller.htmlContent || options.htmlContent || '';
controller.mdTextContent = controller.textContent || options.textContent ||
controller.content || options.content || '';
if (controller.mdHtmlContent && !$injector.has('$sanitize')) {
throw Error('The ngSanitize module must be loaded in order to use htmlContent.');
}
if (controller.mdHtmlContent && controller.mdTextContent) {
throw Error('md-dialog cannot have both `htmlContent` and `textContent`');
}
}
}
/** Show method for dialogs */
function onShow(scope, element, options, controller) {
angular.element($document[0].body).addClass('md-dialog-is-showing');
captureParentAndFromToElements(options);
configureAria(element.find('md-dialog'), options);
showBackdrop(scope, element, options);
return dialogPopIn(element, options)
.then(function() {
activateListeners(element, options);
lockScreenReader(element, options);
warnDeprecatedActions();
focusOnOpen();
});
/**
* Check to see if they used the deprecated .md-actions class and log a warning
*/
function warnDeprecatedActions() {
var badActions = element[0].querySelectorAll('.md-actions');
if (badActions.length > 0) {
$log.warn('Using a class of md-actions is deprecated, please use <md-dialog-actions>.');
}
}
/**
* For alerts, focus on content... otherwise focus on
* the close button (or equivalent)
*/
function focusOnOpen() {
if (options.focusOnOpen) {
var target = $mdUtil.findFocusTarget(element) || findCloseButton();
target.focus();
}
/**
         * If no element with the class dialog-close is found, try to find the last
* button child in md-actions and assume it is a close button.
*
* If we find no actions at all, log a warning to the console.
*/
function findCloseButton() {
var closeButton = element[0].querySelector('.dialog-close');
if (!closeButton) {
var actionButtons = element[0].querySelectorAll('.md-actions button, md-dialog-actions button');
closeButton = actionButtons[actionButtons.length - 1];
}
return angular.element(closeButton);
}
}
}
/**
* Remove function for all dialogs
*/
function onRemove(scope, element, options) {
options.deactivateListeners();
options.unlockScreenReader();
options.hideBackdrop(options.$destroy);
// Remove the focus traps that we added earlier for keeping focus within the dialog.
if (topFocusTrap && topFocusTrap.parentNode) {
topFocusTrap.parentNode.removeChild(topFocusTrap);
}
if (bottomFocusTrap && bottomFocusTrap.parentNode) {
bottomFocusTrap.parentNode.removeChild(bottomFocusTrap);
}
// For navigation $destroy events, do a quick, non-animated removal,
// but for normal closes (from clicks, etc) animate the removal
return !!options.$destroy ? detachAndClean() : animateRemoval().then( detachAndClean );
/**
* For normal closes, animate the removal.
* For forced closes (like $destroy events), skip the animations
*/
function animateRemoval() {
return dialogPopOut(element, options);
}
/**
* Detach the element
*/
function detachAndClean() {
angular.element($document[0].body).removeClass('md-dialog-is-showing');
element.remove();
if (!options.$destroy) options.origin.focus();
}
}
/**
* Capture originator/trigger/from/to element information (if available)
* and the parent container for the dialog; defaults to the $rootElement
* unless overridden in the options.parent
*/
function captureParentAndFromToElements(options) {
options.origin = angular.extend({
element: null,
bounds: null,
focus: angular.noop
}, options.origin || {});
options.parent = getDomElement(options.parent, $rootElement);
options.closeTo = getBoundingClientRect(getDomElement(options.closeTo));
options.openFrom = getBoundingClientRect(getDomElement(options.openFrom));
if ( options.targetEvent ) {
options.origin = getBoundingClientRect(options.targetEvent.target, options.origin);
}
/**
* Identify the bounding RECT for the target element
*
*/
function getBoundingClientRect (element, orig) {
var source = angular.element((element || {}));
if (source && source.length) {
// Compute and save the target element's bounding rect, so that if the
// element is hidden when the dialog closes, we can shrink the dialog
// back to the same position it expanded from.
//
// Checking if the source is a rect object or a DOM element
var bounds = {top:0,left:0,height:0,width:0};
var hasFn = angular.isFunction(source[0].getBoundingClientRect);
return angular.extend(orig || {}, {
element : hasFn ? source : undefined,
bounds : hasFn ? source[0].getBoundingClientRect() : angular.extend({}, bounds, source[0]),
focus : angular.bind(source, source.focus),
});
}
}
/**
* If the specifier is a simple string selector, then query for
* the DOM element.
*/
function getDomElement(element, defaultElement) {
if (angular.isString(element)) {
var simpleSelector = element,
container = $document[0].querySelectorAll(simpleSelector);
element = container.length ? container[0] : null;
}
// If we have a reference to a raw dom element, always wrap it in jqLite
return angular.element(element || defaultElement);
}
}
/**
* Listen for escape keys and outside clicks to auto close
*/
function activateListeners(element, options) {
var window = angular.element($window);
var onWindowResize = $mdUtil.debounce(function(){
stretchDialogContainerToViewport(element, options);
}, 60);
var removeListeners = [];
var smartClose = function() {
// Only 'confirm' dialogs have a cancel button... escape/clickOutside will
// cancel or fallback to hide.
var closeFn = ( options.$type == 'alert' ) ? $mdDialog.hide : $mdDialog.cancel;
$mdUtil.nextTick(closeFn, true);
};
if (options.escapeToClose) {
var parentTarget = options.parent;
var keyHandlerFn = function(ev) {
if (ev.keyCode === $mdConstant.KEY_CODE.ESCAPE) {
ev.stopPropagation();
ev.preventDefault();
smartClose();
}
};
// Add keydown listeners
element.on('keydown', keyHandlerFn);
parentTarget.on('keydown', keyHandlerFn);
// Queue remove listeners function
removeListeners.push(function() {
element.off('keydown', keyHandlerFn);
parentTarget.off('keydown', keyHandlerFn);
});
}
// Register listener to update dialog on window resize
window.on('resize', onWindowResize);
removeListeners.push(function() {
window.off('resize', onWindowResize);
});
if (options.clickOutsideToClose) {
var target = element;
var sourceElem;
// Keep track of the element on which the mouse originally went down
// so that we can only close the backdrop when the 'click' started on it.
        // A simple 'click' handler does not work, because
        // it sets the target object as the element the mouse went down on.
var mousedownHandler = function(ev) {
sourceElem = ev.target;
};
        // We check if both our original element and the target are the backdrop,
        // because if the original was the backdrop and the target was inside the dialog
        // we don't want the dialog to close.
var mouseupHandler = function(ev) {
if (sourceElem === target[0] && ev.target === target[0]) {
ev.stopPropagation();
ev.preventDefault();
smartClose();
}
};
// Add listeners
target.on('mousedown', mousedownHandler);
target.on('mouseup', mouseupHandler);
// Queue remove listeners function
removeListeners.push(function() {
target.off('mousedown', mousedownHandler);
target.off('mouseup', mouseupHandler);
});
}
// Attach specific `remove` listener handler
options.deactivateListeners = function() {
removeListeners.forEach(function(removeFn) {
removeFn();
});
options.deactivateListeners = null;
};
}
/**
* Show modal backdrop element...
*/
function showBackdrop(scope, element, options) {
if (options.disableParentScroll) {
        // !! DO this before creating the backdrop, since disableScrollAround()
        //    configures the scroll offset, which is used by mdBackDrop postLink()
options.restoreScroll = $mdUtil.disableScrollAround(element, options.parent);
}
if (options.hasBackdrop) {
options.backdrop = $mdUtil.createBackdrop(scope, "md-dialog-backdrop md-opaque");
$animate.enter(options.backdrop, options.parent);
}
/**
* Hide modal backdrop element...
*/
options.hideBackdrop = function hideBackdrop($destroy) {
if (options.backdrop) {
if ( !!$destroy ) options.backdrop.remove();
else $animate.leave(options.backdrop);
}
if (options.disableParentScroll) {
options.restoreScroll();
delete options.restoreScroll;
}
options.hideBackdrop = null;
}
}
/**
* Inject ARIA-specific attributes appropriate for Dialogs
*/
function configureAria(element, options) {
var role = (options.$type === 'alert') ? 'alertdialog' : 'dialog';
var dialogContent = element.find('md-dialog-content');
var existingDialogId = element.attr('id');
var dialogContentId = 'dialogContent_' + (existingDialogId || $mdUtil.nextUid());
element.attr({
'role': role,
'tabIndex': '-1'
});
if (dialogContent.length === 0) {
dialogContent = element;
// If the dialog element already had an ID, don't clobber it.
if (existingDialogId) {
dialogContentId = existingDialogId;
}
}
dialogContent.attr('id', dialogContentId);
element.attr('aria-describedby', dialogContentId);
if (options.ariaLabel) {
$mdAria.expect(element, 'aria-label', options.ariaLabel);
}
else {
$mdAria.expectAsync(element, 'aria-label', function() {
var words = dialogContent.text().split(/\s+/);
if (words.length > 3) words = words.slice(0, 3).concat('...');
return words.join(' ');
});
}
// Set up elements before and after the dialog content to capture focus and
// redirect back into the dialog.
topFocusTrap = document.createElement('div');
topFocusTrap.classList.add('md-dialog-focus-trap');
topFocusTrap.tabIndex = 0;
bottomFocusTrap = topFocusTrap.cloneNode(false);
// When focus is about to move out of the dialog, we want to intercept it and redirect it
// back to the dialog element.
var focusHandler = function() {
element.focus();
};
topFocusTrap.addEventListener('focus', focusHandler);
bottomFocusTrap.addEventListener('focus', focusHandler);
      // The top focus trap is inserted immediately before the md-dialog element (as a sibling).
// The bottom focus trap is inserted at the very end of the md-dialog element (as a child).
element[0].parentNode.insertBefore(topFocusTrap, element[0]);
element.after(bottomFocusTrap);
}
/**
* Prevents screen reader interaction behind modal window
* on swipe interfaces
*/
function lockScreenReader(element, options) {
var isHidden = true;
// get raw DOM node
walkDOM(element[0]);
options.unlockScreenReader = function() {
isHidden = false;
walkDOM(element[0]);
options.unlockScreenReader = null;
};
/**
* Walk DOM to apply or remove aria-hidden on sibling nodes
* and parent sibling nodes
*
*/
function walkDOM(element) {
while (element.parentNode) {
if (element === document.body) {
return;
}
var children = element.parentNode.children;
for (var i = 0; i < children.length; i++) {
          // skip over child if it is an ancestor of the dialog
// or a script or style tag
if (element !== children[i] && !isNodeOneOf(children[i], ['SCRIPT', 'STYLE'])) {
children[i].setAttribute('aria-hidden', isHidden);
}
}
walkDOM(element = element.parentNode);
}
}
}
/**
* Ensure the dialog container fill-stretches to the viewport
*/
function stretchDialogContainerToViewport(container, options) {
var isFixed = $window.getComputedStyle($document[0].body).position == 'fixed';
var backdrop = options.backdrop ? $window.getComputedStyle(options.backdrop[0]) : null;
var height = backdrop ? Math.min($document[0].body.clientHeight, Math.ceil(Math.abs(parseInt(backdrop.height, 10)))) : 0;
container.css({
top: (isFixed ? $mdUtil.scrollTop(options.parent) : 0) + 'px',
height: height ? height + 'px' : '100%'
});
return container;
}
/**
* Dialog open and pop-in animation
*/
function dialogPopIn(container, options) {
// Add the `md-dialog-container` to the DOM
options.parent.append(container);
stretchDialogContainerToViewport(container, options);
var dialogEl = container.find('md-dialog');
var animator = $mdUtil.dom.animator;
var buildTranslateToOrigin = animator.calculateZoomToOrigin;
var translateOptions = {transitionInClass: 'md-transition-in', transitionOutClass: 'md-transition-out'};
var from = animator.toTransformCss(buildTranslateToOrigin(dialogEl, options.openFrom || options.origin));
var to = animator.toTransformCss(""); // defaults to center display (or parent or $rootElement)
if (options.fullscreen) {
dialogEl.addClass('md-dialog-fullscreen');
}
return animator
.translate3d(dialogEl, from, to, translateOptions)
.then(function(animateReversal) {
// Build a reversal translate function synched to this translation...
options.reverseAnimate = function() {
delete options.reverseAnimate;
if (options.closeTo) {
// Using the opposite classes to create a close animation to the closeTo element
translateOptions = {transitionInClass: 'md-transition-out', transitionOutClass: 'md-transition-in'};
from = to;
to = animator.toTransformCss(buildTranslateToOrigin(dialogEl, options.closeTo));
return animator
.translate3d(dialogEl, from, to,translateOptions);
}
return animateReversal(
animator.toTransformCss(
// in case the origin element has moved or is hidden,
// let's recalculate the translateCSS
buildTranslateToOrigin(dialogEl, options.origin)
)
);
};
return true;
});
}
/**
* Dialog close and pop-out animation
*/
function dialogPopOut(container, options) {
return options.reverseAnimate();
}
/**
* Utility function to filter out raw DOM nodes
*/
function isNodeOneOf(elem, nodeTypeArray) {
if (nodeTypeArray.indexOf(elem.nodeName) !== -1) {
return true;
}
}
}
}
MdDialogProvider.$inject = ["$$interimElementProvider"];
})(window, window.angular);
| JoPaRoRo/Fleet | web/assets/global/plugins/angular-material/modules/js/dialog/dialog_1.js | JavaScript | mit | 39,815
---
layout: blog
---
| mewa/jekyll-blogging-freelancer | src/html/blog/posts/index.html | HTML | mit | 24 |
# created by Navaneeth S, [email protected]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(Document)
| csriharsha/fosswebsite | documents/admin.py | Python | mit | 226 |
using System;
using System.Collections.Generic;
namespace Sqlcollaborative.Dbatools.Message
{
/// <summary>
/// A modification to a given message's level
/// </summary>
public class MessageLevelModifier
{
/// <summary>
/// Name of the modifier. Prevents duplication in a multi-runspace scenario.
/// </summary>
public string Name;
/// <summary>
/// The amount to modify the level by
/// </summary>
public int Modifier;
/// <summary>
/// Apply modifier only to messages from this function.
/// </summary>
public string IncludeFunctionName;
/// <summary>
/// Apply modifier not when the message is written by this function.
/// </summary>
public string ExcludeFunctionName;
/// <summary>
/// Apply modifier only to messages from this module
/// </summary>
public string IncludeModuleName;
/// <summary>
/// Do not apply modifier to messages from this module
/// </summary>
public string ExcludeModuleName;
/// <summary>
/// Only apply this modifier to a message that includes at least one of these tags
/// </summary>
public List<string> IncludeTags = new List<string>();
/// <summary>
/// Do not apply this modifier to a message that includes any of the following tags
/// </summary>
public List<string> ExcludeTags = new List<string>();
/// <summary>
        /// Tests whether a message should be modified by this modifier
        /// </summary>
        /// <param name="FunctionName">The name of the function writing the message</param>
        /// <param name="ModuleName">The name of the module that the function writing this message comes from</param>
        /// <param name="Tags">The tags of the message written</param>
        /// <returns>Whether the modifier applies to the message</returns>
public bool AppliesTo(string FunctionName, string ModuleName, List<string> Tags)
{
// Negatives
if (ExcludeFunctionName == FunctionName)
return false;
if (ExcludeModuleName == ModuleName)
return false;
if (Tags != null)
foreach (string tag in ExcludeTags)
foreach (string tag2 in Tags)
if (tag == tag2)
return false;
// Positives
if (!String.IsNullOrEmpty(IncludeFunctionName))
if (IncludeFunctionName != FunctionName)
return false;
if (!String.IsNullOrEmpty(IncludeModuleName))
if (IncludeModuleName != ModuleName)
return false;
if (IncludeTags.Count > 0)
{
if (Tags != null)
foreach (string tag in IncludeTags)
foreach (string tag2 in Tags)
if (tag == tag2)
return true;
return false;
}
return true;
}
}
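    // Illustrative usage sketch (not part of the original file; names are
    // made up): a modifier that only applies to messages tagged "connect"
    // from a given function could be evaluated like this:
    //
    //     var modifier = new MessageLevelModifier
    //     {
    //         Name = "demo",
    //         Modifier = 1,
    //         IncludeFunctionName = "Get-DbaDatabase",
    //         IncludeTags = new List<string> { "connect" }
    //     };
    //     bool applies = modifier.AppliesTo(
    //         "Get-DbaDatabase", "dbatools", new List<string> { "connect" });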
}
| Splaxi/dbatools | bin/projects/dbatools/dbatools/Message/MessageLevelModifier.cs | C# | mit | 3,188 |
package worker
import (
"encoding/json"
"runtime"
)
// Job handler
type JobHandler func(Job) error
type JobFunc func(Job) ([]byte, error)
// The definition of the callback function.
type jobFunc struct {
f JobFunc
timeout uint32
}
// Map for added function.
type jobFuncs map[string]*jobFunc
type systemInfo struct {
GOOS, GOARCH, GOROOT, Version string
NumCPU, NumGoroutine int
NumCgoCall int64
}
func SysInfo(job Job) ([]byte, error) {
return json.Marshal(&systemInfo{
GOOS: runtime.GOOS,
GOARCH: runtime.GOARCH,
GOROOT: runtime.GOROOT(),
Version: runtime.Version(),
NumCPU: runtime.NumCPU(),
NumGoroutine: runtime.NumGoroutine(),
NumCgoCall: runtime.NumCgoCall(),
})
}
var memState runtime.MemStats
func MemInfo(job Job) ([]byte, error) {
runtime.ReadMemStats(&memState)
return json.Marshal(&memState)
}
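// Descriptive note (not part of the original source): SysInfo ignores its Job
// argument and marshals a systemInfo snapshot, e.g. a payload shaped like
//
//	{"GOOS":"linux","GOARCH":"amd64","GOROOT":"/usr/local/go","Version":"go1.x",
//	 "NumCPU":8,"NumGoroutine":3,"NumCgoCall":0}
//
// (values are illustrative). MemInfo works the same way but serializes the
// package-level runtime.MemStats snapshot instead.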
| echa/gearman-go | worker/func.go | GO | mit | 908 |
// Copyright (c) 2017 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#include "atom/browser/ui/certificate_trust.h"
#include <wincrypt.h>
#include <windows.h>
#include "base/callback.h"
#include "net/cert/cert_database.h"
namespace certificate_trust {
// Add the provided certificate to the Trusted Root Certificate Authorities
// store for the current user.
//
// This requires prompting the user to confirm they trust the certificate.
BOOL AddToTrustedRootStore(const PCCERT_CONTEXT cert_context,
const scoped_refptr<net::X509Certificate>& cert) {
auto root_cert_store = CertOpenStore(
CERT_STORE_PROV_SYSTEM,
0,
NULL,
CERT_SYSTEM_STORE_CURRENT_USER,
L"Root");
if (root_cert_store == NULL) {
return false;
}
auto result = CertAddCertificateContextToStore(
root_cert_store,
cert_context,
CERT_STORE_ADD_REPLACE_EXISTING,
NULL);
if (result) {
    // force Chromium to reload its database for this certificate
auto cert_db = net::CertDatabase::GetInstance();
cert_db->NotifyObserversCertDBChanged();
}
CertCloseStore(root_cert_store, CERT_CLOSE_STORE_FORCE_FLAG);
return result;
}
CERT_CHAIN_PARA GetCertificateChainParameters() {
CERT_ENHKEY_USAGE enhkey_usage;
enhkey_usage.cUsageIdentifier = 0;
enhkey_usage.rgpszUsageIdentifier = NULL;
CERT_USAGE_MATCH cert_usage;
// ensure the rules are applied to the entire chain
cert_usage.dwType = USAGE_MATCH_TYPE_AND;
cert_usage.Usage = enhkey_usage;
CERT_CHAIN_PARA params = { sizeof(CERT_CHAIN_PARA) };
params.RequestedUsage = cert_usage;
return params;
}
void ShowCertificateTrust(atom::NativeWindow* parent_window,
const scoped_refptr<net::X509Certificate>& cert,
const std::string& message,
const ShowTrustCallback& callback) {
PCCERT_CHAIN_CONTEXT chain_context;
auto cert_context = cert->CreateOSCertChainForCert();
auto params = GetCertificateChainParameters();
if (CertGetCertificateChain(NULL,
cert_context,
NULL,
NULL,
                              &params,
NULL,
NULL,
&chain_context)) {
auto error_status = chain_context->TrustStatus.dwErrorStatus;
if (error_status == CERT_TRUST_IS_SELF_SIGNED ||
error_status == CERT_TRUST_IS_UNTRUSTED_ROOT) {
// these are the only scenarios we're interested in supporting
AddToTrustedRootStore(cert_context, cert);
}
CertFreeCertificateChain(chain_context);
}
CertFreeCertificateContext(cert_context);
callback.Run();
}
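// Descriptive summary (not part of the original source): ShowCertificateTrust
// builds an OS certificate chain for the certificate and, only when the chain
// error status is "self-signed" or "untrusted root", hands it to
// AddToTrustedRootStore() so the user can be prompted to trust it; the
// completion callback runs in every case.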
} // namespace certificate_trust
| renaesop/electron | atom/browser/ui/certificate_trust_win.cc | C++ | mit | 2,866 |
package com.mopub.mobileads;
import static com.mopub.mobileads.MoPubView.BannerAdListener;
public class DefaultBannerAdListener implements BannerAdListener {
@Override public void onBannerLoaded(MoPubView banner) { }
@Override public void onBannerFailed(MoPubView banner, MoPubErrorCode errorCode) { }
@Override public void onBannerClicked(MoPubView banner) { }
@Override public void onBannerExpanded(MoPubView banner) { }
@Override public void onBannerCollapsed(MoPubView banner) { }
}
| JSafaiyeh/Fabric-Example-App-Android | mopub-sdk/src/main/java/com/mopub/mobileads/DefaultBannerAdListener.java | Java | mit | 509 |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _NETOBJECT_H_
#define _NETOBJECT_H_
#ifndef _SIMBASE_H_
#include "console/simBase.h"
#endif
#ifndef _MMATH_H_
#include "math/mMath.h"
#endif
//-----------------------------------------------------------------------------
class NetConnection;
class NetObject;
//-----------------------------------------------------------------------------
struct CameraScopeQuery
{
NetObject *camera; ///< Pointer to the viewing object.
Point3F pos; ///< Position in world space
Point3F orientation; ///< Viewing vector in world space
F32 fov; ///< Viewing angle/2
F32 sinFov; ///< sin(fov/2);
F32 cosFov; ///< cos(fov/2);
F32 visibleDistance; ///< Visible distance.
};
struct GhostInfo;
//-----------------------------------------------------------------------------
/// Superclass for ghostable networked objects.
///
/// @section NetObject_intro Introduction To NetObject And Ghosting
///
/// One of the most powerful aspects of Torque's networking code is its support
/// for ghosting and prioritized, most-recent-state network updates. The way
/// this works is a bit complex, but it is immensely efficient. Let's run
/// through the steps that the server goes through for each client in this part
/// of Torque's networking:
/// - First, the server determines what objects are in-scope for the client.
/// This is done by calling onCameraScopeQuery() on the object which is
/// considered the "scope" object. This is usually the player object, but
/// it can be something else. (For instance, the current vehicle, or an
/// object we're remote controlling.)
/// - Second, it ghosts them to the client; this is implemented in netGhost.cc.
/// - Finally, it sends updates as needed, by checking the dirty list and packing
/// updates.
///
/// There several significant advantages to using this networking system:
/// - Efficient network usage, since we only send data that has changed. In addition,
/// since we only care about most-recent data, if a packet is dropped, we don't waste
/// effort trying to deliver stale data.
/// - Cheating protection; since we don't deliver information about game objects which
/// aren't in scope, we dramatically reduce the ability of clients to hack the game and
/// gain a meaningful advantage. (For instance, they can't find out about things behind
/// them, since objects behind them don't fall in scope.) In addition, since ghost IDs are
/// assigned per-client, it's difficult for any sort of co-ordination between cheaters to
/// occur.
///
/// NetConnection contains the Ghost Manager implementation, which deals with transferring data to
/// the appropriate clients and keeping state in synch.
///
/// @section NetObject_Implementation An Example Implementation
///
/// The basis of the ghost implementation in Torque is NetObject. It tracks the dirty flags for the
/// various states that the object tracks, and does some other book-keeping to allow more efficient
/// operation of the networking layer.
///
/// Using a NetObject is very simple; let's go through a simple example implementation:
///
/// @code
/// class SimpleNetObject : public NetObject
/// {
/// public:
/// typedef NetObject Parent;
/// DECLARE_CONOBJECT(SimpleNetObject);
/// @endcode
///
/// Above is the standard boilerplate code for a Torque class. You can find out more about this in SimObject.
///
/// @code
/// char message1[256];
/// char message2[256];
/// enum States {
/// Message1Mask = BIT(0),
/// Message2Mask = BIT(1),
/// };
/// @endcode
///
/// For our example, we're having two "states" that we keep track of, message1 and message2. In a real
/// object, we might map our states to health and position, or some other set of fields. You have 32
/// bits to work with, so it's possible to be very specific when defining states. In general, you
/// should try to use as few states as possible (you never know when you'll need to expand your object's
/// functionality!), and in fact, most of your fields will end up changing all at once, so it's not worth
/// it to be too fine-grained. (As an example, position and velocity on Player are controlled by the same
/// bit, as one rarely changes without the other changing, too.)
///
/// @code
/// SimpleNetObject()
/// {
/// // in order for an object to be considered by the network system,
/// // the Ghostable net flag must be set.
/// // the ScopeAlways flag indicates that the object is always scoped
/// // on all active connections.
/// mNetFlags.set(ScopeAlways | Ghostable);
/// dStrcpy(message1, "Hello World 1!");
/// dStrcpy(message2, "Hello World 2!");
/// }
/// @endcode
///
/// Here is the constructor. Here, you see that we initialize our net flags to show that
/// we should always be scoped, and that we're to be taken into consideration for ghosting. We
/// also provide some initial values for the message fields.
///
/// @code
/// U32 packUpdate(NetConnection *, U32 mask, BitStream *stream)
/// {
/// // check which states need to be updated, and update them
/// if(stream->writeFlag(mask & Message1Mask))
/// stream->writeString(message1);
/// if(stream->writeFlag(mask & Message2Mask))
/// stream->writeString(message2);
///
/// // the return value from packUpdate can set which states still
/// // need to be updated for this object.
/// return 0;
/// }
/// @endcode
///
/// Here's half of the meat of the networking code, the packUpdate() function. (The other half, unpackUpdate(),
/// we'll get to in a second.) The comments in the code pretty much explain everything, however, notice that the
/// code follows a pattern of if(writeFlag(mask & StateMask)) { ... write data ... }. The packUpdate()/unpackUpdate()
/// functions are responsible for reading and writing the dirty bits to the bitstream by themselves.
///
/// @code
/// void unpackUpdate(NetConnection *, BitStream *stream)
/// {
/// // the unpackUpdate function must be symmetrical to packUpdate
/// if(stream->readFlag())
/// {
/// stream->readString(message1);
/// Con::printf("Got message1: %s", message1);
/// }
/// if(stream->readFlag())
/// {
/// stream->readString(message2);
/// Con::printf("Got message2: %s", message2);
/// }
/// }
/// @endcode
///
/// The other half of the networking code in any NetObject, unpackUpdate(). In our simple example, all that
/// the code does is print the new messages to the console; however, in a more advanced object, you might
/// trigger animations, update complex object properties, or even spawn new objects, based on what packet
/// data you unpack.
///
/// @code
/// void setMessage1(const char *msg)
/// {
/// setMaskBits(Message1Mask);
/// dStrcpy(message1, msg);
/// }
/// void setMessage2(const char *msg)
/// {
/// setMaskBits(Message2Mask);
/// dStrcpy(message2, msg);
/// }
/// @endcode
///
/// Here are the accessors for the two properties. It is good to encapsulate your state
/// variables, so that you don't have to remember to make a call to setMaskBits every time you change
/// anything; the accessors can do it for you. In a more complex object, you might need to set
/// multiple mask bits when you change something; this can be done using the | operator, for instance,
/// setMaskBits( Message1Mask | Message2Mask ); if you changed both messages.
///
/// @code
/// IMPLEMENT_CO_NETOBJECT_V1(SimpleNetObject);
///
/// ConsoleMethod(SimpleNetObject, setMessage1, void, 3, 3, "(string msg) Set message 1.")
/// {
/// object->setMessage1(argv[2]);
/// }
///
/// ConsoleMethod(SimpleNetObject, setMessage2, void, 3, 3, "(string msg) Set message 2.")
/// {
/// object->setMessage2(argv[2]);
/// }
/// @endcode
///
/// Finally, we use the NetObject implementation macro, IMPLEMENT_CO_NETOBJECT_V1(), to implement our
/// NetObject. It is important that we use this, as it makes Torque perform certain initialization tasks
/// that allow us to send the object over the network. IMPLEMENT_CONOBJECT() doesn't perform these tasks, see
/// the documentation on AbstractClassRep for more details.
///
/// @nosubgrouping
class NetObject : public SimGroup
{
// The Ghost Manager needs read/write access
friend class NetConnection;
friend struct GhostInfo;
friend class ProcessList;
// Not the best way to do this, but the event needs access to mNetFlags
friend class GhostAlwaysObjectEvent;
private:
typedef SimGroup Parent;
/// Mask indicating which states are dirty and need to be retransmitted on this
/// object.
U32 mDirtyMaskBits;
/// @name Dirty List
///
/// Whenever a NetObject becomes "dirty", we add it to the dirty list.
/// We also remove ourselves on the destructor.
///
/// This is done so that when we want to send updates (in NetConnection),
/// it's very fast to find the objects that need to be updated.
/// @{
/// Static pointer to the head of the dirty NetObject list.
static NetObject *mDirtyList;
/// Next item in the dirty list...
NetObject *mPrevDirtyList;
/// Previous item in the dirty list...
NetObject *mNextDirtyList;
/// @}
protected:
/// Pointer to the server object on a local connection.
/// @see getServerObject
SimObjectPtr<NetObject> mServerObject;
/// Pointer to the client object on a local connection.
/// @see getClientObject
SimObjectPtr<NetObject> mClientObject;
enum NetFlags
{
IsGhost = BIT(1), ///< This is a ghost.
ScopeAlways = BIT(6), ///< Object always ghosts to clients.
ScopeLocal = BIT(7), ///< Ghost only to local client.
Ghostable = BIT(8), ///< Set if this object CAN ghost.
MaxNetFlagBit = 15
};
BitSet32 mNetFlags; ///< Flag values from NetFlags
U32 mNetIndex; ///< The index of this ghost in the GhostManager on the server.
GhostInfo *mFirstObjectRef; ///< Head of a linked list storing GhostInfos referencing this NetObject.
public:
NetObject();
~NetObject();
virtual String describeSelf() const;
/// @name Miscellaneous
/// @{
DECLARE_CONOBJECT(NetObject);
static void initPersistFields();
bool onAdd();
void onRemove();
/// @}
static void collapseDirtyList();
/// Used to mark a bit as dirty; ie, that its corresponding set of fields need to be transmitted next update.
///
/// @param orMask Bit(s) to set
virtual void setMaskBits(U32 orMask);
/// Clear the specified bits from the dirty mask.
///
/// @param orMask Bits to clear
virtual void clearMaskBits(U32 orMask);
virtual U32 filterMaskBits(U32 mask, NetConnection * connection) { return mask; }
/// Scope the object to all connections.
///
/// The object is marked as ScopeAlways and is immediately ghosted to
/// all active connections. This function has no effect if the object
/// is not marked as Ghostable.
void setScopeAlways();
/// Stop scoping the object to all connections.
///
/// The object's ScopeAlways flag is cleared and the object is removed from
/// all current active connections.
void clearScopeAlways();
/// This returns a value which is used to prioritize which objects need to be updated.
///
/// In NetObject, our returned priority is 0.1 * updateSkips, so that less recently
/// updated objects are more likely to be updated.
///
/// In subclasses, this can be adjusted. For instance, ShapeBase provides priority
/// based on proximity to the camera.
///
/// @param focusObject Information from a previous call to onCameraScopeQuery.
/// @param updateMask Current update mask.
/// @param updateSkips Number of ticks we haven't been updated for.
/// @returns A floating point value indicating priority. These are typically < 5.0.
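   ///
   /// For example (illustrative): with the base implementation, an object that
   /// has skipped ten updates returns 0.1 * 10 = 1.0, so it outranks an object
   /// that was updated last tick (0.1).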
virtual F32 getUpdatePriority(CameraScopeQuery *focusObject, U32 updateMask, S32 updateSkips);
/// Instructs this object to pack its state for transfer over the network.
///
/// @param conn Net connection being used
/// @param mask Mask indicating fields to transmit.
/// @param stream Bitstream to pack data to
///
/// @returns Any bits which were not dealt with. The value is stored by the networking
/// system. Don't set bits you weren't passed.
virtual U32 packUpdate(NetConnection * conn, U32 mask, BitStream *stream);
/// Instructs this object to read state data previously packed with packUpdate.
///
/// @param conn Net connection being used
/// @param stream stream to read from
virtual void unpackUpdate(NetConnection * conn, BitStream *stream);
/// Queries the object about information used to determine scope.
///
/// Something that is 'in scope' is somehow interesting to the client.
///
/// If we are a NetConnection's scope object, it calls this method to determine
/// how things should be scoped; basically, we tell it our field of view with camInfo,
/// and have the opportunity to manually mark items as "in scope" as we see fit.
///
/// By default, we just mark all ghostable objects as in scope.
///
/// @param cr Net connection requesting scope information.
/// @param camInfo Information about what this object can see.
virtual void onCameraScopeQuery(NetConnection *cr, CameraScopeQuery *camInfo);
/// Get the ghost index of this object.
U32 getNetIndex() { return mNetIndex; }
bool isServerObject() const; ///< Is this a server object?
bool isClientObject() const; ///< Is this a client object?
bool isGhost() const; ///< Is this is a ghost?
bool isScopeLocal() const; ///< Should this object only be visible to the client which created it?
bool isScopeable() const; ///< Is this object subject to scoping?
bool isGhostable() const; ///< Is this object ghostable?
bool isGhostAlways() const; ///< Should this object always be ghosted?
/// @name Short-Circuited Networking
///
   /// When we are running with client and server on the same system (which can happen either
/// when we are doing a single player game, or if we're hosting a multiplayer game and having
/// someone playing on the same instance), we can do some short circuited code to enhance
/// performance.
///
/// These variables are used to make it simpler; if we are running in short-circuited mode,
/// the ghosted client gets the server object while the server gets the client object.
///
/// @note "Premature optimization is the root of all evil" - Donald Knuth. The current codebase
/// uses this feature in three small places, mostly for non-speed-related purposes.
///
/// @{
/// Returns a pointer to the server object when on a local connection.
NetObject* getServerObject() const { return mServerObject; }
/// Returns a pointer to the client object when on a local connection.
NetObject* getClientObject() const { return mClientObject; }
   /// Template form for the caller's convenience.
template < class T >
static T* getServerObject( T *netObj ) { return static_cast<T*>( netObj->getServerObject() ); }
   /// Template form for the caller's convenience.
template < class T >
static T* getClientObject( T *netObj ) { return static_cast<T*>( netObj->getClientObject() ); }
/// @}
};
//-----------------------------------------------------------------------------
inline bool NetObject::isGhost() const
{
return mNetFlags.test(IsGhost);
}
inline bool NetObject::isClientObject() const
{
return mNetFlags.test(IsGhost);
}
inline bool NetObject::isServerObject() const
{
return !mNetFlags.test(IsGhost);
}
inline bool NetObject::isScopeLocal() const
{
return mNetFlags.test(ScopeLocal);
}
inline bool NetObject::isScopeable() const
{
return mNetFlags.test(Ghostable) && !mNetFlags.test(ScopeAlways);
}
inline bool NetObject::isGhostable() const
{
return mNetFlags.test(Ghostable);
}
inline bool NetObject::isGhostAlways() const
{
AssertFatal(mNetFlags.test(Ghostable) || mNetFlags.test(ScopeAlways) == false,
"That's strange, a ScopeAlways non-ghostable object? Something wrong here");
return mNetFlags.test(Ghostable) && mNetFlags.test(ScopeAlways);
}
#endif
| aaravamudan2014/Torque3D | Engine/source/sim/netObject.h | C | mit | 17,934 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Batch.Protocol.Models
{
using System.Linq;
/// <summary>
/// Defines headers for EvaluateAutoScale operation.
/// </summary>
public partial class PoolEvaluateAutoScaleHeaders
{
/// <summary>
/// Initializes a new instance of the PoolEvaluateAutoScaleHeaders
/// class.
/// </summary>
public PoolEvaluateAutoScaleHeaders() { }
/// <summary>
/// Initializes a new instance of the PoolEvaluateAutoScaleHeaders
/// class.
/// </summary>
/// <param name="clientRequestId">The client-request-id provided by the
/// client during the request. This will be returned only if the
/// return-client-request-id parameter was set to true.</param>
/// <param name="requestId">A unique identifier for the request that
/// was made to the Batch service. If a request is consistently failing
/// and you have verified that the request is properly formulated, you
/// may use this value to report the error to Microsoft. In your
/// report, include the value of this request ID, the approximate time
/// that the request was made, the Batch account against which the
/// request was made, and the region that account resides in.</param>
/// <param name="eTag">The ETag HTTP response header. This is an opaque
/// string. You can use it to detect whether the resource has changed
/// between requests. In particular, you can pass the ETag to one of
/// the If-Modified-Since, If-Unmodified-Since, If-Match or
/// If-None-Match headers.</param>
/// <param name="lastModified">The time at which the resource was last
/// modified.</param>
/// <param name="dataServiceId">The OData ID of the resource to which
/// the request applied.</param>
public PoolEvaluateAutoScaleHeaders(System.Guid? clientRequestId = default(System.Guid?), System.Guid? requestId = default(System.Guid?), string eTag = default(string), System.DateTime? lastModified = default(System.DateTime?), string dataServiceId = default(string))
{
ClientRequestId = clientRequestId;
RequestId = requestId;
ETag = eTag;
LastModified = lastModified;
DataServiceId = dataServiceId;
}
/// <summary>
/// Gets or sets the client-request-id provided by the client during
/// the request. This will be returned only if the
/// return-client-request-id parameter was set to true.
/// </summary>
[Newtonsoft.Json.JsonProperty(PropertyName = "client-request-id")]
public System.Guid? ClientRequestId { get; set; }
/// <summary>
/// Gets or sets a unique identifier for the request that was made to
/// the Batch service. If a request is consistently failing and you
/// have verified that the request is properly formulated, you may use
/// this value to report the error to Microsoft. In your report,
/// include the value of this request ID, the approximate time that the
/// request was made, the Batch account against which the request was
/// made, and the region that account resides in.
/// </summary>
[Newtonsoft.Json.JsonProperty(PropertyName = "request-id")]
public System.Guid? RequestId { get; set; }
/// <summary>
/// Gets or sets the ETag HTTP response header. This is an opaque
/// string. You can use it to detect whether the resource has changed
/// between requests. In particular, you can pass the ETag to one of
/// the If-Modified-Since, If-Unmodified-Since, If-Match or
/// If-None-Match headers.
/// </summary>
[Newtonsoft.Json.JsonProperty(PropertyName = "ETag")]
public string ETag { get; set; }
/// <summary>
/// Gets or sets the time at which the resource was last modified.
/// </summary>
[Newtonsoft.Json.JsonConverter(typeof(Microsoft.Rest.Serialization.DateTimeRfc1123JsonConverter))]
[Newtonsoft.Json.JsonProperty(PropertyName = "Last-Modified")]
public System.DateTime? LastModified { get; set; }
/// <summary>
/// Gets or sets the OData ID of the resource to which the request
/// applied.
/// </summary>
[Newtonsoft.Json.JsonProperty(PropertyName = "DataServiceId")]
public string DataServiceId { get; set; }
}
}
| JasonYang-MSFT/azure-sdk-for-net | src/SDKs/Batch/dataPlane/Client/Src/Azure.Batch/GeneratedProtocol/Models/PoolEvaluateAutoScaleHeaders.cs | C# | mit | 4,874 |
class HomeController < ApplicationController
layout 'application'
def index
@homepage = true
@html_title = "Home - "
#@tags = Tag.counts(:limit => 60)
@tags = Map.tag_counts(:conditions => "public = true", :limit=>100)
@maps = Map.public.find(:all,
:order => "updated_at DESC",
:conditions => 'status = 4 OR status IN (2,3,4) ',
:limit => 3, :include =>:gcps)
@layers = Layer.find(:all,:order => "updated_at DESC", :limit => 3, :include=> :maps)
get_news_feeds
if logged_in?
@my_maps = current_user.maps.find(:all, :order => "updated_at DESC", :limit => 3)
end
respond_to do |format|
format.html # index.html.erb
format.xml { render :xml => @maps }
end
end
def nepa_search
@hidenav = !params.has_key?("shownav")
@tags = Map.tag_counts(:conditions => "public = true", :order => "name", :limit=>100)
end
def nepa_admin
end
def help
@homepage = true
end
def get_news_feeds
when_fragment_expired 'news_feeds', 1.day.from_now do
logger.info "getting news feed"
@feeds = RssParser.run("http://thinkwhere.wordpress.com/tag/mapwarper/feed/")
@feeds = @feeds[:items][0..1]
end
end
end
| DistributedOpenUnifiedGovernmentNetwork/mapwarper | app/controllers/home_controller.rb | Ruby | mit | 1,307 |
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package structs
// GitBlobResponse represents a git blob
type GitBlobResponse struct {
Content string `json:"content"`
Encoding string `json:"encoding"`
URL string `json:"url"`
SHA string `json:"sha"`
Size int64 `json:"size"`
}
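// Illustrative JSON shape produced by this struct (values are made up, not
// part of the original source):
//
//	{
//	  "content": "cGFja2FnZSBtYWlu",
//	  "encoding": "base64",
//	  "url": "https://example.com/api/v1/repos/owner/repo/git/blobs/<sha>",
//	  "sha": "7c9bd25f...",
//	  "size": 12
//	}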
| go-gitea/gitea | modules/structs/git_blob.go | GO | mit | 413 |
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2004, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
//-----------------------------------------------------------------------------
//
// Code examples that show how to add preview images
// (also known as thumbnails) to OpenEXR image files.
//
//-----------------------------------------------------------------------------
#include <ImfRgbaFile.h>
#include <ImfArray.h>
#include <ImfPreviewImage.h>
#include "ImathFun.h"
#include "drawImage.h"
#include <iostream>
#include <algorithm>
#include "namespaceAlias.h"
using namespace IMF;
using namespace std;
using namespace IMATH_NAMESPACE;
unsigned char
gamma (float x)
{
//
// Convert a floating-point pixel value to an 8-bit gamma-2.2
// preview pixel. (This routine is a simplified version of
// how the exrdisplay program transforms floating-point pixel
// values in order to display them on the screen.)
//
x = pow (5.5555f * max (0.f, x), 0.4545f) * 84.66f;
return (unsigned char) clamp (x, 0.f, 255.f);
}
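// Illustrative check (not part of the original source): a scene-linear value
// of 0.18 ("middle gray") maps to pow (5.5555f * 0.18f, 0.4545f) * 84.66f,
// which is roughly 84.7 and is stored as 84 in the 8-bit preview.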
void
makePreviewImage (const Array2D <Rgba> &pixels,
int width,
int height,
Array2D <PreviewRgba> &previewPixels,
int &previewWidth,
int &previewHeight)
{
const int N = 8;
previewWidth = width / N;
previewHeight = height / N;
previewPixels.resizeErase (previewHeight, previewWidth);
for (int y = 0; y < previewHeight; ++y)
{
for (int x = 0; x < previewWidth; ++x)
{
const Rgba &inPixel = pixels[y * N][x * N];
PreviewRgba &outPixel = previewPixels[y][x];
outPixel.r = gamma (inPixel.r);
outPixel.g = gamma (inPixel.g);
outPixel.b = gamma (inPixel.b);
outPixel.a = int (clamp (inPixel.a * 255.f, 0.f, 255.f) + 0.5f);
}
}
}
void
writeRgbaWithPreview1 (const char fileName[],
const Array2D <Rgba> &pixels,
int width,
int height)
{
//
// Write an image file with a preview image, version 1:
//
// - generate the preview image by subsampling the main image
// - generate a file header
// - add the preview image to the file header
// - open the file (this stores the header with the
// preview image in the file)
// - describe the memory layout of the main image's pixels
// - store the main image's pixels in the file
//
Array2D <PreviewRgba> previewPixels;
int previewWidth;
int previewHeight;
makePreviewImage (pixels, width, height,
previewPixels, previewWidth, previewHeight);
Header header (width, height);
header.setPreviewImage
(PreviewImage (previewWidth, previewHeight, &previewPixels[0][0]));
RgbaOutputFile file (fileName, header, WRITE_RGBA);
file.setFrameBuffer (&pixels[0][0], 1, width);
file.writePixels (height);
}
void
writeRgbaWithPreview2 (const char fileName[],
int width,
int height)
{
//
// Write an image file with a preview image, version 2:
//
// - generate a file header
// - add a dummy preview image to the file header
// - open the file (this stores the header with the dummy
// preview image in the file)
// - render the main image's pixels one scan line at a time,
// and store each scan line in the file before rendering
// the next scan line
// - generate the preview image on the fly, while the main
// image is being rendered
// - once the main image has been rendered, store the preview
// image in the file, overwriting the dummy preview
//
Array <Rgba> pixels (width);
const int N = 8;
int previewWidth = width / N;
int previewHeight = height / N;
Array2D <PreviewRgba> previewPixels (previewHeight, previewWidth);
Header header (width, height);
header.setPreviewImage (PreviewImage (previewWidth, previewHeight));
RgbaOutputFile file (fileName, header, WRITE_RGBA);
file.setFrameBuffer (pixels, 1, 0);
for (int y = 0; y < height; ++y)
{
drawImage7 (pixels, width, height, y);
file.writePixels (1);
if (y % N == 0)
{
for (int x = 0; x < width; x += N)
{
const Rgba &inPixel = pixels[x];
PreviewRgba &outPixel = previewPixels[y / N][x / N];
outPixel.r = gamma (inPixel.r);
outPixel.g = gamma (inPixel.g);
outPixel.b = gamma (inPixel.b);
outPixel.a = int (clamp (inPixel.a * 255.f, 0.f, 255.f) + 0.5f);
}
}
}
file.updatePreviewImage (&previewPixels[0][0]);
}
void
previewImageExamples ()
{
cout << "\nfiles with preview images\n" << endl;
cout << "drawing image then writing file" << endl;
int w = 800;
int h = 600;
Array2D<Rgba> p (h, w);
drawImage1 (p, w, h);
writeRgbaWithPreview1 ("rgbaWithPreview1.exr", p, w, h);
cout << "drawing image while writing file" << endl;
writeRgbaWithPreview2 ("rgbaWithPreview2.exr", w, h);
cout << endl;
}
| Mikayex/conan-openexr | test_package/previewImageExamples.cpp | C++ | mit | 6,568 |
import {Component} from '@angular/core';
/**
* @title Basic list
*/
@Component({
selector: 'list-overview-example',
templateUrl: 'list-overview-example.html',
styleUrls: ['list-overview-example.css'],
})
export class ListOverviewExample {}
| jelbourn/material2 | src/material-examples/material/list/list-overview/list-overview-example.ts | TypeScript | mit | 249 |
//-----------------------------------------------------------------------------
// Copyright (c) 2013 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _EXAMPLE_ASSET_H_
#include "ExampleAsset.h"
#endif
#ifndef _ASSET_MANAGER_H_
#include "assets/assetManager.h"
#endif
#ifndef _CONSOLETYPES_H_
#include "console/consoleTypes.h"
#endif
#ifndef _TAML_
#include "persistence/taml/taml.h"
#endif
#ifndef _ASSET_PTR_H_
#include "assets/assetPtr.h"
#endif
// Debug Profiling.
#include "platform/profiler.h"
//-----------------------------------------------------------------------------
IMPLEMENT_CONOBJECT(ExampleAsset);
ConsoleType(ExampleAssetPtr, TypeExampleAssetPtr, ExampleAsset, ASSET_ID_FIELD_PREFIX)
//-----------------------------------------------------------------------------
ConsoleGetType(TypeExampleAssetPtr)
{
// Fetch asset Id.
return (*((AssetPtr<ExampleAsset>*)dptr)).getAssetId();
}
//-----------------------------------------------------------------------------
ConsoleSetType(TypeExampleAssetPtr)
{
// Was a single argument specified?
if (argc == 1)
{
// Yes, so fetch field value.
const char* pFieldValue = argv[0];
// Fetch asset pointer.
AssetPtr<ExampleAsset>* pAssetPtr = dynamic_cast<AssetPtr<ExampleAsset>*>((AssetPtrBase*)(dptr));
// Is the asset pointer the correct type?
if (pAssetPtr == NULL)
{
// No, so fail.
//Con::warnf("(TypeTextureAssetPtr) - Failed to set asset Id '%d'.", pFieldValue);
return;
}
// Set asset.
pAssetPtr->setAssetId(pFieldValue);
return;
}
// Warn.
Con::warnf("(TypeTextureAssetPtr) - Cannot set multiple args to a single asset.");
}
//-----------------------------------------------------------------------------
ExampleAsset::ExampleAsset() :
mpOwningAssetManager(NULL),
mAssetInitialized(false),
mAcquireReferenceCount(0)
{
// Generate an asset definition.
mpAssetDefinition = new AssetDefinition();
}
//-----------------------------------------------------------------------------
ExampleAsset::~ExampleAsset()
{
// If the asset manager does not own the asset then we own the
// asset definition so delete it.
if (!getOwned())
delete mpAssetDefinition;
}
//-----------------------------------------------------------------------------
void ExampleAsset::initPersistFields()
{
// Call parent.
Parent::initPersistFields();
}
//------------------------------------------------------------------------------
void ExampleAsset::copyTo(SimObject* object)
{
// Call to parent.
Parent::copyTo(object);
} | elfprince13/Torque3D | Engine/source/T3D/assets/ExampleAsset.cpp | C++ | mit | 3,758 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\HttpKernel\DependencyInjection;
use Symfony\Component\DependencyInjection\Argument\IteratorArgument;
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\DependencyInjection\Exception\RuntimeException;
use Symfony\Component\DependencyInjection\Reference;
use Symfony\Component\HttpKernel\EventListener\ServiceResetListener;
/**
* @author Alexander M. Turek <[email protected]>
*/
class ResettableServicePass implements CompilerPassInterface
{
private $tagName;
/**
* @param string $tagName
*/
public function __construct($tagName = 'kernel.reset')
{
$this->tagName = $tagName;
}
/**
* {@inheritdoc}
*/
public function process(ContainerBuilder $container)
{
if (!$container->has(ServiceResetListener::class)) {
return;
}
$services = $methods = array();
foreach ($container->findTaggedServiceIds($this->tagName, true) as $id => $tags) {
$services[$id] = new Reference($id, ContainerInterface::IGNORE_ON_UNINITIALIZED_REFERENCE);
$attributes = $tags[0];
if (!isset($attributes['method'])) {
throw new RuntimeException(sprintf('Tag %s requires the "method" attribute to be set.', $this->tagName));
}
$methods[$id] = $attributes['method'];
}
if (empty($services)) {
$container->removeDefinition(ServiceResetListener::class);
return;
}
$container->findDefinition(ServiceResetListener::class)
->replaceArgument(0, new IteratorArgument($services))
->replaceArgument(1, $methods);
}
}
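// Illustrative configuration (not part of the original file; the service name
// is made up): a service opts in to being reset by tagging itself with the
// pass's tag name and naming the reset method, e.g. in services.yaml:
//
//     App\Cache\InMemoryCache:
//         tags:
//             - { name: 'kernel.reset', method: 'reset' }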
| hason/symfony | src/Symfony/Component/HttpKernel/DependencyInjection/ResettableServicePass.php | PHP | mit | 2,081 |