code (stringlengths 3-1.05M) | repo_name (stringlengths 4-116) | path (stringlengths 3-942) | language (stringclasses, 30 values) | license (stringclasses, 15 values) | size (int32, 3-1.05M) |
---|---|---|---|---|---|
# Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
    """
    Score a decision tree model.
    :param string model_name: title for the model used on the output filename
    :param dataframe model: model reference
    :param dataframe X: examples
    :param dataframe y: targets
    :param dataframe y_actual: target results
    :param string output_folder: location of the output / results
    """
    print("Scoring model...")
    model_score = model.score(X, y)
    mse = mean_squared_error(y, y_actual)
    mse_score = model_name, "- Mean Squared Error:", mse
    accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
    # write to file
    path = output_folder + '/models'
    create_folder_if_not_exists(path)
    filename = path + '/score_' + model_name + '.txt'
    with open(filename, 'w') as scores:
        print(mse_score, file=scores)
        print(accuracy, file=scores)
    print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
    """
    Plot decision tree, y (training) vs y (test/actual).
    :param string model_name: title for the model used on the output filename
    :param dataframe y_actual: target results
    :param dataframe y_test: test targets
    :param string output_folder: location of the output / results
    """
    # initialise plot path
    path = output_folder + '/models'
    print("Plotting results...")
    plt.scatter(y_actual, y_test, label='Duration')
    plt.title('Decision Tree')
    plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
    plt.xlabel('y (actual)')
    plt.ylabel('y (test)')
    plt.legend()
    plot_path = path + '/plot_' + model_name + '.png'
    plt.savefig(plot_path)
    print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
    """
    Save model using Pickle binary format.
    :param dataframe model: model reference
    :param string model_name: title for the model used on the output filename
    :param string folder: location of model output
    """
    print("Saving model...")
    model_file = folder + '/models/' + model_name + '.pkl'
    # use a context manager so the file handle is closed once the model is written
    with open(model_file, 'wb') as model_output:
        pickle.dump(model, model_output)
    print("Model saved location:", model_file)
def load_dt_model(pickle_model):
    """
    Retrieve model using Pickle binary format.
    :param string pickle_model: location of Pickle model
    :return: Pickle model for re-use
    :rtype: object
    """
    # the parameter is a file location, so open the file and unpickle its contents
    with open(pickle_model, 'rb') as model_file:
        return pickle.load(model_file)
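
# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal, hedged example of how these helpers might be chained after fitting a
# decision tree. The regressor, the synthetic data and the 'results' output folder
# are assumptions made for illustration; the SPaT_Prediction pipeline wires these
# up elsewhere.
if __name__ == '__main__':
    import numpy as np
    from sklearn.tree import DecisionTreeRegressor

    X_demo = np.random.rand(100, 3)      # synthetic examples (assumption)
    y_demo = X_demo.sum(axis=1)          # synthetic targets (assumption)
    demo_model = DecisionTreeRegressor().fit(X_demo, y_demo)
    y_predicted = demo_model.predict(X_demo)

    score_dt('dt_demo', demo_model, X_demo, y_demo, y_predicted, 'results')
    plot_dt('dt_demo', y_predicted, y_demo, 'results')
    save_dt_model('dt_demo', demo_model, 'results')
    reloaded = load_dt_model('results/models/dt_demo.pkl')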
| priscillaboyd/SPaT_Prediction | src/decision_tree/DT_Utils.py | Python | apache-2.0 | 3,533 |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.repo.sql.data.audit;
import com.evolveum.midpoint.audit.api.AuditEventRecord;
import com.evolveum.midpoint.audit.api.AuditService;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.repo.sql.data.common.enums.ROperationResultStatus;
import com.evolveum.midpoint.repo.sql.data.common.other.RObjectType;
import com.evolveum.midpoint.repo.sql.util.ClassMapper;
import com.evolveum.midpoint.repo.sql.util.DtoTranslationException;
import com.evolveum.midpoint.repo.sql.util.RUtil;
import com.evolveum.midpoint.schema.ObjectDeltaOperation;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.apache.commons.lang.Validate;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.ForeignKey;
import javax.persistence.*;
import javax.xml.namespace.QName;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author lazyman
*/
@Entity
@Table(name = RAuditEventRecord.TABLE_NAME, indexes = {
@Index(name = "iTimestampValue", columnList = RAuditEventRecord.COLUMN_TIMESTAMP)}) // TODO correct index name
public class RAuditEventRecord implements Serializable {
public static final String TABLE_NAME = "m_audit_event";
public static final String COLUMN_TIMESTAMP = "timestampValue";
private long id;
private Timestamp timestamp;
private String eventIdentifier;
private String sessionIdentifier;
private String taskIdentifier;
private String taskOID;
private String hostIdentifier;
//prism object - user
private String initiatorOid;
private String initiatorName;
//prism object
private String targetOid;
private String targetName;
private RObjectType targetType;
//prism object - user
private String targetOwnerOid;
private String targetOwnerName;
private RAuditEventType eventType;
private RAuditEventStage eventStage;
//collection of object deltas
private Set<RObjectDeltaOperation> deltas;
private String channel;
private ROperationResultStatus outcome;
private String parameter;
private String message;
private String result;
public String getResult() {
return result;
}
@Column(length = 1024)
public String getMessage() {
return message;
}
public String getParameter() {
return parameter;
}
public String getChannel() {
return channel;
}
@ForeignKey(name = "fk_audit_delta")
@OneToMany(mappedBy = "record", orphanRemoval = true)
@Cascade({org.hibernate.annotations.CascadeType.ALL})
public Set<RObjectDeltaOperation> getDeltas() {
if (deltas == null) {
deltas = new HashSet<RObjectDeltaOperation>();
}
return deltas;
}
public String getEventIdentifier() {
return eventIdentifier;
}
@Enumerated(EnumType.ORDINAL)
public RAuditEventStage getEventStage() {
return eventStage;
}
@Enumerated(EnumType.ORDINAL)
public RAuditEventType getEventType() {
return eventType;
}
public String getHostIdentifier() {
return hostIdentifier;
}
@Id
@GeneratedValue
public long getId() {
return id;
}
@Column(length = RUtil.COLUMN_LENGTH_OID)
public String getInitiatorOid() {
return initiatorOid;
}
public String getInitiatorName() {
return initiatorName;
}
@Enumerated(EnumType.ORDINAL)
public ROperationResultStatus getOutcome() {
return outcome;
}
public String getSessionIdentifier() {
return sessionIdentifier;
}
public String getTargetName() {
return targetName;
}
@Column(length = RUtil.COLUMN_LENGTH_OID)
public String getTargetOid() {
return targetOid;
}
@Enumerated(EnumType.ORDINAL)
public RObjectType getTargetType() {
return targetType;
}
public String getTargetOwnerName() {
return targetOwnerName;
}
@Column(length = RUtil.COLUMN_LENGTH_OID)
public String getTargetOwnerOid() {
return targetOwnerOid;
}
public String getTaskIdentifier() {
return taskIdentifier;
}
public String getTaskOID() {
return taskOID;
}
@Column(name = COLUMN_TIMESTAMP)
public Timestamp getTimestamp() {
return timestamp;
}
public void setMessage(String message) {
this.message = message;
}
public void setParameter(String parameter) {
this.parameter = parameter;
}
public void setChannel(String channel) {
this.channel = channel;
}
public void setDeltas(Set<RObjectDeltaOperation> deltas) {
this.deltas = deltas;
}
public void setEventIdentifier(String eventIdentifier) {
this.eventIdentifier = eventIdentifier;
}
public void setEventStage(RAuditEventStage eventStage) {
this.eventStage = eventStage;
}
public void setEventType(RAuditEventType eventType) {
this.eventType = eventType;
}
public void setHostIdentifier(String hostIdentifier) {
this.hostIdentifier = hostIdentifier;
}
public void setId(long id) {
this.id = id;
}
public void setInitiatorName(String initiatorName) {
this.initiatorName = initiatorName;
}
public void setInitiatorOid(String initiatorOid) {
this.initiatorOid = initiatorOid;
}
public void setOutcome(ROperationResultStatus outcome) {
this.outcome = outcome;
}
public void setSessionIdentifier(String sessionIdentifier) {
this.sessionIdentifier = sessionIdentifier;
}
public void setTargetName(String targetName) {
this.targetName = targetName;
}
public void setTargetOid(String targetOid) {
this.targetOid = targetOid;
}
public void setTargetType(RObjectType targetType) {
this.targetType = targetType;
}
public void setTargetOwnerName(String targetOwnerName) {
this.targetOwnerName = targetOwnerName;
}
public void setTargetOwnerOid(String targetOwnerOid) {
this.targetOwnerOid = targetOwnerOid;
}
public void setTaskIdentifier(String taskIdentifier) {
this.taskIdentifier = taskIdentifier;
}
public void setTaskOID(String taskOID) {
this.taskOID = taskOID;
}
public void setTimestamp(Timestamp timestamp) {
this.timestamp = timestamp;
}
public void setResult(String result) {
this.result = result;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RAuditEventRecord that = (RAuditEventRecord) o;
if (channel != null ? !channel.equals(that.channel) : that.channel != null) return false;
if (deltas != null ? !deltas.equals(that.deltas) : that.deltas != null) return false;
if (eventIdentifier != null ? !eventIdentifier.equals(that.eventIdentifier) : that.eventIdentifier != null)
return false;
if (eventStage != that.eventStage) return false;
if (eventType != that.eventType) return false;
if (hostIdentifier != null ? !hostIdentifier.equals(that.hostIdentifier) : that.hostIdentifier != null)
return false;
if (initiatorOid != null ? !initiatorOid.equals(that.initiatorOid) : that.initiatorOid != null) return false;
if (initiatorName != null ? !initiatorName.equals(that.initiatorName) : that.initiatorName != null)
return false;
if (outcome != that.outcome) return false;
if (sessionIdentifier != null ? !sessionIdentifier.equals(that.sessionIdentifier) : that.sessionIdentifier != null)
return false;
if (targetOid != null ? !targetOid.equals(that.targetOid) : that.targetOid != null) return false;
if (targetName != null ? !targetName.equals(that.targetName) : that.targetName != null) return false;
if (targetType != null ? !targetType.equals(that.targetType) : that.targetType != null) return false;
if (targetOwnerOid != null ? !targetOwnerOid.equals(that.targetOwnerOid) : that.targetOwnerOid != null)
return false;
if (targetOwnerName != null ? !targetOwnerName.equals(that.targetOwnerName) : that.targetOwnerName != null)
return false;
if (taskIdentifier != null ? !taskIdentifier.equals(that.taskIdentifier) : that.taskIdentifier != null)
return false;
if (taskOID != null ? !taskOID.equals(that.taskOID) : that.taskOID != null) return false;
if (timestamp != null ? !timestamp.equals(that.timestamp) : that.timestamp != null) return false;
if (parameter != null ? !parameter.equals(that.parameter) : that.parameter != null) return false;
if (message != null ? !message.equals(that.message) : that.message != null) return false;
if (result != null ? !result.equals(that.result) : that.result != null) return false;
return true;
}
@Override
public int hashCode() {
int result = timestamp != null ? timestamp.hashCode() : 0;
result = 31 * result + (eventIdentifier != null ? eventIdentifier.hashCode() : 0);
result = 31 * result + (sessionIdentifier != null ? sessionIdentifier.hashCode() : 0);
result = 31 * result + (taskIdentifier != null ? taskIdentifier.hashCode() : 0);
result = 31 * result + (taskOID != null ? taskOID.hashCode() : 0);
result = 31 * result + (hostIdentifier != null ? hostIdentifier.hashCode() : 0);
result = 31 * result + (initiatorName != null ? initiatorName.hashCode() : 0);
result = 31 * result + (initiatorOid != null ? initiatorOid.hashCode() : 0);
result = 31 * result + (targetOid != null ? targetOid.hashCode() : 0);
result = 31 * result + (targetName != null ? targetName.hashCode() : 0);
result = 31 * result + (targetType != null ? targetType.hashCode() : 0);
result = 31 * result + (targetOwnerOid != null ? targetOwnerOid.hashCode() : 0);
result = 31 * result + (targetOwnerName != null ? targetOwnerName.hashCode() : 0);
result = 31 * result + (eventType != null ? eventType.hashCode() : 0);
result = 31 * result + (eventStage != null ? eventStage.hashCode() : 0);
result = 31 * result + (deltas != null ? deltas.hashCode() : 0);
result = 31 * result + (channel != null ? channel.hashCode() : 0);
result = 31 * result + (outcome != null ? outcome.hashCode() : 0);
result = 31 * result + (parameter != null ? parameter.hashCode() : 0);
result = 31 * result + (message != null ? message.hashCode() : 0);
result = 31 * result + (this.result != null ? this.result.hashCode() : 0);
return result;
}
public static RAuditEventRecord toRepo(AuditEventRecord record, PrismContext prismContext)
throws DtoTranslationException {
Validate.notNull(record, "Audit event record must not be null.");
Validate.notNull(prismContext, "Prism context must not be null.");
RAuditEventRecord repo = new RAuditEventRecord();
repo.setChannel(record.getChannel());
if (record.getTimestamp() != null) {
repo.setTimestamp(new Timestamp(record.getTimestamp()));
}
repo.setEventStage(RAuditEventStage.toRepo(record.getEventStage()));
repo.setEventType(RAuditEventType.toRepo(record.getEventType()));
repo.setSessionIdentifier(record.getSessionIdentifier());
repo.setEventIdentifier(record.getEventIdentifier());
repo.setHostIdentifier(record.getHostIdentifier());
repo.setParameter(record.getParameter());
repo.setMessage(trimMessage(record.getMessage()));
if (record.getOutcome() != null) {
repo.setOutcome(RUtil.getRepoEnumValue(record.getOutcome().createStatusType(), ROperationResultStatus.class));
}
repo.setTaskIdentifier(record.getTaskIdentifier());
repo.setTaskOID(record.getTaskOID());
repo.setResult(record.getResult());
try {
if (record.getTarget() != null) {
PrismObject target = record.getTarget();
repo.setTargetName(getOrigName(target));
repo.setTargetOid(target.getOid());
QName type = ObjectTypes.getObjectType(target.getCompileTimeClass()).getTypeQName();
repo.setTargetType(ClassMapper.getHQLTypeForQName(type));
}
if (record.getTargetOwner() != null) {
PrismObject targetOwner = record.getTargetOwner();
repo.setTargetOwnerName(getOrigName(targetOwner));
repo.setTargetOwnerOid(targetOwner.getOid());
}
if (record.getInitiator() != null) {
PrismObject<UserType> initiator = record.getInitiator();
repo.setInitiatorName(getOrigName(initiator));
repo.setInitiatorOid(initiator.getOid());
}
for (ObjectDeltaOperation<?> delta : record.getDeltas()) {
if (delta == null) {
continue;
}
RObjectDeltaOperation rDelta = RObjectDeltaOperation.toRepo(repo, delta, prismContext);
rDelta.setTransient(true);
rDelta.setRecord(repo);
repo.getDeltas().add(rDelta);
}
} catch (Exception ex) {
throw new DtoTranslationException(ex.getMessage(), ex);
}
return repo;
}
public static AuditEventRecord fromRepo(RAuditEventRecord repo, PrismContext prismContext) throws DtoTranslationException{
AuditEventRecord audit = new AuditEventRecord();
audit.setChannel(repo.getChannel());
audit.setEventIdentifier(repo.getEventIdentifier());
if (repo.getEventStage() != null){
audit.setEventStage(repo.getEventStage().getStage());
}
if (repo.getEventType() != null){
audit.setEventType(repo.getEventType().getType());
}
audit.setHostIdentifier(repo.getHostIdentifier());
audit.setMessage(repo.getMessage());
if (repo.getOutcome() != null){
audit.setOutcome(repo.getOutcome().getStatus());
}
audit.setParameter(repo.getParameter());
audit.setResult(repo.getResult());
audit.setSessionIdentifier(repo.getSessionIdentifier());
audit.setTaskIdentifier(repo.getTaskIdentifier());
audit.setTaskOID(repo.getTaskOID());
if (repo.getTimestamp() != null){
audit.setTimestamp(repo.getTimestamp().getTime());
}
List<ObjectDeltaOperation> odos = new ArrayList<ObjectDeltaOperation>();
for (RObjectDeltaOperation rodo : repo.getDeltas()){
try {
ObjectDeltaOperation odo = RObjectDeltaOperation.fromRepo(rodo, prismContext);
if (odo != null){
odos.add(odo);
}
} catch (Exception ex){
// TODO: for now this is OK; if we cannot parse the delta, just skip it. Has to be resolved later.
}
}
audit.getDeltas().addAll((Collection) odos);
return audit;
//initiator, target, targetOwner
}
private static String trimMessage(String message) {
if (message == null || message.length() <= AuditService.MAX_MESSAGE_SIZE) {
return message;
}
return message.substring(0, AuditService.MAX_MESSAGE_SIZE - 4) + "...";
}
private static String getOrigName(PrismObject object) {
PolyString name = (PolyString) object.getPropertyRealValue(ObjectType.F_NAME, PolyString.class);
return name != null ? name.getOrig() : null;
}
}
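/*
 * Illustrative round-trip sketch (added for clarity; not part of the original midPoint source).
 * It assumes an AuditEventRecord and a PrismContext are already available from the calling code;
 * only the conversion calls are shown, and persistence of the entity is left as a comment.
 */
class RAuditEventRecordUsageSketch {
    static AuditEventRecord roundTrip(AuditEventRecord record, PrismContext prismContext)
            throws DtoTranslationException {
        // map the API-level audit record to the Hibernate entity...
        RAuditEventRecord repo = RAuditEventRecord.toRepo(record, prismContext);
        // ... persist 'repo' through the repository session here ...
        // ... and map a loaded entity back to the API-level record
        return RAuditEventRecord.fromRepo(repo, prismContext);
    }
}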
| rpudil/midpoint | repo/repo-sql-impl/src/main/java/com/evolveum/midpoint/repo/sql/data/audit/RAuditEventRecord.java | Java | apache-2.0 | 16,740 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_232) on Tue Sep 15 08:53:05 UTC 2020 -->
<title>Uses of Class org.springframework.jmx.export.metadata.ManagedAttribute (Spring Framework 5.1.18.RELEASE API)</title>
<meta name="date" content="2020-09-15">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.springframework.jmx.export.metadata.ManagedAttribute (Spring Framework 5.1.18.RELEASE API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Spring Framework</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/springframework/jmx/export/metadata/class-use/ManagedAttribute.html" target="_top">Frames</a></li>
<li><a href="ManagedAttribute.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.springframework.jmx.export.metadata.ManagedAttribute" class="title">Uses of Class<br>org.springframework.jmx.export.metadata.ManagedAttribute</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.springframework.jmx.export.annotation">org.springframework.jmx.export.annotation</a></td>
<td class="colLast">
<div class="block">Java 5 annotations for MBean exposure.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.springframework.jmx.export.metadata">org.springframework.jmx.export.metadata</a></td>
<td class="colLast">
<div class="block">Provides generic JMX metadata classes and basic support for reading
JMX metadata in a provider-agnostic manner.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.springframework.jmx.export.annotation">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a> in <a href="../../../../../../org/springframework/jmx/export/annotation/package-summary.html">org.springframework.jmx.export.annotation</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/springframework/jmx/export/annotation/package-summary.html">org.springframework.jmx.export.annotation</a> that return <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></code></td>
<td class="colLast"><span class="typeNameLabel">AnnotationJmxAttributeSource.</span><code><span class="memberNameLink"><a href="../../../../../../org/springframework/jmx/export/annotation/AnnotationJmxAttributeSource.html#getManagedAttribute-java.lang.reflect.Method-">getManagedAttribute</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a> method)</code> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.springframework.jmx.export.metadata">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a> in <a href="../../../../../../org/springframework/jmx/export/metadata/package-summary.html">org.springframework.jmx.export.metadata</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
<caption><span>Fields in <a href="../../../../../../org/springframework/jmx/export/metadata/package-summary.html">org.springframework.jmx.export.metadata</a> declared as <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></code></td>
<td class="colLast"><span class="typeNameLabel">ManagedAttribute.</span><code><span class="memberNameLink"><a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html#EMPTY">EMPTY</a></span></code>
<div class="block">Empty attributes.</div>
</td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/springframework/jmx/export/metadata/package-summary.html">org.springframework.jmx.export.metadata</a> that return <a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">ManagedAttribute</a></code></td>
<td class="colLast"><span class="typeNameLabel">JmxAttributeSource.</span><code><span class="memberNameLink"><a href="../../../../../../org/springframework/jmx/export/metadata/JmxAttributeSource.html#getManagedAttribute-java.lang.reflect.Method-">getManagedAttribute</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a> method)</code>
<div class="block">Implementations should return an instance of <code>ManagedAttribute</code>
if the supplied <code>Method</code> has the corresponding metadata.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/springframework/jmx/export/metadata/ManagedAttribute.html" title="class in org.springframework.jmx.export.metadata">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Spring Framework</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/springframework/jmx/export/metadata/class-use/ManagedAttribute.html" target="_top">Frames</a></li>
<li><a href="ManagedAttribute.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| akhr/java | Spring/jars/spring-framework-5.1.18.RELEASE/docs/javadoc-api/org/springframework/jmx/export/metadata/class-use/ManagedAttribute.html | HTML | apache-2.0 | 10,929 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.worklink.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/worklink-2018-09-25/DescribeDevice" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeDeviceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The current state of the device.
* </p>
*/
private String status;
/**
* <p>
* The model of the device.
* </p>
*/
private String model;
/**
* <p>
* The manufacturer of the device.
* </p>
*/
private String manufacturer;
/**
* <p>
* The operating system of the device.
* </p>
*/
private String operatingSystem;
/**
* <p>
* The operating system version of the device.
* </p>
*/
private String operatingSystemVersion;
/**
* <p>
* The operating system patch level of the device.
* </p>
*/
private String patchLevel;
/**
* <p>
* The date that the device first signed in to Amazon WorkLink.
* </p>
*/
private java.util.Date firstAccessedTime;
/**
* <p>
* The date that the device last accessed Amazon WorkLink.
* </p>
*/
private java.util.Date lastAccessedTime;
/**
* <p>
* The user name associated with the device.
* </p>
*/
private String username;
/**
* <p>
* The current state of the device.
* </p>
*
* @param status
* The current state of the device.
* @see DeviceStatus
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* The current state of the device.
* </p>
*
* @return The current state of the device.
* @see DeviceStatus
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* The current state of the device.
* </p>
*
* @param status
* The current state of the device.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DeviceStatus
*/
public DescribeDeviceResult withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* The current state of the device.
* </p>
*
* @param status
* The current state of the device.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DeviceStatus
*/
public DescribeDeviceResult withStatus(DeviceStatus status) {
this.status = status.toString();
return this;
}
/**
* <p>
* The model of the device.
* </p>
*
* @param model
* The model of the device.
*/
public void setModel(String model) {
this.model = model;
}
/**
* <p>
* The model of the device.
* </p>
*
* @return The model of the device.
*/
public String getModel() {
return this.model;
}
/**
* <p>
* The model of the device.
* </p>
*
* @param model
* The model of the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withModel(String model) {
setModel(model);
return this;
}
/**
* <p>
* The manufacturer of the device.
* </p>
*
* @param manufacturer
* The manufacturer of the device.
*/
public void setManufacturer(String manufacturer) {
this.manufacturer = manufacturer;
}
/**
* <p>
* The manufacturer of the device.
* </p>
*
* @return The manufacturer of the device.
*/
public String getManufacturer() {
return this.manufacturer;
}
/**
* <p>
* The manufacturer of the device.
* </p>
*
* @param manufacturer
* The manufacturer of the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withManufacturer(String manufacturer) {
setManufacturer(manufacturer);
return this;
}
/**
* <p>
* The operating system of the device.
* </p>
*
* @param operatingSystem
* The operating system of the device.
*/
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
/**
* <p>
* The operating system of the device.
* </p>
*
* @return The operating system of the device.
*/
public String getOperatingSystem() {
return this.operatingSystem;
}
/**
* <p>
* The operating system of the device.
* </p>
*
* @param operatingSystem
* The operating system of the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withOperatingSystem(String operatingSystem) {
setOperatingSystem(operatingSystem);
return this;
}
/**
* <p>
* The operating system version of the device.
* </p>
*
* @param operatingSystemVersion
* The operating system version of the device.
*/
public void setOperatingSystemVersion(String operatingSystemVersion) {
this.operatingSystemVersion = operatingSystemVersion;
}
/**
* <p>
* The operating system version of the device.
* </p>
*
* @return The operating system version of the device.
*/
public String getOperatingSystemVersion() {
return this.operatingSystemVersion;
}
/**
* <p>
* The operating system version of the device.
* </p>
*
* @param operatingSystemVersion
* The operating system version of the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withOperatingSystemVersion(String operatingSystemVersion) {
setOperatingSystemVersion(operatingSystemVersion);
return this;
}
/**
* <p>
* The operating system patch level of the device.
* </p>
*
* @param patchLevel
* The operating system patch level of the device.
*/
public void setPatchLevel(String patchLevel) {
this.patchLevel = patchLevel;
}
/**
* <p>
* The operating system patch level of the device.
* </p>
*
* @return The operating system patch level of the device.
*/
public String getPatchLevel() {
return this.patchLevel;
}
/**
* <p>
* The operating system patch level of the device.
* </p>
*
* @param patchLevel
* The operating system patch level of the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withPatchLevel(String patchLevel) {
setPatchLevel(patchLevel);
return this;
}
/**
* <p>
* The date that the device first signed in to Amazon WorkLink.
* </p>
*
* @param firstAccessedTime
* The date that the device first signed in to Amazon WorkLink.
*/
public void setFirstAccessedTime(java.util.Date firstAccessedTime) {
this.firstAccessedTime = firstAccessedTime;
}
/**
* <p>
* The date that the device first signed in to Amazon WorkLink.
* </p>
*
* @return The date that the device first signed in to Amazon WorkLink.
*/
public java.util.Date getFirstAccessedTime() {
return this.firstAccessedTime;
}
/**
* <p>
* The date that the device first signed in to Amazon WorkLink.
* </p>
*
* @param firstAccessedTime
* The date that the device first signed in to Amazon WorkLink.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withFirstAccessedTime(java.util.Date firstAccessedTime) {
setFirstAccessedTime(firstAccessedTime);
return this;
}
/**
* <p>
* The date that the device last accessed Amazon WorkLink.
* </p>
*
* @param lastAccessedTime
* The date that the device last accessed Amazon WorkLink.
*/
public void setLastAccessedTime(java.util.Date lastAccessedTime) {
this.lastAccessedTime = lastAccessedTime;
}
/**
* <p>
* The date that the device last accessed Amazon WorkLink.
* </p>
*
* @return The date that the device last accessed Amazon WorkLink.
*/
public java.util.Date getLastAccessedTime() {
return this.lastAccessedTime;
}
/**
* <p>
* The date that the device last accessed Amazon WorkLink.
* </p>
*
* @param lastAccessedTime
* The date that the device last accessed Amazon WorkLink.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withLastAccessedTime(java.util.Date lastAccessedTime) {
setLastAccessedTime(lastAccessedTime);
return this;
}
/**
* <p>
* The user name associated with the device.
* </p>
*
* @param username
* The user name associated with the device.
*/
public void setUsername(String username) {
this.username = username;
}
/**
* <p>
* The user name associated with the device.
* </p>
*
* @return The user name associated with the device.
*/
public String getUsername() {
return this.username;
}
/**
* <p>
* The user name associated with the device.
* </p>
*
* @param username
* The user name associated with the device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDeviceResult withUsername(String username) {
setUsername(username);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getModel() != null)
sb.append("Model: ").append(getModel()).append(",");
if (getManufacturer() != null)
sb.append("Manufacturer: ").append(getManufacturer()).append(",");
if (getOperatingSystem() != null)
sb.append("OperatingSystem: ").append(getOperatingSystem()).append(",");
if (getOperatingSystemVersion() != null)
sb.append("OperatingSystemVersion: ").append(getOperatingSystemVersion()).append(",");
if (getPatchLevel() != null)
sb.append("PatchLevel: ").append(getPatchLevel()).append(",");
if (getFirstAccessedTime() != null)
sb.append("FirstAccessedTime: ").append(getFirstAccessedTime()).append(",");
if (getLastAccessedTime() != null)
sb.append("LastAccessedTime: ").append(getLastAccessedTime()).append(",");
if (getUsername() != null)
sb.append("Username: ").append(getUsername());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeDeviceResult == false)
return false;
DescribeDeviceResult other = (DescribeDeviceResult) obj;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getModel() == null ^ this.getModel() == null)
return false;
if (other.getModel() != null && other.getModel().equals(this.getModel()) == false)
return false;
if (other.getManufacturer() == null ^ this.getManufacturer() == null)
return false;
if (other.getManufacturer() != null && other.getManufacturer().equals(this.getManufacturer()) == false)
return false;
if (other.getOperatingSystem() == null ^ this.getOperatingSystem() == null)
return false;
if (other.getOperatingSystem() != null && other.getOperatingSystem().equals(this.getOperatingSystem()) == false)
return false;
if (other.getOperatingSystemVersion() == null ^ this.getOperatingSystemVersion() == null)
return false;
if (other.getOperatingSystemVersion() != null && other.getOperatingSystemVersion().equals(this.getOperatingSystemVersion()) == false)
return false;
if (other.getPatchLevel() == null ^ this.getPatchLevel() == null)
return false;
if (other.getPatchLevel() != null && other.getPatchLevel().equals(this.getPatchLevel()) == false)
return false;
if (other.getFirstAccessedTime() == null ^ this.getFirstAccessedTime() == null)
return false;
if (other.getFirstAccessedTime() != null && other.getFirstAccessedTime().equals(this.getFirstAccessedTime()) == false)
return false;
if (other.getLastAccessedTime() == null ^ this.getLastAccessedTime() == null)
return false;
if (other.getLastAccessedTime() != null && other.getLastAccessedTime().equals(this.getLastAccessedTime()) == false)
return false;
if (other.getUsername() == null ^ this.getUsername() == null)
return false;
if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getModel() == null) ? 0 : getModel().hashCode());
hashCode = prime * hashCode + ((getManufacturer() == null) ? 0 : getManufacturer().hashCode());
hashCode = prime * hashCode + ((getOperatingSystem() == null) ? 0 : getOperatingSystem().hashCode());
hashCode = prime * hashCode + ((getOperatingSystemVersion() == null) ? 0 : getOperatingSystemVersion().hashCode());
hashCode = prime * hashCode + ((getPatchLevel() == null) ? 0 : getPatchLevel().hashCode());
hashCode = prime * hashCode + ((getFirstAccessedTime() == null) ? 0 : getFirstAccessedTime().hashCode());
hashCode = prime * hashCode + ((getLastAccessedTime() == null) ? 0 : getLastAccessedTime().hashCode());
hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode());
return hashCode;
}
@Override
public DescribeDeviceResult clone() {
try {
return (DescribeDeviceResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
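/*
 * Illustrative sketch (added for clarity; not part of the generated SDK source): the with*
 * methods return this object, so a result can be populated by chaining calls. All field
 * values below are made-up placeholders.
 */
class DescribeDeviceResultUsageSketch {
    static DescribeDeviceResult sample() {
        return new DescribeDeviceResult()
                .withStatus("ACTIVE")
                .withModel("Pixel 3")
                .withManufacturer("Google")
                .withOperatingSystem("Android")
                .withOperatingSystemVersion("10")
                .withUsername("[email protected]");
    }
}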
| jentfoo/aws-sdk-java | aws-java-sdk-worklink/src/main/java/com/amazonaws/services/worklink/model/DescribeDeviceResult.java | Java | apache-2.0 | 16,586 |
// Java Genetic Algorithm Library.
// Copyright (c) 2017 Franz Wilhelmstötter
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Author:
// Franz Wilhelmstötter ([email protected])
using System;
using System.Collections.Generic;
using Jenetics.Internal.Util;
using Jenetics.Util;
namespace Jenetics
{
[Serializable]
public class DoubleChromosome : BoundedChromosomeBase<double, DoubleGene>,
INumericChromosome<double, DoubleGene>
{
private DoubleChromosome(IImmutableSeq<DoubleGene> genes) : base(genes)
{
}
public DoubleChromosome(double min, double max, int length = 1) : this(DoubleGene.Seq(min, max, length))
{
Valid = true;
}
public override IEnumerator<DoubleGene> GetEnumerator()
{
return Genes.GetEnumerator();
}
public override IChromosome<DoubleGene> NewInstance()
{
return new DoubleChromosome(Min, Max, Length);
}
public override IChromosome<DoubleGene> NewInstance(IImmutableSeq<DoubleGene> genes)
{
return new DoubleChromosome(genes);
}
public static DoubleChromosome Of(double min, double max)
{
return new DoubleChromosome(min, max);
}
public static DoubleChromosome Of(double min, double max, int length)
{
return new DoubleChromosome(min, max, length);
}
public static DoubleChromosome Of(DoubleRange range)
{
return new DoubleChromosome(range.Min, range.Max);
}
public static DoubleChromosome Of(params DoubleGene[] genes)
{
return new DoubleChromosome(ImmutableSeq.Of(genes));
}
public override bool Equals(object obj)
{
return Equality.Of(this, obj)(base.Equals);
}
public override int GetHashCode()
{
return Hash.Of(GetType()).And(base.GetHashCode()).Value;
}
}
}
| rmeindl/jenetics.net | src/core/Jenetics/DoubleChromosome.cs | C# | apache-2.0 | 2,538 |
/*
* Copyright 2012-2014 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENSE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package nl.esciencecenter.ptk.web;
/**
* Interface for Managed HTTP Streams.
*/
public interface WebStream {
public boolean autoClose();
//public boolean isChunked();
}
| NLeSC/Platinum | ptk-web/src/main/java/nl/esciencecenter/ptk/web/WebStream.java | Java | apache-2.0 | 941 |
<?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2020-04-10 09:56:55 PM UTC
*/
namespace NetSuite\Classes;
class Customer extends Record {
/**
* @var \NetSuite\Classes\RecordRef
*/
public $customForm;
/**
* @var string
*/
public $entityId;
/**
* @var string
*/
public $altName;
/**
* @var boolean
*/
public $isPerson;
/**
* @var string
*/
public $phoneticName;
/**
* @var string
*/
public $salutation;
/**
* @var string
*/
public $firstName;
/**
* @var string
*/
public $middleName;
/**
* @var string
*/
public $lastName;
/**
* @var string
*/
public $companyName;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $entityStatus;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $parent;
/**
* @var string
*/
public $phone;
/**
* @var string
*/
public $fax;
/**
* @var string
*/
public $email;
/**
* @var string
*/
public $url;
/**
* @var string
*/
public $defaultAddress;
/**
* @var boolean
*/
public $isInactive;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $category;
/**
* @var string
*/
public $title;
/**
* @var string
*/
public $printOnCheckAs;
/**
* @var string
*/
public $altPhone;
/**
* @var string
*/
public $homePhone;
/**
* @var string
*/
public $mobilePhone;
/**
* @var string
*/
public $altEmail;
/**
* @var \NetSuite\Classes\Language
*/
public $language;
/**
* @var string
*/
public $comments;
/**
* @var \NetSuite\Classes\CustomerNumberFormat
*/
public $numberFormat;
/**
* @var \NetSuite\Classes\CustomerNegativeNumberFormat
*/
public $negativeNumberFormat;
/**
* @var string
*/
public $dateCreated;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $image;
/**
* @var \NetSuite\Classes\EmailPreference
*/
public $emailPreference;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $subsidiary;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $representingSubsidiary;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $salesRep;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $territory;
/**
* @var string
*/
public $contribPct;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $partner;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $salesGroup;
/**
* @var string
*/
public $vatRegNumber;
/**
* @var string
*/
public $accountNumber;
/**
* @var boolean
*/
public $taxExempt;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $terms;
/**
* @var float
*/
public $creditLimit;
/**
* @var \NetSuite\Classes\CustomerCreditHoldOverride
*/
public $creditHoldOverride;
/**
* @var \NetSuite\Classes\CustomerMonthlyClosing
*/
public $monthlyClosing;
/**
* @var boolean
*/
public $overrideCurrencyFormat;
/**
* @var string
*/
public $displaySymbol;
/**
* @var \NetSuite\Classes\CurrencySymbolPlacement
*/
public $symbolPlacement;
/**
* @var float
*/
public $balance;
/**
* @var float
*/
public $overdueBalance;
/**
* @var integer
*/
public $daysOverdue;
/**
* @var float
*/
public $unbilledOrders;
/**
* @var float
*/
public $consolUnbilledOrders;
/**
* @var float
*/
public $consolOverdueBalance;
/**
* @var float
*/
public $consolDepositBalance;
/**
* @var float
*/
public $consolBalance;
/**
* @var float
*/
public $consolAging;
/**
* @var float
*/
public $consolAging1;
/**
* @var float
*/
public $consolAging2;
/**
* @var float
*/
public $consolAging3;
/**
* @var float
*/
public $consolAging4;
/**
* @var integer
*/
public $consolDaysOverdue;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $priceLevel;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $currency;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $prefCCProcessor;
/**
* @var float
*/
public $depositBalance;
/**
* @var boolean
*/
public $shipComplete;
/**
* @var boolean
*/
public $taxable;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $taxItem;
/**
* @var string
*/
public $resaleNumber;
/**
* @var float
*/
public $aging;
/**
* @var float
*/
public $aging1;
/**
* @var float
*/
public $aging2;
/**
* @var float
*/
public $aging3;
/**
* @var float
*/
public $aging4;
/**
* @var string
*/
public $startDate;
/**
* @var \NetSuite\Classes\AlcoholRecipientType
*/
public $alcoholRecipientType;
/**
* @var string
*/
public $endDate;
/**
* @var integer
*/
public $reminderDays;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $shippingItem;
/**
* @var string
*/
public $thirdPartyAcct;
/**
* @var string
*/
public $thirdPartyZipcode;
/**
* @var \NetSuite\Classes\Country
*/
public $thirdPartyCountry;
/**
* @var boolean
*/
public $giveAccess;
/**
* @var float
*/
public $estimatedBudget;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $accessRole;
/**
* @var boolean
*/
public $sendEmail;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $assignedWebSite;
/**
* @var string
*/
public $password;
/**
* @var string
*/
public $password2;
/**
* @var boolean
*/
public $requirePwdChange;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $campaignCategory;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $sourceWebSite;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $leadSource;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $receivablesAccount;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $drAccount;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $fxAccount;
/**
* @var float
*/
public $defaultOrderPriority;
/**
* @var string
*/
public $webLead;
/**
* @var string
*/
public $referrer;
/**
* @var string
*/
public $keywords;
/**
* @var string
*/
public $clickStream;
/**
* @var string
*/
public $lastPageVisited;
/**
* @var integer
*/
public $visits;
/**
* @var string
*/
public $firstVisit;
/**
* @var string
*/
public $lastVisit;
/**
* @var boolean
*/
public $billPay;
/**
* @var float
*/
public $openingBalance;
/**
* @var string
*/
public $lastModifiedDate;
/**
* @var string
*/
public $openingBalanceDate;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $openingBalanceAccount;
/**
* @var \NetSuite\Classes\CustomerStage
*/
public $stage;
/**
* @var boolean
*/
public $emailTransactions;
/**
* @var boolean
*/
public $printTransactions;
/**
* @var boolean
*/
public $faxTransactions;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $defaultTaxReg;
/**
* @var boolean
*/
public $syncPartnerTeams;
/**
* @var boolean
*/
public $isBudgetApproved;
/**
* @var \NetSuite\Classes\GlobalSubscriptionStatus
*/
public $globalSubscriptionStatus;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $salesReadiness;
/**
* @var \NetSuite\Classes\CustomerSalesTeamList
*/
public $salesTeamList;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $buyingReason;
/**
* @var \NetSuite\Classes\CustomerDownloadList
*/
public $downloadList;
/**
* @var \NetSuite\Classes\RecordRef
*/
public $buyingTimeFrame;
/**
* @var \NetSuite\Classes\CustomerAddressbookList
*/
public $addressbookList;
/**
* @var \NetSuite\Classes\SubscriptionsList
*/
public $subscriptionsList;
/**
* @var \NetSuite\Classes\ContactAccessRolesList
*/
public $contactRolesList;
/**
* @var \NetSuite\Classes\CustomerCurrencyList
*/
public $currencyList;
/**
* @var \NetSuite\Classes\CustomerCreditCardsList
*/
public $creditCardsList;
/**
* @var \NetSuite\Classes\CustomerPartnersList
*/
public $partnersList;
/**
* @var \NetSuite\Classes\CustomerGroupPricingList
*/
public $groupPricingList;
/**
* @var \NetSuite\Classes\CustomerItemPricingList
*/
public $itemPricingList;
/**
* @var \NetSuite\Classes\CustomerTaxRegistrationList
*/
public $taxRegistrationList;
/**
* @var \NetSuite\Classes\CustomFieldList
*/
public $customFieldList;
/**
* @var string
*/
public $internalId;
/**
* @var string
*/
public $externalId;
static $paramtypesmap = array(
"customForm" => "RecordRef",
"entityId" => "string",
"altName" => "string",
"isPerson" => "boolean",
"phoneticName" => "string",
"salutation" => "string",
"firstName" => "string",
"middleName" => "string",
"lastName" => "string",
"companyName" => "string",
"entityStatus" => "RecordRef",
"parent" => "RecordRef",
"phone" => "string",
"fax" => "string",
"email" => "string",
"url" => "string",
"defaultAddress" => "string",
"isInactive" => "boolean",
"category" => "RecordRef",
"title" => "string",
"printOnCheckAs" => "string",
"altPhone" => "string",
"homePhone" => "string",
"mobilePhone" => "string",
"altEmail" => "string",
"language" => "Language",
"comments" => "string",
"numberFormat" => "CustomerNumberFormat",
"negativeNumberFormat" => "CustomerNegativeNumberFormat",
"dateCreated" => "dateTime",
"image" => "RecordRef",
"emailPreference" => "EmailPreference",
"subsidiary" => "RecordRef",
"representingSubsidiary" => "RecordRef",
"salesRep" => "RecordRef",
"territory" => "RecordRef",
"contribPct" => "string",
"partner" => "RecordRef",
"salesGroup" => "RecordRef",
"vatRegNumber" => "string",
"accountNumber" => "string",
"taxExempt" => "boolean",
"terms" => "RecordRef",
"creditLimit" => "float",
"creditHoldOverride" => "CustomerCreditHoldOverride",
"monthlyClosing" => "CustomerMonthlyClosing",
"overrideCurrencyFormat" => "boolean",
"displaySymbol" => "string",
"symbolPlacement" => "CurrencySymbolPlacement",
"balance" => "float",
"overdueBalance" => "float",
"daysOverdue" => "integer",
"unbilledOrders" => "float",
"consolUnbilledOrders" => "float",
"consolOverdueBalance" => "float",
"consolDepositBalance" => "float",
"consolBalance" => "float",
"consolAging" => "float",
"consolAging1" => "float",
"consolAging2" => "float",
"consolAging3" => "float",
"consolAging4" => "float",
"consolDaysOverdue" => "integer",
"priceLevel" => "RecordRef",
"currency" => "RecordRef",
"prefCCProcessor" => "RecordRef",
"depositBalance" => "float",
"shipComplete" => "boolean",
"taxable" => "boolean",
"taxItem" => "RecordRef",
"resaleNumber" => "string",
"aging" => "float",
"aging1" => "float",
"aging2" => "float",
"aging3" => "float",
"aging4" => "float",
"startDate" => "dateTime",
"alcoholRecipientType" => "AlcoholRecipientType",
"endDate" => "dateTime",
"reminderDays" => "integer",
"shippingItem" => "RecordRef",
"thirdPartyAcct" => "string",
"thirdPartyZipcode" => "string",
"thirdPartyCountry" => "Country",
"giveAccess" => "boolean",
"estimatedBudget" => "float",
"accessRole" => "RecordRef",
"sendEmail" => "boolean",
"assignedWebSite" => "RecordRef",
"password" => "string",
"password2" => "string",
"requirePwdChange" => "boolean",
"campaignCategory" => "RecordRef",
"sourceWebSite" => "RecordRef",
"leadSource" => "RecordRef",
"receivablesAccount" => "RecordRef",
"drAccount" => "RecordRef",
"fxAccount" => "RecordRef",
"defaultOrderPriority" => "float",
"webLead" => "string",
"referrer" => "string",
"keywords" => "string",
"clickStream" => "string",
"lastPageVisited" => "string",
"visits" => "integer",
"firstVisit" => "dateTime",
"lastVisit" => "dateTime",
"billPay" => "boolean",
"openingBalance" => "float",
"lastModifiedDate" => "dateTime",
"openingBalanceDate" => "dateTime",
"openingBalanceAccount" => "RecordRef",
"stage" => "CustomerStage",
"emailTransactions" => "boolean",
"printTransactions" => "boolean",
"faxTransactions" => "boolean",
"defaultTaxReg" => "RecordRef",
"syncPartnerTeams" => "boolean",
"isBudgetApproved" => "boolean",
"globalSubscriptionStatus" => "GlobalSubscriptionStatus",
"salesReadiness" => "RecordRef",
"salesTeamList" => "CustomerSalesTeamList",
"buyingReason" => "RecordRef",
"downloadList" => "CustomerDownloadList",
"buyingTimeFrame" => "RecordRef",
"addressbookList" => "CustomerAddressbookList",
"subscriptionsList" => "SubscriptionsList",
"contactRolesList" => "ContactAccessRolesList",
"currencyList" => "CustomerCurrencyList",
"creditCardsList" => "CustomerCreditCardsList",
"partnersList" => "CustomerPartnersList",
"groupPricingList" => "CustomerGroupPricingList",
"itemPricingList" => "CustomerItemPricingList",
"taxRegistrationList" => "CustomerTaxRegistrationList",
"customFieldList" => "CustomFieldList",
"internalId" => "string",
"externalId" => "string",
);
}
| RyanWinchester/netsuite-php | src/Classes/Customer.php | PHP | apache-2.0 | 15,854 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.honeycode;
import javax.annotation.Generated;
import com.amazonaws.ClientConfigurationFactory;
import com.amazonaws.annotation.NotThreadSafe;
import com.amazonaws.client.builder.AwsSyncClientBuilder;
import com.amazonaws.client.AwsSyncClientParams;
/**
* Fluent builder for {@link com.amazonaws.services.honeycode.AmazonHoneycode}. Use of the builder is preferred over
* using constructors of the client class.
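 *
 * <p>A minimal usage sketch: {@code standard().build()} is equivalent to {@link #defaultClient()} and
 * relies on the default credentials and region provider chains; no further configuration is required by
 * this builder itself.</p>
 *
 * <pre>
 * AmazonHoneycode client = AmazonHoneycodeClientBuilder.standard().build();
 * </pre>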
**/
@NotThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public final class AmazonHoneycodeClientBuilder extends AwsSyncClientBuilder<AmazonHoneycodeClientBuilder, AmazonHoneycode> {
private static final ClientConfigurationFactory CLIENT_CONFIG_FACTORY = new ClientConfigurationFactory();
/**
* @return Create new instance of builder with all defaults set.
*/
public static AmazonHoneycodeClientBuilder standard() {
return new AmazonHoneycodeClientBuilder();
}
/**
* @return Default client using the {@link com.amazonaws.auth.DefaultAWSCredentialsProviderChain} and
* {@link com.amazonaws.regions.DefaultAwsRegionProviderChain} chain
*/
public static AmazonHoneycode defaultClient() {
return standard().build();
}
private AmazonHoneycodeClientBuilder() {
super(CLIENT_CONFIG_FACTORY);
}
/**
* Construct a synchronous implementation of AmazonHoneycode using the current builder configuration.
*
* @param params
* Current builder configuration represented as a parameter object.
* @return Fully configured implementation of AmazonHoneycode.
*/
@Override
protected AmazonHoneycode build(AwsSyncClientParams params) {
return new AmazonHoneycodeClient(params);
}
}
| aws/aws-sdk-java | aws-java-sdk-honeycode/src/main/java/com/amazonaws/services/honeycode/AmazonHoneycodeClientBuilder.java | Java | apache-2.0 | 2,368 |
namespace ts {
describe("TransformAPI", () => {
function replaceUndefinedWithVoid0(context: TransformationContext) {
const previousOnSubstituteNode = context.onSubstituteNode;
context.enableSubstitution(SyntaxKind.Identifier);
context.onSubstituteNode = (hint, node) => {
node = previousOnSubstituteNode(hint, node);
if (hint === EmitHint.Expression && isIdentifier(node) && node.escapedText === "undefined") {
node = createPartiallyEmittedExpression(
addSyntheticTrailingComment(
setTextRange(
createVoidZero(),
node),
SyntaxKind.MultiLineCommentTrivia, "undefined"));
}
return node;
};
return (file: SourceFile) => file;
}
function replaceNumberWith2(context: TransformationContext) {
function visitor(node: Node): Node {
if (isNumericLiteral(node)) {
return createNumericLiteral("2");
}
return visitEachChild(node, visitor, context);
}
return (file: SourceFile) => visitNode(file, visitor);
}
function replaceIdentifiersNamedOldNameWithNewName(context: TransformationContext) {
const previousOnSubstituteNode = context.onSubstituteNode;
context.enableSubstitution(SyntaxKind.Identifier);
context.onSubstituteNode = (hint, node) => {
node = previousOnSubstituteNode(hint, node);
if (isIdentifier(node) && node.escapedText === "oldName") {
node = setTextRange(createIdentifier("newName"), node);
}
return node;
};
return (file: SourceFile) => file;
}
function replaceIdentifiersNamedOldNameWithNewName2(context: TransformationContext) {
const visitor: Visitor = (node) => {
if (isIdentifier(node) && node.text === "oldName") {
return createIdentifier("newName");
}
return visitEachChild(node, visitor, context);
};
return (node: SourceFile) => visitNode(node, visitor);
}
function transformSourceFile(sourceText: string, transformers: TransformerFactory<SourceFile>[]) {
const transformed = transform(createSourceFile("source.ts", sourceText, ScriptTarget.ES2015), transformers);
const printer = createPrinter({ newLine: NewLineKind.CarriageReturnLineFeed }, {
onEmitNode: transformed.emitNodeWithNotification,
substituteNode: transformed.substituteNode
});
const result = printer.printBundle(createBundle(transformed.transformed));
transformed.dispose();
return result;
}
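        // A minimal usage sketch (illustrative only, mirroring how the testBaseline callbacks below
        // call this helper with the transformer factories defined above):
        //
        //   const output = transformSourceFile(`let a = 1;`, [replaceNumberWith2]);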
function testBaseline(testName: string, test: () => string) {
it(testName, () => {
Harness.Baseline.runBaseline(`transformApi/transformsCorrectly.${testName}.js`, test());
});
}
testBaseline("substitution", () => {
return transformSourceFile(`var a = undefined;`, [replaceUndefinedWithVoid0]);
});
testBaseline("types", () => {
return transformSourceFile(`let a: () => void`, [
context => file => visitNode(file, function visitor(node: Node): VisitResult<Node> {
return visitEachChild(node, visitor, context);
})
]);
});
testBaseline("fromTranspileModule", () => {
return transpileModule(`var oldName = undefined;`, {
transformers: {
before: [replaceUndefinedWithVoid0],
after: [replaceIdentifiersNamedOldNameWithNewName]
},
compilerOptions: {
newLine: NewLineKind.CarriageReturnLineFeed
}
}).outputText;
});
testBaseline("issue27854", () => {
return transpileModule(`oldName<{ a: string; }>\` ... \`;`, {
transformers: {
before: [replaceIdentifiersNamedOldNameWithNewName2]
},
compilerOptions: {
newLine: NewLineKind.CarriageReturnLineFeed,
target: ScriptTarget.Latest
}
}).outputText;
});
testBaseline("rewrittenNamespace", () => {
return transpileModule(`namespace Reflect { const x = 1; }`, {
transformers: {
before: [forceNamespaceRewrite],
},
compilerOptions: {
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
testBaseline("rewrittenNamespaceFollowingClass", () => {
return transpileModule(`
class C { foo = 10; static bar = 20 }
namespace C { export let x = 10; }
`, {
transformers: {
before: [forceNamespaceRewrite],
},
compilerOptions: {
target: ScriptTarget.ESNext,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
testBaseline("transformTypesInExportDefault", () => {
return transpileModule(`
export default (foo: string) => { return 1; }
`, {
transformers: {
before: [replaceNumberWith2],
},
compilerOptions: {
target: ScriptTarget.ESNext,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
testBaseline("synthesizedClassAndNamespaceCombination", () => {
return transpileModule("", {
transformers: {
before: [replaceWithClassAndNamespace],
},
compilerOptions: {
target: ScriptTarget.ESNext,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
function replaceWithClassAndNamespace() {
return (sourceFile: SourceFile) => {
const result = getMutableClone(sourceFile);
result.statements = createNodeArray([
createClassDeclaration(/*decorators*/ undefined, /*modifiers*/ undefined, "Foo", /*typeParameters*/ undefined, /*heritageClauses*/ undefined, /*members*/ undefined!), // TODO: GH#18217
createModuleDeclaration(/*decorators*/ undefined, /*modifiers*/ undefined, createIdentifier("Foo"), createModuleBlock([createEmptyStatement()]))
]);
return result;
};
}
});
function forceNamespaceRewrite(context: TransformationContext) {
return (sourceFile: SourceFile): SourceFile => {
return visitNode(sourceFile);
function visitNode<T extends Node>(node: T): T {
if (node.kind === SyntaxKind.ModuleBlock) {
const block = node as T & ModuleBlock;
const statements = createNodeArray([...block.statements]);
return updateModuleBlock(block, statements) as typeof block;
}
return visitEachChild(node, visitNode, context);
}
};
}
testBaseline("transformAwayExportStar", () => {
return transpileModule("export * from './helper';", {
transformers: {
before: [expandExportStar],
},
compilerOptions: {
target: ScriptTarget.ESNext,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
function expandExportStar(context: TransformationContext) {
return (sourceFile: SourceFile): SourceFile => {
return visitNode(sourceFile);
function visitNode<T extends Node>(node: T): T {
if (node.kind === SyntaxKind.ExportDeclaration) {
const ed = node as Node as ExportDeclaration;
const exports = [{ name: "x" }];
const exportSpecifiers = exports.map(e => createExportSpecifier(e.name, e.name));
const exportClause = createNamedExports(exportSpecifiers);
const newEd = updateExportDeclaration(ed, ed.decorators, ed.modifiers, exportClause, ed.moduleSpecifier);
return newEd as Node as T;
}
return visitEachChild(node, visitNode, context);
}
};
}
});
// https://github.com/Microsoft/TypeScript/issues/19618
testBaseline("transformAddImportStar", () => {
return transpileModule("", {
transformers: {
before: [transformAddImportStar],
},
compilerOptions: {
target: ScriptTarget.ES5,
module: ModuleKind.System,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
function transformAddImportStar(_context: TransformationContext) {
return (sourceFile: SourceFile): SourceFile => {
return visitNode(sourceFile);
};
function visitNode(sf: SourceFile) {
                    // produce `import * as i0 from './comp1';`
const importStar = createImportDeclaration(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*importClause*/ createImportClause(
/*name*/ undefined,
createNamespaceImport(createIdentifier("i0"))
),
/*moduleSpecifier*/ createLiteral("./comp1"));
return updateSourceFileNode(sf, [importStar]);
}
}
});
// https://github.com/Microsoft/TypeScript/issues/17384
testBaseline("transformAddDecoratedNode", () => {
return transpileModule("", {
transformers: {
before: [transformAddDecoratedNode],
},
compilerOptions: {
target: ScriptTarget.ES5,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
function transformAddDecoratedNode(_context: TransformationContext) {
return (sourceFile: SourceFile): SourceFile => {
return visitNode(sourceFile);
};
function visitNode(sf: SourceFile) {
// produce `class Foo { @Bar baz() {} }`;
const classDecl = createClassDeclaration([], [], "Foo", /*typeParameters*/ undefined, /*heritageClauses*/ undefined, [
                        createMethod([createDecorator(createIdentifier("Bar"))], [], /*asteriskToken*/ undefined, "baz", /*questionToken*/ undefined, /*typeParameters*/ undefined, [], /*type*/ undefined, createBlock([]))
]);
return updateSourceFileNode(sf, [classDecl]);
}
}
});
testBaseline("transformDeclarationFile", () => {
return baselineDeclarationTransform(`var oldName = undefined;`, {
transformers: {
afterDeclarations: [replaceIdentifiersNamedOldNameWithNewName]
},
compilerOptions: {
newLine: NewLineKind.CarriageReturnLineFeed,
declaration: true
}
});
});
function baselineDeclarationTransform(text: string, opts: TranspileOptions) {
const fs = vfs.createFromFileSystem(Harness.IO, /*caseSensitive*/ true, { documents: [new documents.TextDocument("/.src/index.ts", text)] });
const host = new fakes.CompilerHost(fs, opts.compilerOptions);
const program = createProgram(["/.src/index.ts"], opts.compilerOptions!, host);
program.emit(program.getSourceFile("/.src/index.ts"), (p, s, bom) => host.writeFile(p, s, bom), /*cancellationToken*/ undefined, /*onlyDts*/ true, opts.transformers);
return fs.readFileSync("/.src/index.d.ts").toString();
}
function addSyntheticComment(nodeFilter: (node: Node) => boolean) {
return (context: TransformationContext) => {
return (sourceFile: SourceFile): SourceFile => {
return visitNode(sourceFile, rootTransform, isSourceFile);
};
function rootTransform<T extends Node>(node: T): VisitResult<T> {
if (nodeFilter(node)) {
setEmitFlags(node, EmitFlags.NoLeadingComments);
setSyntheticLeadingComments(node, [{ kind: SyntaxKind.MultiLineCommentTrivia, text: "comment", pos: -1, end: -1, hasTrailingNewLine: true }]);
}
return visitEachChild(node, rootTransform, context);
}
};
}
// https://github.com/Microsoft/TypeScript/issues/24096
testBaseline("transformAddCommentToArrowReturnValue", () => {
return transpileModule(`const foo = () =>
void 0
`, {
transformers: {
before: [addSyntheticComment(isVoidExpression)],
},
compilerOptions: {
target: ScriptTarget.ES5,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
// https://github.com/Microsoft/TypeScript/issues/17594
testBaseline("transformAddCommentToExportedVar", () => {
return transpileModule(`export const exportedDirectly = 1;
const exportedSeparately = 2;
export {exportedSeparately};
`, {
transformers: {
before: [addSyntheticComment(isVariableStatement)],
},
compilerOptions: {
target: ScriptTarget.ES5,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
// https://github.com/Microsoft/TypeScript/issues/17594
testBaseline("transformAddCommentToImport", () => {
return transpileModule(`
// Previous comment on import.
import {Value} from 'somewhere';
import * as X from 'somewhere';
// Previous comment on export.
export { /* specifier comment */ X, Y} from 'somewhere';
export * from 'somewhere';
export {Value};
`, {
transformers: {
before: [addSyntheticComment(n => isImportDeclaration(n) || isExportDeclaration(n) || isImportSpecifier(n) || isExportSpecifier(n))],
},
compilerOptions: {
target: ScriptTarget.ES5,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
// https://github.com/Microsoft/TypeScript/issues/17594
testBaseline("transformAddCommentToProperties", () => {
return transpileModule(`
// class comment.
class Clazz {
// original comment 1.
static staticProp: number = 1;
// original comment 2.
instanceProp: number = 2;
// original comment 3.
constructor(readonly field = 1) {}
}
`, {
transformers: {
before: [addSyntheticComment(n => isPropertyDeclaration(n) || isParameterPropertyDeclaration(n) || isClassDeclaration(n) || isConstructorDeclaration(n))],
},
compilerOptions: {
target: ScriptTarget.ES2015,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
testBaseline("transformAddCommentToNamespace", () => {
return transpileModule(`
// namespace comment.
namespace Foo {
export const x = 1;
}
// another comment.
namespace Foo {
export const y = 1;
}
`, {
transformers: {
before: [addSyntheticComment(n => isModuleDeclaration(n))],
},
compilerOptions: {
target: ScriptTarget.ES2015,
newLine: NewLineKind.CarriageReturnLineFeed,
}
}).outputText;
});
});
}
| donaldpipowitch/TypeScript | src/testRunner/unittests/transform.ts | TypeScript | apache-2.0 | 17,845 |
package lm.com.framework.encrypt;
import java.io.IOException;
import java.security.SecureRandom;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.DESKeySpec;
import sun.misc.BASE64Decoder;
import sun.misc.BASE64Encoder;
public class DESEncrypt {
private final static String DES = "DES";
/**
	 * DES encrypt.
	 *
	 * @param encryptString plain text to encrypt
	 * @param key DES key (at least 8 bytes)
	 * @return Base64-encoded cipher text
* @throws Exception
*/
public static String encode(String encryptString, String key) throws Exception {
byte[] bt = encrypt(encryptString.getBytes(), key.getBytes());
String strs = new BASE64Encoder().encode(bt);
return strs;
}
/**
	 * DES decrypt.
	 *
	 * @param decryptString Base64-encoded cipher text to decrypt
	 * @param key DES key (at least 8 bytes)
	 * @return decrypted plain text
* @throws IOException
* @throws Exception
*/
public static String decode(String decryptString, String key) throws IOException, Exception {
if (decryptString == null || decryptString.trim().isEmpty())
return "";
BASE64Decoder decoder = new BASE64Decoder();
byte[] buf = decoder.decodeBuffer(decryptString);
byte[] bt = decrypt(buf, key.getBytes());
return new String(bt);
}
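	/*
	 * A minimal usage sketch (the literals are illustrative assumptions; DESKeySpec requires a key of
	 * at least 8 bytes):
	 *
	 *   String cipherText = DESEncrypt.encode("hello world", "12345678");
	 *   String plainText = DESEncrypt.decode(cipherText, "12345678");
	 */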
/**
	 * Encrypt data with the given key.
*/
private static byte[] encrypt(byte[] data, byte[] key) throws Exception {
Cipher cipher = cipherInit(data, key, Cipher.ENCRYPT_MODE);
return cipher.doFinal(data);
}
/**
	 * Decrypt data with the given key.
*/
private static byte[] decrypt(byte[] data, byte[] key) throws Exception {
Cipher cipher = cipherInit(data, key, Cipher.DECRYPT_MODE);
return cipher.doFinal(data);
}
private static Cipher cipherInit(byte[] data, byte[] key, int cipherValue) throws Exception {
		/** Create a trusted source of randomness. **/
		SecureRandom sr = new SecureRandom();
		/** Create a DESKeySpec object from the raw key bytes. **/
		DESKeySpec dks = new DESKeySpec(key);
		/** Create a key factory and use it to convert the DESKeySpec into a SecretKey object. **/
		SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(DES);
		SecretKey securekey = keyFactory.generateSecret(dks);
		/** The Cipher object actually performs the encryption or decryption. **/
		Cipher cipher = Cipher.getInstance(DES);
		/** Initialize the Cipher object with the key. **/
cipher.init(cipherValue, securekey, sr);
return cipher;
}
}
| mrluo735/lm.cloudplat | common/lm.com.framework/src/main/java/lm/com/framework/encrypt/DESEncrypt.java | Java | apache-2.0 | 2,267 |
package com.desple.view;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
public class PreviewImageCanvas extends JPanel {
private BufferedImage image;
public PreviewImageCanvas() {
image = null;
}
public Dimension getPreferredSize() {
return new Dimension(512, 512);
}
public void paintComponent(Graphics g) {
super.paintComponent(g);
if (this.image != null) {
g.drawImage(this.image, 0, 0, 512, 512, this);
}
}
public void loadImage(String imageLocation) throws IOException {
this.image = ImageIO.read(new File(imageLocation));
repaint();
}
public BufferedImage getImage() {
return this.image;
}
public void setImage(BufferedImage image) {
this.image = image;
repaint();
}
}
| thebillkidy/RandomProjects | FaceRecognition/Java/src/main/java/com/desple/view/PreviewImageCanvas.java | Java | apache-2.0 | 938 |
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rafthttp
import (
"errors"
"fmt"
"io/ioutil"
"net/http"
"path"
"github.com/coreos/etcd/Godeps/_workspace/src/golang.org/x/net/context"
pioutil "github.com/coreos/etcd/pkg/ioutil"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/raft/raftpb"
"github.com/coreos/etcd/snap"
"github.com/coreos/etcd/version"
)
const (
// connReadLimitByte limits the number of bytes
// a single read can read out.
//
// 64KB should be large enough for not causing
// throughput bottleneck as well as small enough
// for not causing a read timeout.
connReadLimitByte = 64 * 1024
)
var (
RaftPrefix = "/raft"
ProbingPrefix = path.Join(RaftPrefix, "probing")
RaftStreamPrefix = path.Join(RaftPrefix, "stream")
RaftSnapshotPrefix = path.Join(RaftPrefix, "snapshot")
errIncompatibleVersion = errors.New("incompatible version")
errClusterIDMismatch = errors.New("cluster ID mismatch")
)
type peerGetter interface {
Get(id types.ID) Peer
}
type writerToResponse interface {
WriteTo(w http.ResponseWriter)
}
type pipelineHandler struct {
r Raft
cid types.ID
}
// newPipelineHandler returns a handler for handling raft messages
// from pipeline for RaftPrefix.
//
// The handler reads out the raft message from request body,
// and forwards it to the given raft state machine for processing.
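//
// A minimal wiring sketch (illustrative only: the mux, the Raft implementation r, the snapshotter and
// the cluster ID cid are assumptions; the transport in this package registers these handlers itself):
//
//	mux := http.NewServeMux()
//	mux.Handle(RaftPrefix, newPipelineHandler(r, cid))
//	mux.Handle(RaftSnapshotPrefix, newSnapshotHandler(r, snapshotter, cid))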
func newPipelineHandler(r Raft, cid types.ID) http.Handler {
return &pipelineHandler{
r: r,
cid: cid,
}
}
func (h *pipelineHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
if r.Method != "POST" {
w.Header().Set("Allow", "POST")
http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
return
}
w.Header().Set("X-Etcd-Cluster-ID", h.cid.String())
if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil {
http.Error(w, err.Error(), http.StatusPreconditionFailed)
return
}
	// Limit the data size that could be read from the request body, which ensures that a read from the
	// connection will not time out accidentally due to possible blocking in the underlying implementation.
limitedr := pioutil.NewLimitedBufferReader(r.Body, connReadLimitByte)
b, err := ioutil.ReadAll(limitedr)
if err != nil {
plog.Errorf("failed to read raft message (%v)", err)
http.Error(w, "error reading raft message", http.StatusBadRequest)
return
}
var m raftpb.Message
if err := m.Unmarshal(b); err != nil {
plog.Errorf("failed to unmarshal raft message (%v)", err)
http.Error(w, "error unmarshaling raft message", http.StatusBadRequest)
return
}
if err := h.r.Process(context.TODO(), m); err != nil {
switch v := err.(type) {
case writerToResponse:
v.WriteTo(w)
default:
plog.Warningf("failed to process raft message (%v)", err)
http.Error(w, "error processing raft message", http.StatusInternalServerError)
}
return
}
	// Write StatusNoContent header after the message has been processed by
	// raft, which allows the client to report MsgSnap status.
w.WriteHeader(http.StatusNoContent)
}
type snapshotHandler struct {
r Raft
snapshotter *snap.Snapshotter
cid types.ID
}
func newSnapshotHandler(r Raft, snapshotter *snap.Snapshotter, cid types.ID) http.Handler {
return &snapshotHandler{
r: r,
snapshotter: snapshotter,
cid: cid,
}
}
// ServeHTTP serves HTTP request to receive and process snapshot message.
//
// If the request sender dies without closing the underlying TCP connection,
// the handler will keep waiting for the request body until TCP keepalive
// finds out that the connection is broken after several minutes.
// This is acceptable because
// 1. snapshot messages sent through other TCP connections could still be
// received and processed.
// 2. this case should happen rarely, so no further optimization is done.
func (h *snapshotHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
if r.Method != "POST" {
w.Header().Set("Allow", "POST")
http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
return
}
w.Header().Set("X-Etcd-Cluster-ID", h.cid.String())
if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil {
http.Error(w, err.Error(), http.StatusPreconditionFailed)
return
}
dec := &messageDecoder{r: r.Body}
m, err := dec.decode()
if err != nil {
msg := fmt.Sprintf("failed to decode raft message (%v)", err)
plog.Errorf(msg)
http.Error(w, msg, http.StatusBadRequest)
return
}
if m.Type != raftpb.MsgSnap {
plog.Errorf("unexpected raft message type %s on snapshot path", m.Type)
http.Error(w, "wrong raft message type", http.StatusBadRequest)
return
}
// save incoming database snapshot.
if err := h.snapshotter.SaveDBFrom(r.Body, m.Snapshot.Metadata.Index); err != nil {
msg := fmt.Sprintf("failed to save KV snapshot (%v)", err)
plog.Error(msg)
http.Error(w, msg, http.StatusInternalServerError)
return
}
plog.Infof("received and saved database snapshot [index: %d, from: %s] successfully", m.Snapshot.Metadata.Index, types.ID(m.From))
if err := h.r.Process(context.TODO(), m); err != nil {
switch v := err.(type) {
// Process may return writerToResponse error when doing some
// additional checks before calling raft.Node.Step.
case writerToResponse:
v.WriteTo(w)
default:
msg := fmt.Sprintf("failed to process raft message (%v)", err)
plog.Warningf(msg)
http.Error(w, msg, http.StatusInternalServerError)
}
return
}
	// Write StatusNoContent header after the message has been processed by
	// raft, which allows the client to report MsgSnap status.
w.WriteHeader(http.StatusNoContent)
}
type streamHandler struct {
peerGetter peerGetter
r Raft
id types.ID
cid types.ID
}
func newStreamHandler(peerGetter peerGetter, r Raft, id, cid types.ID) http.Handler {
return &streamHandler{
peerGetter: peerGetter,
r: r,
id: id,
cid: cid,
}
}
func (h *streamHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
if r.Method != "GET" {
w.Header().Set("Allow", "GET")
http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
return
}
w.Header().Set("X-Server-Version", version.Version)
w.Header().Set("X-Etcd-Cluster-ID", h.cid.String())
if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil {
http.Error(w, err.Error(), http.StatusPreconditionFailed)
return
}
var t streamType
switch path.Dir(r.URL.Path) {
case streamTypeMsgAppV2.endpoint():
t = streamTypeMsgAppV2
case streamTypeMessage.endpoint():
t = streamTypeMessage
default:
plog.Debugf("ignored unexpected streaming request path %s", r.URL.Path)
http.Error(w, "invalid path", http.StatusNotFound)
return
}
fromStr := path.Base(r.URL.Path)
from, err := types.IDFromString(fromStr)
if err != nil {
plog.Errorf("failed to parse from %s into ID (%v)", fromStr, err)
http.Error(w, "invalid from", http.StatusNotFound)
return
}
if h.r.IsIDRemoved(uint64(from)) {
plog.Warningf("rejected the stream from peer %s since it was removed", from)
http.Error(w, "removed member", http.StatusGone)
return
}
p := h.peerGetter.Get(from)
if p == nil {
		// This may happen in the following cases:
// 1. user starts a remote peer that belongs to a different cluster
// with the same cluster ID.
		// 2. local etcd falls behind the cluster, and cannot recognize
// the members that joined after its current progress.
plog.Errorf("failed to find member %s in cluster %s", from, h.cid)
http.Error(w, "error sender not found", http.StatusNotFound)
return
}
wto := h.id.String()
if gto := r.Header.Get("X-Raft-To"); gto != wto {
plog.Errorf("streaming request ignored (ID mismatch got %s want %s)", gto, wto)
http.Error(w, "to field mismatch", http.StatusPreconditionFailed)
return
}
w.WriteHeader(http.StatusOK)
w.(http.Flusher).Flush()
c := newCloseNotifier()
conn := &outgoingConn{
t: t,
Writer: w,
Flusher: w.(http.Flusher),
Closer: c,
}
p.attachOutgoingConn(conn)
<-c.closeNotify()
}
// checkClusterCompatibilityFromHeader checks the cluster compatibility of
// the local member from the given header.
// It checks whether the version of local member is compatible with
// the versions in the header, and whether the cluster ID of local member
// matches the one in the header.
func checkClusterCompatibilityFromHeader(header http.Header, cid types.ID) error {
if err := checkVersionCompability(header.Get("X-Server-From"), serverVersion(header), minClusterVersion(header)); err != nil {
plog.Errorf("request version incompatibility (%v)", err)
return errIncompatibleVersion
}
if gcid := header.Get("X-Etcd-Cluster-ID"); gcid != cid.String() {
plog.Errorf("request cluster ID mismatch (got %s want %s)", gcid, cid)
return errClusterIDMismatch
}
return nil
}
type closeNotifier struct {
done chan struct{}
}
func newCloseNotifier() *closeNotifier {
return &closeNotifier{
done: make(chan struct{}),
}
}
func (n *closeNotifier) Close() error {
close(n.done)
return nil
}
func (n *closeNotifier) closeNotify() <-chan struct{} { return n.done }
| fasaxc/etcd | rafthttp/http.go | GO | apache-2.0 | 9,699 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_79) on Wed Apr 29 14:47:00 PDT 2015 -->
<title>org.apache.nutch.crawl (apache-nutch 1.10 API)</title>
<meta name="date" content="2015-04-29">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<h1 class="bar"><a href="../../../../org/apache/nutch/crawl/package-summary.html" target="classFrame">org.apache.nutch.crawl</a></h1>
<div class="indexContainer">
<h2 title="Interfaces">Interfaces</h2>
<ul title="Interfaces">
<li><a href="FetchSchedule.html" title="interface in org.apache.nutch.crawl" target="classFrame"><i>FetchSchedule</i></a></li>
</ul>
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="AbstractFetchSchedule.html" title="class in org.apache.nutch.crawl" target="classFrame">AbstractFetchSchedule</a></li>
<li><a href="AdaptiveFetchSchedule.html" title="class in org.apache.nutch.crawl" target="classFrame">AdaptiveFetchSchedule</a></li>
<li><a href="CrawlDatum.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDatum</a></li>
<li><a href="CrawlDatum.Comparator.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDatum.Comparator</a></li>
<li><a href="CrawlDb.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDb</a></li>
<li><a href="CrawlDbFilter.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbFilter</a></li>
<li><a href="CrawlDbMerger.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbMerger</a></li>
<li><a href="CrawlDbMerger.Merger.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbMerger.Merger</a></li>
<li><a href="CrawlDbReader.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader</a></li>
<li><a href="CrawlDbReader.CrawlDatumCsvOutputFormat.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDatumCsvOutputFormat</a></li>
<li><a href="CrawlDbReader.CrawlDatumCsvOutputFormat.LineRecordWriter.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDatumCsvOutputFormat.LineRecordWriter</a></li>
<li><a href="CrawlDbReader.CrawlDbDumpMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbDumpMapper</a></li>
<li><a href="CrawlDbReader.CrawlDbStatCombiner.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbStatCombiner</a></li>
<li><a href="CrawlDbReader.CrawlDbStatMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbStatMapper</a></li>
<li><a href="CrawlDbReader.CrawlDbStatReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbStatReducer</a></li>
<li><a href="CrawlDbReader.CrawlDbTopNMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbTopNMapper</a></li>
<li><a href="CrawlDbReader.CrawlDbTopNReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReader.CrawlDbTopNReducer</a></li>
<li><a href="CrawlDbReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">CrawlDbReducer</a></li>
<li><a href="DeduplicationJob.html" title="class in org.apache.nutch.crawl" target="classFrame">DeduplicationJob</a></li>
<li><a href="DeduplicationJob.DBFilter.html" title="class in org.apache.nutch.crawl" target="classFrame">DeduplicationJob.DBFilter</a></li>
<li><a href="DeduplicationJob.DedupReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">DeduplicationJob.DedupReducer</a></li>
<li><a href="DeduplicationJob.StatusUpdateReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">DeduplicationJob.StatusUpdateReducer</a></li>
<li><a href="DefaultFetchSchedule.html" title="class in org.apache.nutch.crawl" target="classFrame">DefaultFetchSchedule</a></li>
<li><a href="FetchScheduleFactory.html" title="class in org.apache.nutch.crawl" target="classFrame">FetchScheduleFactory</a></li>
<li><a href="Generator.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator</a></li>
<li><a href="Generator.CrawlDbUpdater.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.CrawlDbUpdater</a></li>
<li><a href="Generator.DecreasingFloatComparator.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.DecreasingFloatComparator</a></li>
<li><a href="Generator.GeneratorOutputFormat.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.GeneratorOutputFormat</a></li>
<li><a href="Generator.HashComparator.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.HashComparator</a></li>
<li><a href="Generator.PartitionReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.PartitionReducer</a></li>
<li><a href="Generator.Selector.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.Selector</a></li>
<li><a href="Generator.SelectorEntry.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.SelectorEntry</a></li>
<li><a href="Generator.SelectorInverseMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">Generator.SelectorInverseMapper</a></li>
<li><a href="Injector.html" title="class in org.apache.nutch.crawl" target="classFrame">Injector</a></li>
<li><a href="Injector.InjectMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">Injector.InjectMapper</a></li>
<li><a href="Injector.InjectReducer.html" title="class in org.apache.nutch.crawl" target="classFrame">Injector.InjectReducer</a></li>
<li><a href="Inlink.html" title="class in org.apache.nutch.crawl" target="classFrame">Inlink</a></li>
<li><a href="Inlinks.html" title="class in org.apache.nutch.crawl" target="classFrame">Inlinks</a></li>
<li><a href="LinkDb.html" title="class in org.apache.nutch.crawl" target="classFrame">LinkDb</a></li>
<li><a href="LinkDbFilter.html" title="class in org.apache.nutch.crawl" target="classFrame">LinkDbFilter</a></li>
<li><a href="LinkDbMerger.html" title="class in org.apache.nutch.crawl" target="classFrame">LinkDbMerger</a></li>
<li><a href="LinkDbReader.html" title="class in org.apache.nutch.crawl" target="classFrame">LinkDbReader</a></li>
<li><a href="LinkDbReader.LinkDBDumpMapper.html" title="class in org.apache.nutch.crawl" target="classFrame">LinkDbReader.LinkDBDumpMapper</a></li>
<li><a href="MapWritable.html" title="class in org.apache.nutch.crawl" target="classFrame">MapWritable</a></li>
<li><a href="MD5Signature.html" title="class in org.apache.nutch.crawl" target="classFrame">MD5Signature</a></li>
<li><a href="MimeAdaptiveFetchSchedule.html" title="class in org.apache.nutch.crawl" target="classFrame">MimeAdaptiveFetchSchedule</a></li>
<li><a href="NutchWritable.html" title="class in org.apache.nutch.crawl" target="classFrame">NutchWritable</a></li>
<li><a href="Signature.html" title="class in org.apache.nutch.crawl" target="classFrame">Signature</a></li>
<li><a href="SignatureComparator.html" title="class in org.apache.nutch.crawl" target="classFrame">SignatureComparator</a></li>
<li><a href="SignatureFactory.html" title="class in org.apache.nutch.crawl" target="classFrame">SignatureFactory</a></li>
<li><a href="TextMD5Signature.html" title="class in org.apache.nutch.crawl" target="classFrame">TextMD5Signature</a></li>
<li><a href="TextProfileSignature.html" title="class in org.apache.nutch.crawl" target="classFrame">TextProfileSignature</a></li>
<li><a href="URLPartitioner.html" title="class in org.apache.nutch.crawl" target="classFrame">URLPartitioner</a></li>
</ul>
</div>
</body>
</html>
| nhahv/apache-nutch-1.x | docs/api/org/apache/nutch/crawl/package-frame.html | HTML | apache-2.0 | 7,832 |
<?php
class _Upload
{
private static $files = array();
/**
* Takes a $_FILES array and standardizes it to be the same regardless of number of uploads
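     *
     * A minimal usage sketch (the "photo" field name is an illustrative assumption; for a single-file
     * input the per-file keys, including the added 'success' flag, sit directly under the field name):
     *
     *   $files = _Upload::standardizeFileUploads($_FILES);
     *   if ($files['photo']['success']) {
     *       $tmp = $files['photo']['tmp_name'];
     *   }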
*
* @param array $files Files array to standardize
     * @return array Standardized files array
*/
public static function standardizeFileUploads($files=array())
{
if (!count($files)) {
return $files;
}
// loop through files to standardize
foreach ($files as $field => $data) {
if (!isset(self::$files[$field]) || !is_array(self::$files[$field])) {
self::$files[$field] = array();
}
$data = array(
'name' => $data['name'],
'type' => $data['type'],
'tmp_name' => $data['tmp_name'],
'size' => $data['size'],
'error' => $data['error']
);
// loop through _FILES to standardize
foreach ($data as $key => $value) {
self::buildFileArray($key, $value, self::$files[$field], $field);
}
}
// return our cleaner version
return self::$files;
}
/**
* Recursively builds an array of files
*
* @param string $key Upload key that we're processing
* @param mixed $value Either a string or an array of the value
* @param array $output The referenced array object for manipulation
* @param string $path A string for colon-delimited path searching
* @return void
*/
private static function buildFileArray($key, $value, &$output, $path)
{
if (is_array($value)) {
foreach ($value as $sub_key => $sub_value) {
if (!isset($output[$sub_key]) || !is_array($output[$sub_key])) {
$output[$sub_key] = array();
}
$new_path = (empty($path)) ? $sub_key : $path . ':' . $sub_key;
self::buildFileArray($key, $sub_value, $output[$sub_key], $new_path);
}
} else {
$output[$key] = $value;
// add error message
if ($key === 'error') {
$error_message = self::getFriendlyErrorMessage($value);
$success_status = ($value === UPLOAD_ERR_OK);
$output['error_message'] = $error_message;
$output['success'] = $success_status;
} elseif ($key === 'size') {
$human_readable_size = File::getHumanSize($value);
$output['size_human_readable'] = $human_readable_size;
}
}
}
/**
* Create friendly error messages for upload issues
*
* @param int $error Error int
* @return string
*/
private static function getFriendlyErrorMessage($error)
{
// these errors are PHP-based
if ($error === UPLOAD_ERR_OK) {
return '';
} elseif ($error === UPLOAD_ERR_INI_SIZE) {
return Localization::fetch('upload_error_ini_size');
} elseif ($error === UPLOAD_ERR_FORM_SIZE) {
return Localization::fetch('upload_error_form_size');
} elseif ($error === UPLOAD_ERR_PARTIAL) {
return Localization::fetch('upload_error_err_partial');
} elseif ($error === UPLOAD_ERR_NO_FILE) {
return Localization::fetch('upload_error_no_file');
} elseif ($error === UPLOAD_ERR_NO_TMP_DIR) {
return Localization::fetch('upload_error_no_temp_dir');
} elseif ($error === UPLOAD_ERR_CANT_WRITE) {
return Localization::fetch('upload_error_cant_write');
} elseif ($error === UPLOAD_ERR_EXTENSION) {
return Localization::fetch('upload_error_extension');
} else {
// we should never, ever see this
return Localization::fetch('upload_error_unknown');
}
}
/**
* Upload file(s)
*
* @param string $destination Where the file is going
     * @param string $id The field to look at in the files array
* @return array
*/
public static function uploadBatch($destination = null, $id = null)
{
$destination = $destination ?: Request::get('destination');
$id = $id ?: Request::get('id');
$files = self::standardizeFileUploads($_FILES);
$results = array();
// Resizing configuration
if ($resize = Request::get('resize')) {
$width = Request::get('width', null);
$height = Request::get('height', null);
$ratio = Request::get('ratio', true);
$upsize = Request::get('upsize', false);
$quality = Request::get('quality', '75');
}
// If $files[$id][0] exists, it means there's an array of images.
// If there's not, there's just one. We want to change this to an array.
if ( ! isset($files[$id][0])) {
$tmp = $files[$id];
unset($files[$id]);
$files[$id][] = $tmp;
}
// Process each image
foreach ($files[$id] as $file) {
// Image data
$path = File::upload($file, $destination);
$name = basename($path);
// Resize
if ($resize) {
$image = \Intervention\Image\Image::make(Path::assemble(BASE_PATH, $path));
$resize_folder = Path::assemble($image->dirname, 'resized');
if ( ! Folder::exists($resize_folder)) {
Folder::make($resize_folder);
}
$resize_path = Path::assemble($resize_folder, $image->basename);
$path = Path::toAsset($resize_path);
$name = basename($path);
$image->resize($width, $height, $ratio, $upsize)->save($resize_path, $quality);
}
$results[] = compact('path', 'name');
}
return $results;
}
} | kwanpt/blog | _app/core/private_api/_upload.php | PHP | apache-2.0 | 6,075 |
package example.multiview;
import io.db.Connect;
import io.db.ConnectFactory;
import io.db.FormatResultSet;
import io.json.JSONStructureMaker;
import io.parcoord.db.MakeTableModel;
import java.awt.BasicStroke;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Map.Entry;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.metal.MetalLookAndFeel;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import model.graph.Edge;
import model.graph.EdgeSetValueMaker;
import model.graph.GraphFilter;
import model.graph.GraphModel;
import model.graph.impl.SymmetricGraphInstance;
import model.matrix.DefaultMatrixTableModel;
import model.matrix.MatrixTableModel;
import model.shared.selection.LinkedGraphMatrixSelectionModelBridge;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.MissingNode;
import org.codehaus.jackson.node.ObjectNode;
import swingPlus.graph.GraphCellRenderer;
import swingPlus.graph.JGraph;
import swingPlus.graph.force.impl.BarnesHut2DForceCalculator;
import swingPlus.graph.force.impl.EdgeWeightedAttractor;
import swingPlus.matrix.JHeaderRenderer;
import swingPlus.matrix.JMatrix;
import swingPlus.parcoord.JColumnList;
import swingPlus.parcoord.JColumnList2;
import swingPlus.parcoord.JParCoord;
import swingPlus.shared.MyFrame;
import swingPlus.tablelist.ColumnSortControl;
import swingPlus.tablelist.JEditableVarColTable;
import ui.StackedRowTableUI;
import util.Messages;
import util.colour.ColorUtilities;
import util.ui.NewMetalTheme;
import util.ui.VerticalLabelUI;
import example.graph.renderers.node.NodeDegreeGraphCellRenderer;
import example.multiview.renderers.edge.EdgeCountFatEdgeRenderer;
import example.multiview.renderers.matrix.JSONObjHeaderRenderer;
import example.multiview.renderers.matrix.KeyedDataHeaderRenderer;
import example.multiview.renderers.matrix.NumberShadeRenderer;
import example.multiview.renderers.node.JSONNodeTypeGraphRenderer;
import example.multiview.renderers.node.JSONTooltipGraphCellRenderer;
import example.multiview.renderers.node.KeyedDataGraphCellRenderer;
import example.multiview.renderers.node.TableTooltipGraphCellRenderer;
import example.multiview.renderers.node.valuemakers.NodeTotalEdgeWeightValueMaker;
import example.tablelist.renderers.ColourBarCellRenderer;
public class NapierDBVis {
static final Logger LOGGER = Logger.getLogger (NapierDBVis.class);
/**
* @param args
*/
public static void main (final String[] args) {
//final MetalLookAndFeel lf = new MetalLookAndFeel();
MetalLookAndFeel.setCurrentTheme (new NewMetalTheme());
PropertyConfigurator.configure (Messages.makeProperties ("log4j"));
new NapierDBVis ();
}
public NapierDBVis () {
TableModel tableModel = null;
GraphModel graph = null;
TableModel listTableModel = null;
MatrixTableModel matrixModel = null;
Map<JsonNode, String> nodeTypeMap = null;
final Properties connectionProperties = Messages.makeProperties ("dbconnect", this.getClass(), false);
final Properties queryProperties = Messages.makeProperties ("queries", this.getClass(), false);
final Connect connect = ConnectFactory.getConnect (connectionProperties);
//ResultSet resultSet = null;
Statement stmt;
try {
stmt = connect.getConnection().createStatement();
//final ResultSet resultSet = stmt.executeQuery ("Select * from people where peopleid>0;");
final String peopleDataQuery = queryProperties.get ("PeopleData").toString();
System.err.println (peopleDataQuery);
final ResultSet peopleDataResultSet = stmt.executeQuery (peopleDataQuery);
final MakeTableModel mtm2 = new MakeTableModel();
tableModel = mtm2.makeTable (peopleDataResultSet);
//final ResultSet resultSet = stmt.executeQuery ("Select * from people where peopleid>0;");
final String pubJoinQuery = queryProperties.get ("PublicationJoin").toString();
System.err.println (pubJoinQuery);
final ResultSet pubJoinResultSet = stmt.executeQuery (pubJoinQuery);
//FormatResultSet.getInstance().printResultSet (resultSet);
final MakeTableModel mtm = new MakeTableModel();
TableModel tableModel2 = mtm.makeTable (pubJoinResultSet);
//final DatabaseMetaData dmd = connect.getConnection().getMetaData();
//final ResultSet resultSet2 = dmd.getProcedures (connect.getConnection().getCatalog(), null, "%");
//FormatResultSet.getInstance().printResultSet (resultSet2);
final String pubsByYearQuery = queryProperties.get ("PubsByYear").toString();
System.err.println (pubsByYearQuery);
final ResultSet pubsByYearResultSet = stmt.executeQuery (pubsByYearQuery);
final MakeTableModel mtm3 = new MakeTableModel();
TableModel tableModel3 = mtm3.makeTable (pubsByYearResultSet);
listTableModel = makePubByYearTable (tableModel3);
Map<Object, KeyedData> keyDataMap = makeKeyedDataMap (tableModel, 0, 1);
graph = makeGraph (keyDataMap, "peopleid", tableModel2);
matrixModel = new DefaultMatrixTableModel (graph);
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
connect.close();
}
connect.close();
System.err.println (tableModel == null ? "no model" : "tableModel rows: "+tableModel.getRowCount()+", cols: "+tableModel.getColumnCount());
/*
try {
final ObjectMapper objMapper = new ObjectMapper ();
final JsonNode rootNode = objMapper.readValue (new File (fileName), JsonNode.class);
LOGGER.info ("rootnode: "+rootNode);
final JSONStructureMaker structureMaker = new JSONStructureMaker (rootNode);
graph = structureMaker.makeGraph (new String[] {"people"}, new String[] {"publications", "grants"});
//graph = structureMaker.makeGraph (new String[] {"grants"}, new String[] {"publications", "people"});
//graph = structureMaker.makeGraph (new String[] {"publications", "people", "grants"}, new String[] {"people"});
//tableModel = structureMaker.makeTable ("publications");
tableModel = structureMaker.makeTable ("people");
matrixModel = new DefaultMatrixTableModel (graph);
nodeTypeMap = structureMaker.makeNodeTypeMap (new String[] {"publications", "people", "grants"});
} catch (JsonParseException e) {
e.printStackTrace();
} catch (JsonMappingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
*/
Map<Object, Integer> keyRowMap = makeKeyRowMap (tableModel, 0);
final JGraph jgraph = new JGraph (graph);
final EdgeWeightedAttractor edgeWeighter = new EdgeWeightedAttractor ();
jgraph.setAttractiveForceCalculator (edgeWeighter);
jgraph.setShowEdges (true);
final EdgeSetValueMaker weightedEdgeMaker = new NodeTotalEdgeWeightValueMaker ();
//final GraphCellRenderer tableTupleRenderer = new TableTupleGraphRenderer (tableModel, keyRowMap);
final GraphCellRenderer jsonGraphRenderer = new JSONNodeTypeGraphRenderer (nodeTypeMap);
jgraph.setDefaultNodeRenderer (String.class, new NodeDegreeGraphCellRenderer (10.0));
jgraph.setDefaultNodeRenderer (JsonNode.class, jsonGraphRenderer);
jgraph.setDefaultNodeRenderer (ObjectNode.class, jsonGraphRenderer);
jgraph.setDefaultNodeRenderer (KeyedData.class, new KeyedDataGraphCellRenderer (weightedEdgeMaker));
jgraph.setDefaultEdgeRenderer (Integer.class, new EdgeCountFatEdgeRenderer ());
jgraph.setDefaultNodeToolTipRenderer (KeyedData.class, new TableTooltipGraphCellRenderer ());
final JTable pubTable = new JEditableVarColTable (listTableModel);
//final JTable jtable3 = new JTable (dtm);
pubTable.setSelectionMode (ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
pubTable.setRowSelectionAllowed (true);
//jt2.setColumnSelectionAllowed (true);
pubTable.setRowSorter (new TableRowSorter<DefaultTableModel> ((DefaultTableModel)listTableModel));
final StackedRowTableUI tlui = new StackedRowTableUI ();
pubTable.setUI (tlui);
tlui.setRelativeLayout (true);
final Color[] columnColours = new Color [pubTable.getColumnCount() - 1];
for (int n = 0; n < columnColours.length; n++) {
double perc = (double)n / columnColours.length;
columnColours[n] = ColorUtilities.mixColours (Color.orange, new Color (0, 128, 255), (float)perc);
}
pubTable.getTableHeader().setReorderingAllowed(true);
pubTable.getTableHeader().setResizingAllowed(false);
System.err.println ("ptc: "+pubTable.getColumnModel().getColumnCount());
for (int col = 1; col < pubTable.getColumnCount(); col++) {
System.err.println ("col: "+col+", ptyc: "+pubTable.getColumnModel().getColumn(col));
pubTable.getColumnModel().getColumn(col).setCellRenderer (new ColourBarCellRenderer (columnColours [(col - 1) % columnColours.length]));
}
final JColumnList jcl = new JColumnList (pubTable) {
@Override
public boolean isCellEditable (final int row, final int column) {
return super.isCellEditable (row, column) && row > 0;
}
};
//jcl.addTable (pubTable);
final JMatrix jmatrix = new JMatrix ((TableModel) matrixModel);
//final JHeaderRenderer stringHeader = new JSONObjHeaderRenderer ();
//final JHeaderRenderer stringHeader2 = new JSONObjHeaderRenderer ();
final JHeaderRenderer stringHeader = new KeyedDataHeaderRenderer ();
final JHeaderRenderer stringHeader2 = new KeyedDataHeaderRenderer ();
jmatrix.getRowHeader().setDefaultRenderer (Object.class, stringHeader);
jmatrix.getRowHeader().setDefaultRenderer (String.class, stringHeader);
jmatrix.getColumnHeader().setDefaultRenderer (Object.class, stringHeader2);
jmatrix.getColumnHeader().setDefaultRenderer (String.class, stringHeader2);
((JLabel)stringHeader2).setUI (new VerticalLabelUI (false));
stringHeader.setSelectionBackground (jmatrix.getRowHeader());
stringHeader2.setSelectionBackground (jmatrix.getColumnHeader());
//jmatrix.setDefaultRenderer (HashSet.class, stringHeader);
jmatrix.setDefaultRenderer (String.class, stringHeader);
jmatrix.setDefaultRenderer (Integer.class, new NumberShadeRenderer ());
final JTable table = new JParCoord (tableModel);
table.setSelectionMode (ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
table.setRowSelectionAllowed (true);
table.setAutoCreateRowSorter (true);
table.setColumnSelectionAllowed (true);
table.setForeground (Color.lightGray);
table.setSelectionForeground (Color.orange);
if (table instanceof JParCoord) {
((JParCoord)table).setBrushForegroundColour (Color.gray);
((JParCoord)table).setBrushSelectionColour (Color.red);
((JParCoord)table).setSelectedStroke (new BasicStroke (2.0f));
//((JParCoord)table).setBrushing (true);
}
table.setGridColor (Color.gray);
table.setShowVerticalLines (false);
table.setBorder (BorderFactory.createEmptyBorder (24, 2, 24, 2));
if (table.getRowSorter() instanceof TableRowSorter) {
final TableRowSorter<? extends TableModel> trs = (TableRowSorter<? extends TableModel>)table.getRowSorter();
}
table.setAutoResizeMode (JTable.AUTO_RESIZE_OFF);
/*
jgraph.setPreferredSize (new Dimension (768, 640));
table.setPreferredSize (new Dimension (768, 384));
table.setMinimumSize (new Dimension (256, 128));
final LinkedGraphMatrixSelectionModelBridge selectionBridge = new LinkedGraphMatrixSelectionModelBridge ();
selectionBridge.addJGraph (jgraph);
selectionBridge.addJTable (table);
selectionBridge.addJTable (jmatrix);
*/
SwingUtilities.invokeLater (
new Runnable () {
@Override
public void run() {
final JFrame jf2 = new MyFrame ("JGraph Demo");
jf2.setSize (1024, 768);
final JPanel optionPanel = new JPanel ();
optionPanel.setLayout (new BoxLayout (optionPanel, BoxLayout.Y_AXIS));
final JSlider llengthSlider = new JSlider (20, 1000, (int)edgeWeighter.getLinkLength());
llengthSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
edgeWeighter.setLinkLength (llengthSlider.getValue());
}
}
);
final JSlider lstiffSlider = new JSlider (20, 1000, edgeWeighter.getStiffness());
lstiffSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
edgeWeighter.setStiffness (lstiffSlider.getValue());
}
}
);
final JSlider repulseSlider = new JSlider (1, 50, 10);
repulseSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
((BarnesHut2DForceCalculator)jgraph.getRepulsiveForceCalculator()).setAttenuator (3.0 / repulseSlider.getValue());
}
}
);
final JCheckBox showSingletons = new JCheckBox ("Show singletons", true);
showSingletons.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (final ActionEvent e) {
final Object source = e.getSource();
if (source instanceof JCheckBox) {
final boolean selected = ((JCheckBox)source).isSelected();
final GraphFilter singletonFilter = new GraphFilter () {
@Override
public boolean includeNode (final Object obj) {
return jgraph.getModel().getEdges(obj).size() > 0 || selected;
}
@Override
public boolean includeEdge (final Edge edge) {
return true;
}
};
jgraph.setGraphFilter (singletonFilter);
}
}
}
);
final JButton clearSelections = new JButton ("Clear Selections");
clearSelections.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (ActionEvent e) {
jgraph.getSelectionModel().clearSelection ();
}
}
);
final JButton graphFreezer = new JButton ("Freeze Graph");
graphFreezer.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (ActionEvent e) {
jgraph.pauseWorker();
}
}
);
optionPanel.add (new JLabel ("Link Length:"));
optionPanel.add (llengthSlider);
optionPanel.add (new JLabel ("Link Stiffness:"));
optionPanel.add (lstiffSlider);
optionPanel.add (new JLabel ("Repulse Strength:"));
optionPanel.add (repulseSlider);
optionPanel.add (showSingletons);
optionPanel.add (clearSelections);
optionPanel.add (graphFreezer);
JPanel listTablePanel = new JPanel (new BorderLayout ());
listTablePanel.add (new JScrollPane (pubTable), BorderLayout.CENTER);
final Box pubControlPanel = Box.createVerticalBox();
final JScrollPane pubTableScrollPane = new JScrollPane (pubControlPanel);
pubTableScrollPane.setPreferredSize (new Dimension (168, 400));
jcl.getColumnModel().getColumn(1).setWidth (30);
listTablePanel.add (pubTableScrollPane, BorderLayout.WEST);
JTable columnSorter = new ColumnSortControl (pubTable);
pubControlPanel.add (jcl.getTableHeader());
pubControlPanel.add (jcl);
pubControlPanel.add (columnSorter.getTableHeader());
pubControlPanel.add (columnSorter);
JScrollPane parCoordsScrollPane = new JScrollPane (table);
JScrollPane matrixScrollPane = new JScrollPane (jmatrix);
JTabbedPane jtp = new JTabbedPane ();
JPanel graphPanel = new JPanel (new BorderLayout ());
graphPanel.add (jgraph, BorderLayout.CENTER);
graphPanel.add (optionPanel, BorderLayout.WEST);
jtp.addTab ("Node-Link", graphPanel);
jtp.addTab ("Matrix", matrixScrollPane);
jtp.addTab ("Pubs", listTablePanel);
jtp.addTab ("||-Coords", parCoordsScrollPane);
jtp.setPreferredSize(new Dimension (800, 480));
//jf2.getContentPane().add (optionPanel, BorderLayout.EAST);
jf2.getContentPane().add (jtp, BorderLayout.CENTER);
//jf2.getContentPane().add (tableScrollPane, BorderLayout.SOUTH);
jf2.setVisible (true);
}
}
);
}
public GraphModel makeGraph (final ResultSet nodeSet, final ResultSet edgeSet) throws SQLException {
edgeSet.beforeFirst();
final GraphModel graph = new SymmetricGraphInstance ();
		// Walk the edge result set: add both authors as nodes, then either create a new edge
		// with weight 1 or increment the weight of the existing edge between them.
while (edgeSet.next()) {
Object author1 = edgeSet.getObject(1);
Object author2 = edgeSet.getObject(2);
graph.addNode (author1);
graph.addNode (author2);
final Set<Edge> edges = graph.getEdges (author1, author2);
if (edges.isEmpty()) {
graph.addEdge (author1, author2, Integer.valueOf (1));
} else {
final Iterator<Edge> edgeIter = edges.iterator();
final Edge firstEdge = edgeIter.next();
final Integer val = (Integer)firstEdge.getEdgeObject();
firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
//graph.removeEdge (firstEdge);
//graph.addEdge (node1, node2, Integer.valueOf (val.intValue() + 1));
}
}
return graph;
}
public GraphModel makeGraph (final TableModel nodes, final String primaryKeyColumn, final TableModel edges) throws SQLException {
final GraphModel graph = new SymmetricGraphInstance ();
final Map<Object, Integer> primaryKeyRowMap = new HashMap<Object, Integer> ();
for (int row = 0; row < nodes.getRowCount(); row++) {
primaryKeyRowMap.put (nodes.getValueAt (row, 0), Integer.valueOf (row));
}
// Look through the rootnode for fields named 'nodeType'
// Add that nodeTypes' subfields as nodes to a graph
for (int row = 0; row < edges.getRowCount(); row++) {
Object authorKey1 = edges.getValueAt (row, 0);
Object authorKey2 = edges.getValueAt (row, 1);
int authorIndex1 = (primaryKeyRowMap.get(authorKey1) == null ? -1 : primaryKeyRowMap.get(authorKey1).intValue());
int authorIndex2 = (primaryKeyRowMap.get(authorKey2) == null ? -1 : primaryKeyRowMap.get(authorKey2).intValue());
if (authorIndex1 >= 0 && authorIndex2 >= 0) {
Object graphNode1 = nodes.getValueAt (authorIndex1, 1);
Object graphNode2 = nodes.getValueAt (authorIndex2, 1);
graph.addNode (graphNode1);
graph.addNode (graphNode2);
final Set<Edge> gedges = graph.getEdges (graphNode1, graphNode2);
if (gedges.isEmpty()) {
graph.addEdge (graphNode1, graphNode2, Integer.valueOf (1));
} else {
final Iterator<Edge> edgeIter = gedges.iterator();
final Edge firstEdge = edgeIter.next();
final Integer val = (Integer)firstEdge.getEdgeObject();
firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
}
}
}
return graph;
}
public GraphModel makeGraph (final Map<Object, KeyedData> keyDataMap, final String primaryKeyColumn, final TableModel edges) throws SQLException {
final GraphModel graph = new SymmetricGraphInstance ();
// Look through the rootnode for fields named 'nodeType'
// Add that nodeTypes' subfields as nodes to a graph
for (int row = 0; row < edges.getRowCount(); row++) {
Object authorKey1 = edges.getValueAt (row, 0);
Object authorKey2 = edges.getValueAt (row, 1);
if (authorKey1 != null && authorKey2 != null) {
Object graphNode1 = keyDataMap.get (authorKey1);
Object graphNode2 = keyDataMap.get (authorKey2);
if (graphNode1 != null && graphNode2 != null) {
graph.addNode (graphNode1);
graph.addNode (graphNode2);
final Set<Edge> gedges = graph.getEdges (graphNode1, graphNode2);
if (gedges.isEmpty()) {
graph.addEdge (graphNode1, graphNode2, Integer.valueOf (1));
} else {
final Iterator<Edge> edgeIter = gedges.iterator();
final Edge firstEdge = edgeIter.next();
final Integer val = (Integer)firstEdge.getEdgeObject();
firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
}
}
}
}
return graph;
}
public Map<Object, Integer> makeKeyRowMap (final TableModel tableModel, final int columnPKIndex) {
final Map<Object, Integer> primaryKeyRowMap = new HashMap<Object, Integer> ();
for (int row = 0; row < tableModel.getRowCount(); row++) {
primaryKeyRowMap.put (tableModel.getValueAt (row, columnPKIndex), Integer.valueOf (row));
}
return primaryKeyRowMap;
}
public Map<Object, KeyedData> makeKeyedDataMap (final TableModel tableModel, final int columnPKIndex, final int columnLabelIndex) {
final Map<Object, KeyedData> primaryKeyDataMap = new HashMap<Object, KeyedData> ();
for (int row = 0; row < tableModel.getRowCount(); row++) {
primaryKeyDataMap.put (tableModel.getValueAt (row, columnPKIndex), makeKeyedData (tableModel, columnPKIndex, columnLabelIndex, row));
}
return primaryKeyDataMap;
}
public KeyedData makeKeyedData (final TableModel tableModel, final int columnPKIndex, final int columnLabelIndex, final int rowIndex) {
List<Object> data = new ArrayList<Object> ();
for (int n = 0; n < tableModel.getColumnCount(); n++) {
data.add (tableModel.getValueAt (rowIndex, n));
}
KeyedData kd = new KeyedData (tableModel.getValueAt (rowIndex, columnPKIndex), data, columnLabelIndex);
return kd;
}
/**
* Builds a publications-by-year cross-tab, since pivot queries can't be done in ANSI SQL.
* @param sqlresult flat result table of (year, publication type, count) rows
* @return table model with one row per year and one column per publication type
*/
public TableModel makePubByYearTable (final TableModel sqlresult) {
DefaultTableModel tm = new DefaultTableModel () {
public Class<?> getColumnClass(int columnIndex) {
if (columnIndex > 0) {
return Long.class;
}
return Integer.class;
}
public boolean isCellEditable (final int row, final int column) {
return false;
}
};
Map<Object, List<Long>> yearsToTypes = new HashMap<Object, List<Long>> ();
Map<Object, Integer> columnTypes = new HashMap<Object, Integer> ();
tm.addColumn ("Year");
int col = 1;
for (int sqlrow = 0; sqlrow < sqlresult.getRowCount(); sqlrow++) {
Object type = sqlresult.getValueAt (sqlrow, 1);
if (columnTypes.get(type) == null) {
columnTypes.put(type, Integer.valueOf(col));
tm.addColumn (type);
col++;
}
}
System.err.println ("cols: "+columnTypes+", "+columnTypes.size());
for (int sqlrow = 0; sqlrow < sqlresult.getRowCount(); sqlrow++) {
Object year = sqlresult.getValueAt (sqlrow, 0);
if (year != null) {
Object type = sqlresult.getValueAt (sqlrow, 1);
Object val = sqlresult.getValueAt (sqlrow, 2);
int colIndex = columnTypes.get(type).intValue();
List<Long> store = yearsToTypes.get (year);
if (store == null) {
Long[] storep = new Long [col - 1];
Arrays.fill (storep, Long.valueOf(0));
List<Long> longs = Arrays.asList (storep);
store = new ArrayList<Long> (longs);
//Collections.fill (store, Long.valueOf (0));
yearsToTypes.put (year, store);
}
store.set (colIndex - 1, (Long)val);
}
}
for (Entry<Object, List<Long>> yearEntry : yearsToTypes.entrySet()) {
Object[] rowData = new Object [col];
rowData[0] = yearEntry.getKey();
for (int n = 1; n < col; n++) {
rowData[n] = yearEntry.getValue().get(n-1);
}
tm.addRow(rowData);
}
return tm;
}
}
| martingraham/JSwingPlus | src/example/multiview/NapierDBVis.java | Java | apache-2.0 | 24,478 |
# Minio B2 Gateway [](https://slack.minio.io)
Minio Gateway adds Amazon S3 compatibility to Backblaze B2 Cloud Storage.
## Run Minio Gateway for Backblaze B2 Cloud Storage
Please follow this [guide](https://www.backblaze.com/b2/docs/quick_account.html) to create an account on backblaze.com and obtain your access credentials for B2 Cloud Storage.
### Using Docker
```
docker run -p 9000:9000 --name b2-s3 \
-e "MINIO_ACCESS_KEY=b2_accound_id" \
-e "MINIO_SECRET_KEY=b2_application_key" \
minio/minio:edge gateway b2
```
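### Using Binary
You can also run the gateway directly from the `minio` binary. This is a minimal sketch assuming the same environment variables and `gateway b2` subcommand as the Docker example above; the account id and application key shown are placeholders for your own B2 credentials:
```
export MINIO_ACCESS_KEY=b2_account_id
export MINIO_SECRET_KEY=b2_application_key
minio gateway b2
```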
## Test using Minio Browser
Minio Gateway comes with an embedded web-based object browser. Point your web browser to http://127.0.0.1:9000 to ensure that your server has started successfully.

## Test using Minio Client `mc`
`mc` provides a modern alternative to UNIX commands such as ls, cat, cp, mirror, diff etc. It supports filesystems and Amazon S3 compatible cloud storage services.
### Configure `mc`
```
mc config host add myb2 http://gateway-ip:9000 b2_account_id b2_application_key
```
### List buckets on Backblaze B2
```
mc ls myb2
[2017-02-22 01:50:43 PST] 0B ferenginar/
[2017-02-26 21:43:51 PST] 0B my-bucket/
[2017-02-26 22:10:11 PST] 0B test-bucket1/
```
### Known limitations
Gateway inherits the following B2 limitations:
- No support for CopyObject S3 API (There are no equivalent APIs available on Backblaze B2).
- No support for CopyObjectPart S3 API (There are no equivalent APIs available on Backblaze B2).
- Only read-only bucket policy is supported at bucket level; all other variations will return an API NotImplemented error.
- DeleteObject() might not delete the object right away on Backblaze B2, so you might still see the object immediately after a Delete request.
Other limitations:
- Bucket notification APIs are not supported.
## Explore Further
- [`mc` command-line interface](https://docs.minio.io/docs/minio-client-quickstart-guide)
- [`aws` command-line interface](https://docs.minio.io/docs/aws-cli-with-minio)
- [`minio-go` Go SDK](https://docs.minio.io/docs/golang-client-quickstart-guide)
| rushenn/minio | docs/gateway/b2.md | Markdown | apache-2.0 | 2,225 |
import props from './props';
import './view.html';
class NoteClab {
beforeRegister() {
this.is = 'note-clab';
this.properties = props;
}
computeClasses(type) {
var arr = ['input-note'];
if (type != undefined) arr.push(type);
return arr.join(' ');
}
}
Polymer(NoteClab);
| contactlab/contactlab-ui-components | src/note/index.js | JavaScript | apache-2.0 | 301 |
package com.umeng.soexample.run.step;
/**
* Callback for step count updates
* Created by dylan on 16/9/27.
*/
public interface UpdateUiCallBack {
/**
* Update the step count shown in the UI.
*
* @param stepCount the current step count
*/
void updateUi(int stepCount);
}
| liulei-0911/LLApp | myselfapp/src/main/java/com/umeng/soexample/run/step/UpdateUiCallBack.java | Java | apache-2.0 | 249 |
package org.template.similarproduct
import io.prediction.controller.LServing
import breeze.stats.mean
import breeze.stats.meanAndVariance
import breeze.stats.MeanAndVariance
class Serving
extends LServing[Query, PredictedResult] {
override def serve(query: Query,
predictedResults: Seq[PredictedResult]): PredictedResult = {
// MODIFIED
val standard: Seq[Array[ItemScore]] = if (query.num == 1) {
// if query 1 item, don't standardize
predictedResults.map(_.itemScores)
} else {
// Standardize the score before combine
val mvList: Seq[MeanAndVariance] = predictedResults.map { pr =>
meanAndVariance(pr.itemScores.map(_.score))
}
predictedResults.zipWithIndex
.map { case (pr, i) =>
pr.itemScores.map { is =>
// standardize score (z-score)
// if standard deviation is 0 (when all items have the same score,
// meaning all items are ranked equally), return 0.
val score = if (mvList(i).stdDev == 0) {
0
} else {
(is.score - mvList(i).mean) / mvList(i).stdDev
}
ItemScore(is.item, score)
}
}
}
// sum the standardized score if same item
val combined = standard.flatten // Array of ItemScore
.groupBy(_.item) // groupBy item id
.mapValues(itemScores => itemScores.map(_.score).reduce(_ + _))
.toArray // array of (item id, score)
.sortBy(_._2)(Ordering.Double.reverse)
.take(query.num)
.map { case (k,v) => ItemScore(k, v) }
new PredictedResult(combined)
}
}
| wangmiao1981/PredictionIO | examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala | Scala | apache-2.0 | 1,629 |
<layout name="layout" />
<div class="row">
<div class="panel panel-info">
<div class="panel-heading">
<h3 class="panel-title">
<{$arr.cate|getCatname=###}>-<b><{$arr.name}></b>
<a href="__APP__/<{$JC}>/Servicelist/index/cate/<{$arr.cate}>" class="pull-right">返回分类</a>
</h3>
</div>
<div class="panel-body">
<div class="col-md-3 ">
<img src="__UPLOAD__<{$arr.path}><{$arr.img}>" alt="<{$arr.img}>" height="180px">
</div>
<div class="col-md-7 ">
<h5>Item No.: <{$arr.mark}></h5>
<h4>Name: <{$arr.name}></h4>
<h4>Price: <b>¥<{$arr.money}></b> (Market price: <s><em>¥<{$arr.smoney}></em></s>)</h4>
<!--<h4>Shipping: ¥<{$arr.wlmoney}></h4>-->
<h5>Specification: <{$arr.weight}></h5>
<a href="#" class="btn btn-info btn-xs">Consultation and Booking</a>
<a href="http://wpa.qq.com/msgrd?v=3&uin=<{$_SESSION[$JC]['qq']}>&site=qq&menu=yes" class="btn-xs" target="_blank">
<img border="0" title="Online Customer Service" alt="Online Customer Service" style="margin-top:0px;" src="http://wpa.qq.com/pa?p=2:<{$_SESSION[$JC]['qq']}>:41">
</a>
</div>
</div>
</div>
<div class="panel panel-info">
<div class="panel-heading"><h3 class="panel-title">详情介绍:</h3></div>
<div class="panel-body"><p><{$arr.content}></p></div>
</div>
</div>
| yaolihui129/Xinda | Apps/Mtsh/View/default/Service/index.html | HTML | apache-2.0 | 1,305 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_91) on Mon Jun 06 14:51:11 EDT 2016 -->
<title>Lists.Value (apache-cassandra API)</title>
<meta name="date" content="2016-06-06">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Lists.Value (apache-cassandra API)";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":9,"i2":10,"i3":10};
var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/Lists.Value.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/cassandra/cql3/Lists.SetterByIndex.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../org/apache/cassandra/cql3/Maps.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/cassandra/cql3/Lists.Value.html" target="_top">Frames</a></li>
<li><a href="Lists.Value.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field.summary">Field</a> | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field.detail">Field</a> | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.apache.cassandra.cql3</div>
<h2 title="Class Lists.Value" class="title">Class Lists.Value</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li><a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html" title="class in org.apache.cassandra.cql3">org.apache.cassandra.cql3.Term.Terminal</a></li>
<li>
<ul class="inheritance">
<li><a href="../../../../org/apache/cassandra/cql3/Term.MultiItemTerminal.html" title="class in org.apache.cassandra.cql3">org.apache.cassandra.cql3.Term.MultiItemTerminal</a></li>
<li>
<ul class="inheritance">
<li>org.apache.cassandra.cql3.Lists.Value</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd><a href="../../../../org/apache/cassandra/cql3/Term.html" title="interface in org.apache.cassandra.cql3">Term</a></dd>
</dl>
<dl>
<dt>Enclosing class:</dt>
<dd><a href="../../../../org/apache/cassandra/cql3/Lists.html" title="class in org.apache.cassandra.cql3">Lists</a></dd>
</dl>
<hr>
<br>
<pre>public static class <span class="typeNameLabel">Lists.Value</span>
extends <a href="../../../../org/apache/cassandra/cql3/Term.MultiItemTerminal.html" title="class in org.apache.cassandra.cql3">Term.MultiItemTerminal</a></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== NESTED CLASS SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="nested.class.summary">
<!-- -->
</a>
<h3>Nested Class Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.cassandra.cql3.Term">
<!-- -->
</a>
<h3>Nested classes/interfaces inherited from interface org.apache.cassandra.cql3.<a href="../../../../org/apache/cassandra/cql3/Term.html" title="interface in org.apache.cassandra.cql3">Term</a></h3>
<code><a href="../../../../org/apache/cassandra/cql3/Term.MultiColumnRaw.html" title="class in org.apache.cassandra.cql3">Term.MultiColumnRaw</a>, <a href="../../../../org/apache/cassandra/cql3/Term.MultiItemTerminal.html" title="class in org.apache.cassandra.cql3">Term.MultiItemTerminal</a>, <a href="../../../../org/apache/cassandra/cql3/Term.NonTerminal.html" title="class in org.apache.cassandra.cql3">Term.NonTerminal</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Raw.html" title="class in org.apache.cassandra.cql3">Term.Raw</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html" title="class in org.apache.cassandra.cql3">Term.Terminal</a></code></li>
</ul>
</li>
</ul>
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field.summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
<caption><span>Fields</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>java.util.List<java.nio.ByteBuffer></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#elements">elements</a></span></code> </td>
</tr>
</table>
</li>
</ul>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#Value-java.util.List-">Value</a></span>(java.util.List<java.nio.ByteBuffer> elements)</code> </td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#equals-org.apache.cassandra.db.marshal.ListType-org.apache.cassandra.cql3.Lists.Value-">equals</a></span>(<a href="../../../../org/apache/cassandra/db/marshal/ListType.html" title="class in org.apache.cassandra.db.marshal">ListType</a> lt,
<a href="../../../../org/apache/cassandra/cql3/Lists.Value.html" title="class in org.apache.cassandra.cql3">Lists.Value</a> v)</code> </td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>static <a href="../../../../org/apache/cassandra/cql3/Lists.Value.html" title="class in org.apache.cassandra.cql3">Lists.Value</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#fromSerialized-java.nio.ByteBuffer-org.apache.cassandra.db.marshal.ListType-int-">fromSerialized</a></span>(java.nio.ByteBuffer value,
<a href="../../../../org/apache/cassandra/db/marshal/ListType.html" title="class in org.apache.cassandra.db.marshal">ListType</a> type,
int version)</code> </td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code>java.nio.ByteBuffer</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#get-int-">get</a></span>(int protocolVersion)</code> </td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>java.util.List<java.nio.ByteBuffer></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/Lists.Value.html#getElements--">getElements</a></span>()</code> </td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.org.apache.cassandra.cql3.Term.Terminal">
<!-- -->
</a>
<h3>Methods inherited from class org.apache.cassandra.cql3.<a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html" title="class in org.apache.cassandra.cql3">Term.Terminal</a></h3>
<code><a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#addFunctionsTo-java.util.List-">addFunctionsTo</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#bind-org.apache.cassandra.cql3.QueryOptions-">bind</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#bindAndGet-org.apache.cassandra.cql3.QueryOptions-">bindAndGet</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#collectMarkerSpecification-org.apache.cassandra.cql3.VariableSpecifications-">collectMarkerSpecification</a>, <a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#containsBindMarker--">containsBindMarker</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ FIELD DETAIL =========== -->
<ul class="blockList">
<li class="blockList"><a name="field.detail">
<!-- -->
</a>
<h3>Field Detail</h3>
<a name="elements">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>elements</h4>
<pre>public final java.util.List<java.nio.ByteBuffer> elements</pre>
</li>
</ul>
</li>
</ul>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="Value-java.util.List-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>Value</h4>
<pre>public Value(java.util.List<java.nio.ByteBuffer> elements)</pre>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="fromSerialized-java.nio.ByteBuffer-org.apache.cassandra.db.marshal.ListType-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>fromSerialized</h4>
<pre>public static <a href="../../../../org/apache/cassandra/cql3/Lists.Value.html" title="class in org.apache.cassandra.cql3">Lists.Value</a> fromSerialized(java.nio.ByteBuffer value,
<a href="../../../../org/apache/cassandra/db/marshal/ListType.html" title="class in org.apache.cassandra.db.marshal">ListType</a> type,
int version)
throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre>
<dl>
<dt><span class="throwsLabel">Throws:</span></dt>
<dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd>
</dl>
</li>
</ul>
<a name="get-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>get</h4>
<pre>public java.nio.ByteBuffer get(int protocolVersion)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html#get-int-">get</a></code> in class <code><a href="../../../../org/apache/cassandra/cql3/Term.Terminal.html" title="class in org.apache.cassandra.cql3">Term.Terminal</a></code></dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>the serialized value of this terminal.</dd>
</dl>
</li>
</ul>
<a name="equals-org.apache.cassandra.db.marshal.ListType-org.apache.cassandra.cql3.Lists.Value-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>equals</h4>
<pre>public boolean equals(<a href="../../../../org/apache/cassandra/db/marshal/ListType.html" title="class in org.apache.cassandra.db.marshal">ListType</a> lt,
<a href="../../../../org/apache/cassandra/cql3/Lists.Value.html" title="class in org.apache.cassandra.cql3">Lists.Value</a> v)</pre>
</li>
</ul>
<a name="getElements--">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>getElements</h4>
<pre>public java.util.List<java.nio.ByteBuffer> getElements()</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../org/apache/cassandra/cql3/Term.MultiItemTerminal.html#getElements--">getElements</a></code> in class <code><a href="../../../../org/apache/cassandra/cql3/Term.MultiItemTerminal.html" title="class in org.apache.cassandra.cql3">Term.MultiItemTerminal</a></code></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/Lists.Value.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/cassandra/cql3/Lists.SetterByIndex.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../org/apache/cassandra/cql3/Maps.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/cassandra/cql3/Lists.Value.html" target="_top">Frames</a></li>
<li><a href="Lists.Value.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field.summary">Field</a> | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field.detail">Field</a> | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2016 The Apache Software Foundation</small></p>
</body>
</html>
| jasonwee/videoOnCloud | lib/cassandra/apache-cassandra-3.7/javadoc/org/apache/cassandra/cql3/Lists.Value.html | HTML | apache-2.0 | 18,169 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/unittest_preserve_unknown_enum2.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include <google/protobuf/unittest_preserve_unknown_enum2.pb.h>
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/port.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace proto2_preserve_unknown_enum_unittest {
class MyMessageDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<MyMessage> {
public:
int oneof_e_1_;
int oneof_e_2_;
} _MyMessage_default_instance_;
namespace protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto {
namespace {
::google::protobuf::Metadata file_level_metadata[1];
const ::google::protobuf::EnumDescriptor* file_level_enum_descriptors[1];
} // namespace
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTableField
const TableStruct::entries[] = {
{0, 0, 0, ::google::protobuf::internal::kInvalidMask, 0, 0},
};
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::AuxillaryParseTableField
const TableStruct::aux[] = {
::google::protobuf::internal::AuxillaryParseTableField(),
};
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTable const
TableStruct::schema[] = {
{ NULL, NULL, 0, -1, -1, false },
};
const ::google::protobuf::uint32 TableStruct::offsets[] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, _has_bits_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, _internal_metadata_),
~0u, // no _extensions_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, _oneof_case_[0]),
~0u, // no _weak_field_map_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, e_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, repeated_e_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, repeated_packed_e_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, repeated_packed_unexpected_e_),
GOOGLE_PROTOBUF_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_MyMessage_default_instance_), oneof_e_1_),
GOOGLE_PROTOBUF_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_MyMessage_default_instance_), oneof_e_2_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MyMessage, o_),
0,
~0u,
~0u,
~0u,
~0u,
~0u,
};
static const ::google::protobuf::internal::MigrationSchema schemas[] = {
{ 0, 12, sizeof(MyMessage)},
};
static ::google::protobuf::Message const * const file_default_instances[] = {
reinterpret_cast<const ::google::protobuf::Message*>(&_MyMessage_default_instance_),
};
namespace {
void protobuf_AssignDescriptors() {
AddDescriptors();
::google::protobuf::MessageFactory* factory = NULL;
AssignDescriptors(
"google/protobuf/unittest_preserve_unknown_enum2.proto", schemas, file_default_instances, TableStruct::offsets, factory,
file_level_metadata, file_level_enum_descriptors, NULL);
}
void protobuf_AssignDescriptorsOnce() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors);
}
void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD;
void protobuf_RegisterTypes(const ::std::string&) {
protobuf_AssignDescriptorsOnce();
::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 1);
}
} // namespace
void TableStruct::Shutdown() {
_MyMessage_default_instance_.Shutdown();
delete file_level_metadata[0].reflection;
}
void TableStruct::InitDefaultsImpl() {
GOOGLE_PROTOBUF_VERIFY_VERSION;
::google::protobuf::internal::InitProtobufDefaults();
_MyMessage_default_instance_.DefaultConstruct();
_MyMessage_default_instance_.oneof_e_1_ = 0;
_MyMessage_default_instance_.oneof_e_2_ = 0;
}
void InitDefaults() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &TableStruct::InitDefaultsImpl);
}
void AddDescriptorsImpl() {
InitDefaults();
static const char descriptor[] = {
"\n5google/protobuf/unittest_preserve_unkn"
"own_enum2.proto\022%proto2_preserve_unknown"
"_enum_unittest\"\270\003\n\tMyMessage\0228\n\001e\030\001 \001(\0162"
"-.proto2_preserve_unknown_enum_unittest."
"MyEnum\022A\n\nrepeated_e\030\002 \003(\0162-.proto2_pres"
"erve_unknown_enum_unittest.MyEnum\022L\n\021rep"
"eated_packed_e\030\003 \003(\0162-.proto2_preserve_u"
"nknown_enum_unittest.MyEnumB\002\020\001\022S\n\034repea"
"ted_packed_unexpected_e\030\004 \003(\0162-.proto2_p"
"reserve_unknown_enum_unittest.MyEnum\022B\n\t"
"oneof_e_1\030\005 \001(\0162-.proto2_preserve_unknow"
"n_enum_unittest.MyEnumH\000\022B\n\toneof_e_2\030\006 "
"\001(\0162-.proto2_preserve_unknown_enum_unitt"
"est.MyEnumH\000B\003\n\001o*#\n\006MyEnum\022\007\n\003FOO\020\000\022\007\n\003"
"BAR\020\001\022\007\n\003BAZ\020\002"
};
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
descriptor, 574);
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
"google/protobuf/unittest_preserve_unknown_enum2.proto", &protobuf_RegisterTypes);
::google::protobuf::internal::OnShutdown(&TableStruct::Shutdown);
}
void AddDescriptors() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
}
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer {
StaticDescriptorInitializer() {
AddDescriptors();
}
} static_descriptor_initializer;
} // namespace protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto
const ::google::protobuf::EnumDescriptor* MyEnum_descriptor() {
protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::file_level_enum_descriptors[0];
}
bool MyEnum_IsValid(int value) {
switch (value) {
case 0:
case 1:
case 2:
return true;
default:
return false;
}
}
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int MyMessage::kEFieldNumber;
const int MyMessage::kRepeatedEFieldNumber;
const int MyMessage::kRepeatedPackedEFieldNumber;
const int MyMessage::kRepeatedPackedUnexpectedEFieldNumber;
const int MyMessage::kOneofE1FieldNumber;
const int MyMessage::kOneofE2FieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
MyMessage::MyMessage()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:proto2_preserve_unknown_enum_unittest.MyMessage)
}
MyMessage::MyMessage(const MyMessage& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
_has_bits_(from._has_bits_),
_cached_size_(0),
repeated_e_(from.repeated_e_),
repeated_packed_e_(from.repeated_packed_e_),
repeated_packed_unexpected_e_(from.repeated_packed_unexpected_e_) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
e_ = from.e_;
clear_has_o();
switch (from.o_case()) {
case kOneofE1: {
set_oneof_e_1(from.oneof_e_1());
break;
}
case kOneofE2: {
set_oneof_e_2(from.oneof_e_2());
break;
}
case O_NOT_SET: {
break;
}
}
// @@protoc_insertion_point(copy_constructor:proto2_preserve_unknown_enum_unittest.MyMessage)
}
void MyMessage::SharedCtor() {
_cached_size_ = 0;
e_ = 0;
clear_has_o();
}
MyMessage::~MyMessage() {
// @@protoc_insertion_point(destructor:proto2_preserve_unknown_enum_unittest.MyMessage)
SharedDtor();
}
void MyMessage::SharedDtor() {
if (has_o()) {
clear_o();
}
}
void MyMessage::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* MyMessage::descriptor() {
protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
}
const MyMessage& MyMessage::default_instance() {
protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::InitDefaults();
return *internal_default_instance();
}
MyMessage* MyMessage::New(::google::protobuf::Arena* arena) const {
MyMessage* n = new MyMessage;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
void MyMessage::clear_o() {
// @@protoc_insertion_point(one_of_clear_start:proto2_preserve_unknown_enum_unittest.MyMessage)
switch (o_case()) {
case kOneofE1: {
// No need to clear
break;
}
case kOneofE2: {
// No need to clear
break;
}
case O_NOT_SET: {
break;
}
}
_oneof_case_[0] = O_NOT_SET;
}
void MyMessage::Clear() {
// @@protoc_insertion_point(message_clear_start:proto2_preserve_unknown_enum_unittest.MyMessage)
repeated_e_.Clear();
repeated_packed_e_.Clear();
repeated_packed_unexpected_e_.Clear();
e_ = 0;
clear_o();
_has_bits_.Clear();
_internal_metadata_.Clear();
}
bool MyMessage::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:proto2_preserve_unknown_enum_unittest.MyMessage)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .proto2_preserve_unknown_enum_unittest.MyEnum e = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(8u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
set_e(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(1, value);
}
} else {
goto handle_unusual;
}
break;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_e = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(16u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
add_repeated_e(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(2, value);
}
} else if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_((::google::protobuf::internal::WireFormat::ReadPackedEnumPreserveUnknowns(
input,
2,
::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid,
mutable_unknown_fields(),
this->mutable_repeated_e())));
} else {
goto handle_unusual;
}
break;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_e = 3 [packed = true];
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
::google::protobuf::uint32 length;
DO_(input->ReadVarint32(&length));
::google::protobuf::io::CodedInputStream::Limit limit = input->PushLimit(length);
while (input->BytesUntilLimit() > 0) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
add_repeated_packed_e(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(3, value);
}
}
input->PopLimit(limit);
} else if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(24u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
add_repeated_packed_e(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(3, value);
}
} else {
goto handle_unusual;
}
break;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_unexpected_e = 4;
case 4: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(32u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
add_repeated_packed_unexpected_e(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(4, value);
}
} else if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(34u)) {
DO_((::google::protobuf::internal::WireFormat::ReadPackedEnumPreserveUnknowns(
input,
4,
::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid,
mutable_unknown_fields(),
this->mutable_repeated_packed_unexpected_e())));
} else {
goto handle_unusual;
}
break;
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_1 = 5;
case 5: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(40u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
set_oneof_e_1(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(5, value);
}
} else {
goto handle_unusual;
}
break;
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_2 = 6;
case 6: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(48u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value)) {
set_oneof_e_2(static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(value));
} else {
mutable_unknown_fields()->AddVarint(6, value);
}
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:proto2_preserve_unknown_enum_unittest.MyMessage)
return true;
failure:
// @@protoc_insertion_point(parse_failure:proto2_preserve_unknown_enum_unittest.MyMessage)
return false;
#undef DO_
}
void MyMessage::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:proto2_preserve_unknown_enum_unittest.MyMessage)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
cached_has_bits = _has_bits_[0];
// optional .proto2_preserve_unknown_enum_unittest.MyEnum e = 1;
if (cached_has_bits & 0x00000001u) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
1, this->e(), output);
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_e = 2;
for (int i = 0, n = this->repeated_e_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
2, this->repeated_e(i), output);
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_e = 3 [packed = true];
if (this->repeated_packed_e_size() > 0) {
::google::protobuf::internal::WireFormatLite::WriteTag(
3,
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
output);
output->WriteVarint32(_repeated_packed_e_cached_byte_size_);
}
for (int i = 0, n = this->repeated_packed_e_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteEnumNoTag(
this->repeated_packed_e(i), output);
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_unexpected_e = 4;
for (int i = 0, n = this->repeated_packed_unexpected_e_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
4, this->repeated_packed_unexpected_e(i), output);
}
switch (o_case()) {
case kOneofE1:
::google::protobuf::internal::WireFormatLite::WriteEnum(
5, this->oneof_e_1(), output);
break;
case kOneofE2:
::google::protobuf::internal::WireFormatLite::WriteEnum(
6, this->oneof_e_2(), output);
break;
default: ;
}
if (_internal_metadata_.have_unknown_fields()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:proto2_preserve_unknown_enum_unittest.MyMessage)
}
::google::protobuf::uint8* MyMessage::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:proto2_preserve_unknown_enum_unittest.MyMessage)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
cached_has_bits = _has_bits_[0];
// optional .proto2_preserve_unknown_enum_unittest.MyEnum e = 1;
if (cached_has_bits & 0x00000001u) {
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
1, this->e(), target);
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_e = 2;
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
2, this->repeated_e_, target);
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_e = 3 [packed = true];
if (this->repeated_packed_e_size() > 0) {
target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(
3,
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
target);
target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray( _repeated_packed_e_cached_byte_size_, target);
target = ::google::protobuf::internal::WireFormatLite::WriteEnumNoTagToArray(
this->repeated_packed_e_, target);
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_unexpected_e = 4;
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
4, this->repeated_packed_unexpected_e_, target);
switch (o_case()) {
case kOneofE1:
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
5, this->oneof_e_1(), target);
break;
case kOneofE2:
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
6, this->oneof_e_2(), target);
break;
default: ;
}
if (_internal_metadata_.have_unknown_fields()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:proto2_preserve_unknown_enum_unittest.MyMessage)
return target;
}
size_t MyMessage::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:proto2_preserve_unknown_enum_unittest.MyMessage)
size_t total_size = 0;
if (_internal_metadata_.have_unknown_fields()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_e = 2;
{
size_t data_size = 0;
unsigned int count = this->repeated_e_size();for (unsigned int i = 0; i < count; i++) {
data_size += ::google::protobuf::internal::WireFormatLite::EnumSize(
this->repeated_e(i));
}
total_size += (1UL * count) + data_size;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_e = 3 [packed = true];
{
size_t data_size = 0;
unsigned int count = this->repeated_packed_e_size();for (unsigned int i = 0; i < count; i++) {
data_size += ::google::protobuf::internal::WireFormatLite::EnumSize(
this->repeated_packed_e(i));
}
if (data_size > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::Int32Size(data_size);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(data_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_repeated_packed_e_cached_byte_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
total_size += data_size;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_unexpected_e = 4;
{
size_t data_size = 0;
unsigned int count = this->repeated_packed_unexpected_e_size();for (unsigned int i = 0; i < count; i++) {
data_size += ::google::protobuf::internal::WireFormatLite::EnumSize(
this->repeated_packed_unexpected_e(i));
}
total_size += (1UL * count) + data_size;
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum e = 1;
if (has_e()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->e());
}
switch (o_case()) {
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_1 = 5;
case kOneofE1: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->oneof_e_1());
break;
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_2 = 6;
case kOneofE2: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->oneof_e_2());
break;
}
case O_NOT_SET: {
break;
}
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void MyMessage::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:proto2_preserve_unknown_enum_unittest.MyMessage)
GOOGLE_DCHECK_NE(&from, this);
const MyMessage* source =
::google::protobuf::internal::DynamicCastToGenerated<const MyMessage>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:proto2_preserve_unknown_enum_unittest.MyMessage)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:proto2_preserve_unknown_enum_unittest.MyMessage)
MergeFrom(*source);
}
}
void MyMessage::MergeFrom(const MyMessage& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:proto2_preserve_unknown_enum_unittest.MyMessage)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
repeated_e_.MergeFrom(from.repeated_e_);
repeated_packed_e_.MergeFrom(from.repeated_packed_e_);
repeated_packed_unexpected_e_.MergeFrom(from.repeated_packed_unexpected_e_);
if (from.has_e()) {
set_e(from.e());
}
switch (from.o_case()) {
case kOneofE1: {
set_oneof_e_1(from.oneof_e_1());
break;
}
case kOneofE2: {
set_oneof_e_2(from.oneof_e_2());
break;
}
case O_NOT_SET: {
break;
}
}
}
void MyMessage::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:proto2_preserve_unknown_enum_unittest.MyMessage)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void MyMessage::CopyFrom(const MyMessage& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:proto2_preserve_unknown_enum_unittest.MyMessage)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool MyMessage::IsInitialized() const {
return true;
}
void MyMessage::Swap(MyMessage* other) {
if (other == this) return;
InternalSwap(other);
}
void MyMessage::InternalSwap(MyMessage* other) {
repeated_e_.InternalSwap(&other->repeated_e_);
repeated_packed_e_.InternalSwap(&other->repeated_packed_e_);
repeated_packed_unexpected_e_.InternalSwap(&other->repeated_packed_unexpected_e_);
std::swap(e_, other->e_);
std::swap(o_, other->o_);
std::swap(_oneof_case_[0], other->_oneof_case_[0]);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_internal_metadata_.Swap(&other->_internal_metadata_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata MyMessage::GetMetadata() const {
protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_google_2fprotobuf_2funittest_5fpreserve_5funknown_5fenum2_2eproto::file_level_metadata[kIndexInFileMessages];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// MyMessage
// optional .proto2_preserve_unknown_enum_unittest.MyEnum e = 1;
bool MyMessage::has_e() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
void MyMessage::set_has_e() {
_has_bits_[0] |= 0x00000001u;
}
void MyMessage::clear_has_e() {
_has_bits_[0] &= ~0x00000001u;
}
void MyMessage::clear_e() {
e_ = 0;
clear_has_e();
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::e() const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.e)
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(e_);
}
void MyMessage::set_e(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
set_has_e();
e_ = value;
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.e)
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_e = 2;
int MyMessage::repeated_e_size() const {
return repeated_e_.size();
}
void MyMessage::clear_repeated_e() {
repeated_e_.Clear();
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::repeated_e(int index) const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_e)
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(repeated_e_.Get(index));
}
void MyMessage::set_repeated_e(int index, ::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_e_.Set(index, value);
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_e)
}
void MyMessage::add_repeated_e(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_e_.Add(value);
// @@protoc_insertion_point(field_add:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_e)
}
const ::google::protobuf::RepeatedField<int>&
MyMessage::repeated_e() const {
// @@protoc_insertion_point(field_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_e)
return repeated_e_;
}
::google::protobuf::RepeatedField<int>*
MyMessage::mutable_repeated_e() {
// @@protoc_insertion_point(field_mutable_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_e)
return &repeated_e_;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_e = 3 [packed = true];
int MyMessage::repeated_packed_e_size() const {
return repeated_packed_e_.size();
}
void MyMessage::clear_repeated_packed_e() {
repeated_packed_e_.Clear();
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::repeated_packed_e(int index) const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_e)
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(repeated_packed_e_.Get(index));
}
void MyMessage::set_repeated_packed_e(int index, ::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_packed_e_.Set(index, value);
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_e)
}
void MyMessage::add_repeated_packed_e(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_packed_e_.Add(value);
// @@protoc_insertion_point(field_add:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_e)
}
const ::google::protobuf::RepeatedField<int>&
MyMessage::repeated_packed_e() const {
// @@protoc_insertion_point(field_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_e)
return repeated_packed_e_;
}
::google::protobuf::RepeatedField<int>*
MyMessage::mutable_repeated_packed_e() {
// @@protoc_insertion_point(field_mutable_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_e)
return &repeated_packed_e_;
}
// repeated .proto2_preserve_unknown_enum_unittest.MyEnum repeated_packed_unexpected_e = 4;
int MyMessage::repeated_packed_unexpected_e_size() const {
return repeated_packed_unexpected_e_.size();
}
void MyMessage::clear_repeated_packed_unexpected_e() {
repeated_packed_unexpected_e_.Clear();
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::repeated_packed_unexpected_e(int index) const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_unexpected_e)
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(repeated_packed_unexpected_e_.Get(index));
}
void MyMessage::set_repeated_packed_unexpected_e(int index, ::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_packed_unexpected_e_.Set(index, value);
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_unexpected_e)
}
void MyMessage::add_repeated_packed_unexpected_e(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
repeated_packed_unexpected_e_.Add(value);
// @@protoc_insertion_point(field_add:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_unexpected_e)
}
const ::google::protobuf::RepeatedField<int>&
MyMessage::repeated_packed_unexpected_e() const {
// @@protoc_insertion_point(field_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_unexpected_e)
return repeated_packed_unexpected_e_;
}
::google::protobuf::RepeatedField<int>*
MyMessage::mutable_repeated_packed_unexpected_e() {
// @@protoc_insertion_point(field_mutable_list:proto2_preserve_unknown_enum_unittest.MyMessage.repeated_packed_unexpected_e)
return &repeated_packed_unexpected_e_;
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_1 = 5;
bool MyMessage::has_oneof_e_1() const {
return o_case() == kOneofE1;
}
void MyMessage::set_has_oneof_e_1() {
_oneof_case_[0] = kOneofE1;
}
void MyMessage::clear_oneof_e_1() {
if (has_oneof_e_1()) {
o_.oneof_e_1_ = 0;
clear_has_o();
}
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::oneof_e_1() const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.oneof_e_1)
if (has_oneof_e_1()) {
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(o_.oneof_e_1_);
}
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(0);
}
void MyMessage::set_oneof_e_1(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
if (!has_oneof_e_1()) {
clear_o();
set_has_oneof_e_1();
}
o_.oneof_e_1_ = value;
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.oneof_e_1)
}
// optional .proto2_preserve_unknown_enum_unittest.MyEnum oneof_e_2 = 6;
bool MyMessage::has_oneof_e_2() const {
return o_case() == kOneofE2;
}
void MyMessage::set_has_oneof_e_2() {
_oneof_case_[0] = kOneofE2;
}
void MyMessage::clear_oneof_e_2() {
if (has_oneof_e_2()) {
o_.oneof_e_2_ = 0;
clear_has_o();
}
}
::proto2_preserve_unknown_enum_unittest::MyEnum MyMessage::oneof_e_2() const {
// @@protoc_insertion_point(field_get:proto2_preserve_unknown_enum_unittest.MyMessage.oneof_e_2)
if (has_oneof_e_2()) {
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(o_.oneof_e_2_);
}
return static_cast< ::proto2_preserve_unknown_enum_unittest::MyEnum >(0);
}
void MyMessage::set_oneof_e_2(::proto2_preserve_unknown_enum_unittest::MyEnum value) {
assert(::proto2_preserve_unknown_enum_unittest::MyEnum_IsValid(value));
if (!has_oneof_e_2()) {
clear_o();
set_has_oneof_e_2();
}
o_.oneof_e_2_ = value;
// @@protoc_insertion_point(field_set:proto2_preserve_unknown_enum_unittest.MyMessage.oneof_e_2)
}
bool MyMessage::has_o() const {
return o_case() != O_NOT_SET;
}
void MyMessage::clear_has_o() {
_oneof_case_[0] = O_NOT_SET;
}
MyMessage::OCase MyMessage::o_case() const {
return MyMessage::OCase(_oneof_case_[0]);
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// @@protoc_insertion_point(namespace_scope)
} // namespace proto2_preserve_unknown_enum_unittest
// @@protoc_insertion_point(global_scope)
| dbHunter/bson_rtdb | 3rd/protobuf/src/google/protobuf/unittest_preserve_unknown_enum2.pb.cc | C++ | apache-2.0 | 35,954 |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2016 Oracle and/or its affiliates. All rights reserved.
*
* Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common
* Development and Distribution License("CDDL") (collectively, the
* "License"). You may not use this file except in compliance with the
* License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html
* or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
* specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header
* Notice in each file and include the License file at
* nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the
* License Header, with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
* If you wish your version of this file to be governed by only the CDDL
* or only the GPL Version 2, indicate your decision by adding
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license." If you do not indicate a
* single choice of license, a recipient has the option to distribute
* your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above.
* However, if you add GPL Version 2 code and therefore, elected the GPL
* Version 2 license, then the option applies only if the new code is
* made subject to such option by the copyright holder.
*
* Contributor(s):
*
* Portions Copyrighted 2016 Sun Microsystems, Inc.
*/
package beans;
import java.io.Serializable;
import java.util.Collection;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author marc.gareta
*/
@Entity
@Table(name = "TIPO_SERVICIO", catalog = "", schema = "APP")
@XmlRootElement
@NamedQueries({
@NamedQuery(name = "TIPO_SERVICIO.findAll", query = "SELECT t FROM TipoServicio t"),
@NamedQuery(name = "TIPO_SERVICIO.findAllNombre", query = "SELECT t.nombre FROM TipoServicio t"),
@NamedQuery(name = "TIPO_SERVICIO.findById", query = "SELECT t FROM TipoServicio t WHERE t.id = :id"),
@NamedQuery(name = "TIPO_SERVICIO.findByNombre", query = "SELECT t FROM TipoServicio t WHERE t.nombre = :nombre"),
@NamedQuery(name = "TIPO_SERVICIO.deleteAll", query = "DELETE FROM TipoServicio t")})
public class TipoServicio implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(nullable = false)
private Integer id;
@Column(length = 100)
private String nombre;
@OneToMany(mappedBy = "tipoServicio")
private Collection<ParteIncidencia> parteIncidenciaCollection;
public TipoServicio() {
}
public TipoServicio(Integer id) {
this.id = id;
}
public TipoServicio(String nombre) {
this.nombre = nombre;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getNombre() {
return nombre;
}
public void setNombre(String nombre) {
this.nombre = nombre;
}
@XmlTransient
public Collection<ParteIncidencia> getParteIncidenciaCollection() {
return parteIncidenciaCollection;
}
public void setParteIncidenciaCollection(Collection<ParteIncidencia> parteIncidenciaCollection) {
this.parteIncidenciaCollection = parteIncidenciaCollection;
}
@Override
public int hashCode() {
int hash = 0;
hash += (id != null ? id.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof TipoServicio)) {
return false;
}
TipoServicio other = (TipoServicio) object;
if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
return false;
}
return true;
}
@Override
public String toString() {
return "beans.TipoServicio[ id=" + id + " ]";
}
}
| MGareta/BBVA | src/java/beans/TipoServicio.java | Java | apache-2.0 | 5,309 |
---
title: Announcing Istio 1.11.6
linktitle: 1.11.6
subtitle: Patch Release
description: Istio 1.11.6 patch release.
publishdate: 2022-02-03
release: 1.11.6
aliases:
- /news/announcing-1.11.6
---
This release contains bug fixes to improve robustness. This release note describes what’s different between Istio 1.11.5 and Istio 1.11.6.
{{< relnote >}}
## Changes
- **Added** privileged flag to Istio-CNI Helm charts to set `securityContext` flag.
([Issue #34211](https://github.com/istio/istio/issues/34211))
- **Added** an option to disable a number of nonstandard kubeconfig authentication methods when using multicluster secret by configuring the
`PILOT_INSECURE_MULTICLUSTER_KUBECONFIG_OPTIONS` environment variable in Istiod. By default, this option is configured to allow all methods; future versions will restrict this by default.
- **Fixed** an issue where enabling tracing with the telemetry API would cause a malformed host header to be used in the trace report request. ([Issue #35750](https://github.com/istio/istio/issues/35750)),([Issue #36166](https://github.com/istio/istio/issues/36166)),([Issue #36521](https://github.com/istio/istio/issues/36521))
- **Fixed** error format after json marshal in virtual machine config.
([Issue #36358](https://github.com/istio/istio/issues/36358))
- **Fixed** endpoint slice cache memory leak.
- **Fixed** an issue where `EnvoyFilter` patches on `virtualOutbound-blackhole` could cause memory leaks.
- **Fixed** an issue where using `ISTIO_MUTUAL` TLS mode in Gateways while also setting `credentialName` causes mutual TLS to not be configured.
For backwards compatibility, this only introduces a warning. To enable the new behavior, set the `PILOT_ENABLE_LEGACY_ISTIO_MUTUAL_CREDENTIAL_NAME=false`
environment variable in Istiod. This will cause invalid configurations to be rejected, and will be the default behavior in future releases.
| istio/istio.io | content/en/news/releases/1.11.x/announcing-1.11.6/index.md | Markdown | apache-2.0 | 1,907 |
namespace Snippets3.Serialization
{
using NServiceBus;
public class BinarySerializerUsage
{
public void Simple()
{
#region BinarySerialization
Configure configure = Configure.With();
configure.BinarySerializer();
#endregion
}
}
} | WojcikMike/docs.particular.net | Snippets/Snippets_3/Serialization/BinarySerializerUsage.cs | C# | apache-2.0 | 325 |
Ext.define('TaxRate', {
extend: 'Ext.data.Model',
fields: [{name: "id"},
{name: "date",type: 'date',dateFormat: 'Y-m-d'},
{name: "rate"},
{name: "remark"},
{name: "create_time",type: 'date',dateFormat: 'timestamp'},
{name: "update_time",type: 'date',dateFormat: 'timestamp'},
{name: "creater"},
{name: "updater"}]
});
var taxRateStore = Ext.create('Ext.data.Store', {
model: 'TaxRate',
proxy: {
type: 'ajax',
reader: 'json',
url: homePath+'/public/erp/setting_tax/gettaxrate/option/data'
}
});
var taxRateRowEditing = Ext.create('Ext.grid.plugin.CellEditing', {
clicksToEdit: 1
});
// Tax rate management window
var taxRateWin = Ext.create('Ext.window.Window', {
title: '税率管理',
border: 0,
height: 300,
width: 600,
modal: true,
constrain: true,
closeAction: 'hide',
layout: 'fit',
tools: [{
type: 'refresh',
tooltip: 'Refresh',
scope: this,
handler: function(){taxRateStore.reload();}
}],
items: [{
xtype: 'gridpanel',
id: 'taxRateGrid',
columnLines: true,
store: taxRateStore,
selType: 'checkboxmodel',
tbar: [{
xtype: 'hiddenfield',
id: 'tax_id_to_rate'
}, {
text: '添加税率',
iconCls: 'icon-add',
scope: this,
handler: function(){
taxRateRowEditing.cancelEdit();
var r = Ext.create('TaxRate', {
date: Ext.util.Format.date(new Date(), 'Y-m-d'),
rate: 1
});
taxRateStore.insert(0, r);
taxRateRowEditing.startEdit(0, 0);
}
}, {
text: '删除税率',
iconCls: 'icon-delete',
scope: this,
handler: function(){
var selection = Ext.getCmp('taxRateGrid').getView().getSelectionModel().getSelection();
if(selection.length > 0){
taxRateStore.remove(selection);
}else{
Ext.MessageBox.alert('错误', '没有选择删除对象!');
}
}
}, {
text: '保存修改',
iconCls: 'icon-save',
scope: this,
handler: function(){
var updateRecords = taxRateStore.getUpdatedRecords();
var insertRecords = taxRateStore.getNewRecords();
var deleteRecords = taxRateStore.getRemovedRecords();
                // Check whether any data has been modified
if(updateRecords.length + insertRecords.length + deleteRecords.length > 0){
var changeRows = {
updated: [],
inserted: [],
deleted: []
}
for(var i = 0; i < updateRecords.length; i++){
var data = updateRecords[i].data;
changeRows.updated.push(data)
}
for(var i = 0; i < insertRecords.length; i++){
var data = insertRecords[i].data;
changeRows.inserted.push(data)
}
for(var i = 0; i < deleteRecords.length; i++){
changeRows.deleted.push(deleteRecords[i].data)
}
Ext.MessageBox.confirm('确认', '确定保存修改内容?', function(button, text){
if(button == 'yes'){
var json = Ext.JSON.encode(changeRows);
var selection = Ext.getCmp('taxGrid').getView().getSelectionModel().getSelection();
Ext.Msg.wait('提交中,请稍后...', '提示');
Ext.Ajax.request({
url: homePath+'/public/erp/setting_tax/edittaxrate',
params: {json: json, tax_id: Ext.getCmp('tax_id_to_rate').value},
method: 'POST',
success: function(response, options) {
var data = Ext.JSON.decode(response.responseText);
if(data.success){
Ext.MessageBox.alert('提示', data.info);
taxRateStore.reload();
taxStore.reload();
}else{
Ext.MessageBox.alert('错误', data.info);
}
},
failure: function(response){
Ext.MessageBox.alert('错误', '保存提交失败');
}
});
}
});
}else{
Ext.MessageBox.alert('提示', '没有修改任何数据!');
}
}
}, '->', {
text: '刷新',
iconCls: 'icon-refresh',
handler: function(){
taxRateStore.reload();
}
}],
plugins: taxRateRowEditing,
columns: [{
xtype: 'rownumberer'
}, {
text: 'ID',
dataIndex: 'id',
hidden: true,
flex: 1
}, {
text: '生效日期',
dataIndex: 'date',
renderer: Ext.util.Format.dateRenderer('Y-m-d'),
editor: {
xtype: 'datefield',
editable: false,
format: 'Y-m-d'
},
flex: 3
}, {
text: '税率',
dataIndex: 'rate',
editor: 'numberfield',
flex: 2
}, {
text: '备注',
dataIndex: 'remark',
editor: 'textfield',
flex: 5
}, {
text: '创建人',
hidden: true,
dataIndex: 'creater',
flex: 2
}, {
text: '创建时间',
hidden: true,
dataIndex: 'create_time',
renderer : Ext.util.Format.dateRenderer('Y-m-d H:i:s'),
flex: 3
}, {
text: '更新人',
hidden: true,
dataIndex: 'updater',
flex: 2
}, {
text: '更新时间',
hidden: true,
dataIndex: 'update_time',
renderer : Ext.util.Format.dateRenderer('Y-m-d H:i:s'),
flex: 3
}]
}]
}); | eoasoft/evolve | application/modules/erp/views/scripts/setting/js/tax_rate.js | JavaScript | apache-2.0 | 6,462 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_55) on Thu Jan 22 11:30:09 CST 2015 -->
<meta http-equiv="Content-Type" content="text/html" charset="utf-8">
<title>ICreate</title>
<meta name="date" content="2015-01-22">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ICreate";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ICreate.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICallback.html" title="interface in com.share.mod.pay.ali.wap.inter"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/IInterrupt.html" title="interface in com.share.mod.pay.ali.wap.inter"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/share/mod/pay/ali/wap/inter/ICreate.html" target="_top">Frames</a></li>
<li><a href="ICreate.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">com.share.mod.pay.ali.wap.inter</div>
<h2 title="Interface ICreate" class="title">Interface ICreate</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>public interface <span class="strong">ICreate</span></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>java.lang.String</code></td>
<td class="colLast"><code><strong><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICreate.html#createOutTradeNo()">createOutTradeNo</a></strong>()</code>
<div class="block">创建商户订单号</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICreate.html#handleException(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.Exception)">handleException</a></strong>(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response,
java.lang.Exception e)</code>
<div class="block">处理异常</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICreate.html#handleNumberFormatException(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)">handleNumberFormatException</a></strong>(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response)</code>
<div class="block">处理金额异常</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICreate.html#save(javax.servlet.http.HttpServletRequest)">save</a></strong>(javax.servlet.http.HttpServletRequest request)</code>
<div class="block">根据请求记录创建订单记录</div>
</td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="createOutTradeNo()">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>createOutTradeNo</h4>
<pre>java.lang.String createOutTradeNo()</pre>
<div class="block">创建商户订单号</div>
<dl><dt><span class="strong">Returns:</span></dt><dd></dd></dl>
</li>
</ul>
<a name="handleNumberFormatException(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>handleNumberFormatException</h4>
<pre>void handleNumberFormatException(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response)
throws javax.servlet.ServletException,
java.io.IOException</pre>
<div class="block">处理金额异常</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>request</code> - </dd><dd><code>response</code> - </dd>
<dt><span class="strong">Throws:</span></dt>
<dd><code>javax.servlet.ServletException</code></dd>
<dd><code>java.io.IOException</code></dd></dl>
</li>
</ul>
<a name="handleException(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.Exception)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>handleException</h4>
<pre>void handleException(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response,
java.lang.Exception e)
throws javax.servlet.ServletException,
java.io.IOException</pre>
<div class="block">处理异常</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>request</code> - </dd><dd><code>response</code> - </dd>
<dt><span class="strong">Throws:</span></dt>
<dd><code>javax.servlet.ServletException</code></dd>
<dd><code>java.io.IOException</code></dd></dl>
</li>
</ul>
<a name="save(javax.servlet.http.HttpServletRequest)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>save</h4>
<pre>void save(javax.servlet.http.HttpServletRequest request)
throws javax.servlet.ServletException,
java.io.IOException</pre>
<div class="block">根据请求记录创建订单记录</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>request</code> - </dd><dd><code>response</code> - </dd>
<dt><span class="strong">Throws:</span></dt>
<dd><code>javax.servlet.ServletException</code></dd>
<dd><code>java.io.IOException</code></dd></dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ICreate.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/ICallback.html" title="interface in com.share.mod.pay.ali.wap.inter"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../../../../com/share/mod/pay/ali/wap/inter/IInterrupt.html" title="interface in com.share.mod.pay.ali.wap.inter"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/share/mod/pay/ali/wap/inter/ICreate.html" target="_top">Frames</a></li>
<li><a href="ICreate.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| zhoukekestar/zkk-components | AlipayWAP/doc/com/share/mod/pay/ali/wap/inter/ICreate.html | HTML | apache-2.0 | 10,834 |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Aula2505.Views.Categorias
{
public partial class Excluir
{
}
}
| gabyjabonski/Aulas-Asp.net | Aula2505/Aula2505/Views/Categorias/Excluir.aspx.designer.cs | C# | apache-2.0 | 448 |
---
layout: post
title: "2017 Year-End Review"
subtitle: "What did I actually get done this year~"
date: 2018-01-01
author: "Franary"
tags:
- Blog
- Rambling
- 2018
---
# 2017
## Year-End Summary
## Code
- GitHub: 218 commits
- Learning
  - Alibaba Cloud: Cloud Server Engineer certification
  - PHP cURL library, Laravel framework
  - Basic MySQL syntax
- Projects
  - Image crawler [Image-spider](https://github.com/Chenjinyi/Image-spider)
  - Four official websites (internal team work)
  - Personal website API [MyWebApi](https://github.com/Chenjinyi/MyWebApi)
  - Personal online trading platform [Art_Online](https://github.com/Chenjinyi/Art_Online)
  - Project progress tracker [Project_progress](https://github.com/Chenjinyi/Project_progress)
  - Shell web management console (unfinished)
## Hardware
- Wacom CTH-490
## Game
- Single-player games
  - 100% completed *Horizon Zero Dawn*
  - 100% completed *NieR: Automata*
  - 100% completed *The Witcher 3*
  - 100% completed *WILL: A Wonderful World*
  - *Watch_Dogs2*, all 2017 DLC
  - *HackNet*, 8 hours played
  - *Steins;Gate*, 3 hours played
  - *Fallout 4*, 1 hour played
- Online games
  - Finished *BattleBlock Theater*
  - *Rainbow Six*, 121 hours played
  - *JX Online 3: Remastered (third beta)*, 14 hours played
  - *JX Online 3*, 42 hours played
  - *Moonlight Blade*, 152 hours played
  - *CSGO*, 154 hours played
  - *Diablo III*, 97 hours played
  - *PUBG*, 10 hours played
  - *Overwatch*, 40 hours played
  - *Stick Fight The Game*, 2 hours played
## Coffee
- Coffee beans
  - Arabica (multiple origins)
  - Mandheling (multiple origins)
  - Sidamo
  - Sumatra
  - Yirgacheffe
  - Colombia
- Equipment
  - French press
  - Home espresso machine
  - Home coffee grinder
| Chenjinyi/chenjinyi.github.io | _posts/2018-01-01-年终总结.md | Markdown | apache-2.0 | 1,811 |
package ch.bfh.swos.bookapp.jpa.model;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;
import static javax.persistence.GenerationType.IDENTITY;
import static javax.persistence.TemporalType.DATE;
/**
* Entity implementation class for Entity: Book
*
*/
@Entity
public class Book implements Serializable {
@Id
@GeneratedValue(strategy = IDENTITY)
private Long id;
private String bookId;
private String title;
@Temporal(DATE)
private Date releaseDate;
private static final long serialVersionUID = 1L;
@ManyToOne
private Author author;
public Book() {
super();
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getBookId() {
return bookId;
}
public void setBookId(String bookId) {
this.bookId = bookId;
}
public String getTitle() {
return this.title;
}
public void setTitle(String title) {
this.title = title;
}
public Date getReleaseDate() {
return this.releaseDate;
}
public void setReleaseDate(Date releaseDate) {
this.releaseDate = releaseDate;
}
public Author getAuthor() {
return author;
}
public void setAuthor(Author author) {
this.author = author;
}
}
| rvillars/bookapp-cqrs | ch.bfh.swos.bookapp.jpa/src/main/java/ch/bfh/swos/bookapp/jpa/model/Book.java | Java | apache-2.0 | 1,241 |
<?php
namespace App\Util;
use Illuminate\Support\Facades\DB;
class Access
{
    // list all perms
    // if the returned array is empty then the user doesn't have permission to list the perms
public static function listPerm($userid, $appcode)
{
if (self::can_editPerm($userid, $appcode) == false) return [];
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
return DB::table('user_app')
->join('users', 'user_app.userid', '=', 'users.id')
->where('user_app.appid', $app->id)->get();
}
    // used to delete a user from an app
    // return
    //  -1 access denied
    //  -3 app doesn't exist
    //  -4 cannot delete owner
    //
public static function deletePerm($userid, $otherid, $appcode)
{
// get owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if ($otherid == $app->ownerid) return -4;
        if (self::can_editPerm($userid, $appcode)) {
DB::table('user_app')->where('appid', $app->id)->where('userid', $otherid)->delete();
return 0;
}
return -1;
}
    // used to add a new user to an app,
    // or let $userid set permissions for $otheruser
    // if $can_perm is not null, its value is applied
    // if $can_struct is not null, its value is applied
    // if $can_report is not null, its value is applied
    // 0 means unset, 1 means set
    // return 0 if success
    // -1: access denied
    // -2 other user does not exist in app, must be added first
    // -3 app doesn't exist
    // -4 cannot set perm for owner
    // -5 if user doesn't exist
public static function setPerm($userid, $otheruser, $appcode, $can_perm, $can_struct, $can_report)
{
//check if user existed
if (DB::table('users')->where('id', $otheruser)->count() + DB::table('users')->where('id', $userid)->count() != 2)
return -5;
// get owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if (self::can_editPerm($userid, $appcode)) {
$perm = DB::table('user_app')->where('appid', $app->id)->where('userid', $otheruser)->first();
if ($perm == null) {
if ($app->ownerid == $otheruser)
DB::table('user_app')->insert(
['appid' => $app->id, 'userid' => $otheruser, 'can_perm' => 1, 'can_struct' => 1, 'can_report' => 1]
);
else
DB::table('user_app')->insert(
['appid' => $app->id, 'userid' => $otheruser, 'can_perm' => 0, 'can_struct' => 0, 'can_report' => 1]
);
} else {
if ($app->ownerid == $otheruser) {
return -4;
}
}
$permrecord = [];
if ($can_perm != null) $permrecord['can_perm'] = $can_perm;
if ($can_struct != null) $permrecord['can_struct'] = $can_struct;
if ($can_report != null) $permrecord['can_report'] = $can_report;
if (count($permrecord) != 0) {
DB::table('user_app')->where('appid', $app->id)->where('userid', $otheruser)->update($permrecord);
}
return 0;
}
        return -1;
}
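    // Illustrative usage sketch (not part of the original class), assuming valid user
    // ids and an existing app code; names below are hypothetical:
    //   $result = Access::setPerm($currentUserId, $targetUserId, $appCode, 1, null, 1);
    //   // 0 on success; negative codes are listed in the comment above setPerm()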
public static function isBanned($id){
$user = DB::table('users')->where('id',$id)->first();
if($user == null) return true;
if(!$user->banned)
return false;
return true;
}
public static function can_editPerm($userid, $appcode)
{
// full access for app owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if(self::isBanned($app->ownerid)) return false;
if ($app->ownerid == $userid) return true;
$perm = DB::table('user_app')->where('appid', $app->id)->where('userid', $userid)->first();
if ($perm == null) return false;
if ($perm->can_perm == 1) return true;
return false;
}
public static function can_editStruct($userid, $appcode)
{
// full access for app owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if(self::isBanned($app->ownerid)) return false;
if ($app->ownerid == $userid) return true;
$perm = DB::table('user_app')->where('appid', $app->id)->where('userid', $userid)->first();
if ($perm == null) return false;
if ($perm->can_struct == 1) return true;
return false;
}
public static function can_view($userid, $appcode)
{
// full access for app owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if(self::isBanned($app->ownerid)) return false;
if ($app->ownerid == $userid) return true;
$perm = DB::table('user_app')->where('appid', $app->id)->where('userid', $userid)->first();
if ($perm == null) return false;
return true;
}
public static function can_editReport($userid, $appcode)
{
// full access for app owner
$app = DB::table('apps')->where('code', $appcode)->first();
if ($app == null) return -3;
if(self::isBanned($app->ownerid)) return false;
if ($app->ownerid == $userid) return true;
$perm = DB::table('user_app')->where('appid', $app->id)->where('userid', $userid)->first();
if ($perm == null) return false;
if ($perm->can_report == 1) return true;
return false;
}
}
| meotrics/meotrics | dashboard/app/Util/Access.php | PHP | apache-2.0 | 4,937 |
/*
*
* Copyright (c) 2013-2017 Nest Labs, Inc.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @file
* This header file defines the <tt>nl::Inet::TCPEndPoint</tt>
* class, where the Nest Inet Layer encapsulates methods for
* interacting with TCP transport endpoints (SOCK_DGRAM sockets
* on Linux and BSD-derived systems) or LwIP TCP protocol
* control blocks, as the system is configured accordingly.
*/
#ifndef TCPENDPOINT_H
#define TCPENDPOINT_H
#include <InetLayer/EndPointBasis.h>
#include <InetLayer/IPAddress.h>
#include <SystemLayer/SystemPacketBuffer.h>
#if WEAVE_SYSTEM_CONFIG_USE_SOCKETS
#include <netinet/tcp.h>
#endif // WEAVE_SYSTEM_CONFIG_USE_SOCKETS
namespace nl {
namespace Inet {
class InetLayer;
#if INET_CONFIG_TCP_CONN_REPAIR_SUPPORTED
#if (!defined(TCP_REPAIR) || !defined(TCP_REPAIR_QUEUE) || !defined(TCP_REPAIR_OPTIONS) || \
!defined(TCPI_OPT_SACK) || !defined(TCPI_OPT_WSCALE) || !defined(TCPI_OPT_TIMESTAMPS) || \
!defined(TCPOPT_MAXSEG) || !defined(TCP_REPAIR_WINDOW))
#error "INET_CONFIG_TCP_CONN_REPAIR_SUPPORTED set but platform does not support TCP REPAIR"
#endif
typedef struct TCPConnRepairInfo
{
IPAddress srcIP; // Source IP address
IPAddress dstIP; // Destination IP address
IPAddressType addrType; // Address family type
uint16_t srcPort; // Source port
uint16_t dstPort; // Destination port
uint32_t txSeq; // Transmit sequence
uint32_t rxSeq; // Receive sequence
uint32_t sndWl1; // Segment seq number for last window update
uint32_t sndWnd; // Send window
uint32_t maxWindow; // Max window
uint32_t rcvWnd; // Receive window
uint32_t rcvWup; // Last ack number that was sent/
uint32_t tsVal; // TCP Timestamp
uint32_t tsecr;
uint16_t mss; // Max segment size
uint8_t sndWscale; // Send window scale
uint8_t rcvWscale; // Receive window scale
uint8_t tcpOptions; // TCP options
bool IsValid (void) const;
void Dump (void) const;
} TCPConnRepairInfo;
#endif // INET_CONFIG_TCP_CONN_REPAIR_SUPPORTED
/**
* @brief Objects of this class represent TCP transport endpoints.
*
* @details
* Nest Inet Layer encapsulates methods for interacting with TCP transport
* endpoints (SOCK_STREAM sockets on Linux and BSD-derived systems) or LwIP
* TCP protocol control blocks, as the system is configured accordingly.
*/
class NL_DLL_EXPORT TCPEndPoint : public EndPointBasis
{
friend class InetLayer;
public:
/** Control switch indicating whether the application is receiving data. */
bool ReceiveEnabled;
/**
* @brief Basic dynamic state of the underlying endpoint.
*
* @details
* Objects are initialized in the "ready" state, proceed to subsequent
* states corresponding to a simplification of the states of the TCP
* transport state machine.
*
* @note
* The \c kBasisState_Closed state enumeration is mapped to \c kState_Ready for historical binary-compatibility reasons. The
* existing \c kState_Closed exists to identify separately the distinction between "not opened yet" and "previously opened now
* closed" that existed previously in the \c kState_Ready and \c kState_Closed states.
*/
enum {
kState_Ready = kBasisState_Closed, /**< Endpoint initialized, but not bound. */
kState_Bound = 1, /**< Endpoint bound, but not listening. */
kState_Listening = 2, /**< Endpoint receiving connections. */
kState_Connecting = 3, /**< Endpoint attempting to connect. */
kState_Connected = 4, /**< Endpoint connected, ready for tx/rx. */
kState_SendShutdown = 5, /**< Endpoint initiated its half-close. */
kState_ReceiveShutdown = 6, /**< Endpoint responded to half-close. */
kState_Closing = 7, /**< Endpoint closing bidirectionally. */
kState_Closed = 8 /**< Endpoint closed, ready for release. */
} State;
/**
* @brief Bind the endpoint to an interface IP address.
*
* @param[in] addrType the protocol version of the IP address
* @param[in] addr the IP address (must be an interface address)
* @param[in] port the TCP port
* @param[in] reuseAddr option to share binding with other endpoints
*
* @retval INET_NO_ERROR success: endpoint bound to address
* @retval INET_ERROR_INCORRECT_STATE endpoint has been bound previously
* @retval INET_NO_MEMORY insufficient memory for endpoint
*
* @retval INET_ERROR_WRONG_PROTOCOL_TYPE
* \c addrType does not match \c IPVer.
*
* @retval INET_ERROR_WRONG_ADDRESS_TYPE
* \c addrType is \c kIPAddressType_Any, or the type of \c addr is not
* equal to \c addrType.
*
* @retval other another system or platform error
*
* @details
* Binds the endpoint to the specified network interface IP address.
*
* On LwIP, this method must not be called with the LwIP stack lock
* already acquired.
*/
INET_ERROR Bind(IPAddressType addrType, IPAddress addr, uint16_t port, bool reuseAddr = false);
/**
* @brief Prepare the endpoint to receive TCP messages.
*
* @param[in] backlog maximum depth of connection acceptance queue
*
* @retval INET_NO_ERROR success: endpoint ready to receive messages.
* @retval INET_ERROR_INCORRECT_STATE endpoint is already listening.
*
* @details
* If \c State is already \c kState_Listening, then no operation is
* performed, otherwise the \c State is set to \c kState_Listening and
     *  the endpoint is prepared to receive TCP messages, according to the
* semantics of the platform.
*
* On some platforms, the \c backlog argument is not used (the depth of
* the queue is fixed; only one connection may be accepted at a time).
*
* On LwIP systems, this method must not be called with the LwIP stack
     *  lock already acquired.
*/
INET_ERROR Listen(uint16_t backlog);
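    // Illustrative sketch (not part of the original header): a minimal server-side
    // flow over an endpoint `ep` assumed to come from the InetLayer, where
    // `HandleIncoming` is a hypothetical application-defined OnConnectionReceived
    // handler and 4242 is an arbitrary example port:
    //
    //   ep->OnConnectionReceived = HandleIncoming;
    //   INET_ERROR err = ep->Bind(kIPAddressType_IPv6, IPAddress::Any, 4242, true);
    //   if (err == INET_NO_ERROR)
    //       err = ep->Listen(4);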
/**
* @brief Initiate a TCP connection.
*
* @param[in] addr the destination IP address
* @param[in] port the destination TCP port
* @param[in] intf an optional network interface indicator
*
     * @retval  INET_NO_ERROR       success: connection attempt initiated.
* @retval INET_ERROR_NOT_IMPLEMENTED system implementation not complete.
*
* @retval INET_ERROR_WRONG_ADDRESS_TYPE
* the destination address and the bound interface address do not
* have matching protocol versions or address type, or the destination
* address is an IPv6 link-local address and \c intf is not specified.
*
* @retval other another system or platform error
*
* @details
* If possible, then this method initiates a TCP connection to the
* destination \c addr (with \c intf used as the scope
* identifier for IPv6 link-local destinations) and \c port.
*/
INET_ERROR Connect(IPAddress addr, uint16_t port, InterfaceId intf = INET_NULL_INTERFACEID);
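    // Illustrative sketch (not part of the original header): a minimal client-side
    // flow over an endpoint `ep` assumed to come from the InetLayer; the two handlers
    // are hypothetical application callbacks, and destAddr/destPort are supplied by
    // the application:
    //
    //   ep->OnConnectComplete = HandleConnectComplete;
    //   ep->OnDataReceived    = HandleDataReceived;
    //   INET_ERROR err = ep->Connect(destAddr, destPort);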
/**
* @brief Extract IP address and TCP port of remote endpoint.
*
* @param[out] retAddr IP address of remote endpoint.
* @param[out] retPort TCP port of remote endpoint.
*
* @retval INET_NO_ERROR success: address and port extracted.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
* @retval INET_ERROR_CONNECTION_ABORTED TCP connection no longer open.
*
* @details
* Do not use \c NULL pointer values for either argument.
*/
INET_ERROR GetPeerInfo(IPAddress *retAddr, uint16_t *retPort) const;
/**
* @brief Extract IP address and TCP port of local endpoint.
*
* @param[out] retAddr IP address of local endpoint.
* @param[out] retPort TCP port of local endpoint.
*
* @retval INET_NO_ERROR success: address and port extracted.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
* @retval INET_ERROR_CONNECTION_ABORTED TCP connection no longer open.
*
* @details
* Do not use \c NULL pointer values for either argument.
*/
INET_ERROR GetLocalInfo(IPAddress *retAddr, uint16_t *retPort);
/**
* @brief Send message text on TCP connection.
*
     * @param[in]   data    Message text to send.
     * @param[in]   push    If \c true, then send immediately, otherwise queue.
     *
     * @retval  INET_NO_ERROR       success: \c data queued for transmission.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
*
* @details
* The <tt>Weave::System::PacketBuffer::Free</tt> method is called on the \c data argument
* regardless of whether the transmission is successful or failed.
*/
INET_ERROR Send(Weave::System::PacketBuffer *data, bool push = true);
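    // Illustrative note (not part of the original header): because Send() always
    // consumes the buffer, a caller sketch under that contract looks like the
    // following (buffer allocation shown only as an assumption):
    //
    //   Weave::System::PacketBuffer *buf = Weave::System::PacketBuffer::New();
    //   // ... append message text to buf ...
    //   INET_ERROR err = ep->Send(buf);   // buf must not be referenced after this call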
/**
* @brief Disable reception.
*
* @details
* Disable all event handlers. Data sent to an endpoint that disables
* reception will be acknowledged until the receive window is exhausted.
*/
void DisableReceive(void);
/**
* @brief Enable reception.
*
* @details
* Enable all event handlers. Data sent to an endpoint that disables
* reception will be acknowledged until the receive window is exhausted.
*/
void EnableReceive(void);
/**
* @brief EnableNoDelay
*/
INET_ERROR EnableNoDelay(void);
/**
* @brief Enable the TCP "keep-alive" option.
*
* @param[in] interval time in seconds between probe requests.
* @param[in] timeoutCount number of probes to send before timeout.
*
     * @retval  INET_NO_ERROR           success: keep-alive probes enabled.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
* @retval INET_ERROR_CONNECTION_ABORTED TCP connection no longer open.
* @retval INET_ERROR_NOT_IMPLEMENTED system implementation not complete.
*
* @retval other another system or platform error
*
* @details
* Start automatically transmitting TCP "keep-alive" probe segments every
* \c interval seconds. The connection will abort automatically after
* receiving a negative response, or after sending \c timeoutCount
* probe segments without receiving a positive response.
*
* See RFC 1122, section 4.2.3.6 for specification details.
*/
INET_ERROR EnableKeepAlive(uint16_t interval, uint16_t timeoutCount);
/**
* @brief Disable the TCP "keep-alive" option.
*
     * @retval  INET_NO_ERROR           success: keep-alive probes disabled.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
* @retval INET_ERROR_CONNECTION_ABORTED TCP connection no longer open.
* @retval INET_ERROR_NOT_IMPLEMENTED system implementation not complete.
*
* @retval other another system or platform error
*/
INET_ERROR DisableKeepAlive(void);
/**
* @brief Set the TCP TCP_USER_TIMEOUT socket option.
*
* @param[in] userTimeoutMillis Tcp user timeout value in milliseconds.
*
     * @retval  INET_NO_ERROR           success: user timeout set.
* @retval INET_ERROR_NOT_IMPLEMENTED system implementation not complete.
*
* @retval other another system or platform error
*
* @details
* When the value is greater than 0, it specifies the maximum amount of
* time in milliseconds that transmitted data may remain
* unacknowledged before TCP will forcibly close the
* corresponding connection. If the option value is specified as 0,
     *      TCP will use the system default.
* See RFC 5482, for further details.
*/
INET_ERROR SetUserTimeout(uint32_t userTimeoutMillis);
/**
* @brief Acknowledge receipt of message text.
*
* @param[in] len number of bytes to acknowledge.
*
* @retval INET_NO_ERROR success: reception acknowledged.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
* @retval INET_ERROR_CONNECTION_ABORTED TCP connection no longer open.
*
* @details
* Use this method to acknowledge reception of all or part of the data
* received. The operational semantics are undefined if \c len is larger
* than the total outstanding unacknowledged received data.
*/
INET_ERROR AckReceive(uint16_t len);
/**
* @brief Push message text back to the head of the receive queue.
*
     * @param[in]   data    Message text to push.
*
* @retval INET_NO_ERROR success: reception acknowledged.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
*
* @details
* This method may only be called by data reception event handlers to
* put an unacknowledged portion of data back on the receive queue. The
* operational semantics are undefined if the caller is outside the scope
* of a data reception event handler, \c data is not the \c Weave::System::PacketBuffer
* provided to the handler, or \c data does not contain the unacknowledged
* portion remaining after the bytes acknowledged by a prior call to the
* <tt>AckReceive(uint16_t len)</tt> method.
*/
INET_ERROR PutBackReceivedData(Weave::System::PacketBuffer *data);
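    // Illustrative sketch (not part of the original header): a hypothetical
    // OnDataReceived handler that processes the whole buffer and acknowledges it,
    // following the contract described above:
    //
    //   void HandleDataReceived(TCPEndPoint *ep, Weave::System::PacketBuffer *data)
    //   {
    //       uint16_t len = data->DataLength();
    //       // ... process the message text ...
    //       ep->AckReceive(len);
    //       Weave::System::PacketBuffer::Free(data);
    //   }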
/**
* @brief Extract the length of the data awaiting first transmit.
*
* @return Number of untransmitted bytes in the transmit queue.
*/
uint32_t PendingSendLength(void);
/**
* @brief Extract the length of the unacknowledged receive data.
*
* @return Number of bytes in the receive queue that have not yet been
* acknowledged with <tt>AckReceive(uint16_t len)</tt>.
*/
uint32_t PendingReceiveLength(void);
/**
* @brief Initiate TCP half close, in other words, finished with sending.
*
     * @retval  INET_NO_ERROR           success: half-close initiated.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
*
* @retval other another system or platform error
*/
INET_ERROR Shutdown(void);
/**
* @brief Initiate TCP full close, in other words, finished with both send and
* receive.
*
     * @retval  INET_NO_ERROR           success: close initiated.
* @retval INET_ERROR_INCORRECT_STATE TCP connection not established.
*
* @retval other another system or platform error
*/
INET_ERROR Close(void);
/**
* @brief Abortively close the endpoint, in other words, send RST packets.
*/
void Abort(void);
/**
* @brief Initiate (or continue) TCP full close, ignoring errors.
*
* @details
* The object is returned to the free pool, and all remaining user
* references are subsequently invalid.
*/
void Free(void);
/**
* @brief Extract whether TCP connection is established.
*/
bool IsConnected(void) const;
void SetConnectTimeout(const uint32_t connTimeoutMsecs);
#if INET_TCP_IDLE_CHECK_INTERVAL > 0
/**
* @brief Set timer event for idle activity.
*
* @param[in] timeoutMS
*
* @details
* Set the idle timer interval to \c timeoutMS milliseconds. A zero
* time interval implies the idle timer is disabled.
*/
void SetIdleTimeout(uint32_t timeoutMS);
#endif // INET_TCP_IDLE_CHECK_INTERVAL > 0
/**
* @brief Note activity, in other words, reset the idle timer.
*
* @details
* Reset the idle timer to zero.
*/
void MarkActive(void);
/**
* @brief Obtain an identifier for the endpoint.
*
* @return Returns an opaque unique identifier for use logs.
*/
uint16_t LogId(void);
#if INET_CONFIG_TCP_CONN_REPAIR_SUPPORTED
INET_ERROR RepairConnection(const TCPConnRepairInfo &connRepairInfo, InterfaceId intf);
#endif // INET_CONFIG_TCP_CONN_REPAIR_SUPPORTED
/**
* @brief Type of connection establishment event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
* @param[in] err \c INET_NO_ERROR if success, else another code.
*
* @details
* Provide a function of this type to the \c OnConnectComplete delegate
* member to process connection establishment events on \c endPoint. The
* \c err argument distinguishes successful connections from failures.
*/
typedef void (*OnConnectCompleteFunct)(TCPEndPoint *endPoint, INET_ERROR err);
/**
* The endpoint's connection establishment event handling function
* delegate.
*/
OnConnectCompleteFunct OnConnectComplete;
/**
* @brief Type of data reception event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
* @param[in] data The data received.
*
* @details
* Provide a function of this type to the \c OnDataReceived delegate
* member to process data reception events on \c endPoint where \c data
* is the message text received.
*
* A data reception event handler must acknowledge data processed using
* the \c AckReceive method. The \c Free method on the data buffer must
* also be invoked unless the \c PutBackReceivedData is used instead.
*/
typedef void (*OnDataReceivedFunct)(TCPEndPoint *endPoint, Weave::System::PacketBuffer *data);
/**
* The endpoint's message text reception event handling function delegate.
*/
OnDataReceivedFunct OnDataReceived;
/**
* @brief Type of data transmission event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
* @param[in] len Number of bytes added to the transmit window.
*
* @details
* Provide a function of this type to the \c OnDataSent delegate
* member to process data transmission events on \c endPoint where \c len
* is the length of the message text added to the TCP transmit window,
* which are eligible for sending by the underlying network stack.
*/
typedef void (*OnDataSentFunct)(TCPEndPoint *endPoint, uint16_t len);
/**
* The endpoint's message text transmission event handling function
* delegate.
*/
OnDataSentFunct OnDataSent;
/**
* @brief Type of connection establishment event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
* @param[in] err \c INET_NO_ERROR if success, else another code.
*
* @details
* Provide a function of this type to the \c OnConnectionClosed delegate
* member to process connection termination events on \c endPoint. The
* \c err argument distinguishes successful terminations from failures.
*/
typedef void (*OnConnectionClosedFunct)(TCPEndPoint *endPoint, INET_ERROR err);
/** The endpoint's close event handling function delegate. */
OnConnectionClosedFunct OnConnectionClosed;
/**
* @brief Type of half-close reception event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
*
* @details
* Provide a function of this type to the \c OnPeerClose delegate member
* to process connection termination events on \c endPoint.
*/
typedef void (*OnPeerCloseFunct)(TCPEndPoint *endPoint);
/** The endpoint's half-close receive event handling function delegate. */
OnPeerCloseFunct OnPeerClose;
/**
* @brief Type of connection received event handling function.
*
* @param[in] listeningEndPoint The listening TCP endpoint.
* @param[in] conEndPoint The newly received TCP endpoint.
* @param[in] peerAddr The IP address of the remote peer.
* @param[in] peerPort The TCP port of the remote peer.
*
* @details
* Provide a function of this type to the \c OnConnectionReceived delegate
* member to process connection reception events on \c listeningEndPoint.
* The newly received endpoint \c conEndPoint is located at IP address
* \c peerAddr and TCP port \c peerPort.
*/
typedef void (*OnConnectionReceivedFunct)(TCPEndPoint *listeningEndPoint, TCPEndPoint *conEndPoint,
const IPAddress &peerAddr, uint16_t peerPort);
/** The endpoint's connection receive event handling function delegate. */
OnConnectionReceivedFunct OnConnectionReceived;
/**
* @brief Type of connection acceptance error event handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
* @param[in] err The reason for the error.
*
* @details
* Provide a function of this type to the \c OnAcceptError delegate
* member to process connection acceptance error events on \c endPoint. The
* \c err argument provides specific detail about the type of the error.
*/
typedef void (*OnAcceptErrorFunct)(TCPEndPoint *endPoint, INET_ERROR err);
/**
* The endpoint's connection acceptance event handling function delegate.
*/
OnAcceptErrorFunct OnAcceptError;
#if INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
/**
* @brief Type of TCP SendIdle changed signal handling function.
*
* @param[in] endPoint The TCP endpoint associated with the event.
*
* @param[in] isIdle True if the send channel of the TCP endpoint
* is Idle, otherwise false.
* @details
* Provide a function of this type to the \c OnTCPSendIdleChanged delegate
* member to process the event of the send channel of the TCPEndPoint
* changing state between being idle and not idle.
*/
typedef void (*OnTCPSendIdleChangedFunct)(TCPEndPoint *endPoint, bool isIdle);
/** The event handling function delegate of the endpoint signaling when the
* idleness of the TCP connection's send channel changes. This is utilized
* by upper layers to take appropriate actions based on whether sent data
* has been reliably delivered to the peer. */
OnTCPSendIdleChangedFunct OnTCPSendIdleChanged;
#endif // INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
private:
static Weave::System::ObjectPool<TCPEndPoint, INET_CONFIG_NUM_TCP_ENDPOINTS> sPool;
Weave::System::PacketBuffer *mRcvQueue;
Weave::System::PacketBuffer *mSendQueue;
#if INET_TCP_IDLE_CHECK_INTERVAL > 0
uint16_t mIdleTimeout; // in units of INET_TCP_IDLE_CHECK_INTERVAL; zero means no timeout
uint16_t mRemainingIdleTime; // in units of INET_TCP_IDLE_CHECK_INTERVAL
#endif // INET_TCP_IDLE_CHECK_INTERVAL > 0
uint32_t mConnectTimeoutMsecs; // This is the timeout to wait for a Connect call to succeed or
// return an error; zero means use system defaults.
#if INET_CONFIG_OVERRIDE_SYSTEM_TCP_USER_TIMEOUT
uint32_t mUserTimeoutMillis; // The configured TCP user timeout value in milliseconds.
// If 0, assume not set.
#if INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
bool mIsTCPSendIdle; // Indicates whether the send channel of the TCPEndPoint is Idle.
uint16_t mTCPSendQueueRemainingPollCount; // The current remaining number of TCP SendQueue polls before
// the TCP User timeout period is reached.
uint32_t mTCPSendQueuePollPeriodMillis; // The configured period of active polling of the TCP
// SendQueue. If 0, assume not set.
void SetTCPSendIdleAndNotifyChange(bool aIsSendIdle);
#endif // INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
bool mUserTimeoutTimerRunning; // Indicates whether the TCP UserTimeout timer has been started.
static void TCPUserTimeoutHandler(Weave::System::Layer* aSystemLayer, void* aAppState, Weave::System::Error aError);
void StartTCPUserTimeoutTimer(void);
void StopTCPUserTimeoutTimer(void);
void RestartTCPUserTimeoutTimer(void);
void ScheduleNextTCPUserTimeoutPoll(uint32_t aTimeOut);
#if INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
uint16_t MaxTCPSendQueuePolls(void);
#endif // INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS
#if WEAVE_SYSTEM_CONFIG_USE_SOCKETS
uint32_t mBytesWrittenSinceLastProbe; // This counts the number of bytes written on the TCP socket since the
// last probe into the TCP outqueue was made.
uint32_t mLastTCPKernelSendQueueLen; // This is the measured size(in bytes) of the kernel TCP send queue
// at the end of the last user timeout window.
INET_ERROR CheckConnectionProgress(bool &IsProgressing);
#endif // WEAVE_SYSTEM_CONFIG_USE_SOCKETS
#endif // INET_CONFIG_OVERRIDE_SYSTEM_TCP_USER_TIMEOUT
TCPEndPoint(void); // not defined
TCPEndPoint(const TCPEndPoint&); // not defined
~TCPEndPoint(void); // not defined
void Init(InetLayer *inetLayer);
INET_ERROR DriveSending(void);
void DriveReceiving(void);
void HandleConnectComplete(INET_ERROR err);
void HandleAcceptError(INET_ERROR err);
INET_ERROR DoClose(INET_ERROR err, bool suppressCallback);
static bool IsConnected(int state);
static void TCPConnectTimeoutHandler(Weave::System::Layer* aSystemLayer, void* aAppState, Weave::System::Error aError);
void StartConnectTimerIfSet(void);
void StopConnectTimer(void);
#if WEAVE_SYSTEM_CONFIG_USE_LWIP
struct BufferOffset {
const Weave::System::PacketBuffer *buffer;
uint32_t offset;
};
uint32_t mUnackedLength; // Amount sent but awaiting ACK. Used as a form of reference count
                                           // to hang on to backing packet buffers until they are no longer needed.
uint32_t RemainingToSend();
BufferOffset FindStartOfUnsent();
INET_ERROR GetPCB(IPAddressType addrType);
void HandleDataSent(uint16_t len);
void HandleDataReceived(Weave::System::PacketBuffer *buf);
void HandleIncomingConnection(TCPEndPoint *pcb);
void HandleError(INET_ERROR err);
static err_t LwIPHandleConnectComplete(void *arg, struct tcp_pcb *tpcb, err_t lwipErr);
static err_t LwIPHandleIncomingConnection(void *arg, struct tcp_pcb *tcpConPCB, err_t lwipErr);
static err_t LwIPHandleDataReceived(void *arg, struct tcp_pcb *tpcb, struct pbuf *p, err_t err);
static err_t LwIPHandleDataSent(void *arg, struct tcp_pcb *tpcb, u16_t len);
static void LwIPHandleError(void *arg, err_t err);
#endif // WEAVE_SYSTEM_CONFIG_USE_LWIP
#if WEAVE_SYSTEM_CONFIG_USE_SOCKETS
INET_ERROR GetSocket(IPAddressType addrType);
SocketEvents PrepareIO(void);
void HandlePendingIO(void);
void ReceiveData(void);
void HandleIncomingConnection(void);
INET_ERROR BindSrcAddrFromIntf(IPAddressType addrType, InterfaceId intf);
#endif // WEAVE_SYSTEM_CONFIG_USE_SOCKETS
};
#if INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS && INET_CONFIG_OVERRIDE_SYSTEM_TCP_USER_TIMEOUT
inline uint16_t TCPEndPoint::MaxTCPSendQueuePolls(void)
{
// If the UserTimeout is configured less than or equal to the poll interval,
// return 1 to poll at least once instead of returning zero and timing out
// immediately.
return (mUserTimeoutMillis > mTCPSendQueuePollPeriodMillis) ?
(mUserTimeoutMillis / mTCPSendQueuePollPeriodMillis) : 1;
}
#endif // INET_CONFIG_ENABLE_TCP_SEND_IDLE_CALLBACKS && INET_CONFIG_OVERRIDE_SYSTEM_TCP_USER_TIMEOUT
inline bool TCPEndPoint::IsConnected(void) const
{
return IsConnected(State);
}
inline uint16_t TCPEndPoint::LogId(void)
{
return static_cast<uint16_t>(reinterpret_cast<intptr_t>(this));
}
inline void TCPEndPoint::MarkActive(void)
{
#if INET_TCP_IDLE_CHECK_INTERVAL > 0
mRemainingIdleTime = mIdleTimeout;
#endif // INET_TCP_IDLE_CHECK_INTERVAL > 0
}
} // namespace Inet
} // namespace nl
#endif // !defined(TCPENDPOINT_H)
| openweave/openweave-core | src/inet/TCPEndPoint.h | C | apache-2.0 | 30,057 |
/*
* Copyright 2010-2011 Nabeel Mukhtar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.code.linkedinapi.schema.impl;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import com.google.code.linkedinapi.schema.Adapter1;
import com.google.code.linkedinapi.schema.DateOfBirth;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"year",
"month",
"day"
})
@XmlRootElement(name = "date-of-birth")
public class DateOfBirthImpl
implements Serializable, DateOfBirth
{
private final static long serialVersionUID = 2461660169443089969L;
@XmlElement(required = true, type = String.class)
@XmlJavaTypeAdapter(Adapter1 .class)
protected Long year;
@XmlElement(required = true, type = String.class)
@XmlJavaTypeAdapter(Adapter1 .class)
protected Long month;
@XmlElement(required = true, type = String.class)
@XmlJavaTypeAdapter(Adapter1 .class)
protected Long day;
public Long getYear() {
return year;
}
public void setYear(Long value) {
this.year = value;
}
public Long getMonth() {
return month;
}
public void setMonth(Long value) {
this.month = value;
}
public Long getDay() {
return day;
}
public void setDay(Long value) {
this.day = value;
}
}
| shisoft/LinkedIn-J | core/src/main/java/com/google/code/linkedinapi/schema/impl/DateOfBirthImpl.java | Java | apache-2.0 | 2,214 |
package excelcom.api;
import com.sun.jna.platform.win32.COM.COMException;
import com.sun.jna.platform.win32.COM.COMLateBindingObject;
import com.sun.jna.platform.win32.COM.IDispatch;
import com.sun.jna.platform.win32.OaIdl;
import com.sun.jna.platform.win32.OleAuto;
import com.sun.jna.platform.win32.Variant;
import static com.sun.jna.platform.win32.Variant.VT_NULL;
/**
* Represents a Range
*/
class Range extends COMLateBindingObject {
Range(IDispatch iDispatch) throws COMException {
super(iDispatch);
}
Variant.VARIANT getValue() {
return this.invoke("Value");
}
int getRow() {
return this.invoke("Row").intValue();
}
int getColumn() {
return this.invoke("Column").intValue();
}
void setInteriorColor(ExcelColor color) {
new CellPane(this.getAutomationProperty("Interior", this)).setColorIndex(color);
}
ExcelColor getInteriorColor() {
return ExcelColor.getColor(new CellPane(this.getAutomationProperty("Interior", this)).getColorIndex());
}
void setFontColor(ExcelColor color) {
new CellPane(this.getAutomationProperty("Font", this)).setColorIndex(color);
}
ExcelColor getFontColor() {
return ExcelColor.getColor(new CellPane(this.getAutomationProperty("Font", this)).getColorIndex());
}
void setBorderColor(ExcelColor color) {
new CellPane(this.getAutomationProperty("Borders", this)).setColorIndex(color);
}
ExcelColor getBorderColor() {
return ExcelColor.getColor(new CellPane(this.getAutomationProperty("Borders", this)).getColorIndex());
}
void setComment(String comment) {
this.invokeNoReply("ClearComments");
this.invoke("AddComment", new Variant.VARIANT(comment));
}
String getComment() {
return new COMLateBindingObject(this.getAutomationProperty("Comment")) {
private String getText() {
return this.invoke("Text").stringValue();
}
}.getText();
}
FindResult find(Variant.VARIANT[] options) {
IDispatch find = (IDispatch) this.invoke("Find", options).getValue();
if (find == null) {
return null;
}
return new FindResult(find, this);
}
FindResult findNext(FindResult previous) {
return new FindResult(this.getAutomationProperty("FindNext", this, previous.toVariant()), this);
}
/**
* Can be Interior, Border or Font. Has methods for setting e.g. Color.
*/
private class CellPane extends COMLateBindingObject {
CellPane(IDispatch iDispatch) {
super(iDispatch);
}
void setColorIndex(ExcelColor color) {
this.setProperty("ColorIndex", color.getIndex());
}
int getColorIndex() {
Variant.VARIANT colorIndex = this.invoke("ColorIndex");
if(colorIndex.getVarType().intValue() == VT_NULL) {
throw new NullPointerException("return type of colorindex is null. Maybe multiple colors in range?");
}
            return colorIndex.intValue();
}
}
}
| lprc/excelcom | src/main/java/excelcom/api/Range.java | Java | apache-2.0 | 3,146 |
$packageName = 'kvrt'
$url = 'http://devbuilds.kaspersky-labs.com/devbuilds/KVRT/latest/full/KVRT.exe'
$checksum = '71ea93798110a5e6551208d4dccee5dc84204615'
$checksumType = 'sha1'
$toolsPath = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)"
$installFile = Join-Path $toolsPath "kvrt.exe"
try {
Get-ChocolateyWebFile -PackageName "$packageName" `
-FileFullPath "$installFile" `
-Url "$url" `
-Checksum "$checksum" `
-ChecksumType "$checksumType"
# create empty sidecars so shimgen only creates one shim
Set-Content -Path ("$installFile.ignore") `
-Value $null
# create batch to start executable
$batchStart = Join-Path $toolsPath "kvrt.bat"
'start %~dp0\kvrt.exe -accepteula' | Out-File -FilePath $batchStart -Encoding ASCII
Install-BinFile "kvrt" "$batchStart"
} catch {
throw $_.Exception
} | dtgm/chocolatey-packages | automatic/_output/kvrt/2016.03.31.0039/tools/chocolateyInstall.ps1 | PowerShell | apache-2.0 | 931 |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.hbase;
import static com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule.COLUMN_FAMILY;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.junit.Assert;
import org.junit.Test;
@SuppressWarnings("deprecation")
public class TestCreateTable extends AbstractTestCreateTable {
@Override
protected void createTable(TableName tableName) throws IOException {
getConnection().getAdmin().createTable(createDescriptor(tableName));
}
@Override
protected void createTable(TableName tableName, byte[] start, byte[] end, int splitCount)
throws IOException {
getConnection().getAdmin().createTable(createDescriptor(tableName), start, end, splitCount);
}
@Override
protected void createTable(TableName tableName, byte[][] ranges) throws IOException {
getConnection().getAdmin().createTable(createDescriptor(tableName), ranges);
}
private HTableDescriptor createDescriptor(TableName tableName) {
return new HTableDescriptor(tableName).addFamily(new HColumnDescriptor(COLUMN_FAMILY));
}
@Override
protected List<HRegionLocation> getRegions(TableName tableName) throws Exception {
return getConnection().getRegionLocator(tableName).getAllRegionLocations();
}
@Test
public void testGetRegions() throws Exception {
TableName tableName = sharedTestEnv.newTestTableName();
getConnection().getAdmin().createTable(createDescriptor(tableName));
List<RegionInfo> regions = getConnection().getAdmin().getRegions(tableName);
Assert.assertEquals(1, regions.size());
}
@Override
protected boolean asyncGetRegions(TableName tableName) throws Exception {
    return getConnection().getAdmin().getRegions(tableName).size() == 1;
}
@Override
protected boolean isTableEnabled(TableName tableName) throws Exception {
return getConnection().getAdmin().isTableEnabled(tableName);
}
@Override
protected void disableTable(TableName tableName) throws Exception {
getConnection().getAdmin().disableTable(tableName);
}
@Override
protected void adminDeleteTable(TableName tableName) throws Exception {
getConnection().getAdmin().deleteTable(tableName);
}
@Override
protected boolean tableExists(TableName tableName) throws Exception {
return getConnection().getAdmin().tableExists(tableName);
}
}
| sduskis/cloud-bigtable-client | bigtable-hbase-2.x-parent/bigtable-hbase-2.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestCreateTable.java | Java | apache-2.0 | 3,209 |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" />
<link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/>
<link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/>
<!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]-->
<style type="text/css" media="all">
@import url('../../../../../style.css');
@import url('../../../../../tree.css');
</style>
<script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script>
<script src="../../../../../package-nodes-tree.js" type="text/javascript"></script>
<script src="../../../../../clover-tree.js" type="text/javascript"></script>
<script src="../../../../../clover.js" type="text/javascript"></script>
<script src="../../../../../clover-descriptions.js" type="text/javascript"></script>
<script src="../../../../../cloud.js" type="text/javascript"></script>
<title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title>
</head>
<body>
<div id="page">
<header id="header" role="banner">
<nav class="aui-header aui-dropdown2-trigger-group" role="navigation">
<div class="aui-header-inner">
<div class="aui-header-primary">
<h1 id="logo" class="aui-header-logo aui-header-logo-clover">
<a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a>
</h1>
</div>
<div class="aui-header-secondary">
<ul class="aui-nav">
<li id="system-help-menu">
<a class="aui-nav-link" title="Open online documentation" target="_blank"
href="http://openclover.org/documentation">
<span class="aui-icon aui-icon-small aui-iconfont-help"> Help</span>
</a>
</li>
</ul>
</div>
</div>
</nav>
</header>
<div class="aui-page-panel">
<div class="aui-page-panel-inner">
<div class="aui-page-panel-nav aui-page-panel-nav-clover">
<div class="aui-page-header-inner" style="margin-bottom: 20px;">
<div class="aui-page-header-image">
<a href="http://cardatechnologies.com" target="_top">
<div class="aui-avatar aui-avatar-large aui-avatar-project">
<div class="aui-avatar-inner">
<img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/>
</div>
</div>
</a>
</div>
<div class="aui-page-header-main" >
<h1>
<a href="http://cardatechnologies.com" target="_top">
ABA Route Transit Number Validator 1.0.1-SNAPSHOT
</a>
</h1>
</div>
</div>
<nav class="aui-navgroup aui-navgroup-vertical">
<div class="aui-navgroup-inner">
<ul class="aui-nav">
<li class="">
<a href="../../../../../dashboard.html">Project overview</a>
</li>
</ul>
<div class="aui-nav-heading packages-nav-heading">
<strong>Packages</strong>
</div>
<div class="aui-nav project-packages">
<form method="get" action="#" class="aui package-filter-container">
<input type="text" autocomplete="off" class="package-filter text"
placeholder="Type to filter packages..." name="package-filter" id="package-filter"
title="Start typing package name (or part of the name) to search through the tree. Use arrow keys and the Enter key to navigate."/>
</form>
<p class="package-filter-no-results-message hidden">
<small>No results found.</small>
</p>
<div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator">
<div class="packages-tree-container"></div>
<div class="clover-packages-lozenges"></div>
</div>
</div>
</div>
</nav> </div>
<section class="aui-page-panel-content">
<div class="aui-page-panel-content-clover">
<div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs">
<li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li>
<li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li>
<li><a href="test-Test_AbaRouteValidator_16.html">Class Test_AbaRouteValidator_16</a></li>
</ol></div>
<h1 class="aui-h2-clover">
Test testAbaNumberCheck_36904_bad
</h1>
<table class="aui">
<thead>
<tr>
<th>Test</th>
<th><label title="The test result. Either a Pass, Fail or Error.">Status</label></th>
<th><label title="When the test execution was started">Start time</label></th>
<th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th>
<th><label title="A failure or error message if the test is not successful.">Message</label></th>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_16.html?line=45760#src-45760" >testAbaNumberCheck_36904_bad</a>
</td>
<td>
<span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span>
</td>
<td>
7 Aug 12:45:47
</td>
<td>
0.0 </td>
<td>
<div></div>
<div class="errorMessage"></div>
</td>
</tr>
</tbody>
</table>
<div> </div>
<table class="aui aui-table-sortable">
<thead>
<tr>
<th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th>
<th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_36904_bad</th>
</tr>
</thead>
<tbody>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/exceptions/AbaRouteValidationException.html?id=23167#AbaRouteValidationException" title="AbaRouteValidationException" name="sl-43">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/ErrorCodes.html?id=23167#ErrorCodes" title="ErrorCodes" name="sl-42">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=23167#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a>
</td>
<td>
<span class="sortValue">0.29411766</span>29.4%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="29.4% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:29.4%"></div></div></div> </td>
</tr>
</tbody>
</table>
</div> <!-- class="aui-page-panel-content-clover" -->
<footer id="footer" role="contentinfo">
<section class="footer-body">
<ul>
<li>
Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1
on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT.
</li>
</ul>
<ul>
<li>OpenClover is free and open-source software. </li>
</ul>
</section>
</footer> </section> <!-- class="aui-page-panel-content" -->
</div> <!-- class="aui-page-panel-inner" -->
</div> <!-- class="aui-page-panel" -->
</div> <!-- id="page" -->
</body>
</html> | dcarda/aba.route.validator | target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_16_testAbaNumberCheck_36904_bad_hvj.html | HTML | apache-2.0 | 10,987 |
Administrator Guide {#admin_guide}
====================
# Installation
See the [Installation guide](installation.md).
# Server Configuration
See the [Server Configuration guide](server_configuration.md).
# General %Service Configuration
Various aspects of how a service is presented to a user, such as the display names of its parameters and their default values, can be adjusted by changing the service's configuration details on the Grassroots server.
These are explained in the [Service Configuration guide](service_configuration.md).
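As a purely illustrative sketch, a configuration override for a single parameter might look something like the fragment below. The key names used here are hypothetical placeholders rather than the actual Grassroots schema; the authoritative key names and file locations are given in the [Service Configuration guide](service_configuration.md).
```json
{
  "parameters": [
    {
      "name": "max_hits",
      "display_name": "Maximum number of hits",
      "default_value": 20
    }
  ]
}
```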
# Services
As well as these general configuration options, each service can have a number of service-specific configuration details that can be set by a server administrator.
The services with extra configuration options are listed [below](#specific_services_configuration).
# Linking %Services
[Linked Services](linked_services.md) allow the output of running one %Service to fully or partially fill in the parameters of one or more other Services, ready for a user to run. The service-specific details for linking Services are listed below.
# Specific Services Configuration {#specific_services_configuration}
Links to the configuration details for the following services are listed below:
* [BLAST services](blast_service.md)
* [SamTools service](samtools_service.md)
| TGAC/grassroots-api | docs/admin_guide.md | Markdown | apache-2.0 | 1,349 |
// Copyright 2004, 2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.binding;
import org.apache.hivemind.Location;
import org.apache.tapestry.BindingException;
import org.apache.tapestry.IActionListener;
import org.apache.tapestry.IComponent;
import org.apache.tapestry.IRequestCycle;
import org.apache.tapestry.PageRedirectException;
import org.apache.tapestry.RedirectException;
import org.apache.tapestry.coerce.ValueConverter;
import org.apache.tapestry.listener.ListenerMap;
/**
* Test for {@link org.apache.tapestry.binding.ListenerMethodBinding}.
*
* @author Howard M. Lewis Ship
* @since 4.0
*/
public class TestListenerMethodBinding extends BindingTestCase
{
public void testInvokeListener()
{
IComponent component = newComponent();
ListenerMap map = newListenerMap();
IActionListener listener = newListener();
Location l = newLocation();
IComponent sourceComponent = newComponent();
IRequestCycle cycle = newCycle();
ValueConverter vc = newValueConverter();
trainGetListener(component, map, listener);
listener.actionTriggered(sourceComponent, cycle);
replayControls();
ListenerMethodBinding b = new ListenerMethodBinding("param", vc, l, component, "foo");
assertSame(b, b.getObject());
assertSame(component, b.getComponent());
b.actionTriggered(sourceComponent, cycle);
verifyControls();
}
public void testToString()
{
IComponent component = newComponent();
Location l = newLocation();
ValueConverter vc = newValueConverter();
trainGetExtendedId(component, "Fred/barney");
replayControls();
ListenerMethodBinding b = new ListenerMethodBinding("param", vc, l, component, "foo");
String toString = b.toString();
String description = toString.substring(toString.indexOf('[') + 1, toString.length() - 1);
assertEquals(
"param, component=Fred/barney, methodName=foo, location=classpath:/org/apache/tapestry/binding/TestListenerMethodBinding, line 1",
description);
verifyControls();
}
public void testInvokeAndPageRedirect()
{
IComponent component = newComponent();
ListenerMap map = newListenerMap();
IActionListener listener = newListener();
Location l = newLocation();
ValueConverter vc = newValueConverter();
IComponent sourceComponent = newComponent();
IRequestCycle cycle = newCycle();
trainGetListener(component, map, listener);
listener.actionTriggered(sourceComponent, cycle);
Throwable t = new PageRedirectException("TargetPage");
setThrowable(listener, t);
replayControls();
ListenerMethodBinding b = new ListenerMethodBinding("param", vc, l, component, "foo");
try
{
b.actionTriggered(sourceComponent, cycle);
unreachable();
}
catch (PageRedirectException ex)
{
assertSame(t, ex);
}
verifyControls();
}
public void testInvokeAndRedirect()
{
IComponent component = newComponent();
ListenerMap map = newListenerMap();
IActionListener listener = newListener();
Location l = newLocation();
ValueConverter vc = newValueConverter();
IComponent sourceComponent = newComponent();
IRequestCycle cycle = newCycle();
trainGetListener(component, map, listener);
listener.actionTriggered(sourceComponent, cycle);
Throwable t = new RedirectException("http://foo.bar");
setThrowable(listener, t);
replayControls();
ListenerMethodBinding b = new ListenerMethodBinding("param", vc, l, component, "foo");
try
{
b.actionTriggered(sourceComponent, cycle);
unreachable();
}
catch (RedirectException ex)
{
assertSame(t, ex);
}
verifyControls();
}
public void testInvokeListenerFailure()
{
IComponent component = newComponent();
ListenerMap map = newListenerMap();
IActionListener listener = newListener();
Location l = newLocation();
ValueConverter vc = newValueConverter();
IComponent sourceComponent = newComponent();
IRequestCycle cycle = newCycle();
trainGetListener(component, map, listener);
listener.actionTriggered(sourceComponent, cycle);
Throwable t = new RuntimeException("Failure.");
setThrowable(listener, t);
trainGetExtendedId(component, "Fred/barney");
replayControls();
ListenerMethodBinding b = new ListenerMethodBinding("param", vc, l, component, "foo");
try
{
b.actionTriggered(sourceComponent, cycle);
unreachable();
}
catch (BindingException ex)
{
assertEquals(
"Exception invoking listener method foo of component Fred/barney: Failure.",
ex.getMessage());
assertSame(component, ex.getComponent());
assertSame(l, ex.getLocation());
assertSame(b, ex.getBinding());
}
verifyControls();
}
private void trainGetListener(IComponent component, ListenerMap lm, IActionListener listener)
{
trainGetListeners(component, lm);
trainGetListener(lm, "foo", listener);
}
protected IRequestCycle newCycle()
{
return (IRequestCycle) newMock(IRequestCycle.class);
}
private void trainGetListener(ListenerMap map, String methodName, IActionListener listener)
{
map.getListener(methodName);
setReturnValue(map, listener);
}
private void trainGetListeners(IComponent component, ListenerMap lm)
{
component.getListeners();
        setReturnValue(component, lm);
}
private ListenerMap newListenerMap()
{
return (ListenerMap) newMock(ListenerMap.class);
}
private IActionListener newListener()
{
return (IActionListener) newMock(IActionListener.class);
}
} | apache/tapestry4 | framework/src/test/org/apache/tapestry/binding/TestListenerMethodBinding.java | Java | apache-2.0 | 6,776 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.lops.compile;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.hops.AggBinaryOp;
import org.apache.sysml.hops.BinaryOp;
import org.apache.sysml.hops.Hop.FileFormatTypes;
import org.apache.sysml.hops.HopsException;
import org.apache.sysml.hops.OptimizerUtils;
import org.apache.sysml.lops.AppendM;
import org.apache.sysml.lops.BinaryM;
import org.apache.sysml.lops.CombineBinary;
import org.apache.sysml.lops.Data;
import org.apache.sysml.lops.Data.OperationTypes;
import org.apache.sysml.lops.FunctionCallCP;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.lops.Lop.Type;
import org.apache.sysml.lops.LopProperties.ExecLocation;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.lops.LopsException;
import org.apache.sysml.lops.MapMult;
import org.apache.sysml.lops.OutputParameters;
import org.apache.sysml.lops.OutputParameters.Format;
import org.apache.sysml.lops.PMMJ;
import org.apache.sysml.lops.ParameterizedBuiltin;
import org.apache.sysml.lops.PickByCount;
import org.apache.sysml.lops.SortKeys;
import org.apache.sysml.lops.Unary;
import org.apache.sysml.parser.DataExpression;
import org.apache.sysml.parser.Expression;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.ParameterizedBuiltinFunctionExpression;
import org.apache.sysml.parser.StatementBlock;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.parfor.ProgramConverter;
import org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence;
import org.apache.sysml.runtime.instructions.CPInstructionParser;
import org.apache.sysml.runtime.instructions.Instruction;
import org.apache.sysml.runtime.instructions.Instruction.INSTRUCTION_TYPE;
import org.apache.sysml.runtime.instructions.InstructionParser;
import org.apache.sysml.runtime.instructions.MRJobInstruction;
import org.apache.sysml.runtime.instructions.SPInstructionParser;
import org.apache.sysml.runtime.instructions.cp.CPInstruction;
import org.apache.sysml.runtime.instructions.cp.CPInstruction.CPINSTRUCTION_TYPE;
import org.apache.sysml.runtime.instructions.cp.VariableCPInstruction;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.runtime.matrix.sort.PickFromCompactInputFormat;
/**
*
 * Class to maintain a DAG of lops and compile it into 
 * runtime instructions, including piggybacking into jobs.
 * 
 * @param <N> the class parameter has no effect and is 
 * only kept for documentation purposes.
*/
public class Dag<N extends Lop>
{
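	// A minimal usage sketch, assuming a list of lops `lops`, a StatementBlock `sb`
	// and a DMLConfig `conf` are available (these variable names are illustrative only):
	//
	//   Dag<Lop> dag = new Dag<Lop>();
	//   for ( Lop l : lops )
	//       dag.addNode(l);
	//   ArrayList<Instruction> insts = dag.getJobs(sb, conf);
	//
	// addNode() registers each lop, and getJobs() topologically sorts the DAG and
	// greedily groups (piggybacks) the lops into runtime instructions and MR jobs.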
private static final Log LOG = LogFactory.getLog(Dag.class.getName());
private static final int CHILD_BREAKS_ALIGNMENT = 2;
private static final int CHILD_DOES_NOT_BREAK_ALIGNMENT = 1;
private static final int MRCHILD_NOT_FOUND = 0;
private static final int MR_CHILD_FOUND_BREAKS_ALIGNMENT = 4;
private static final int MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT = 5;
private static IDSequence job_id = null;
private static IDSequence var_index = null;
private int total_reducers = -1;
private String scratch = "";
private String scratchFilePath = null;
private double gmrMapperFootprint = 0;
static {
job_id = new IDSequence();
var_index = new IDSequence();
}
	// list of all nodes in the dag
private ArrayList<Lop> nodes = null;
/*
	 * Hashmap that translates the nodes in the DAG to a sequence of numbers
* key: Lop ID
* value: Sequence Number (0 ... |DAG|)
*
* This map is primarily used in performing DFS on the DAG, and subsequently in performing ancestor-descendant checks.
*/
private HashMap<Long, Integer> IDMap = null;
private static class NodeOutput {
String fileName;
String varName;
OutputInfo outInfo;
ArrayList<Instruction> preInstructions; // instructions added before a MR instruction
ArrayList<Instruction> postInstructions; // instructions added after a MR instruction
ArrayList<Instruction> lastInstructions;
NodeOutput() {
fileName = null;
varName = null;
outInfo = null;
preInstructions = new ArrayList<Instruction>();
postInstructions = new ArrayList<Instruction>();
lastInstructions = new ArrayList<Instruction>();
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public String getVarName() {
return varName;
}
public void setVarName(String varName) {
this.varName = varName;
}
public OutputInfo getOutInfo() {
return outInfo;
}
public void setOutInfo(OutputInfo outInfo) {
this.outInfo = outInfo;
}
public ArrayList<Instruction> getPreInstructions() {
return preInstructions;
}
public void addPreInstruction(Instruction inst) {
preInstructions.add(inst);
}
public ArrayList<Instruction> getPostInstructions() {
return postInstructions;
}
public void addPostInstruction(Instruction inst) {
postInstructions.add(inst);
}
public ArrayList<Instruction> getLastInstructions() {
return lastInstructions;
}
public void addLastInstruction(Instruction inst) {
lastInstructions.add(inst);
}
}
public Dag()
{
//allocate internal data structures
nodes = new ArrayList<Lop>();
IDMap = new HashMap<Long, Integer>();
// get number of reducers from dml config
total_reducers = ConfigurationManager.getNumReducers();
}
///////
// filename handling
private String getFilePath() {
if ( scratchFilePath == null ) {
scratchFilePath = scratch + Lop.FILE_SEPARATOR
+ Lop.PROCESS_PREFIX + DMLScript.getUUID()
+ Lop.FILE_SEPARATOR + Lop.FILE_SEPARATOR
+ ProgramConverter.CP_ROOT_THREAD_ID + Lop.FILE_SEPARATOR;
}
return scratchFilePath;
}
public static String getNextUniqueFilenameSuffix() {
return "temp" + job_id.getNextID();
}
public String getNextUniqueFilename() {
return getFilePath() + getNextUniqueFilenameSuffix();
}
public static String getNextUniqueVarname(DataType dt) {
return (dt==DataType.MATRIX ? Lop.MATRIX_VAR_NAME_PREFIX :
Lop.FRAME_VAR_NAME_PREFIX) + var_index.getNextID();
}
///////
// Dag modifications
/**
* Method to add a node to the DAG.
*
* @param node low-level operator
	 * @return true if the node was not already present, false otherwise
*/
public boolean addNode(Lop node) {
if (nodes.contains(node))
return false;
nodes.add(node);
return true;
}
/**
* Method to compile a dag generically
*
* @param sb statement block
* @param config dml configuration
* @return list of instructions
* @throws LopsException if LopsException occurs
* @throws IOException if IOException occurs
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
public ArrayList<Instruction> getJobs(StatementBlock sb, DMLConfig config)
throws LopsException, IOException, DMLRuntimeException {
if (config != null)
{
total_reducers = config.getIntValue(DMLConfig.NUM_REDUCERS);
scratch = config.getTextValue(DMLConfig.SCRATCH_SPACE) + "/";
}
// hold all nodes in a vector (needed for ordering)
ArrayList<Lop> node_v = new ArrayList<Lop>();
node_v.addAll(nodes);
/*
* Sort the nodes by topological order.
*
* 1) All nodes with level i appear prior to the nodes in level i+1.
* 2) All nodes within a level are ordered by their ID i.e., in the order
* they are created
*/
doTopologicalSort_strict_order(node_v);
// do greedy grouping of operations
ArrayList<Instruction> inst = doGreedyGrouping(sb, node_v);
return inst;
}
private static void deleteUpdatedTransientReadVariables(StatementBlock sb, ArrayList<Lop> nodeV,
ArrayList<Instruction> inst) throws DMLRuntimeException {
if ( sb == null )
return;
if( LOG.isTraceEnabled() )
LOG.trace("In delete updated variables");
// CANDIDATE list of variables which could have been updated in this statement block
HashMap<String, Lop> labelNodeMapping = new HashMap<String, Lop>();
// ACTUAL list of variables whose value is updated, AND the old value of the variable
// is no longer accessible/used.
HashSet<String> updatedLabels = new HashSet<String>();
HashMap<String, Lop> updatedLabelsLineNum = new HashMap<String, Lop>();
// first capture all transient read variables
for ( Lop node : nodeV ) {
if (node.getExecLocation() == ExecLocation.Data
&& ((Data) node).isTransient()
&& ((Data) node).getOperationType() == OperationTypes.READ
&& ((Data) node).getDataType() == DataType.MATRIX) {
// "node" is considered as updated ONLY IF the old value is not used any more
// So, make sure that this READ node does not feed into any (transient/persistent) WRITE
boolean hasWriteParent=false;
for(Lop p : node.getOutputs()) {
if(p.getExecLocation() == ExecLocation.Data) {
// if the "p" is of type Data, then it has to be a WRITE
hasWriteParent = true;
break;
}
}
if ( !hasWriteParent ) {
// node has no parent of type WRITE, so this is a CANDIDATE variable
// add it to labelNodeMapping so that it is considered in further processing
labelNodeMapping.put(node.getOutputParameters().getLabel(), node);
}
}
}
// capture updated transient write variables
for ( Lop node : nodeV ) {
if (node.getExecLocation() == ExecLocation.Data
&& ((Data) node).isTransient()
&& ((Data) node).getOperationType() == OperationTypes.WRITE
&& ((Data) node).getDataType() == DataType.MATRIX
&& labelNodeMapping.containsKey(node.getOutputParameters().getLabel()) // check to make sure corresponding (i.e., with the same label/name) transient read is present
&& !labelNodeMapping.containsValue(node.getInputs().get(0)) // check to avoid cases where transient read feeds into a transient write
) {
updatedLabels.add(node.getOutputParameters().getLabel());
updatedLabelsLineNum.put(node.getOutputParameters().getLabel(), node);
}
}
// generate RM instructions
Instruction rm_inst = null;
for ( String label : updatedLabels )
{
rm_inst = VariableCPInstruction.prepareRemoveInstruction(label);
rm_inst.setLocation(updatedLabelsLineNum.get(label));
if( LOG.isTraceEnabled() )
LOG.trace(rm_inst.toString());
inst.add(rm_inst);
}
}
private static void generateRemoveInstructions(StatementBlock sb, ArrayList<Instruction> deleteInst)
throws DMLRuntimeException {
if ( sb == null )
return;
if( LOG.isTraceEnabled() )
LOG.trace("In generateRemoveInstructions()");
Instruction inst = null;
// RULE 1: if in IN and not in OUT, then there should be an rmvar or rmfilevar inst
// (currently required for specific cases of external functions)
for (String varName : sb.liveIn().getVariableNames()) {
if (!sb.liveOut().containsVariable(varName)) {
// DataType dt = in.getVariable(varName).getDataType();
// if( !(dt==DataType.MATRIX || dt==DataType.UNKNOWN) )
// continue; //skip rm instructions for non-matrix objects
inst = VariableCPInstruction.prepareRemoveInstruction(varName);
inst.setLocation(sb.getEndLine(), sb.getEndLine(), -1, -1);
deleteInst.add(inst);
if( LOG.isTraceEnabled() )
LOG.trace(" Adding " + inst.toString());
}
}
// RULE 2: if in KILL and not in IN and not in OUT, then there should be an rmvar or rmfilevar inst
// (currently required for specific cases of nested loops)
// i.e., local variables which are created within the block, and used entirely within the block
/*for (String varName : sb.getKill().getVariableNames()) {
if ((!sb.liveIn().containsVariable(varName))
&& (!sb.liveOut().containsVariable(varName))) {
// DataType dt =
// sb.getKill().getVariable(varName).getDataType();
// if( !(dt==DataType.MATRIX || dt==DataType.UNKNOWN) )
// continue; //skip rm instructions for non-matrix objects
inst = createCleanupInstruction(varName);
deleteInst.add(inst);
if (DMLScript.DEBUG)
System.out.println("Adding instruction (r2) "
+ inst.toString());
}
}*/
}
private static ArrayList<ArrayList<Lop>> createNodeVectors(int size) {
ArrayList<ArrayList<Lop>> arr = new ArrayList<ArrayList<Lop>>();
// for each job type, we need to create a vector.
// additionally, create another vector for execNodes
for (int i = 0; i < size; i++) {
arr.add(new ArrayList<Lop>());
}
return arr;
}
private static void clearNodeVectors(ArrayList<ArrayList<Lop>> arr) {
for (ArrayList<Lop> tmp : arr) {
tmp.clear();
}
}
private static boolean isCompatible(ArrayList<Lop> nodes, JobType jt, int from, int to)
throws LopsException
{
int base = jt.getBase();
for ( Lop node : nodes ) {
if ((node.getCompatibleJobs() & base) == 0) {
if( LOG.isTraceEnabled() )
LOG.trace("Not compatible "+ node.toString());
return false;
}
}
return true;
}
/**
* Function that determines if the two input nodes can be executed together
* in at least one job.
*
* @param node1 low-level operator 1
* @param node2 low-level operator 2
* @return true if nodes can be executed together
*/
private static boolean isCompatible(Lop node1, Lop node2) {
return( (node1.getCompatibleJobs() & node2.getCompatibleJobs()) > 0);
}
/**
* Function that checks if the given node executes in the job specified by jt.
*
* @param node low-level operator
* @param jt job type
* @return true if node executes in the specified job type
*/
private static boolean isCompatible(Lop node, JobType jt) {
if ( jt == JobType.GMRCELL )
jt = JobType.GMR;
return ((node.getCompatibleJobs() & jt.getBase()) > 0);
}
/*
* Add node, and its relevant children to job-specific node vectors.
*/
private void addNodeByJobType(Lop node, ArrayList<ArrayList<Lop>> arr,
ArrayList<Lop> execNodes, boolean eliminate) throws LopsException {
if (!eliminate) {
// Check if this lop defines a MR job.
if ( node.definesMRJob() ) {
// find the corresponding JobType
JobType jt = JobType.findJobTypeFromLop(node);
if ( jt == null ) {
					throw new LopsException(node.printErrorLocation() + "No matching JobType is found for the lop type: " + node.getType() + " \n");
}
// Add "node" to corresponding job vector
if ( jt == JobType.GMR ) {
if ( node.hasNonBlockedInputs() ) {
int gmrcell_index = JobType.GMRCELL.getId();
arr.get(gmrcell_index).add(node);
int from = arr.get(gmrcell_index).size();
addChildren(node, arr.get(gmrcell_index), execNodes);
int to = arr.get(gmrcell_index).size();
if (!isCompatible(arr.get(gmrcell_index),JobType.GMR, from, to)) // check against GMR only, not against GMRCELL
throw new LopsException(node.printErrorLocation() + "Error during compatibility check \n");
}
else {
// if "node" (in this case, a group lop) has any inputs from RAND
// then add it to RAND job. Otherwise, create a GMR job
if (hasChildNode(node, arr.get(JobType.DATAGEN.getId()) )) {
arr.get(JobType.DATAGEN.getId()).add(node);
// we should NOT call 'addChildren' because appropriate
// child nodes would have got added to RAND job already
} else {
int gmr_index = JobType.GMR.getId();
arr.get(gmr_index).add(node);
int from = arr.get(gmr_index).size();
addChildren(node, arr.get(gmr_index), execNodes);
int to = arr.get(gmr_index).size();
if (!isCompatible(arr.get(gmr_index),JobType.GMR, from, to))
throw new LopsException(node.printErrorLocation() + "Error during compatibility check \n");
}
}
}
else {
int index = jt.getId();
arr.get(index).add(node);
int from = arr.get(index).size();
addChildren(node, arr.get(index), execNodes);
int to = arr.get(index).size();
// check if all added nodes are compatible with current job
if (!isCompatible(arr.get(index), jt, from, to)) {
					throw new LopsException(
							"Unexpected error in addNodeByJobType.");
}
}
return;
}
}
if ( eliminate ) {
// Eliminated lops are directly added to GMR queue.
// Note that eliminate flag is set only for 'group' lops
if ( node.hasNonBlockedInputs() )
arr.get(JobType.GMRCELL.getId()).add(node);
else
arr.get(JobType.GMR.getId()).add(node);
return;
}
/*
* If this lop does not define a job, check if it uses the output of any
* specialized job. i.e., if this lop has a child node in any of the
* job-specific vector, then add it to the vector. Note: This lop must
* be added to ONLY ONE of the job-specific vectors.
*/
int numAdded = 0;
for ( JobType j : JobType.values() ) {
if ( j.getId() > 0 && hasDirectChildNode(node, arr.get(j.getId()))) {
if (isCompatible(node, j)) {
arr.get(j.getId()).add(node);
numAdded += 1;
}
}
}
if (numAdded > 1) {
throw new LopsException("Unexpected error in addNodeByJobType(): A given lop can ONLY be added to a single job vector (numAdded = " + numAdded + ")." );
}
}
/*
* Remove the node from all job-specific node vectors. This method is
* invoked from removeNodesForNextIteration().
*/
private static void removeNodeByJobType(Lop node, ArrayList<ArrayList<Lop>> arr) {
for ( JobType jt : JobType.values())
if ( jt.getId() > 0 )
arr.get(jt.getId()).remove(node);
}
/**
* As some jobs only write one output, all operations in the mapper need to
* be redone and cannot be marked as finished.
*
* @param execNodes list of exec low-level operators
* @param jobNodes list of job low-level operators
* @param finishedNodes list of finished low-level operators
* @throws LopsException if LopsException occurs
*/
private void handleSingleOutputJobs(ArrayList<Lop> execNodes,
ArrayList<ArrayList<Lop>> jobNodes, ArrayList<Lop> finishedNodes)
throws LopsException {
/*
* If the input of a MMCJ/MMRJ job (must have executed in a Mapper) is used
* by multiple lops then we should mark it as not-finished.
*/
ArrayList<Lop> nodesWithUnfinishedOutputs = new ArrayList<Lop>();
int[] jobIndices = {JobType.MMCJ.getId()};
Lop.Type[] lopTypes = { Lop.Type.MMCJ};
// TODO: SortByValue should be treated similar to MMCJ, since it can
// only sort one file now
for ( int jobi=0; jobi < jobIndices.length; jobi++ ) {
int jindex = jobIndices[jobi];
if (!jobNodes.get(jindex).isEmpty()) {
ArrayList<Lop> vec = jobNodes.get(jindex);
// first find all nodes with more than one parent that is not finished.
for (int i = 0; i < vec.size(); i++) {
Lop node = vec.get(i);
if (node.getExecLocation() == ExecLocation.MapOrReduce
|| node.getExecLocation() == ExecLocation.Map) {
Lop MRparent = getParentNode(node, execNodes, ExecLocation.MapAndReduce);
if ( MRparent != null && MRparent.getType() == lopTypes[jobi]) {
int numParents = node.getOutputs().size();
if (numParents > 1) {
for (int j = 0; j < numParents; j++) {
if (!finishedNodes.contains(node.getOutputs()
.get(j)))
nodesWithUnfinishedOutputs.add(node);
}
}
}
}
}
// need to redo all nodes in nodesWithOutput as well as their children
for ( Lop node : vec ) {
if (node.getExecLocation() == ExecLocation.MapOrReduce
|| node.getExecLocation() == ExecLocation.Map) {
if (nodesWithUnfinishedOutputs.contains(node))
finishedNodes.remove(node);
if (hasParentNode(node, nodesWithUnfinishedOutputs))
finishedNodes.remove(node);
}
}
}
}
}
/**
	 * Method to check if a lop can be eliminated (only "aligner" lops such as group qualify)
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @return true if lop can be eliminated
*/
private static boolean canEliminateLop(Lop node, ArrayList<Lop> execNodes) {
		// this function can only eliminate "aligner" lops such as group
if (!node.isAligner())
return false;
// find the child whose execLoc = 'MapAndReduce'
int ret = getChildAlignment(node, execNodes, ExecLocation.MapAndReduce);
if (ret == CHILD_BREAKS_ALIGNMENT)
return false;
else if (ret == CHILD_DOES_NOT_BREAK_ALIGNMENT)
return true;
else if (ret == MRCHILD_NOT_FOUND)
return false;
else if (ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT)
return false;
else if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT)
return true;
else
throw new RuntimeException("Should not happen. \n");
}
/**
	 * Method to generate createvar instructions, which create new entries 
	 * in the symbol table. One instruction is generated for every LOP that is 
* 1) type Data and
* 2) persistent and
* 3) matrix and
* 4) read
*
* Transient reads needn't be considered here since the previous program
* block would already create appropriate entries in the symbol table.
*
* @param nodes_v list of nodes
* @param inst list of instructions
* @throws LopsException if LopsException occurs
* @throws IOException if IOException occurs
*/
private static void generateInstructionsForInputVariables(ArrayList<Lop> nodes_v, ArrayList<Instruction> inst) throws LopsException, IOException {
for(Lop n : nodes_v) {
if (n.getExecLocation() == ExecLocation.Data && !((Data) n).isTransient()
&& ((Data) n).getOperationType() == OperationTypes.READ
&& (n.getDataType() == DataType.MATRIX || n.getDataType() == DataType.FRAME) ) {
if ( !((Data)n).isLiteral() ) {
try {
String inst_string = n.getInstructions();
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(inst_string);
currInstr.setLocation(n);
inst.add(currInstr);
} catch (DMLRuntimeException e) {
throw new LopsException(n.printErrorLocation() + "error generating instructions from input variables in Dag -- \n", e);
}
}
}
}
}
/**
* Determine whether to send <code>node</code> to MR or to process it in the control program.
* It is sent to MR in the following cases:
*
* 1) if input lop gets processed in MR then <code>node</code> can be piggybacked
*
* 2) if the exectype of write lop itself is marked MR i.e., memory estimate > memory budget.
*
* @param node low-level operator
* @return true if lop should be sent to MR
*/
private static boolean sendWriteLopToMR(Lop node)
{
if ( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE )
return false;
Lop in = node.getInputs().get(0);
Format nodeFormat = node.getOutputParameters().getFormat();
		// Case of a transient read feeding into only one persistent binary-block write:
		// move the temporary file on HDFS to the required persistent location instead of copying it.
if ( in.getExecLocation() == ExecLocation.Data && in.getOutputs().size() == 1
&& !((Data)node).isTransient()
&& ((Data)in).isTransient()
&& ((Data)in).getOutputParameters().isBlocked()
&& node.getOutputParameters().isBlocked() ) {
return false;
}
//send write lop to MR if (1) it is marked with exec type MR (based on its memory estimate), or
//(2) if the input lop is in MR and the write format allows to pack it into the same job (this does
//not apply to csv write because MR csvwrite is a separate MR job type)
return (node.getExecType() == ExecType.MR
|| (in.getExecType() == ExecType.MR && nodeFormat != Format.CSV));
}
/**
* Computes the memory footprint required to execute <code>node</code> in the mapper.
* It is used only for those nodes that use inputs from distributed cache. The returned
* value is utilized in limiting the number of instructions piggybacked onto a single GMR mapper.
*
* @param node low-level operator
* @return memory footprint
*/
private static double computeFootprintInMapper(Lop node) {
// Memory limits must be checked only for nodes that use distributed cache
if ( ! node.usesDistributedCache() )
// default behavior
return 0.0;
OutputParameters in1dims = node.getInputs().get(0).getOutputParameters();
OutputParameters in2dims = node.getInputs().get(1).getOutputParameters();
double footprint = 0;
if ( node instanceof MapMult ) {
int dcInputIndex = node.distributedCacheInputIndex()[0];
footprint = AggBinaryOp.getMapmmMemEstimate(
in1dims.getNumRows(), in1dims.getNumCols(), in1dims.getRowsInBlock(), in1dims.getColsInBlock(), in1dims.getNnz(),
in2dims.getNumRows(), in2dims.getNumCols(), in2dims.getRowsInBlock(), in2dims.getColsInBlock(), in2dims.getNnz(),
dcInputIndex, false);
}
else if ( node instanceof PMMJ ) {
int dcInputIndex = node.distributedCacheInputIndex()[0];
footprint = AggBinaryOp.getMapmmMemEstimate(
in1dims.getNumRows(), 1, in1dims.getRowsInBlock(), in1dims.getColsInBlock(), in1dims.getNnz(),
in2dims.getNumRows(), in2dims.getNumCols(), in2dims.getRowsInBlock(), in2dims.getColsInBlock(), in2dims.getNnz(),
dcInputIndex, true);
}
else if ( node instanceof AppendM ) {
footprint = BinaryOp.footprintInMapper(
in1dims.getNumRows(), in1dims.getNumCols(),
in2dims.getNumRows(), in2dims.getNumCols(),
in1dims.getRowsInBlock(), in1dims.getColsInBlock());
}
else if ( node instanceof BinaryM ) {
footprint = BinaryOp.footprintInMapper(
in1dims.getNumRows(), in1dims.getNumCols(),
in2dims.getNumRows(), in2dims.getNumCols(),
in1dims.getRowsInBlock(), in1dims.getColsInBlock());
}
else {
// default behavior
return 0.0;
}
return footprint;
}
/**
* Determines if <code>node</code> can be executed in current round of MR jobs or if it needs to be queued for later rounds.
* If the total estimated footprint (<code>node</code> and previously added nodes in GMR) is less than available memory on
* the mappers then <code>node</code> can be executed in current round, and <code>true</code> is returned. Otherwise,
* <code>node</code> must be queued and <code>false</code> is returned.
*
* @param node low-level operator
* @param footprintInMapper mapper footprint
* @return true if node can be executed in current round of jobs
*/
private static boolean checkMemoryLimits(Lop node, double footprintInMapper) {
boolean addNode = true;
// Memory limits must be checked only for nodes that use distributed cache
if ( ! node.usesDistributedCache() )
// default behavior
return addNode;
double memBudget = Math.min(AggBinaryOp.MAPMULT_MEM_MULTIPLIER, BinaryOp.APPEND_MEM_MULTIPLIER) * OptimizerUtils.getRemoteMemBudgetMap(true);
if ( footprintInMapper <= memBudget )
return addNode;
else
return !addNode;
}
/**
* Method to group a vector of sorted lops.
*
* @param sb statement block
* @param node_v list of low-level operators
* @return list of instructions
* @throws LopsException if LopsException occurs
* @throws IOException if IOException occurs
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private ArrayList<Instruction> doGreedyGrouping(StatementBlock sb, ArrayList<Lop> node_v)
throws LopsException, IOException, DMLRuntimeException
{
if( LOG.isTraceEnabled() )
LOG.trace("Grouping DAG ============");
// nodes to be executed in current iteration
ArrayList<Lop> execNodes = new ArrayList<Lop>();
// nodes that have already been processed
ArrayList<Lop> finishedNodes = new ArrayList<Lop>();
// nodes that are queued for the following iteration
ArrayList<Lop> queuedNodes = new ArrayList<Lop>();
ArrayList<ArrayList<Lop>> jobNodes = createNodeVectors(JobType.getNumJobTypes());
// list of instructions
ArrayList<Instruction> inst = new ArrayList<Instruction>();
//ArrayList<Instruction> preWriteDeleteInst = new ArrayList<Instruction>();
ArrayList<Instruction> writeInst = new ArrayList<Instruction>();
ArrayList<Instruction> deleteInst = new ArrayList<Instruction>();
ArrayList<Instruction> endOfBlockInst = new ArrayList<Instruction>();
// remove files for transient reads that are updated.
deleteUpdatedTransientReadVariables(sb, node_v, writeInst);
generateRemoveInstructions(sb, endOfBlockInst);
generateInstructionsForInputVariables(node_v, inst);
boolean done = false;
String indent = " ";
while (!done) {
if( LOG.isTraceEnabled() )
LOG.trace("Grouping nodes in DAG");
execNodes.clear();
queuedNodes.clear();
clearNodeVectors(jobNodes);
gmrMapperFootprint=0;
for ( Lop node : node_v ) {
// finished nodes don't need to be processed
if (finishedNodes.contains(node))
continue;
if( LOG.isTraceEnabled() )
LOG.trace("Processing node (" + node.getID()
+ ") " + node.toString() + " exec nodes size is " + execNodes.size());
//if node defines MR job, make sure it is compatible with all
//its children nodes in execNodes
if(node.definesMRJob() && !compatibleWithChildrenInExecNodes(execNodes, node))
{
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing node "
+ node.toString() + " (code 1)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
continue;
}
// if child is queued, this node will be processed in the later
// iteration
if (hasChildNode(node,queuedNodes)) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing node "
+ node.toString() + " (code 2)");
queuedNodes.add(node);
// if node has more than two inputs,
					// remove children that will be needed in future iterations
// may also have to remove parent nodes of these children
removeNodesForNextIteration(node, finishedNodes, execNodes,
queuedNodes, jobNodes);
continue;
}
// if inputs come from different jobs, then queue
if ( node.getInputs().size() >= 2) {
int jobid = Integer.MIN_VALUE;
boolean queueit = false;
for(int idx=0; idx < node.getInputs().size(); idx++) {
int input_jobid = jobType(node.getInputs().get(idx), jobNodes);
if (input_jobid != -1) {
if ( jobid == Integer.MIN_VALUE )
jobid = input_jobid;
else if ( jobid != input_jobid ) {
queueit = true;
break;
}
}
}
if ( queueit ) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing node " + node.toString() + " (code 3)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
continue;
}
}
// See if this lop can be eliminated
// This check is for "aligner" lops (e.g., group)
boolean eliminate = false;
eliminate = canEliminateLop(node, execNodes);
if (eliminate) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, eliminate);
continue;
}
// If the node defines a MR Job then make sure none of its
// children that defines a MR Job are present in execNodes
if (node.definesMRJob()) {
if (hasMRJobChildNode(node, execNodes)) {
// "node" must NOT be queued when node=group and the child that defines job is Rand
// this is because "group" can be pushed into the "Rand" job.
if (! (node.getType() == Lop.Type.Grouping && checkDataGenAsChildNode(node,execNodes)) ) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing node " + node.toString() + " (code 4)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
continue;
}
}
}
// if "node" has more than one input, and has a descendant lop
// in execNodes that is of type RecordReader
// then all its inputs must be ancestors of RecordReader. If
// not, queue "node"
if (node.getInputs().size() > 1
&& hasChildNode(node, execNodes, ExecLocation.RecordReader)) {
// get the actual RecordReader lop
Lop rr_node = getChildNode(node, execNodes, ExecLocation.RecordReader);
// all inputs of "node" must be ancestors of rr_node
boolean queue_it = false;
for (Lop n : node.getInputs()) {
// each input should be ancestor of RecordReader lop
if (!n.equals(rr_node) && !isChild(rr_node, n, IDMap)) {
queue_it = true; // i.e., "node" must be queued
break;
}
}
if (queue_it) {
// queue node
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -" + node.toString() + " (code 5)");
queuedNodes.add(node);
// TODO: does this have to be modified to handle
// recordreader lops?
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
continue;
} else {
// nothing here.. subsequent checks have to be performed
// on "node"
;
}
}
// data node, always add if child not queued
// only write nodes are kept in execnodes
if (node.getExecLocation() == ExecLocation.Data) {
Data dnode = (Data) node;
boolean dnode_queued = false;
if ( dnode.getOperationType() == OperationTypes.READ ) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding Data -"+ node.toString());
// TODO: avoid readScalar instruction, and read it on-demand just like the way Matrices are read in control program
if ( node.getDataType() == DataType.SCALAR
//TODO: LEO check the following condition is still needed
&& node.getOutputParameters().getFile_name() != null ) {
// this lop corresponds to reading a scalar from HDFS file
// add it to execNodes so that "readScalar" instruction gets generated
execNodes.add(node);
// note: no need to add it to any job vector
}
}
else if (dnode.getOperationType() == OperationTypes.WRITE) {
// Skip the transient write <code>node</code> if the input is a
// transient read with the same variable name, i.e., a dummy copy.
// Hence, <code>node</code> can be avoided.
// TODO: this case should ideally be handled in the language layer
// prior to the construction of Hops Dag
Lop input = dnode.getInputs().get(0);
if ( dnode.isTransient()
&& input.getExecLocation() == ExecLocation.Data
&& ((Data)input).isTransient()
&& dnode.getOutputParameters().getLabel().equals(input.getOutputParameters().getLabel()) ) {
// do nothing, <code>node</code> must not processed any further.
;
}
else if ( execNodes.contains(input) && !isCompatible(node, input) && sendWriteLopToMR(node)) {
// input is in execNodes but it is not compatible with write lop. So, queue the write lop.
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -" + node.toString());
queuedNodes.add(node);
dnode_queued = true;
}
else {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding Data -"+ node.toString());
execNodes.add(node);
if ( sendWriteLopToMR(node) ) {
addNodeByJobType(node, jobNodes, execNodes, false);
}
}
}
if (!dnode_queued)
finishedNodes.add(node);
continue;
}
// map or reduce node, can always be piggybacked with parent
if (node.getExecLocation() == ExecLocation.MapOrReduce) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, false);
continue;
}
// RecordReader node, add, if no parent needs reduce, else queue
if (node.getExecLocation() == ExecLocation.RecordReader) {
// "node" should not have any children in
// execNodes .. it has to be the first one in the job!
if (!hasChildNode(node, execNodes, ExecLocation.Map)
&& !hasChildNode(node, execNodes,
ExecLocation.MapAndReduce)) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, false);
} else {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -"+ node.toString() + " (code 6)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
}
continue;
}
// map node, add, if no parent needs reduce, else queue
if (node.getExecLocation() == ExecLocation.Map) {
boolean queueThisNode = false;
int subcode = -1;
if ( node.usesDistributedCache() ) {
// if an input to <code>node</code> comes from distributed cache
// then that input must get executed in one of the previous jobs.
int[] dcInputIndexes = node.distributedCacheInputIndex();
for( int dcInputIndex : dcInputIndexes ){
Lop dcInput = node.getInputs().get(dcInputIndex-1);
if ( (dcInput.getType() != Lop.Type.Data && dcInput.getExecType()==ExecType.MR)
&& execNodes.contains(dcInput) )
{
queueThisNode = true;
subcode = 1;
}
}
// Limit the number of distributed cache inputs based on the available memory in mappers
double memsize = computeFootprintInMapper(node);
//gmrMapperFootprint += computeFootprintInMapper(node);
if ( gmrMapperFootprint>0 && !checkMemoryLimits(node, gmrMapperFootprint+memsize ) ) {
queueThisNode = true;
subcode = 2;
}
if(!queueThisNode)
gmrMapperFootprint += memsize;
}
if (!queueThisNode && !hasChildNode(node, execNodes,ExecLocation.MapAndReduce)&& !hasMRJobChildNode(node, execNodes)) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, false);
} else {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -"+ node.toString() + " (code 7 - " + "subcode " + subcode + ")");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
}
continue;
}
// reduce node, make sure no parent needs reduce, else queue
if (node.getExecLocation() == ExecLocation.MapAndReduce) {
// TODO: statiko -- keep the middle condition
// discuss about having a lop that is MapAndReduce but does
// not define a job
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, eliminate);
continue;
}
// aligned reduce, make sure a parent that is reduce exists
if (node.getExecLocation() == ExecLocation.Reduce) {
if ( compatibleWithChildrenInExecNodes(execNodes, node) &&
(hasChildNode(node, execNodes, ExecLocation.MapAndReduce)
|| hasChildNode(node, execNodes, ExecLocation.Map) ) )
{
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding -"+ node.toString());
execNodes.add(node);
finishedNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, false);
} else {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -"+ node.toString() + " (code 8)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
}
continue;
}
// add Scalar to execNodes if it has no child in exec nodes
// that will be executed in an MR job.
if (node.getExecLocation() == ExecLocation.ControlProgram) {
for ( Lop lop : node.getInputs() ) {
if (execNodes.contains(lop)
&& !(lop.getExecLocation() == ExecLocation.Data)
&& !(lop.getExecLocation() == ExecLocation.ControlProgram)) {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -"+ node.toString() + " (code 9)");
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
break;
}
}
if (queuedNodes.contains(node))
continue;
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Adding - scalar"+ node.toString());
execNodes.add(node);
addNodeByJobType(node, jobNodes, execNodes, false);
finishedNodes.add(node);
continue;
}
}
// no work to do
if ( execNodes.isEmpty() ) {
if( !queuedNodes.isEmpty() )
{
//System.err.println("Queued nodes should be 0");
throw new LopsException("Queued nodes should not be 0 at this point \n");
}
if( LOG.isTraceEnabled() )
LOG.trace("All done! queuedNodes = "+ queuedNodes.size());
done = true;
} else {
// work to do
if( LOG.isTraceEnabled() )
LOG.trace("Generating jobs for group -- Node count="+ execNodes.size());
// first process scalar instructions
generateControlProgramJobs(execNodes, inst, writeInst, deleteInst);
// copy unassigned lops in execnodes to gmrnodes
for (int i = 0; i < execNodes.size(); i++) {
Lop node = execNodes.get(i);
if (jobType(node, jobNodes) == -1) {
if ( isCompatible(node, JobType.GMR) ) {
if ( node.hasNonBlockedInputs() ) {
jobNodes.get(JobType.GMRCELL.getId()).add(node);
addChildren(node, jobNodes.get(JobType.GMRCELL.getId()), execNodes);
}
else {
jobNodes.get(JobType.GMR.getId()).add(node);
addChildren(node, jobNodes.get(JobType.GMR.getId()), execNodes);
}
}
else {
if( LOG.isTraceEnabled() )
LOG.trace(indent + "Queueing -" + node.toString() + " (code 10)");
execNodes.remove(i);
finishedNodes.remove(node);
queuedNodes.add(node);
removeNodesForNextIteration(node, finishedNodes,
execNodes, queuedNodes, jobNodes);
}
}
}
// next generate MR instructions
if (!execNodes.isEmpty())
generateMRJobs(execNodes, inst, writeInst, deleteInst, jobNodes);
handleSingleOutputJobs(execNodes, jobNodes, finishedNodes);
}
}
// add write and delete inst at the very end.
//inst.addAll(preWriteDeleteInst);
inst.addAll(writeInst);
inst.addAll(deleteInst);
inst.addAll(endOfBlockInst);
return inst;
}
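/**
 * Checks whether <code>node</code> is compatible, w.r.t. job types, with all of its
 * descendants that are already present in <code>execNodes</code>; lops that execute
 * in the control program are ignored in this check.
 *
 * @param execNodes list of exec nodes
 * @param node low-level operator
 * @return true if node is compatible with all its children in execNodes
 */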
private boolean compatibleWithChildrenInExecNodes(ArrayList<Lop> execNodes, Lop node) {
for( Lop tmpNode : execNodes ) {
// for lops that execute in control program, compatibleJobs property is set to LopProperties.INVALID
// we should not consider such lops in this check
if (isChild(tmpNode, node, IDMap)
&& tmpNode.getExecLocation() != ExecLocation.ControlProgram
//&& tmpNode.getCompatibleJobs() != LopProperties.INVALID
&& (tmpNode.getCompatibleJobs() & node.getCompatibleJobs()) == 0)
return false;
}
return true;
}
/**
* Exclude rmvar instruction for varname from deleteInst, if exists
*
* @param varName variable name
* @param deleteInst list of instructions
*/
private static void excludeRemoveInstruction(String varName, ArrayList<Instruction> deleteInst) {
//for(Instruction inst : deleteInst) {
for(int i=0; i < deleteInst.size(); i++) {
Instruction inst = deleteInst.get(i);
if ((inst.getType() == INSTRUCTION_TYPE.CONTROL_PROGRAM || inst.getType() == INSTRUCTION_TYPE.SPARK)
&& ((CPInstruction)inst).getCPInstructionType() == CPINSTRUCTION_TYPE.Variable
&& ((VariableCPInstruction)inst).isRemoveVariable(varName) ) {
deleteInst.remove(i);
i--; // adjust index after removal to avoid skipping the subsequent instruction
}
}
}
/**
* Generate rmvar instructions for the inputs, if their consumer count becomes zero.
*
* @param node low-level operator
* @param inst list of instructions
* @param deleteInst list of delete instructions
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private void processConsumersForInputs(Lop node, ArrayList<Instruction> inst, ArrayList<Instruction> deleteInst) throws DMLRuntimeException {
// reduce the consumer count for all input lops
// if the count becomes zero, then the variable associated w/ the input can be removed
for(Lop in : node.getInputs() ) {
if(DMLScript.ENABLE_DEBUG_MODE) {
processConsumers(in, inst, deleteInst, node);
}
else {
processConsumers(in, inst, deleteInst, null);
}
}
}
private static void processConsumers(Lop node, ArrayList<Instruction> inst, ArrayList<Instruction> deleteInst, Lop locationInfo) throws DMLRuntimeException {
// reduce the consumer count of the given lop
// if the count becomes zero, then the variable associated w/ its output can be removed
if ( node.removeConsumer() == 0 ) {
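// no rmvar is generated for literals (they are not backed by symbol table variables)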
if ( node.getExecLocation() == ExecLocation.Data && ((Data)node).isLiteral() ) {
return;
}
String label = node.getOutputParameters().getLabel();
Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(label);
if (locationInfo != null)
currInstr.setLocation(locationInfo);
else
currInstr.setLocation(node);
inst.add(currInstr);
excludeRemoveInstruction(label, deleteInst);
}
}
/**
* Method to generate instructions that are executed in the Control Program. At
* this point, the DAG has no dependencies on the MR dag, i.e., none of the
* inputs are outputs of MR jobs.
*
* @param execNodes list of low-level operators
* @param inst list of instructions
* @param writeInst list of write instructions
* @param deleteInst list of delete instructions
* @throws LopsException if LopsException occurs
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private void generateControlProgramJobs(ArrayList<Lop> execNodes,
ArrayList<Instruction> inst, ArrayList<Instruction> writeInst, ArrayList<Instruction> deleteInst) throws LopsException, DMLRuntimeException {
// nodes to be deleted from execnodes
ArrayList<Lop> markedNodes = new ArrayList<Lop>();
// variable names to be deleted
ArrayList<String> var_deletions = new ArrayList<String>();
HashMap<String, Lop> var_deletionsLineNum = new HashMap<String, Lop>();
boolean doRmVar = false;
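// doRmVar indicates whether rmvar instructions may be generated for the inputs of
// the current node once its own instruction has been emitted
// (see processConsumersForInputs at the end of the loop body)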
for (int i = 0; i < execNodes.size(); i++) {
Lop node = execNodes.get(i);
doRmVar = false;
// mark input scalar read nodes for deletion
// TODO: statiko -- check if this condition ever evaluated to TRUE
if (node.getExecLocation() == ExecLocation.Data
&& ((Data) node).getOperationType() == Data.OperationTypes.READ
&& ((Data) node).getDataType() == DataType.SCALAR
&& node.getOutputParameters().getFile_name() == null ) {
markedNodes.add(node);
continue;
}
// output scalar instructions and mark nodes for deletion
if (node.getExecLocation() == ExecLocation.ControlProgram) {
if (node.getDataType() == DataType.SCALAR) {
// Output from lops with SCALAR data type must
// go into Temporary Variables (Var0, Var1, etc.)
NodeOutput out = setupNodeOutputs(node, ExecType.CP, false, false);
inst.addAll(out.getPreInstructions()); // dummy
deleteInst.addAll(out.getLastInstructions());
} else {
// Output from lops with non-SCALAR data type must
// go into Temporary Files (temp0, temp1, etc.)
NodeOutput out = setupNodeOutputs(node, ExecType.CP, false, false);
inst.addAll(out.getPreInstructions());
boolean hasTransientWriteParent = false;
for ( Lop parent : node.getOutputs() ) {
if ( parent.getExecLocation() == ExecLocation.Data
&& ((Data)parent).getOperationType() == Data.OperationTypes.WRITE
&& ((Data)parent).isTransient() ) {
hasTransientWriteParent = true;
break;
}
}
if ( !hasTransientWriteParent ) {
deleteInst.addAll(out.getLastInstructions());
}
else {
var_deletions.add(node.getOutputParameters().getLabel());
var_deletionsLineNum.put(node.getOutputParameters().getLabel(), node);
}
}
String inst_string = "";
// Lops with arbitrary number of inputs (ParameterizedBuiltin, GroupedAggregate, DataGen)
// are handled separately, by simply passing ONLY the output variable to getInstructions()
if (node.getType() == Lop.Type.ParameterizedBuiltin
|| node.getType() == Lop.Type.GroupedAgg
|| node.getType() == Lop.Type.DataGen ){
inst_string = node.getInstructions(node.getOutputParameters().getLabel());
}
// Lops with arbitrary number of inputs and outputs are handled
// separately as well by passing arrays of inputs and outputs
else if ( node.getType() == Lop.Type.FunctionCallCP )
{
String[] inputs = new String[node.getInputs().size()];
String[] outputs = new String[node.getOutputs().size()];
int count = 0;
for( Lop in : node.getInputs() )
inputs[count++] = in.getOutputParameters().getLabel();
count = 0;
for( Lop out : node.getOutputs() )
{
outputs[count++] = out.getOutputParameters().getLabel();
}
inst_string = node.getInstructions(inputs, outputs);
}
else if (node.getType() == Lop.Type.MULTIPLE_CP) { // ie, MultipleCP class
inst_string = node.getInstructions(node.getOutputParameters().getLabel());
}
else {
if ( node.getInputs().isEmpty() ) {
// currently, such a case exists only for Rand lop
inst_string = node.getInstructions(node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 1) {
inst_string = node.getInstructions(node.getInputs()
.get(0).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 2) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 3 || node.getType() == Type.Ternary) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getInputs().get(2).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 4) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getInputs().get(2).getOutputParameters().getLabel(),
node.getInputs().get(3).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 5) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getInputs().get(2).getOutputParameters().getLabel(),
node.getInputs().get(3).getOutputParameters().getLabel(),
node.getInputs().get(4).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 6) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getInputs().get(2).getOutputParameters().getLabel(),
node.getInputs().get(3).getOutputParameters().getLabel(),
node.getInputs().get(4).getOutputParameters().getLabel(),
node.getInputs().get(5).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else if (node.getInputs().size() == 7) {
inst_string = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
node.getInputs().get(1).getOutputParameters().getLabel(),
node.getInputs().get(2).getOutputParameters().getLabel(),
node.getInputs().get(3).getOutputParameters().getLabel(),
node.getInputs().get(4).getOutputParameters().getLabel(),
node.getInputs().get(5).getOutputParameters().getLabel(),
node.getInputs().get(6).getOutputParameters().getLabel(),
node.getOutputParameters().getLabel());
}
else {
String[] inputs = new String[node.getInputs().size()];
for( int j=0; j<node.getInputs().size(); j++ )
inputs[j] = node.getInputs().get(j).getOutputParameters().getLabel();
inst_string = node.getInstructions(inputs,
node.getOutputParameters().getLabel());
}
}
try {
if( LOG.isTraceEnabled() )
LOG.trace("Generating instruction - "+ inst_string);
Instruction currInstr = InstructionParser.parseSingleInstruction(inst_string);
if(currInstr == null) {
throw new LopsException("Error parsing the instruction:" + inst_string);
}
if (node._beginLine != 0)
currInstr.setLocation(node);
else if ( !node.getOutputs().isEmpty() )
currInstr.setLocation(node.getOutputs().get(0));
else if ( !node.getInputs().isEmpty() )
currInstr.setLocation(node.getInputs().get(0));
inst.add(currInstr);
} catch (Exception e) {
throw new LopsException(node.printErrorLocation() + "Problem generating simple inst - "
+ inst_string, e);
}
markedNodes.add(node);
doRmVar = true;
//continue;
}
else if (node.getExecLocation() == ExecLocation.Data ) {
Data dnode = (Data)node;
Data.OperationTypes op = dnode.getOperationType();
if ( op == Data.OperationTypes.WRITE ) {
NodeOutput out = null;
if ( sendWriteLopToMR(node) ) {
// In this case, Data WRITE lop goes into MR, and
// we don't have to do anything here
doRmVar = false;
}
else {
out = setupNodeOutputs(node, ExecType.CP, false, false);
if ( dnode.getDataType() == DataType.SCALAR ) {
// processing is the same for both transient and persistent scalar writes
writeInst.addAll(out.getLastInstructions());
//inst.addAll(out.getLastInstructions());
doRmVar = false;
}
else {
// setupNodeOutputs() handles both transient and persistent matrix writes
if ( dnode.isTransient() ) {
//inst.addAll(out.getPreInstructions()); // dummy ?
deleteInst.addAll(out.getLastInstructions());
doRmVar = false;
}
else {
// In case of persistent write lop, write instruction will be generated
// and that instruction must be added to <code>inst</code> so that it gets
// executed immediately. If it is added to <code>deleteInst</code> then it
// gets executed at the end of program block's execution
inst.addAll(out.getLastInstructions());
doRmVar = true;
}
}
markedNodes.add(node);
//continue;
}
}
else {
// generate a temp label to hold the value that is read from HDFS
if ( node.getDataType() == DataType.SCALAR ) {
node.getOutputParameters().setLabel(Lop.SCALAR_VAR_NAME_PREFIX + var_index.getNextID());
String io_inst = node.getInstructions(node.getOutputParameters().getLabel(),
node.getOutputParameters().getFile_name());
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(io_inst);
currInstr.setLocation(node);
inst.add(currInstr);
Instruction tempInstr = VariableCPInstruction.prepareRemoveInstruction(node.getOutputParameters().getLabel());
tempInstr.setLocation(node);
deleteInst.add(tempInstr);
}
else {
throw new LopsException("Matrix READs are not handled in CP yet!");
}
markedNodes.add(node);
doRmVar = true;
//continue;
}
}
// see if rmvar instructions can be generated for node's inputs
if(doRmVar)
processConsumersForInputs(node, inst, deleteInst);
doRmVar = false;
}
for ( String var : var_deletions ) {
Instruction rmInst = VariableCPInstruction.prepareRemoveInstruction(var);
if( LOG.isTraceEnabled() )
LOG.trace(" Adding var_deletions: " + rmInst.toString());
rmInst.setLocation(var_deletionsLineNum.get(var));
deleteInst.add(rmInst);
}
// delete all marked nodes
for ( Lop node : markedNodes ) {
execNodes.remove(node);
}
}
/**
* Method to remove all child nodes of a queued node that should be executed
* in a following iteration.
*
* @param node low-level operator
* @param finishedNodes list of finished nodes
* @param execNodes list of exec nodes
* @param queuedNodes list of queued nodes
* @param jobvec list of lists of low-level operators
* @throws LopsException if LopsException occurs
*/
private void removeNodesForNextIteration(Lop node, ArrayList<Lop> finishedNodes,
ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes,
ArrayList<ArrayList<Lop>> jobvec) throws LopsException {
// only queued nodes with multiple inputs need to be handled.
if (node.getInputs().size() == 1)
return;
//if all children are queued, then there is nothing to do.
boolean allQueued = true;
for( Lop input : node.getInputs() ) {
if( !queuedNodes.contains(input) ) {
allQueued = false;
break;
}
}
if ( allQueued )
return;
if( LOG.isTraceEnabled() )
LOG.trace(" Before remove nodes for next iteration -- size of execNodes " + execNodes.size());
// Determine if <code>node</code> has inputs from the same job or multiple jobs
int jobid = Integer.MIN_VALUE;
boolean inputs_in_same_job = true;
for( Lop input : node.getInputs() ) {
int input_jobid = jobType(input, jobvec);
if ( jobid == Integer.MIN_VALUE )
jobid = input_jobid;
else if ( jobid != input_jobid ) {
inputs_in_same_job = false;
break;
}
}
// Determine if there exist any unassigned inputs to <code>node</code>
// Evaluate only those lops that execute in MR.
boolean unassigned_inputs = false;
for( Lop input : node.getInputs() ) {
//if ( input.getExecLocation() != ExecLocation.ControlProgram && jobType(input, jobvec) == -1 ) {
if ( input.getExecType() == ExecType.MR && !execNodes.contains(input)) { //jobType(input, jobvec) == -1 ) {
unassigned_inputs = true;
break;
}
}
// Determine if any node's children are queued
boolean child_queued = false;
for( Lop input : node.getInputs() ) {
if (queuedNodes.contains(input) ) {
child_queued = true;
break;
}
}
if (LOG.isTraceEnabled()) {
LOG.trace(" Property Flags:");
LOG.trace(" Inputs in same job: " + inputs_in_same_job);
LOG.trace(" Unassigned inputs: " + unassigned_inputs);
LOG.trace(" Child queued: " + child_queued);
}
// Evaluate each lop in <code>execNodes</code> for removal.
// Add lops to be removed to <code>markedNodes</code>.
ArrayList<Lop> markedNodes = new ArrayList<Lop>();
for (Lop tmpNode : execNodes ) {
if (LOG.isTraceEnabled()) {
LOG.trace(" Checking for removal (" + tmpNode.getID() + ") " + tmpNode.toString());
}
// if tmpNode is not a descendant of 'node', then there is no advantage in removing tmpNode for later iterations.
if(!isChild(tmpNode, node, IDMap))
continue;
// handle group input lops
if(node.getInputs().contains(tmpNode) && tmpNode.isAligner()) {
markedNodes.add(tmpNode);
if( LOG.isTraceEnabled() )
LOG.trace(" Removing for next iteration (code 1): (" + tmpNode.getID() + ") " + tmpNode.toString());
}
//if (child_queued) {
// if one of the children is queued,
// remove some child nodes on the other leg that may be needed later on,
// e.g., a Group lop.
if (!hasOtherQueuedParentNode(tmpNode, queuedNodes, node)
&& branchHasNoOtherUnExecutedParents(tmpNode, node, execNodes, finishedNodes)) {
boolean queueit = false;
int code = -1;
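// 'code' only identifies which piggybacking rule triggered the removal; it is used for trace output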
switch(node.getExecLocation()) {
case Map:
if(branchCanBePiggyBackedMap(tmpNode, node, execNodes, queuedNodes, markedNodes))
queueit = true;
code=2;
break;
case MapAndReduce:
if(branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, queuedNodes)&& !tmpNode.definesMRJob())
queueit = true;
code=3;
break;
case Reduce:
if(branchCanBePiggyBackedReduce(tmpNode, node, execNodes, queuedNodes))
queueit = true;
code=4;
break;
default:
//do nothing
}
if(queueit) {
if( LOG.isTraceEnabled() )
LOG.trace(" Removing for next iteration (code " + code + "): (" + tmpNode.getID() + ") " + tmpNode.toString());
markedNodes.add(tmpNode);
}
}
/*
* "node" has no other queued children.
*
* If inputs are in the same job and "node" is of type
* MapAndReduce, then remove nodes of all types other than
* Reduce, MapAndReduce, and the ones that define a MR job as
* they can be piggybacked later.
*
* e.g.: A=Rand, B=Rand, C=A%*%B. Here, both inputs of the MMCJ lop
* come from Rand job, and they should not be removed.
*
* Other examples: -- MMCJ whose children are of type
* MapAndReduce (say GMR) -- Inputs coming from two different
* jobs .. GMR & REBLOCK
*/
//boolean himr = hasOtherMapAndReduceParentNode(tmpNode, execNodes,node);
//boolean bcbp = branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, finishedNodes);
//System.out.println(" .. " + inputs_in_same_job + "," + himr + "," + bcbp);
if ((inputs_in_same_job || unassigned_inputs)
&& node.getExecLocation() == ExecLocation.MapAndReduce
&& !hasOtherMapAndReduceParentNode(tmpNode, execNodes,node) // don't remove since it already piggybacked with a MapReduce node
&& branchCanBePiggyBackedMapAndReduce(tmpNode, node, execNodes, queuedNodes)
&& !tmpNode.definesMRJob()) {
if( LOG.isTraceEnabled() )
LOG.trace(" Removing for next iteration (code 5): ("+ tmpNode.getID() + ") " + tmpNode.toString());
markedNodes.add(tmpNode);
}
} // for i
// we also need to delete all parent nodes of marked nodes
for ( Lop enode : execNodes ) {
if( LOG.isTraceEnabled() ) {
LOG.trace(" Checking for removal - ("
+ enode.getID() + ") " + enode.toString());
}
if (hasChildNode(enode, markedNodes) && !markedNodes.contains(enode)) {
markedNodes.add(enode);
if( LOG.isTraceEnabled() )
LOG.trace(" Removing for next iteration (code 6) (" + enode.getID() + ") " + enode.toString());
}
}
if ( execNodes.size() != markedNodes.size() ) {
// delete marked nodes from finishedNodes and execNodes
// add to queued nodes
for(Lop n : markedNodes) {
if ( n.usesDistributedCache() )
gmrMapperFootprint -= computeFootprintInMapper(n);
finishedNodes.remove(n);
execNodes.remove(n);
removeNodeByJobType(n, jobvec);
queuedNodes.add(n);
}
}
}
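/**
 * Checks whether the branch rooted at <code>tmpNode</code> can be piggybacked with the
 * Reduce lop <code>node</code>: <code>tmpNode</code> must not be a descendant of any queued
 * input of <code>node</code>, and every lop on the branch tmpNode->..->node must execute
 * in Map or MapOrReduce.
 *
 * @param tmpNode temporary low-level operator
 * @param node low-level operator
 * @param execNodes list of exec nodes
 * @param queuedNodes list of queued nodes
 * @return true if the branch can be piggybacked with node
 */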
private boolean branchCanBePiggyBackedReduce(Lop tmpNode, Lop node, ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes) {
if(node.getExecLocation() != ExecLocation.Reduce)
return false;
// if tmpNode is descendant of any queued child of node, then branch can not be piggybacked
for(Lop ni : node.getInputs()) {
if(queuedNodes.contains(ni) && isChild(tmpNode, ni, IDMap))
return false;
}
for( Lop n : execNodes ) {
if(n.equals(node))
continue;
if(n.equals(tmpNode) && n.getExecLocation() != ExecLocation.Map && n.getExecLocation() != ExecLocation.MapOrReduce)
return false;
// check if n is on the branch tmpNode->*->node
if(isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap)) {
if(!node.getInputs().contains(tmpNode) // redundant
&& n.getExecLocation() != ExecLocation.Map && n.getExecLocation() != ExecLocation.MapOrReduce)
return false;
}
}
return true;
}
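/**
 * Checks whether the branch rooted at <code>tmpNode</code> can be piggybacked with the
 * Map lop <code>node</code>. Only Map and MapOrReduce lops that do not define their own
 * MR job are considered; in addition, distributed-cache ordering constraints and the
 * mapper memory budget must be respected, and the compatible job types of both lops
 * must overlap.
 *
 * @param tmpNode temporary low-level operator
 * @param node low-level operator
 * @param execNodes list of exec nodes
 * @param queuedNodes list of queued nodes
 * @param markedNodes list of nodes already marked for removal
 * @return true if the branch can be piggybacked with node
 */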
private boolean branchCanBePiggyBackedMap(Lop tmpNode, Lop node, ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes, ArrayList<Lop> markedNodes) {
if(node.getExecLocation() != ExecLocation.Map)
return false;
// if tmpNode is descendant of any queued child of node, then branch can not be piggybacked
for(Lop ni : node.getInputs()) {
if(queuedNodes != null && queuedNodes.contains(ni) && isChild(tmpNode, ni, IDMap))
return false;
}
// since node.location=Map: only Map & MapOrReduce lops must be considered
if( tmpNode.definesMRJob() || (tmpNode.getExecLocation() != ExecLocation.Map && tmpNode.getExecLocation() != ExecLocation.MapOrReduce))
return false;
// if there exist a node "dcInput" that is
// -- a) parent of tmpNode, and b) feeds into "node" via distributed cache
// then, tmpNode should not be removed.
// "dcInput" must be executed prior to "node", and removal of tmpNode does not make that happen.
if(node.usesDistributedCache() ) {
for(int dcInputIndex : node.distributedCacheInputIndex()) {
Lop dcInput = node.getInputs().get(dcInputIndex-1);
if(isChild(tmpNode, dcInput, IDMap))
return false;
}
}
// if tmpNode requires an input from distributed cache, remove tmpNode only if
// that input fits into the mappers' memory budget; if not, the branch cannot be
// piggybacked and tmpNode must not be removed.
if ( tmpNode.usesDistributedCache() ) {
double memsize = computeFootprintInMapper(tmpNode);
if (node.usesDistributedCache() )
memsize += computeFootprintInMapper(node);
if ( markedNodes != null ) {
for(Lop n : markedNodes) {
if ( n.usesDistributedCache() )
memsize += computeFootprintInMapper(n);
}
}
if ( !checkMemoryLimits(node, memsize ) ) {
return false;
}
}
return ( (tmpNode.getCompatibleJobs() & node.getCompatibleJobs()) > 0);
}
/**
* Function that checks if <code>tmpNode</code> can be piggybacked with MapAndReduce
* lop <code>node</code>.
*
* Decision depends on the exec location of <code>tmpNode</code>. If the exec location is:
* MapAndReduce: CAN NOT be piggybacked since it defines its own MR job
* Reduce: CAN NOT be piggybacked since it must execute before <code>node</code>
* Map or MapOrReduce: CAN be piggybacked ONLY IF it is compatible w/ the job type of <code>node</code>
*
* @param tmpNode temporary low-level operator
* @param node low-level operator
* @param execNodes list of exec nodes
* @param queuedNodes list of queued nodes
* @return true if tmpNode can be piggybacked on node
*/
private boolean branchCanBePiggyBackedMapAndReduce(Lop tmpNode, Lop node,
ArrayList<Lop> execNodes, ArrayList<Lop> queuedNodes) {
if (node.getExecLocation() != ExecLocation.MapAndReduce)
return false;
JobType jt = JobType.findJobTypeFromLop(node);
for ( Lop n : execNodes ) {
if (n.equals(node))
continue;
// Evaluate only nodes on the branch between tmpNode->..->node
if (n.equals(tmpNode) || (isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap))) {
if ( hasOtherMapAndReduceParentNode(tmpNode, queuedNodes,node) )
return false;
ExecLocation el = n.getExecLocation();
if (el != ExecLocation.Map && el != ExecLocation.MapOrReduce)
return false;
else if (!isCompatible(n, jt))
return false;
}
}
return true;
}
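/**
 * Checks that neither <code>tmpNode</code> nor any lop on the branch tmpNode->..->node
 * has an unfinished output other than the one on this branch, i.e., removing the
 * branch for a later iteration does not strand any other unexecuted parent.
 *
 * @param tmpNode temporary low-level operator
 * @param node low-level operator
 * @param execNodes list of exec nodes
 * @param finishedNodes list of finished nodes
 * @return true if the branch has no other unexecuted parents
 */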
private boolean branchHasNoOtherUnExecutedParents(Lop tmpNode, Lop node,
ArrayList<Lop> execNodes, ArrayList<Lop> finishedNodes) {
//if tmpNode has more than one unfinished output, return false
if(tmpNode.getOutputs().size() > 1)
{
int cnt = 0;
for (Lop output : tmpNode.getOutputs() )
if (!finishedNodes.contains(output))
cnt++;
if(cnt != 1)
return false;
}
//check to see if any node between node and tmpNode has more than one unfinished output
for( Lop n : execNodes ) {
if(n.equals(node) || n.equals(tmpNode))
continue;
if(isChild(n, node, IDMap) && isChild(tmpNode, n, IDMap))
{
int cnt = 0;
for (Lop output : n.getOutputs() ) {
if (!finishedNodes.contains(output))
cnt++;
}
if(cnt != 1)
return false;
}
}
return true;
}
/**
* Method to return the job index for a lop.
*
* @param lops low-level operator
* @param jobvec list of lists of low-level operators
* @return job index for a low-level operator
* @throws LopsException if LopsException occurs
*/
private static int jobType(Lop lops, ArrayList<ArrayList<Lop>> jobvec) throws LopsException {
for ( JobType jt : JobType.values()) {
int i = jt.getId();
if (i > 0 && jobvec.get(i) != null && jobvec.get(i).contains(lops)) {
return i;
}
}
return -1;
}
/**
* Method to see if there is a node of type MapAndReduce between tmpNode and node
* in the given node collection
*
* @param tmpNode temporary low-level operator
* @param nodeList list of low-level operators
* @param node low-level operator
* @return true if MapAndReduce node between tmpNode and node in nodeList
*/
private boolean hasOtherMapAndReduceParentNode(Lop tmpNode,
ArrayList<Lop> nodeList, Lop node) {
if ( tmpNode.getExecLocation() == ExecLocation.MapAndReduce)
return true;
for ( Lop n : tmpNode.getOutputs() ) {
if ( nodeList.contains(n) && isChild(n,node,IDMap)) {
if(!n.equals(node) && n.getExecLocation() == ExecLocation.MapAndReduce)
return true;
else
return hasOtherMapAndReduceParentNode(n, nodeList, node);
}
}
return false;
}
/**
* Method to check if there is a queued node that is a parent of both tmpNode and node
*
* @param tmpNode temporary low-level operator
* @param queuedNodes list of queued nodes
* @param node low-level operator
* @return true if there is a queued node that is a parent of tmpNode and node
*/
private boolean hasOtherQueuedParentNode(Lop tmpNode, ArrayList<Lop> queuedNodes, Lop node) {
if ( queuedNodes.isEmpty() )
return false;
boolean[] nodeMarked = node.get_reachable();
boolean[] tmpMarked = tmpNode.get_reachable();
long nodeid = IDMap.get(node.getID());
long tmpid = IDMap.get(tmpNode.getID());
for ( Lop qnode : queuedNodes ) {
int id = IDMap.get(qnode.getID());
if ((id != nodeid && nodeMarked[id]) && (id != tmpid && tmpMarked[id]) )
return true;
}
return false;
}
/**
* Method to print the lops grouped by job type
*
* @param jobNodes list of lists of low-level operators
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void printJobNodes(ArrayList<ArrayList<Lop>> jobNodes)
throws DMLRuntimeException
{
if (LOG.isTraceEnabled()){
for ( JobType jt : JobType.values() ) {
int i = jt.getId();
if (i > 0 && jobNodes.get(i) != null && !jobNodes.get(i).isEmpty() ) {
LOG.trace(jt.getName() + " Job Nodes:");
for (int j = 0; j < jobNodes.get(i).size(); j++) {
LOG.trace(" "
+ jobNodes.get(i).get(j).getID() + ") "
+ jobNodes.get(i).get(j).toString());
}
}
}
}
}
/**
* Method to check if there exists any lop with the given exec location
*
* @param nodes list of low-level operators
* @param loc exec location
* @return true if there is a node with the given exec location
*/
private static boolean hasANode(ArrayList<Lop> nodes, ExecLocation loc) {
for ( Lop n : nodes ) {
if (n.getExecLocation() == loc)
return true;
}
return false;
}
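/**
 * Splits a set of GMR lops by RecordReader lop: one node vector is created per
 * RecordReader lop, containing that lop and all of its ancestors, plus one extra
 * vector for lops that do not depend on any RecordReader lop.
 *
 * @param gmrnodes list of lops assigned to a GMR job
 * @return list of node vectors, one per resulting job
 */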
private ArrayList<ArrayList<Lop>> splitGMRNodesByRecordReader(ArrayList<Lop> gmrnodes)
{
// obtain the list of record reader nodes
ArrayList<Lop> rrnodes = new ArrayList<Lop>();
for (Lop gmrnode : gmrnodes ) {
if (gmrnode.getExecLocation() == ExecLocation.RecordReader)
rrnodes.add(gmrnode);
}
// We allocate one extra vector to hold lops that do not depend on any
// recordreader lops
ArrayList<ArrayList<Lop>> splitGMR = createNodeVectors(rrnodes.size() + 1);
// flags to indicate whether a lop has been added to one of the node vectors
boolean[] flags = new boolean[gmrnodes.size()];
Arrays.fill(flags, false);
// first, obtain all ancestors of recordreader lops
for (int rrid = 0; rrid < rrnodes.size(); rrid++) {
// prepare node list for i^th record reader lop
// add record reader lop
splitGMR.get(rrid).add(rrnodes.get(rrid));
for (int j = 0; j < gmrnodes.size(); j++) {
if (rrnodes.get(rrid).equals(gmrnodes.get(j)))
flags[j] = true;
else if (isChild(rrnodes.get(rrid), gmrnodes.get(j), IDMap)) {
splitGMR.get(rrid).add(gmrnodes.get(j));
flags[j] = true;
}
}
}
// add all remaining lops to a separate job
int jobindex = rrnodes.size(); // the last node vector
for (int i = 0; i < gmrnodes.size(); i++) {
if (!flags[i]) {
splitGMR.get(jobindex).add(gmrnodes.get(i));
flags[i] = true;
}
}
return splitGMR;
}
/**
* Method to generate Hadoop jobs. Exec nodes can contain a mixture of node
* types requiring different MR jobs. This method breaks the job into
* sub-types and then invokes the appropriate method to generate
* instructions.
*
* @param execNodes list of exec nodes
* @param inst list of instructions
* @param writeinst list of write instructions
* @param deleteinst list of delete instructions
* @param jobNodes list of list of low-level operators
* @throws LopsException if LopsException occurs
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private void generateMRJobs(ArrayList<Lop> execNodes,
ArrayList<Instruction> inst,
ArrayList<Instruction> writeinst,
ArrayList<Instruction> deleteinst, ArrayList<ArrayList<Lop>> jobNodes)
throws LopsException, DMLRuntimeException
{
printJobNodes(jobNodes);
ArrayList<Instruction> rmvarinst = new ArrayList<Instruction>();
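// rmvar instructions are collected across all generated jobs and appended to
// 'inst' only after every MR job has been generated (see end of this method)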
for (JobType jt : JobType.values()) {
// do nothing, if jt = INVALID or ANY
if ( jt == JobType.INVALID || jt == JobType.ANY )
continue;
int index = jt.getId(); // job id is used as an index into jobNodes
ArrayList<Lop> currNodes = jobNodes.get(index);
// generate MR job
if (currNodes != null && !currNodes.isEmpty() ) {
if( LOG.isTraceEnabled() )
LOG.trace("Generating " + jt.getName() + " job");
if (jt.allowsRecordReaderInstructions() && hasANode(jobNodes.get(index), ExecLocation.RecordReader)) {
// split the nodes by recordReader lops
ArrayList<ArrayList<Lop>> rrlist = splitGMRNodesByRecordReader(jobNodes.get(index));
for (int i = 0; i < rrlist.size(); i++) {
generateMapReduceInstructions(rrlist.get(i), inst, writeinst, deleteinst, rmvarinst, jt);
}
}
else if ( jt.allowsSingleShuffleInstruction() ) {
// These jobs allow a single shuffle instruction.
// We should split the nodes so that a separate job is produced for each shuffle instruction.
Lop.Type splittingLopType = jt.getShuffleLopType();
ArrayList<Lop> nodesForASingleJob = new ArrayList<Lop>();
for (int i = 0; i < jobNodes.get(index).size(); i++) {
if (jobNodes.get(index).get(i).getType() == splittingLopType) {
nodesForASingleJob.clear();
// Add the lop that defines the split
nodesForASingleJob.add(jobNodes.get(index).get(i));
/*
* Add the splitting lop's children. This call is redundant when jt=SORT
* because a sort job ALWAYS has a SINGLE lop in the entire job
* i.e., there are no children to add when jt=SORT.
*/
addChildren(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index));
if ( jt.isCompatibleWithParentNodes() ) {
/*
* If the splitting lop is compatible with parent nodes
* then they must be added to the job. For example, MMRJ lop
* may have a Data(Write) lop as its parent, which can be
* executed along with MMRJ.
*/
addParents(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index));
}
generateMapReduceInstructions(nodesForASingleJob, inst, writeinst, deleteinst, rmvarinst, jt);
}
}
}
else {
// the default case
generateMapReduceInstructions(jobNodes.get(index), inst, writeinst, deleteinst, rmvarinst, jt);
}
}
}
inst.addAll(rmvarinst);
}
/**
* Method to add all parents of "node" in exec_n to node_v.
*
* @param node low-level operator
* @param node_v list of nodes
* @param exec_n list of nodes
*/
private void addParents(Lop node, ArrayList<Lop> node_v, ArrayList<Lop> exec_n) {
for (Lop enode : exec_n ) {
if (isChild(node, enode, IDMap)) {
if (!node_v.contains(enode)) {
if( LOG.isTraceEnabled() )
LOG.trace("Adding parent - " + enode.toString());
node_v.add(enode);
}
}
}
}
/**
* Method to add all relevant data nodes for set of exec nodes.
*
* @param node low-level operator
* @param node_v list of nodes
* @param exec_n list of nodes
*/
private static void addChildren(Lop node, ArrayList<Lop> node_v, ArrayList<Lop> exec_n) {
// add the node if it is contained in exec nodes and does not execute in the control program
if (exec_n.contains(node)
&& node.getExecLocation() != ExecLocation.ControlProgram) {
if (!node_v.contains(node)) {
node_v.add(node);
if(LOG.isTraceEnabled())
LOG.trace(" Added child " + node.toString());
}
}
if (!exec_n.contains(node))
return;
// recurse
for (Lop n : node.getInputs() ) {
addChildren(n, node_v, exec_n);
}
}
/**
* Method that determines the output format for a given node.
*
* @param node low-level operator
* @param cellModeOverride override mode
* @return output info
* @throws LopsException if LopsException occurs
*/
private static OutputInfo getOutputInfo(Lop node, boolean cellModeOverride)
throws LopsException
{
if ( (node.getDataType() == DataType.SCALAR && node.getExecType() == ExecType.CP)
|| node instanceof FunctionCallCP )
return null;
OutputInfo oinfo = null;
OutputParameters oparams = node.getOutputParameters();
if (oparams.isBlocked()) {
if ( !cellModeOverride )
oinfo = OutputInfo.BinaryBlockOutputInfo;
else {
// output format is overridden, for example, due to recordReaderInstructions in the job
oinfo = OutputInfo.BinaryCellOutputInfo;
// record decision of overriding in lop's outputParameters so that
// subsequent jobs that use this lop's output know the correct format.
// TODO: ideally, this should be done by having a member variable in Lop
// which stores the outputInfo.
try {
oparams.setDimensions(oparams.getNumRows(), oparams.getNumCols(), -1, -1, oparams.getNnz(), oparams.getUpdateType());
} catch(HopsException e) {
throw new LopsException(node.printErrorLocation() + "error in getOutputInfo in Dag ", e);
}
}
} else {
if (oparams.getFormat() == Format.TEXT || oparams.getFormat() == Format.MM)
oinfo = OutputInfo.TextCellOutputInfo;
else if ( oparams.getFormat() == Format.CSV ) {
oinfo = OutputInfo.CSVOutputInfo;
}
else {
oinfo = OutputInfo.BinaryCellOutputInfo;
}
}
/* Instead of the following hardcoding, one should get this information from the Lops */
if (node.getType() == Type.SortKeys && node.getExecType() == ExecType.MR) {
if( ((SortKeys)node).getOpType() == SortKeys.OperationTypes.Indexes)
oinfo = OutputInfo.BinaryBlockOutputInfo;
else
oinfo = OutputInfo.OutputInfoForSortOutput;
} else if (node.getType() == Type.CombineBinary) {
// Output format of CombineBinary (CB) depends on how the output is consumed
CombineBinary combine = (CombineBinary) node;
if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreSort ) {
oinfo = OutputInfo.OutputInfoForSortInput;
}
else if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCentralMoment
|| combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCovUnweighted
|| combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreGroupedAggUnweighted ) {
oinfo = OutputInfo.WeightedPairOutputInfo;
}
} else if ( node.getType() == Type.CombineTernary) {
oinfo = OutputInfo.WeightedPairOutputInfo;
} else if (node.getType() == Type.CentralMoment
|| node.getType() == Type.CoVariance )
{
// CMMR always operate in "cell mode",
// and the output is always in cell format
oinfo = OutputInfo.BinaryCellOutputInfo;
}
return oinfo;
}
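/**
 * Builds a CP assignvar instruction string from the given scalar input lop and
 * output lop, using Lop.OPERAND_DELIMITOR as the operand separator.
 *
 * @param input lop providing the scalar input operand
 * @param node lop whose output operand receives the value
 * @return assignvar instruction string
 */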
private String prepareAssignVarInstruction(Lop input, Lop node) {
StringBuilder sb = new StringBuilder();
sb.append(ExecType.CP);
sb.append(Lop.OPERAND_DELIMITOR);
sb.append("assignvar");
sb.append(Lop.OPERAND_DELIMITOR);
sb.append( input.prepScalarInputOperand(ExecType.CP) );
sb.append(Lop.OPERAND_DELIMITOR);
sb.append(node.prepOutputOperand());
return sb.toString();
}
/**
* Method to setup output filenames and outputInfos, and to generate related instructions
*
* @param node low-level operator
* @param et exec type
* @param cellModeOverride override mode
* @param copyTWrite if true, handle a transient write by copying (cpvar) the input variable to the output variable instead of creating a new temporary variable
* @return node output
* @throws DMLRuntimeException if DMLRuntimeException occurs
* @throws LopsException if LopsException occurs
*/
private NodeOutput setupNodeOutputs(Lop node, ExecType et, boolean cellModeOverride, boolean copyTWrite)
throws DMLRuntimeException, LopsException {
OutputParameters oparams = node.getOutputParameters();
NodeOutput out = new NodeOutput();
node.setConsumerCount(node.getOutputs().size());
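// the consumer count is decremented as consuming lops are processed; once it
// reaches zero, a rmvar instruction can be issued for the corresponding variable
// (see processConsumers)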
// Compute the output format for this node
out.setOutInfo(getOutputInfo(node, cellModeOverride));
// If node is NOT of type Data then we must generate
// a variable to hold the value produced by this node
// note: functioncallcp requires no createvar, rmvar
// since outputs are explicitly specified
if (node.getExecLocation() != ExecLocation.Data )
{
if (node.getDataType() == DataType.SCALAR) {
oparams.setLabel(Lop.SCALAR_VAR_NAME_PREFIX + var_index.getNextID());
out.setVarName(oparams.getLabel());
Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel());
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
}
else if(node instanceof ParameterizedBuiltin
&& ((ParameterizedBuiltin)node).getOp() == org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM) {
ParameterizedBuiltin pbi = (ParameterizedBuiltin)node;
Lop input = pbi.getNamedInput(ParameterizedBuiltinFunctionExpression.TF_FN_PARAM_DATA);
if(input.getDataType()== DataType.FRAME) {
// Output of transform is in CSV format, which gets subsequently reblocked
// TODO: change it to output binaryblock
Data dataInput = (Data) input;
oparams.setFile_name(getNextUniqueFilename());
oparams.setLabel(getNextUniqueVarname(DataType.MATRIX));
// generate an instruction that creates a symbol table entry for the new variable in CSV format
Data delimLop = (Data) dataInput.getNamedInputLop(
DataExpression.DELIM_DELIMITER, DataExpression.DEFAULT_DELIM_DELIMITER);
Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction(
oparams.getLabel(), oparams.getFile_name(), true,
DataType.MATRIX, OutputInfo.outputInfoToString(OutputInfo.CSVOutputInfo),
new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), -1, -1, oparams.getNnz()), oparams.getUpdateType(),
false, delimLop.getStringValue(), true
);
createvarInst.setLocation(node);
out.addPreInstruction(createvarInst);
// temp file as well as the variable has to be deleted at the end
Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel());
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
// finally, add the generated filename and variable name to the list of outputs
out.setFileName(oparams.getFile_name());
out.setVarName(oparams.getLabel());
}
else {
throw new LopsException("Input to transform() has an invalid type: " + input.getDataType() + ", it must be FRAME.");
}
}
else if(!(node instanceof FunctionCallCP)) //general case
{
// generate temporary filename and a variable name to hold the
// output produced by "rootNode"
oparams.setFile_name(getNextUniqueFilename());
oparams.setLabel(getNextUniqueVarname(node.getDataType()));
// generate an instruction that creates a symbol table entry for the new variable
//String createInst = prepareVariableInstruction("createvar", node);
//out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst));
int rpb = (int) oparams.getRowsInBlock();
int cpb = (int) oparams.getColsInBlock();
Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction(
oparams.getLabel(),
oparams.getFile_name(),
true, node.getDataType(),
OutputInfo.outputInfoToString(getOutputInfo(node, false)),
new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()),
oparams.getUpdateType()
);
createvarInst.setLocation(node);
out.addPreInstruction(createvarInst);
// temp file as well as the variable has to be deleted at the end
Instruction currInstr = VariableCPInstruction.prepareRemoveInstruction(oparams.getLabel());
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
// finally, add the generated filename and variable name to the list of outputs
out.setFileName(oparams.getFile_name());
out.setVarName(oparams.getLabel());
}
else {
// If the function call is set with output lops (e.g., multi return builtin),
// generate a createvar instruction for each function output
FunctionCallCP fcall = (FunctionCallCP) node;
if ( fcall.getFunctionOutputs() != null ) {
for( Lop fnOut: fcall.getFunctionOutputs()) {
OutputParameters fnOutParams = fnOut.getOutputParameters();
//OutputInfo oinfo = getOutputInfo((N)fnOut, false);
Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction(
fnOutParams.getLabel(),
getFilePath() + fnOutParams.getLabel(),
true, fnOut.getDataType(),
OutputInfo.outputInfoToString(getOutputInfo(fnOut, false)),
new MatrixCharacteristics(fnOutParams.getNumRows(), fnOutParams.getNumCols(), (int)fnOutParams.getRowsInBlock(), (int)fnOutParams.getColsInBlock(), fnOutParams.getNnz()),
oparams.getUpdateType()
);
if (node._beginLine != 0)
createvarInst.setLocation(node);
else
createvarInst.setLocation(fnOut);
out.addPreInstruction(createvarInst);
}
}
}
}
// rootNode is of type Data
else {
if ( node.getDataType() == DataType.SCALAR ) {
// generate assignment operations for final and transient writes
if ( oparams.getFile_name() == null && !(node instanceof Data && ((Data)node).isPersistentWrite()) ) {
String io_inst = prepareAssignVarInstruction(node.getInputs().get(0), node);
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(io_inst);
if (node._beginLine != 0)
currInstr.setLocation(node);
else if ( !node.getInputs().isEmpty() )
currInstr.setLocation(node.getInputs().get(0));
out.addLastInstruction(currInstr);
}
else {
//CP PERSISTENT WRITE SCALARS
Lop fname = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME);
String io_inst = node.getInstructions(node.getInputs().get(0).getOutputParameters().getLabel(), fname.getOutputParameters().getLabel());
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(io_inst);
if (node._beginLine != 0)
currInstr.setLocation(node);
else if ( !node.getInputs().isEmpty() )
currInstr.setLocation(node.getInputs().get(0));
out.addLastInstruction(currInstr);
}
}
else {
if ( ((Data)node).isTransient() ) {
if ( et == ExecType.CP ) {
// If transient matrix write is in CP then its input MUST be executed in CP as well.
// get variable and filename associated with the input
String inputFileName = node.getInputs().get(0).getOutputParameters().getFile_name();
String inputVarName = node.getInputs().get(0).getOutputParameters().getLabel();
String constVarName = oparams.getLabel();
String constFileName = inputFileName + constVarName;
/*
* Symbol Table state must change as follows:
*
* FROM:
* mVar1 -> temp21
*
* TO:
* mVar1 -> temp21
* tVarH -> temp21
*/
Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(inputVarName, constVarName);
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
out.setFileName(constFileName);
}
else {
if(copyTWrite) {
Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(node.getInputs().get(0).getOutputParameters().getLabel(), oparams.getLabel());
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
return out;
}
/*
* Since the "rootNode" is a transient data node, we first need to generate a
* temporary filename as well as a variable name to hold the <i>immediate</i>
* output produced by "rootNode". These generated HDFS filename and the
* variable name must be changed at the end of an iteration/program block
* so that the subsequent iteration/program block can correctly access the
* generated data. Therefore, we need to distinguish between the following:
*
* 1) Temporary file name & variable name: They hold the immediate output
* produced by "rootNode". Both names are generated below.
*
* 2) Constant file name & variable name: They are constant across iterations.
* Variable name is given by rootNode's label that is created in the upper layers.
* File name is generated by concatenating "temporary file name" and "constant variable name".
*
* Temporary files must be moved to constant files at the end of the iteration/program block.
*/
// generate temporary filename & var name
String tempVarName = oparams.getLabel() + "temp";
String tempFileName = getNextUniqueFilename();
//String createInst = prepareVariableInstruction("createvar", tempVarName, node.getDataType(), node.getValueType(), tempFileName, oparams, out.getOutInfo());
//out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst));
int rpb = (int) oparams.getRowsInBlock();
int cpb = (int) oparams.getColsInBlock();
Instruction createvarInst = VariableCPInstruction.prepareCreateVariableInstruction(
tempVarName,
tempFileName,
true, node.getDataType(),
OutputInfo.outputInfoToString(out.getOutInfo()),
new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()),
oparams.getUpdateType()
);
createvarInst.setLocation(node);
out.addPreInstruction(createvarInst);
String constVarName = oparams.getLabel();
String constFileName = tempFileName + constVarName;
oparams.setFile_name(getFilePath() + constFileName);
/*
* Since this is a node that denotes a transient read/write, we need to make sure
* that the data computed for a given variable in a given iteration is passed on
* to the next iteration. This is done by generating miscellaneous instructions
* that gets executed at the end of the program block.
*
* The state of the symbol table must change
*
* FROM:
* tVarA -> temp21tVarA (old copy of temp21)
* tVarAtemp -> temp21 (new copy that should override the old copy)
*
* TO:
* tVarA -> temp21tVarA
*/
// rename the temp variable to constant variable (e.g., cpvar tVarAtemp tVarA)
/*Instruction currInstr = VariableCPInstruction.prepareCopyInstruction(tempVarName, constVarName);
if(DMLScript.ENABLE_DEBUG_MODE) {
currInstr.setLineNum(node._beginLine);
}
out.addLastInstruction(currInstr);
Instruction tempInstr = VariableCPInstruction.prepareRemoveInstruction(tempVarName);
if(DMLScript.ENABLE_DEBUG_MODE) {
tempInstr.setLineNum(node._beginLine);
}
out.addLastInstruction(tempInstr);*/
// Generate a single mvvar instruction (e.g., mvvar tempA A)
// instead of two instructions "cpvar tempA A" and "rmvar tempA"
Instruction currInstr = VariableCPInstruction.prepareMoveInstruction(tempVarName, constVarName);
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
// finally, add the temporary filename and variable name to the list of outputs
out.setFileName(tempFileName);
out.setVarName(tempVarName);
}
}
// rootNode is not a transient write. It is a persistent write.
else {
if(et == ExecType.MR) { //MR PERSISTENT WRITE
// create a variable to hold the result produced by this "rootNode"
oparams.setLabel("pVar" + var_index.getNextID() );
//String createInst = prepareVariableInstruction("createvar", node);
//out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst));
int rpb = (int) oparams.getRowsInBlock();
int cpb = (int) oparams.getColsInBlock();
Lop fnameLop = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME);
String fnameStr = (fnameLop instanceof Data && ((Data)fnameLop).isLiteral()) ?
fnameLop.getOutputParameters().getLabel()
: Lop.VARIABLE_NAME_PLACEHOLDER + fnameLop.getOutputParameters().getLabel() + Lop.VARIABLE_NAME_PLACEHOLDER;
Instruction createvarInst;
// for MatrixMarket format, the createvar will output the result to a temporary file in textcell format;
// the CP write instruction (post instruction) after the MR instruction will merge the result into a single-part
// MM format file on HDFS.
if (oparams.getFormat() == Format.CSV) {
String tempFileName = getNextUniqueFilename();
String createInst = node.getInstructions(tempFileName);
createvarInst= CPInstructionParser.parseSingleInstruction(createInst);
//NOTE: no instruction patching because final write from cp instruction
String writeInst = node.getInstructions(oparams.getLabel(), fnameLop.getOutputParameters().getLabel() );
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(writeInst);
currInstr.setLocation(node);
out.addPostInstruction(currInstr);
// remove the variable
CPInstruction tempInstr = CPInstructionParser.parseSingleInstruction(
"CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR
+ oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR
+ "true" + Lop.VALUETYPE_PREFIX + "BOOLEAN");
tempInstr.setLocation(node);
out.addLastInstruction(tempInstr);
}
else if (oparams.getFormat() == Format.MM ) {
createvarInst= VariableCPInstruction.prepareCreateVariableInstruction(
oparams.getLabel(),
getNextUniqueFilename(),
false, node.getDataType(),
OutputInfo.outputInfoToString(getOutputInfo(node, false)),
new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()),
oparams.getUpdateType()
);
//NOTE: no instruction patching because final write from cp instruction
String writeInst = node.getInstructions(oparams.getLabel(), fnameLop.getOutputParameters().getLabel());
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(writeInst);
currInstr.setLocation(node);
out.addPostInstruction(currInstr);
// remove the variable
CPInstruction tempInstr = CPInstructionParser.parseSingleInstruction(
"CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR
+ oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR
+ "true" + Lop.VALUETYPE_PREFIX + "BOOLEAN");
tempInstr.setLocation(node);
out.addLastInstruction(tempInstr);
}
else {
createvarInst= VariableCPInstruction.prepareCreateVariableInstruction(
oparams.getLabel(),
fnameStr,
false, node.getDataType(),
OutputInfo.outputInfoToString(getOutputInfo(node, false)),
new MatrixCharacteristics(oparams.getNumRows(), oparams.getNumCols(), rpb, cpb, oparams.getNnz()),
oparams.getUpdateType()
);
// remove the variable
CPInstruction currInstr = CPInstructionParser.parseSingleInstruction(
"CP" + Lop.OPERAND_DELIMITOR + "rmfilevar" + Lop.OPERAND_DELIMITOR
+ oparams.getLabel() + Lop.VALUETYPE_PREFIX + Expression.ValueType.UNKNOWN + Lop.OPERAND_DELIMITOR
+ "false" + Lop.VALUETYPE_PREFIX + "BOOLEAN");
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
}
createvarInst.setLocation(node);
out.addPreInstruction(createvarInst);
// finally, add the filename and variable name to the list of outputs
out.setFileName(oparams.getFile_name());
out.setVarName(oparams.getLabel());
}
else { //CP PERSISTENT WRITE
// generate a write instruction that writes matrix to HDFS
Lop fname = ((Data)node).getNamedInputLop(DataExpression.IO_FILENAME);
Instruction currInstr = null;
Lop inputLop = node.getInputs().get(0);
// Case of a transient read feeding into only one output persistent binaryblock write
// Move the temporary file on HDFS to the required persistent location, instead of copying.
if (inputLop.getExecLocation() == ExecLocation.Data
&& inputLop.getOutputs().size() == 1
&& ((Data)inputLop).isTransient()
&& ((Data)inputLop).getOutputParameters().isBlocked()
&& node.getOutputParameters().isBlocked() ) {
// transient read feeding into persistent write in blocked representation
// simply, move the file
//prepare filename (literal or variable in order to support dynamic write)
String fnameStr = (fname instanceof Data && ((Data)fname).isLiteral()) ?
fname.getOutputParameters().getLabel()
: Lop.VARIABLE_NAME_PLACEHOLDER + fname.getOutputParameters().getLabel() + Lop.VARIABLE_NAME_PLACEHOLDER;
currInstr = (CPInstruction) VariableCPInstruction.prepareMoveInstruction(
inputLop.getOutputParameters().getLabel(),
fnameStr, "binaryblock" );
}
else {
String io_inst = node.getInstructions(
node.getInputs().get(0).getOutputParameters().getLabel(),
fname.getOutputParameters().getLabel());
if(node.getExecType() == ExecType.SPARK)
// This will throw an exception if the exectype of hop is set incorrectly
// Note: the exec type and exec location of lops need to be set to SPARK and ControlProgram respectively
currInstr = SPInstructionParser.parseSingleInstruction(io_inst);
else
currInstr = CPInstructionParser.parseSingleInstruction(io_inst);
}
if ( !node.getInputs().isEmpty() && node.getInputs().get(0)._beginLine != 0)
currInstr.setLocation(node.getInputs().get(0));
else
currInstr.setLocation(node);
out.addLastInstruction(currInstr);
}
}
}
}
return out;
}
/**
* Method to generate MapReduce job instructions from a given set of nodes.
*
* @param execNodes list of exec nodes
* @param inst list of instructions
* @param writeinst list of write instructions
* @param deleteinst list of delete instructions
* @param rmvarinst list of rmvar instructions
* @param jt job type
* @throws LopsException if LopsException occurs
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private void generateMapReduceInstructions(ArrayList<Lop> execNodes,
ArrayList<Instruction> inst, ArrayList<Instruction> writeinst, ArrayList<Instruction> deleteinst, ArrayList<Instruction> rmvarinst,
JobType jt) throws LopsException, DMLRuntimeException
{
ArrayList<Byte> resultIndices = new ArrayList<Byte>();
ArrayList<String> inputs = new ArrayList<String>();
ArrayList<String> outputs = new ArrayList<String>();
ArrayList<InputInfo> inputInfos = new ArrayList<InputInfo>();
ArrayList<OutputInfo> outputInfos = new ArrayList<OutputInfo>();
ArrayList<Long> numRows = new ArrayList<Long>();
ArrayList<Long> numCols = new ArrayList<Long>();
ArrayList<Long> numRowsPerBlock = new ArrayList<Long>();
ArrayList<Long> numColsPerBlock = new ArrayList<Long>();
ArrayList<String> mapperInstructions = new ArrayList<String>();
ArrayList<String> randInstructions = new ArrayList<String>();
ArrayList<String> recordReaderInstructions = new ArrayList<String>();
int numReducers = 0;
int replication = 1;
ArrayList<String> inputLabels = new ArrayList<String>();
ArrayList<String> outputLabels = new ArrayList<String>();
ArrayList<Instruction> renameInstructions = new ArrayList<Instruction>();
ArrayList<Instruction> variableInstructions = new ArrayList<Instruction>();
ArrayList<Instruction> postInstructions = new ArrayList<Instruction>();
ArrayList<Integer> MRJobLineNumbers = null;
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers = new ArrayList<Integer>();
}
ArrayList<Lop> inputLops = new ArrayList<Lop>();
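// when set to true, all job outputs are forced into cell (non-blocked) representation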
boolean cellModeOverride = false;
/* Find the nodes that produce an output */
ArrayList<Lop> rootNodes = new ArrayList<Lop>();
getOutputNodes(execNodes, rootNodes, jt);
if( LOG.isTraceEnabled() )
LOG.trace("# of root nodes = " + rootNodes.size());
/* Remove transient writes that are simple copy of transient reads */
if (jt == JobType.GMR || jt == JobType.GMRCELL) {
ArrayList<Lop> markedNodes = new ArrayList<Lop>();
// only keep data nodes that are results of some computation.
for ( Lop rnode : rootNodes ) {
if (rnode.getExecLocation() == ExecLocation.Data
&& ((Data) rnode).isTransient()
&& ((Data) rnode).getOperationType() == OperationTypes.WRITE
&& ((Data) rnode).getDataType() == DataType.MATRIX) {
// no computation, just a copy
if (rnode.getInputs().get(0).getExecLocation() == ExecLocation.Data
&& ((Data) rnode.getInputs().get(0)).isTransient()
&& rnode.getOutputParameters().getLabel().equals(
rnode.getInputs().get(0).getOutputParameters().getLabel()))
{
markedNodes.add(rnode);
}
}
}
// delete marked nodes
rootNodes.removeAll(markedNodes);
markedNodes.clear();
if ( rootNodes.isEmpty() )
return;
}
// structure that maps node to their indices that will be used in the instructions
HashMap<Lop, Integer> nodeIndexMapping = new HashMap<Lop, Integer>();
/* Determine all input data files */
for ( Lop rnode : rootNodes ) {
getInputPathsAndParameters(rnode, execNodes, inputs, inputInfos, numRows, numCols,
numRowsPerBlock, numColsPerBlock, nodeIndexMapping, inputLabels, inputLops, MRJobLineNumbers);
}
// In case of RAND job, instructions are defined in the input file
if (jt == JobType.DATAGEN)
randInstructions = inputs;
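// start_index is a single-element array used as a mutable counter shared with the
// recursive helper methods below; it holds the next unused input/output index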
int[] start_index = new int[1];
start_index[0] = inputs.size();
/* Get RecordReader Instructions */
// currently, recordreader instructions are allowed only in GMR jobs
if (jt == JobType.GMR || jt == JobType.GMRCELL) {
for ( Lop rnode : rootNodes ) {
getRecordReaderInstructions(rnode, execNodes, inputs, recordReaderInstructions,
nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers);
if ( recordReaderInstructions.size() > 1 )
throw new LopsException("MapReduce job can only have a single recordreader instruction: " + recordReaderInstructions.toString());
}
}
/*
* Handle cases when job's output is FORCED to be cell format.
* - If there exists a cell input, then the output cannot be blocked.
* The only exception is when jobType = REBLOCK/CSVREBLOCK (for obvious reasons)
* or when jobType = RAND, since RandJob takes a special input file
* whose format should not be used to dictate the output format.
* - If there exists a recordReader instruction
* - If jobtype = GroupedAgg. This job can only run in cell mode.
*/
//
if ( jt != JobType.REBLOCK && jt != JobType.CSV_REBLOCK && jt != JobType.DATAGEN && jt != JobType.TRANSFORM) {
for (int i=0; i < inputInfos.size(); i++)
if ( inputInfos.get(i) == InputInfo.BinaryCellInputInfo || inputInfos.get(i) == InputInfo.TextCellInputInfo )
cellModeOverride = true;
}
if ( !recordReaderInstructions.isEmpty() || jt == JobType.GROUPED_AGG )
cellModeOverride = true;
/* Get Mapper Instructions */
for (int i = 0; i < rootNodes.size(); i++) {
getMapperInstructions(rootNodes.get(i), execNodes, inputs,
mapperInstructions, nodeIndexMapping, start_index,
inputLabels, inputLops, MRJobLineNumbers);
}
if (LOG.isTraceEnabled()) {
LOG.trace(" Input strings: " + inputs.toString());
if (jt == JobType.DATAGEN)
LOG.trace(" Rand instructions: " + getCSVString(randInstructions));
if (jt == JobType.GMR)
LOG.trace(" RecordReader instructions: " + getCSVString(recordReaderInstructions));
LOG.trace(" Mapper instructions: " + getCSVString(mapperInstructions));
}
/* Get Shuffle and Reducer Instructions */
ArrayList<String> shuffleInstructions = new ArrayList<String>();
ArrayList<String> aggInstructionsReducer = new ArrayList<String>();
ArrayList<String> otherInstructionsReducer = new ArrayList<String>();
for( Lop rn : rootNodes ) {
int resultIndex = getAggAndOtherInstructions(
rn, execNodes, shuffleInstructions, aggInstructionsReducer,
otherInstructionsReducer, nodeIndexMapping, start_index,
inputLabels, inputLops, MRJobLineNumbers);
if ( resultIndex == -1)
throw new LopsException("Unexpected error in piggybacking!");
if ( rn.getExecLocation() == ExecLocation.Data
&& ((Data)rn).getOperationType() == Data.OperationTypes.WRITE && ((Data)rn).isTransient()
&& rootNodes.contains(rn.getInputs().get(0))
) {
// Both rn (a transient write) and its input are root nodes.
// Instead of creating two copies of the data, simply generate a cpvar instruction
NodeOutput out = setupNodeOutputs(rn, ExecType.MR, cellModeOverride, true);
writeinst.addAll(out.getLastInstructions());
}
else {
resultIndices.add(Byte.valueOf((byte)resultIndex));
// setup output filenames and outputInfos and generate related instructions
NodeOutput out = setupNodeOutputs(rn, ExecType.MR, cellModeOverride, false);
outputLabels.add(out.getVarName());
outputs.add(out.getFileName());
outputInfos.add(out.getOutInfo());
if (LOG.isTraceEnabled()) {
LOG.trace(" Output Info: " + out.getFileName() + ";" + OutputInfo.outputInfoToString(out.getOutInfo()) + ";" + out.getVarName());
}
renameInstructions.addAll(out.getLastInstructions());
variableInstructions.addAll(out.getPreInstructions());
postInstructions.addAll(out.getPostInstructions());
}
}
/* Determine if the output dimensions are known */
byte[] resultIndicesByte = new byte[resultIndices.size()];
for (int i = 0; i < resultIndicesByte.length; i++) {
resultIndicesByte[i] = resultIndices.get(i).byteValue();
}
if (LOG.isTraceEnabled()) {
LOG.trace(" Shuffle Instructions: " + getCSVString(shuffleInstructions));
LOG.trace(" Aggregate Instructions: " + getCSVString(aggInstructionsReducer));
LOG.trace(" Other instructions =" + getCSVString(otherInstructionsReducer));
LOG.trace(" Output strings: " + outputs.toString());
LOG.trace(" ResultIndices = " + resultIndices.toString());
}
/* Prepare the MapReduce job instruction */
MRJobInstruction mr = new MRJobInstruction(jt);
// check if this is a map-only job. If not, set the number of reducers
if ( !shuffleInstructions.isEmpty() || !aggInstructionsReducer.isEmpty() || !otherInstructionsReducer.isEmpty() )
numReducers = total_reducers;
// set inputs, outputs, and other properties for the job
mr.setInputOutputLabels(inputLabels.toArray(new String[0]), outputLabels.toArray(new String[0]));
mr.setOutputs(resultIndicesByte);
mr.setDimsUnknownFilePrefix(getFilePath());
mr.setNumberOfReducers(numReducers);
mr.setReplication(replication);
// set instructions for recordReader and mapper
mr.setRecordReaderInstructions(getCSVString(recordReaderInstructions));
mr.setMapperInstructions(getCSVString(mapperInstructions));
//compute and set mapper memory requirements (for consistency of runtime piggybacking)
if( jt == JobType.GMR ) {
double mem = 0;
for( Lop n : execNodes )
mem += computeFootprintInMapper(n);
mr.setMemoryRequirements(mem);
}
if ( jt == JobType.DATAGEN )
mr.setRandInstructions(getCSVString(randInstructions));
// set shuffle instructions
mr.setShuffleInstructions(getCSVString(shuffleInstructions));
// set reducer instruction
mr.setAggregateInstructionsInReducer(getCSVString(aggInstructionsReducer));
mr.setOtherInstructionsInReducer(getCSVString(otherInstructionsReducer));
if(DMLScript.ENABLE_DEBUG_MODE) {
// set line number information for each MR instruction
mr.setMRJobInstructionsLineNumbers(MRJobLineNumbers);
}
/* Add the prepared instructions to output set */
inst.addAll(variableInstructions);
inst.add(mr);
inst.addAll(postInstructions);
deleteinst.addAll(renameInstructions);
for (Lop l : inputLops) {
if(DMLScript.ENABLE_DEBUG_MODE) {
processConsumers(l, rmvarinst, deleteinst, l);
}
else {
processConsumers(l, rmvarinst, deleteinst, null);
}
}
}
/**
* converts an array list into a Lop.INSTRUCTION_DELIMITOR separated string
*
* @param inputStrings list of input strings
* @return Lop.INSTRUCTION_DELIMITOR separated string
*/
private static String getCSVString(ArrayList<String> inputStrings) {
StringBuilder sb = new StringBuilder();
for ( String str : inputStrings ) {
if( str != null ) {
if( sb.length()>0 )
sb.append(Lop.INSTRUCTION_DELIMITOR);
sb.append( str );
}
}
return sb.toString();
}
/**
* Method to populate aggregate and other instructions in reducer.
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @param shuffleInstructions list of shuffle instructions
* @param aggInstructionsReducer list of aggregate instructions in the reducer
* @param otherInstructionsReducer list of other instructions in the reducer
* @param nodeIndexMapping node index mapping
* @param start_index start index
* @param inputLabels list of input labels
* @param inputLops list of input lops
* @param MRJobLineNumbers MR job line numbers
* @return -1 if problem
* @throws LopsException if LopsException occurs
*/
private int getAggAndOtherInstructions(Lop node, ArrayList<Lop> execNodes,
ArrayList<String> shuffleInstructions,
ArrayList<String> aggInstructionsReducer,
ArrayList<String> otherInstructionsReducer,
HashMap<Lop, Integer> nodeIndexMapping, int[] start_index,
ArrayList<String> inputLabels, ArrayList<Lop> inputLops,
ArrayList<Integer> MRJobLineNumbers) throws LopsException
{
int ret_val = -1;
if (nodeIndexMapping.containsKey(node))
return nodeIndexMapping.get(node);
// if not an input source and not in exec nodes, return.
if (!execNodes.contains(node))
return ret_val;
ArrayList<Integer> inputIndices = new ArrayList<Integer>();
// recurse
// For WRITE, the first input is the real data input (the remaining inputs are
// parameters of the WRITE operation), so we only need to take care of the
// first element.
if (node.getType() == Lop.Type.Data && ((Data)node).getOperationType() == Data.OperationTypes.WRITE) {
ret_val = getAggAndOtherInstructions(node.getInputs().get(0),
execNodes, shuffleInstructions, aggInstructionsReducer,
otherInstructionsReducer, nodeIndexMapping, start_index,
inputLabels, inputLops, MRJobLineNumbers);
inputIndices.add(ret_val);
}
else {
for ( Lop cnode : node.getInputs() ) {
ret_val = getAggAndOtherInstructions(cnode,
execNodes, shuffleInstructions, aggInstructionsReducer,
otherInstructionsReducer, nodeIndexMapping, start_index,
inputLabels, inputLops, MRJobLineNumbers);
inputIndices.add(ret_val);
}
}
if (node.getExecLocation() == ExecLocation.Data ) {
if ( ((Data)node).getFileFormatType() == FileFormatTypes.CSV
&& !(node.getInputs().get(0) instanceof ParameterizedBuiltin
&& ((ParameterizedBuiltin)node.getInputs().get(0)).getOp() == org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM)) {
// Generate write instruction, which goes into CSV_WRITE Job
int output_index = start_index[0];
shuffleInstructions.add(node.getInstructions(inputIndices.get(0), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
start_index[0]++;
return output_index;
}
else {
return ret_val;
}
}
if (node.getExecLocation() == ExecLocation.MapAndReduce) {
/* Generate Shuffle Instruction for "node", and return the index associated with produced output */
boolean instGenerated = true;
int output_index = start_index[0];
switch(node.getType()) {
/* Lop types that take a single input */
case ReBlock:
case CSVReBlock:
case SortKeys:
case CentralMoment:
case CoVariance:
case GroupedAgg:
case DataPartition:
shuffleInstructions.add(node.getInstructions(inputIndices.get(0), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
break;
case ParameterizedBuiltin:
if( ((ParameterizedBuiltin)node).getOp() == org.apache.sysml.lops.ParameterizedBuiltin.OperationTypes.TRANSFORM ) {
shuffleInstructions.add(node.getInstructions(output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
}
break;
/* Lop types that take two inputs */
case MMCJ:
case MMRJ:
case CombineBinary:
shuffleInstructions.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
break;
/* Lop types that take three inputs */
case CombineTernary:
shuffleInstructions.add(node.getInstructions(inputIndices
.get(0), inputIndices.get(1), inputIndices.get(2), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
break;
default:
instGenerated = false;
break;
}
if ( instGenerated ) {
nodeIndexMapping.put(node, output_index);
start_index[0]++;
return output_index;
}
else {
return inputIndices.get(0);
}
}
/* Get instructions for aligned reduce and other lops below the reduce. */
if (node.getExecLocation() == ExecLocation.Reduce
|| node.getExecLocation() == ExecLocation.MapOrReduce
|| hasChildNode(node, execNodes, ExecLocation.MapAndReduce)) {
if (inputIndices.size() == 1) {
int output_index = start_index[0];
start_index[0]++;
if (node.getType() == Type.Aggregate) {
aggInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
}
else {
otherInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), output_index));
}
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
return output_index;
} else if (inputIndices.size() == 2) {
int output_index = start_index[0];
start_index[0]++;
otherInstructionsReducer.add(node.getInstructions(inputIndices
.get(0), inputIndices.get(1), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
// populate list of input labels.
// only Unary lops can contribute to labels
if (node instanceof Unary && node.getInputs().size() > 1) {
int index = 0;
for (int i = 0; i < node.getInputs().size(); i++) {
if (node.getInputs().get(i).getDataType() == DataType.SCALAR) {
index = i;
break;
}
}
if (node.getInputs().get(index).getExecLocation() == ExecLocation.Data
&& !((Data) (node.getInputs().get(index))).isLiteral()) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
if (node.getInputs().get(index).getExecLocation() != ExecLocation.Data) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
}
return output_index;
} else if (inputIndices.size() == 3 || node.getType() == Type.Ternary) {
int output_index = start_index[0];
start_index[0]++;
if (node.getType() == Type.Ternary ) {
// in case of CTABLE_TRANSFORM_SCALAR_WEIGHT: inputIndices.get(2) would be -1
otherInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), inputIndices.get(1),
inputIndices.get(2), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
}
else if( node.getType() == Type.ParameterizedBuiltin ){
otherInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), inputIndices.get(1),
inputIndices.get(2), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
}
else
{
otherInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), inputIndices.get(1),
inputIndices.get(2), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
return output_index;
}
return output_index;
}
else if (inputIndices.size() == 4) {
int output_index = start_index[0];
start_index[0]++;
otherInstructionsReducer.add(node.getInstructions(
inputIndices.get(0), inputIndices.get(1),
inputIndices.get(2), inputIndices.get(3), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
nodeIndexMapping.put(node, output_index);
return output_index;
}
else
throw new LopsException("Invalid number of inputs to a lop: "
+ inputIndices.size());
}
return -1;
}
/**
* Method to get record reader instructions for a MR job.
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @param inputStrings list of input strings
* @param recordReaderInstructions list of record reader instructions
* @param nodeIndexMapping node index mapping
* @param start_index start index
* @param inputLabels list of input labels
* @param inputLops list of input lops
* @param MRJobLineNumbers MR job line numbers
* @return -1 if problem
* @throws LopsException if LopsException occurs
*/
private static int getRecordReaderInstructions(Lop node, ArrayList<Lop> execNodes,
ArrayList<String> inputStrings,
ArrayList<String> recordReaderInstructions,
HashMap<Lop, Integer> nodeIndexMapping, int[] start_index,
ArrayList<String> inputLabels, ArrayList<Lop> inputLops,
ArrayList<Integer> MRJobLineNumbers) throws LopsException
{
// if input source, return index
if (nodeIndexMapping.containsKey(node))
return nodeIndexMapping.get(node);
// not input source and not in exec nodes, then return.
if (!execNodes.contains(node))
return -1;
ArrayList<Integer> inputIndices = new ArrayList<Integer>();
int max_input_index = -1;
//N child_for_max_input_index = null;
// get mapper instructions
for (int i = 0; i < node.getInputs().size(); i++) {
// recurse
Lop childNode = node.getInputs().get(i);
int ret_val = getRecordReaderInstructions(childNode, execNodes,
inputStrings, recordReaderInstructions, nodeIndexMapping,
start_index, inputLabels, inputLops, MRJobLineNumbers);
inputIndices.add(ret_val);
if (ret_val > max_input_index) {
max_input_index = ret_val;
//child_for_max_input_index = childNode;
}
}
// only lops with execLocation as RecordReader can contribute
// instructions
if ((node.getExecLocation() == ExecLocation.RecordReader)) {
int output_index = max_input_index;
// cannot reuse index if this is true
// need to add better indexing schemes
output_index = start_index[0];
start_index[0]++;
nodeIndexMapping.put(node, output_index);
// populate list of input labels.
// only RangePick lop can contribute to labels
if (node.getType() == Type.PickValues) {
PickByCount pbc = (PickByCount) node;
if (pbc.getOperationType() == PickByCount.OperationTypes.RANGEPICK) {
int scalarIndex = 1; // always the second input is a scalar
// if data lop not a literal -- add label
if (node.getInputs().get(scalarIndex).getExecLocation() == ExecLocation.Data
&& !((Data) (node.getInputs().get(scalarIndex))).isLiteral()) {
inputLabels.add(node.getInputs().get(scalarIndex).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(scalarIndex));
}
// if not data lop, then this is an intermediate variable.
if (node.getInputs().get(scalarIndex).getExecLocation() != ExecLocation.Data) {
inputLabels.add(node.getInputs().get(scalarIndex).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(scalarIndex));
}
}
}
// get recordreader instruction.
if (node.getInputs().size() == 2) {
recordReaderInstructions.add(node.getInstructions(inputIndices
.get(0), inputIndices.get(1), output_index));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
}
else
throw new LopsException(
"Unexpected number of inputs while generating a RecordReader Instruction");
return output_index;
}
return -1;
}
/**
* Method to get mapper instructions for a MR job.
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @param inputStrings list of input strings
* @param instructionsInMapper list of instructions in mapper
* @param nodeIndexMapping node index mapping
* @param start_index starting index
* @param inputLabels input labels
* @param inputLops list of input lops
* @param MRJobLineNumbers MR job line numbers
* @return -1 if problem
* @throws LopsException if LopsException occurs
*/
private int getMapperInstructions(Lop node, ArrayList<Lop> execNodes,
ArrayList<String> inputStrings,
ArrayList<String> instructionsInMapper,
HashMap<Lop, Integer> nodeIndexMapping, int[] start_index,
ArrayList<String> inputLabels, ArrayList<Lop> inputLops,
ArrayList<Integer> MRJobLineNumbers) throws LopsException
{
// if input source, return index
if (nodeIndexMapping.containsKey(node))
return nodeIndexMapping.get(node);
// not input source and not in exec nodes, then return.
if (!execNodes.contains(node))
return -1;
ArrayList<Integer> inputIndices = new ArrayList<Integer>();
int max_input_index = -1;
// get mapper instructions
for( Lop childNode : node.getInputs()) {
int ret_val = getMapperInstructions(childNode, execNodes,
inputStrings, instructionsInMapper, nodeIndexMapping,
start_index, inputLabels, inputLops, MRJobLineNumbers);
inputIndices.add(ret_val);
if (ret_val > max_input_index) {
max_input_index = ret_val;
}
}
// only map and map-or-reduce without a reduce child node can contribute
// to mapper instructions.
if ((node.getExecLocation() == ExecLocation.Map || node
.getExecLocation() == ExecLocation.MapOrReduce)
&& !hasChildNode(node, execNodes, ExecLocation.MapAndReduce)
&& !hasChildNode(node, execNodes, ExecLocation.Reduce)
) {
int output_index = max_input_index;
// cannot reuse index if this is true
// need to add better indexing schemes
// if (child_for_max_input_index.getOutputs().size() > 1) {
output_index = start_index[0];
start_index[0]++;
// }
nodeIndexMapping.put(node, output_index);
// populate list of input labels.
// only Unary lops can contribute to labels
if (node instanceof Unary && node.getInputs().size() > 1) {
// Following code must be executed only for those Unary
// operators that have more than one input
// It should not be executed for "true" unary operators like
// cos(A).
int index = 0;
for (int i1 = 0; i1 < node.getInputs().size(); i1++) {
if (node.getInputs().get(i1).getDataType() == DataType.SCALAR) {
index = i1;
break;
}
}
// if data lop not a literal -- add label
if (node.getInputs().get(index).getExecLocation() == ExecLocation.Data
&& !((Data) (node.getInputs().get(index))).isLiteral()) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
// if not data lop, then this is an intermediate variable.
if (node.getInputs().get(index).getExecLocation() != ExecLocation.Data) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
}
// get mapper instruction.
if (node.getInputs().size() == 1)
instructionsInMapper.add(node.getInstructions(inputIndices
.get(0), output_index));
else if (node.getInputs().size() == 2) {
instructionsInMapper.add(node.getInstructions(inputIndices
.get(0), inputIndices.get(1), output_index));
}
else if (node.getInputs().size() == 3)
instructionsInMapper.add(node.getInstructions(inputIndices.get(0),
inputIndices.get(1),
inputIndices.get(2),
output_index));
else if ( node.getInputs().size() == 4) {
// Example: Reshape
instructionsInMapper.add(node.getInstructions(
inputIndices.get(0),
inputIndices.get(1),
inputIndices.get(2),
inputIndices.get(3),
output_index ));
}
else if ( node.getInputs().size() == 5) {
// Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u]
instructionsInMapper.add(node.getInstructions(
inputIndices.get(0),
inputIndices.get(1),
inputIndices.get(2),
inputIndices.get(3),
inputIndices.get(4),
output_index ));
}
else if ( node.getInputs().size() == 7 ) {
// Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u] = B
instructionsInMapper.add(node.getInstructions(
inputIndices.get(0),
inputIndices.get(1),
inputIndices.get(2),
inputIndices.get(3),
inputIndices.get(4),
inputIndices.get(5),
inputIndices.get(6),
output_index ));
}
else
throw new LopsException("Node with " + node.getInputs().size() + " inputs is not supported in dag.java.");
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
return output_index;
}
return -1;
}
// Method to populate inputs and also populates node index mapping.
private static void getInputPathsAndParameters(Lop node, ArrayList<Lop> execNodes,
ArrayList<String> inputStrings, ArrayList<InputInfo> inputInfos,
ArrayList<Long> numRows, ArrayList<Long> numCols,
ArrayList<Long> numRowsPerBlock, ArrayList<Long> numColsPerBlock,
HashMap<Lop, Integer> nodeIndexMapping, ArrayList<String> inputLabels,
ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers)
throws LopsException {
// treat rand as an input.
if (node.getType() == Type.DataGen && execNodes.contains(node)
&& !nodeIndexMapping.containsKey(node)) {
numRows.add(node.getOutputParameters().getNumRows());
numCols.add(node.getOutputParameters().getNumCols());
numRowsPerBlock.add(node.getOutputParameters().getRowsInBlock());
numColsPerBlock.add(node.getOutputParameters().getColsInBlock());
inputStrings.add(node.getInstructions(inputStrings.size(), inputStrings.size()));
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
inputInfos.add(InputInfo.TextCellInputInfo);
nodeIndexMapping.put(node, inputStrings.size() - 1);
return;
}
// get input file names
if (!execNodes.contains(node)
&& !nodeIndexMapping.containsKey(node)
&& !(node.getExecLocation() == ExecLocation.Data)
&& (!(node.getExecLocation() == ExecLocation.ControlProgram && node
.getDataType() == DataType.SCALAR))
|| (!execNodes.contains(node)
&& node.getExecLocation() == ExecLocation.Data
&& ((Data) node).getOperationType() == Data.OperationTypes.READ
&& ((Data) node).getDataType() != DataType.SCALAR && !nodeIndexMapping
.containsKey(node))) {
if (node.getOutputParameters().getFile_name() != null) {
inputStrings.add(node.getOutputParameters().getFile_name());
} else {
// use label name
inputStrings.add(Lop.VARIABLE_NAME_PLACEHOLDER + node.getOutputParameters().getLabel()
+ Lop.VARIABLE_NAME_PLACEHOLDER);
}
inputLabels.add(node.getOutputParameters().getLabel());
inputLops.add(node);
numRows.add(node.getOutputParameters().getNumRows());
numCols.add(node.getOutputParameters().getNumCols());
numRowsPerBlock.add(node.getOutputParameters().getRowsInBlock());
numColsPerBlock.add(node.getOutputParameters().getColsInBlock());
InputInfo nodeInputInfo = null;
// Check if file format type is binary or text and update infos
if (node.getOutputParameters().isBlocked()) {
if (node.getOutputParameters().getFormat() == Format.BINARY)
nodeInputInfo = InputInfo.BinaryBlockInputInfo;
else
throw new LopsException("Invalid format (" + node.getOutputParameters().getFormat() + ") encountered for a node/lop (ID=" + node.getID() + ") with blocked output.");
}
else {
if (node.getOutputParameters().getFormat() == Format.TEXT)
nodeInputInfo = InputInfo.TextCellInputInfo;
else
nodeInputInfo = InputInfo.BinaryCellInputInfo;
}
/*
* Hardcode output Key and Value Classes for SortKeys
*/
// TODO: statiko -- remove this hardcoding -- i.e., lops must encode
// the information on key/value classes
if (node.getType() == Type.SortKeys) {
// SortKeys is the input to some other lop (say, L)
// InputInfo of L is the outputInfo of SortKeys, which is
// (compact format, DoubleWritable, IntWritable)
nodeInputInfo = new InputInfo(PickFromCompactInputFormat.class,
DoubleWritable.class, IntWritable.class);
} else if (node.getType() == Type.CombineBinary) {
// CombineBinary is the input to some other lop (say, L)
// InputInfo of L is the outputInfo of CombineBinary
// And, the outputInfo of CombineBinary depends on the operation!
CombineBinary combine = (CombineBinary) node;
if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreSort ) {
nodeInputInfo = new InputInfo(SequenceFileInputFormat.class,
DoubleWritable.class, IntWritable.class);
}
else if ( combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCentralMoment
|| combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreCovUnweighted
|| combine.getOperation() == org.apache.sysml.lops.CombineBinary.OperationTypes.PreGroupedAggUnweighted ) {
nodeInputInfo = InputInfo.WeightedPairInputInfo;
}
} else if ( node.getType() == Type.CombineTernary ) {
nodeInputInfo = InputInfo.WeightedPairInputInfo;
}
inputInfos.add(nodeInputInfo);
nodeIndexMapping.put(node, inputStrings.size() - 1);
return;
}
// if exec nodes does not contain node at this point, return.
if (!execNodes.contains(node))
return;
// process children recursively
for ( Lop lop : node.getInputs() ) {
getInputPathsAndParameters(lop, execNodes, inputStrings,
inputInfos, numRows, numCols, numRowsPerBlock,
numColsPerBlock, nodeIndexMapping, inputLabels, inputLops, MRJobLineNumbers);
}
}
/**
* Method to find all terminal nodes.
*
* @param execNodes list of exec nodes
* @param rootNodes list of root nodes
* @param jt job type
*/
private static void getOutputNodes(ArrayList<Lop> execNodes, ArrayList<Lop> rootNodes, JobType jt) {
for ( Lop node : execNodes ) {
// terminal node
if (node.getOutputs().isEmpty() && !rootNodes.contains(node)) {
rootNodes.add(node);
}
else {
// check for nodes with at least one child outside execnodes
int cnt = 0;
for (Lop lop : node.getOutputs() ) {
cnt += (!execNodes.contains(lop)) ? 1 : 0;
}
if (cnt > 0 && !rootNodes.contains(node) // not already a rootnode
&& !(node.getExecLocation() == ExecLocation.Data
&& ((Data) node).getOperationType() == OperationTypes.READ
&& ((Data) node).getDataType() == DataType.MATRIX) ) // Not a matrix Data READ
{
if ( jt.allowsSingleShuffleInstruction() && node.getExecLocation() != ExecLocation.MapAndReduce)
continue;
if (cnt < node.getOutputs().size()) {
if(!node.getProducesIntermediateOutput())
rootNodes.add(node);
}
else
rootNodes.add(node);
}
}
}
}
/**
* check to see if a is the child of b (i.e., there is a directed path from a to b)
*
* @param a child lop
* @param b parent lop
* @param IDMap id map
* @return true if a child of b
*/
private static boolean isChild(Lop a, Lop b, HashMap<Long, Integer> IDMap) {
int bID = IDMap.get(b.getID());
return a.get_reachable()[bID];
}
/**
* Method to topologically sort lops
*
* @param v list of lops
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private void doTopologicalSort_strict_order(ArrayList<Lop> v) {
//int numNodes = v.size();
/*
* Step 1: compute the level for each node in the DAG. Level for each node is
* computed as lops are created. So, this step need not be performed here.
* Step 2: sort the nodes by level, and within a level by node ID.
*/
// Step1: Performed at the time of creating Lops
// Step2: sort nodes by level, and then by node ID
Lop[] nodearray = v.toArray(new Lop[0]);
Arrays.sort(nodearray, new LopComparator());
// Copy sorted nodes into "v" and construct a mapping between Lop IDs and sequence of numbers
v.clear();
IDMap.clear();
for (int i = 0; i < nodearray.length; i++) {
v.add(nodearray[i]);
IDMap.put(v.get(i).getID(), i);
}
/*
* Compute the all-pairs reachability graph (transitive closure) of the DAG.
* - Perform a depth-first search (DFS) from every node $u$ in the DAG
* - and construct the list of reachable nodes from the node $u$
* - store the constructed reachability information in $u$.reachable[] boolean array
*/
//
//
for (int i = 0; i < nodearray.length; i++) {
boolean[] arr = v.get(i).create_reachable(nodearray.length);
Arrays.fill(arr, false);
dagDFS(v.get(i), arr);
}
// print the nodes in sorted order
if (LOG.isTraceEnabled()) {
for ( Lop vnode : v ) {
StringBuilder sb = new StringBuilder();
sb.append(vnode.getID());
sb.append("(");
sb.append(vnode.getLevel());
sb.append(") ");
sb.append(vnode.getType());
sb.append("(");
for(Lop vin : vnode.getInputs()) {
sb.append(vin.getID());
sb.append(",");
}
sb.append("), ");
LOG.trace(sb.toString());
}
LOG.trace("topological sort -- done");
}
}
/**
* Method to perform depth-first traversal from a given node in the DAG.
* Store the reachability information in marked[] boolean array.
*
* @param root low-level operator
* @param marked reachability results
*/
private void dagDFS(Lop root, boolean[] marked) {
//contains check currently required for globalopt, will be removed when cleaned up
if( !IDMap.containsKey(root.getID()) )
return;
int mapID = IDMap.get(root.getID());
if ( marked[mapID] )
return;
marked[mapID] = true;
for( Lop lop : root.getOutputs() ) {
dagDFS(lop, marked);
}
}
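// Returns true if any lop in childNodes is a direct input of the given node,
// i.e. lists the node among its outputs.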
private static boolean hasDirectChildNode(Lop node, ArrayList<Lop> childNodes) {
if ( childNodes.isEmpty() )
return false;
for( Lop cnode : childNodes ) {
if ( cnode.getOutputs().contains(node))
return true;
}
return false;
}
private boolean hasChildNode(Lop node, ArrayList<Lop> nodes) {
return hasChildNode(node, nodes, ExecLocation.INVALID);
}
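// Returns true if childNodes contains a lop with the given exec location
// (ExecLocation.INVALID matches any) that is a direct or transitive input of the given node.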
private boolean hasChildNode(Lop node, ArrayList<Lop> childNodes, ExecLocation type) {
if ( childNodes.isEmpty() )
return false;
int index = IDMap.get(node.getID());
for( Lop cnode : childNodes ) {
if ( (type == ExecLocation.INVALID || cnode.getExecLocation() == type) && cnode.get_reachable()[index])
return true;
}
return false;
}
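// Returns the first lop in childNodes with the given exec location that is a direct or
// transitive input of the given node, or null if there is none.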
private Lop getChildNode(Lop node, ArrayList<Lop> childNodes, ExecLocation type) {
if ( childNodes.isEmpty() )
return null;
int index = IDMap.get(node.getID());
for( Lop cnode : childNodes ) {
if ( cnode.getExecLocation() == type && cnode.get_reachable()[index])
return cnode;
}
return null;
}
/*
* Returns a node "n" such that
* 1) n \in parentNodes
* 2) n is an ancestor of "node"
* 3) n.ExecLocation = type
*
* Returns null if no such "n" exists
*
*/
private Lop getParentNode(Lop node, ArrayList<Lop> parentNodes, ExecLocation type) {
if ( parentNodes.isEmpty() )
return null;
for( Lop pn : parentNodes ) {
int index = IDMap.get( pn.getID() );
if ( pn.getExecLocation() == type && node.get_reachable()[index])
return pn;
}
return null;
}
// Checks if "node" has any descendants in nodesVec with definedMRJob flag
// set to true
private boolean hasMRJobChildNode(Lop node, ArrayList<Lop> nodesVec) {
if ( nodesVec.isEmpty() )
return false;
int index = IDMap.get(node.getID());
for( Lop n : nodesVec ) {
if ( n.definesMRJob() && n.get_reachable()[index])
return true;
}
return false;
}
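// Returns true if every MR-job-defining descendant of the given node in nodesVec
// corresponds to a DATAGEN job (also true when there is no such descendant).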
private boolean checkDataGenAsChildNode(Lop node, ArrayList<Lop> nodesVec) {
if( nodesVec.isEmpty() )
return true;
int index = IDMap.get(node.getID());
boolean onlyDatagen = true;
for( Lop n : nodesVec ) {
if ( n.definesMRJob() && n.get_reachable()[index] && JobType.findJobTypeFromLop(n) != JobType.DATAGEN )
onlyDatagen = false;
}
// return true also when there is no lop in "nodesVec" that defines a MR job.
return onlyDatagen;
}
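// Recursively inspects the inputs of the given node for a child in execNodes with the
// requested exec location and reports whether the path to it breaks block alignment.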
private static int getChildAlignment(Lop node, ArrayList<Lop> execNodes, ExecLocation type)
{
for (Lop n : node.getInputs() ) {
if (!execNodes.contains(n))
continue;
if (execNodes.contains(n) && n.getExecLocation() == type) {
if (n.getBreaksAlignment())
return MR_CHILD_FOUND_BREAKS_ALIGNMENT;
else
return MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT;
}
else {
int ret = getChildAlignment(n, execNodes, type);
if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT
|| ret == CHILD_DOES_NOT_BREAK_ALIGNMENT) {
if (n.getBreaksAlignment())
return CHILD_BREAKS_ALIGNMENT;
else
return CHILD_DOES_NOT_BREAK_ALIGNMENT;
}
else if (ret == MRCHILD_NOT_FOUND
|| ret == CHILD_BREAKS_ALIGNMENT
|| ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT)
return ret;
else
throw new RuntimeException("Something wrong in getChildAlignment().");
}
}
return MRCHILD_NOT_FOUND;
}
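// Returns true if any lop in parentNodes is reachable from the given node, i.e. consumes
// the node's output directly or transitively.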
private boolean hasParentNode(Lop node, ArrayList<Lop> parentNodes) {
if ( parentNodes.isEmpty() )
return false;
for( Lop pnode : parentNodes ) {
int index = IDMap.get( pnode.getID() );
if ( node.get_reachable()[index])
return true;
}
return false;
}
}
| iyounus/incubator-systemml | src/main/java/org/apache/sysml/lops/compile/Dag.java | Java | apache-2.0 | 143,562 |
package com.iservport.et.service;
import java.nio.charset.Charset;
import org.apache.commons.codec.binary.Base64;
import org.springframework.http.HttpHeaders;
import org.springframework.web.util.UriComponentsBuilder;
/**
* Base class to Enterprise tester API calls.
*
* @author mauriciofernandesdecastro
*/
public class AbstractETApiService {
private String scheme = "http";
private String host = "et2.primecontrol.com.br";
private int port = 8807;
private String app = "/EnterpriseTester";
protected final UriComponentsBuilder getApiUriBuilder() {
return UriComponentsBuilder.newInstance().scheme(scheme).host(host).port(port).path(app);
}
/**
* Basic headers (for testing).
*
* @param username user name for HTTP basic authentication
* @param password password for HTTP basic authentication
*/
@SuppressWarnings("serial")
protected HttpHeaders createHeaders(final String username, final String password ){
return new HttpHeaders(){
{
String auth = username + ":" + password;
byte[] encodedAuth = Base64.encodeBase64(
auth.getBytes(Charset.forName("US-ASCII")) );
String authHeader = "Basic " + new String( encodedAuth );
set( "Authorization", authHeader );
}
};
}
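// Illustrative only (not part of the original class): a subclass could combine the two
// helpers roughly like this, using Spring's RestTemplate; the "/api/..." path below is a
// placeholder, not a verified Enterprise Tester endpoint.
//
//   new RestTemplate().exchange(
//           getApiUriBuilder().path("/api/automatedtests").build().toUri(),
//           HttpMethod.GET,
//           new HttpEntity<String>(createHeaders(username, password)),
//           String.class);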
}
| chmulato/helianto-seed | src/main/java/com/iservport/et/service/AbstractETApiService.java | Java | apache-2.0 | 1,266 |
from artnet import *
import SocketServer
import time, os, random, datetime, sys
import argparse
import socket
import struct
from subprocess import Popen, PIPE, STDOUT
import glob
DEBUG = False
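# target Art-Net node address and port (6454 is the standard Art-Net UDP port)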
UDP_IP = "2.0.0.61"
UDP_PORT = 6454
| ScienceWorldCA/domelights | backend/artnet-bridge/artnet-server.py | Python | apache-2.0 | 234 |
/**
* Created by dmitry on 21.11.16.
*/
import React, { Component } from 'react';
import { Container, Content, Spinner } from 'native-base';
// TODO: There are spinner components nearby; we could probably hook one of them up
export default class Loading extends Component {
render() {
return (
<Container>
<Content contentContainerStyle={{
flex: 1,
flexDirection: 'row',
justifyContent: 'center'
}}>
<Spinner color="blue"/>
</Content>
</Container>
);
}
} | dima11221122/63pokupki-react-native | js/components/loading/index.js | JavaScript | apache-2.0 | 551 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 NAME
Bio::Tools::Run::Search::sge_wublastp - SGE BLASTP searches
=head1 SYNOPSIS
see Bio::Tools::Run::Search::SGE_WuBlast
see Bio::Tools::Run::Search::wublastp
=head1 DESCRIPTION
Multiple inheretance object combining
Bio::Tools::Run::Search::SGE_WuBlast and
Bio::Tools::Run::Search::wublastp
=cut
# Let the code begin...
package Bio::Tools::Run::Search::sge_wublastp;
use strict;
use vars qw( @ISA );
use Bio::Tools::Run::Search::SGE_WuBlast;
use Bio::Tools::Run::Search::wublastp;
@ISA = qw( Bio::Tools::Run::Search::SGE_WuBlast
Bio::Tools::Run::Search::wublastp );
BEGIN{
}
# Nastyness to get round multiple inheretance problems.
sub program_name{return Bio::Tools::Run::Search::wublastp::program_name(@_)}
sub algorithm {return Bio::Tools::Run::Search::wublastp::algorithm(@_)}
sub version {return Bio::Tools::Run::Search::wublastp::version(@_)}
sub parameter_options{
return Bio::Tools::Run::Search::wublastp::parameter_options(@_)
}
#----------------------------------------------------------------------
1;
| andrewyatz/public-plugins | sge_blast/modules/Bio/Tools/Run/Search/sge_wublastp.pm | Perl | apache-2.0 | 1,709 |
/*
* Copyright 2017 GcsSloop
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Last modified 2017-03-08 01:01:18
*
* GitHub: https://github.com/GcsSloop
* Website: http://www.gcssloop.com
* Weibo: http://weibo.com/GcsSloop
*/
package com.github.florent37.expectanim.core.position;
import android.view.View;
/**
* Created by florentchampigny on 17/02/2017.
*/
public class PositionAnimExpectationRightOf extends PositionAnimationViewDependant {
public PositionAnimExpectationRightOf(View otherView) {
super(otherView);
setForPositionX(true);
}
@Override
public Float getCalculatedValueX(View viewToMove) {
return viewCalculator.finalPositionRightOfView(otherView) + getMargin(viewToMove);
}
@Override
public Float getCalculatedValueY(View viewToMove) {
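// this expectation only constrains the horizontal position, so there is no Y value to compute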
return null;
}
}
| GcsSloop/diycode | expectanim/src/main/java/com/github/florent37/expectanim/core/position/PositionAnimExpectationRightOf.java | Java | apache-2.0 | 1,365 |
/*
* Copyright 2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jsefa.common.converter;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
* Enum constant annotation.
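* <p>
* Illustrative use (hypothetical enum, not taken from this library): annotate an enum
* constant with its display name, e.g. {@code @EnumConstant("red") RED}.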
*
* @author Norman Lahme-Huetig
*
*/
@Retention(RUNTIME)
@Target({FIELD})
public @interface EnumConstant {
/**
* The display name of the enum constant.
*/
String value();
}
| Manmay/JSefa | src/main/java/org/jsefa/common/converter/EnumConstant.java | Java | apache-2.0 | 1,076 |
# Fusicoccum arbuti D.F. Farr & M. Elliott, 2005 SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Mycologia 97(3): 731 (2005)
#### Original name
Fusicoccum arbuti D.F. Farr & M. Elliott, 2005
### Remarks
null | mdoering/backbone | life/Fungi/Ascomycota/Dothideomycetes/Botryosphaeriales/Botryosphaeriaceae/Neofusicoccum/Neofusicoccum arbuti/ Syn. Fusicoccum arbuti/README.md | Markdown | apache-2.0 | 268 |
package com.douwe.notes.resource.impl;
import com.douwe.notes.entities.Cycle;
import com.douwe.notes.resource.ICycleResource;
import com.douwe.notes.service.ICycleService;
import com.douwe.notes.service.IInsfrastructureService;
import com.douwe.notes.service.ServiceException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ws.rs.Path;
/**
*
* @author Vincent Douwe <[email protected]>
*/
@Path("/cycles")
public class CycleResource implements ICycleResource{
@EJB
private IInsfrastructureService infranstructureService;
@EJB
private ICycleService cycleService;
public Cycle createCycle(Cycle cycle) {
try {
return cycleService.saveOrUpdateCycle(cycle);
} catch (ServiceException ex) {
Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
}
public List<Cycle> getAllCycle() {
try {
return cycleService.getAllCycles();
} catch (ServiceException ex) {
Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
}
public Cycle getCycle(long id) {
try {
return cycleService.findCycleById(id);
} catch (ServiceException ex) {
Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
}
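// Updates the name of an existing cycle; returns null if no cycle with the given id exists
// or if the service call fails.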
public Cycle updateCycle(long id, Cycle cycle) {
try {
Cycle c = cycleService.findCycleById(id);
if(c != null){
c.setNom(cycle.getNom());
return cycleService.saveOrUpdateCycle(c);
}
return null;
} catch (ServiceException ex) {
Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex);
return null;
}
}
public void deleteCycle(long id) {
try {
cycleService.deleteCycle(id);
} catch (ServiceException ex) {
Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex);
}
}
public IInsfrastructureService getInfranstructureService() {
return infranstructureService;
}
public void setInfranstructureService(IInsfrastructureService infranstructureService) {
this.infranstructureService = infranstructureService;
}
public ICycleService getCycleService() {
return cycleService;
}
public void setCycleService(ICycleService cycleService) {
this.cycleService = cycleService;
}
}
| royken/notes | src/main/java/com/douwe/notes/resource/impl/CycleResource.java | Java | apache-2.0 | 2,688 |
//main javascript
(function init() {
// If we need to load requirejs before loading the app, make it so
if (typeof define === "undefined") {
var rscript = document.createElement("script");
rscript.onload = function () {
init();
};
rscript.src = "require.js";
document.head.appendChild(rscript);
return;
}
require.config({
baseUrl: 'js/',
paths: {
// the left side is the module ID,
// the right side is the path to the file, relative to baseUrl.
// The path should NOT include the '.js' file extension.
// Here jQuery 2.1.3 lives at js/lib/jquery-2.1.3.min.js, relative to the HTML page.
jquery: 'lib/jquery-2.1.3.min',
namedwebsockets: 'lib/namedwebsockets',
qrcode: 'lib/qrcode.min',
webcodecam:'lib/WebCodeCam.min',
qrcodelib:'lib/qrcodelib',
socketio: '/socket.io/socket.io',
shake: 'lib/shake'
}
});
// Start the main app logic.
define("mediascape", ["mediascape/Agentcontext/agentcontext",
"mediascape/Association/association",
"mediascape/Discovery/discovery",
"mediascape/DiscoveryAgentContext/discoveryagentcontext",
"mediascape/Sharedstate/sharedstate",
"mediascape/Mappingservice/mappingservice",
"mediascape/Applicationcontext/applicationcontext"], function ($, Modules) {
//jQuery, modules and the discovery/modules module are all
//loaded and can be used here now.
//creation of mediascape and discovery objects.
var mediascape = {};
var moduleList = Array.prototype.slice.apply(arguments);
mediascape.init = function (options) {
mediascapeOptions = {};
_this = Object.create(mediascape);
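// instantiate each loaded module onto the mediascape namespace; modules listed in
// dontCall are attached as-is instead of being constructed with "new"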
for (var i = 0; i < moduleList.length; i++) {
var name = moduleList[i].__moduleName;
var dontCall = ['sharedState', 'mappingService', 'applicationContext'];
if (dontCall.indexOf(name) === -1) {
mediascape[name] = new moduleList[i](mediascape, "gq" + i, mediascape);
} else {
mediascape[name] = moduleList[i];
}
}
return _this;
};
mediascape.version = "0.0.1";
// See if we have any waiting init calls that happened before we loaded require.
if (window.mediascape) {
var args = window.mediascape.__waiting;
delete window.mediascape;
if (args) {
mediascape.init.apply(this, args);
}
}
window.mediascape = mediascape;
//return of mediascape object with discovery and features objects and its functions
return mediascape;
});
require(["mediascape"], function (mediascape) {
mediascape.init();
/**
*
* Polyfill for custonevents
*/
(function () {
function CustomEvent(event, params) {
params = params || {
bubbles: false,
cancelable: false,
detail: undefined
};
var evt = document.createEvent('CustomEvent');
evt.initCustomEvent(event, params.bubbles, params.cancelable, params.detail);
return evt;
};
CustomEvent.prototype = window.Event.prototype;
window.CustomEvent = CustomEvent;
})();
var event = new CustomEvent("mediascape-ready", {
"detail": {
"loaded": true
}
});
document.dispatchEvent(event);
});
}());
| martinangel/association | helloworld/Triggers/js/mediascape/mediascape.js | JavaScript | apache-2.0 | 3,983 |
/*
* Copyright 2010 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.CompilerOptions.AliasTransformation;
import com.google.javascript.jscomp.CompilerOptions.AliasTransformationHandler;
import com.google.javascript.jscomp.Scope.Var;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.SourcePosition;
import com.google.javascript.rhino.Token;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Process aliases in goog.scope blocks.
*
* goog.scope(function() {
* var dom = goog.dom;
* var DIV = dom.TagName.DIV;
*
* dom.createElement(DIV);
* });
*
* should become
*
* goog.dom.createElement(goog.dom.TagName.DIV);
*
* The advantage of using goog.scope is that the compiler will *guarantee*
* the anonymous function will be inlined, even if it can't prove
* that it's semantically correct to do so. For example, consider this case:
*
* goog.scope(function() {
* goog.getBar = function () { return alias; };
* ...
* var alias = foo.bar;
* })
*
* In theory, the compiler can't inline 'alias' unless it can prove that
* goog.getBar is called only after 'alias' is defined.
*
* In practice, the compiler will inline 'alias' anyway, at the risk of
* 'fixing' bad code.
*
* @author [email protected] (Robby Walker)
*/
class ScopedAliases implements HotSwapCompilerPass {
/** Name used to denote an scoped function block used for aliasing. */
static final String SCOPING_METHOD_NAME = "goog.scope";
private final AbstractCompiler compiler;
private final PreprocessorSymbolTable preprocessorSymbolTable;
private final AliasTransformationHandler transformationHandler;
// Errors
static final DiagnosticType GOOG_SCOPE_USED_IMPROPERLY = DiagnosticType.error(
"JSC_GOOG_SCOPE_USED_IMPROPERLY",
"The call to goog.scope must be alone in a single statement.");
static final DiagnosticType GOOG_SCOPE_HAS_BAD_PARAMETERS =
DiagnosticType.error(
"JSC_GOOG_SCOPE_HAS_BAD_PARAMETERS",
"The call to goog.scope must take only a single parameter. It must" +
" be an anonymous function that itself takes no parameters.");
static final DiagnosticType GOOG_SCOPE_REFERENCES_THIS = DiagnosticType.error(
"JSC_GOOG_SCOPE_REFERENCES_THIS",
"The body of a goog.scope function cannot reference 'this'.");
static final DiagnosticType GOOG_SCOPE_USES_RETURN = DiagnosticType.error(
"JSC_GOOG_SCOPE_USES_RETURN",
"The body of a goog.scope function cannot use 'return'.");
static final DiagnosticType GOOG_SCOPE_USES_THROW = DiagnosticType.error(
"JSC_GOOG_SCOPE_USES_THROW",
"The body of a goog.scope function cannot use 'throw'.");
static final DiagnosticType GOOG_SCOPE_ALIAS_REDEFINED = DiagnosticType.error(
"JSC_GOOG_SCOPE_ALIAS_REDEFINED",
"The alias {0} is assigned a value more than once.");
static final DiagnosticType GOOG_SCOPE_ALIAS_CYCLE = DiagnosticType.error(
"JSC_GOOG_SCOPE_ALIAS_CYCLE",
"The aliases {0} has a cycle.");
static final DiagnosticType GOOG_SCOPE_NON_ALIAS_LOCAL = DiagnosticType.error(
"JSC_GOOG_SCOPE_NON_ALIAS_LOCAL",
"The local variable {0} is in a goog.scope and is not an alias.");
private Multiset<String> scopedAliasNames = HashMultiset.create();
ScopedAliases(AbstractCompiler compiler,
@Nullable PreprocessorSymbolTable preprocessorSymbolTable,
AliasTransformationHandler transformationHandler) {
this.compiler = compiler;
this.preprocessorSymbolTable = preprocessorSymbolTable;
this.transformationHandler = transformationHandler;
}
@Override
public void process(Node externs, Node root) {
hotSwapScript(root, null);
}
@Override
public void hotSwapScript(Node root, Node originalRoot) {
Traversal traversal = new Traversal();
NodeTraversal.traverse(compiler, root, traversal);
if (!traversal.hasErrors()) {
// Apply the aliases.
List<AliasUsage> aliasWorkQueue =
Lists.newArrayList(traversal.getAliasUsages());
while (!aliasWorkQueue.isEmpty()) {
List<AliasUsage> newQueue = Lists.newArrayList();
for (AliasUsage aliasUsage : aliasWorkQueue) {
if (aliasUsage.referencesOtherAlias()) {
newQueue.add(aliasUsage);
} else {
aliasUsage.applyAlias();
}
}
// Prevent an infinite loop.
if (newQueue.size() == aliasWorkQueue.size()) {
Var cycleVar = newQueue.get(0).aliasVar;
compiler.report(JSError.make(
cycleVar.getNode(), GOOG_SCOPE_ALIAS_CYCLE, cycleVar.getName()));
break;
} else {
aliasWorkQueue = newQueue;
}
}
// Remove the alias definitions.
for (Node aliasDefinition : traversal.getAliasDefinitionsInOrder()) {
if (aliasDefinition.getParent().isVar() &&
aliasDefinition.getParent().hasOneChild()) {
aliasDefinition.getParent().detachFromParent();
} else {
aliasDefinition.detachFromParent();
}
}
// Collapse the scopes.
for (Node scopeCall : traversal.getScopeCalls()) {
Node expressionWithScopeCall = scopeCall.getParent();
Node scopeClosureBlock = scopeCall.getLastChild().getLastChild();
scopeClosureBlock.detachFromParent();
expressionWithScopeCall.getParent().replaceChild(
expressionWithScopeCall,
scopeClosureBlock);
NodeUtil.tryMergeBlock(scopeClosureBlock);
}
if (traversal.getAliasUsages().size() > 0 ||
traversal.getAliasDefinitionsInOrder().size() > 0 ||
traversal.getScopeCalls().size() > 0) {
compiler.reportCodeChange();
}
}
}
private abstract class AliasUsage {
final Var aliasVar;
final Node aliasReference;
AliasUsage(Var aliasVar, Node aliasReference) {
this.aliasVar = aliasVar;
this.aliasReference = aliasReference;
}
/** Checks to see if this references another alias. */
public boolean referencesOtherAlias() {
Node aliasDefinition = aliasVar.getInitialValue();
Node root = NodeUtil.getRootOfQualifiedName(aliasDefinition);
Var otherAliasVar = aliasVar.getScope().getOwnSlot(root.getString());
return otherAliasVar != null;
}
public abstract void applyAlias();
}
private class AliasedNode extends AliasUsage {
AliasedNode(Var aliasVar, Node aliasReference) {
super(aliasVar, aliasReference);
}
@Override
public void applyAlias() {
Node aliasDefinition = aliasVar.getInitialValue();
aliasReference.getParent().replaceChild(
aliasReference, aliasDefinition.cloneTree());
}
}
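  // Example (illustrative): given the alias "var dom = goog.dom;", a JSDoc
  // type reference such as {dom.TagName.DIV} is expanded by AliasedTypeNode
  // into {goog.dom.TagName.DIV}.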
private class AliasedTypeNode extends AliasUsage {
AliasedTypeNode(Var aliasVar, Node aliasReference) {
super(aliasVar, aliasReference);
}
@Override
public void applyAlias() {
Node aliasDefinition = aliasVar.getInitialValue();
String aliasName = aliasVar.getName();
String typeName = aliasReference.getString();
String aliasExpanded =
Preconditions.checkNotNull(aliasDefinition.getQualifiedName());
Preconditions.checkState(typeName.startsWith(aliasName));
aliasReference.setString(typeName.replaceFirst(aliasName, aliasExpanded));
}
}
private class Traversal implements NodeTraversal.ScopedCallback {
// The job of this class is to collect these three data sets.
// The order of this list determines the order that aliases are applied.
private final List<Node> aliasDefinitionsInOrder = Lists.newArrayList();
private final List<Node> scopeCalls = Lists.newArrayList();
private final List<AliasUsage> aliasUsages = Lists.newArrayList();
// This map is temporary and cleared for each scope.
private final Map<String, Var> aliases = Maps.newHashMap();
// Suppose you create an alias.
// var x = goog.x;
// As a side-effect, this means you can shadow the namespace 'goog'
// in inner scopes. When we inline the namespaces, we have to rename
// these shadows.
//
// Fortunately, we already have a name uniquifier that runs during tree
// normalization (before optimizations). We run it here on a limited
// set of variables, but only as a last resort (because this will screw
// up warning messages downstream).
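    // For example (illustrative): given "var x = goog.x;" in the goog.scope
    // body, an inner "function f() { var goog = {}; ... }" shadows 'goog';
    // that local must be renamed before 'x' can be inlined back to 'goog.x'.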
private final Set<String> forbiddenLocals = Sets.newHashSet("$jscomp");
private boolean hasNamespaceShadows = false;
private boolean hasErrors = false;
private AliasTransformation transformation = null;
Collection<Node> getAliasDefinitionsInOrder() {
return aliasDefinitionsInOrder;
}
private List<AliasUsage> getAliasUsages() {
return aliasUsages;
}
List<Node> getScopeCalls() {
return scopeCalls;
}
boolean hasErrors() {
return hasErrors;
}
private boolean isCallToScopeMethod(Node n) {
return n.isCall() &&
SCOPING_METHOD_NAME.equals(n.getFirstChild().getQualifiedName());
}
@Override
public void enterScope(NodeTraversal t) {
Node n = t.getCurrentNode().getParent();
if (n != null && isCallToScopeMethod(n)) {
transformation = transformationHandler.logAliasTransformation(
n.getSourceFileName(), getSourceRegion(n));
findAliases(t);
}
}
@Override
public void exitScope(NodeTraversal t) {
if (t.getScopeDepth() > 2) {
findNamespaceShadows(t);
}
if (t.getScopeDepth() == 2) {
renameNamespaceShadows(t);
aliases.clear();
forbiddenLocals.clear();
transformation = null;
hasNamespaceShadows = false;
}
}
@Override
public final boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
if (n.isFunction() && t.inGlobalScope()) {
        // Do not traverse into functions except for goog.scope functions.
if (parent == null || !isCallToScopeMethod(parent)) {
return false;
}
}
return true;
}
private SourcePosition<AliasTransformation> getSourceRegion(Node n) {
Node testNode = n;
Node next = null;
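      // Walk up the ancestor chain until we find a following sibling (the first
      // node after the goog.scope statement) or reach the enclosing script; that
      // sibling, if any, marks the end of the aliased source region.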
      for (; next == null && !testNode.isScript();) {
next = testNode.getNext();
testNode = testNode.getParent();
}
int endLine = next == null ? Integer.MAX_VALUE : next.getLineno();
int endChar = next == null ? Integer.MAX_VALUE : next.getCharno();
SourcePosition<AliasTransformation> pos =
new SourcePosition<AliasTransformation>() {};
pos.setPositionInformation(
n.getLineno(), n.getCharno(), endLine, endChar);
return pos;
}
private void report(NodeTraversal t, Node n, DiagnosticType error,
String... arguments) {
compiler.report(t.makeError(n, error, arguments));
hasErrors = true;
}
private void findAliases(NodeTraversal t) {
Scope scope = t.getScope();
for (Var v : scope.getVarIterable()) {
Node n = v.getNode();
Node parent = n.getParent();
boolean isVar = parent.isVar();
boolean isFunctionDecl = NodeUtil.isFunctionDeclaration(parent);
if (isVar && n.getFirstChild() != null && n.getFirstChild().isQualifiedName()) {
recordAlias(v);
} else if (v.isBleedingFunction()) {
// Bleeding functions already get a BAD_PARAMETERS error, so just
// do nothing.
} else if (parent.getType() == Token.LP) {
// Parameters of the scope function also get a BAD_PARAMETERS
// error.
} else if (isVar || isFunctionDecl) {
boolean isHoisted = NodeUtil.isHoistedFunctionDeclaration(parent);
Node grandparent = parent.getParent();
Node value = v.getInitialValue() != null ?
v.getInitialValue() :
null;
Node varNode = null;
String name = n.getString();
int nameCount = scopedAliasNames.count(name);
scopedAliasNames.add(name);
String globalName =
"$jscomp.scope." + name + (nameCount == 0 ? "" : ("$" + nameCount));
compiler.ensureLibraryInjected("base");
// First, we need to free up the function expression (EXPR)
// to be used in another expression.
if (isFunctionDecl) {
// Replace "function NAME() { ... }" with "var NAME;".
Node existingName = v.getNameNode();
// We can't keep the local name on the function expression,
// because IE is buggy and will leak the name into the global
// scope. This is covered in more detail here:
// http://wiki.ecmascript.org/lib/exe/fetch.php?id=resources:resources&cache=cache&media=resources:jscriptdeviationsfromes3.pdf
//
// This will only cause problems if this is a hoisted, recursive
// function, and the programmer is using the hoisting.
Node newName = IR.name("").useSourceInfoFrom(existingName);
value.replaceChild(existingName, newName);
varNode = IR.var(existingName).useSourceInfoFrom(existingName);
grandparent.replaceChild(parent, varNode);
} else {
if (value != null) {
// If this is a VAR, we can just detach the expression and
// the tree will still be valid.
value.detachFromParent();
}
varNode = parent;
}
// Add $jscomp.scope.name = EXPR;
// Make sure we copy over all the jsdoc and debug info.
if (value != null || v.getJSDocInfo() != null) {
Node newDecl = NodeUtil.newQualifiedNameNodeDeclaration(
compiler.getCodingConvention(),
globalName,
value,
v.getJSDocInfo())
.useSourceInfoIfMissingFromForTree(n);
NodeUtil.setDebugInformation(
newDecl.getFirstChild().getFirstChild(), n, name);
if (isHoisted) {
grandparent.addChildToFront(newDecl);
} else {
grandparent.addChildBefore(newDecl, varNode);
}
}
// Rewrite "var name = EXPR;" to "var name = $jscomp.scope.name;"
v.getNameNode().addChildToFront(
NodeUtil.newQualifiedNameNode(
compiler.getCodingConvention(), globalName, n, name));
recordAlias(v);
} else {
          // Do not allow other kinds of local symbols, like catch params.
report(t, n, GOOG_SCOPE_NON_ALIAS_LOCAL, n.getString());
}
}
}
private void recordAlias(Var aliasVar) {
String name = aliasVar.getName();
aliases.put(name, aliasVar);
String qualifiedName =
aliasVar.getInitialValue().getQualifiedName();
transformation.addAlias(name, qualifiedName);
int rootIndex = qualifiedName.indexOf(".");
if (rootIndex != -1) {
String qNameRoot = qualifiedName.substring(0, rootIndex);
if (!aliases.containsKey(qNameRoot)) {
forbiddenLocals.add(qNameRoot);
}
}
}
/** Find out if there are any local shadows of namespaces. */
private void findNamespaceShadows(NodeTraversal t) {
if (hasNamespaceShadows) {
return;
}
Scope scope = t.getScope();
for (Var v : scope.getVarIterable()) {
if (forbiddenLocals.contains(v.getName())) {
hasNamespaceShadows = true;
return;
}
}
}
/**
* Rename any local shadows of namespaces.
* This should be a very rare occurrence, so only do this traversal
* if we know that we need it.
*/
private void renameNamespaceShadows(NodeTraversal t) {
if (hasNamespaceShadows) {
MakeDeclaredNamesUnique.Renamer renamer =
new MakeDeclaredNamesUnique.WhitelistedRenamer(
new MakeDeclaredNamesUnique.ContextualRenamer(),
forbiddenLocals);
for (String s : forbiddenLocals) {
renamer.addDeclaredName(s);
}
MakeDeclaredNamesUnique uniquifier =
new MakeDeclaredNamesUnique(renamer);
NodeTraversal.traverse(compiler, t.getScopeRoot(), uniquifier);
}
}
private void validateScopeCall(NodeTraversal t, Node n, Node parent) {
if (preprocessorSymbolTable != null) {
preprocessorSymbolTable.addReference(n.getFirstChild());
}
if (!parent.isExprResult()) {
report(t, n, GOOG_SCOPE_USED_IMPROPERLY);
}
if (n.getChildCount() != 2) {
// The goog.scope call should have exactly 1 parameter. The first
// child is the "goog.scope" and the second should be the parameter.
report(t, n, GOOG_SCOPE_HAS_BAD_PARAMETERS);
} else {
Node anonymousFnNode = n.getChildAtIndex(1);
if (!anonymousFnNode.isFunction() ||
NodeUtil.getFunctionName(anonymousFnNode) != null ||
NodeUtil.getFunctionParameters(anonymousFnNode).hasChildren()) {
report(t, anonymousFnNode, GOOG_SCOPE_HAS_BAD_PARAMETERS);
} else {
scopeCalls.add(n);
}
}
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (isCallToScopeMethod(n)) {
validateScopeCall(t, n, n.getParent());
}
if (t.getScopeDepth() < 2) {
return;
}
int type = n.getType();
Var aliasVar = null;
if (type == Token.NAME) {
String name = n.getString();
Var lexicalVar = t.getScope().getVar(n.getString());
if (lexicalVar != null && lexicalVar == aliases.get(name)) {
aliasVar = lexicalVar;
}
}
// Validate the top-level of the goog.scope block.
if (t.getScopeDepth() == 2) {
if (aliasVar != null && NodeUtil.isLValue(n)) {
if (aliasVar.getNode() == n) {
aliasDefinitionsInOrder.add(n);
// Return early, to ensure that we don't record a definition
// twice.
return;
} else {
report(t, n, GOOG_SCOPE_ALIAS_REDEFINED, n.getString());
}
}
if (type == Token.RETURN) {
report(t, n, GOOG_SCOPE_USES_RETURN);
} else if (type == Token.THIS) {
report(t, n, GOOG_SCOPE_REFERENCES_THIS);
} else if (type == Token.THROW) {
report(t, n, GOOG_SCOPE_USES_THROW);
}
}
// Validate all descendent scopes of the goog.scope block.
if (t.getScopeDepth() >= 2) {
// Check if this name points to an alias.
if (aliasVar != null) {
// Note, to support the transitive case, it's important we don't
// clone aliasedNode here. For example,
// var g = goog; var d = g.dom; d.createElement('DIV');
// The node in aliasedNode (which is "g") will be replaced in the
// changes pass above with "goog". If we cloned here, we'd end up
// with <code>g.dom.createElement('DIV')</code>.
aliasUsages.add(new AliasedNode(aliasVar, n));
}
JSDocInfo info = n.getJSDocInfo();
if (info != null) {
for (Node node : info.getTypeNodes()) {
fixTypeNode(node);
}
}
// TODO(robbyw): Error for goog.scope not at root.
}
}
private void fixTypeNode(Node typeNode) {
if (typeNode.isString()) {
String name = typeNode.getString();
int endIndex = name.indexOf('.');
if (endIndex == -1) {
endIndex = name.length();
}
String baseName = name.substring(0, endIndex);
Var aliasVar = aliases.get(baseName);
if (aliasVar != null) {
aliasUsages.add(new AliasedTypeNode(aliasVar, typeNode));
}
}
for (Node child = typeNode.getFirstChild(); child != null;
child = child.getNext()) {
fixTypeNode(child);
}
}
}
}
| jhiswin/idiil-closure-compiler | src/com/google/javascript/jscomp/ScopedAliases.java | Java | apache-2.0 | 21,121 |
//
// HKZStatusToolBar.h
// Weibo
//
// Created by hukezhu on 15/8/2.
//  Copyright (c) 2015 hukezhu. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "HKZStatus.h"
@interface HKZStatusToolBar : UIView
@property(nonatomic,strong)HKZStatus *status;/**< data model */
@end
| hukezhu/sina | Weibo/Classes/Home/View/cell/HKZStatusToolBar.h | C | apache-2.0 | 292 |
// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package restore
import (
"context"
"database/sql"
"fmt"
"io"
"math"
"os"
"strings"
"sync"
"time"
"github.com/coreos/go-semver/semver"
"github.com/docker/go-units"
"github.com/google/uuid"
"github.com/pingcap/errors"
"github.com/pingcap/failpoint"
sstpb "github.com/pingcap/kvproto/pkg/import_sstpb"
berrors "github.com/pingcap/tidb/br/pkg/errors"
"github.com/pingcap/tidb/br/pkg/lightning/backend"
"github.com/pingcap/tidb/br/pkg/lightning/backend/importer"
"github.com/pingcap/tidb/br/pkg/lightning/backend/kv"
"github.com/pingcap/tidb/br/pkg/lightning/backend/local"
"github.com/pingcap/tidb/br/pkg/lightning/backend/tidb"
"github.com/pingcap/tidb/br/pkg/lightning/checkpoints"
"github.com/pingcap/tidb/br/pkg/lightning/common"
"github.com/pingcap/tidb/br/pkg/lightning/config"
"github.com/pingcap/tidb/br/pkg/lightning/errormanager"
"github.com/pingcap/tidb/br/pkg/lightning/glue"
"github.com/pingcap/tidb/br/pkg/lightning/log"
"github.com/pingcap/tidb/br/pkg/lightning/metric"
"github.com/pingcap/tidb/br/pkg/lightning/mydump"
"github.com/pingcap/tidb/br/pkg/lightning/tikv"
verify "github.com/pingcap/tidb/br/pkg/lightning/verification"
"github.com/pingcap/tidb/br/pkg/lightning/web"
"github.com/pingcap/tidb/br/pkg/lightning/worker"
"github.com/pingcap/tidb/br/pkg/pdutil"
"github.com/pingcap/tidb/br/pkg/storage"
"github.com/pingcap/tidb/br/pkg/utils"
"github.com/pingcap/tidb/br/pkg/version"
"github.com/pingcap/tidb/br/pkg/version/build"
"github.com/pingcap/tidb/meta/autoid"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/util/collate"
pd "github.com/tikv/pd/client"
"go.uber.org/atomic"
"go.uber.org/multierr"
"go.uber.org/zap"
"modernc.org/mathutil"
)
const (
FullLevelCompact = -1
Level1Compact = 1
)
const (
defaultGCLifeTime = 100 * time.Hour
)
const (
indexEngineID = -1
)
const (
compactStateIdle int32 = iota
compactStateDoing
)
const (
TaskMetaTableName = "task_meta"
TableMetaTableName = "table_meta"
	// CreateTableMetadataTable stores the per-table sub-job information used by TiDB Lightning
CreateTableMetadataTable = `CREATE TABLE IF NOT EXISTS %s (
task_id BIGINT(20) UNSIGNED,
table_id BIGINT(64) NOT NULL,
table_name VARCHAR(64) NOT NULL,
row_id_base BIGINT(20) NOT NULL DEFAULT 0,
row_id_max BIGINT(20) NOT NULL DEFAULT 0,
total_kvs_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_bytes_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
checksum_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_kvs BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_bytes BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
checksum BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
status VARCHAR(32) NOT NULL,
has_duplicates BOOL NOT NULL DEFAULT 0,
PRIMARY KEY (table_id, task_id)
);`
// CreateTaskMetaTable stores the pre-lightning metadata used by TiDB Lightning
CreateTaskMetaTable = `CREATE TABLE IF NOT EXISTS %s (
task_id BIGINT(20) UNSIGNED NOT NULL,
pd_cfgs VARCHAR(2048) NOT NULL DEFAULT '',
status VARCHAR(32) NOT NULL,
state TINYINT(1) NOT NULL DEFAULT 0 COMMENT '0: normal, 1: exited before finish',
source_bytes BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
cluster_avail BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
PRIMARY KEY (task_id)
);`
compactionLowerThreshold = 512 * units.MiB
compactionUpperThreshold = 32 * units.GiB
)
var (
minTiKVVersionForDuplicateResolution = *semver.New("5.2.0")
maxTiKVVersionForDuplicateResolution = version.NextMajorVersion()
)
// DeliverPauser is a shared pauser used to pause the progress of (*chunkRestore).encodeLoop
var DeliverPauser = common.NewPauser()
// nolint:gochecknoinits // TODO: refactor
func init() {
failpoint.Inject("SetMinDeliverBytes", func(v failpoint.Value) {
minDeliverBytes = uint64(v.(int))
})
}
type saveCp struct {
tableName string
merger checkpoints.TableCheckpointMerger
waitCh chan<- error
}
type errorSummary struct {
status checkpoints.CheckpointStatus
err error
}
type errorSummaries struct {
sync.Mutex
logger log.Logger
summary map[string]errorSummary
}
// makeErrorSummaries returns an initialized errorSummaries instance
func makeErrorSummaries(logger log.Logger) errorSummaries {
return errorSummaries{
logger: logger,
summary: make(map[string]errorSummary),
}
}
func (es *errorSummaries) emitLog() {
es.Lock()
defer es.Unlock()
if errorCount := len(es.summary); errorCount > 0 {
logger := es.logger
logger.Error("tables failed to be imported", zap.Int("count", errorCount))
for tableName, errorSummary := range es.summary {
logger.Error("-",
zap.String("table", tableName),
zap.String("status", errorSummary.status.MetricName()),
log.ShortError(errorSummary.err),
)
}
}
}
func (es *errorSummaries) record(tableName string, err error, status checkpoints.CheckpointStatus) {
es.Lock()
defer es.Unlock()
es.summary[tableName] = errorSummary{status: status, err: err}
}
const (
diskQuotaStateIdle int32 = iota
diskQuotaStateChecking
diskQuotaStateImporting
diskQuotaMaxReaders = 1 << 30
)
// diskQuotaLock is essentially a read/write lock. The implementation here is inspired by sync.RWMutex.
// diskQuotaLock drops the blocking `RLock` method, which is not needed here, and adds a non-blocking `TryRLock` method.
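//
// An illustrative usage sketch (assumed, not taken verbatim from the callers):
// data-writing goroutines guard their disk writes with the non-blocking read
// lock, while the disk-quota enforcement path takes the write lock around an
// emergency import:
//
//	if l.TryRLock() {
//		// ... write more KV data to local disk ...
//		l.RUnlock()
//	}
//
//	l.Lock()
//	// ... import and clean up local data to get back under the quota ...
//	l.Unlock()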
type diskQuotaLock struct {
w sync.Mutex // held if there are pending writers
writerSem chan struct{} // semaphore for writers to wait for completing readers
readerCount atomic.Int32 // number of pending readers
readerWait atomic.Int32 // number of departing readers
}
func newDiskQuotaLock() *diskQuotaLock {
return &diskQuotaLock{writerSem: make(chan struct{})}
}
func (d *diskQuotaLock) Lock() {
d.w.Lock()
// Announce to readers there is a pending writer.
r := d.readerCount.Sub(diskQuotaMaxReaders) + diskQuotaMaxReaders
if r != 0 && d.readerWait.Add(r) != 0 {
// Wait for active readers.
<-d.writerSem
}
}
func (d *diskQuotaLock) Unlock() {
d.readerCount.Add(diskQuotaMaxReaders)
d.w.Unlock()
}
func (d *diskQuotaLock) TryRLock() (locked bool) {
r := d.readerCount.Load()
for r >= 0 {
if d.readerCount.CAS(r, r+1) {
return true
}
r = d.readerCount.Load()
}
return false
}
func (d *diskQuotaLock) RUnlock() {
if d.readerCount.Dec() < 0 {
if d.readerWait.Dec() == 0 {
// The last reader unblocks the writer.
d.writerSem <- struct{}{}
}
}
}
type Controller struct {
cfg *config.Config
dbMetas []*mydump.MDDatabaseMeta
dbInfos map[string]*checkpoints.TidbDBInfo
tableWorkers *worker.Pool
indexWorkers *worker.Pool
regionWorkers *worker.Pool
ioWorkers *worker.Pool
checksumWorks *worker.Pool
pauser *common.Pauser
backend backend.Backend
tidbGlue glue.Glue
alterTableLock sync.Mutex
sysVars map[string]string
tls *common.TLS
checkTemplate Template
errorSummaries errorSummaries
checkpointsDB checkpoints.DB
saveCpCh chan saveCp
checkpointsWg sync.WaitGroup
closedEngineLimit *worker.Pool
store storage.ExternalStorage
metaMgrBuilder metaMgrBuilder
errorMgr *errormanager.ErrorManager
taskMgr taskMetaMgr
diskQuotaLock *diskQuotaLock
diskQuotaState atomic.Int32
compactState atomic.Int32
status *LightningStatus
}
type LightningStatus struct {
FinishedFileSize atomic.Int64
TotalFileSize atomic.Int64
}
func NewRestoreController(
ctx context.Context,
dbMetas []*mydump.MDDatabaseMeta,
cfg *config.Config,
status *LightningStatus,
s storage.ExternalStorage,
g glue.Glue,
) (*Controller, error) {
return NewRestoreControllerWithPauser(ctx, dbMetas, cfg, status, s, DeliverPauser, g)
}
func NewRestoreControllerWithPauser(
ctx context.Context,
dbMetas []*mydump.MDDatabaseMeta,
cfg *config.Config,
status *LightningStatus,
s storage.ExternalStorage,
pauser *common.Pauser,
g glue.Glue,
) (*Controller, error) {
tls, err := cfg.ToTLS()
if err != nil {
return nil, err
}
cpdb, err := g.OpenCheckpointsDB(ctx, cfg)
if err != nil {
return nil, errors.Annotate(err, "open checkpoint db failed")
}
taskCp, err := cpdb.TaskCheckpoint(ctx)
if err != nil {
return nil, errors.Annotate(err, "get task checkpoint failed")
}
if err := verifyCheckpoint(cfg, taskCp); err != nil {
return nil, errors.Trace(err)
}
// reuse task id to reuse task meta correctly.
if taskCp != nil {
cfg.TaskID = taskCp.TaskID
}
// TODO: support Lightning via SQL
db, err := g.GetDB()
if err != nil {
return nil, errors.Trace(err)
}
errorMgr := errormanager.New(db, cfg)
if err := errorMgr.Init(ctx); err != nil {
return nil, errors.Annotate(err, "failed to init error manager")
}
var backend backend.Backend
switch cfg.TikvImporter.Backend {
case config.BackendImporter:
var err error
backend, err = importer.NewImporter(ctx, tls, cfg.TikvImporter.Addr, cfg.TiDB.PdAddr)
if err != nil {
return nil, errors.Annotate(err, "open importer backend failed")
}
case config.BackendTiDB:
db, err := g.GetDB()
if err != nil {
return nil, errors.Annotate(err, "open tidb backend failed")
}
backend = tidb.NewTiDBBackend(db, cfg.TikvImporter.OnDuplicate, errorMgr)
case config.BackendLocal:
var rLimit local.Rlim_t
rLimit, err = local.GetSystemRLimit()
if err != nil {
return nil, err
}
maxOpenFiles := int(rLimit / local.Rlim_t(cfg.App.TableConcurrency))
// check overflow
if maxOpenFiles < 0 {
maxOpenFiles = math.MaxInt32
}
if cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
if err := tikv.CheckTiKVVersion(ctx, tls, cfg.TiDB.PdAddr, minTiKVVersionForDuplicateResolution, maxTiKVVersionForDuplicateResolution); err != nil {
if berrors.Is(err, berrors.ErrVersionMismatch) {
log.L().Warn("TiKV version doesn't support duplicate resolution. The resolution algorithm will fall back to 'none'", zap.Error(err))
cfg.TikvImporter.DuplicateResolution = config.DupeResAlgNone
} else {
return nil, errors.Annotate(err, "check TiKV version for duplicate resolution failed")
}
}
}
backend, err = local.NewLocalBackend(ctx, tls, cfg, g, maxOpenFiles, errorMgr)
if err != nil {
return nil, errors.Annotate(err, "build local backend failed")
}
err = verifyLocalFile(ctx, cpdb, cfg.TikvImporter.SortedKVDir)
if err != nil {
return nil, err
}
default:
return nil, errors.New("unknown backend: " + cfg.TikvImporter.Backend)
}
var metaBuilder metaMgrBuilder
switch cfg.TikvImporter.Backend {
case config.BackendLocal, config.BackendImporter:
metaBuilder = &dbMetaMgrBuilder{
db: db,
taskID: cfg.TaskID,
schema: cfg.App.MetaSchemaName,
needChecksum: cfg.PostRestore.Checksum != config.OpLevelOff,
}
default:
metaBuilder = noopMetaMgrBuilder{}
}
rc := &Controller{
cfg: cfg,
dbMetas: dbMetas,
tableWorkers: nil,
indexWorkers: nil,
regionWorkers: worker.NewPool(ctx, cfg.App.RegionConcurrency, "region"),
ioWorkers: worker.NewPool(ctx, cfg.App.IOConcurrency, "io"),
checksumWorks: worker.NewPool(ctx, cfg.TiDB.ChecksumTableConcurrency, "checksum"),
pauser: pauser,
backend: backend,
tidbGlue: g,
sysVars: defaultImportantVariables,
tls: tls,
checkTemplate: NewSimpleTemplate(),
errorSummaries: makeErrorSummaries(log.L()),
checkpointsDB: cpdb,
saveCpCh: make(chan saveCp),
closedEngineLimit: worker.NewPool(ctx, cfg.App.TableConcurrency*2, "closed-engine"),
store: s,
metaMgrBuilder: metaBuilder,
errorMgr: errorMgr,
diskQuotaLock: newDiskQuotaLock(),
status: status,
taskMgr: nil,
}
return rc, nil
}
func (rc *Controller) Close() {
rc.backend.Close()
rc.tidbGlue.GetSQLExecutor().Close()
}
func (rc *Controller) Run(ctx context.Context) error {
opts := []func(context.Context) error{
rc.setGlobalVariables,
rc.restoreSchema,
rc.preCheckRequirements,
rc.restoreTables,
rc.fullCompact,
rc.cleanCheckpoints,
}
task := log.L().Begin(zap.InfoLevel, "the whole procedure")
var err error
finished := false
outside:
for i, process := range opts {
err = process(ctx)
if i == len(opts)-1 {
finished = true
}
logger := task.With(zap.Int("step", i), log.ShortError(err))
switch {
case err == nil:
case log.IsContextCanceledError(err):
logger.Info("task canceled")
err = nil
break outside
default:
logger.Error("run failed")
fmt.Fprintf(os.Stderr, "Error: %s\n", err)
break outside // ps : not continue
}
}
// if process is cancelled, should make sure checkpoints are written to db.
if !finished {
rc.waitCheckpointFinish()
}
task.End(zap.ErrorLevel, err)
rc.errorSummaries.emitLog()
return errors.Trace(err)
}
type schemaStmtType int
func (stmtType schemaStmtType) String() string {
switch stmtType {
case schemaCreateDatabase:
return "restore database schema"
case schemaCreateTable:
return "restore table schema"
case schemaCreateView:
return "restore view schema"
}
return "unknown statement of schema"
}
const (
schemaCreateDatabase schemaStmtType = iota
schemaCreateTable
schemaCreateView
)
type schemaJob struct {
dbName string
tblName string // empty for create db jobs
stmtType schemaStmtType
stmts []*schemaStmt
}
type schemaStmt struct {
sql string
}
type restoreSchemaWorker struct {
ctx context.Context
quit context.CancelFunc
jobCh chan *schemaJob
errCh chan error
wg sync.WaitGroup
glue glue.Glue
store storage.ExternalStorage
}
func (worker *restoreSchemaWorker) makeJobs(
dbMetas []*mydump.MDDatabaseMeta,
getTables func(context.Context, string) ([]*model.TableInfo, error),
) error {
defer func() {
close(worker.jobCh)
worker.quit()
}()
var err error
	// 1. restore databases, executing statements concurrently
for _, dbMeta := range dbMetas {
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
stmtType: schemaCreateDatabase,
stmts: make([]*schemaStmt, 0, 1),
}
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: createDatabaseIfNotExistStmt(dbMeta.Name),
})
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
}
err = worker.wait()
if err != nil {
return err
}
	// 2. restore tables, executing statements concurrently
for _, dbMeta := range dbMetas {
		// we can ignore the error here and let the later check fail if the schema doesn't match
tables, _ := getTables(worker.ctx, dbMeta.Name)
tableMap := make(map[string]struct{})
for _, t := range tables {
tableMap[t.Name.L] = struct{}{}
}
for _, tblMeta := range dbMeta.Tables {
if _, ok := tableMap[strings.ToLower(tblMeta.Name)]; ok {
				// we already have this table in TiDB.
				// we should skip the DDL job and let the SchemaValid check handle it.
continue
} else if tblMeta.SchemaFile.FileMeta.Path == "" {
return errors.Errorf("table `%s`.`%s` schema not found", dbMeta.Name, tblMeta.Name)
}
sql, err := tblMeta.GetSchema(worker.ctx, worker.store)
if sql != "" {
stmts, err := createTableIfNotExistsStmt(worker.glue.GetParser(), sql, dbMeta.Name, tblMeta.Name)
if err != nil {
return err
}
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
tblName: tblMeta.Name,
stmtType: schemaCreateTable,
stmts: make([]*schemaStmt, 0, len(stmts)),
}
for _, sql := range stmts {
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: sql,
})
}
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
}
if err != nil {
return err
}
}
}
err = worker.wait()
if err != nil {
return err
}
	// 3. restore views. Since views can reference tables across databases, we must restore views after all table schemas are restored.
for _, dbMeta := range dbMetas {
for _, viewMeta := range dbMeta.Views {
sql, err := viewMeta.GetSchema(worker.ctx, worker.store)
if sql != "" {
stmts, err := createTableIfNotExistsStmt(worker.glue.GetParser(), sql, dbMeta.Name, viewMeta.Name)
if err != nil {
return err
}
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
tblName: viewMeta.Name,
stmtType: schemaCreateView,
stmts: make([]*schemaStmt, 0, len(stmts)),
}
for _, sql := range stmts {
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: sql,
})
}
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
				// we don't support restoring views concurrently because it may raise an error
err = worker.wait()
if err != nil {
return err
}
}
if err != nil {
return err
}
}
}
return nil
}
func (worker *restoreSchemaWorker) doJob() {
var session *sql.Conn
defer func() {
if session != nil {
_ = session.Close()
}
}()
loop:
for {
select {
case <-worker.ctx.Done():
			// don't `return` or throw `worker.ctx.Err()` here:
			// if we `return`, we can't mark cancelled jobs as done,
			// and if we throw `worker.ctx.Err()`, it will block forever
break loop
case job := <-worker.jobCh:
if job == nil {
// successful exit
return
}
var err error
if session == nil {
session, err = func() (*sql.Conn, error) {
// TODO: support lightning in SQL
db, err := worker.glue.GetDB()
if err != nil {
return nil, errors.Trace(err)
}
return db.Conn(worker.ctx)
}()
if err != nil {
worker.wg.Done()
worker.throw(err)
// don't return
break loop
}
}
logger := log.With(zap.String("db", job.dbName), zap.String("table", job.tblName))
sqlWithRetry := common.SQLWithRetry{
Logger: log.L(),
DB: session,
}
for _, stmt := range job.stmts {
task := logger.Begin(zap.DebugLevel, fmt.Sprintf("execute SQL: %s", stmt.sql))
err = sqlWithRetry.Exec(worker.ctx, "run create schema job", stmt.sql)
task.End(zap.ErrorLevel, err)
if err != nil {
err = errors.Annotatef(err, "%s %s failed", job.stmtType.String(), common.UniqueTable(job.dbName, job.tblName))
worker.wg.Done()
worker.throw(err)
// don't return
break loop
}
}
worker.wg.Done()
}
}
	// mark the cancelled jobs as `Done`; this is a little tricky because
	// we need to make sure `worker.wg.Wait()` won't block forever
for range worker.jobCh {
worker.wg.Done()
}
}
func (worker *restoreSchemaWorker) wait() error {
	// avoid `worker.wg.Wait()` blocking forever after all `doJob` goroutines have exited.
	// don't worry about the goroutine below, it never becomes a zombie,
	// because we have a mechanism to clean up cancelled jobs in `worker.jobCh`,
	// which means every job sent to `worker.jobCh` will eventually be marked done.
waitCh := make(chan struct{})
go func() {
worker.wg.Wait()
close(waitCh)
}()
select {
case err := <-worker.errCh:
return err
case <-worker.ctx.Done():
return worker.ctx.Err()
case <-waitCh:
return nil
}
}
func (worker *restoreSchemaWorker) throw(err error) {
select {
case <-worker.ctx.Done():
		// don't throw `worker.ctx.Err()` again, it would block forever.
return
case worker.errCh <- err:
worker.quit()
}
}
func (worker *restoreSchemaWorker) appendJob(job *schemaJob) error {
worker.wg.Add(1)
select {
case err := <-worker.errCh:
// cancel the job
worker.wg.Done()
return err
case <-worker.ctx.Done():
// cancel the job
worker.wg.Done()
return worker.ctx.Err()
case worker.jobCh <- job:
return nil
}
}
func (rc *Controller) restoreSchema(ctx context.Context) error {
	// create tables from the schema files.
	// duplicated creation is handled by the CREATE ... IF NOT EXISTS statements,
	// and DataCheck will later verify that the schema in TiDB matches the data files.
logTask := log.L().Begin(zap.InfoLevel, "restore all schema")
concurrency := utils.MinInt(rc.cfg.App.RegionConcurrency, 8)
childCtx, cancel := context.WithCancel(ctx)
worker := restoreSchemaWorker{
ctx: childCtx,
quit: cancel,
jobCh: make(chan *schemaJob, concurrency),
errCh: make(chan error),
glue: rc.tidbGlue,
store: rc.store,
}
for i := 0; i < concurrency; i++ {
go worker.doJob()
}
getTableFunc := rc.backend.FetchRemoteTableModels
if !rc.tidbGlue.OwnsSQLExecutor() {
getTableFunc = rc.tidbGlue.GetTables
}
err := worker.makeJobs(rc.dbMetas, getTableFunc)
logTask.End(zap.ErrorLevel, err)
if err != nil {
return err
}
dbInfos, err := LoadSchemaInfo(ctx, rc.dbMetas, getTableFunc)
if err != nil {
return errors.Trace(err)
}
rc.dbInfos = dbInfos
if rc.tidbGlue.OwnsSQLExecutor() {
if err = rc.DataCheck(ctx); err != nil {
return errors.Trace(err)
}
}
// Load new checkpoints
err = rc.checkpointsDB.Initialize(ctx, rc.cfg, dbInfos)
if err != nil {
return errors.Trace(err)
}
failpoint.Inject("InitializeCheckpointExit", func() {
log.L().Warn("exit triggered", zap.String("failpoint", "InitializeCheckpointExit"))
os.Exit(0)
})
go rc.listenCheckpointUpdates()
sysVars := ObtainImportantVariables(ctx, rc.tidbGlue.GetSQLExecutor(), !rc.isTiDBBackend())
// override by manually set vars
for k, v := range rc.cfg.TiDB.Vars {
sysVars[k] = v
}
rc.sysVars = sysVars
// Estimate the number of chunks for progress reporting
err = rc.estimateChunkCountIntoMetrics(ctx)
if err != nil {
return errors.Trace(err)
}
return nil
}
// verifyCheckpoint checks whether the previous task checkpoint is compatible with the task config
func verifyCheckpoint(cfg *config.Config, taskCp *checkpoints.TaskCheckpoint) error {
if taskCp == nil {
return nil
}
// always check the backend value even with 'check-requirements = false'
retryUsage := "destroy all checkpoints"
if cfg.Checkpoint.Driver == config.CheckpointDriverFile {
retryUsage = fmt.Sprintf("delete the file '%s'", cfg.Checkpoint.DSN)
}
retryUsage += " and remove all restored tables and try again"
if cfg.TikvImporter.Backend != taskCp.Backend {
return errors.Errorf("config 'tikv-importer.backend' value '%s' different from checkpoint value '%s', please %s", cfg.TikvImporter.Backend, taskCp.Backend, retryUsage)
}
if cfg.App.CheckRequirements {
if build.ReleaseVersion != taskCp.LightningVer {
var displayVer string
if len(taskCp.LightningVer) != 0 {
displayVer = fmt.Sprintf("at '%s'", taskCp.LightningVer)
} else {
displayVer = "before v4.0.6/v3.0.19"
}
return errors.Errorf("lightning version is '%s', but checkpoint was created %s, please %s", build.ReleaseVersion, displayVer, retryUsage)
}
errorFmt := "config '%s' value '%s' different from checkpoint value '%s'. You may set 'check-requirements = false' to skip this check or " + retryUsage
if cfg.Mydumper.SourceDir != taskCp.SourceDir {
return errors.Errorf(errorFmt, "mydumper.data-source-dir", cfg.Mydumper.SourceDir, taskCp.SourceDir)
}
if cfg.TikvImporter.Backend == config.BackendLocal && cfg.TikvImporter.SortedKVDir != taskCp.SortedKVDir {
return errors.Errorf(errorFmt, "mydumper.sorted-kv-dir", cfg.TikvImporter.SortedKVDir, taskCp.SortedKVDir)
}
if cfg.TikvImporter.Backend == config.BackendImporter && cfg.TikvImporter.Addr != taskCp.ImporterAddr {
return errors.Errorf(errorFmt, "tikv-importer.addr", cfg.TikvImporter.Backend, taskCp.Backend)
}
if cfg.TiDB.Host != taskCp.TiDBHost {
return errors.Errorf(errorFmt, "tidb.host", cfg.TiDB.Host, taskCp.TiDBHost)
}
if cfg.TiDB.Port != taskCp.TiDBPort {
return errors.Errorf(errorFmt, "tidb.port", cfg.TiDB.Port, taskCp.TiDBPort)
}
if cfg.TiDB.PdAddr != taskCp.PdAddr {
return errors.Errorf(errorFmt, "tidb.pd-addr", cfg.TiDB.PdAddr, taskCp.PdAddr)
}
}
return nil
}
// for the local backend, we should check that the local SST files exist on disk, otherwise we'll lose data
func verifyLocalFile(ctx context.Context, cpdb checkpoints.DB, dir string) error {
targetTables, err := cpdb.GetLocalStoringTables(ctx)
if err != nil {
return errors.Trace(err)
}
for tableName, engineIDs := range targetTables {
for _, engineID := range engineIDs {
_, eID := backend.MakeUUID(tableName, engineID)
file := local.Engine{UUID: eID}
err := file.Exist(dir)
if err != nil {
log.L().Error("can't find local file",
zap.String("table name", tableName),
zap.Int32("engine ID", engineID))
return errors.Trace(err)
}
}
}
return nil
}
func (rc *Controller) estimateChunkCountIntoMetrics(ctx context.Context) error {
estimatedChunkCount := 0.0
estimatedEngineCnt := int64(0)
batchSize := rc.cfg.Mydumper.BatchSize
if batchSize <= 0 {
		// if rows in the source files are not sorted by primary key (when the primary key is numeric or the clustered index is enabled),
		// the key ranges of the data engines may overlap, so a bigger engine size can somewhat alleviate this.
batchSize = config.DefaultBatchSize
}
for _, dbMeta := range rc.dbMetas {
for _, tableMeta := range dbMeta.Tables {
tableName := common.UniqueTable(dbMeta.Name, tableMeta.Name)
dbCp, err := rc.checkpointsDB.Get(ctx, tableName)
if err != nil {
return errors.Trace(err)
}
fileChunks := make(map[string]float64)
for engineID, eCp := range dbCp.Engines {
if eCp.Status < checkpoints.CheckpointStatusImported {
estimatedEngineCnt++
}
if engineID == indexEngineID {
continue
}
for _, c := range eCp.Chunks {
if _, ok := fileChunks[c.Key.Path]; !ok {
fileChunks[c.Key.Path] = 0.0
}
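					// remainChunkCnt is the unprocessed fraction of this chunk; e.g. a chunk
					// that is half-way through contributes 0.5 to the estimate.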
remainChunkCnt := float64(c.Chunk.EndOffset-c.Chunk.Offset) / float64(c.Chunk.EndOffset-c.Key.Offset)
fileChunks[c.Key.Path] += remainChunkCnt
}
}
			// estimate the engine count if the engine checkpoint is empty
if len(dbCp.Engines) == 0 {
estimatedEngineCnt += ((tableMeta.TotalSize + int64(batchSize) - 1) / int64(batchSize)) + 1
}
for _, fileMeta := range tableMeta.DataFiles {
if cnt, ok := fileChunks[fileMeta.FileMeta.Path]; ok {
estimatedChunkCount += cnt
continue
}
if fileMeta.FileMeta.Type == mydump.SourceTypeCSV {
cfg := rc.cfg.Mydumper
if fileMeta.FileMeta.FileSize > int64(cfg.MaxRegionSize) && cfg.StrictFormat && !cfg.CSV.Header {
estimatedChunkCount += math.Round(float64(fileMeta.FileMeta.FileSize) / float64(cfg.MaxRegionSize))
} else {
estimatedChunkCount++
}
} else {
estimatedChunkCount++
}
}
}
}
metric.ChunkCounter.WithLabelValues(metric.ChunkStateEstimated).Add(estimatedChunkCount)
metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStateEstimated, metric.TableResultSuccess).
Add(float64(estimatedEngineCnt))
rc.tidbGlue.Record(glue.RecordEstimatedChunk, uint64(estimatedChunkCount))
return nil
}
func firstErr(errors ...error) error {
for _, err := range errors {
if err != nil {
return err
}
}
return nil
}
func (rc *Controller) saveStatusCheckpoint(ctx context.Context, tableName string, engineID int32, err error, statusIfSucceed checkpoints.CheckpointStatus) error {
merger := &checkpoints.StatusCheckpointMerger{Status: statusIfSucceed, EngineID: engineID}
logger := log.L().With(zap.String("table", tableName), zap.Int32("engine_id", engineID),
zap.String("new_status", statusIfSucceed.MetricName()), zap.Error(err))
logger.Debug("update checkpoint")
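	// err == nil: simply record the new status; a real (non-cancellation) error
	// marks the checkpoint invalid and is added to the error summary; a
	// context-cancellation error skips the checkpoint update entirely.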
switch {
case err == nil:
break
case !common.IsContextCanceledError(err):
merger.SetInvalid()
rc.errorSummaries.record(tableName, err, statusIfSucceed)
default:
return nil
}
if engineID == checkpoints.WholeTableEngineID {
metric.RecordTableCount(statusIfSucceed.MetricName(), err)
} else {
metric.RecordEngineCount(statusIfSucceed.MetricName(), err)
}
waitCh := make(chan error, 1)
rc.saveCpCh <- saveCp{tableName: tableName, merger: merger, waitCh: waitCh}
select {
case saveCpErr := <-waitCh:
if saveCpErr != nil {
logger.Error("failed to save status checkpoint", log.ShortError(saveCpErr))
}
return saveCpErr
case <-ctx.Done():
return ctx.Err()
}
}
// listenCheckpointUpdates will combine several checkpoints together to reduce database load.
func (rc *Controller) listenCheckpointUpdates() {
rc.checkpointsWg.Add(1)
var lock sync.Mutex
coalesed := make(map[string]*checkpoints.TableCheckpointDiff)
var waiters []chan<- error
hasCheckpoint := make(chan struct{}, 1)
defer close(hasCheckpoint)
go func() {
for range hasCheckpoint {
lock.Lock()
cpd := coalesed
coalesed = make(map[string]*checkpoints.TableCheckpointDiff)
ws := waiters
waiters = nil
lock.Unlock()
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("SlowDownCheckpointUpdate", func() {})
if len(cpd) > 0 {
err := rc.checkpointsDB.Update(cpd)
for _, w := range ws {
w <- err
}
web.BroadcastCheckpointDiff(cpd)
}
rc.checkpointsWg.Done()
}
}()
for scp := range rc.saveCpCh {
lock.Lock()
cpd, ok := coalesed[scp.tableName]
if !ok {
cpd = checkpoints.NewTableCheckpointDiff()
coalesed[scp.tableName] = cpd
}
scp.merger.MergeInto(cpd)
if scp.waitCh != nil {
waiters = append(waiters, scp.waitCh)
}
if len(hasCheckpoint) == 0 {
rc.checkpointsWg.Add(1)
hasCheckpoint <- struct{}{}
}
lock.Unlock()
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfImportedChunk", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.ChunkCheckpointMerger); ok && merger.Checksum.SumKVS() >= uint64(val.(int)) {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfImportedChunk")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfStatusBecomes", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.StatusCheckpointMerger); ok && merger.EngineID >= 0 && int(merger.Status) == val.(int) {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfStatusBecomes")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfIndexEngineImported", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.StatusCheckpointMerger); ok &&
merger.EngineID == checkpoints.WholeTableEngineID &&
merger.Status == checkpoints.CheckpointStatusIndexImported && val.(int) > 0 {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfIndexEngineImported")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("KillIfImportedChunk", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.ChunkCheckpointMerger); ok && merger.Checksum.SumKVS() >= uint64(val.(int)) {
if err := common.KillMySelf(); err != nil {
log.L().Warn("KillMySelf() failed to kill itself", log.ShortError(err))
}
}
})
}
rc.checkpointsWg.Done()
}
// buildRunPeriodicActionAndCancelFunc builds the runPeriodicAction func and a cancel func
func (rc *Controller) buildRunPeriodicActionAndCancelFunc(ctx context.Context, stop <-chan struct{}) (func(), func(bool)) {
cancelFuncs := make([]func(bool), 0)
closeFuncs := make([]func(), 0)
// a nil channel blocks forever.
// if the cron duration is zero we use the nil channel to skip the action.
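	// (receiving from a nil channel never proceeds, so the corresponding select
	// case below simply never fires when the ticker is disabled.)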
var logProgressChan <-chan time.Time
if rc.cfg.Cron.LogProgress.Duration > 0 {
logProgressTicker := time.NewTicker(rc.cfg.Cron.LogProgress.Duration)
closeFuncs = append(closeFuncs, func() {
logProgressTicker.Stop()
})
logProgressChan = logProgressTicker.C
}
glueProgressTicker := time.NewTicker(3 * time.Second)
closeFuncs = append(closeFuncs, func() {
glueProgressTicker.Stop()
})
var switchModeChan <-chan time.Time
	// the tidb backend doesn't need to switch tikv to import mode
if rc.cfg.TikvImporter.Backend != config.BackendTiDB && rc.cfg.Cron.SwitchMode.Duration > 0 {
switchModeTicker := time.NewTicker(rc.cfg.Cron.SwitchMode.Duration)
cancelFuncs = append(cancelFuncs, func(bool) { switchModeTicker.Stop() })
cancelFuncs = append(cancelFuncs, func(do bool) {
if do {
rc.switchToNormalMode(ctx)
}
})
switchModeChan = switchModeTicker.C
}
var checkQuotaChan <-chan time.Time
// only local storage has disk quota concern.
if rc.cfg.TikvImporter.Backend == config.BackendLocal && rc.cfg.Cron.CheckDiskQuota.Duration > 0 {
checkQuotaTicker := time.NewTicker(rc.cfg.Cron.CheckDiskQuota.Duration)
cancelFuncs = append(cancelFuncs, func(bool) { checkQuotaTicker.Stop() })
checkQuotaChan = checkQuotaTicker.C
}
return func() {
defer func() {
for _, f := range closeFuncs {
f()
}
}()
if rc.cfg.Cron.SwitchMode.Duration > 0 {
rc.switchToImportMode(ctx)
}
start := time.Now()
for {
select {
case <-ctx.Done():
log.L().Warn("stopping periodic actions", log.ShortError(ctx.Err()))
return
case <-stop:
log.L().Info("everything imported, stopping periodic actions")
return
case <-switchModeChan:
// periodically switch to import mode, as requested by TiKV 3.0
rc.switchToImportMode(ctx)
case <-logProgressChan:
// log the current progress periodically, so OPS will know that we're still working
nanoseconds := float64(time.Since(start).Nanoseconds())
				// the estimated chunk count is not accurate (likely underestimated), and the actual count is not
				// accurate before the last table starts, so using the bigger of the two is a reasonable workaround
estimated := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateEstimated))
pending := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStatePending))
if estimated < pending {
estimated = pending
}
finished := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateFinished))
totalTables := metric.ReadCounter(metric.TableCounter.WithLabelValues(metric.TableStatePending, metric.TableResultSuccess))
completedTables := metric.ReadCounter(metric.TableCounter.WithLabelValues(metric.TableStateCompleted, metric.TableResultSuccess))
bytesRead := metric.ReadHistogramSum(metric.RowReadBytesHistogram)
engineEstimated := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStateEstimated, metric.TableResultSuccess))
enginePending := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStatePending, metric.TableResultSuccess))
if engineEstimated < enginePending {
engineEstimated = enginePending
}
engineFinished := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.TableStateImported, metric.TableResultSuccess))
bytesWritten := metric.ReadCounter(metric.BytesCounter.WithLabelValues(metric.TableStateWritten))
bytesImported := metric.ReadCounter(metric.BytesCounter.WithLabelValues(metric.TableStateImported))
var state string
var remaining zap.Field
switch {
case finished >= estimated:
if engineFinished < engineEstimated {
state = "importing"
} else {
state = "post-processing"
}
case finished > 0:
state = "writing"
default:
state = "preparing"
}
				// since we can't accurately estimate the extra time cost of importing after all writing is finished,
				// we use estimatedWritingProgress * 0.8 + estimatedImportingProgress * 0.2 as the total
				// progress.
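				// For example (illustrative numbers): with writePercent = 0.5,
				// bytesWritten = 800 MiB (so totalBytes = 1600 MiB) and
				// bytesImported = 400 MiB (importPercent = 0.25), the total is
				// 0.5*0.8 + 0.25*0.2 = 0.45, i.e. 45%.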
remaining = zap.Skip()
totalPercent := 0.0
if finished > 0 {
writePercent := math.Min(finished/estimated, 1.0)
importPercent := 1.0
if bytesWritten > 0 {
totalBytes := bytesWritten / writePercent
importPercent = math.Min(bytesImported/totalBytes, 1.0)
}
totalPercent = writePercent*0.8 + importPercent*0.2
if totalPercent < 1.0 {
remainNanoseconds := (1.0 - totalPercent) / totalPercent * nanoseconds
remaining = zap.Duration("remaining", time.Duration(remainNanoseconds).Round(time.Second))
}
}
formatPercent := func(finish, estimate float64) string {
speed := ""
if estimated > 0 {
speed = fmt.Sprintf(" (%.1f%%)", finish/estimate*100)
}
return speed
}
				// avoid printing the byte speed if there are no unfinished chunks
chunkSpeed := zap.Skip()
if bytesRead > 0 {
chunkSpeed = zap.Float64("speed(MiB/s)", bytesRead/(1048576e-9*nanoseconds))
}
// Note: a speed of 28 MiB/s roughly corresponds to 100 GiB/hour.
log.L().Info("progress",
zap.String("total", fmt.Sprintf("%.1f%%", totalPercent*100)),
// zap.String("files", fmt.Sprintf("%.0f/%.0f (%.1f%%)", finished, estimated, finished/estimated*100)),
zap.String("tables", fmt.Sprintf("%.0f/%.0f%s", completedTables, totalTables, formatPercent(completedTables, totalTables))),
zap.String("chunks", fmt.Sprintf("%.0f/%.0f%s", finished, estimated, formatPercent(finished, estimated))),
zap.String("engines", fmt.Sprintf("%.f/%.f%s", engineFinished, engineEstimated, formatPercent(engineFinished, engineEstimated))),
chunkSpeed,
zap.String("state", state),
remaining,
)
case <-checkQuotaChan:
// verify the total space occupied by sorted-kv-dir is below the quota,
// otherwise we perform an emergency import.
rc.enforceDiskQuota(ctx)
case <-glueProgressTicker.C:
finished := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateFinished))
rc.tidbGlue.Record(glue.RecordFinishedChunk, uint64(finished))
}
}
}, func(do bool) {
log.L().Info("cancel periodic actions", zap.Bool("do", do))
for _, f := range cancelFuncs {
f(do)
}
}
}
var checksumManagerKey struct{}
const (
pauseGCTTLForDupeRes = time.Hour
pauseGCIntervalForDupeRes = time.Minute
)
func (rc *Controller) keepPauseGCForDupeRes(ctx context.Context) (<-chan struct{}, error) {
tlsOpt := rc.tls.ToPDSecurityOption()
pdCli, err := pd.NewClientWithContext(ctx, []string{rc.cfg.TiDB.PdAddr}, tlsOpt)
if err != nil {
return nil, errors.Trace(err)
}
serviceID := "lightning-duplicate-resolution-" + uuid.New().String()
ttl := int64(pauseGCTTLForDupeRes / time.Second)
var (
safePoint uint64
paused bool
)
// Try to get the minimum safe point across all services as our GC safe point.
for i := 0; i < 10; i++ {
if i > 0 {
time.Sleep(time.Second * 3)
}
minSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, 1)
if err != nil {
pdCli.Close()
return nil, errors.Trace(err)
}
newMinSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, minSafePoint)
if err != nil {
pdCli.Close()
return nil, errors.Trace(err)
}
if newMinSafePoint <= minSafePoint {
safePoint = minSafePoint
paused = true
break
}
log.L().Warn(
"Failed to register GC safe point because the current minimum safe point is newer"+
" than what we assume, will retry newMinSafePoint next time",
zap.Uint64("minSafePoint", minSafePoint),
zap.Uint64("newMinSafePoint", newMinSafePoint),
)
}
if !paused {
pdCli.Close()
return nil, errors.New("failed to pause GC for duplicate resolution after all retries")
}
exitCh := make(chan struct{})
go func(safePoint uint64) {
defer pdCli.Close()
defer close(exitCh)
ticker := time.NewTicker(pauseGCIntervalForDupeRes)
defer ticker.Stop()
for {
select {
case <-ticker.C:
minSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, safePoint)
if err != nil {
log.L().Warn("Failed to register GC safe point", zap.Error(err))
continue
}
if minSafePoint > safePoint {
log.L().Warn("The current minimum safe point is newer than what we hold, duplicate records are at"+
"risk of being GC and not detectable",
zap.Uint64("safePoint", safePoint),
zap.Uint64("minSafePoint", minSafePoint),
)
safePoint = minSafePoint
}
case <-ctx.Done():
stopCtx, cancelFunc := context.WithTimeout(context.Background(), time.Second*5)
if _, err := pdCli.UpdateServiceGCSafePoint(stopCtx, serviceID, 0, safePoint); err != nil {
log.L().Warn("Failed to reset safe point ttl to zero", zap.Error(err))
}
// just make compiler happy
cancelFunc()
return
}
}
}(safePoint)
return exitCh, nil
}
func (rc *Controller) restoreTables(ctx context.Context) error {
if rc.cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
subCtx, cancel := context.WithCancel(ctx)
exitCh, err := rc.keepPauseGCForDupeRes(subCtx)
if err != nil {
cancel()
return errors.Trace(err)
}
defer func() {
cancel()
<-exitCh
}()
}
logTask := log.L().Begin(zap.InfoLevel, "restore all tables data")
if rc.tableWorkers == nil {
rc.tableWorkers = worker.NewPool(ctx, rc.cfg.App.TableConcurrency, "table")
}
if rc.indexWorkers == nil {
rc.indexWorkers = worker.NewPool(ctx, rc.cfg.App.IndexConcurrency, "index")
}
	// for the local backend, we should disable some PD schedulers and change some settings to
	// make region split and SST ingestion more stable.
	// the importer backend is mostly used for v3.x clusters, which don't support these APIs,
	// so we don't do this for the importer backend either.
finishSchedulers := func() {}
	// if one lightning instance failed abnormally and we can't determine whether it needs to switch back,
	// we do not switch back automatically
cleanupFunc := func() {}
switchBack := false
taskFinished := false
if rc.cfg.TikvImporter.Backend == config.BackendLocal {
logTask.Info("removing PD leader®ion schedulers")
restoreFn, err := rc.taskMgr.CheckAndPausePdSchedulers(ctx)
finishSchedulers = func() {
if restoreFn != nil {
// use context.Background to make sure this restore function can still be executed even if ctx is canceled
restoreCtx := context.Background()
needSwitchBack, needCleanup, err := rc.taskMgr.CheckAndFinishRestore(restoreCtx, taskFinished)
if err != nil {
logTask.Warn("check restore pd schedulers failed", zap.Error(err))
return
}
switchBack = needSwitchBack
if needSwitchBack {
if restoreE := restoreFn(restoreCtx); restoreE != nil {
logTask.Warn("failed to restore removed schedulers, you may need to restore them manually", zap.Error(restoreE))
}
logTask.Info("add back PD leader®ion schedulers")
// clean up task metas
if needCleanup {
logTask.Info("cleanup task metas")
if cleanupErr := rc.taskMgr.Cleanup(restoreCtx); cleanupErr != nil {
logTask.Warn("failed to clean task metas, you may need to restore them manually", zap.Error(cleanupErr))
}
// cleanup table meta and schema db if needed.
cleanupFunc = func() {
							if e := rc.taskMgr.CleanupAllMetas(restoreCtx); e != nil {
logTask.Warn("failed to clean table task metas, you may need to restore them manually", zap.Error(e))
}
}
}
}
}
rc.taskMgr.Close()
}
if err != nil {
return errors.Trace(err)
}
}
defer func() {
if switchBack {
cleanupFunc()
}
}()
type task struct {
tr *TableRestore
cp *checkpoints.TableCheckpoint
}
totalTables := 0
for _, dbMeta := range rc.dbMetas {
totalTables += len(dbMeta.Tables)
}
postProcessTaskChan := make(chan task, totalTables)
var wg sync.WaitGroup
var restoreErr common.OnceError
stopPeriodicActions := make(chan struct{})
periodicActions, cancelFunc := rc.buildRunPeriodicActionAndCancelFunc(ctx, stopPeriodicActions)
go periodicActions()
finishFuncCalled := false
defer func() {
if !finishFuncCalled {
finishSchedulers()
cancelFunc(switchBack)
finishFuncCalled = true
}
}()
defer close(stopPeriodicActions)
taskCh := make(chan task, rc.cfg.App.IndexConcurrency)
defer close(taskCh)
manager, err := newChecksumManager(ctx, rc)
if err != nil {
return errors.Trace(err)
}
ctx2 := context.WithValue(ctx, &checksumManagerKey, manager)
for i := 0; i < rc.cfg.App.IndexConcurrency; i++ {
go func() {
for task := range taskCh {
tableLogTask := task.tr.logger.Begin(zap.InfoLevel, "restore table")
web.BroadcastTableCheckpoint(task.tr.tableName, task.cp)
needPostProcess, err := task.tr.restoreTable(ctx2, rc, task.cp)
err = errors.Annotatef(err, "restore table %s failed", task.tr.tableName)
tableLogTask.End(zap.ErrorLevel, err)
web.BroadcastError(task.tr.tableName, err)
metric.RecordTableCount("completed", err)
restoreErr.Set(err)
if needPostProcess {
postProcessTaskChan <- task
}
wg.Done()
}
}()
}
for _, dbMeta := range rc.dbMetas {
dbInfo := rc.dbInfos[dbMeta.Name]
for _, tableMeta := range dbMeta.Tables {
tableInfo := dbInfo.Tables[tableMeta.Name]
tableName := common.UniqueTable(dbInfo.Name, tableInfo.Name)
cp, err := rc.checkpointsDB.Get(ctx, tableName)
if err != nil {
return errors.Trace(err)
}
igCols, err := rc.cfg.Mydumper.IgnoreColumns.GetIgnoreColumns(dbInfo.Name, tableInfo.Name, rc.cfg.Mydumper.CaseSensitive)
if err != nil {
return errors.Trace(err)
}
tr, err := NewTableRestore(tableName, tableMeta, dbInfo, tableInfo, cp, igCols.Columns)
if err != nil {
return errors.Trace(err)
}
wg.Add(1)
select {
case taskCh <- task{tr: tr, cp: cp}:
case <-ctx.Done():
return ctx.Err()
}
}
}
wg.Wait()
// if context is done, should return directly
select {
case <-ctx.Done():
err = restoreErr.Get()
if err == nil {
err = ctx.Err()
}
logTask.End(zap.ErrorLevel, err)
return err
default:
}
// stop periodic tasks for restore table such as pd schedulers and switch-mode tasks.
// this can help make cluster switching back to normal state more quickly.
// finishSchedulers()
// cancelFunc(switchBack)
// finishFuncCalled = true
taskFinished = true
close(postProcessTaskChan)
// otherwise, we should run all tasks in the post-process task chan
for i := 0; i < rc.cfg.App.TableConcurrency; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for task := range postProcessTaskChan {
metaMgr := rc.metaMgrBuilder.TableMetaMgr(task.tr)
// force all the remain post-process tasks to be executed
_, err = task.tr.postProcess(ctx2, rc, task.cp, true, metaMgr)
restoreErr.Set(err)
}
}()
}
wg.Wait()
err = restoreErr.Get()
logTask.End(zap.ErrorLevel, err)
return err
}
func (tr *TableRestore) restoreTable(
ctx context.Context,
rc *Controller,
cp *checkpoints.TableCheckpoint,
) (bool, error) {
// 1. Load the table info.
select {
case <-ctx.Done():
return false, ctx.Err()
default:
}
metaMgr := rc.metaMgrBuilder.TableMetaMgr(tr)
// no need to do anything if the chunks are already populated
if len(cp.Engines) > 0 {
tr.logger.Info("reusing engines and files info from checkpoint",
zap.Int("enginesCnt", len(cp.Engines)),
zap.Int("filesCnt", cp.CountChunks()),
)
} else if cp.Status < checkpoints.CheckpointStatusAllWritten {
if err := tr.populateChunks(ctx, rc, cp); err != nil {
return false, errors.Trace(err)
}
// fetch the max row_id of all chunks as the global max row_id
rowIDMax := int64(0)
for _, engine := range cp.Engines {
if len(engine.Chunks) > 0 && engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax > rowIDMax {
rowIDMax = engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax
}
}
db, _ := rc.tidbGlue.GetDB()
versionStr, err := version.FetchVersion(ctx, db)
if err != nil {
return false, errors.Trace(err)
}
versionInfo := version.ParseServerInfo(versionStr)
// "show table next_row_id" is only available after tidb v4.0.0
if versionInfo.ServerVersion.Major >= 4 &&
(rc.cfg.TikvImporter.Backend == config.BackendLocal || rc.cfg.TikvImporter.Backend == config.BackendImporter) {
// first, insert a new row into the meta table
if err = metaMgr.InitTableMeta(ctx); err != nil {
return false, err
}
checksum, rowIDBase, err := metaMgr.AllocTableRowIDs(ctx, rowIDMax)
if err != nil {
return false, err
}
tr.RebaseChunkRowIDs(cp, rowIDBase)
if checksum != nil {
if cp.Checksum != *checksum {
cp.Checksum = *checksum
rc.saveCpCh <- saveCp{
tableName: tr.tableName,
merger: &checkpoints.TableChecksumMerger{
Checksum: cp.Checksum,
},
}
}
tr.logger.Info("checksum before restore table", zap.Object("checksum", &cp.Checksum))
}
}
if err := rc.checkpointsDB.InsertEngineCheckpoints(ctx, tr.tableName, cp.Engines); err != nil {
return false, errors.Trace(err)
}
web.BroadcastTableCheckpoint(tr.tableName, cp)
// rebase the allocator so it exceeds the number of rows.
if tr.tableInfo.Core.PKIsHandle && tr.tableInfo.Core.ContainsAutoRandomBits() {
cp.AllocBase = mathutil.MaxInt64(cp.AllocBase, tr.tableInfo.Core.AutoRandID)
if err := tr.alloc.Get(autoid.AutoRandomType).Rebase(context.Background(), cp.AllocBase, false); err != nil {
return false, err
}
} else {
cp.AllocBase = mathutil.MaxInt64(cp.AllocBase, tr.tableInfo.Core.AutoIncID)
if err := tr.alloc.Get(autoid.RowIDAllocType).Rebase(context.Background(), cp.AllocBase, false); err != nil {
return false, err
}
}
rc.saveCpCh <- saveCp{
tableName: tr.tableName,
merger: &checkpoints.RebaseCheckpointMerger{
AllocBase: cp.AllocBase,
},
}
}
// 2. Restore engines (if still needed)
err := tr.restoreEngines(ctx, rc, cp)
if err != nil {
return false, errors.Trace(err)
}
err = metaMgr.UpdateTableStatus(ctx, metaStatusRestoreFinished)
if err != nil {
return false, errors.Trace(err)
}
// 3. Post-process. With the last parameter set to false, we allow the analyze step to be delayed and executed later
return tr.postProcess(ctx, rc, cp, false /* force-analyze */, metaMgr)
}
// do a full compaction of the whole data set.
func (rc *Controller) fullCompact(ctx context.Context) error {
if !rc.cfg.PostRestore.Compact {
log.L().Info("skip full compaction")
return nil
}
// wait for any existing level-1 compaction to complete first.
task := log.L().Begin(zap.InfoLevel, "wait for completion of existing level 1 compaction")
for !rc.compactState.CAS(compactStateIdle, compactStateDoing) {
time.Sleep(100 * time.Millisecond)
}
task.End(zap.ErrorLevel, nil)
return errors.Trace(rc.doCompact(ctx, FullLevelCompact))
}
func (rc *Controller) doCompact(ctx context.Context, level int32) error {
tls := rc.tls.WithHost(rc.cfg.TiDB.PdAddr)
return tikv.ForAllStores(
ctx,
tls,
tikv.StoreStateDisconnected,
func(c context.Context, store *tikv.Store) error {
return tikv.Compact(c, tls, store.Address, level)
},
)
}
func (rc *Controller) switchToImportMode(ctx context.Context) {
log.L().Info("switch to import mode")
rc.switchTiKVMode(ctx, sstpb.SwitchMode_Import)
}
func (rc *Controller) switchToNormalMode(ctx context.Context) {
log.L().Info("switch to normal mode")
rc.switchTiKVMode(ctx, sstpb.SwitchMode_Normal)
}
func (rc *Controller) switchTiKVMode(ctx context.Context, mode sstpb.SwitchMode) {
// the tidb backend doesn't need to switch tikv to import mode
if rc.isTiDBBackend() {
return
}
// It is fine if we miss some stores which did not switch to Import mode,
// since we're running it periodically, so we exclude disconnected stores.
// But it is essential all stores be switched back to Normal mode to allow
// normal operation.
var minState tikv.StoreState
if mode == sstpb.SwitchMode_Import {
minState = tikv.StoreStateOffline
} else {
minState = tikv.StoreStateDisconnected
}
tls := rc.tls.WithHost(rc.cfg.TiDB.PdAddr)
// we ignore switch mode failure since it is not fatal.
// no need log the error, it is done in kv.SwitchMode already.
_ = tikv.ForAllStores(
ctx,
tls,
minState,
func(c context.Context, store *tikv.Store) error {
return tikv.SwitchMode(c, tls, store.Address, mode)
},
)
}
func (rc *Controller) enforceDiskQuota(ctx context.Context) {
if !rc.diskQuotaState.CAS(diskQuotaStateIdle, diskQuotaStateChecking) {
// do not run multiple disk quota checks / imports simultaneously.
// (we execute the lock check in background to avoid blocking the cron thread)
return
}
go func() {
// locker is assigned when we detect the disk quota is exceeded.
// before the disk quota is confirmed exceeded, we keep the diskQuotaLock
// unlocked to avoid periodically interrupting the writer threads.
var locker sync.Locker
defer func() {
rc.diskQuotaState.Store(diskQuotaStateIdle)
if locker != nil {
locker.Unlock()
}
}()
isRetrying := false
for {
// sleep for a cycle if we are retrying because there is nothing new to import.
if isRetrying {
select {
case <-ctx.Done():
return
case <-time.After(rc.cfg.Cron.CheckDiskQuota.Duration):
}
} else {
isRetrying = true
}
quota := int64(rc.cfg.TikvImporter.DiskQuota)
largeEngines, inProgressLargeEngines, totalDiskSize, totalMemSize := rc.backend.CheckDiskQuota(quota)
metric.LocalStorageUsageBytesGauge.WithLabelValues("disk").Set(float64(totalDiskSize))
metric.LocalStorageUsageBytesGauge.WithLabelValues("mem").Set(float64(totalMemSize))
logger := log.With(
zap.Int64("diskSize", totalDiskSize),
zap.Int64("memSize", totalMemSize),
zap.Int64("quota", quota),
zap.Int("largeEnginesCount", len(largeEngines)),
zap.Int("inProgressLargeEnginesCount", inProgressLargeEngines))
if len(largeEngines) == 0 && inProgressLargeEngines == 0 {
logger.Debug("disk quota respected")
return
}
if locker == nil {
// block all writers once we detect that the disk quota is exceeded.
rc.diskQuotaLock.Lock()
locker = rc.diskQuotaLock
}
logger.Warn("disk quota exceeded")
if len(largeEngines) == 0 {
logger.Warn("all large engines are already importing, keep blocking all writes")
continue
}
// flush all engines so that checkpoints can be updated.
if err := rc.backend.FlushAll(ctx); err != nil {
logger.Error("flush engine for disk quota failed, check again later", log.ShortError(err))
return
}
// at this point, all engines are synchronized on disk.
// we then import the large engines one by one and finish.
// if any engine fails to import, we just try again next time, since the data is still intact.
rc.diskQuotaState.Store(diskQuotaStateImporting)
task := logger.Begin(zap.WarnLevel, "importing large engines for disk quota")
var importErr error
for _, engine := range largeEngines {
// Use a larger split region size to avoid splitting the same region too many times.
if err := rc.backend.UnsafeImportAndReset(ctx, engine, int64(config.SplitRegionSize)*int64(config.MaxSplitRegionSizeRatio)); err != nil {
importErr = multierr.Append(importErr, err)
}
}
task.End(zap.ErrorLevel, importErr)
return
}
}()
}
func (rc *Controller) setGlobalVariables(ctx context.Context) error {
// skip for tidb backend to be compatible with MySQL
if rc.isTiDBBackend() {
return nil
}
// set the new collation flag based on the tidb config
enabled := ObtainNewCollationEnabled(ctx, rc.tidbGlue.GetSQLExecutor())
// we should enable/disable new collation here since in server mode, tidb config
// may be different in different tasks
collate.SetNewCollationEnabledForTest(enabled)
return nil
}
func (rc *Controller) waitCheckpointFinish() {
// wait checkpoint process finish so that we can do cleanup safely
close(rc.saveCpCh)
rc.checkpointsWg.Wait()
}
func (rc *Controller) cleanCheckpoints(ctx context.Context) error {
rc.waitCheckpointFinish()
if !rc.cfg.Checkpoint.Enable {
return nil
}
logger := log.With(
zap.Stringer("keepAfterSuccess", rc.cfg.Checkpoint.KeepAfterSuccess),
zap.Int64("taskID", rc.cfg.TaskID),
)
task := logger.Begin(zap.InfoLevel, "clean checkpoints")
var err error
switch rc.cfg.Checkpoint.KeepAfterSuccess {
case config.CheckpointRename:
err = rc.checkpointsDB.MoveCheckpoints(ctx, rc.cfg.TaskID)
case config.CheckpointRemove:
err = rc.checkpointsDB.RemoveCheckpoint(ctx, "all")
}
task.End(zap.ErrorLevel, err)
return errors.Annotate(err, "clean checkpoints")
}
func (rc *Controller) isLocalBackend() bool {
return rc.cfg.TikvImporter.Backend == config.BackendLocal
}
func (rc *Controller) isTiDBBackend() bool {
return rc.cfg.TikvImporter.Backend == config.BackendTiDB
}
// preCheckRequirements checks
// 1. Cluster resource
// 2. Local node resource
// 3. Cluster region
// 4. Lightning configuration
// before restore tables start.
func (rc *Controller) preCheckRequirements(ctx context.Context) error {
if rc.cfg.App.CheckRequirements {
if err := rc.ClusterIsAvailable(ctx); err != nil {
return errors.Trace(err)
}
if err := rc.StoragePermission(ctx); err != nil {
return errors.Trace(err)
}
}
if err := rc.metaMgrBuilder.Init(ctx); err != nil {
return err
}
taskExist := false
// We still need to sample the source data even if this task already exists, because we need to judge whether the
// source is ordered by row key to decide how to sort the local data.
source, err := rc.estimateSourceData(ctx)
if err != nil {
return errors.Trace(err)
}
if rc.isLocalBackend() {
pdController, err := pdutil.NewPdController(ctx, rc.cfg.TiDB.PdAddr,
rc.tls.TLSConfig(), rc.tls.ToPDSecurityOption())
if err != nil {
return errors.Trace(err)
}
// PdController will be closed when `taskMetaMgr` closes.
rc.taskMgr = rc.metaMgrBuilder.TaskMetaMgr(pdController)
taskExist, err = rc.taskMgr.CheckTaskExist(ctx)
if err != nil {
return errors.Trace(err)
}
if !taskExist {
if err = rc.taskMgr.InitTask(ctx, source); err != nil {
return errors.Trace(err)
}
if rc.cfg.App.CheckRequirements {
err = rc.localResource(source)
if err != nil {
return errors.Trace(err)
}
if err := rc.clusterResource(ctx, source); err != nil {
rc.taskMgr.CleanupTask(ctx)
return errors.Trace(err)
}
if err := rc.checkClusterRegion(ctx); err != nil {
return errors.Trace(err)
}
}
}
}
if rc.tidbGlue.OwnsSQLExecutor() && rc.cfg.App.CheckRequirements {
fmt.Print(rc.checkTemplate.Output())
}
if !rc.checkTemplate.Success() {
if !taskExist && rc.taskMgr != nil {
rc.taskMgr.CleanupTask(ctx)
}
return errors.Errorf("tidb-lightning check failed."+
" Please fix the failed check(s):\n %s", rc.checkTemplate.FailedMsg())
}
return nil
}
// DataCheck checks the data schema; it requires rc.restoreSchema to have finished.
func (rc *Controller) DataCheck(ctx context.Context) error {
var err error
if rc.cfg.App.CheckRequirements {
err = rc.HasLargeCSV(rc.dbMetas)
if err != nil {
return errors.Trace(err)
}
}
checkPointCriticalMsgs := make([]string, 0, len(rc.dbMetas))
schemaCriticalMsgs := make([]string, 0, len(rc.dbMetas))
var msgs []string
for _, dbInfo := range rc.dbMetas {
for _, tableInfo := range dbInfo.Tables {
// if the table has a checkpoint, the import will resume from that checkpoint,
// so we can skip the TableHasDataInCluster and SchemaIsValid checks.
noCheckpoint := true
if rc.cfg.Checkpoint.Enable {
if msgs, noCheckpoint, err = rc.CheckpointIsValid(ctx, tableInfo); err != nil {
return errors.Trace(err)
}
if len(msgs) != 0 {
checkPointCriticalMsgs = append(checkPointCriticalMsgs, msgs...)
}
}
if rc.cfg.App.CheckRequirements && noCheckpoint && rc.cfg.TikvImporter.Backend != config.BackendTiDB {
if msgs, err = rc.SchemaIsValid(ctx, tableInfo); err != nil {
return errors.Trace(err)
}
if len(msgs) != 0 {
schemaCriticalMsgs = append(schemaCriticalMsgs, msgs...)
}
}
}
}
err = rc.checkCSVHeader(ctx, rc.dbMetas)
if err != nil {
return err
}
if len(checkPointCriticalMsgs) != 0 {
rc.checkTemplate.Collect(Critical, false, strings.Join(checkPointCriticalMsgs, "\n"))
} else {
rc.checkTemplate.Collect(Critical, true, "checkpoints are valid")
}
if len(schemaCriticalMsgs) != 0 {
rc.checkTemplate.Collect(Critical, false, strings.Join(schemaCriticalMsgs, "\n"))
} else {
rc.checkTemplate.Collect(Critical, true, "table schemas are valid")
}
return nil
}
type chunkRestore struct {
parser mydump.Parser
index int
chunk *checkpoints.ChunkCheckpoint
}
func newChunkRestore(
ctx context.Context,
index int,
cfg *config.Config,
chunk *checkpoints.ChunkCheckpoint,
ioWorkers *worker.Pool,
store storage.ExternalStorage,
tableInfo *checkpoints.TidbTableInfo,
) (*chunkRestore, error) {
blockBufSize := int64(cfg.Mydumper.ReadBlockSize)
var reader storage.ReadSeekCloser
var err error
if chunk.FileMeta.Type == mydump.SourceTypeParquet {
reader, err = mydump.OpenParquetReader(ctx, store, chunk.FileMeta.Path, chunk.FileMeta.FileSize)
} else {
reader, err = store.Open(ctx, chunk.FileMeta.Path)
}
if err != nil {
return nil, errors.Trace(err)
}
var parser mydump.Parser
switch chunk.FileMeta.Type {
case mydump.SourceTypeCSV:
hasHeader := cfg.Mydumper.CSV.Header && chunk.Chunk.Offset == 0
// Create a utf8mb4 convertor to encode and decode data with the charset of CSV files.
charsetConvertor, err := mydump.NewCharsetConvertor(cfg.Mydumper.DataCharacterSet, cfg.Mydumper.DataInvalidCharReplace)
if err != nil {
return nil, err
}
parser, err = mydump.NewCSVParser(&cfg.Mydumper.CSV, reader, blockBufSize, ioWorkers, hasHeader, charsetConvertor)
if err != nil {
return nil, errors.Trace(err)
}
case mydump.SourceTypeSQL:
parser = mydump.NewChunkParser(cfg.TiDB.SQLMode, reader, blockBufSize, ioWorkers)
case mydump.SourceTypeParquet:
parser, err = mydump.NewParquetParser(ctx, store, reader, chunk.FileMeta.Path)
if err != nil {
return nil, errors.Trace(err)
}
default:
panic(fmt.Sprintf("file '%s' with unknown source type '%s'", chunk.Key.Path, chunk.FileMeta.Type.String()))
}
if err = parser.SetPos(chunk.Chunk.Offset, chunk.Chunk.PrevRowIDMax); err != nil {
return nil, errors.Trace(err)
}
if len(chunk.ColumnPermutation) > 0 {
parser.SetColumns(getColumnNames(tableInfo.Core, chunk.ColumnPermutation))
}
return &chunkRestore{
parser: parser,
index: index,
chunk: chunk,
}, nil
}
func (cr *chunkRestore) close() {
cr.parser.Close()
}
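// getColumnNames maps a chunk's column permutation back to column names in data-file order.
// Illustrative example (not from the original source): for a table with columns (a, b, c) and
// permutation [1, 0, -1] (table column a appears as file column 1, b as file column 0, and c is
// absent from the file), the result is ["b", "a"]. An index equal to len(tableInfo.Columns)
// denotes the extra _tidb_rowid handle column.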
func getColumnNames(tableInfo *model.TableInfo, permutation []int) []string {
colIndexes := make([]int, 0, len(permutation))
for i := 0; i < len(permutation); i++ {
colIndexes = append(colIndexes, -1)
}
colCnt := 0
for i, p := range permutation {
if p >= 0 {
colIndexes[p] = i
colCnt++
}
}
names := make([]string, 0, colCnt)
for _, idx := range colIndexes {
// skip columns with index -1
if idx >= 0 {
// original fields contains _tidb_rowid field
if idx == len(tableInfo.Columns) {
names = append(names, model.ExtraHandleName.O)
} else {
names = append(names, tableInfo.Columns[idx].Name.O)
}
}
}
return names
}
var (
maxKVQueueSize = 32 // Cache at most this number of rows before blocking the encode loop
minDeliverBytes uint64 = 96 * units.KiB // 96 KB (data + index). batch at least this amount of bytes to reduce number of messages
)
type deliveredKVs struct {
kvs kv.Row // if kvs is nil, this indicates we've got the last message.
columns []string
offset int64
rowID int64
}
type deliverResult struct {
totalDur time.Duration
err error
}
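// deliverLoop is the consumer half of the chunk-restore pipeline: encodeLoop (below) parses and
// encodes rows into KV pairs and sends them over kvsCh; deliverLoop drains kvsCh in batches of at
// least minDeliverBytes, writes the KV pairs to the data and index engine writers, and updates the
// chunk checkpoint as data is synced.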
//nolint:nakedret // TODO: refactor
func (cr *chunkRestore) deliverLoop(
ctx context.Context,
kvsCh <-chan []deliveredKVs,
t *TableRestore,
engineID int32,
dataEngine, indexEngine *backend.LocalEngineWriter,
rc *Controller,
) (deliverTotalDur time.Duration, err error) {
var channelClosed bool
deliverLogger := t.logger.With(
zap.Int32("engineNumber", engineID),
zap.Int("fileIndex", cr.index),
zap.Stringer("path", &cr.chunk.Key),
zap.String("task", "deliver"),
)
// Fetch enough KV pairs from the source.
dataKVs := rc.backend.MakeEmptyRows()
indexKVs := rc.backend.MakeEmptyRows()
dataSynced := true
for !channelClosed {
var dataChecksum, indexChecksum verify.KVChecksum
var columns []string
var kvPacket []deliveredKVs
// init these two fields to the checkpoint's current values, so even if no kv pairs are delivered,
// the chunk checkpoint stays the same
offset := cr.chunk.Chunk.Offset
rowID := cr.chunk.Chunk.PrevRowIDMax
populate:
for dataChecksum.SumSize()+indexChecksum.SumSize() < minDeliverBytes {
select {
case kvPacket = <-kvsCh:
if len(kvPacket) == 0 {
channelClosed = true
break populate
}
for _, p := range kvPacket {
p.kvs.ClassifyAndAppend(&dataKVs, &dataChecksum, &indexKVs, &indexChecksum)
columns = p.columns
offset = p.offset
rowID = p.rowID
}
case <-ctx.Done():
err = ctx.Err()
return
}
}
err = func() error {
// We use `TryRLock` with sleep here to avoid blocking current goroutine during importing when disk-quota is
// triggered, so that we can save chunkCheckpoint as soon as possible after `FlushEngine` is called.
// This implementation may not be very elegant or even completely correct, but it is currently a relatively
// simple and effective solution.
for !rc.diskQuotaLock.TryRLock() {
// try to update chunk checkpoint, this can help save checkpoint after importing when disk-quota is triggered
if !dataSynced {
dataSynced = cr.maybeSaveCheckpoint(rc, t, engineID, cr.chunk, dataEngine, indexEngine)
}
time.Sleep(time.Millisecond)
}
defer rc.diskQuotaLock.RUnlock()
// Write KVs into the engine
start := time.Now()
if err = dataEngine.WriteRows(ctx, columns, dataKVs); err != nil {
if !common.IsContextCanceledError(err) {
deliverLogger.Error("write to data engine failed", log.ShortError(err))
}
return errors.Trace(err)
}
if err = indexEngine.WriteRows(ctx, columns, indexKVs); err != nil {
if !common.IsContextCanceledError(err) {
deliverLogger.Error("write to index engine failed", log.ShortError(err))
}
return errors.Trace(err)
}
deliverDur := time.Since(start)
deliverTotalDur += deliverDur
metric.BlockDeliverSecondsHistogram.Observe(deliverDur.Seconds())
metric.BlockDeliverBytesHistogram.WithLabelValues(metric.BlockDeliverKindData).Observe(float64(dataChecksum.SumSize()))
metric.BlockDeliverBytesHistogram.WithLabelValues(metric.BlockDeliverKindIndex).Observe(float64(indexChecksum.SumSize()))
metric.BlockDeliverKVPairsHistogram.WithLabelValues(metric.BlockDeliverKindData).Observe(float64(dataChecksum.SumKVS()))
metric.BlockDeliverKVPairsHistogram.WithLabelValues(metric.BlockDeliverKindIndex).Observe(float64(indexChecksum.SumKVS()))
return nil
}()
if err != nil {
return
}
dataSynced = false
dataKVs = dataKVs.Clear()
indexKVs = indexKVs.Clear()
// Update the table, and save a checkpoint.
// (the write to the importer is effective immediately, thus update these here)
// No need to apply a lock since this is the only thread updating `cr.chunk.**`.
// In local mode, we should write these checkpoints after the engine is flushed.
cr.chunk.Checksum.Add(&dataChecksum)
cr.chunk.Checksum.Add(&indexChecksum)
cr.chunk.Chunk.Offset = offset
cr.chunk.Chunk.PrevRowIDMax = rowID
if dataChecksum.SumKVS() != 0 || indexChecksum.SumKVS() != 0 {
// No need to save checkpoint if nothing was delivered.
dataSynced = cr.maybeSaveCheckpoint(rc, t, engineID, cr.chunk, dataEngine, indexEngine)
}
failpoint.Inject("SlowDownWriteRows", func() {
deliverLogger.Warn("Slowed down write rows")
})
failpoint.Inject("FailAfterWriteRows", nil)
// TODO: for local backend, we may save checkpoints more frequently, e.g. after writing
// 10GB of kv pairs to the data engine, we can flush both the data & index engines, then we
// can safely update the current checkpoint.
failpoint.Inject("LocalBackendSaveCheckpoint", func() {
if !rc.isLocalBackend() && (dataChecksum.SumKVS() != 0 || indexChecksum.SumKVS() != 0) {
// No need to save checkpoint if nothing was delivered.
saveCheckpoint(rc, t, engineID, cr.chunk)
}
})
}
return
}
func (cr *chunkRestore) maybeSaveCheckpoint(
rc *Controller,
t *TableRestore,
engineID int32,
chunk *checkpoints.ChunkCheckpoint,
data, index *backend.LocalEngineWriter,
) bool {
if data.IsSynced() && index.IsSynced() {
saveCheckpoint(rc, t, engineID, chunk)
return true
}
return false
}
func saveCheckpoint(rc *Controller, t *TableRestore, engineID int32, chunk *checkpoints.ChunkCheckpoint) {
// We need to update the AllocBase every time we've finished a file.
// The AllocBase is determined by the maximum of the "handle" (_tidb_rowid
// or integer primary key), which can only be obtained by reading all data.
var base int64
if t.tableInfo.Core.PKIsHandle && t.tableInfo.Core.ContainsAutoRandomBits() {
base = t.alloc.Get(autoid.AutoRandomType).Base() + 1
} else {
base = t.alloc.Get(autoid.RowIDAllocType).Base() + 1
}
rc.saveCpCh <- saveCp{
tableName: t.tableName,
merger: &checkpoints.RebaseCheckpointMerger{
AllocBase: base,
},
}
rc.saveCpCh <- saveCp{
tableName: t.tableName,
merger: &checkpoints.ChunkCheckpointMerger{
EngineID: engineID,
Key: chunk.Key,
Checksum: chunk.Checksum,
Pos: chunk.Chunk.Offset,
RowID: chunk.Chunk.PrevRowIDMax,
ColumnPermutation: chunk.ColumnPermutation,
},
}
}
//nolint:nakedret // TODO: refactor
func (cr *chunkRestore) encodeLoop(
ctx context.Context,
kvsCh chan<- []deliveredKVs,
t *TableRestore,
logger log.Logger,
kvEncoder kv.Encoder,
deliverCompleteCh <-chan deliverResult,
rc *Controller,
) (readTotalDur time.Duration, encodeTotalDur time.Duration, err error) {
send := func(kvs []deliveredKVs) error {
select {
case kvsCh <- kvs:
return nil
case <-ctx.Done():
return ctx.Err()
case deliverResult, ok := <-deliverCompleteCh:
if deliverResult.err == nil && !ok {
deliverResult.err = ctx.Err()
}
if deliverResult.err == nil {
deliverResult.err = errors.New("unexpected premature fulfillment")
logger.DPanic("unexpected: deliverCompleteCh prematurely fulfilled with no error", zap.Bool("chIsOpen", ok))
}
return errors.Trace(deliverResult.err)
}
}
pauser, maxKvPairsCnt := rc.pauser, rc.cfg.TikvImporter.MaxKVPairs
initializedColumns, reachEOF := false, false
for !reachEOF {
if err = pauser.Wait(ctx); err != nil {
return
}
offset, _ := cr.parser.Pos()
if offset >= cr.chunk.Chunk.EndOffset {
break
}
var readDur, encodeDur time.Duration
canDeliver := false
kvPacket := make([]deliveredKVs, 0, maxKvPairsCnt)
curOffset := offset
var newOffset, rowID int64
var kvSize uint64
outLoop:
for !canDeliver {
readDurStart := time.Now()
err = cr.parser.ReadRow()
columnNames := cr.parser.Columns()
newOffset, rowID = cr.parser.Pos()
switch errors.Cause(err) {
case nil:
if !initializedColumns {
if len(cr.chunk.ColumnPermutation) == 0 {
if err = t.initializeColumns(columnNames, cr.chunk); err != nil {
return
}
}
initializedColumns = true
}
case io.EOF:
reachEOF = true
break outLoop
default:
err = errors.Annotatef(err, "in file %s at offset %d", &cr.chunk.Key, newOffset)
return
}
readDur += time.Since(readDurStart)
encodeDurStart := time.Now()
lastRow := cr.parser.LastRow()
// sql -> kv
kvs, encodeErr := kvEncoder.Encode(logger, lastRow.Row, lastRow.RowID, cr.chunk.ColumnPermutation, cr.chunk.Key.Path, curOffset)
encodeDur += time.Since(encodeDurStart)
hasIgnoredEncodeErr := false
if encodeErr != nil {
rowText := tidb.EncodeRowForRecord(t.encTable, rc.cfg.TiDB.SQLMode, lastRow.Row, cr.chunk.ColumnPermutation)
encodeErr = rc.errorMgr.RecordTypeError(ctx, logger, t.tableName, cr.chunk.Key.Path, newOffset, rowText, encodeErr)
err = errors.Annotatef(encodeErr, "in file %s at offset %d", &cr.chunk.Key, newOffset)
hasIgnoredEncodeErr = true
}
cr.parser.RecycleRow(lastRow)
curOffset = newOffset
if err != nil {
return
}
if hasIgnoredEncodeErr {
continue
}
kvPacket = append(kvPacket, deliveredKVs{kvs: kvs, columns: columnNames, offset: newOffset, rowID: rowID})
kvSize += kvs.Size()
failpoint.Inject("mock-kv-size", func(val failpoint.Value) {
kvSize += uint64(val.(int))
})
// pebble does not allow > 4.0G of kv data in one batch.
// we would hit a pebble panic when importing a sql file whose kv pairs are each larger than 4G / maxKvPairsCnt,
// so add this check.
if kvSize >= minDeliverBytes || len(kvPacket) >= maxKvPairsCnt || newOffset == cr.chunk.Chunk.EndOffset {
canDeliver = true
kvSize = 0
}
}
encodeTotalDur += encodeDur
metric.RowEncodeSecondsHistogram.Observe(encodeDur.Seconds())
readTotalDur += readDur
metric.RowReadSecondsHistogram.Observe(readDur.Seconds())
metric.RowReadBytesHistogram.Observe(float64(newOffset - offset))
if len(kvPacket) != 0 {
deliverKvStart := time.Now()
if err = send(kvPacket); err != nil {
return
}
metric.RowKVDeliverSecondsHistogram.Observe(time.Since(deliverKvStart).Seconds())
}
}
err = send([]deliveredKVs{})
return
}
func (cr *chunkRestore) restore(
ctx context.Context,
t *TableRestore,
engineID int32,
dataEngine, indexEngine *backend.LocalEngineWriter,
rc *Controller,
) error {
// Create the encoder.
kvEncoder, err := rc.backend.NewEncoder(t.encTable, &kv.SessionOptions{
SQLMode: rc.cfg.TiDB.SQLMode,
Timestamp: cr.chunk.Timestamp,
SysVars: rc.sysVars,
// use chunk.PrevRowIDMax as the auto random seed, so it stays the same after recovering from a checkpoint.
AutoRandomSeed: cr.chunk.Chunk.PrevRowIDMax,
})
if err != nil {
return err
}
kvsCh := make(chan []deliveredKVs, maxKVQueueSize)
deliverCompleteCh := make(chan deliverResult)
defer func() {
kvEncoder.Close()
kvEncoder = nil
close(kvsCh)
}()
go func() {
defer close(deliverCompleteCh)
dur, err := cr.deliverLoop(ctx, kvsCh, t, engineID, dataEngine, indexEngine, rc)
select {
case <-ctx.Done():
case deliverCompleteCh <- deliverResult{dur, err}:
}
}()
logTask := t.logger.With(
zap.Int32("engineNumber", engineID),
zap.Int("fileIndex", cr.index),
zap.Stringer("path", &cr.chunk.Key),
).Begin(zap.InfoLevel, "restore file")
readTotalDur, encodeTotalDur, err := cr.encodeLoop(ctx, kvsCh, t, logTask.Logger, kvEncoder, deliverCompleteCh, rc)
if err != nil {
return err
}
select {
case deliverResult, ok := <-deliverCompleteCh:
if ok {
logTask.End(zap.ErrorLevel, deliverResult.err,
zap.Duration("readDur", readTotalDur),
zap.Duration("encodeDur", encodeTotalDur),
zap.Duration("deliverDur", deliverResult.totalDur),
zap.Object("checksum", &cr.chunk.Checksum),
)
return errors.Trace(deliverResult.err)
}
// else, this must be caused by ctx cancellation
return ctx.Err()
case <-ctx.Done():
return ctx.Err()
}
}
| c4pt0r/tidb | br/pkg/lightning/restore/restore.go | GO | apache-2.0 | 75,726 |
/* -------------------------------------------------------------------------- */
/* Copyright 2002-2016, OpenNebula Project, OpenNebula Systems */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may */
/* not use this file except in compliance with the License. You may obtain */
/* a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */
/* See the License for the specific language governing permissions and */
/* limitations under the License. */
/* -------------------------------------------------------------------------- */
define(function(require) {
/*
DEPENDENCIES
*/
// require('foundation.tab');
var BaseFormPanel = require('utils/form-panels/form-panel');
var Sunstone = require('sunstone');
var Locale = require('utils/locale');
//var Tips = require('utils/tips');
var TemplateUtils = require('utils/template-utils');
var WizardFields = require('utils/wizard-fields');
var RoleTab = require('tabs/vmgroup-tab/utils/role-tab');
var AffinityRoleTab = require('tabs/vmgroup-tab/utils/affinity-role-tab');
var Notifier = require('utils/notifier');
var Utils = require('../utils/common');
/*
TEMPLATES
*/
var TemplateWizardHTML = require('hbs!./create/wizard');
var TemplateAdvancedHTML = require('hbs!./create/advanced');
/*
CONSTANTS
*/
var FORM_PANEL_ID = require('./create/formPanelId');
var TAB_ID = require('../tabId');
/*
CONSTRUCTOR
*/
function FormPanel() {
this.formPanelId = FORM_PANEL_ID;
this.tabId = TAB_ID;
this.affinity_role_tab = new AffinityRoleTab([]);
this.actions = {
'create': {
'title': Locale.tr("Create Virtual Machine Group"),
'buttonText': Locale.tr("Create"),
'resetButton': true
},
'update': {
'title': Locale.tr("Update Virtual Machine Group"),
'buttonText': Locale.tr("Update"),
'resetButton': false
}
};
BaseFormPanel.call(this);
}
FormPanel.FORM_PANEL_ID = FORM_PANEL_ID;
FormPanel.prototype = Object.create(BaseFormPanel.prototype);
FormPanel.prototype.constructor = FormPanel;
FormPanel.prototype.htmlWizard = _htmlWizard;
FormPanel.prototype.htmlAdvanced = _htmlAdvanced;
FormPanel.prototype.submitWizard = _submitWizard;
FormPanel.prototype.submitAdvanced = _submitAdvanced;
FormPanel.prototype.onShow = _onShow;
FormPanel.prototype.fill = _fill;
FormPanel.prototype.setup = _setup;
FormPanel.prototype.addRoleTab = _add_role_tab;
return FormPanel;
/*
FUNCTION DEFINITIONS
*/
function _htmlWizard() {
var opts = {
info: false,
select: true
};
return TemplateWizardHTML({
'affinity-role-tab': this.affinity_role_tab.html(),
'formPanelId': this.formPanelId
});
}
function _htmlAdvanced() {
return TemplateAdvancedHTML({formPanelId: this.formPanelId});
}
function _setup(context) {
this.roleTabObjects = {};
var that = this;
var roles_index = 0;
this.affinity_role_tab.setup(context);
// Fill parents table
// Each time a tab is clicked the table is filled with existing tabs (roles)
// Selected roles are kept
// TODO If the name of a role is changed and is selected, selection will be lost
$("#roles_tabs", context).on("click", "a", function() {
var tab_id = "#"+this.id+"Tab";
var str = "";
$(tab_id+" .parent_roles").hide();
var parent_role_available = false;
$("#roles_tabs_content #role_name", context).each(function(){
if ($(this).val() != "" && ($(this).val() != $(tab_id+" #role_name", context).val())) {
parent_role_available = true;
str += "<tr>\
<td style='width:10%'>\
<input class='check_item' type='checkbox' value='"+$(this).val()+"' id='"+$(this).val()+"'/>\
</td>\
<td>"+$(this).val()+"</td>\
</tr>";
}
});
if (parent_role_available) {
$(tab_id+" .parent_roles", context).show();
}
var selected_parents = [];
$(tab_id+" .parent_roles_body input:checked", context).each(function(){
selected_parents.push($(this).val());
});
$(tab_id+" .parent_roles_body", context).html(str);
$.each(selected_parents, function(){
$(tab_id+" .parent_roles_body #"+this, context).attr('checked', true);
});
});
$("#tf_btn_roles", context).bind("click", function(){
that.addRoleTab(roles_index, context);
roles_index++;
return false;
});
/*$("#btn_refresh_roles", context).bind("click", function(){
$("#btn_refresh_roles", context).html("<i class='fa fa-angle-double-down'></i> "+Locale.tr("Refresh roles"));
that.affinity_role_tab.refresh(context, that.roleTabObjects);
});*/
//---------btn_group_vm_roles
Foundation.reflow(context, 'tabs');
// Add first role
$("#tf_btn_roles", context).trigger("click");
//Tips.setup();
return false;
}
function _submitWizard(context) {
var that = this;
var name = WizardFields.retrieveInput($('#vm_group_name', context));
var description = WizardFields.retrieveInput($('#vm_group_description', context));
var role = [];
$('.role_content', context).each(function() {
var role_id = $(this).attr("role_id");
role.push(that.roleTabObjects[role_id].retrieve($(this)));
});
// call the affinity role tab to retrieve its data
var roles_affinity = this.affinity_role_tab.retrieve(context);
var vm_group_json = {
"NAME" : name,
"DESCRIPTION": description,
"ROLE" : role,
};
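// Illustrative shape of the resulting VM group template (assumed example; the
// actual content depends on the roles and affinity rules defined in the wizard):
// { "NAME": "web", "DESCRIPTION": "...",
// "ROLE": [ { "NAME": "frontend", ... }, { "NAME": "backend", ... } ],
// "AFFINED": "frontend, backend" }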
vm_group_json = $.extend(vm_group_json, roles_affinity);
if (this.action == "create") {
vm_group_json = {
"vm_group" : vm_group_json
};
Sunstone.runAction("VMGroup.create",JSON.parse(JSON.stringify(vm_group_json)));
return false;
} else if (this.action == "update") {
delete vm_group_json["NAME"];
Sunstone.runAction(
"VMGroup.update",
this.resourceId,
TemplateUtils.templateToString(vm_group_json));
return false;
}
}
function _submitAdvanced(context) {
if (this.action == "create") {
var template = $('textarea#template', context).val();
var vm_group_json = {vm_group: {vm_group_raw: template}};
Sunstone.runAction("VMGroup.create",vm_group_json);
return false;
} else if (this.action == "update") {
var template_raw = $('textarea#template', context).val();
Sunstone.runAction("VMGroup.update_template", this.resourceId, template_raw);
return false;
}
}
function _onShow(context) {
var that = this;
$('.role_content', context).each(function() {
var role_id = $(this).attr("role_id");
that.roleTabObjects[role_id].onShow();
});
}
function _fill(context, element) {
$("#new_role", context)[0].parentElement.remove();
var that = this;
this.setHeader(element);
this.resourceId = element.ID;
$('#template', context).val(TemplateUtils.templateToString(element.TEMPLATE));
WizardFields.fillInput($('#vm_group_name',context), element.NAME);
$('#vm_group_name',context).prop("disabled", true);
WizardFields.fillInput($('#vm_group_description', context), element.TEMPLATE.DESCRIPTION );
//Remove row of roles-----------------------------------------------------------------
$.each(element.ROLES.ROLE, function(index, value){
var name = value.NAME;
if(name){
var html = "<option id='" + name + "' class='roles' value=" + name + "> " + name + "</option>";
$("#list_roles_select").append(html);
$("select #" + name).mousedown(function(e) {
e.preventDefault();
$(this).prop('selected', !$(this).prop('selected'));
return false;
});
}
});
this.affinity_role_tab.fill(context, element);
$("#btn_refresh_roles", context).remove();
$("#affinity",context).show();
//Remove row of roles------------------------------------------------------------------
/*var role_context_first = $('.role_content', context).first();
var role_id_first = $(role_context_first).attr("role_id");
delete that.roleTabObjects[role_id_first];
// Populates the Avanced mode Tab
var roles_names = [];
var data = [];
if(Array.isArray(element.ROLES.ROLE))
data = element.ROLES.ROLE;
else
data.push(element.ROLES.ROLE);
$.each(data, function(index, value){
roles_names.push(value.NAME);
$("#tf_btn_roles", context).click();
var role_context = $('.role_content', context).last();
var role_id = $(role_context).attr("role_id");
that.roleTabObjects[role_id].fill(role_context, value,element);
});
$.each(data, function(index, value){
var role_context = $('.role_content', context)[index];
var str = "";
$.each(roles_names, function(){
if (this != value.NAME) {
str += "<tr>\
<td style='width:10%'>\
<input class='check_item' type='checkbox' value='"+this+"' id='"+this+"'/>\
</td>\
<td>"+this+"</td>\
</tr>";
}
});
$(".parent_roles_body", role_context).html(str);
if (value.parents) {
$.each(value.parents, function(index, value){
$(".parent_roles_body #"+this, role_context).attr('checked', true);
});
}
});*/
//Remove first tab role, is empty.
//$('i.remove-tab', context).first().click();
//$("#tf_btn_roles", context).click();
}
function _add_role_tab(role_id, dialog) {
var that = this;
var html_role_id = 'role' + role_id;
var role_tab = new RoleTab(html_role_id);
that.roleTabObjects[role_id] = role_tab;
// Append the new div containing the tab and add the tab to the list
var role_section = $('<div id="'+html_role_id+'Tab" class="tabs-panel role_content wizard_internal_tab" role_id="'+role_id+'">'+
role_tab.html() +
'</div>').appendTo($("#roles_tabs_content", dialog));
_redo_service_vmgroup_selector_role(dialog, role_section);
role_section.on("change", "#role_name", function(){
var val = true;
var chars = ['/','*','&','|',':', String.fromCharCode(92),'"', ';', '/',String.fromCharCode(39),'#','{','}','$','<','>','*'];
var newName = $(this).val();
$.each(chars, function(index, value){
if(newName.indexOf(value) != -1 && val){
val = false;
}
});
if(val){
that.affinity_role_tab.refresh($(this).val(), role_tab.oldName());
role_tab.changeNameTab(newName);
} else {
Notifier.notifyError(Locale.tr("The new role name contains invalid characters."));
}
});
//Tips.setup(role_section);
var a = $("<li class='tabs-title'>\
<a class='text-center' id='"+html_role_id+"' href='#"+html_role_id+"Tab'>\
<span>\
<i class='off-color fa fa-cube fa-3x'/>\
<br>\
<span id='role_name_text'>"+Locale.tr("Role ")+role_id+"</span>\
</span>\
<i class='fa fa-times-circle remove-tab'></i>\
</a>\
</li>").appendTo($("ul#roles_tabs", dialog));
Foundation.reInit($("ul#roles_tabs", dialog));
$("a", a).trigger("click");
// close icon: removing the tab on click
a.on("click", "i.remove-tab", function() {
var target = $(this).parent().attr("href");
var li = $(this).closest('li');
var ul = $(this).closest('ul');
var content = $(target);
var role_id = content.attr("role_id");
li.remove();
content.remove();
if (li.hasClass('is-active')) {
$('a', ul.children('li').last()).click();
}
that.affinity_role_tab.removeRole(role_tab.oldName());
delete that.roleTabObjects[role_id];
return false;
});
role_tab.setup(role_section);
role_tab.onShow();
}
function _redo_service_vmgroup_selector_role(dialog, role_section){
$('#roles_tabs_content .role_content', dialog).each(function(){
var role_section = this;
var role_tab_id = $(role_section).attr('id');
});
}
});
| goberle/one | src/sunstone/public/app/tabs/vmgroup-tab/form-panels/create.js | JavaScript | apache-2.0 | 12,941 |
import sys
from drone.actions.emr_launcher import launch_emr_task
from drone.actions.ssh_launcher import launch_ssh_task
from drone.job_runner.dependency_manager import dependencies_are_met
from drone.job_runner.job_progress_checker import check_running_job_progress
from drone.metadata.metadata import get_job_info, job_status, set_ready, set_running, set_failed
task_launcher = {'ssh': launch_ssh_task,
'emr': launch_emr_task}
def process(job_config, settings):
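# Dispatch on the recorded status of each (job, schedule_time) instance:
# failed -> retry if runs remain and dependencies are met; running -> poll progress;
# ready -> launch; not_ready -> re-check dependencies; succeeded -> nothing to do.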
for job_id, schedule_time, execution_time, status, runs, uid in get_job_info(job_config.get('id'),
db_name=settings.metadata):
if status == job_status.get('failed'):
if (int(job_config.get('retry')) if job_config.get('retry') else 0) > int(runs):
settings.logger.debug(
'%s runs %s. set retries %s.' % (job_config.get('id'), runs, job_config.get('retry')))
if dependencies_are_met(job_config, schedule_time, settings):
set_ready(job_config.get('id'), schedule_time, db_name=settings.metadata)
settings.logger.info('Job "%s" "%s" set as ready' % (job_config.get('id'), schedule_time))
run(job_config, schedule_time, settings)
continue
else:
continue
else:
continue
elif status == job_status.get('running'):
check_running_job_progress(job_config, schedule_time, uid, settings)
continue
elif status == job_status.get('ready'):
run(job_config, schedule_time, settings)
elif status == job_status.get('succeeded'):
continue
elif status == job_status.get('not_ready'):
if dependencies_are_met(job_config, schedule_time, settings):
set_ready(job_config.get('id'), schedule_time, db_name=settings.metadata)
settings.logger.info('Job "%s" "%s" set as ready' % (job_config.get('id'), schedule_time))
run(job_config, schedule_time, settings)
else:
continue
else:
settings.logger.error('Unknown job status "%s"' % status)
sys.exit(1)
def run(job_config, schedule_time, settings):
settings.logger.info('Starting job "%s" "%s"' % (job_config.get('id'), schedule_time))
job_type = job_config.get('type')
try:
assert job_type in settings.supported_job_types
except:
settings.logger.warning(
'Unsupported job type %s. Valid types are %s' % (job_type, str(settings.supported_job_types)))
task_launched_successfully, uid = task_launcher.get(job_type)(job_config, schedule_time, settings)
if task_launched_successfully:
set_running(job_config.get('id'), schedule_time, uid, db_name=settings.metadata)
settings.logger.info('Started job "%s" "%s"' % (job_config.get('id'), schedule_time))
else:
set_failed(job_config.get('id'), schedule_time, db_name=settings.metadata)
settings.logger.warning('Failed to start job "%s" "%s"' % (job_config.get('id'), schedule_time))
| grafke/Drone-workflow-controller | drone/job_runner/job_runner.py | Python | apache-2.0 | 3,220 |
function Controller() {
function __alloyId24() {
__alloyId24.opts || {};
var models = __alloyId23.models;
var len = models.length;
var rows = [];
for (var i = 0; len > i; i++) {
var __alloyId9 = models[i];
__alloyId9.__transform = {};
var __alloyId10 = Ti.UI.createTableViewRow({
layout: "vertical",
font: {
fontSize: "16dp"
},
height: "auto",
title: "undefined" != typeof __alloyId9.__transform["nome"] ? __alloyId9.__transform["nome"] : __alloyId9.get("nome"),
model: "undefined" != typeof __alloyId9.__transform["alloy_id"] ? __alloyId9.__transform["alloy_id"] : __alloyId9.get("alloy_id"),
editable: "true"
});
rows.push(__alloyId10);
var __alloyId12 = Ti.UI.createView({
layout: "vertical"
});
__alloyId10.add(__alloyId12);
var __alloyId14 = Ti.UI.createLabel({
width: Ti.UI.SIZE,
height: Ti.UI.SIZE,
right: "10dp",
color: "blue",
font: {
fontSize: "16dp"
},
text: "undefined" != typeof __alloyId9.__transform["nome"] ? __alloyId9.__transform["nome"] : __alloyId9.get("nome")
});
__alloyId12.add(__alloyId14);
var __alloyId16 = Ti.UI.createView({
height: Ti.UI.SIZE,
width: Ti.UI.FILL
});
__alloyId12.add(__alloyId16);
var __alloyId18 = Ti.UI.createScrollView({
scrollType: "horizontal",
layout: "horizontal",
horizontalWrap: "false"
});
__alloyId16.add(__alloyId18);
var __alloyId20 = Ti.UI.createImageView({
top: "15dp",
image: "undefined" != typeof __alloyId9.__transform["foto1"] ? __alloyId9.__transform["foto1"] : __alloyId9.get("foto1"),
height: "180dp",
width: "320dp"
});
__alloyId18.add(__alloyId20);
var __alloyId22 = Ti.UI.createImageView({
top: "15dp",
image: "undefined" != typeof __alloyId9.__transform["foto2"] ? __alloyId9.__transform["foto2"] : __alloyId9.get("foto2"),
height: "180dp",
width: "320dp"
});
__alloyId18.add(__alloyId22);
}
$.__views.tableviewContatos.setData(rows);
}
function openAdd1() {
var add1 = Alloy.createController("add1");
add1.getView().open({
modal: true
});
}
function maisDetalhes(e) {
var contato = Alloy.Collections.contato.get(e.rowData.model);
var ctrl = Alloy.createController("detalhesContato", contato);
$.homeTab.open(ctrl.getView());
}
require("alloy/controllers/BaseController").apply(this, Array.prototype.slice.call(arguments));
this.__controllerPath = "home";
arguments[0] ? arguments[0]["__parentSymbol"] : null;
arguments[0] ? arguments[0]["$model"] : null;
arguments[0] ? arguments[0]["__itemTemplate"] : null;
var $ = this;
var exports = {};
var __defers = {};
$.__views.homeWindow = Ti.UI.createWindow({
backgroundColor: "white",
layout: "vertical",
id: "homeWindow",
titleid: "home"
});
$.__views.contatosSearch = Ti.UI.createSearchBar({
hinttextid: "procurarText",
height: "50dp",
id: "contatosSearch",
showCancel: "false"
});
$.__views.homeWindow.add($.__views.contatosSearch);
$.__views.Btadd = Ti.UI.createButton({
top: "10dp",
width: "200dp",
height: "auto",
borderRadius: "10dp",
font: {
fontSize: "17dp"
},
title: L("adicionar"),
id: "Btadd"
});
$.__views.homeWindow.add($.__views.Btadd);
openAdd1 ? $.__views.Btadd.addEventListener("click", openAdd1) : __defers["$.__views.Btadd!click!openAdd1"] = true;
$.__views.tableviewContatos = Ti.UI.createTableView({
id: "tableviewContatos"
});
$.__views.homeWindow.add($.__views.tableviewContatos);
var __alloyId23 = Alloy.Collections["contato"] || contato;
__alloyId23.on("fetch destroy change add remove reset", __alloyId24);
maisDetalhes ? $.__views.tableviewContatos.addEventListener("click", maisDetalhes) : __defers["$.__views.tableviewContatos!click!maisDetalhes"] = true;
$.__views.homeTab = Ti.UI.createTab({
backgroundSelectedColor: "#C8C8C8 ",
backgroundFocusedColor: "#999",
icon: "/images/ic_home.png",
window: $.__views.homeWindow,
id: "homeTab",
titleid: "home"
});
$.__views.homeTab && $.addTopLevelView($.__views.homeTab);
exports.destroy = function() {
__alloyId23.off("fetch destroy change add remove reset", __alloyId24);
};
_.extend($, $.__views);
Alloy.Collections.contato.fetch();
var contatos = Alloy.Collections.contato;
contatos.fetch();
$.tableviewContatos.search = $.contatosSearch;
__defers["$.__views.Btadd!click!openAdd1"] && $.__views.Btadd.addEventListener("click", openAdd1);
__defers["$.__views.tableviewContatos!click!maisDetalhes"] && $.__views.tableviewContatos.addEventListener("click", maisDetalhes);
_.extend($, exports);
}
var Alloy = require("alloy"), Backbone = Alloy.Backbone, _ = Alloy._;
module.exports = Controller; | Geeosp/SnapContacts | Resources/alloy/controllers/home.js | JavaScript | apache-2.0 | 5,638 |
# Leptospermum recurvifolium K.D.Koenig & Sims SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Myrtaceae/Leptospermum/Leptospermum arachnoides/ Syn. Leptospermum recurvifolium/README.md | Markdown | apache-2.0 | 201 |
package com.xinfan.msgbox.service.dao.dialect;
public class MysqlDialect extends Dialect {
@Override
public boolean supportsLimit() {
return true;
}
@Override
public boolean supportsLimitOffset() {
return true;
}
@Override
public String getLimitString(String sql, int offset, String offsetPlaceholder, int limit, String limitPlaceholder) {
sql = sql.trim();
boolean isForUpdate = false;
if (sql.toLowerCase().endsWith(" for update")) {
sql = sql.substring(0, sql.length() - 11);
isForUpdate = true;
}
StringBuffer pagingSelect = new StringBuffer(sql.length() + 100);
pagingSelect.append("select * from ( ");
pagingSelect.append(sql);
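// MySQL pagination syntax is "LIMIT offset, rowCount"; the second argument below is
// computed as offset + limit from the placeholder strings supplied by the caller.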
int endInt = Integer.parseInt(offsetPlaceholder) + Integer.parseInt(limitPlaceholder);
pagingSelect.append(" ) _t limit " + offset + "," + endInt);
if (isForUpdate) {
pagingSelect.append(" for update");
}
return pagingSelect.toString();
}
public String getCountSql(String sql)
{
sql = sql.trim();
if (sql.toLowerCase().endsWith(" for update")) {
sql = sql.substring(0, sql.length() - 11);
}
StringBuffer countSelect = new StringBuffer(sql.length() + 100);
countSelect.append("select count(*) from ( ");
countSelect.append(sql);
countSelect.append(" ) _t ");
return countSelect.toString();
}
}
| xinfan123/blue-server | bulu-service/src/main/java/com/xinfan/msgbox/service/dao/dialect/MysqlDialect.java | Java | apache-2.0 | 1,374 |
package uk.ac.ebi.embl.api.validation.fixer.entry;
import uk.ac.ebi.embl.api.entry.Entry;
import uk.ac.ebi.embl.api.entry.Text;
import uk.ac.ebi.embl.api.entry.feature.Feature;
import uk.ac.ebi.embl.api.entry.qualifier.Qualifier;
import uk.ac.ebi.embl.api.entry.reference.Person;
import uk.ac.ebi.embl.api.entry.reference.Reference;
import uk.ac.ebi.embl.api.validation.Severity;
import uk.ac.ebi.embl.api.validation.ValidationResult;
import uk.ac.ebi.embl.api.validation.ValidationScope;
import uk.ac.ebi.embl.api.validation.annotation.Description;
import uk.ac.ebi.embl.api.validation.annotation.ExcludeScope;
import uk.ac.ebi.embl.api.validation.check.entry.EntryValidationCheck;
import uk.ac.ebi.embl.api.validation.helper.Utils;
/**
* Fix works for certain non-ascii characters only. Check Utils.removeAccents limitations.
* If it is not possible to transliterate certain chars, they will be caught and rejected
* by AsciiCharacterCheck.
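* For example, an accented value such as "café" would typically be fixed to "cafe"
* (illustrative; the exact mapping is defined by Utils.removeAccents).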
*/
@Description("Non-ascii characters fixed from \"{0}\" to \"{1}\".")
@ExcludeScope(validationScope = {ValidationScope.NCBI, ValidationScope.NCBI_MASTER})
public class NonAsciiCharacterFix extends EntryValidationCheck {
private static final String ASCII_CHARACTER_FIX = "AsciiCharacterFix_1";
public ValidationResult check(Entry entry) {
result = new ValidationResult();
if (entry == null)
return result;
attemptFix(entry.getComment());
attemptFix(entry.getDescription());
for (Reference reference : entry.getReferences()) {
if (reference.getPublication() != null) {
String pubTitle = reference.getPublication().getTitle();
if (pubTitle != null) {
String fixedPubTitle = fixedStr(pubTitle);
if (!fixedPubTitle.equals(pubTitle)) {
reference.getPublication().setTitle(fixedPubTitle);
reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, pubTitle, fixedPubTitle);
}
}
if (reference.getPublication().getAuthors() != null) {
for (Person author : reference.getPublication().getAuthors()) {
String firstName = author.getFirstName();
if (firstName != null) {
String fixedFirstName = fixedStr(firstName);
if (!fixedFirstName.equals(firstName)) {
author.setFirstName(fixedFirstName);
reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, firstName, fixedFirstName);
}
}
String surname = author.getSurname();
if (surname != null) {
String fixedSurname = fixedStr(surname);
if (!fixedSurname.equals(surname)) {
author.setSurname(fixedSurname);
reportMessage(Severity.FIX, reference.getOrigin(), ASCII_CHARACTER_FIX, surname, fixedSurname);
}
}
}
}
}
}
for (Feature feature : entry.getFeatures()) {
for (Qualifier qualifier : feature.getQualifiers()) {
if (qualifier.getName().equals(Qualifier.COUNTRY_QUALIFIER_NAME)
|| qualifier.getName().equals(Qualifier.ISOLATE_QUALIFIER_NAME) ) {
String qualifierValue = qualifier.getValue();
if (qualifierValue != null) {
String fixedVal = fixedStr(qualifierValue);
if (!fixedVal.equals(qualifierValue)) {
qualifier.setValue(fixedVal);
reportMessage(Severity.FIX, qualifier.getOrigin(), ASCII_CHARACTER_FIX, qualifierValue, fixedVal);
}
}
}
}
}
return result;
}
private void attemptFix(Text text) {
if (text != null && text.getText() != null) {
if (Utils.hasNonAscii(text.getText())) {
// capture the original value before overwriting it so the fix report shows both old and new text
String original = text.getText();
String fixed = Utils.removeAccents(original);
if (!fixed.equals(original)) {
text.setText(fixed);
reportMessage(Severity.FIX, text.getOrigin(), ASCII_CHARACTER_FIX, original, fixed);
}
}
}
}
private String fixedStr(String str) {
if (Utils.hasNonAscii(str)) {
return Utils.removeAccents(str);
}
return str;
}
}
| enasequence/sequencetools | src/main/java/uk/ac/ebi/embl/api/validation/fixer/entry/NonAsciiCharacterFix.java | Java | apache-2.0 | 4,774 |
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
import copy
import json
import re
VARIANT_JSON_REGEX = re.compile("product: ({.*}),")
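# Shopify product pages embed the product data as JSON in an inline script
# (e.g. "product: {...},"); this regex captures that object so variant details
# can be parsed in parse_item below.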
class ShendronesSpider(CrawlSpider):
name = "shendrones"
allowed_domains = ["shendrones.myshopify.com"]
start_urls = ["http://shendrones.myshopify.com/collections/all"]
rules = (
Rule(LinkExtractor(restrict_css=[".grid-item"]), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = self.name
variant = {}
item["variants"] = [variant]
base_url = response.url
item["manufacturer"] = "Shendrones"
# Find the json info for variants.
body = response.body_as_unicode()
m = VARIANT_JSON_REGEX.search(body)
if m:
shopify_info = json.loads(m.group(1))
global_title = shopify_info["title"]
preorder = False
if global_title.endswith("Pre Order"):
global_title = global_title[:-len("Pre Order")].strip()
variant["stock_state"] = "backordered"
preorder = True
for v in shopify_info["variants"]:
if v["title"] != "Default Title":
item["name"] = global_title + " " + v["title"]
variant["url"] = base_url + "?variant=" + str(v["id"])
else:
item["name"] = global_title
variant["url"] = base_url
variant["price"] = "${:.2f}".format(v["price"] / 100)
if not preorder:
if v["inventory_quantity"] <= 0:
if v["inventory_policy"] == "deny":
variant["stock_state"] = "out_of_stock"
else:
variant["stock_state"] = "backordered"
elif v["inventory_quantity"] < 3:
variant["stock_state"] = "low_stock"
variant["stock_text"] = "Only " + str(v["inventory_quantity"]) + " left!"
else:
variant["stock_state"] = "in_stock"
yield item
item = copy.deepcopy(item)
variant = item["variants"][0]
| rcbuild-info/scrape | rcbi/rcbi/spiders/Shendrones.py | Python | apache-2.0 | 2,069 |
import * as React from 'react';
import {camelCase} from 'change-case';
import {IVueComponent} from '../ReactifyVue';
import {createReactElement} from '../react-element-creation/CreateReactElements';
export const copyMethodsToVueComponent = (vueComponent: IVueComponent) => {
if (vueComponent.methods) {
Object.keys(vueComponent.methods)
.forEach(methodName => vueComponent[methodName] = vueComponent.methods[methodName]);
delete vueComponent.methods;
}
};
export const copyPropsToVueComponent = (vueComponent: IVueComponent, props: any) => {
if (props) {
Object.keys(props)
.forEach(propName => {
if (typeof vueComponent[propName] !== 'function' || typeof vueComponent[propName] === 'function' && !vueComponent[propName]) {
vueComponent[propName] = props[propName];
}
});
}
};
export const getComponentTag = (component: any) => {
if (component.type && component.type.tag) {
return component.type.tag;
} else if (component.type && typeof component.type === 'string') {
return component.type;
} else {
return undefined;
}
};
export const copySlotsToVueComponent = (vueComponent: IVueComponent, slotMapping, props) => {
const reactChildrenArray = props && props.children && React.Children.toArray(props.children) as (React.ReactElement<any>)[];
const slots = {
default: (reactChildrenArray && reactChildrenArray.length) ? reactChildrenArray : null
};
if (slotMapping && props) {
Object.keys(slotMapping)
.forEach(slotName => {
slots[slotName] = props[slotMapping[slotName]] || [];
});
}
Object.keys(slots)
.forEach(slotName => {
const slot = slots[slotName];
if (Array.isArray(slot)) {
slot.forEach((slotChild, index) => {
if (typeof slotChild !== 'string') {
slots[slotName][index] = {...slotChild, tag: getComponentTag(slotChild)};
}
});
}
});
vueComponent.$slots = slots;
}
export const copyArgsToVueComponent = (vueComponent: IVueComponent, args: any) => {
if (args) {
Object.keys(args)
.forEach(argName => vueComponent[argName] = args[argName]);
}
}
export const handleWatchedProperties = (vueComponent: IVueComponent, currentProps: any, nextProps: any) => {
if (vueComponent.watch) {
copyPropsToVueComponent(vueComponent,nextProps);
handleComputedProperties(vueComponent);
Object.keys(vueComponent.watch)
.forEach(watchedProperty => {
if (currentProps[watchedProperty] !== nextProps[watchedProperty]) {
vueComponent.watch[watchedProperty].apply(vueComponent, [nextProps[watchedProperty]]);
}
});
}
};
export const handleComputedProperties = (vueComponent: IVueComponent) => {
if (vueComponent.computed) {
Object.keys(vueComponent.computed)
.forEach(propertyName => {
vueComponent[propertyName] = vueComponent.computed[propertyName].apply(vueComponent, [])
});
}
}
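// Collects Vue prop defaults into a React-style defaultProps object, camelCasing each key.
// Illustrative example (assumed): props: { 'item-count': { default: 3 } } yields { itemCount: 3 }.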
export const getDefaultProps = (vueComponent: IVueComponent) => {
if (vueComponent.props) {
const defaultProps = Object.keys(vueComponent.props).reduce((defaultProps, propName) => {
const propDef = vueComponent.props[propName];
if (propDef.default) {
return {
...defaultProps,
[camelCase(propName)]: propDef.default
};
} else {
return defaultProps;
}
}, {});
return Object.keys(defaultProps).length ? defaultProps : null;
} else {
return null;
}
};
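// Attaches the helpers that Vue's compiled render functions call into:
// _c (createElement), _t (render a named slot with optional fallback content),
// _v/_s (text helpers, simplified here to pass the text straight through)
// and _e (empty node).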
export const addCompiledTemplateFunctionsToVueComponent = (vueComponent: any, createElement: Function) => {
vueComponent._self = { _c: createElement.bind(vueComponent) };
vueComponent._t = (slotName: string, fallback) => {
const slotValue = vueComponent.$slots[slotName];
if (fallback && (!slotValue || !slotValue.length)) {
return fallback;
} else {
return slotValue;
}
};
vueComponent._v = (text: string) => text || '';
vueComponent._s = (text: string) => text || '';
vueComponent._e = () => null;
};
export const generateCreateElementFunctionForClass = (classVueComponentInstance, instantiatedComponents, vueComponent) => {
return (element, args, children) => {
if (typeof args !== 'object' || Array.isArray(args)) {
//Children passed in as second argument
return createReactElement(element, {}, args, instantiatedComponents, vueComponent);
} else {
return createReactElement(element, args, children, instantiatedComponents, vueComponent);
}
};
};
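// Clones the top-level React element, re-tags it, and applies the component's
// className, style and id on top of the element's own props. The ref is wrapped
// so the rendered DOM node is captured and any queued nextTick callbacks are
// flushed.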
export const applyPropOverridesToTopLevelElement = (reactElement: React.ReactElement<any>, tag: string, self) => {
const refFunc = (e: HTMLElement) => {
(reactElement as any).ref(e);
self.element = e;
        self.nextTickCallbacks.forEach(callback => callback.apply(self.vueComponent, []));
self.nextTickCallbacks = [];
self.hasUnrenderedStateChanges = false;
};
const elementWithPropOverrides = {...reactElement, props: { ...reactElement.props}, tag: tag, ref: refFunc};
if (self.vueComponent.className) {
const existingClassName = elementWithPropOverrides.props.className || '';
elementWithPropOverrides.props.className = [existingClassName, ' ', self.vueComponent.className].join('');
}
if (self.vueComponent.style) {
const existingStyles = elementWithPropOverrides.props.style || {};
elementWithPropOverrides.props.style = {
...existingStyles,
...self.vueComponent.style
};
}
if (self.vueComponent.id) {
elementWithPropOverrides.props.id = self.vueComponent.id;
}
return elementWithPropOverrides;
};
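// Runs the Vue data() factory (when one is defined), copies the resulting
// fields onto the component instance, and returns the state object to the caller.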
export const initData = (vueComponent) => {
let state = null;
if (vueComponent.data) {
state = vueComponent.data();
Object.keys(state).forEach(stateKey => {
vueComponent[stateKey] = state[stateKey];
});
}
return state;
}; | bencompton/framework7-react | src/utils/reactify-vue/react-class-creation-and-runtime/ReactClassRuntime.ts | TypeScript | apache-2.0 | 6,487 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.text.format.Time;
import android.util.Log;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Vector;
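/**
 * AsyncTask that downloads the OpenWeatherMap daily forecast for the location
 * setting passed as the first parameter, parses the JSON response and stores
 * the results through the app's weather ContentProvider.
 */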
public class FetchWeatherTask extends AsyncTask<String, Void, Void>
{
private final String LOG_TAG = FetchWeatherTask.class.getSimpleName();
private final Context mContext;
public FetchWeatherTask (Context context)
{
mContext = context;
}
private boolean DEBUG = true;
/**
* Helper method to handle insertion of a new location in the weather database.
*
* @param locationSetting The location string used to request updates from the server.
     * @param cityName A human-readable city name, e.g. "Mountain View"
* @param lat the latitude of the city
* @param lon the longitude of the city
* @return the row ID of the added location.
*/
long addLocation (String locationSetting, String cityName, double lat, double lon)
{
long locationId;
// First, check if the location with this city name exists in the db
Cursor locationCursor = mContext.getContentResolver().query(
WeatherContract.LocationEntry.CONTENT_URI,
new String[] {WeatherContract.LocationEntry._ID},
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
new String[] {locationSetting},
null);
if (locationCursor.moveToFirst())
{
int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
locationId = locationCursor.getLong(locationIdIndex);
}
else
{
// Now that the content provider is set up, inserting rows of data is pretty simple.
// First create a ContentValues object to hold the data you want to insert.
ContentValues locationValues = new ContentValues();
// Then add the data, along with the corresponding name of the data type,
// so the content provider knows what kind of value is being inserted.
locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
// Finally, insert location data into the database.
Uri insertedUri = mContext.getContentResolver().insert(
WeatherContract.LocationEntry.CONTENT_URI,
locationValues
);
// The resulting URI contains the ID for the row. Extract the locationId from the Uri.
locationId = ContentUris.parseId(insertedUri);
}
locationCursor.close();
// Wait, that worked? Yes!
return locationId;
}
/**
* Take the String representing the complete forecast in JSON Format and
* pull out the data we need to construct the Strings needed for the wireframes.
* <p>
* Fortunately parsing is easy: constructor takes the JSON string and converts it
* into an Object hierarchy for us.
*/
private void getWeatherDataFromJson (String forecastJsonStr, String locationSetting) throws JSONException
{
// Now we have a String representing the complete forecast in JSON Format.
// Fortunately parsing is easy: constructor takes the JSON string and converts it
// into an Object hierarchy for us.
// These are the names of the JSON objects that need to be extracted.
// Location information
final String OWM_CITY = "city";
final String OWM_CITY_NAME = "name";
final String OWM_COORD = "coord";
// Location coordinate
final String OWM_LATITUDE = "lat";
final String OWM_LONGITUDE = "lon";
// Weather information. Each day's forecast info is an element of the "list" array.
final String OWM_LIST = "list";
final String OWM_PRESSURE = "pressure";
final String OWM_HUMIDITY = "humidity";
final String OWM_WINDSPEED = "speed";
final String OWM_WIND_DIRECTION = "deg";
// All temperatures are children of the "temp" object.
final String OWM_TEMPERATURE = "temp";
final String OWM_MAX = "max";
final String OWM_MIN = "min";
final String OWM_WEATHER = "weather";
final String OWM_DESCRIPTION = "main";
final String OWM_WEATHER_ID = "id";
try
{
JSONObject forecastJson = new JSONObject(forecastJsonStr);
JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
JSONObject cityJson = forecastJson.getJSONObject(OWM_CITY);
String cityName = cityJson.getString(OWM_CITY_NAME);
JSONObject cityCoord = cityJson.getJSONObject(OWM_COORD);
double cityLatitude = cityCoord.getDouble(OWM_LATITUDE);
double cityLongitude = cityCoord.getDouble(OWM_LONGITUDE);
long locationId = addLocation(locationSetting, cityName, cityLatitude, cityLongitude);
// Insert the new weather information into the database
Vector<ContentValues> cVVector = new Vector<>(weatherArray.length());
// OWM returns daily forecasts based upon the local time of the city that is being
// asked for, which means that we need to know the GMT offset to translate this data
// properly.
// Since this data is also sent in-order and the first day is always the
// current day, we're going to take advantage of that to get a nice
// normalized UTC date for all of our weather.
Time dayTime = new Time();
dayTime.setToNow();
// we start at the day returned by local time. Otherwise this is a mess.
int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);
// now we work exclusively in UTC
dayTime = new Time();
for (int i = 0; i < weatherArray.length(); i++)
{
// These are the values that will be collected.
long dateTime;
double pressure;
int humidity;
double windSpeed;
double windDirection;
double high;
double low;
String description;
int weatherId;
// Get the JSON object representing the day
JSONObject dayForecast = weatherArray.getJSONObject(i);
// Cheating to convert this to UTC time, which is what we want anyhow
dateTime = dayTime.setJulianDay(julianStartDay + i);
pressure = dayForecast.getDouble(OWM_PRESSURE);
humidity = dayForecast.getInt(OWM_HUMIDITY);
windSpeed = dayForecast.getDouble(OWM_WINDSPEED);
windDirection = dayForecast.getDouble(OWM_WIND_DIRECTION);
// Description is in a child array called "weather", which is 1 element long.
// That element also contains a weather code.
JSONObject weatherObject = dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
description = weatherObject.getString(OWM_DESCRIPTION);
weatherId = weatherObject.getInt(OWM_WEATHER_ID);
// Temperatures are in a child object called "temp". Try not to name variables
// "temp" when working with temperature. It confuses everybody.
JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
high = temperatureObject.getDouble(OWM_MAX);
low = temperatureObject.getDouble(OWM_MIN);
ContentValues weatherValues = new ContentValues();
weatherValues.put(WeatherEntry.COLUMN_LOC_KEY, locationId);
weatherValues.put(WeatherEntry.COLUMN_DATE, dateTime);
weatherValues.put(WeatherEntry.COLUMN_HUMIDITY, humidity);
weatherValues.put(WeatherEntry.COLUMN_PRESSURE, pressure);
weatherValues.put(WeatherEntry.COLUMN_WIND_SPEED, windSpeed);
weatherValues.put(WeatherEntry.COLUMN_DEGREES, windDirection);
weatherValues.put(WeatherEntry.COLUMN_MAX_TEMP, high);
weatherValues.put(WeatherEntry.COLUMN_MIN_TEMP, low);
weatherValues.put(WeatherEntry.COLUMN_SHORT_DESC, description);
weatherValues.put(WeatherEntry.COLUMN_WEATHER_ID, weatherId);
cVVector.add(weatherValues);
}
int inserted = 0;
// add to database
if (cVVector.size() > 0)
{
ContentValues[] cvArray = new ContentValues[cVVector.size()];
cVVector.toArray(cvArray);
inserted = mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, cvArray);
}
Log.d(LOG_TAG, "FetchWeatherTask Complete. " + inserted + " Inserted");
}
catch (JSONException e)
{
Log.e(LOG_TAG, e.getMessage(), e);
e.printStackTrace();
}
}
@Override
protected Void doInBackground (String... params)
{
// If there's no zip code, there's nothing to look up. Verify size of params.
if (params.length == 0)
{
return null;
}
String locationQuery = params[0];
// These two need to be declared outside the try/catch
// so that they can be closed in the finally block.
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
// Will contain the raw JSON response as a string.
String forecastJsonStr = null;
String format = "json";
String units = "metric";
int numDays = 14;
try
{
// Construct the URL for the OpenWeatherMap query
            // Possible parameters are available at OWM's forecast API page, at
// http://openweathermap.org/API#forecast
final String FORECAST_BASE_URL = "http://api.openweathermap.org/data/2.5/forecast/daily?";
final String ZIP_CODE_PARAM = "zip";
final String FORMAT_PARAM = "mode";
final String UNITS_PARAM = "units";
final String DAYS_PARAM = "cnt";
final String APP_ID_PARAM = "appid";
Uri builtUri = Uri.parse(FORECAST_BASE_URL).buildUpon()
.appendQueryParameter(APP_ID_PARAM, BuildConfig.OPEN_WEATHER_MAP_API_KEY)
.appendQueryParameter(ZIP_CODE_PARAM, params[0])
.appendQueryParameter(FORMAT_PARAM, format)
.appendQueryParameter(UNITS_PARAM, units)
.appendQueryParameter(DAYS_PARAM, Integer.toString(numDays))
.build();
URL url = new URL(builtUri.toString());
// Create the request to OpenWeatherMap, and open the connection
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
// Read the input stream into a String
InputStream inputStream = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null)
{
// Nothing to do.
return null;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null)
{
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0)
{
// Stream was empty. No point in parsing.
return null;
}
forecastJsonStr = buffer.toString();
getWeatherDataFromJson(forecastJsonStr, locationQuery);
}
catch (IOException e)
{
Log.e(LOG_TAG, "Error ", e);
            // If the code didn't successfully get the weather data, there's no point in attempting
// to parse it.
return null;
}
catch (JSONException e)
{
Log.e(LOG_TAG, e.getMessage(), e);
e.printStackTrace();
}
finally
{
if (urlConnection != null)
{
urlConnection.disconnect();
}
if (reader != null)
{
try
{
reader.close();
}
catch (final IOException e)
{
Log.e(LOG_TAG, "Error closing stream", e);
}
}
}
return null;
}
}
| denis-evteev/udacity-android-sunshine-app | app/src/main/java/com/example/android/sunshine/app/FetchWeatherTask.java | Java | apache-2.0 | 12,246 |
using System;
using System.Linq;
namespace NBi.Core.Analysis.Request
{
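    /// <summary>
    /// Filter holding a caption and the discovery target it applies to.
    /// </summary>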
public class CaptionFilter: IFilter
{
protected readonly string captionFilter;
protected readonly DiscoveryTarget targetFilter;
public CaptionFilter(string caption, DiscoveryTarget target)
{
captionFilter = caption;
targetFilter = target;
}
public string Value { get { return captionFilter; } }
public DiscoveryTarget Target { get { return targetFilter; } }
}
}
| Seddryck/NBi | NBi.Core/Analysis/Request/CaptionFilter.cs | C# | apache-2.0 | 553 |
<!DOCTYPE HTML>
<html xmlns:th="http://www.thymeleaf.org"
xmlns:layout="http://www.w3.org/1999/xhtml">
<head th:replace="fragments/head :: head">
<title>BioSamples < EMBL-EBI</title>
<!-- A few keywords that relate to the content of THIS PAGE (not the whole project) -->
<meta name="keywords" content="biosamples, europe, EBI" />
<!-- Describe what this page is about -->
<meta name="description" content="EMBL-EBI" />
<meta name="ebi:last-review" content="2016-12-20" />
<!-- The last time the content was reviewed -->
<meta name="ebi:expiry" content="2017-12-20" />
<!-- When this content is no longer relevant -->
<link rel="stylesheet"
href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/highlight.min.js"></script>
<script>hljs.initHighlightingOnLoad();</script>
</head>
<body>
<div th:insert="fragments/header :: header"></div>
<div layout:fragment="content" id="content">
<th:block th:include="fragments/header :: masterhead"></th:block>
<div id="main-content-area" class="row padding-top-xlarge">
<div class="small-12 medium-9 columns" th:insert="asciidoc/ref_api_submit :: div"></div>
<div th:insert="fragments/help :: sidebar(active='refs/api/submit', recipes=${recipes})"></div>
</div>
</div>
<div th:insert="fragments/footer :: footer"></div>
</body>
</html>
| EBIBioSamples/biosamples-v4 | webapps/core/src/main/resources/templates/docs/references/api/submit.html | HTML | apache-2.0 | 1,408 |
/*
* Copyright 2019 CJWW Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package enums
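/** Enumerates the available display-name formats: full, short and user. */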
object DisplayName extends Enumeration {
val full = Value
val short = Value
val user = Value
}
| cjww-development/auth-service | app/enums/DisplayName.scala | Scala | apache-2.0 | 719 |
//
// Generated by the J2ObjC translator. DO NOT EDIT!
// source: /Volumes/Personal/Documents/raspi-config/client-framework/build/j2oSources/com/google/common/util/concurrent/ThreadFactoryBuilder.java
//
#include "J2ObjC_header.h"
#pragma push_macro("INCLUDE_ALL_ComGoogleCommonUtilConcurrentThreadFactoryBuilder")
#ifdef RESTRICT_ComGoogleCommonUtilConcurrentThreadFactoryBuilder
#define INCLUDE_ALL_ComGoogleCommonUtilConcurrentThreadFactoryBuilder 0
#else
#define INCLUDE_ALL_ComGoogleCommonUtilConcurrentThreadFactoryBuilder 1
#endif
#undef RESTRICT_ComGoogleCommonUtilConcurrentThreadFactoryBuilder
#if __has_feature(nullability)
#pragma clang diagnostic push
#pragma GCC diagnostic ignored "-Wnullability-completeness"
#endif
#if !defined (ComGoogleCommonUtilConcurrentThreadFactoryBuilder_) && (INCLUDE_ALL_ComGoogleCommonUtilConcurrentThreadFactoryBuilder || defined(INCLUDE_ComGoogleCommonUtilConcurrentThreadFactoryBuilder))
#define ComGoogleCommonUtilConcurrentThreadFactoryBuilder_
@protocol JavaLangThread_UncaughtExceptionHandler;
@protocol JavaUtilConcurrentThreadFactory;
@interface ComGoogleCommonUtilConcurrentThreadFactoryBuilder : NSObject
#pragma mark Public
- (instancetype)init;
- (id<JavaUtilConcurrentThreadFactory>)build;
- (ComGoogleCommonUtilConcurrentThreadFactoryBuilder *)setDaemonWithBoolean:(jboolean)daemon;
- (ComGoogleCommonUtilConcurrentThreadFactoryBuilder *)setNameFormatWithNSString:(NSString *)nameFormat;
- (ComGoogleCommonUtilConcurrentThreadFactoryBuilder *)setPriorityWithInt:(jint)priority;
- (ComGoogleCommonUtilConcurrentThreadFactoryBuilder *)setThreadFactoryWithJavaUtilConcurrentThreadFactory:(id<JavaUtilConcurrentThreadFactory>)backingThreadFactory;
- (ComGoogleCommonUtilConcurrentThreadFactoryBuilder *)setUncaughtExceptionHandlerWithJavaLangThread_UncaughtExceptionHandler:(id<JavaLangThread_UncaughtExceptionHandler>)uncaughtExceptionHandler;
@end
J2OBJC_EMPTY_STATIC_INIT(ComGoogleCommonUtilConcurrentThreadFactoryBuilder)
FOUNDATION_EXPORT void ComGoogleCommonUtilConcurrentThreadFactoryBuilder_init(ComGoogleCommonUtilConcurrentThreadFactoryBuilder *self);
FOUNDATION_EXPORT ComGoogleCommonUtilConcurrentThreadFactoryBuilder *new_ComGoogleCommonUtilConcurrentThreadFactoryBuilder_init() NS_RETURNS_RETAINED;
FOUNDATION_EXPORT ComGoogleCommonUtilConcurrentThreadFactoryBuilder *create_ComGoogleCommonUtilConcurrentThreadFactoryBuilder_init();
J2OBJC_TYPE_LITERAL_HEADER(ComGoogleCommonUtilConcurrentThreadFactoryBuilder)
#endif
#if __has_feature(nullability)
#pragma clang diagnostic pop
#endif
#pragma pop_macro("INCLUDE_ALL_ComGoogleCommonUtilConcurrentThreadFactoryBuilder")
| kebernet/erigo | ios/app/Erigo/com/google/common/util/concurrent/ThreadFactoryBuilder.h | C | apache-2.0 | 2,663 |
/**
* @license Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
/**
* Expected Lighthouse audit values for redirects tests
*/
const cacheBuster = Number(new Date());
module.exports = [
{
initialUrl: `http://localhost:10200/online-only.html?delay=500&redirect=%2Foffline-only.html%3Fcb=${cacheBuster}%26delay=500%26redirect%3D%2Fredirects-final.html`,
url: 'http://localhost:10200/redirects-final.html',
audits: {
'redirects': {
score: '<100',
rawValue: '>=500',
details: {
items: {
length: 3,
},
},
},
},
},
{
initialUrl: `http://localhost:10200/online-only.html?delay=300&redirect=%2Fredirects-final.html`,
url: 'http://localhost:10200/redirects-final.html',
audits: {
'redirects': {
score: 100,
rawValue: '>=250',
details: {
items: {
length: 2,
},
},
},
},
},
];
| tkadlec/lighthouse | lighthouse-cli/test/smokehouse/redirects/expectations.js | JavaScript | apache-2.0 | 1,503 |
package org.gsonformat.intellij.process;
import com.intellij.psi.*;
import org.apache.http.util.TextUtils;
import org.gsonformat.intellij.config.Config;
import org.gsonformat.intellij.config.Constant;
import org.gsonformat.intellij.entity.FieldEntity;
import org.gsonformat.intellij.entity.ClassEntity;
import java.util.regex.Pattern;
/**
 * Processor that emits Google AutoValue style classes: the generated class is
 * made abstract, annotated with {@code @com.google.auto.value.AutoValue}, and
 * fields are rendered as abstract accessor methods instead of plain fields.
 * <p>
 * Created by dim on 16/11/7.
 */
class AutoValueProcessor extends Processor {
@Override
public void onStarProcess(ClassEntity classEntity, PsiElementFactory factory, PsiClass cls,IProcessor visitor) {
super.onStarProcess(classEntity, factory, cls, visitor);
injectAutoAnnotation(factory, cls);
}
private void injectAutoAnnotation(PsiElementFactory factory, PsiClass cls) {
PsiModifierList modifierList = cls.getModifierList();
PsiElement firstChild = modifierList.getFirstChild();
Pattern pattern = Pattern.compile("@.*?AutoValue");
if (firstChild != null && !pattern.matcher(firstChild.getText()).find()) {
PsiAnnotation annotationFromText = factory.createAnnotationFromText("@com.google.auto.value.AutoValue", cls);
modifierList.addBefore(annotationFromText, firstChild);
}
if (!modifierList.hasModifierProperty(PsiModifier.ABSTRACT)) {
modifierList.setModifierProperty(PsiModifier.ABSTRACT, true);
}
}
@Override
public void generateField(PsiElementFactory factory, FieldEntity fieldEntity, PsiClass cls, ClassEntity classEntity) {
if (fieldEntity.isGenerate()) {
StringBuilder fieldSb = new StringBuilder();
String filedName = fieldEntity.getGenerateFieldName();
if (!TextUtils.isEmpty(classEntity.getExtra())) {
fieldSb.append(classEntity.getExtra()).append("\n");
classEntity.setExtra(null);
}
if (fieldEntity.getTargetClass() != null) {
fieldEntity.getTargetClass().setGenerate(true);
}
fieldSb.append(String.format("public abstract %s %s() ; ", fieldEntity.getFullNameType(), filedName));
cls.add(factory.createMethodFromText(fieldSb.toString(), cls));
}
}
@Override
public void generateGetterAndSetter(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
}
@Override
public void generateConvertMethod(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
super.generateConvertMethod(factory, cls, classEntity);
createMethod(factory, Constant.autoValueMethodTemplate.replace("$className$", cls.getName()).trim(), cls);
}
@Override
protected void onEndGenerateClass(PsiElementFactory factory, ClassEntity classEntity, PsiClass parentClass, PsiClass generateClass, IProcessor visitor) {
super.onEndGenerateClass(factory, classEntity, parentClass, generateClass, visitor);
injectAutoAnnotation(factory, generateClass);
}
}
| gengjiawen/GsonFormat | src/main/java/org/gsonformat/intellij/process/AutoValueProcessor.java | Java | apache-2.0 | 2,976 |
#if !defined(lint) && !defined(DOS)
static char rcsid[] = "$Id: tty.c 672 2007-08-15 23:07:18Z [email protected] $";
#endif
/*
* ========================================================================
* Copyright 2006-2007 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* ========================================================================
*
* Program: tty routines
*/
#include <system.h>
#include <general.h>
#include "../estruct.h"
#include "../mode.h"
#include "../pico.h"
#include "../edef.h"
#include "../efunc.h"
#include "../keydefs.h"
#include "signals.h"
#ifndef _WINDOWS
#include "terminal.h"
#include "raw.h"
#include "read.h"
#else
#include "mswin.h"
#endif /* _WINDOWS */
#ifdef MOUSE
#include "mouse.h"
#endif /* MOUSE */
#include "tty.h"
#ifndef _WINDOWS
/*
* ttopen - this function is called once to set up the terminal device
* streams. if called as pine composer, don't mess with
* tty modes, but set signal handlers.
*/
int
ttopen(void)
{
if(Pmaster == NULL){
Raw(1);
#ifdef MOUSE
if(gmode & MDMOUSE)
init_mouse();
#endif /* MOUSE */
xonxoff_proc(preserve_start_stop);
}
picosigs();
return(1);
}
/*
* ttclose - this function gets called just before we go back home to
* the command interpreter. If called as pine composer, don't
* worry about modes, but set signals to default, pine will
* rewire things as needed.
*/
int
ttclose(void)
{
if(Pmaster){
signal(SIGHUP, SIG_DFL);
#ifdef SIGCONT
signal(SIGCONT, SIG_DFL);
#endif
#if defined(SIGWINCH) && defined(TIOCGWINSZ)
signal(SIGWINCH, SIG_DFL);
#endif
}
else{
Raw(0);
#ifdef MOUSE
end_mouse();
#endif
}
return(1);
}
/*
* ttgetc - Read a character from the terminal, performing no editing
* and doing no echo at all.
*
* Args: return_on_intr -- Function to get a single character from stdin,
* recorder -- If non-NULL, function used to record keystroke.
* bail_handler -- Function used to bail out on read error.
*
* Returns: The character read from stdin.
* Return_on_intr is returned if read is interrupted.
* If read error, BAIL_OUT is returned unless bail_handler is
* non-NULL, in which case it is called (and usually it exits).
*
* If recorder is non-null, it is used to record the keystroke.
*/
int
ttgetc(int return_on_intr, int (*recorder)(int), void (*bail_handler)(void))
{
int c;
switch(c = read_one_char()){
case READ_INTR:
return(return_on_intr);
case BAIL_OUT:
if(bail_handler)
(*bail_handler)();
else
return(BAIL_OUT);
default:
return(recorder ? (*recorder)(c) : c);
}
}
/*
* Simple version of ttgetc with simple error handling
*
* Args: recorder -- If non-NULL, function used to record keystroke.
* bail_handler -- Function used to bail out on read error.
*
* Returns: The character read from stdin.
* If read error, BAIL_OUT is returned unless bail_handler is
* non-NULL, in which case it is called (and usually it exits).
*
* If recorder is non-null, it is used to record the keystroke.
* Retries if interrupted.
*/
int
simple_ttgetc(int (*recorder)(int), void (*bail_handler)(void))
{
int res;
unsigned char c;
while((res = read(STDIN_FD, &c, 1)) <= 0)
if(!(res < 0 && errno == EINTR))
(*bail_handler)();
return(recorder ? (*recorder)((int)c) : (int)c);
}
/*
* ttputc - Write a character to the display.
*/
int
ttputc(UCS ucs)
{
unsigned char obuf[MAX(MB_LEN_MAX,32)];
int r, i, width = 0, outchars = 0;
int ret = 0;
if(ucs < 0x80)
return(putchar((unsigned char) ucs));
width = wcellwidth(ucs);
if(width < 0){
width = 1;
obuf[outchars++] = '?';
}
else{
/*
* Convert the ucs into the multibyte
* character that corresponds to the
* ucs in the users locale.
*/
outchars = wtomb((char *) obuf, ucs);
if(outchars < 0){
width = 1;
obuf[0] = '?';
outchars = 1;
}
}
for(i = 0; i < outchars; i++){
r = putchar(obuf[i]);
ret = (ret == EOF) ? EOF : r;
}
return(ret);
}
/*
* ttflush - flush terminal buffer. Does real work where the terminal
* output is buffered up. A no-operation on systems where byte
* at a time terminal I/O is done.
*/
int
ttflush(void)
{
return(fflush(stdout));
}
/*
* ttresize - recompute the screen dimensions if necessary, and then
* adjust pico's internal buffers accordingly.
*/
void
ttresize(void)
{
int row = -1, col = -1;
ttgetwinsz(&row, &col);
resize_pico(row, col);
}
/*
* ttgetwinsz - set global row and column values (if we can get them)
* and return.
*/
void
ttgetwinsz(int *row, int *col)
{
extern int _tlines, _tcolumns;
if(*row < 0)
*row = (_tlines > 0) ? _tlines - 1 : NROW - 1;
if(*col <= 0)
*col = (_tcolumns > 0) ? _tcolumns : NCOL;
#if defined(SIGWINCH) && defined(TIOCGWINSZ)
{
struct winsize win;
if (ioctl(0, TIOCGWINSZ, &win) == 0) { /* set to anything useful.. */
if(win.ws_row) /* ... the tty drivers says */
*row = win.ws_row - 1;
if(win.ws_col)
*col = win.ws_col;
}
signal(SIGWINCH, winch_handler); /* window size changes */
}
#endif
if(*col > NLINE-1)
*col = NLINE-1;
}
#else /* _WINDOWS */
#define MARGIN 8 /* size of minimum margin and */
#define SCRSIZ 64 /* scroll size for extended lines */
#define MROW 2 /* rows in menu */
/* internal prototypes */
int mswin_resize (int, int);
/*
* Standard terminal interface dispatch table. Fields point to functions
* that operate the terminal. All these functions live in mswin.c, but
* this structure is defined here because it is specific to pico.
*/
TERM term = {
0,
0,
MARGIN,
MROW,
ttopen,
NULL,
ttclose,
NULL, /* was mswin_getc, but not used? */
mswin_putc,
mswin_flush,
mswin_move,
mswin_eeol,
mswin_eeop,
mswin_beep,
mswin_rev
};
/*
* This function is called once to set up the terminal device streams.
*/
int
ttopen(void)
{
int rows, columns;
mswin_getscreensize (&rows, &columns);
term.t_nrow = rows - 1;
term.t_ncol = columns;
/* term.t_scrsiz = (columns * 2) / 3; */
/*
* Do we implement optimized character insertion and deletion?
* o_insert() and o_delete()
*/
/* inschar = delchar = FALSE; */
/* revexist = TRUE; dead code? */
mswin_setresizecallback (mswin_resize);
init_mouse();
return(1);
}
/*
* This function gets called just before we go back home to the command
* interpreter.
*/
int
ttclose(void)
{
mswin_clearresizecallback (mswin_resize);
return(1);
}
/*
* Flush terminal buffer. Does real work where the terminal output is buffered
* up. A no-operation on systems where byte at a time terminal I/O is done.
*/
int
ttflush(void)
{
return(1);
}
/*
* ttresize - recompute the screen dimensions if necessary, and then
* adjust pico's internal buffers accordingly.
*/
void
ttresize(void)
{
int row, col;
mswin_getscreensize(&row, &col);
resize_pico (row-1, col);
}
/*
* mswin_resize - windows specific callback to set pico's internal tables
* to new screen dimensions.
*/
int
mswin_resize(int row, int col)
{
if (wheadp)
resize_pico (row-1, col);
return (0);
}
#endif /* _WINDOWS */
| cornerhost/alpine | pico/osdep/tty.c | C | apache-2.0 | 7,779 |
/*
 * File:   ud_ser.c
 * Author: ezio
 *
 * Created on 25 February 2016, 9:14 PM
*/
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <errno.h>
#include <ctype.h>
#include <sys/un.h>
#include <sys/socket.h>
#define SV_SOCK_PATH "/tmp/us_xfr"
#define BUF_SIZE 10
#define BACKLOG 5
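/*
 * Minimal UNIX domain datagram server: binds SV_SOCK_PATH, then loops
 * receiving datagrams, upper-casing their contents and sending the result
 * back to the client's own socket path. (BACKLOG is unused here: datagram
 * sockets need no listen() call.)
 */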
int main(void)
{
struct sockaddr_un svaddr,claddr;
int sfd,j;
ssize_t numRead;
socklen_t len;
char buf[BUF_SIZE];
sfd = socket(AF_UNIX , SOCK_DGRAM, 0);
if (sfd == -1)
exit(-1);
if(unlink(SV_SOCK_PATH) == -1 && errno != ENOENT)
exit(-2);
memset(&svaddr, 0, sizeof(struct sockaddr_un));
svaddr.sun_family = AF_UNIX;
strncpy(svaddr.sun_path, SV_SOCK_PATH, sizeof(svaddr.sun_path) -1 );
if(bind(sfd, (struct sockaddr *)&svaddr, sizeof(struct sockaddr_un))== -1)
exit(-3);
for(;;) {
len = sizeof(struct sockaddr_un);
numRead = recvfrom(sfd, buf, BUF_SIZE, 0, (struct sockaddr *)&claddr, &len);
if(numRead == -1)
exit(-4);
printf("server recved %ld bytes from %s\n",(long) numRead, claddr.sun_path);
for(j = 0; j<numRead; j++)
buf[j] = toupper((unsigned char )buf[j]);
if (sendto(sfd, buf, numRead, 0, (struct sockaddr *)&claddr, len) != numRead)
perror("sendto ");
}
return 0;
}
| oska874/cCode | socket/ud_ser.c | C | apache-2.0 | 1,436 |
#include <bits/stdc++.h>
template<typename T> T gcd(T a, T b) {
if(!b) return a;
return gcd(b, a % b);
}
template<typename T> T lcm(T a, T b) {
return a * b / gcd(a, b);
}
template<typename T> void chmin(T& a, T b) { a = (a > b) ? b : a; }
template<typename T> void chmax(T& a, T b) { a = (a < b) ? b : a; }
int in() { int x; scanf("%d", &x); return x; }
using namespace std;
typedef long long Int;
typedef unsigned uint;
int TL[10];
char S[110];
int map_key[10];
string key[10];
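// The first line of input permutes the standard phone keypad: button j carries
// the letters of standard button TL[j]. For each character of S the program
// prints the button number once per required press, inserting a '#' whenever
// two consecutive letters fall on the same button.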
int main(void) {
key[1] = "";
key[2] = "abc";
key[3] = "def";
key[4] = "ghi";
key[5] = "jkl";
key[6] = "mno";
key[7] = "pqrs";
key[8] = "tuv";
key[9] = "wxyz";
for (int i = 1; i <= 9; i++) {
scanf("%d", &TL[i]);
map_key[i] = TL[i];
}
scanf("%s", S);
int N = strlen(S);
int last = -1;
for (int i = 0; i < N; i++) {
int id = -1, press = 0;
bool sharp = false;
for (int j = 1; j <= 9; j++) {
if (key[map_key[j]].find(S[i]) != string::npos) {
//cout << "\n" << j << " " << S[i] << " " << key[map_key[j]] << "\n";
id = j;
for (int k = 0; k < key[map_key[j]].size(); k++) {
if (key[map_key[j]][k] == S[i]) {
press = k;
break;
}
}
if (i > 0) {
if (id == last) {
sharp = true;
}
}
last = j;
break;
}
}
if (sharp) {
putchar('#');
}
for (int i = 0; i <= press; i++) {
printf("%d", id);
}
}
printf("\n");
return 0;
}
| aajjbb/contest-files | COCI/Mobitel.cpp | C++ | apache-2.0 | 1,433 |
/* Copyright 2008, 2009, 2010 by the Oxford University Computing Laboratory
This file is part of HermiT.
HermiT is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HermiT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with HermiT. If not, see <http://www.gnu.org/licenses/>.
*/
package org.semanticweb.HermiT.datatypes.owlreal;
public final class PlusInfinity extends Number {
private static final long serialVersionUID = -205551124673073593L;
public static final PlusInfinity INSTANCE = new PlusInfinity();
private PlusInfinity() {
}
public boolean equals(Object that) {
return this == that;
}
public String toString() {
return "+INF";
}
public double doubleValue() {
throw new UnsupportedOperationException();
}
public float floatValue() {
throw new UnsupportedOperationException();
}
public int intValue() {
throw new UnsupportedOperationException();
}
public long longValue() {
throw new UnsupportedOperationException();
}
protected Object readResolve() {
return INSTANCE;
}
}
| CPoirot3/OWL-Reasoner | project/src/org/semanticweb/HermiT/datatypes/owlreal/PlusInfinity.java | Java | apache-2.0 | 1,607 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mturk.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.mturk.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* HITLayoutParameter JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class HITLayoutParameterJsonUnmarshaller implements Unmarshaller<HITLayoutParameter, JsonUnmarshallerContext> {
public HITLayoutParameter unmarshall(JsonUnmarshallerContext context) throws Exception {
HITLayoutParameter hITLayoutParameter = new HITLayoutParameter();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("Name", targetDepth)) {
context.nextToken();
hITLayoutParameter.setName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Value", targetDepth)) {
context.nextToken();
hITLayoutParameter.setValue(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return hITLayoutParameter;
}
private static HITLayoutParameterJsonUnmarshaller instance;
public static HITLayoutParameterJsonUnmarshaller getInstance() {
if (instance == null)
instance = new HITLayoutParameterJsonUnmarshaller();
return instance;
}
}
| dagnir/aws-sdk-java | aws-java-sdk-mechanicalturkrequester/src/main/java/com/amazonaws/services/mturk/model/transform/HITLayoutParameterJsonUnmarshaller.java | Java | apache-2.0 | 2,995 |
package com.nhpatt.myconference.entities;
import com.google.gson.JsonArray;
/**
* @author Javier Gamarra
*/
public class TalkEvent {
private final JsonArray talks;
public TalkEvent(JsonArray talks) {
this.talks = talks;
}
public JsonArray getTalks() {
return talks;
}
}
| nhpatt/MyConference | app/src/main/java/com/nhpatt/myconference/entities/TalkEvent.java | Java | apache-2.0 | 312 |
/*
* Copyright (C) 2009-2015 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.aws.identity;
import org.dasein.cloud.AbstractCapabilities;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.aws.AWSCloud;
import org.dasein.cloud.identity.IdentityAndAccessCapabilities;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Locale;
/**
* Created by stas on 18/06/15.
*/
public class IAMCapabilities extends AbstractCapabilities<AWSCloud> implements IdentityAndAccessCapabilities {
public IAMCapabilities(AWSCloud provider) {
super(provider);
}
@Override
public boolean supportsAccessControls() throws CloudException, InternalException {
return true;
}
@Override
public boolean supportsConsoleAccess() throws CloudException, InternalException {
return true;
}
@Override
public boolean supportsAPIAccess() throws CloudException, InternalException {
return true;
}
@Nullable
@Override
public String getConsoleUrl() throws CloudException, InternalException {
return String.format("https://%s.signin.aws.amazon.com/console", getContext().getAccountNumber());
}
@Nonnull
@Override
public String getProviderTermForUser(Locale locale) {
return "user";
}
@Nonnull
@Override
public String getProviderTermForGroup(@Nonnull Locale locale) {
return "group";
}
}
| maksimov/dasein-cloud-aws-old | src/main/java/org/dasein/cloud/aws/identity/IAMCapabilities.java | Java | apache-2.0 | 2,228 |
# Meniscus glaucopis Irgens, 1977 (Approved Lists, 1980) SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Bacteria/Bacteroidetes/Sphingobacteria/Sphingobacteriales/Flexibacteraceae/Meniscus/Meniscus glaucopis/README.md | Markdown | apache-2.0 | 212 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html>
<head>
<title>AndIncludeWord - ScalaTest 2.1.7 - org.scalatest.matchers.MatcherFactory1.AndIncludeWord</title>
<meta name="description" content="AndIncludeWord - ScalaTest 2.1.7 - org.scalatest.matchers.MatcherFactory1.AndIncludeWord" />
<meta name="keywords" content="AndIncludeWord ScalaTest 2.1.7 org.scalatest.matchers.MatcherFactory1.AndIncludeWord" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript" src="../../../lib/jquery.js" id="jquery-js"></script>
<script type="text/javascript" src="../../../lib/jquery-ui.js"></script>
<script type="text/javascript" src="../../../lib/template.js"></script>
<script type="text/javascript" src="../../../lib/tools.tooltip.js"></script>
<script type="text/javascript">
if(top === self) {
var url = '../../../index.html';
var hash = 'org.scalatest.matchers.MatcherFactory1$AndIncludeWord';
var anchor = window.location.hash;
var anchor_opt = '';
if (anchor.length >= 1)
anchor_opt = '@' + anchor.substring(1);
window.location.href = url + '#' + hash + anchor_opt;
}
</script>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-71294502-3', 'auto');
ga('send', 'pageview');
</script>
</head>
<body class="type">
<!-- Top of doc.scalatest.org [javascript] -->
<script type="text/javascript">
var rnd = window.rnd || Math.floor(Math.random()*10e6);
var pid204546 = window.pid204546 || rnd;
var plc204546 = window.plc204546 || 0;
var abkw = window.abkw || '';
var absrc = 'http://ab167933.adbutler-ikon.com/adserve/;ID=167933;size=468x60;setID=204546;type=js;sw='+screen.width+';sh='+screen.height+';spr='+window.devicePixelRatio+';kw='+abkw+';pid='+pid204546+';place='+(plc204546++)+';rnd='+rnd+';click=CLICK_MACRO_PLACEHOLDER';
document.write('<scr'+'ipt src="'+absrc+'" type="text/javascript"></scr'+'ipt>');
</script>
<div id="definition">
<img src="../../../lib/class_big.png" />
<p id="owner"><a href="../../package.html" class="extype" name="org">org</a>.<a href="../package.html" class="extype" name="org.scalatest">scalatest</a>.<a href="package.html" class="extype" name="org.scalatest.matchers">matchers</a>.<a href="MatcherFactory1.html" class="extype" name="org.scalatest.matchers.MatcherFactory1">MatcherFactory1</a></p>
<h1>AndIncludeWord</h1>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">class</span>
</span>
<span class="symbol">
<span class="name">AndIncludeWord</span><span class="result"> extends <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
</h4>
<div id="comment" class="fullcommenttop"><div class="comment cmt"><p>This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="../Matchers.html"><code>Matchers</code></a> for an overview of
the matchers DSL.
</p></div><dl class="attributes block"> <dt>Source</dt><dd><a href="https://github.com/scalatest/scalatest/tree/release-2.1.7-for-scala-2.10/src/main/scala/org/scalatest/matchers/MatcherFactory1.scala" target="_blank">MatcherFactory1.scala</a></dd></dl><div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent"><span class="extype" name="scala.AnyRef">AnyRef</span>, <span class="extype" name="scala.Any">Any</span></div>
</div></div>
<div id="mbrsel">
<div id="textfilter"><span class="pre"></span><span class="input"><input id="mbrsel-input" type="text" accesskey="/" /></span><span class="post"></span></div>
<div id="order">
<span class="filtertype">Ordering</span>
<ol>
<li class="alpha in"><span>Alphabetic</span></li>
<li class="inherit out"><span>By inheritance</span></li>
</ol>
</div>
<div id="ancestors">
<span class="filtertype">Inherited<br />
</span>
<ol id="linearization">
<li class="in" name="org.scalatest.matchers.MatcherFactory1.AndIncludeWord"><span>AndIncludeWord</span></li><li class="in" name="scala.AnyRef"><span>AnyRef</span></li><li class="in" name="scala.Any"><span>Any</span></li>
</ol>
</div><div id="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
<a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
<div id="template">
<div id="allMembers">
<div id="constructors" class="members">
<h3>Instance Constructors</h3>
<ol><li name="org.scalatest.matchers.MatcherFactory1.AndIncludeWord#<init>" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="<init>():MatcherFactory1.this.AndIncludeWord"></a>
<a id="<init>:AndIncludeWord"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">new</span>
</span>
<span class="symbol">
<span class="name">AndIncludeWord</span><span class="params">()</span>
</span>
</h4>
</li></ol>
</div>
<div id="values" class="values members">
<h3>Value Members</h3>
<ol><li name="scala.AnyRef#!=" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:AnyRef):Boolean"></a>
<a id="!=(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.Any#!=" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:Any):Boolean"></a>
<a id="!=(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef###" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="##():Int"></a>
<a id="##():Int"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $hash$hash" class="name">##</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#==" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:AnyRef):Boolean"></a>
<a id="==(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.Any#==" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:Any):Boolean"></a>
<a id="==(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.Any#asInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="asInstanceOf[T0]:T0"></a>
<a id="asInstanceOf[T0]:T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">asInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Any.asInstanceOf.T0">T0</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#clone" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="clone():Object"></a>
<a id="clone():AnyRef"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">clone</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#eq" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="eq(x$1:AnyRef):Boolean"></a>
<a id="eq(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">eq</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#equals" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="equals(x$1:Any):Boolean"></a>
<a id="equals(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">equals</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#finalize" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="finalize():Unit"></a>
<a id="finalize():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">finalize</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#getClass" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="getClass():Class[_]"></a>
<a id="getClass():Class[_]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">getClass</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.Class">Class</span>[_]</span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#hashCode" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="hashCode():Int"></a>
<a id="hashCode():Int"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">hashCode</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.Any#isInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="isInstanceOf[T0]:Boolean"></a>
<a id="isInstanceOf[T0]:Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">isInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#ne" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="ne(x$1:AnyRef):Boolean"></a>
<a id="ne(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">ne</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notify" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notify():Unit"></a>
<a id="notify():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notify</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notifyAll" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notifyAll():Unit"></a>
<a id="notifyAll():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notifyAll</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="org.scalatest.matchers.MatcherFactory1.AndIncludeWord#regex" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="regex(regex:scala.util.matching.Regex):org.scalatest.matchers.MatcherFactory1[SCwithString,TC1]"></a>
<a id="regex(Regex):MatcherFactory1[SCwithString,TC1]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">regex</span><span class="params">(<span name="regex">regex: <span class="extype" name="scala.util.matching.Regex">Regex</span></span>)</span><span class="result">: <a href="MatcherFactory1.html" class="extype" name="org.scalatest.matchers.MatcherFactory1">MatcherFactory1</a>[<span class="extype" name="org.scalatest.matchers.MatcherFactory1.SC">SC</span> with <span class="extype" name="scala.Predef.String">String</span>, <span class="extype" name="org.scalatest.matchers.MatcherFactory1.TC1">TC1</span>]</span>
</span>
</h4>
<p class="shortcomment cmt">This method enables the following syntax given a <code>MatcherFactory1</code>:</p><div class="fullcomment"><div class="comment cmt"><p>This method enables the following syntax given a <code>MatcherFactory1</code>:</p><p><pre class="stHighlighted">
aMatcherFactory and include regex (decimalRegex)
^
</pre>
</p></div></div>
</li><li name="org.scalatest.matchers.MatcherFactory1.AndIncludeWord#regex" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="regex(regexWithGroups:org.scalatest.words.RegexWithGroups):org.scalatest.matchers.MatcherFactory1[SCwithString,TC1]"></a>
<a id="regex(RegexWithGroups):MatcherFactory1[SCwithString,TC1]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">regex</span><span class="params">(<span name="regexWithGroups">regexWithGroups: <a href="../words/RegexWithGroups.html" class="extype" name="org.scalatest.words.RegexWithGroups">RegexWithGroups</a></span>)</span><span class="result">: <a href="MatcherFactory1.html" class="extype" name="org.scalatest.matchers.MatcherFactory1">MatcherFactory1</a>[<span class="extype" name="org.scalatest.matchers.MatcherFactory1.SC">SC</span> with <span class="extype" name="scala.Predef.String">String</span>, <span class="extype" name="org.scalatest.matchers.MatcherFactory1.TC1">TC1</span>]</span>
</span>
</h4>
<p class="shortcomment cmt">This method enables the following syntax given a <code>MatcherFactory1</code>:</p><div class="fullcomment"><div class="comment cmt"><p>This method enables the following syntax given a <code>MatcherFactory1</code>:</p><p><pre class="stHighlighted">
aMatcherFactory and include regex (<span class="stQuotedString">"a(b*)c"</span> withGroup <span class="stQuotedString">"bb"</span>)
^
</pre>
</p></div></div>
</li><li name="org.scalatest.matchers.MatcherFactory1.AndIncludeWord#regex" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="regex(regexString:String):org.scalatest.matchers.MatcherFactory1[SCwithString,TC1]"></a>
<a id="regex(String):MatcherFactory1[SCwithString,TC1]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">regex</span><span class="params">(<span name="regexString">regexString: <span class="extype" name="scala.Predef.String">String</span></span>)</span><span class="result">: <a href="MatcherFactory1.html" class="extype" name="org.scalatest.matchers.MatcherFactory1">MatcherFactory1</a>[<span class="extype" name="org.scalatest.matchers.MatcherFactory1.SC">SC</span> with <span class="extype" name="scala.Predef.String">String</span>, <span class="extype" name="org.scalatest.matchers.MatcherFactory1.TC1">TC1</span>]</span>
</span>
</h4>
<p class="shortcomment cmt">This method enables the following syntax given a <code>MatcherFactory1</code>:</p><div class="fullcomment"><div class="comment cmt"><p>This method enables the following syntax given a <code>MatcherFactory1</code>:</p><p><pre class="stHighlighted">
aMatcherFactory and include regex (decimal)
^
</pre>
</p></div></div>
</li><li name="scala.AnyRef#synchronized" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="synchronized[T0](x$1:=>T0):T0"></a>
<a id="synchronized[T0](⇒T0):T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">synchronized</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="params">(<span name="arg0">arg0: ⇒ <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>)</span><span class="result">: <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#toString" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="toString():String"></a>
<a id="toString():String"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">toString</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.String">String</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait():Unit"></a>
<a id="wait():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long,x$2:Int):Unit"></a>
<a id="wait(Long,Int):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>, <span name="arg1">arg1: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long):Unit"></a>
<a id="wait(Long):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li></ol>
</div>
</div>
<div id="inheritedMembers">
<div class="parent" name="scala.AnyRef">
<h3>Inherited from <span class="extype" name="scala.AnyRef">AnyRef</span></h3>
</div><div class="parent" name="scala.Any">
<h3>Inherited from <span class="extype" name="scala.Any">Any</span></h3>
</div>
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
</body>
</html> | scalatest/scalatest-website | public/scaladoc/2.1.7/org/scalatest/matchers/MatcherFactory1$AndIncludeWord.html | HTML | apache-2.0 | 28,106 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_162) on Sat Feb 02 18:57:44 CET 2019 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class com.communote.plugins.api.rest.v24.resource.topic.property.PropertyResourceHandler (Communote 3.5 API)</title>
<meta name="date" content="2019-02-02">
<link rel="stylesheet" type="text/css" href="../../../../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.communote.plugins.api.rest.v24.resource.topic.property.PropertyResourceHandler (Communote 3.5 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../../../com/communote/plugins/api/rest/v24/resource/topic/property/PropertyResourceHandler.html" title="class in com.communote.plugins.api.rest.v24.resource.topic.property">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../../../index.html?com/communote/plugins/api/rest/v24/resource/topic/property/class-use/PropertyResourceHandler.html" target="_top">Frames</a></li>
<li><a href="PropertyResourceHandler.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.communote.plugins.api.rest.v24.resource.topic.property.PropertyResourceHandler" class="title">Uses of Class<br>com.communote.plugins.api.rest.v24.resource.topic.property.PropertyResourceHandler</h2>
</div>
<div class="classUseContainer">No usage of com.communote.plugins.api.rest.v24.resource.topic.property.PropertyResourceHandler</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../../../com/communote/plugins/api/rest/v24/resource/topic/property/PropertyResourceHandler.html" title="class in com.communote.plugins.api.rest.v24.resource.topic.property">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../../../index.html?com/communote/plugins/api/rest/v24/resource/topic/property/class-use/PropertyResourceHandler.html" target="_top">Frames</a></li>
<li><a href="PropertyResourceHandler.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2019 <a href="https://communote.github.io/">Communote team</a>. All rights reserved.</small></p>
</body>
</html>
| Communote/communote.github.io | generated/javadoc/com/communote/plugins/api/rest/v24/resource/topic/property/class-use/PropertyResourceHandler.html | HTML | apache-2.0 | 5,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicAbstractUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionDemandMessage;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionSupplyMessage;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPreloaderAssignments;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.lang.IgnitePredicate;
import org.jetbrains.annotations.Nullable;
/**
 * Adapter for preloading which always assumes that preloading has finished.
*/
public class GridCachePreloaderAdapter implements GridCachePreloader {
/** */
protected final CacheGroupContext grp;
/** */
protected final GridCacheSharedContext ctx;
/** Logger. */
protected final IgniteLogger log;
/** Start future (always completed by default). */
private final IgniteInternalFuture finFut;
/** Preload predicate. */
protected IgnitePredicate<GridCacheEntryInfo> preloadPred;
/**
* @param grp Cache group.
*/
public GridCachePreloaderAdapter(CacheGroupContext grp) {
assert grp != null;
this.grp = grp;
ctx = grp.shared();
log = ctx.logger(getClass());
finFut = new GridFinishedFuture();
}
/** {@inheritDoc} */
@Override public void start() throws IgniteCheckedException {
// No-op.
}
/** {@inheritDoc} */
@Override public void onKernalStop() {
// No-op.
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> forceRebalance() {
return new GridFinishedFuture<>(true);
}
/** {@inheritDoc} */
@Override public boolean needForceKeys() {
return false;
}
/** {@inheritDoc} */
@Override public void onReconnected() {
// No-op.
}
/** {@inheritDoc} */
@Override public void preloadPredicate(IgnitePredicate<GridCacheEntryInfo> preloadPred) {
this.preloadPred = preloadPred;
}
/** {@inheritDoc} */
@Override public IgnitePredicate<GridCacheEntryInfo> preloadPredicate() {
return preloadPred;
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Object> startFuture() {
return finFut;
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> syncFuture() {
return finFut;
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> rebalanceFuture() {
return finFut;
}
/** {@inheritDoc} */
@Override public void unwindUndeploys() {
grp.unwindUndeploys();
}
/** {@inheritDoc} */
@Override public void handleSupplyMessage(int idx, UUID id, GridDhtPartitionSupplyMessage s) {
// No-op.
}
/** {@inheritDoc} */
@Override public void handleDemandMessage(int idx, UUID id, GridDhtPartitionDemandMessage d) {
// No-op.
}
/** {@inheritDoc} */
@Override public GridDhtFuture<Object> request(GridCacheContext ctx, Collection<KeyCacheObject> keys,
AffinityTopologyVersion topVer) {
return null;
}
/** {@inheritDoc} */
@Override public GridDhtFuture<Object> request(GridCacheContext ctx, GridNearAtomicAbstractUpdateRequest req,
AffinityTopologyVersion topVer) {
return null;
}
/** {@inheritDoc} */
@Override public void onInitialExchangeComplete(@Nullable Throwable err) {
// No-op.
}
/** {@inheritDoc} */
@Override public GridDhtPreloaderAssignments assign(GridDhtPartitionsExchangeFuture exchFut) {
return null;
}
/** {@inheritDoc} */
@Override public Runnable addAssignments(GridDhtPreloaderAssignments assignments,
boolean forcePreload,
int cnt,
Runnable next,
@Nullable GridFutureAdapter<Boolean> forcedRebFut) {
return null;
}
/** {@inheritDoc} */
@Override public void evictPartitionAsync(GridDhtLocalPartition part) {
// No-op.
}
/** {@inheritDoc} */
@Override public void onTopologyChanged(GridDhtPartitionsExchangeFuture lastFut) {
// No-op.
}
/** {@inheritDoc} */
@Override public void dumpDebugInfo() {
// No-op.
}
}
| a1vanov/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCachePreloaderAdapter.java | Java | apache-2.0 | 5,818 |
/*
* Copyright © 2009 HotPads ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.virtualnode.replication;
import java.util.Optional;
import io.datarouter.storage.node.tableconfig.NodewatchConfigurationBuilder;
public class ReplicationNodeOptions{
public final Optional<String> tableName;
public final Optional<Integer> everyNToPrimary;
public final Optional<Boolean> disableForcePrimary;
public final Optional<Boolean> disableIntroducer;
public final Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder;
private ReplicationNodeOptions(
Optional<String> tableName,
Optional<Integer> everyNToPrimary,
Optional<Boolean> disableForcePrimary,
Optional<Boolean> disableIntroducer,
Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder){
this.tableName = tableName;
this.everyNToPrimary = everyNToPrimary;
this.disableForcePrimary = disableForcePrimary;
this.disableIntroducer = disableIntroducer;
this.nodewatchConfigurationBuilder = nodewatchConfigurationBuilder;
}
public static class ReplicationNodeOptionsBuilder{
public Optional<String> tableName = Optional.empty();
public Optional<Integer> everyNToPrimary = Optional.empty();
public Optional<Boolean> disableForcePrimary = Optional.empty();
public Optional<Boolean> disableIntroducer = Optional.empty();
public Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder = Optional.empty();
public ReplicationNodeOptionsBuilder withTableName(String tableName){
this.tableName = Optional.of(tableName);
return this;
}
public ReplicationNodeOptionsBuilder withEveryNToPrimary(Integer everyNToPrimary){
this.everyNToPrimary = Optional.of(everyNToPrimary);
return this;
}
public ReplicationNodeOptionsBuilder withDisableForcePrimary(boolean disableForcePrimary){
this.disableForcePrimary = Optional.of(disableForcePrimary);
return this;
}
public ReplicationNodeOptionsBuilder withDisableIntroducer(boolean disableIntroducer){
this.disableIntroducer = Optional.of(disableIntroducer);
return this;
}
public ReplicationNodeOptionsBuilder withNodewatchConfigurationBuilder(
NodewatchConfigurationBuilder nodewatchConfigurationBuilder){
this.nodewatchConfigurationBuilder = Optional.of(nodewatchConfigurationBuilder);
return this;
}
public ReplicationNodeOptions build(){
return new ReplicationNodeOptions(
tableName,
everyNToPrimary,
disableForcePrimary,
disableIntroducer,
nodewatchConfigurationBuilder);
}
}
} | hotpads/datarouter | datarouter-virtual-node/src/main/java/io/datarouter/virtualnode/replication/ReplicationNodeOptions.java | Java | apache-2.0 | 3,098 |
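A minimal usage sketch of the builder defined above (illustrative only: the wrapper class, the chosen values and the println are hypothetical; only the ReplicationNodeOptionsBuilder API shown in the file is assumed):

import io.datarouter.virtualnode.replication.ReplicationNodeOptions;
import io.datarouter.virtualnode.replication.ReplicationNodeOptions.ReplicationNodeOptionsBuilder;

public class ReplicationNodeOptionsUsageSketch{

	public static void main(String[] args){
		// unset options stay Optional.empty(); each withX(...) call wraps its value in Optional.of
		ReplicationNodeOptions options = new ReplicationNodeOptionsBuilder()
				.withTableName("ExampleTable")
				.withEveryNToPrimary(10)
				.withDisableForcePrimary(true)
				.build();
		System.out.println(options.tableName.orElse("<unset>"));
	}

}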
package fr.javatronic.blog.massive.annotation1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
public class Class_175 {
}
| lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/Class_175.java | Java | apache-2.0 | 145 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_111) on Wed Jan 04 22:31:29 EST 2017 -->
<title>Uses of Class org.drip.spline.bspline.SegmentMonicBasisFunction</title>
<meta name="date" content="2017-01-04">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.drip.spline.bspline.SegmentMonicBasisFunction";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/drip/spline/bspline/SegmentMonicBasisFunction.html" title="class in org.drip.spline.bspline">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/drip/spline/bspline/class-use/SegmentMonicBasisFunction.html" target="_top">Frames</a></li>
<li><a href="SegmentMonicBasisFunction.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.drip.spline.bspline.SegmentMonicBasisFunction" class="title">Uses of Class<br>org.drip.spline.bspline.SegmentMonicBasisFunction</h2>
</div>
<div class="classUseContainer">No usage of org.drip.spline.bspline.SegmentMonicBasisFunction</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/drip/spline/bspline/SegmentMonicBasisFunction.html" title="class in org.drip.spline.bspline">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/drip/spline/bspline/class-use/SegmentMonicBasisFunction.html" target="_top">Frames</a></li>
<li><a href="SegmentMonicBasisFunction.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| lakshmiDRIP/DRIP | Javadoc/org/drip/spline/bspline/class-use/SegmentMonicBasisFunction.html | HTML | apache-2.0 | 4,520 |
// ATTENTION: The code in this file is highly EXPERIMENTAL.
// Adventurous users should note that the APIs will probably change.
#pragma once
#include "onnx/common/ir.h"
#include "onnx/common/ir_pb_converter.h"
#include "onnx/common/stl_backports.h"
#include "onnx/optimizer/passes/eliminate_deadend.h"
#include "onnx/optimizer/passes/eliminate_identity.h"
#include "onnx/optimizer/passes/eliminate_nop_dropout.h"
#include "onnx/optimizer/passes/eliminate_nop_monotone_argmax.h"
#include "onnx/optimizer/passes/eliminate_nop_pad.h"
#include "onnx/optimizer/passes/eliminate_nop_transpose.h"
#include "onnx/optimizer/passes/eliminate_unused_initializer.h"
#include "onnx/optimizer/passes/extract_constant_to_initializer.h"
#include "onnx/optimizer/passes/fuse_add_bias_into_conv.h"
#include "onnx/optimizer/passes/fuse_bn_into_conv.h"
#include "onnx/optimizer/passes/fuse_consecutive_concats.h"
#include "onnx/optimizer/passes/fuse_consecutive_log_softmax.h"
#include "onnx/optimizer/passes/fuse_consecutive_reduce_unsqueeze.h"
#include "onnx/optimizer/passes/fuse_consecutive_squeezes.h"
#include "onnx/optimizer/passes/fuse_consecutive_transposes.h"
#include "onnx/optimizer/passes/fuse_matmul_add_bias_into_gemm.h"
#include "onnx/optimizer/passes/fuse_pad_into_conv.h"
#include "onnx/optimizer/passes/fuse_transpose_into_gemm.h"
#include "onnx/optimizer/passes/lift_lexical_references.h"
#include "onnx/optimizer/passes/nop.h"
#include "onnx/optimizer/passes/split.h"
#include "onnx/proto_utils.h"
#include <unordered_set>
#include <vector>
namespace ONNX_NAMESPACE {
namespace optimization {
// Registry containing all passes available in ONNX.
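// Passes are registered once in the constructor below and looked up by the
// string each pass reports from getPassName(), via find().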
struct GlobalPassRegistry {
std::map<std::string, std::shared_ptr<Pass>> passes;
GlobalPassRegistry() {
// Register the optimization passes to the optimizer.
registerPass<NopEmptyPass>();
registerPass<EliminateDeadEnd>();
registerPass<EliminateNopDropout>();
registerPass<EliminateIdentity>();
registerPass<EliminateNopMonotoneArgmax>();
registerPass<EliminateNopPad>();
registerPass<EliminateNopTranspose>();
registerPass<EliminateUnusedInitializer>();
registerPass<ExtractConstantToInitializer>();
registerPass<FuseAddBiasIntoConv>();
registerPass<FuseBNIntoConv>();
registerPass<FuseConsecutiveConcats>();
registerPass<FuseConsecutiveLogSoftmax>();
registerPass<FuseConsecutiveReduceUnsqueeze>();
registerPass<FuseConsecutiveSqueezes>();
registerPass<FuseConsecutiveTransposes>();
registerPass<FuseMatMulAddBiasIntoGemm>();
registerPass<FusePadIntoConv>();
registerPass<FuseTransposeIntoGemm>();
registerPass<LiftLexicalReferences>();
registerPass<SplitInit>();
registerPass<SplitPredict>();
}
~GlobalPassRegistry() {
this->passes.clear();
}
std::shared_ptr<Pass> find(std::string pass_name) {
auto it = this->passes.find(pass_name);
ONNX_ASSERTM(
it != this->passes.end(), "pass %s is unknown.", pass_name.c_str());
return it->second;
}
const std::vector<std::string> GetAvailablePasses();
template <typename T>
void registerPass() {
static_assert(std::is_base_of<Pass, T>::value, "T must inherit from Pass");
std::shared_ptr<Pass> pass(new T());
passes[pass->getPassName()] = pass;
}
};
} // namespace optimization
} // namespace ONNX_NAMESPACE
| mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/onnx-tensorrt/third_party/onnx/onnx/optimizer/pass_registry.h | C | apache-2.0 | 3,357 |
<?php
/**
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Amazon\Login\Plugin;
use Magento\Customer\Controller\Account\Login;
use Magento\Customer\Model\Session;
use Magento\Customer\Model\Url;
use Magento\Framework\Controller\ResultInterface;
class LoginController
{
/**
* @var Session
*/
protected $session;
/**
* @var Url
*/
protected $url;
public function __construct(Session $session, Url $url)
{
$this->session = $session;
$this->url = $url;
}
public function afterExecute(Login $login, ResultInterface $result)
{
$this->session->setAfterAmazonAuthUrl($this->url->getAccountUrl());
return $result;
}
}
| Smith-and-Associates/amazon-payments-magento-2-plugin | src/Login/Plugin/LoginController.php | PHP | apache-2.0 | 1,249 |
/*-
* -\-\-
* docker-client
* --
* Copyright (C) 2016 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.docker.client.messages;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import java.util.List;
@AutoValue
@JsonAutoDetect(fieldVisibility = ANY, getterVisibility = NONE, setterVisibility = NONE)
public abstract class CpuStats {
@JsonProperty("cpu_usage")
public abstract CpuUsage cpuUsage();
@JsonProperty("system_cpu_usage")
public abstract Long systemCpuUsage();
@JsonProperty("throttling_data")
public abstract ThrottlingData throttlingData();
@JsonCreator
static CpuStats create(
@JsonProperty("cpu_usage") final CpuUsage cpuUsage,
@JsonProperty("system_cpu_usage") final Long systemCpuUsage,
@JsonProperty("throttling_data") final ThrottlingData throttlingData) {
return new AutoValue_CpuStats(cpuUsage, systemCpuUsage, throttlingData);
}
@AutoValue
public abstract static class CpuUsage {
@JsonProperty("total_usage")
public abstract Long totalUsage();
@JsonProperty("percpu_usage")
public abstract ImmutableList<Long> percpuUsage();
@JsonProperty("usage_in_kernelmode")
public abstract Long usageInKernelmode();
@JsonProperty("usage_in_usermode")
public abstract Long usageInUsermode();
@JsonCreator
static CpuUsage create(
@JsonProperty("total_usage") final Long totalUsage,
@JsonProperty("percpu_usage") final List<Long> perCpuUsage,
@JsonProperty("usage_in_kernelmode") final Long usageInKernelmode,
@JsonProperty("usage_in_usermode") final Long usageInUsermode) {
return new AutoValue_CpuStats_CpuUsage(totalUsage, ImmutableList.copyOf(perCpuUsage),
usageInKernelmode, usageInUsermode);
}
}
@AutoValue
public abstract static class ThrottlingData {
@JsonProperty("periods")
public abstract Long periods();
@JsonProperty("throttled_periods")
public abstract Long throttledPeriods();
@JsonProperty("throttled_time")
public abstract Long throttledTime();
@JsonCreator
static ThrottlingData create(
@JsonProperty("periods") final Long periods,
@JsonProperty("throttled_periods") final Long throttledPeriods,
@JsonProperty("throttled_time") final Long throttledTime) {
return new AutoValue_CpuStats_ThrottlingData(periods, throttledPeriods, throttledTime);
}
}
}
| MarcoLotz/docker-client | src/main/java/com/spotify/docker/client/messages/CpuStats.java | Java | apache-2.0 | 3,323 |
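A sketch of how these value classes are typically populated — deserializing a JSON stats payload with Jackson. The JSON literal and the driver class are hypothetical; only the @JsonCreator factories defined above (and an ObjectMapper on the classpath) are assumed:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.spotify.docker.client.messages.CpuStats;

public class CpuStatsReadSketch {

  public static void main(String[] args) throws Exception {
    // field names mirror the cpu_stats block of the Docker stats API; the numbers are made up
    final String json = "{\"cpu_usage\":{\"total_usage\":100,\"percpu_usage\":[60,40],"
        + "\"usage_in_kernelmode\":30,\"usage_in_usermode\":70},"
        + "\"system_cpu_usage\":1000,"
        + "\"throttling_data\":{\"periods\":0,\"throttled_periods\":0,\"throttled_time\":0}}";
    final CpuStats stats = new ObjectMapper().readValue(json, CpuStats.class);
    System.out.println(stats.cpuUsage().totalUsage()); // prints 100
  }
}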
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_112) on Thu Apr 06 08:02:44 MST 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>org.wildfly.swarm.keycloak.server (Public javadocs 2017.4.0 API)</title>
<meta name="date" content="2017-04-06">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.wildfly.swarm.keycloak.server (Public javadocs 2017.4.0 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">WildFly Swarm API, 2017.4.0</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/wildfly/swarm/keycloak/deployment/package-summary.html">Prev Package</a></li>
<li><a href="../../../../../org/wildfly/swarm/logging/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/wildfly/swarm/keycloak/server/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Package" class="title">Package org.wildfly.swarm.keycloak.server</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Interface Summary table, listing interfaces, and an explanation">
<caption><span>Interface Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Interface</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../../../org/wildfly/swarm/keycloak/server/KeycloakServerProperties.html" title="interface in org.wildfly.swarm.keycloak.server">KeycloakServerProperties</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../../../org/wildfly/swarm/keycloak/server/KeycloakServerFraction.html" title="class in org.wildfly.swarm.keycloak.server">KeycloakServerFraction</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">WildFly Swarm API, 2017.4.0</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/wildfly/swarm/keycloak/deployment/package-summary.html">Prev Package</a></li>
<li><a href="../../../../../org/wildfly/swarm/logging/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/wildfly/swarm/keycloak/server/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p>
</body>
</html>
| wildfly-swarm/wildfly-swarm-javadocs | 2017.4.0/apidocs/org/wildfly/swarm/keycloak/server/package-summary.html | HTML | apache-2.0 | 6,069 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_151) on Fri Apr 06 09:47:11 MST 2018 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>EnhancedServerConsumer (BOM: * : All 2018.4.2 API)</title>
<meta name="date" content="2018-04-06">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="EnhancedServerConsumer (BOM: * : All 2018.4.2 API)";
}
}
catch(err) {
}
//-->
var methods = {"i0":18};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],16:["t5","Default Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/EnhancedServerConsumer.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">WildFly Swarm API, 2018.4.2</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/wildfly/swarm/messaging/EnhancedServer.html" title="class in org.wildfly.swarm.messaging"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../org/wildfly/swarm/messaging/MessagingFraction.html" title="class in org.wildfly.swarm.messaging"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/wildfly/swarm/messaging/EnhancedServerConsumer.html" target="_top">Frames</a></li>
<li><a href="EnhancedServerConsumer.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.wildfly.swarm.messaging</div>
<h2 title="Interface EnhancedServerConsumer" class="title">Interface EnhancedServerConsumer</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Superinterfaces:</dt>
<dd><a href="../../../../org/wildfly/swarm/config/messaging/activemq/ServerConsumer.html" title="interface in org.wildfly.swarm.config.messaging.activemq">ServerConsumer</a><<a href="../../../../org/wildfly/swarm/messaging/EnhancedServer.html" title="class in org.wildfly.swarm.messaging">EnhancedServer</a>></dd>
</dl>
<dl>
<dt>Functional Interface:</dt>
<dd>This is a functional interface and can therefore be used as the assignment target for a lambda expression or method reference.</dd>
</dl>
<hr>
<br>
<pre><a href="http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true" title="class or interface in java.lang">@FunctionalInterface</a>
public interface <span class="typeNameLabel">EnhancedServerConsumer</span>
extends <a href="../../../../org/wildfly/swarm/config/messaging/activemq/ServerConsumer.html" title="interface in org.wildfly.swarm.config.messaging.activemq">ServerConsumer</a><<a href="../../../../org/wildfly/swarm/messaging/EnhancedServer.html" title="class in org.wildfly.swarm.messaging">EnhancedServer</a>></pre>
<dl>
<dt><span class="simpleTagLabel">Author:</span></dt>
<dd>Bob McWhirter</dd>
</dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t5" class="tableTab"><span><a href="javascript:show(16);">Default Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>default <a href="../../../../org/wildfly/swarm/messaging/EnhancedServerConsumer.html" title="interface in org.wildfly.swarm.messaging">EnhancedServerConsumer</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/wildfly/swarm/messaging/EnhancedServerConsumer.html#then-org.wildfly.swarm.messaging.EnhancedServerConsumer-">then</a></span>(<a href="../../../../org/wildfly/swarm/messaging/EnhancedServerConsumer.html" title="interface in org.wildfly.swarm.messaging">EnhancedServerConsumer</a> after)</code> </td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.org.wildfly.swarm.config.messaging.activemq.ServerConsumer">
<!-- -->
</a>
<h3>Methods inherited from interface org.wildfly.swarm.config.messaging.activemq.<a href="../../../../org/wildfly/swarm/config/messaging/activemq/ServerConsumer.html" title="interface in org.wildfly.swarm.config.messaging.activemq">ServerConsumer</a></h3>
<code><a href="../../../../org/wildfly/swarm/config/messaging/activemq/ServerConsumer.html#accept-T-">accept</a>, <a href="../../../../org/wildfly/swarm/config/messaging/activemq/ServerConsumer.html#andThen-org.wildfly.swarm.config.messaging.activemq.ServerConsumer-">andThen</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="then-org.wildfly.swarm.messaging.EnhancedServerConsumer-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>then</h4>
<pre>default <a href="../../../../org/wildfly/swarm/messaging/EnhancedServerConsumer.html" title="interface in org.wildfly.swarm.messaging">EnhancedServerConsumer</a> then(<a href="../../../../org/wildfly/swarm/messaging/EnhancedServerConsumer.html" title="interface in org.wildfly.swarm.messaging">EnhancedServerConsumer</a> after)</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/EnhancedServerConsumer.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">WildFly Swarm API, 2018.4.2</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/wildfly/swarm/messaging/EnhancedServer.html" title="class in org.wildfly.swarm.messaging"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../org/wildfly/swarm/messaging/MessagingFraction.html" title="class in org.wildfly.swarm.messaging"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/wildfly/swarm/messaging/EnhancedServerConsumer.html" target="_top">Frames</a></li>
<li><a href="EnhancedServerConsumer.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2018 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p>
</body>
</html>
| wildfly-swarm/wildfly-swarm-javadocs | 2018.4.2/apidocs/org/wildfly/swarm/messaging/EnhancedServerConsumer.html | HTML | apache-2.0 | 10,555 |
package io.sensesecure.hadoop.xz;
import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.tukaani.xz.XZInputStream;
/**
*
* @author yongtang
*/
public class XZCompressionInputStream extends CompressionInputStream {
private BufferedInputStream bufferedIn;
private XZInputStream xzIn;
private boolean resetStateNeeded;
public XZCompressionInputStream(InputStream in) throws IOException {
super(in);
resetStateNeeded = false;
bufferedIn = new BufferedInputStream(super.in);
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (resetStateNeeded) {
resetStateNeeded = false;
bufferedIn = new BufferedInputStream(super.in);
xzIn = null;
}
return getInputStream().read(b, off, len);
}
@Override
public void resetState() throws IOException {
resetStateNeeded = true;
}
@Override
public int read() throws IOException {
        byte[] b = new byte[1];
int result = this.read(b, 0, 1);
return (result < 0) ? result : (b[0] & 0xff);
}
@Override
public void close() throws IOException {
if (!resetStateNeeded) {
if (xzIn != null) {
xzIn.close();
xzIn = null;
}
resetStateNeeded = true;
}
}
/**
* This compression stream ({@link #xzIn}) is initialized lazily, in case
* the data is not available at the time of initialization. This is
* necessary for the codec to be used in a {@link SequenceFile.Reader}, as
* it constructs the {@link XZCompressionInputStream} before putting data
* into its buffer. Eager initialization of {@link #xzIn} there results in
* an {@link EOFException}.
*/
private XZInputStream getInputStream() throws IOException {
if (xzIn == null) {
xzIn = new XZInputStream(bufferedIn);
}
return xzIn;
}
}
| yongtang/hadoop-xz | src/main/java/io/sensesecure/hadoop/xz/XZCompressionInputStream.java | Java | apache-2.0 | 2,173 |
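A usage sketch for the stream above, decompressing a local .xz file to stdout. The file name and driver class are hypothetical, and hadoop-common plus the xz library are assumed to be on the classpath:

import io.sensesecure.hadoop.xz.XZCompressionInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

public class XZReadSketch {

    public static void main(String[] args) throws Exception {
        // wrap any InputStream that carries an XZ container
        try (InputStream raw = new FileInputStream("data.xz");
                InputStream in = new XZCompressionInputStream(raw)) {
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf, 0, buf.length)) != -1) {
                System.out.write(buf, 0, n);
            }
            System.out.flush();
        }
    }
}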
#include "../control/ControlSystem_Peep.hpp"
#include "../control/ControlSystem_Teach.hpp"
#include <iostream>
#include "../types.hpp"
using namespace pathos::peepingpanel;
std::vector<ControlSystem_Peep*> CreateControlSystem(AllConfigArray configData,
SensorsAreasArray sensorsArea_1, SensorsAreasArray sensorsArea_2,
SensorsAreasArray sensorsArea_3, SensorsAreasArray sensorsArea_4,
SensorsThread* sensorsThread)
{
std::vector<ControlSystem_Peep*> controlSystems;
int config = configData[0];
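	// config is treated as a 4-bit mask: bit 3 selects motor 1, bit 2 motor 2,
	// bit 1 motor 3 and bit 0 motor 4 (see the binary patterns in the comments below).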
if(config == 0)
throw eeros::EEROSException("No motor connected");
else if(config == 1 ){ // 0 0 0 1
std::cout << "motor 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 2 ){ // 0 0 1 0
std::cout << "motor 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
}
else if(config == 3 ){ // 0 0 1 1
std::cout << "motor 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 4 ){ // 0 1 0 0
std::cout << "motor 2 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
}
else if(config == 5 ){ // 0 1 0 1
std::cout << "motor 2 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 6 ){ // 0 1 1 0
std::cout << "motor 2 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
}
else if(config == 7 ){ // 0 1 1 1
std::cout << "motor 2, 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 8 ){ // 1 0 0 0
std::cout << "motor 1 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
}
else if(config == 9 ){ // 1 0 0 1
std::cout << "motor 1 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 10){ // 1 0 1 0
std::cout << "motor 1 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
}
else if(config == 11){ // 1 0 1 1
std::cout << "motor 1, 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 12){ // 1 1 0 0
std::cout << "motor 1 and 2 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
}
else if(config == 13){ // 1 1 0 1
std::cout << "motor 1, 2 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else if(config == 14){ // 1 1 1 0
std::cout << "motor 1, 2 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
}
else if(config == 15){ // 1 1 1 1
std::cout << "all motors connected" << std::endl;
controlSystems.push_back(new ControlSystem_Peep("enc1", "dac1", sensorsThread, sensorsArea_1));
controlSystems.push_back(new ControlSystem_Peep("enc2", "dac2", sensorsThread, sensorsArea_2));
controlSystems.push_back(new ControlSystem_Peep("enc3", "dac3", sensorsThread, sensorsArea_3));
controlSystems.push_back(new ControlSystem_Peep("enc4", "dac4", sensorsThread, sensorsArea_4));
}
else
throw eeros::EEROSException("Invalid motor config value");
return controlSystems;
}
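// Illustrative sketch: configData[0] is treated as a 4-bit mask
// (bit 3 = motor 1, bit 2 = motor 2, bit 1 = motor 3, bit 0 = motor 4),
// which is what the sixteen branches above enumerate case by case.
// Assuming the "enc<n>"/"dac<n>" channel naming shown above and that the
// ControlSystem_Peep constructor accepts std::string names, an equivalent
// loop-based dispatch over the connected motors could look like:
//
//   SensorsAreasArray areas[4] = {sensorsArea_1, sensorsArea_2, sensorsArea_3, sensorsArea_4};
//   for (int motor = 1; motor <= 4; motor++) {
//       if (config & (1 << (4 - motor))) {
//           std::cout << "motor " << motor << " connected" << std::endl;
//           std::string n = std::to_string(motor);
//           controlSystems.push_back(new ControlSystem_Peep("enc" + n, "dac" + n,
//                                                           sensorsThread, areas[motor - 1]));
//       }
//   }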
std::vector<ControlSystem_Teach*> CreateControlSystem_teach(AllConfigArray configData) {
std::vector<ControlSystem_Teach*> controlSystems;
int config = configData[0];
if(config == 0)
throw eeros::EEROSException("No motor connected");
else if(config == 1 ){ // 0 0 0 1
std::cout << "motor 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 2 ){ // 0 0 1 0
std::cout << "motor 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
}
else if(config == 3 ){ // 0 0 1 1
std::cout << "motor 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 4 ){ // 0 1 0 0
std::cout << "motor 2 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
}
else if(config == 5 ){ // 0 1 0 1
std::cout << "motor 2 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 6 ){ // 0 1 1 0
std::cout << "motor 2 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
}
else if(config == 7 ){ // 0 1 1 1
std::cout << "motor 2, 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 8 ){ // 1 0 0 0
std::cout << "motor 1 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
}
else if(config == 9 ){ // 1 0 0 1
std::cout << "motor 1 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 10){ // 1 0 1 0
std::cout << "motor 1 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
}
else if(config == 11){ // 1 0 1 1
std::cout << "motor 1, 3 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 12){ // 1 1 0 0
std::cout << "motor 1 and 2 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
}
else if(config == 13){ // 1 1 0 1
std::cout << "motor 1, 2 and 4 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else if(config == 14){ // 1 1 1 0
std::cout << "motor 1, 2 and 3 connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
}
else if(config == 15){ // 1 1 1 1
std::cout << "all motors connected" << std::endl;
controlSystems.push_back(new ControlSystem_Teach("enc1", "dac1"));
controlSystems.push_back(new ControlSystem_Teach("enc2", "dac2"));
controlSystems.push_back(new ControlSystem_Teach("enc3", "dac3"));
controlSystems.push_back(new ControlSystem_Teach("enc4", "dac4"));
}
else
throw eeros::EEROSException("Invalid motor config value");
return controlSystems;
}
| ntb-ch/pathos | peeping-panels/config/CreateControlSystems.hpp | C++ | apache-2.0 | 12,090 |
package generics;
//: generics/SuperTypeWildcards.java
import java.util.*;
public class SuperTypeWildcards {
static void writeTo(List<? super Apple> apples) {
apples.add(new Apple());
apples.add(new Jonathan());
// apples.add(new Fruit()); // Error
}
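  // Illustrative usage sketch: because the parameter is List<? super Apple>, any
  // list whose element type is Apple or a supertype of Apple can be written to.
  // Assuming the companion classes of this package follow the book's hierarchy
  // (Jonathan extends Apple extends Fruit):
  //
  //   writeTo(new ArrayList<Apple>());   // OK
  //   writeTo(new ArrayList<Fruit>());   // OK
  //   writeTo(new ArrayList<Object>());  // OK
  //   // writeTo(new ArrayList<Jonathan>()); // Error: Jonathan is not a supertype of Apple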
} ///:~
| Shelley132/java-review | thinkinginjava/generics/SuperTypeWildcards.java | Java | apache-2.0 | 276 |
package org.giwi.geotracker.routes.priv;
import io.vertx.core.Vertx;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import org.giwi.geotracker.annotation.VertxRoute;
import org.giwi.geotracker.beans.AuthUtils;
import org.giwi.geotracker.exception.BusinessException;
import org.giwi.geotracker.services.ParamService;
import javax.inject.Inject;
/**
* The type Param route.
*/
@VertxRoute(rootPath = "/api/1/private/param")
public class ParamRoute implements VertxRoute.Route {
@Inject
private ParamService paramService;
@Inject
private AuthUtils authUtils;
/**
* Init router.
*
* @param vertx the vertx
* @return the router
*/
@Override
public Router init(Vertx vertx) {
Router router = Router.router(vertx);
router.get("/roles").handler(this::getRoles);
return router;
}
/**
* @api {get} /api/1/private/param/roles Get roles
* @apiName getRoles
* @apiGroup Params
* @apiDescription Get roles
* @apiHeader {String} secureToken User secureToken
* @apiSuccess {Array} roles Role[]
*/
private void getRoles(RoutingContext ctx) {
paramService.getRoles(res -> {
if (res.succeeded()) {
ctx.response().end(res.result().encode());
} else {
ctx.fail(new BusinessException(res.cause()));
}
});
}
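    // Illustrative request (hypothetical host, port and token value):
    //
    //   curl -H "secureToken: <token>" http://localhost:8080/api/1/private/param/roles
    //
    // On success the handler returns the JSON array produced by
    // paramService.getRoles(...) via res.result().encode(); on failure the
    // routing context fails with a BusinessException.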
}
| Giwi/geoTracker | src/main/java/org/giwi/geotracker/routes/priv/ParamRoute.java | Java | apache-2.0 | 1,430 |
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.model.translators;
import google.registry.util.CidrAddressBlock;
/** Stores {@link CidrAddressBlock} as a canonicalized string. */
public class CidrAddressBlockTranslatorFactory
extends AbstractSimpleTranslatorFactory<CidrAddressBlock, String> {
public CidrAddressBlockTranslatorFactory() {
super(CidrAddressBlock.class);
}
@Override
SimpleTranslator<CidrAddressBlock, String> createTranslator() {
return new SimpleTranslator<CidrAddressBlock, String>(){
@Override
public CidrAddressBlock loadValue(String datastoreValue) {
return CidrAddressBlock.create(datastoreValue);
}
@Override
public String saveValue(CidrAddressBlock pojoValue) {
return pojoValue.toString();
}};
}
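  // Round-trip sketch with an illustrative block (192.0.2.0/24 is a documentation range):
  //
  //   CidrAddressBlock block = CidrAddressBlock.create("192.0.2.0/24");
  //   String stored = block.toString();                 // canonicalized string form
  //   CidrAddressBlock restored = CidrAddressBlock.create(stored);
  //
  // saveValue/loadValue above perform exactly this conversion when the field is
  // written to and read back from Datastore.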
}
| google/nomulus | core/src/main/java/google/registry/model/translators/CidrAddressBlockTranslatorFactory.java | Java | apache-2.0 | 1,397 |
describe("", function() {
var rootEl;
beforeEach(function() {
rootEl = browser.rootEl;
browser.get("build/docs/examples/example-example60/index.html");
});
it('should check ng-bind', function() {
var nameInput = element(by.model('name'));
expect(element(by.binding('name')).getText()).toBe('Whirled');
nameInput.clear();
nameInput.sendKeys('world');
expect(element(by.binding('name')).getText()).toBe('world');
});
}); | LADOSSIFPB/nutrif | nutrif-web/lib/angular/docs/ptore2e/example-example60/default_test.js | JavaScript | apache-2.0 | 461 |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2021 Fabian Prasser and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.gui.view.impl.define;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.deidentifier.arx.gui.Controller;
import org.deidentifier.arx.gui.model.Model;
import org.deidentifier.arx.gui.model.ModelCriterion;
import org.deidentifier.arx.gui.model.ModelEvent;
import org.deidentifier.arx.gui.model.ModelEvent.ModelPart;
import org.deidentifier.arx.gui.model.ModelBLikenessCriterion;
import org.deidentifier.arx.gui.model.ModelDDisclosurePrivacyCriterion;
import org.deidentifier.arx.gui.model.ModelExplicitCriterion;
import org.deidentifier.arx.gui.model.ModelLDiversityCriterion;
import org.deidentifier.arx.gui.model.ModelRiskBasedCriterion;
import org.deidentifier.arx.gui.model.ModelTClosenessCriterion;
import org.deidentifier.arx.gui.resources.Resources;
import org.deidentifier.arx.gui.view.SWTUtil;
import org.deidentifier.arx.gui.view.def.IView;
import org.deidentifier.arx.gui.view.impl.common.ClipboardHandlerTable;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.TableItem;
import de.linearbits.swt.table.DynamicTable;
import de.linearbits.swt.table.DynamicTableColumn;
/**
* This class displays a list of all defined privacy criteria.
*
* @author fabian
*/
public class ViewPrivacyModels implements IView {
/** Controller */
private Controller controller;
/** Model */
private Model model = null;
/** View */
private final DynamicTable table;
/** View */
private final DynamicTableColumn column1;
/** View */
private final DynamicTableColumn column2;
/** View */
private final DynamicTableColumn column3;
/** View */
private final Composite root;
/** View */
private final Image symbolL;
/** View */
private final Image symbolT;
/** View */
private final Image symbolK;
/** View */
private final Image symbolD;
/** View */
private final Image symbolDP;
/** View */
private final Image symbolR;
/** View */
private final Image symbolG;
/** View */
private final Image symbolB;
/** View */
private final LayoutPrivacySettings layout;
/**
* Creates a new instance.
*
* @param parent
* @param controller
* @param layoutCriteria
*/
public ViewPrivacyModels(final Composite parent, final Controller controller, LayoutPrivacySettings layoutCriteria) {
// Register
this.controller = controller;
this.controller.addListener(ModelPart.CRITERION_DEFINITION, this);
this.controller.addListener(ModelPart.MODEL, this);
this.controller.addListener(ModelPart.ATTRIBUTE_TYPE, this);
this.controller.addListener(ModelPart.ATTRIBUTE_TYPE_BULK_UPDATE, this);
this.layout = layoutCriteria;
this.symbolL = controller.getResources().getManagedImage("symbol_l.png"); //$NON-NLS-1$
this.symbolT = controller.getResources().getManagedImage("symbol_t.png"); //$NON-NLS-1$
this.symbolK = controller.getResources().getManagedImage("symbol_k.png"); //$NON-NLS-1$
this.symbolD = controller.getResources().getManagedImage("symbol_d.png"); //$NON-NLS-1$
this.symbolDP = controller.getResources().getManagedImage("symbol_dp.png"); //$NON-NLS-1$
this.symbolR = controller.getResources().getManagedImage("symbol_r.png"); //$NON-NLS-1$
this.symbolG = controller.getResources().getManagedImage("symbol_gt.png"); //$NON-NLS-1$
this.symbolB = controller.getResources().getManagedImage("symbol_b.png"); //$NON-NLS-1$
this.root = parent;
this.table = SWTUtil.createTableDynamic(root, SWT.SINGLE | SWT.V_SCROLL | SWT.FULL_SELECTION);
this.table.setHeaderVisible(true);
this.table.setLinesVisible(true);
GridData gd = SWTUtil.createFillHorizontallyGridData();
gd.heightHint = 100;
this.table.setLayoutData(gd);
SWTUtil.createGenericTooltip(table);
this.table.setMenu(new ClipboardHandlerTable(table).getMenu());
this.table.addSelectionListener(new SelectionAdapter(){
public void widgetSelected(SelectionEvent arg0) {
layout.updateButtons();
}
});
this.column1 = new DynamicTableColumn(table, SWT.NONE);
this.column1.setText(Resources.getMessage("ViewCriteriaList.0")); //$NON-NLS-1$
this.column1.setWidth("10%", "30px"); //$NON-NLS-1$ //$NON-NLS-2$
this.column2 = new DynamicTableColumn(table, SWT.NONE);
this.column2.setText(Resources.getMessage("CriterionSelectionDialog.2")); //$NON-NLS-1$
this.column2.setWidth("45%", "100px"); //$NON-NLS-1$ //$NON-NLS-2$
this.column3 = new DynamicTableColumn(table, SWT.NONE);
this.column3.setText(Resources.getMessage("CriterionSelectionDialog.3")); //$NON-NLS-1$
this.column3.setWidth("45%", "100px"); //$NON-NLS-1$ //$NON-NLS-2$
this.column1.pack();
this.column2.pack();
this.column3.pack();
this.layout.updateButtons();
reset();
}
/**
* Add
*/
public void actionAdd() {
controller.actionCriterionAdd();
}
/**
* Configure
*/
public void actionConfigure() {
ModelCriterion criterion = this.getSelectedCriterion();
if (criterion != null) {
controller.actionCriterionConfigure(criterion);
}
}
/**
* Pull
*/
public void actionPull() {
ModelCriterion criterion = this.getSelectedCriterion();
if (criterion != null && criterion instanceof ModelExplicitCriterion) {
controller.actionCriterionPull(criterion);
}
}
/**
* Push
*/
public void actionPush() {
ModelCriterion criterion = this.getSelectedCriterion();
if (criterion != null && criterion instanceof ModelExplicitCriterion) {
controller.actionCriterionPush(criterion);
}
}
/**
* Remove
*/
public void actionRemove() {
ModelCriterion criterion = this.getSelectedCriterion();
if (criterion != null) {
controller.actionCriterionEnable(criterion);
}
}
@Override
public void dispose() {
this.controller.removeListener(this);
}
/**
* Returns the currently selected criterion, if any
* @return
*/
public ModelCriterion getSelectedCriterion() {
if (table.getSelection() == null || table.getSelection().length == 0) {
return null;
}
return (ModelCriterion)table.getSelection()[0].getData();
}
/**
* May criteria be added
* @return
*/
public boolean isAddEnabled() {
return model != null && model.getInputDefinition() != null &&
model.getInputDefinition().getQuasiIdentifyingAttributes() != null;
}
@Override
public void reset() {
root.setRedraw(false);
if (table != null) {
table.removeAll();
}
root.setRedraw(true);
SWTUtil.disable(root);
}
@Override
public void update(ModelEvent event) {
// Model update
if (event.part == ModelPart.MODEL) {
this.model = (Model)event.data;
}
// Other updates
if (event.part == ModelPart.CRITERION_DEFINITION ||
event.part == ModelPart.ATTRIBUTE_TYPE ||
event.part == ModelPart.ATTRIBUTE_TYPE_BULK_UPDATE ||
event.part == ModelPart.MODEL) {
// Update table
if (model!=null) {
updateTable();
}
}
}
/**
* Update table
*/
private void updateTable() {
root.setRedraw(false);
table.removeAll();
if (model.getDifferentialPrivacyModel().isEnabled()) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", model.getDifferentialPrivacyModel().toString(), "" }); //$NON-NLS-1$ //$NON-NLS-2$
item.setImage(0, symbolDP);
item.setData(model.getDifferentialPrivacyModel());
}
if (model.getKAnonymityModel().isEnabled()) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", model.getKAnonymityModel().toString(), "" }); //$NON-NLS-1$ //$NON-NLS-2$
item.setImage(0, symbolK);
item.setData(model.getKAnonymityModel());
}
if (model.getKMapModel().isEnabled()) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", model.getKMapModel().toString(), "" }); //$NON-NLS-1$ //$NON-NLS-2$
item.setImage(0, symbolK);
item.setData(model.getKMapModel());
}
if (model.getDPresenceModel().isEnabled()) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", model.getDPresenceModel().toString(), "" }); //$NON-NLS-1$ //$NON-NLS-2$
item.setImage(0, symbolD);
item.setData(model.getDPresenceModel());
}
if (model.getStackelbergModel().isEnabled()) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", model.getStackelbergModel().toString(), ""});
item.setImage(0, symbolG);
item.setData(model.getStackelbergModel());
}
List<ModelExplicitCriterion> explicit = new ArrayList<ModelExplicitCriterion>();
for (ModelLDiversityCriterion other : model.getLDiversityModel().values()) {
if (other.isEnabled()) {
explicit.add(other);
}
}
for (ModelTClosenessCriterion other : model.getTClosenessModel().values()) {
if (other.isEnabled()) {
explicit.add(other);
}
}
for (ModelDDisclosurePrivacyCriterion other : model.getDDisclosurePrivacyModel().values()) {
if (other.isEnabled()) {
explicit.add(other);
}
}
for (ModelBLikenessCriterion other : model.getBLikenessModel().values()) {
if (other.isEnabled()) {
explicit.add(other);
}
}
Collections.sort(explicit, new Comparator<ModelExplicitCriterion>(){
public int compare(ModelExplicitCriterion o1, ModelExplicitCriterion o2) {
return o1.getAttribute().compareTo(o2.getAttribute());
}
});
for (ModelExplicitCriterion c :explicit) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", c.toString(), c.getAttribute() }); //$NON-NLS-1$
if (c instanceof ModelLDiversityCriterion) {
item.setImage(0, symbolL);
} else if (c instanceof ModelTClosenessCriterion) {
item.setImage(0, symbolT);
} else if (c instanceof ModelDDisclosurePrivacyCriterion) {
item.setImage(0, symbolD);
} else if (c instanceof ModelBLikenessCriterion) {
item.setImage(0, symbolB);
}
item.setData(c);
}
List<ModelRiskBasedCriterion> riskBased = new ArrayList<ModelRiskBasedCriterion>();
for (ModelRiskBasedCriterion other : model.getRiskBasedModel()) {
if (other.isEnabled()) {
riskBased.add(other);
}
}
Collections.sort(riskBased, new Comparator<ModelRiskBasedCriterion>(){
public int compare(ModelRiskBasedCriterion o1, ModelRiskBasedCriterion o2) {
return o1.getLabel().compareTo(o2.getLabel());
}
});
for (ModelRiskBasedCriterion c : riskBased) {
TableItem item = new TableItem(table, SWT.NONE);
item.setText(new String[] { "", c.toString(), "" }); //$NON-NLS-1$ //$NON-NLS-2$
item.setImage(0, symbolR);
item.setData(c);
}
// Update
layout.updateButtons();
root.setRedraw(true);
SWTUtil.enable(root);
table.redraw();
}
}
| arx-deidentifier/arx | src/gui/org/deidentifier/arx/gui/view/impl/define/ViewPrivacyModels.java | Java | apache-2.0 | 13,712 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html>
<head>
<title>Filter - ScalaTest 2.2.4 - org.scalatest.Filter</title>
<meta name="description" content="Filter - ScalaTest 2.2.4 - org.scalatest.Filter" />
<meta name="keywords" content="Filter ScalaTest 2.2.4 org.scalatest.Filter" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript" src="../../lib/jquery.js" id="jquery-js"></script>
<script type="text/javascript" src="../../lib/jquery-ui.js"></script>
<script type="text/javascript" src="../../lib/template.js"></script>
<script type="text/javascript" src="../../lib/tools.tooltip.js"></script>
<script type="text/javascript">
if(top === self) {
var url = '../../index.html';
var hash = 'org.scalatest.Filter$';
var anchor = window.location.hash;
var anchor_opt = '';
if (anchor.length >= 1)
anchor_opt = '@' + anchor.substring(1);
window.location.href = url + '#' + hash + anchor_opt;
}
</script>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-71294502-3', 'auto');
ga('send', 'pageview');
</script>
</head>
<body class="value">
<!-- Top of doc.scalatest.org [javascript] -->
<script type="text/javascript">
var rnd = window.rnd || Math.floor(Math.random()*10e6);
var pid204546 = window.pid204546 || rnd;
var plc204546 = window.plc204546 || 0;
var abkw = window.abkw || '';
var absrc = 'http://ab167933.adbutler-ikon.com/adserve/;ID=167933;size=468x60;setID=204546;type=js;sw='+screen.width+';sh='+screen.height+';spr='+window.devicePixelRatio+';kw='+abkw+';pid='+pid204546+';place='+(plc204546++)+';rnd='+rnd+';click=CLICK_MACRO_PLACEHOLDER';
document.write('<scr'+'ipt src="'+absrc+'" type="text/javascript"></scr'+'ipt>');
</script>
<div id="definition">
<a href="Filter.html" title="Go to companion"><img src="../../lib/object_to_class_big.png" /></a>
<p id="owner"><a href="../package.html" class="extype" name="org">org</a>.<a href="package.html" class="extype" name="org.scalatest">scalatest</a></p>
<h1><a href="Filter.html" title="Go to companion">Filter</a></h1>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<span class="name">Filter</span><span class="result"> extends <span class="extype" name="scala.Serializable">Serializable</span></span>
</span>
</h4>
<div id="comment" class="fullcommenttop"><dl class="attributes block"> <dt>Source</dt><dd><a href="https://github.com/scalatest/scalatest/tree/release-2.2.4-for-scala-2.11-and-2.10/src/main/scala/org/scalatest/Filter.scala" target="_blank">Filter.scala</a></dd></dl><div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent"><span class="extype" name="scala.Serializable">Serializable</span>, <span class="extype" name="java.io.Serializable">Serializable</span>, <span class="extype" name="scala.AnyRef">AnyRef</span>, <span class="extype" name="scala.Any">Any</span></div>
</div></div>
<div id="mbrsel">
<div id="textfilter"><span class="pre"></span><span class="input"><input id="mbrsel-input" type="text" accesskey="/" /></span><span class="post"></span></div>
<div id="order">
<span class="filtertype">Ordering</span>
<ol>
<li class="alpha in"><span>Alphabetic</span></li>
<li class="inherit out"><span>By inheritance</span></li>
</ol>
</div>
<div id="ancestors">
<span class="filtertype">Inherited<br />
</span>
<ol id="linearization">
<li class="in" name="org.scalatest.Filter"><span>Filter</span></li><li class="in" name="scala.Serializable"><span>Serializable</span></li><li class="in" name="java.io.Serializable"><span>Serializable</span></li><li class="in" name="scala.AnyRef"><span>AnyRef</span></li><li class="in" name="scala.Any"><span>Any</span></li>
</ol>
</div><div id="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
<a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
<div id="template">
<div id="allMembers">
<div id="values" class="values members">
<h3>Value Members</h3>
<ol><li name="scala.AnyRef#!=" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:AnyRef):Boolean"></a>
<a id="!=(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.Any#!=" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:Any):Boolean"></a>
<a id="!=(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef###" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="##():Int"></a>
<a id="##():Int"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $hash$hash" class="name">##</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#==" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:AnyRef):Boolean"></a>
<a id="==(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.Any#==" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:Any):Boolean"></a>
<a id="==(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="org.scalatest.Filter#apply" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="apply(tagsToInclude:Option[Set[String]],tagsToExclude:Set[String],excludeNestedSuites:Boolean,dynaTags:org.scalatest.DynaTags):org.scalatest.Filter"></a>
<a id="apply(Option[Set[String]],Set[String],Boolean,DynaTags):Filter"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">apply</span><span class="params">(<span name="tagsToInclude">tagsToInclude: <span class="extype" name="scala.Option">Option</span>[<span class="extype" name="scala.Predef.Set">Set</span>[<span class="extype" name="scala.Predef.String">String</span>]] = <span class="symbol">None</span></span>, <span name="tagsToExclude">tagsToExclude: <span class="extype" name="scala.Predef.Set">Set</span>[<span class="extype" name="scala.Predef.String">String</span>] = <span class="symbol">Set(IgnoreTag)</span></span>, <span name="excludeNestedSuites">excludeNestedSuites: <span class="extype" name="scala.Boolean">Boolean</span> = <span class="symbol">false</span></span>, <span name="dynaTags">dynaTags: <a href="DynaTags.html" class="extype" name="org.scalatest.DynaTags">DynaTags</a> = <span class="symbol">DynaTags(Map.empty, Map.empty)</span></span>)</span><span class="result">: <a href="Filter.html" class="extype" name="org.scalatest.Filter">Filter</a></span>
</span>
</h4>
<p class="shortcomment cmt">Factory method for a <code>Filter</code> initialized with the passed <code>tagsToInclude</code>
and <code>tagsToExclude</code>.</p><div class="fullcomment"><div class="comment cmt"><p>Factory method for a <code>Filter</code> initialized with the passed <code>tagsToInclude</code>
and <code>tagsToExclude</code>.
</p></div><dl class="paramcmts block"><dt class="param">tagsToInclude</dt><dd class="cmt"><p>an optional <code>Set</code> of <code>String</code> tag names to include (<em>i.e.</em>, not filter out) when filtering tests</p></dd><dt class="param">tagsToExclude</dt><dd class="cmt"><p>a <code>Set</code> of <code>String</code> tag names to exclude (<em>i.e.</em>, filter out) when filtering tests</p></dd><dt class="param">excludeNestedSuites</dt><dd class="cmt"><p>a <code>Boolean</code> to indicate whether to run nested suites</p></dd><dt class="param">dynaTags</dt><dd class="cmt"><p>dynamic tags for the filter
</p></dd></dl><dl class="attributes block"> <dt>Exceptions thrown</dt><dd><span class="cmt">IllegalArgumentException<p>if <code>tagsToInclude</code> is defined, but contains an empty set
</p></span><span class="cmt">NullPointerException<p>if either <code>tagsToInclude</code> or <code>tagsToExclude</code> are null</p></span></dd></dl></div>
</li><li name="scala.Any#asInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="asInstanceOf[T0]:T0"></a>
<a id="asInstanceOf[T0]:T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">asInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Any.asInstanceOf.T0">T0</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#clone" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="clone():Object"></a>
<a id="clone():AnyRef"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">clone</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="org.scalatest.Filter#default" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="default:org.scalatest.Filter"></a>
<a id="default:Filter"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">default</span><span class="result">: <a href="Filter.html" class="extype" name="org.scalatest.Filter">Filter</a></span>
</span>
</h4>
<p class="shortcomment cmt">Factory method for a default <code>Filter</code>, for which <code>tagsToInclude is <code>None</code>,
<code>tagsToExclude</code> is <code>Set("org.scalatest.Ignore")</code>, and <code>excludeNestedSuites</code> is false.
</p><div class="fullcomment"><div class="comment cmt"><p>Factory method for a default <code>Filter</code>, for which <code>tagsToInclude is <code>None</code>,
<code>tagsToExclude</code> is <code>Set("org.scalatest.Ignore")</code>, and <code>excludeNestedSuites</code> is false.
</p></div><dl class="paramcmts block"><dt>returns</dt><dd class="cmt"><p>a default <code>Filter</code>
</p></dd></dl></div>
</li><li name="scala.AnyRef#eq" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="eq(x$1:AnyRef):Boolean"></a>
<a id="eq(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">eq</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#equals" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="equals(x$1:Any):Boolean"></a>
<a id="equals(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">equals</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#finalize" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="finalize():Unit"></a>
<a id="finalize():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">finalize</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#getClass" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="getClass():Class[_]"></a>
<a id="getClass():Class[_]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">getClass</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.Class">Class</span>[_]</span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#hashCode" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="hashCode():Int"></a>
<a id="hashCode():Int"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">hashCode</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.Any#isInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="isInstanceOf[T0]:Boolean"></a>
<a id="isInstanceOf[T0]:Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">isInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#ne" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="ne(x$1:AnyRef):Boolean"></a>
<a id="ne(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">ne</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notify" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notify():Unit"></a>
<a id="notify():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notify</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notifyAll" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notifyAll():Unit"></a>
<a id="notifyAll():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notifyAll</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#synchronized" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="synchronized[T0](x$1:=>T0):T0"></a>
<a id="synchronized[T0](⇒T0):T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">synchronized</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="params">(<span name="arg0">arg0: ⇒ <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>)</span><span class="result">: <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#toString" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="toString():String"></a>
<a id="toString():String"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">toString</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.String">String</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait():Unit"></a>
<a id="wait():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long,x$2:Int):Unit"></a>
<a id="wait(Long,Int):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>, <span name="arg1">arg1: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long):Unit"></a>
<a id="wait(Long):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
</h4>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">()</span>
</dd></dl></div>
</li></ol>
</div>
</div>
<div id="inheritedMembers">
<div class="parent" name="scala.Serializable">
<h3>Inherited from <span class="extype" name="scala.Serializable">Serializable</span></h3>
</div><div class="parent" name="java.io.Serializable">
<h3>Inherited from <span class="extype" name="java.io.Serializable">Serializable</span></h3>
</div><div class="parent" name="scala.AnyRef">
<h3>Inherited from <span class="extype" name="scala.AnyRef">AnyRef</span></h3>
</div><div class="parent" name="scala.Any">
<h3>Inherited from <span class="extype" name="scala.Any">Any</span></h3>
</div>
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
</body>
</html> | scalatest/scalatest-website | public/scaladoc/2.2.4/org/scalatest/Filter$.html | HTML | apache-2.0 | 26,972 |
<?php
namespace Circle314\Component\Data\Persistence\Operation\Cache;
use Circle314\Component\Data\Persistence\Operation\Response\ResponseInterface;
use Circle314\Concept\Identification\IdentifiableInterface;
interface QueryInterface extends IdentifiableInterface
{
/**
* Gets an existing Response for the Query.
*
* @param $responseID
* @return ResponseInterface
*/
public function getResponse($responseID);
/**
* Whether or not there is an existing Response for the Query.
*
* @param $responseID
* @return bool
*/
public function hasResponse($responseID);
/**
* Saves a Response against the Query.
*
* @param $responseID
* @param $response
*/
public function saveResponse($responseID, $response);
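    // Illustrative sketch (PHP 7+, not part of this interface): a minimal
    // array-backed implementation of the three methods above could look like
    // the following; the inherited IdentifiableInterface methods are omitted.
    //
    //   private $responses = [];
    //
    //   public function getResponse($responseID)
    //   {
    //       return $this->responses[$responseID] ?? null;
    //   }
    //
    //   public function hasResponse($responseID)
    //   {
    //       return array_key_exists($responseID, $this->responses);
    //   }
    //
    //   public function saveResponse($responseID, $response)
    //   {
    //       $this->responses[$responseID] = $response;
    //   }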
} | circle314/circle314 | src/Component/Data/Persistence/Operation/Cache/QueryInterface.php | PHP | apache-2.0 | 804 |
import React from 'react';
import { action } from '@storybook/addon-actions';
import Checkbox from '.';
const onChange = action('onChange');
const defaultProps = {
id: 'id1',
onChange,
};
const intermediate = {
id: 'id2',
onChange,
intermediate: true,
};
const checked = {
id: 'id3',
onChange,
checked: true,
};
const disabled = {
id: 'id4',
onChange,
disabled: true,
};
const withLabel = {
id: 'id5',
onChange,
label: 'Some label',
};
export default {
title: 'Form/Controls/Checkbox',
};
export const Default = () => (
<div style={{ padding: 30 }}>
<h1>Checkbox</h1>
<h2>Definition</h2>
      <p>The Checkbox component is basically a fancy checkbox, like the one you have on your iPhone.</p>
<h2>Examples</h2>
<form>
<h3>Default Checkbox</h3>
<Checkbox {...defaultProps} />
<h3>
Checkbox with <code>intermediate: true</code>
</h3>
<Checkbox {...intermediate} />
<h3>
Checkbox with <code>checked: true</code>
</h3>
<Checkbox {...checked} />
<h3>
Checkbox with <code>disabled: true</code>
</h3>
<Checkbox {...disabled} />
<h3>
Checkbox with <code>label: Some label</code>
</h3>
<Checkbox {...withLabel} />
</form>
</div>
);
| Talend/ui | packages/components/src/Checkbox/Checkbox.stories.js | JavaScript | apache-2.0 | 1,203 |
<table class="table table-striped" style="max-width: 800px;">
<thead>
<tr>
<th>#</th>
<th>Name</th>
<th>IBAN</th>
      <th>Bank</th>
      <th></th>
    </tr>
</thead>
<tbody>
<tr>
<th scope="row">1</th>
<td>Rabo personal</td>
<td>NL15 RABO 01398 1237</td>
<td>Rabobank</td>
<td><button type="button" class="btn btn-danger">
Disconnect
</button></td>
</tr>
<tr>
<th scope="row">2</th>
<td>Danska</td>
<td>FI14 DANS 02130 1233</td>
<td>Danska Bank</td>
<td>
<button type="button" class="btn btn-danger">
Disconnect
</button>
</td>
</tr>
</tbody>
</table>
<form>
<p>
<button type="button" class="btn btn-default">
<span class="glyphicon glyphicon-plus-sign"></span> Connect another bank account
</button>
</p>
</form>
<div class="panel panel-default" style="max-width: 800px;">
<div class="panel-body">
<form>
<div class="form-group">
<label for="exampleInputEmail1">Email address</label>
<input type="email" class="form-control" id="exampleInputEmail1" placeholder="Enter email" value="[email protected]" >
</div>
<div class="form-group">
<label for="exampleInputPassword1">Password</label>
<input type="password" class="form-control" id="exampleInputPassword1" placeholder="Password">
</div>
<button type="submit" class="btn btn-default">Submit</button>
</form>
</div>
</div> | hanguyenhuu/doekoe | app/settings/settings.html | HTML | apache-2.0 | 1,419 |
/*
* Copyright (c) 2016 Hugo Matalonga & João Paulo Fernandes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hmatalonga.greenhub.ui;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.app.AppOpsManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.content.ContextCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.hmatalonga.greenhub.Config;
import com.hmatalonga.greenhub.R;
import com.hmatalonga.greenhub.events.OpenTaskDetailsEvent;
import com.hmatalonga.greenhub.events.TaskRemovedEvent;
import com.hmatalonga.greenhub.managers.TaskController;
import com.hmatalonga.greenhub.models.Memory;
import com.hmatalonga.greenhub.models.ui.Task;
import com.hmatalonga.greenhub.ui.adapters.TaskAdapter;
import com.hmatalonga.greenhub.util.SettingsUtils;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class TaskListActivity extends BaseActivity {
private ArrayList<Task> mTaskList;
private RecyclerView mRecyclerView;
private TaskAdapter mAdapter;
/**
* The {@link android.support.v4.widget.SwipeRefreshLayout} that detects swipe gestures and
* triggers callbacks in the app.
*/
private SwipeRefreshLayout mSwipeRefreshLayout;
private ProgressBar mLoader;
private Task mLastKilledApp;
private long mLastKilledTimestamp;
private boolean mIsUpdating;
private int mSortOrderName;
private int mSortOrderMemory;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (!SettingsUtils.isTosAccepted(getApplicationContext())) {
startActivity(new Intent(this, WelcomeActivity.class));
finish();
return;
}
setContentView(R.layout.activity_task_list);
Toolbar toolbar = findViewById(R.id.toolbar_actionbar);
if (toolbar != null) {
setSupportActionBar(toolbar);
}
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setDisplayHomeAsUpEnabled(true);
}
loadComponents();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_task_list, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
startActivity(new Intent(this, SettingsActivity.class));
return true;
} else if (id == R.id.action_sort_memory) {
sortTasksBy(Config.SORT_BY_MEMORY, mSortOrderMemory);
mSortOrderMemory = -mSortOrderMemory;
return true;
} else if (id == R.id.action_sort_name) {
sortTasksBy(Config.SORT_BY_NAME, mSortOrderName);
mSortOrderName = -mSortOrderName;
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onStart() {
super.onStart();
EventBus.getDefault().register(this);
}
@Override
public void onStop() {
EventBus.getDefault().unregister(this);
super.onStop();
}
@Override
public void onResume() {
super.onResume();
if (!mIsUpdating) initiateRefresh();
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onTaskRemovedEvent(TaskRemovedEvent event) {
updateHeaderInfo();
mLastKilledApp = event.task;
mLastKilledTimestamp = System.currentTimeMillis();
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onOpenTaskDetailsEvent(OpenTaskDetailsEvent event) {
startActivity(new Intent(
Settings.ACTION_APPLICATION_DETAILS_SETTINGS,
Uri.parse("package:" + event.task.getPackageInfo().packageName)
));
}
private void loadComponents() {
Toolbar toolbar = findViewById(R.id.toolbar_actionbar);
setSupportActionBar(toolbar);
mLoader = findViewById(R.id.loader);
mLastKilledApp = null;
mSortOrderName = 1;
mSortOrderMemory = 1;
FloatingActionButton fab = findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mTaskList.isEmpty()) {
Snackbar.make(
view,
getString(R.string.task_no_apps_running),
Snackbar.LENGTH_LONG
).show();
return;
}
int apps = 0;
double memory = 0;
String message;
TaskController controller = new TaskController(getApplicationContext());
for (Task task : mTaskList) {
if (!task.isChecked()) continue;
controller.killApp(task);
memory += task.getMemory();
apps++;
}
memory = Math.round(memory * 100.0) / 100.0;
mRecyclerView.setVisibility(View.GONE);
mLoader.setVisibility(View.VISIBLE);
initiateRefresh();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
message = (apps > 0) ?
makeMessage(apps) :
getString(R.string.task_no_apps_killed);
} else {
message = (apps > 0) ?
makeMessage(apps, memory) :
getString(R.string.task_no_apps_killed);
}
Snackbar.make(
view,
message,
Snackbar.LENGTH_LONG
).show();
}
});
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N &&
!hasSpecialPermission(getApplicationContext())) {
showPermissionInfoDialog();
}
mTaskList = new ArrayList<>();
mIsUpdating = false;
setupRefreshLayout();
setupRecyclerView();
}
private void sortTasksBy(final int filter, final int order) {
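        // mSortOrderName and mSortOrderMemory start at 1 and are negated after each
        // use in onOptionsItemSelected, so repeated taps on the same sort action
        // alternate between ascending (order == 1) and descending (order == -1) results.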
if (filter == Config.SORT_BY_MEMORY) {
// Sort by memory
Collections.sort(mTaskList, new Comparator<Task>() {
@Override
public int compare(Task t1, Task t2) {
int result;
if (t1.getMemory() < t2.getMemory()) {
result = -1;
} else if (t1.getMemory() == t2.getMemory()) {
result = 0;
} else {
result = 1;
}
return order * result;
}
});
} else if (filter == Config.SORT_BY_NAME) {
// Sort by name
Collections.sort(mTaskList, new Comparator<Task>() {
@Override
public int compare(Task t1, Task t2) {
return order * t1.getLabel().compareTo(t2.getLabel());
}
});
}
mAdapter.notifyDataSetChanged();
}
private String makeMessage(int apps) {
return getString(R.string.task_killed) + " " + apps + " apps!";
}
private String makeMessage(int apps, double memory) {
return getString(R.string.task_killed) + " " + apps + " apps! " +
getString(R.string.task_cleared) + " " + memory + " MB";
}
private void setupRecyclerView() {
mRecyclerView = findViewById(R.id.rv);
// use this setting to improve performance if you know that changes
// in content do not change the layout size of the RecyclerView
mRecyclerView.setHasFixedSize(true);
// use a linear layout manager
mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
mAdapter = new TaskAdapter(getApplicationContext(), mTaskList);
mRecyclerView.setAdapter(mAdapter);
setUpItemTouchHelper();
setUpAnimationDecoratorHelper();
}
private void setupRefreshLayout() {
mSwipeRefreshLayout = findViewById(R.id.swipe_layout);
//noinspection ResourceAsColor
if (Build.VERSION.SDK_INT >= 23) {
mSwipeRefreshLayout.setColorSchemeColors(
getColor(R.color.color_accent),
getColor(R.color.color_primary_dark)
);
} else {
final Context context = getApplicationContext();
mSwipeRefreshLayout.setColorSchemeColors(
ContextCompat.getColor(context, R.color.color_accent),
ContextCompat.getColor(context, R.color.color_primary_dark)
);
}
mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
if (!mIsUpdating) initiateRefresh();
}
});
}
/**
     * This is the standard support library way of implementing the "swipe to delete" feature.
     * You can do custom drawing in the onChildDraw method, but whatever you draw will
     * disappear once the swipe is over, and while the items are animating to their
     * new position the recycler view background will be visible.
     * That is rarely a desired effect.
*/
private void setUpItemTouchHelper() {
ItemTouchHelper.SimpleCallback simpleItemTouchCallback =
new ItemTouchHelper.SimpleCallback(0, ItemTouchHelper.LEFT) {
// we want to cache these and not allocate anything repeatedly in the onChildDraw method
Drawable background;
Drawable xMark;
int xMarkMargin;
boolean initiated;
private void init() {
background = new ColorDrawable(Color.DKGRAY);
xMark = ContextCompat.getDrawable(
TaskListActivity.this, R.drawable.ic_delete_white_24dp
);
xMark.setColorFilter(Color.WHITE, PorterDuff.Mode.SRC_ATOP);
xMarkMargin = (int) TaskListActivity.this.getResources()
.getDimension(R.dimen.fab_margin);
initiated = true;
}
// not important, we don't want drag & drop
@Override
public boolean onMove(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder,
RecyclerView.ViewHolder target) {
return false;
}
@Override
public int getSwipeDirs(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder) {
int position = viewHolder.getAdapterPosition();
TaskAdapter testAdapter = (TaskAdapter) recyclerView.getAdapter();
if (testAdapter.isUndoOn() && testAdapter.isPendingRemoval(position)) {
return 0;
}
return super.getSwipeDirs(recyclerView, viewHolder);
}
@Override
public void onSwiped(RecyclerView.ViewHolder viewHolder, int swipeDir) {
int swipedPosition = viewHolder.getAdapterPosition();
TaskAdapter adapter = (TaskAdapter) mRecyclerView.getAdapter();
boolean undoOn = adapter.isUndoOn();
if (undoOn) {
adapter.pendingRemoval(swipedPosition);
} else {
adapter.remove(swipedPosition);
}
}
@Override
public void onChildDraw(Canvas canvas, RecyclerView recyclerView,
RecyclerView.ViewHolder viewHolder, float dX, float dY,
int actionState, boolean isCurrentlyActive) {
View itemView = viewHolder.itemView;
                    // not sure why, but this method gets called
                    // for view holders that are already swiped away
if (viewHolder.getAdapterPosition() == -1) {
// not interested in those
return;
}
if (!initiated) {
init();
}
// draw background
background.setBounds(
itemView.getRight() + (int) dX,
itemView.getTop(),
itemView.getRight(),
itemView.getBottom()
);
background.draw(canvas);
// draw x mark
int itemHeight = itemView.getBottom() - itemView.getTop();
int intrinsicWidth = xMark.getIntrinsicWidth();
                    int intrinsicHeight = xMark.getIntrinsicHeight();
int xMarkLeft = itemView.getRight() - xMarkMargin - intrinsicWidth;
int xMarkRight = itemView.getRight() - xMarkMargin;
int xMarkTop = itemView.getTop() + (itemHeight - intrinsicHeight) / 2;
int xMarkBottom = xMarkTop + intrinsicHeight;
xMark.setBounds(xMarkLeft, xMarkTop, xMarkRight, xMarkBottom);
xMark.draw(canvas);
super.onChildDraw(canvas, recyclerView, viewHolder,
dX, dY, actionState, isCurrentlyActive);
}
};
ItemTouchHelper mItemTouchHelper = new ItemTouchHelper(simpleItemTouchCallback);
mItemTouchHelper.attachToRecyclerView(mRecyclerView);
}
/**
     * We're going to set up another ItemDecoration that will draw the dark gray background
     * in the empty space while the items are animating to their new positions
     * after an item is removed.
*/
private void setUpAnimationDecoratorHelper() {
mRecyclerView.addItemDecoration(new RecyclerView.ItemDecoration() {
// we want to cache this and not allocate anything repeatedly in the onDraw method
Drawable background;
boolean initiated;
private void init() {
background = new ColorDrawable(Color.DKGRAY);
initiated = true;
}
@Override
public void onDraw(Canvas canvas, RecyclerView parent, RecyclerView.State state) {
if (!initiated) {
init();
}
// only if animation is in progress
if (parent.getItemAnimator().isRunning()) {
// some items might be animating down and some items might be
// animating up to close the gap left by the removed item
                    // this is not exclusive, both movements can be happening at the same time
// to reproduce this leave just enough items so the first one
// and the last one would be just a little off screen
// then remove one from the middle
// find first child with translationY > 0
// and last one with translationY < 0
// we're after a rect that is not covered in recycler-view views
// at this point in time
View lastViewComingDown = null;
View firstViewComingUp = null;
// this is fixed
int left = 0;
int right = parent.getWidth();
// this we need to find out
int top = 0;
int bottom = 0;
// find relevant translating views
int childCount = parent.getLayoutManager().getChildCount();
for (int i = 0; i < childCount; i++) {
View child = parent.getLayoutManager().getChildAt(i);
if (child.getTranslationY() < 0) {
// view is coming down
lastViewComingDown = child;
} else if (child.getTranslationY() > 0) {
// view is coming up
if (firstViewComingUp == null) {
firstViewComingUp = child;
}
}
}
if (lastViewComingDown != null && firstViewComingUp != null) {
// views are coming down AND going up to fill the void
top = lastViewComingDown.getBottom() +
(int) lastViewComingDown.getTranslationY();
bottom = firstViewComingUp.getTop() +
(int) firstViewComingUp.getTranslationY();
} else if (lastViewComingDown != null) {
// views are going down to fill the void
top = lastViewComingDown.getBottom() +
(int) lastViewComingDown.getTranslationY();
bottom = lastViewComingDown.getBottom();
} else if (firstViewComingUp != null) {
// views are coming up to fill the void
top = firstViewComingUp.getTop();
bottom = firstViewComingUp.getTop() +
(int) firstViewComingUp.getTranslationY();
}
background.setBounds(left, top, right, bottom);
background.draw(canvas);
}
super.onDraw(canvas, parent, state);
}
});
}
/**
     * By abstracting the refresh process into a single method, the app allows both the
     * SwipeRefreshLayout's onRefresh() callback and the Refresh action item to refresh the content.
*/
private void initiateRefresh() {
mIsUpdating = true;
setHeaderToRefresh();
/**
* Execute the background task, which uses {@link android.os.AsyncTask} to load the data.
*/
new LoadRunningProcessesTask().execute(getApplicationContext());
}
/**
     * When the AsyncTask finishes, it calls onRefreshComplete(), which swaps the new data into
     * the RecyclerView adapter and hides the loading and refresh indicators.
*/
private void onRefreshComplete(List<Task> result) {
if (mLoader.getVisibility() == View.VISIBLE) {
mLoader.setVisibility(View.GONE);
mRecyclerView.setVisibility(View.VISIBLE);
}
        // Replace the adapter's current items with the freshly loaded ones
mAdapter.swap(result);
mIsUpdating = false;
updateHeaderInfo();
// Stop the refreshing indicator
mSwipeRefreshLayout.setRefreshing(false);
}
private void updateHeaderInfo() {
String text;
TextView textView = findViewById(R.id.count);
text = "Apps " + mTaskList.size();
textView.setText(text);
textView = findViewById(R.id.usage);
double memory = Memory.getAvailableMemoryMB(getApplicationContext());
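        // Display free RAM in GB once it exceeds 1000 MB, otherwise keep it in MB.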
if (memory > 1000) {
text = getString(R.string.task_free_ram) + " " +
(Math.round(memory / 1000.0)) + " GB";
} else {
text = getString(R.string.task_free_ram) + " " + memory + " MB";
}
textView.setText(text);
}
private void setHeaderToRefresh() {
TextView textView = findViewById(R.id.count);
textView.setText(getString(R.string.header_status_loading));
textView = findViewById(R.id.usage);
textView.setText("");
}
private double getTotalUsage(List<Task> list) {
double usage = 0;
for (Task task : list) {
usage += task.getMemory();
}
return Math.round(usage * 100.0) / 100.0;
}
private boolean isKilledAppAlive(final String label) {
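        // Once the kill timeout has elapsed, forget the last killed app;
        // otherwise report it as still alive if its label appears in the current task list.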
long now = System.currentTimeMillis();
if (mLastKilledTimestamp < (now - Config.KILL_APP_TIMEOUT)) {
mLastKilledApp = null;
return false;
}
for (Task task : mTaskList) {
if (task.getLabel().equals(label)) {
return true;
}
}
return false;
}
private void checkIfLastAppIsKilled() {
if (mLastKilledApp != null && isKilledAppAlive(mLastKilledApp.getLabel())) {
final String packageName = mLastKilledApp.getPackageInfo().packageName;
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(getString(R.string.kill_app_dialog_text))
.setTitle(mLastKilledApp.getLabel());
builder.setPositiveButton(R.string.force_close, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User clicked OK button
startActivity(new Intent(
Settings.ACTION_APPLICATION_DETAILS_SETTINGS,
Uri.parse("package:" + packageName)
));
}
});
builder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User cancelled the dialog
dialog.cancel();
}
});
builder.create().show();
}
mLastKilledApp = null;
}
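    // Checks via AppOps whether the usage-access permission
    // (requested through showPermissionInfoDialog()) has been granted.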
@TargetApi(21)
private boolean hasSpecialPermission(final Context context) {
AppOpsManager appOps = (AppOpsManager) context
.getSystemService(Context.APP_OPS_SERVICE);
int mode = appOps.checkOpNoThrow("android:get_usage_stats",
android.os.Process.myUid(), context.getPackageName());
return mode == AppOpsManager.MODE_ALLOWED;
}
@TargetApi(21)
private void showPermissionInfoDialog() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(getString(R.string.package_usage_permission_text))
.setTitle(getString(R.string.package_usage_permission_title));
builder.setPositiveButton(R.string.open_settings, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User clicked OK button
startActivity(new Intent(Settings.ACTION_USAGE_ACCESS_SETTINGS));
}
});
builder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User cancelled the dialog
dialog.cancel();
}
});
builder.create().show();
}
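    // Loads the currently running processes off the UI thread and hands the
    // result back to onRefreshComplete() once finished.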
private class LoadRunningProcessesTask extends AsyncTask<Context, Void, List<Task>> {
@Override
protected List<Task> doInBackground(Context... params) {
TaskController taskController = new TaskController(params[0]);
return taskController.getRunningTasks();
}
@Override
protected void onPostExecute(List<Task> result) {
super.onPostExecute(result);
onRefreshComplete(result);
checkIfLastAppIsKilled();
}
}
} | hmatalonga/GreenHub | app/src/main/java/com/hmatalonga/greenhub/ui/TaskListActivity.java | Java | apache-2.0 | 25,536 |
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.ops.impl.accum;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.complex.IComplexNumber;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.Op;
/**
* Calculate the mean of the vector
*
* @author Adam Gibson
*/
public class Mean extends Sum {
public Mean() {
}
public Mean(INDArray x, INDArray y, INDArray z, int n) {
super(x, y, z, n);
}
public Mean(INDArray x, INDArray y, int n) {
super(x, y, n);
}
public Mean(INDArray x) {
super(x);
}
public Mean(INDArray x, INDArray y) {
super(x, y);
}
@Override
public String name() {
return "mean";
}
@Override
public Op opForDimension(int index, int dimension) {
INDArray xAlongDimension = x.vectorAlongDimension(index, dimension);
if (y() != null)
return new Mean(xAlongDimension, y.vectorAlongDimension(index, dimension), xAlongDimension.length());
else
return new Mean(x.vectorAlongDimension(index, dimension));
}
@Override
public Op opForDimension(int index, int... dimension) {
INDArray xAlongDimension = x.tensorAlongDimension(index, dimension);
if (y() != null)
return new Mean(xAlongDimension, y.tensorAlongDimension(index, dimension), xAlongDimension.length());
else
return new Mean(x.tensorAlongDimension(index, dimension));
}
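    // Mean reuses Sum's accumulation; the final result is the accumulated total
    // divided by the number of elements processed (n).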
@Override
public double getAndSetFinalResult(double accum){
double d = accum / n();
this.finalResult = d;
return d;
}
@Override
public float getAndSetFinalResult(float accum){
float f = accum / n();
this.finalResult = f;
return f;
}
@Override
public double calculateFinalResult(double accum, int n) {
return accum / n;
}
@Override
public float calculateFinalResult(float accum, int n) {
return accum / n;
}
@Override
public IComplexNumber getAndSetFinalResult(IComplexNumber accum){
finalResultComplex = accum.div(n());
return finalResultComplex;
}
}
| GeorgeMe/nd4j | nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/accum/Mean.java | Java | apache-2.0 | 2,853 |
from karld.loadump import dump_dicts_to_json_file
from karld.loadump import ensure_dir
from karld.loadump import ensure_file_path_dir
from karld.loadump import i_get_csv_data
from karld.loadump import is_file_csv
from karld.loadump import i_get_json_data
from karld.loadump import is_file_json
from karld.loadump import raw_line_reader
from karld.loadump import split_csv_file
from karld.loadump import split_file
from karld.loadump import split_file_output
from karld.loadump import split_file_output_csv
from karld.loadump import split_file_output_json
from karld.loadump import write_as_csv
from karld.loadump import write_as_json
| johnwlockwood/karl_data | karld/io.py | Python | apache-2.0 | 641 |