code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 3
942
| language
stringclasses 30
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
/*!
* Variables
*/
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-Light-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-Light-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-Light-webfont.svg#OpenSansLight') format('svg');
font-weight: 300;
font-style: normal;
}
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-LightItalic-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-LightItalic-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-LightItalic-webfont.svg#OpenSansLightItalic') format('svg');
font-weight: 300;
font-style: italic;
}
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-Regular-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-Regular-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-Regular-webfont.svg#OpenSansRegular') format('svg');
font-weight: normal;
font-style: normal;
}
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-Italic-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-Italic-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-Italic-webfont.svg#OpenSansItalic') format('svg');
font-weight: normal;
font-style: italic;
}
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-Semibold-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg');
font-weight: bold;
font-style: normal;
}
@font-face {
font-family: 'Open Sans';
src: url('/skins/neoclassic/fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), url('/skins/neoclassic/fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), url('/skins/neoclassic/fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg');
font-weight: bold;
font-style: italic;
}
/*Colores*/
.btn {
background-image: none;
color: #333333;
text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75);
background-color: #f1f1f1;
background-image: -moz-linear-gradient(top, #ffffff, #dddddd);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#dddddd));
background-image: -webkit-linear-gradient(top, #ffffff, #dddddd);
background-image: -o-linear-gradient(top, #ffffff, #dddddd);
background-image: linear-gradient(to bottom, #ffffff, #dddddd);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffdddddd', GradientType=0);
border-color: #dddddd #dddddd #b7b7b7;
border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
*background-color: #dddddd;
/* Darken IE7 buttons by default so they stand out more given they won't have borders */
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
filter: progid:DXImageTransform.Microsoft.gradient(startColorStr='#ffffff', EndColorStr='#dddddd');
border: 1px solid #ccc;
font-size: 10px;
font-weight: 400;
text-transform: capitalize;
text-decoration: none;
line-height: 14px;
color: #000;
}
.btn:hover,
.btn:focus,
.btn:active,
.btn.active,
.btn.disabled,
.btn[disabled] {
color: #333333;
background-color: #dddddd;
*background-color: #d0d0d0;
}
.btn:active,
.btn.active {
background-color: #c4c4c4 \9;
}
.btn:hover {
color: #333333;
text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75);
background-color: #ebebeb;
background-image: -moz-linear-gradient(top, #dddddd, #ffffff);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#dddddd), to(#ffffff));
background-image: -webkit-linear-gradient(top, #dddddd, #ffffff);
background-image: -o-linear-gradient(top, #dddddd, #ffffff);
background-image: linear-gradient(to bottom, #dddddd, #ffffff);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdddddd', endColorstr='#ffffffff', GradientType=0);
border-color: #ffffff #ffffff #d9d9d9;
border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
*background-color: #ffffff;
/* Darken IE7 buttons by default so they stand out more given they won't have borders */
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
filter: progid:DXImageTransform.Microsoft.gradient(startColorStr='#dddddd', EndColorStr='#ffffff');
border: 1px solid #aaa;
background-image: url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiA/Pgo8c3ZnIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgdmlld0JveD0iMCAwIDEgMSIgcHJlc2VydmVBc3BlY3RSYXRpbz0ibm9uZSI+CiAgPGxpbmVhckdyYWRpZW50IGlkPSJncmFkLXVjZ2ctZ2VuZXJhdGVkIiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSIgeDE9IjAlIiB5MT0iMCUiIHgyPSIwJSIgeTI9IjEwMCUiPgogICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3RvcC1jb2xvcj0iI2RkZGRkZCIgc3RvcC1vcGFjaXR5PSIxIi8+CiAgICA8c3RvcCBvZmZzZXQ9IjEwMCUiIHN0b3AtY29sb3I9IiNmZmZmZmYiIHN0b3Atb3BhY2l0eT0iMSIvPgogIDwvbGluZWFyR3JhZGllbnQ+CiAgPHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEiIGhlaWdodD0iMSIgZmlsbD0idXJsKCNncmFkLXVjZ2ctZ2VuZXJhdGVkKSIgLz4KPC9zdmc+);
}
.btn:hover:hover,
.btn:hover:focus,
.btn:hover:active,
.btn:hover.active,
.btn:hover.disabled,
.btn:hover[disabled] {
color: #333333;
background-color: #ffffff;
*background-color: #f2f2f2;
}
.btn:hover:active,
.btn:hover.active {
background-color: #e6e6e6 \9;
}
.btn:focus {
border: 1px solid #45626F;
outline: 0;
}
.btn:focus::-moz-focus-inner {
border: 1px solid transparent;
}
.btn-over {
color: #333333;
text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75);
background-color: #ebebeb;
background-image: -moz-linear-gradient(top, #dddddd, #ffffff);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#dddddd), to(#ffffff));
background-image: -webkit-linear-gradient(top, #dddddd, #ffffff);
background-image: -o-linear-gradient(top, #dddddd, #ffffff);
background-image: linear-gradient(to bottom, #dddddd, #ffffff);
background-repeat: repeat-x;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdddddd', endColorstr='#ffffffff', GradientType=0);
border-color: #ffffff #ffffff #d9d9d9;
border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
*background-color: #ffffff;
/* Darken IE7 buttons by default so they stand out more given they won't have borders */
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
filter: progid:DXImageTransform.Microsoft.gradient(startColorStr='#dddddd', EndColorStr='#ffffff');
border: 1px solid #aaa;
background-image: url(data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiA/Pgo8c3ZnIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgdmlld0JveD0iMCAwIDEgMSIgcHJlc2VydmVBc3BlY3RSYXRpbz0ibm9uZSI+CiAgPGxpbmVhckdyYWRpZW50IGlkPSJncmFkLXVjZ2ctZ2VuZXJhdGVkIiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSIgeDE9IjAlIiB5MT0iMCUiIHgyPSIwJSIgeTI9IjEwMCUiPgogICAgPHN0b3Agb2Zmc2V0PSIwJSIgc3RvcC1jb2xvcj0iI2RkZGRkZCIgc3RvcC1vcGFjaXR5PSIxIi8+CiAgICA8c3RvcCBvZmZzZXQ9IjEwMCUiIHN0b3AtY29sb3I9IiNmZmZmZmYiIHN0b3Atb3BhY2l0eT0iMSIvPgogIDwvbGluZWFyR3JhZGllbnQ+CiAgPHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEiIGhlaWdodD0iMSIgZmlsbD0idXJsKCNncmFkLXVjZ2ctZ2VuZXJhdGVkKSIgLz4KPC9zdmc+);
}
.btn-over:hover,
.btn-over:focus,
.btn-over:active,
.btn-over.active,
.btn-over.disabled,
.btn-over[disabled] {
color: #333333;
background-color: #ffffff;
*background-color: #f2f2f2;
}
.btn-over:active,
.btn-over.active {
background-color: #e6e6e6 \9;
}
.btn-focus {
border: 1px solid #45626F;
outline: 0;
}
.btn-focus::-moz-focus-inner {
border: 1px solid transparent;
}
.btn-no-button {
background: none;
filter: none;
border: 1px transparent solid;
}
.link {
color: #00a1e4;
text-decoration: none;
}
.link:hover {
color: #00597e;
}
.link-hover {
color: #00597e;
}
/*
botones del tinymce
.o2k7Skin .mceButton{
.btn();
}
*/body {
margin: 0px;
background-color: #FFFFFF !important;
color: #000000;
font: normal 8pt sans-serif, Tahoma, MiscFixed;
}
@media print {
a:link,
a:visited {
text-decoration: none;
color: black;
}
}
@media print {
@page {
margin: 10%;
}
blockquote,
pre {
page-break-inside: avoid;
}
}
.GridLink {
visibility: hidden !important;
}
.page-break {
page-break-before: always;
}
input[type=submit],
input[type=button] {
visibility: hidden !important;
}
input[type=file] {
visibility: hidden !important;
display: none !important;
overflow: hidden !important;
}
.FormRequiredTextMessage {
visibility: hidden !important;
}
.tableOption {
visibility: hidden !important;
}
.boxTop div.a,
.boxTop div.b,
.boxTop div.c {
visibility: hidden !important;
}
form.formDefault .FormButton {
visibility: hidden !important;
}
.Record {
visibility: hidden !important;
overflow: hidden !important;
display: none !important;
}
/**------------------------------------------**/
/* Box Top Model BEGIN */
.boxTop,
.boxTopBlue {
height: 9px;
padding-left: 8px;
padding-right: 8px;
position: relative;
overflow: hidden;
}
.boxTop div,
.boxTopBlue div {
background-color: #FFF;
}
.boxTop div.a,
.boxTop div.c,
.boxTopBlue div.a,
.boxTopBlue div.c {
position: absolute;
width: 9px;
height: 9px;
}
.boxTop div.a,
.boxTopBlue div.a {
left: 0px;
top: 0px;
background-image: url(../images/ftl.png);
background-color: transparent;
}
.boxTop div.c,
.boxTopBlue div.c {
top: 0px;
right: 0px;
background-image: url(../images/ftr.png);
background-color: transparent;
}
.boxTop div.b,
.boxTopBlue div.b {
width: 100%;
height: 9px;
border-top: 1px solid #DADADA;
background-color: #FFF;
}
/* Box Top Model END */
/* Box Bottom Model BEGIN */
.boxBottom,
.boxBottomBlue {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottom div.a,
.boxBottom div.c,
.boxBottomBlue div.a,
.boxBottomBlue div.c {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottom div.a,
.boxBottomBlue div.a {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottom div.c,
.boxBottomBlue div.c {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottom div.b,
.boxBottomBlue div.b {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
/* Box Bottom Model END */
/* Box Bottom Model Blue BEGIN */
.boxBottomBlue div.c {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottomBlue div.a {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
.boxBottomBlue div.b {
visibility: hidden;
overflow: hidden;
width: 0px;
height: 0px;
}
/* Box Bottom Model Blue END */
/* BoxPanel Bottom Model BEGIN */
.boxTopPanel {
height: 15px;
padding-left: 24px;
padding-right: 24px;
position: relative;
overflow: hidden;
}
.boxTopPanel div.a,
.boxTopPanel div.c {
position: absolute;
width: 25px;
height: 15px;
}
.boxTopPanel div.a {
left: 0px;
top: 0px;
background-image: url(../images/ftlL.png);
background-color: transparent;
}
.boxTopPanel div.c {
top: 0px;
right: 0px;
background-image: url(../images/ftrL.png);
background-color: transparent;
}
.boxTopPanel div.b {
width: 100%;
height: 16px;
background: transparent url(../images/ftc.png) repeat-x;
}
/* BoxPanel Bottom Model END */
/* XmlForm BEGIN */
/* form BEGIN */
form {
font: normal 11px sans-serif, MiscFixed;
color: #808080;
}
form table {
font: normal 11px sans-serif, MiscFixed;
color: #808080;
}
form.formDefault select {
font: normal 11px sans-serif, MiscFixed;
color: #000;
}
form.formDefault table {
font: normal 11px sans-serif, MiscFixed;
color: #808080;
line-height: 180%;
}
form.formDefault td {
padding: 2px;
}
form.formDefault .content {
background-color: #FFF;
border: 1px solid #CCC;
-moz-border-radius: 10px;
-webkit-border-radius: 10px;
}
form.formDefault input.FormField {
border: 1px solid #CCC;
background: #ffffff url(../images/input_back.gif) repeat-x;
color: #333333;
font: normal 11px Arial, Helvetica, sans-serif;
}
form.formDefault input.FormFieldInvalid {
border: 1px solid red;
}
form.formDefault input.FormFieldValid {
border: 1px solid green;
}
form.formDefault .FormLabel {
color: #808080;
text-align: right;
padding-right: 5px;
}
form.formDefault .FormFieldContent {
color: #000;
background-color: #EFEFEF;
padding-left: 5px;
}
form.formDefault textarea.FormTextArea {
border: 1px solid #CCC;
background: #ffffff url(../images/input_back.gif) repeat-x;
color: #333333;
font: normal 11px Arial, Helvetica, sans-serif;
overflow: auto;
}
form.formDefault textarea.FormTextPM {
  border: 1px solid #CCC;
  background: #ffffff url(../images/input_back.gif) repeat-x;
  color: #333333;
  /* Fixed: the original "font: normal 12 Courier New, monospace" was
     invalid shorthand (font-size had no unit), so browsers dropped the
     whole declaration and the monospace face was never applied. */
  font: normal 12px "Courier New", monospace;
  overflow: auto;
}
form.formDefault .FormTitle {
color: #000;
padding-left: 5px;
font-weight: bold;
background-color: #E0EFE6;
}
form.formDefault .FormSubTitle {
background-color: #D1DEDF !important;
color: black;
}
form.formDefault .FormButton {
text-align: center;
}
form.formDefault a {
text-decoration: none;
color: #006699;
}
form.formDefault a:hover {
color: orange;
}
form.formDefault td.withoutLabel,
form.formDefault td.withoutLabel table td {
padding: 0px;
height: 8px;
}
/* form END */
/* pagedTable BEGIN */
.pagedTableDefault {
border-left: 1px solid #DADADA;
border-right: 1px solid #DADADA;
background-color: #FFF;
padding-left: 5px;
padding-right: 5px;
}
.pagedTableDefault,
.pagedTableDefault table {
font: normal 11px sans-serif, MiscFixed;
color: #808080;
}
.pagedTableDefault,
.pagedTableDefault .headerContent .tableOption a {
color: #2078A8;
text-decoration: none;
}
.pagedTableDefault,
.pagedTableDefault .headerContent .tableOption a:hover {
color: orange;
}
.pagedTableDefault td {
padding: 0px;
}
.pagedTableDefault .pagedTable td {
padding: 5px;
}
.pagedTableDefault .pagedTable {
border: 1px solid #DFDFDF;
border-collapse: collapse;
color: #27373F;
}
.pagedTable td {
text-align: left;
}
.pagedTableDefault .pagedTable .Row1 {
background-color: #FFF;
}
.pagedTableDefault .pagedTable .Row2 {
background-color: #EEE;
}
tr.Selected {
background-color: #D8DDFF;
}
.pagedTableDefault .pagedTable .RowPointer {
background-color: #E0EAEF;
}
.pagedTableDefault .cellSelected1 {
font-weight: bold;
}
.pagedTableDefault .cellSelected2 {
font-weight: bold;
}
.pagedTableDefault .pagedTable a {
color: #FFF;
}
.pagedTableDefault .pagedTable a:hover {
color: orange;
}
.pagedTableDefault .pagedTable .pagedTableHeader {
border-bottom: 0px solid #DFDFDF;
background-color: #E0E9EF;
background-color: #6F7F75;
color: #5B5B5B;
font-weight: bold;
background-image: url(/js/maborak/core/images/silverBackgroundTableTitle.jpg);
background-repeat: repeat-x;
height: 26px;
padding: 0px;
overflow: hidden;
}
.pagedTableDefault .pagedTable .pagedTableHeader a {
text-decoration: none;
color: #5B5B5B;
padding-left: 5px;
font: normal 8pt Tahoma, sans-serif, MiscFixed;
}
.pagedTableDefault .pagedTable .pagedTableHeader a:hover {
color: orange;
}
/* Grid BEGIN */
div.pattern .content {
padding-left: 5px;
padding-right: 5px;
border-left: 1px solid #DADADA;
border-right: 1px solid #DADADA;
background-color: #FFF;
}
div.pattern .FormTitle {
font-weight: bold;
color: black;
background-color: #E0EFE6;
padding: 2px;
}
div.grid {
font: normal 11px sans-serif, MiscFixed;
padding-left: 10px;
padding-right: 10px;
margin-top: 7px;
}
div.grid .content {
padding-left: 5px;
padding-right: 5px;
border-left: 1px solid #DADADA;
border-right: 1px solid #DADADA;
background-color: #FFF;
width: 100%;
}
html > body div.grid .content {
width: auto;
}
div.grid .tableGrid {
width: 100%;
}
/* Grid End */
/* XmlForm END */
.tableGrid_view {
width: 100%;
border-top: 0px solid #DADADA;
border-bottom: 0px solid #DADADA;
border-left: 0px solid #DADADA;
border-right: 0px solid #DADADA;
padding: 0px;
}
table.tableGrid_view td {
border-top: 1px solid #DADADA;
border-bottom: 1px solid #DADADA;
border-left: 0px solid #DADADA;
border-right: 0px solid #DADADA;
padding: 0px;
}
| FusionProfessionalsAU/ProcessMaker | workflow/engine/skinEngine/neoclassic/css/printstyle.css | CSS | agpl-3.0 | 16,430 |
<%inherit file="base.html" />
<%def name="online_help_token()"><% return "outline" %></%def>
<%!
import json
import logging
from util.date_utils import get_default_time_display
from django.utils.translation import ugettext as _
from contentstore.utils import reverse_usage_url
%>
<%block name="title">${_("Course Outline")}</%block>
<%block name="bodyclass">is-signedin course view-outline</%block>
<%namespace name='static' file='static_content.html'/>
<%block name="requirejs">
require(["js/factories/outline"], function (OutlineFactory) {
OutlineFactory(${json.dumps(course_structure) | n}, ${json.dumps(initial_state) | n});
});
</%block>
<%block name="header_extras">
<link rel="stylesheet" type="text/css" href="${static.url('js/vendor/timepicker/jquery.timepicker.css')}" />
% for template_name in ['course-outline', 'xblock-string-field-editor', 'basic-modal', 'modal-button', 'course-outline-modal', 'due-date-editor', 'release-date-editor', 'grading-editor', 'publish-editor', 'staff-lock-editor']:
<script type="text/template" id="${template_name}-tpl">
<%static:include path="js/${template_name}.underscore" />
</script>
% endfor
</%block>
<%block name="page_alert">
%if notification_dismiss_url is not None:
<div class="wrapper wrapper-alert wrapper-alert-announcement is-shown">
<div class="alert announcement has-actions">
<i class="feedback-symbol icon-bullhorn"></i>
<div class="copy">
<h2 class="title title-3">${_("This course was created as a re-run. Some manual configuration is needed.")}</h2>
<p>${_("No course content is currently visible, and no students are enrolled. Be sure to review and reset all dates, including the Course Start Date; set up the course team; review course updates and other assets for dated material; and seed the discussions and wiki.")}</p>
</div>
<ul class="nav-actions">
<li class="action action-dismiss">
<a href="#" class="button dismiss-button" data-dismiss-link='${notification_dismiss_url}'>
<i class="icon icon-remove-sign"></i>
<span class="button-copy">${_("Dismiss")}</span>
</a>
</li>
</ul>
</div>
</div>
%endif
</%block>
<%block name="content">
<div class="wrapper-mast wrapper">
<header class="mast has-actions has-subtitle">
<h1 class="page-header">
<small class="subtitle">${_("Content")}</small>
<span class="sr">> </span>${_("Course Outline")}
</h1>
<nav class="nav-actions">
<h3 class="sr">${_("Page Actions")}</h3>
<ul>
<li class="nav-item">
<a href="#" class="button button-new" data-category="chapter" data-parent="${context_course.location | h}" data-default-name="${_('Section')}" title="${_('Click to add a new section')}">
<i class="icon-plus"></i>${_('New Section')}
</a>
</li>
<li class="nav-item">
<a href="#" class="button button-toggle button-toggle-expand-collapse collapse-all is-hidden">
<span class="collapse-all"><i class="icon-arrow-up"></i> <span class="label">${_("Collapse All Sections")}</span></span>
<span class="expand-all"><i class="icon-arrow-down"></i> <span class="label">${_("Expand All Sections")}</span></span>
</a>
</li>
<li class="nav-item">
<a href="${lms_link}" rel="external" class="button view-button view-live-button"
title="${_('Click to open the courseware in the LMS in a new tab')}">${_("View Live")}</a>
</li>
</ul>
</nav>
</header>
</div>
<div class="wrapper-content wrapper">
<section class="content">
<article class="content-primary" role="main">
<div class="course-status">
<div class="status-release">
<h2 class="status-release-label">${_("Course Start Date:")}</h2>
<p class="status-release-value">${course_release_date}</p>
<ul class="status-actions">
<li class="action-item action-edit">
<a href="${settings_url}" class="edit-button action-button" data-tooltip="${_("Edit Start Date")}">
<i class="icon-pencil"></i>
<span class="action-button-text sr">${_("Edit Start Date")}</span>
</a>
</li>
</ul>
</div>
</div>
<div class="wrapper-dnd">
<%
course_locator = context_course.location
%>
<h2 class="sr">${_("Course Outline")}</h2>
<article class="outline outline-complex outline-course" data-locator="${course_locator | h}" data-course-key="${course_locator.course_key | h}">
</article>
</div>
<div class="ui-loading">
<p><span class="spin"><i class="icon-refresh"></i></span> <span class="copy">${_("Loading...")}</span></p>
</div>
</article>
<aside class="content-supplementary" role="complementary">
<div class="bit">
<h3 class="title-3">${_("Creating your course organization")}</h3>
<p>${_("You add sections, subsections, and units directly in the outline.")}</p>
<p>${_("Create a section, then add subsections and units. Open a unit to add course components.")}</p>
<h3 class="title-3">${_("Reorganizing your course")}</h3>
<p>${_("Drag sections, subsections, and units to new locations in the outline.")}</p>
<h3 class="title-3">${_("Setting release dates and grading policies")}</h3>
<p>${_("Select the Configure icon for a section or subsection to set its release date. When you configure a subsection, you can also set the grading policy and due date.")}</p>
<h3 class="title-3">${_("Changing the content students see")}</h3>
<p>${_("To publish draft content, select the Publish icon for a section, subsection, or unit.")}</p>
<p>${_("To hide content from students, select the Configure icon for a section, subsection, or unit, then select {em_start}Hide from students{em_end}.").format(em_start='<strong>', em_end="</strong>")}</p>
</div>
<div class="bit external-help">
<a href="${get_online_help_info(online_help_token())['doc_url']}" target="_blank" class="button external-help-button">${_("Learn more about the course outline")}</a>
</div>
</aside>
</section>
</div>
</%block>
| wwj718/ANALYSE | cms/templates/course_outline.html | HTML | agpl-3.0 | 6,869 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import note
| addition-it-solutions/project-all | addons/note/__init__.py | Python | agpl-3.0 | 991 |
package BibTeX::Parser::Entry;
our $VERSION = '0.4';
use warnings;
use strict;
use Encode;
use charnames ':full';
use BibTeX::Parser::Author;
use BibTeX::Parser::Defly;
use BibTeX::Parser::EncodingTable;
=head1 NAME
BibTeX::Entry - Contains a single entry of a BibTeX document.
=head1 VERSION
version 0.4
=cut
=head1 SYNOPSIS
This class is a wrapper for a single BibTeX entry. It is usually created
by a BibTeX::Parser.
use BibTeX::Parser::Entry;
my $entry = BibTeX::Parser::Entry->new($type, $key, $parse_ok, \%fields);
if ($entry->parse_ok) {
my $type = $entry->type;
my $key = $enty->key;
print $entry->field("title");
my @authors = $entry->author;
my @editors = $entry->editor;
...
}
=head1 FUNCTIONS
=head2 new
Create new entry.
=cut
# new($class, $type, $key, $parse_ok, \%fields)
#
# Construct a new entry object. Internal bookkeeping keys are stored
# with a leading underscore so they cannot collide with real BibTeX
# field names.
sub new {
    my ( $class, $type, $key, $parse_ok, $fieldsref ) = @_;

    # Copy the supplied fields so the caller's hash is never aliased.
    my $self = defined $fieldsref ? {%$fieldsref} : {};

    $self->{_type}     = uc($type);
    $self->{_key}      = $key;
    $self->{_parse_ok} = $parse_ok;
    $self->{_raw}      = '';

    return bless $self, $class;
}
=head2 parse_ok
If the entry was correctly parsed, this method returns a true value, false otherwise.
=cut
# parse_ok([$flag])
#
# Get or set the flag recording whether this entry parsed cleanly.
sub parse_ok {
    my ( $self, @new ) = @_;
    $self->{_parse_ok} = $new[0] if @new;
    return $self->{_parse_ok};
}
=head2 error
Return the error message, if the entry could not be parsed or undef otherwise.
=cut
# error([$message])
#
# Get or set the parse error message. Setting a message also marks
# the entry as not parsed. For a successfully parsed entry the getter
# returns undef.
sub error {
    my ( $self, @new ) = @_;

    if (@new) {
        $self->{_error} = $new[0];
        # An error message implies the entry failed to parse.
        $self->parse_ok(0);
    }

    return undef if $self->parse_ok;
    return $self->{_error};
}
=head2 type
Get or set the type of the entry, eg. 'ARTICLE' or 'BOOK'. Return value is
always uppercase.
=cut
# type([$newtype])
#
# Get or set the entry type (e.g. 'ARTICLE'). Stored and returned in
# upper case.
sub type {
    my ( $self, @new ) = @_;

    return $self->{_type} unless @new;

    # Setter: normalize to upper case (the assignment's value is the
    # sub's return value, matching the historical behaviour).
    return $self->{_type} = uc( $new[0] );
}
=head2 key
Get or set the reference key of the entry.
=cut
# key([$newkey])
#
# Get or set the citation key of the entry.
sub key {
    my ( $self, @new ) = @_;

    return $self->{_key} unless @new;

    # Setter; the assigned value is also the return value.
    return $self->{_key} = $new[0];
}
=head2 field($name [, $value])
Get or set the contents of a field. The first parameter is the name of the
field, the second (optional) value is the new value. The third (optional) value
tells whether to strip (1) LaTeX commands from the field or not (0). Default is
cleaning enabled.
=cut
# field($name [, $value [, $clean]])
#
# Getter/setter for a single BibTeX field. Field names are handled
# case-insensitively (stored lower-cased). The getter returns the
# value with surrounding whitespace stripped, or undef when the field
# is absent (previously it emitted "uninitialized value" warnings and
# returned ''). The setter passes the value through _sanitize_field();
# author fields are forced to cleaning level 2, and repeated author
# fields are joined with " and ".
sub field {
    if ( scalar @_ == 2 ) {

        # get
        my ( $self, $field ) = @_;
        my $tmp = $self->{ lc($field) };

        # Absent field: return undef instead of warning in the
        # substitutions below.
        return undef unless defined $tmp;

        $tmp =~ s/^\s*//;
        $tmp =~ s/\s*$//;
        return $tmp;
    } else {
        my ( $self, $key, $value, $clean ) = @_;

        # Preserve historical behaviour: an omitted $clean disables
        # cleaning (0) rather than warning inside _sanitize_field().
        # NOTE(review): the POD claims cleaning is enabled by default;
        # confirm which behaviour is actually intended.
        $clean = 0 unless defined $clean;

        # different cleaning level for authors
        if ( lc($key) eq 'author' ) {
            $clean = 2;

            # sometimes we see BibTex files with multiple
            # author fields, we do a simple 'and' join
            # it is not correct BibTex style, but it is
            # necessary to read it
            if ( $self->{ lc($key) } ) {
                $value = $self->{ lc($key) } . ' and ' . $value;
            }
        }
        $self->{ lc($key) } = _sanitize_field( $value, $clean );
    }
}
# _handle_author_editor($type, $self [, @values])
#
# Shared backend for the author() and editor() accessors. $type is
# either "author" or "editor". With extra arguments it acts as a
# setter; without, as a getter that lazily parses the plain-text
# field into BibTeX::Parser::Author objects cached under "_$type".
sub _handle_author_editor {
my $type = shift;
my $self = shift;
if (@_) {
if ( @_ == 1 ) { #single string
# One string holding all names, separated by "and".
my @names = _split_author_field( $_[0] );
$self->{"_$type"} = [ map { new BibTeX::Parser::Author $_} @names ];
# Keep the plain-text field in sync with the object list.
$self->field( $type, join " and ", @{ $self->{"_$type"} } );
} else {
# A list of author objects and/or plain name strings.
$self->{"_$type"} = [];
foreach my $param (@_) {
# NOTE(review): this tests for class "BibTeX::Author", but the
# objects created below are "BibTeX::Parser::Author" -- confirm
# which class callers actually pass; as written, Parser::Author
# objects are re-wrapped by the else branch.
if ( ref $param eq "BibTeX::Author" ) {
push @{ $self->{"_$type"} }, $param;
} else {
push @{ $self->{"_$type"} }, new BibTeX::Parser::Author $param;
}
# Re-sync the plain-text field after every addition.
$self->field( $type, join " and ", @{ $self->{"_$type"} } );
}
}
} else {
# Getter: parse the raw field once and cache the result.
unless ( defined $self->{"_$type"} ) {
my @names = _split_author_field( $self->{$type} || "" );
$self->{"_$type"} = [ map { new BibTeX::Parser::Author $_} @names ];
}
return @{ $self->{"_$type"} };
}
}
# _split_author_field($field)
#
# Split an author field into different author names.
# Handles quoted names ({name}).
# _split_author_field($field)
#
# Split an author field into individual author names on the word
# "and", while respecting {brace} grouping: an "and" inside an
# unbalanced brace group does not split. Returns the list of names.
sub _split_author_field {
    my $field = shift;

    return () if !defined $field || $field eq '';

    # Real-world BibTeX data can be a mess; normalize common typos
    # around the "and" separator first.
    $field =~ s/\s+AND\s+/ and /g;
    $field =~ s/\sand\sand\s/ and /g;
    $field =~ s/\sand(?=[A-Z])/ and /g;
    $field =~ s/(?<=\.)and\s/ and /g;
    $field =~ s/(?<=\.)\sad\s(?=[A-Z])/ and /g;

    my @names;
    my @current;
    my $brace_depth = 0;

    for my $token ( split /\s+/, $field ) {
        # Track brace nesting across tokens.
        $brace_depth += ( $token =~ tr/\{// );
        $brace_depth -= ( $token =~ tr/\}// );

        if ( $token eq 'and' and $brace_depth == 0 ) {
            # Separator at top level: the buffered tokens form one name.
            push @names, join( " ", @current );
            @current = ();
        } else {
            push @current, $token;
        }
    }

    # Flush the final name, if any tokens remain.
    push @names, join( " ", @current ) if @current;

    return @names;
}
=head2 author([@authors])
Get or set the authors. Returns an array of L<BibTeX::Author|BibTeX::Author>
objects. The parameters can either be L<BibTeX::Author|BibTeX::Author> objects
or strings.
Note: You can also change the authors with $entry->field('author', $authors_string)
=cut
# author([@authors]) -- get or set the author list; delegates to the
# shared author/editor accessor backend.
sub author {
    return _handle_author_editor( 'author', @_ );
}
=head2 editor([@editors])
Get or set the editors. Returns an array of L<BibTeX::Author|BibTeX::Author>
objects. The parameters can either be L<BibTeX::Author|BibTeX::Author> objects
or strings.
Note: You can also change the authors with $entry->field('editor', $editors_string)
=cut
# editor([@editors]) -- get or set the editor list; delegates to the
# shared author/editor accessor backend.
sub editor {
    return _handle_author_editor( 'editor', @_ );
}
=head2 fieldlist()
Returns a list of all the fields used in this entry.
=cut
# fieldlist()
#
# Return the names of all BibTeX fields stored in this entry.
# Internal bookkeeping keys are prefixed with an underscore and are
# excluded.
sub fieldlist {
    my ($self) = @_;
    return grep { !m/^_/ } keys %$self;
}
=head2 has($fieldname)
Returns a true value if this entry has a value for $fieldname.
=cut
# has($fieldname)
#
# True if the entry holds a defined value for $fieldname.
# NOTE(review): unlike field(), the name is NOT lower-cased here, so
# callers must pass lower-case field names -- confirm this asymmetry
# is intended.
sub has {
    my ( $self, $name ) = @_;
    return defined $self->{$name};
}
# uchr($codepoint)
#
# Return the UTF-8 encoded byte string for a Unicode code point.
sub uchr {
    my ($code) = @_;
    return encode_utf8( chr($code) );
}
# _sanitize_field($value, $clean)
#
# Normalize a raw BibTeX field value. Umlauts and similar accented
# characters are always converted to UTF-8 via defly(). When
# $clean >= 1, LaTeX markup is additionally stripped or translated;
# level 1 also removes unescaped braces, while level 2 (used for
# author fields) keeps them so {brace} name grouping survives.
# NOTE(review): an undefined $clean triggers an "uninitialized value"
# warning in the ">= 1" comparison and disables cleaning, although the
# POD for field() says cleaning is the default -- confirm intent.
sub _sanitize_field {
my $value = shift;
my $clean = shift;
# We always convert Umlaute and Co. to the
# corresponding UTF-8 char
# This can do no harm as this is
# completely round trip safe
# the defly module does UTF-8 conversion
# for umlaute and Co.
$value = defly $value;
# If clean >= 1 then LaTeX code is stripped
if ( $clean >= 1 ) {
# remove various white space notations
$value =~ s/(\\\s|\\,|\\;|\\\!|\\quad|\\qquad)/ /g;
$value =~ s/(?<!\\)~/ /g;
$value =~ s/\\~\{\}/~/g;
$value =~ s/\\\././g;
# now we process math stuff
# (pattern and code-point lookup come from the EncodingTable module)
my $tmp1 = $BibTeX::Parser::EncodingTable::latex_math_symbols_string;
my %tmp2 = %BibTeX::Parser::EncodingTable::latex_math_symbols_table;
$value =~ s/($tmp1)/uchr(hex($tmp2{$1}))/gxe;
# following latex commands are replaced without any substitution
$value =~ s/\\textit//g;
$value =~ s/\\textbf//g;
$value =~ s/\\textsl//g;
$value =~ s/\\textsc//g;
$value =~ s/\\textsf//g;
$value =~ s/\\texttt//g;
$value =~ s/\\cal//g;
$value =~ s/\\textsubscript//g;
$value =~ s/\\textsuperscript//g;
$value =~ s/\\mbox//g;
$value =~ s/\\url//g;
$value =~ s/\\it//g;
$value =~ s/\\emph//g;
$value =~ s/\\em//g;
$value =~ s/\\tt//g;
# remove non-escaped braces
# exception: $clean > 1 (e.g. authors field)
$value =~ s/(?<!\\)\{//g if ( $clean == 1 );
$value =~ s/(?<!\\)\}//g if ( $clean == 1 );
# convert escaped braces to regular ones
$value =~ s/\\\{/\{/g;
$value =~ s/\\\}/\}/g;
# remove non-escaped dollar signs
$value =~ s/(?<!\\)\$//g;
# other LaTeX symbols
$value =~ s/\s+/ /g;
}
return $value;
}
=head2 raw_bibtex
Return raw BibTeX entry (if available).
=cut
# raw_bibtex([$raw])
#
# Get or set the raw BibTeX source text of this entry (empty string
# until the parser records it).
sub raw_bibtex {
    my ( $self, @new ) = @_;
    $self->{_raw} = $new[0] if @new;
    return $self->{_raw};
}
1; # End of BibTeX::Entry
| jondo/paperpile | plack/perl5/win32/site/lib/BibTeX/Parser/Entry.pm | Perl | agpl-3.0 | 8,198 |
<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<title>SlickGrid example: CompositeEditor</title>
<link rel="stylesheet" href="../slick.grid.css" type="text/css" media="screen" charset="utf-8" />
<link rel="stylesheet" href="../css/smoothness/jquery-ui-1.8.5.custom.css" type="text/css" media="screen" charset="utf-8" />
<link rel="stylesheet" href="examples.css" type="text/css" media="screen" charset="utf-8" />
<style>
.cell-title {
font-weight: bold;
}
.cell-effort-driven {
text-align: center;
}
.item-details-form {
z-index: 10000;
display:inline-block;
border:1px solid black;
margin:8px;
padding:10px;
background: #efefef;
-moz-box-shadow: 0px 0px 15px black;
-webkit-box-shadow: 0px 0px 15px black;
box-shadow: 0px 0px 15px black;
position:absolute;
top: 10px;
left: 150px;
}
.item-details-form-buttons {
float: right;
}
.item-details-row {
}
.item-details-label {
margin-left:10px;
margin-top:20px;
display:block;
font-weight:bold;
}
.item-details-editor-container {
width:200px;
height:20px;
border:1px solid silver;
background:white;
display:block;
margin:10px;
margin-top:4px;
padding:0;
padding-left:4px;
padding-right:4px;
}
</style>
</head>
<body>
<div style="position:relative">
<div style="width:600px;">
<div id="myGrid" style="width:100%;height:500px;"></div>
</div>
<div class="options-panel">
<h2>Demonstrates:</h2>
<ul>
<li>using a CompositeEditor to implement detached item edit form</li>
</ul>
<h2>Options:</h2>
<button onclick="openDetails()">Open Item Edit for active row</button>
</div>
</div>
<script id="itemDetailsTemplate" type="text/x-jquery-tmpl">
<div class='item-details-form'>
{{each columns}}
<div class='item-details-label'>
${name}
</div>
<div class='item-details-editor-container' data-editorid='${id}'></div>
{{/each}}
<hr/>
<div class='item-details-form-buttons'>
<button data-action='save'>Save</button>
<button data-action='cancel'>Cancel</button>
</div>
</div>
</script>
<script src="../lib/firebugx.js"></script>
<script src="../lib/jquery-1.4.3.min.js"></script>
<script src="../lib/jquery-ui-1.8.5.custom.min.js"></script>
<script src="../lib/jquery.event.drag-2.0.min.js"></script>
<script src="http://ajax.microsoft.com/ajax/jquery.templates/beta1/jquery.tmpl.min.js"></script>
<script src="../slick.core.js"></script>
<script src="../plugins/slick.cellrangeselector.js"></script>
<script src="../plugins/slick.cellselectionmodel.js"></script>
<script src="../slick.editors.js"></script>
<script src="../slick.grid.js"></script>
<script src="slick.compositeeditor.js"></script>
<script>
// Validator used by editable grid columns: a value is valid when it is
// non-null and has a non-zero length. Returns the {valid, msg} shape
// expected by SlickGrid editors.
// NOTE: the previous `value == undefined` check was dead code — loose
// equality `value == null` already matches undefined.
function requiredFieldValidator(value) {
  if (value == null || !value.length) {
    return {valid: false, msg: "This is a required field"};
  }
  return {valid: true, msg: null};
}
var grid;
var data = [];
var columns = [
{id:"title", name:"Title", field:"title", width:120, cssClass:"cell-title", editor:TextCellEditor, validator:requiredFieldValidator},
{id:"desc", name:"Description", field:"description", width:100, editor:TextCellEditor},
{id:"duration", name:"Duration", field:"duration", editor:TextCellEditor},
{id:"%", name:"% Complete", field:"percentComplete", width:80, resizable:false, formatter:GraphicalPercentCompleteCellFormatter, editor:PercentCompleteCellEditor},
{id:"start", name:"Start", field:"start", minWidth:60, editor:DateCellEditor},
{id:"finish", name:"Finish", field:"finish", minWidth:60, editor:DateCellEditor},
{id:"effort-driven", name:"Effort Driven", width:80, minWidth:20, maxWidth:80, cssClass:"cell-effort-driven", field:"effortDriven", formatter:BoolCellFormatter, editor:YesNoCheckboxCellEditor}
];
var options = {
editable: true,
enableAddRow: true,
enableCellNavigation: true,
asyncEditorLoading: false,
autoEdit: false
};
// Opens a detached edit form for the grid's active row, backed by a
// Slick.CompositeEditor so all column editors commit/cancel as one unit.
// BUG FIX: the previous version first built a throwaway
// $("<div class='item-details-form'></div>") and then immediately
// overwrote the variable with the rendered template — dead code removed.
function openDetails() {
  // Commit any in-progress edit first; abort if validation rejects it.
  if (grid.getEditorLock().isActive() && !grid.getEditorLock().commitCurrentEdit()) {
    return;
  }
  // Render the jQuery template with the active item and column list.
  var $modal = $("#itemDetailsTemplate")
      .tmpl({
        context: grid.getDataItem(grid.getActiveCell().row),
        columns: columns
      })
      .appendTo("body");
  // Keyboard shortcuts: ENTER commits, ESCAPE cancels the composite edit.
  $modal.keydown(function(e) {
    if (e.which == $.ui.keyCode.ENTER) {
      grid.getEditController().commitCurrentEdit();
      e.stopPropagation();
      e.preventDefault();
    }
    else if (e.which == $.ui.keyCode.ESCAPE) {
      grid.getEditController().cancelCurrentEdit();
      e.stopPropagation();
      e.preventDefault();
    }
  });
  $modal.find("[data-action=save]").click(function() {
    grid.getEditController().commitCurrentEdit();
  });
  $modal.find("[data-action=cancel]").click(function() {
    grid.getEditController().cancelCurrentEdit();
  });
  // One editor container per column, matched by the data-editorid attribute.
  var containers = $.map(columns, function(c) { return $modal.find("[data-editorid=" + c.id + "]"); });
  var compositeEditor = new Slick.CompositeEditor(
      columns,
      containers,
      {
        destroy: function () { $modal.remove(); }
      }
  );
  grid.editActiveCell(compositeEditor);
}
$(function()
{
// Build 500 sample task rows for the demo grid.
for (var i=0; i<500; i++) {
var d = (data[i] = {});
d["title"] = "Task " + i;
d["description"] = "This is a sample task description.\n It can be multiline";
d["duration"] = "5 days";
d["percentComplete"] = Math.round(Math.random() * 100);
d["start"] = "01/01/2009";
d["finish"] = "01/05/2009";
d["effortDriven"] = (i % 5 == 0);
}
grid = new Slick.Grid("#myGrid", data, columns, options);
// Append rows created through the grid's "new row" placeholder.
grid.onAddNewRow.subscribe(function(e, args) {
var item = args.item;
var column = args.column;
grid.invalidateRow(data.length);
data.push(item);
grid.updateRowCount();
grid.render();
});
grid.onValidationError.subscribe(function(e, args) {
// handle validation errors originating from the CompositeEditor
if (args.editor && (args.editor instanceof Slick.CompositeEditor)) {
var err;
// Walk the error list backwards and flash each offending editor
// container red (stop() clears any in-flight highlight first).
var idx = args.validationResults.errors.length;
while (idx--) {
err = args.validationResults.errors[idx];
$(err.container).stop(true,true).effect("highlight", {color:"red"});
}
}
});
// Pre-select the first cell so "Open Item Edit for active row" works
// immediately.
grid.setActiveCell(0, 0);
})
</script>
</body>
</html>
| airsim/tvlsim | tvlsim/ui/browser/js/mylibs/SlickGrid/examples/example-composite-editor-item-details.html | HTML | lgpl-2.1 | 7,963 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2007, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
//
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
//
// Arjuna Technologies Ltd.,
// Newcastle upon Tyne,
// Tyne and Wear,
// UK.
//
package org.jboss.jbossts.qa.CrashRecovery02Clients1;
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Client05a.java,v 1.2 2003/06/26 11:43:18 rbegg Exp $
*/
/*
* Try to get around the differences between Ansi CPP and
* K&R cpp with concatenation.
*/
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Client05a.java,v 1.2 2003/06/26 11:43:18 rbegg Exp $
*/
import org.jboss.jbossts.qa.CrashRecovery02.*;
import org.jboss.jbossts.qa.Utils.OAInterface;
import org.jboss.jbossts.qa.Utils.ORBInterface;
import org.jboss.jbossts.qa.Utils.ServerIORStore;
import org.jboss.jbossts.qa.Utils.CrashRecoveryDelays;
public class Client05a
{
	/**
	 * QA crash-recovery client (CrashRecovery02, scenario 05a).
	 *
	 * <p>Connects to an {@code AfterCrashService} whose IOR was stored by the
	 * server under the name given as the last command-line argument, performs
	 * one operation, waits for replay completion and then verifies that the
	 * resource trace recorded only a rollback. Prints {@code Passed} or
	 * {@code Failed} on stdout for the test harness.</p>
	 *
	 * @param args ORB arguments; the last entry is the stored server IOR name
	 */
	public static void main(String[] args)
	{
		try
		{
			ORBInterface.initORB(args, null);
			OAInterface.initOA();

			String serviceIOR = ServerIORStore.loadIOR(args[args.length - 1]);
			AfterCrashService service = AfterCrashServiceHelper.narrow(ORBInterface.orb().string_to_object(serviceIOR));

			// After the crash, only a "rolled back" report is acceptable.
			CheckBehavior[] checkBehaviors = new CheckBehavior[1];
			checkBehaviors[0] = new CheckBehavior();
			checkBehaviors[0].allow_done = false;
			checkBehaviors[0].allow_returned_prepared = false;
			checkBehaviors[0].allow_returned_committing = false;
			checkBehaviors[0].allow_returned_committed = false;
			checkBehaviors[0].allow_returned_rolledback = true;
			checkBehaviors[0].allow_raised_not_prepared = false;

			service.setup_oper(1);
			// NOTE: the old "boolean correct = true;" initialisation was a dead
			// store (immediately overwritten) and has been removed.
			boolean correct = service.check_oper(checkBehaviors) && service.is_correct();

			CrashRecoveryDelays.awaitReplayCompletionCR02();

			// The single registered resource must have traced a rollback.
			ResourceTrace resourceTrace = service.get_resource_trace(0);
			correct = correct && (resourceTrace == ResourceTrace.ResourceTraceRollback);

			System.out.println(correct ? "Passed" : "Failed");
		}
		catch (Exception exception)
		{
			System.out.println("Failed");
			System.err.println("Client05a.main: " + exception);
			exception.printStackTrace(System.err);
		}

		// Shut down the OA/ORB regardless of the test outcome.
		try
		{
			OAInterface.shutdownOA();
			ORBInterface.shutdownORB();
		}
		catch (Exception exception)
		{
			System.err.println("Client05a.main: " + exception);
			exception.printStackTrace(System.err);
		}
	}
}
| gytis/narayana | qa/tests/src/org/jboss/jbossts/qa/CrashRecovery02Clients1/Client05a.java | Java | lgpl-2.1 | 3,535 |
#include "clar_libgit2.h"
#include "notes.h"
static git_repository *_repo;
static git_note *_note;
static git_signature *_sig;
static git_config *_cfg;
/* Fixture setup run before each test in this suite: copy the canned
 * "testrepo.git" fixture into a sandbox and open it into _repo. */
void test_notes_notesref__initialize(void)
{
cl_fixture_sandbox("testrepo.git");
cl_git_pass(git_repository_open(&_repo, "testrepo.git"));
}
/* Fixture teardown: release every global acquired during a test and reset
 * each pointer to NULL so a partially-failed test cannot cause a double
 * free on the next run, then discard the sandboxed repository. */
void test_notes_notesref__cleanup(void)
{
git_note_free(_note);
_note = NULL;
git_signature_free(_sig);
_sig = NULL;
git_config_free(_cfg);
_cfg = NULL;
git_repository_free(_repo);
_repo = NULL;
cl_fixture_cleanup("testrepo.git");
}
/* Verify that the core.notesRef config key controls the default notes ref:
 * a note created with a NULL ref must land on the configured ref, be
 * readable both implicitly (NULL ref) and explicitly, and once the config
 * entry is deleted the default must fall back to GIT_NOTES_DEFAULT_REF. */
void test_notes_notesref__config_corenotesref(void)
{
git_oid oid, note_oid;
const char *default_ref;
cl_git_pass(git_signature_now(&_sig, "alice", "[email protected]"));
/* Target object the note is attached to (a known commit in the fixture). */
cl_git_pass(git_oid_fromstr(&oid, "8496071c1b46c854b31185ea97743be6a8774479"));
cl_git_pass(git_repository_config(&_cfg, _repo));
cl_git_pass(git_config_set_string(_cfg, "core.notesRef", "refs/notes/mydefaultnotesref"));
/* NULL notes ref => the configured core.notesRef must be used. */
cl_git_pass(git_note_create(&note_oid, _repo, _sig, _sig, NULL, &oid, "test123test\n", 0));
cl_git_pass(git_note_read(&_note, _repo, NULL, &oid));
cl_assert_equal_s("test123test\n", git_note_message(_note));
cl_assert(!git_oid_cmp(git_note_id(_note), &note_oid));
git_note_free(_note);
/* The same note must also be reachable via the explicit ref name. */
cl_git_pass(git_note_read(&_note, _repo, "refs/notes/mydefaultnotesref", &oid));
cl_assert_equal_s("test123test\n", git_note_message(_note));
cl_assert(!git_oid_cmp(git_note_id(_note), &note_oid));
cl_git_pass(git_note_default_ref(&default_ref, _repo));
cl_assert_equal_s("refs/notes/mydefaultnotesref", default_ref);
/* Removing the config entry restores the library default. */
cl_git_pass(git_config_delete_entry(_cfg, "core.notesRef"));
cl_git_pass(git_note_default_ref(&default_ref, _repo));
cl_assert_equal_s(GIT_NOTES_DEFAULT_REF, default_ref);
}
| Acidburn0zzz/libgit2 | tests/notes/notesref.c | C | lgpl-2.1 | 1,745 |
\hypertarget{namespaceCqrs_1_1Ninject_1_1InProcess_1_1EventBus}{}\section{Cqrs.\+Ninject.\+In\+Process.\+Event\+Bus Namespace Reference}
\label{namespaceCqrs_1_1Ninject_1_1InProcess_1_1EventBus}\index{Cqrs.\+Ninject.\+In\+Process.\+Event\+Bus@{Cqrs.\+Ninject.\+In\+Process.\+Event\+Bus}}
\subsection*{Namespaces}
\begin{DoxyCompactItemize}
\item
namespace \hyperlink{namespaceCqrs_1_1Ninject_1_1InProcess_1_1EventBus_1_1Configuration}{Configuration}
\end{DoxyCompactItemize}
| Chinchilla-Software-Com/CQRS | wiki/docs/2.4/latex/namespaceCqrs_1_1Ninject_1_1InProcess_1_1EventBus.tex | TeX | lgpl-2.1 | 476 |
/***************************************************************************
* Copyright (c) Konstantinos Poulios ([email protected]) 2011 *
* *
* This file is part of the FreeCAD CAx development system. *
* *
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU Library General Public *
* License as published by the Free Software Foundation; either *
* version 2 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU Library General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this library; see the file COPYING.LIB. If not, *
* write to the Free Software Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307, USA *
* *
***************************************************************************/
#include <iostream>
#include <algorithm>
#include <cfloat>
#include <limits>
#include "GCS.h"
#include "qp_eq.h"
// NOTE: In CMakeList.txt -DEIGEN_NO_DEBUG is set (it does not work with a define here), to solve this:
// this is needed to fix this SparseQR crash http://forum.freecadweb.org/viewtopic.php?f=10&t=11341&p=92146#p92146,
// until Eigen library fixes its own problem with the assertion (definitely not solved in 3.2.0 branch)
#define EIGEN_VERSION (EIGEN_WORLD_VERSION * 10000 \
+ EIGEN_MAJOR_VERSION * 100 \
+ EIGEN_MINOR_VERSION)
#if EIGEN_VERSION >= 30202
#define EIGEN_SPARSEQR_COMPATIBLE
#endif
//#undef EIGEN_SPARSEQR_COMPATIBLE
#include <Eigen/QR>
#ifdef EIGEN_SPARSEQR_COMPATIBLE
#include <Eigen/Sparse>
#include <Eigen/OrderingMethods>
#endif
#undef _GCS_DEBUG
#undef _GCS_DEBUG_SOLVER_JACOBIAN_QR_DECOMPOSITION_TRIANGULAR_MATRIX
#include <FCConfig.h>
#include <Base/Console.h>
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/connected_components.hpp>
// http://forum.freecadweb.org/viewtopic.php?f=3&t=4651&start=40
namespace Eigen {
typedef Matrix<double,-1,-1,0,-1,-1> MatrixdType;
// Explicit specialization of Eigen's full-pivoting LU decomposition for
// dynamic double matrices. It exists to work around rank-detection problems
// in the stock implementation (see the forum link above); the key local
// choice is the pivot cutoff derived from the largest coefficient of the
// ORIGINAL matrix (k == 0 corner scan) scaled by machine epsilon.
// NOTE(review): this mirrors Eigen internals for a specific Eigen version —
// verify against the bundled Eigen release before upgrading the library.
template<>
FullPivLU<MatrixdType>& FullPivLU<MatrixdType>::compute(const MatrixdType& matrix)
{
m_isInitialized = true;
m_lu = matrix;
const Index size = matrix.diagonalSize();
const Index rows = matrix.rows();
const Index cols = matrix.cols();
// will store the transpositions, before we accumulate them at the end.
// can't accumulate on-the-fly because that will be done in reverse order for the rows.
m_rowsTranspositions.resize(matrix.rows());
m_colsTranspositions.resize(matrix.cols());
Index number_of_transpositions = 0; // number of NONTRIVIAL transpositions, i.e. m_rowsTranspositions[i]!=i
m_nonzero_pivots = size; // the generic case is that in which all pivots are nonzero (invertible case)
m_maxpivot = RealScalar(0);
RealScalar cutoff(0);
for(Index k = 0; k < size; ++k)
{
// First, we need to find the pivot.
// biggest coefficient in the remaining bottom-right corner (starting at row k, col k)
Index row_of_biggest_in_corner, col_of_biggest_in_corner;
RealScalar biggest_in_corner;
biggest_in_corner = m_lu.bottomRightCorner(rows-k, cols-k)
.cwiseAbs()
.maxCoeff(&row_of_biggest_in_corner, &col_of_biggest_in_corner);
row_of_biggest_in_corner += k; // correct the values! since they were computed in the corner,
col_of_biggest_in_corner += k; // need to add k to them.
// when k==0, biggest_in_corner is the biggest coeff absolute value in the original matrix
if(k == 0) cutoff = biggest_in_corner * NumTraits<Scalar>::epsilon();
// if the pivot (hence the corner) is "zero", terminate to avoid generating nan/inf values.
// Notice that using an exact comparison (biggest_in_corner==0) here, as Golub-van Loan do in
// their pseudo-code, results in numerical instability! The cutoff here has been validated
// by running the unit test 'lu' with many repetitions.
if(biggest_in_corner < cutoff)
{
// before exiting, make sure to initialize the still uninitialized transpositions
// in a sane state without destroying what we already have.
m_nonzero_pivots = k;
for(Index i = k; i < size; ++i)
{
m_rowsTranspositions.coeffRef(i) = i;
m_colsTranspositions.coeffRef(i) = i;
}
break;
}
if(biggest_in_corner > m_maxpivot) m_maxpivot = biggest_in_corner;
// Now that we've found the pivot, we need to apply the row/col swaps to
// bring it to the location (k,k).
m_rowsTranspositions.coeffRef(k) = row_of_biggest_in_corner;
m_colsTranspositions.coeffRef(k) = col_of_biggest_in_corner;
if(k != row_of_biggest_in_corner) {
m_lu.row(k).swap(m_lu.row(row_of_biggest_in_corner));
++number_of_transpositions;
}
if(k != col_of_biggest_in_corner) {
m_lu.col(k).swap(m_lu.col(col_of_biggest_in_corner));
++number_of_transpositions;
}
// Now that the pivot is at the right location, we update the remaining
// bottom-right corner by Gaussian elimination.
if(k<rows-1)
m_lu.col(k).tail(rows-k-1) /= m_lu.coeff(k,k);
if(k<size-1)
m_lu.block(k+1,k+1,rows-k-1,cols-k-1).noalias() -= m_lu.col(k).tail(rows-k-1) * m_lu.row(k).tail(cols-k-1);
}
// the main loop is over, we still have to accumulate the transpositions to find the
// permutations P and Q
m_p.setIdentity(rows);
for(Index k = size-1; k >= 0; --k)
m_p.applyTranspositionOnTheRight(k, m_rowsTranspositions.coeff(k));
m_q.setIdentity(cols);
for(Index k = 0; k < size; ++k)
m_q.applyTranspositionOnTheRight(k, m_colsTranspositions.coeff(k));
// Determinant sign of the permutation: +1 for an even number of swaps.
m_det_pq = (number_of_transpositions%2) ? -1 : 1;
return *this;
}
} // Eigen
namespace GCS
{
typedef boost::adjacency_list <boost::vecS, boost::vecS, boost::undirectedS> Graph;
///////////////////////////////////////
// Solver
///////////////////////////////////////
// System
// Default-construct an empty solver System: no parameters, no constraints,
// no cached sub-systems, and solver tunables set to their library defaults
// (max iterations, convergence thresholds, Levenberg-Marquardt and DogLeg
// parameters, QR pivot threshold). EigenSparseQR is the default QR backend.
System::System()
: plist(0), clist(0),
c2p(), p2c(),
subSystems(0), subSystemsAux(0),
reference(0),
hasUnknowns(false), hasDiagnosis(false), isInit(false),
maxIter(100), maxIterRedundant(100),
sketchSizeMultiplier(true), sketchSizeMultiplierRedundant(true),
convergence(1e-10), convergenceRedundant(1e-10),
qrAlgorithm(EigenSparseQR), debugMode(Minimal),
LM_eps(1E-10), LM_eps1(1E-80), LM_tau(1E-3),
DL_tolg(1E-80), DL_tolx(1E-80), DL_tolf(1E-10),
LM_epsRedundant(1E-10), LM_eps1Redundant(1E-80), LM_tauRedundant(1E-3),
DL_tolgRedundant(1E-80), DL_tolxRedundant(1E-80), DL_tolfRedundant(1E-10),
qrpivotThreshold(1E-13)
{
// currently Eigen only supports multithreading for multiplications
// There is no appreciable gain from using more threads
#ifdef EIGEN_SPARSEQR_COMPATIBLE
Eigen::setNbThreads(1);
#endif
}
/*DeepSOIC: seriously outdated, needs redesign
System::System(std::vector<Constraint *> clist_)
: plist(0),
c2p(), p2c(),
subSystems(0), subSystemsAux(0),
reference(0),
hasUnknowns(false), hasDiagnosis(false), isInit(false)
{
// create own (shallow) copy of constraints
for (std::vector<Constraint *>::iterator constr=clist_.begin();
constr != clist_.end(); ++constr) {
Constraint *newconstr = 0;
switch ((*constr)->getTypeId()) {
case Equal: {
ConstraintEqual *oldconstr = static_cast<ConstraintEqual *>(*constr);
newconstr = new ConstraintEqual(*oldconstr);
break;
}
case Difference: {
ConstraintDifference *oldconstr = static_cast<ConstraintDifference *>(*constr);
newconstr = new ConstraintDifference(*oldconstr);
break;
}
case P2PDistance: {
ConstraintP2PDistance *oldconstr = static_cast<ConstraintP2PDistance *>(*constr);
newconstr = new ConstraintP2PDistance(*oldconstr);
break;
}
case P2PAngle: {
ConstraintP2PAngle *oldconstr = static_cast<ConstraintP2PAngle *>(*constr);
newconstr = new ConstraintP2PAngle(*oldconstr);
break;
}
case P2LDistance: {
ConstraintP2LDistance *oldconstr = static_cast<ConstraintP2LDistance *>(*constr);
newconstr = new ConstraintP2LDistance(*oldconstr);
break;
}
case PointOnLine: {
ConstraintPointOnLine *oldconstr = static_cast<ConstraintPointOnLine *>(*constr);
newconstr = new ConstraintPointOnLine(*oldconstr);
break;
}
case Parallel: {
ConstraintParallel *oldconstr = static_cast<ConstraintParallel *>(*constr);
newconstr = new ConstraintParallel(*oldconstr);
break;
}
case Perpendicular: {
ConstraintPerpendicular *oldconstr = static_cast<ConstraintPerpendicular *>(*constr);
newconstr = new ConstraintPerpendicular(*oldconstr);
break;
}
case L2LAngle: {
ConstraintL2LAngle *oldconstr = static_cast<ConstraintL2LAngle *>(*constr);
newconstr = new ConstraintL2LAngle(*oldconstr);
break;
}
case MidpointOnLine: {
ConstraintMidpointOnLine *oldconstr = static_cast<ConstraintMidpointOnLine *>(*constr);
newconstr = new ConstraintMidpointOnLine(*oldconstr);
break;
}
case None:
break;
}
if (newconstr)
addConstraint(newconstr);
}
}
*/
// Destructor: delegate to clear(), which releases owned constraints and
// resets all bookkeeping containers.
System::~System()
{
clear();
}
// Reset the System to its empty state: drop parameters, diagnosis results,
// cached sub-systems and all constraints. free(clist) is the System::free
// overload for constraint vectors (not the C library free) — it is
// expected to delete the owned Constraint objects.
void System::clear()
{
plist.clear();
pIndex.clear();
hasUnknowns = false;
hasDiagnosis = false;
redundant.clear();
conflictingTags.clear();
redundantTags.clear();
reference.clear();
clearSubSystems();
free(clist);
c2p.clear();
p2c.clear();
}
void System::clearByTag(int tagId)
{
std::vector<Constraint *> constrvec;
for (std::vector<Constraint *>::const_iterator
constr=clist.begin(); constr != clist.end(); ++constr) {
if ((*constr)->getTag() == tagId)
constrvec.push_back(*constr);
}
for (std::vector<Constraint *>::const_iterator
constr=constrvec.begin(); constr != constrvec.end(); ++constr) {
removeConstraint(*constr);
}
}
// Register a constraint (taking ownership) and index its parameters in the
// constraint<->parameter maps. Returns the constraint's index in clist.
// Negatively tagged constraints are ignored by the diagnosis, so only a
// non-negative tag invalidates the cached diagnosis.
int System::addConstraint(Constraint *constr)
{
isInit = false;
if (constr->getTag() >= 0) // negatively tagged constraints have no impact
hasDiagnosis = false; // on the diagnosis
clist.push_back(constr);
VEC_pD constr_params = constr->params();
for (VEC_pD::const_iterator param=constr_params.begin();
param != constr_params.end(); ++param) {
// jacobi.set(constr, *param, 0.);
c2p[constr].push_back(*param);
p2c[*param].push_back(constr);
}
return clist.size()-1;
}
// Unregister a single constraint: remove it from clist, from every
// parameter's back-reference list and from the c2p map, invalidating the
// diagnosis and cached sub-systems. Finally free() (the System overload,
// not the C function) releases the constraint object itself.
// Unknown constraints are ignored silently.
void System::removeConstraint(Constraint *constr)
{
std::vector<Constraint *>::iterator it;
it = std::find(clist.begin(), clist.end(), constr);
if (it == clist.end())
return;
clist.erase(it);
if (constr->getTag() >= 0)
hasDiagnosis = false;
clearSubSystems();
// Drop the p2c back-references for every parameter this constraint used.
VEC_pD constr_params = c2p[constr];
for (VEC_pD::const_iterator param=constr_params.begin();
param != constr_params.end(); ++param) {
std::vector<Constraint *> &constraints = p2c[*param];
it = std::find(constraints.begin(), constraints.end(), constr);
constraints.erase(it);
}
c2p.erase(constr);
std::vector<Constraint *> constrvec;
constrvec.push_back(constr);
free(constrvec);
}
// basic constraints
// --- basic constraint factories -------------------------------------------
// Each helper allocates one concrete Constraint, stamps it with the
// caller's tagId and hands ownership to the System via addConstraint(),
// returning the new constraint's index in clist.
int System::addConstraintEqual(double *param1, double *param2, int tagId)
{
Constraint *constr = new ConstraintEqual(param1, param2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintDifference(double *param1, double *param2,
double *difference, int tagId)
{
Constraint *constr = new ConstraintDifference(param1, param2, difference);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintP2PDistance(Point &p1, Point &p2, double *distance, int tagId)
{
Constraint *constr = new ConstraintP2PDistance(p1, p2, distance);
constr->setTag(tagId);
return addConstraint(constr);
}
// incrAngle is a constant offset added to *angle by the constraint.
int System::addConstraintP2PAngle(Point &p1, Point &p2, double *angle,
double incrAngle, int tagId)
{
Constraint *constr = new ConstraintP2PAngle(p1, p2, angle, incrAngle);
constr->setTag(tagId);
return addConstraint(constr);
}
// Convenience overload with incrAngle = 0.
// BUG FIX: the previous version forwarded without tagId, so the constraint
// was created with the default tag instead of the caller's tagId (breaking
// clearByTag()/diagnosis for this constraint).
int System::addConstraintP2PAngle(Point &p1, Point &p2, double *angle, int tagId)
{
    return addConstraintP2PAngle(p1, p2, angle, 0., tagId);
}
// More basic factories: same pattern — construct, tag, register.
int System::addConstraintP2LDistance(Point &p, Line &l, double *distance, int tagId)
{
Constraint *constr = new ConstraintP2LDistance(p, l, distance);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintPointOnLine(Point &p, Line &l, int tagId)
{
Constraint *constr = new ConstraintPointOnLine(p, l);
constr->setTag(tagId);
return addConstraint(constr);
}
// Overload taking the line's two endpoints directly.
int System::addConstraintPointOnLine(Point &p, Point &lp1, Point &lp2, int tagId)
{
Constraint *constr = new ConstraintPointOnLine(p, lp1, lp2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintPointOnPerpBisector(Point &p, Line &l, int tagId)
{
Constraint *constr = new ConstraintPointOnPerpBisector(p, l);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintPointOnPerpBisector(Point &p, Point &lp1, Point &lp2, int tagId)
{
Constraint *constr = new ConstraintPointOnPerpBisector(p, lp1, lp2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintParallel(Line &l1, Line &l2, int tagId)
{
Constraint *constr = new ConstraintParallel(l1, l2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintPerpendicular(Line &l1, Line &l2, int tagId)
{
Constraint *constr = new ConstraintPerpendicular(l1, l2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintPerpendicular(Point &l1p1, Point &l1p2,
Point &l2p1, Point &l2p2, int tagId)
{
Constraint *constr = new ConstraintPerpendicular(l1p1, l1p2, l2p1, l2p2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintL2LAngle(Line &l1, Line &l2, double *angle, int tagId)
{
Constraint *constr = new ConstraintL2LAngle(l1, l2, angle);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintL2LAngle(Point &l1p1, Point &l1p2,
Point &l2p1, Point &l2p2, double *angle, int tagId)
{
Constraint *constr = new ConstraintL2LAngle(l1p1, l1p2, l2p1, l2p2, angle);
constr->setTag(tagId);
return addConstraint(constr);
}
// Angle between two curves measured at a point lying on both of them.
int System::addConstraintAngleViaPoint(Curve &crv1, Curve &crv2, Point &p, double *angle, int tagId)
{
Constraint *constr = new ConstraintAngleViaPoint(crv1, crv2, p, angle);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintMidpointOnLine(Line &l1, Line &l2, int tagId)
{
Constraint *constr = new ConstraintMidpointOnLine(l1, l2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintMidpointOnLine(Point &l1p1, Point &l1p2,
Point &l2p1, Point &l2p2, int tagId)
{
Constraint *constr = new ConstraintMidpointOnLine(l1p1, l1p2, l2p1, l2p2);
constr->setTag(tagId);
return addConstraint(constr);
}
// Tangency between two circumferences given by center points and radii;
// 'internal' selects internal vs. external tangency.
int System::addConstraintTangentCircumf(Point &p1, Point &p2, double *rad1, double *rad2,
bool internal, int tagId)
{
Constraint *constr = new ConstraintTangentCircumf(p1, p2, rad1, rad2, internal);
constr->setTag(tagId);
return addConstraint(constr);
}
// derived constraints
// --- derived constraints ---------------------------------------------------
// Helpers composed from the basic factories; all constraints of one call
// share the same tagId, and the returned index is that of the LAST
// constraint added.
int System::addConstraintP2PCoincident(Point &p1, Point &p2, int tagId)
{
addConstraintEqual(p1.x, p2.x, tagId);
return addConstraintEqual(p1.y, p2.y, tagId);
}
int System::addConstraintHorizontal(Line &l, int tagId)
{
return addConstraintEqual(l.p1.y, l.p2.y, tagId);
}
int System::addConstraintHorizontal(Point &p1, Point &p2, int tagId)
{
return addConstraintEqual(p1.y, p2.y, tagId);
}
int System::addConstraintVertical(Line &l, int tagId)
{
return addConstraintEqual(l.p1.x, l.p2.x, tagId);
}
int System::addConstraintVertical(Point &p1, Point &p2, int tagId)
{
return addConstraintEqual(p1.x, p2.x, tagId);
}
int System::addConstraintCoordinateX(Point &p, double *x, int tagId)
{
return addConstraintEqual(p.x, x, tagId);
}
int System::addConstraintCoordinateY(Point &p, double *y, int tagId)
{
return addConstraintEqual(p.y, y, tagId);
}
// Internal consistency of an Arc: endpoints sit at startAngle/endAngle on
// the circle of radius rad around the center.
int System::addConstraintArcRules(Arc &a, int tagId)
{
addConstraintP2PAngle(a.center, a.start, a.startAngle, tagId);
addConstraintP2PAngle(a.center, a.end, a.endAngle, tagId);
addConstraintP2PDistance(a.center, a.start, a.rad, tagId);
return addConstraintP2PDistance(a.center, a.end, a.rad, tagId);
}
int System::addConstraintPointOnCircle(Point &p, Circle &c, int tagId)
{
return addConstraintP2PDistance(p, c.center, c.rad, tagId);
}
int System::addConstraintPointOnEllipse(Point &p, Ellipse &e, int tagId)
{
Constraint *constr = new ConstraintPointOnEllipse(p, e);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintEllipticalArcRangeToEndPoints(Point &p, ArcOfEllipse &a, double *angle, int tagId)
{
Constraint *constr = new ConstraintEllipticalArcRangeToEndPoints(p,a,angle);
constr->setTag(tagId);
return addConstraint(constr);
}
// Internal consistency of an elliptical arc, analogous to ArcRules above.
int System::addConstraintArcOfEllipseRules(ArcOfEllipse &a, int tagId)
{
addConstraintEllipticalArcRangeToEndPoints(a.start,a,a.startAngle, tagId);
addConstraintEllipticalArcRangeToEndPoints(a.end,a,a.endAngle, tagId);
addConstraintPointOnEllipse(a.start, a, tagId);
return addConstraintPointOnEllipse(a.end, a, tagId);
}
int System::addConstraintPointOnArc(Point &p, Arc &a, int tagId)
{
return addConstraintP2PDistance(p, a.center, a.rad, tagId);
}
// Perpendicularity helpers between line segments / arcs / circles.
// NOTE(review): the dx/dy dot-product tests below choose a 0 or M_PI (or
// +/- M_PI/2) angle offset from the CURRENT coordinate values, i.e. the
// orientation branch is frozen at constraint-creation time.
int System::addConstraintPerpendicularLine2Arc(Point &p1, Point &p2, Arc &a,
int tagId)
{
addConstraintP2PCoincident(p2, a.start, tagId);
double dx = *(p2.x) - *(p1.x);
double dy = *(p2.y) - *(p1.y);
if (dx * cos(*(a.startAngle)) + dy * sin(*(a.startAngle)) > 0)
return addConstraintP2PAngle(p1, p2, a.startAngle, 0, tagId);
else
return addConstraintP2PAngle(p1, p2, a.startAngle, M_PI, tagId);
}
int System::addConstraintPerpendicularArc2Line(Arc &a, Point &p1, Point &p2,
int tagId)
{
addConstraintP2PCoincident(p1, a.end, tagId);
double dx = *(p2.x) - *(p1.x);
double dy = *(p2.y) - *(p1.y);
if (dx * cos(*(a.endAngle)) + dy * sin(*(a.endAngle)) > 0)
return addConstraintP2PAngle(p1, p2, a.endAngle, 0, tagId);
else
return addConstraintP2PAngle(p1, p2, a.endAngle, M_PI, tagId);
}
// Circle perpendicular to an arc at the arc's start point: the arc start
// lies on the circle, and the circle's radius direction matches the arc's
// tangent there (sign chosen from the current geometry).
int System::addConstraintPerpendicularCircle2Arc(Point &center, double *radius,
Arc &a, int tagId)
{
addConstraintP2PDistance(a.start, center, radius, tagId);
double incrAngle = *(a.startAngle) < *(a.endAngle) ? M_PI/2 : -M_PI/2;
double tangAngle = *a.startAngle + incrAngle;
double dx = *(a.start.x) - *(center.x);
double dy = *(a.start.y) - *(center.y);
if (dx * cos(tangAngle) + dy * sin(tangAngle) > 0)
return addConstraintP2PAngle(center, a.start, a.startAngle, incrAngle, tagId);
else
return addConstraintP2PAngle(center, a.start, a.startAngle, -incrAngle, tagId);
}
int System::addConstraintPerpendicularArc2Circle(Arc &a, Point &center,
double *radius, int tagId)
{
addConstraintP2PDistance(a.end, center, radius, tagId);
double incrAngle = *(a.startAngle) < *(a.endAngle) ? -M_PI/2 : M_PI/2;
double tangAngle = *a.endAngle + incrAngle;
double dx = *(a.end.x) - *(center.x);
double dy = *(a.end.y) - *(center.y);
if (dx * cos(tangAngle) + dy * sin(tangAngle) > 0)
return addConstraintP2PAngle(center, a.end, a.endAngle, incrAngle, tagId);
else
return addConstraintP2PAngle(center, a.end, a.endAngle, -incrAngle, tagId);
}
// Arc-arc perpendicularity at a shared endpoint; reverse flags select
// which endpoint of each arc is joined.
int System::addConstraintPerpendicularArc2Arc(Arc &a1, bool reverse1,
Arc &a2, bool reverse2, int tagId)
{
Point &p1 = reverse1 ? a1.start : a1.end;
Point &p2 = reverse2 ? a2.end : a2.start;
addConstraintP2PCoincident(p1, p2, tagId);
return addConstraintPerpendicular(a1.center, p1, a2.center, p2, tagId);
}
// Tangency helpers. For the curve-curve cases the choice between internal
// and external tangency is derived from the CURRENT center distance vs.
// radii (d < rad), i.e. it is fixed at constraint-creation time.
int System::addConstraintTangent(Line &l, Circle &c, int tagId)
{
return addConstraintP2LDistance(c.center, l, c.rad, tagId);
}
int System::addConstraintTangent(Line &l, Ellipse &e, int tagId)
{
Constraint *constr = new ConstraintEllipseTangentLine(l, e);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintTangent(Line &l, Arc &a, int tagId)
{
return addConstraintP2LDistance(a.center, l, a.rad, tagId);
}
int System::addConstraintTangent(Circle &c1, Circle &c2, int tagId)
{
double dx = *(c2.center.x) - *(c1.center.x);
double dy = *(c2.center.y) - *(c1.center.y);
double d = sqrt(dx*dx + dy*dy);
return addConstraintTangentCircumf(c1.center, c2.center, c1.rad, c2.rad,
(d < *c1.rad || d < *c2.rad), tagId);
}
int System::addConstraintTangent(Arc &a1, Arc &a2, int tagId)
{
double dx = *(a2.center.x) - *(a1.center.x);
double dy = *(a2.center.y) - *(a1.center.y);
double d = sqrt(dx*dx + dy*dy);
return addConstraintTangentCircumf(a1.center, a2.center, a1.rad, a2.rad,
(d < *a1.rad || d < *a2.rad), tagId);
}
int System::addConstraintTangent(Circle &c, Arc &a, int tagId)
{
double dx = *(a.center.x) - *(c.center.x);
double dy = *(a.center.y) - *(c.center.y);
double d = sqrt(dx*dx + dy*dy);
return addConstraintTangentCircumf(c.center, a.center, c.rad, a.rad,
(d < *c.rad || d < *a.rad), tagId);
}
// Radius / equality / symmetry / optics helpers.
int System::addConstraintCircleRadius(Circle &c, double *radius, int tagId)
{
return addConstraintEqual(c.rad, radius, tagId);
}
int System::addConstraintArcRadius(Arc &a, double *radius, int tagId)
{
return addConstraintEqual(a.rad, radius, tagId);
}
// Equal length via a shared *length driving parameter for both segments.
int System::addConstraintEqualLength(Line &l1, Line &l2, double *length, int tagId)
{
addConstraintP2PDistance(l1.p1, l1.p2, length, tagId);
return addConstraintP2PDistance(l2.p1, l2.p2, length, tagId);
}
int System::addConstraintEqualRadius(Circle &c1, Circle &c2, int tagId)
{
return addConstraintEqual(c1.rad, c2.rad, tagId);
}
// Ellipses: equal minor radii plus an explicit equal-major-axes constraint
// (the major radius is not stored directly as a parameter).
int System::addConstraintEqualRadii(Ellipse &e1, Ellipse &e2, int tagId)
{
//addConstraintEqual(e1.radmaj, e2.radmaj, tagId);
addConstraintEqual(e1.radmin, e2.radmin, tagId);
Constraint *constr = new ConstraintEqualMajorAxesEllipse(e1,e2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintEqualRadius(Circle &c1, Arc &a2, int tagId)
{
return addConstraintEqual(c1.rad, a2.rad, tagId);
}
int System::addConstraintEqualRadius(Arc &a1, Arc &a2, int tagId)
{
return addConstraintEqual(a1.rad, a2.rad, tagId);
}
// p1/p2 symmetric about line l: the segment p1-p2 is perpendicular to l
// and its midpoint lies on l.
int System::addConstraintP2PSymmetric(Point &p1, Point &p2, Line &l, int tagId)
{
addConstraintPerpendicular(p1, p2, l.p1, l.p2, tagId);
return addConstraintMidpointOnLine(p1, p2, l.p1, l.p2, tagId);
}
// p1/p2 symmetric about point p: p is equidistant from both and collinear
// with them.
int System::addConstraintP2PSymmetric(Point &p1, Point &p2, Point &p, int tagId)
{
addConstraintPointOnPerpBisector(p, p1, p2, tagId);
return addConstraintPointOnLine(p, p1, p2, tagId);
}
// Snell's law of refraction at point p on the boundary curve, with
// refractive indices *n1/*n2 (flip flags select ray orientation).
int System::addConstraintSnellsLaw(Curve &ray1, Curve &ray2,
Curve &boundary, Point p,
double *n1, double *n2,
bool flipn1, bool flipn2,
int tagId)
{
Constraint *constr = new ConstraintSnell(ray1,ray2,boundary,p,n1,n2,flipn1,flipn2);
constr->setTag(tagId);
return addConstraint(constr);
}
int System::addConstraintInternalAlignmentPoint2Ellipse(Ellipse &e, Point &p1, InternalAlignmentType alignmentType, int tagId)
{
Constraint *constr = new ConstraintInternalAlignmentPoint2Ellipse(e, p1, alignmentType);
constr->setTag(tagId);
return addConstraint(constr);
}
// Constrain p1/p2 to be the two endpoints of the ellipse's major diameter.
// The machine-generated expression below decides, from the CURRENT
// coordinates, which of the two points is closer to the "positive" major
// vertex (center + a * unit(F1 - center)), so that each point is assigned
// to the matching vertex. The sign test only influences the assignment,
// not the constraint equations themselves.
int System::addConstraintInternalAlignmentEllipseMajorDiameter(Ellipse &e, Point &p1, Point &p2, int tagId)
{
double X_1=*p1.x;
double Y_1=*p1.y;
double X_2=*p2.x;
double Y_2=*p2.y;
double X_c=*e.center.x;
double Y_c=*e.center.y;
double X_F1=*e.focus1.x;
double Y_F1=*e.focus1.y;
double b=*e.radmin;
// P1=vector([X_1,Y_1])
// P2=vector([X_2,Y_2])
// dF1= (F1-C)/sqrt((F1-C)*(F1-C))
// print "these are the extreme points of the major axis"
// PA = C + a * dF1
// PN = C - a * dF1
// print "this is a simple function to know which point is closer to the positive edge of the ellipse"
// DMC=(P1-PA)*(P1-PA)-(P2-PA)*(P2-PA)
double closertopositivemajor=pow(X_1 - X_c - (X_F1 - X_c)*sqrt(pow(b, 2) + pow(X_F1 - X_c,
2) + pow(Y_F1 - Y_c, 2))/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)),
2) - pow(X_2 - X_c - (X_F1 - X_c)*sqrt(pow(b, 2) + pow(X_F1 - X_c, 2) +
pow(Y_F1 - Y_c, 2))/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)), 2) +
pow(Y_1 - Y_c - (Y_F1 - Y_c)*sqrt(pow(b, 2) + pow(X_F1 - X_c, 2) +
pow(Y_F1 - Y_c, 2))/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)), 2) -
pow(Y_2 - Y_c - (Y_F1 - Y_c)*sqrt(pow(b, 2) + pow(X_F1 - X_c, 2) +
pow(Y_F1 - Y_c, 2))/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)), 2);
if(closertopositivemajor>0){
//p2 is closer to positivemajor. Assign constraints back-to-front.
addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipsePositiveMajorX,tagId);
addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipsePositiveMajorY,tagId);
addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseNegativeMajorX,tagId);
return addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseNegativeMajorY,tagId);
}
else{
//p1 is closer to positivemajor
addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipsePositiveMajorX,tagId);
addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipsePositiveMajorY,tagId);
addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipseNegativeMajorX,tagId);
return addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipseNegativeMajorY,tagId);
}
}
// Constrains p1 and p2 to the two endpoints of the minor axis of
// ellipse e. As in the major-diameter variant above, the sign of a
// symbolically-derived squared-distance difference decides which point
// is matched to the "positive" minor endpoint so the constraints fit
// the current geometry. All four sub-constraints share tagId.
int System::addConstraintInternalAlignmentEllipseMinorDiameter(Ellipse &e, Point &p1, Point &p2, int tagId)
{
    // Dereference the current values once for the decision formula.
    double X_1=*p1.x;
    double Y_1=*p1.y;
    double X_2=*p2.x;
    double Y_2=*p2.y;
    double X_c=*e.center.x;
    double Y_c=*e.center.y;
    double X_F1=*e.focus1.x;
    double Y_F1=*e.focus1.y;
    double b=*e.radmin;

    // Same idea as for major above, but for minor
    // DMC=(P1-PA)*(P1-PA)-(P2-PA)*(P2-PA)
    double closertopositiveminor= pow(X_1 - X_c + b*(Y_F1 - Y_c)/sqrt(pow(X_F1 - X_c, 2) +
                                  pow(Y_F1 - Y_c, 2)), 2) - pow(X_2 - X_c + b*(Y_F1 - Y_c)/sqrt(pow(X_F1 -
                                  X_c, 2) + pow(Y_F1 - Y_c, 2)), 2) + pow(-Y_1 + Y_c + b*(X_F1 -
                                  X_c)/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)), 2) - pow(-Y_2 + Y_c
                                  + b*(X_F1 - X_c)/sqrt(pow(X_F1 - X_c, 2) + pow(Y_F1 - Y_c, 2)), 2);

    if(closertopositiveminor>0){
        // p2 is closer to the positive-minor endpoint.
        addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipsePositiveMinorX,tagId);
        addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipsePositiveMinorY,tagId);
        addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseNegativeMinorX,tagId);
        return addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseNegativeMinorY,tagId);
    } else {
        // p1 is closer to the positive-minor endpoint.
        addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipsePositiveMinorX,tagId);
        addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipsePositiveMinorY,tagId);
        addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipseNegativeMinorX,tagId);
        return addConstraintInternalAlignmentPoint2Ellipse(e,p2,EllipseNegativeMinorY,tagId);
    }
}
// Aligns p1 with the first focus of ellipse e. Unlike the Focus2
// variant below, focus1 is a direct member of the ellipse, so this
// reduces to two plain coordinate-equality constraints (x, then y),
// both tagged with tagId.
int System::addConstraintInternalAlignmentEllipseFocus1(Ellipse &e, Point &p1, int tagId)
{
    addConstraintEqual(e.focus1.x, p1.x, tagId);
    return addConstraintEqual(e.focus1.y, p1.y, tagId);
}
// Aligns p1 with the second focus of ellipse e. The second focus is
// not stored on the ellipse, so it is expressed through the generic
// internal-alignment constraint, one per coordinate, both sharing tagId.
int System::addConstraintInternalAlignmentEllipseFocus2(Ellipse &e, Point &p1, int tagId)
{
    addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseFocus2X,tagId);
    return addConstraintInternalAlignmentPoint2Ellipse(e,p1,EllipseFocus2Y,tagId);
}
// Calculates the angle between two curves at the point of their
// intersection p. If two points are supplied (overload below), p1 is
// used for the first curve and p2 for the second, yielding a remote
// angle computation (useful when the endpoints haven't been made
// coincident yet).
double System::calculateAngleViaPoint(Curve &crv1, Curve &crv2, Point &p)
{
    return calculateAngleViaPoint(crv1, crv2, p, p);
}
// Remote-angle variant: evaluates the normal of crv1 at p1 and the
// normal of crv2 at p2, then returns the signed angle from the first
// normal to the second as atan2(cross, dot) of the two vectors.
double System::calculateAngleViaPoint(Curve &crv1, Curve &crv2, Point &p1, Point &p2)
{
    GCS::DeriVector2 norm1 = crv1.CalculateNormal(p1);
    GCS::DeriVector2 norm2 = crv2.CalculateNormal(p2);
    double cross = norm2.y * norm1.x - norm2.x * norm1.y;
    double dot = norm2.x * norm1.x + norm2.y * norm1.y;
    return atan2(cross, dot);
}
// Evaluates the normal vector of curve crv at point p and returns its
// components through the output references rtnX / rtnY.
void System::calculateNormalAtPoint(Curve &crv, Point &p, double &rtnX, double &rtnY)
{
    const GCS::DeriVector2 normal = crv.CalculateNormal(p);
    rtnX = normal.x;
    rtnY = normal.y;
}
double System::calculateConstraintErrorByTag(int tagId)
{
int cnt = 0; //how many constraints have been accumulated
double sqErr = 0.0; //accumulator of squared errors
double err = 0.0;//last computed signed error value
for (std::vector<Constraint *>::const_iterator
constr=clist.begin(); constr != clist.end(); ++constr) {
if ((*constr)->getTag() == tagId){
err = (*constr)->error();
sqErr += err*err;
cnt++;
};
}
switch (cnt) {
case 0: //constraint not found!
return std::numeric_limits<double>::quiet_NaN();
break;
case 1:
return err;
break;
default:
return sqrt(sqErr/(double)cnt);
}
}
// Rescales the constraint with index 'id' (position in clist) by
// 'coeff'. Out-of-range or negative ids are ignored.
//
// Fix: the original compared the signed 'id' against the unsigned
// clist.size() first (id >= clist.size()), which is a signed/unsigned
// comparison — a negative id is implicitly converted to a huge unsigned
// value. That happened to return early anyway, but only by accident;
// check the sign explicitly and compare against a signed size instead.
void System::rescaleConstraint(int id, double coeff)
{
    if (id < 0 || id >= static_cast<int>(clist.size()))
        return;
    if (clist[id])
        clist[id]->rescale(coeff);
}
void System::declareUnknowns(VEC_pD ¶ms)
{
plist = params;
pIndex.clear();
for (int i=0; i < int(plist.size()); ++i)
pIndex[plist[i]] = i;
hasUnknowns = true;
}
// Prepares the system for solving:
// - Stores the current parameter values in the vector "reference"
// - Diagnoses conflicting/redundant constraints (if not already done)
// - Identifies decoupled subsystems and partitions the original system
//   into the corresponding connected components
// - Identifies equality constraints tagged with ids >= 0 and prepares a
//   corresponding parameter reduction
// - Organizes the remaining constraints into two subsystems per
//   component, for tag ids >= 0 and < 0 respectively, applying the
//   reduction prepared in the previous step
// Sets isInit on success; callers must check it before solve().
void System::initSolution(Algorithm alg)
{
    isInit = false;
    if (!hasUnknowns)
        return;

    // storing reference configuration
    setReference();

    // diagnose conflicting or redundant constraints
    if (!hasDiagnosis) {
        diagnose(alg);
        if (!hasDiagnosis)
            return;
    }

    // clistR: all constraints except those flagged redundant by diagnose()
    std::vector<Constraint *> clistR;
    if (redundant.size()) {
        for (std::vector<Constraint *>::const_iterator constr=clist.begin();
             constr != clist.end(); ++constr)
            if (redundant.count(*constr) == 0)
                clistR.push_back(*constr);
    }
    else
        clistR = clist;

    // partitioning into decoupled components: build a bipartite graph
    // whose vertices are parameters [0, plist.size()) followed by
    // constraints, with an edge for every parameter a constraint uses.
    Graph g;
    for (int i=0; i < int(plist.size() + clistR.size()); i++)
        boost::add_vertex(g);

    int cvtid = int(plist.size()); // vertex id of the current constraint
    for (std::vector<Constraint *>::const_iterator constr=clistR.begin();
         constr != clistR.end(); ++constr, cvtid++) {
        VEC_pD &cparams = c2p[*constr];
        for (VEC_pD::const_iterator param=cparams.begin();
             param != cparams.end(); ++param) {
            MAP_pD_I::const_iterator it = pIndex.find(*param);
            if (it != pIndex.end())
                boost::add_edge(cvtid, it->second, g);
        }
    }

    // components[v] = connected-component id of vertex v
    VEC_I components(boost::num_vertices(g));
    int componentsSize = 0;
    if (!components.empty())
        componentsSize = boost::connected_components(g, &components[0]);

    // identification of equality constraints and parameter reduction:
    // every Equal constraint with tag >= 0 whose two parameters are both
    // unknowns is eliminated by aliasing the second parameter to the first.
    std::set<Constraint *> reducedConstrs;  // constraints that will be eliminated through reduction
    reductionmaps.clear(); // destroy any maps
    reductionmaps.resize(componentsSize); // create empty maps to be filled in
    {
        VEC_pD reducedParams=plist;
        for (std::vector<Constraint *>::const_iterator constr=clistR.begin();
             constr != clistR.end(); ++constr) {
            if ((*constr)->getTag() >= 0 && (*constr)->getTypeId() == Equal) {
                MAP_pD_I::const_iterator it1,it2;
                it1 = pIndex.find((*constr)->params()[0]);
                it2 = pIndex.find((*constr)->params()[1]);
                if (it1 != pIndex.end() && it2 != pIndex.end()) {
                    reducedConstrs.insert(*constr);
                    double *p_kept = reducedParams[it1->second];
                    double *p_replaced = reducedParams[it2->second];
                    // redirect every alias of p_replaced to p_kept, so
                    // chained equalities collapse to a single parameter
                    for (int i=0; i < int(plist.size()); ++i)
                        if (reducedParams[i] == p_replaced)
                            reducedParams[i] = p_kept;
                }
            }
        }
        // record the substitutions per component
        for (int i=0; i < int(plist.size()); ++i)
            if (plist[i] != reducedParams[i]) {
                int cid = components[i];
                reductionmaps[cid][plist[i]] = reducedParams[i];
            }
    }

    // distribute the surviving constraints over the components
    clists.clear(); // destroy any lists
    clists.resize(componentsSize); // create empty lists to be filled in
    int i = int(plist.size()); // constraint vertex ids start after the parameters
    for (std::vector<Constraint *>::const_iterator constr=clistR.begin();
         constr != clistR.end(); ++constr, i++) {
        if (reducedConstrs.count(*constr) == 0) {
            int cid = components[i];
            clists[cid].push_back(*constr);
        }
    }

    // distribute the parameters over the components
    plists.clear(); // destroy any lists
    plists.resize(componentsSize); // create empty lists to be filled in
    for (int i=0; i < int(plist.size()); ++i) {
        int cid = components[i];
        plists[cid].push_back(plist[i]);
    }

    // calculates subSystems and subSystemsAux from clists, plists and reductionmaps
    clearSubSystems();
    for (int cid=0; cid < clists.size(); cid++) {
        // split per tag sign: >= 0 goes to the priority subsystem,
        // < 0 (move / distance-from-reference) to the auxiliary one
        std::vector<Constraint *> clist0, clist1;
        for (std::vector<Constraint *>::const_iterator constr=clists[cid].begin();
             constr != clists[cid].end(); ++constr) {
            if ((*constr)->getTag() >= 0)
                clist0.push_back(*constr);
            else // move or distance from reference constraints
                clist1.push_back(*constr);
        }

        subSystems.push_back(NULL);
        subSystemsAux.push_back(NULL);
        if (clist0.size() > 0)
            subSystems[cid] = new SubSystem(clist0, plists[cid], reductionmaps[cid]);
        if (clist1.size() > 0)
            subSystemsAux[cid] = new SubSystem(clist1, plists[cid], reductionmaps[cid]);
    }

    isInit = true;
}
// Snapshots the current value of every unknown into 'reference' so the
// solver can later restore the starting configuration (see
// resetToReference).
void System::setReference()
{
    reference.clear();
    reference.reserve(plist.size());
    for (std::size_t i = 0; i < plist.size(); ++i)
        reference.push_back(*plist[i]);
}
// Restores every unknown to the value captured by setReference().
// Does nothing if the snapshot does not match the current unknown list
// (e.g. the unknowns changed since the snapshot was taken).
void System::resetToReference()
{
    if (reference.size() != plist.size())
        return;
    for (std::size_t i = 0; i < reference.size(); ++i)
        *plist[i] = reference[i];
}
// Convenience overload: registers 'params' as the unknowns, builds the
// subsystems (initSolution with its default algorithm argument), then
// delegates to the main solve routine. Returns the solver status
// (Success / Converged / Failed).
int System::solve(VEC_pD &params, bool isFine, Algorithm alg, bool isRedundantsolving)
{
    declareUnknowns(params);
    initSolution();
    return solve(isFine, alg, isRedundantsolving);
}
// Solves every decoupled component produced by initSolution() and
// returns the worst status across them (Success < Converged < Failed).
// Before the first non-empty component is solved, the unknowns are
// reset to the reference configuration. If everything succeeded, the
// redundant constraints are re-checked: if any of them still has a
// squared error above the convergence threshold, the result is demoted
// to Converged.
int System::solve(bool isFine, Algorithm alg, bool isRedundantsolving)
{
    if (!isInit)
        return Failed;

    bool isReset = false;
    // return success by default in order to permit coincidence constraints to be applied
    // even if no other system has to be solved
    int res = Success;
    for (int cid=0; cid < int(subSystems.size()); cid++) {
        if ((subSystems[cid] || subSystemsAux[cid]) && !isReset) {
            resetToReference();
            isReset = true;
        }
        // a component with both a priority and an auxiliary subsystem is
        // solved with the combined two-subsystem solver; otherwise the
        // single-subsystem solver is used on whichever exists
        if (subSystems[cid] && subSystemsAux[cid])
            res = std::max(res, solve(subSystems[cid], subSystemsAux[cid], isFine, isRedundantsolving));
        else if (subSystems[cid])
            res = std::max(res, solve(subSystems[cid], isFine, alg, isRedundantsolving));
        else if (subSystemsAux[cid])
            res = std::max(res, solve(subSystemsAux[cid], isFine, alg, isRedundantsolving));
    }
    if (res == Success) {
        for (std::set<Constraint *>::const_iterator constr=redundant.begin();
             constr != redundant.end(); constr++){
            //DeepSOIC: there used to be a comparison of signed error value to
            //convergence, which makes no sense. Potentially I fixed bug, and
            //chances are low I've broken anything.
            double err = (*constr)->error();
            if (err*err > (isRedundantsolving?convergenceRedundant:convergence)) {
                res = Converged;
                return res;
            }
        }
    }
    return res;
}
// Dispatches a single-subsystem solve to the implementation selected by
// 'alg'. Note that only BFGS honours 'isFine'; LM and DogLeg take only
// the redundant-solving flag.
int System::solve(SubSystem *subsys, bool isFine, Algorithm alg, bool isRedundantsolving)
{
    switch (alg) {
    case BFGS:
        return solve_BFGS(subsys, isFine, isRedundantsolving);
    case LevenbergMarquardt:
        return solve_LM(subsys, isRedundantsolving);
    case DogLeg:
        return solve_DL(subsys, isRedundantsolving);
    default:
        return Failed;
    }
}
// Quasi-Newton (BFGS) minimization of the subsystem error.
// Maintains an approximation D of the inverse Hessian, updated each
// iteration from the parameter step h and gradient change y; the search
// direction is -D*grad, refined by lineSearch(). Returns Success when
// the error drops below smallF, Converged when the step norm falls
// below the convergence threshold, Failed otherwise.
// NOTE(review): 'isFine' is currently unused — only the commented-out
// convergence line below ever referenced it.
int System::solve_BFGS(SubSystem *subsys, bool isFine, bool isRedundantsolving)
{
    int xsize = subsys->pSize();
    if (xsize == 0)
        return Success;

    subsys->redirectParams();

    Eigen::MatrixXd D = Eigen::MatrixXd::Identity(xsize, xsize);
    Eigen::VectorXd x(xsize);
    Eigen::VectorXd xdir(xsize);
    Eigen::VectorXd grad(xsize);
    Eigen::VectorXd h(xsize);
    Eigen::VectorXd y(xsize);
    Eigen::VectorXd Dy(xsize);

    // Initial unknowns vector and initial gradient vector
    subsys->getParams(x);
    subsys->calcGrad(grad);

    // Initial search direction opposed to gradient (steepest-descent)
    xdir = -grad;
    lineSearch(subsys, xdir);
    double err = subsys->error();

    h = x;
    subsys->getParams(x);
    h = x - h; // = x - xold

    //double convergence = isFine ? convergence : XconvergenceRough;
    int maxIterNumber = (isRedundantsolving?
        (sketchSizeMultiplierRedundant?maxIterRedundant * xsize:maxIterRedundant):
        (sketchSizeMultiplier?maxIter * xsize:maxIter));

    if(debugMode==IterationLevel) {
        std::stringstream stream;
        stream << "BFGS: convergence: " << (isRedundantsolving?convergenceRedundant:convergence)
               << ", xsize: " << xsize
               << ", maxIter: " << maxIterNumber << "\n";

        const std::string tmp = stream.str();
        Base::Console().Log(tmp.c_str());
    }

    // bail-out threshold for a diverging solve
    double divergingLim = 1e6*err + 1e12;
    double h_norm;

    for (int iter=1; iter < maxIterNumber; iter++) {
        h_norm = h.norm();
        // converged if the last step was tiny or the error is small enough
        if (h_norm <= (isRedundantsolving?convergenceRedundant:convergence) || err <= smallF){
            if(debugMode==IterationLevel) {
                std::stringstream stream;
                stream << "BFGS Converged!!: "
                       << ", err: " << err
                       << ", h_norm: " << h_norm << "\n";

                const std::string tmp = stream.str();
                Base::Console().Log(tmp.c_str());
            }
            break;
        }
        if (err > divergingLim || err != err) { // check for diverging and NaN
            if(debugMode==IterationLevel) {
                std::stringstream stream;
                stream << "BFGS Failed: Diverging!!: "
                       << ", err: " << err
                       << ", divergingLim: " << divergingLim << "\n";

                const std::string tmp = stream.str();
                Base::Console().Log(tmp.c_str());
            }
            break;
        }

        // y = change in gradient over the last step
        y = grad;
        subsys->calcGrad(grad);
        y = grad - y; // = grad - gradold

        double hty = h.dot(y);
        //make sure that hty is never 0
        if (hty == 0)
            hty = .0000000001;

        Dy = D * y;

        double ytDy = y.dot(Dy);

        //Now calculate the BFGS update on D
        D += (1.+ytDy/hty)/hty * h * h.transpose();
        D -= 1./hty * (h * Dy.transpose() + Dy * h.transpose());

        xdir = -D * grad;
        lineSearch(subsys, xdir);
        err = subsys->error();

        h = x;
        subsys->getParams(x);
        h = x - h; // = x - xold

        if(debugMode==IterationLevel) {
            std::stringstream stream;
            stream << "BFGS, Iteration: " << iter
                   << ", err: " << err
                   << ", h_norm: " << h_norm << "\n";

            const std::string tmp = stream.str();
            Base::Console().Log(tmp.c_str());
        }
    }

    subsys->revertParams();

    if (err <= smallF)
        return Success;
    if (h.norm() <= (isRedundantsolving?convergenceRedundant:convergence))
        return Converged;
    return Failed;
}
// Levenberg-Marquardt least-squares solve of the subsystem.
// Standard adaptive-damping LM: the augmented normal equations
// (J^T J + mu*I) h = J^T e are solved repeatedly; a successful step
// shrinks mu, a rejected one grows it (by nu). Stop codes:
//   1 small residual (Success), 2 small gradient, 3 small step,
//   4 almost singular, 5 iteration limit, 6 diverging/NaN,
//   7 inner damping loop exhausted.
// Only stop == 1 maps to Success; everything else returns Failed.
int System::solve_LM(SubSystem* subsys, bool isRedundantsolving)
{
    int xsize = subsys->pSize();
    int csize = subsys->cSize();

    if (xsize == 0)
        return Success;

    Eigen::VectorXd e(csize), e_new(csize); // vector of all function errors (every constraint is one function)
    Eigen::MatrixXd J(csize, xsize);        // Jacobi of the subsystem
    Eigen::MatrixXd A(xsize, xsize);
    Eigen::VectorXd x(xsize), h(xsize), x_new(xsize), g(xsize), diag_A(xsize);

    subsys->redirectParams();

    subsys->getParams(x);
    subsys->calcResidual(e);
    e*=-1; // we solve for the negative residual throughout

    int maxIterNumber = (isRedundantsolving?
        (sketchSizeMultiplierRedundant?maxIterRedundant * xsize:maxIterRedundant):
        (sketchSizeMultiplier?maxIter * xsize:maxIter));

    double divergingLim = 1e6*e.squaredNorm() + 1e12;

    double eps=(isRedundantsolving?LM_epsRedundant:LM_eps);
    double eps1=(isRedundantsolving?LM_eps1Redundant:LM_eps1);
    double tau=(isRedundantsolving?LM_tauRedundant:LM_tau);

    if(debugMode==IterationLevel) {
        std::stringstream stream;
        stream << "LM: eps: " << eps
               << ", eps1: " << eps1
               << ", tau: " << tau
               << ", convergence: " << (isRedundantsolving?convergenceRedundant:convergence)
               << ", xsize: " << xsize
               << ", maxIter: " << maxIterNumber << "\n";

        const std::string tmp = stream.str();
        Base::Console().Log(tmp.c_str());
    }

    double nu=2, mu=0;
    int iter=0, stop=0;
    for (iter=0; iter < maxIterNumber && !stop; ++iter) {

        // check error
        double err=e.squaredNorm();
        if (err <= eps) { // error is small, Success
            stop = 1;
            break;
        }
        else if (err > divergingLim || err != err) { // check for diverging and NaN
            stop = 6;
            break;
        }

        // J^T J, J^T e
        subsys->calcJacobi(J);; // NOTE(review): stray extra ';' — harmless empty statement

        A = J.transpose()*J;
        g = J.transpose()*e;

        // Compute ||J^T e||_inf
        double g_inf = g.lpNorm<Eigen::Infinity>();
        diag_A = A.diagonal(); // save diagonal entries so that augmentation can be later canceled

        // check for convergence
        if (g_inf <= eps1) {
            stop = 2;
            break;
        }

        // compute initial damping factor
        if (iter == 0)
            mu = tau * diag_A.lpNorm<Eigen::Infinity>();

        double h_norm;
        // determine increment using adaptive damping
        int k=0;
        while (k < 50) {
            // augment normal equations A = A+uI
            for (int i=0; i < xsize; ++i)
                A(i,i) += mu;

            //solve augmented functions A*h=-g
            h = A.fullPivLu().solve(g);
            double rel_error = (A*h - g).norm() / g.norm();

            // check if solving works
            if (rel_error < 1e-5) {

                // restrict h according to maxStep
                double scale = subsys->maxStep(h);
                if (scale < 1.)
                    h *= scale;

                // compute par's new estimate and ||d_par||^2
                x_new = x + h;
                h_norm = h.squaredNorm();

                if (h_norm <= eps1*eps1*x.norm()) { // relative change in p is small, stop
                    stop = 3;
                    break;
                }
                else if (h_norm >= (x.norm()+eps1)/(DBL_EPSILON*DBL_EPSILON)) { // almost singular
                    stop = 4;
                    break;
                }

                subsys->setParams(x_new);
                subsys->calcResidual(e_new);
                e_new *= -1;

                // dF: actual error reduction, dL: reduction predicted
                // by the linear model — their ratio steers mu
                double dF = e.squaredNorm() - e_new.squaredNorm();
                double dL = h.dot(mu*h+g);

                if (dF>0. && dL>0.) { // reduction in error, increment is accepted
                    double tmp=2*dF/dL-1.;
                    mu *= std::max(1./3., 1.-tmp*tmp*tmp);
                    nu=2;

                    // update par's estimate
                    x = x_new;
                    e = e_new;
                    break;
                }
            }

            // if this point is reached, either the linear system could not be solved or
            // the error did not reduce; in any case, the increment must be rejected
            mu*=nu;
            nu*=2.0;
            for (int i=0; i < xsize; ++i) // restore diagonal J^T J entries
                A(i,i) = diag_A(i);

            k++;
        }

        // NOTE(review): the loop above exits with k == 50 when exhausted,
        // so this condition is never true and stop=7 is unreachable;
        // looks like it should be 'k >= 50' — confirm before changing.
        if (k > 50) {
            stop = 7;
            break;
        }

        if(debugMode==IterationLevel) {
            std::stringstream stream;
            // Iteration: 1, residual: 1e-3, tolg: 1e-5, tolx: 1e-3
            stream << "LM, Iteration: " << iter
                   << ", err(eps): " << err
                   << ", g_inf(eps1): " << g_inf
                   << ", h_norm: " << h_norm << "\n";

            const std::string tmp = stream.str();
            Base::Console().Log(tmp.c_str());
        }
    }

    if (iter >= maxIterNumber)
        stop = 5;

    subsys->revertParams();

    return (stop == 1) ? Success : Failed;
}
// Powell dog-leg solve of the subsystem: blends the steepest-descent
// step h_sd and the Gauss-Newton step h_gn inside a trust region of
// radius delta, which grows on good steps and shrinks on bad ones.
// Stop codes: 1 small residual (Success), 2 small gradient or trust
// region, 4 iteration limit, 5 tiny GN step, 6 diverging/NaN.
// Only stop == 1 maps to Success.
int System::solve_DL(SubSystem* subsys, bool isRedundantsolving)
{
    double tolg=(isRedundantsolving?DL_tolgRedundant:DL_tolg);
    double tolx=(isRedundantsolving?DL_tolxRedundant:DL_tolx);
    double tolf=(isRedundantsolving?DL_tolfRedundant:DL_tolf);

    int xsize = subsys->pSize();
    int csize = subsys->cSize();

    if (xsize == 0)
        return Success;

    int maxIterNumber = (isRedundantsolving?
        (sketchSizeMultiplierRedundant?maxIterRedundant * xsize:maxIterRedundant):
        (sketchSizeMultiplier?maxIter * xsize:maxIter));

    if(debugMode==IterationLevel) {
        std::stringstream stream;
        stream << "DL: tolg: " << tolg
               << ", tolx: " << tolx
               << ", tolf: " << tolf
               << ", convergence: " << (isRedundantsolving?convergenceRedundant:convergence)
               << ", xsize: " << xsize
               << ", csize: " << csize
               << ", maxIter: " << maxIterNumber << "\n";

        const std::string tmp = stream.str();
        Base::Console().Log(tmp.c_str());
    }

    Eigen::VectorXd x(xsize), x_new(xsize);
    Eigen::VectorXd fx(csize), fx_new(csize);
    Eigen::MatrixXd Jx(csize, xsize), Jx_new(csize, xsize);
    Eigen::VectorXd g(xsize), h_sd(xsize), h_gn(xsize), h_dl(xsize);

    subsys->redirectParams();

    double err;
    subsys->getParams(x);
    subsys->calcResidual(fx, err);
    subsys->calcJacobi(Jx);

    g = Jx.transpose()*(-fx);

    // get the infinity norm fx_inf and g_inf
    double g_inf = g.lpNorm<Eigen::Infinity>();
    double fx_inf = fx.lpNorm<Eigen::Infinity>();

    double divergingLim = 1e6*err + 1e12;

    double delta=0.1;  // initial trust-region radius
    double alpha=0.;
    double nu=2.;      // trust-region shrink factor, doubled on each rejection
    int iter=0, stop=0, reduce=0;
    while (!stop) {

        // check if finished
        if (fx_inf <= tolf) // Success
            stop = 1;
        else if (g_inf <= tolg)
            stop = 2;
        else if (delta <= tolx*(tolx + x.norm()))
            stop = 2;
        else if (iter >= maxIterNumber)
            stop = 4;
        else if (err > divergingLim || err != err) { // check for diverging and NaN
            stop = 6;
        }
        else {
            // get the steepest descent direction
            alpha = g.squaredNorm()/(Jx*g).squaredNorm();
            h_sd  = alpha*g;

            // get the gauss-newton step
            h_gn = Jx.fullPivLu().solve(-fx);
            double rel_error = (Jx*h_gn + fx).norm() / fx.norm();
            if (rel_error > 1e15)
                break;

            // compute the dogleg step: pure GN if it fits in the trust
            // region, clipped steepest-descent if even that is too long,
            // otherwise the interpolated dog-leg point on the boundary
            if (h_gn.norm() < delta) {
                h_dl = h_gn;
                if  (h_dl.norm() <= tolx*(tolx + x.norm())) {
                    stop = 5;
                    break;
                }
            }
            else if (alpha*g.norm() >= delta) {
                h_dl = (delta/(alpha*g.norm()))*h_sd;
            }
            else {
                //compute beta
                double beta = 0;
                Eigen::VectorXd b = h_gn - h_sd;
                double bb = (b.transpose()*b).norm();
                double gb = (h_sd.transpose()*b).norm();
                double c = (delta + h_sd.norm())*(delta - h_sd.norm());

                if (gb > 0)
                    beta = c / (gb + sqrt(gb * gb + c * bb));
                else
                    beta = (sqrt(gb * gb + c * bb) - gb)/bb;

                // and update h_dl and dL with beta
                h_dl = h_sd + beta*b;
            }
        }

        // see if we are already finished
        if (stop)
            break;

// it didn't work in some tests
//        // restrict h_dl according to maxStep
//        double scale = subsys->maxStep(h_dl);
//        if (scale < 1.)
//            h_dl *= scale;

        // get the new values
        double err_new;
        x_new = x + h_dl;
        subsys->setParams(x_new);
        subsys->calcResidual(fx_new, err_new);
        subsys->calcJacobi(Jx_new);

        // calculate the linear model and the update ratio
        // NOTE(review): rho is computed before the dF > 0 check, so a
        // zero dF yields inf/NaN here; it is overwritten with -1 in the
        // else branch below, so the value is never used — confirm.
        double dL = err - 0.5*(fx + Jx*h_dl).squaredNorm();
        double dF = err - err_new;
        double rho = dL/dF;

        if (dF > 0 && dL > 0) {
            // step accepted: adopt the new state and refresh the norms
            x  = x_new;
            Jx = Jx_new;
            fx = fx_new;
            err = err_new;

            g = Jx.transpose()*(-fx);

            // get infinity norms
            g_inf = g.lpNorm<Eigen::Infinity>();
            fx_inf = fx.lpNorm<Eigen::Infinity>();
        }
        else
            rho = -1;

        // update delta
        if (fabs(rho-1.) < 0.2 && h_dl.norm() > delta/3. && reduce <= 0) {
            delta = 3*delta;
            nu = 2;
            reduce = 0;
        }
        else if (rho < 0.25) {
            delta = delta/nu;
            nu = 2*nu;
            reduce = 2;
        }
        else
            reduce--;

        if(debugMode==IterationLevel) {
            std::stringstream stream;
            // Iteration: 1, residual: 1e-3, tolg: 1e-5, tolx: 1e-3
            stream << "DL, Iteration: " << iter
                   << ", fx_inf(tolf): " << fx_inf
                   << ", g_inf(tolg): " << g_inf
                   << ", delta(f(tolx)): " << delta
                   << ", err(divergingLim): " << err << "\n";

            const std::string tmp = stream.str();
            Base::Console().Log(tmp.c_str());
        }

        // count this iteration and start again
        iter++;
    }

    subsys->revertParams();

    if(debugMode==IterationLevel) {
        std::stringstream stream;
        stream << "DL: stopcode: " << stop << ((stop == 1) ? ", Success" : ", Failed") << "\n";

        const std::string tmp = stream.str();
        Base::Console().Log(tmp.c_str());
    }

    return (stop == 1) ? Success : Failed;
}
// The following solver variant solves a system compound of two subsystems
// treating the first of them as of higher priority than the second.
// SQP-style approach: subsysA's constraints act as equality constraints
// of the minimization of subsysB's error, solved iteratively via the
// equality-constrained QP (qp_eq) with a BFGS update of the Hessian
// approximation B and an l1-merit-function line search (the Eq. numbers
// in the comments refer to Nocedal & Wright, "Numerical Optimization").
int System::solve(SubSystem *subsysA, SubSystem *subsysB, bool isFine, bool isRedundantsolving)
{
    int xsizeA = subsysA->pSize();
    int xsizeB = subsysB->pSize();
    int csizeA = subsysA->cSize();

    // plistAB: sorted union of the parameters of both subsystems
    VEC_pD plistAB(xsizeA+xsizeB);
    {
        VEC_pD plistA, plistB;
        subsysA->getParamList(plistA);
        subsysB->getParamList(plistB);

        std::sort(plistA.begin(),plistA.end());
        std::sort(plistB.begin(),plistB.end());

        VEC_pD::const_iterator it;
        it = std::set_union(plistA.begin(),plistA.end(),
                            plistB.begin(),plistB.end(),plistAB.begin());
        plistAB.resize(it-plistAB.begin());
    }
    int xsize = plistAB.size();

    Eigen::MatrixXd B = Eigen::MatrixXd::Identity(xsize, xsize); // Hessian approximation
    Eigen::MatrixXd JA(csizeA, xsize);
    Eigen::MatrixXd Y,Z;
    Eigen::VectorXd resA(csizeA);
    Eigen::VectorXd lambda(csizeA), lambda0(csizeA), lambdadir(csizeA);
    Eigen::VectorXd x(xsize), x0(xsize), xdir(xsize), xdir1(xsize);
    Eigen::VectorXd grad(xsize);
    Eigen::VectorXd h(xsize);
    Eigen::VectorXd y(xsize);
    Eigen::VectorXd Bh(xsize);

    // We assume that there are no common constraints in subsysA and subsysB
    subsysA->redirectParams();
    subsysB->redirectParams();

    subsysB->getParams(plistAB,x);
    subsysA->getParams(plistAB,x);
    subsysB->setParams(plistAB,x);  // just to ensure that A and B are synchronized

    subsysB->calcGrad(plistAB,grad);
    subsysA->calcJacobi(plistAB,JA);
    subsysA->calcResidual(resA);

    //double convergence = isFine ? XconvergenceFine : XconvergenceRough;
    int maxIterNumber = (isRedundantsolving?
        (sketchSizeMultiplierRedundant?maxIterRedundant * xsize:maxIterRedundant):
        (sketchSizeMultiplier?maxIter * xsize:maxIter));

    double divergingLim = 1e6*subsysA->error() + 1e12;

    double mu = 0; // merit-function penalty parameter, only ever increased
    lambda.setZero();
    // NOTE(review): if qp_eq fails on the very first iteration, the loop
    // breaks with 'h' never assigned; the h.norm() test after the loop
    // then reads an uninitialized Eigen vector — confirm.
    for (int iter=1; iter < maxIterNumber; iter++) {

        int status = qp_eq(B, grad, JA, resA, xdir, Y, Z);
        if (status)
            break;

        x0 = x;
        lambda0 = lambda;
        lambda = Y.transpose() * (B * xdir + grad);
        lambdadir = lambda - lambda0;

        // line search
        {
            double eta=0.25;
            double tau=0.5;
            double rho=0.5;
            double alpha=1;
            alpha = std::min(alpha, subsysA->maxStep(plistAB,xdir));

            // Eq. 18.32
            // double mu = lambda.lpNorm<Eigen::Infinity>() + 0.01;
            // Eq. 18.33
            // double mu = grad.dot(xdir) / ( (1.-rho) * resA.lpNorm<1>());
            // Eq. 18.36
            mu =  std::max(mu,
                           (grad.dot(xdir) +  std::max(0., 0.5*xdir.dot(B*xdir))) /
                           ( (1. - rho) * resA.lpNorm<1>() ) );

            // Eq. 18.27
            double f0 = subsysB->error() + mu * resA.lpNorm<1>();

            // Eq. 18.29
            double deriv = grad.dot(xdir) - mu * resA.lpNorm<1>();

            x = x0 + alpha * xdir;
            subsysA->setParams(plistAB,x);
            subsysB->setParams(plistAB,x);
            subsysA->calcResidual(resA);
            double f = subsysB->error() + mu * resA.lpNorm<1>();

            // line search, Eq. 18.28
            bool first = true;
            while (f > f0 + eta * alpha * deriv) {
                if (first) { // try a second order step
//                    xdir1 = JA.jacobiSvd(Eigen::ComputeThinU |
//                                         Eigen::ComputeThinV).solve(-resA);
                    xdir1 = -Y*resA;
                    x += xdir1; // = x0 + alpha * xdir + xdir1
                    subsysA->setParams(plistAB,x);
                    subsysB->setParams(plistAB,x);
                    subsysA->calcResidual(resA);
                    f = subsysB->error() + mu * resA.lpNorm<1>();
                    if (f < f0 + eta * alpha * deriv)
                        break;
                }
                alpha = tau * alpha;
                if (alpha < 1e-8) // let the linesearch fail
                    alpha = 0.;
                x = x0 + alpha * xdir;
                subsysA->setParams(plistAB,x);
                subsysB->setParams(plistAB,x);
                subsysA->calcResidual(resA);
                f = subsysB->error() + mu * resA.lpNorm<1>();
                if (alpha < 1e-8) // let the linesearch fail
                    break;
            }
            lambda = lambda0 + alpha * lambdadir;

        }
        h = x - x0;

        // damped BFGS update of the Lagrangian-Hessian approximation
        y = grad - JA.transpose() * lambda;
        {
            subsysB->calcGrad(plistAB,grad);
            subsysA->calcJacobi(plistAB,JA);
            subsysA->calcResidual(resA);
        }
        y = grad - JA.transpose() * lambda - y; // Eq. 18.13

        if (iter > 1) {
            double yTh = y.dot(h);
            if (yTh != 0) {
                Bh = B * h;
                //Now calculate the BFGS update on B
                B += 1./yTh * y * y.transpose();
                B -= 1./h.dot(Bh) * (Bh * Bh.transpose());
            }
        }

        double err = subsysA->error();
        if (h.norm() <= (isRedundantsolving?convergenceRedundant:convergence) && err <= smallF)
            break;
        if (err > divergingLim || err != err) // check for diverging and NaN
            break;
    }

    int ret;
    if (subsysA->error() <= smallF)
        ret = Success;
    else if (h.norm() <= (isRedundantsolving?convergenceRedundant:convergence))
        ret = Converged;
    else
        ret = Failed;

    subsysA->revertParams();
    subsysB->revertParams();
    return ret;
}
// Writes the solved values of every component back into the original
// parameters: auxiliary subsystems first, then the priority subsystems,
// and finally the equality-reduction maps are propagated so aliased
// parameters pick up the value of the parameter they were reduced to.
void System::applySolution()
{
    for (std::size_t cid = 0; cid < subSystems.size(); ++cid) {
        if (subSystemsAux[cid])
            subSystemsAux[cid]->applySolution();
        if (subSystems[cid])
            subSystems[cid]->applySolution();

        MAP_pD_pD::const_iterator it = reductionmaps[cid].begin();
        for (; it != reductionmaps[cid].end(); ++it)
            *(it->first) = *(it->second);
    }
}
// Discards whatever the solver computed by restoring all unknowns to
// the values captured by setReference().
void System::undoSolution()
{
    resetToReference();
}
int System::diagnose(Algorithm alg)
{
// Analyses the constrainess grad of the system and provides feedback
// The vector "conflictingTags" will hold a group of conflicting constraints
// Hint 1: Only constraints with tag >= 0 are taken into account
// Hint 2: Constraints tagged with 0 are treated as high priority
// constraints and they are excluded from the returned
// list of conflicting constraints. Therefore, this function
// will provide no feedback about possible conflicts between
// two high priority constraints. For this reason, tagging
// constraints with 0 should be used carefully.
hasDiagnosis = false;
if (!hasUnknowns) {
dofs = -1;
return dofs;
}
redundant.clear();
conflictingTags.clear();
redundantTags.clear();
Eigen::MatrixXd J(clist.size(), plist.size());
int count=0;
for (std::vector<Constraint *>::iterator constr=clist.begin();
constr != clist.end(); ++constr) {
(*constr)->revertParams();
if ((*constr)->getTag() >= 0) {
count++;
for (int j=0; j < int(plist.size()); j++)
J(count-1,j) = (*constr)->grad(plist[j]);
}
}
#ifdef EIGEN_SPARSEQR_COMPATIBLE
Eigen::SparseMatrix<double> SJ;
if(qrAlgorithm==EigenSparseQR){
// this creation is not optimized (done using triplets)
// however the time this takes is negligible compared to the
// time the QR decomposition itself takes
SJ = J.sparseView();
SJ.makeCompressed();
}
Eigen::SparseQR<Eigen::SparseMatrix<double>, Eigen::COLAMDOrdering<int> > SqrJT;
#else
if(qrAlgorithm==EigenSparseQR){
Base::Console().Warning("SparseQR not supported by you current version of Eigen. It requires Eigen 3.2.2 or higher. Falling back to Dense QR\n");
qrAlgorithm=EigenDenseQR;
}
#endif
#ifdef _GCS_DEBUG
// Debug code starts
std::stringstream stream;
stream << "[";
stream << J ;
stream << "]";
const std::string tmp = stream.str();
Base::Console().Log(tmp.c_str());
// Debug code ends
#endif
Eigen::MatrixXd R;
int paramsNum;
int constrNum;
int rank;
Eigen::FullPivHouseholderQR<Eigen::MatrixXd> qrJT;
if(qrAlgorithm==EigenDenseQR){
if (J.rows() > 0) {
qrJT=Eigen::FullPivHouseholderQR<Eigen::MatrixXd>(J.topRows(count).transpose());
Eigen::MatrixXd Q = qrJT.matrixQ ();
paramsNum = qrJT.rows();
constrNum = qrJT.cols();
qrJT.setThreshold(qrpivotThreshold);
rank = qrJT.rank();
if (constrNum >= paramsNum)
R = qrJT.matrixQR().triangularView<Eigen::Upper>();
else
R = qrJT.matrixQR().topRows(constrNum)
.triangularView<Eigen::Upper>();
}
}
#ifdef EIGEN_SPARSEQR_COMPATIBLE
else if(qrAlgorithm==EigenSparseQR){
if (SJ.rows() > 0) {
SqrJT=Eigen::SparseQR<Eigen::SparseMatrix<double>, Eigen::COLAMDOrdering<int> >(SJ.topRows(count).transpose());
// Do not ask for Q Matrix!!
// At Eigen 3.2 still has a bug that this only works for square matrices
// if enabled it will crash
//Eigen::SparseMatrix<double> Q = qrJT.matrixQ();
//qrJT.matrixQ().evalTo(Q);
paramsNum = SqrJT.rows();
constrNum = SqrJT.cols();
SqrJT.setPivotThreshold(qrpivotThreshold);
rank = SqrJT.rank();
if (constrNum >= paramsNum)
R = SqrJT.matrixR().triangularView<Eigen::Upper>();
else
R = SqrJT.matrixR().topRows(constrNum)
.triangularView<Eigen::Upper>();
}
}
#endif
if(debugMode==IterationLevel) {
std::stringstream stream;
stream << (qrAlgorithm==EigenSparseQR?"EigenSparseQR":(qrAlgorithm==EigenDenseQR?"DenseQR":""));
if (J.rows() > 0) {
stream
#ifdef EIGEN_SPARSEQR_COMPATIBLE
<< ", Threads: " << Eigen::nbThreads()
#endif
#ifdef EIGEN_VECTORIZE
<< ", Vectorization: On"
#endif
<< ", Pivot Threshold: " << qrpivotThreshold
<< ", Params: " << paramsNum
<< ", Constr: " << constrNum
<< ", Rank: " << rank << "\n";
}
else {
stream
#ifdef EIGEN_SPARSEQR_COMPATIBLE
<< ", Threads: " << Eigen::nbThreads()
#endif
#ifdef EIGEN_VECTORIZE
<< ", Vectorization: On"
#endif
<< ", Empty Sketch, nothing to solve" << "\n";
}
const std::string tmp = stream.str();
Base::Console().Log(tmp.c_str());
}
if (J.rows() > 0) {
#ifdef _GCS_DEBUG_SOLVER_JACOBIAN_QR_DECOMPOSITION_TRIANGULAR_MATRIX
// Debug code starts
std::stringstream stream;
stream << "[";
stream << R ;
stream << "]";
const std::string tmp = stream.str();
Base::Console().Log(tmp.c_str());
// Debug code ends
#endif
if (constrNum > rank) { // conflicting or redundant constraints
for (int i=1; i < rank; i++) {
// eliminate non zeros above pivot
assert(R(i,i) != 0);
for (int row=0; row < i; row++) {
if (R(row,i) != 0) {
double coef=R(row,i)/R(i,i);
R.block(row,i+1,1,constrNum-i-1) -= coef * R.block(i,i+1,1,constrNum-i-1);
R(row,i) = 0;
}
}
}
std::vector< std::vector<Constraint *> > conflictGroups(constrNum-rank);
for (int j=rank; j < constrNum; j++) {
for (int row=0; row < rank; row++) {
if (fabs(R(row,j)) > 1e-10) {
int origCol;
if(qrAlgorithm==EigenDenseQR)
origCol=qrJT.colsPermutation().indices()[row];
#ifdef EIGEN_SPARSEQR_COMPATIBLE
else if(qrAlgorithm==EigenSparseQR)
origCol=SqrJT.colsPermutation().indices()[row];
#endif
conflictGroups[j-rank].push_back(clist[origCol]);
}
}
int origCol;
if(qrAlgorithm==EigenDenseQR)
origCol=qrJT.colsPermutation().indices()[j];
#ifdef EIGEN_SPARSEQR_COMPATIBLE
else if(qrAlgorithm==EigenSparseQR)
origCol=SqrJT.colsPermutation().indices()[j];
#endif
conflictGroups[j-rank].push_back(clist[origCol]);
}
// try to remove the conflicting constraints and solve the
// system in order to check if the removed constraints were
// just redundant but not really conflicting
std::set<Constraint *> skipped;
SET_I satisfiedGroups;
while (1) {
std::map< Constraint *, SET_I > conflictingMap;
for (int i=0; i < conflictGroups.size(); i++) {
if (satisfiedGroups.count(i) == 0) {
for (int j=0; j < conflictGroups[i].size(); j++) {
Constraint *constr = conflictGroups[i][j];
if (constr->getTag() != 0) // exclude constraints tagged with zero
conflictingMap[constr].insert(i);
}
}
}
if (conflictingMap.empty())
break;
int maxPopularity = 0;
Constraint *mostPopular = NULL;
for (std::map< Constraint *, SET_I >::const_iterator it=conflictingMap.begin();
it != conflictingMap.end(); it++) {
if (it->second.size() > maxPopularity ||
(it->second.size() == maxPopularity && mostPopular &&
it->first->getTag() > mostPopular->getTag())) {
mostPopular = it->first;
maxPopularity = it->second.size();
}
}
if (maxPopularity > 0) {
skipped.insert(mostPopular);
for (SET_I::const_iterator it=conflictingMap[mostPopular].begin();
it != conflictingMap[mostPopular].end(); it++)
satisfiedGroups.insert(*it);
}
}
std::vector<Constraint *> clistTmp;
clistTmp.reserve(clist.size());
for (std::vector<Constraint *>::iterator constr=clist.begin();
constr != clist.end(); ++constr)
if (skipped.count(*constr) == 0)
clistTmp.push_back(*constr);
SubSystem *subSysTmp = new SubSystem(clistTmp, plist);
int res = solve(subSysTmp,true,alg,true);
if(debugMode==Minimal || debugMode==IterationLevel) {
std::string solvername;
switch (alg) {
case 0:
solvername = "BFGS";
break;
case 1: // solving with the LevenbergMarquardt solver
solvername = "LevenbergMarquardt";
break;
case 2: // solving with the BFGS solver
solvername = "DogLeg";
break;
}
Base::Console().Log("Sketcher::RedundantSolving-%s-\n",solvername.c_str());
}
if (res == Success) {
subSysTmp->applySolution();
for (std::set<Constraint *>::const_iterator constr=skipped.begin();
constr != skipped.end(); constr++) {
double err = (*constr)->error();
if (err * err < convergenceRedundant)
redundant.insert(*constr);
}
resetToReference();
if(debugMode==Minimal || debugMode==IterationLevel) {
Base::Console().Log("Sketcher Redundant solving: %d redundants\n",redundant.size());
}
std::vector< std::vector<Constraint *> > conflictGroupsOrig=conflictGroups;
conflictGroups.clear();
for (int i=conflictGroupsOrig.size()-1; i >= 0; i--) {
bool isRedundant = false;
for (int j=0; j < conflictGroupsOrig[i].size(); j++) {
if (redundant.count(conflictGroupsOrig[i][j]) > 0) {
isRedundant = true;
break;
}
}
if (!isRedundant)
conflictGroups.push_back(conflictGroupsOrig[i]);
else
constrNum--;
}
}
delete subSysTmp;
// simplified output of conflicting tags
SET_I conflictingTagsSet;
for (int i=0; i < conflictGroups.size(); i++) {
for (int j=0; j < conflictGroups[i].size(); j++) {
conflictingTagsSet.insert(conflictGroups[i][j]->getTag());
}
}
conflictingTagsSet.erase(0); // exclude constraints tagged with zero
conflictingTags.resize(conflictingTagsSet.size());
std::copy(conflictingTagsSet.begin(), conflictingTagsSet.end(),
conflictingTags.begin());
// output of redundant tags
SET_I redundantTagsSet;
for (std::set<Constraint *>::iterator constr=redundant.begin();
constr != redundant.end(); ++constr)
redundantTagsSet.insert((*constr)->getTag());
// remove tags represented at least in one non-redundant constraint
for (std::vector<Constraint *>::iterator constr=clist.begin();
constr != clist.end(); ++constr)
if (redundant.count(*constr) == 0)
redundantTagsSet.erase((*constr)->getTag());
redundantTags.resize(redundantTagsSet.size());
std::copy(redundantTagsSet.begin(), redundantTagsSet.end(),
redundantTags.begin());
if (paramsNum == rank && constrNum > rank) { // over-constrained
hasDiagnosis = true;
dofs = paramsNum - constrNum;
return dofs;
}
}
hasDiagnosis = true;
dofs = paramsNum - rank;
return dofs;
}
hasDiagnosis = true;
dofs = plist.size();
return dofs;
}
void System::clearSubSystems()
{
    isInit = false;
    // Delete the owned SubSystem objects, then drop the (now dangling)
    // pointers from each container in turn.
    free(subSystems);
    subSystems.clear();
    free(subSystemsAux);
    subSystemsAux.clear();
}
/**
 * One-dimensional line search along direction xdir, starting from the
 * subsystem's current parameter values.
 *
 * Brackets a minimum of the subsystem error with a triplet
 * f(alpha1) > f(alpha2) < f(alpha3), then places the final step at the
 * minimum of a quadratic fit through the three samples. On return the
 * subsystem's parameters are set to x0 + alphaStar * xdir.
 *
 * Returns the chosen step length alphaStar (0 if the quadratic fit
 * produced NaN).
 */
double lineSearch(SubSystem *subsys, Eigen::VectorXd &xdir)
{
    double f1,f2,f3,alpha1,alpha2,alpha3,alphaStar;

    // maxStep() bounds how far we may move along xdir.
    double alphaMax = subsys->maxStep(xdir);

    Eigen::VectorXd x0, x;

    //Save initial values
    subsys->getParams(x0);

    //Start at the initial position alpha1 = 0
    alpha1 = 0.;
    f1 = subsys->error();

    //Take a step of alpha2 = 1
    alpha2 = 1.;
    x = x0 + alpha2 * xdir;
    subsys->setParams(x);
    f2 = subsys->error();

    //Take a step of alpha3 = 2*alpha2
    alpha3 = alpha2*2;
    x = x0 + alpha3 * xdir;
    subsys->setParams(x);
    f3 = subsys->error();

    //Now reduce or lengthen alpha2 and alpha3 until the minimum is
    //Bracketed by the triplet f1>f2<f3
    while (f2 > f1 || f2 > f3) {
        if (f2 > f1) {
            //If f2 is greater than f1 then we shorten alpha2 and alpha3 closer to f1
            //Effectively both are shortened by a factor of two.
            alpha3 = alpha2;
            f3 = f2;
            alpha2 = alpha2 / 2;
            x = x0 + alpha2 * xdir;
            subsys->setParams(x);
            f2 = subsys->error();
        }
        else if (f2 > f3) {
            // Cannot lengthen past the feasible range; accept the bracket as-is.
            if (alpha3 >= alphaMax)
                break;
            //If f2 is greater than f3 then we increase alpha2 and alpha3 away from f1
            //Effectively both are lengthened by a factor of two.
            alpha2 = alpha3;
            f2 = f3;
            alpha3 = alpha3 * 2;
            x = x0 + alpha3 * xdir;
            subsys->setParams(x);
            f3 = subsys->error();
        }
    }

    //Get the alpha for the minimum f of the quadratic approximation
    alphaStar = alpha2 + ((alpha2-alpha1)*(f1-f3))/(3*(f1-2*f2+f3));

    //Guarantee that the new alphaStar is within the bracket
    if (alphaStar >= alpha3 || alphaStar <= alpha1)
        alphaStar = alpha2;

    // Clamp to the feasible range.
    if (alphaStar > alphaMax)
        alphaStar = alphaMax;

    // NaN guard: a degenerate quadratic (f1 - 2*f2 + f3 == 0) yields NaN.
    if (alphaStar != alphaStar)
        alphaStar = 0.;

    //Take a final step to alphaStar
    x = x0 + alphaStar * xdir;
    subsys->setParams(x);

    return alphaStar;
}
// Deletes every double pointer owned by the vector and leaves it empty.
void free(VEC_pD &doublevec)
{
    VEC_pD::iterator it = doublevec.begin();
    while (it != doublevec.end()) {
        delete *it;   // deleting a null pointer is a safe no-op
        ++it;
    }
    doublevec.clear();
}
/**
 * Deletes every constraint in the vector and empties it.
 *
 * Each element is cast back to its concrete type before deletion -
 * presumably because ~Constraint is not virtual, in which case deleting
 * through the base pointer would be undefined behavior; TODO confirm
 * against Constraint's declaration.
 */
void free(std::vector<Constraint *> &constrvec)
{
    for (std::vector<Constraint *>::iterator constr=constrvec.begin();
         constr != constrvec.end(); ++constr) {
        if (*constr) {
            // Dispatch on the runtime type tag to invoke the right destructor.
            switch ((*constr)->getTypeId()) {
                case Equal:
                    delete static_cast<ConstraintEqual *>(*constr);
                    break;
                case Difference:
                    delete static_cast<ConstraintDifference *>(*constr);
                    break;
                case P2PDistance:
                    delete static_cast<ConstraintP2PDistance *>(*constr);
                    break;
                case P2PAngle:
                    delete static_cast<ConstraintP2PAngle *>(*constr);
                    break;
                case P2LDistance:
                    delete static_cast<ConstraintP2LDistance *>(*constr);
                    break;
                case PointOnLine:
                    delete static_cast<ConstraintPointOnLine *>(*constr);
                    break;
                case Parallel:
                    delete static_cast<ConstraintParallel *>(*constr);
                    break;
                case Perpendicular:
                    delete static_cast<ConstraintPerpendicular *>(*constr);
                    break;
                case L2LAngle:
                    delete static_cast<ConstraintL2LAngle *>(*constr);
                    break;
                case MidpointOnLine:
                    delete static_cast<ConstraintMidpointOnLine *>(*constr);
                    break;
                case None:
                default:
                    // Unknown/untyped constraints go through the base delete.
                    delete *constr;
            }
        }
    }
    constrvec.clear();
}
/**
 * Deletes every SubSystem in the vector and empties it.
 *
 * Fix: the other free() overloads in this file clear the vector after
 * deleting its elements; this one previously left the caller holding a
 * vector of dangling pointers. Clearing here is backward compatible -
 * System::clearSubSystems() already called clear() redundantly afterwards.
 */
void free(std::vector<SubSystem *> &subsysvec)
{
    for (std::vector<SubSystem *>::iterator it=subsysvec.begin();
         it != subsysvec.end(); ++it)
        if (*it) delete *it;
    subsysvec.clear();
}
} //namespace GCS
| yantrabuddhi/FreeCAD | src/Mod/Sketcher/App/planegcs/GCS.cpp | C++ | lgpl-2.1 | 79,002 |
/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#include "ExampleDiffusion.h"
/**
* This function defines the valid parameters for
* this Kernel and their default values
*/
/**
 * ExampleDiffusion declares no input parameters of its own, so its valid
 * parameter set is exactly the one provided by the Diffusion base class.
 */
template<>
InputParameters validParams<ExampleDiffusion>()
{
  return validParams<Diffusion>();
}
// Constructor: delegate parameter handling to the Diffusion base class and
// couple in the "diffusivity" material property (one value per quadrature
// point) supplied by a Material object.
ExampleDiffusion::ExampleDiffusion(const InputParameters & parameters) :
    Diffusion(parameters),
    _diffusivity(getMaterialProperty<Real>("diffusivity"))
{}
Real
ExampleDiffusion::computeQpResidual()
{
  // _diffusivity holds one material property value per quadrature point.
  // Reuse the Diffusion kernel's residual and scale it by the value at the
  // current quadrature point rather than re-deriving it here.
  const Real diffusivity = _diffusivity[_qp];
  return diffusivity * Diffusion::computeQpResidual();
}
Real
ExampleDiffusion::computeQpJacobian()
{
  // Same pattern as computeQpResidual(): scale the base Diffusion Jacobian
  // by the material property value at the current quadrature point.
  const Real diffusivity = _diffusivity[_qp];
  return diffusivity * Diffusion::computeQpJacobian();
}
| backmari/moose | examples/ex20_user_objects/src/kernels/ExampleDiffusion.C | C++ | lgpl-2.1 | 2,042 |
REM # makecint demonstration for Symantec C++ 7.2
REM # Array.cxx as Dynamic Link Library
REM # Rename local sources to the .cxx/.h names expected by the build.
move Array.C Array.cxx
move Fundamen.h Fundament.h
move Fundamen.C Fundament.cxx
copy ..\Complex\Complex.cxx Complex.cxx
REM # NOTE(review): the copy above is immediately overwritten by the next
REM # line; confirm which of Complex.cxx / Complex.C is the intended source.
copy ..\Complex\Complex.C Complex.cxx
copy ..\Complex\Complex.h Complex.h
REM # Create Makefile
makecint -mk Makefile -dl Array.dll -I../Complex -H Fundament.h Array.h -C++ Fundament.cxx Array.cxx Complex.cxx
REM # Compile
smake clean
smake
REM # Test
cint -I../Complex ../Complex/Complex.cxx Fundament.cxx Array.cxx test.C
cint Array.dll test.C
//------------------------------------------------------------------------------
// CLING - the C++ LLVM-based InterpreterG :)
//
// This file is dual-licensed: you can choose to license it under the University
// of Illinois Open Source License or the GNU Lesser General Public License. See
// LICENSE.TXT for details.
//------------------------------------------------------------------------------
// RUN: cat %s | %cling 2>&1 | FileCheck %s
// XFAIL:*
// Exercises cling's .undo meta-command: unloading declarations must run
// their destructors and restore the interpreter state captured by
// .storeState / checked by .compareState.
extern "C" int printf(const char* fmt, ...);
// force emission of cxa_atexit such that it doesn't pollute the diff.
class MyClass{public: ~MyClass(){} }mm;
.storeState "preUnload"
class ClassWithDtor{
private:
  int N;
public:
  ClassWithDtor() : N(0){ N++; }
  ~ClassWithDtor() {
    N--;
    printf("Dtor called, N=%d\n", N);
  }
}; ClassWithDtor m;
.undo
//CHECK: Dtor called, N=0
.compareState "preUnload"
//CHECK-NOT: Differences
// Make sure that the static template member inits get unloaded correctly.
// See CodeGenModule::EmitCXXGlobalVarDeclInitFunc() - they get emitted *next*
// to GLOBAL__I_a, not as call nodes within GLOBAL__I_a.
.storeState "preUnload3"
.rawInput 1
struct XYZ {
  XYZ(int I = -10): m(I) {}
  int m;
};
template <typename T> struct S {
  static XYZ one;
  static XYZ two;
};
template <typename T> XYZ S<T>::one = XYZ(12);
template <typename T> XYZ S<T>::two = XYZ(17);
XYZ a = XYZ(12);
XYZ b = XYZ(12);
int T(){
  S<int> o;
  return o.one.m;
}
.rawInput 0
.undo 7
.compareState "preUnload3"
// Make sure we have exactly one symbol of ~X(), i.e. that the unloading does
// not remove it and CodeGen re-emits in upon seeing a new use in X c;
.storeState "preUnload2"
.rawInput 1
extern "C" int printf(const char*, ...);
struct X {
  X(): i(12) {}
  ~X() { static int I = 0; printf("~X: %d\n", ++I); }
  int i;
};
X a;
int S() {
  X b;
  return a.i + b.i;
}
.rawInput 0
S() // CHECK: (int) 24
.undo 3 // Remove up to "X a;"
// CHECK-NEXT: ~X: 1
// CHECK-NEXT: ~X: 2
X c;
.undo 3
// CHECK-NEXT: ~X: 3
.compareState "preUnload2"
.q
| perovic/cling | test/CodeUnloading/Dtors.C | C++ | lgpl-2.1 | 2,041 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<link rel="stylesheet" href="style.css" type="text/css">
<meta content="text/html; charset=iso-8859-1" http-equiv="Content-Type">
<link rel="Start" href="index.html">
<link rel="next" href="Pretty.MakeSetPrinter.html">
<link rel="Up" href="Pretty.html">
<link title="Index of types" rel=Appendix href="index_types.html">
<link title="Index of exceptions" rel=Appendix href="index_exceptions.html">
<link title="Index of values" rel=Appendix href="index_values.html">
<link title="Index of class methods" rel=Appendix href="index_methods.html">
<link title="Index of classes" rel=Appendix href="index_classes.html">
<link title="Index of class types" rel=Appendix href="index_class_types.html">
<link title="Index of modules" rel=Appendix href="index_modules.html">
<link title="Index of module types" rel=Appendix href="index_module_types.html">
<link title="Pretty" rel="Chapter" href="Pretty.html">
<link title="Errormsg" rel="Chapter" href="Errormsg.html">
<link title="Clist" rel="Chapter" href="Clist.html">
<link title="Stats" rel="Chapter" href="Stats.html">
<link title="Cil" rel="Chapter" href="Cil.html">
<link title="Formatcil" rel="Chapter" href="Formatcil.html">
<link title="Alpha" rel="Chapter" href="Alpha.html">
<link title="Cillower" rel="Chapter" href="Cillower.html">
<link title="Cfg" rel="Chapter" href="Cfg.html">
<link title="Dataflow" rel="Chapter" href="Dataflow.html">
<link title="Dominators" rel="Chapter" href="Dominators.html"><title>CIL API Documentation (version 1.3.7) : Pretty.MakeMapPrinter</title>
</head>
<body>
<div class="navbar"> <a href="Pretty.html">Up</a>
<a href="Pretty.MakeSetPrinter.html">Next</a>
</div>
<center><h1>Functor <a href="type_Pretty.MakeMapPrinter.html">Pretty.MakeMapPrinter</a></h1></center>
<br>
<pre><span class="keyword">module</span> MakeMapPrinter: <div class="sig_block"><code class="code">functor (</code><code class="code">Map</code><code class="code"> : </code><code class="code">sig</code><div class="sig_block"><pre><span class="keyword">type</span> <a name="TYPEkey"></a><code class="type"></code>key </pre>
<pre><span class="keyword">type</span> <a name="TYPEt"></a><code class="type">'a</code> t </pre>
<pre><span class="keyword">val</span> <a name="VALfold"></a>fold : <code class="type">(key -> 'a -> 'b -> 'b) -><br> 'a t -> 'b -> 'b</code></pre></div><code class="code">end</code><code class="code">) -> </code><code class="code">sig</code> <a href="Pretty.MakeMapPrinter.html">..</a> <code class="code">end</code></div></pre>Format maps.<br>
<table border="0" cellpadding="3" width="100%">
<tr>
<td align="left" valign="top" width="1%"><b>Parameters: </b></td>
<td>
<table class="paramstable">
<tr>
<td align="center" valign="top" width="15%">
<code>Map</code></td>
<td align="center" valign="top">:</td>
<td><code class="type">sig
type key
type 'a t
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
end</code>
</table>
</td>
</tr>
</table>
<hr width="100%">
<pre><span class="keyword">val</span> <a name="VALdocMap"></a>docMap : <code class="type">?sep:<a href="Pretty.html#TYPEdoc">Pretty.doc</a> -><br> (Map.key -> 'a -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a>) -> unit -> 'a Map.t -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a></code></pre><div class="info">
Format a map, analogous to docList.<br>
</div>
<pre><span class="keyword">val</span> <a name="VALd_map"></a>d_map : <code class="type">?dmaplet:(<a href="Pretty.html#TYPEdoc">Pretty.doc</a> -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a> -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a>) -><br> string -><br> (unit -> Map.key -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a>) -><br> (unit -> 'a -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a>) -> unit -> 'a Map.t -> <a href="Pretty.html#TYPEdoc">Pretty.doc</a></code></pre><div class="info">
Format a map, analogous to d_list.<br>
</div>
</body></html> | kitsune-dsu/kitsune-core | tools/cil-1.3.7/doc/api/Pretty.MakeMapPrinter.html | HTML | lgpl-3.0 | 4,053 |
/*
* File: ftk_source_dfb.h
* Author: Li XianJing <[email protected]>
* Brief: source to handle directfb event.
*
* Copyright (c) 2009 - 2010 Li XianJing <[email protected]>
*
* Licensed under the Academic Free License version 2.1
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* History:
* ================================================================
* 2009-11-28 Li XianJing <[email protected]> created
*
*/
#ifndef FTK_SOURCE_DFB_H
#define FTK_SOURCE_DFB_H

#include "ftk_event.h"
#include "ftk_source.h"
#include <directfb.h>

/*
 * Creates an event source that reads input events from the given DirectFB
 * event buffer and feeds them into the FTK main loop.
 * (Ownership/lifetime of the returned source follows the usual FtkSource
 * conventions - confirm against ftk_source.h.)
 */
FtkSource* ftk_source_dfb_create(IDirectFB* dfb, IDirectFBEventBuffer* event_buffer);

#endif/*FTK_SOURCE_DFB_H*/
| xuxiandi/ftk | src/backend/directfb/ftk_source_dfb.h | C | lgpl-3.0 | 1,359 |
<?php
function getUrlData($url,$sec)
{
	// Fetch $url with a raw HTTP GET over a plain socket and return the
	// response body (headers stripped), or false if the connection fails.
	// $sec: connect timeout in seconds.
	$URL_parsed = parse_url($url);
	$host  = isset($URL_parsed['host'])  ? $URL_parsed['host']  : '';
	$port  = isset($URL_parsed['port'])  ? $URL_parsed['port']  : 0;
	$path  = isset($URL_parsed['path'])  ? $URL_parsed['path']  : '';
	$query = isset($URL_parsed['query']) ? $URL_parsed['query'] : '';

	if (!$host) $host = $_SERVER['HTTP_HOST'];
	if (!$port) $port = 80;
	if (!$path) $path = '/';   // fix: "GET  HTTP/1.1" would be a malformed request line

	// Fix: only append "?" when there actually is a query string.
	$target = $path;
	if ($query != '') $target .= '?'.$query;

	$out  = "GET ".$target." HTTP/1.1\r\n";
	$out .= "Host: ".$host."\r\n";
	$out .= "Connection: Close\r\n\r\n";

	$fp = fsockopen($host,$port,$errno,$errstr,$sec);
	if (!$fp)
	{
		return false;
	}

	fputs($fp, $out);
	$in = '';       // fix: was previously accumulated without initialization
	$body = false;
	while (!feof($fp)) {
		$s = fgets($fp, 128);
		if ( $body )
			$in .= $s;
		if ( $s == "\r\n" )   // blank line separates headers from body
			$body = true;
	}
	fclose($fp);
	// NOTE(review): HTTP/1.1 responses may use chunked transfer encoding,
	// which this parser does not decode - confirm the servers queried here
	// reply with a plain body under "Connection: Close".
	return $in;
}
function getRssArray($url,$tag)
{
	// Download the feed (10 second timeout) and split it on the opening
	// <$tag> marker; element [0] is everything before the first occurrence.
	$feed = getUrlData($url, 10);
	$open = '<'.$tag.'>';
	return explode($open, $feed);
}
function getRssTagValue($str,$tag)
{
	// Extract the text between the first <$tag> and the following </$tag>.
	$afterOpen = explode('<'.$tag.'>', $str);
	$inner     = explode('</'.$tag.'>', $afterOpen[1]);
	$value     = $inner[0];
	// If a UTF-8 round trip changes the string it is presumably in the
	// Korean legacy encoding, so convert it to UTF-8 instead.
	if (getUTFtoUTF($value) == $value) {
		$result = $value;
	} else {
		$result = getKRtoUTF($value);
	}
	return trim($result);
}
function getRssPageTitle($str,$tag)
{
	// Alias of getRssTagValue(), kept for readability at call sites.
	return getRssTagValue($str,$tag);
}
function getRssContent($str,$tag)
{
	// Decode the common HTML entities, extract the tag's value, then strip
	// any CDATA wrapper around the extracted text.
	$entities = array('&gt;', '&lt;', '&quot;', '&apos;');
	$plain    = array('>',    '<',    '"',      '\'');
	$str = str_replace($entities, $plain, $str);
	$str = getRssTagValue($str, $tag);
	return str_replace(array(']]>', '<![CDATA['), '', $str);
}
function getRssDomain($url)
{
	// Return the host part of $url without a leading "www." prefix.
	// Fix: the old implementation stripped "http://" and every "www."
	// occurrence by plain string replacement, which broke https:// URLs
	// (returned "https:") and mangled hosts merely containing "www.".
	$host = parse_url($url, PHP_URL_HOST);
	if (!$host) {
		// Scheme-less/relative input: fall back to the old splitting.
		$e = explode('/', str_replace('www.', '', str_replace('http://', '', $url)));
		return $e[0];
	}
	if (substr($host, 0, 4) == 'www.') {
		$host = substr($host, 4);
	}
	return $host;
}
function getJSONData($data,$f)
{
	// Naive extraction of the string value of field $f from a JSON blob:
	// normalize optional space after the colon, cut at the key, and take
	// everything up to the closing quote. Nested objects are not handled.
	$normalized = str_replace(': "', ':"', $data);
	$afterKey   = explode('"'.$f.'":"', $normalized);
	$value      = explode('"', $afterKey[1]);
	return $value[0];
}
?> | kiminmug/rb | _core/function/rss.func.php | PHP | lgpl-3.0 | 1,685 |
/**
* Copyright 2012-2013 University Of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.workflowsim.examples.scheduling;
import java.io.File;
import java.util.Calendar;
import java.util.List;
import org.cloudbus.cloudsim.Log;
import org.cloudbus.cloudsim.core.CloudSim;
import org.workflowsim.CondorVM;
import org.workflowsim.WorkflowDatacenter;
import org.workflowsim.Job;
import org.workflowsim.WorkflowEngine;
import org.workflowsim.WorkflowPlanner;
import org.workflowsim.utils.ClusteringParameters;
import org.workflowsim.utils.OverheadParameters;
import org.workflowsim.utils.Parameters;
import org.workflowsim.utils.ReplicaCatalog;
/**
* This RoundRobin Scheduling Algorithm
*
* @author Weiwei Chen
* @since WorkflowSim Toolkit 1.1
* @date Nov 9, 2013
*/
public class RoundRobinSchedulingAlgorithmExample extends DataAwareSchedulingAlgorithmExample {
////////////////////////// STATIC METHODS ///////////////////////
/**
* Creates main() to run this example This example has only one datacenter
* and one storage
*/
public static void main(String[] args) {
try {
// First step: Initialize the WorkflowSim package.
/**
* However, the exact number of vms may not necessarily be vmNum If
* the data center or the host doesn't have sufficient resources the
* exact vmNum would be smaller than that. Take care.
*/
int vmNum = 10;//number of vms;
/**
* Should change this based on real physical path
*/
String daxPath = "/Users/weiweich/NetBeansProjects/WorkflowSim-1.0/config/dax/Montage_100.xml";
File daxFile = new File(daxPath);
if (!daxFile.exists()) {
Log.printLine("Warning: Please replace daxPath with the physical path in your working environment!");
return;
}
/**
* Since we are using HEFT planning algorithm, the scheduling
* algorithm should be static such that the scheduler would not
* override the result of the planner
*/
Parameters.SchedulingAlgorithm sch_method = Parameters.SchedulingAlgorithm.ROUNDROBIN;
Parameters.PlanningAlgorithm pln_method = Parameters.PlanningAlgorithm.INVALID;
ReplicaCatalog.FileSystem file_system = ReplicaCatalog.FileSystem.LOCAL;
/**
* No overheads
*/
OverheadParameters op = new OverheadParameters(0, null, null, null, null, 0);
/**
* No Clustering
*/
ClusteringParameters.ClusteringMethod method = ClusteringParameters.ClusteringMethod.NONE;
ClusteringParameters cp = new ClusteringParameters(0, 0, method, null);
/**
* Initialize static parameters
*/
Parameters.init(vmNum, daxPath, null,
null, op, cp, sch_method, pln_method,
null, 0);
ReplicaCatalog.init(file_system);
// before creating any entities.
int num_user = 1; // number of grid users
Calendar calendar = Calendar.getInstance();
boolean trace_flag = false; // mean trace events
// Initialize the CloudSim library
CloudSim.init(num_user, calendar, trace_flag);
WorkflowDatacenter datacenter0 = createDatacenter("Datacenter_0");
/**
* Create a WorkflowPlanner with one schedulers.
*/
WorkflowPlanner wfPlanner = new WorkflowPlanner("planner_0", 1);
/**
* Create a WorkflowEngine.
*/
WorkflowEngine wfEngine = wfPlanner.getWorkflowEngine();
/**
* Create a list of VMs.The userId of a vm is basically the id of
* the scheduler that controls this vm.
*/
List<CondorVM> vmlist0 = createVM(wfEngine.getSchedulerId(0), Parameters.getVmNum());
/**
* Submits this list of vms to this WorkflowEngine.
*/
wfEngine.submitVmList(vmlist0, 0);
/**
* Binds the data centers with the scheduler.
*/
wfEngine.bindSchedulerDatacenter(datacenter0.getId(), 0);
CloudSim.startSimulation();
List<Job> outputList0 = wfEngine.getJobsReceivedList();
CloudSim.stopSimulation();
printJobList(outputList0);
} catch (Exception e) {
Log.printLine("The simulation has been terminated due to an unexpected error");
}
}
}
| Nishi-Inc/WorkflowSim-1.0 | examples/org/workflowsim/examples/scheduling/RoundRobinSchedulingAlgorithmExample.java | Java | lgpl-3.0 | 5,250 |
package thebeast.pml.solve.weightedsat;
import thebeast.pml.*;
import thebeast.pml.solve.PropositionalModel;
import thebeast.pml.formula.FactorFormula;
import thebeast.util.Profiler;
import thebeast.util.NullProfiler;
import thebeast.util.HashMultiMapList;
import thebeast.util.Util;
import thebeast.nod.variable.RelationVariable;
import thebeast.nod.variable.IntVariable;
import thebeast.nod.variable.Index;
import thebeast.nod.expression.RelationExpression;
import thebeast.nod.statement.Interpreter;
import thebeast.nod.type.Heading;
import thebeast.nod.type.Attribute;
import thebeast.nod.util.TypeBuilder;
import thebeast.nod.util.ExpressionBuilder;
import thebeast.nod.value.TupleValue;
import thebeast.nod.value.ArrayValue;
import thebeast.nod.value.RelationValue;
import java.util.*;
/**
* A WeightedSatProblem represents a set of weighted clauses in CNF. For effiency it stores clauses
* in database tables.
*
* @author Sebastian Riedel
*/
public class WeightedSatProblem implements PropositionalModel {
private Model model;
private WeightedSatSolver solver;
private Scores scores;
private Weights weights;
private Profiler profiler = new NullProfiler();
private double epsilon = 0.0;
private boolean singleCallMode;
private int oldNumAtoms;
private HashMap<UserPredicate, RelationVariable>
mappings = new HashMap<UserPredicate, RelationVariable>(),
newMappings = new HashMap<UserPredicate, RelationVariable>();
private IntVariable atomCounter;
private GroundFormulas groundFormulas;
private GroundAtoms solution;
private GroundAtoms lastSolution;
private GroundAtoms atoms;
private HashMap<FactorFormula, RelationExpression>
groundingQueries = new HashMap<FactorFormula, RelationExpression>(),
newQueries = new HashMap<FactorFormula, RelationExpression>();
private HashMap<UserPredicate, RelationExpression>
getTrueIndices = new HashMap<UserPredicate, RelationExpression>(),
groundObjective = new HashMap<UserPredicate, RelationExpression>(),
scoresAndIndices = new HashMap<UserPredicate, RelationExpression>(),
removeFalseAtoms = new HashMap<UserPredicate, RelationExpression>(),
addTrueAtoms = new HashMap<UserPredicate, RelationExpression>();
private RelationExpression newAtomCostsQuery, newClausesQuery;
private RelationVariable
clauses, newClauses, groundedClauses,
newAtomCosts, oldAtomCosts, atomCosts, trueAtoms, falseAtoms, lastTrueIndices;
boolean changed = false;
private static Heading index_heading;
private static Heading clause_heading;
private Interpreter interpreter = TheBeast.getInstance().getNodServer().interpreter();
private ExpressionBuilder builder = TheBeast.getInstance().getNodServer().expressionBuilder();
private static Heading indexScore_heading;
private boolean buildLocalModel = false;
private WeightedSatGrounder grounder;
// Builds, once per class, the NoD relation headings shared by all
// instances: the clause table layout plus the (index) and (index, score)
// tuple layouts used to exchange atom indices with the solver.
static {
    TypeBuilder typeBuilder = new TypeBuilder(TheBeast.getInstance().getNodServer());
    typeBuilder.doubleType().att("weight").
        boolType().arrayType().arrayType().att("signs").
        intType().arrayType().arrayType().att("atoms").
        intType().att("ub").intType().att("lb").intType().att("disjunction").intType().att("index").
        relationType(1).att("items").relationType(4).att("constraints").
        relationType(4);
    clause_heading = typeBuilder.buildRelationType().heading();

    index_heading = typeBuilder.intType().att("index").relationType(1).buildRelationType().heading();

    indexScore_heading = typeBuilder.intType().att("index").doubleType().att("score").
        relationType(2).buildRelationType().heading();
}

/**
 * Creates a weighted SAT problem that will be solved with the given solver.
 *
 * @param solver the solver invoked by {@link #solve}.
 */
public WeightedSatProblem(WeightedSatSolver solver) {
    this.solver = solver;
    grounder = new WeightedSatGrounder();
}
/**
 * This returns a table that maps ground atoms to indices and a score.
 *
 * @param userPredicate the predicate we want the table for
 * @return a table with the following format: |arg1|arg2|...|argn|index|score|.
 */
public RelationVariable getMapping(UserPredicate userPredicate) {
    return mappings.get(userPredicate);
}

/**
 * Returns the table of new atom mappings for the given predicate, with the
 * same layout as {@link #getMapping} - presumably the mappings created by
 * the most recent grounding step (confirm against the grounder).
 *
 * @param pred the predicate we want the table for
 * @return the new-mapping table for the predicate.
 */
public RelationVariable getNewMapping(UserPredicate pred) {
    return newMappings.get(pred);
}

/**
 * This is the variable that represents the current count of atoms. It can be used
 * by grounding queries to assign fresh indices to newly created atoms
 * (confirm against WeightedSatGrounder).
 *
 * @return a NoD variable representing the current number of atoms.
 */
IntVariable getAtomCounter() {
    return atomCounter;
}
/**
 * Loads the given local scores, resets the solver and clears all internal
 * tables so a fresh problem can be built up.
 *
 * @param scores the local scores to copy into this problem.
 */
public void init(Scores scores) {
    this.scores.load(scores);
    solver.init();
    clear();
}

/**
 * Builds the local part of the model: for each hidden predicate the
 * atom mapping table is filled from the corresponding ground-objective
 * query.
 */
public void buildLocalModel() {
    // Remember the atom count so the next update() can tell which atoms are new.
    oldNumAtoms = atomCounter.value().getInt();
    for (UserPredicate predicate : model.getHiddenPredicates()) {
        RelationVariable target = mappings.get(predicate);
        interpreter.assign(target, groundObjective.get(predicate));
        //interpreter.insert(atomCosts, scoresAndIndices.get(predicate));
    }
    //interpreter.assign(newAtomCosts, newAtomCostsQuery);
    buildLocalModel = true;
}
/**
 * Runs the weighted SAT solver on the current clauses and writes the
 * resulting truth assignment back into the given ground atoms.
 *
 * @param solution the ground atoms to overwrite with the solver's result.
 */
public void solve(GroundAtoms solution) {
    this.solution.load(solution);
    profiler.start("updatesolver");
    updateSolver();
    profiler.end().start("solve");
    boolean[] result = solver.solve();
    profiler.end().start("filltables");
    //System.out.println(Arrays.toString(result));
    // Split the boolean assignment into tables of true and false atom indices.
    fillTrueFalseTables(result);
    profiler.end().start("extractresult");
    //System.out.println(trueAtoms.value());
    //System.out.println(falseAtoms.value());
    // Per predicate: remove atoms whose index went false, insert the true ones.
    for (UserPredicate pred : model.getHiddenPredicates()) {
        RelationVariable target = solution.getGroundAtomsOf(pred).getRelationVariable();
        interpreter.assign(target, removeFalseAtoms.get(pred));
        interpreter.insert(target, addTrueAtoms.get(pred));
    }
    profiler.end();
    //System.out.println(solution);
}

/**
 * A SAT assignment is always 0/1.
 *
 * @return false - this propositional model never yields fractional values.
 */
public boolean isFractional() {
    return false;
}
/**
 * Updates this problem with the given ground formulas using all global
 * factor formulas of the model.
 */
public void update(GroundFormulas formulas, GroundAtoms atoms) {
    update(formulas, atoms, model.getGlobalFactorFormulas());
    //System.out.println(formulas);
    //System.out.println(toString());
}

/**
 * Resets the atom counter to zero and empties all clause, cost and
 * mapping tables.
 */
private void clear() {
    interpreter.assign(atomCounter, builder.num(0).getInt());
    interpreter.clear(clauses);
    interpreter.clear(newClauses);
    interpreter.clear(groundedClauses);
    interpreter.clear(oldAtomCosts);
    interpreter.clear(atomCosts);
    for (UserPredicate pred : model.getHiddenPredicates()) {
        //interpreter.insert(this.mappings.get(pred), newMappings.get(pred));
        interpreter.clear(mappings.get(pred));
    }
}
/**
 * Grounds the given factors into weighted CNF clauses, refreshes the atom
 * costs, and determines whether the problem changed since the last call.
 *
 * @param formulas the ground formulas to ground clauses from.
 * @param atoms    the current ground atom assignment.
 * @param factors  the factor formulas to ground.
 */
public void update(GroundFormulas formulas, GroundAtoms atoms, Collection<FactorFormula> factors) {
    lastSolution.load(atoms, model.getHiddenPredicates());
    this.atoms.load(atoms);
    int oldNumClauses = clauses.value().size();
    if (!buildLocalModel)
        oldNumAtoms = atomCounter.value().getInt();
    else
        buildLocalModel = false;  // buildLocalModel() already recorded oldNumAtoms
    groundFormulas.load(formulas);
    interpreter.clear(newClauses);
    interpreter.clear(groundedClauses);
    if (!singleCallMode) {
        // Incremental mode: ground the factors, keep only clauses not yet
        // in the clause table, and append them.
        for (FactorFormula factor : factors) {
            interpreter.append(groundedClauses, groundingQueries.get(factor));
        }
        interpreter.assign(newClauses, newClausesQuery);
        //interpreter.
        // System.out.println("newClauses.byteSize() = " + newClauses.byteSize());
        interpreter.append(clauses, newClauses);
        // System.out.println("clauses.byteSize() = " + clauses.byteSize());
        interpreter.assign(oldAtomCosts, atomCosts);
        interpreter.clear(atomCosts);
        for (UserPredicate pred : model.getHiddenPredicates()) {
            interpreter.insert(atomCosts, scoresAndIndices.get(pred));
        }
        interpreter.assign(newAtomCosts, newAtomCostsQuery);
    } else {
        // Single-call mode: everything grounded in this call is "new".
        //System.out.println("Single-Call-Mode");
        for (FactorFormula factor : factors) {
            interpreter.append(newClauses, groundingQueries.get(factor));
        }
        //interpreter.assign(newClauses, newClausesQuery);
        //interpreter.
        // System.out.println("newClauses.byteSize() = " + newClauses.byteSize());
        //interpreter.assign(clauses, newClauses);
        // System.out.println("clauses.byteSize() = " + clauses.byteSize());
        interpreter.assign(oldAtomCosts, atomCosts);
        interpreter.clear(atomCosts);
        for (UserPredicate pred : model.getHiddenPredicates()) {
            interpreter.append(atomCosts, scoresAndIndices.get(pred));
        }
        interpreter.assign(newAtomCosts, atomCosts);
    }
    if (epsilon == 0.0)
        changed = clauses.value().size() > oldNumClauses || atomCounter.value().getInt() > oldNumAtoms;
    else {
        // Sum of absolute new scores as a measure of how much was added.
        double bound = 0;
        double[] scores = newAtomCosts.getDoubleColumn("score");
        for (double score : scores) bound += Math.abs(score);
        // NOTE(review): clause_heading declares a "weight" attribute, not
        // "score" - confirm that getDoubleColumn("score") on newClauses is
        // the intended column.
        scores = newClauses.getDoubleColumn("score");
        for (double score : scores) bound += Math.abs(score);
        // NOTE(review): this marks the problem as changed when the added
        // weight is *below* epsilon, the opposite direction of the
        // epsilon == 0 branch above - looks inverted, confirm semantics.
        changed = bound < epsilon;
    }
}

/**
 * @return whether the last update modified the problem (see update()).
 */
public boolean changed() {
    return changed;
}

// Closures are not used by the weighted SAT representation; intentional no-op.
public void setClosure(GroundAtoms closure) {

}
/**
 * Splits the solver's boolean assignment into the two index tables used
 * for result extraction: atom indices assigned true and those assigned
 * false.
 *
 * @param result one truth value per atom index, as returned by the solver.
 */
private void fillTrueFalseTables(boolean[] result) {
    int numTrue = 0;
    for (int i = 0; i < result.length; ++i) {
        if (result[i]) ++numTrue;
    }
    int[] positives = new int[numTrue];
    int[] negatives = new int[result.length - numTrue];
    int p = 0;
    int n = 0;
    for (int i = 0; i < result.length; ++i) {
        if (result[i]) {
            positives[p++] = i;
        } else {
            negatives[n++] = i;
        }
    }
    trueAtoms.assignByArray(positives, null);
    falseAtoms.assignByArray(negatives, null);
}
// SAT solutions are integral by construction; intentional no-op.
public void enforceIntegerSolution() {
}

/**
 * Replaces the grounding query for the given formula, optionally building
 * one that grounds the formula fully.
 */
public void setFullyGround(FactorFormula formula, boolean fullyGround) {
    groundingQueries.put(formula, grounder.createGroundingQuery(formula, groundFormulas, atoms, fullyGround, weights, this));
}

/**
 * @return the number of ground atoms (rows in the atom cost table).
 */
public int getGroundAtomCount() {
    return atomCosts.value().size();
}

/**
 * @return the number of ground clauses in the clause table.
 */
public int getGroundFormulaCount() {
    return clauses.value().size();
}

// No property string is provided by this implementation; callers must
// handle the null return.
public String getPropertyString() {
    return null;
}
public void configure(Model model, Weights weights) {
this.model = model;
this.weights = weights;
groundFormulas = new GroundFormulas(model, weights);
solution = model.getSignature().createGroundAtoms();
atoms = model.getSignature().createGroundAtoms();
lastSolution = model.getSignature().createGroundAtoms();
scores = new Scores(model, weights);
clauses = interpreter.createRelationVariable(clause_heading);
newClauses = interpreter.createRelationVariable(clause_heading);
groundedClauses = interpreter.createRelationVariable(clause_heading);
trueAtoms = interpreter.createRelationVariable(index_heading);
interpreter.addIndex(trueAtoms, "index", Index.Type.HASH, "index");
falseAtoms = interpreter.createRelationVariable(index_heading);
interpreter.addIndex(falseAtoms, "index", Index.Type.HASH, "index");
atomCounter = interpreter.createIntVariable();
atomCosts = interpreter.createRelationVariable(indexScore_heading);
interpreter.addIndex(atomCosts, "index", Index.Type.HASH, "index");
oldAtomCosts = interpreter.createRelationVariable(indexScore_heading);
interpreter.addIndex(oldAtomCosts, "index", Index.Type.HASH, "index");
newAtomCosts = interpreter.createRelationVariable(indexScore_heading);
interpreter.addIndex(newAtomCosts, "index", Index.Type.HASH, "index");
mappings = new HashMap<UserPredicate, RelationVariable>();
clauses = interpreter.createRelationVariable(clause_heading);
groundedClauses = interpreter.createRelationVariable(clause_heading);
lastTrueIndices = interpreter.createRelationVariable(index_heading);
for (UserPredicate pred : model.getHiddenPredicates()) {
RelationVariable mapping = interpreter.createRelationVariable(pred.getHeadingArgsIndexScore());
mappings.put(pred, mapping);
interpreter.addIndex(mapping, "args", Index.Type.HASH, pred.getHeading().getAttributeNames());
interpreter.addIndex(mapping, "index", Index.Type.HASH, "index");
//create query to update solutions
//remove wrong solutions
builder.expr(solution.getGroundAtomsOf(pred).getRelationVariable());
builder.expr(mapping).from("mapping").expr(falseAtoms).from("falseAtoms");
builder.intAttribute("mapping", "index").intAttribute("falseAtoms", "index").equality().where();
for (int i = 0; i < pred.getArity(); ++i) {
builder.id(pred.getColumnName(i)).attribute("mapping", pred.getAttribute(i));
}
builder.tuple(pred.getArity()).select().query();
builder.relationMinus();
removeFalseAtoms.put(pred, builder.getRelation());
//a query that produces a table with atoms to add
builder.expr(mapping).from("mapping").expr(trueAtoms).from("trueAtoms");
builder.intAttribute("mapping", "index").intAttribute("trueAtoms", "index").equality().where();
for (int i = 0; i < pred.getArity(); ++i) {
builder.id(pred.getColumnName(i)).attribute("mapping", pred.getAttribute(i));
}
builder.tuple(pred.getArity()).select().query();
addTrueAtoms.put(pred, builder.getRelation());
//a query that takes a solution (ground atoms) and creates a list of corresponding indices
builder.expr(mapping).from("mapping").
expr(lastSolution.getGroundAtomsOf(pred).getRelationVariable()).from("last");
for (int i = 0; i < pred.getArity(); ++i) {
builder.attribute("mapping", pred.getAttribute(i)).attribute("last", pred.getAttribute(i)).equality();
}
builder.and(pred.getArity()).where();
builder.id("index").intAttribute("mapping", "index").tuple(1).select().query();
getTrueIndices.put(pred, builder.getRelation());
//a query that selects the scores and indices from the mapping
builder.expr(mapping).from("mapping").
id("index").intAttribute("mapping", "index").
id("score").doubleAttribute("mapping", "score").tupleForIds().select().query();
scoresAndIndices.put(pred, builder.getRelation());
builder.expr(scores.getScoreRelation(pred)).from("scores");
for (Attribute attribute : pred.getHeading().attributes()) {
builder.id(attribute.name()).attribute("scores", attribute);
}
builder.id("index").expr(atomCounter).intPostInc();
builder.id("score").doubleAttribute("scores", "score");
builder.tupleForIds().select().query();
groundObjective.put(pred, builder.getRelation());
}
builder.expr(atomCosts).expr(oldAtomCosts).relationMinus();
newAtomCostsQuery = builder.getRelation();
for (FactorFormula formula : model.getGlobalFactorFormulas()) {
groundingQueries.put(formula, grounder.createGroundingQuery(formula, groundFormulas, atoms,false, weights, this));
//newQueries.put(formula, builder.expr(groundedClauses).expr(clauses).relationMinus().getRelation());
}
newClausesQuery = builder.expr(groundedClauses).expr(clauses).relationMinus().getRelation();
}
public void setProperty(PropertyName name, Object value) {
if (name.getHead().equals("solver")) {
if (name.isTerminal()) {
if ("maxwalksat".equals(value))
solver = new MaxWalkSat();
else if ("maxproduct".equals(value))
solver = new MaxProduct();
} else
solver.setProperty(name.getTail(), value);
} else if (name.getHead().equals("singleCallMode")) {
setSingleCallMode((Boolean) value);
} else if (name.getHead().equals("detWeight")) {
grounder.setDetWeight((Double)value);
}
}
  /**
   * @param name the property to read (ignored).
   * @return always null; property reading is not supported.
   */
  public Object getProperty(PropertyName name) {
    return null;
  }

  /**
   * Sets the profiler used for timing measurements and forwards it to the
   * underlying SAT solver if one has already been configured.
   *
   * @param profiler the profiler to use.
   */
  public void setProfiler(Profiler profiler) {
    this.profiler = profiler;
    if (solver != null) solver.setProfiler(profiler);
  }
  /**
   * Converts one tuple of the clause relation (columns "signs", "atoms",
   * "weight" and "constraints") into a {@link WeightedSatClause} for the
   * SAT solver. "signs" and "atoms" are parallel nested arrays: one inner
   * array per disjunction, one element per literal.
   *
   * @param tuple a tuple following the clause heading.
   * @return the corresponding weighted SAT clause.
   */
  private static WeightedSatClause toClause(TupleValue tuple) {
    //decode the literal signs, one boolean array per disjunction
    ArrayValue signs = (ArrayValue) tuple.element("signs");
    boolean[][] signsArr = new boolean[signs.size()][];
    for (int i = 0; i < signsArr.length; ++i) {
      ArrayValue disjunction = (ArrayValue) signs.element(i);
      signsArr[i] = new boolean[disjunction.size()];
      for (int j = 0; j < signsArr[i].length; ++j)
        signsArr[i][j] = disjunction.boolElement(j).getBool();
    }
    //decode the atom indices, parallel to the signs array
    ArrayValue indices = (ArrayValue) tuple.element("atoms");
    int[][] indicesArr = new int[signs.size()][];
    for (int i = 0; i < indicesArr.length; ++i) {
      ArrayValue disjunction = (ArrayValue) indices.element(i);
      indicesArr[i] = new int[disjunction.size()];
      for (int j = 0; j < indicesArr[i].length; ++j)
        indicesArr[i][j] = disjunction.intElement(j).getInt();
    }
    double weight = tuple.doubleElement("weight").getDouble();
    //cardinality constraints, grouped by the disjunction they belong to
    WeightedSatClause.Constraint[][] constraints = new WeightedSatClause.Constraint[indices.size()][];
    RelationValue cardConstraints = tuple.relationElement("constraints");
    HashMultiMapList<Integer, WeightedSatClause.Constraint> disjunction2Constraints =
        new HashMultiMapList<Integer, WeightedSatClause.Constraint>();
    for (TupleValue c : cardConstraints) {
      RelationValue items = c.relationElement("items");
      int[] itemIndices = new int[items.size()];
      int itemIndex = 0;
      for (TupleValue item : items) {
        itemIndices[itemIndex++] = item.intElement(0).getInt();
      }
      //lb/ub bound how many of the item atoms may be true
      int lb = c.intElement("lb").getInt();
      int ub = c.intElement("ub").getInt();
      int disjunctionIndex = c.intElement("disjunction").getInt();
      disjunction2Constraints.add(disjunctionIndex, new WeightedSatClause.Constraint(lb, ub, itemIndices));
    }
    //rows of constraints[] without any entry remain null
    for (Map.Entry<Integer, List<WeightedSatClause.Constraint>> entry : disjunction2Constraints.entrySet()) {
      constraints[entry.getKey()] = entry.getValue().toArray(new WeightedSatClause.Constraint[0]);
    }
    return new WeightedSatClause(weight, indicesArr, signsArr, constraints);
  }
  /**
   * Transfers all newly grounded atoms and clauses to the underlying weighted
   * SAT solver and seeds the solver states with the last solution.
   */
  private void updateSolver() {
    profiler.start("prepare-update");
    //collect the indices of all atoms true in the last solution
    interpreter.clear(lastTrueIndices);
    for (UserPredicate pred : model.getHiddenPredicates()) {
      interpreter.append(lastTrueIndices, getTrueIndices.get(pred));
    }
    int[] trueIndices = lastTrueIndices.getIntColumn("index");
    //number of atoms added since the last transfer; only used by the
    //commented-out sizing below
    int howMany = atomCounter.value().getInt() - oldNumAtoms;
    //arrange the new atom scores in atom-index order (relative to oldNumAtoms)
    double[] scores = newAtomCosts.getDoubleColumn("score");
    int[] indices = newAtomCosts.getIntColumn("index");
    double[] ordered = new double[scores.length];
    for (int i = 0; i < scores.length; ++i)
      ordered[indices[i] - oldNumAtoms] = scores[i];
    //boolean[] states = new boolean[howMany];
    boolean[] states = new boolean[atomCounter.value().getInt()];
    for (int trueIndex : trueIndices)
      states[trueIndex] = true;
    //Arrays.fill(states, true);
    solver.addAtoms(ordered);
    solver.setStates(states);
    //System.err.println("Before transformation we use " + Util.toMemoryString(Runtime.getRuntime().totalMemory()));
    //convert each new clause tuple into a solver clause object
    WeightedSatClause[] clauses = new WeightedSatClause[newClauses.value().size()];
    int i = 0;
    for (TupleValue tuple : newClauses.value()) {
      clauses[i++] = toClause(tuple);
    }
    profiler.end().start("transfer");
    if (singleCallMode){
      //in single-call mode the clause table is not needed after the transfer
      interpreter.clear(newClauses);
      interpreter.compactify(newClauses);
    }
    //System.err.println("transferring " + clauses.length + " clauses to solver");
    //System.err.println("Using " + Util.toMemoryString(Runtime.getRuntime().totalMemory()));
    solver.addClauses(clauses);
    profiler.end();
  }
  /**
   * @return the model this SAT problem was configured with.
   */
  public Model getModel() {
    return model;
  }

  /**
   * @return the underlying weighted SAT solver.
   */
  public WeightedSatSolver getSolver() {
    return solver;
  }

  /**
   * @return the score table used to build the atom costs.
   */
  public Scores getScores() {
    return scores;
  }

  /**
   * @return the weights used for scoring.
   */
  public Weights getWeights() {
    return weights;
  }
public String toString() {
StringBuffer result = new StringBuffer();
//result.append(groundedClauses.value());
//result.append(newClauses.value());
result.append(clauses.value());
result.append(atomCosts.value());
for (UserPredicate pred : model.getHiddenPredicates()) {
result.append(mappings.get(pred).value());
}
return result.toString();
}
  /**
   * @return true if clause tables are cleared after each transfer to the solver.
   */
  public boolean isSingleCallMode() {
    return singleCallMode;
  }

  /**
   * Controls whether the clause table is cleared and compacted after each
   * transfer to the solver (see {@code updateSolver}).
   *
   * @param singleCallMode true to enable single-call mode.
   */
  public void setSingleCallMode(boolean singleCallMode) {
    this.singleCallMode = singleCallMode;
  }
}
| 52nlp/thebeast | src/thebeast/pml/solve/weightedsat/WeightedSatProblem.java | Java | lgpl-3.0 | 20,419 |
/**
* Copyright 2016 Cisco Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function() {
  'use strict';

  // Specs for the `contenteditable` directive: it keeps an element's editable
  // HTML content in sync with the scope property bound through ng-model.
  describe('contenteditable directive', function() {
    var $compile, $scope;

    beforeEach(module('horizon.framework.widgets'));
    beforeEach(module('horizon.framework.widgets.contenteditable'));
    beforeEach(inject(function ($injector) {
      $compile = $injector.get('$compile');
      $scope = $injector.get('$rootScope').$new();
      // Model value the directive under test is bound to.
      $scope.testData = '';
    }));

    describe('using a model', function() {
      var element;
      beforeEach(function before() {
        // Compile an editable element bound to $scope.testData.
        element = $compile('<pre contenteditable ng-model="testData"></pre>')($scope);
        $scope.$digest();
      });

      it('should update the model when content is edited', function () {
        // Simulate a user edit: focus, change the DOM content, then blur.
        element.triggerHandler('focus');
        element.html('foo');
        $scope.$digest();
        element.triggerHandler('blur');
        expect($scope.testData).toBe('foo');
      });

      it('should update the view when model is changed', function () {
        // Model-to-view direction: a digest re-renders the element content.
        element.triggerHandler('focus');
        $scope.testData = 'spam';
        $scope.$digest();
        expect(element.html()).toBe('spam');
      });
    });

    it('should not do anything without an accompanying ng-model', function() {
      // Without ng-model the directive must not push DOM edits into the scope.
      var element = $compile('<pre contenteditable></pre>')($scope);
      $scope.$digest();
      element.triggerHandler('focus');
      element.html('bar');
      $scope.$digest();
      element.triggerHandler('blur');
      expect($scope.testData).toBe('');
    });
  });
})();
| kogotko/carburetor | static/framework/widgets/contenteditable/contenteditable.directive.spec.js | JavaScript | apache-2.0 | 2,093 |
<html lang="en" xml:lang="en" xmlns="http://www.w3.org/1999/xhtml">
<!--Form Location: https://new.aol.com/productsweb/?promocode=825345&ncid=txtlnkuswebr00000106--><head>
<title>AOL</title>
<meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="-1">
<meta http-equiv="X-UA-Compatible" content="IE=EmulateIE7">
<link rel="Stylesheet" type="text/css" href="https://s.aolcdn.com/os/productsregv2/regpath4.css">
<!--[if IE]><style type="text/css">div.errCont {padding-top:10px;}</style><![endif]--><meta name="viewport" content="width=760, initial-scale=0.4, minimum-scale=0.4">
<!-- Custom JavaScript --><meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="-1">
</head>
<body>
<div id="header-g3">
<div class="top-nav-signin">
<div class="top-nav-links">
<span class="aol-help"><a href="http://www.aol.com/" class="bigger-aol">Aol.</a> | <a href="http://help.aol.com/">Help</a></span>
<span class="sigin-link">Already have a username? <a href="https://my.screenname.aol.com/_cqr/login/login.psp?offerId=aol-com-jv3-en-us&sitedomain=startpage.aol.com&siteState=http://www.aol.com&lang=en&locale=us" class="login-link" id="loginLink">Sign In</a></span>
</div>
</div>
</div>
<div class="main-cont">
<div class="form-container">
<div class="top-edge-cont"></div>
<div class="mid-edge-cont">
<form name="initRegistrationActionForm" id="initRegistrationActionForm" action="/productsweb/subflows/FreeMemberRegistration/FreeAolRegistrationAction.do" class="mainCont" method="post" >
<div class="beforeRegFields">
</div>
<fieldset class="contBlock" id="createEmail">
<h3>Create Your Account</h3>
<h2 class="sect-headline">Let's create your account</h2>
<div id="name-cont">
<div id="fname-field">
<label for="firstName">
First Name
</label>
<input type="text" name="{actionForm.firstName}" id="firstName" maxlength="15"><div id="firstNameError" class="errorMsg"></div>
</div>
<div id="lname-field">
<label for="lastName">
Last Name
</label>
<input type="text" name="{actionForm.lastName}" id="lastName" maxlength="15"><div id="lastNameError" class="errorMsg"></div>
</div>
</div>
<div id="username-cont">
<label for="desiredSN" id="username-label">
<span class="callout">NEW!</span>
<span id="usernameLabelText">
Pick a username:
</span>
<a class="whatsNew" >What's new?</a>
</label>
<input type="text" name="{actionForm.desiredSN}" id="desiredSN" value="" maxlength="16"><span id="postInpSN">@aol.com</span>
<p id="check-avail-butt" class="phantomButt">
<span >CHECK</span>
</p>
<p id="change-link" class="changeLink"><a >Change</a></p>
</div>
<div id="usernameError" class="errorMsg"></div>
<div id="password-cont">
<input type="hidden" id="snChoice" name="snChoice" value="newEmail" class="hiddenInp"><label for="password">
Password
</label>
<input type="password" name="{actionForm.password}" id="password" maxlength="16"><input type="text" id="verifyPasswordHint" value="Retype password" readonly ><input type="password" name="{actionForm.verifyPassword}" id="verifyPassword" maxlength="16"><div id="passwordError" class="errorMsg"></div>
</div>
<div id="pwStrngthMeter"></div>
<br class="clearBreak"><div class="spacer"></div>
<div class="panel-nav-cont">
<span class="button-wrapper">
<a id="step-one" class="button next" ><span class="ltbtnedge">Next</span></a>
</span>
</div>
</fieldset>
<fieldset class="contBlock" id="aboutYou">
<h3>Tell Us About Yourself</h3>
<div id="conf-un-pi">
</div>
<h2 class="sect-headline">Okay, now let's get personal...</h2>
<label class="selectLabel" for="dob" id="dob-label">
Date of Birth
<a class="whatsNew" id="whats-new" >Why so Personal?</a>
</label>
<div id="dob">
<div id="month-cont" class="month-cont">
<div class="ipt"><label class="hidden-label" for="dobMonth">Select Month</label></div>
<input type="hidden" name="wlw-select_key:{actionForm.dobMonth}OldValue" value="true"><select name="wlw-select_key:{actionForm.dobMonth}" id="dobMonth" class="dobMonth"><option value="">Select Month</option>
<option value="01">January</option>
<option value="02">February</option>
<option value="03">March</option>
<option value="04">April</option>
<option value="05">May</option>
<option value="06">June</option>
<option value="07">July</option>
<option value="08">August</option>
<option value="09">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option></select>
</div>
<div id="day-cont" class="day-cont">
<input type="text" name="{actionForm.dobDay}" id="dobDay" value="Day (dd)" maxlength="2">
</div>
<div id="year-cont" class="year-cont">
<input type="text" name="{actionForm.dobYear}" id="dobYear" value="Year (yyyy)" maxlength="4">
</div>
<div id="dobError" class="errorMsg"></div>
</div>
<div class="gender-cont">
<label for="gender">
Gender
</label>
<div class="female-choice">
<input type="radio" name="wlw-radio_button_group_key:{actionForm.gender}" id="femaleChoice" value="Female"><span><span> Female</span></span>
</div>
<div class="male-choice">
<input type="radio" name="wlw-radio_button_group_key:{actionForm.gender}" id="maleChoice" value="Male"><span><span> Male</span></span>
</div>
<div id="genderError" class="errorMsg"></div>
</div>
<div class="zip-cont">
<label for="zipCode">
Zip Code
</label>
<input type="text" name="{actionForm.zipCode}" id="zipCode" class="bbWidth"><div id="zipCodeError" class="errorMsg"></div>
</div>
<h2 class="sect-headline secondary">and keep it secure</h2>
<h4>If you forget your password, you'll need this information to reset your account.</h4>
<div id="asq-toggle-sect">
<label class="selectLabel" for="acctSecurityQuestion">
Set a Security Question
</label>
<div class="asq-block">
<div id="asq-field-cont" class="asq-field-cont">
<input name="wlw-select_key:{actionForm.acctSecurityQuestion}OldValue" value="true" type="hidden"><input type="hidden" name="wlw-select_key:{actionForm.acctSecurityQuestion}OldValue" value="true"><select name="wlw-select_key:{actionForm.acctSecurityQuestion}" id="acctSecurityQuestion"><option value="">Select a Question</option>
<option value="What is your frequent flyer number?">What is your frequent flyer number?</option>
<option value="What is your library card number?">What is your library card number?</option>
<option value="In which city did your parents meet?">In which city did your parents meet?</option>
<option value="In what year was your mother born?">In what year was your mother born?</option>
<option value="What was your favorite childhood cartoon?">What was your favorite childhood cartoon?</option>
<option value="What was your favorite childhood book?">What was your favorite childhood book?</option>
<option value="In what year was your father born?">In what year was your father born?</option>
<option value="What was your childhood nickname?">What was your childhood nickname?</option>
<option value="What is your grandmother's first name?">What is your grandmother's first name?</option>
<option value="What is your father's middle name?">What is your father's middle name?</option>
<option value="In what city were you born?">In what city were you born?</option>
<option value="What is your mother's maiden name?">What is your mother's maiden name?</option>
<option value="What was the name of your first pet?">What was the name of your first pet?</option></select>
</div>
<div id="secQuestError" class="errorMsg"></div>
</div>
<input type="text" name="{actionForm.acctSecurityAnswer}" id="acctSecurityAnswer" value="Your Answer" maxlength="32"><div id="secAnswerError" class="errorMsg"></div>
</div>
<!--End asq-toggle-sect -->
<div id="altemail-toggle-sect">
<label for="altEMail">
Alternate Email<span class="opt-label">(optional)</span>
</label>
<input type="text" name="{actionForm.altEMail}" id="altEMail" maxlength="97"><div id="altEmailError" class="errorMsg"></div>
</div>
<div class="panel-nav-cont">
<a class="back" >< Back</a>
<span class="button-wrapper">
<a id="step-two" class="button next" ><span class="ltbtnedge">Next</span></a>
</span>
</div>
<div style="clear:both;"></div>
</fieldset>
<fieldset class="contBlock" id="verify">
<h3>Verify Your Registration</h3>
<div id="conf-un-ct"></div>
<h2 class="sect-headline">One last step, and you're done.</h2>
<div class="wordVerify" id="wordVerCont">
<div class="wordVerButtons">
<a id="refreshWordverBtn" >
<span class="has-graphic">Refresh<br> Image</span>
</a>
<a id="playAudBtn" >
<span class="has-graphic">Audio Assistance</span>
</a>
</div>
<img id="wordVerImage" src="#" alt="">
</div>
<label for="wordVerify" class="wordVer">
Enter what you see above
</label>
<input type="text" name="{actionForm.wordVerify}" id="wordVerify" maxlength="10"><div id="captchaError" class="errorMsg"></div>
<p>
By clicking "Sign Up" below, you electronically agree to our <a >Terms of Service</a> and <a >Privacy Policy</a> (the "Terms"); you acknowledge receipt of our Terms, and you agree to receive notices and disclosures from us electronically, including any updates of these Terms.
</p>
<div class="panel-nav-cont">
<a class="back" >< Back</a>
<span class="button-wrapper">
<a id="step-three" class="button signup" ><span class="ltbtnedge">Sign Up</span></a>
</span>
</div>
</fieldset>
<br class="clearBreak">
</form>
<div class="afterRegFields"><!--nothing yet--></div>
</div>
<div class="nav-dots"></div>
<div class="bottom-edge-cont"></div>
</div>
<div class="footer-nav">
<a href="http://www.aol.com">AOL Inc.</a> | <a href="http://legal.aol.com/TOS">Terms of Service</a> | <a href="http://privacy.aol.com">Privacy Policy</a> | <a href="http://help.aol.com/">Help</a> <span class="copy">©2011 AOL Inc. All Rights Reserved</span>
</div>
</div>
<div id="whatsnewOverlay" class="overlay">
<h2>What's new?</h2>
<p>Now you can get the perfect email address! AOL Mail now supports additional characters like dots and underscores.</p>
<p>Maybe you're in the mood for [email protected], feeling spunky with [email protected], or maybe you'd like to go formal with [email protected] — the choice is now yours.</p>
<span class="button-wrapper"><a class="button" ><span class="ltbtnedge">OK</span></a></span>
</div>
<div id="whyneededOverlay" class="overlay">
<h2>Why so Personal?</h2>
<p>You need to provide your date of birth in order to verify your identity in account management, and to ensure that you are eligible to use our products. Other data, such as Zip Code and Gender are used to personalize your AOL experience (e.g., display local weather and news).</p>
<p>The use of this information is governed by the <a >Privacy Policy</a>.</p>
<span class="button-wrapper"><a class="button" ><span class="ltbtnedge">OK</span></a></span>
</div>
</body>
</html>
| plxaye/chromium | src/chrome/test/data/autofill/heuristics/input/08_register_aol.com.html | HTML | apache-2.0 | 13,003 |
/*
* Copyright (c) 2006-2021, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2020-10-14 Dozingfiretruck first version
*/
#ifndef __UART_CONFIG_H__
#define __UART_CONFIG_H__
#include <rtthread.h>
#ifdef __cplusplus
extern "C" {
#endif
/* LPUART1: device descriptor plus optional RX DMA channel. Each *_CONFIG
 * macro is an initializer list for the driver's configuration struct; all of
 * them are guarded with #ifndef so a board file may override them before
 * including this header. */
#if defined(BSP_USING_LPUART1)
#ifndef LPUART1_CONFIG
#define LPUART1_CONFIG                                              \
    {                                                               \
        .name = "lpuart1",                                          \
        .Instance = LPUART1,                                        \
        .irq_type = LPUART1_IRQn,                                   \
    }
#endif /* LPUART1_CONFIG */
#if defined(BSP_LPUART1_RX_USING_DMA)
#ifndef LPUART1_DMA_CONFIG
#define LPUART1_DMA_CONFIG                                          \
    {                                                               \
        .Instance = LPUART1_RX_DMA_INSTANCE,                        \
        .request = LPUART1_RX_DMA_REQUEST,                          \
        .dma_rcc = LPUART1_RX_DMA_RCC,                              \
        .dma_irq = LPUART1_RX_DMA_IRQ,                              \
    }
#endif /* LPUART1_DMA_CONFIG */
#endif /* BSP_LPUART1_RX_USING_DMA */
#endif /* BSP_USING_LPUART1 */

/* USART1: device descriptor and optional RX/TX DMA channels. NOTE(review):
 * unlike the LPUART1 block above, the UART1-3 DMA guards are NOT nested inside
 * the corresponding BSP_USING_UARTx guard - presumably intentional, verify
 * against other RT-Thread BSPs before changing. */
#if defined(BSP_USING_UART1)
#ifndef UART1_CONFIG
#define UART1_CONFIG                                                \
    {                                                               \
        .name = "uart1",                                            \
        .Instance = USART1,                                         \
        .irq_type = USART1_IRQn,                                    \
    }
#endif /* UART1_CONFIG */
#endif /* BSP_USING_UART1 */
#if defined(BSP_UART1_RX_USING_DMA)
#ifndef UART1_DMA_RX_CONFIG
#define UART1_DMA_RX_CONFIG                                         \
    {                                                               \
        .Instance = UART1_RX_DMA_INSTANCE,                          \
        .request = UART1_RX_DMA_REQUEST,                            \
        .dma_rcc = UART1_RX_DMA_RCC,                                \
        .dma_irq = UART1_RX_DMA_IRQ,                                \
    }
#endif /* UART1_DMA_RX_CONFIG */
#endif /* BSP_UART1_RX_USING_DMA */
#if defined(BSP_UART1_TX_USING_DMA)
#ifndef UART1_DMA_TX_CONFIG
#define UART1_DMA_TX_CONFIG                                         \
    {                                                               \
        .Instance = UART1_TX_DMA_INSTANCE,                          \
        .request = UART1_TX_DMA_REQUEST,                            \
        .dma_rcc = UART1_TX_DMA_RCC,                                \
        .dma_irq = UART1_TX_DMA_IRQ,                                \
    }
#endif /* UART1_DMA_TX_CONFIG */
#endif /* BSP_UART1_TX_USING_DMA */

/* USART2: same structure as USART1. */
#if defined(BSP_USING_UART2)
#ifndef UART2_CONFIG
#define UART2_CONFIG                                                \
    {                                                               \
        .name = "uart2",                                            \
        .Instance = USART2,                                         \
        .irq_type = USART2_IRQn,                                    \
    }
#endif /* UART2_CONFIG */
#endif /* BSP_USING_UART2 */
#if defined(BSP_UART2_RX_USING_DMA)
#ifndef UART2_DMA_RX_CONFIG
#define UART2_DMA_RX_CONFIG                                         \
    {                                                               \
        .Instance = UART2_RX_DMA_INSTANCE,                          \
        .request = UART2_RX_DMA_REQUEST,                            \
        .dma_rcc = UART2_RX_DMA_RCC,                                \
        .dma_irq = UART2_RX_DMA_IRQ,                                \
    }
#endif /* UART2_DMA_RX_CONFIG */
#endif /* BSP_UART2_RX_USING_DMA */
#if defined(BSP_UART2_TX_USING_DMA)
#ifndef UART2_DMA_TX_CONFIG
#define UART2_DMA_TX_CONFIG                                         \
    {                                                               \
        .Instance = UART2_TX_DMA_INSTANCE,                          \
        .request = UART2_TX_DMA_REQUEST,                            \
        .dma_rcc = UART2_TX_DMA_RCC,                                \
        .dma_irq = UART2_TX_DMA_IRQ,                                \
    }
#endif /* UART2_DMA_TX_CONFIG */
#endif /* BSP_UART2_TX_USING_DMA */

/* USART3: same structure as USART1. */
#if defined(BSP_USING_UART3)
#ifndef UART3_CONFIG
#define UART3_CONFIG                                                \
    {                                                               \
        .name = "uart3",                                            \
        .Instance = USART3,                                         \
        .irq_type = USART3_IRQn,                                    \
    }
#endif /* UART3_CONFIG */
#endif /* BSP_USING_UART3 */
#if defined(BSP_UART3_RX_USING_DMA)
#ifndef UART3_DMA_RX_CONFIG
#define UART3_DMA_RX_CONFIG                                         \
    {                                                               \
        .Instance = UART3_RX_DMA_INSTANCE,                          \
        .request = UART3_RX_DMA_REQUEST,                            \
        .dma_rcc = UART3_RX_DMA_RCC,                                \
        .dma_irq = UART3_RX_DMA_IRQ,                                \
    }
#endif /* UART3_DMA_RX_CONFIG */
#endif /* BSP_UART3_RX_USING_DMA */
#if defined(BSP_UART3_TX_USING_DMA)
#ifndef UART3_DMA_TX_CONFIG
#define UART3_DMA_TX_CONFIG                                         \
    {                                                               \
        .Instance = UART3_TX_DMA_INSTANCE,                          \
        .request = UART3_TX_DMA_REQUEST,                            \
        .dma_rcc = UART3_TX_DMA_RCC,                                \
        .dma_irq = UART3_TX_DMA_IRQ,                                \
    }
#endif /* UART3_DMA_TX_CONFIG */
#endif /* BSP_UART3_TX_USING_DMA */
#ifdef __cplusplus
}
#endif
#endif
| armink/rt-thread | bsp/stm32/libraries/HAL_Drivers/config/wb/uart_config.h | C | apache-2.0 | 6,163 |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.balana.utils.policy.dto;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
/**
 * DTO describing an XACML &lt;Apply&gt; element: the function it applies, its
 * nested apply elements, and the attribute designators/selectors/values it
 * operates on. Used when building XACML policies programmatically.
 *
 * <p>Note on naming: for historical/API-compatibility reasons the
 * {@code setApplyElement}, {@code setAttributeValueElementDTO},
 * {@code setAttributeDesignators} and {@code setAttributeSelectors} methods
 * <em>append</em> a single element to the corresponding list rather than
 * replacing it.
 */
public class ApplyElementDTO {

    private int applyElementNumber;

    private String applyElementId;

    private String functionId;

    private String variableId;

    private String functionFunctionId;

    private int addApplyElementPageNumber;

    private int attributeDesignatorsElementCount;

    private int attributeValueElementCount;

    private int attributeSelectorElementCount;

    // Child element collections; final because they are only ever appended to.
    private final List<ApplyElementDTO> applyElements = new ArrayList<ApplyElementDTO>();

    private final List<AttributeDesignatorDTO> attributeDesignators = new ArrayList<AttributeDesignatorDTO>();

    private final List<AttributeSelectorDTO> attributeSelectors = new ArrayList<AttributeSelectorDTO>();

    private final List<AttributeValueElementDTO> attributeValueElementDTOs = new ArrayList<AttributeValueElementDTO>();

    public String getApplyElementId() {
        return applyElementId;
    }

    public void setApplyElementId(String applyElementId) {
        this.applyElementId = applyElementId;
    }

    public int getAddApplyElementPageNumber() {
        return addApplyElementPageNumber;
    }

    public void setAddApplyElementPageNumber(int addApplyElementPageNumber) {
        this.addApplyElementPageNumber = addApplyElementPageNumber;
    }

    public List<ApplyElementDTO> getApplyElements() {
        return applyElements;
    }

    /**
     * Appends a nested apply element (despite the "set" name, this adds).
     *
     * @param applyElement the nested apply element to add.
     */
    public void setApplyElement(ApplyElementDTO applyElement) {
        this.applyElements.add(applyElement);
    }

    public List<AttributeValueElementDTO> getAttributeValueElementDTOs() {
        return attributeValueElementDTOs;
    }

    /**
     * Appends an attribute value element (despite the "set" name, this adds).
     *
     * @param attributeValueElementDTO the attribute value element to add.
     */
    public void setAttributeValueElementDTO(AttributeValueElementDTO attributeValueElementDTO) {
        this.attributeValueElementDTOs.add(attributeValueElementDTO);
    }

    public int getApplyElementNumber() {
        return applyElementNumber;
    }

    public void setApplyElementNumber(int applyElementNumber) {
        this.applyElementNumber = applyElementNumber;
    }

    public String getFunctionFunctionId() {
        return functionFunctionId;
    }

    public void setFunctionFunctionId(String functionFunctionId) {
        this.functionFunctionId = functionFunctionId;
    }

    public List<AttributeDesignatorDTO> getAttributeDesignators() {
        return attributeDesignators;
    }

    /**
     * Appends an attribute designator (despite the "set" name, this adds).
     *
     * @param attributeDesignator the attribute designator to add.
     */
    public void setAttributeDesignators(AttributeDesignatorDTO attributeDesignator) {
        this.attributeDesignators.add(attributeDesignator);
    }

    public List<AttributeSelectorDTO> getAttributeSelectors() {
        return attributeSelectors;
    }

    /**
     * Appends an attribute selector (despite the "set" name, this adds).
     *
     * @param attributeSelector the attribute selector to add.
     */
    public void setAttributeSelectors(AttributeSelectorDTO attributeSelector) {
        this.attributeSelectors.add(attributeSelector);
    }

    public String getVariableId() {
        return variableId;
    }

    public void setVariableId(String variableId) {
        this.variableId = variableId;
    }

    public String getFunctionId() {
        return functionId;
    }

    public void setFunctionId(String functionId) {
        this.functionId = functionId;
    }

    public int getAttributeDesignatorsElementCount() {
        return attributeDesignatorsElementCount;
    }

    public void setAttributeDesignatorsElementCount(int attributeDesignatorsElementCount) {
        this.attributeDesignatorsElementCount = attributeDesignatorsElementCount;
    }

    public int getAttributeValueElementCount() {
        return attributeValueElementCount;
    }

    public void setAttributeValueElementCount(int attributeValueElementCount) {
        this.attributeValueElementCount = attributeValueElementCount;
    }

    public int getAttributeSelectorElementCount() {
        return attributeSelectorElementCount;
    }

    public void setAttributeSelectorElementCount(int attributeSelectorElementCount) {
        this.attributeSelectorElementCount = attributeSelectorElementCount;
    }
}
| TU-Berlin-SNET/tresor-pdp-caching | modules/balana/modules/balana-utils/src/main/java/org/wso2/balana/utils/policy/dto/ApplyElementDTO.java | Java | apache-2.0 | 4,616 |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure.metrics.redis;
import io.lettuce.core.metrics.MicrometerCommandLatencyRecorder;
import io.lettuce.core.resource.ClientResources;
import org.junit.jupiter.api.Test;
import org.springframework.boot.actuate.autoconfigure.metrics.test.MetricsRun;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link LettuceMetricsAutoConfiguration}.
*
* @author Antonin Arquey
*/
class LettuceMetricsAutoConfigurationTests {

	// Context runner pre-loaded with the auto-configuration under test; each
	// test adds the further configuration it needs.
	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(LettuceMetricsAutoConfiguration.class));

	@Test
	void whenThereIsAMeterRegistryThenCommandLatencyRecorderIsAdded() {
		// With a MeterRegistry on the classpath/context, the Lettuce client
		// resources should record command latencies through Micrometer.
		this.contextRunner.with(MetricsRun.simple())
				.withConfiguration(AutoConfigurations.of(RedisAutoConfiguration.class)).run((context) -> {
					ClientResources clientResources = context.getBean(LettuceConnectionFactory.class)
							.getClientResources();
					assertThat(clientResources.commandLatencyRecorder())
							.isInstanceOf(MicrometerCommandLatencyRecorder.class);
				});
	}

	@Test
	void whenThereIsNoMeterRegistryThenClientResourcesCustomizationBacksOff() {
		// Without a MeterRegistry the customization must back off and leave
		// Lettuce's default latency recorder in place.
		this.contextRunner.withConfiguration(AutoConfigurations.of(RedisAutoConfiguration.class)).run((context) -> {
			ClientResources clientResources = context.getBean(LettuceConnectionFactory.class).getClientResources();
			assertThat(clientResources.commandLatencyRecorder())
					.isNotInstanceOf(MicrometerCommandLatencyRecorder.class);
		});
	}

}
| mdeinum/spring-boot | spring-boot-project/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/metrics/redis/LettuceMetricsAutoConfigurationTests.java | Java | apache-2.0 | 2,501 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.test.rest.yaml;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.client.NodeSelector;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Execution context passed across the REST tests.
* Holds the REST client used to communicate with elasticsearch.
* Caches the last obtained test response and allows to stash part of it within variables
* that can be used as input values in following requests.
*/
public class ClientYamlTestExecutionContext {
    private static final Logger logger = LogManager.getLogger(ClientYamlTestExecutionContext.class);
    // Content types that support the newline-delimited "streaming" body format,
    // used when a single API call carries multiple bodies (e.g. bulk requests).
    private static final XContentType[] STREAMING_CONTENT_TYPES = new XContentType[]{XContentType.JSON, XContentType.SMILE};
    private final Stash stash = new Stash();
    private final ClientYamlTestClient clientYamlTestClient;
    private final ClientYamlTestCandidate clientYamlTestCandidate;
    // Last response obtained from callApi; always stashed under "body" afterwards.
    private ClientYamlTestResponse response;
    // When true, a random supported content type is chosen for request bodies.
    private final boolean randomizeContentType;
    ClientYamlTestExecutionContext(
        ClientYamlTestCandidate clientYamlTestCandidate,
        ClientYamlTestClient clientYamlTestClient,
        boolean randomizeContentType) {
        this.clientYamlTestClient = clientYamlTestClient;
        this.clientYamlTestCandidate = clientYamlTestCandidate;
        this.randomizeContentType = randomizeContentType;
    }
    /**
     * Calls an elasticsearch api with the parameters and request body provided as arguments.
     * Saves the obtained response in the execution context.
     */
    public ClientYamlTestResponse callApi(String apiName, Map<String, String> params, List<Map<String, Object>> bodies,
                                          Map<String, String> headers) throws IOException {
        return callApi(apiName, params, bodies, headers, NodeSelector.ANY);
    }
    /**
     * Calls an elasticsearch api with the parameters and request body provided as arguments.
     * Saves the obtained response in the execution context.
     *
     * @param nodeSelector restricts which nodes of the cluster may receive the request
     */
    public ClientYamlTestResponse callApi(String apiName, Map<String, String> params, List<Map<String, Object>> bodies,
                                          Map<String, String> headers, NodeSelector nodeSelector) throws IOException {
        //makes a copy of the parameters before modifying them for this specific request
        Map<String, String> requestParams = new HashMap<>(params);
        requestParams.putIfAbsent("error_trace", "true"); // By default ask for error traces, this my be overridden by params
        // resolve stashed variables (e.g. $body) in parameter values
        for (Map.Entry<String, String> entry : requestParams.entrySet()) {
            if (stash.containsStashedValue(entry.getValue())) {
                entry.setValue(stash.getValue(entry.getValue()).toString());
            }
        }
        //make a copy of the headers before modifying them for this specific request
        Map<String, String> requestHeaders = new HashMap<>(headers);
        for (Map.Entry<String, String> entry : requestHeaders.entrySet()) {
            if (stash.containsStashedValue(entry.getValue())) {
                entry.setValue(stash.getValue(entry.getValue()).toString());
            }
        }
        HttpEntity entity = createEntity(bodies, requestHeaders);
        try {
            response = callApiInternal(apiName, requestParams, entity, requestHeaders, nodeSelector);
            return response;
        } catch (ClientYamlTestResponseException e) {
            // keep the error response around so following assertions can inspect it
            response = e.getRestTestResponse();
            throw e;
        } finally {
            // if we hit a bad exception the response is null
            Object responseBody = response != null ? response.getBody() : null;
            //we always stash the last response body
            stash.stashValue("body", responseBody);
            if (requestHeaders.isEmpty() == false) {
                stash.stashValue("request_headers", requestHeaders);
            }
        }
    }
    /**
     * Builds the HTTP entity for the request: a single body is sent as-is in any
     * supported content type; multiple bodies are concatenated in a streaming
     * content type, each followed by the stream separator.
     */
    private HttpEntity createEntity(List<Map<String, Object>> bodies, Map<String, String> headers) throws IOException {
        if (bodies.isEmpty()) {
            return null;
        }
        if (bodies.size() == 1) {
            XContentType xContentType = getContentType(headers, XContentType.values());
            BytesRef bytesRef = bodyAsBytesRef(bodies.get(0), xContentType);
            return new ByteArrayEntity(bytesRef.bytes, bytesRef.offset, bytesRef.length,
                    ContentType.create(xContentType.mediaTypeWithoutParameters(), StandardCharsets.UTF_8));
        } else {
            XContentType xContentType = getContentType(headers, STREAMING_CONTENT_TYPES);
            List<BytesRef> bytesRefList = new ArrayList<>(bodies.size());
            int totalBytesLength = 0;
            for (Map<String, Object> body : bodies) {
                BytesRef bytesRef = bodyAsBytesRef(body, xContentType);
                bytesRefList.add(bytesRef);
                // BytesRef.length is the number of valid bytes starting at offset;
                // +1 accounts for the stream separator appended after each body.
                totalBytesLength += bytesRef.length + 1;
            }
            byte[] bytes = new byte[totalBytesLength];
            int position = 0;
            for (BytesRef bytesRef : bytesRefList) {
                // Copy only the valid window [offset, offset + length). The previous
                // implementation iterated [offset, length) and sized the buffer as
                // (length - offset + 1), which is wrong for refs with a non-zero offset.
                System.arraycopy(bytesRef.bytes, bytesRef.offset, bytes, position, bytesRef.length);
                position += bytesRef.length;
                bytes[position++] = xContentType.xContent().streamSeparator();
            }
            return new ByteArrayEntity(bytes, ContentType.create(xContentType.mediaTypeWithoutParameters(), StandardCharsets.UTF_8));
        }
    }
    /**
     * Picks the request content type: an explicit Content-Type header wins;
     * otherwise a random supported type (when randomization is enabled) or JSON.
     */
    private XContentType getContentType(Map<String, String> headers, XContentType[] supportedContentTypes) {
        XContentType xContentType = null;
        String contentType = headers.get("Content-Type");
        if (contentType != null) {
            xContentType = XContentType.fromMediaType(contentType);
        }
        if (xContentType != null) {
            return xContentType;
        }
        if (randomizeContentType) {
            return RandomizedTest.randomFrom(supportedContentTypes);
        }
        return XContentType.JSON;
    }
    /**
     * Serializes a body map to bytes in the given content type, after replacing
     * any stashed variables in its values.
     */
    private BytesRef bodyAsBytesRef(Map<String, Object> bodyAsMap, XContentType xContentType) throws IOException {
        Map<String, Object> finalBodyAsMap = stash.replaceStashedValues(bodyAsMap);
        try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) {
            return BytesReference.bytes(builder.map(finalBodyAsMap)).toBytesRef();
        }
    }
    // pkg-private for testing
    ClientYamlTestResponse callApiInternal(String apiName, Map<String, String> params, HttpEntity entity,
                                           Map<String, String> headers, NodeSelector nodeSelector) throws IOException {
        return clientYamlTestClient.callApi(apiName, params, entity, headers, nodeSelector);
    }
    /**
     * Extracts a specific value from the last saved response
     */
    public Object response(String path) throws IOException {
        return response.evaluate(path, stash);
    }
    /**
     * Clears the last obtained response and the stashed fields
     */
    public void clear() {
        logger.debug("resetting client, response and stash");
        response = null;
        stash.clear();
    }
    public Stash stash() {
        return stash;
    }
    /**
     * Returns the current es version as a string
     */
    public Version esVersion() {
        return clientYamlTestClient.getEsVersion();
    }
    public Version masterVersion() {
        return clientYamlTestClient.getMasterVersion();
    }
    public String os() {
        return clientYamlTestClient.getOs();
    }
    public ClientYamlTestCandidate getClientYamlTestCandidate() {
        return clientYamlTestCandidate;
    }
}
| robin13/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java | Java | apache-2.0 | 8,689 |
//: interfaces/nesting/NestingInterfaces.java
package interfaces.nesting;
// Demonstrates the access levels available to interfaces nested inside a
// class, and how their implementations interact with that visibility.
class A {
  interface B { // package access: the default for a nested interface
    void f();
  }
  public class BImp implements B {
    public void f() {}
  }
  private class BImp2 implements B {
    public void f() {}
  }
  public interface C { // public: usable anywhere as A.C
    void f();
  }
  class CImp implements C {
    public void f() {}
  }
  private class CImp2 implements C {
    public void f() {}
  }
  private interface D { // private: only visible inside A
    void f();
  }
  private class DImp implements D {
    public void f() {}
  }
  public class DImp2 implements D {
    public void f() {}
  }
  // D may appear in a public signature even though callers outside A
  // cannot name the type.
  public D getD() { return new DImp2(); }
  private D dRef;
  // Only code inside A can do anything useful with a D reference.
  public void receiveD(D d) {
    dRef = d;
    dRef.f();
  }
}
// Interfaces can themselves nest interfaces; every nested member of an
// interface is implicitly public, so "private" is not allowed here.
interface E {
  interface G {
    void f();
  }
  // Redundant "public":
  public interface H {
    void f();
  }
  void g();
  // Cannot be private within an interface:
  //! private interface I {}
}
// Exercises implementing the nested interfaces declared in A and E from
// outside their defining types.
public class NestingInterfaces {
  public class BImp implements A.B {
    public void f() {}
  }
  class CImp implements A.C {
    public void f() {}
  }
  // Cannot implement a private interface except
  // within that interface's defining class:
  //! class DImp implements A.D {
  //! public void f() {}
  //! }
  class EImp implements E {
    public void g() {}
  }
  class EGImp implements E.G {
    public void f() {}
  }
  // An implementation of E does not need to implement its nested E.G,
  // but it may nest its own implementation of it.
  class EImp2 implements E {
    public void g() {}
    class EG implements E.G {
      public void f() {}
    }
  }
  public static void main(String[] args) {
    A a = new A();
    // Can't access A.D:
    //! A.D ad = a.getD();
    // Doesn't return anything but A.D:
    //! A.DImp2 di2 = a.getD();
    // Cannot access a member of the interface:
    //! a.getD().f();
    // Only another A can do anything with getD():
    A a2 = new A();
    a2.receiveD(a.getD());
  }
} ///:~
| vowovrz/thinkinj | thinkinj/src/main/java/interfaces/nesting/NestingInterfaces.java | Java | apache-2.0 | 1,905 |
/*
* 官网地站:http://www.mob.com
* 技术支持QQ: 4006852216
* 官方微信:ShareSDK (如果发布新版本的话,我们将会第一时间通过微信将版本更新内容推送给您。如果使用过程中有任何问题,也可以通过微信与我们取得联系,我们将会在24小时内给予回复)
*
* Copyright (c) 2013年 mob.com. All rights reserved.
*/
package cn.sharesdk.onekeyshare;
import static com.mob.tools.utils.BitmapHelper.captureView;
import java.util.ArrayList;
import java.util.HashMap;
import android.content.Context;
import android.graphics.Bitmap;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import cn.sharesdk.framework.Platform;
import cn.sharesdk.framework.PlatformActionListener;
import cn.sharesdk.framework.ShareSDK;
import com.mob.tools.utils.ResHelper;
/**
 * Entry point of one-key sharing.
 * <p>
 * Configure the share parameters through the setters, then call
 * {@link #show(Context)} to launch the share flow.
 */
public class OnekeyShare {
	// Holds every configured share parameter, keyed by name; consumed by the
	// theme implementation in show().
	private HashMap<String, Object> params;
	public OnekeyShare() {
		params = new HashMap<String, Object>();
		params.put("customers", new ArrayList<CustomerLogo>());
		params.put("hiddenPlatforms", new HashMap<String, String>());
	}
	/** address is the recipient address; only used for SMS and email, optional otherwise */
	public void setAddress(String address) {
		params.put("address", address);
	}
	/**
	 * title is the share title; used by Evernote, email, SMS, WeChat (friends,
	 * moments and favorites), Yixin (friends and moments), Renren and QZone;
	 * optional otherwise
	 */
	public void setTitle(String title) {
		params.put("title", title);
	}
	/** titleUrl is the web link of the title; only used by Renren and QZone, optional otherwise */
	public void setTitleUrl(String titleUrl) {
		params.put("titleUrl", titleUrl);
	}
	/** text is the shared text; required by every platform */
	public void setText(String text) {
		params.put("text", text);
	}
	/** returns the value of the text field */
	public String getText() {
		return params.containsKey("text") ? String.valueOf(params.get("text")) : null;
	}
	/** imagePath is a local image path; supported by every platform except LinkedIn */
	public void setImagePath(String imagePath) {
		if(!TextUtils.isEmpty(imagePath))
			params.put("imagePath", imagePath);
	}
	/** imageUrl is a network image URL; supported by Sina Weibo, Renren, QZone and LinkedIn */
	public void setImageUrl(String imageUrl) {
		if (!TextUtils.isEmpty(imageUrl))
			params.put("imageUrl", imageUrl);
	}
	/** url is used by WeChat (friends, moments, favorites) and Yixin (friends and moments); optional otherwise */
	public void setUrl(String url) {
		params.put("url", url);
	}
	/** filePath is the local path of the application to share; only used by WeChat (Yixin) friends and Dropbox, optional otherwise */
	public void setFilePath(String filePath) {
		params.put("filePath", filePath);
	}
	/** comment is my comment on this share; only used by Renren and QZone, optional otherwise */
	public void setComment(String comment) {
		params.put("comment", comment);
	}
	/** site is the name of the website sharing this content; only used by QZone, optional otherwise */
	public void setSite(String site) {
		params.put("site", site);
	}
	/** siteUrl is the address of the website sharing this content; only used by QZone, optional otherwise */
	public void setSiteUrl(String siteUrl) {
		params.put("siteUrl", siteUrl);
	}
	/** venue name used when sharing on Foursquare */
	public void setVenueName(String venueName) {
		params.put("venueName", venueName);
	}
	/** venue description used when sharing on Foursquare */
	public void setVenueDescription(String venueDescription) {
		params.put("venueDescription", venueDescription);
	}
	/** latitude of the share location; supported by Sina Weibo, Tencent Weibo and Foursquare */
	public void setLatitude(float latitude) {
		params.put("latitude", latitude);
	}
	/** longitude of the share location; supported by Sina Weibo, Tencent Weibo and Foursquare */
	public void setLongitude(float longitude) {
		params.put("longitude", longitude);
	}
	/** whether to share directly, skipping the edit page */
	public void setSilent(boolean silent) {
		params.put("silent", silent);
	}
	/** sets the platform pre-selected on the edit page */
	public void setPlatform(String platform) {
		params.put("platform", platform);
	}
	/** sets the application download URL for KakaoTalk */
	public void setInstallUrl(String installurl) {
		params.put("installurl", installurl);
	}
	/** sets the application launch URL for KakaoTalk */
	public void setExecuteUrl(String executeurl) {
		params.put("executeurl", executeurl);
	}
	/** sets the music URL for WeChat sharing */
	public void setMusicUrl(String musicUrl) {
		params.put("musicUrl", musicUrl);
	}
	/** sets a custom external callback */
	public void setCallback(PlatformActionListener callback) {
		params.put("callback", callback);
	}
	/** returns the action callback */
	public PlatformActionListener getCallback() {
		return ResHelper.forceCast(params.get("callback"));
	}
	/** sets the callback used, while sharing, to customize the content per platform */
	public void setShareContentCustomizeCallback(ShareContentCustomizeCallback callback) {
		params.put("customizeCallback", callback);
	}
	/** returns the callback that customizes content per platform */
	public ShareContentCustomizeCallback getShareContentCustomizeCallback() {
		return ResHelper.forceCast(params.get("customizeCallback"));
	}
	/** adds a custom icon with a click handler; may be called repeatedly to add several */
	public void setCustomerLogo(Bitmap logo, String label, OnClickListener ocl) {
		CustomerLogo cl = new CustomerLogo();
		cl.logo = logo;
		cl.label = label;
		cl.listener = ocl;
		ArrayList<CustomerLogo> customers = ResHelper.forceCast(params.get("customers"));
		customers.add(cl);
	}
	/** master switch: disables SSO if authorization is required before sharing */
	public void disableSSOWhenAuthorize() {
		params.put("disableSSO", true);
	}
	/** sets the network URL of a video */
	public void setVideoUrl(String url) {
		params.put("url", url);
		params.put("shareType", Platform.SHARE_VIDEO);
	}
	/** shows the edit page in dialog mode */
	@Deprecated
	public void setDialogMode() {
		params.put("dialogMode", true);
	}
	/** adds a platform to be hidden from the share menu */
	public void addHiddenPlatform(String platform) {
		HashMap<String, String> hiddenPlatforms = ResHelper.forceCast(params.get("hiddenPlatforms"));
		hiddenPlatforms.put(platform, platform);
	}
	/** sets a view to capture and share as an image; a SurfaceView cannot be captured */
	public void setViewToShare(View viewToShare) {
		try {
			Bitmap bm = captureView(viewToShare, viewToShare.getWidth(), viewToShare.getHeight());
			params.put("viewToShare", bm);
		} catch (Throwable e) {
			e.printStackTrace();
		}
	}
	/** shares multiple images on Tencent Weibo */
	public void setImageArray(String[] imageArray) {
		params.put("imageArray", imageArray);
	}
	/** also shares the same content to Tencent Weibo when sharing to QQ or QZone */
	public void setShareToTencentWeiboWhenPerformingQQOrQZoneSharing() {
		params.put("isShareTencentWeibo", true);
	}
	/** sets the style of the share UI; currently only one theme exists, so this need not be called */
	public void setTheme(OnekeyShareTheme theme) {
		params.put("theme", theme.getValue());
	}
	@SuppressWarnings("unchecked")
	public void show(Context context) {
		HashMap<String, Object> shareParamsMap = new HashMap<String, Object>();
		shareParamsMap.putAll(params);
		ShareSDK.initSDK(context.getApplicationContext());
		// statistics event: the share menu was opened
		ShareSDK.logDemoEvent(1, null);
		int iTheme = 0;
		try {
			iTheme = ResHelper.parseInt(String.valueOf(shareParamsMap.remove("theme")));
		} catch (Throwable t) {}
		OnekeyShareTheme theme = OnekeyShareTheme.fromValue(iTheme);
		OnekeyShareThemeImpl themeImpl = theme.getImpl();
		themeImpl.setShareParamsMap(shareParamsMap);
		themeImpl.setDialogMode(shareParamsMap.containsKey("dialogMode") ? ((Boolean) shareParamsMap.remove("dialogMode")) : false);
		themeImpl.setSilent(shareParamsMap.containsKey("silent") ? ((Boolean) shareParamsMap.remove("silent")) : false);
		themeImpl.setCustomerLogos((ArrayList<CustomerLogo>) shareParamsMap.remove("customers"));
		themeImpl.setHiddenPlatforms((HashMap<String, String>) shareParamsMap.remove("hiddenPlatforms"));
		themeImpl.setPlatformActionListener((PlatformActionListener) shareParamsMap.remove("callback"));
		themeImpl.setShareContentCustomizeCallback((ShareContentCustomizeCallback) shareParamsMap.remove("customizeCallback"));
		if (shareParamsMap.containsKey("disableSSO") ? ((Boolean) shareParamsMap.remove("disableSSO")) : false) {
			themeImpl.disableSSO();
		}
		themeImpl.show(context.getApplicationContext());
	}
}
| gulingfengze/Guide-AR | src/cn/sharesdk/onekeyshare/OnekeyShare.java | Java | apache-2.0 | 8,827 |
/*
* Copyright (c) 2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package hadoop
package good
// Scala
import scala.collection.mutable.Buffer
// Specs2
import org.specs2.mutable.Specification
// Scalding
import com.twitter.scalding._
// Cascading
import cascading.tuple.TupleEntry
// This project
import JobSpecHelpers._
/**
* Holds the input and expected data
* for the test.
*/
object CljTomcatSendgridEventSpec {
  // A single Clojure-Tomcat access-log line whose final field is a
  // base64-encoded Sendgrid "processed" webhook payload.
  val lines = Lines(
    "2014-10-09 16:28:31 - 13 255.255.255.255 POST 255.255.255.255 /com.sendgrid/v3 404 - - aid=email&cv=clj-0.6.0-tom-0.0.4&nuid=- - - - application%2Fjson W3siZW1haWwiOiJleGFtcGxlQHRlc3QuY29tIiwidGltZXN0YW1wIjoxNDQ2NTQ5NjE1LCJzbXRwLWlkIjoiXHUwMDNjMTRjNWQ3NWNlOTMuZGZkLjY0YjQ2OUBpc210cGQtNTU1XHUwMDNlIiwiZXZlbnQiOiJwcm9jZXNzZWQiLCJjYXRlZ29yeSI6ImNhdCBmYWN0cyIsInNnX2V2ZW50X2lkIjoic1pST3dNR01hZ0Znbk9FbVNkdmhpZz09Iiwic2dfbWVzc2FnZV9pZCI6IjE0YzVkNzVjZTkzLmRmZC42NGI0NjkuZmlsdGVyMDAwMS4xNjY0OC41NTE1RTBCODguMCJ9XQ=="
  )
  // Expected enriched-event output, one entry per TSV column, in column order.
  // Entries are position-sensitive: do not reorder or change the null count.
  val expected = List(
    "email",
    "srv",
    EtlTimestamp,
    "2014-10-09 16:28:31.000",
    null,
    "unstruct",
    null, // We can't predict the event_id
    null,
    null, // No tracker namespace
    "com.sendgrid-v3",
    "clj-0.6.0-tom-0.0.4",
    EtlVersion,
    null, // No user_id set
    "255.255.x.x",
    null,
    null,
    null,
    "-", // TODO: fix this, https://github.com/snowplow/snowplow/issues/1133
    null, // No geo-location for this IP address
    null,
    null,
    null,
    null,
    null,
    null,
    null, // No additional MaxMind databases used
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null, // Marketing campaign fields empty
    null, //
    null, //
    null, //
    null, //
    null, // No custom contexts
    null, // Structured event fields empty
    null, //
    null, //
    null, //
    null, //
    """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.sendgrid/processed/jsonschema/1-0-0","data":{"email":"[email protected]","timestamp":"2015-11-03T11:20:15.000Z","smtp-id":"\u003c14c5d75ce93.dfd.64b469@ismtpd-555\u003e","category":"cat facts","sg_event_id":"sZROwMGMagFgnOEmSdvhig==","sg_message_id":"14c5d75ce93.dfd.64b469.filter0001.16648.5515E0B88.0"}}}""",
    null, // Transaction fields empty
    null, //
    null, //
    null, //
    null, //
    null, //
    null, //
    null, //
    null, // Transaction item fields empty
    null, //
    null, //
    null, //
    null, //
    null, //
    null, // Page ping fields empty
    null, //
    null, //
    null, //
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null
  )
}
/**
* Integration test for the EtlJob:
*
* For details:
* https://forums.aws.amazon.com/thread.jspa?threadID=134017&tstart=0#
*/
class CljTomcatSendgridEventSpec extends Specification {
  // Runs the Scalding ETL job over the raw input line above and inspects the
  // three output sinks: good events, trapped exceptions and bad rows.
  "A job which processes a Clojure-Tomcat file containing a Sendgrid POST raw event representing 1 valid completed call" should {
    EtlJobSpec("clj-tomcat", "2", true, List("geo")).
      source(MultipleTextLineFiles("inputFolder"), CljTomcatSendgridEventSpec.lines).
      sink[TupleEntry](Tsv("outputFolder")){ buf : Buffer[TupleEntry] =>
        "correctly output 1 completed call" in {
          buf.size must_== 1
          val actual = buf.head
          // Compare field-by-field so a failure reports the offending column index.
          for (idx <- CljTomcatSendgridEventSpec.expected.indices) {
            actual.getString(idx) must beFieldEqualTo(CljTomcatSendgridEventSpec.expected(idx), withIndex = idx)
          }
        }
      }.
      sink[TupleEntry](Tsv("exceptionsFolder")){ trap =>
        "not trap any exceptions" in {
          trap must beEmpty
        }
      }.
      sink[String](Tsv("badFolder")){ error =>
        "not write any bad rows" in {
          error must beEmpty
        }
      }.
      run.
      finish
  }
}
| bigdecisions/snowplow | 3-enrich/scala-hadoop-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/good/CljTomcatSendgridEventSpec.scala | Scala | apache-2.0 | 4,891 |
<!DOCTYPE HTML>
<html>
<!--
Copyright 2007 The Closure Library Authors. All Rights Reserved.
Use of this source code is governed by an Apache 2.0 License.
See the COPYING file for details.
-->
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" >
  <title>Modifying Graphic Elements Demo</title>
<script type="text/javascript" src="../../base.js"></script>
<script type="text/javascript">
goog.require('goog.dom');
goog.require('goog.graphics');
</script>
<script type="text/javascript">
/**
* A rectangle, returned from graphics.drawRect.
* @type goog.graphics.RectElement.
*/
var rectElement;
/**
* An ellipse, returned from graphics.drawEllipse.
* @type goog.graphics.EllipseElement.
*/
var ellipseElement;
/**
* A path element, returned from graphics.drawPath.
* @type goog.graphics.PathElement.
*/
var pathElement;
/**
* A text element, returned from graphics.drawText
* @type goog.graphics.PathElement.
*/
var textElement;
var graphics, fill, stroke, font;
var rectColor = [];
var pathData1, pathData2;
function setupElements() {
graphics = goog.graphics.createGraphics(600, 200);
fill = new goog.graphics.SolidFill('yellow');
stroke = new goog.graphics.Stroke(2, 'green');
font = new goog.graphics.Font(26, 'Arial');
rectColor.push({s: stroke, f: fill});
rectColor.push({s: new goog.graphics.Stroke(4, 'blue'),
f: new goog.graphics.SolidFill('red')});
rectColor.push({s: null, f: new goog.graphics.SolidFill('#c0c0c0')});
rectColor.push({s: new goog.graphics.Stroke(0.5, 'red'), f: null});
var gradient = new goog.graphics.LinearGradient(0, 0, 0, 300, '#8080ff',
'#000080');
rectColor.push({s: new goog.graphics.Stroke(1, 'black'), f: gradient});
drawElements();
graphics.render(document.getElementById('shapes'));
}
function drawElements() {
rectElement = graphics.drawRect(30, 10, 100, 80, stroke, fill);
ellipseElement = graphics.drawEllipse(400, 150, 100, 40, stroke, fill);
pathData1 = graphics.createPath()
.moveTo(200, 180)
.lineTo(230, 100)
.lineTo(280, 30)
.lineTo(280, 80)
.lineTo(200, 90);
pathData2 = graphics.createPath()
.moveTo(200, 180)
.curveTo(220, 50, 260, 180, 280, 30);
pathElement = graphics.drawPath(pathData1, stroke, null);
textElement = graphics.drawTextOnLine(
document.getElementById('text').value,
0, 20, 590, 20, 'right', font, stroke, fill);
}
function setRectColors(index) {
var c = rectColor[index];
rectElement.setFill(c.f);
rectElement.setStroke(c.s);
ellipseElement.setFill(c.f);
ellipseElement.setStroke(c.s);
pathElement.setStroke(c.s);
textElement.setStroke(c.s);
textElement.setFill(c.f);
}
function setRectPosition(x, y) {
rectElement.setPosition(x, y);
}
function setRectSize(width, height) {
rectElement.setSize(width, height);
}
function setEllipseCenter(cx, cy) {
ellipseElement.setCenter(cx, cy);
}
function setEllipseRadius(rx, ry) {
ellipseElement.setRadius(rx, ry);
}
function setPath(i) {
pathElement.setPath(i == 1 ? pathData1 : pathData2);
}
function setText() {
textElement.setText(document.getElementById('text').value);
}
</script>
</head>
<body onload="setupElements()">
<div id="shapes"
style="border:1px solid black; width:600px; height:200px;"></div>
<table>
<tr valign="top">
<td>Colors (stroke/fill):</td>
<td>
<input type="button" value="Green(2):yellow" onclick="setRectColors(0)">
<input type="button" value="Blue(4):red" onclick="setRectColors(1)">
<input type="button" value="null:#c0c0c0" onclick="setRectColors(2)">
<input type="button" value="Red(0.5):null" onclick="setRectColors(3)">
<input type="button" value="Gradient" onclick="setRectColors(4)">
</td>
</tr>
<tr valign="top">
<td>Rectangle position:</td>
<td>
<input type="button" value="30,30" onclick="setRectPosition(30, 10)">
<input type="button" value="200,20" onclick="setRectPosition(200, 20)">
<input type="button" value="0,60" onclick="setRectPosition(0, 60)">
</td>
</tr>
<tr valign="top">
<td>Rectangle size:</td>
<td>
<input type="button" value="100,80" onclick="setRectSize(100, 80)">
<input type="button" value="120,120" onclick="setRectSize(120, 120)">
<input type="button" value="40,60" onclick="setRectSize(40, 60)">
</td>
</tr>
<tr valign="top">
<td>Ellipse center:</td>
<td>
<input type="button" value="400,150"
onclick="setEllipseCenter(400, 150)">
<input type="button" value="200,80"
onclick="setEllipseCenter(200, 80)">
<input type="button" value="350,200"
onclick="setEllipseCenter(350, 200)">
</td>
</tr>
<tr valign="top">
<td>Ellipse radius:</td>
<td>
<input type="button" value="100,40" onclick="setEllipseRadius(100, 40)">
<input type="button" value="80,80" onclick="setEllipseRadius(80, 80)">
        <input type="button" value="40,60" onclick="setEllipseRadius(40, 60)">
</td>
</tr>
<tr valign="top">
<td>Path:</td>
<td>
<input type="button" value="Line" onclick="setPath(1)">
<input type="button" value="Curve" onclick="setPath(2)">
</td>
</tr>
<tr valign="top">
<td>Text:</td>
<td>
<input type="text" id="text" value="Text Sample" onkeyup="setText()"
onchange="setText()" size="20">
</td>
</tr>
<tr valign="top">
<td colspan="2">
<input type="button" value="Clear Surface" onclick="graphics.clear()">
<input type="button" value="Redraw Elements"
onclick="graphics.clear(); drawElements()">
</td>
</tr>
</table>
</body>
</html>
| xs2ranjeet/mdv | third_party/closure/closure/goog/demos/graphics/modifyelements.html | HTML | apache-2.0 | 6,173 |
/*
* Copyright 2006-2015 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.ksb.security.soap;
import org.apache.wss4j.common.crypto.PasswordEncryptor;
/**
 * A no-op {@link PasswordEncryptor}: both {@code encrypt} and {@code decrypt}
 * return their argument unchanged, i.e. passwords are passed through in
 * plain text.
 * <p>
 * NOTE(review): the class name is missing an "n" ("Ecryptor"); it is left
 * unchanged here because renaming a public class would break existing
 * references.
 * <p>
 * Created by chrwad on 03/05/2015.
 */
public class PlainTextPasswordEcryptor implements PasswordEncryptor {
    @Override
    public String encrypt(String password) {
        // Identity transform: no encryption is applied.
        return password;
    }
    @Override
    public String decrypt(String encryptedPassword) {
        // Identity transform: the input is returned as-is.
        return encryptedPassword;
    }
}
| bhutchinson/rice | rice-middleware/ksb/client-impl/src/main/java/org/kuali/rice/ksb/security/soap/PlainTextPasswordEcryptor.java | Java | apache-2.0 | 1,074 |
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = getImplicitRoleForLink;
var _jsxAstUtils = require("jsx-ast-utils");
/**
 * Returns the implicit ARIA role for a link tag: 'link' when the element
 * has an `href` prop, otherwise the empty string (no implicit role).
 */
function getImplicitRoleForLink(attributes) {
  var href = (0, _jsxAstUtils.getProp)(attributes, 'href');
  return href ? 'link' : '';
}
package test.cdi.beans_g;
import javax.enterprise.context.SessionScoped;
import org.osgi.service.cdi.annotations.Reference;
/**
 * Test fixture: a session-scoped CDI bean holding an injected OSGi service
 * reference of type {@code Buz}.
 */
@SessionScoped
public class SessionScopedBean {
	@Reference
	Buz buz; // injected OSGi service reference
}
| psoreide/bnd | biz.aQute.bndlib.tests/test/test/cdi/beans_g/SessionScopedBean.java | Java | apache-2.0 | 200 |
#!/bin/bash
# Copies the TRIK v6.2 kit plugins into the macOS application bundle and
# rewrites their library load paths (fix_qreal_dependencies comes from
# mac_utils.sh). All expansions are quoted so paths containing spaces
# (common on macOS) do not undergo word splitting.
set -o nounset
set -o errexit

cd "$(dirname "$0")"
source "$INSTALLER_ROOT/utils/mac_utils.sh"

kit_plugins_dir="$BUNDLE_CONTENTS/Lib/plugins/tools/kitPlugins"
tools_dir="$BUNDLE_CONTENTS/Lib/plugins/tools"
mkdir -p "$kit_plugins_dir"

# Kit plugins: copy each one, then fix its install names.
for plugin in librobots-trik-v62-interpreter.dylib librobots-trik-v62-qts-generator.dylib; do
	cp "$BIN_DIR/plugins/tools/kitPlugins/$plugin" "$kit_plugins_dir/"
	fix_qreal_dependencies "$kit_plugins_dir/$plugin"
done

# Standalone tool plugin.
cp "$BIN_DIR/plugins/tools/librobots-trik-v62-runtime-uploader-plugin.dylib" "$tools_dir/"
fix_qreal_dependencies "$tools_dir/librobots-trik-v62-runtime-uploader-plugin.dylib"

# Cleaning up prebuild-common.sh results...
rm -rf "$PWD/../data/plugins/"
| RomanBelkov/qreal | installer/packages/trik-studio/ru.qreal.root.trik.core.v62/meta/prebuild-mac.sh | Shell | apache-2.0 | 976 |
require 'spec_helper'
# Unit tests for the nova::manage::network defined type: each context checks
# that a nova_network resource is declared with the expected parameters.
describe 'nova::manage::network' do
  let :facts do
    {:osfamily => 'RedHat'}
  end
  # The defined type expects the nova class to be declared beforehand.
  let :pre_condition do
    'include nova'
  end
  # Resource title doubles as the network label.
  let :title do
    'foo'
  end
  describe 'with only required parameters' do
    let :params do
      {
        :network => '10.0.0.0/24'
      }
    end
    # Defaults apply and the resource notifies the database sync exec.
    it { should contain_nova_network('foo').with(
      :ensure => 'present',
      :network => '10.0.0.0/24',
      :num_networks => 1,
      :project => nil,
      :notify => 'Exec[nova-db-sync]'
    ) }
  end
  describe 'when overriding num networks' do
    let :params do
      {
        :network => '10.0.0.0/20',
        :num_networks => 2
      }
    end
    it { should contain_nova_network('foo').with(
      :network => '10.0.0.0/20',
      :num_networks => 2
    ) }
  end
  describe 'when overriding projects' do
    let :params do
      {
        :network => '10.0.0.0/20',
        :project => 'foo'
      }
    end
    it { should contain_nova_network('foo').with(
      :network => '10.0.0.0/20',
      :project => 'foo'
    ) }
  end
end
| mattymo/fuel-docker-nailgun | etc/puppet/modules/nova/spec/defines/nova_manage_networks.rb | Ruby | apache-2.0 | 1,104 |
chorus.views.KaggleUserItem = chorus.views.Base.extend({
    constructorName: "KaggleUserItem",
    templateName: "kaggle/user_item",
    tagName: "div",

    // Extra template variables: the localized rank (with the numeric rank
    // wrapped in a span), the avatar URL with a default-image fallback, and
    // the user's display name.
    additionalContext: function() {
        var rankHtml = Handlebars.helpers.spanFor(this.model.get('rank'), {'class': 'kaggle_rank'});
        var rankString = t('kaggle.rank', {rankHtml: rankHtml});
        return {
            kaggleRank: new Handlebars.SafeString(rankString),
            iconUrl: this.model.get("gravatarUrl") || "/images/kaggle/default_user.jpeg",
            name: this.model.name()
        };
    }
});
| nvoron23/chorus | app/assets/javascripts/views/kaggle/kaggle_user_item_view.js | JavaScript | apache-2.0 | 514 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Immutable;
using System.Diagnostics;
using Microsoft.CodeAnalysis.Text;
using System.Collections.Generic;
namespace Microsoft.CodeAnalysis.CodeGen
{
internal partial struct SwitchIntegralJumpTableEmitter
{
        /// <summary>
        /// A contiguous range of sorted switch-case labels that is emitted as a single unit:
        /// either a run of conditional branches (degenerate bucket) or one computed-jump
        /// "switch" opcode (dense bucket). Buckets are value types over a shared, sorted
        /// label array; merging produces a new bucket over the same array.
        /// </summary>
        private struct SwitchBucket
        {
            // sorted case labels
            private readonly ImmutableArray<KeyValuePair<ConstantValue, object>> _allLabels;

            // range of sorted case labels within this bucket
            private readonly int _startLabelIndex;
            private readonly int _endLabelIndex;

            // cached at construction; see IsDegenerate
            private readonly bool _isKnownDegenerate;

            /// <summary>
            /// Degenerate buckets here are buckets with contiguous range of constants
            /// leading to the same label. Like:
            ///
            ///  case 0:
            ///  case 1:
            ///  case 2:
            ///  case 3:
            ///       DoOneThing();
            ///       break;
            ///
            ///  case 4:
            ///  case 5:
            ///  case 6:
            ///  case 7:
            ///       DoAnotherThing();
            ///       break;
            ///
            /// NOTE: A trivial bucket with only one case constant is by definition degenerate.
            /// </summary>
            internal bool IsDegenerate
            {
                get
                {
                    return _isKnownDegenerate;
                }
            }

            /// <summary>
            /// Creates a trivial single-label bucket, which is degenerate by definition.
            /// </summary>
            internal SwitchBucket(ImmutableArray<KeyValuePair<ConstantValue, object>> allLabels, int index)
            {
                _startLabelIndex = index;
                _endLabelIndex = index;
                _allLabels = allLabels;
                _isKnownDegenerate = true;
            }

            /// <summary>
            /// Creates a multi-label bucket (used by <see cref="TryMergeWith"/>); a merged
            /// bucket is not known to be degenerate.
            /// </summary>
            private SwitchBucket(ImmutableArray<KeyValuePair<ConstantValue, object>> allLabels, int startIndex, int endIndex)
            {
                Debug.Assert((uint)startIndex < (uint)endIndex);
                _startLabelIndex = startIndex;
                _endLabelIndex = endIndex;
                _allLabels = allLabels;
                _isKnownDegenerate = false;
            }

            /// <summary>
            /// Creates a bucket with an explicitly supplied degeneracy flag; a single-label
            /// range (startIndex == endIndex) must be flagged degenerate.
            /// </summary>
            internal SwitchBucket(ImmutableArray<KeyValuePair<ConstantValue, object>> allLabels, int startIndex, int endIndex, bool isDegenerate)
            {
                Debug.Assert((uint)startIndex <= (uint)endIndex);
                Debug.Assert((uint)startIndex != (uint)endIndex || isDegenerate);
                _startLabelIndex = startIndex;
                _endLabelIndex = endIndex;
                _allLabels = allLabels;
                _isKnownDegenerate = isDegenerate;
            }

            /// <summary>
            /// Number of case labels covered by this bucket (both endpoints inclusive).
            /// </summary>
            internal uint LabelsCount
            {
                get
                {
                    return (uint)(_endLabelIndex - _startLabelIndex + 1);
                }
            }

            /// <summary>
            /// The i-th (constant, label) pair of this bucket, relative to the bucket start.
            /// </summary>
            internal KeyValuePair<ConstantValue, object> this[int i]
            {
                get
                {
                    Debug.Assert(i < LabelsCount, "index out of range");
                    return _allLabels[i + _startLabelIndex];
                }
            }

            /// <summary>
            /// Width of the constant range [StartConstant, EndConstant], endpoints inclusive.
            /// For a dense bucket this is the size of the emitted jump table.
            /// </summary>
            internal ulong BucketSize
            {
                get
                {
                    return GetBucketSize(this.StartConstant, this.EndConstant);
                }
            }

            // if a bucket could be split into two degenerate ones
            // specifies a label index where the second bucket would start
            // -1 indicates that the bucket cannot be split into degenerate ones
            // 0 indicates that the bucket is already degenerate
            //
            // Code Review question: why are we supporting splitting only in two buckets. Why not in more?
            // Explanation:
            //      The input here is a "dense" bucket - the one that previous heuristics
            //      determined as not worth splitting.
            //
            //      A dense bucket has rough execution cost of 1 conditional branch (range check)
            //      and 1 computed branch (which cost roughly the same as conditional one or perhaps more).
            //      The only way to surely beat that cost via splitting is if the bucket can be
            //      split into 2 degenerate ones. Then we have just 2 conditional branches.
            //
            //      3 degenerate buckets would require up to 3 conditional branches.
            //      On some hardware computed jumps may cost significantly more than
            //      conditional ones (because they are harder to predict or whatever),
            //      so it could still be profitable, but I did not want to guess that.
            //
            //      Basically if we have 3 degenerate buckets that can be merged into a dense bucket,
            //      we prefer a dense bucket, which we emit as "switch" opcode.
            //
            internal int DegenerateBucketSplit
            {
                get
                {
                    if (IsDegenerate)
                    {
                        return 0;
                    }

                    Debug.Assert(_startLabelIndex != _endLabelIndex, "1-sized buckets should be already known as degenerate.");

                    var allLabels = this._allLabels;
                    var split = 0;
                    var lastConst = this.StartConstant;
                    // NOTE: label identity is compared by reference (!=) below; labels are
                    // opaque tokens, so reference equality is the intended notion of "same target".
                    var lastLabel = allLabels[_startLabelIndex].Value;

                    for (int idx = _startLabelIndex + 1; idx <= _endLabelIndex; idx++)
                    {
                        var switchLabel = allLabels[idx];

                        // a discontinuity is either a new target label or a gap in the constants
                        if (lastLabel != switchLabel.Value ||
                            !IsContiguous(lastConst, switchLabel.Key))
                        {
                            if (split != 0)
                            {
                                // found another discontinuity, so cannot be split
                                return -1;
                            }

                            split = idx;
                            lastLabel = switchLabel.Value;
                        }

                        lastConst = switchLabel.Key;
                    }

                    return split;
                }
            }

            /// <summary>
            /// True when nextConst immediately follows lastConst (range of size 2 with no gap).
            /// </summary>
            private bool IsContiguous(ConstantValue lastConst, ConstantValue nextConst)
            {
                if (!lastConst.IsNumeric || !nextConst.IsNumeric)
                {
                    return false;
                }

                return GetBucketSize(lastConst, nextConst) == 2;
            }

            /// <summary>
            /// Computes the inclusive width of [startConstant, endConstant]. Signed and
            /// unsigned ranges are handled separately; the signed case relies on unchecked
            /// wraparound to stay within ulong.
            /// </summary>
            private static ulong GetBucketSize(ConstantValue startConstant, ConstantValue endConstant)
            {
                Debug.Assert(!BucketOverflowUInt64Limit(startConstant, endConstant));
                Debug.Assert(endConstant.Discriminator == startConstant.Discriminator);

                ulong bucketSize;

                if (startConstant.IsNegativeNumeric || endConstant.IsNegativeNumeric)
                {
                    Debug.Assert(endConstant.Int64Value >= startConstant.Int64Value);
                    bucketSize = unchecked((ulong)(endConstant.Int64Value - startConstant.Int64Value + 1));
                }
                else
                {
                    Debug.Assert(endConstant.UInt64Value >= startConstant.UInt64Value);
                    bucketSize = endConstant.UInt64Value - startConstant.UInt64Value + 1;
                }

                return bucketSize;
            }

            // Check if bucket size exceeds UInt64.MaxValue
            // (only possible when the range spans the entire Int64 or UInt64 domain,
            // since size = end - start + 1 would then wrap).
            private static bool BucketOverflowUInt64Limit(ConstantValue startConstant, ConstantValue endConstant)
            {
                Debug.Assert(IsValidSwitchBucketConstantPair(startConstant, endConstant));

                if (startConstant.Discriminator == ConstantValueTypeDiscriminator.Int64)
                {
                    return startConstant.Int64Value == Int64.MinValue
                        && endConstant.Int64Value == Int64.MaxValue;
                }
                else if (startConstant.Discriminator == ConstantValueTypeDiscriminator.UInt64)
                {
                    return startConstant.UInt64Value == UInt64.MinValue
                        && endConstant.UInt64Value == UInt64.MaxValue;
                }

                return false;
            }

            // Virtual switch instruction has a max limit of Int32.MaxValue labels
            // Check if bucket size exceeds Int32.MaxValue
            private static bool BucketOverflow(ConstantValue startConstant, ConstantValue endConstant)
            {
                return BucketOverflowUInt64Limit(startConstant, endConstant)
                    || GetBucketSize(startConstant, endConstant) > Int32.MaxValue;
            }

            /// <summary>Index of the first label of this bucket in the shared label array.</summary>
            internal int StartLabelIndex
            {
                get
                {
                    return _startLabelIndex;
                }
            }

            /// <summary>Index of the last label of this bucket in the shared label array.</summary>
            internal int EndLabelIndex
            {
                get
                {
                    return _endLabelIndex;
                }
            }

            /// <summary>Smallest case constant in this bucket (labels are sorted).</summary>
            internal ConstantValue StartConstant
            {
                get
                {
                    return _allLabels[_startLabelIndex].Key;
                }
            }

            /// <summary>Largest case constant in this bucket (labels are sorted).</summary>
            internal ConstantValue EndConstant
            {
                get
                {
                    return _allLabels[_endLabelIndex].Key;
                }
            }

            /// <summary>
            /// A constant can participate in an integral bucket only if it is a valid
            /// switch-case constant and is neither null nor a string.
            /// </summary>
            private static bool IsValidSwitchBucketConstant(ConstantValue constant)
            {
                return constant != null
                    && SwitchConstantValueHelper.IsValidSwitchCaseLabelConstant(constant)
                    && !constant.IsNull
                    && !constant.IsString;
            }

            /// <summary>
            /// Both endpoints must be valid bucket constants of matching signedness.
            /// </summary>
            private static bool IsValidSwitchBucketConstantPair(ConstantValue startConstant, ConstantValue endConstant)
            {
                return IsValidSwitchBucketConstant(startConstant)
                    && IsValidSwitchBucketConstant(endConstant)
                    && startConstant.IsUnsigned == endConstant.IsUnsigned;
            }

            /// <summary>
            /// A bucket is considered sparse when fewer than half of the slots in its
            /// constant range are occupied by actual case labels.
            /// </summary>
            private static bool IsSparse(uint labelsCount, ulong bucketSize)
            {
                // TODO: consider changing threshold bucket density to 33%
                return bucketSize >= labelsCount * 2;
            }

            /// <summary>
            /// Merging two adjacent buckets is advantageous when the combined range does not
            /// overflow the switch-opcode limit and the merged bucket would not be sparse.
            /// </summary>
            internal static bool MergeIsAdvantageous(SwitchBucket bucket1, SwitchBucket bucket2)
            {
                var startConstant = bucket1.StartConstant;
                var endConstant = bucket2.EndConstant;

                if (BucketOverflow(startConstant, endConstant))
                {
                    // merged bucket would overflow
                    return false;
                }

                uint labelsCount = (uint)(bucket1.LabelsCount + bucket2.LabelsCount);
                ulong bucketSize = GetBucketSize(startConstant, endConstant);

                return !IsSparse(labelsCount, bucketSize);
            }

            /// <summary>
            /// Try to merge with the nextBucket.
            /// If merge results in a better bucket than two original ones, merge and return true.
            /// Else don't merge and return false.
            /// </summary>
            internal bool TryMergeWith(SwitchBucket prevBucket)
            {
                // prevBucket must be immediately adjacent on the left
                Debug.Assert(prevBucket._endLabelIndex + 1 == _startLabelIndex);
                if (MergeIsAdvantageous(prevBucket, this))
                {
                    this = new SwitchBucket(_allLabels, prevBucket._startLabelIndex, _endLabelIndex);
                    return true;
                }

                return false;
            }
        }
}
}
| balazssimon/meta-cs | src/Main/MetaDslx.CodeAnalysis.Common/CodeGen/SwitchIntegralJumpTableEmitter.SwitchBucket.cs | C# | apache-2.0 | 12,069 |
---
license: Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---
# iOS Command-line Tools
L'utilità della riga di comando `cordova` è uno strumento ad alto livello che consente di creare applicazioni su diverse piattaforme in una volta sola. Una versione precedente del framework Cordova fornisce set di strumenti da riga di comando specifici per ogni piattaforma. Per utilizzarli in alternativa alla CLI, è necessario scaricare questa versione di Cordova da [cordova.apache.org][1]. Il download contiene archivi separati per ciascuna piattaforma. Espandere l'archivio della piattaforma a cui si desidera fare riferimento. Gli strumenti qui descritti sono in genere disponibili nella directory `bin` di livello superiore; in caso contrario, consultare il file **Leggimi** per ulteriori indicazioni.
[1]: http://cordova.apache.org
Gli strumenti della riga di comando di iOS sono costruiti su script di shell e fare affidamento sugli strumenti della riga di comando di Xcode come `xcode-select` e`xcodebuild`.
## Creare un progetto
Eseguire il `create` comando, specificando il percorso esistente per il progetto, l'identificatore del pacchetto stile retro-dominio e nome visualizzato dell'app.
$ ./path/to/cordova-ios/bin/create /path/to/my_new_project com.example.project_name ProjectName
## Costruire un progetto
$ /path/to/my_new_project/cordova/build
## Eseguire l'applicazione sull'emulatore
$ /path/to/my_new_project/cordova/run
## Rilasciando
$ /path/to/my_new_project/cordova/release
## Registrazione
$ /path/to/my_new_project/cordova/log | rakatyal/cordova-docs | docs/it/3.1.0/guide/platforms/ios/tools.md | Markdown | apache-2.0 | 2,342 |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>at_c</title>
<link rel="stylesheet" href="../../../../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../../index.html" title="Chapter 1. Fusion 2.1">
<link rel="up" href="../metafunctions.html" title="Metafunctions">
<link rel="prev" href="at.html" title="at">
<link rel="next" href="value_at.html" title="value_at">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../../../boost.png"></td>
<td align="center"><a href="../../../../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="at.html"><img src="../../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../metafunctions.html"><img src="../../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="value_at.html"><img src="../../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h5 class="title">
<a name="fusion.sequence.intrinsic.metafunctions.at_c"></a><a class="link" href="at_c.html" title="at_c">at_c</a>
</h5></div></div></div>
<h6>
<a name="fusion.sequence.intrinsic.metafunctions.at_c.h0"></a>
<span class="phrase"><a name="fusion.sequence.intrinsic.metafunctions.at_c.description"></a></span><a class="link" href="at_c.html#fusion.sequence.intrinsic.metafunctions.at_c.description">Description</a>
</h6>
<p>
Returns the result type of <a class="link" href="../functions/at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">at_c</span></code></a><a href="#ftn.fusion.sequence.intrinsic.metafunctions.at_c.f0" class="footnote" name="fusion.sequence.intrinsic.metafunctions.at_c.f0"><sup class="footnote">[7]</sup></a>.
</p>
<h6>
<a name="fusion.sequence.intrinsic.metafunctions.at_c.h1"></a>
<span class="phrase"><a name="fusion.sequence.intrinsic.metafunctions.at_c.synopsis"></a></span><a class="link" href="at_c.html#fusion.sequence.intrinsic.metafunctions.at_c.synopsis">Synopsis</a>
</h6>
<pre class="programlisting"><span class="keyword">template</span><span class="special"><</span>
<span class="keyword">typename</span> <span class="identifier">Seq</span><span class="special">,</span>
<span class="keyword">int</span> <span class="identifier">M</span><span class="special">></span>
<span class="keyword">struct</span> <span class="identifier">at_c</span>
<span class="special">{</span>
<span class="keyword">typedef</span> <span class="emphasis"><em>unspecified</em></span> <span class="identifier">type</span><span class="special">;</span>
<span class="special">};</span>
</pre>
<div class="table">
<a name="fusion.sequence.intrinsic.metafunctions.at_c.t0"></a><p class="title"><b>Table 1.30. Parameters</b></p>
<div class="table-contents"><table class="table" summary="Parameters">
<colgroup>
<col>
<col>
<col>
</colgroup>
<thead><tr>
<th>
<p>
Parameter
</p>
</th>
<th>
<p>
Requirement
</p>
</th>
<th>
<p>
Description
</p>
</th>
</tr></thead>
<tbody>
<tr>
<td>
<p>
<code class="computeroutput"><span class="identifier">Seq</span></code>
</p>
</td>
<td>
<p>
A model of <a class="link" href="../../concepts/random_access_sequence.html" title="Random Access Sequence">Random
Access Sequence</a>
</p>
</td>
<td>
<p>
Argument sequence
</p>
</td>
</tr>
<tr>
<td>
<p>
<code class="computeroutput"><span class="identifier">M</span></code>
</p>
</td>
<td>
<p>
Positive integer index
</p>
</td>
<td>
<p>
Index of element
</p>
</td>
</tr>
</tbody>
</table></div>
</div>
<br class="table-break"><h6>
<a name="fusion.sequence.intrinsic.metafunctions.at_c.h2"></a>
<span class="phrase"><a name="fusion.sequence.intrinsic.metafunctions.at_c.expression_semantics"></a></span><a class="link" href="at_c.html#fusion.sequence.intrinsic.metafunctions.at_c.expression_semantics">Expression
Semantics</a>
</h6>
<pre class="programlisting"><span class="identifier">result_of</span><span class="special">::</span><span class="identifier">at_c</span><span class="special"><</span><span class="identifier">Seq</span><span class="special">,</span> <span class="identifier">M</span><span class="special">>::</span><span class="identifier">type</span>
</pre>
<p>
<span class="bold"><strong>Return type</strong></span>: Any type
</p>
<p>
<span class="bold"><strong>Semantics</strong></span>: Returns the result type of
using <a class="link" href="../functions/at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">at_c</span></code></a> to access the <code class="computeroutput"><span class="identifier">M</span></code>th element of <code class="computeroutput"><span class="identifier">Seq</span></code>.
</p>
<h6>
<a name="fusion.sequence.intrinsic.metafunctions.at_c.h3"></a>
<span class="phrase"><a name="fusion.sequence.intrinsic.metafunctions.at_c.header"></a></span><a class="link" href="at_c.html#fusion.sequence.intrinsic.metafunctions.at_c.header">Header</a>
</h6>
<pre class="programlisting"><span class="preprocessor">#include</span> <span class="special"><</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">fusion</span><span class="special">/</span><span class="identifier">sequence</span><span class="special">/</span><span class="identifier">intrinsic</span><span class="special">/</span><span class="identifier">at</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">></span>
<span class="preprocessor">#include</span> <span class="special"><</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">fusion</span><span class="special">/</span><span class="identifier">include</span><span class="special">/</span><span class="identifier">at</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">></span>
</pre>
<h6>
<a name="fusion.sequence.intrinsic.metafunctions.at_c.h4"></a>
<span class="phrase"><a name="fusion.sequence.intrinsic.metafunctions.at_c.example"></a></span><a class="link" href="at_c.html#fusion.sequence.intrinsic.metafunctions.at_c.example">Example</a>
</h6>
<pre class="programlisting"><span class="keyword">typedef</span> <a class="link" href="../../../container/vector.html" title="vector"><code class="computeroutput"><span class="identifier">vector</span></code></a><span class="special"><</span><span class="keyword">int</span><span class="special">,</span><span class="keyword">float</span><span class="special">,</span><span class="keyword">char</span><span class="special">></span> <span class="identifier">vec</span><span class="special">;</span>
<span class="identifier">BOOST_MPL_ASSERT</span><span class="special">((</span><span class="identifier">boost</span><span class="special">::</span><span class="identifier">is_same</span><span class="special"><</span><a class="link" href="at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">result_of</span><span class="special">::</span><span class="identifier">at_c</span></code></a><span class="special"><</span><span class="identifier">vec</span><span class="special">,</span> <span class="number">1</span><span class="special">>::</span><span class="identifier">type</span><span class="special">,</span> <span class="keyword">float</span><span class="special">&>));</span>
</pre>
<div class="footnotes">
<br><hr style="width:100; text-align:left;margin-left: 0">
<div id="ftn.fusion.sequence.intrinsic.metafunctions.at_c.f0" class="footnote"><p><a href="#fusion.sequence.intrinsic.metafunctions.at_c.f0" class="para"><sup class="para">[7] </sup></a>
<a class="link" href="at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">result_of</span><span class="special">::</span><span class="identifier">at_c</span></code></a> reflects the actual return
type of the function <a class="link" href="../functions/at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">at_c</span></code></a>. <a class="link" href="../../../sequence.html" title="Sequence">Sequence</a>(s)
typically return references to its elements via the <a class="link" href="../functions/at_c.html" title="at_c"><code class="computeroutput"><span class="identifier">at_c</span></code></a> function. If you want
to get the actual element type, use <a class="link" href="value_at_c.html" title="value_at_c"><code class="computeroutput"><span class="identifier">result_of</span><span class="special">::</span><span class="identifier">value_at_c</span></code></a>
</p></div>
</div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2001-2006, 2011, 2012 Joel de Guzman,
Dan Marsden, Tobias Schwinger<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="at.html"><img src="../../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../metafunctions.html"><img src="../../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="value_at.html"><img src="../../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| NixaSoftware/CVis | venv/bin/libs/fusion/doc/html/fusion/sequence/intrinsic/metafunctions/at_c.html | HTML | apache-2.0 | 11,398 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * A datasource representing a union of tables. It only carries the list of member
 * {@link TableDataSource}s; each member must reference exactly one table.
 */
public class UnionDataSource implements DataSource
{
  @JsonProperty
  private final List<TableDataSource> dataSources;

  @JsonCreator
  public UnionDataSource(@JsonProperty("dataSources") List<TableDataSource> dataSources)
  {
    if (dataSources == null || dataSources.isEmpty()) {
      throw new ISE("'dataSources' must be non-null and non-empty for 'union'");
    }
    this.dataSources = dataSources;
  }

  @Override
  public Set<String> getTableNames()
  {
    return dataSources.stream()
                      .map(dataSource -> Iterables.getOnlyElement(dataSource.getTableNames()))
                      .collect(Collectors.toSet());
  }

  @JsonProperty
  public List<TableDataSource> getDataSources()
  {
    return dataSources;
  }

  @Override
  public List<DataSource> getChildren()
  {
    return ImmutableList.copyOf(dataSources);
  }

  @Override
  public DataSource withChildren(List<DataSource> children)
  {
    if (children.size() != dataSources.size()) {
      throw new IAE("Expected [%d] children, got [%d]", dataSources.size(), children.size());
    }

    if (!children.stream().allMatch(dataSource -> dataSource instanceof TableDataSource)) {
      throw new IAE("All children must be tables");
    }

    final List<TableDataSource> newDataSources =
        children.stream()
                .map(TableDataSource.class::cast)
                .collect(Collectors.toList());

    return new UnionDataSource(newDataSources);
  }

  @Override
  public boolean isCacheable(boolean isBroker)
  {
    // Disables result-level caching for 'union' datasources, which doesn't work currently.
    // See https://github.com/apache/druid/issues/8713 for reference.
    //
    // Note that per-segment caching is still effective, since at the time the per-segment cache evaluates a query
    // for cacheability, it would have already been rewritten to a query on a single table.
    return false;
  }

  @Override
  public boolean isGlobal()
  {
    // A union is global only if every member is global.
    return dataSources.stream().allMatch(DataSource::isGlobal);
  }

  @Override
  public boolean isConcrete()
  {
    // A union is concrete only if every member is concrete.
    return dataSources.stream().allMatch(DataSource::isConcrete);
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }

    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    final UnionDataSource that = (UnionDataSource) o;
    return dataSources.equals(that.dataSources);
  }

  @Override
  public int hashCode()
  {
    return dataSources.hashCode();
  }

  @Override
  public String toString()
  {
    return "UnionDataSource{" +
           "dataSources=" + dataSources +
           '}';
  }
}
| druid-io/druid | processing/src/main/java/org/apache/druid/query/UnionDataSource.java | Java | apache-2.0 | 3,818 |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.access.common.settings;
import com.thoughtworks.go.plugin.access.DefaultPluginInteractionCallback;
import com.thoughtworks.go.plugin.access.PluginRequestHelper;
import com.thoughtworks.go.plugin.api.response.validation.ValidationResult;
import com.thoughtworks.go.plugin.infra.PluginManager;
import java.util.HashMap;
import java.util.Map;
/**
 * Base class for Go plugin extensions. Centralizes the plugin-settings interactions
 * (fetching configuration, fetching the settings view, and validating settings) by
 * submitting requests through a {@link PluginRequestHelper} and delegating JSON
 * (de)serialization to the {@link PluginSettingsJsonMessageHandler} registered for
 * the resolved extension API version.
 */
public abstract class AbstractExtension implements GoPluginExtension {
    protected PluginManager pluginManager;
    protected final PluginRequestHelper pluginRequestHelper;
    private final String extensionName;

    // Maps extension API version -> handler for that version's JSON wire format.
    protected Map<String, PluginSettingsJsonMessageHandler> pluginSettingsMessageHandlerMap = new HashMap<>();

    protected AbstractExtension(PluginManager pluginManager, PluginRequestHelper pluginRequestHelper, String extensionName) {
        this.pluginManager = pluginManager;
        this.pluginRequestHelper = pluginRequestHelper;
        this.extensionName = extensionName;
    }

    @Override
    public boolean canHandlePlugin(String pluginId) {
        // A plugin is handled here only if it implements this extension point.
        return pluginManager.isPluginOfType(this.extensionName, pluginId);
    }

    public PluginSettingsConfiguration getPluginSettingsConfiguration(String pluginId) {
        return pluginRequestHelper.submitRequest(pluginId, PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_CONFIGURATION, new DefaultPluginInteractionCallback<PluginSettingsConfiguration>() {
            @Override
            public PluginSettingsConfiguration onSuccess(String responseBody, String resolvedExtensionVersion) {
                return settingsMessageHandler(resolvedExtensionVersion).responseMessageForPluginSettingsConfiguration(responseBody);
            }
        });
    }

    public String getPluginSettingsView(String pluginId) {
        return pluginRequestHelper.submitRequest(pluginId, PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_VIEW, new DefaultPluginInteractionCallback<String>() {
            @Override
            public String onSuccess(String responseBody, String resolvedExtensionVersion) {
                return settingsMessageHandler(resolvedExtensionVersion).responseMessageForPluginSettingsView(responseBody);
            }
        });
    }

    public ValidationResult validatePluginSettings(String pluginId, final PluginSettingsConfiguration configuration) {
        return pluginRequestHelper.submitRequest(pluginId, PluginSettingsConstants.REQUEST_VALIDATE_PLUGIN_SETTINGS, new DefaultPluginInteractionCallback<ValidationResult>() {
            @Override
            public String requestBody(String resolvedExtensionVersion) {
                return settingsMessageHandler(resolvedExtensionVersion).requestMessageForPluginSettingsValidation(configuration);
            }

            @Override
            public ValidationResult onSuccess(String responseBody, String resolvedExtensionVersion) {
                return settingsMessageHandler(resolvedExtensionVersion).responseMessageForPluginSettingsValidation(responseBody);
            }
        });
    }

    // Looks up the message handler matching the extension version negotiated for this request.
    private PluginSettingsJsonMessageHandler settingsMessageHandler(String resolvedExtensionVersion) {
        return pluginSettingsMessageHandlerMap.get(resolvedExtensionVersion);
    }
}
| tomwscott/GoCD | plugin-infra/go-plugin-access/src/com/thoughtworks/go/plugin/access/common/settings/AbstractExtension.java | Java | apache-2.0 | 3,684 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.continuous;
import org.jetbrains.annotations.*;
/**
 * Message types exchanged by the continuous processor.
 */
enum GridContinuousMessageType {
    /** Remote event notification. */
    MSG_EVT_NOTIFICATION,

    /** Event notification acknowledgement for synchronous events. */
    MSG_EVT_ACK;

    /** Cached copy of {@link #values()} to avoid cloning the array on every lookup. */
    private static final GridContinuousMessageType[] VALS = values();

    /**
     * Efficiently gets enumerated value from its ordinal.
     *
     * @param ord Ordinal value.
     * @return Enumerated value, or {@code null} if the ordinal is out of range.
     */
    @Nullable public static GridContinuousMessageType fromOrdinal(byte ord) {
        if (ord < 0 || ord >= VALS.length)
            return null;

        return VALS[ord];
    }
}
| akuznetsov-gridgain/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousMessageType.java | Java | apache-2.0 | 1,533 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.metadata;
import com.facebook.presto.common.Page;
import com.facebook.presto.common.PageBuilder;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.spi.ColumnMetadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.facebook.presto.common.type.TypeUtils.writeNativeValue;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
 * An immutable in-memory table used to serve internal (metadata) queries.
 * Holds a name-to-index mapping for its columns and the data as a list of
 * {@link Page}s. Instances are produced via {@link Builder}.
 */
public class InternalTable
{
    // Column name -> position of the column within each page.
    private final Map<String, Integer> columnIndexes;
    private final List<Page> pages;

    public InternalTable(Map<String, Integer> columnIndexes, Iterable<Page> pages)
    {
        this.columnIndexes = ImmutableMap.copyOf(requireNonNull(columnIndexes, "columnIndexes is null"));
        this.pages = ImmutableList.copyOf(requireNonNull(pages, "pages is null"));
    }

    /**
     * Returns the page-channel index of the named column.
     *
     * @throws IllegalArgumentException if the column does not exist
     */
    public int getColumnIndex(String columnName)
    {
        Integer index = columnIndexes.get(columnName);
        checkArgument(index != null, "Column %s not found", columnName);
        return index;
    }

    public List<Page> getPages()
    {
        return pages;
    }

    public static Builder builder(ColumnMetadata... columns)
    {
        return builder(ImmutableList.copyOf(columns));
    }

    public static Builder builder(List<ColumnMetadata> columns)
    {
        ImmutableList.Builder<String> names = ImmutableList.builder();
        ImmutableList.Builder<Type> types = ImmutableList.builder();
        for (ColumnMetadata column : columns) {
            names.add(column.getName());
            types.add(column.getType());
        }
        return new Builder(names.build(), types.build());
    }

    /**
     * Accumulates rows into pages, flushing whenever the current page is full,
     * and assembles the final {@link InternalTable}.
     */
    public static class Builder
    {
        private final Map<String, Integer> columnIndexes;
        private final List<Type> types;
        private final List<Page> pages;
        private final PageBuilder pageBuilder;

        public Builder(List<String> columnNames, List<Type> types)
        {
            requireNonNull(columnNames, "columnNames is null");
            this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));

            // Fail fast, before any further work, and keep the message arguments
            // consistent with the placeholders (both are the full lists).
            checkArgument(columnNames.size() == types.size(),
                    "Column name count does not match type count: columnNames=%s, types=%s", columnNames, types);

            ImmutableMap.Builder<String, Integer> columnIndexes = ImmutableMap.builder();
            int columnIndex = 0;
            for (String columnName : columnNames) {
                columnIndexes.put(columnName, columnIndex++);
            }
            this.columnIndexes = columnIndexes.build();

            pages = new ArrayList<>();
            pageBuilder = new PageBuilder(types);
        }

        /**
         * Appends one row; {@code values} are native representations matching the
         * builder's column types, in column order.
         */
        public Builder add(Object... values)
        {
            pageBuilder.declarePosition();
            for (int i = 0; i < types.size(); i++) {
                writeNativeValue(types.get(i), pageBuilder.getBlockBuilder(i), values[i]);
            }

            if (pageBuilder.isFull()) {
                flushPage();
            }
            return this;
        }

        public InternalTable build()
        {
            flushPage();
            return new InternalTable(columnIndexes, pages);
        }

        // Emits the in-progress page (if any) and resets the page builder for reuse.
        private void flushPage()
        {
            if (!pageBuilder.isEmpty()) {
                pages.add(pageBuilder.build());
                pageBuilder.reset();
            }
        }
    }
}
| prestodb/presto | presto-main/src/main/java/com/facebook/presto/metadata/InternalTable.java | Java | apache-2.0 | 4,134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.writer;
import static java.lang.String.format;
import org.apache.drill.categories.ParquetTest;
import org.apache.drill.PlanTestBase;
import org.apache.drill.test.TestBuilder;
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.store.parquet.metadata.Metadata;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
/**
* Tests for compatibility reading old parquet files after date corruption
* issue was fixed in DRILL-4203.
*
* Drill could write non-standard dates into parquet files. This issue is related to
* all drill releases where {@link org.apache.drill.exec.store.parquet.ParquetRecordWriter#WRITER_VERSION_PROPERTY} <
* {@link org.apache.drill.exec.store.parquet.ParquetReaderUtility#DRILL_WRITER_VERSION_STD_DATE_FORMAT}
* The values have been read correctly by Drill, but external tools like Spark reading the files will see
* corrupted values for all dates that have been written by Drill.
*
* This change corrects the behavior of the Drill parquet writer to correctly
* store dates in the format given in the parquet specification.
*
* To maintain compatibility with old files, the parquet reader code has
* been updated to check for the old format and automatically shift the
* corrupted values into corrected ones automatically.
*
* The test cases included here should ensure that all files produced by
* historical versions of Drill will continue to return the same values they
* had in previous releases. For compatibility with external tools, any old
* files with corrupted dates can be re-written using the CREATE TABLE AS
* command (as the writer will now only produce the specification-compliant
* values, even if after reading out of older corrupt files).
*
* While the old behavior was a consistent shift into an unlikely range
* to be used in a modern database (over 10,000 years in the future), these are still
* valid date values. In the case where these may have been written into
* files intentionally, an option is included to turn off the auto-correction.
* Use of this option is assumed to be extremely unlikely, but it is included
* for completeness.
*/
@Category({ParquetTest.class, UnlikelyTest.class})
public class TestCorruptParquetDateCorrection extends PlanTestBase {
  // Root of all test resources for DRILL-4203.
  private static final Path PARQUET_4203 = Paths.get("parquet", "4203_corrupt_dates");
  // 4 files are in the directory:
  //    - one created with the parquet-writer version number of "2"
  //        - files have extra meta field: parquet-writer.version = 2
  //    - one from and old version of Drill, before we put in proper created by in metadata
  //        - this is read properly by looking at a Max value in the file statistics, to see that
  //          it is way off of a typical date value
  //        - this behavior will be able to be turned off, but will be on by default
  //    - one from the 0.6 version of Drill, before files had min/max statistics
  //        - detecting corrupt values must be deferred to actual data page reading
  //    - one from 1.4, where there is a proper created-by, but the corruption is present
  private static final Path MIXED_CORRUPTED_AND_CORRECT_DATES_PATH = PARQUET_4203.resolve("mixed_drill_versions");
  // partitioned with 1.2.0, no certain metadata that these were written with Drill
  // the value will be checked to see that they look corrupt and they will be corrected
  // by default. Users can use the format plugin option autoCorrectCorruptDates to disable
  // this behavior if they have foreign parquet files with valid rare date values that are
  // in the similar range as Drill's corrupt values
  private static final Path PARTITIONED_1_2_FOLDER = Paths.get("partitioned_with_corruption_4203_1_2");
  private static final Path CORRUPTED_PARTITIONED_DATES_1_2_PATH = PARQUET_4203.resolve(PARTITIONED_1_2_FOLDER);
  // partitioned with 1.4.0, no certain metadata regarding the date corruption status.
  // The same detection approach of the corrupt date values as for the files partitioned with 1.2.0
  private static final Path PARTITIONED_1_4_FOLDER = Paths.get("partitioned_with_corruption_4203");
  private static final Path CORRUPTED_PARTITIONED_DATES_1_4_0_PATH = PARQUET_4203.resolve(PARTITIONED_1_4_FOLDER);
  // single file where the leading columns are entirely null-filled; exercises the
  // "find a non-null date column before deciding on corruption" code path
  private static final Path PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS = PARQUET_4203.resolve("null_date_cols_with_corruption_4203.parquet");
  private static final Path PARTITIONED_1_9_FOLDER = Paths.get("1_9_0_partitioned_no_corruption");
  private static final Path CORRECT_PARTITIONED_DATES_1_9_PATH = PARQUET_4203.resolve(PARTITIONED_1_9_FOLDER);
  private static final Path VARCHAR_PARTITIONED = PARQUET_4203.resolve("fewtypes_varcharpartition");
  private static final Path DATE_PARTITIONED = PARQUET_4203.resolve("fewtypes_datepartition");
  // file whose created-by metadata cannot be parsed (written by Hive 1.2)
  private static final Path EXCEPTION_WHILE_PARSING_CREATED_BY_META = PARQUET_4203.resolve("hive1dot2_fewtypes_null");
  private static final Path CORRECT_DATES_1_6_0_PATH = PARQUET_4203.resolve("correct_dates_and_old_drill_parquet_writer.parquet");
  private static final Path MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER = Paths.get("mixed_partitioned");
  @BeforeClass
  public static void initFs() throws Exception {
    // Move files into temp directory, rewrite the metadata cache file to contain the appropriate absolute path
    dirTestWatcher.copyResourceToRoot(PARQUET_4203);
    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_2_PATH, PARTITIONED_1_2_FOLDER);
    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_4_0_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_2_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_2_FOLDER));
    dirTestWatcher.copyResourceToRoot(CORRECT_PARTITIONED_DATES_1_9_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_9_FOLDER));
    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_4_0_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_4_FOLDER));
    File metaData = dirTestWatcher.copyResourceToRoot(PARQUET_4203.resolve("drill.parquet.metadata_1_2.requires_replace.txt"),
      PARTITIONED_1_2_FOLDER.resolve(Metadata.METADATA_FILENAME));
    dirTestWatcher.replaceMetaDataContents(metaData, dirTestWatcher.getRootDir(), null);
  }
  /**
   * Test reading a directory full of partitioned parquet files with dates, these files have a drill version
   * number of "1.9.0-SNAPSHOT" and parquet-writer version number of "2" in their footers, so we can be certain
   * they do not have corruption. The option to disable the correction is passed, but it will not change the result
   * in the case where we are certain correction is NOT needed. For more info see DRILL-4203.
   */
  @Test
  public void testReadPartitionedOnCorrectDates() throws Exception {
    try {
      for (String selection : new String[]{"*", "date_col"}) {
        // for sanity, try reading all partitions without a filter
        TestBuilder builder = testBuilder()
            .sqlQuery("select %s from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))",
                selection, CORRECT_PARTITIONED_DATES_1_9_PATH)
            .unOrdered()
            .baselineColumns("date_col");
        addDateBaselineValues(builder);
        builder.go();
        String query = format("select %s from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))" +
            " where date_col = date '1970-01-01'", selection, CORRECT_PARTITIONED_DATES_1_9_PATH);
        // verify that pruning is actually taking place
        testPlanMatchingPatterns(query, new String[]{"numFiles=1"}, null);
        // read with a filter on the partition column
        testBuilder()
            .sqlQuery(query)
            .unOrdered()
            .baselineColumns("date_col")
            .baselineValues(LocalDate.of(1970, 1, 1))
            .go();
      }
    } finally {
      test("alter session reset all");
    }
  }
  // Filtering on a varchar partition column must still auto-correct the corrupt dates.
  @Test
  public void testVarcharPartitionedReadWithCorruption() throws Exception {
    testBuilder()
        .sqlQuery("select date_col from dfs.`%s` where length(varchar_col) = 12", VARCHAR_PARTITIONED)
        .baselineColumns("date_col")
        .unOrdered()
        .baselineValues(LocalDate.of(2039, 4, 9))
        .baselineValues(LocalDate.of(1999, 1, 8))
        .go();
  }
  // Filtering and pruning on a date partition column whose values were corrupt on disk.
  @Test
  public void testDatePartitionedReadWithCorruption() throws Exception {
    testBuilder()
        .sqlQuery("select date_col from dfs.`%s` where date_col = '1999-04-08'", DATE_PARTITIONED)
        .baselineColumns("date_col")
        .unOrdered()
        .baselineValues(LocalDate.of(1999, 4, 8))
        .go();
    String query = format("select date_col from dfs.`%s` where date_col > '1999-04-08'", DATE_PARTITIONED);
    testPlanMatchingPatterns(query, new String[]{"numFiles=6"}, null);
  }
  // Dates must still read correctly when the file's created-by metadata cannot be parsed.
  @Test
  public void testCorrectDatesAndExceptionWhileParsingCreatedBy() throws Exception {
    testBuilder()
        .sqlQuery("select date_col from dfs.`%s` where to_date(date_col, 'yyyy-mm-dd') < '1997-01-02'",
            EXCEPTION_WHILE_PARSING_CREATED_BY_META)
        .baselineColumns("date_col")
        .unOrdered()
        .baselineValues(LocalDate.of(1996, 1, 29))
        .baselineValues(LocalDate.of(1996, 3, 1))
        .baselineValues(LocalDate.of(1996, 3, 2))
        .baselineValues(LocalDate.of(1997, 3, 1))
        .go();
  }
  // according to SQL spec. '4.4.3.5 Datetime types' year should be less than 9999
  @Test(expected = UserRemoteException.class)
  public void testQueryWithCorruptedDates() throws Exception {
    try {
      TestBuilder builder = testBuilder()
          .sqlQuery("select * from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))",
              CORRUPTED_PARTITIONED_DATES_1_2_PATH)
          .unOrdered()
          .baselineColumns("date_col");
      addCorruptedDateBaselineValues(builder);
      builder.go();
      String query = "select * from table(dfs.`%s` (type => 'parquet', " +
          "autoCorrectCorruptDates => false)) where date_col = cast('15334-03-17' as date)";
      test(query, CORRUPTED_PARTITIONED_DATES_1_2_PATH);
    } catch (UserRemoteException e) {
      Assert.assertTrue(e.getMessage()
          .contains("Year out of range"));
      throw e;
    } finally {
      test("alter session reset all");
    }
  }
  // Corruption detection must also work during partition pruning, for both 1.2 and 1.4 layouts.
  @Test
  public void testCorruptValueDetectionDuringPruning() throws Exception {
    try {
      for (String selection : new String[]{"*", "date_col"}) {
        for (Path table : new Path[]{CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH}) {
          // for sanity, try reading all partitions without a filter
          TestBuilder builder = testBuilder()
              .sqlQuery("select %s from dfs.`%s`", selection, table)
              .unOrdered()
              .baselineColumns("date_col");
          addDateBaselineValues(builder);
          builder.go();
          String query = format("select %s from dfs.`%s`" +
              " where date_col = date '1970-01-01'", selection, table);
          // verify that pruning is actually taking place
          testPlanMatchingPatterns(query, new String[]{"numFiles=1"}, null);
          // read with a filter on the partition column
          testBuilder()
              .sqlQuery(query)
              .unOrdered()
              .baselineColumns("date_col")
              .baselineValues(LocalDate.of(1970, 1, 1))
              .go();
        }
      }
    } finally {
      test("alter session reset all");
    }
  }
  /**
   * To fix some of the corrupted dates fixed as part of DRILL-4203 it requires
   * actually looking at the values stored in the file. A column with date values
   * actually stored must be located to check a value. Just because we find one
   * column where the all values are null does not mean we can safely avoid reading
   * date columns with auto-correction, although null values do not need fixing,
   * other columns may contain actual corrupt date values.
   *
   * This test checks the case where the first columns in the file are all null filled
   * and a later column must be found to identify that the file is corrupt.
   */
  @Test
  public void testReadCorruptDatesWithNullFilledColumns() throws Exception {
    testBuilder()
        .sqlQuery("select null_dates_1, null_dates_2, non_existent_field, date_col from dfs.`%s`",
            PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS)
        .unOrdered()
        .baselineColumns("null_dates_1", "null_dates_2", "non_existent_field", "date_col")
        .baselineValues(null, null, null, LocalDate.of(1970, 1, 1))
        .baselineValues(null, null, null, LocalDate.of(1970, 1, 2))
        .baselineValues(null, null, null, LocalDate.of(1969, 12, 31))
        .baselineValues(null, null, null, LocalDate.of(1969, 12, 30))
        .baselineValues(null, null, null, LocalDate.of(1900, 1, 1))
        .baselineValues(null, null, null, LocalDate.of(2015, 1, 1))
        .go();
  }
  // autoCorrectCorruptDates => false must be honored by both parquet readers.
  @Test
  public void testUserOverrideDateCorrection() throws Exception {
    // read once with the flat reader
    readFilesWithUserDisabledAutoCorrection();
    try {
      test("alter session set %s = true", ExecConstants.PARQUET_NEW_RECORD_READER);
      // read all of the types with the complex reader
      readFilesWithUserDisabledAutoCorrection();
    } finally {
      test("alter session reset all");
    }
  }
  /**
   * Test reading a directory full of parquet files with dates, some of which have corrupted values
   * due to DRILL-4203.
   *
   * Tests reading the files with both the vectorized and complex parquet readers.
   *
   * @throws Exception
   */
  @Test
  public void testReadMixedOldAndNewBothReaders() throws Exception {
    // read once with the flat reader
    readMixedCorruptedAndCorrectDates();
    try {
      // read all of the types with the complex reader
      test("alter session set %s = true", ExecConstants.PARQUET_NEW_RECORD_READER);
      readMixedCorruptedAndCorrectDates();
    } finally {
      test("alter session set %s = false", ExecConstants.PARQUET_NEW_RECORD_READER);
    }
  }
  // A pre-DRILL-4203 metadata cache file must still yield corrected dates.
  @Test
  public void testReadOldMetadataCacheFile() throws Exception {
    // for sanity, try reading all partitions without a filter
    String query = format("select date_col from dfs.`%s`", PARTITIONED_1_2_FOLDER);
    TestBuilder builder = testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("date_col");
    addDateBaselineValues(builder);
    builder.go();
    testPlanMatchingPatterns(query, new String[]{"usedMetadataFile=true"}, null);
  }
  // Same as above, but with partition pruning driven by the old cache file.
  @Test
  public void testReadOldMetadataCacheFileWithPruning() throws Exception {
    String query = format("select date_col from dfs.`%s` where date_col = date '1970-01-01'",
        PARTITIONED_1_2_FOLDER);
    // verify that pruning is actually taking place
    testPlanMatchingPatterns(query, new String[]{"numFiles=1", "usedMetadataFile=true"}, null);
    // read with a filter on the partition column
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("date_col")
        .baselineValues(LocalDate.of(1970, 1, 1))
        .go();
  }
  // A new-format metadata cache over a mix of corrupt (1.2, 1.4) and correct (1.9) files:
  // each sub-folder contributes one copy of the baseline, so the baseline is added three times.
  @Test
  public void testReadNewMetadataCacheFileOverOldAndNewFiles() throws Exception {
    File meta = dirTestWatcher.copyResourceToRoot(
      PARQUET_4203.resolve("mixed_version_partitioned_metadata.requires_replace.txt"),
      MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(Metadata.METADATA_FILENAME));
    dirTestWatcher.replaceMetaDataContents(meta, dirTestWatcher.getRootDir(), null);
    // for sanity, try reading all partitions without a filter
    TestBuilder builder = testBuilder()
        .sqlQuery("select date_col from dfs.`%s`", MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER)
        .unOrdered()
        .baselineColumns("date_col");
    addDateBaselineValues(builder);
    addDateBaselineValues(builder);
    addDateBaselineValues(builder);
    builder.go();
    String query = format("select date_col from dfs.`%s` where date_col = date '1970-01-01'", MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
    // verify that pruning is actually taking place
    testPlanMatchingPatterns(query, new String[]{"numFiles=3", "usedMetadataFile=true"}, null);
    // read with a filter on the partition column
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("date_col")
        .baselineValues(LocalDate.of(1970, 1, 1))
        .baselineValues(LocalDate.of(1970, 1, 1))
        .baselineValues(LocalDate.of(1970, 1, 1))
        .go();
  }
  // A file with valid dates but an old Drill writer version must NOT be "corrected".
  @Test
  public void testCorrectDateValuesGeneratedByOldVersionOfDrill() throws Exception {
    testBuilder()
        .sqlQuery("select i_rec_end_date from dfs.`%s` limit 1", CORRECT_DATES_1_6_0_PATH)
        .baselineColumns("i_rec_end_date")
        .unOrdered()
        .baselineValues(LocalDate.of(2000, 10, 26))
        .go();
  }
  /**
   * Read a directory with parquet files where some have corrupted dates, see DRILL-4203.
   * @throws Exception
   */
  private void readMixedCorruptedAndCorrectDates() throws Exception {
    // ensure that selecting the date column explicitly or as part of a star still results
    // in checking the file metadata for date columns (when we need to check the statistics
    // for bad values) to set the flag that the values are corrupt
    for (String selection : new String[] {"*", "date_col"}) {
      TestBuilder builder = testBuilder()
          .sqlQuery("select %s from dfs.`%s`", selection, MIXED_CORRUPTED_AND_CORRECT_DATES_PATH)
          .unOrdered()
          .baselineColumns("date_col");
      // the directory holds four files, each contributing one copy of the baseline
      for (int i = 0; i < 4; i++) {
        addDateBaselineValues(builder);
      }
      builder.go();
    }
  }
  // Expected (corrected) date values shared by most tests in this class.
  private void addDateBaselineValues(TestBuilder builder) {
    builder
        .baselineValues(LocalDate.of(1970, 1, 1))
        .baselineValues(LocalDate.of(1970, 1, 2))
        .baselineValues(LocalDate.of(1969, 12, 31))
        .baselineValues(LocalDate.of(1969, 12, 30))
        .baselineValues(LocalDate.of(1900, 1, 1))
        .baselineValues(LocalDate.of(2015, 1, 1));
  }
  /**
   * These are the same values added in the addDateBaselineValues, shifted as corrupt values
   */
  private void addCorruptedDateBaselineValues(TestBuilder builder) {
    builder
        .baselineValues(LocalDate.of(15334, 3, 17))
        .baselineValues(LocalDate.of(15334, 3, 18))
        .baselineValues(LocalDate.of(15334, 3, 15))
        .baselineValues(LocalDate.of(15334, 3, 16))
        .baselineValues(LocalDate.of(15264, 3, 16))
        .baselineValues(LocalDate.of(15379, 3, 17));
  }
  // Reads the mixed directory with correction disabled: one file has correct values,
  // the other three retain their corrupt (shifted) values.
  private void readFilesWithUserDisabledAutoCorrection() throws Exception {
    // ensure that selecting the date column explicitly or as part of a star still results
    // in checking the file metadata for date columns (when we need to check the statistics
    // for bad values) to set the flag that the values are corrupt
    for (String selection : new String[] {"*", "date_col"}) {
      TestBuilder builder = testBuilder()
          .sqlQuery("select %s from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))",
              selection, MIXED_CORRUPTED_AND_CORRECT_DATES_PATH)
          .unOrdered()
          .baselineColumns("date_col");
      addDateBaselineValues(builder);
      addCorruptedDateBaselineValues(builder);
      addCorruptedDateBaselineValues(builder);
      addCorruptedDateBaselineValues(builder);
      builder.go();
    }
  }
}
| parthchandra/incubator-drill | exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java | Java | apache-2.0 | 20,758 |
#region License
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
using System;
using System.ComponentModel;
using System.Globalization;
namespace Spring.Transaction.Interceptor
{
/// <summary>
/// Type converter for <see cref="Spring.Transaction.Interceptor.ITransactionAttribute"/>
/// objects.
/// </summary>
/// <remarks>
/// Takes <see cref="System.String"/>s of the form
/// <p><code>PROPAGATION_NAME,ISOLATION_NAME,readOnly,timeout_NNNN,+Exception1,-Exception2</code></p>
/// <p>where only propagation code is required. For example:</p>
/// <p><code>PROPAGATION_MANDATORY,ISOLATION_DEFAULT</code></p>
/// <p>
/// The tokens can be in <strong>any</strong> order. Propagation and isolation codes
/// must use the names of the values in the <see cref="Spring.Transaction.TransactionPropagation"/>
/// enumeration. Timeout values are in seconds. If no timeout is specified, the transaction
/// manager will apply a default timeout specific to the particular transaction manager.
/// </p>
/// <p>
/// A "+" before an exception name substring indicates that transactions should commit even
/// if this exception is thrown; a "-" that they should roll back.
/// </p>
/// </remarks>
/// <author>Mark Pollack</author>
public class TransactionAttributeConverter : TypeConverter
{
/// <summary>
/// Returns whether this converter can convert an object of the given type to an ITransactionAttribute, using the specified context.
/// </summary>
/// <param name="context">An <see cref="T:System.ComponentModel.ITypeDescriptorContext"></see> that provides a format context.</param>
/// <param name="sourceType">A <see cref="T:System.Type"></see> that represents the type you want to convert from.</param>
/// <returns>
/// true if this converter can perform the conversion; otherwise, false.
/// </returns>
public override bool CanConvertFrom(
ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof (string))
{
return true;
}
return base.CanConvertFrom(context, sourceType);
}
/// <summary>
/// Converts from string to ITransactionAttribute
/// </summary>
/// <param name="context">The context.</param>
/// <param name="culture">The culture.</param>
/// <param name="val">The string value to convert</param>
/// <returns>An ITransactionAttribute instance</returns>
public override object ConvertFrom(
ITypeDescriptorContext context, CultureInfo culture, object val)
{
if (val is string)
{
string value = val as string;
TransactionAttributeEditor editor = new TransactionAttributeEditor();
editor.SetAsText(value);
return editor.Value;
}
return base.ConvertFrom(context, culture, val);
}
}
} | spring-projects/spring-net | src/Spring/Spring.Data/Transaction/Interceptor/TransactionAttributeConverter.cs | C# | apache-2.0 | 3,656 |
#!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs bazel build/test for current repo."""
import argparse
import os
import subprocess
import sys
# Working directory at import time; later chdir calls must not change where
# test-infra-relative paths resolve.
ORIG_CWD = os.getcwd()

def test_infra(*paths):
    """Return path relative to root of test-infra repo."""
    script_dir = os.path.dirname(__file__)
    return os.path.join(ORIG_CWD, script_dir, '..', *paths)
def check(*cmd):
    """Log the command to stderr, then run it; raises CalledProcessError on failure."""
    # Same output as `print >>sys.stderr, 'Run:', cmd`, but also valid Python 3.
    sys.stderr.write('Run: %s\n' % (cmd,))
    subprocess.check_call(cmd)
def check_output(*cmd):
    """Log the command to stderr, run it, and return its stdout; raises on failure."""
    # Same output as `print >>sys.stderr, 'Run:', cmd`, but also valid Python 3.
    sys.stderr.write('Run: %s\n' % (cmd,))
    return subprocess.check_output(cmd)
class Bazel(object):
    """Thin wrapper around the bazel CLI that injects --config flags."""

    def __init__(self, cfgs):
        # cfgs may be None (no --config flags given on the command line).
        self.cfgs = cfgs or []

    def _commands(self, cmd, *args, **kw):
        """Assemble the full bazel argv for cmd; configs apply unless config=False."""
        argv = ['bazel', cmd]
        if kw.get('config', True):
            argv.extend('--config=%s' % cfg for cfg in self.cfgs)
        argv.extend(args)
        return argv

    def check(self, cmd, *args, **kw):
        """wrapper for check('bazel', *cmd)."""
        check(*self._commands(cmd, *args, **kw))

    def check_output(self, cmd, *args, **kw):
        """wrapper for check_output('bazel', *cmd)."""
        return check_output(*self._commands(cmd, *args, **kw))

    def query(self, kind, selected_pkgs, changed_pkgs):
        """
        Run a bazel query against target kind, include targets from args.
        Returns a list of kind objects from bazel query.
        """
        # Changes are calculated and no packages found, return empty list.
        if changed_pkgs == []:
            return []
        if selected_pkgs:
            # targets without a '-' operator prefix are implicitly additive
            # when specifying build targets
            parts = [selected_pkgs[0]]
            for pkg in selected_pkgs[1:]:
                parts.append(pkg if pkg.startswith('-') else '+' + pkg)
            selection = ' '.join(parts)
        else:
            selection = '//...'
        changes = 'set(%s)' % ' '.join(changed_pkgs) if changed_pkgs else '//...'
        query_pat = 'kind(%s, rdeps(%s, %s)) except attr(\'tags\', \'manual\', //...)'
        output = self.check_output(
            'query',
            '--keep_going',
            '--noshow_progress',
            query_pat % (kind, selection, changes),
            config=False,
        )
        return [target for target in output.split('\n') if target.startswith("//")]
def upload_string(gcs_path, text):
    """Uploads text to gcs_path"""
    cmd = ['gsutil', '-q', '-h', 'Content-Type:text/plain', 'cp', '-', gcs_path]
    # Same output as the py2 print statement, but also valid Python 3.
    sys.stderr.write('Run: %s stdin=%s\n' % (cmd, text))
    # Feed the text on stdin rather than via a temp file.
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
    proc.communicate(input=text)
def echo_result(res):
    """Print a human-readable description of the bazel exit code res."""
    descriptions = {
        0: 'Success',
        1: 'Build failed',
        2: 'Bad environment or flags',
        3: 'Build passed, tests failed or timed out',
        4: 'Build passed, no tests found',
        5: 'Interrupted',
    }
    # Single-argument print is identical under Python 2 and 3.
    print(descriptions.get(res, 'Unknown exit code : %s' % res))
def get_version():
    """Return kubernetes version"""
    def read_stamp(path):
        with open(path) as fp:
            return fp.read().strip()
    # The check for version in bazel-genfiles can be removed once everyone is
    # off of versions before 0.25.0.
    # https://github.com/bazelbuild/bazel/issues/8651
    if os.path.isfile('bazel-genfiles/version'):
        return read_stamp('bazel-genfiles/version')
    # Intentionally unguarded: a missing bazel-bin stamp should raise.
    return read_stamp('bazel-bin/version')
def get_changed(base, pull):
    """Get affected packages between base sha and pull sha."""
    # --diff-filter=d excludes deleted files, which no longer map to targets.
    diff = check_output(
        'git', 'diff', '--name-only', '--diff-filter=d',
        '%s...%s' % (base, pull))
    # Map the changed file names onto the bazel packages that own them.
    return check_output(
        'bazel', 'query', '--noshow_progress',
        'set(%s)' % diff).split('\n')
def clean_file_in_dir(dirname, filename):
    """Recursively remove every file named filename under dirname."""
    for parent, _, filenames in os.walk(dirname):
        # File names are unique within a directory, so one check suffices.
        if filename in filenames:
            os.remove(os.path.join(parent, filename))
def main(args):
    """Trigger a bazel build/test run, and upload results."""
    # pylint:disable=too-many-branches, too-many-statements, too-many-locals
    # Install any python dependency files requested via --install before running bazel.
    if args.install:
        for install in args.install:
            if not os.path.isfile(install):
                raise ValueError('Invalid install path: %s' % install)
            check('pip', 'install', '-r', install)
    bazel = Bazel(args.config)
    # Sanity check: confirms bazel is on PATH and logs its version.
    bazel.check('version', config=False)
    res = 0
    try:
        # affected stays None when --affected is not set; [] (no matching
        # packages) short-circuits the queries below.
        affected = None
        if args.affected:
            base = os.getenv('PULL_BASE_SHA', '')
            pull = os.getenv('PULL_PULL_SHA', 'HEAD')
            if not base:
                raise ValueError('PULL_BASE_SHA must be set!')
            affected = get_changed(base, pull)
        build_pkgs = []
        manual_build_targets = []
        test_pkgs = []
        manual_test_targets = []
        # Each flag is a single space-separated string of target patterns.
        if args.build:
            build_pkgs = args.build.split(' ')
        if args.manual_build:
            manual_build_targets = args.manual_build.split(' ')
        if args.test:
            test_pkgs = args.test.split(' ')
        if args.manual_test:
            manual_test_targets = args.manual_test.split(' ')
        buildables = []
        if build_pkgs or manual_build_targets or affected:
            buildables = bazel.query('.*_binary', build_pkgs, affected) + manual_build_targets
        if args.release:
            buildables.extend(args.release.split(' '))
        if buildables:
            bazel.check('build', *buildables)
        else:
            # Call bazel build regardless, to establish bazel symlinks
            bazel.check('build')
        # clean up previous test.xml
        clean_file_in_dir('./bazel-testlogs', 'test.xml')
        if test_pkgs or manual_test_targets or affected:
            tests = bazel.query('test', test_pkgs, affected) + manual_test_targets
            if tests:
                if args.test_args:
                    tests = args.test_args + tests
                bazel.check('test', *tests)
    except subprocess.CalledProcessError as exp:
        # Remember the failure but fall through so results still get coalesced.
        res = exp.returncode
    # NOTE(review): 'and' binds tighter than 'or', so this reads as
    # `args.push or (args.release and res == 0)` — --push uploads even when
    # res != 0. Confirm that is intended.
    if args.push or args.release and res == 0:
        version = get_version()
        if not version:
            print 'Kubernetes version missing; not uploading ci artifacts.'
            res = 1
        else:
            try:
                if args.version_suffix:
                    version += args.version_suffix
                gcs_build = '%s/%s' % (args.gcs, version)
                bazel.check('run', '//:push-build', '--', gcs_build)
                # log push-build location to path child jobs can find
                # (gs://<shared-bucket>/$PULL_REFS/bazel-build-location.txt)
                pull_refs = os.getenv('PULL_REFS', '')
                gcs_shared = os.path.join(args.gcs_shared, pull_refs, 'bazel-build-location.txt')
                if pull_refs:
                    upload_string(gcs_shared, gcs_build)
                if args.publish_version:
                    upload_string(args.publish_version, version)
            except subprocess.CalledProcessError as exp:
                res = exp.returncode
    # Coalesce test results into one file for upload.
    check(test_infra('hack/coalesce.py'))
    echo_result(res)
    if res != 0:
        sys.exit(res)
def create_parser():
    """Create argparser."""
    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add(
        '--affected', action='store_true',
        help='If build/test affected targets. Filtered by --build and --test flags.')
    add(
        '--build', help='Bazel build target patterns, split by one space')
    add(
        '--manual-build',
        help='Bazel build targets that should always be manually included, split by one space'
    )
    add(
        '--config', action='append', help='--config=foo rules to apply to bazel commands')
    # TODO(krzyzacy): Convert to bazel build rules
    add(
        '--install', action="append", help='Python dependency(s) that need to be installed')
    add(
        '--push', action='store_true', help='Push release without building it')
    add(
        '--release', help='Run bazel build, and push release build to --gcs bucket')
    add(
        '--gcs-shared',
        default="gs://kubernetes-jenkins/shared-results/",
        help='If $PULL_REFS is set push build location to this bucket')
    add(
        '--publish-version',
        help='publish GCS file here with the build version, like ci/latest.txt',
    )
    add(
        '--test', help='Bazel test target patterns, split by one space')
    add(
        '--manual-test',
        help='Bazel test targets that should always be manually included, split by one space'
    )
    add(
        '--test-args', action="append", help='Bazel test args')
    add(
        '--gcs',
        default='gs://k8s-release-dev/bazel',
        help='GCS path for where to push build')
    add(
        '--version-suffix',
        help='version suffix for build pushing')
    return parser
def parse_args(args=None):
    """Return parsed args."""
    # args=None makes argparse fall back to sys.argv[1:].
    return create_parser().parse_args(args)
# Entry point: parse CLI flags and run the build/test/upload pipeline.
if __name__ == '__main__':
    main(parse_args())
| michelle192837/test-infra | scenarios/kubernetes_bazel.py | Python | apache-2.0 | 10,159 |
import argparse
import logging
import django
from django.db import connection, transaction
from framework.celery_tasks import app as celery_app
logger = logging.getLogger(__name__)
# Phase 1 DDL (run via --start): add the new timestamp columns as NULLable so
# the ALTERs are metadata-only and cheap; backfill happens later in batches.
ADD_COLUMNS = [
    'ALTER TABLE osf_basefilenode ADD COLUMN created timestamp with time zone;',
    'ALTER TABLE osf_basefilenode ADD COLUMN modified timestamp with time zone;',
    "ALTER TABLE osf_blacklistguid ADD COLUMN created timestamp with time zone;",
    "ALTER TABLE osf_blacklistguid ADD COLUMN modified timestamp with time zone;",
    'ALTER TABLE osf_fileversion ADD COLUMN created timestamp with time zone;',
    'ALTER TABLE osf_fileversion ADD COLUMN modified timestamp with time zone;',
    "ALTER TABLE osf_guid ADD COLUMN modified timestamp with time zone;",
    'ALTER TABLE osf_nodelog ADD COLUMN created timestamp with time zone;',
    'ALTER TABLE osf_nodelog ADD COLUMN modified timestamp with time zone;',
    "ALTER TABLE osf_pagecounter ADD COLUMN created timestamp with time zone;",
    "ALTER TABLE osf_pagecounter ADD COLUMN modified timestamp with time zone;",
    "ALTER TABLE osf_abstractnode ADD COLUMN last_logged timestamp with time zone;",
    "ALTER TABLE osf_institution ADD COLUMN last_logged timestamp with time zone;",
]
# Incremental backfill statements, executed repeatedly by the celery beat task.
# Each run updates at most 1000 NULL rows under a short statement_timeout so
# production traffic is not blocked; RETURNING id lets run_sql() detect when a
# table has no rows left to migrate.
# NOTE: run_sql() extracts the table name as token index 5 of each statement,
# so the "SET statement_timeout = <ms>; UPDATE <table> ..." prefix shape must
# be preserved.
POPULATE_COLUMNS = [
    "SET statement_timeout = 10000; UPDATE osf_basefilenode SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE id IN (SELECT id FROM osf_basefilenode WHERE created IS NULL LIMIT 1000) RETURNING id;",
    "SET statement_timeout = 10000; UPDATE osf_blacklistguid SET created='epoch', modified='epoch' WHERE id IN (SELECT id FROM osf_blacklistguid WHERE created IS NULL LIMIT 1000) RETURNING id;",
    "SET statement_timeout = 10000; UPDATE osf_fileversion SET created=date_created, modified='epoch' WHERE id IN (SELECT id FROM osf_fileversion WHERE created IS NULL LIMIT 1000) RETURNING id;",
    "SET statement_timeout = 10000; UPDATE osf_guid SET modified='epoch' WHERE id IN (SELECT id FROM osf_guid WHERE modified IS NULL LIMIT 1000) RETURNING id;",
    "SET statement_timeout = 50000; UPDATE osf_nodelog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE id IN (SELECT id FROM osf_nodelog WHERE created IS NULL LIMIT 1000) RETURNING id;",
    "SET statement_timeout = 10000; UPDATE osf_pagecounter SET created='epoch', modified='epoch' WHERE id IN (SELECT id FROM osf_pagecounter WHERE created IS NULL LIMIT 1000) RETURNING id;",
]
FINALIZE_MIGRATION = [
"UPDATE osf_basefilenode SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE created IS NULL;",
'ALTER TABLE osf_basefilenode ALTER COLUMN created SET NOT NULL;',
'ALTER TABLE osf_basefilenode ALTER COLUMN modified SET NOT NULL;',
"UPDATE osf_blacklistguid SET created='epoch', modified='epoch' WHERE created IS NULL;",
"ALTER TABLE osf_blacklistguid ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_blacklistguid ALTER COLUMN modified SET NOT NULL;",
"UPDATE osf_fileversion SET created=date_created, modified='epoch' WHERE created IS NULL;",
'ALTER TABLE osf_fileversion ALTER COLUMN created SET NOT NULL;',
'ALTER TABLE osf_fileversion ALTER COLUMN modified SET NOT NULL;',
'ALTER TABLE osf_fileversion DROP COLUMN date_created;',
"UPDATE osf_guid SET modified='epoch' WHERE modified IS NULL;",
"ALTER TABLE osf_guid ALTER COLUMN modified SET NOT NULL;",
"UPDATE osf_nodelog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE created IS NULL;",
'ALTER TABLE osf_nodelog ALTER COLUMN created SET NOT NULL;',
'ALTER TABLE osf_nodelog ALTER COLUMN modified SET NOT NULL;',
"UPDATE osf_pagecounter SET created='epoch', modified='epoch' WHERE created IS NULL;",
"ALTER TABLE osf_pagecounter ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_pagecounter ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_apioauth2application ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_apioauth2personaltoken ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_apioauth2personaltoken ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_apioauth2scope ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_apioauth2scope ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_archivejob ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_archivejob ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_archivetarget ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_archivetarget ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_citationstyle ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_citationstyle ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_conference ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_conference ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_draftregistration ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_draftregistration ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_draftregistrationapproval ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_draftregistrationapproval ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_draftregistrationlog ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_draftregistrationlog ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_embargo ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_embargo ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_embargoterminationapproval ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_embargoterminationapproval ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_externalaccount ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_externalaccount ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_identifier ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_identifier ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_institution ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_institution ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_mailrecord ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_mailrecord ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_metaschema ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_metaschema ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_nodelicense ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_nodelicense ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_nodelicenserecord ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_nodelicenserecord ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_noderelation ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_noderelation ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_notificationdigest ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_notificationdigest ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_notificationsubscription ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_notificationsubscription ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_osfuser ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_osfuser ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_preprintprovider ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_preprintprovider ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_privatelink ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_queuedmail ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_queuedmail ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_registrationapproval ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_registrationapproval ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_retraction ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_retraction ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_subject ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_subject ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_tag ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_tag ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_useractivitycounter ADD COLUMN created timestamp with time zone;",
"ALTER TABLE osf_useractivitycounter ADD COLUMN modified timestamp with time zone;",
"ALTER TABLE osf_abstractnode RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_abstractnode RENAME COLUMN date_modified TO modified;",
"ALTER TABLE osf_action RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_action RENAME COLUMN date_modified TO modified;",
"ALTER TABLE osf_apioauth2application RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_comment RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_comment RENAME COLUMN date_modified TO modified;",
"ALTER TABLE osf_fileversion RENAME COLUMN date_modified TO external_modified;",
"ALTER TABLE osf_preprintservice RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_preprintservice RENAME COLUMN date_modified TO modified;",
"ALTER TABLE osf_privatelink RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_session RENAME COLUMN date_created TO created;",
"ALTER TABLE osf_session RENAME COLUMN date_modified TO modified;",
"""
UPDATE osf_abstractnode
SET last_logged=(
SELECT date
FROM osf_nodelog
WHERE node_id = "osf_abstractnode"."id"
ORDER BY date DESC
LIMIT 1)
WHERE (SELECT COUNT(id) FROM osf_nodelog WHERE node_id = "osf_abstractnode"."id" LIMIT 1) > 0;
""",
"""
UPDATE osf_abstractnode
SET last_logged=modified
WHERE (SELECT COUNT(id) FROM osf_nodelog WHERE node_id = "osf_abstractnode"."id" LIMIT 1) = 0;
""",
"UPDATE osf_apioauth2application SET modified='epoch';",
"UPDATE osf_apioauth2personaltoken SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_apioauth2scope SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_archivejob SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_archivetarget SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_citationstyle SET created=date_parsed, modified='epoch';",
"UPDATE osf_conference SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_draftregistration SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_draftregistrationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_draftregistrationlog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_embargo SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_embargoterminationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_externalaccount SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_identifier SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_institution SET created='epoch', modified='epoch';",
"UPDATE osf_mailrecord SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_metaschema SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_nodelicense SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_nodelicenserecord SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"""
UPDATE osf_noderelation SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch'
WHERE LENGTH(_id) > 5;
UPDATE osf_noderelation SET created='epoch', modified='epoch'
WHERE LENGTH(_id) <= 5;
""",
"UPDATE osf_notificationdigest SET created=timestamp, modified='epoch';",
"UPDATE osf_notificationsubscription SET created='epoch', modified='epoch';",
"UPDATE osf_osfuser SET created='epoch', modified='epoch';",
"UPDATE osf_preprintprovider SET created='epoch', modified='epoch';",
"UPDATE osf_privatelink SET modified='epoch';",
"UPDATE osf_queuedmail SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_registrationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_retraction SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_subject SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
"UPDATE osf_tag SET created='epoch', modified='epoch';",
"UPDATE osf_useractivitycounter SET created='epoch', modified='epoch';",
"ALTER TABLE osf_apioauth2application ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_apioauth2scope ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_apioauth2scope ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_archivejob ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_archivejob ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_archivetarget ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_archivetarget ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_citationstyle ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_citationstyle ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_conference ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_conference ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_draftregistration ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_draftregistration ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_draftregistrationapproval ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_draftregistrationapproval ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_draftregistrationlog ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_draftregistrationlog ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_embargo ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_embargo ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_embargoterminationapproval ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_embargoterminationapproval ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_externalaccount ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_externalaccount ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_identifier ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_identifier ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_institution ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_institution ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_mailrecord ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_mailrecord ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_metaschema ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_metaschema ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_nodelicense ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_nodelicense ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_nodelicenserecord ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_nodelicenserecord ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_noderelation ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_noderelation ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_notificationdigest ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_notificationdigest ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_notificationsubscription ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_notificationsubscription ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_osfuser ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_osfuser ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_preprintprovider ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_preprintprovider ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_privatelink ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_queuedmail ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_queuedmail ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_registrationapproval ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_registrationapproval ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_retraction ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_retraction ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_subject ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_subject ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_tag ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_tag ALTER COLUMN modified SET NOT NULL;",
"ALTER TABLE osf_useractivitycounter ALTER COLUMN created SET NOT NULL;",
"ALTER TABLE osf_useractivitycounter ALTER COLUMN modified SET NOT NULL;"
]
@celery_app.task
def run_sql(sql):
    """Execute one incremental backfill batch from POPULATE_COLUMNS.

    Raises once a table has no remaining NULL rows so the exception surfaces
    in Sentry as a "this table is done" notification.
    """
    # POPULATE_COLUMNS statements all start with
    # "SET statement_timeout = <ms>; UPDATE <table> ...", so the table name is
    # the sixth space-separated token. Fragile: keep the statement shape in sync.
    table = sql.split(' ')[5]
    logger.info('Updating table {}'.format(table))
    with transaction.atomic():
        with connection.cursor() as cursor:
            cursor.execute(sql)
            # RETURNING id yields one row per updated record; an empty result
            # means the backfill for this table is complete.
            rows = cursor.fetchall()
            if not rows:
                # Intentional: used as a completion signal, not an error.
                raise Exception('Sentry notification that {} is migrated'.format(table))
@celery_app.task(name='scripts.premigrate_created_modified')
def migrate():
    """Queue one async batch per POPULATE_COLUMNS statement.

    To update data slowly without requiring lots of downtime, add the
    following to CELERYBEAT_SCHEDULE in website/settings::

        '1-minute-incremental-migrations':{
            'task': 'scripts.premigrate_created_modified',
            'schedule': crontab(minute='*/1'),
        },

    And let it run for about a week.
    """
    for sql_statement in POPULATE_COLUMNS:
        run_sql.delay(sql_statement)
def add_columns():
    """Run every ADD COLUMN statement (phase 1 of the migration)."""
    for ddl in ADD_COLUMNS:
        with connection.cursor() as cur:
            cur.execute(ddl)
def finalize_migration():
    """Run every finalization statement (phase 2 of the migration)."""
    for ddl in FINALIZE_MIGRATION:
        with connection.cursor() as cur:
            cur.execute(ddl)
def main():
    """CLI driver: run either the --start or --finish phase in one transaction.

    Raising inside the atomic block rolls everything back, which is how both
    --dry and the argument-validation errors abort cleanly.
    """
    django.setup()
    parser = argparse.ArgumentParser(
        description='Handles long-running, non-breaking db changes slowly without requiring much downtime'
    )
    parser.add_argument(
        '--dry', action='store_true', dest='dry_run',
        help='Run migration and roll back changes to db',
    )
    parser.add_argument(
        '--start', action='store_true', dest='start',
        help='Adds columns',
    )
    parser.add_argument(
        '--finish', action='store_true', dest='finish',
        help='Sets NOT NULL',
    )
    args = parser.parse_args()

    if args.start and args.finish:
        raise Exception('Cannot start and finish in the same run')

    with transaction.atomic():
        if args.start:
            add_columns()
        elif args.finish:
            finalize_migration()
        else:
            raise Exception('Must specify start or finish')
        if args.dry_run:
            raise Exception('Dry Run -- Transaction aborted.')
# Script entry point (the migrate() task is invoked by celery beat instead).
if __name__ == '__main__':
    main()
| laurenrevere/osf.io | scripts/premigrate_created_modified.py | Python | apache-2.0 | 20,748 |
//
// GoogleAppEngineAuth.h
// whaleops
//
// Created by cameron ring on 2/26/10.
// Copyright 2010 __MyCompanyName__. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "GoogleClientLogin.h"
// GoogleAppEngineAuth is a GoogleClientLoginDelegate because we need to do some more work after getting the auth
// cookie to get the app engine auth cookie
@interface GoogleAppEngineAuth : NSObject <GoogleClientLoginDelegate> {
    id<GoogleClientLoginDelegate> m_delegate;      // weak; forwarded the final auth result
    GoogleClientLogin *m_googleClientLogin;        // performs the first-stage ClientLogin request
    NSURL *m_appURL;                               // base URL of the App Engine app being authenticated against
    BOOL m_useDevServer;                           // YES when targeting the local dev_appserver
}

// Delegate is a weak pointer. If the delegate can go away before this class, you need to clear
// the delegate in the delegate's dealloc method
@property (nonatomic, assign) id<GoogleClientLoginDelegate> delegate;

// Designated initializer: appURL is the App Engine application the auth cookie will be fetched for.
-(id)initWithDelegate:(id<GoogleClientLoginDelegate>)delegate andAppURL:(NSURL *)appURL;

// Try to auth with the passed-in credentials for Google App Engine:
// username User's full email address. It must include the domain (i.e. [email protected]).
// password User's password
// source Short string identifying your application, for logging purposes. This string should take the form:
// "companyName-applicationName-versionID".
// captcha (optional) String entered by the user as an answer to a CAPTCHA challenge.
// captchaToken (optional) Token representing the specific CAPTCHA challenge. Google supplies this token and the CAPTCHA image URL
// in a login failed response with the error code "CaptchaRequired".
-(void)authWithUsername:(NSString *)username andPassword:(NSString *)password withSource:(NSString *)source;
-(void)authWithUsername:(NSString *)username andPassword:(NSString *)password andCaptcha:(NSString *)captcha
        andCaptchaToken:(NSString *)captchaToken withSource:(NSString *)source;

@end
| kehollin/paco | Paco-iOS/vendor/touchengine/iPhone/GoogleAppEngineAuthGithub/GoogleAppEngineAuth.h | C | apache-2.0 | 1,887 |
package eu.drus.jpa.unit.test.model;
/**
 * Kinds of contact details that can be stored for an entity.
 *
 * <p>Note: constant order is part of the persisted/ordinal contract in some
 * mappings — do not reorder.
 */
public enum ContactType {
    TELEPHONE,
    MOBILE,
    FAX,
    EMAIL
}
| dadrus/jpa-unit | mongodb/src/integrationtest/java/eu/drus/jpa/unit/test/model/ContactType.java | Java | apache-2.0 | 112 |
// -----------------------------------------------------------------------
// <copyright file="LoggingElement.cs" company="OSharp开源团队">
// Copyright (c) 2014-2015 OSharp. All rights reserved.
// </copyright>
// <last-editor>郭明锋</last-editor>
// <last-date>2015-06-30 15:39</last-date>
// -----------------------------------------------------------------------
using System.Configuration;
namespace OSharp.Core.Configs.ConfigFile
{
/// <summary>
/// 日志配置节点
/// </summary>
    /// <summary>
    /// Logging configuration element.
    /// </summary>
    internal class LoggingElement : ConfigurationElement
    {
        // Config-file attribute/element keys for the child sections.
        private const string LoggingEntryKey = "entry";
        private const string DataLoggingKey = "data";
        private const string BasicLoggingKey = "basic";

        /// <summary>
        /// Gets or sets the logging entry configuration element.
        /// </summary>
        [ConfigurationProperty(LoggingEntryKey)]
        public virtual LoggingEntryElement LoggingEntry
        {
            get { return (LoggingEntryElement)this[LoggingEntryKey]; }
            set { this[LoggingEntryKey] = value; }
        }

        /// <summary>
        /// Gets or sets the data logging configuration element.
        /// </summary>
        [ConfigurationProperty(DataLoggingKey)]
        public virtual DataLoggingElement DataLogging
        {
            get { return (DataLoggingElement)this[DataLoggingKey]; }
            set { this[DataLoggingKey] = value; }
        }

        /// <summary>
        /// Gets or sets the basic logging configuration element.
        /// </summary>
        [ConfigurationProperty(BasicLoggingKey)]
        public virtual BasicLoggingElement BasicLogging
        {
            get { return (BasicLoggingElement)this[BasicLoggingKey]; }
            set { this[BasicLoggingKey] = value; }
        }
    }
} | ccccccmd/osharp-1 | src/OSharp.Core/Configs/ConfigFile/Logging/LoggingElement.cs | C# | apache-2.0 | 1,793 |
var createDefaultStream = require('./lib/default_stream');
var Render = require('./lib/render');
var Test = require('./lib/test');

// The module export itself is a ready-to-use harness; createHarness lets
// callers build isolated ones.
exports = module.exports = createHarness();
exports.createHarness = createHarness;
exports.Test = Test;

// Feature detection: browsers / stripped-down environments may lack
// process.on('exit') or process.exit entirely.
var canEmitExit = typeof process !== 'undefined' && process
    && typeof process.on === 'function'
;
var canExit = typeof process !== 'undefined' && process
    && typeof process.exit === 'function'
;

// Single shared process-exit hook: registers one 'exit' listener and fans out
// to every callback pushed via onexit(cb), avoiding listener-leak warnings.
var onexit = (function () {
    var stack = [];
    if (canEmitExit) process.on('exit', function (code) {
        for (var i = 0; i < stack.length; i++) stack[i](code);
    });
    return function (cb) { stack.push(cb) };
})();
// Builds a test harness: a `test(name, conf, cb)` function that queues tests,
// runs them one at a time (subtests before later siblings), renders TAP output,
// and sets the process exit code on failure.
function createHarness (conf_) {
    var pending = [];      // queued run-callbacks waiting for the current test
    var running = false;   // true while a test is executing
    var count = 0;         // tests (incl. subtests) registered but not ended
    var began = false;     // whether out.begin() has been emitted
    var only = false;      // name passed to test.only(), if any
    var closed = false;    // guards against double out.close()
    var out = new Render();

    var test = function (name, conf, cb) {
        count++;
        var t = new Test(name, conf, cb);
        if (!conf || typeof conf !== 'object') conf = conf_ || {};

        if (conf.exit !== false) {
            // On process exit, flush any test that never finished and fail the
            // process if it wasn't ok (unless filtered out by .only()).
            onexit(function (code) {
                t._exit();
                if (!closed) {
                    closed = true
                    out.close();
                }
                if (!code && !t._ok && (!only || name === only)) {
                    process.exit(1);
                }
            });
        }

        process.nextTick(function () {
            // Lazily attach the default output stream on first scheduled test.
            if (!out.piped) out.pipe(createDefaultStream());
            if (!began) out.begin();
            began = true;
            var run = function () {
                running = true;
                out.push(t);
                t.run();
            };
            if (only && name !== only) {
                // Skipped by .only(): un-count it and never run.
                count--;
                return;
            }
            // Serialize: run now only if nothing is running or queued.
            if (running || pending.length) {
                pending.push(run);
            }
            else run();
        });

        // Track nested subtests so the harness waits for them too.
        t.on('test', function sub (st) {
            count++;
            st.on('test', sub);
            st.on('end', onend);
        });

        t.on('end', onend);

        return t;

        function onend () {
            count--;
            if (this._progeny.length) {
                // Subtests jump the queue: they run before later siblings.
                var unshifts = this._progeny.map(function (st) {
                    return function () {
                        running = true;
                        out.push(st);
                        st.run();
                    };
                });
                pending.unshift.apply(pending, unshifts);
            }

            process.nextTick(function () {
                running = false;
                if (pending.length) return pending.shift()();
                if (count === 0 && !closed) {
                    closed = true
                    out.close();
                }
                if (conf.exit !== false && canExit && !t._ok) {
                    process.exit(1);
                }
            });
        }
    };

    // Restrict the run to a single named test; throws if called twice.
    test.only = function (name) {
        if (only) {
            throw new Error("there can only be one only test");
        }
        only = name;
        return test.apply(null, arguments);
    };

    test.stream = out;
    return test;
}
// vim: set softtabstop=4 shiftwidth=4:
| org-scn-design-studio-community/sdkpackage | src/org.scn.community.geovis/script/node_modules/grunt-contrib-uglify/node_modules/maxmin/node_modules/gzip-size/node_modules/zlib-browserify/node_modules/tape/index.js | JavaScript | apache-2.0 | 3,497 |
package com.taobao.tddl.rule;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.google.common.collect.Lists;
import com.taobao.tddl.common.exception.TddlException;
import com.taobao.tddl.rule.BaseRuleTest.Choicer;
import com.taobao.tddl.rule.exceptions.RouteCompareDiffException;
import com.taobao.tddl.rule.exceptions.TddlRuleException;
import com.taobao.tddl.rule.model.MatcherResult;
import com.taobao.tddl.rule.model.TargetDB;
import com.taobao.tddl.rule.model.sqljep.Comparative;
import com.taobao.tddl.rule.utils.MatchResultCompare;
/**
 * Common local (classpath-configured) TDDL rule routing tests.
 *
 * @author jianghang 2013-11-6 下午9:22:54
 * @since 5.0.0
 */
public class LocalRuleTest {

    static TddlRule rule;
    static TddlRule mvrRule;

    @BeforeClass
    public static void setUp() {
        ApplicationContext context = new ClassPathXmlApplicationContext("classpath:local/spring-context.xml");
        rule = (TddlRule) context.getBean("rule");
        mvrRule = (TddlRule) context.getBean("mvrRule");
    }

    @AfterClass
    public static void tearDown() throws TddlException {
        rule.destory();
        mvrRule.destory();
    }

    /**
     * Formats one routing target as "目标库:&lt;db&gt; 所要执行的表:&lt;t1&gt; &lt;t2&gt; ".
     * The exact string (including the trailing space) must match the legacy
     * expected values used in the assertions below.
     */
    private static String formatTargetDb(TargetDB db) {
        StringBuilder sb = new StringBuilder("目标库:");
        sb.append(db.getDbIndex());
        sb.append(" 所要执行的表:");
        for (String table : db.getTableNames()) {
            sb.append(table);
            sb.append(" ");
        }
        return sb.toString();
    }

    /** Equality condition routes to exactly one db/table. */
    @Test
    public void testRule_equals() {
        MatcherResult result = rule.route("nserch", "message_id = 1");
        List<TargetDB> dbs = result.getCalculationResult();
        Assert.assertEquals(1, dbs.size());
        Assert.assertEquals("NSEARCH_GROUP_2", dbs.get(0).getDbIndex());
        Assert.assertEquals(1, dbs.get(0).getTableNames().size());
        Assert.assertEquals("nserch_1", dbs.get(0).getTableNames().iterator().next());
    }

    /** IN-list condition fans out to several dbs and tables. */
    @Test
    public void testRule_in() {
        String conditionStr = "message_id in (996,997,998,999,1000,1001,1002,1003,1004):int";
        MatcherResult result = rule.route("nserch", conditionStr);
        List<TargetDB> dbs = result.getCalculationResult();
        Assert.assertEquals(3, dbs.size());
        Assert.assertEquals("目标库:NSEARCH_GROUP_1 所要执行的表:nserch_18 nserch_15 nserch_12 ",
            formatTargetDb(dbs.get(0)));
    }

    @Test(expected = TddlRuleException.class)
    public void testRoute_noVersion() {
        // Version "V1" does not exist on the single-version rule.
        rule.route("nserch", "message_id = 1", "V1");
    }

    /** Whitespace inside the IN list must be trimmed before routing. */
    @Test
    public void testRule_Trim() {
        String conditionStr = "message_id in (996 ,997 , 998,999,1000 ,1001, 1002,1003,1004):int";
        MatcherResult result = rule.route("nserch", conditionStr);
        List<TargetDB> dbs = result.getCalculationResult();
        Assert.assertEquals(3, dbs.size());
        Assert.assertEquals("目标库:NSEARCH_GROUP_1 所要执行的表:nserch_18 nserch_15 nserch_12 ",
            formatTargetDb(dbs.get(0)));
    }

    /** Range + date condition still routes to the expected db/table. */
    @Test
    public void testRule_date() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String conditionStr = "message_id >24:int and message_id<=26:int;gmt_create>=" + sdf.format(new Date())
                              + ":date";
        MatcherResult result = rule.route("nserch", conditionStr);
        Assert.assertEquals(true,
            MatchResultCompare.oriDbTabCompareWithMatchResult(result, "NSEARCH_GROUP_2", "nserch_1"));
    }

    /** An empty condition triggers a full table scan across all groups. */
    @Test
    public void testRule_FullTableScan() {
        String conditionStr = "";
        MatcherResult result = rule.route("nserch", conditionStr);
        List<TargetDB> dbs = result.getCalculationResult();
        // The original test only printed the first three targets; kept as-is.
        for (int i = 0; i < 3; i++) {
            System.out.println(formatTargetDb(dbs.get(i)));
        }
    }

    /** Routing with an explicitly specified rule version. */
    @Test
    public void testRouteWithSpecifyRuleVersion() {
        MatcherResult result = mvrRule.route("nserch", "message_id = 1", "V1");
        List<TargetDB> dbs = result.getCalculationResult();
        Assert.assertEquals(1, dbs.size());
        Assert.assertEquals("NSEARCH_GROUP_2", dbs.get(0).getDbIndex());
        Assert.assertEquals(1, dbs.get(0).getTableNames().size());
        Assert.assertEquals("nserch_1", dbs.get(0).getTableNames().iterator().next());
    }

    /** Multi-version route-and-compare with a Comparative-based condition. */
    @Test
    public void testRouteMultiVersionAndCompareTSqlTypeStringString() {
        Choicer choicer = new Choicer();
        choicer.addComparative("MESSAGE_ID", new Comparative(Comparative.Equivalent, 1)); // column name must be upper-case
        MatcherResult result = null;
        try {
            result = mvrRule.routeMverAndCompare(false, "nserch", choicer, Lists.newArrayList());
        } catch (RouteCompareDiffException e) {
            Assert.fail(ExceptionUtils.getFullStackTrace(e));
        }

        List<TargetDB> dbs = result.getCalculationResult();
        Assert.assertEquals(1, dbs.size());
        Assert.assertEquals("NSEARCH_GROUP_2", dbs.get(0).getDbIndex());
        Assert.assertEquals(1, dbs.get(0).getTableNames().size());
        Assert.assertEquals("nserch_1", dbs.get(0).getTableNames().iterator().next());
    }
}
| cpsing/tddl | tddl-rule/src/test/java/com/taobao/tddl/rule/LocalRuleTest.java | Java | apache-2.0 | 6,698 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service.reads.trackwarnings;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import org.apache.cassandra.exceptions.RequestFailureReason;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.ParamType;
/**
 * Accumulates per-replica warning/abort counters for tracked read conditions
 * (tombstones read, local read size, row index size) and translates incoming
 * message params into updates on those counters.
 */
public class WarningContext
{
    // The only message params this context understands; was not final before, but it is
    // a constant lookup set and must never be mutated after class initialization.
    private static final EnumSet<ParamType> SUPPORTED = EnumSet.of(ParamType.TOMBSTONE_WARNING, ParamType.TOMBSTONE_ABORT,
                                                                   ParamType.LOCAL_READ_SIZE_WARN, ParamType.LOCAL_READ_SIZE_ABORT,
                                                                   ParamType.ROW_INDEX_SIZE_WARN, ParamType.ROW_INDEX_SIZE_ABORT);

    final WarnAbortCounter tombstones = new WarnAbortCounter();
    final WarnAbortCounter localReadSize = new WarnAbortCounter();
    final WarnAbortCounter rowIndexTooLarge = new WarnAbortCounter();

    /**
     * @return whether any of the given param keys is one this context tracks
     */
    public static boolean isSupported(Set<ParamType> keys)
    {
        return !Collections.disjoint(keys, SUPPORTED);
    }

    /**
     * Folds the recognized warning/abort params of a single reply from {@code from}
     * into the matching counters.
     *
     * @return the failure reason of the first abort param encountered (remaining
     *         params are not processed in that case), or {@code null} if the reply
     *         carried warnings only
     */
    public RequestFailureReason updateCounters(Map<ParamType, Object> params, InetAddressAndPort from)
    {
        for (Map.Entry<ParamType, Object> entry : params.entrySet())
        {
            WarnAbortCounter counter = null;
            RequestFailureReason reason = null;
            switch (entry.getKey())
            {
                // Each *_ABORT case intentionally falls through to its *_WARN case:
                // the fall-through selects the shared counter, while the non-null
                // reason marks the entry as an abort rather than a warning.
                case ROW_INDEX_SIZE_ABORT:
                    reason = RequestFailureReason.READ_SIZE;
                case ROW_INDEX_SIZE_WARN:
                    counter = rowIndexTooLarge;
                    break;
                case LOCAL_READ_SIZE_ABORT:
                    reason = RequestFailureReason.READ_SIZE;
                case LOCAL_READ_SIZE_WARN:
                    counter = localReadSize;
                    break;
                case TOMBSTONE_ABORT:
                    reason = RequestFailureReason.READ_TOO_MANY_TOMBSTONES;
                case TOMBSTONE_WARNING:
                    counter = tombstones;
                    break;
            }
            if (reason != null)
            {
                counter.addAbort(from, ((Number) entry.getValue()).longValue());
                return reason;
            }
            if (counter != null)
                counter.addWarning(from, ((Number) entry.getValue()).longValue());
        }
        return null;
    }

    /** @return a snapshot of all three counters, for reporting. */
    public WarningsSnapshot snapshot()
    {
        return WarningsSnapshot.create(tombstones.snapshot(), localReadSize.snapshot(), rowIndexTooLarge.snapshot());
    }
}
| belliottsmith/cassandra | src/java/org/apache/cassandra/service/reads/trackwarnings/WarningContext.java | Java | apache-2.0 | 3,361 |
#
# Author:: kaustubh (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../service"
class Chef
  class Provider
    class Service
      # Service provider for AIX subsystems managed by the System Resource
      # Controller (SRC): starts/stops via startsrc/stopsrc, reloads via
      # refresh, and reads status via lssrc. A service name may refer either
      # to a single subsystem (-s) or to a resource group (-g).
      class Aix < Chef::Provider::Service
        # True unless the last lssrc status probe failed (see
        # determine_current_status!).
        attr_reader :status_load_success

        provides :service, os: "aix"

        def initialize(new_resource, run_context)
          super
        end

        # Probes SRC for the subsystem's current state and returns the
        # populated current_resource.
        def load_current_resource
          @current_resource = Chef::Resource::Service.new(@new_resource.name)
          @current_resource.service_name(@new_resource.service_name)

          # Assume the probes succeed; determine_current_status! flips
          # @status_load_success to false if lssrc fails.
          @status_load_success = true
          @priority_success = true
          @is_resource_group = false

          determine_current_status!

          @current_resource
        end

        # Starts the subsystem, or the whole group when the name refers to a
        # resource group.
        def start_service
          if @is_resource_group
            shell_out!("startsrc -g #{@new_resource.service_name}")
          else
            shell_out!("startsrc -s #{@new_resource.service_name}")
          end
        end

        # Stops the subsystem (or group).
        def stop_service
          if @is_resource_group
            shell_out!("stopsrc -g #{@new_resource.service_name}")
          else
            shell_out!("stopsrc -s #{@new_resource.service_name}")
          end
        end

        # SRC has no single restart command, so restart is stop then start.
        def restart_service
          stop_service
          start_service
        end

        # Asks the subsystem (or group) to re-read its configuration.
        def reload_service
          if @is_resource_group
            shell_out!("refresh -g #{@new_resource.service_name}")
          else
            shell_out!("refresh -s #{@new_resource.service_name}")
          end
        end

        # In why-run mode, carry on with an assumed "not running" state when
        # the status probe failed instead of aborting the run.
        def shared_resource_requirements
          super
          requirements.assert(:all_actions) do |a|
            a.assertion { @status_load_success }
            a.whyrun ["Service status not available. Assuming a prior action would have installed the service.", "Assuming status of not running."]
          end
        end

        def define_resource_requirements
          # FIXME? need reload from service.rb
          shared_resource_requirements
        end

        protected

        # Sets current_resource.running from lssrc output; marks
        # @status_load_success false when the probe fails.
        def determine_current_status!
          logger.trace "#{@new_resource} using lssrc to check the status"
          begin
            if is_resource_group?
              # Groups as a whole have no notion of whether they're running
              @current_resource.running false
            else
              service = shell_out!("lssrc -s #{@new_resource.service_name}").stdout
              # lssrc prints "subsystem group PID status"; the last word is the
              # status, "active" meaning running.
              if service.split(" ").last == "active"
                @current_resource.running true
              else
                @current_resource.running false
              end
            end
            logger.trace "#{@new_resource} running: #{@current_resource.running}"
            # ShellOut sometimes throws different types of Exceptions than ShellCommandFailed.
            # Temporarily catching different types of exceptions here until we get Shellout fixed.
            # TODO: Remove the line before one we get the ShellOut fix.
          rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
            @status_load_success = false
            @current_resource.running false
            nil
          end
        end

        # Sets @is_resource_group when "lssrc -g <name>" succeeds, i.e. the
        # name denotes a resource group rather than a single subsystem.
        def is_resource_group?
          so = shell_out("lssrc -g #{@new_resource.service_name}")
          if so.exitstatus == 0
            logger.trace("#{@new_resource.service_name} is a group")
            @is_resource_group = true
          end
        end
      end
    end
  end
end
| chef/chef | lib/chef/provider/service/aix.rb | Ruby | apache-2.0 | 4,039 |
--
-- Test the LOCK statement
--

-- Setup: a scratch schema with two base tables, a stack of views built on
-- them, and a low-privilege role.
CREATE SCHEMA lock_schema1;
SET search_path = lock_schema1;
CREATE TABLE lock_tbl1 (a BIGINT);
CREATE TABLE lock_tbl1a (a BIGINT);
CREATE VIEW lock_view1 AS SELECT * FROM lock_tbl1;
CREATE VIEW lock_view2(a,b) AS SELECT * FROM lock_tbl1, lock_tbl1a;
CREATE VIEW lock_view3 AS SELECT * from lock_view2;
-- view with a sub-select in the target list
CREATE VIEW lock_view4 AS SELECT (select a from lock_tbl1a limit 1) from lock_tbl1;
-- view with a sub-select in the WHERE clause
CREATE VIEW lock_view5 AS SELECT * from lock_tbl1 where a in (select * from lock_tbl1a);
-- view over a subquery in the FROM clause
CREATE VIEW lock_view6 AS SELECT * from (select * from lock_tbl1) sub;
CREATE ROLE regress_rol_lock1;
ALTER ROLE regress_rol_lock1 SET search_path = lock_schema1;
GRANT USAGE ON SCHEMA lock_schema1 TO regress_rol_lock1;

-- Try all valid lock options; also try omitting the optional TABLE keyword.
BEGIN TRANSACTION;
LOCK TABLE lock_tbl1 IN ACCESS SHARE MODE;
LOCK lock_tbl1 IN ROW SHARE MODE;
LOCK TABLE lock_tbl1 IN ROW EXCLUSIVE MODE;
LOCK TABLE lock_tbl1 IN SHARE UPDATE EXCLUSIVE MODE;
LOCK TABLE lock_tbl1 IN SHARE MODE;
LOCK lock_tbl1 IN SHARE ROW EXCLUSIVE MODE;
LOCK TABLE lock_tbl1 IN EXCLUSIVE MODE;
LOCK TABLE lock_tbl1 IN ACCESS EXCLUSIVE MODE;
ROLLBACK;

-- Try using NOWAIT along with valid options.
BEGIN TRANSACTION;
LOCK TABLE lock_tbl1 IN ACCESS SHARE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN ROW SHARE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN ROW EXCLUSIVE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN SHARE UPDATE EXCLUSIVE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN SHARE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN SHARE ROW EXCLUSIVE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN EXCLUSIVE MODE NOWAIT;
LOCK TABLE lock_tbl1 IN ACCESS EXCLUSIVE MODE NOWAIT;
ROLLBACK;

-- Verify that we can lock views: locking a view should also lock every
-- table and view it references, recursively.
BEGIN TRANSACTION;
LOCK TABLE lock_view1 IN EXCLUSIVE MODE;
-- lock_view1 and lock_tbl1 are locked.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;
BEGIN TRANSACTION;
LOCK TABLE lock_view2 IN EXCLUSIVE MODE;
-- lock_view1, lock_tbl1, and lock_tbl1a are locked.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;
BEGIN TRANSACTION;
LOCK TABLE lock_view3 IN EXCLUSIVE MODE;
-- lock_view3, lock_view2, lock_tbl1, and lock_tbl1a are locked recursively.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;
BEGIN TRANSACTION;
LOCK TABLE lock_view4 IN EXCLUSIVE MODE;
-- lock_view4, lock_tbl1, and lock_tbl1a are locked.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;
BEGIN TRANSACTION;
LOCK TABLE lock_view5 IN EXCLUSIVE MODE;
-- lock_view5, lock_tbl1, and lock_tbl1a are locked.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;
BEGIN TRANSACTION;
LOCK TABLE lock_view6 IN EXCLUSIVE MODE;
-- lock_view6 and lock_tbl1 are locked.
select l.gp_segment_id, relname from pg_locks l, pg_class c
where l.relation = c.oid and relname like '%lock_%' and mode = 'ExclusiveLock'
order by relname;
ROLLBACK;

-- detecting infinite recursions in view definitions
-- (lock_view2 now references lock_view3, which references lock_view2)
CREATE OR REPLACE VIEW lock_view2 AS SELECT * from lock_view3;
BEGIN TRANSACTION;
LOCK TABLE lock_view2 IN EXCLUSIVE MODE;
ROLLBACK;
CREATE VIEW lock_view7 AS SELECT * from lock_view2;
BEGIN TRANSACTION;
LOCK TABLE lock_view7 IN EXCLUSIVE MODE;
ROLLBACK;

-- Verify that we can lock a table with inheritance children.
CREATE TABLE lock_tbl2 (b BIGINT) INHERITS (lock_tbl1);
CREATE TABLE lock_tbl3 () INHERITS (lock_tbl2);
BEGIN TRANSACTION;
LOCK TABLE lock_tbl1 * IN ACCESS EXCLUSIVE MODE;
ROLLBACK;

-- Verify that we can't lock a child table just because we have permission
-- on the parent, but that we can lock the parent only.
GRANT UPDATE ON TABLE lock_tbl1 TO regress_rol_lock1;
SET ROLE regress_rol_lock1;
BEGIN;
LOCK TABLE lock_tbl1 * IN ACCESS EXCLUSIVE MODE;
ROLLBACK;
BEGIN;
LOCK TABLE ONLY lock_tbl1;
ROLLBACK;
RESET ROLE;

--
-- Clean up
--
DROP VIEW lock_view7;
DROP VIEW lock_view6;
DROP VIEW lock_view5;
DROP VIEW lock_view4;
DROP VIEW lock_view3 CASCADE;
DROP VIEW lock_view1;
DROP TABLE lock_tbl3;
DROP TABLE lock_tbl2;
DROP TABLE lock_tbl1;
DROP TABLE lock_tbl1a;
DROP SCHEMA lock_schema1 CASCADE;
DROP ROLE regress_rol_lock1;

-- atomic ops tests
RESET search_path;
SELECT test_atomic_ops();
| 50wu/gpdb | src/test/regress/sql/lock.sql | SQL | apache-2.0 | 4,662 |
#region
using System;
using System.Globalization;
#endregion
namespace FluentAssertions.Equivalency
{
public class EnumEqualityStep : IEquivalencyStep
{
    /// <summary>
    /// Gets a value indicating whether this step can handle the current subject and/or expectation.
    /// </summary>
    public bool CanHandle(IEquivalencyValidationContext context, IEquivalencyAssertionOptions config)
    {
        Type subjectType = config.GetSubjectType(context);

        return ((subjectType != null) && subjectType.IsEnum()) ||
               ((context.Expectation != null) && context.Expectation.GetType().IsEnum());
    }

    /// <summary>
    /// Applies a step as part of the task to compare two objects for structural equality.
    /// </summary>
    /// <value>
    /// Should return <c>true</c> if the subject matches the expectation or if no additional assertions
    /// have to be executed. Should return <c>false</c> otherwise.
    /// </value>
    /// <remarks>
    /// May throw when preconditions are not met or if it detects mismatching data.
    /// </remarks>
    public bool Handle(IEquivalencyValidationContext context, IEquivalencyValidator parent,
        IEquivalencyAssertionOptions config)
    {
        switch (config.EnumEquivalencyHandling)
        {
            case EnumEquivalencyHandling.ByValue:
                // Compare the enums' underlying numeric values. Use the invariant culture so the
                // conversion does not depend on the current thread's culture settings.
                decimal? subjectsUnderlyingValue = (context.Subject != null)
                    ? Convert.ToDecimal(context.Subject, CultureInfo.InvariantCulture)
                    : (decimal?)null;
                decimal? expectationsUnderlyingValue = (context.Expectation != null)
                    ? Convert.ToDecimal(context.Expectation, CultureInfo.InvariantCulture)
                    : (decimal?)null;

                subjectsUnderlyingValue.Should().Be(expectationsUnderlyingValue, context.Because, context.BecauseArgs);
                break;

            case EnumEquivalencyHandling.ByName:
                // Compare the enums' member names instead of their numeric values.
                context.Subject.ToString().Should().Be(context.Expectation.ToString(), context.Because, context.BecauseArgs);
                break;

            default:
                throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture,
                    "Don't know how to handle {0}", config.EnumEquivalencyHandling));
        }

        return true;
    }
}
} | jeroenpot/FluentAssertions | Src/Core/Equivalency/EnumEqualityStep.cs | C# | apache-2.0 | 2,327 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<link rel="stylesheet" href="../includes/main.css" type="text/css">
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<title>Apache CloudStack | The Power Behind Your Cloud</title>
</head>
<body>
<div id="insidetopbg">
<div id="inside_wrapper">
<div class="uppermenu_panel">
<div class="uppermenu_box"></div>
</div>
<div id="main_master">
<div id="inside_header">
<div class="header_top">
<a class="cloud_logo" href="http://cloudstack.org"></a>
<div class="mainemenu_panel"></div>
</div>
</div>
<div id="main_content">
<div class="inside_apileftpanel">
<div class="inside_contentpanel" style="width:930px;">
<div class="api_titlebox">
<div class="api_titlebox_left">
<span>
Apache CloudStack v4.8.0 Domain Admin API Reference
</span>
<p></p>
<h1>deleteFirewallRule</h1>
<p>Deletes a firewall rule</p>
</div>
<div class="api_titlebox_right">
<a class="api_backbutton" href="../TOC_Domain_Admin.html"></a>
</div>
</div>
<div class="api_tablepanel">
<h2>Request parameters</h2>
<table class="apitable">
<tr class="hed">
<td style="width:200px;"><strong>Parameter Name</strong></td><td style="width:500px;">Description</td><td style="width:180px;">Required</td>
</tr>
<tr>
<td style="width:200px;"><strong>id</strong></td><td style="width:500px;"><strong>the ID of the firewall rule</strong></td><td style="width:180px;"><strong>true</strong></td>
</tr>
</table>
</div>
<div class="api_tablepanel">
<h2>Response Tags</h2>
<table class="apitable">
<tr class="hed">
<td style="width:200px;"><strong>Response Name</strong></td><td style="width:500px;">Description</td>
</tr>
<tr>
<td style="width:200px;"><strong>displaytext</strong></td><td style="width:500px;">any text associated with the success or failure</td>
</tr>
<tr>
<td style="width:200px;"><strong>success</strong></td><td style="width:500px;">true if operation is executed successfully</td>
</tr>
</table>
</div>
</div>
</div>
</div>
</div>
<div id="footer">
<div id="footer_mainmaster">
<p>Copyright © 2016 The Apache Software Foundation, Licensed under the
<a href="http://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0.</a>
<br>
Apache, CloudStack, Apache CloudStack, the Apache CloudStack logo, the CloudMonkey logo and the Apache feather logo are trademarks of The Apache Software Foundation.</p>
</div>
</div>
</div>
</div>
</body>
</html>
| pdion891/cloudstack-www | source/api/apidocs-4.8/domain_admin/deleteFirewallRule.html | HTML | apache-2.0 | 2,647 |
/*****************************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
package org.apache.xmpbox.type;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Object representation for arrays content This Class could be used to define directly a property with more than one
* field (structure) and also schemas
*
* @author a183132
*
*/
public class ComplexPropertyContainer
{
    // Ordered list of child properties; addProperty keeps at most one element per
    // isSameProperty equivalence class.
    private final List<AbstractField> properties;

    /**
     * Complex Property type constructor (namespaceURI is given)
     */
    public ComplexPropertyContainer()
    {
        properties = new ArrayList<AbstractField>();
    }

    /**
     * Give the first property found in this container with type and localname expected
     *
     * @param localName
     *            the localname of property wanted
     * @param type
     *            the property type of property wanted
     * @return the property wanted, or null if no property with that name and type exists
     */
    protected AbstractField getFirstEquivalentProperty(String localName, Class<? extends AbstractField> type)
    {
        List<AbstractField> list = getPropertiesByLocalName(localName);
        if (list != null)
        {
            for (AbstractField abstractField : list)
            {
                if (abstractField.getClass().equals(type))
                {
                    return abstractField;
                }
            }
        }
        return null;
    }

    /**
     * Add a property to the current structure, replacing any existing equivalent
     * property (per {@link #isSameProperty}).
     *
     * @param obj
     *            the property to add
     */
    public void addProperty(AbstractField obj)
    {
        // removeProperty is a no-op when no equivalent property is present, so a
        // separate containsProperty pre-check is unnecessary.
        removeProperty(obj);
        properties.add(obj);
    }

    /**
     * Return all children associated to this property
     *
     * @return All Properties contained in this container
     */
    public List<AbstractField> getAllProperties()
    {
        return properties;
    }

    /**
     * Return all properties with this specified localName
     *
     * @param localName
     *            the local name wanted
     * @return All properties with local name which match with localName given, or null
     *         if there are none (kept as null, not an empty list, for callers that
     *         test the result against null)
     */
    public List<AbstractField> getPropertiesByLocalName(String localName)
    {
        List<AbstractField> absFields = getAllProperties();
        if (absFields != null)
        {
            List<AbstractField> list = new ArrayList<AbstractField>();
            for (AbstractField abstractField : absFields)
            {
                if (abstractField.getPropertyName().equals(localName))
                {
                    list.add(abstractField);
                }
            }
            if (list.isEmpty())
            {
                return null;
            }
            else
            {
                return list;
            }
        }
        return null;
    }

    /**
     * Check if two property are similar: same concrete class, same property name,
     * and equal content ({@code equals}). Two properties whose names are both null
     * are considered the same when their classes match.
     *
     * @param prop1
     *            First property
     * @param prop2
     *            Second property
     * @return True if these properties are equals
     */
    public boolean isSameProperty(AbstractField prop1, AbstractField prop2)
    {
        if (prop1.getClass().equals(prop2.getClass()))
        {
            String pn1 = prop1.getPropertyName();
            String pn2 = prop2.getPropertyName();
            if (pn1 == null)
            {
                return pn2 == null;
            }
            else
            {
                if (pn1.equals(pn2))
                {
                    return prop1.equals(prop2);
                }
            }
        }
        return false;
    }

    /**
     * Check if a XMPFieldObject is in the complex property
     *
     * @param property
     *            The property to check
     * @return True if property is present in this container
     */
    public boolean containsProperty(AbstractField property)
    {
        for (AbstractField field : getAllProperties())
        {
            if (isSameProperty(field, property))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Remove the first property equivalent (per {@link #isSameProperty}) to the
     * given one; does nothing when no equivalent property is present.
     *
     * <p>Removal uses the same isSameProperty test as containsProperty. The previous
     * implementation checked membership with isSameProperty but removed with
     * List.remove (equals semantics), so a property reported as contained could fail
     * to be removed when the two tests disagreed (e.g. both property names null).</p>
     *
     * @param property
     *            The property to remove
     */
    public void removeProperty(AbstractField property)
    {
        Iterator<AbstractField> it = properties.iterator();
        while (it.hasNext())
        {
            if (isSameProperty(it.next(), property))
            {
                it.remove();
                return;
            }
        }
    }
}
| joansmith/pdfbox | xmpbox/src/main/java/org/apache/xmpbox/type/ComplexPropertyContainer.java | Java | apache-2.0 | 5,413 |
/*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package org.opencloudb.sequence.handler;
/**
*
* @author <a href="http://www.micmiu.com">Michael</a>
* @time Create on 2013-12-20 下午3:35:53
* @version 1.0
*/
public interface SequenceHandler {

    /**
     * Returns the next value of the sequence identified by the given name.
     *
     * @param prefixName name identifying the sequence to advance
     * @return the next sequence value
     */
    public long nextId(String prefixName);
}
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.cache.impl;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.util.Key;
import com.intellij.psi.impl.cache.impl.id.IdIndexEntry;
import com.intellij.psi.impl.cache.impl.id.IdTableBuilding;
import com.intellij.psi.impl.cache.impl.id.LexerBasedIdIndexer;
import com.intellij.psi.impl.cache.impl.todo.TodoIndexEntry;
import com.intellij.psi.search.IndexPattern;
import com.intellij.util.indexing.FileContent;
import com.intellij.util.indexing.IdDataConsumer;
import gnu.trove.THashMap;
import java.util.Collections;
import java.util.Map;
/**
 * Shared machinery for indexers that derive both the identifier index and the TODO
 * index from a single filter-lexer pass over a file's text.
 */
public class BaseFilterLexerUtil {
  private static final Key<ScanContent> scanContentKey = Key.create("id.todo.scan.content");

  /**
   * Scans {@code content} once with the lexer produced by {@code indexer} and returns
   * both the id-occurrence map and the todo-occurrence map. When both indices are
   * needed for the file, the first call caches its result on the content so the second
   * index can consume it without lexing again.
   */
  public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
    // Reuse (and clear) a result cached by a previous scan of the same content.
    final ScanContent cached = content.getUserData(scanContentKey);
    if (cached != null) {
      content.putUserData(scanContentKey, null);
      return cached;
    }

    final boolean needTodo = content.getFile().isInLocalFileSystem(); // same as TodoIndex.getFilter().isAcceptable
    final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

    final IdDataConsumer idConsumer = needIdIndex ? new IdDataConsumer() : null;
    final OccurrenceConsumer occurrences = new OccurrenceConsumer(idConsumer, needTodo);

    // Drive the lexer over the whole text; the consumers accumulate as a side effect.
    final Lexer lexer = indexer.createLexer(occurrences);
    lexer.start(content.getContentAsText());
    while (lexer.getTokenType() != null) {
      lexer.advance();
    }

    final ScanContent result = new ScanContent(
      idConsumer == null ? Collections.emptyMap() : idConsumer.getResult(),
      needTodo ? buildTodoMap(occurrences) : Collections.emptyMap());

    if (needIdIndex && needTodo) {
      content.putUserData(scanContentKey, result);
    }
    return result;
  }

  /** Collects per-pattern occurrence counts, omitting patterns that never matched. */
  private static Map<TodoIndexEntry, Integer> buildTodoMap(OccurrenceConsumer occurrences) {
    Map<TodoIndexEntry, Integer> todoMap = null;
    for (IndexPattern pattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = occurrences.getOccurrenceCount(pattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<>();
        todoMap.put(new TodoIndexEntry(pattern.getPatternString(), pattern.isCaseSensitive()), count);
      }
    }
    return todoMap == null ? Collections.emptyMap() : todoMap;
  }

  /** Immutable pair of the two per-file occurrence maps produced by a scan. */
  public static class ScanContent {
    public final Map<IdIndexEntry, Integer> idMap;
    public final Map<TodoIndexEntry, Integer> todoMap;

    public ScanContent(Map<IdIndexEntry, Integer> idMap, Map<TodoIndexEntry, Integer> todoMap) {
      this.idMap = idMap;
      this.todoMap = todoMap;
    }
  }
}
| vvv1559/intellij-community | platform/indexing-impl/src/com/intellij/psi/impl/cache/impl/BaseFilterLexerUtil.java | Java | apache-2.0 | 3,210 |
---
date: 2016-03-09T19:56:50+01:00
title: Adding content
---
## Hello world
Let's create our first content file for your documentation. Open a terminal and add the following command for each new file you want to add. Replace `<section-name>` with a general term that describes your document in detail.
```sh
hugo new <section-name>/filename.md
```
Visitors of your website will find the final document under `www.example.com/<section-name>/filename/`.
Since it's possible to have multiple content files in the same section I recommend to create at least one `index.md` file per section. This ensures that users will find an index page under `www.example.com/<section-name>`.
## Homepage
To add content to the homepage you need to add a small indicator to the frontmatter of the content file:
```toml
type: index
```
Otherwise the theme will not be able to find the corresponding content file.
## Table of contents
You may have noticed that the menu on the left contains a small table of contents of the current page. All `<h2>` tags (`## Headline` in Markdown) will be added automatically.
## Admonitions
Admonition is a handy feature that adds block-styled side content to your documentation, for example hints, notes or warnings. It can be enabled by using the corresponding [shortcodes](http://gohugo.io/extras/shortcodes/) inside your content:
```go
{{</* note title="Note" */>}}
Nothing to see here, move along.
{{</* /note */>}}
```
This will print the following block:
{{< note title="Note" >}}
Nothing to see here, move along.
{{< /note >}}
The shortcode adds a neutral color for the note class and a red color for the warning class. You can also add a custom title:
```go
{{</* warning title="Don't try this at home" */>}}
Nothing to see here, move along.
{{</* /warning */>}}
```
This will print the following block:
{{< warning title="Don't try this at home" >}}
Nothing to see here, move along.
{{< /warning >}} | anynines/summit-training-classes | operator/site/themes/hugo-material-docs/exampleSite/content/adding-content/index.md | Markdown | apache-2.0 | 1,941 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>
Exception: Selenium::WebDriver::Error::NoSuchFrameError
— Documentation by YARD 0.8.1
</title>
<link rel="stylesheet" href="../../../css/style.css" type="text/css" media="screen" charset="utf-8" />
<link rel="stylesheet" href="../../../css/common.css" type="text/css" media="screen" charset="utf-8" />
<script type="text/javascript" charset="utf-8">
hasFrames = window.top.frames.main ? true : false;
relpath = '../../../';
framesUrl = "../../../frames.html#!" + escape(window.location.href);
</script>
<script type="text/javascript" charset="utf-8" src="../../../js/jquery.js"></script>
<script type="text/javascript" charset="utf-8" src="../../../js/app.js"></script>
</head>
<body>
<div id="header">
<div id="menu">
<a href="../../../_index.html">Index (N)</a> »
<span class='title'><span class='object_link'><a href="../../../Selenium.html" title="Selenium (module)">Selenium</a></span></span> » <span class='title'><span class='object_link'><a href="../../WebDriver.html" title="Selenium::WebDriver (module)">WebDriver</a></span></span> » <span class='title'><span class='object_link'><a href="../Error.html" title="Selenium::WebDriver::Error (module)">Error</a></span></span>
»
<span class="title">NoSuchFrameError</span>
<div class="noframes"><span class="title">(</span><a href="." target="_top">no frames</a><span class="title">)</span></div>
</div>
<div id="search">
<a class="full_list_link" id="class_list_link"
href="../../../class_list.html">
Class List
</a>
<a class="full_list_link" id="method_list_link"
href="../../../method_list.html">
Method List
</a>
<a class="full_list_link" id="file_list_link"
href="../../../file_list.html">
File List
</a>
</div>
<div class="clear"></div>
</div>
<iframe id="search_frame"></iframe>
<div id="content"><h1>Exception: Selenium::WebDriver::Error::NoSuchFrameError
</h1>
<dl class="box">
<dt class="r1">Inherits:</dt>
<dd class="r1">
<span class="inheritName"><span class='object_link'><a href="WebDriverError.html" title="Selenium::WebDriver::Error::WebDriverError (class)">WebDriverError</a></span></span>
<ul class="fullTree">
<li>Object</li>
<li class="next">StandardError</li>
<li class="next"><span class='object_link'><a href="WebDriverError.html" title="Selenium::WebDriver::Error::WebDriverError (class)">WebDriverError</a></span></li>
<li class="next">Selenium::WebDriver::Error::NoSuchFrameError</li>
</ul>
<a href="#" class="inheritanceTree">show all</a>
</dd>
<dt class="r2 last">Defined in:</dt>
<dd class="r2 last">rb/lib/selenium/webdriver/common/error.rb</dd>
</dl>
<div class="clear"></div>
<h2>Overview</h2><div class="docstring">
<div class="discussion">
<p>A request to switch to a frame could not be satisfied because the frame
could not be found.</p>
</div>
</div>
<div class="tags">
</div>
</div>
<div id="footer">
Generated on Thu Jul 19 12:32:26 2012 by
<a href="http://yardoc.org" title="Yay! A Ruby Documentation Tool" target="_parent">yard</a>
0.8.1 (ruby-1.9.3).
</div>
</body>
</html> | jmt4/Selenium2 | docs/api/rb/Selenium/WebDriver/Error/NoSuchFrameError.html | HTML | apache-2.0 | 3,726 |
package org.apache.lucene.codecs.simpletext;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.codecs.FieldInfosReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper;
import static org.apache.lucene.codecs.simpletext.SimpleTextFieldInfosWriter.*;
/**
 * reads plaintext field infos files
 * <p>
 * <b><font color="red">FOR RECREATIONAL USE ONLY</font></B>
 * @lucene.experimental
 */
public class SimpleTextFieldInfosReader extends FieldInfosReader {

  /**
   * Parses the plain-text field infos file of the given segment and returns
   * the reconstructed {@link FieldInfos}.
   * <p>
   * The file is a sequence of key/value lines (the keys come from
   * {@code SimpleTextFieldInfosWriter}); each expected key is asserted before
   * its value is parsed.
   *
   * @param directory directory holding the segment files
   * @param segmentName name of the segment to read
   * @param segmentSuffix suffix used when building the file name
   * @param iocontext IO context used to open the input
   * @return the field infos stored in the file
   * @throws CorruptIndexException if unread bytes remain after parsing
   * @throws IOException if reading the underlying input fails
   */
  @Override
  public FieldInfos read(Directory directory, String segmentName, String segmentSuffix, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, segmentSuffix, FIELD_INFOS_EXTENSION);
    IndexInput input = directory.openInput(fileName, iocontext);
    BytesRef scratch = new BytesRef();
    // set to true only after a complete, successful parse; drives the cleanup below
    boolean success = false;
    try {
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, NUMFIELDS);
      final int size = Integer.parseInt(readString(NUMFIELDS.length, scratch));
      FieldInfo infos[] = new FieldInfo[size];
      // one record per field, stored in field order
      for (int i = 0; i < size; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NAME);
        String name = readString(NAME.length, scratch);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NUMBER);
        int fieldNumber = Integer.parseInt(readString(NUMBER.length, scratch));
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, ISINDEXED);
        boolean isIndexed = Boolean.parseBoolean(readString(ISINDEXED.length, scratch));
        // the index-options line is only written for indexed fields
        final IndexOptions indexOptions;
        if (isIndexed) {
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, INDEXOPTIONS);
          indexOptions = IndexOptions.valueOf(readString(INDEXOPTIONS.length, scratch));
        } else {
          indexOptions = null;
        }
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, STORETV);
        boolean storeTermVector = Boolean.parseBoolean(readString(STORETV.length, scratch));
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, PAYLOADS);
        boolean storePayloads = Boolean.parseBoolean(readString(PAYLOADS.length, scratch));
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NORMS);
        // the file stores "has norms"; FieldInfo expects the negation
        boolean omitNorms = !Boolean.parseBoolean(readString(NORMS.length, scratch));
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NORMS_TYPE);
        String nrmType = readString(NORMS_TYPE.length, scratch);
        final DocValuesType normsType = docValuesType(nrmType);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, DOCVALUES);
        String dvType = readString(DOCVALUES.length, scratch);
        final DocValuesType docValuesType = docValuesType(dvType);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, DOCVALUES_GEN);
        final long dvGen = Long.parseLong(readString(DOCVALUES_GEN.length, scratch));
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NUM_ATTS);
        int numAtts = Integer.parseInt(readString(NUM_ATTS.length, scratch));
        // arbitrary per-field key/value attributes
        Map<String,String> atts = new HashMap<String,String>();
        for (int j = 0; j < numAtts; j++) {
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, ATT_KEY);
          String key = readString(ATT_KEY.length, scratch);
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, ATT_VALUE);
          String value = readString(ATT_VALUE.length, scratch);
          atts.put(key, value);
        }
        infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(atts));
        infos[i].setDocValuesGen(dvGen);
      }
      // any leftover bytes mean the file is corrupt (or the format changed)
      if (input.getFilePointer() != input.length()) {
        throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer() + " vs size " + input.length() + " (resource: " + input + ")");
      }
      FieldInfos fieldInfos = new FieldInfos(infos);
      success = true;
      return fieldInfos;
    } finally {
      if (success) {
        input.close();
      } else {
        // suppress secondary close() failures so the original parse error propagates
        IOUtils.closeWhileHandlingException(input);
      }
    }
  }

  /**
   * Maps a serialized doc-values token to a {@link DocValuesType}; the
   * literal {@code "false"} denotes "no doc values" and yields null.
   */
  public DocValuesType docValuesType(String dvType) {
    if ("false".equals(dvType)) {
      return null;
    } else {
      return DocValuesType.valueOf(dvType);
    }
  }

  /**
   * Decodes the UTF-8 value that follows a key prefix of {@code offset}
   * bytes within the current scratch line.
   */
  private String readString(int offset, BytesRef scratch) {
    return new String(scratch.bytes, scratch.offset+offset, scratch.length-offset, IOUtils.CHARSET_UTF_8);
  }
}
| yintaoxue/read-open-source-code | solr-4.7.2/src/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java | Java | apache-2.0 | 6,548 |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.lienzo.wires;
import java.util.Objects;
import javax.enterprise.event.Event;
import com.ait.lienzo.client.core.shape.wires.PickerPart;
import com.ait.lienzo.client.core.shape.wires.WiresManager;
import com.ait.lienzo.client.core.shape.wires.WiresShape;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresShapeHighlight;
import org.kie.workbench.common.stunner.core.client.canvas.util.CanvasUnhighlightEvent;
import org.kie.workbench.common.stunner.core.client.shape.HasShapeState;
import org.kie.workbench.common.stunner.core.client.shape.ShapeState;
public class StunnerWiresShapeStateHighlight implements WiresShapeHighlight<PickerPart.ShapePart> {
private final StunnerWiresShapeHighlight delegate;
private HasShapeState current;
private PickerPart.ShapePart currentPartState;
public StunnerWiresShapeStateHighlight(final WiresManager wiresManager,
final Event<CanvasUnhighlightEvent> unhighlightEvent) {
this(new StunnerWiresShapeHighlight(wiresManager, unhighlightEvent));
}
StunnerWiresShapeStateHighlight(final StunnerWiresShapeHighlight delegate) {
this.delegate = delegate;
}
@Override
public void highlight(final WiresShape shape,
final PickerPart.ShapePart part) {
highlight(shape,
part,
ShapeState.HIGHLIGHT);
}
@Override
public void error(final WiresShape shape,
final PickerPart.ShapePart shapePart) {
highlight(shape,
shapePart,
ShapeState.INVALID);
}
@Override
public void restore() {
if (null != current &&
(Objects.isNull(currentPartState) || Objects.equals(PickerPart.ShapePart.BODY, currentPartState))) {
current.applyState(ShapeState.NONE);
setCurrent(null);
} else {
//always restore because the highlightBorder is not applying state, it is only delegating.
delegate.restore();
}
currentPartState = null;
}
private void highlight(final WiresShape shape,
final PickerPart.ShapePart part,
final ShapeState state) {
this.currentPartState = part;
switch (part) {
case BODY:
highlightBody(shape,
state);
break;
default:
highlightBorder(shape);
}
}
private void highlightBody(final WiresShape shape,
final ShapeState state) {
restore();
if (shape instanceof HasShapeState) {
setCurrent((HasShapeState) shape);
current.applyState(state);
} else {
delegate.highlight(shape, PickerPart.ShapePart.BODY);
}
}
private void highlightBorder(final WiresShape shape) {
delegate.highlight(shape, PickerPart.ShapePart.BORDER);
}
void setCurrent(final HasShapeState current) {
this.current = current;
}
} | romartin/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-client/kie-wb-common-stunner-lienzo/src/main/java/org/kie/workbench/common/stunner/client/lienzo/wires/StunnerWiresShapeStateHighlight.java | Java | apache-2.0 | 3,781 |
/*
* Copyright 2010 Manuel Carrasco Moñino. (manolo at apache/org)
* http://code.google.com/p/gwtupload
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package jsupload.client;
import gwtupload.client.IFileInput.FileInputType;
import gwtupload.client.IUploader;
import gwtupload.client.MultiUploader;
import gwtupload.client.SingleUploader;
import org.timepedia.exporter.client.Export;
import org.timepedia.exporter.client.ExportPackage;
import org.timepedia.exporter.client.Exportable;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Document;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Element;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Hidden;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.Widget;
/**
 * @author Manolo Carrasco Moñino
 *
 * Exportable version of gwt Uploader.
 *
 * <h3>Features</h3>
 * <ul>
 * <li>Three kind of progress bar, the most advanced one shows upload speed, time remaining, sizes, progress</li>
 * <li>Single upload form: while the file is being sent the modal dialog avoid the user to interact with the application,
 * Then the form can be used again for uploading more files.</li>
 * <li>Multiple upload form: Each time the user selects a file it goes to the queue and the user can select more files.</li>
 * <li>It can call configurable functions on the events of onChange, onStart and onFinish</li>
 * <li>The user can cancel the current upload, can delete files in the queue or remove uploaded files</li>
 * </ul>
 *
 */
@Export
@ExportPackage("jsu")
public class Upload implements Exportable {

  IUploader uploader = null;

  Panel panel;

  private JsProperties jsProp;

  /**
   * Builds the uploader described by the given javascript configuration
   * object, wires the configured event closures, and attaches the resulting
   * widget to the page.
   *
   * @param prop javascript object carrying the configuration (see {@link Const})
   */
  public Upload(JavaScriptObject prop) {
    this.jsProp = new JsProperties(prop);
    boolean multiple = jsProp.getBoolean(Const.MULTIPLE);
    FileInputType type = fileInputType(jsProp.get(Const.CHOOSE_TYPE));
    // status is non-null only for the default (chismes) progress flavor
    ChismesUploadProgress status = createUploader(type, multiple);
    configureUploader(multiple);
    wireCallbacks();
    attachToPage();
    configureServerSide();
    applyI18n(status);
  }

  /** Maps the configured chooser name to the corresponding file input widget type. */
  private static FileInputType fileInputType(String choose) {
    if ("button".equals(choose)) {
      return FileInputType.BUTTON;
    }
    if ("label".equals(choose)) {
      return FileInputType.LABEL;
    }
    if ("anchor".equals(choose)) {
      return FileInputType.ANCHOR;
    }
    if ("drop".equals(choose)) {
      return FileInputType.DROPZONE;
    }
    // default: the browser's native file input
    return FileInputType.BROWSER_INPUT;
  }

  /**
   * Creates the single or multiple uploader matching the configured
   * progress-bar flavor and stores it in {@link #uploader}.
   *
   * @return the chismes progress widget when that flavor is used, null otherwise
   */
  private ChismesUploadProgress createUploader(FileInputType type, boolean multiple) {
    String flavor = jsProp.get(Const.TYPE);
    if ("incubator".equals(flavor)) {
      uploader = multiple ? new MultiUploader(type, new IncubatorUploadProgress()) : new SingleUploader(type);
      return null;
    }
    if ("basic".equals(flavor)) {
      uploader = multiple ? new MultiUploader(type) : new SingleUploader(type);
      return null;
    }
    ChismesUploadProgress status = new ChismesUploadProgress(!multiple);
    uploader = multiple ? new MultiUploader(type, status) : new SingleUploader(type, status);
    return status;
  }

  /** Applies file limits, auto-submit, and multi-selection settings. */
  private void configureUploader(boolean multiple) {
    if (multiple) {
      ((MultiUploader) uploader).setMaximumFiles(jsProp.getInt(Const.MAX_FILES));
    } else if (jsProp.getBoolean(Const.EMPTY)) {
      ((SingleUploader) uploader).avoidEmptyFiles(false);
    }
    // unless configured explicitly, multi-uploaders auto-submit and single ones do not
    boolean auto = jsProp.defined(Const.AUTO) ? jsProp.getBoolean(Const.AUTO) : multiple;
    uploader.setAutoSubmit(auto);
    uploader.setMultipleSelection(jsProp.getBoolean(Const.MULTIPLE_SELECTION));
  }

  /** Registers the user-provided javascript closures for the uploader events. */
  private void wireCallbacks() {
    uploader.addOnStartUploadHandler(JsUtils.getOnStartUploaderHandler(jsProp.getClosure(Const.ON_START)));
    uploader.addOnChangeUploadHandler(JsUtils.getOnChangeUploaderHandler(jsProp.getClosure(Const.ON_CHANGE)));
    uploader.addOnFinishUploadHandler(JsUtils.getOnFinishUploaderHandler(jsProp.getClosure(Const.ON_FINISH)));
    uploader.addOnCancelUploadHandler(JsUtils.getOnCancelUploaderHandler(jsProp.getClosure(Const.ON_CANCEL)));
    uploader.addOnStatusChangedHandler(JsUtils.getStatusChangedHandler(jsProp.getClosure(Const.ON_STATUS)));
  }

  /** Inserts the uploader widget into the configured container, or the page root. */
  private void attachToPage() {
    panel = RootPanel.get(jsProp.get(Const.CONT_ID, "NoId"));
    if (panel == null) {
      panel = RootPanel.get();
    }
    panel.add((Widget) uploader);
  }

  /** Applies the servlet path and accepted file extensions, when configured. */
  private void configureServerSide() {
    if (jsProp.defined(Const.ACTION)) {
      uploader.setServletPath(jsProp.get(Const.ACTION));
    }
    if (jsProp.defined(Const.VALID_EXTENSIONS)) {
      String[] extensions = jsProp.get(Const.VALID_EXTENSIONS).split("[, ;:]+");
      uploader.setValidExtensions(extensions);
    }
  }

  /** Applies the regional constants and the optional progress-bar messages. */
  private void applyI18n(ChismesUploadProgress status) {
    uploader.setI18Constants(new I18nConstants(jsProp, Const.REGIONAL));
    if (status == null) {
      return;
    }
    if (jsProp.defined(Const.TXT_PERCENT)) {
      status.setPercentMessage(jsProp.get(Const.TXT_PERCENT));
    }
    if (jsProp.defined(Const.TXT_HOURS)) {
      status.setHoursMessage(jsProp.get(Const.TXT_HOURS));
    }
    if (jsProp.defined(Const.TXT_MINUTES)) {
      status.setMinutesMessage(jsProp.get(Const.TXT_MINUTES));
    }
    if (jsProp.defined(Const.TXT_SECONDS)) {
      status.setSecondsMessage(jsProp.get(Const.TXT_SECONDS));
    }
  }

  /**
   * adds a javascript DOM element to the upload form.
   */
  public void addElement(Element e) {
    addElement(e, -1);
  }

  /**
   * adds a javascript DOM element to the upload form at the specified position
   */
  public void addElement(Element e, int index) {
    Widget w = null;
    if (e.getTagName().toLowerCase().equals("input") && e.getAttribute("type").toLowerCase().equals("hidden")) {
      // hidden inputs must live in the document before GWT can wrap them
      if (! Document.get().getBody().isOrHasChild(e)) {
        Document.get().getBody().appendChild(e);
      }
      w = Hidden.wrap(e);
    } else {
      w = new HTML();
      DOM.appendChild(w.getElement(), e);
    }
    uploader.add(w, index);
  }

  /**
   * Depending on the multiple feature configuration, it returns a javascript
   * array of as many elements as images uploaded or one element.
   *
   * The element with the uploaded info has this structure:
   * upload.data().url      // The url to download the uploaded file from the server
   * upload.data().name     // The name of the input form element
   * upload.data().filename // The name of the file selected by the user as is reported by the browser
   * upload.data().basename // The name of the file selected by the user without path
   * upload.data().response // The raw server xml response
   * upload.data().message  // The server text in the message tag
   * upload.data().size     // The size of the file
   * upload.data().status   // The upload status (UNINITIALIZED, QUEUED, INPROGRESS, SUCCESS, ERROR, CANCELING, CANCELED, SUBMITING)
   */
  public JavaScriptObject data() {
    return uploader.getData();
  }

  /**
   * submit the upload form to the server.
   */
  public void submit() {
    uploader.submit();
  }
}
| cyrilmhansen/gwtupload | jsupload/src/main/java/jsupload/client/Upload.java | Java | apache-2.0 | 7,390 |
package org.wso2.carbon.apimgt.rest.api.admin.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.wso2.carbon.apimgt.rest.api.admin.dto.ThrottleLimitDTO;
import org.wso2.carbon.apimgt.rest.api.admin.dto.ThrottlePolicyDTO;
import java.util.Objects;
/**
 * DTO describing an advanced throttle policy together with its default limit.
 */
public class AdvancedThrottlePolicyInfoDTO extends ThrottlePolicyDTO {

    @JsonProperty("defaultLimit")
    private ThrottleLimitDTO defaultLimit = null;

    /**
     * Fluent setter for the default limit.
     *
     * @param defaultLimit the limit to apply by default
     * @return this instance, for call chaining
     */
    public AdvancedThrottlePolicyInfoDTO defaultLimit(ThrottleLimitDTO defaultLimit) {
        this.defaultLimit = defaultLimit;
        return this;
    }

    /**
     * Get defaultLimit
     * @return defaultLimit
     **/
    @ApiModelProperty(value = "")
    public ThrottleLimitDTO getDefaultLimit() {
        return defaultLimit;
    }

    public void setDefaultLimit(ThrottleLimitDTO defaultLimit) {
        this.defaultLimit = defaultLimit;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || !getClass().equals(o.getClass())) {
            return false;
        }
        final AdvancedThrottlePolicyInfoDTO other = (AdvancedThrottlePolicyInfoDTO) o;
        return super.equals(o) && Objects.equals(defaultLimit, other.defaultLimit);
    }

    @Override
    public int hashCode() {
        return Objects.hash(defaultLimit, super.hashCode());
    }

    @Override
    public String toString() {
        return "class AdvancedThrottlePolicyInfoDTO {\n"
                + "    " + toIndentedString(super.toString()) + "\n"
                + "    defaultLimit: " + toIndentedString(defaultLimit) + "\n"
                + "}";
    }

    /**
     * Indents every line of the given object's string form by four spaces,
     * except the first one.
     */
    private String toIndentedString(java.lang.Object o) {
        return o == null ? "null" : o.toString().replace("\n", "\n    ");
    }
}
| sambaheerathan/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.admin/src/gen/java/org/wso2/carbon/apimgt/rest/api/admin/dto/AdvancedThrottlePolicyInfoDTO.java | Java | apache-2.0 | 2,163 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.as2.api.entity;
import org.apache.camel.component.as2.api.AS2MediaType;
import org.apache.http.entity.ContentType;
/**
 * AS2 MIME entity holding an EDI X12 business document. The content type is
 * fixed to {@code application/edi-x12} with the supplied charset.
 */
public class ApplicationEDIX12Entity extends ApplicationEDIEntity {

    /**
     * @param content the EDI X12 document body
     * @param charset charset parameter added to the content type
     * @param contentTransferEncoding the Content-Transfer-Encoding header value
     * @param isMainBody whether this entity is the main body of the message
     */
    public ApplicationEDIX12Entity(String content, String charset, String contentTransferEncoding,
                                   boolean isMainBody) {
        super(content, ContentType.create(AS2MediaType.APPLICATION_EDI_X12, charset), contentTransferEncoding, isMainBody);
    }
}
| nikhilvibhav/camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/entity/ApplicationEDIX12Entity.java | Java | apache-2.0 | 1,313 |
/*
* Copyright Beijing 58 Information Technology Co.,Ltd.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bj58.oceanus.core.context;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.bj58.oceanus.core.jdbc.ConnectionCallback;
import com.bj58.oceanus.core.jdbc.ConnectionManager;
import com.bj58.oceanus.core.timetracker.TrackPoint;
import com.bj58.oceanus.core.timetracker.TrackerExecutor;
import com.bj58.oceanus.core.tx.DelegateTransaction;
import com.bj58.oceanus.core.tx.Transaction;
/**
 * Per-thread context that tracks the connections, callbacks and transaction
 * state used during the execution life cycle of one logical operation.
 *
 * @author Service Platform Architecture Team ([email protected])
 */
public class ConnectionContext {

    private static final Logger logger = LoggerFactory.getLogger(ConnectionContext.class);

    DelegateTransaction transactions = null;

    /**
     * Callbacks driven by the statement life cycle; each one is asked to
     * reset a connection to its pre-execution state before it is released.
     */
    final List<ConnectionCallback> connectionCallbacks = new LinkedList<ConnectionCallback>();

    final Set<Connection> connections = new LinkedHashSet<Connection>();

    final Set<Connection> commitedConnections = new LinkedHashSet<Connection>();

    static final ThreadLocal<ConnectionContext> threadLocal = new ThreadLocal<ConnectionContext>();

    Set<Clob> clobs;

    Set<Blob> blobs;

    // the physical connection this context was created for
    final Connection orgConnection;

    // the wrapper connection exposed to application code
    final Connection wrapper;

    ConnectionManager orgConnectionManager;

    ResultSet generateKeys;

    /**
     * The connection currently in use.
     */
    Connection currentConnection;

    public ConnectionContext(Connection orgConnection, Connection wrapper) {
        this.orgConnection = orgConnection;
        this.wrapper = wrapper;
    }

    public DelegateTransaction getTransaction() {
        return transactions;
    }

    public void addTransaction(Transaction tx) {
        transactions.addTransaction(tx);
    }

    /**
     * Binds the given context to the current thread (or clears the binding
     * when null) and notifies the time tracker accordingly.
     */
    public static void setContext(ConnectionContext context) {
        threadLocal.set(context);
        if (context == null) {
            TrackerExecutor.trackEnd(TrackPoint.CONNECTION_CONTEXT);
            TrackerExecutor.trackRelease();
        } else {
            TrackerExecutor.trackBegin(TrackPoint.CONNECTION_CONTEXT);
        }
    }

    public static ConnectionContext getContext() {
        return threadLocal.get();
    }

    public List<ConnectionCallback> getConnectionCallbacks() {
        return connectionCallbacks;
    }

    public Set<Connection> getConnections() {
        return connections;
    }

    public void addConnection(Connection connection) {
        connections.add(connection);
    }

    public void addCommitedConnection(Connection connection) {
        commitedConnections.add(connection);
    }

    public void addCallback(ConnectionCallback callback) {
        connectionCallbacks.add(callback);
    }

    /**
     * Starts a transaction scope if none is active yet; an already running
     * delegate transaction is kept as-is.
     */
    public void beginTransaction() {
        if (transactions == null) {
            transactions = new DelegateTransaction();
        }
    }

    public Connection getOrgConnection() {
        return orgConnection;
    }

    /**
     * Resets and releases the tracked connections after a commit or rollback.
     * Subsequent work on the logical connection may still contain more
     * transactional operations (begin ... commit ... begin ... end), so a
     * fresh delegate transaction is installed and the physical connections
     * are released at this point.
     *
     * @throws SQLException if resetting or closing a connection fails
     */
    public void rollbackOrCommited() throws SQLException {
        transactions = new DelegateTransaction();
        for (Connection connection : connections) {
            resetAndRelease(connection);
        }
        connections.clear();
    }

    /**
     * Releases every tracked resource once the wrapper connection is closed
     * and unbinds this context from the current thread.
     *
     * @throws SQLException if resetting or closing a connection fails
     */
    public void releaseAfterClosed() throws SQLException {
        setContext(null);
        transactions = null;
        for (Connection connection : connections) {
            resetAndRelease(connection);
        }
        connectionCallbacks.clear();
        connections.clear();
    }

    /**
     * Restores a connection to its pre-execution state via the registered
     * callbacks, then returns it to its owner: the original connection goes
     * back to its manager, any other one is closed.
     */
    private void resetAndRelease(Connection connection) throws SQLException {
        for (ConnectionCallback callback : connectionCallbacks) {
            callback.reset(connection);
        }
        if (connection == this.orgConnection) {
            orgConnectionManager.release(connection);
        } else if (!connection.isClosed()) {
            connection.close();
        }
    }

    public void setOrgConnectionManager(ConnectionManager orgConnectionManager) {
        this.orgConnectionManager = orgConnectionManager;
    }

    public boolean isTransaction() {
        return transactions != null;
    }

    public Connection getCurrentConnection() {
        return currentConnection;
    }

    public void setCurrentConnection(Connection currentConnection) {
        this.currentConnection = currentConnection;
    }
}
| zhangzuoqiang/Oceanus | oceanus-all/oceanus-core/src/main/java/com/bj58/oceanus/core/context/ConnectionContext.java | Java | apache-2.0 | 5,454 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/dynamodb/model/ReturnItemCollectionMetrics.h>
#include <aws/core/utils/HashingUtils.h>
using namespace Aws::Utils;
static const int SIZE_HASH = HashingUtils::HashString("SIZE");
static const int NONE_HASH = HashingUtils::HashString("NONE");
namespace Aws
{
namespace DynamoDB
{
namespace Model
{
namespace ReturnItemCollectionMetricsMapper
{
// Maps the wire-format name ("SIZE"/"NONE") to the enum value; any other
// input yields NOT_SET. Comparison is done on precomputed string hashes.
ReturnItemCollectionMetrics GetReturnItemCollectionMetricsForName(const Aws::String& name)
{
  const int hashCode = HashingUtils::HashString(name.c_str());
  if (hashCode == NONE_HASH)
  {
    return ReturnItemCollectionMetrics::NONE;
  }
  if (hashCode == SIZE_HASH)
  {
    return ReturnItemCollectionMetrics::SIZE;
  }
  return ReturnItemCollectionMetrics::NOT_SET;
}
// Maps the enum value back to its wire-format name; unknown values
// (including NOT_SET) serialize as the empty string.
Aws::String GetNameForReturnItemCollectionMetrics(ReturnItemCollectionMetrics value)
{
  if (value == ReturnItemCollectionMetrics::SIZE)
  {
    return "SIZE";
  }
  if (value == ReturnItemCollectionMetrics::NONE)
  {
    return "NONE";
  }
  return "";
}
} // namespace ReturnItemCollectionMetricsMapper
} // namespace Model
} // namespace DynamoDB
} // namespace Aws
| d9magai/aws-sdk-cpp | aws-cpp-sdk-dynamodb/source/model/ReturnItemCollectionMetrics.cpp | C++ | apache-2.0 | 1,679 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.apache.jackrabbit.oak.query;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.jackrabbit.oak.spi.query.QueryLimits;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Filtering iterators that are useful for queries with limit, offset, order by,
 * or distinct.
 */
public class FilterIterators {

    private static final Logger LOG = LoggerFactory.getLogger(FilterIterators.class);

    private FilterIterators() {
        // utility class: no instances
    }

    /**
     * Verify the number of in-memory nodes is below the limit.
     *
     * @param count the number of nodes
     * @param settings the query engine settings
     * @throws UnsupportedOperationException if the limit was exceeded
     */
    public static void checkMemoryLimit(long count, QueryLimits settings) {
        long maxMemoryEntries = settings.getLimitInMemory();
        if (count > maxMemoryEntries) {
            String message = "The query read more than " +
                    maxMemoryEntries + " nodes in memory.";
            UnsupportedOperationException e = new UnsupportedOperationException(
                    message +
                    " To avoid running out of memory, processing was stopped.");
            LOG.warn(message, e);
            throw e;
        }
    }

    /**
     * Verify the number of node read operations is below the limit.
     *
     * @param count the number of read operations
     * @param settings the query engine settings
     * @throws RuntimeNodeTraversalException if the limit was exceeded
     */
    public static void checkReadLimit(long count, QueryLimits settings) {
        long maxReadEntries = settings.getLimitReads();
        if (count > maxReadEntries) {
            String message = "The query read or traversed more than " +
                    maxReadEntries + " nodes.";
            RuntimeNodeTraversalException e = new RuntimeNodeTraversalException(
                    message +
                    " To avoid affecting other tasks, processing was stopped.");
            LOG.warn(message, e);
            throw e;
        }
    }

    /**
     * Wraps the given iterator with the distinct, order-by, offset and limit
     * filters (applied in that order), creating only the wrappers that are
     * actually needed.
     */
    public static <K> Iterator<K> newCombinedFilter(
            Iterator<K> it, boolean distinct, long limit, long offset,
            Comparator<K> orderBy, QueryLimits settings) {
        if (distinct) {
            it = FilterIterators.newDistinct(it, settings);
        }
        if (orderBy != null) {
            // avoid overflow (both offset and limit could be Long.MAX_VALUE)
            int max = (int) Math.min(Integer.MAX_VALUE,
                    Math.min(Integer.MAX_VALUE, offset) +
                    Math.min(Integer.MAX_VALUE, limit));
            it = FilterIterators.newSort(it, orderBy, max, settings);
        }
        if (offset != 0) {
            it = FilterIterators.newOffset(it, offset);
        }
        if (limit < Long.MAX_VALUE) {
            it = FilterIterators.newLimit(it, limit);
        }
        return it;
    }

    public static <K> DistinctIterator<K> newDistinct(Iterator<K> it, QueryLimits settings) {
        return new DistinctIterator<K>(it, settings);
    }

    public static <K> Iterator<K> newLimit(Iterator<K> it, long limit) {
        return new LimitIterator<K>(it, limit);
    }

    public static <K> Iterator<K> newOffset(Iterator<K> it, long offset) {
        return new OffsetIterator<K>(it, offset);
    }

    public static <K> Iterator<K> newSort(Iterator<K> it, Comparator<K> orderBy, int max, QueryLimits settings) {
        return new SortIterator<K>(it, orderBy, max, settings);
    }

    /**
     * An iterator that filters duplicate entries, that is, it only returns each
     * unique entry once. The internal set of unique entries is filled only when
     * needed (on demand).
     *
     * @param <K> the entry type
     */
    static class DistinctIterator<K> implements Iterator<K> {

        private final Iterator<K> source;
        private final QueryLimits settings;
        private final HashSet<K> distinctSet;
        private K current;
        private boolean end;

        DistinctIterator(Iterator<K> source, QueryLimits settings) {
            this.source = source;
            this.settings = settings;
            distinctSet = new HashSet<K>();
        }

        private void fetchNext() {
            if (end) {
                return;
            }
            while (source.hasNext()) {
                current = source.next();
                if (distinctSet.add(current)) {
                    // the set of seen entries grows with the result; keep it bounded
                    checkMemoryLimit(distinctSet.size(), settings);
                    return;
                }
            }
            current = null;
            end = true;
        }

        @Override
        public boolean hasNext() {
            if (current == null) {
                fetchNext();
            }
            return !end;
        }

        @Override
        public K next() {
            if (current == null) {
                fetchNext();
            }
            // check exhaustion only after fetching: previously a direct next()
            // call on an exhausted, never-probed source returned null instead
            // of throwing, violating the Iterator contract
            if (end) {
                throw new NoSuchElementException();
            }
            K r = current;
            current = null;
            return r;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * An iterator that returns entries in sorted order. The internal list of
     * sorted entries can be limited to a given number of entries, and the
     * entries are only read when needed (on demand).
     *
     * @param <K> the entry type
     */
    static class SortIterator<K> implements Iterator<K> {

        private final Iterator<K> source;
        private final QueryLimits settings;
        private final Comparator<K> orderBy;
        private Iterator<K> result;
        private final int max;

        SortIterator(Iterator<K> source, Comparator<K> orderBy, int max, QueryLimits settings) {
            this.source = source;
            this.orderBy = orderBy;
            this.max = max;
            this.settings = settings;
        }

        private void init() {
            if (result != null) {
                return;
            }
            ArrayList<K> list = new ArrayList<K>();
            while (source.hasNext()) {
                K x = source.next();
                list.add(x);
                checkMemoryLimit(list.size(), settings);
                // from time to time, sort and truncate
                // this should need less than O(n*log(3*keep)) operations,
                // which is close to the optimum O(n*log(keep))
                if (list.size() > (long) max * 2) {
                    // remove tail entries right now, to save memory
                    Collections.sort(list, orderBy);
                    keepFirst(list, max);
                }
            }
            Collections.sort(list, orderBy);
            keepFirst(list, max);
            result = list.iterator();
        }

        /**
         * Truncate a list.
         *
         * @param list the list
         * @param keep the maximum number of entries to keep
         */
        private static <K> void keepFirst(ArrayList<K> list, int keep) {
            while (list.size() > keep) {
                // remove the entries starting at the end,
                // to avoid n^2 performance
                list.remove(list.size() - 1);
            }
        }

        @Override
        public boolean hasNext() {
            init();
            return result.hasNext();
        }

        @Override
        public K next() {
            init();
            return result.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * An iterator that ignores the first number of entries. Entries are only
     * read when needed (on demand).
     *
     * @param <K> the entry type
     */
    static class OffsetIterator<K> implements Iterator<K> {

        private final Iterator<K> source;
        private final long offset;
        private boolean init;

        OffsetIterator(Iterator<K> source, long offset) {
            this.source = source;
            this.offset = offset;
        }

        private void init() {
            if (init) {
                return;
            }
            init = true;
            // skip the first "offset" entries (or fewer, if the source is shorter)
            for (int i = 0; i < offset && source.hasNext(); i++) {
                source.next();
            }
        }

        @Override
        public boolean hasNext() {
            init();
            return source.hasNext();
        }

        @Override
        public K next() {
            init();
            return source.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * An iterator that limits the number of returned entries. Entries are only
     * read when needed (on demand).
     *
     * @param <K> the entry type
     */
    static class LimitIterator<K> implements Iterator<K> {

        private final Iterator<K> source;
        private final long limit;
        private long count;

        LimitIterator(Iterator<K> source, long limit) {
            this.source = source;
            this.limit = limit;
        }

        @Override
        public boolean hasNext() {
            return count < limit && source.hasNext();
        }

        @Override
        public K next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            count++;
            return source.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }
}
| stillalex/jackrabbit-oak | oak-core/src/main/java/org/apache/jackrabbit/oak/query/FilterIterators.java | Java | apache-2.0 | 10,713 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.xpack.eql.analysis.Analyzer;
import org.elasticsearch.xpack.eql.analysis.PostAnalyzer;
import org.elasticsearch.xpack.eql.analysis.PreAnalyzer;
import org.elasticsearch.xpack.eql.analysis.Verifier;
import org.elasticsearch.xpack.eql.optimizer.Optimizer;
import org.elasticsearch.xpack.eql.parser.EqlParser;
import org.elasticsearch.xpack.eql.parser.ParserParams;
import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.eql.planner.Planner;
import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.ql.index.IndexResolver;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import static org.elasticsearch.action.ActionListener.wrap;
import static org.elasticsearch.xpack.ql.util.ActionListeners.map;
/**
 * Per-request state for running a single EQL query.
 * <p>
 * Wires together the pipeline that turns an EQL string into an executable
 * physical plan: parse -&gt; pre-analyze (index/mapping resolution) -&gt;
 * analyze -&gt; post-analyze -&gt; optimize -&gt; plan. All stages are
 * asynchronous and chained through {@link ActionListener}s.
 */
public class EqlSession {

    private final Client client;
    private final EqlConfiguration configuration;
    private final IndexResolver indexResolver;
    private final PreAnalyzer preAnalyzer;
    private final PostAnalyzer postAnalyzer;
    private final Analyzer analyzer;
    private final Optimizer optimizer;
    private final Planner planner;
    private final CircuitBreaker circuitBreaker;

    public EqlSession(
        Client client,
        EqlConfiguration cfg,
        IndexResolver indexResolver,
        PreAnalyzer preAnalyzer,
        PostAnalyzer postAnalyzer,
        FunctionRegistry functionRegistry,
        Verifier verifier,
        Optimizer optimizer,
        Planner planner,
        CircuitBreaker circuitBreaker
    ) {
        // associate every request issued by this session with the parent task,
        // so that cancelling the task propagates to the child requests
        this.client = new ParentTaskAssigningClient(client, cfg.getTaskId());
        this.configuration = cfg;
        this.indexResolver = indexResolver;
        this.preAnalyzer = preAnalyzer;
        this.postAnalyzer = postAnalyzer;
        this.analyzer = new Analyzer(cfg, functionRegistry, verifier);
        this.optimizer = optimizer;
        this.planner = planner;
        this.circuitBreaker = circuitBreaker;
    }

    public Client client() {
        return client;
    }

    public Optimizer optimizer() {
        return optimizer;
    }

    public EqlConfiguration configuration() {
        return configuration;
    }

    public CircuitBreaker circuitBreaker() {
        return circuitBreaker;
    }

    /**
     * Runs the given EQL query and notifies the listener with the results.
     */
    public void eql(String eql, ParserParams params, ActionListener<Results> listener) {
        eqlExecutable(eql, params, wrap(e -> e.execute(this, map(listener, Results::fromPayload)), listener::onFailure));
    }

    /**
     * Builds the executable physical plan for the given EQL query without
     * running it. Parse failures are reported through the listener instead of
     * being thrown.
     */
    public void eqlExecutable(String eql, ParserParams params, ActionListener<PhysicalPlan> listener) {
        try {
            physicalPlan(doParse(eql, params), listener);
        } catch (Exception ex) {
            listener.onFailure(ex);
        }
    }

    public void physicalPlan(LogicalPlan optimized, ActionListener<PhysicalPlan> listener) {
        optimizedPlan(optimized, map(listener, planner::plan));
    }

    public void optimizedPlan(LogicalPlan verified, ActionListener<LogicalPlan> listener) {
        analyzedPlan(verified, map(listener, optimizer::optimize));
    }

    public void analyzedPlan(LogicalPlan parsed, ActionListener<LogicalPlan> listener) {
        // short-circuit if the plan has already been analyzed
        if (parsed.analyzed()) {
            listener.onResponse(parsed);
            return;
        }
        preAnalyze(parsed, map(listener, p -> postAnalyze(analyzer.analyze(p))));
    }

    // the unused <T> type parameter of the original declaration was removed
    private void preAnalyze(LogicalPlan parsed, ActionListener<LogicalPlan> listener) {
        String indexWildcard = configuration.indexAsWildcard();
        // bail out early if the parent task has already been cancelled
        if (configuration.isCancelled()) {
            listener.onFailure(new TaskCancelledException("cancelled"));
            return;
        }
        indexResolver.resolveAsMergedMapping(
            indexWildcard,
            configuration.indicesOptions(),
            configuration.runtimeMappings(),
            map(listener, r -> preAnalyzer.preAnalyze(parsed, r))
        );
    }

    private LogicalPlan postAnalyze(LogicalPlan verified) {
        return postAnalyzer.postAnalyze(verified, configuration);
    }

    private LogicalPlan doParse(String eql, ParserParams params) {
        return new EqlParser().createStatement(eql, params);
    }
}
| GlenRSmith/elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/session/EqlSession.java | Java | apache-2.0 | 4,770 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// XLA-specific MatMul Op.
#include "tensorflow/compiler/tf2xla/xla_helpers.h"
#include "tensorflow/compiler/tf2xla/xla_op_kernel.h"
#include "tensorflow/compiler/tf2xla/xla_op_registry.h"
#include "tensorflow/core/framework/op_kernel.h"
namespace tensorflow {
namespace {
constexpr std::array<DataType, 5> kMatmulTypes = {
{DT_HALF, DT_BFLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64}};
// XLA kernel for dense 2-D matrix multiplication (MatMul). Also backs
// SparseMatMul via the is_sparse constructor flag; the sparsity hints are
// read but deliberately ignored (see comment in the constructor).
class MatMulOp : public XlaOpKernel {
 public:
  explicit MatMulOp(OpKernelConstruction* ctx, bool is_sparse = false)
      : XlaOpKernel(ctx), is_sparse_(is_sparse) {
    OP_REQUIRES_OK(ctx, ctx->GetAttr("transpose_a", &transpose_a_));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("transpose_b", &transpose_b_));
    if (is_sparse) {
      // input element types; used in Compile() to widen bfloat16 inputs
      OP_REQUIRES_OK(ctx, ctx->GetAttr("Ta", &a_type_));
      OP_REQUIRES_OK(ctx, ctx->GetAttr("Tb", &b_type_));
      // SparseMatMul is actually dense matmul with a hint that one or
      // both of the inputs may contain a lot of zeroes. On CPU these
      // inputs are dynamically converted to sparse representation
      // before multiplication. For now in XLA we ignore the hints
      // and always do dense multiplication.
      bool dummy_is_sparse;
      OP_REQUIRES_OK(ctx, ctx->GetAttr("a_is_sparse", &dummy_is_sparse));
      OP_REQUIRES_OK(ctx, ctx->GetAttr("b_is_sparse", &dummy_is_sparse));
    }
  }

  ~MatMulOp() override = default;

  void Compile(XlaOpKernelContext* ctx) override {
    const TensorShape a_shape = ctx->InputShape(0);
    const TensorShape b_shape = ctx->InputShape(1);

    // Check that the dimensions of the two matrices are valid.
    OP_REQUIRES(ctx, TensorShapeUtils::IsMatrix(a_shape),
                errors::InvalidArgument("In[0] is not a matrix"));
    OP_REQUIRES(ctx, TensorShapeUtils::IsMatrix(b_shape),
                errors::InvalidArgument("In[1] is not a matrix"));
    // The contracted ("inner") dimension of each operand depends on whether
    // that operand is transposed.
    int first_index = transpose_a_ ? 0 : 1;
    int second_index = transpose_b_ ? 1 : 0;

    OP_REQUIRES(ctx,
                a_shape.dim_size(first_index) == b_shape.dim_size(second_index),
                errors::InvalidArgument("Matrix size-compatible: In[0]: ",
                                        a_shape.DebugString(), ", In[1]: ",
                                        b_shape.DebugString()));

    xla::ComputationDataHandle a = ctx->Input(0);
    xla::ComputationDataHandle b = ctx->Input(1);
    if (is_sparse_) {
      // widen bfloat16 inputs to f32 before multiplying
      if (a_type_ == DT_BFLOAT16) {
        a = ctx->builder()->ConvertElementType(a, xla::F32);
      }
      if (b_type_ == DT_BFLOAT16) {
        b = ctx->builder()->ConvertElementType(b, xla::F32);
      }
    }

    // Apply the requested transposes explicitly, then emit the dot product.
    auto lhs = (transpose_a_) ? ctx->builder()->Transpose(a, {1, 0}) : a;
    auto rhs = (transpose_b_) ? ctx->builder()->Transpose(b, {1, 0}) : b;
    ctx->SetOutput(0, ctx->builder()->Dot(lhs, rhs));
  }

 private:
  bool is_sparse_;  // true when constructed for SparseMatMul
  bool transpose_a_;
  bool transpose_b_;
  DataType a_type_;  // only assigned (and read) when is_sparse_ is true
  DataType b_type_;  // only assigned (and read) when is_sparse_ is true
};
REGISTER_XLA_OP(Name("MatMul").TypeConstraint("T", kMatmulTypes), MatMulOp);
// SparseMatMul lowered as a plain dense MatMul; see the comment in the
// MatMulOp constructor for why the sparsity hints are ignored.
class SparseMatMulOp : public MatMulOp {
 public:
  explicit SparseMatMulOp(OpKernelConstruction* ctx) : MatMulOp(ctx, true) {}

  ~SparseMatMulOp() override = default;
};
REGISTER_XLA_OP(Name("SparseMatMul"), SparseMatMulOp);
} // namespace
} // namespace tensorflow
| rabipanda/tensorflow | tensorflow/compiler/tf2xla/kernels/matmul_op.cc | C++ | apache-2.0 | 3,934 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.utils.FBUtilities;
/* ViewFilteringTest class has been split into multiple ones because of timeout issues (CASSANDRA-16670, CASSANDRA-17167)
* Any changes here check if they apply to the other classes
* - ViewFilteringPKTest
* - ViewFilteringClustering1Test
* - ViewFilteringClustering2Test
* - ViewFilteringTest
* - ...
* - ViewFiltering*Test
*/
/**
 * Materialized-view tests that restrict on non-primary-key columns and
 * exercise timestamp-ordering corner cases. Setup/teardown is shared with
 * ViewFiltering1Test (see the note above about the split of the original
 * ViewFilteringTest class).
 */
public class ViewFiltering2Test extends ViewAbstractParameterizedTest
{
    @BeforeClass
    public static void startup()
    {
        // reuse the cluster/view setup of ViewFiltering1Test
        ViewFiltering1Test.startup();
    }
    @AfterClass
    public static void tearDown()
    {
        ViewFiltering1Test.tearDown();
    }
    /**
     * Creates a view whose WHERE clause restricts a column of every CQL type
     * and checks rows are materialized, also after renaming a base column.
     */
    @Test
    public void testAllTypes() throws Throwable
    {
        String myType = createType("CREATE TYPE %s (a int, b uuid, c set<text>)");
        String columnNames = "asciival, " +
                             "bigintval, " +
                             "blobval, " +
                             "booleanval, " +
                             "dateval, " +
                             "decimalval, " +
                             "doubleval, " +
                             "floatval, " +
                             "inetval, " +
                             "intval, " +
                             "textval, " +
                             "timeval, " +
                             "timestampval, " +
                             "timeuuidval, " +
                             "uuidval," +
                             "varcharval, " +
                             "varintval, " +
                             "frozenlistval, " +
                             "frozensetval, " +
                             "frozenmapval, " +
                             "tupleval, " +
                             "udtval";
        createTable(
            "CREATE TABLE %s (" +
            "asciival ascii, " +
            "bigintval bigint, " +
            "blobval blob, " +
            "booleanval boolean, " +
            "dateval date, " +
            "decimalval decimal, " +
            "doubleval double, " +
            "floatval float, " +
            "inetval inet, " +
            "intval int, " +
            "textval text, " +
            "timeval time, " +
            "timestampval timestamp, " +
            "timeuuidval timeuuid, " +
            "uuidval uuid," +
            "varcharval varchar, " +
            "varintval varint, " +
            "frozenlistval frozen<list<int>>, " +
            "frozensetval frozen<set<uuid>>, " +
            "frozenmapval frozen<map<ascii, int>>," +
            "tupleval frozen<tuple<int, ascii, uuid>>," +
            "udtval frozen<" + myType + ">, " +
            "PRIMARY KEY (" + columnNames + "))");
        createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s WHERE " +
                   "asciival = 'abc' AND " +
                   "bigintval = 123 AND " +
                   "blobval = 0xfeed AND " +
                   "booleanval = true AND " +
                   "dateval = '1987-03-23' AND " +
                   "decimalval = 123.123 AND " +
                   "doubleval = 123.123 AND " +
                   "floatval = 123.123 AND " +
                   "inetval = '127.0.0.1' AND " +
                   "intval = 123 AND " +
                   "textval = 'abc' AND " +
                   "timeval = '07:35:07.000111222' AND " +
                   "timestampval = 123123123 AND " +
                   "timeuuidval = 6BDDC89A-5644-11E4-97FC-56847AFE9799 AND " +
                   "uuidval = 6BDDC89A-5644-11E4-97FC-56847AFE9799 AND " +
                   "varcharval = 'abc' AND " +
                   "varintval = 123123123 AND " +
                   "frozenlistval = [1, 2, 3] AND " +
                   "frozensetval = {6BDDC89A-5644-11E4-97FC-56847AFE9799} AND " +
                   "frozenmapval = {'a': 1, 'b': 2} AND " +
                   "tupleval = (1, 'foobar', 6BDDC89A-5644-11E4-97FC-56847AFE9799) AND " +
                   "udtval = {a: 1, b: 6BDDC89A-5644-11E4-97FC-56847AFE9799, c: {'foo', 'bar'}} " +
                   "PRIMARY KEY (" + columnNames + ")");
        // insert a single row matching every restriction
        execute("INSERT INTO %s (" + columnNames + ") VALUES (" +
                "'abc'," +
                "123," +
                "0xfeed," +
                "true," +
                "'1987-03-23'," +
                "123.123," +
                "123.123," +
                "123.123," +
                "'127.0.0.1'," +
                "123," +
                "'abc'," +
                "'07:35:07.000111222'," +
                "123123123," +
                "6BDDC89A-5644-11E4-97FC-56847AFE9799," +
                "6BDDC89A-5644-11E4-97FC-56847AFE9799," +
                "'abc'," +
                "123123123," +
                "[1, 2, 3]," +
                "{6BDDC89A-5644-11E4-97FC-56847AFE9799}," +
                "{'a': 1, 'b': 2}," +
                "(1, 'foobar', 6BDDC89A-5644-11E4-97FC-56847AFE9799)," +
                "{a: 1, b: 6BDDC89A-5644-11E4-97FC-56847AFE9799, c: {'foo', 'bar'}})");
        assert !executeView("SELECT * FROM %s").isEmpty();
        // renaming a base column must not drop the materialized row
        executeNet("ALTER TABLE %s RENAME inetval TO foo");
        assert !executeView("SELECT * FROM %s").isEmpty();
    }
    /**
     * A view restricted on a non-primary-key column (d = 1) must be creatable.
     */
    @Test
    public void testMVCreationWithNonPrimaryRestrictions()
    {
        createTable("CREATE TABLE %s (a int, b int, c int, d int, PRIMARY KEY (a, b))");
        try
        {
            String mv = createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s " +
                                   "WHERE a IS NOT NULL AND b IS NOT NULL AND c IS NOT NULL AND d = 1 " +
                                   "PRIMARY KEY (a, b, c)");
            dropView(mv);
        }
        catch (Exception e)
        {
            throw new RuntimeException("MV creation with non primary column restrictions failed.", e);
        }
        dropTable("DROP TABLE %s");
    }
    /**
     * Inserts/updates/deletes against a base table and checks that a view
     * filtered on a regular column (c = 1) tracks only the matching rows.
     */
    @Test
    public void testNonPrimaryRestrictions() throws Throwable
    {
        createTable("CREATE TABLE %s (a int, b int, c int, d int, PRIMARY KEY (a, b))");
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 0, 0, 0, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 0, 0, 1, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 0, 1, 0, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 0, 1, 1, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 1, 0, 0, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 1, 0, 1, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 1, 1, 0, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 1, 1, 1, 0);
        // only accept rows where c = 1
        createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s " +
                   "WHERE a IS NOT NULL AND b IS NOT NULL AND c IS NOT NULL AND c = 1 " +
                   "PRIMARY KEY (a, b, c)");
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 0),
                                row(1, 1, 1, 0)
        );
        // insert new rows that do not match the filter
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 2, 0, 0, 0);
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 2, 1, 2, 0);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 0),
                                row(1, 1, 1, 0)
        );
        // insert new row that does match the filter
        execute("INSERT INTO %s (a, b, c, d) VALUES (?, ?, ?, ?)", 1, 2, 1, 0);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 0),
                                row(1, 1, 1, 0),
                                row(1, 2, 1, 0)
        );
        // update rows that don't match the filter
        execute("UPDATE %s SET d = ? WHERE a = ? AND b = ?", 2, 2, 0);
        execute("UPDATE %s SET d = ? WHERE a = ? AND b = ?", 1, 2, 1);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 0),
                                row(1, 1, 1, 0),
                                row(1, 2, 1, 0)
        );
        // update a row that does match the filter
        execute("UPDATE %s SET d = ? WHERE a = ? AND b = ?", 1, 1, 0);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 1),
                                row(1, 1, 1, 0),
                                row(1, 2, 1, 0)
        );
        // delete rows that don't match the filter
        execute("DELETE FROM %s WHERE a = ? AND b = ?", 2, 0);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 1),
                                row(1, 1, 1, 0),
                                row(1, 2, 1, 0)
        );
        // delete a row that does match the filter
        execute("DELETE FROM %s WHERE a = ? AND b = ?", 1, 2);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0),
                                row(1, 0, 1, 1),
                                row(1, 1, 1, 0)
        );
        // delete a partition that matches the filter
        execute("DELETE FROM %s WHERE a = ?", 1);
        assertRowsIgnoringOrder(executeView("SELECT a, b, c, d FROM %s"),
                                row(0, 0, 1, 0),
                                row(0, 1, 1, 0)
        );
        dropView();
        dropTable("DROP TABLE %s");
    }
    @Test
    public void complexRestrictedTimestampUpdateTestWithFlush() throws Throwable
    {
        complexRestrictedTimestampUpdateTest(true);
    }
    @Test
    public void complexRestrictedTimestampUpdateTestWithoutFlush() throws Throwable
    {
        complexRestrictedTimestampUpdateTest(false);
    }
    /**
     * Replays out-of-order timestamped writes against a view whose filtered
     * column (c) is also a view PK column, checking view rows appear and
     * disappear according to the reconciled base state. Run with and without
     * flushing/compaction to cover both memtable-only and sstable paths.
     */
    public void complexRestrictedTimestampUpdateTest(boolean flush) throws Throwable
    {
        createTable("CREATE TABLE %s (a int, b int, c int, d int, e int, PRIMARY KEY (a, b))");
        Keyspace ks = Keyspace.open(keyspace());
        String mv = createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s " +
                               "WHERE a IS NOT NULL AND b IS NOT NULL AND c IS NOT NULL AND c = 1 " +
                               "PRIMARY KEY (c, a, b)");
        ks.getColumnFamilyStore(mv).disableAutoCompaction();
        //Set initial values TS=0, matching the restriction and verify view
        executeNet("INSERT INTO %s (a, b, c, d) VALUES (0, 0, 1, 0) USING TIMESTAMP 0");
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(0));
        if (flush)
            FBUtilities.waitOnFutures(ks.flush());
        //update c's timestamp TS=2
        executeNet("UPDATE %s USING TIMESTAMP 2 SET c = ? WHERE a = ? and b = ? ", 1, 0, 0);
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(0));
        if (flush)
            FBUtilities.waitOnFutures(ks.flush());
        //change c's value and TS=3, tombstones c=1 and adds c=0 record
        executeNet("UPDATE %s USING TIMESTAMP 3 SET c = ? WHERE a = ? and b = ? ", 0, 0, 0);
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 0, 0, 0));
        if (flush)
        {
            ks.getColumnFamilyStore(mv).forceMajorCompaction();
            FBUtilities.waitOnFutures(ks.flush());
        }
        //change c's value back to 1 with TS=4, check we can see d
        executeNet("UPDATE %s USING TIMESTAMP 4 SET c = ? WHERE a = ? and b = ? ", 1, 0, 0);
        if (flush)
        {
            ks.getColumnFamilyStore(mv).forceMajorCompaction();
            FBUtilities.waitOnFutures(ks.flush());
        }
        assertRows(executeView("SELECT d, e FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(0, null));
        //Add e value @ TS=1
        executeNet("UPDATE %s USING TIMESTAMP 1 SET e = ? WHERE a = ? and b = ? ", 1, 0, 0);
        assertRows(executeView("SELECT d, e FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(0, 1));
        if (flush)
            FBUtilities.waitOnFutures(ks.flush());
        //Change d value @ TS=2
        executeNet("UPDATE %s USING TIMESTAMP 2 SET d = ? WHERE a = ? and b = ? ", 2, 0, 0);
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(2));
        if (flush)
            FBUtilities.waitOnFutures(ks.flush());
        //Change d value @ TS=3
        executeNet("UPDATE %s USING TIMESTAMP 3 SET d = ? WHERE a = ? and b = ? ", 1, 0, 0);
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row(1));
        //Tombstone c
        executeNet("DELETE FROM %s WHERE a = ? and b = ?", 0, 0);
        assertRowsIgnoringOrder(executeView("SELECT d FROM %s"));
        assertRows(executeView("SELECT d FROM %s"));
        //Add back without D
        executeNet("INSERT INTO %s (a, b, c) VALUES (0, 0, 1)");
        //Make sure D doesn't pop back in.
        assertRows(executeView("SELECT d FROM %s WHERE c = ? and a = ? and b = ?", 1, 0, 0), row((Object) null));
        //New partition
        // insert a row with timestamp 0
        executeNet("INSERT INTO %s (a, b, c, d, e) VALUES (?, ?, ?, ?, ?) USING TIMESTAMP 0", 1, 0, 1, 0, 0);
        // overwrite pk and e with timestamp 1, but don't overwrite d
        executeNet("INSERT INTO %s (a, b, c, e) VALUES (?, ?, ?, ?) USING TIMESTAMP 1", 1, 0, 1, 0);
        // delete with timestamp 0 (which should only delete d)
        executeNet("DELETE FROM %s USING TIMESTAMP 0 WHERE a = ? AND b = ?", 1, 0);
        assertRows(executeView("SELECT a, b, c, d, e FROM %s WHERE c = ? and a = ? and b = ?", 1, 1, 0),
                   row(1, 0, 1, null, 0)
        );
        executeNet("UPDATE %s USING TIMESTAMP 2 SET c = ? WHERE a = ? AND b = ?", 1, 1, 1);
        executeNet("UPDATE %s USING TIMESTAMP 3 SET c = ? WHERE a = ? AND b = ?", 1, 1, 0);
        assertRows(executeView("SELECT a, b, c, d, e FROM %s WHERE c = ? and a = ? and b = ?", 1, 1, 0),
                   row(1, 0, 1, null, 0)
        );
        executeNet("UPDATE %s USING TIMESTAMP 3 SET d = ? WHERE a = ? AND b = ?", 0, 1, 0);
        assertRows(executeView("SELECT a, b, c, d, e FROM %s WHERE c = ? and a = ? and b = ?", 1, 1, 0),
                   row(1, 0, 1, 0, 0)
        );
    }
    @Test
    public void testRestrictedRegularColumnTimestampUpdates() throws Throwable
    {
        // Regression test for CASSANDRA-10910
        createTable("CREATE TABLE %s (" +
                    "k int PRIMARY KEY, " +
                    "c int, " +
                    "val int)");
        createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s " +
                   "WHERE k IS NOT NULL AND c IS NOT NULL AND c = 1 " +
                   "PRIMARY KEY (k,c)");
        updateView("UPDATE %s SET c = ?, val = ? WHERE k = ?", 0, 0, 0);
        updateView("UPDATE %s SET val = ? WHERE k = ?", 1, 0);
        updateView("UPDATE %s SET c = ? WHERE k = ?", 1, 0);
        assertRows(executeView("SELECT c, k, val FROM %s"), row(1, 0, 1));
        updateView("TRUNCATE %s");
        // repeat with explicit, out-of-order timestamps: latest write wins
        updateView("UPDATE %s USING TIMESTAMP 1 SET c = ?, val = ? WHERE k = ?", 0, 0, 0);
        updateView("UPDATE %s USING TIMESTAMP 3 SET c = ? WHERE k = ?", 1, 0);
        updateView("UPDATE %s USING TIMESTAMP 2 SET val = ? WHERE k = ?", 1, 0);
        updateView("UPDATE %s USING TIMESTAMP 4 SET c = ? WHERE k = ?", 1, 0);
        updateView("UPDATE %s USING TIMESTAMP 3 SET val = ? WHERE k = ?", 2, 0);
        assertRows(executeView("SELECT c, k, val FROM %s"), row(1, 0, 2));
    }
    /**
     * Writes with a timestamp older than the existing cell must not change
     * either the base table or the filtered view.
     */
    @Test
    public void testOldTimestampsWithRestrictions() throws Throwable
    {
        createTable("CREATE TABLE %s (" +
                    "k int, " +
                    "c int, " +
                    "val text, " + "" +
                    "PRIMARY KEY(k, c))");
        createView("CREATE MATERIALIZED VIEW %s AS SELECT * FROM %s " +
                   "WHERE val IS NOT NULL AND k IS NOT NULL AND c IS NOT NULL AND val = 'baz' " +
                   "PRIMARY KEY (val,k,c)");
        for (int i = 0; i < 100; i++)
            updateView("INSERT into %s (k,c,val)VALUES(?,?,?)", 0, i % 2, "baz");
        Keyspace.open(keyspace()).getColumnFamilyStore(currentTable()).forceBlockingFlush();
        Assert.assertEquals(2, execute("select * from %s").size());
        Assert.assertEquals(2, executeView("select * from %s").size());
        assertRows(execute("SELECT val from %s where k = 0 and c = 0"), row("baz"));
        assertRows(executeView("SELECT c from %s where k = 0 and val = ?", "baz"), row(0), row(1));
        //Make sure an old TS does nothing
        updateView("UPDATE %s USING TIMESTAMP 100 SET val = ? where k = ? AND c = ?", "bar", 0, 1);
        assertRows(execute("SELECT val from %s where k = 0 and c = 1"), row("baz"));
        assertRows(executeView("SELECT c from %s where k = 0 and val = ?", "baz"), row(0), row(1));
        assertRows(executeView("SELECT c from %s where k = 0 and val = ?", "bar"));
        //Latest TS
        updateView("UPDATE %s SET val = ? where k = ? AND c = ?", "bar", 0, 1);
        assertRows(execute("SELECT val from %s where k = 0 and c = 1"), row("bar"));
        assertRows(executeView("SELECT c from %s where k = 0 and val = ?", "bar"));
        assertRows(executeView("SELECT c from %s where k = 0 and val = ?", "baz"), row(0));
    }
}
| belliottsmith/cassandra | test/unit/org/apache/cassandra/cql3/ViewFiltering2Test.java | Java | apache-2.0 | 19,240 |
require_relative '../spec_helper'
# Model spec for GridServiceLink: a link is embedded in a grid service,
# references the linked service, and carries an alias field.
describe GridServiceLink do
  it { should be_embedded_in(:grid_service) }
  it { should belong_to(:linked_grid_service)}
  it { should have_fields(:alias)}
end | jnummelin/kontena | server/spec/models/grid_service_link_spec.rb | Ruby | apache-2.0 | 195 |
// RUN: %clang_cc1 %s -triple=x86_64-apple-darwin10 -emit-llvm-only -fdump-vtable-layouts > %t 2>&1
// RUN: FileCheck --check-prefix=CHECK-1 %s < %t
// RUN: FileCheck --check-prefix=CHECK-2 %s < %t
// RUN: FileCheck --check-prefix=CHECK-3 %s < %t
// RUN: FileCheck --check-prefix=CHECK-4 %s < %t
// RUN: FileCheck --check-prefix=CHECK-5 %s < %t
// RUN: FileCheck --check-prefix=CHECK-6 %s < %t
// RUN: FileCheck --check-prefix=CHECK-7 %s < %t
// RUN: FileCheck --check-prefix=CHECK-8 %s < %t
// RUN: FileCheck --check-prefix=CHECK-9 %s < %t
// RUN: FileCheck --check-prefix=CHECK-10 %s < %t
// RUN: FileCheck --check-prefix=CHECK-11 %s < %t
// RUN: FileCheck --check-prefix=CHECK-12 %s < %t
// RUN: FileCheck --check-prefix=CHECK-13 %s < %t
// RUN: FileCheck --check-prefix=CHECK-14 %s < %t
// RUN: FileCheck --check-prefix=CHECK-15 %s < %t
// RUN: FileCheck --check-prefix=CHECK-16 %s < %t
// RUN: FileCheck --check-prefix=CHECK-17 %s < %t
// RUN: FileCheck --check-prefix=CHECK-18 %s < %t
// RUN: FileCheck --check-prefix=CHECK-19 %s < %t
// RUN: FileCheck --check-prefix=CHECK-20 %s < %t
// RUN: FileCheck --check-prefix=CHECK-21 %s < %t
// RUN: FileCheck --check-prefix=CHECK-22 %s < %t
// RUN: FileCheck --check-prefix=CHECK-23 %s < %t
// RUN: FileCheck --check-prefix=CHECK-24 %s < %t
// RUN: FileCheck --check-prefix=CHECK-25 %s < %t
// RUN: FileCheck --check-prefix=CHECK-26 %s < %t
// RUN: FileCheck --check-prefix=CHECK-27 %s < %t
// RUN: FileCheck --check-prefix=CHECK-28 %s < %t
// RUN: FileCheck --check-prefix=CHECK-29 %s < %t
// RUN: FileCheck --check-prefix=CHECK-30 %s < %t
// RUN: FileCheck --check-prefix=CHECK-31 %s < %t
// RUN: FileCheck --check-prefix=CHECK-32 %s < %t
// RUN: FileCheck --check-prefix=CHECK-33 %s < %t
// RUN: FileCheck --check-prefix=CHECK-34 %s < %t
// RUN: FileCheck --check-prefix=CHECK-35 %s < %t
// RUN: FileCheck --check-prefix=CHECK-36 %s < %t
// RUN: FileCheck --check-prefix=CHECK-37 %s < %t
// RUN: FileCheck --check-prefix=CHECK-38 %s < %t
// RUN: FileCheck --check-prefix=CHECK-39 %s < %t
// RUN: FileCheck --check-prefix=CHECK-40 %s < %t
// RUN: FileCheck --check-prefix=CHECK-41 %s < %t
// RUN: FileCheck --check-prefix=CHECK-42 %s < %t
// RUN: FileCheck --check-prefix=CHECK-43 %s < %t
// RUN: FileCheck --check-prefix=CHECK-44 %s < %t
// For now, just verify this doesn't crash.
// (Covariant return: Derived::foo returns Obj* while Base::foo returns
// const Obj* — an adjustment in cv-qualification only.)
namespace test0 {
  struct Obj {};
  struct Base { virtual const Obj *foo() = 0; };
  struct Derived : Base { virtual Obj *foo() { return new Obj(); } };
  void test(Derived *D) { D->foo(); }
}
namespace Test1 {
// Minimal case: one virtual function, so the vtable holds only the
// offset-to-top, the RTTI entry, and f().
// CHECK-1:      Vtable for 'Test1::A' (3 entries).
// CHECK-1-NEXT:   0 | offset_to_top (0)
// CHECK-1-NEXT:   1 | Test1::A RTTI
// CHECK-1-NEXT:       -- (Test1::A, 0) vtable address --
// CHECK-1-NEXT:   2 | void Test1::A::f()
struct A {
  virtual void f();
};
void A::f() { }
}
namespace Test2 {
// This is a smoke test of the vtable dumper.
// CHECK-2:      Vtable for 'Test2::A' (9 entries).
// CHECK-2-NEXT:   0 | offset_to_top (0)
// CHECK-2-NEXT:   1 | Test2::A RTTI
// CHECK-2-NEXT:       -- (Test2::A, 0) vtable address --
// CHECK-2-NEXT:   2 | void Test2::A::f()
// CHECK-2-NEXT:   3 | void Test2::A::f() const
// CHECK-2-NEXT:   4 | Test2::A *Test2::A::g(int)
// CHECK-2-NEXT:   5 | Test2::A::~A() [complete]
// CHECK-2-NEXT:   6 | Test2::A::~A() [deleting]
// CHECK-2-NEXT:   7 | void Test2::A::h()
// CHECK-2-NEXT:   8 | Test2::A &Test2::A::operator=(const Test2::A &)
// Note: the virtual destructor contributes two slots (complete + deleting).
struct A {
  virtual void f();
  virtual void f() const;
  virtual A* g(int a);
  virtual ~A();
  virtual void h();
  virtual A& operator=(const A&);
};
void A::f() { }

// Another simple vtable dumper test.
// CHECK-3:     Vtable for 'Test2::B' (6 entries).
// CHECK-3-NEXT:  0 | offset_to_top (0)
// CHECK-3-NEXT:  1 | Test2::B RTTI
// CHECK-3-NEXT:    -- (Test2::B, 0) vtable address --
// CHECK-3-NEXT:  2 | void Test2::B::f()
// CHECK-3-NEXT:  3 | void Test2::B::g() [pure]
// CHECK-3-NEXT:  4 | Test2::B::~B() [complete] [pure]
// CHECK-3-NEXT:  5 | Test2::B::~B() [deleting] [pure]
struct B {
  virtual void f();
  virtual void g() = 0;
  virtual ~B() = 0;
};
void B::f() { }
}
namespace Test3 {
// If a function in a derived class overrides a function in a primary base,
// then the function should not have an entry in the derived class (unless the return
// value requires adjusting).
// CHECK-4:      Vtable for 'Test3::A' (3 entries).
// CHECK-4-NEXT:   0 | offset_to_top (0)
// CHECK-4-NEXT:   1 | Test3::A RTTI
// CHECK-4-NEXT:       -- (Test3::A, 0) vtable address --
// CHECK-4-NEXT:   2 | void Test3::A::f()
struct A {
  virtual void f();
};
void A::f() { }

// B shares A's primary vtable: the override of f() replaces A's slot.
// CHECK-5:     Vtable for 'Test3::B' (4 entries).
// CHECK-5-NEXT:  0 | offset_to_top (0)
// CHECK-5-NEXT:  1 | Test3::B RTTI
// CHECK-5-NEXT:      -- (Test3::A, 0) vtable address --
// CHECK-5-NEXT:      -- (Test3::B, 0) vtable address --
// CHECK-5-NEXT:  2 | void Test3::B::f()
// CHECK-5-NEXT:  3 | void Test3::B::g()
struct B : A {
  virtual void f();
  virtual void g();
};
void B::f() { }

// C does not override f(), so A's slot is kept and C's new virtuals follow.
// CHECK-6:     Vtable for 'Test3::C' (5 entries).
// CHECK-6-NEXT:  0 | offset_to_top (0)
// CHECK-6-NEXT:  1 | Test3::C RTTI
// CHECK-6-NEXT:     -- (Test3::A, 0) vtable address --
// CHECK-6-NEXT:     -- (Test3::C, 0) vtable address --
// CHECK-6-NEXT:  2 | void Test3::A::f()
// CHECK-6-NEXT:  3 | void Test3::C::g()
// CHECK-6-NEXT:  4 | void Test3::C::h()
struct C : A {
  virtual void g();
  virtual void h();
};
void C::g() { }

// CHECK-7:     Vtable for 'Test3::D' (5 entries).
// CHECK-7-NEXT:  0 | offset_to_top (0)
// CHECK-7-NEXT:  1 | Test3::D RTTI
// CHECK-7-NEXT:     -- (Test3::A, 0) vtable address --
// CHECK-7-NEXT:     -- (Test3::B, 0) vtable address --
// CHECK-7-NEXT:     -- (Test3::D, 0) vtable address --
// CHECK-7-NEXT:  2 | void Test3::D::f()
// CHECK-7-NEXT:  3 | void Test3::D::g()
// CHECK-7-NEXT:  4 | void Test3::D::h()
struct D : B {
  virtual void f();
  virtual void g();
  virtual void h();
};

void D::f() { }
}
namespace Test4 {

// Test non-virtual result adjustments.

struct R1 { int r1; };
struct R2 { int r2; };
struct R3 : R1, R2 { int r3; };

struct A {
  virtual R2 *f();
};

// Covariant override returning R3* where R2 sits at offset 4 inside R3:
// a thunk entry with a fixed (non-virtual) return adjustment is emitted.
// CHECK-8:     Vtable for 'Test4::B' (4 entries).
// CHECK-8-NEXT:  0 | offset_to_top (0)
// CHECK-8-NEXT:  1 | Test4::B RTTI
// CHECK-8-NEXT:      -- (Test4::A, 0) vtable address --
// CHECK-8-NEXT:      -- (Test4::B, 0) vtable address --
// CHECK-8-NEXT:  2 | Test4::R3 *Test4::B::f()
// CHECK-8-NEXT:      [return adjustment: 4 non-virtual]
// CHECK-8-NEXT:  3 | Test4::R3 *Test4::B::f()
struct B : A {
  virtual R3 *f();
};
R3 *B::f() { return 0; }

// Test virtual result adjustments.

struct V1 { int v1; };
struct V2 : virtual V1 { int v1; };

struct C {
  virtual V1 *f();
};

// Here the base of the return type is virtual, so the adjustment goes
// through the vbase offset stored in the vtable.
// CHECK-9:     Vtable for 'Test4::D' (4 entries).
// CHECK-9-NEXT:  0 | offset_to_top (0)
// CHECK-9-NEXT:  1 | Test4::D RTTI
// CHECK-9-NEXT:     -- (Test4::C, 0) vtable address --
// CHECK-9-NEXT:     -- (Test4::D, 0) vtable address --
// CHECK-9-NEXT:  2 | Test4::V2 *Test4::D::f()
// CHECK-9-NEXT:     [return adjustment: 0 non-virtual, -24 vbase offset offset]
// CHECK-9-NEXT:  3 | Test4::V2 *Test4::D::f()
struct D : C {
  virtual V2 *f();
};
V2 *D::f() { return 0; };

// Virtual result adjustments with an additional non-virtual adjustment.
struct V3 : virtual R3 { int r3; };

// CHECK-10:     Vtable for 'Test4::E' (4 entries).
// CHECK-10-NEXT:   0 | offset_to_top (0)
// CHECK-10-NEXT:   1 | Test4::E RTTI
// CHECK-10-NEXT:       -- (Test4::A, 0) vtable address --
// CHECK-10-NEXT:       -- (Test4::E, 0) vtable address --
// CHECK-10-NEXT:   2 | Test4::V3 *Test4::E::f()
// CHECK-10-NEXT:       [return adjustment: 4 non-virtual, -24 vbase offset offset]
// CHECK-10-NEXT:   3 | Test4::V3 *Test4::E::f()
struct E : A {
  virtual V3 *f();
};
V3 *E::f() { return 0;}

// Test that a pure virtual member doesn't get a thunk.
// CHECK-11:     Vtable for 'Test4::F' (5 entries).
// CHECK-11-NEXT:   0 | offset_to_top (0)
// CHECK-11-NEXT:   1 | Test4::F RTTI
// CHECK-11-NEXT:       -- (Test4::A, 0) vtable address --
// CHECK-11-NEXT:       -- (Test4::F, 0) vtable address --
// CHECK-11-NEXT:   2 | Test4::R3 *Test4::F::f() [pure]
// CHECK-11-NEXT:   3 | void Test4::F::g()
// CHECK-11-NEXT:   4 | Test4::R3 *Test4::F::f() [pure]
struct F : A {
  virtual void g();
  virtual R3 *f() = 0;
};
void F::g() { }

}
namespace Test5 {

// Simple secondary vtables without 'this' pointer adjustments.
struct A {
  virtual void f();
  virtual void g();

  int a;
};

struct B1 : A {
  virtual void f();

  int b1;
};

struct B2 : A {
  virtual void g();

  int b2;
};

// B1 is the primary base, so its subobject shares C's vtable at offset 0;
// B2 gets a secondary vtable at offset 16.
// CHECK-12:     Vtable for 'Test5::C' (9 entries).
// CHECK-12-NEXT:   0 | offset_to_top (0)
// CHECK-12-NEXT:   1 | Test5::C RTTI
// CHECK-12-NEXT:       -- (Test5::A, 0) vtable address --
// CHECK-12-NEXT:       -- (Test5::B1, 0) vtable address --
// CHECK-12-NEXT:       -- (Test5::C, 0) vtable address --
// CHECK-12-NEXT:   2 | void Test5::B1::f()
// CHECK-12-NEXT:   3 | void Test5::A::g()
// CHECK-12-NEXT:   4 | void Test5::C::h()
// CHECK-12-NEXT:   5 | offset_to_top (-16)
// CHECK-12-NEXT:   6 | Test5::C RTTI
// CHECK-12-NEXT:       -- (Test5::A, 16) vtable address --
// CHECK-12-NEXT:       -- (Test5::B2, 16) vtable address --
// CHECK-12-NEXT:   7 | void Test5::A::f()
// CHECK-12-NEXT:   8 | void Test5::B2::g()
struct C : B1, B2 {
  virtual void h();
};
void C::h() { }
}
namespace Test6 {

// Simple non-virtual 'this' pointer adjustments.
struct A1 {
  virtual void f();

  int a;
};

struct A2 {
  virtual void f();

  int a;
};

// C::f overrides both A1::f and A2::f; the entry in A2's secondary vtable
// needs a -16 non-virtual 'this' adjustment back to the start of C.
// CHECK-13:     Vtable for 'Test6::C' (6 entries).
// CHECK-13-NEXT:   0 | offset_to_top (0)
// CHECK-13-NEXT:   1 | Test6::C RTTI
// CHECK-13-NEXT:       -- (Test6::A1, 0) vtable address --
// CHECK-13-NEXT:       -- (Test6::C, 0) vtable address --
// CHECK-13-NEXT:   2 | void Test6::C::f()
// CHECK-13-NEXT:   3 | offset_to_top (-16)
// CHECK-13-NEXT:   4 | Test6::C RTTI
// CHECK-13-NEXT:       -- (Test6::A2, 16) vtable address --
// CHECK-13-NEXT:   5 | void Test6::C::f()
// CHECK-13-NEXT:       [this adjustment: -16 non-virtual]
struct C : A1, A2 {
  virtual void f();
};
void C::f() { }

}
namespace Test7 {
// Test that the D::f overrider for A::f has different 'this' pointer
// adjustments in the two A base subobjects (B1 at offset 8, B2 at 24).
struct A {
virtual void f();
int a;
};
struct B1 : A { };
struct B2 : A { };
struct C { virtual void c(); };
// CHECK-14: Vtable for 'Test7::D' (10 entries).
// CHECK-14-NEXT: 0 | offset_to_top (0)
// CHECK-14-NEXT: 1 | Test7::D RTTI
// CHECK-14-NEXT: -- (Test7::C, 0) vtable address --
// CHECK-14-NEXT: -- (Test7::D, 0) vtable address --
// CHECK-14-NEXT: 2 | void Test7::C::c()
// CHECK-14-NEXT: 3 | void Test7::D::f()
// CHECK-14-NEXT: 4 | offset_to_top (-8)
// CHECK-14-NEXT: 5 | Test7::D RTTI
// CHECK-14-NEXT: -- (Test7::A, 8) vtable address --
// CHECK-14-NEXT: -- (Test7::B1, 8) vtable address --
// CHECK-14-NEXT: 6 | void Test7::D::f()
// CHECK-14-NEXT: [this adjustment: -8 non-virtual]
// CHECK-14-NEXT: 7 | offset_to_top (-24)
// CHECK-14-NEXT: 8 | Test7::D RTTI
// CHECK-14-NEXT: -- (Test7::A, 24) vtable address --
// CHECK-14-NEXT: -- (Test7::B2, 24) vtable address --
// CHECK-14-NEXT: 9 | void Test7::D::f()
// CHECK-14-NEXT: [this adjustment: -24 non-virtual]
struct D : C, B1, B2 {
virtual void f();
};
void D::f() { }
}
namespace Test8 {
// Test that we don't try to layout vtables for classes that don't have
// virtual bases or virtual member functions.
// A has neither, so only B's vtable is expected below.
struct A { };
// CHECK-15: Vtable for 'Test8::B' (3 entries).
// CHECK-15-NEXT: 0 | offset_to_top (0)
// CHECK-15-NEXT: 1 | Test8::B RTTI
// CHECK-15-NEXT: -- (Test8::B, 0) vtable address --
// CHECK-15-NEXT: 2 | void Test8::B::f()
struct B : A {
virtual void f();
};
void B::f() { }
}
namespace Test9 {
// Simple test of vbase offsets.
// Both bases are virtual, so B's vtable begins with their vbase offsets.
struct A1 { int a1; };
struct A2 { int a2; };
// CHECK-16: Vtable for 'Test9::B' (5 entries).
// CHECK-16-NEXT: 0 | vbase_offset (16)
// CHECK-16-NEXT: 1 | vbase_offset (12)
// CHECK-16-NEXT: 2 | offset_to_top (0)
// CHECK-16-NEXT: 3 | Test9::B RTTI
// CHECK-16-NEXT: -- (Test9::B, 0) vtable address --
// CHECK-16-NEXT: 4 | void Test9::B::f()
struct B : virtual A1, virtual A2 {
int b;
virtual void f();
};
void B::f() { }
}
namespace Test10 {
// Test for a bug where we would not emit secondary vtables for bases
// of a primary base.
struct A1 { virtual void a1(); };
struct A2 { virtual void a2(); };
// CHECK-17: Vtable for 'Test10::C' (7 entries).
// CHECK-17-NEXT: 0 | offset_to_top (0)
// CHECK-17-NEXT: 1 | Test10::C RTTI
// CHECK-17-NEXT: -- (Test10::A1, 0) vtable address --
// CHECK-17-NEXT: -- (Test10::B, 0) vtable address --
// CHECK-17-NEXT: -- (Test10::C, 0) vtable address --
// CHECK-17-NEXT: 2 | void Test10::A1::a1()
// CHECK-17-NEXT: 3 | void Test10::C::f()
// CHECK-17-NEXT: 4 | offset_to_top (-8)
// CHECK-17-NEXT: 5 | Test10::C RTTI
// CHECK-17-NEXT: -- (Test10::A2, 8) vtable address --
// CHECK-17-NEXT: 6 | void Test10::A2::a2()
// B is C's primary base; A2 (a base of B) still needs the secondary
// vtable at offset 8 expected above.
struct B : A1, A2 {
int b;
};
struct C : B {
virtual void f();
};
void C::f() { }
}
namespace Test11 {
// Very simple test of vtables for virtual bases.
// B itself virtually derives from A2; C virtually derives from B.
struct A1 { int a; };
struct A2 { int b; };
struct B : A1, virtual A2 {
int b;
};
// CHECK-18: Vtable for 'Test11::C' (8 entries).
// CHECK-18-NEXT: 0 | vbase_offset (24)
// CHECK-18-NEXT: 1 | vbase_offset (8)
// CHECK-18-NEXT: 2 | offset_to_top (0)
// CHECK-18-NEXT: 3 | Test11::C RTTI
// CHECK-18-NEXT: -- (Test11::C, 0) vtable address --
// CHECK-18-NEXT: 4 | void Test11::C::f()
// CHECK-18-NEXT: 5 | vbase_offset (16)
// CHECK-18-NEXT: 6 | offset_to_top (-8)
// CHECK-18-NEXT: 7 | Test11::C RTTI
struct C : virtual B {
virtual void f();
};
void C::f() { }
}
namespace Test12 {
// Test that the right vcall offsets are generated in the right order.
// CHECK-19: Vtable for 'Test12::B' (19 entries).
// CHECK-19-NEXT: 0 | vbase_offset (8)
// CHECK-19-NEXT: 1 | offset_to_top (0)
// CHECK-19-NEXT: 2 | Test12::B RTTI
// CHECK-19-NEXT: -- (Test12::B, 0) vtable address --
// CHECK-19-NEXT: 3 | void Test12::B::f()
// CHECK-19-NEXT: 4 | void Test12::B::a()
// CHECK-19-NEXT: 5 | vcall_offset (32)
// CHECK-19-NEXT: 6 | vcall_offset (16)
// CHECK-19-NEXT: 7 | vcall_offset (-8)
// CHECK-19-NEXT: 8 | vcall_offset (0)
// CHECK-19-NEXT: 9 | offset_to_top (-8)
// CHECK-19-NEXT: 10 | Test12::B RTTI
// CHECK-19-NEXT: -- (Test12::A, 8) vtable address --
// CHECK-19-NEXT: -- (Test12::A1, 8) vtable address --
// CHECK-19-NEXT: 11 | void Test12::A1::a1()
// CHECK-19-NEXT: 12 | void Test12::B::a()
// CHECK-19-NEXT: [this adjustment: 0 non-virtual, -32 vcall offset offset]
// CHECK-19-NEXT: 13 | offset_to_top (-24)
// CHECK-19-NEXT: 14 | Test12::B RTTI
// CHECK-19-NEXT: -- (Test12::A2, 24) vtable address --
// CHECK-19-NEXT: 15 | void Test12::A2::a2()
// CHECK-19-NEXT: 16 | offset_to_top (-40)
// CHECK-19-NEXT: 17 | Test12::B RTTI
// CHECK-19-NEXT: -- (Test12::A3, 40) vtable address --
// CHECK-19-NEXT: 18 | void Test12::A3::a3()
struct A1 {
virtual void a1();
int a;
};
struct A2 {
virtual void a2();
int a;
};
struct A3 {
virtual void a3();
int a;
};
struct A : A1, A2, A3 {
virtual void a();
int i;
};
// B overrides a() from the virtual base A, so the A-in-B entry (12)
// above requires a vcall-offset 'this' adjustment.
struct B : virtual A {
virtual void f();
virtual void a();
};
void B::f() { }
}
namespace Test13 {
// Test that we don't try to emit a vtable for 'A' twice.
// A is reachable both directly (virtual A) and through B, but it is
// laid out only once: all vtable address points below share offset 0.
struct A {
virtual void f();
};
struct B : virtual A {
virtual void f();
};
// CHECK-20: Vtable for 'Test13::C' (6 entries).
// CHECK-20-NEXT: 0 | vbase_offset (0)
// CHECK-20-NEXT: 1 | vbase_offset (0)
// CHECK-20-NEXT: 2 | vcall_offset (0)
// CHECK-20-NEXT: 3 | offset_to_top (0)
// CHECK-20-NEXT: 4 | Test13::C RTTI
// CHECK-20-NEXT: -- (Test13::A, 0) vtable address --
// CHECK-20-NEXT: -- (Test13::B, 0) vtable address --
// CHECK-20-NEXT: -- (Test13::C, 0) vtable address --
// CHECK-20-NEXT: 5 | void Test13::C::f()
struct C : virtual B, virtual A {
virtual void f();
};
void C::f() { }
}
namespace Test14 {
// Verify that we handle A being a non-virtual base of B, which is a virtual base.
// All four classes end up sharing one vtable address point at offset 0.
struct A {
virtual void f();
};
struct B : A { };
struct C : virtual B { };
// CHECK-21: Vtable for 'Test14::D' (5 entries).
// CHECK-21-NEXT: 0 | vbase_offset (0)
// CHECK-21-NEXT: 1 | vcall_offset (0)
// CHECK-21-NEXT: 2 | offset_to_top (0)
// CHECK-21-NEXT: 3 | Test14::D RTTI
// CHECK-21-NEXT: -- (Test14::A, 0) vtable address --
// CHECK-21-NEXT: -- (Test14::B, 0) vtable address --
// CHECK-21-NEXT: -- (Test14::C, 0) vtable address --
// CHECK-21-NEXT: -- (Test14::D, 0) vtable address --
// CHECK-21-NEXT: 4 | void Test14::D::f()
struct D : C, virtual B {
virtual void f();
};
void D::f() { }
}
namespace Test15 {
// Test that we don't emit an extra vtable for B since it's a primary base of C.
// B and C share the secondary vtable at offset 8 in the layout below.
struct A { virtual void a(); };
struct B { virtual void b(); };
struct C : virtual B { };
// CHECK-22: Vtable for 'Test15::D' (11 entries).
// CHECK-22-NEXT: 0 | vbase_offset (8)
// CHECK-22-NEXT: 1 | vbase_offset (8)
// CHECK-22-NEXT: 2 | offset_to_top (0)
// CHECK-22-NEXT: 3 | Test15::D RTTI
// CHECK-22-NEXT: -- (Test15::A, 0) vtable address --
// CHECK-22-NEXT: -- (Test15::D, 0) vtable address --
// CHECK-22-NEXT: 4 | void Test15::A::a()
// CHECK-22-NEXT: 5 | void Test15::D::f()
// CHECK-22-NEXT: 6 | vbase_offset (0)
// CHECK-22-NEXT: 7 | vcall_offset (0)
// CHECK-22-NEXT: 8 | offset_to_top (-8)
// CHECK-22-NEXT: 9 | Test15::D RTTI
// CHECK-22-NEXT: -- (Test15::B, 8) vtable address --
// CHECK-22-NEXT: -- (Test15::C, 8) vtable address --
// CHECK-22-NEXT: 10 | void Test15::B::b()
struct D : A, virtual B, virtual C {
virtual void f();
};
void D::f() { }
}
namespace Test16 {
// Test that destructors share vcall offsets.
// Both the [complete] and [deleting] destructor entries in each secondary
// vtable below use the same -24 vcall offset offset.
struct A { virtual ~A(); };
struct B { virtual ~B(); };
struct C : A, B { virtual ~C(); };
// CHECK-23: Vtable for 'Test16::D' (15 entries).
// CHECK-23-NEXT: 0 | vbase_offset (8)
// CHECK-23-NEXT: 1 | offset_to_top (0)
// CHECK-23-NEXT: 2 | Test16::D RTTI
// CHECK-23-NEXT: -- (Test16::D, 0) vtable address --
// CHECK-23-NEXT: 3 | void Test16::D::f()
// CHECK-23-NEXT: 4 | Test16::D::~D() [complete]
// CHECK-23-NEXT: 5 | Test16::D::~D() [deleting]
// CHECK-23-NEXT: 6 | vcall_offset (-8)
// CHECK-23-NEXT: 7 | offset_to_top (-8)
// CHECK-23-NEXT: 8 | Test16::D RTTI
// CHECK-23-NEXT: -- (Test16::A, 8) vtable address --
// CHECK-23-NEXT: -- (Test16::C, 8) vtable address --
// CHECK-23-NEXT: 9 | Test16::D::~D() [complete]
// CHECK-23-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
// CHECK-23-NEXT: 10 | Test16::D::~D() [deleting]
// CHECK-23-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
// CHECK-23-NEXT: 11 | offset_to_top (-16)
// CHECK-23-NEXT: 12 | Test16::D RTTI
// CHECK-23-NEXT: -- (Test16::B, 16) vtable address --
// CHECK-23-NEXT: 13 | Test16::D::~D() [complete]
// CHECK-23-NEXT: [this adjustment: -8 non-virtual, -24 vcall offset offset]
// CHECK-23-NEXT: 14 | Test16::D::~D() [deleting]
// CHECK-23-NEXT: [this adjustment: -8 non-virtual, -24 vcall offset offset]
struct D : virtual C {
virtual void f();
};
void D::f() { }
}
namespace Test17 {
// Test that we don't mark E::f in the C-in-E vtable as unused.
// Entry 12 below is a real thunk for E::f, not an [unused] entry.
struct A { virtual void f(); };
struct B : virtual A { virtual void f(); };
struct C : virtual A { virtual void f(); };
struct D : virtual B, virtual C { virtual void f(); };
// CHECK-24: Vtable for 'Test17::E' (13 entries).
// CHECK-24-NEXT: 0 | vbase_offset (0)
// CHECK-24-NEXT: 1 | vbase_offset (8)
// CHECK-24-NEXT: 2 | vbase_offset (0)
// CHECK-24-NEXT: 3 | vbase_offset (0)
// CHECK-24-NEXT: 4 | vcall_offset (0)
// CHECK-24-NEXT: 5 | offset_to_top (0)
// CHECK-24-NEXT: 6 | Test17::E RTTI
// CHECK-24-NEXT: -- (Test17::A, 0) vtable address --
// CHECK-24-NEXT: -- (Test17::B, 0) vtable address --
// CHECK-24-NEXT: -- (Test17::D, 0) vtable address --
// CHECK-24-NEXT: -- (Test17::E, 0) vtable address --
// CHECK-24-NEXT: 7 | void Test17::E::f()
// CHECK-24-NEXT: 8 | vbase_offset (-8)
// CHECK-24-NEXT: 9 | vcall_offset (-8)
// CHECK-24-NEXT: 10 | offset_to_top (-8)
// CHECK-24-NEXT: 11 | Test17::E RTTI
// CHECK-24-NEXT: -- (Test17::C, 8) vtable address --
// CHECK-24-NEXT: 12 | void Test17::E::f()
// CHECK-24-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
class E : virtual D {
virtual void f();
};
void E::f() {}
}
namespace Test18 {
// Test that we compute the right 'this' adjustment offsets.
// D virtually derives from B, C, and A; both the complete-object vtable
// and three construction vtables (B at 0, C at 8, B at 16) are checked.
struct A {
virtual void f();
virtual void g();
};
struct B : virtual A {
virtual void f();
};
struct C : A, B {
virtual void g();
};
// CHECK-25: Vtable for 'Test18::D' (24 entries).
// CHECK-25-NEXT: 0 | vbase_offset (8)
// CHECK-25-NEXT: 1 | vbase_offset (0)
// CHECK-25-NEXT: 2 | vbase_offset (0)
// CHECK-25-NEXT: 3 | vcall_offset (8)
// CHECK-25-NEXT: 4 | vcall_offset (0)
// CHECK-25-NEXT: 5 | offset_to_top (0)
// CHECK-25-NEXT: 6 | Test18::D RTTI
// CHECK-25-NEXT: -- (Test18::A, 0) vtable address --
// CHECK-25-NEXT: -- (Test18::B, 0) vtable address --
// CHECK-25-NEXT: -- (Test18::D, 0) vtable address --
// CHECK-25-NEXT: 7 | void Test18::D::f()
// CHECK-25-NEXT: 8 | void Test18::C::g()
// CHECK-25-NEXT: [this adjustment: 0 non-virtual, -32 vcall offset offset]
// CHECK-25-NEXT: 9 | void Test18::D::h()
// CHECK-25-NEXT: 10 | vcall_offset (0)
// CHECK-25-NEXT: 11 | vcall_offset (-8)
// CHECK-25-NEXT: 12 | vbase_offset (-8)
// CHECK-25-NEXT: 13 | offset_to_top (-8)
// CHECK-25-NEXT: 14 | Test18::D RTTI
// CHECK-25-NEXT: -- (Test18::A, 8) vtable address --
// CHECK-25-NEXT: -- (Test18::C, 8) vtable address --
// CHECK-25-NEXT: 15 | void Test18::D::f()
// CHECK-25-NEXT: [this adjustment: 0 non-virtual, -32 vcall offset offset]
// CHECK-25-NEXT: 16 | void Test18::C::g()
// CHECK-25-NEXT: 17 | vbase_offset (-16)
// CHECK-25-NEXT: 18 | vcall_offset (-8)
// CHECK-25-NEXT: 19 | vcall_offset (-16)
// CHECK-25-NEXT: 20 | offset_to_top (-16)
// CHECK-25-NEXT: 21 | Test18::D RTTI
// CHECK-25-NEXT: -- (Test18::B, 16) vtable address --
// CHECK-25-NEXT: 22 | void Test18::D::f()
// CHECK-25-NEXT: [this adjustment: -8 non-virtual, -32 vcall offset offset]
// CHECK-25-NEXT: 23 | [unused] void Test18::C::g()
// CHECK-25: Construction vtable for ('Test18::B', 0) in 'Test18::D' (7 entries).
// CHECK-25-NEXT: 0 | vbase_offset (0)
// CHECK-25-NEXT: 1 | vcall_offset (0)
// CHECK-25-NEXT: 2 | vcall_offset (0)
// CHECK-25-NEXT: 3 | offset_to_top (0)
// CHECK-25-NEXT: 4 | Test18::B RTTI
// CHECK-25-NEXT: -- (Test18::A, 0) vtable address --
// CHECK-25-NEXT: -- (Test18::B, 0) vtable address --
// CHECK-25-NEXT: 5 | void Test18::B::f()
// CHECK-25-NEXT: 6 | void Test18::A::g()
// CHECK-25: Construction vtable for ('Test18::C', 8) in 'Test18::D' (20 entries).
// CHECK-25-NEXT: 0 | vcall_offset (0)
// CHECK-25-NEXT: 1 | vcall_offset (0)
// CHECK-25-NEXT: 2 | vbase_offset (-8)
// CHECK-25-NEXT: 3 | offset_to_top (0)
// CHECK-25-NEXT: 4 | Test18::C RTTI
// CHECK-25-NEXT: -- (Test18::A, 8) vtable address --
// CHECK-25-NEXT: -- (Test18::C, 8) vtable address --
// CHECK-25-NEXT: 5 | void Test18::A::f()
// CHECK-25-NEXT: 6 | void Test18::C::g()
// CHECK-25-NEXT: 7 | vbase_offset (-16)
// CHECK-25-NEXT: 8 | vcall_offset (-8)
// CHECK-25-NEXT: 9 | vcall_offset (0)
// CHECK-25-NEXT: 10 | offset_to_top (-8)
// CHECK-25-NEXT: 11 | Test18::C RTTI
// CHECK-25-NEXT: -- (Test18::B, 16) vtable address --
// CHECK-25-NEXT: 12 | void Test18::B::f()
// CHECK-25-NEXT: 13 | [unused] void Test18::C::g()
// CHECK-25-NEXT: 14 | vcall_offset (8)
// CHECK-25-NEXT: 15 | vcall_offset (16)
// CHECK-25-NEXT: 16 | offset_to_top (8)
// CHECK-25-NEXT: 17 | Test18::C RTTI
// CHECK-25-NEXT: -- (Test18::A, 0) vtable address --
// CHECK-25-NEXT: 18 | void Test18::B::f()
// CHECK-25-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
// CHECK-25-NEXT: 19 | void Test18::C::g()
// CHECK-25-NEXT: [this adjustment: 0 non-virtual, -32 vcall offset offset]
// CHECK-25: Construction vtable for ('Test18::B', 16) in 'Test18::D' (13 entries).
// CHECK-25-NEXT: 0 | vbase_offset (-16)
// CHECK-25-NEXT: 1 | vcall_offset (-16)
// CHECK-25-NEXT: 2 | vcall_offset (0)
// CHECK-25-NEXT: 3 | offset_to_top (0)
// CHECK-25-NEXT: 4 | Test18::B RTTI
// CHECK-25-NEXT: -- (Test18::B, 16) vtable address --
// CHECK-25-NEXT: 5 | void Test18::B::f()
// CHECK-25-NEXT: 6 | [unused] void Test18::A::g()
// CHECK-25-NEXT: 7 | vcall_offset (0)
// CHECK-25-NEXT: 8 | vcall_offset (16)
// CHECK-25-NEXT: 9 | offset_to_top (16)
// CHECK-25-NEXT: 10 | Test18::B RTTI
// CHECK-25-NEXT: -- (Test18::A, 0) vtable address --
// CHECK-25-NEXT: 11 | void Test18::B::f()
// CHECK-25-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
// CHECK-25-NEXT: 12 | void Test18::A::g()
struct D : virtual B, virtual C, virtual A
{
virtual void f();
virtual void h();
};
void D::f() {}
}
namespace Test19 {
// Another 'this' adjustment test.
// D::f overrides A::f both in the non-virtual B::A subobject (entry 7,
// non-virtual adjustment) and in the virtual A base (entry 12, vcall
// offset adjustment).
struct A {
int a;
virtual void f();
};
struct B : A {
int b;
virtual void g();
};
struct C {
virtual void c();
};
// CHECK-26: Vtable for 'Test19::D' (13 entries).
// CHECK-26-NEXT: 0 | vbase_offset (24)
// CHECK-26-NEXT: 1 | offset_to_top (0)
// CHECK-26-NEXT: 2 | Test19::D RTTI
// CHECK-26-NEXT: -- (Test19::C, 0) vtable address --
// CHECK-26-NEXT: -- (Test19::D, 0) vtable address --
// CHECK-26-NEXT: 3 | void Test19::C::c()
// CHECK-26-NEXT: 4 | void Test19::D::f()
// CHECK-26-NEXT: 5 | offset_to_top (-8)
// CHECK-26-NEXT: 6 | Test19::D RTTI
// CHECK-26-NEXT: -- (Test19::A, 8) vtable address --
// CHECK-26-NEXT: -- (Test19::B, 8) vtable address --
// CHECK-26-NEXT: 7 | void Test19::D::f()
// CHECK-26-NEXT: [this adjustment: -8 non-virtual]
// CHECK-26-NEXT: 8 | void Test19::B::g()
// CHECK-26-NEXT: 9 | vcall_offset (-24)
// CHECK-26-NEXT: 10 | offset_to_top (-24)
// CHECK-26-NEXT: 11 | Test19::D RTTI
// CHECK-26-NEXT: -- (Test19::A, 24) vtable address --
// CHECK-26-NEXT: 12 | void Test19::D::f()
// CHECK-26-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
struct D : C, B, virtual A {
virtual void f();
};
void D::f() { }
}
namespace Test20 {
// Pure virtual member functions should never have 'this' adjustments.
// Both f() entries below are [pure] and carry no adjustment line.
struct A {
virtual void f() = 0;
virtual void g();
};
struct B : A { };
// CHECK-27: Vtable for 'Test20::C' (9 entries).
// CHECK-27-NEXT: 0 | offset_to_top (0)
// CHECK-27-NEXT: 1 | Test20::C RTTI
// CHECK-27-NEXT: -- (Test20::A, 0) vtable address --
// CHECK-27-NEXT: -- (Test20::C, 0) vtable address --
// CHECK-27-NEXT: 2 | void Test20::C::f() [pure]
// CHECK-27-NEXT: 3 | void Test20::A::g()
// CHECK-27-NEXT: 4 | void Test20::C::h()
// CHECK-27-NEXT: 5 | offset_to_top (-8)
// CHECK-27-NEXT: 6 | Test20::C RTTI
// CHECK-27-NEXT: -- (Test20::A, 8) vtable address --
// CHECK-27-NEXT: -- (Test20::B, 8) vtable address --
// CHECK-27-NEXT: 7 | void Test20::C::f() [pure]
// CHECK-27-NEXT: 8 | void Test20::A::g()
struct C : A, B {
virtual void f() = 0;
virtual void h();
};
void C::h() { }
}
namespace Test21 {
// Test that we get vbase offsets right in secondary vtables.
// The E subobject at offset 8 repeats the vbase offsets (as -8) and
// gets an [unused] entry for F::f; the vbase offset offsets table for
// F is also verified.
struct A {
virtual void f();
};
struct B : virtual A { };
class C : virtual B { };
class D : virtual C { };
class E : virtual C { };
// CHECK-28: Vtable for 'Test21::F' (16 entries).
// CHECK-28-NEXT: 0 | vbase_offset (8)
// CHECK-28-NEXT: 1 | vbase_offset (0)
// CHECK-28-NEXT: 2 | vbase_offset (0)
// CHECK-28-NEXT: 3 | vbase_offset (0)
// CHECK-28-NEXT: 4 | vbase_offset (0)
// CHECK-28-NEXT: 5 | vcall_offset (0)
// CHECK-28-NEXT: 6 | offset_to_top (0)
// CHECK-28-NEXT: 7 | Test21::F RTTI
// CHECK-28-NEXT: -- (Test21::A, 0) vtable address --
// CHECK-28-NEXT: -- (Test21::B, 0) vtable address --
// CHECK-28-NEXT: -- (Test21::C, 0) vtable address --
// CHECK-28-NEXT: -- (Test21::D, 0) vtable address --
// CHECK-28-NEXT: -- (Test21::F, 0) vtable address --
// CHECK-28-NEXT: 8 | void Test21::F::f()
// CHECK-28-NEXT: 9 | vbase_offset (-8)
// CHECK-28-NEXT: 10 | vbase_offset (-8)
// CHECK-28-NEXT: 11 | vbase_offset (-8)
// CHECK-28-NEXT: 12 | vcall_offset (-8)
// CHECK-28-NEXT: 13 | offset_to_top (-8)
// CHECK-28-NEXT: 14 | Test21::F RTTI
// CHECK-28-NEXT: -- (Test21::E, 8) vtable address --
// CHECK-28-NEXT: 15 | [unused] void Test21::F::f()
//
// CHECK-28: Virtual base offset offsets for 'Test21::F' (5 entries).
// CHECK-28-NEXT: Test21::A | -32
// CHECK-28-NEXT: Test21::B | -40
// CHECK-28-NEXT: Test21::C | -48
// CHECK-28-NEXT: Test21::D | -56
// CHECK-28-NEXT: Test21::E | -64
class F : virtual D, virtual E {
virtual void f();
};
void F::f() { }
}
namespace Test22 {
// Very simple construction vtable test.
// V1 is virtual in both C and V2; from V2's subobject the shared V1
// lies at -4, as both the secondary and construction vtables show.
struct V1 {
int v1;
};
struct V2 : virtual V1 {
int v2;
};
// CHECK-29: Vtable for 'Test22::C' (8 entries).
// CHECK-29-NEXT: 0 | vbase_offset (16)
// CHECK-29-NEXT: 1 | vbase_offset (12)
// CHECK-29-NEXT: 2 | offset_to_top (0)
// CHECK-29-NEXT: 3 | Test22::C RTTI
// CHECK-29-NEXT: -- (Test22::C, 0) vtable address --
// CHECK-29-NEXT: 4 | void Test22::C::f()
// CHECK-29-NEXT: 5 | vbase_offset (-4)
// CHECK-29-NEXT: 6 | offset_to_top (-16)
// CHECK-29-NEXT: 7 | Test22::C RTTI
// CHECK-29-NEXT: -- (Test22::V2, 16) vtable address --
// CHECK-29: Construction vtable for ('Test22::V2', 16) in 'Test22::C' (3 entries).
// CHECK-29-NEXT: 0 | vbase_offset (-4)
// CHECK-29-NEXT: 1 | offset_to_top (0)
// CHECK-29-NEXT: 2 | Test22::V2 RTTI
struct C : virtual V1, virtual V2 {
int c;
virtual void f();
};
void C::f() { }
}
namespace Test23 {
// No virtual functions anywhere in this hierarchy: the vtables consist
// only of vbase offsets, offset-to-top, and RTTI entries.
struct A {
int a;
};
struct B : virtual A {
int b;
};
struct C : A, virtual B {
int c;
};
// CHECK-30: Vtable for 'Test23::D' (7 entries).
// CHECK-30-NEXT: 0 | vbase_offset (20)
// CHECK-30-NEXT: 1 | vbase_offset (24)
// CHECK-30-NEXT: 2 | offset_to_top (0)
// CHECK-30-NEXT: 3 | Test23::D RTTI
// CHECK-30-NEXT: -- (Test23::C, 0) vtable address --
// CHECK-30-NEXT: -- (Test23::D, 0) vtable address --
// CHECK-30-NEXT: 4 | vbase_offset (-4)
// CHECK-30-NEXT: 5 | offset_to_top (-24)
// CHECK-30-NEXT: 6 | Test23::D RTTI
// CHECK-30-NEXT: -- (Test23::B, 24) vtable address --
// CHECK-30: Construction vtable for ('Test23::C', 0) in 'Test23::D' (7 entries).
// CHECK-30-NEXT: 0 | vbase_offset (20)
// CHECK-30-NEXT: 1 | vbase_offset (24)
// CHECK-30-NEXT: 2 | offset_to_top (0)
// CHECK-30-NEXT: 3 | Test23::C RTTI
// CHECK-30-NEXT: -- (Test23::C, 0) vtable address --
// CHECK-30-NEXT: 4 | vbase_offset (-4)
// CHECK-30-NEXT: 5 | offset_to_top (-24)
// CHECK-30-NEXT: 6 | Test23::C RTTI
// CHECK-30-NEXT: -- (Test23::B, 24) vtable address --
// CHECK-30: Construction vtable for ('Test23::B', 24) in 'Test23::D' (3 entries).
// CHECK-30-NEXT: 0 | vbase_offset (-4)
// CHECK-30-NEXT: 1 | offset_to_top (0)
// CHECK-30-NEXT: 2 | Test23::B RTTI
// CHECK-30-NEXT: -- (Test23::B, 24) vtable address --
struct D : virtual A, virtual B, C {
int d;
void f();
};
void D::f() { }
D d;
}
namespace Test24 {
// Another construction vtable test.
// B and C share a single virtual A base; the C subobject's f() entry in
// D's vtable is [unused], while the C-in-D construction vtable carries
// the real A::f entry at A's own address point.
struct A {
virtual void f();
};
struct B : virtual A { };
struct C : virtual A { };
// CHECK-31: Vtable for 'Test24::D' (10 entries).
// CHECK-31-NEXT: 0 | vbase_offset (0)
// CHECK-31-NEXT: 1 | vcall_offset (0)
// CHECK-31-NEXT: 2 | offset_to_top (0)
// CHECK-31-NEXT: 3 | Test24::D RTTI
// CHECK-31-NEXT: -- (Test24::A, 0) vtable address --
// CHECK-31-NEXT: -- (Test24::B, 0) vtable address --
// CHECK-31-NEXT: -- (Test24::D, 0) vtable address --
// CHECK-31-NEXT: 4 | void Test24::D::f()
// CHECK-31-NEXT: 5 | vbase_offset (-8)
// CHECK-31-NEXT: 6 | vcall_offset (-8)
// CHECK-31-NEXT: 7 | offset_to_top (-8)
// CHECK-31-NEXT: 8 | Test24::D RTTI
// CHECK-31-NEXT: -- (Test24::C, 8) vtable address --
// CHECK-31-NEXT: 9 | [unused] void Test24::D::f()
// CHECK-31: Construction vtable for ('Test24::B', 0) in 'Test24::D' (5 entries).
// CHECK-31-NEXT: 0 | vbase_offset (0)
// CHECK-31-NEXT: 1 | vcall_offset (0)
// CHECK-31-NEXT: 2 | offset_to_top (0)
// CHECK-31-NEXT: 3 | Test24::B RTTI
// CHECK-31-NEXT: -- (Test24::A, 0) vtable address --
// CHECK-31-NEXT: -- (Test24::B, 0) vtable address --
// CHECK-31-NEXT: 4 | void Test24::A::f()
// CHECK-31: Construction vtable for ('Test24::C', 8) in 'Test24::D' (9 entries).
// CHECK-31-NEXT: 0 | vbase_offset (-8)
// CHECK-31-NEXT: 1 | vcall_offset (-8)
// CHECK-31-NEXT: 2 | offset_to_top (0)
// CHECK-31-NEXT: 3 | Test24::C RTTI
// CHECK-31-NEXT: -- (Test24::C, 8) vtable address --
// CHECK-31-NEXT: 4 | [unused] void Test24::A::f()
// CHECK-31-NEXT: 5 | vcall_offset (0)
// CHECK-31-NEXT: 6 | offset_to_top (8)
// CHECK-31-NEXT: 7 | Test24::C RTTI
// CHECK-31-NEXT: -- (Test24::A, 0) vtable address --
// CHECK-31-NEXT: 8 | void Test24::A::f()
struct D : B, C {
virtual void f();
};
void D::f() { }
}
namespace Test25 {
// This mainly tests that we don't assert on this class hierarchy.
// C lists V both as an (indirect) virtual base of A/B and directly;
// A is the primary base, so A, C, and V share the address point at 0.
struct V {
virtual void f();
};
struct A : virtual V { };
struct B : virtual V { };
// CHECK-32: Vtable for 'Test25::C' (11 entries).
// CHECK-32-NEXT: 0 | vbase_offset (0)
// CHECK-32-NEXT: 1 | vcall_offset (0)
// CHECK-32-NEXT: 2 | offset_to_top (0)
// CHECK-32-NEXT: 3 | Test25::C RTTI
// CHECK-32-NEXT: -- (Test25::A, 0) vtable address --
// CHECK-32-NEXT: -- (Test25::C, 0) vtable address --
// CHECK-32-NEXT: -- (Test25::V, 0) vtable address --
// CHECK-32-NEXT: 4 | void Test25::V::f()
// CHECK-32-NEXT: 5 | void Test25::C::g()
// CHECK-32-NEXT: 6 | vbase_offset (-8)
// CHECK-32-NEXT: 7 | vcall_offset (-8)
// CHECK-32-NEXT: 8 | offset_to_top (-8)
// CHECK-32-NEXT: 9 | Test25::C RTTI
// CHECK-32-NEXT: -- (Test25::B, 8) vtable address --
// CHECK-32-NEXT: 10 | [unused] void Test25::V::f()
// CHECK-32: Construction vtable for ('Test25::A', 0) in 'Test25::C' (5 entries).
// CHECK-32-NEXT: 0 | vbase_offset (0)
// CHECK-32-NEXT: 1 | vcall_offset (0)
// CHECK-32-NEXT: 2 | offset_to_top (0)
// CHECK-32-NEXT: 3 | Test25::A RTTI
// CHECK-32-NEXT: -- (Test25::A, 0) vtable address --
// CHECK-32-NEXT: -- (Test25::V, 0) vtable address --
// CHECK-32-NEXT: 4 | void Test25::V::f()
// CHECK-32: Construction vtable for ('Test25::B', 8) in 'Test25::C' (9 entries).
// CHECK-32-NEXT: 0 | vbase_offset (-8)
// CHECK-32-NEXT: 1 | vcall_offset (-8)
// CHECK-32-NEXT: 2 | offset_to_top (0)
// CHECK-32-NEXT: 3 | Test25::B RTTI
// CHECK-32-NEXT: -- (Test25::B, 8) vtable address --
// CHECK-32-NEXT: 4 | [unused] void Test25::V::f()
// CHECK-32-NEXT: 5 | vcall_offset (0)
// CHECK-32-NEXT: 6 | offset_to_top (8)
// CHECK-32-NEXT: 7 | Test25::B RTTI
// CHECK-32-NEXT: -- (Test25::V, 0) vtable address --
// CHECK-32-NEXT: 8 | void Test25::V::f()
struct C : A, virtual V, B {
virtual void g();
};
void C::g() { }
}
namespace Test26 {
// Test that we generate the right number of entries in the C-in-D construction vtable, and that
// we don't mark A::a as unused.
// B is D's primary base; the A/C pair shares the secondary vtable at
// offset 8, and A::a keeps a real entry in the construction vtable.
struct A {
virtual void a();
};
struct B {
virtual void c();
};
struct C : virtual A {
virtual void b();
};
// CHECK-33: Vtable for 'Test26::D' (15 entries).
// CHECK-33-NEXT: 0 | vbase_offset (8)
// CHECK-33-NEXT: 1 | vbase_offset (8)
// CHECK-33-NEXT: 2 | vbase_offset (0)
// CHECK-33-NEXT: 3 | vcall_offset (0)
// CHECK-33-NEXT: 4 | offset_to_top (0)
// CHECK-33-NEXT: 5 | Test26::D RTTI
// CHECK-33-NEXT: -- (Test26::B, 0) vtable address --
// CHECK-33-NEXT: -- (Test26::D, 0) vtable address --
// CHECK-33-NEXT: 6 | void Test26::B::c()
// CHECK-33-NEXT: 7 | void Test26::D::d()
// CHECK-33-NEXT: 8 | vcall_offset (0)
// CHECK-33-NEXT: 9 | vbase_offset (0)
// CHECK-33-NEXT: 10 | vcall_offset (0)
// CHECK-33-NEXT: 11 | offset_to_top (-8)
// CHECK-33-NEXT: 12 | Test26::D RTTI
// CHECK-33-NEXT: -- (Test26::A, 8) vtable address --
// CHECK-33-NEXT: -- (Test26::C, 8) vtable address --
// CHECK-33-NEXT: 13 | void Test26::A::a()
// CHECK-33-NEXT: 14 | void Test26::C::b()
// CHECK-33: Construction vtable for ('Test26::C', 8) in 'Test26::D' (7 entries).
// CHECK-33-NEXT: 0 | vcall_offset (0)
// CHECK-33-NEXT: 1 | vbase_offset (0)
// CHECK-33-NEXT: 2 | vcall_offset (0)
// CHECK-33-NEXT: 3 | offset_to_top (0)
// CHECK-33-NEXT: 4 | Test26::C RTTI
// CHECK-33-NEXT: -- (Test26::A, 8) vtable address --
// CHECK-33-NEXT: -- (Test26::C, 8) vtable address --
// CHECK-33-NEXT: 5 | void Test26::A::a()
// CHECK-33-NEXT: 6 | void Test26::C::b()
class D : virtual B, virtual C {
virtual void d();
};
void D::d() { }
}
namespace Test27 {
// Test that we don't generate a secondary vtable for C in the D-in-E vtable, since
// C doesn't have any virtual bases.
// Note the D-in-E construction vtable below lists only A, D, and B —
// no entry for C.
struct A {
virtual void a();
};
struct B {
virtual void b();
};
struct C {
virtual void c();
};
struct D : A, virtual B, C {
virtual void d();
};
// CHECK-34: Vtable for 'Test27::E' (13 entries).
// CHECK-34-NEXT: 0 | vbase_offset (16)
// CHECK-34-NEXT: 1 | offset_to_top (0)
// CHECK-34-NEXT: 2 | Test27::E RTTI
// CHECK-34-NEXT: -- (Test27::A, 0) vtable address --
// CHECK-34-NEXT: -- (Test27::D, 0) vtable address --
// CHECK-34-NEXT: -- (Test27::E, 0) vtable address --
// CHECK-34-NEXT: 3 | void Test27::A::a()
// CHECK-34-NEXT: 4 | void Test27::D::d()
// CHECK-34-NEXT: 5 | void Test27::E::e()
// CHECK-34-NEXT: 6 | offset_to_top (-8)
// CHECK-34-NEXT: 7 | Test27::E RTTI
// CHECK-34-NEXT: -- (Test27::C, 8) vtable address --
// CHECK-34-NEXT: 8 | void Test27::C::c()
// CHECK-34-NEXT: 9 | vcall_offset (0)
// CHECK-34-NEXT: 10 | offset_to_top (-16)
// CHECK-34-NEXT: 11 | Test27::E RTTI
// CHECK-34-NEXT: -- (Test27::B, 16) vtable address --
// CHECK-34-NEXT: 12 | void Test27::B::b()
// CHECK-34: Construction vtable for ('Test27::D', 0) in 'Test27::E' (9 entries).
// CHECK-34-NEXT: 0 | vbase_offset (16)
// CHECK-34-NEXT: 1 | offset_to_top (0)
// CHECK-34-NEXT: 2 | Test27::D RTTI
// CHECK-34-NEXT: -- (Test27::A, 0) vtable address --
// CHECK-34-NEXT: -- (Test27::D, 0) vtable address --
// CHECK-34-NEXT: 3 | void Test27::A::a()
// CHECK-34-NEXT: 4 | void Test27::D::d()
// CHECK-34-NEXT: 5 | vcall_offset (0)
// CHECK-34-NEXT: 6 | offset_to_top (-16)
// CHECK-34-NEXT: 7 | Test27::D RTTI
// CHECK-34-NEXT: -- (Test27::B, 16) vtable address --
// CHECK-34-NEXT: 8 | void Test27::B::b()
struct E : D {
virtual void e();
};
void E::e() { }
}
namespace Test28 {
// Check that we do include the vtable for B in the D-in-E construction vtable, since
// B is a base class of a virtual base (C).
// B appears at offset 16 in both the complete-object vtable and the
// construction vtable below.
struct A {
virtual void a();
};
struct B {
virtual void b();
};
struct C : A, B {
virtual void c();
};
struct D : virtual C {
};
// CHECK-35: Vtable for 'Test28::E' (14 entries).
// CHECK-35-NEXT: 0 | vbase_offset (8)
// CHECK-35-NEXT: 1 | offset_to_top (0)
// CHECK-35-NEXT: 2 | Test28::E RTTI
// CHECK-35-NEXT: -- (Test28::D, 0) vtable address --
// CHECK-35-NEXT: -- (Test28::E, 0) vtable address --
// CHECK-35-NEXT: 3 | void Test28::E::e()
// CHECK-35-NEXT: 4 | vcall_offset (8)
// CHECK-35-NEXT: 5 | vcall_offset (0)
// CHECK-35-NEXT: 6 | vcall_offset (0)
// CHECK-35-NEXT: 7 | offset_to_top (-8)
// CHECK-35-NEXT: 8 | Test28::E RTTI
// CHECK-35-NEXT: -- (Test28::A, 8) vtable address --
// CHECK-35-NEXT: -- (Test28::C, 8) vtable address --
// CHECK-35-NEXT: 9 | void Test28::A::a()
// CHECK-35-NEXT: 10 | void Test28::C::c()
// CHECK-35-NEXT: 11 | offset_to_top (-16)
// CHECK-35-NEXT: 12 | Test28::E RTTI
// CHECK-35-NEXT: -- (Test28::B, 16) vtable address --
// CHECK-35-NEXT: 13 | void Test28::B::b()
// CHECK-35: Construction vtable for ('Test28::D', 0) in 'Test28::E' (13 entries).
// CHECK-35-NEXT: 0 | vbase_offset (8)
// CHECK-35-NEXT: 1 | offset_to_top (0)
// CHECK-35-NEXT: 2 | Test28::D RTTI
// CHECK-35-NEXT: -- (Test28::D, 0) vtable address --
// CHECK-35-NEXT: 3 | vcall_offset (8)
// CHECK-35-NEXT: 4 | vcall_offset (0)
// CHECK-35-NEXT: 5 | vcall_offset (0)
// CHECK-35-NEXT: 6 | offset_to_top (-8)
// CHECK-35-NEXT: 7 | Test28::D RTTI
// CHECK-35-NEXT: -- (Test28::A, 8) vtable address --
// CHECK-35-NEXT: -- (Test28::C, 8) vtable address --
// CHECK-35-NEXT: 8 | void Test28::A::a()
// CHECK-35-NEXT: 9 | void Test28::C::c()
// CHECK-35-NEXT: 10 | offset_to_top (-16)
// CHECK-35-NEXT: 11 | Test28::D RTTI
// CHECK-35-NEXT: -- (Test28::B, 16) vtable address --
// CHECK-35-NEXT: 12 | void Test28::B::b()
struct E : D {
virtual void e();
};
void E::e() { }
}
namespace Test29 {
// Test that the covariant return thunk for B::f will have a virtual 'this' adjustment,
// matching gcc.
// Entry 4 below is the covariant thunk (return + 'this' adjustments);
// entry 5 is the non-adjusted B::f itself.
struct V1 { };
struct V2 : virtual V1 { };
struct A {
virtual V1 *f();
};
// CHECK-36: Vtable for 'Test29::B' (6 entries).
// CHECK-36-NEXT: 0 | vbase_offset (0)
// CHECK-36-NEXT: 1 | vcall_offset (0)
// CHECK-36-NEXT: 2 | offset_to_top (0)
// CHECK-36-NEXT: 3 | Test29::B RTTI
// CHECK-36-NEXT: -- (Test29::A, 0) vtable address --
// CHECK-36-NEXT: -- (Test29::B, 0) vtable address --
// CHECK-36-NEXT: 4 | Test29::V2 *Test29::B::f()
// CHECK-36-NEXT: [return adjustment: 0 non-virtual, -24 vbase offset offset]
// CHECK-36-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
// CHECK-36-NEXT: 5 | Test29::V2 *Test29::B::f()
struct B : virtual A {
virtual V2 *f();
};
V2 *B::f() { return 0; }
}
namespace Test30 {
// Test that we don't assert when generating a vtable for F.
// No layout expectations here; successfully compiling F is the test.
struct A { };
struct B : virtual A {
int i;
};
struct C {
virtual void f();
};
struct D : virtual C, B { };
struct E : virtual D { };
struct F : E {
virtual void f();
};
void F::f() { }
}
namespace Test31 {
// Test that we don't add D::f twice to the primary vtable.
// D::f appears once in the primary vtable (entry 5) and once, with a
// vcall-offset adjustment, for the C subobject (entry 10).
struct A {
int a;
};
struct B {
virtual void f();
};
struct C : A, virtual B {
virtual void f();
};
// CHECK-37: Vtable for 'Test31::D' (11 entries).
// CHECK-37-NEXT: 0 | vbase_offset (0)
// CHECK-37-NEXT: 1 | vbase_offset (8)
// CHECK-37-NEXT: 2 | vcall_offset (0)
// CHECK-37-NEXT: 3 | offset_to_top (0)
// CHECK-37-NEXT: 4 | Test31::D RTTI
// CHECK-37-NEXT: -- (Test31::B, 0) vtable address --
// CHECK-37-NEXT: -- (Test31::D, 0) vtable address --
// CHECK-37-NEXT: 5 | void Test31::D::f()
// CHECK-37-NEXT: 6 | vbase_offset (-8)
// CHECK-37-NEXT: 7 | vcall_offset (-8)
// CHECK-37-NEXT: 8 | offset_to_top (-8)
// CHECK-37-NEXT: 9 | Test31::D RTTI
// CHECK-37-NEXT: -- (Test31::C, 8) vtable address --
// CHECK-37-NEXT: 10 | void Test31::D::f()
// CHECK-37-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
struct D : virtual C {
virtual void f();
};
void D::f() { }
}
namespace Test32 {
// Check that we correctly lay out the virtual bases of 'Test32::D'.
// Only E's virtual base offset offsets table is verified here.
struct A {
virtual void f();
};
struct B : virtual A { };
struct C : A, virtual B { };
struct D : virtual B { };
// CHECK-38: Virtual base offset offsets for 'Test32::E' (3 entries).
// CHECK-38-NEXT: Test32::A | -32
// CHECK-38-NEXT: Test32::B | -24
// CHECK-38-NEXT: Test32::D | -40
struct E : C, virtual D {
virtual void f();
};
void E::f() { }
}
namespace Test33 {

// Test that we don't emit too many vcall offsets in 'Test33::F'.
// (Fixed comment typo: this namespace is Test33, not Test32.)
struct A {
  virtual void a();
};

struct B {
  virtual void b();
};

struct C : virtual A, virtual B {
  virtual void c();
};

struct D : virtual C { };

struct E : A, D {
  virtual void e();
};

// CHECK-39: Vtable for 'Test33::F' (30 entries).
// CHECK-39-NEXT: 0 | vbase_offset (24)
// CHECK-39-NEXT: 1 | vbase_offset (16)
// CHECK-39-NEXT: 2 | vbase_offset (16)
// CHECK-39-NEXT: 3 | vbase_offset (8)
// CHECK-39-NEXT: 4 | offset_to_top (0)
// CHECK-39-NEXT: 5 | Test33::F RTTI
// CHECK-39-NEXT: -- (Test33::A, 0) vtable address --
// CHECK-39-NEXT: -- (Test33::F, 0) vtable address --
// CHECK-39-NEXT: 6 | void Test33::A::a()
// CHECK-39-NEXT: 7 | void Test33::F::f()
// CHECK-39-NEXT: 8 | vcall_offset (0)
// CHECK-39-NEXT: 9 | vcall_offset (0)
// CHECK-39-NEXT: 10 | vbase_offset (16)
// CHECK-39-NEXT: 11 | vbase_offset (8)
// CHECK-39-NEXT: 12 | vbase_offset (8)
// CHECK-39-NEXT: 13 | offset_to_top (-8)
// CHECK-39-NEXT: 14 | Test33::F RTTI
// CHECK-39-NEXT: -- (Test33::A, 8) vtable address --
// CHECK-39-NEXT: -- (Test33::E, 8) vtable address --
// CHECK-39-NEXT: 15 | void Test33::A::a()
// CHECK-39-NEXT: 16 | void Test33::E::e()
// CHECK-39-NEXT: 17 | vbase_offset (0)
// CHECK-39-NEXT: 18 | vcall_offset (0)
// CHECK-39-NEXT: 19 | vbase_offset (8)
// CHECK-39-NEXT: 20 | vbase_offset (0)
// CHECK-39-NEXT: 21 | vcall_offset (0)
// CHECK-39-NEXT: 22 | offset_to_top (-16)
// CHECK-39-NEXT: 23 | Test33::F RTTI
// CHECK-39-NEXT: -- (Test33::A, 16) vtable address --
// CHECK-39-NEXT: -- (Test33::C, 16) vtable address --
// CHECK-39-NEXT: -- (Test33::D, 16) vtable address --
// CHECK-39-NEXT: 24 | void Test33::A::a()
// CHECK-39-NEXT: 25 | void Test33::C::c()
// CHECK-39-NEXT: 26 | vcall_offset (0)
// CHECK-39-NEXT: 27 | offset_to_top (-24)
// CHECK-39-NEXT: 28 | Test33::F RTTI
// CHECK-39-NEXT: -- (Test33::B, 24) vtable address --
// CHECK-39-NEXT: 29 | void Test33::B::b()
struct F : virtual E, A {
  virtual void f();
};
void F::f() { }

}
namespace Test34 {

// Test that we lay out the construction vtable for 'Test34::E' in 'Test34::F' correctly.
struct A {
  virtual void a();
};

struct B : virtual A { };

struct C : B, A {
  virtual void c();
};

struct D : A, C { };

struct E : virtual D {
  virtual void e();
};

// CHECK-40: Construction vtable for ('Test34::E', 0) in 'Test34::F' (22 entries).
// CHECK-40-NEXT: 0 | vbase_offset (0)
// CHECK-40-NEXT: 1 | vbase_offset (8)
// CHECK-40-NEXT: 2 | vcall_offset (0)
// CHECK-40-NEXT: 3 | offset_to_top (0)
// CHECK-40-NEXT: 4 | Test34::E RTTI
// CHECK-40-NEXT: -- (Test34::A, 0) vtable address --
// CHECK-40-NEXT: -- (Test34::E, 0) vtable address --
// CHECK-40-NEXT: 5 | void Test34::A::a()
// CHECK-40-NEXT: 6 | void Test34::E::e()
// CHECK-40-NEXT: 7 | vcall_offset (8)
// CHECK-40-NEXT: 8 | vcall_offset (0)
// CHECK-40-NEXT: 9 | vbase_offset (-8)
// CHECK-40-NEXT: 10 | offset_to_top (-8)
// CHECK-40-NEXT: 11 | Test34::E RTTI
// CHECK-40-NEXT: -- (Test34::A, 8) vtable address --
// CHECK-40-NEXT: -- (Test34::D, 8) vtable address --
// CHECK-40-NEXT: 12 | void Test34::A::a()
// CHECK-40-NEXT: 13 | vbase_offset (-16)
// CHECK-40-NEXT: 14 | vcall_offset (-16)
// CHECK-40-NEXT: 15 | offset_to_top (-16)
// CHECK-40-NEXT: 16 | Test34::E RTTI
// CHECK-40-NEXT: -- (Test34::B, 16) vtable address --
// CHECK-40-NEXT: -- (Test34::C, 16) vtable address --
// CHECK-40-NEXT: 17 | [unused] void Test34::A::a()
// CHECK-40-NEXT: 18 | void Test34::C::c()
// CHECK-40-NEXT: 19 | offset_to_top (-24)
// CHECK-40-NEXT: 20 | Test34::E RTTI
// CHECK-40-NEXT: -- (Test34::A, 24) vtable address --
// CHECK-40-NEXT: 21 | void Test34::A::a()
struct F : E {
  virtual void f();
};
void F::f() { }

}
namespace Test35 {

// Test that we lay out the virtual bases of 'Test35::H' in the correct order.
struct A {
  virtual void a();
  int i;
};

struct B : virtual A {
  virtual void b();
};

struct C {
  virtual void c();
};

struct D : C, virtual B {
  virtual void d();
};

struct E : D {
  virtual void e();
  bool b;
};

struct F : virtual D { };
struct G : virtual E { };

// CHECK-41: Vtable for 'Test35::H' (32 entries).
// CHECK-41-NEXT: 0 | vbase_offset (32)
// CHECK-41-NEXT: 1 | vbase_offset (0)
// CHECK-41-NEXT: 2 | vcall_offset (0)
// CHECK-41-NEXT: 3 | vcall_offset (0)
// CHECK-41-NEXT: 4 | vbase_offset (16)
// CHECK-41-NEXT: 5 | vbase_offset (8)
// CHECK-41-NEXT: 6 | offset_to_top (0)
// CHECK-41-NEXT: 7 | Test35::H RTTI
// CHECK-41-NEXT: -- (Test35::C, 0) vtable address --
// CHECK-41-NEXT: -- (Test35::D, 0) vtable address --
// CHECK-41-NEXT: -- (Test35::F, 0) vtable address --
// CHECK-41-NEXT: -- (Test35::H, 0) vtable address --
// CHECK-41-NEXT: 8 | void Test35::C::c()
// CHECK-41-NEXT: 9 | void Test35::D::d()
// CHECK-41-NEXT: 10 | void Test35::H::h()
// CHECK-41-NEXT: 11 | vbase_offset (0)
// CHECK-41-NEXT: 12 | vbase_offset (24)
// CHECK-41-NEXT: 13 | vcall_offset (0)
// CHECK-41-NEXT: 14 | vbase_offset (8)
// CHECK-41-NEXT: 15 | offset_to_top (-8)
// CHECK-41-NEXT: 16 | Test35::H RTTI
// CHECK-41-NEXT: -- (Test35::B, 8) vtable address --
// CHECK-41-NEXT: -- (Test35::G, 8) vtable address --
// CHECK-41-NEXT: 17 | void Test35::B::b()
// CHECK-41-NEXT: 18 | vcall_offset (0)
// CHECK-41-NEXT: 19 | offset_to_top (-16)
// CHECK-41-NEXT: 20 | Test35::H RTTI
// CHECK-41-NEXT: -- (Test35::A, 16) vtable address --
// CHECK-41-NEXT: 21 | void Test35::A::a()
// CHECK-41-NEXT: 22 | vcall_offset (0)
// CHECK-41-NEXT: 23 | vcall_offset (0)
// CHECK-41-NEXT: 24 | vcall_offset (0)
// CHECK-41-NEXT: 25 | vbase_offset (-16)
// CHECK-41-NEXT: 26 | vbase_offset (-24)
// CHECK-41-NEXT: 27 | offset_to_top (-32)
// CHECK-41-NEXT: 28 | Test35::H RTTI
// CHECK-41-NEXT: -- (Test35::C, 32) vtable address --
// CHECK-41-NEXT: -- (Test35::D, 32) vtable address --
// CHECK-41-NEXT: -- (Test35::E, 32) vtable address --
// CHECK-41-NEXT: 29 | void Test35::C::c()
// CHECK-41-NEXT: 30 | void Test35::D::d()
// CHECK-41-NEXT: 31 | void Test35::E::e()

// The virtual-base ordering is also checked via the vbase offset offsets.
// CHECK-41: Virtual base offset offsets for 'Test35::H' (4 entries).
// CHECK-41-NEXT: Test35::A | -32
// CHECK-41-NEXT: Test35::B | -24
// CHECK-41-NEXT: Test35::D | -56
// CHECK-41-NEXT: Test35::E | -64
struct H : F, G {
  virtual void h();
};
void H::h() { }

}
namespace Test36 {

// Test that we don't mark B::f as unused in the vtable for D.
// Entry 11 below shows C::f emitted (with a 'this' adjustment) in the
// (A, B)-subobject vtable rather than being marked [unused].
struct A {
  virtual void f();
};

struct B : virtual A { };

struct C : virtual A {
  virtual void f();
};

// CHECK-42: Vtable for 'Test36::D' (12 entries).
// CHECK-42-NEXT: 0 | vbase_offset (8)
// CHECK-42-NEXT: 1 | vbase_offset (8)
// CHECK-42-NEXT: 2 | vcall_offset (0)
// CHECK-42-NEXT: 3 | offset_to_top (0)
// CHECK-42-NEXT: 4 | Test36::D RTTI
// CHECK-42-NEXT: -- (Test36::C, 0) vtable address --
// CHECK-42-NEXT: -- (Test36::D, 0) vtable address --
// CHECK-42-NEXT: 5 | void Test36::C::f()
// CHECK-42-NEXT: 6 | void Test36::D::g()
// CHECK-42-NEXT: 7 | vbase_offset (0)
// CHECK-42-NEXT: 8 | vcall_offset (-8)
// CHECK-42-NEXT: 9 | offset_to_top (-8)
// CHECK-42-NEXT: 10 | Test36::D RTTI
// CHECK-42-NEXT: -- (Test36::A, 8) vtable address --
// CHECK-42-NEXT: -- (Test36::B, 8) vtable address --
// CHECK-42-NEXT: 11 | void Test36::C::f()
// CHECK-42-NEXT: [this adjustment: 0 non-virtual, -24 vcall offset offset]
struct D : virtual B, C {
  virtual void g();
};
void D::g() { }

}
namespace Test37 {

// Test that we give C::f the right vtable index. (PR9660).
// The covariant override chain A::f -> B::f -> C::f must yield index 1
// (index 0 is taken by the B::f slot of the covariant thunk pair).
struct A {
  virtual A* f() = 0;
};

struct B : virtual A {
  virtual B* f();
};

// CHECK-43: VTable indices for 'Test37::C' (1 entries).
// CHECK-43-NEXT: 1 | Test37::C *Test37::C::f()
struct C : B {
  virtual C* f();
};
C* C::f() { return 0; }

}
// rdar://problem/10959710
namespace Test38 {

// Test that const and non-const overloads of foo() each get their own
// vtable slot (entries 5 and 6) in the virtual-base layout.
struct A {
  virtual void *foo();
  virtual const void *foo() const;
};

// CHECK-44: Vtable for 'Test38::B' (7 entries).
// CHECK-44-NEXT: 0 | vbase_offset (0)
// CHECK-44-NEXT: 1 | vcall_offset (0)
// CHECK-44-NEXT: 2 | vcall_offset (0)
// CHECK-44-NEXT: 3 | offset_to_top (0)
// CHECK-44-NEXT: 4 | Test38::B RTTI
// CHECK-44-NEXT: -- (Test38::A, 0) vtable address --
// CHECK-44-NEXT: -- (Test38::B, 0) vtable address --
// CHECK-44-NEXT: 5 | void *Test38::B::foo()
// CHECK-44-NEXT: 6 | const void *Test38::B::foo() const
class B : virtual public A {
  void *foo();
  const void *foo() const;
};
void *B::foo() { return 0; }

}
| jeltz/rust-debian-package | src/llvm/tools/clang/test/CodeGenCXX/vtable-layout.cpp | C++ | apache-2.0 | 53,829 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph.restart;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.configuration.AkkaOptions;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.concurrent.duration.Duration;

import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Locale;
/**
 * Factory for {@link RestartStrategy} instances.
 *
 * <p>Concrete subclasses create one specific restart strategy. The static helper methods
 * translate user-facing configuration (either a
 * {@link org.apache.flink.api.common.restartstrategy.RestartStrategies.RestartStrategyConfiguration}
 * or a {@link Configuration}) into restart strategy respectively factory instances.
 */
public abstract class RestartStrategyFactory implements Serializable {
	private static final long serialVersionUID = 7320252552640522191L;

	private static final Logger LOG = LoggerFactory.getLogger(RestartStrategyFactory.class);

	/** Name of the static factory method looked up reflectively on user-provided factory classes. */
	private static final String CREATE_METHOD = "createFactory";

	/**
	 * Factory method to create a restart strategy.
	 *
	 * @return The created restart strategy
	 */
	public abstract RestartStrategy createRestartStrategy();

	/**
	 * Creates a {@link RestartStrategy} instance from the given {@link org.apache.flink.api.common.restartstrategy.RestartStrategies.RestartStrategyConfiguration}.
	 *
	 * @param restartStrategyConfiguration Restart strategy configuration which specifies which
	 *                                     restart strategy to instantiate
	 * @return RestartStrategy instance, or {@code null} for the fallback configuration
	 *         (meaning the strategy configured on the cluster side should be used)
	 */
	public static RestartStrategy createRestartStrategy(RestartStrategies.RestartStrategyConfiguration restartStrategyConfiguration) {
		if (restartStrategyConfiguration instanceof RestartStrategies.NoRestartStrategyConfiguration) {
			return new NoRestartStrategy();
		} else if (restartStrategyConfiguration instanceof RestartStrategies.FixedDelayRestartStrategyConfiguration) {
			RestartStrategies.FixedDelayRestartStrategyConfiguration fixedDelayConfig =
				(RestartStrategies.FixedDelayRestartStrategyConfiguration) restartStrategyConfiguration;

			return new FixedDelayRestartStrategy(
				fixedDelayConfig.getRestartAttempts(),
				fixedDelayConfig.getDelayBetweenAttemptsInterval().toMilliseconds());
		} else if (restartStrategyConfiguration instanceof RestartStrategies.FailureRateRestartStrategyConfiguration) {
			RestartStrategies.FailureRateRestartStrategyConfiguration config =
				(RestartStrategies.FailureRateRestartStrategyConfiguration) restartStrategyConfiguration;

			return new FailureRateRestartStrategy(
				config.getMaxFailureRate(),
				config.getFailureInterval(),
				config.getDelayBetweenAttemptsInterval()
			);
		} else if (restartStrategyConfiguration instanceof RestartStrategies.FallbackRestartStrategyConfiguration) {
			// null signals the caller to fall back to the cluster-wide default strategy
			return null;
		} else {
			throw new IllegalArgumentException("Unknown restart strategy configuration " +
				restartStrategyConfiguration + ".");
		}
	}

	/**
	 * Creates a {@link RestartStrategyFactory} instance from the given {@link Configuration}.
	 *
	 * @param configuration Configuration containing the restart strategy settings
	 * @return RestartStrategyFactory instance
	 * @throws Exception which indicates that the RestartStrategy could not be instantiated.
	 */
	public static RestartStrategyFactory createRestartStrategyFactory(Configuration configuration) throws Exception {
		String restartStrategyName = configuration.getString(ConfigConstants.RESTART_STRATEGY, "none");

		// Locale.ROOT keeps the comparison independent of the JVM's default locale
		// (e.g. the Turkish dotless-i rules would otherwise break "FIXED-DELAY")
		switch (restartStrategyName.toLowerCase(Locale.ROOT)) {
			case "none":
				// support deprecated ConfigConstants values
				final int numberExecutionRetries = configuration.getInteger(ConfigConstants.EXECUTION_RETRIES_KEY,
					ConfigConstants.DEFAULT_EXECUTION_RETRIES);

				// the Akka heartbeat pause doubles as the default retry delay
				String pauseString = configuration.getString(AkkaOptions.WATCH_HEARTBEAT_PAUSE);
				String delayString = configuration.getString(ConfigConstants.EXECUTION_RETRY_DELAY_KEY,
					pauseString);

				long delay;

				try {
					delay = Duration.apply(delayString).toMillis();
				} catch (NumberFormatException nfe) {
					// report the key whose value actually failed to parse
					if (delayString.equals(pauseString)) {
						throw new Exception("Invalid config value for " +
							AkkaOptions.WATCH_HEARTBEAT_PAUSE.key() + ": " + pauseString +
							". Value must be a valid duration (such as '10 s' or '1 min')");
					} else {
						throw new Exception("Invalid config value for " +
							ConfigConstants.EXECUTION_RETRY_DELAY_KEY + ": " + delayString +
							". Value must be a valid duration (such as '100 milli' or '10 s')");
					}
				}

				if (numberExecutionRetries > 0 && delay >= 0) {
					return new FixedDelayRestartStrategy.FixedDelayRestartStrategyFactory(numberExecutionRetries, delay);
				} else {
					return NoRestartStrategy.createFactory(configuration);
				}
			case "off":
			case "disable":
				return NoRestartStrategy.createFactory(configuration);
			case "fixeddelay":
			case "fixed-delay":
				return FixedDelayRestartStrategy.createFactory(configuration);
			case "failurerate":
			case "failure-rate":
				return FailureRateRestartStrategy.createFactory(configuration);
			default:
				// treat the name as a user-supplied factory class with a static
				// "createFactory(Configuration)" method
				try {
					Class<?> clazz = Class.forName(restartStrategyName);
					// Class.forName and getMethod never return null -- they throw
					// instead -- so no null checks are needed on clazz/method.
					Method method = clazz.getMethod(CREATE_METHOD, Configuration.class);
					Object result = method.invoke(null, configuration);

					if (result != null) {
						return (RestartStrategyFactory) result;
					}
				} catch (ClassNotFoundException cnfe) {
					LOG.warn("Could not find restart strategy class {}.", restartStrategyName);
				} catch (NoSuchMethodException nsme) {
					LOG.warn("Class {} does not have a static method {}.", restartStrategyName, CREATE_METHOD);
				} catch (InvocationTargetException ite) {
					LOG.warn("Cannot call static method {} from class {}.", CREATE_METHOD, restartStrategyName);
				} catch (IllegalAccessException iae) {
					LOG.warn("Illegal access while calling method {} from class {}.", CREATE_METHOD, restartStrategyName);
				}

				// fallback in case of an error
				return NoRestartStrategy.createFactory(configuration);
		}
	}
}
| WangTaoTheTonic/flink | flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/restart/RestartStrategyFactory.java | Java | apache-2.0 | 6,652 |
<!DOCTYPE html>
<!-- Redirect stub: forwards this old page URL to constant.RB_AUTOBOOT.html
     (appears to be a rustdoc-generated redirect page - confirm). -->
<html lang="en">
<head>
    <!-- Meta refresh covers clients without JavaScript. -->
    <meta http-equiv="refresh" content="0;URL=constant.RB_AUTOBOOT.html">
</head>
<body>
    <p>Redirecting to <a href="constant.RB_AUTOBOOT.html">constant.RB_AUTOBOOT.html</a>...</p>
    <!-- The script redirect additionally preserves the query string and fragment. -->
    <script>location.replace("constant.RB_AUTOBOOT.html" + location.search + location.hash);</script>
</body>
</html>
</html> | nitro-devs/nitro-game-engine | docs/libc/unix/notbsd/linux/RB_AUTOBOOT.v.html | HTML | apache-2.0 | 341 |
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.single.security;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.springframework.cloud.common.security.support.AuthoritiesMapper;
import org.springframework.cloud.common.security.support.ExternalOauth2ResourceAuthoritiesMapper;
import org.springframework.cloud.dataflow.server.single.LocalDataflowResource;
import org.springframework.security.oauth2.client.OAuth2RestTemplate;
import org.springframework.security.oauth2.client.token.grant.client.ClientCredentialsResourceDetails;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * Integration tests for an OAuth2-secured Data Flow server where the
 * authenticated user's authorities are resolved by calling an external
 * HTTP endpoint with the user's access token.
 *
 * @author Gunnar Hillert
 */
public class LocalServerSecurityWithOAuth2AndExternalAuthoritiesTests {

	private final static OAuth2ServerResource oAuth2ServerResource = new OAuth2ServerResource();

	// Mock HTTP server standing in for the external authorities endpoint.
	private final static MockWebServer externalAuthoritiesServer = new MockWebServer();

	static {
		// Static initializers run in textual order, so this system property is set
		// before localDataflowResource below is constructed; the referenced YAML
		// config presumably resolves ${externalAuthoritiesUrl} -- confirm against the file.
		System.setProperty("externalAuthoritiesUrl", externalAuthoritiesServer.url("/").toString());
	}

	private final static LocalDataflowResource localDataflowResource = new LocalDataflowResource(
		"classpath:org/springframework/cloud/dataflow/server/single/security/oauthConfigUsingExternalAuthorities.yml");

	// Rule ordering: OAuth2 server first, then the mock authorities server,
	// then the Data Flow server itself.
	@ClassRule
	public static TestRule springDataflowAndOAuth2Server = RuleChain.outerRule(oAuth2ServerResource).around(externalAuthoritiesServer)
		.around(localDataflowResource);

	@Test
	public void testAuthoritiesMapperBean() throws Exception {
		// The application context must wire the external-resource based mapper.
		final AuthoritiesMapper authoritiesMapper = localDataflowResource.getWebApplicationContext().getBean(AuthoritiesMapper.class);
		Assert.assertTrue(authoritiesMapper instanceof ExternalOauth2ResourceAuthoritiesMapper);
	}

	@Test
	public void testDataflowCallingExternalAuthoritiesServer() throws Exception {
		final String[] roles = {"VIEW", "CREATE", "MANAGE"};
		final ObjectMapper objectMapper = new ObjectMapper();

		// Queue the JSON role list that the mock authorities endpoint will return.
		externalAuthoritiesServer.enqueue(new MockResponse()
			.setBody(objectMapper.writeValueAsString(roles))
			.addHeader("Content-Type", "application/json"));

		// Obtain an access token via the client-credentials grant.
		final ClientCredentialsResourceDetails resourceDetails = new ClientCredentialsResourceDetails();
		resourceDetails.setClientId("myclient");
		resourceDetails.setClientSecret("mysecret");
		resourceDetails.setGrantType("client_credentials");
		resourceDetails
			.setAccessTokenUri("http://localhost:" + oAuth2ServerResource.getOauth2ServerPort() + "/oauth/token");

		// No authorities lookup may have happened yet.
		Assert.assertEquals(0, externalAuthoritiesServer.getRequestCount());

		final OAuth2RestTemplate oAuth2RestTemplate = new OAuth2RestTemplate(resourceDetails);
		final OAuth2AccessToken accessToken = oAuth2RestTemplate.getAccessToken();

		final String accessTokenAsString = accessToken.getValue();

		// Calling a secured endpoint must succeed and report the 3 externally provided roles.
		localDataflowResource.getMockMvc()
			.perform(get("/security/info").header("Authorization", "bearer " + accessTokenAsString)).andDo(print())
			.andExpect(status().isOk())
			.andExpect(jsonPath("$.authenticated", is(Boolean.TRUE)))
			.andExpect(jsonPath("$.authenticationEnabled", is(Boolean.TRUE)))
			.andExpect(jsonPath("$.roles", hasSize(3)));

		// Exactly one request must have reached the authorities endpoint,
		// carrying the user's bearer token.
		assertThat(externalAuthoritiesServer.getRequestCount(), is(1));
		final RecordedRequest recordedRequest = externalAuthoritiesServer.takeRequest();
		assertThat(recordedRequest.getHeader("Authorization"), is("Bearer " + accessTokenAsString));
	}
}
| mminella/spring-cloud-data | spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/security/LocalServerSecurityWithOAuth2AndExternalAuthoritiesTests.java | Java | apache-2.0 | 4,709 |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.madvoc;
import jodd.http.HttpRequest;
import jodd.http.HttpResponse;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class IntcptActionTest {
	@BeforeClass
	public static void beforeClass() {
		// Start the embedded Tomcat instance hosting the Madvoc test web app.
		MadvocSuite.startTomcat();
	}
	@AfterClass
	public static void afterClass() {
		// Stop the shared Tomcat instance once all tests in this class have run.
		MadvocSuite.stopTomcat();
	}
	@Test
	public void testIn1Action() {
		// GET the in1 action with one request parameter and verify the rendered body.
		HttpResponse response = HttpRequest.get("localhost:8173/cpt.in1.html?foo=173").send();
		assertEquals("param: = 173", response.bodyText().trim());
	}
	@Test
	public void testIn2Action() {
		// GET the in2 action with two request parameters and verify the rendered body.
		HttpResponse response = HttpRequest.get("localhost:8173/cpt.in2.html?foo=173&foo2=173").send();
		assertEquals("param: 173 = 173", response.bodyText().trim());
	}
	@Test
	public void testAppendingAction() {
		// GET the inap action and verify the appended value in the rendered body.
		HttpResponse response = HttpRequest.get("localhost:8173/cpt.inap.html").send();
		assertEquals("value=appending<jodd>", response.bodyText().trim());
	}
	@Test
	public void testAppending2Action() {
		// GET the inap2 action and verify the appended value in the rendered body.
		HttpResponse response = HttpRequest.get("localhost:8173/cpt.inap2.html").send();
		assertEquals("value=appending2<heyp>", response.bodyText().trim());
	}
	@Test
	public void testAppending3Action() {
		// GET the inap3 action and verify the appended value in the rendered body.
		HttpResponse response = HttpRequest.get("localhost:8173/cpt.inap3.html").send();
		assertEquals("value=appending3<jodd>", response.bodyText().trim());
	}
} | wjw465150/jodd | jodd-madvoc/src/testInt/java/jodd/madvoc/IntcptActionTest.java | Java | bsd-2-clause | 2,792 |
# Homebrew formula for lftp, a command-line file transfer program.
class Lftp < Formula
  desc "Sophisticated file transfer program"
  homepage "https://lftp.tech"
  url "https://lftp.yar.ru/ftp/lftp-4.7.7.tar.bz2"
  mirror "ftp://ftp.st.ryukoku.ac.jp/pub/network/ftp/lftp/lftp-4.7.7.tar.bz2"
  sha256 "fe441f20a9a317cfb99a8b8e628ba0457df472b6d93964d17374d5b5ebdf9280"

  # Pre-built binary bottles for the supported macOS releases.
  bottle do
    sha256 "d27973d0a87c6cb2362be22562ae8082f521c68895a394294219aa2812409d32" => :sierra
    sha256 "cba2c456d098e8d7db989207ba27d383e602b88de78fe25c186932e5be18f6e6" => :el_capitan
    sha256 "5ba5ec09928d5314ec91ce77825b3415f8e323e8812889cb6839d30304fb608e" => :yosemite
  end

  depends_on "readline"
  depends_on "openssl"
  depends_on "libidn"

  def install
    # Point the configure script at Homebrew's openssl/readline/libidn
    # rather than any system-provided copies.
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--with-openssl=#{Formula["openssl"].opt_prefix}",
                          "--with-readline=#{Formula["readline"].opt_prefix}",
                          "--with-libidn=#{Formula["libidn"].opt_prefix}"
    system "make", "install"
  end

  test do
    # Smoke test: connect to a public FTP server and list its root directory
    # (requires network access).
    system "#{bin}/lftp", "-c", "open ftp://ftp.gnu.org/; ls"
  end
end
| gserra-olx/homebrew-core | Formula/lftp.rb | Ruby | bsd-2-clause | 1,134 |
# Homebrew-Cask definition for QGIS, a desktop GIS application.
cask :v1 => 'qgis' do
  version '2.12.0-1'
  sha256 'b398f12904f7762ee74a1d9ddebd1fd44eef85007488320dcd4899aebefc6089'

  # The version string is interpolated into the upstream download URL.
  url "http://www.kyngchaos.com/files/software/qgis/QGIS-#{version}.dmg"
  name 'QGIS'
  homepage 'http://www.kyngchaos.com/software/qgis'
  license :gpl

  # Installed via the pkg inside the DMG; uninstalled by package receipt id.
  pkg 'Install QGIS.pkg'
  uninstall :pkgutil => 'org.qgis.qgis-*'

  # Additional dependencies declared for this cask.
  depends_on :cask => 'gdal-framework'
  depends_on :formula => 'matplotlib'
end
| gibsjose/homebrew-cask | Casks/qgis.rb | Ruby | bsd-2-clause | 424 |
/**
* @module ol/source/TileEventType
*/
/**
 * Event types that a tile source dispatches while its tiles load.
 * @enum {string}
 */
export default {

  /**
   * Triggered when a tile starts loading.
   * @event module:ol/source/Tile.TileSourceEvent#tileloadstart
   * @api
   */
  TILELOADSTART: 'tileloadstart',

  /**
   * Triggered when a tile finishes loading, either when its data is loaded,
   * or when loading was aborted because the tile is no longer needed.
   * @event module:ol/source/Tile.TileSourceEvent#tileloadend
   * @api
   */
  TILELOADEND: 'tileloadend',

  /**
   * Triggered if tile loading results in an error.
   * @event module:ol/source/Tile.TileSourceEvent#tileloaderror
   * @api
   */
  TILELOADERROR: 'tileloaderror'
};
| fredj/ol3 | src/ol/source/TileEventType.js | JavaScript | bsd-2-clause | 690 |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.madvoc.interceptor;
import jodd.log.Logger;
import jodd.log.LoggerFactory;
/**
 * Echo interceptor that outputs to logger.
 * <p>
 * Overrides {@link EchoInterceptor#out(String)} so that every echo message
 * is written at DEBUG level through Jodd's logging facade instead of the
 * interceptor's default output.
 */
public class LogEchoInterceptor extends EchoInterceptor {

	private static final Logger log = LoggerFactory.getLogger(LogEchoInterceptor.class);

	/**
	 * Writes a single echo message to the logger at DEBUG level.
	 */
	@Override
	protected void out(String message) {
		log.debug(message);
	}
}
| javachengwc/jodd | jodd-madvoc/src/main/java/jodd/madvoc/interceptor/LogEchoInterceptor.java | Java | bsd-2-clause | 1,789 |
Supported Geometry Oper/Functions
---------------------------------
#### Geometry Constructors
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| geomFromText | ST_GeomFromText | create a ST_Geometry from a Well-Known Text | ST_GeomFromText(wkt) |
| geomFromWKB | ST_GeomFromWKB | create a geometry from a Well-Known Binary | ST_GeomFromWKB(wkb) |
| geomFromEWKT | ST_GeomFromEWKT | create a ST_Geometry from a Extended Well-Known Text | ST_GeomFromEWKT(ewkt) |
| geomFromEWKB | ST_GeomFromWKB | create a geometry from a Well-Known Binary | ST_GeomFromWKB(ewkb) |
| geomFromGML | ST_GeomFromGML | create a geometry from input GML | ST_GeomFromGML(gml[, srid]) |
| geomFromKML | ST_GeomFromKML | create a geometry from input KML | ST_GeomFromKML(kml) |
| geomFromGeoJSON | ST_GeomFromGeoJSON | create a geometry from input geojson | ST_GeomFromGeoJSON( json) |
| makeBox | ST_MakeBox2D | Creates a BOX2D defined by the given point geometries | ST_MakeBox2D( pointLowLeft, pointUpRight) |
| makeBox3d | ST_3DMakeBox | Creates a BOX3D defined by the given 3d point geometries | ST_3DMakeBox( point3dLowLeft, point3dUpRight) |
| makeEnvelope | ST_MakeEnvelope | Creates a rectangular Polygon formed from the given minimums and maximums | ST_MakeEnvelope(xmin, ymin, xmax, ymax, srid=unknown) |
| makePoint | ST_MakePoint<br/>ST_MakePointM | Creates a 2D,3DZ or 4D point geometry | ST_MakePoint(x,y)<br/>ST_MakePointM(x,y,m) |
| makeLine | ST_MakeLine | Creates a Linestring from point or line geometries | ST_MakeLine(point1, point2) |
| makePolygon | ST_MakePolygon | Creates a Polygon formed by the given CLOSED linestring | ST_MakePolygon(linestring) |
#### Geometry Operators
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| @&& | && | if A's 2D bounding box intersects B's 2D bounding box | geomA && geomB |
| @&&& | &&& | if A's 3D bounding box intersects B's 3D bounding box | geomA &&& geomB |
| @> | ~ | A's bounding box contains B's | geomA ~ geomB |
| <@ | @ | if A's bounding box is contained by B's | geomA @ geomB |
| <-> | <-> | the distance between two points | geomA <-> geomB |
| <#> | <#> | the distance between bounding box of 2 geometries | geomA <#> geomB |
| &< | &< | if A's bounding box overlaps or is to the left of B's | geomA &< geomB |
| << | << | if A's bounding box is strictly to the left of B's | geomA << geomB |
| &<| | &<| | if A's bounding box overlaps or is below B's | geomA &<| geomB |
| <<| | <<| | if A's bounding box is strictly below B's | geomA <<| geomB |
| &> | &> | if A's bounding box overlaps or is to the right of B's | geomA &> geomB |
| >> | >> | if A's bounding box is strictly to the right of B's | geomA >> geomB |
| |&> | |&> | if A's bounding box overlaps or is above B's | geomA |&> geomB |
| |>> | |>> | if A's bounding box is strictly above B's | geomA |>> geomB |
#### Geometry Accessors
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| geomType | ST_GeometryType | the geometry type of the ST_Geometry value | ST_GeometryType(geom) |
| srid | ST_SRID | the spatial reference identifier for the ST_Geometry | ST_SRID(geom) |
| isValid | ST_IsValid | if the ST_Geometry is well formed | ST_IsValid(geom[, flags]) |
| isClosed | ST_IsClosed | if the LINESTRING's start and end points are coincident| ST_IsClosed(geom) |
| isCollection | ST_IsCollection | if the argument is a collection (MULTI*, GEOMETRYCOLLECTION, ...) | ST_IsCollection(geom) |
| isEmpty | ST_IsEmpty | if this Geometry is an empty geometrycollection, polygon, point etc | ST_IsEmpty(geom) |
| isRing | ST_IsRing | if this LINESTRING is both closed and simple | ST_IsRing(geom) |
| isSimple | ST_IsSimple | if this Geometry has no anomalous geometric points, such as self intersection or self tangency | ST_IsSimple(geom) |
| hasArc | ST_HasArc | if a geometry or geometry collection contains a circular string | ST_HasArc(geom) |
| area | ST_Area | area of the surface if it's a polygon or multi-polygon | ST_Area(geom) |
| boundary | ST_Boundary | closure of the combinatorial boundary of the Geometry | ST_Boundary(geom) |
| dimension | ST_Dimension | inherent dimension of this Geometry object, which must be less than or equal to the coordinate dimension | ST_Dimension(geom) |
| coordDim | ST_CoordDim | the coordinate dimension of the ST_Geometry value | ST_CoordDim(geom) |
| nDims | ST_NDims | coordinate dimension of the geometry | ST_NDims(geom) |
| nPoints | ST_NPoints | number of points (vertexes) in a geometry | ST_NPoints(geom) |
| nRings | ST_NRings | number of rings if the geometry is a polygon or multi-polygon | ST_NRings(geom) |
| x | ST_X | Return the X coordinate of the point | ST_X(point) |
| y | ST_Y | Return the Y coordinate of the point | ST_Y(point) |
| z | ST_Z | Return the Z coordinate of the point | ST_Z(point) |
| xmin | ST_XMin | Returns X minima of a bounding box 2d or 3d or a geometry | ST_XMin(Box3D(geom)) |
| xmax | ST_XMax | Returns X maxima of a bounding box 2d or 3d or a geometry | ST_XMax(Box3D(geom)) |
| ymin | ST_YMin | Returns Y minima of a bounding box 2d or 3d or a geometry | ST_YMin(Box3D(geom)) |
| ymax | ST_YMax | Returns Y maxima of a bounding box 2d or 3d or a geometry | ST_YMax(Box3D(geom)) |
| zmin | ST_ZMin | Returns Z minima of a bounding box 2d or 3d or a geometry | ST_ZMin(Box3D(geom)) |
| zmax | ST_ZMax | Returns Z maxima of a bounding box 2d or 3d or a geometry | ST_ZMax(Box3D(geom)) |
| zmflag | ST_Zmflag | Returns ZM (dimension semantic) flag of the geometries as a small int. Values are: 0=2d, 1=3dm, 2=3dz, 3=4d | ST_Zmflag(geom) |
#### Geometry Outputs
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| asBinary | ST_AsBinary | Well-Known Binary of the geometry without SRID | ST_AsBinary(geom[, NDRorXDR]) |
| asText | ST_AsText | Well-Known Text of the geometry without SRID | ST_AsText(geom) |
| asLatLonText | ST_AsLatLonText | Degrees, Minutes, Seconds representation of the point | ST_AsLatLonText(geom[, format]) |
| asEWKB | ST_AsEWKB | Well-Known Binary of the geometry with SRID | ST_AsEWKB(geom[, NDRorXDR]) |
| asEWKT | ST_AsEWKT | Well-Known Text of the geometry with SRID | ST_AsEWKT(geom) |
| asHEXEWKB | ST_AsHEXEWKB | HEXEWKB format text of the geometry with SRID | ST_AsHEXEWKB(geom[, NDRorXDR])|
| asGeoJSON | ST_AsGeoJSON | GeoJSON format text of the geometry | ST_AsGeoJSON( [ver, ]geom, maxdigits, options) |
| asGeoHash | ST_GeoHash | GeoHash representation (geohash.org) of the geometry | ST_GeoHash(geom, maxchars) |
| asGML | ST_AsGML | GML format text of the geometry | ST_AsGML([ver, ]geom, maxdigits, options) |
| asKML | ST_AsKML | KML format text of the geometry | ST_AsKML([ver, ]geom, maxdigits[, nprefix]) |
| asSVG | ST_AsSVG | SVG format text of the geometry | ST_AsSVG(geom, rel, maxdigits) |
| asX3D | ST_AsX3D | X3D format text of the geometry | ST_AsX3D(geom, maxdigits, options) |
#### Spatial Relationships
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| gEquals | ST_Equals | if the given geometries represent the same geometry | ST_Equals(geomA, geomB) |
| orderingEquals | ST_OrderingEquals | if the given geometries represent the same geometry and points are in the same directional order | ST_OrderingEquals( geomA, geomB) |
| overlaps | ST_Overlaps | if the Geometries share space, are of the same dimension, but are not completely contained by each other | ST_Overlaps(geomA, geomB) |
| intersects | ST_Intersects | if the geometries "spatially intersect in 2D" | ST_Intersects(geomA, geomB) |
| crosses | ST_Crosses | if the supplied geometries have some, but not all, interior points in common | ST_Crosses(geomA, geomB) |
| disjoint | ST_Disjoint | if the Geometries do not "spatially intersect" | ST_Disjoint(geomA, geomB) |
| contains | ST_Contains | if geometry A contains geometry B | ST_Contains(geomA, geomB) |
| containsProperly | ST_ContainsProperly | if geometry A contains geometry B and no boundary | ST_ContainsProperly(geomA, geomB) |
| within | ST_Within | if the geometry A is completely inside geometry B | ST_Within(geomA, geomB) |
| dWithin | ST_DWithin | if the geometry are within the specified distance of another | ST_DWithin(geomA, geomB, distance) |
| dFullyWithin | ST_DFullyWithin | if all of the geometries are within the specified distance of one another | ST_DFullyWithin( geomA, geomB, distance) |
| touches | ST_Touches | if the geometries have at least one point in common, but their interiors do not intersect | ST_Touches(geomA, geomB)|
| relate | ST_Relate | if this geometry is spatially related to another | ST_Relate(geomA, geomB, intersectionMatrixPattern) |
| relatePattern | ST_Relate | maximum intersectionMatrixPattern that relates the 2 geometries | ST_Relate(geomA, geomB[, boundaryNodeRule]) |
#### Spatial Measurements
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| azimuth | ST_Azimuth | angle in radians from the horizontal of the vector defined by pointA and pointB | ST_Azimuth(pointA, pointB) |
| centroid | ST_Centroid | geometric center of the geometry | ST_Centroid(geom) |
| closestPoint | ST_ClosestPoint | the first point of the shortest line from geomA to geomB | ST_ClosestPoint( geomA, geomB) |
| pointOnSurface | ST_PointOnSurface | a POINT guaranteed to lie on the surface | ST_PointOnSurface( geom) |
| project | ST_Project | a POINT projected from a start point using a distance in meters and bearing (azimuth) in radians | ST_Project(geog, distance, azimuth) |
| length | ST_Length | 2d length of the geometry if it is a linestring or multilinestring | ST_Length(geom) |
| length3d | ST_3DLength | 3d length of the geometry if it is a linestring or multi-linestring| ST_3DLength(geom) |
| perimeter | ST_Perimeter | length measurement of the boundary of an ST_Surface or ST_MultiSurface geometry | ST_Perimeter(geom) |
| distance | ST_Distance | minimum distance between the two geometries | ST_Distance(geomA, geomB) |
| distanceSphere | ST_Distance_Sphere | minimum distance in meters between two lon/lat geometries | ST_Distance_Sphere( geomA, geomB) |
| maxDistance | ST_MaxDistance | largest distance between the two geometries | ST_MaxDistance(geomA, geomB) |
| hausdorffDistance | ST_HausdorffDistance | Hausdorff distance between two geometries, a measure of how similar or dissimilar they are. | ST_HausdorffDistance( geomA, geomB[, densifyFrac]) |
| longestLine | ST_LongestLine | longest line points of the two geometries | ST_LongestLine(geomA, geomB) |
| shortestLine | ST_ShortestLine | shortest line between the two geometries | ST_ShortestLine(geomA, geomB) |
#### Geometry Processing
| Slick Oper/Function | PostGIS Oper/Function | Description | Example |
| ------------------- | --------------------- | ------------------------------------------------------ | ----------------------------- |
| setSRID | ST_SetSRID | set the SRID on a geometry | ST_SetSRID(geom, srid) |
| transform | ST_Transform | new geometry with its coordinates transformed to the SRID | ST_Transform(geom, srid) |
| simplify | ST_Simplify | "simplified" version of the given geometry using the Douglas-Peucker algorithm | ST_Simplify(geom, tolerance) |
| simplifyPreserveTopology| ST_SimplifyPreserveTopology | "simplified" version of the given geometry using the Douglas-Peucker algorithm | ST_SimplifyPreserveTopology( geom, tolerance) |
| removeRepeatedPoints| ST_RemoveRepeatedPoints | version of the given geometry with duplicated points removed | ST_RemoveRepeatedPoints( geom) |
| difference | ST_Difference | part of geometry A that does not intersect with geometry B | ST_Difference(geomA, geomB) |
| symDifference | ST_SymDifference | portions of A and B that do not intersect | ST_SymDifference(geomA, geomB)|
| intersection | ST_Intersection | the shared portion of geomA and geomB | ST_Intersection(geomA, geomB) |
| sharedPaths | ST_SharedPaths | collection of shared paths by the two input linestrings/multilinestrings | ST_SharedPaths(line1, line2) |
| split | ST_Split | collection of geometries resulting by splitting a geometry | ST_Split(geomA, bladeGeomB) |
| minBoundingCircle | ST_MinimumBoundingCircle| smallest circle polygon that can fully contain a geometry | ST_MinimumBoundingCircle( geom, num_segs_per_qt_circ)|
| buffer | ST_Buffer | a geometry that all its points is less than or equal to distance from the geometry | ST_Buffer(geom, radius[, bufferStyles]) |
| multi | ST_Multi | a geometry as a MULTI* geometry | ST_Multi(geom) |
| lineMerge | ST_LineMerge | lineString(s) formed by sewing together a MULTILINESTRING | ST_LineMerge(geom) |
| collectionExtract | ST_CollectionExtract | a (multi)geometry consisting only of elements of the specified type from the geometry | ST_CollectionExtract( geom, type) |
| collectionHomogenize| ST_CollectionHomogenize | "simplest" representation of the geometry collection | ST_CollectionHomogenize( geom)|
| addPoint | ST_AddPoint | Adds a point to a LineString before point [position] | ST_AddPoint(lineGeom, point, position) |
| setPoint | ST_SetPoint | Replace point N of linestring with given point | ST_SetPoint(lineGeom, position, point) |
| removePoint | ST_RemovePoint | Removes point from a linestring | ST_RemovePoint(lineGeom, offset) |
| reverse | ST_Reverse | the geometry with vertex order reversed | ST_Reverse(geom) |
| scale | ST_Scale | Scales the geometry to a new size by multiplying the ordinates with the parameters | ST_Scale(geom, xfactor, yfactor[, zfactor]) |
| segmentize | ST_Segmentize | geometry having no segment longer than the given distance from the geometry | ST_Segmentize( geom, maxLength) |
| snap | ST_Snap | Snap segments and vertices of input geometry to vertices of a reference geometry | ST_Snap(geom, refGeom, tolerance) |
| translate | ST_Translate | Translates the geometry to a new location using the numeric parameters as offsets | ST_Translate(geom, deltax, deltay[, deltaz]) |
| timcharper/slick-pg | core/src/main/scala/com/github/tminglei/slickpg/geom/README.md | Markdown | bsd-2-clause | 19,211 |
# Homebrew formula for DromeAudio, a small C++ audio manipulation and
# playback library.
class Dromeaudio < Formula
  desc "Small C++ audio manipulation and playback library"
  homepage "https://github.com/joshb/dromeaudio/"
  url "https://github.com/joshb/DromeAudio/archive/v0.3.0.tar.gz"
  sha256 "d226fa3f16d8a41aeea2d0a32178ca15519aebfa109bc6eee36669fa7f7c6b83"
  head "https://github.com/joshb/dromeaudio.git"

  # Pre-built binary packages (bottles) for the supported macOS versions.
  bottle do
    cellar :any_skip_relocation
    rebuild 1
    sha256 "17e070192156e0e50f983a857110b1b9efcb5476dca7047adcdd151f9b14b81e" => :high_sierra
    sha256 "6a8617ee09ea859c079db275005a89d38738e497f07383ec2ba96b8df5c657f7" => :sierra
    sha256 "48f99a1a83ddf9d7ab3a3e6823a5bb715c8f781ad014727995ad8b8a8fc212bc" => :el_capitan
    sha256 "2d8165381db24b35e50cf29e6c745e05149dd2e00e8f1d0c61133a45355c3dc6" => :yosemite
  end

  # CMake is required only at build time, not at runtime.
  depends_on "cmake" => :build

  def install
    # install FindDromeAudio.cmake under share/cmake/Modules/
    inreplace "share/CMakeLists.txt", "${CMAKE_ROOT}", "#{share}/cmake"
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # Smoke test: run the installed player binary against a bundled fixture.
    system "#{bin}/DromeAudioPlayer", test_fixtures("test.mp3")
  end
end
| robohack/homebrew-core | Formula/dromeaudio.rb | Ruby | bsd-2-clause | 1,102 |
<!doctype html>
<html lang="en" ng-app="docsApp" ng-strict-di ng-controller="DocsController">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="Description"
content="AngularJS is what HTML would have been, had it been designed for building web-apps.
Declarative templates with data-binding, MVC, dependency injection and great
testability story all implemented with pure client-side JavaScript!">
<meta name="fragment" content="!">
<title ng-bind-template="AngularJS: {{ currentArea.name }}: {{ currentPage.name || 'Error: Page not found'}}">AngularJS</title>
<script type="text/javascript">
// dynamically add base tag as well as css and javascript files.
// we can't add css/js the usual way, because some browsers (FF) eagerly prefetch resources
// before the base attribute is added, causing 404 and terribly slow loading of the docs app.
(function() {
var indexFile = (location.pathname.match(/\/(index[^\.]*\.html)/) || ['', ''])[1],
rUrl = /(#!\/|api|guide|misc|tutorial|error|index[^\.]*\.html).*$/,
baseUrl = location.href.replace(rUrl, indexFile),
production = location.hostname === 'docs.angularjs.org',
headEl = document.getElementsByTagName('head')[0],
sync = true;
addTag('base', {href: baseUrl});
addTag('link', {rel: 'stylesheet', href: 'components/bootstrap-3.1.1/css/bootstrap.min.css', type: 'text/css'});
addTag('link', {rel: 'stylesheet', href: 'components/open-sans-fontface-1.0.4/open-sans.css', type: 'text/css'});
addTag('link', {rel: 'stylesheet', href: 'css/prettify-theme.css', type: 'text/css'});
addTag('link', {rel: 'stylesheet', href: 'css/docs.css', type: 'text/css'});
addTag('link', {rel: 'stylesheet', href: 'css/animations.css', type: 'text/css'});
addTag('script', {src: 'components/jquery-2.1.1/jquery.js' }, sync);
addTag('script', {src: '../angular.min.js' }, sync);
addTag('script', {src: '../angular-resource.min.js' }, sync);
addTag('script', {src: '../angular-route.min.js' }, sync);
addTag('script', {src: '../angular-cookies.min.js' }, sync);
addTag('script', {src: '../angular-sanitize.min.js' }, sync);
addTag('script', {src: '../angular-touch.min.js' }, sync);
addTag('script', {src: '../angular-animate.min.js' }, sync);
addTag('script', {src: 'components/marked-0.3.3/lib/marked.js' }, sync);
addTag('script', {src: 'js/angular-bootstrap/dropdown-toggle.min.js' }, sync);
addTag('script', {src: 'components/lunr.js-0.4.2/lunr.min.js' }, sync);
addTag('script', {src: 'components/google-code-prettify-1.0.1/src/prettify.js' }, sync);
addTag('script', {src: 'components/google-code-prettify-1.0.1/src/lang-css.js' }, sync);
addTag('script', {src: 'js/versions-data.js' }, sync);
addTag('script', {src: 'js/pages-data.js' }, sync);
addTag('script', {src: 'js/nav-data.js' }, sync);
addTag('script', {src: 'js/docs.min.js' }, sync);
function addTag(name, attributes, sync) {
var el = document.createElement(name),
attrName;
for (attrName in attributes) {
el.setAttribute(attrName, attributes[attrName]);
}
sync ? document.write(outerHTML(el)) : headEl.appendChild(el);
}
function outerHTML(node){
// if IE, Chrome take the internal method otherwise build one
return node.outerHTML || (
function(n){
var div = document.createElement('div'), h;
div.appendChild(n);
h = div.innerHTML;
div = null;
return h;
})(node);
}
})();
// GA asynchronous tracker
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-8594346-3']);
_gaq.push(['_setDomainName', '.angularjs.org']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body>
<div id="wrapper">
<header scroll-y-offset-element class="header header-fixed">
<section class="navbar navbar-inverse docs-navbar-primary" ng-controller="DocsSearchCtrl">
<div class="container">
<div class="row">
<div class="col-md-9 header-branding">
<a class="brand navbar-brand" href="http://angularjs.org">
<img width="117" height="30" class="logo" alt="Link to Angular JS Homepage" ng-src="img/angularjs-for-header-only.svg">
</a>
<ul class="nav navbar-nav">
<li class="divider-vertical"></li>
<li><a href="http://angularjs.org"><i class="icon-home icon-white"></i> Home</a></li>
<li class="divider-vertical"></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-eye-open icon-white"></i> Learn <b class="caret"></b>
</a>
<ul class="dropdown-menu">
<li class="disabled"><a href="http://angularjs.org/">Why AngularJS?</a></li>
<li><a href="http://www.youtube.com/user/angularjs">Watch</a></li>
<li><a href="tutorial">Tutorial</a></li>
<li><a href="https://www.madewithangular.com/">Case Studies</a></li>
<li><a href="https://github.com/angular/angular-seed">Seed App project template</a></li>
<li><a href="misc/faq">FAQ</a></li>
</ul>
</li>
<li class="divider-vertical"></li>
<li class="dropdown active">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-book icon-white"></i> Develop <b class="caret"></b>
</a>
<ul class="dropdown-menu">
<li><a href="tutorial">Tutorial</a></li>
<li><a href="guide">Developer Guide</a></li>
<li><a href="api">API Reference</a></li>
<li><a href="error">Error Reference</a></li>
<li><a href="misc/contribute">Contribute</a></li>
<li><a href="http://code.angularjs.org/">Download</a></li>
</ul>
</li>
<li class="divider-vertical"></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-comment icon-white"></i> Discuss <b class="caret"></b>
</a>
<ul class="dropdown-menu">
<li><a href="http://blog.angularjs.org">Blog</a></li>
<li><a href="http://groups.google.com/group/angular">Mailing List</a></li>
<li><a href="http://webchat.freenode.net/?channels=angularjs&uio=d4">Chat Room</a></li>
<li class="divider"></li>
<li><a href="https://twitter.com/#!/angularjs">Twitter</a></li>
<li><a href="https://plus.google.com/110323587230527980117">Google+</a></li>
<li class="divider"></li>
<li><a href="https://github.com/angular/angular.js">GitHub</a></li>
<li><a href="https://github.com/angular/angular.js/issues">Issue Tracker</a></li>
</ul>
</li>
<li class="divider-vertical"></li>
</ul>
</div>
<form ng-class="{focus:focus}" class="navbar-search col-md-3 docs-search" ng-submit="submit()">
<span class="glyphicon glyphicon-search search-icon"></span>
<input type="text"
name="as_q"
class="search-query"
placeholder="Click or press / to search"
ng-focus="focus=true"
ng-blur="focus=false"
ng-change="search(q)"
ng-model="q"
docs-search-input
autocomplete="off" />
</form>
</div>
</div>
<div class="search-results-container" ng-show="hasResults">
<div class="container">
<div class="search-results-frame">
<div ng-repeat="(key, value) in results" class="search-results-group" ng-class="colClassName + ' col-group-' + key">
<h4 class="search-results-group-heading">{{ key }}</h4>
<div class="search-results">
<div ng-repeat="item in value" class="search-result">
                  <a ng-click="hideResults()" ng-href="{{ item.path }}">{{ item.name }}</a>
</div>
</div>
</div>
</div>
<a href="" ng-click="hideResults()" class="search-close">
<span class="glyphicon glyphicon-remove search-close-icon"></span> Close
</a>
</div>
</div>
</section>
<section class="sup-header">
<div class="container main-grid main-header-grid">
<div class="grid-left">
<div ng-controller="DocsVersionsCtrl" class="picker version-picker">
<select ng-options="v as ('v' + v.version + (v.isSnapshot ? ' (snapshot)' : '')) group by getGroupName(v) for v in docs_versions"
ng-model="docs_version"
ng-change="jumpToDocsVersion(docs_version)"
class="docs-version-jump">
</select>
</div>
</div>
<div class="grid-right">
<ul class="nav-breadcrumb">
<li ng-repeat="crumb in breadcrumb" class="nav-breadcrumb-entry naked-list">
<span class="divider"> /</span>
<a ng-href="{{crumb.url}}">{{crumb.name}}</a>
</li>
</ul>
</div>
</div>
</section>
</header>
<section role="main" class="container main-body">
<div class="main-grid main-body-grid">
<div class="grid-left">
<a class="btn toc-toggle visible-xs" ng-click="toc=!toc">Show / Hide Table of Contents</a>
<div class="side-navigation" ng-show="toc==true">
<ul class="nav-list naked-list">
<li ng-repeat="navGroup in currentArea.navGroups track by navGroup.name" class="nav-index-group">
<a href="{{ navGroup.href }}" ng-class="navClass(navGroup)" class="nav-index-group-heading">{{ navGroup.name }}</a>
<ul class="aside-nav">
<li ng-repeat="navItem in navGroup.navItems" ng-class="navClass(navItem)" class="nav-index-listing">
<a ng-if="navItem.extra.href" ng-class="navClass(navItem.extra)" href="{{navItem.extra.href}}">
{{navItem.extra.text}}<i ng-if="navItem.extra.icon" class="icon-{{navItem.extra.icon}}"></i>
</a>
<a tabindex="2" ng-class="linkClass(navItem)" href="{{navItem.href}}">{{navItem.name}}</a>
</li>
</ul>
</li>
</ul>
<a href="" ng-click="toc=false" class="toc-close visible-xs">
<span class="glyphicon glyphicon-remove toc-close-icon"></span> Close
</a>
</div>
</div>
<div class="grid-right">
<div id="loading" ng-show="loading">Loading...</div>
<div ng-hide="loading" ng-include="partialPath" autoscroll></div>
</div>
</div>
</section>
<footer class="footer">
<div class="container">
<p class="pull-right"><a back-to-top>Back to top</a></p>
<p>
Super-powered by Google ©2010-2015
( <a id="version"
ng-href="https://github.com/angular/angular.js/blob/master/CHANGELOG.md#{{versionNumber}}"
ng-bind-template="v{{version}}" title="Changelog of this version of Angular JS">
</a>
)
</p>
<p>
Code licensed under the
<a href="https://github.com/angular/angular.js/blob/master/LICENSE" target="_blank">The
MIT License</a>. Documentation licensed under <a
href="http://creativecommons.org/licenses/by/3.0/">CC BY 3.0</a>.
</p>
</div>
</footer>
</div>
</body>
</html>
| bborbe/portfolio | files/libs/angularjs/1.4.12/docs/index-jquery.html | HTML | bsd-2-clause | 12,860 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SKIA_EXT_PLATFORM_DEVICE_H_
#define SKIA_EXT_PLATFORM_DEVICE_H_
#include "build/build_config.h"
#if defined(OS_WIN)
#include <windows.h>
#include <vector>
#endif
#include "skia/ext/platform_surface.h"
#include "third_party/skia/include/core/SkBitmapDevice.h"
#include "third_party/skia/include/core/SkTypes.h"
class SkMatrix;
class SkPath;
class SkRegion;
namespace skia {
class PlatformDevice;
// The following routines provide accessor points for the functionality
// exported by the various PlatformDevice ports.
// All calls to PlatformDevice::* should be routed through these
// helper functions.
// Bind a PlatformDevice instance, |platform_device| to |device|. Subsequent
// calls to the functions exported below will forward the request to the
// corresponding method on the bound PlatformDevice instance. If no
// PlatformDevice has been bound to the SkBaseDevice passed, then the
// routines are NOPS.
SK_API void SetPlatformDevice(SkBaseDevice* device,
PlatformDevice* platform_device);
SK_API PlatformDevice* GetPlatformDevice(SkBaseDevice* device);
// A SkBitmapDevice is basically a wrapper around SkBitmap that provides a
// surface for SkCanvas to draw into. PlatformDevice provides a surface
// Windows can also write to. It also provides functionality to play well
// with GDI drawing functions. This class is abstract and must be subclassed.
// It provides the basic interface to implement it either with or without
// a bitmap backend.
//
// PlatformDevice provides an interface which sub-classes of SkBaseDevice can
// also provide to allow for drawing by the native platform into the device.
// TODO(robertphillips): Once the bitmap-specific entry points are removed
// from SkBaseDevice it might make sense for PlatformDevice to be derived
// from it.
class SK_API PlatformDevice {
 public:
  virtual ~PlatformDevice() {}

#if defined(OS_MACOSX)
  // The CGContext that corresponds to the bitmap, used for CoreGraphics
  // operations drawing into the bitmap. This is possibly heavyweight, so it
  // should exist only during one pass of rendering.
  virtual CGContextRef GetBitmapContext() = 0;
#endif

  // The DC that corresponds to the bitmap, used for GDI operations drawing
  // into the bitmap. This is possibly heavyweight, so it should exist
  // only during one pass of rendering. Pair every call with EndPlatformPaint().
  virtual PlatformSurface BeginPlatformPaint();

  // Finish a previous call to BeginPlatformPaint().
  virtual void EndPlatformPaint();

  // Returns true if GDI operations can be used for drawing into the bitmap.
  virtual bool SupportsPlatformPaint();

#if defined(OS_WIN)
  // Loads a SkPath into the GDI context. The path can thereafter be used for
  // clipping or as a stroke. Returns false if the path failed to be loaded.
  static bool LoadPathToDC(HDC context, const SkPath& path);

  // Loads a SkRegion into the GDI context, applying |transformation| to the
  // region first.
  static void LoadClippingRegionToDC(HDC context, const SkRegion& region,
                                     const SkMatrix& transformation);

  // Draws to the given screen DC, if the bitmap DC doesn't exist, this will
  // temporarily create it. However, if you have created the bitmap DC, it will
  // be more efficient if you don't free it until after this call so it doesn't
  // have to be created twice. If src_rect is null, then the entirety of the
  // source device will be copied.
  virtual void DrawToHDC(HDC, int x, int y, const RECT* src_rect);
#endif

 protected:
#if defined(OS_WIN)
  // Arrays must be inside structures.
  struct CubicPoints {
    SkPoint p[4];
  };
  typedef std::vector<CubicPoints> CubicPath;
  typedef std::vector<CubicPath> CubicPaths;

  // Loads the specified Skia transform into the device context, excluding
  // perspective (which GDI doesn't support).
  static void LoadTransformToDC(HDC dc, const SkMatrix& matrix);

  // Transforms SkPath's paths into a series of cubic paths.
  static bool SkPathToCubicPaths(CubicPaths* paths, const SkPath& skpath);
#endif
};
} // namespace skia
#endif // SKIA_EXT_PLATFORM_DEVICE_H_
| js0701/chromium-crosswalk | skia/ext/platform_device.h | C | bsd-3-clause | 4,260 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/common/trace_util.h"
#include "content/public/common/pseudonymization_util.h"
namespace extensions {
// Writes the extension id into the ChromeExtensionId trace proto, in both
// its raw and pseudonymized forms.
void ExtensionIdForTracing::WriteIntoTrace(
    perfetto::TracedProto<perfetto::protos::pbzero::ChromeExtensionId> proto)
    const {
  const auto pseudonymized_id =
      content::PseudonymizationUtil::PseudonymizeString(extension_id_);
  proto->set_extension_id(extension_id_);
  proto->set_pseudonymized_extension_id(pseudonymized_id);
}
| chromium/chromium | extensions/common/trace_util.cc | C++ | bsd-3-clause | 611 |
/*
Copyright (C) 2014, The University of Texas at Austin
This file is part of libflame and is available under the 3-Clause
BSD license, which can be found in the LICENSE file at the top-level
directory, or at http://opensource.org/licenses/BSD-3-Clause
*/
#include "FLAME.h"
/*
  Reference implementation of triangular solve with multiple right-hand
  sides (TRSM); simply delegates to the external (BLAS-backed) routine.

  Fix: propagate the status code returned by FLA_Trsm_external() instead
  of discarding it and unconditionally returning 0, so callers can observe
  failures reported by the external routine. The return type (FLA_Error)
  is unchanged, so this is backward-compatible.
*/
FLA_Error REF_Trsm( FLA_Side side, FLA_Uplo uploA, FLA_Trans transA, FLA_Diag diagA, FLA_Obj alpha, FLA_Obj A, FLA_Obj B )
{
  return FLA_Trsm_external( side, uploA, transA, diagA, alpha, A, B );
}
| dxander/libflame | src/blas/3/trsm/lut/flamec/test/REF_Trsm_lut.c | C | bsd-3-clause | 500 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GOOGLE_APIS_GAIA_FAKE_OAUTH2_TOKEN_SERVICE_H_
#define GOOGLE_APIS_GAIA_FAKE_OAUTH2_TOKEN_SERVICE_H_
#include <set>
#include <string>
#include "base/compiler_specific.h"
#include "base/memory/weak_ptr.h"
#include "google_apis/gaia/oauth2_token_service.h"
namespace net {
class URLRequestContextGetter;
}
// Do-nothing implementation of OAuth2TokenService.
// Do-nothing implementation of OAuth2TokenService for tests.  It keeps an
// in-memory set of account ids and records token-fetch requests without
// issuing any network traffic; tests complete pending requests explicitly
// via IssueAllTokensForAccount().
class FakeOAuth2TokenService : public OAuth2TokenService {
 public:
  FakeOAuth2TokenService();
  virtual ~FakeOAuth2TokenService();
  // Returns the ids added through AddAccount() (order unspecified).
  virtual std::vector<std::string> GetAccounts() OVERRIDE;
  // Registers/unregisters an account whose refresh token is "available".
  void AddAccount(const std::string& account_id);
  void RemoveAccount(const std::string& account_id);
  // Helper routines to issue tokens for pending requests.
  // Completes every pending request for |account_id| with |access_token|.
  void IssueAllTokensForAccount(const std::string& account_id,
                                const std::string& access_token,
                                const base::Time& expiration);
  // |request_context| is not owned; the caller keeps it alive.
  void set_request_context(net::URLRequestContextGetter* request_context) {
    request_context_ = request_context;
  }
 protected:
  // OAuth2TokenService overrides.
  // Records the request in |pending_requests_| instead of fetching.
  virtual void FetchOAuth2Token(RequestImpl* request,
                                const std::string& account_id,
                                net::URLRequestContextGetter* getter,
                                const std::string& client_id,
                                const std::string& client_secret,
                                const ScopeSet& scopes) OVERRIDE;
  // No-op: there is no real token cache to invalidate.
  virtual void InvalidateOAuth2Token(const std::string& account_id,
                                     const std::string& client_id,
                                     const ScopeSet& scopes,
                                     const std::string& access_token) OVERRIDE;
  // True iff |account_id| was added through AddAccount().
  virtual bool RefreshTokenIsAvailable(const std::string& account_id) const
      OVERRIDE;
 private:
  // One outstanding FetchOAuth2Token() call; |request| may be gone by the
  // time the test issues tokens, hence the WeakPtr.
  struct PendingRequest {
    PendingRequest();
    ~PendingRequest();
    std::string account_id;
    std::string client_id;
    std::string client_secret;
    ScopeSet scopes;
    base::WeakPtr<RequestImpl> request;
  };
  // OAuth2TokenService overrides.
  virtual net::URLRequestContextGetter* GetRequestContext() OVERRIDE;
  virtual OAuth2AccessTokenFetcher* CreateAccessTokenFetcher(
      const std::string& account_id,
      net::URLRequestContextGetter* getter,
      OAuth2AccessTokenConsumer* consumer) OVERRIDE;
  std::set<std::string> account_ids_;
  std::vector<PendingRequest> pending_requests_;
  net::URLRequestContextGetter* request_context_;  // weak
  DISALLOW_COPY_AND_ASSIGN(FakeOAuth2TokenService);
};
#endif // GOOGLE_APIS_GAIA_FAKE_OAUTH2_TOKEN_SERVICE_H_
| boundarydevices/android_external_chromium_org | google_apis/gaia/fake_oauth2_token_service.h | C | bsd-3-clause | 2,801 |
/*********************************************************************
* *
* ConfigFemtoAnalysis.C - configuration macro for the femtoscopic *
* analysis, meant as a QA process for two-particle effects *
* *
* Author: Adam Kisiel ([email protected]) *
*********************************************************************
* K+K- in [email protected] *
* Update: [email protected] *
* KpmHBT16 0010 03-Mar-2016 *
* TOF PID from 0.45 GeV/c *
* SetNsigmaTPC400_450(1.0) and other set to standard *
* no cut on mom and no on gamma *
* no cut on phi* eta (set to 0) *
* 10 MeV/q-bin in CF *
* 20 nov 2017 new AliFemtoModelCorrFctnKK *
*********************************************************************/
#if !defined(__CINT__) || defined(__MAKECINT_)
#include "AliFemtoManager.h"
#include "AliFemtoEventReaderESDChain.h"
#include "AliFemtoEventReaderESDChainKine.h"
#include "AliFemtoEventReaderAODChain.h"
#include "AliFemtoSimpleAnalysis.h"
#include "AliFemtoBasicEventCut.h"
#include "AliFemtoESDTrackCut.h"
//#include "AliFemtoKKTrackCut.h"
#include "AliFemtoKpm45TrackCut.h"
#include "AliFemtoCorrFctn.h"
#include "AliFemtoCutMonitorParticleYPt.h"
#include "AliFemtoCutMonitorParticleVertPos.h"
#include "AliFemtoCutMonitorParticleMomRes.h"
#include "AliFemtoCutMonitorParticlePID.h"
#include "AliFemtoCutMonitorEventMult.h"
#include "AliFemtoCutMonitorEventVertex.h"
#include "AliFemtoShareQualityTPCEntranceSepPairCut.h"
#include "AliFemtoPairCutAntiGammaAlpha.h"
#include "AliFemtoPairCutRadialDistance.h"
#include "AliFemtoPairCutRadialDistanceKK.h"
#include "AliFemtoQinvCorrFctn.h"
#include "AliFemtoShareQualityCorrFctn.h"
#include "AliFemtoTPCInnerCorrFctn.h"
#include "AliFemtoVertexMultAnalysis.h"
#include "AliFemtoCorrFctn3DSpherical.h"
#include "AliFemtoChi2CorrFctn.h"
#include "AliFemtoCorrFctnTPCNcls.h"
#include "AliFemtoBPLCMS3DCorrFctn.h"
#include "AliFemtoCorrFctn3DLCMSSym.h"
#include "AliFemtoModelBPLCMSCorrFctn.h"
#include "AliFemtoModelCorrFctn3DSpherical.h"
#include "AliFemtoModelGausLCMSFreezeOutGenerator.h"
#include "AliFemtoModelGausRinvFreezeOutGenerator.h"
#include "AliFemtoModelManager.h"
#include "AliFemtoModelWeightGeneratorBasic.h"
#include "AliFemtoModelWeightGeneratorLednicky.h"
#include "AliFemtoCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnSource.h"
#include "AliFemtoModelCorrFctnKK.h"
#include "AliFemtoKTPairCut.h"
//... double ratio ...
#include "AliFemtoCorrFctnNonIdDR.h"
#endif
//________________________________________________________________________
// Configures the K+K- femtoscopic analysis for Pb-Pb @ 2.76 TeV (MC, PDG-coded
// kaons): builds the AOD reader, the Lednicky weight generator (FSI off,
// no phi), per-charge track cuts with TPC/TOF n-sigma PID, the pair cut with
// phi*/eta separation, and per-kT Qinv + model correlation functions.
// Returns the fully wired AliFemtoManager.
AliFemtoManager* ConfigFemtoAnalysis() {
  double PionMass = 0.13956995;
  double KaonMass = 0.493677;
  const int cMu=1;
  //const int cKt=2;
  const int cKt=8;
  //-------Single track cuts------------------------------------------------->
  double DCAxy=2.4;//cm // our standard is 0.20 cm; super narrow was 0.015cm
  double DCAz =3.0;//cm // our standard is 0.15 cm;
  //-------Single track cuts-------------------------------------------------<
  //=======Double track cuts=================================================>
  //Dhevan's : PhiStarDifferenceMinimum=0.06; EtaDifferenceMinimum=0.02;
  double PhiStarDifferenceMinimum=0.;//0.02; //[radian]
  double EtaDifferenceMinimum=0.;//0.04; //[radian]
  //=======Double track cuts=================================================<
  // Switches for QA analyses
  int runmults[cMu] = {1};//, 0, 0, 0};
  int multbins[cMu+1] = {0, 900};//, 300, 500, 900};
  int runch[2] = {1, 0};//K+-
  const char *chrgs[2] = { "Kp", "Km"};
  int runktdep = 1;
  //double ktrng[cKt+1] = {0.2, 0.5, 1.0};//orig 2 KT
  //double ktrng[cKt+1] = {0.2, 0.5, 1.5};// 2 KT
  double ktrng[cKt+1] = {0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 1.0, 1.3};// 8 KT
  //double ktrng[cKt+1] = {0.2, 1.0};
  int run3d = 0;
  int runshlcms = 0;
  int runtype = 2; // Types 0 - global, 1 - ITS only, 2 - TPC Inner
  int isrealdata = 1;
  //	AliFemtoEventReaderESDChainKine* Reader=new AliFemtoEventReaderESDChainKine();
  //	Reader->SetConstrained(true);
  //	Reader->SetUseTPCOnly(false);
  double shqmax;
  double shqmaxSH;
  //int nbinssh = 200;// 10 MeV/q-bin //orig 100, 20 MeV per bin
  int nbinssh = 400;// 5 MeV/q-bin //orig 100, 20 MeV per bin
  if (runshlcms) shqmaxSH = 0.25;
  shqmax = 2.0;//K+-

  AliFemtoEventReaderAODChain *Reader = new AliFemtoEventReaderAODChain();
  Reader->SetFilterBit(7);
  Reader->SetCentralityPreSelection(0.0001, 900);
  Reader->SetReadMC(kTRUE);
  Reader->SetDCAglobalTrack(kTRUE);// check this!!!
  //
  Reader->SetKaonAnalysis(kTRUE);// new set to choose Kaon by PDG code (7Nov2016)
  /*AliFemtoModelGausLCMSFreezeOutGenerator *tFreeze = new AliFemtoModelGausLCMSFreezeOutGenerator();
  tFreeze->SetSizeOut(3.0*TMath::Sqrt(2.0));
  tFreeze->SetSizeSide(3.0*TMath::Sqrt(2.0));
  tFreeze->SetSizeLong(3.0*TMath::Sqrt(2.0));*/
  //Generate freeze-out coordinates as a 3D gaussian sphere in PRF
  AliFemtoModelGausRinvFreezeOutGenerator *tFreeze = new AliFemtoModelGausRinvFreezeOutGenerator();
  //tFreeze->SetSizeInv(3.0*TMath::Sqrt(2.0));//r_0=3fm it should be time to sqrt(2) !!!!KM
  //tFreeze->SetSizeInv(4.0*TMath::Sqrt(2.0));//r_0=4fm it should be time to sqrt(2) !!!!KM
  tFreeze->SetSizeInv(5.0*TMath::Sqrt(2.0));//r_0=5fm it should be time to sqrt(2) !!!!KM
  //tFreeze->SetSizeInv(6.0*TMath::Sqrt(2.0));//r_0=6fm it should be time to sqrt(2) !!!!KM
  //Feb 2018: Switch off all FSI and phi-meson
  AliFemtoModelWeightGeneratorLednicky *tWeight = new AliFemtoModelWeightGeneratorLednicky();
  tWeight->SetPairType(AliFemtoModelWeightGenerator::KaonPlusKaonMinus());
  tWeight->SetCoulOff();
  //tWeight->SetCoulOn();
  tWeight->SetQuantumOff();
  tWeight->SetStrongOff();
  //tWeight->SetStrongOn();
  tWeight->Set3BodyOff();
  //tWeight->SetKpKmModelType(14,1);//(Martin-f0, Achasov2-a0,0->phi off;1->phi on)
  //No phi:
  tWeight->SetKpKmModelType(14,0);//(Martin-f0, Achasov2-a0,0->phi off;1->phi on)
  AliFemtoModelManager *tModelManager = new AliFemtoModelManager();
  tModelManager->AcceptFreezeOutGenerator(tFreeze);
  tModelManager->AcceptWeightGenerator(tWeight);
  tModelManager->CreateCopyHiddenInfo(kTRUE);
  AliFemtoManager* Manager=new AliFemtoManager();
  Manager->SetEventReader(Reader);
  AliFemtoVertexMultAnalysis    *anetaphitpc[20];
  AliFemtoBasicEventCut         *mecetaphitpc[20];
  AliFemtoCutMonitorEventMult   *cutPassEvMetaphitpc[20];
  AliFemtoCutMonitorEventMult   *cutFailEvMetaphitpc[20];
  AliFemtoCutMonitorEventVertex *cutPassEvVetaphitpc[20];
  AliFemtoCutMonitorEventVertex *cutFailEvVetaphitpc[20];
  //AliFemtoKKTrackCut           *dtc1etaphitpc[20];
  //AliFemtoKKTrackCut           *dtc2etaphitpc[20];
  AliFemtoKpm45TrackCut         *dtc1etaphitpc[20];
  AliFemtoKpm45TrackCut         *dtc2etaphitpc[20];
  //	AliFemtoESDTrackCut           *dtc1etaphitpc[20];
  //	AliFemtoESDTrackCut           *dtc2etaphitpc[20];
  AliFemtoCutMonitorParticleYPt *cutPass1YPtetaphitpc[20];
  AliFemtoCutMonitorParticleYPt *cutFail1YPtetaphitpc[20];
  AliFemtoCutMonitorParticlePID *cutPass1PIDetaphitpc[20];
  AliFemtoCutMonitorParticlePID *cutFail1PIDetaphitpc[20];
  AliFemtoCutMonitorParticleYPt *cutPass2YPtetaphitpc[20];
  AliFemtoCutMonitorParticleYPt *cutFail2YPtetaphitpc[20];
  AliFemtoCutMonitorParticlePID *cutPass2PIDetaphitpc[20];
  AliFemtoCutMonitorParticlePID *cutFail2PIDetaphitpc[20];
  //AliFemtoPairCutAntiGammaAlpha *sqpcetaphitpc[20];//K+-//sept 2017
  //	AliFemtoShareQualityTPCEntranceSepPairCut *sqpcetaphitpc[20];
  //AliFemtoPairCutRadialDistance  *sqpcetaphitpc[20];//AliFemto dphi* cut
  //~~~
  AliFemtoPairCutRadialDistanceKK *sqpcetaphitpc[20];//Dhevan's dphi* cut//sept 2017
  //
  //AliFemtoCorrFctnDirectYlm     *cylmetaphitpc[20];
  AliFemtoCorrFctnDEtaDPhi      *cdedpetaphi[20*10];//20->20*10 due to kT
  //AliFemtoChi2CorrFctn          *cchiqinvetaphitpc[20];
  AliFemtoKTPairCut             *ktpcuts[20*8];
  //AliFemtoCorrFctnDirectYlm     *cylmkttpc[20*8];
  AliFemtoQinvCorrFctn          *cqinvkttpc[20*8];
  //AliFemtoCorrFctn3DLCMSSym     *cq3dlcmskttpc[20*8];
  AliFemtoCorrFctnTPCNcls       *cqinvnclstpc[20];
  AliFemtoShareQualityCorrFctn  *cqinvsqtpc[20*10];
  AliFemtoChi2CorrFctn          *cqinvchi2tpc[20];
  AliFemtoTPCInnerCorrFctn      *cqinvinnertpc[20*10];
  AliFemtoCorrFctnGammaMonitorAlpha  *cgamma[20*10];
  //... double ratio ...
  //  AliFemtoCorrFctnNonIdDR *cfdourat[20*10];
  AliFemtoModelCorrFctnKK  *cqinvkttpcmodel[20*8];
  //  AliFemtoCorrFctnGammaMonitorAlpha *cgamma[20*10];
  // *** Third QA task - HBT analysis with all pair cuts off, TPC only ***
  // *** Begin K+K- analysis ***
  int aniter = 0;
  for (int imult=0; imult<cMu/*4*/; imult++) {
    if (runmults[imult]) {
      for (int ichg=0; ichg<1/*K+-*/; ichg++) {//one loop
        if (runch[ichg]) {
          aniter = ichg*5+imult;
          //anetaphitpc[aniter] = new AliFemtoVertexMultAnalysis(4, -8.0, 8.0, 5, multbins[imult], multbins[imult+1]);//orig
          anetaphitpc[aniter] = new AliFemtoVertexMultAnalysis(1, -8.0, 8.0, 1, multbins[imult], multbins[imult+1]);
          anetaphitpc[aniter]->SetNumEventsToMix(30);//for MR
          //anetaphitpc[aniter]->SetNumEventsToMix(3);
          //test of SetNumEventsToMix
          //anetaphitpc[aniter]->SetNumEventsToMix(574);
          anetaphitpc[aniter]->SetMinSizePartCollection(1);
          mecetaphitpc[aniter] = new AliFemtoBasicEventCut();
          mecetaphitpc[aniter]->SetEventMult(0,100000);
          mecetaphitpc[aniter]->SetVertZPos(-8.0,8.0);
          //	mecetaphitpc->SetAcceptBadVertex(kTRUE);
          //Multiplicity---->
          cutPassEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutPass%stpcM%i", chrgs[ichg], imult));
          cutFailEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutFail%stpcM%i", chrgs[ichg], imult));
          mecetaphitpc[aniter]->AddCutMonitor(cutPassEvMetaphitpc[aniter], cutFailEvMetaphitpc[aniter]);
          //Multiplicity----<
          //Vertex---------->
          cutPassEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutPass%stpcM%i", chrgs[ichg], imult));
          cutFailEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutFail%stpcM%i", chrgs[ichg], imult));
          mecetaphitpc[aniter]->AddCutMonitor(cutPassEvVetaphitpc[aniter], cutFailEvVetaphitpc[aniter]);
          //Vertex----------<
          //dtc1etaphitpc[aniter] = new AliFemtoKKTrackCut();
          //dtc2etaphitpc[aniter] = new AliFemtoKKTrackCut();//K+-
          dtc1etaphitpc[aniter] = new AliFemtoKpm45TrackCut();
          dtc2etaphitpc[aniter] = new AliFemtoKpm45TrackCut();//K+-
          //--- K+- --->
          dtc1etaphitpc[aniter]->SetCharge(1.0);
          dtc2etaphitpc[aniter]->SetCharge(-1.0);
          //--- K+- ---<
          dtc1etaphitpc[aniter]->SetPt(0.14,1.5); //0.14,1.5); //--- K+-
          dtc1etaphitpc[aniter]->SetEta(-0.8,0.8);//--- K+-
          dtc2etaphitpc[aniter]->SetPt(0.14,1.5); //0.14,1.5); //--- K+-
          dtc2etaphitpc[aniter]->SetEta(-0.8,0.8); //--- K+-
          //--- Choose Kaon as Most Probable (switch on all cuts: TPC, TOF)---
          dtc1etaphitpc[aniter]->SetMass(KaonMass);
          dtc1etaphitpc[aniter]->SetMostProbableKaon();
          dtc2etaphitpc[aniter]->SetMass(KaonMass);
          dtc2etaphitpc[aniter]->SetMostProbableKaon();
          //------------------- November 2013 -----------------------------------<
          // new cuts to remove electron (do not take into analysis if 400<p<500)
          dtc1etaphitpc[aniter]->SetNsigmaTPCle250(2.0);
          dtc1etaphitpc[aniter]->SetNsigmaTPC250_400(2.0);
          dtc1etaphitpc[aniter]->SetNsigmaTPC400_450(1.0);//cut on e+e- orig(2.0);
          dtc1etaphitpc[aniter]->SetNsigmaTPC450_500(2.0);//cut on e+e- orig(2.0);
          dtc1etaphitpc[aniter]->SetNsigmaTPCge500(3.0);
          // new cuts are stronger, better separation of pion in TOF
          // when momentum is greater then 800 MeV/c
          dtc1etaphitpc[aniter]->SetNsigmaTOF500_800(2.0);
          dtc1etaphitpc[aniter]->SetNsigmaTOF800_1000(1.5);
          dtc1etaphitpc[aniter]->SetNsigmaTOFge1000(1.0);
          //K+ contour (default p0=9999, p1=0, p2=0):
          //dtc1etaphitpc[aniter]->SetKaonTCPdEdxContourP0( 196.25);//december 2014
          //dtc1etaphitpc[aniter]->SetKaonTCPdEdxContourP1(-420.00);//december 2014
          //dtc1etaphitpc[aniter]->SetKaonTCPdEdxContourP2( 300.00);//december 2014
          dtc2etaphitpc[aniter]->SetNsigmaTPCle250(2.0);
          dtc2etaphitpc[aniter]->SetNsigmaTPC250_400(2.0);
          dtc2etaphitpc[aniter]->SetNsigmaTPC400_450(1.0);//cut on e+e- orig(2.0);
          dtc2etaphitpc[aniter]->SetNsigmaTPC450_500(2.0);//cut on e+e- orig(2.0);
          dtc2etaphitpc[aniter]->SetNsigmaTPCge500(3.0);
          // new cuts are stronger, better separation of pion in TOF
          // when momentum is greater then 800 MeV/c
          dtc2etaphitpc[aniter]->SetNsigmaTOF500_800(2.0);
          dtc2etaphitpc[aniter]->SetNsigmaTOF800_1000(1.5);
          dtc2etaphitpc[aniter]->SetNsigmaTOFge1000(1.0);
          //K+ contour (default p0=9999, p1=0, p2=0):
          //dtc2etaphitpc[aniter]->SetKaonTCPdEdxContourP0( 196.25);//december 2014
          //dtc2etaphitpc[aniter]->SetKaonTCPdEdxContourP1(-420.00);//december 2014
          //dtc2etaphitpc[aniter]->SetKaonTCPdEdxContourP2( 300.00);//december 2014
          //------------------- November 2013 ----------------------------------->
          // *** Track quality cuts ***
          //K+
          dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCin);
          dtc1etaphitpc[aniter]->SetminTPCncls(80);
          dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
          dtc1etaphitpc[aniter]->SetLabel(kFALSE);
          dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
          dtc1etaphitpc[aniter]->SetMaxImpactXY(DCAxy);
          dtc1etaphitpc[aniter]->SetMaxImpactZ(DCAz);
          //K-
          dtc2etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCin);
          dtc2etaphitpc[aniter]->SetminTPCncls(80);
          dtc2etaphitpc[aniter]->SetRemoveKinks(kTRUE);
          dtc2etaphitpc[aniter]->SetLabel(kFALSE);
          dtc2etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
          dtc2etaphitpc[aniter]->SetMaxImpactXY(DCAxy);
          dtc2etaphitpc[aniter]->SetMaxImpactZ(DCAz);
          //K+
          cutPass1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutPass1%stpcM%i", chrgs[0], imult), 0.493677);
          cutFail1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutFail1%stpcM%i", chrgs[0], imult), 0.493677);
          dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1YPtetaphitpc[aniter], cutFail1YPtetaphitpc[aniter]);
          //K-
          cutPass2YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutPass2%stpcM%i", chrgs[1], imult), 0.493677);
          cutFail2YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutFail2%stpcM%i", chrgs[1], imult), 0.493677);
          dtc2etaphitpc[aniter]->AddCutMonitor(cutPass2YPtetaphitpc[aniter], cutFail2YPtetaphitpc[aniter]);
          //K+
          cutPass1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutPass1%stpcM%i", chrgs[0], imult),1);
          cutFail1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutFail1%stpcM%i", chrgs[0], imult),1);
          dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1PIDetaphitpc[aniter], cutFail1PIDetaphitpc[aniter]);
          //K-
          cutPass2PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutPass2%stpcM%i", chrgs[1], imult),1);
          cutFail2PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutFail2%stpcM%i", chrgs[1], imult),1);
          // FIX: these K- PID monitors were previously attached to dtc1 (the K+
          // cut), leaving the K- cut without PID monitoring; attach to dtc2.
          dtc2etaphitpc[aniter]->AddCutMonitor(cutPass2PIDetaphitpc[aniter], cutFail2PIDetaphitpc[aniter]);
          //	 AliFemtoCorrFctnGammaMonitorAlpha
          //	  sqpcetaphitpc[aniter] = new AliFemtoPairCutAntiGammaAlpha();//sept 2017
          //	  sqpcetaphitpc[aniter] = new AliFemtoShareQualityTPCEntranceSepPairCut();
          //if (ichg < 2) {
          //sqpcetaphitpc[aniter] = new AliFemtoPairCutRadialDistance();//AliFemto dphi* cut
          sqpcetaphitpc[aniter] = new AliFemtoPairCutRadialDistanceKK();  //Dhevan's dphi* cut
          //~~~   sqpcetaphitpc[aniter] = new AliFemtoPairCutAntiGamma();
          sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
          //e+e-e+e-e+e-e+e-e+e-e+e-e+e-e+e- Set cut on gumma conversion e+e-
          //sqpcetaphitpc[aniter]->SetMaxEEMinv(0.05);
          //sqpcetaphitpc[aniter]->SetMaxAlphaDiff(0.9998);
          //sqpcetaphitpc[aniter]->SetTPCEntranceSepMinimum(0.00001);
          //~~~   sqpcetaphitpc[aniter]->SetMaxEEMinv(0.0);
          //~~~   sqpcetaphitpc[aniter]->SetMaxThetaDiff(0.0);
          //~~~   sqpcetaphitpc[aniter]->SetTPCEntranceSepMinimum(0.0);
          //--------- km: eta-phi* Dhevan's custs ----------->>>>
          sqpcetaphitpc[aniter]->SetPhiStarDifferenceMinimum(PhiStarDifferenceMinimum);//sept 2017
          sqpcetaphitpc[aniter]->SetEtaDifferenceMinimum(EtaDifferenceMinimum); //sept 2017
          //sqpcetaphitpc[aniter]->SetMinimumRadius(0.8);//not need for AliFemtoPairCutRadialDistanceKK()
          anetaphitpc[aniter]->SetEventCut(mecetaphitpc[aniter]);
          anetaphitpc[aniter]->SetFirstParticleCut(dtc1etaphitpc[aniter]);//K+
          anetaphitpc[aniter]->SetSecondParticleCut(dtc2etaphitpc[aniter]);//K-
          anetaphitpc[aniter]->SetPairCut(sqpcetaphitpc[aniter]);
          //Qinv (without kT bins)-->
          //	  cqinvkttpc[aniter] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%i", chrgs[ichg], imult),nbinssh,0.0,shqmax);
          //	  anetaphitpc[aniter]->AddCorrFctn(cqinvkttpc[aniter]);
          //theoretical Qinv
          //	  cqinvkttpcmodel[aniter] = new AliFemtoModelCorrFctnKK(Form("cqinvModel%stpcM%i", chrgs[ichg], imult),nbinssh,0.0,shqmax);
          //	  cqinvkttpcmodel[aniter]->ConnectToManager(tModelManager);
          //	  anetaphitpc[aniter]->AddCorrFctn(cqinvkttpcmodel[aniter]);
          if (runktdep) {
            int ktm;
            for (int ikt=0; ikt<cKt/*8*/; ikt++) {
              ktm = aniter*cKt/*8*/ + ikt;
              ktpcuts[ktm] = new AliFemtoKTPairCut(ktrng[ikt], ktrng[ikt+1]);
              cqinvkttpc[ktm] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,(imult>6)?shqmax*2.5:shqmax);
              cqinvkttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);//add kT bins
              anetaphitpc[aniter]->AddCorrFctn(cqinvkttpc[ktm]);// add CF histos
              //~~~
              //cqinvsqtpc[ktm] = new AliFemtoShareQualityCorrFctn(Form("cqinvsq%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,shqmax);
              //cqinvsqtpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
              // anetaphitpc[aniter]->AddCorrFctn(cqinvsqtpc[ktm]);
              // model--------------
              cqinvkttpcmodel[ktm] = new AliFemtoModelCorrFctnKK(Form("cqinvModel%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,(imult>6)?shqmax*2.5:shqmax);
              cqinvkttpcmodel[ktm]->SetPairSelectionCut(ktpcuts[ktm]);//add kT bins
              cqinvkttpcmodel[ktm]->SetKaonPDG(kTRUE);//Special MC analysis for K selected by PDG code -->
              cqinvkttpcmodel[ktm]->ConnectToManager(tModelManager);
              anetaphitpc[aniter]->AddCorrFctn(cqinvkttpcmodel[ktm]);// add CF histos
              // FIX: index the gamma monitor by ktm (was aniter), so each kT bin
              // keeps its own pointer instead of overwriting it every iteration.
              cgamma[ktm] = new AliFemtoCorrFctnGammaMonitorAlpha(Form("cgammaM%ikT%i", imult, ikt),200,200);
              anetaphitpc[aniter]->AddCorrFctn(cgamma[ktm]);
              //... double ratio ...>
              /*
              cfdourat[ktm] = new
              AliFemtoCorrFctnNonIdDR(Form("cfKstr%stpcM%ikT%i", chrgs[ichg], imult, ikt), 60, 0, 0.3);//AliFemtoCorrFctnNonIdDR(char* title, const int& nbins, const float& QinvLo, const float& QinvHi)
              cfdourat[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
              anetaphitpc[aniter]->AddCorrFctn(cfdourat[ktm]);// add CF histos
              //... double ratio ...<
              */
              //if (run3d) {
              //cq3dlcmskttpc[ktm] = new AliFemtoCorrFctn3DLCMSSym(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),60,(imult>3)?((imult>6)?((imult>7)?0.6:0.4):0.25):0.15);
              // //cq3dlcmskttpc[ktm] = new AliFemtoCorrFctn3DLCMSSym(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),50,0.5);
              //cq3dlcmskttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
              //anetaphitpc[aniter]->AddCorrFctn(cq3dlcmskttpc[ktm]);
              //}
            }
          }
          //	cdedpetaphi[aniter] = new AliFemtoCorrFctnDEtaDPhi(Form("cdedp%stpcM%i", chrgs[ichg], imult),24, 24);
          //	anetaphitpc[aniter]->AddCorrFctn(cdedpetaphi[aniter]);
          Manager->AddAnalysis(anetaphitpc[aniter]);
        }
      }
    }
  }
  // *** End Kaon-Kaon (positive) analysis
  return Manager;
}
| dstocco/AliPhysics | PWGCF/FEMTOSCOPY/macros/Train/KpmHBT_MC/Feb2018/ConfigFemtoAnalysis.C | C++ | bsd-3-clause | 20,577 |
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include <vtkProperty.h>
#include "mitkGridRepresentationProperty.h"
// Default constructor: registers the known representation enum values and
// selects WIREFRAME as the initial representation.
mitk::GridRepresentationProperty::GridRepresentationProperty( )
{
  // Must register the enum entries before SetValue() can validate them.
  AddRepresentationTypes();
  SetValue( WIREFRAME );
}
// Copy constructor: delegates entirely to the EnumerationProperty base,
// which copies both the registered enum entries and the current value.
mitk::GridRepresentationProperty::GridRepresentationProperty(const mitk::GridRepresentationProperty& other)
  : mitk::EnumerationProperty(other)
{
}
// Constructs the property from a numeric enum id.  Unknown ids fall back to
// the WIREFRAME default instead of leaving the property unset.
mitk::GridRepresentationProperty::GridRepresentationProperty( const IdType& value )
{
  AddRepresentationTypes();
  if ( !IsValidEnumerationValue( value ) )
  {
    // Invalid id: use the safe default.
    SetValue( WIREFRAME );
  }
  else
  {
    SetValue( value );
  }
}
// Constructs the property from a representation name (e.g. "Surface").
// Unrecognized names fall back to the WIREFRAME default.
mitk::GridRepresentationProperty::GridRepresentationProperty( const std::string& value )
{
  AddRepresentationTypes();
  if ( !IsValidEnumerationValue( value ) )
  {
    // Unknown name: use the safe default.
    SetValue( WIREFRAME );
  }
  else
  {
    SetValue( value );
  }
}
// Convenience setter: switch the representation to POINTS.
void mitk::GridRepresentationProperty::SetRepresentationToPoints()
{
  SetValue( POINTS );
}
// Convenience setter: switch the representation to WIREFRAME.
void mitk::GridRepresentationProperty::SetRepresentationToWireframe()
{
  SetValue( WIREFRAME );
}
// Convenience setter: switch the representation to SURFACE.
void mitk::GridRepresentationProperty::SetRepresentationToSurface()
{
  SetValue( SURFACE );
}
// Intentionally a no-op: the WIREFRAME_SURFACE mode is disabled (its enum
// entry is also commented out in AddRepresentationTypes()).  The method is
// kept so existing callers keep compiling.
void mitk::GridRepresentationProperty::SetRepresentationToWireframeSurface()
{
  //SetValue( WIREFRAME_SURFACE );
}
// Registers the valid representation names and ids with the enumeration
// base class.  Called from every constructor before any SetValue().
void mitk::GridRepresentationProperty::AddRepresentationTypes()
{
  AddEnum( "Points", POINTS );
  AddEnum( "Wireframe", WIREFRAME );
  AddEnum( "Surface", SURFACE );
  //AddEnum( "WireframeSurface", WIREFRAME_SURFACE );
}
// Forwards to EnumerationProperty::AddEnum; returns false if the name/id
// pair could not be registered (e.g. duplicate).
bool mitk::GridRepresentationProperty::AddEnum( const std::string& name, const IdType& id )
{
  return Superclass::AddEnum( name, id );
}
// ITK clone hook: copy-constructs a new instance.  The explicit UnRegister()
// compensates for the reference added by the raw-pointer construction, so the
// returned smart pointer holds the only reference.
itk::LightObject::Pointer mitk::GridRepresentationProperty::InternalClone() const
{
  itk::LightObject::Pointer result(new Self(*this));
  result->UnRegister();
  return result;
}
| rfloca/MITK | Modules/MitkDataTypesExt/mitkGridRepresentationProperty.cpp | C++ | bsd-3-clause | 2,270 |
// bb10.c
// "construction of efficient generalized lr parsers": i == 10
/* Parser stress-test fixture ("construction of efficient generalized LR
   parsers"): `a = b` followed by 300 further `+b` terms (30 lines of 10).
   The expression shape is the point of the file — it exercises parser
   performance on long flat expressions.  `b` is deliberately left
   uninitialized; the program is meant to be parsed, not executed. */
int main()
{
  int a, b;
  a = b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  +b+b+b+b+b+b+b+b+b+b
  ;
  return a;
}
| BackupTheBerlios/codemorpher-svn | trunk/elsa/elkhound/in/bb300.c | C | bsd-3-clause | 947 |
---
draft: false
slider_enable: true
description: ""
disclaimer: "The contents of this web site and the associated <a href=\"https://github.com/pmem\">GitHub repositories</a> are BSD-licensed open source."
aliases: ["pmem2_get_flush_fn.3.html"]
title: "libpmem2 | PMDK"
header: "pmem2 API version 1.0"
---
[comment]: <> (SPDX-License-Identifier: BSD-3-Clause
[comment]: <> (Copyright 2020, Intel Corporation)
[comment]: <> (pmem2_get_flush_fn.3 -- man page for pmem2_get_flush_fn)
[NAME](#name)<br />
[SYNOPSIS](#synopsis)<br />
[DESCRIPTION](#description)<br />
[RETURN VALUE](#return-value)<br />
[SEE ALSO](#see-also)<br />
# NAME #
**pmem2_get_flush_fn**() - get a flush function
# SYNOPSIS #
```c
#include <libpmem2.h>
typedef void (*pmem2_flush_fn)(const void *ptr, size_t size);
struct pmem2_map;
pmem2_flush_fn pmem2_get_flush_fn(struct pmem2_map *map);
```
# DESCRIPTION #
The **pmem2_get_flush_fn**() function returns a pointer to a function
responsible for efficiently flushing data in the range owned by the *map*.
Flushing data using *pmem2_flush_fn* **does not** guarantee that the data
is stored durably by the time it returns. To get this guarantee, the application
should either use the persist operation (see **pmem2_get_persist_fn**(3))
or follow *pmem2_flush_fn* with a drain operation (see **pmem2_get_drain_fn**(3)).
There are no alignment restrictions on the range described by *ptr* and *size*,
but *pmem2_flush_fn* may expand the range as necessary to meet platform
alignment requirements.
There is nothing atomic or transactional about *pmem2_flush_fn*. Any
unwritten stores in the given range will be written, but some stores may have
already been written by virtue of normal cache eviction/replacement policies.
Correctly written code must not depend on stores waiting until
*pmem2_flush_fn* is called to be flushed -- they can be flushed
at any time before *pmem2_flush_fn* is called.
If two (or more) mappings share the same *pmem2_flush_fn* and they are
adjacent to each other, it is safe to call this function for a range spanning
those mappings.
# RETURN VALUE #
The **pmem2_get_flush_fn**() function never returns NULL.
The **pmem2_get_flush_fn**() for the same *map* always returns the same function.
This means that it's safe to cache its return value. However, this function
is very cheap (because it returns a precomputed value), so caching may not
be necessary.
# SEE ALSO #
**pmem2_get_drain_fn**(3), **pmem2_get_persist_fn**(3), **pmem2_map_new**(3),
**libpmem2**(7) and **<https://pmem.io>**
| pbalcer/pbalcer.github.io | content/pmdk/manpages/windows/v1.11/libpmem2/pmem2_get_flush_fn.3.md | Markdown | bsd-3-clause | 2,549 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/net/load_timing_observer.h"
#include "base/compiler_specific.h"
#include "base/format_macros.h"
#include "base/message_loop.h"
#include "base/stringprintf.h"
#include "base/time.h"
#include "content/test/test_browser_thread.h"
#include "net/base/load_flags.h"
#include "net/url_request/url_request_netlog_params.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace {
using base::TimeDelta;
using content::BrowserThread;
using net::NetLog;
// Serves to identify the current thread as the IO thread.
// Test fixture: creates a MessageLoop and marks the current thread as the
// BrowserThread::IO thread, since LoadTimingObserver expects to run there.
class LoadTimingObserverTest : public testing::Test {
 public:
  LoadTimingObserverTest() : io_thread_(BrowserThread::IO, &message_loop_) {
  }
 private:
  MessageLoop message_loop_;
  content::TestBrowserThread io_thread_;
};
base::TimeTicks current_time;
// Feeds a PHASE_BEGIN net-log entry for |type|/|source| to the observer,
// stamped with the file-global |current_time|.
void AddStartEntry(LoadTimingObserver& observer,
                   const NetLog::Source& source,
                   NetLog::EventType type,
                   NetLog::EventParameters* params) {
  observer.OnAddEntry(type, current_time, source, NetLog::PHASE_BEGIN, params);
}
// Feeds the matching PHASE_END net-log entry for |type|/|source| to the
// observer, stamped with the file-global |current_time|.
void AddEndEntry(LoadTimingObserver& observer,
                 const NetLog::Source& source,
                 NetLog::EventType type,
                 NetLog::EventParameters* params) {
  observer.OnAddEntry(type, current_time, source, NetLog::PHASE_END, params);
}
// Simulates the start of URL request |id|: REQUEST_ALIVE followed by
// URL_REQUEST_START_JOB.  When |request_timing| is true the request carries
// LOAD_ENABLE_LOAD_TIMING, which is what makes the observer track it.
void AddStartURLRequestEntries(LoadTimingObserver& observer,
                               uint32 id,
                               bool request_timing) {
  scoped_refptr<net::URLRequestStartEventParameters> params(
      new net::URLRequestStartEventParameters(
          GURL(base::StringPrintf("http://req%d", id)),
          "GET",
          request_timing ? net::LOAD_ENABLE_LOAD_TIMING : 0,
          net::LOW));
  NetLog::Source source(NetLog::SOURCE_URL_REQUEST, id);
  AddStartEntry(observer, source, NetLog::TYPE_REQUEST_ALIVE, NULL);
  AddStartEntry(observer,
                source,
                NetLog::TYPE_URL_REQUEST_START_JOB,
                params.get());
}
// Simulates the end of URL request |id|; the observer should then drop the
// corresponding URLRequestRecord.
void AddEndURLRequestEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_URL_REQUEST, id);
  AddEndEntry(observer, source, NetLog::TYPE_REQUEST_ALIVE, NULL);
  AddEndEntry(observer,
              source,
              NetLog::TYPE_URL_REQUEST_START_JOB,
              NULL);
}
// Simulates the start of HTTP stream job |id|.
void AddStartHTTPStreamJobEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_HTTP_STREAM_JOB, id);
  AddStartEntry(observer, source, NetLog::TYPE_HTTP_STREAM_JOB, NULL);
}
// Simulates the end of HTTP stream job |id|.
void AddEndHTTPStreamJobEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_HTTP_STREAM_JOB, id);
  AddEndEntry(observer, source, NetLog::TYPE_HTTP_STREAM_JOB, NULL);
}
// Simulates the start of socket-pool connect job |id|.
void AddStartConnectJobEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_CONNECT_JOB, id);
  AddStartEntry(observer, source, NetLog::TYPE_SOCKET_POOL_CONNECT_JOB, NULL);
}
// Simulates the end of socket-pool connect job |id|.
void AddEndConnectJobEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_CONNECT_JOB, id);
  AddEndEntry(observer, source, NetLog::TYPE_SOCKET_POOL_CONNECT_JOB, NULL);
}
// Simulates the creation of socket |id|.
void AddStartSocketEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_SOCKET, id);
  AddStartEntry(observer, source, NetLog::TYPE_SOCKET_ALIVE, NULL);
}
// Simulates the destruction of socket |id|.
void AddEndSocketEntries(LoadTimingObserver& observer, uint32 id) {
  NetLog::Source source(NetLog::SOURCE_SOCKET, id);
  AddEndEntry(observer, source, NetLog::TYPE_SOCKET_ALIVE, NULL);
}
// Emits the "source_dependency" event that binds a URL request to the HTTP
// stream job that will service it.
void BindURLRequestToHTTPStreamJob(LoadTimingObserver& observer,
                                   NetLog::Source url_request_source,
                                   NetLog::Source http_stream_job_source) {
  scoped_refptr<net::NetLogSourceParameter> params(
      new net::NetLogSourceParameter("source_dependency",
                                     http_stream_job_source));
  AddStartEntry(observer,
                url_request_source,
                NetLog::TYPE_HTTP_STREAM_REQUEST_BOUND_TO_JOB,
                params.get());
}
// Emits the "source_dependency" event that binds an HTTP stream job to the
// connect job establishing its socket.
void BindHTTPStreamJobToConnectJob(LoadTimingObserver& observer,
                                   NetLog::Source& http_stream_job_source,
                                   NetLog::Source& connect_source) {
  scoped_refptr<net::NetLogSourceParameter> params(
      new net::NetLogSourceParameter("source_dependency", connect_source));
  AddStartEntry(observer,
                http_stream_job_source,
                NetLog::TYPE_SOCKET_POOL_BOUND_TO_CONNECT_JOB,
                params.get());
}
// Emits the "source_dependency" event that binds an HTTP stream job to a
// (possibly reused) socket.
void BindHTTPStreamJobToSocket(LoadTimingObserver& observer,
                               NetLog::Source& http_stream_job_source,
                               NetLog::Source& socket_source) {
  scoped_refptr<net::NetLogSourceParameter> params(
      new net::NetLogSourceParameter("source_dependency", socket_source));
  AddStartEntry(observer,
                http_stream_job_source,
                NetLog::TYPE_SOCKET_POOL_BOUND_TO_SOCKET,
                params.get());
}
} // namespace
// Test that net::URLRequest with no load timing flag is not processed.
// Test that net::URLRequest with no load timing flag is not processed:
// without LOAD_ENABLE_LOAD_TIMING no URLRequestRecord should be created.
TEST_F(LoadTimingObserverTest, NoLoadTimingEnabled) {
  LoadTimingObserver observer;
  AddStartURLRequestEntries(observer, 0, false);
  LoadTimingObserver::URLRequestRecord* record =
      observer.GetURLRequestRecord(0);
  ASSERT_TRUE(record == NULL);
}
// Test that URLRequestRecord is created, deleted and is not growing unbound.
// Test that URLRequestRecord is created on request start, deleted on request
// end, and that the record map is bounded (old records are evicted once many
// requests have been seen — hence the 1100 starts and the check on id 1).
TEST_F(LoadTimingObserverTest, URLRequestRecord) {
  LoadTimingObserver observer;
  // Create record.
  AddStartURLRequestEntries(observer, 0, true);
  LoadTimingObserver::URLRequestRecord* record =
      observer.GetURLRequestRecord(0);
  ASSERT_FALSE(record == NULL);
  // Collect record.
  AddEndURLRequestEntries(observer, 0);
  record = observer.GetURLRequestRecord(0);
  ASSERT_TRUE(record == NULL);
  // Check unbound growth.
  for (size_t i = 1; i < 1100; ++i)
    AddStartURLRequestEntries(observer, i, true);
  record = observer.GetURLRequestRecord(1);
  ASSERT_TRUE(record == NULL);
}
// Test that HTTPStreamJobRecord is created, deleted and is not growing unbound.
TEST_F(LoadTimingObserverTest, HTTPStreamJobRecord) {
LoadTimingObserver observer;
// Create record.
AddStartHTTPStreamJobEntries(observer, 0);
ASSERT_FALSE(observer.http_stream_job_to_record_.find(0) ==
             observer.http_stream_job_to_record_.end());
// Collect record.
AddEndHTTPStreamJobEntries(observer, 0);
ASSERT_TRUE(observer.http_stream_job_to_record_.find(0) ==
            observer.http_stream_job_to_record_.end());
// Check unbound growth.
// The record for id 1 should be gone after ~1100 starts, showing the map
// is bounded.
for (size_t i = 1; i < 1100; ++i)
  AddStartHTTPStreamJobEntries(observer, i);
ASSERT_TRUE(observer.http_stream_job_to_record_.find(1) ==
            observer.http_stream_job_to_record_.end());
}
// Test that ConnectJobRecord is created, deleted and is not growing unbound.
TEST_F(LoadTimingObserverTest, ConnectJobRecord) {
LoadTimingObserver observer;
// Create record.
AddStartConnectJobEntries(observer, 0);
ASSERT_FALSE(observer.connect_job_to_record_.find(0) ==
             observer.connect_job_to_record_.end());
// Collect record.
AddEndConnectJobEntries(observer, 0);
ASSERT_TRUE(observer.connect_job_to_record_.find(0) ==
            observer.connect_job_to_record_.end());
// Check unbound growth.
// The record for id 1 should be gone after ~1100 starts, showing the map
// is bounded.
for (size_t i = 1; i < 1100; ++i)
  AddStartConnectJobEntries(observer, i);
ASSERT_TRUE(observer.connect_job_to_record_.find(1) ==
            observer.connect_job_to_record_.end());
}
// Test that SocketRecord is created, deleted and is not growing unbound.
TEST_F(LoadTimingObserverTest, SocketRecord) {
LoadTimingObserver observer;
// Create record.
AddStartSocketEntries(observer, 0);
ASSERT_FALSE(observer.socket_to_record_.find(0) ==
             observer.socket_to_record_.end());
// Collect record.
AddEndSocketEntries(observer, 0);
ASSERT_TRUE(observer.socket_to_record_.find(0) ==
            observer.socket_to_record_.end());
// Check unbound growth.
// The record for id 1 should be gone after ~1100 starts, showing the map
// is bounded.
for (size_t i = 1; i < 1100; ++i)
  AddStartSocketEntries(observer, i);
ASSERT_TRUE(observer.socket_to_record_.find(1) ==
            observer.socket_to_record_.end());
}
// Test that basic time is set to the request.
TEST_F(LoadTimingObserverTest, BaseTicks) {
LoadTimingObserver observer;
current_time += TimeDelta::FromSeconds(1);
AddStartURLRequestEntries(observer, 0, true);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// base_ticks captures the (mock) clock at request start:
// 1 second == 1,000,000 microseconds of internal value.
ASSERT_EQ(1000000, record->base_ticks.ToInternalValue());
}
// Test proxy time detection.
TEST_F(LoadTimingObserverTest, ProxyTime) {
LoadTimingObserver observer;
current_time += TimeDelta::FromSeconds(1);
AddStartURLRequestEntries(observer, 0, true);
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
// Proxy resolution runs from +2s to +5s relative to the request start.
current_time += TimeDelta::FromSeconds(2);
AddStartEntry(observer, source, NetLog::TYPE_PROXY_SERVICE, NULL);
current_time += TimeDelta::FromSeconds(3);
AddEndEntry(observer, source, NetLog::TYPE_PROXY_SERVICE, NULL);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Timing offsets are reported in milliseconds relative to base_ticks.
ASSERT_EQ(2000, record->timing.proxy_start);
ASSERT_EQ(5000, record->timing.proxy_end);
}
// Test connect time detection.
TEST_F(LoadTimingObserverTest, ConnectTime) {
LoadTimingObserver observer;
current_time += TimeDelta::FromSeconds(1);
AddStartURLRequestEntries(observer, 0, true);
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
// Run a socket-pool checkout (+2s..+5s) on a separate HTTP stream job.
NetLog::Source http_stream_job_source(NetLog::SOURCE_HTTP_STREAM_JOB, 1);
AddStartHTTPStreamJobEntries(observer, 1);
current_time += TimeDelta::FromSeconds(2);
AddStartEntry(observer, http_stream_job_source, NetLog::TYPE_SOCKET_POOL,
              NULL);
current_time += TimeDelta::FromSeconds(3);
AddEndEntry(observer, http_stream_job_source, NetLog::TYPE_SOCKET_POOL, NULL);
// Binding the request to the job afterwards must propagate the job's
// connect times into the request record.
BindURLRequestToHTTPStreamJob(observer, source, http_stream_job_source);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Offsets in milliseconds relative to base_ticks.
ASSERT_EQ(2000, record->timing.connect_start);
ASSERT_EQ(5000, record->timing.connect_end);
}
// Test dns time detection.
TEST_F(LoadTimingObserverTest, DnsTime) {
LoadTimingObserver observer;
// Start request.
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
AddStartURLRequestEntries(observer, 0, true);
current_time += TimeDelta::FromSeconds(1);
// Add resolver entry.
// Host resolution runs inside a connect job, from +1s to +3s.
AddStartConnectJobEntries(observer, 1);
NetLog::Source connect_source(NetLog::SOURCE_CONNECT_JOB, 1);
AddStartEntry(observer,
              connect_source,
              NetLog::TYPE_HOST_RESOLVER_IMPL,
              NULL);
current_time += TimeDelta::FromSeconds(2);
AddEndEntry(observer, connect_source, NetLog::TYPE_HOST_RESOLVER_IMPL, NULL);
AddEndConnectJobEntries(observer, 1);
// Attribute the connect job to a stream job, then the stream job to the
// request; the DNS times must flow through both bindings.
NetLog::Source http_stream_job_source(NetLog::SOURCE_HTTP_STREAM_JOB, 2);
AddStartHTTPStreamJobEntries(observer, 2);
BindHTTPStreamJobToConnectJob(observer, http_stream_job_source,
                              connect_source);
BindURLRequestToHTTPStreamJob(observer, source, http_stream_job_source);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Offsets in milliseconds relative to base_ticks.
ASSERT_EQ(1000, record->timing.dns_start);
ASSERT_EQ(3000, record->timing.dns_end);
}
// Test send time detection.
TEST_F(LoadTimingObserverTest, SendTime) {
LoadTimingObserver observer;
// Start request.
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
AddStartURLRequestEntries(observer, 0, true);
current_time += TimeDelta::FromSeconds(2);
// Add send request entry.
// The request body is sent from +2s to +7s.
AddStartEntry(observer,
              source,
              NetLog::TYPE_HTTP_TRANSACTION_SEND_REQUEST,
              NULL);
current_time += TimeDelta::FromSeconds(5);
AddEndEntry(observer,
            source,
            NetLog::TYPE_HTTP_TRANSACTION_SEND_REQUEST,
            NULL);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Offsets in milliseconds relative to base_ticks.
ASSERT_EQ(2000, record->timing.send_start);
ASSERT_EQ(7000, record->timing.send_end);
}
// Test receive time detection.
TEST_F(LoadTimingObserverTest, ReceiveTime) {
LoadTimingObserver observer;
// Start request.
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
AddStartURLRequestEntries(observer, 0, true);
current_time += TimeDelta::FromSeconds(2);
// Add read headers entry (response headers are read from +2s to +7s).
AddStartEntry(observer,
              source,
              NetLog::TYPE_HTTP_TRANSACTION_READ_HEADERS,
              NULL);
current_time += TimeDelta::FromSeconds(5);
AddEndEntry(observer,
            source,
            NetLog::TYPE_HTTP_TRANSACTION_READ_HEADERS,
            NULL);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Offsets in milliseconds relative to base_ticks.
ASSERT_EQ(2000, record->timing.receive_headers_start);
ASSERT_EQ(7000, record->timing.receive_headers_end);
}
// Test ssl time detection.
TEST_F(LoadTimingObserverTest, SslTime) {
LoadTimingObserver observer;
// Start request.
NetLog::Source source(NetLog::SOURCE_URL_REQUEST, 0);
AddStartURLRequestEntries(observer, 0, true);
current_time += TimeDelta::FromSeconds(1);
// Open a socket and run the SSL handshake on it from +1s to +3s.
AddStartSocketEntries(observer, 1);
NetLog::Source socket_source(NetLog::SOURCE_SOCKET, 1);
AddStartEntry(observer, socket_source, NetLog::TYPE_SSL_CONNECT, NULL);
current_time += TimeDelta::FromSeconds(2);
AddEndEntry(observer, socket_source, NetLog::TYPE_SSL_CONNECT, NULL);
// Attribute the socket to a stream job, then the job to the request; the
// SSL times must flow through both bindings.
NetLog::Source http_stream_job_source(NetLog::SOURCE_HTTP_STREAM_JOB, 2);
AddStartHTTPStreamJobEntries(observer, 2);
BindHTTPStreamJobToSocket(observer, http_stream_job_source, socket_source);
BindURLRequestToHTTPStreamJob(observer, source, http_stream_job_source);
LoadTimingObserver::URLRequestRecord* record =
    observer.GetURLRequestRecord(0);
// Offsets in milliseconds relative to base_ticks.
ASSERT_EQ(1000, record->timing.ssl_start);
ASSERT_EQ(3000, record->timing.ssl_end);
}
| rogerwang/chromium | chrome/browser/net/load_timing_observer_unittest.cc | C++ | bsd-3-clause | 14,286 |
# -*- coding: utf-8 -*-
#
# gapic-google-iam-admin-v1 documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex  # NOTE(review): unused here; shipped by the Sphinx quickstart template.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# Package version; `release` and `version` below are derived from this.
__version__ = '0.11.1'
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon',
    'sphinx.ext.viewcode',
]
# autodoc/autosummary flags
autoclass_content = 'both'
autodoc_default_flags = ['members']
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gapic-google-iam-admin-v1'
copyright = u'2016, Google'
author = u'Google APIs'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = '.'.join(release.split('.')[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'gapic-google-iam-admin-v1-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'gapic-google-iam-admin-v1.tex', u'gapic-google-iam-admin-v1 Documentation',
     author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'gapic-google-iam-admin-v1', u'gapic-google-iam-admin-v1 Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): 'api.version' in the description below looks like an
# unexpanded code-generator placeholder -- confirm against the generator.
texinfo_documents = [
    (master_doc, 'gapic-google-iam-admin-v1', u'gapic-google-iam-admin-v1 Documentation',
     author, 'gapic-google-iam-admin-v1', 'GAPIC library for the google-iam-admin (api.version) service',
     'APIs'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('http://python.readthedocs.org/en/latest/', None),
    'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| eoogbe/api-client-staging | generated/python/gapic-google-iam-admin-v1/docs/conf.py | Python | bsd-3-clause | 10,253 |
/*
 * Copyright 2021 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef PDEF_TBB_CERT_H
#define PDEF_TBB_CERT_H
#include <tbbr/tbb_cert.h>
/*
 * Enumerate the certificates that are used to establish the chain of trust
 *
 * These platform-specific DDR firmware certificates extend the standard
 * TBBR enumeration from tbb_cert.h, continuing immediately after FWU_CERT.
 */
enum {
	/* Key certificate for the DDR firmware images. */
	DDR_FW_KEY_CERT = FWU_CERT + 1,
	/* Content certificates for the UDIMM and RDIMM DDR firmware. */
	DDR_UDIMM_FW_CONTENT_CERT,
	DDR_RDIMM_FW_CONTENT_CERT
};
#endif /* PDEF_TBB_CERT_H */
| jenswi-linaro/arm-trusted-firmware | tools/nxp/cert_create_helper/include/pdef_tbb_cert.h | C | bsd-3-clause | 362 |
<!DOCTYPE html>
<html>
<head>
  <!--
    If you are serving your web app in a path other than the root, change the
    href value below to reflect the base path you are serving from.
    The path provided below has to start and end with a slash "/" in order for
    it to work correctly.
    For more details:
    * https://developer.mozilla.org/en-US/docs/Web/HTML/Element/base
    This is a placeholder for base href that will be replaced by the value of
    the `--base-href` argument provided to `flutter build`.
  -->
  <base href="$FLUTTER_BASE_HREF">
  <meta charset="UTF-8">
  <meta content="IE=Edge" http-equiv="X-UA-Compatible">
  <meta name="description" content="A new Flutter project.">
  <!-- iOS meta tags & icons -->
  <meta name="apple-mobile-web-app-capable" content="yes">
  <meta name="apple-mobile-web-app-status-bar-style" content="black">
  <meta name="apple-mobile-web-app-title" content="startup_namer">
  <link rel="apple-touch-icon" href="icons/Icon-192.png">
  <!-- Favicon -->
  <link rel="icon" type="image/png" href="favicon.png"/>
  <title>startup_namer</title>
  <link rel="manifest" href="manifest.json">
</head>
<body>
  <!-- This script installs service_worker.js to provide PWA functionality to
       the application. For more information, see:
       https://developers.google.com/web/fundamentals/primers/service-workers -->
  <script>
    var serviceWorkerVersion = null;
    var scriptLoaded = false;
    // Appends the main.dart.js <script> tag exactly once, regardless of how
    // many code paths below decide the app is ready to load.
    function loadMainDartJs() {
      if (scriptLoaded) {
        return;
      }
      scriptLoaded = true;
      var scriptTag = document.createElement('script');
      scriptTag.src = 'main.dart.js';
      scriptTag.type = 'application/javascript';
      document.body.append(scriptTag);
    }

    if ('serviceWorker' in navigator) {
      // Service workers are supported. Use them.
      window.addEventListener('load', function () {
        // Wait for registration to finish before dropping the <script> tag.
        // Otherwise, the browser will load the script multiple times,
        // potentially different versions.
        var serviceWorkerUrl = 'flutter_service_worker.js?v=' + serviceWorkerVersion;
        navigator.serviceWorker.register(serviceWorkerUrl)
          .then((reg) => {
            function waitForActivation(serviceWorker) {
              serviceWorker.addEventListener('statechange', () => {
                if (serviceWorker.state == 'activated') {
                  console.log('Installed new service worker.');
                  loadMainDartJs();
                }
              });
            }
            if (!reg.active && (reg.installing || reg.waiting)) {
              // No active web worker and we have installed or are installing
              // one for the first time. Simply wait for it to activate.
              waitForActivation(reg.installing || reg.waiting);
            } else if (!reg.active.scriptURL.endsWith(serviceWorkerVersion)) {
              // When the app updates the serviceWorkerVersion changes, so we
              // need to ask the service worker to update.
              console.log('New service worker available.');
              reg.update();
              waitForActivation(reg.installing);
            } else {
              // Existing service worker is still good.
              console.log('Loading app from service worker.');
              loadMainDartJs();
            }
          });

        // If service worker doesn't succeed in a reasonable amount of time,
        // fall back to a plain <script> tag.
        setTimeout(() => {
          if (!scriptLoaded) {
            console.warn(
              'Failed to load app from service worker. Falling back to plain <script> tag.',
            );
            loadMainDartJs();
          }
        }, 4000);
      });
    } else {
      // Service workers not supported. Just drop the <script> tag.
      loadMainDartJs();
    }
  </script>
</body>
</html>
| flutter/codelabs | startup_namer/step8_themes/web/index.html | HTML | bsd-3-clause | 3,938 |
/*
 * Copyright (c) 2014-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef FVP_DEF_H
#define FVP_DEF_H
/*
 * Memory map and platform constants for the Arm FVP (Fixed Virtual Platform)
 * boards. FVP_CLUSTER_COUNT, FVP_MAX_CPUS_PER_CLUSTER and FVP_MAX_PE_PER_CPU
 * must be supplied by the platform makefile.
 */
#include <lib/utils_def.h>
#ifndef FVP_CLUSTER_COUNT
#error "FVP_CLUSTER_COUNT is not set in makefile"
#endif
#ifndef FVP_MAX_CPUS_PER_CLUSTER
#error "FVP_MAX_CPUS_PER_CLUSTER is not set in makefile"
#endif
#ifndef FVP_MAX_PE_PER_CPU
#error "FVP_MAX_PE_PER_CPU is not set in makefile"
#endif
/* Linear id of the primary (boot) CPU. */
#define FVP_PRIMARY_CPU			0x0
/* Defines for the Interconnect build selection */
#define FVP_CCI			1
#define FVP_CCN			2
/******************************************************************************
 * Definition of platform soc id
 *****************************************************************************/
#define FVP_SOC_ID      0
/*******************************************************************************
 * FVP memory map related constants
 ******************************************************************************/
#define FLASH1_BASE			UL(0x0c000000)
#define FLASH1_SIZE			UL(0x04000000)
#define PSRAM_BASE			UL(0x14000000)
#define PSRAM_SIZE			UL(0x04000000)
#define VRAM_BASE			UL(0x18000000)
#define VRAM_SIZE			UL(0x02000000)
/* Aggregate of all devices in the first GB */
#define DEVICE0_BASE			UL(0x20000000)
#define DEVICE0_SIZE			UL(0x0c200000)
/*
 * In case of FVP models with CCN, the CCN register space overlaps into
 * the NSRAM area.
 */
#if FVP_INTERCONNECT_DRIVER == FVP_CCN
#define DEVICE1_BASE			UL(0x2e000000)
#define DEVICE1_SIZE			UL(0x1A00000)
#else
#define DEVICE1_BASE			BASE_GICD_BASE
/*
 * DEVICE1 covers the GIC distributor plus one redistributor frame per core:
 * 256KB each for GICv4, 128KB each for GICv2/GICv3.
 */
#if GIC_ENABLE_V4_EXTN
/* GICv4 mapping: GICD + CORE_COUNT * 256KB */
#define DEVICE1_SIZE			((BASE_GICR_BASE - BASE_GICD_BASE) + \
					 (PLATFORM_CORE_COUNT * 0x40000))
#else
/* GICv2 and GICv3 mapping: GICD + CORE_COUNT * 128KB */
#define DEVICE1_SIZE			((BASE_GICR_BASE - BASE_GICD_BASE) + \
					 (PLATFORM_CORE_COUNT * 0x20000))
#endif /* GIC_ENABLE_V4_EXTN */
#define NSRAM_BASE			UL(0x2e000000)
#define NSRAM_SIZE			UL(0x10000)
#endif
/* Devices in the second GB */
#define DEVICE2_BASE			UL(0x7fe00000)
#define DEVICE2_SIZE			UL(0x00200000)
#define PCIE_EXP_BASE			UL(0x40000000)
#define TZRNG_BASE			UL(0x7fe60000)
/* Non-volatile counters */
#define TRUSTED_NVCTR_BASE		UL(0x7fe70000)
#define TFW_NVCTR_BASE			(TRUSTED_NVCTR_BASE + UL(0x0000))
#define TFW_NVCTR_SIZE			UL(4)
#define NTFW_CTR_BASE			(TRUSTED_NVCTR_BASE + UL(0x0004))
#define NTFW_CTR_SIZE			UL(4)
/* Keys */
#define SOC_KEYS_BASE			UL(0x7fe80000)
#define TZ_PUB_KEY_HASH_BASE		(SOC_KEYS_BASE + UL(0x0000))
#define TZ_PUB_KEY_HASH_SIZE		UL(32)
#define HU_KEY_BASE			(SOC_KEYS_BASE + UL(0x0020))
#define HU_KEY_SIZE			UL(16)
#define END_KEY_BASE			(SOC_KEYS_BASE + UL(0x0044))
#define END_KEY_SIZE			UL(32)
/* Constants to distinguish FVP type */
#define HBI_BASE_FVP			U(0x020)
#define REV_BASE_FVP_V0			U(0x0)
#define REV_BASE_FVP_REVC		U(0x2)
#define HBI_FOUNDATION_FVP		U(0x010)
#define REV_FOUNDATION_FVP_V2_0		U(0x0)
#define REV_FOUNDATION_FVP_V2_1		U(0x1)
#define REV_FOUNDATION_FVP_v9_1		U(0x2)
#define REV_FOUNDATION_FVP_v9_6		U(0x3)
#define BLD_GIC_VE_MMAP			U(0x0)
#define BLD_GIC_A53A57_MMAP		U(0x1)
#define ARCH_MODEL			U(0x1)
/* FVP Power controller base address*/
#define PWRC_BASE			UL(0x1c100000)
/* FVP SP804 timer frequency is 35 MHz*/
#define SP804_TIMER_CLKMULT		1
#define SP804_TIMER_CLKDIV		35
/* SP810 controller. FVP specific flags */
#define FVP_SP810_CTRL_TIM0_OV		BIT_32(16)
#define FVP_SP810_CTRL_TIM1_OV		BIT_32(18)
#define FVP_SP810_CTRL_TIM2_OV		BIT_32(20)
#define FVP_SP810_CTRL_TIM3_OV		BIT_32(22)
/*******************************************************************************
 * GIC & interrupt handling related constants
 ******************************************************************************/
/* VE compatible GIC memory map */
#define VE_GICD_BASE			UL(0x2c001000)
#define VE_GICC_BASE			UL(0x2c002000)
#define VE_GICH_BASE			UL(0x2c004000)
#define VE_GICV_BASE			UL(0x2c006000)
/* Base FVP compatible GIC memory map */
#define BASE_GICD_BASE			UL(0x2f000000)
#define BASE_GICD_SIZE			UL(0x10000)
#define BASE_GICR_BASE			UL(0x2f100000)
#if GIC_ENABLE_V4_EXTN
/* GICv4 redistributor size: 256KB */
#define BASE_GICR_SIZE			UL(0x40000)
#else
#define BASE_GICR_SIZE			UL(0x20000)
#endif /* GIC_ENABLE_V4_EXTN */
#define BASE_GICC_BASE			UL(0x2c000000)
#define BASE_GICH_BASE			UL(0x2c010000)
#define BASE_GICV_BASE			UL(0x2c02f000)
/* Platform interrupt IDs: secure watchdog and secure system timer. */
#define FVP_IRQ_TZ_WDOG			56
#define FVP_IRQ_SEC_SYS_TIMER		57
/*******************************************************************************
 * TrustZone address space controller related constants
 ******************************************************************************/
/* NSAIDs used by devices in TZC filter 0 on FVP */
#define FVP_NSAID_DEFAULT		0
#define FVP_NSAID_PCI			1
#define FVP_NSAID_VIRTIO		8  /* from FVP v5.6 onwards */
#define FVP_NSAID_AP			9  /* Application Processors */
#define FVP_NSAID_VIRTIO_OLD		15 /* until FVP v5.5 */
/* NSAIDs used by devices in TZC filter 2 on FVP */
#define FVP_NSAID_HDLCD0		2
#define FVP_NSAID_CLCD			7
/*******************************************************************************
 * Memprotect definitions
 ******************************************************************************/
/* PSCI memory protect definitions:
 * This variable is stored in a non-secure flash because some ARM reference
 * platforms do not have secure NVRAM. Real systems that provided MEM_PROTECT
 * support must use a secure NVRAM to store the PSCI MEM_PROTECT definitions.
 */
#define PLAT_ARM_MEM_PROT_ADDR		(V2M_FLASH0_BASE + \
					 V2M_FLASH0_SIZE - V2M_FLASH_BLOCK_SIZE)
#endif /* FVP_DEF_H */
| achingupta/arm-trusted-firmware | plat/arm/board/fvp/fvp_def.h | C | bsd-3-clause | 5,725 |
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: [email protected] (Keir Mierle)
//
// Computation of the Jacobian matrix for vector-valued functions of multiple
// variables, using automatic differentiation based on the implementation of
// dual numbers in jet.h. Before reading the rest of this file, it is advisable
// to read jet.h's header comment in detail.
//
// The helper wrapper AutoDiff::Differentiate() computes the jacobian of
// functors with templated operator() taking this form:
//
// struct F {
// template<typename T>
// bool operator(const T *x, const T *y, ..., T *z) {
// // Compute z[] based on x[], y[], ...
// // return true if computation succeeded, false otherwise.
// }
// };
//
// All inputs and outputs may be vector-valued.
//
// To understand how jets are used to compute the jacobian, a
// picture may help. Consider a vector-valued function, F, returning 3
// dimensions and taking a vector-valued parameter of 4 dimensions:
//
// y x
// [ * ] F [ * ]
// [ * ] <--- [ * ]
// [ * ] [ * ]
// [ * ]
//
// Similar to the 2-parameter example for f described in jet.h, computing the
// jacobian dy/dx is done by substituting a suitable jet object for x and all
// intermediate steps of the computation of F. Since x has 4 dimensions, use
// a Jet<double, 4>.
//
// Before substituting a jet object for x, the dual components are set
// appropriately for each dimension of x:
//
// y x
// [ * | * * * * ] f [ * | 1 0 0 0 ] x0
// [ * | * * * * ] <--- [ * | 0 1 0 0 ] x1
// [ * | * * * * ] [ * | 0 0 1 0 ] x2
// ---+--- [ * | 0 0 0 1 ] x3
// | ^ ^ ^ ^
// dy/dx | | | +----- infinitesimal for x3
// | | +------- infinitesimal for x2
// | +--------- infinitesimal for x1
// +----------- infinitesimal for x0
//
// The reason to set the internal 4x4 submatrix to the identity is that we wish
// to take the derivative of y separately with respect to each dimension of x.
// Each column of the 4x4 identity is therefore for a single component of the
// independent variable x.
//
// Then the jacobian of the mapping, dy/dx, is the 3x4 sub-matrix of the
// extended y vector, indicated in the above diagram.
//
// Functors with multiple parameters
// ---------------------------------
// In practice, it is often convenient to use a function f of two or more
// vector-valued parameters, for example, x[3] and z[6]. Unfortunately, the jet
// framework is designed for a single-parameter vector-valued input. The wrapper
// in this file addresses this issue adding support for functions with one or
// more parameter vectors.
//
// To support multiple parameters, all the parameter vectors are concatenated
// into one and treated as a single parameter vector, except that since the
// functor expects different inputs, we need to construct the jets as if they
// were part of a single parameter vector. The extended jets are passed
// separately for each parameter.
//
// For example, consider a functor F taking two vector parameters, p[2] and
// q[3], and producing an output y[4]:
//
// struct F {
// template<typename T>
// bool operator(const T *p, const T *q, T *z) {
// // ...
// }
// };
//
// In this case, the necessary jet type is Jet<double, 5>. Here is a
// visualization of the jet objects in this case:
//
// Dual components for p ----+
// |
// -+-
// y [ * | 1 0 | 0 0 0 ] --- p[0]
// [ * | 0 1 | 0 0 0 ] --- p[1]
// [ * | . . | + + + ] |
// [ * | . . | + + + ] v
// [ * | . . | + + + ] <--- F(p, q)
// [ * | . . | + + + ] ^
// ^^^ ^^^^^ |
// dy/dp dy/dq [ * | 0 0 | 1 0 0 ] --- q[0]
// [ * | 0 0 | 0 1 0 ] --- q[1]
// [ * | 0 0 | 0 0 1 ] --- q[2]
// --+--
// |
// Dual components for q --------------+
//
// where the 4x2 submatrix (marked with ".") and 4x3 submatrix (marked with "+")
// of y in the above diagram are the derivatives of y with respect to p and q
// respectively. This is how autodiff works for functors taking multiple vector
// valued arguments (up to 6).
//
// Jacobian NULL pointers
// ----------------------
// In general, the functions below will accept NULL pointers for all or some of
// the Jacobian parameters, meaning that those Jacobians will not be computed.
#ifndef CERES_PUBLIC_INTERNAL_AUTODIFF_H_
#define CERES_PUBLIC_INTERNAL_AUTODIFF_H_
#include <stddef.h>
#include <glog/logging.h>
#include "ceres/jet.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/fixed_array.h"
namespace ceres {
namespace internal {
// Extends src by a 1st order perturbation for every dimension and puts it in
// dst. The size of src is N. Since this is also used for perturbations in
// blocked arrays, offset shifts where in the jet the perturbation occurs. This
// is used to set up the extended x augmented by an identity matrix. The JetT
// type should be a Jet type, and T should be a numeric type (e.g. double).
// For example,
//
//             0   1 2   3 4 5   6 7 8
//   dst[0]  [ * | . . | 1 0 0 | . . . ]
//   dst[1]  [ * | . . | 0 1 0 | . . . ]
//   dst[2]  [ * | . . | 0 0 1 | . . . ]
//
// is what would get put in dst if N was 3, offset was 3, and the jet type JetT
// was 8-dimensional.
template <typename JetT, typename T>
inline void Make1stOrderPerturbation(int offset, int N, const T *src,
                                     JetT *dst) {
  DCHECK(src);
  DCHECK(dst);
  for (int dim = 0; dim < N; ++dim) {
    // Each jet carries src's value plus a unit infinitesimal in its own
    // (offset + dim) dual slot.
    dst[dim] = JetT(src[dim], offset + dim);
  }
}
// Takes the 0th order (scalar) part of src, assumed to be a Jet type, and
// puts it in dst. This is used to pick out the "vector" part of the extended y.
template <typename JetT, typename T>
inline void Take0thOrderPart(int M, const JetT *src, T dst) {
  DCHECK(src);
  for (int row = 0; row < M; ++row) {
    dst[row] = src[row].a;
  }
}
// Takes N 1st order parts, starting at index N0, and puts them in the M x N
// row-major matrix 'dst'. This is used to pick out the "matrix" parts of the
// extended y, i.e. one Jacobian block.
template <typename JetT, typename T, int N0, int N>
inline void Take1stOrderPart(const int M, const JetT *src, T *dst) {
  DCHECK(src);
  DCHECK(dst);
  for (int row = 0; row < M; ++row) {
    // Copy dual components [N0, N0 + N) of this output's jet into row 'row'.
    for (int col = 0; col < N; ++col) {
      dst[row * N + col] = src[row].v[N0 + col];
    }
  }
}
// This block of quasi-repeated code calls the user-supplied functor, which may
// take a variable number of arguments. This is accomplished by specializing the
// struct based on the size of the trailing parameters; parameters with 0 size
// are assumed missing.
//
// Supporting variadic functions is the primary source of complexity in the
// autodiff implementation.

// Primary template: all six parameter blocks are present.
template<typename Functor, typename T,
         int N0, int N1, int N2, int N3, int N4, int N5>
struct VariadicEvaluate {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   input[1],
                   input[2],
                   input[3],
                   input[4],
                   input[5],
                   output);
  }
};

// Specialization for five parameter blocks (N5 == 0).
template<typename Functor, typename T,
         int N0, int N1, int N2, int N3, int N4>
struct VariadicEvaluate<Functor, T, N0, N1, N2, N3, N4, 0> {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   input[1],
                   input[2],
                   input[3],
                   input[4],
                   output);
  }
};

// Specialization for four parameter blocks.
template<typename Functor, typename T,
         int N0, int N1, int N2, int N3>
struct VariadicEvaluate<Functor, T, N0, N1, N2, N3, 0, 0> {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   input[1],
                   input[2],
                   input[3],
                   output);
  }
};

// Specialization for three parameter blocks.
template<typename Functor, typename T,
         int N0, int N1, int N2>
struct VariadicEvaluate<Functor, T, N0, N1, N2, 0, 0, 0> {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   input[1],
                   input[2],
                   output);
  }
};

// Specialization for two parameter blocks.
template<typename Functor, typename T,
         int N0, int N1>
struct VariadicEvaluate<Functor, T, N0, N1, 0, 0, 0, 0> {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   input[1],
                   output);
  }
};

// Specialization for a single parameter block.
template<typename Functor, typename T, int N0>
struct VariadicEvaluate<Functor, T, N0, 0, 0, 0, 0, 0> {
  static bool Call(const Functor& functor, T const *const *input, T* output) {
    return functor(input[0],
                   output);
  }
};
// This is in a struct because default template parameters on a function are not
// supported in C++03 (though it is available in C++0x). N0 through N5 are the
// dimension of the input arguments to the user supplied functor.
template <typename Functor, typename T,
          int N0 = 0, int N1 = 0, int N2 = 0, int N3 = 0, int N4 = 0, int N5=0>
struct AutoDiff {
  // Evaluates |functor| at |parameters|, writing |num_outputs| values into
  // |function_value| and, for each non-NULL jacobians[i], the row-major
  // num_outputs x Ni Jacobian of the outputs w.r.t. parameter block i.
  // Returns false iff the functor itself returns false.
  static bool Differentiate(const Functor& functor,
                            T const *const *parameters,
                            int num_outputs,
                            T *function_value,
                            T **jacobians) {
    // One dual ("infinitesimal") dimension per entry of every parameter block.
    typedef Jet<T, N0 + N1 + N2 + N3 + N4 + N5> JetT;
    DCHECK_GT(N0, 0)
        << "Cost functions must have at least one parameter block.";
    // Block sizes must be contiguous: once a block size is 0, every following
    // block size must also be 0.
    DCHECK((!N1 && !N2 && !N3 && !N4 && !N5) ||
           ((N1 > 0) && !N2 && !N3 && !N4 && !N5) ||
           ((N1 > 0) && (N2 > 0) && !N3 && !N4 && !N5) ||
           ((N1 > 0) && (N2 > 0) && (N3 > 0) && !N4 && !N5) ||
           ((N1 > 0) && (N2 > 0) && (N3 > 0) && (N4 > 0) && !N5) ||
           ((N1 > 0) && (N2 > 0) && (N3 > 0) && (N4 > 0) && (N5 > 0)))
        << "Zero block cannot precede a non-zero block. Block sizes are "
        << "(ignore trailing 0s): " << N0 << ", " << N1 << ", " << N2 << ", "
        << N3 << ", " << N4 << ", " << N5;
    DCHECK_GT(num_outputs, 0);
    // Scratch space holding the jets for all input blocks plus the outputs.
    // The inline capacity keeps small problems off the heap.
    FixedArray<JetT, (256 * 7) / sizeof(JetT)> x(
        N0 + N1 + N2 + N3 + N4 + N5 + num_outputs);
    // It's ugly, but it works.
    //
    // Offsets of each parameter block inside the concatenated jet vector.
    const int jet0 = 0;
    const int jet1 = N0;
    const int jet2 = N0 + N1;
    const int jet3 = N0 + N1 + N2;
    const int jet4 = N0 + N1 + N2 + N3;
    const int jet5 = N0 + N1 + N2 + N3 + N4;
    const int jet6 = N0 + N1 + N2 + N3 + N4 + N5;
    // Per-block pointers into |x|, in the layout the functor expects.
    const JetT *unpacked_parameters[6] = {
        x.get() + jet0,
        x.get() + jet1,
        x.get() + jet2,
        x.get() + jet3,
        x.get() + jet4,
        x.get() + jet5,
    };
    JetT *output = x.get() + jet6;
    // Initialize each block's jets: values from parameters[i], with a unit
    // dual component in each entry's own slot (the identity-matrix setup
    // described in the file header comment).
#define CERES_MAKE_1ST_ORDER_PERTURBATION(i) \
    if (N ## i) { \
      internal::Make1stOrderPerturbation(jet ## i, \
                                         N ## i, \
                                         parameters[i], \
                                         x.get() + jet ## i); \
    }
    CERES_MAKE_1ST_ORDER_PERTURBATION(0);
    CERES_MAKE_1ST_ORDER_PERTURBATION(1);
    CERES_MAKE_1ST_ORDER_PERTURBATION(2);
    CERES_MAKE_1ST_ORDER_PERTURBATION(3);
    CERES_MAKE_1ST_ORDER_PERTURBATION(4);
    CERES_MAKE_1ST_ORDER_PERTURBATION(5);
#undef CERES_MAKE_1ST_ORDER_PERTURBATION
    // Evaluate on jets: one pass computes the values and all derivatives.
    if (!VariadicEvaluate<Functor, JetT,
                          N0, N1, N2, N3, N4, N5>::Call(
        functor, unpacked_parameters, output)) {
      return false;
    }
    internal::Take0thOrderPart(num_outputs, output, function_value);
    // Copy out the Jacobian block for each requested (non-NULL) entry.
    // NOTE(review): only NULL *entries* of |jacobians| are tolerated here, not
    // a NULL |jacobians| array itself -- confirm callers always pass an array.
#define CERES_TAKE_1ST_ORDER_PERTURBATION(i) \
    if (N ## i) { \
      if (jacobians[i]) { \
        internal::Take1stOrderPart<JetT, T, \
                                   jet ## i, \
                                   N ## i>(num_outputs, \
                                           output, \
                                           jacobians[i]); \
      } \
    }
    CERES_TAKE_1ST_ORDER_PERTURBATION(0);
    CERES_TAKE_1ST_ORDER_PERTURBATION(1);
    CERES_TAKE_1ST_ORDER_PERTURBATION(2);
    CERES_TAKE_1ST_ORDER_PERTURBATION(3);
    CERES_TAKE_1ST_ORDER_PERTURBATION(4);
    CERES_TAKE_1ST_ORDER_PERTURBATION(5);
#undef CERES_TAKE_1ST_ORDER_PERTURBATION
    return true;
  }
};
} // namespace internal
} // namespace ceres
#endif // CERES_PUBLIC_INTERNAL_AUTODIFF_H_
| kashif/ceres-solver | include/ceres/internal/autodiff.h | C | bsd-3-clause | 14,451 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ash/base/file_flusher.h"
#include <algorithm>
#include <set>
#include "base/bind.h"
#include "base/files/file.h"
#include "base/files/file_enumerator.h"
#include "base/logging.h"
#include "base/synchronization/atomic_flag.h"
#include "base/task/thread_pool.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
namespace ash {
////////////////////////////////////////////////////////////////////////////////
// FileFlusher::Job
//
// Represents one queued flush request. A Job is self-owning: it deletes
// itself on the UI thread after notifying |flusher_| (see FinishOnUIThread).
class FileFlusher::Job {
 public:
  Job(const base::WeakPtr<FileFlusher>& flusher,
      const base::FilePath& path,
      bool recursive,
      const FileFlusher::OnFlushCallback& on_flush_callback,
      base::OnceClosure callback);
  Job(const Job&) = delete;
  Job& operator=(const Job&) = delete;
  ~Job() = default;

  // Kicks off the flush on the blocking thread pool. Must be called at most
  // once, on the UI thread.
  void Start();

  // Requests cancellation. Safe to call whether or not Start() has run.
  void Cancel();

  const base::FilePath& path() const { return path_; }
  bool started() const { return started_; }

 private:
  // Flush files on a blocking pool thread.
  void FlushAsync();

  // Schedule a FinishOnUIThread task to run on the UI thread.
  void ScheduleFinish();

  // Finish the job by notifying |flusher_| and self destruct on the UI thread.
  void FinishOnUIThread();

  // Owner; may be destroyed before the job finishes, hence the WeakPtr.
  base::WeakPtr<FileFlusher> flusher_;
  const base::FilePath path_;
  const bool recursive_;
  const FileFlusher::OnFlushCallback on_flush_callback_;
  base::OnceClosure callback_;
  // Set once Start() has run; accessed on the UI thread only.
  bool started_ = false;
  // Set by Cancel(); polled on the blocking thread to abort early.
  base::AtomicFlag cancel_flag_;
  // Guards against queueing FinishOnUIThread more than once.
  bool finish_scheduled_ = false;
};
FileFlusher::Job::Job(const base::WeakPtr<FileFlusher>& flusher,
                      const base::FilePath& path,
                      bool recursive,
                      const FileFlusher::OnFlushCallback& on_flush_callback,
                      base::OnceClosure callback)
    : flusher_(flusher),
      path_(path),
      recursive_(recursive),
      on_flush_callback_(on_flush_callback),
      callback_(std::move(callback)) {}

void FileFlusher::Job::Start() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  DCHECK(!started());
  started_ = true;

  // A job cancelled before it starts skips the flush entirely and goes
  // straight to the finish step.
  if (cancel_flag_.IsSet()) {
    ScheduleFinish();
    return;
  }

  // base::Unretained is safe: the job is only deleted in FinishOnUIThread,
  // which runs as the reply after FlushAsync completes.
  base::ThreadPool::PostTaskAndReply(
      FROM_HERE, {base::MayBlock(), base::TaskPriority::BEST_EFFORT},
      base::BindOnce(&FileFlusher::Job::FlushAsync, base::Unretained(this)),
      base::BindOnce(&FileFlusher::Job::FinishOnUIThread,
                     base::Unretained(this)));
}

void FileFlusher::Job::Cancel() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  // The flag is polled by FlushAsync on the blocking thread.
  cancel_flag_.Set();
  // Cancel() could be called in an iterator/range loop in |flusher_| thus don't
  // invoke FinishOnUIThread in-place.
  if (!started())
    ScheduleFinish();
}
void FileFlusher::Job::FlushAsync() {
VLOG(1) << "Flushing files under " << path_.value();
base::FileEnumerator traversal(path_, recursive_,
base::FileEnumerator::FILES);
for (base::FilePath current = traversal.Next();
!current.empty() && !cancel_flag_.IsSet(); current = traversal.Next()) {
base::File currentFile(current,
base::File::FLAG_OPEN | base::File::FLAG_WRITE);
if (!currentFile.IsValid()) {
VLOG(1) << "Unable to flush file:" << current.value();
continue;
}
currentFile.Flush();
currentFile.Close();
if (!on_flush_callback_.is_null())
on_flush_callback_.Run(current);
}
}
// Queues FinishOnUIThread on the UI thread, ensuring it is posted only once
// per job no matter how many times this is called.
void FileFlusher::Job::ScheduleFinish() {
  if (!finish_scheduled_) {
    finish_scheduled_ = true;
    content::GetUIThreadTaskRunner({})->PostTask(
        FROM_HERE,
        base::BindOnce(&Job::FinishOnUIThread, base::Unretained(this)));
  }
}
void FileFlusher::Job::FinishOnUIThread() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  if (!callback_.is_null())
    std::move(callback_).Run();
  // Notify the owner (if still alive) so it can drop this job and schedule
  // the next one, then self-destruct. The job is heap-allocated and owns
  // itself; no member access is valid past the delete.
  if (flusher_)
    flusher_->OnJobDone(this);
  delete this;
}
////////////////////////////////////////////////////////////////////////////////
// FileFlusher
FileFlusher::FileFlusher() = default;

FileFlusher::~FileFlusher() {
  // Jobs self-delete on the UI thread; cancelling here lets any in-flight
  // work bail out early instead of flushing for a dead owner.
  for (auto* job : jobs_)
    job->Cancel();
}
void FileFlusher::RequestFlush(const base::FilePath& path,
                               bool recursive,
                               base::OnceClosure callback) {
  // Cancel any queued job whose path equals |path| or lies underneath it;
  // the new job will cover those files anyway.
  for (auto* job : jobs_) {
    if (path == job->path() || path.IsParent(job->path()))
      job->Cancel();
  }
  // The Job manages its own lifetime (self-deletes via FinishOnUIThread).
  jobs_.push_back(new Job(weak_factory_.GetWeakPtr(), path, recursive,
                          on_flush_callback_for_test_, std::move(callback)));
  ScheduleJob();
}
void FileFlusher::PauseForTest() {
  // Pausing cannot stop a job that has already started, so it is only
  // permitted while the queue is idle.
  DCHECK(std::none_of(jobs_.begin(), jobs_.end(),
                      [](const Job* job) { return job->started(); }));
  paused_for_test_ = true;
}

void FileFlusher::ResumeForTest() {
  paused_for_test_ = false;
  ScheduleJob();
}
void FileFlusher::ScheduleJob() {
if (jobs_.empty() || paused_for_test_)
return;
auto* job = jobs_.front();
if (!job->started())
job->Start();
}
// Removes |job| from the queue and starts the next one. Called by the job
// itself just before it self-deletes; |job| must not be dereferenced here.
void FileFlusher::OnJobDone(FileFlusher::Job* job) {
  // std::find replaces the hand-rolled search loop (<algorithm> is already
  // included by this file).
  auto it = std::find(jobs_.begin(), jobs_.end(), job);
  if (it != jobs_.end())
    jobs_.erase(it);
  ScheduleJob();
}
} // namespace ash
| ric2b/Vivaldi-browser | chromium/chrome/browser/ash/base/file_flusher.cc | C++ | bsd-3-clause | 5,381 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_SERVICES_ASSISTANT_PLATFORM_AUDIO_INPUT_HOST_IMPL_H_
#define CHROMEOS_SERVICES_ASSISTANT_PLATFORM_AUDIO_INPUT_HOST_IMPL_H_
#include "chromeos/services/assistant/platform/audio_input_host.h"
#include <string>
#include "base/component_export.h"
#include "base/memory/weak_ptr.h"
#include "base/scoped_observation.h"
#include "base/time/time.h"
#include "chromeos/dbus/power/power_manager_client.h"
#include "chromeos/services/assistant/platform/audio_devices.h"
#include "chromeos/services/libassistant/public/mojom/audio_input_controller.mojom.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
namespace chromeos {
namespace assistant {
// Class that provides the bridge between the ChromeOS Browser thread and the
// Libassistant audio input mojom service.
class COMPONENT_EXPORT(ASSISTANT_SERVICE) AudioInputHostImpl
    : public AudioInputHost,
      private chromeos::PowerManagerClient::Observer,
      private AudioDevices::Observer {
 public:
  AudioInputHostImpl(
      mojo::PendingRemote<chromeos::libassistant::mojom::AudioInputController>
          pending_remote,
      CrasAudioHandler* cras_audio_handler,
      chromeos::PowerManagerClient* power_manager_client,
      const std::string& locale);
  // Fix: the deleted copy constructor previously took |const AudioInputHost&|
  // (the base type), so it did not actually declare this class's copy
  // constructor as deleted. It must take |const AudioInputHostImpl&|.
  AudioInputHostImpl(const AudioInputHostImpl&) = delete;
  AudioInputHostImpl& operator=(const AudioInputHostImpl&) = delete;
  ~AudioInputHostImpl() override;

  // AudioInputHost implementation:
  void SetMicState(bool mic_open) override;
  void OnHotwordEnabled(bool enable) override;
  void OnConversationTurnStarted() override;

  // AudioDevices::Observer implementation:
  void SetDeviceId(const absl::optional<std::string>& device_id) override;
  void SetHotwordDeviceId(
      const absl::optional<std::string>& device_id) override;

 private:
  // chromeos::PowerManagerClient::Observer overrides:
  void LidEventReceived(chromeos::PowerManagerClient::LidState state,
                        base::TimeTicks timestamp) override;
  void OnInitialLidStateReceived(
      absl::optional<chromeos::PowerManagerClient::SwitchStates> switch_states);

  // Connection to the Libassistant audio input controller.
  mojo::Remote<chromeos::libassistant::mojom::AudioInputController> remote_;
  // Not owned; used to observe lid open/close events.
  chromeos::PowerManagerClient* const power_manager_client_;
  base::ScopedObservation<chromeos::PowerManagerClient,
                          chromeos::PowerManagerClient::Observer>
      power_manager_client_observer_;
  // Observes available audio devices and will set device-id/hotword-device-id
  // accordingly.
  AudioDevices audio_devices_;
  AudioDevices::ScopedObservation audio_devices_observation_{this};
  base::WeakPtrFactory<AudioInputHostImpl> weak_factory_{this};
};
} // namespace assistant
} // namespace chromeos
#endif // CHROMEOS_SERVICES_ASSISTANT_PLATFORM_AUDIO_INPUT_HOST_IMPL_H_
| ric2b/Vivaldi-browser | chromium/chromeos/services/assistant/platform/audio_input_host_impl.h | C | bsd-3-clause | 2,996 |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.password_check;
import android.content.Context;
import android.content.Intent;
import android.view.MenuItem;
/**
 * This component is responsible for handling the UI logic for the password check.
 */
public interface PasswordCheckComponentUi {
    /**
     * A delegate that handles native tasks for the UI component.
     */
    interface Delegate {
        /**
         * Launch the UI allowing the user to edit the given credential.
         *
         * @param credential A {@link CompromisedCredential} to be edited.
         * @param context The context to launch the editing UI from.
         */
        void onEditCredential(CompromisedCredential credential, Context context);

        /**
         * Remove the given credential from the password store.
         * @param credential A {@link CompromisedCredential}.
         */
        void removeCredential(CompromisedCredential credential);
    }

    /**
     * Functional interface to start a Chrome Custom Tab for the given intent, e.g. by using
     * {@link org.chromium.chrome.browser.LaunchIntentDispatcher#createCustomTabActivityIntent}.
     * TODO(crbug.com/1092444): Remove this when the LaunchIntentDispatcher is modularized.
     */
    interface CustomTabIntentHelper {
        /**
         * @see org.chromium.chrome.browser.LaunchIntentDispatcher#createCustomTabActivityIntent
         * @param context The context used to create the Custom Tab intent.
         * @param intent The intent to wrap into a Custom Tab activity intent.
         * @return An intent that launches a Custom Tab activity.
         */
        Intent createCustomTabActivityIntent(Context context, Intent intent);
    }

    /**
     * Functional interface to append trusted extras to the given intent, e.g. by using
     * {@link org.chromium.chrome.browser.IntentUtils.addTrustedIntentExtras(Intent)}.
     * TODO(crbug.com/1092444): Remove this when the IntentHandler is available in a module.
     */
    interface TrustedIntentHelper {
        /** @see org.chromium.chrome.browser.IntentUtils.addTrustedIntentExtras(Intent) */
        void addTrustedIntentExtras(Intent intent);
    }

    /**
     * Handle the request of the user to show the help page for the Check Passwords view.
     * @param item A {@link MenuItem}.
     * @return Whether the help request was handled by this component.
     */
    boolean handleHelp(MenuItem item);

    /**
     * Forwards the signal that the fragment was started.
     */
    void onStartFragment();

    /**
     * Forwards the signal that the fragment is being resumed.
     */
    void onResumeFragment();

    /**
     * Forwards the signal that the fragment is being destroyed.
     */
    void onDestroyFragment();

    /**
     * Tears down the component when it's no longer needed.
     */
    void destroy();
}
| ric2b/Vivaldi-browser | chromium/chrome/browser/password_check/android/java/src/org/chromium/chrome/browser/password_check/PasswordCheckComponentUi.java | Java | bsd-3-clause | 2,731 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.