code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
3
942
language
stringclasses
30 values
license
stringclasses
15 values
size
int32
3
1.05M
package dao import ( "context" "testing" "github.com/smartystreets/goconvey/convey" ) func TestDaoBusinessAllV2(t *testing.T) { convey.Convey("BusinessAllV2", t, func(convCtx convey.C) { var ( c = context.Background() ) convCtx.Convey("When everything goes positive", func(convCtx convey.C) { list, err := d.BusinessAllV2(c) convCtx.Convey("Then err should be nil.list should not be nil.", func(convCtx convey.C) { convCtx.So(err, convey.ShouldBeNil) convCtx.So(list, convey.ShouldNotBeNil) }) }) }) } func TestDaoBusinessInfoV2(t *testing.T) { convey.Convey("BusinessInfoV2", t, func(convCtx convey.C) { var ( c = context.Background() name = "dm" ) convCtx.Convey("When everything goes positive", func(convCtx convey.C) { b, err := d.BusinessInfoV2(c, name) convCtx.Convey("Then err should be nil.b should not be nil.", func(convCtx convey.C) { convCtx.So(err, convey.ShouldBeNil) convCtx.So(b, convey.ShouldNotBeNil) }) }) }) } //func TestDaoBusinessIns(t *testing.T) { // convey.Convey("BusinessIns", t, func(convCtx convey.C) { // var ( // c = context.Background() // pid = int64(0) // name = "" // description = "" // ) // convCtx.Convey("When everything goes positive", func(convCtx convey.C) { // rows, err := d.BusinessIns(c, pid, name, description) // convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) { // convCtx.So(err, convey.ShouldBeNil) // convCtx.So(rows, convey.ShouldNotBeNil) // }) // }) // }) //} //func TestDaoBusinessUpdate(t *testing.T) { // convey.Convey("BusinessUpdate", t, func(convCtx convey.C) { // var ( // c = context.Background() // name = "" // field = "" // value = "" // ) // convCtx.Convey("When everything goes positive", func(convCtx convey.C) { // rows, err := d.BusinessUpdate(c, name, field, value) // convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) { // convCtx.So(err, convey.ShouldBeNil) // convCtx.So(rows, convey.ShouldNotBeNil) // }) // }) // }) //} func TestDaoAssetDBTables(t *testing.T) { convey.Convey("AssetDBTables", t, func(convCtx convey.C) { var ( c = context.Background() ) convCtx.Convey("When everything goes positive", func(convCtx convey.C) { list, err := d.AssetDBTables(c) convCtx.Convey("Then err should be nil.list should not be nil.", func(convCtx convey.C) { convCtx.So(err, convey.ShouldBeNil) convCtx.So(list, convey.ShouldNotBeNil) }) }) }) } // //func TestDaoAssetDBIns(t *testing.T) { // convey.Convey("AssetDBIns", t, func(convCtx convey.C) { // var ( // c = context.Background() // name = "" // description = "" // dsn = "" // ) // convCtx.Convey("When everything goes positive", func(convCtx convey.C) { // rows, err := d.AssetDBIns(c, name, description, dsn) // convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) { // convCtx.So(err, convey.ShouldBeNil) // convCtx.So(rows, convey.ShouldNotBeNil) // }) // }) // }) //} //func TestDaoAssetTableIns(t *testing.T) { // convey.Convey("AssetTableIns", t, func(convCtx convey.C) { // var ( // c = context.Background() // name = "" // db = "" // regex = "" // fields = "" // description = "" // ) // convCtx.Convey("When everything goes positive", func(convCtx convey.C) { // rows, err := d.AssetTableIns(c, name, db, regex, fields, description) // convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) { // convCtx.So(err, convey.ShouldBeNil) // convCtx.So(rows, convey.ShouldNotBeNil) // }) // }) // }) //} func TestDaoAsset(t *testing.T) { convey.Convey("Asset", t, func(convCtx convey.C) { var ( c = context.Background() name = "bilibili_article" ) convCtx.Convey("When everything goes positive", func(convCtx convey.C) { r, err := d.Asset(c, name) convCtx.Convey("Then err should be nil.r should not be nil.", func(convCtx convey.C) { convCtx.So(err, convey.ShouldBeNil) convCtx.So(r, convey.ShouldNotBeNil) }) }) }) }
LQJJ/demo
126-go-common-master/app/admin/main/search/dao/mng_v2_test.go
GO
apache-2.0
4,229
using UnityEngine; namespace DefaultNamespace { public class FieldGenerationWithRespectToCodeStyleTest : MonoBehaviour { public void Update() { int[,] test = new int[2,2]; test[0, 0] = 5; test[test[0,{caret} 1], test[0, test[0,1]]] = 5; } } }
JetBrains/resharper-unity
resharper/resharper-unity/test/data/Unity/CSharp/Intentions/QuickFixes/InefficientMultidimensionalArrayUsage/LocalDeclarationType2.cs
C#
apache-2.0
316
/* * Copyright 2013 JCertifLab. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jcertif.android.fragments; import android.app.Activity; import android.content.Intent; import android.content.res.Configuration; import android.net.Uri; import android.os.Bundle; import android.provider.CalendarContract; import android.provider.CalendarContract.Events; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ListView; import android.widget.Toast; import com.actionbarsherlock.view.ActionMode; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuInflater; import com.actionbarsherlock.view.MenuItem; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.jcertif.android.JcertifApplication; import com.jcertif.android.MainActivity; import com.jcertif.android.R; import com.jcertif.android.adapters.SessionAdapter; import com.jcertif.android.adapters.SpeedScrollListener; import com.jcertif.android.dao.SessionProvider; import com.jcertif.android.dao.SpeakerProvider; import com.jcertif.android.model.Session; import com.jcertif.android.model.Speaker; import com.jcertif.android.service.RESTService; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.GregorianCalendar; import java.util.List; import uk.co.senab.actionbarpulltorefresh.extras.actionbarsherlock.PullToRefreshAttacher; /** * * @author Patrick Bashizi * */ public class SessionListFragment extends RESTResponderFragment implements PullToRefreshAttacher.OnRefreshListener{ public static final String SESSIONS_LIST_URI = JcertifApplication.BASE_URL + "/session/list"; public static final String CATEGORY_LIST_URI = JcertifApplication.BASE_URL + "/ref/category/list"; private static String TAG = SessionListFragment.class.getName(); private List<Session> mSessions = new ArrayList<Session>();; private ListView mLvSessions; private SessionAdapter mAdapter; private SessionProvider mProvider; private SpeedScrollListener mListener; private ActionMode mActionMode; private Session mSelectedSession; private PullToRefreshAttacher mPullToRefreshAttacher ; public SessionListFragment() { // Empty constructor required for fragment subclasses } public interface OnSessionUpdatedListener { void onSessionUpdated(Session session); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // setRetainInstance(true); View rootView = inflater.inflate(R.layout.fragment_session, container, false); mLvSessions = (ListView) rootView.findViewById(R.id.lv_session); String session = getResources().getStringArray(R.array.menu_array)[0]; setHasOptionsMenu(true); getActivity().setTitle(session); mLvSessions = (ListView) rootView.findViewById(R.id.lv_session); mPullToRefreshAttacher=((MainActivity)getSherlockActivity()).getmPullToRefreshAttacher(); mPullToRefreshAttacher.addRefreshableView(mLvSessions, this); mLvSessions.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int pos, long position) { mAdapter.setSelectedIndex(pos); mSelectedSession = ((Session) parent .getItemAtPosition((int) position)); updateSession(mSelectedSession); } }); mLvSessions .setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() { @Override public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int pos, long arg3) { if (mActionMode != null) { return false; } mActionMode = getSherlockActivity().startActionMode( mActionModeCallback); mSelectedSession = ((Session) arg0 .getItemAtPosition((int) pos)); mAdapter.setSelectedIndex(pos); return true; } }); return rootView; } private ActionMode.Callback mActionModeCallback = new ActionMode.Callback() { @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { MenuInflater inflater = mode.getMenuInflater(); inflater.inflate(R.menu.context_menu_session, menu); return true; } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { return false; } @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { switch (item.getItemId()) { case R.id.menu_share: shareSessionItem(); mode.finish(); // Action picked, so close the CAB break; case R.id.menu_add_to_schedule: addSessionItemToSchedule(); mode.finish(); // Action picked, so close the CAB break; default: return false; } return true; } public void onDestroyActionMode(ActionMode mode) { mActionMode = null; } }; private void addSessionItemToSchedule() { if (android.os.Build.VERSION.SDK_INT >= 14){ Intent intent = new Intent(Intent.ACTION_INSERT); intent.setType("vnd.android.cursor.item/event"); intent.putExtra(Events.TITLE, mSelectedSession.getTitle()); intent.putExtra(Events.EVENT_LOCATION,"Room"+ mSelectedSession.getSalle()); intent.putExtra(Events.DESCRIPTION, mSelectedSession.getDescription()); Date evStartDate= mSelectedSession.getStart(); Date evEndDate= mSelectedSession.getStart(); // Setting dates GregorianCalendar startcalDate = new GregorianCalendar(); startcalDate.setTime(evStartDate); // Setting dates GregorianCalendar endCalDate = new GregorianCalendar(); endCalDate.setTime(evEndDate); intent.putExtra(CalendarContract.EXTRA_EVENT_BEGIN_TIME,startcalDate.getTimeInMillis()); intent.putExtra(CalendarContract.EXTRA_EVENT_END_TIME,endCalDate.getTimeInMillis()); // Make it a full day event intent.putExtra(CalendarContract.EXTRA_EVENT_ALL_DAY, true); // Make it a recurring Event // intent.putExtra(Events.RRULE, "WKST=SU"); // Making it private and shown as busy intent.putExtra(Events.ACCESS_LEVEL, Events.ACCESS_PRIVATE); intent.putExtra(Events.AVAILABILITY, Events.AVAILABILITY_BUSY); //intent.putExtra(Events.DISPLAY_COLOR, Events.EVENT_COLOR); startActivity(intent); }else{ Toast.makeText(this.getSherlockActivity(), "Not supported for your device :(", Toast.LENGTH_SHORT).show(); } } private void shareSessionItem() { Speaker sp = new SpeakerProvider(this.getSherlockActivity()) .getByEmail(mSelectedSession.getSpeakers()[0]); Intent intent = new Intent(android.content.Intent.ACTION_SEND); intent.setType("text/plain"); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.putExtra(Intent.EXTRA_SUBJECT, "Share Session"); intent.putExtra( Intent.EXTRA_TEXT, "Checking out this #Jcertif2013 session : " + mSelectedSession.getTitle() + " by " + sp.getFirstname() + " " + sp.getLastname()); startActivity(intent); } protected void updateSession(Session s) { if(onTablet()){ ((OnSessionUpdatedListener) getParentFragment()).onSessionUpdated(s); }else{ Intent intent = new Intent(this.getActivity().getApplicationContext(), SessionDetailFragmentActivity.class); String sessionJson= new Gson().toJson(s); intent.putExtra("session",sessionJson); startActivity(intent); getSherlockActivity().overridePendingTransition ( 0 , R.anim.slide_up_left); } } public SessionProvider getProvider() { if (mProvider == null) mProvider = new SessionProvider(this.getSherlockActivity()); return mProvider; } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // This gets called each time our Activity has finished creating itself. // First check the local cache, if it's empty data will be fetched from // web mSessions = loadSessionsFromCache(); setSessions(); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); } /** * We cache our stored session here so that we can return right away on * multiple calls to setSession() during the Activity lifecycle events (such * as when the user rotates their device). */ private void setSessions() { MainActivity activity = (MainActivity) getActivity(); setLoading(true); if (mSessions.isEmpty() && activity != null) { // This is where we make our REST call to the service. We also pass // in our ResultReceiver // defined in the RESTResponderFragment super class. // We will explicitly call our Service since we probably want to // keep it as a private component in our app. Intent intent = new Intent(activity, RESTService.class); intent.setData(Uri.parse(SESSIONS_LIST_URI)); // Here we are going to place our REST call parameters. Bundle params = new Bundle(); params.putString(RESTService.KEY_JSON_PLAYLOAD, null); intent.putExtra(RESTService.EXTRA_PARAMS, params); intent.putExtra(RESTService.EXTRA_RESULT_RECEIVER,getResultReceiver()); // Here we send our Intent to our RESTService. activity.startService(intent); } else if (activity != null) { // Here we check to see if our activity is null or not. // We only want to update our views if our activity exists. // Load our list adapter with our session. updateList(); setLoading(false); } } void updateList() { mListener = new SpeedScrollListener(); mLvSessions.setOnScrollListener(mListener); mAdapter = new SessionAdapter(this.getActivity(), mListener, mSessions); mLvSessions.setAdapter(mAdapter); if(refreshing){ refreshing=false; mPullToRefreshAttacher.setRefreshComplete(); } } private boolean onTablet() { return ((getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE); } public void updateList(String cat) { if (cat.equals("All") || cat.equals("Tous")) { mSessions = loadSessionsFromCache(); } else { mSessions = getProvider().getSessionsByCategory(cat); } updateList(); } @Override public void onRESTResult(int code, Bundle resultData) { // Here is where we handle our REST response. // Check to see if we got an HTTP 200 code and have some data. String result = null; if (resultData != null) { result = resultData.getString(RESTService.REST_RESULT); } else { return; } if (code == 200 && result != null) { mSessions = parseSessionJson(result); Log.d(TAG, result); setSessions(); saveToCache(mSessions); } else { Activity activity = getActivity(); if (activity != null) { Toast.makeText( activity, "Failed to load Session data. Check your internet settings.", Toast.LENGTH_SHORT).show(); } } setLoading(false); } private List<Session> parseSessionJson(String result) { Gson gson = new GsonBuilder().setDateFormat("dd/MM/yyyy hh:mm") .create(); Session[] sessions = gson.fromJson(result, Session[].class); return Arrays.asList(sessions); } protected void saveToCache(final List<Session> sessions) { new Thread(new Runnable() { @Override public void run() { for (Session session : sessions) mProvider.store(session); } }).start(); } private List<Session> loadSessionsFromCache() { List<Session> list = getProvider().getAll(Session.class); return list; } @Override public void onPause() { super.onDestroy(); } @Override public void onDestroy() { super.onDestroy(); } @Override public void onRefreshStarted(View view) { mProvider.deleteAll(Session.class); //mLvSessions.setAdapter(null); mSessions = loadSessionsFromCache(); setSessions(); refreshing=true; } }
JCERTIFLab/jcertif-android-2013
src/main/java/com/jcertif/android/fragments/SessionListFragment.java
Java
apache-2.0
12,340
/** * Copyright 2014 Jordan Zimmerman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.soabase.core.features.attributes; import io.soabase.core.listening.Listenable; import java.util.Collection; /** * Gives access to dynamic attributes. The various get methods return * the current value for the given key after applying overrides and scopes, etc. * Always call the methods to get the current value as it may change during runtime. */ public interface DynamicAttributes { public String getAttribute(String key); public String getAttribute(String key, String defaultValue); public boolean getAttributeBoolean(String key); public boolean getAttributeBoolean(String key, boolean defaultValue); public int getAttributeInt(String key); public int getAttributeInt(String key, int defaultValue); public long getAttributeLong(String key); public long getAttributeLong(String key, long defaultValue); public double getAttributeDouble(String key); public double getAttributeDouble(String key, double defaultValue); public void temporaryOverride(String key, boolean value); public void temporaryOverride(String key, int value); public void temporaryOverride(String key, long value); public void temporaryOverride(String key, double value); public void temporaryOverride(String key, String value); public boolean removeOverride(String key); public Collection<String> getKeys(); public Listenable<DynamicAttributeListener> getListenable(); }
soabase/soabase
soabase-core/src/main/java/io/soabase/core/features/attributes/DynamicAttributes.java
Java
apache-2.0
2,045
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { AfterContentInit, ChangeDetectionStrategy, Component, ElementRef, EventEmitter, Input, OnInit, Output, ViewChild } from '@angular/core'; import * as d3 from 'd3'; import { select, Selection } from 'd3-selection'; import { zoom, ZoomBehavior } from 'd3-zoom'; import { SafeAny } from 'interfaces'; @Component({ selector: 'flink-svg-container', templateUrl: './svg-container.component.html', styleUrls: ['./svg-container.component.less'], changeDetection: ChangeDetectionStrategy.OnPush }) export class SvgContainerComponent implements OnInit, AfterContentInit { zoom = 1; width: number; height: number; transform = 'translate(0, 0) scale(1)'; containerTransform = { x: 0, y: 0, k: 1 }; svgSelect: Selection<SafeAny, SafeAny, SafeAny, SafeAny>; zoomController: ZoomBehavior<SafeAny, SafeAny>; @ViewChild('svgContainer', { static: true }) svgContainer: ElementRef<SVGAElement>; @ViewChild('svgInner', { static: true }) svgInner: ElementRef<SVGAElement>; @Input() nzMaxZoom = 5; @Input() nzMinZoom = 0.1; @Output() clickBgEvent: EventEmitter<MouseEvent> = new EventEmitter(); @Output() zoomEvent: EventEmitter<number> = new EventEmitter(); @Output() transformEvent: EventEmitter<{ x: number; y: number; scale: number }> = new EventEmitter(); /** * Zoom to spec level * * @param zoomLevel */ zoomTo(zoomLevel: number): void { this.svgSelect .transition() .duration(0) .call(this.zoomController.scaleTo, zoomLevel); } /** * Set transform position * * @param transform * @param animate */ setPositionByTransform(transform: { x: number; y: number; k: number }, animate = false): void { this.svgSelect .transition() .duration(animate ? 500 : 0) .call(this.zoomController.transform, transform); } constructor(private el: ElementRef) {} ngOnInit(): void { this.svgSelect = select(this.svgContainer.nativeElement); this.zoomController = zoom() .scaleExtent([this.nzMinZoom, this.nzMaxZoom]) .on('zoom', () => { this.containerTransform = d3.event.transform; this.zoom = this.containerTransform.k; if (!isNaN(this.containerTransform.x)) { this.transform = `translate(${this.containerTransform.x} ,${this.containerTransform.y})scale(${this.containerTransform.k})`; } this.zoomEvent.emit(this.zoom); this.transformEvent.emit(this.containerTransform as SafeAny); }); this.svgSelect.call(this.zoomController).on('wheel.zoom', null); } ngAfterContentInit(): void { const hostElem = this.el.nativeElement; if (hostElem.parentNode !== null) { const dims = hostElem.parentNode.getBoundingClientRect(); this.width = dims.width; this.height = dims.height; this.zoomTo(this.zoom); } } }
StephanEwen/incubator-flink
flink-runtime-web/web-dashboard/src/app/share/common/dagre/svg-container.component.ts
TypeScript
apache-2.0
3,657
/* * Copyright © 2009 HotPads ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.datarouter.instrumentation.trace; import java.time.Instant; import java.util.Objects; import java.util.Optional; import java.util.Random; import java.util.regex.Pattern; public class Traceparent{ private static final Pattern TRACEPARENT_PATTERN = Pattern.compile( "^[0-9a-f]{2}-[0-9a-f]{32}-[0-9a-f]{16}-[0-9a-f]{2}$"); private static final String TRACEPARENT_DELIMITER = "-"; private static final Integer MIN_CHARS_TRACEPARENT = 55; private static final String CURRENT_VERSION = "00"; public static final int TRACE_ID_HEX_SIZE = 32; public static final int PARENT_ID_HEX_SIZE = 16; public final String version = CURRENT_VERSION; public final String traceId; public final String parentId; private String traceFlags; public Traceparent(String traceId, String parentId, String traceFlags){ this.traceId = traceId; this.parentId = parentId; this.traceFlags = traceFlags; } public Traceparent(String traceId){ this(traceId, createNewParentId()); } public Traceparent(String traceId, String parentId){ this(traceId, parentId, createDefaultTraceFlag()); } public static Traceparent generateNew(long createdTimestamp){ return new Traceparent(createNewTraceId(createdTimestamp), createNewParentId(), createDefaultTraceFlag()); } public static Traceparent generateNewWithCurrentTimeInNs(){ return new Traceparent(createNewTraceId(Trace2Dto.getCurrentTimeInNs()), createNewParentId(), createDefaultTraceFlag()); } public Traceparent updateParentId(){ return new Traceparent(traceId, createNewParentId(), traceFlags); } /* * TraceId is a 32 hex digit String. We convert the root request created unix time into lowercase base16 * and append it with a randomly generated long lowercase base16 representation. * */ private static String createNewTraceId(long createdTimestamp){ return String.format("%016x", createdTimestamp) + String.format("%016x", new Random().nextLong()); } /* * ParentId is a 16 hex digit String. We use a randomly generated long and convert it into lowercase base16 * representation. * */ public static String createNewParentId(){ return String.format("%016x", new Random().nextLong()); } public long getTimestampInMs(){ return Long.parseLong(traceId.substring(0, 16), 16); } public Instant getInstant(){ return Instant.ofEpochMilli(getTimestampInMs()); } /*----------- trace flags ------------*/ private static String createDefaultTraceFlag(){ return TraceContextFlagMask.DEFAULT.toHexCode(); } public void enableSample(){ this.traceFlags = TraceContextFlagMask.enableTrace(traceFlags); } public void enableLog(){ this.traceFlags = TraceContextFlagMask.enableLog(traceFlags); } public boolean shouldSample(){ return TraceContextFlagMask.isTraceEnabled(traceFlags); } public boolean shouldLog(){ return TraceContextFlagMask.isLogEnabled(traceFlags); } @Override public String toString(){ return String.join(TRACEPARENT_DELIMITER, version, traceId, parentId, traceFlags); } @Override public boolean equals(Object obj){ if(!(obj instanceof Traceparent)){ return false; } Traceparent other = (Traceparent)obj; return Objects.equals(version, other.version) && Objects.equals(traceId, other.traceId) && Objects.equals(parentId, other.parentId) && Objects.equals(traceFlags, other.traceFlags); } @Override public int hashCode(){ return Objects.hash(version, traceId, parentId, traceFlags); } public static Optional<Traceparent> parse(String traceparentStr){ if(traceparentStr == null || traceparentStr.isEmpty()){ return Optional.empty(); }else if(traceparentStr.length() < MIN_CHARS_TRACEPARENT){ return Optional.empty(); }else if(!TRACEPARENT_PATTERN.matcher(traceparentStr).matches()){ return Optional.empty(); } String[] tokens = traceparentStr.split(Traceparent.TRACEPARENT_DELIMITER); if(!Traceparent.CURRENT_VERSION.equals(tokens[0])){ return Optional.empty(); } return Optional.of(new Traceparent(tokens[1], tokens[2], tokens[3])); } }
hotpads/datarouter
datarouter-instrumentation/src/main/java/io/datarouter/instrumentation/trace/Traceparent.java
Java
apache-2.0
4,663
package com.bzu.yhd.pocketcampus.bottomnav.user.view; import android.content.Context; import android.util.AttributeSet; import com.facebook.rebound.SimpleSpringListener; import com.facebook.rebound.Spring; import com.facebook.rebound.SpringSystem; import de.hdodenhof.circleimageview.CircleImageView; /** * Created by xmuSistone. */ public class AnimateImageView extends CircleImageView { private Spring springX, springY; private SimpleSpringListener followerListenerX, followerListenerY; // 此为跟踪的回调,当前面一个view移动的时候,此为后面的view,需要更新endValue public AnimateImageView(Context context) { this(context, null); } public AnimateImageView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public AnimateImageView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); SpringSystem mSpringSystem = SpringSystem.create(); springX = mSpringSystem.createSpring(); springY = mSpringSystem.createSpring(); springX.addListener(new SimpleSpringListener() { @Override public void onSpringUpdate(Spring spring) { int xPos = (int) spring.getCurrentValue(); setScreenX(xPos); } }); springY.addListener(new SimpleSpringListener() { @Override public void onSpringUpdate(Spring spring) { int yPos = (int) spring.getCurrentValue(); setScreenY(yPos); } }); followerListenerX = new SimpleSpringListener() { @Override public void onSpringUpdate(Spring spring) { int xPos = (int) spring.getCurrentValue(); springX.setEndValue(xPos); } }; followerListenerY = new SimpleSpringListener() { @Override public void onSpringUpdate(Spring spring) { int yPos = (int) spring.getCurrentValue(); springY.setEndValue(yPos); } }; } private void setScreenX(int screenX) { this.offsetLeftAndRight(screenX - getLeft()); } private void setScreenY(int screenY) { this.offsetTopAndBottom(screenY - getTop()); } public void animTo(int xPos, int yPos) { springX.setEndValue(xPos); springY.setEndValue(yPos); } /** * 顶部ImageView强行停止动画 */ public void stopAnimation() { springX.setAtRest(); springY.setAtRest(); } /** * 只为最顶部的view调用,触点松开后,回归原点 */ public void onRelease(int xPos, int yPos) { setCurrentSpringPos(getLeft(), getTop()); animTo(xPos, yPos); } /** * 设置当前spring位置 */ public void setCurrentSpringPos(int xPos, int yPos) { springX.setCurrentValue(xPos); springY.setCurrentValue(yPos); } public Spring getSpringX() { return springX; } public Spring getSpringY() { return springY; } public SimpleSpringListener getFollowerListenerX() { return followerListenerX; } public SimpleSpringListener getFollowerListenerY() { return followerListenerY; } }
GolvenH/PocketCampus
app/src/main/java/com/bzu/yhd/pocketcampus/bottomnav/user/view/AnimateImageView.java
Java
apache-2.0
3,350
import Vue from 'vue' import { hasFetch, normalizeError, addLifecycleHook } from '../utils' const isSsrHydration = (vm) => vm.$vnode && vm.$vnode.elm && vm.$vnode.elm.dataset && vm.$vnode.elm.dataset.fetchKey const nuxtState = window.<%= globals.context %> export default { beforeCreate () { if (!hasFetch(this)) { return } this._fetchDelay = typeof this.$options.fetchDelay === 'number' ? this.$options.fetchDelay : 200 Vue.util.defineReactive(this, '$fetchState', { pending: false, error: null, timestamp: Date.now() }) this.$fetch = $fetch.bind(this) addLifecycleHook(this, 'created', created) addLifecycleHook(this, 'beforeMount', beforeMount) } } function beforeMount() { if (!this._hydrated) { return this.$fetch() } } function created() { if (!isSsrHydration(this)) { return } // Hydrate component this._hydrated = true this._fetchKey = +this.$vnode.elm.dataset.fetchKey const data = nuxtState.fetch[this._fetchKey] // If fetch error if (data && data._error) { this.$fetchState.error = data._error return } // Merge data for (const key in data) { Vue.set(this.$data, key, data[key]) } } async function $fetch() { this.$nuxt.nbFetching++ this.$fetchState.pending = true this.$fetchState.error = null this._hydrated = false let error = null const startTime = Date.now() try { await this.$options.fetch.call(this) } catch (err) { error = normalizeError(err) } const delayLeft = this._fetchDelay - (Date.now() - startTime) if (delayLeft > 0) { await new Promise(resolve => setTimeout(resolve, delayLeft)) } this.$fetchState.error = error this.$fetchState.pending = false this.$fetchState.timestamp = Date.now() this.$nextTick(() => this.$nuxt.nbFetching--) }
BigBoss424/portfolio
v6/node_modules/@nuxt/vue-app-edge/template/mixins/fetch.client.js
JavaScript
apache-2.0
1,830
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.simplesystemsmanagement.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import static com.amazonaws.util.StringUtils.COMMA_SEPARATOR; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.simplesystemsmanagement.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.json.*; /** * DeleteAssociationRequest Marshaller */ public class DeleteAssociationRequestMarshaller implements Marshaller<Request<DeleteAssociationRequest>, DeleteAssociationRequest> { public Request<DeleteAssociationRequest> marshall( DeleteAssociationRequest deleteAssociationRequest) { if (deleteAssociationRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<DeleteAssociationRequest> request = new DefaultRequest<DeleteAssociationRequest>( deleteAssociationRequest, "AWSSimpleSystemsManagement"); request.addHeader("X-Amz-Target", "AmazonSSM.DeleteAssociation"); request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { StringWriter stringWriter = new StringWriter(); JSONWriter jsonWriter = new JSONWriter(stringWriter); jsonWriter.object(); if (deleteAssociationRequest.getName() != null) { jsonWriter.key("Name") .value(deleteAssociationRequest.getName()); } if (deleteAssociationRequest.getInstanceId() != null) { jsonWriter.key("InstanceId").value( deleteAssociationRequest.getInstanceId()); } jsonWriter.endObject(); String snippet = stringWriter.toString(); byte[] content = snippet.getBytes(UTF8); request.setContent(new StringInputStream(snippet)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", "application/x-amz-json-1.1"); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
trasa/aws-sdk-java
aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/transform/DeleteAssociationRequestMarshaller.java
Java
apache-2.0
3,452
# AUTOGENERATED FILE FROM balenalib/zc702-zynq7-ubuntu:xenial-run ENV NODE_VERSION 14.18.3 ENV YARN_VERSION 1.22.4 RUN buildDeps='curl libatomic1' \ && set -x \ && for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \ done \ && apt-get update && apt-get install -y $buildDeps --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && echo "6f19aa4d9c1b1706d44742218c8a7742d3fa62033d953156095bdde09f8375e5 node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@node" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu xenial \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.18.3, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
resin-io-library/base-images
balena-base-images/node/zc702-zynq7/ubuntu/xenial/14.18.3/run/Dockerfile
Dockerfile
apache-2.0
2,913
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #ifndef KUDU_UTIL_FLAGS_H #define KUDU_UTIL_FLAGS_H #include <cstdint> #include <string> #include <unordered_map> #include "kudu/util/status.h" namespace google { struct CommandLineFlagInfo; } namespace kudu { // The umask of the process, set based on the --umask flag during // HandleCommonFlags(). extern uint32_t g_parsed_umask; // Looks for flags in argv and parses them. Rearranges argv to put // flags first, or removes them entirely if remove_flags is true. // If a flag is defined more than once in the command line or flag // file, the last definition is used. Returns the index (into argv) // of the first non-flag argument. // // This is a wrapper around google::ParseCommandLineFlags, but integrates // with Kudu flag tags. For example, --helpxml will include the list of // tags for each flag. This should be be used instead of // google::ParseCommandLineFlags in any user-facing binary. // // See gflags.h for more information. int ParseCommandLineFlags(int* argc, char*** argv, bool remove_flags); // Handle common flags such as -version, -disable_core_dumps, etc. // This includes the GFlags common flags such as "-help". // // Requires that flags have already been parsed using // google::ParseCommandLineNonHelpFlags(). void HandleCommonFlags(); // Verifies that the flags are allowed to be set and valid. // Should be called after logging is initialized. Otherwise // logging will write to stderr. void ValidateFlags(); enum class EscapeMode { HTML, NONE }; // Stick the flags into a string. If redaction is enabled, the values of // flags tagged as sensitive will be redacted. Otherwise, the values // will be written to the string as-is. The values will be HTML escaped // if EscapeMode is HTML. std::string CommandlineFlagsIntoString(EscapeMode mode); typedef std::unordered_map<std::string, google::CommandLineFlagInfo> GFlagsMap; // Get all the flags different from their defaults. The output is a nicely // formatted string with --flag=value pairs per line. Redact any flags that // are tagged as sensitive, if redaction is enabled. std::string GetNonDefaultFlags(); GFlagsMap GetFlagsMap(); enum class TriStateFlag { DISABLED, OPTIONAL, REQUIRED, }; Status ParseTriState(const char* flag_name, const std::string& flag_value, TriStateFlag* tri_state); std::string CheckFlagAndRedact(const google::CommandLineFlagInfo& flag, EscapeMode mode); } // namespace kudu #endif /* KUDU_UTIL_FLAGS_H */
helifu/kudu
src/kudu/util/flags.h
C
apache-2.0
3,254
# Encoding: utf-8 # # Author:: [email protected] (Danial Klimkin) # # Copyright:: Copyright 2012, Google Inc. All Rights Reserved. # # License:: Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # This class extracts data received from Savon and enriches it. module AdsCommon class ResultsExtractor # Instance initializer. # # Args: # - registry: a registry that defines service # def initialize(registry) @registry = registry end # Extracts the finest results possible for the given result. Returns the # response itself in worst case (contents unknown). def extract_result(response, action_name) method = @registry.get_method_signature(action_name) action = method[:output][:name].to_sym result = response.to_hash result = result[action] if result.include?(action) result = normalize_output(result, method) return result[:rval] || result end # Extracts misc data from response header. def extract_header_data(response) header_type = get_full_type_signature(:SoapResponseHeader) headers = response.header[:response_header].dup process_attributes(headers, false) headers = normalize_fields(headers, header_type[:fields]) return headers end # Extracts misc data from SOAP fault. def extract_exception_data(soap_fault, exception_name) exception_type = get_full_type_signature(exception_name) process_attributes(soap_fault, false) soap_fault = normalize_fields(soap_fault, exception_type[:fields]) return soap_fault end private # Normalizes output starting with root output node. def normalize_output(output_data, method_definition) fields = method_definition[:output][:fields] result = normalize_fields(output_data, fields) end # Normalizes all fields for the given data based on the fields list # provided. def normalize_fields(data, fields) fields.each do |field| field_name = field[:name] if data.include?(field_name) field_data = data[field_name] field_data = normalize_output_field(field_data, field) field_data = check_array_collapse(field_data, field) data[field_name] = field_data unless field_data.nil? end end return data end # Normalizes one field of a given data recursively. # # Args: # - field_data: XML data to normalize # - field_def: field type definition for the data # def normalize_output_field(field_data, field_def) return case field_data when Array normalize_array_field(field_data, field_def) when Hash normalize_hash_field(field_data, field_def) else normalize_item(field_data, field_def) end end # Normalizes every item of an Array. def normalize_array_field(data, field_def) return data.map {|item| normalize_output_field(item, field_def)} end # Normalizes every item of a Hash. def normalize_hash_field(field, field_def) process_attributes(field, true) field_type = field_def[:type] field_def = get_full_type_signature(field_type) # First checking for xsi:type provided. xsi_type_override = determine_xsi_type_override(field, field_def) unless xsi_type_override.nil? field_def = get_full_type_signature(xsi_type_override) return (field_def.nil?) ? field : normalize_fields(field, field_def[:fields]) end # Now checking for choice options from wsdl. choice_type_override = determine_choice_type_override(field, field_def) unless choice_type_override.nil? # For overrides we need to process sub-field and than return it # in the original structure. field_key = field.keys.first field_data = field[field_key] field_def = get_full_type_signature(choice_type_override) if !field_def.nil? and field_data.kind_of?(Hash) field_data = normalize_fields(field_data, field_def[:fields]) end return {field_key => field_data} end # Otherwise using the best we have. field = normalize_fields(field, field_def[:fields]) unless field_def.nil? return field end # Determines an xsi:type override for for the field. Returns nil if no # override found. def determine_xsi_type_override(field_data, field_def) result = nil if field_data.kind_of?(Hash) and field_data.include?(:xsi_type) result = field_data[:xsi_type] end return result end # Determines a choice type override for for the field. Returns nil if no # override found. def determine_choice_type_override(field_data, field_def) result = nil if field_data.kind_of?(Hash) and field_def.include?(:choices) result = determine_choice(field_data, field_def[:choices]) end return result end # Finds the choice option matching data provided. def determine_choice(field_data, field_choices) result = nil key_name = field_data.keys.first unless key_name.nil? choice = find_named_entry(field_choices, key_name) result = choice[:type] unless choice.nil? end return result end # Finds an item in an Array based on its ':name' field. def find_named_entry(data_array, name) index = data_array.index {|item| name.eql?(item[:name])} return index.nil? ? nil : data_array[index] end # Converts one leaf item to a built-in type. def normalize_item(item, field_def) return case field_def[:type] when 'long', 'int' then Integer(item) when 'double', 'float' then Float(item) when 'boolean' then item.kind_of?(String) ? item.casecmp('true') == 0 : item else item end end # Checks if the field signature allows an array and forces array structure # even for a signle item. def check_array_collapse(data, field_def) result = data if !field_def[:min_occurs].nil? and (field_def[:max_occurs] == :unbounded || (!field_def[:max_occurs].nil? and field_def[:max_occurs] > 1)) result = arrayize(result) end return result end # Makes sure object is an array. def arrayize(object) return [] if object.nil? return object.is_a?(Array) ? object : [object] end # Returns all inherited fields of superclasses for given type. def implode_parent(data_type) result = [] if data_type[:base] parent_type = @registry.get_type_signature(data_type[:base]) result += implode_parent(parent_type) unless parent_type.nil? end data_type[:fields].each do |field| # If the parent type includes a field with the same name, overwrite it. result.reject! {|parent_field| parent_field[:name].eql?(field[:name])} result << field end return result end # Returns type signature with all inherited fields. def get_full_type_signature(type_name) result = (type_name.nil?) ? nil : @registry.get_type_signature(type_name) result[:fields] = implode_parent(result) if result and result[:base] return result end # Handles attributes received from Savon. def process_attributes(data, keep_xsi_type = false) if keep_xsi_type xsi_type = data.delete(:"@xsi:type") data[:xsi_type] = xsi_type if xsi_type end data.reject! {|key, value| key.to_s.start_with?('@')} end end end
voke/google-ads-common
lib/ads_common/results_extractor.rb
Ruby
apache-2.0
8,192
# ----------------------------------------------------------------------------- # Copyright * 2014, United States Government, as represented by the # Administrator of the National Aeronautics and Space Administration. All # rights reserved. # # The Crisis Mapping Toolkit (CMT) v1 platform is licensed under the Apache # License, Version 2.0 (the "License"); you may not use this file except in # compliance with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # ----------------------------------------------------------------------------- import ee import math from cmt.mapclient_qt import addToMap from cmt.util.miscUtilities import safe_get_info import modis_utilities ''' Contains implementations of several simple MODIS-based flood detection algorithms. ''' #============================================================== def dem_threshold(domain, b): '''Just use a height threshold on the DEM!''' heightLevel = float(domain.algorithm_params['dem_threshold']) dem = domain.get_dem().image return dem.lt(heightLevel).select(['elevation'], ['b1']) #============================================================== def evi(domain, b): '''Simple EVI based classifier''' #no_clouds = b['b3'].lte(2100).select(['sur_refl_b03'], ['b1']) criteria1 = b['EVI'].lte(0.3).And(b['LSWI'].subtract(b['EVI']).gte(0.05)).select(['sur_refl_b02'], ['b1']) criteria2 = b['EVI'].lte(0.05).And(b['LSWI'].lte(0.0)).select(['sur_refl_b02'], ['b1']) #return no_clouds.And(criteria1.Or(criteria2)) return criteria1.Or(criteria2) def xiao(domain, b): '''Method from paper: Xiao, Boles, Frolking, et. al. Mapping paddy rice agriculture in South and Southeast Asia using multi-temporal MODIS images, Remote Sensing of Environment, 2006. This method implements a very simple decision tree from several standard MODIS data products. The default constants were tuned for (wet) rice paddy detection. ''' return b['LSWI'].subtract(b['NDVI']).gte(0.05).Or(b['LSWI'].subtract(b['EVI']).gte(0.05)).select(['sur_refl_b02'], ['b1']); #============================================================== def get_diff(b): '''Just the internals of the difference method''' return b['b2'].subtract(b['b1']).select(['sur_refl_b02'], ['b1']) def diff_learned(domain, b): '''modis_diff but with the threshold calculation included (training image required)''' if domain.unflooded_domain == None: print('No unflooded training domain provided.') return None unflooded_b = modis_utilities.compute_modis_indices(domain.unflooded_domain) water_mask = modis_utilities.get_permanent_water_mask() threshold = modis_utilities.compute_binary_threshold(get_diff(unflooded_b), water_mask, domain.bounds) return modis_diff(domain, b, threshold) def modis_diff(domain, b, threshold=None): '''Compute (b2-b1) < threshold, a simple water detection index. This method may be all that is needed in cases where the threshold can be hand tuned. ''' if threshold == None: # If no threshold value passed in, load it based on the data set. threshold = float(domain.algorithm_params['modis_diff_threshold']) return get_diff(b).lte(threshold) #============================================================== def get_dartmouth(b): A = 500 B = 2500 return b['b2'].add(A).divide(b['b1'].add(B)).select(['sur_refl_b02'], ['b1']) def dart_learned(domain, b): '''The dartmouth method but with threshold calculation included (training image required)''' if domain.unflooded_domain == None: print('No unflooded training domain provided.') return None unflooded_b = modis_utilities.compute_modis_indices(domain.unflooded_domain) water_mask = modis_utilities.get_permanent_water_mask() threshold = modis_utilities.compute_binary_threshold(get_dartmouth(unflooded_b), water_mask, domain.bounds) return dartmouth(domain, b, threshold) def dartmouth(domain, b, threshold=None): '''A flood detection method from the Dartmouth Flood Observatory. This method is a refinement of the simple b2-b1 detection method. ''' if threshold == None: threshold = float(domain.algorithm_params['dartmouth_threshold']) return get_dartmouth(b).lte(threshold) #============================================================== def get_mod_ndwi(b): return b['b6'].subtract(b['b4']).divide(b['b4'].add(b['b6'])).select(['sur_refl_b06'], ['b1']) def mod_ndwi_learned(domain, b): if domain.unflooded_domain == None: print('No unflooded training domain provided.') return None unflooded_b = modis_utilities.compute_modis_indices(domain.unflooded_domain) water_mask = modis_utilities.get_permanent_water_mask() threshold = modis_utilities.compute_binary_threshold(get_mod_ndwi(unflooded_b), water_mask, domain.bounds) return mod_ndwi(domain, b, threshold) def mod_ndwi(domain, b, threshold=None): if threshold == None: threshold = float(domain.algorithm_params['mod_ndwi_threshold']) return get_mod_ndwi(b).lte(threshold) #============================================================== def get_fai(b): '''Just the internals of the FAI method''' return b['b2'].subtract(b['b1'].add(b['b5'].subtract(b['b1']).multiply((859.0 - 645) / (1240 - 645)))).select(['sur_refl_b02'], ['b1']) def fai_learned(domain, b): if domain.unflooded_domain == None: print('No unflooded training domain provided.') return None unflooded_b = modis_utilities.compute_modis_indices(domain.unflooded_domain) water_mask = modis_utilities.get_permanent_water_mask() threshold = modis_utilities.compute_binary_threshold(get_fai(unflooded_b), water_mask, domain.bounds) return fai(domain, b, threshold) def fai(domain, b, threshold=None): ''' Floating Algae Index. Method from paper: Feng, Hu, Chen, Cai, Tian, Gan, Assessment of inundation changes of Poyang Lake using MODIS observations between 2000 and 2010. Remote Sensing of Environment, 2012. ''' if threshold == None: threshold = float(domain.algorithm_params['fai_threshold']) return get_fai(b).lte(threshold)
nasa/CrisisMappingToolkit
cmt/modis/simple_modis_algorithms.py
Python
apache-2.0
6,672
# Urocystis poae (Liro) Padwick & A. Khan, 1944 SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in Mycol. Pap. 10: 2 (1944) #### Original name Tuburcinia poae Liro, 1922 ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Ustilaginomycetes/Urocystidiales/Urocystidaceae/Urocystis/Urocystis poae/README.md
Markdown
apache-2.0
245
package com.qmx.wxmp.common.web; /** * 带UTF-8 charset 定义的MediaType. * * Jax-RS和Spring的MediaType没有UTF-8的版本, * Google的MediaType必须再调用toString()函数而不是常量,不能用于Restful方法的annotation。 * * @author free lance */ public class MediaTypes { public static final String APPLICATION_XML = "application/xml"; public static final String APPLICATION_XML_UTF_8 = "application/xml; charset=UTF-8"; public static final String JSON = "application/json"; public static final String JSON_UTF_8 = "application/json; charset=UTF-8"; public static final String JAVASCRIPT = "application/javascript"; public static final String JAVASCRIPT_UTF_8 = "application/javascript; charset=UTF-8"; public static final String APPLICATION_XHTML_XML = "application/xhtml+xml"; public static final String APPLICATION_XHTML_XML_UTF_8 = "application/xhtml+xml; charset=UTF-8"; public static final String TEXT_PLAIN = "text/plain"; public static final String TEXT_PLAIN_UTF_8 = "text/plain; charset=UTF-8"; public static final String TEXT_XML = "text/xml"; public static final String TEXT_XML_UTF_8 = "text/xml; charset=UTF-8"; public static final String TEXT_HTML = "text/html"; public static final String TEXT_HTML_UTF_8 = "text/html; charset=UTF-8"; }
lingyi2017/wxmp
src/main/java/com/qmx/wxmp/common/web/MediaTypes.java
Java
apache-2.0
1,306
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.fortress.core.samples; import org.apache.directory.fortress.core.DelAdminMgr; import org.apache.directory.fortress.core.DelAdminMgrFactory; import org.apache.directory.fortress.core.SecurityException; import org.apache.directory.fortress.core.model.OrgUnit; import org.apache.directory.fortress.core.impl.TestUtils; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * CreateUserOrgHierarchySample JUnit Test. This test program will show how to build a simple User OrgUnit hierarchy which are * used to enable administrators to group Users by organizational structure. This system supports multiple * inheritance between OrgUnits and there are no limits on how deep a hierarchy can be. The OrgUnits require name and type. Optionally can * include a description. The User OrgUnit must be associated with Users and are used to provide Administratrive RBAC control * over who may perform User Role assigns and deassigns in directory. * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class CreateUserOrgHierarchySample extends TestCase { private static final String CLS_NM = CreateUserOrgHierarchySample.class.getName(); private static final Logger LOG = LoggerFactory.getLogger( CLS_NM ); // This constant will be added to index for creation of multiple nodes in directory. public static final String TEST_HIER_USERORG_PREFIX = "sampleHierUserOrg"; public static final String TEST_HIER_BASE_USERORG = "sampleHierUserOrg1"; public static final int TEST_NUMBER = 6; public static final String TEST_HIER_DESC_USERORG_PREFIX = "sampleHierUserOrgD"; public static final String TEST_HIER_ASC_USERORG_PREFIX = "sampleHierUserOrgA"; /** * Simple constructor kicks off JUnit test suite. * @param name */ public CreateUserOrgHierarchySample(String name) { super(name); } /** * Run the User OrgUnit test cases. * * @return Test */ public static Test suite() { TestSuite suite = new TestSuite(); if(!AllSamplesJUnitTest.isFirstRun()) { suite.addTest(new CreateUserOrgHierarchySample("testDeleteHierUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testDeleteDescendantUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testDeleteAscendantUserOrgs")); } suite.addTest(new CreateUserOrgHierarchySample("testCreateHierUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testCreateDescendantUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testCreateAscendantUserOrgs")); /* suite.addTest(new CreateUserOrgHierarchySample("testDeleteHierUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testCreateHierUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testDeleteDescendantUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testCreateDescendantUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testDeleteAscendantUserOrgs")); suite.addTest(new CreateUserOrgHierarchySample("testCreateAscendantUserOrgs")); */ return suite; } /** * Remove the simple hierarchical OrgUnits from the directory. Before removal call the API to move the relationship * between the parent and child OrgUnits. Once the relationship is removed the parent OrgUnit can be removed. * User OrgUnit removal is not allowed (SecurityException will be thrown) if ou is assigned to Users in ldap. * <p> * <img src="./doc-files/HierUserOrgSimple.png" alt=""> */ public static void testDeleteHierUserOrgs() { String szLocation = ".testDeleteHierUserOrgs"; if(AllSamplesJUnitTest.isFirstRun()) { return; } try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); for (int i = 1; i < TEST_NUMBER; i++) { // The key that must be set to locate any OrgUnit is simply the name and type. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_USERORG_PREFIX + i, OrgUnit.Type.USER); OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_USERORG_PREFIX + (i + 1), OrgUnit.Type.USER); // Remove the relationship from the parent and child OrgUnit: delAdminMgr.deleteInheritance(parentOrgUnit, childOrgUnit); // Remove the parent OrgUnit from directory: delAdminMgr.delete(parentOrgUnit); } // Remove the child OrgUnit from directory: delAdminMgr.delete(new OrgUnit(TEST_HIER_USERORG_PREFIX + TEST_NUMBER, OrgUnit.Type.USER)); LOG.info(szLocation + " success"); } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } /** * Add a simple OrgUnit hierarchy to ldap. The OrgUnits will named to include a name,'sampleHierUserOrg', appended with the * sequence of 1 - 6. 'sampleHierUserOrg1' is the root or highest level OrgUnit in the structure while sampleHierUserOrg6 is the lowest * most child. Fortress OrgUnits may have multiple parents which is demonstrated in testCreateAscendantUserOrgs sample. * <p> * <img src="./doc-files/HierUserOrgSimple.png" alt=""> */ public static void testCreateHierUserOrgs() { String szLocation = ".testCreateHierUserOrgs"; try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); // Instantiate the root OrgUnit entity. OrgUnit requires name and type before addition. OrgUnit baseOrgUnit = new OrgUnit(TEST_HIER_BASE_USERORG, OrgUnit.Type.USER); // Add the root OrgUnit entity to the directory. delAdminMgr.add(baseOrgUnit); // Create User OrgUnits, 'sampleHierUserOrg2' - 'sampleHierUserOrg6'. for (int i = 2; i < TEST_NUMBER + 1; i++) { // Instantiate the OrgUnit entity. OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_USERORG_PREFIX + i, OrgUnit.Type.USER); // Add the OrgUnit entity to the directory. delAdminMgr.add(childOrgUnit); // Instantiate the parent OrgUnit. The key is the name and type. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_USERORG_PREFIX + (i - 1), OrgUnit.Type.USER); // Add a relationship between the parent and child OrgUnits: delAdminMgr.addInheritance(parentOrgUnit, childOrgUnit); } LOG.info(szLocation + " success"); } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } /** * Demonstrate teardown of a parent to child relationship of one-to-many. Each child must first remove the inheritance * relationship with parent before being removed from ldap. The parent OrgUnit will be removed from ldap last. * User OrgUnit removal is not allowed (SecurityException will be thrown) if ou is assigned to Users in ldap. * <p> * <img src="./doc-files/HierUserOrgDescendants.png" alt=""> */ public static void testDeleteDescendantUserOrgs() { String szLocation = ".testDeleteDescendantUserOrgs"; if(AllSamplesJUnitTest.isFirstRun()) { return; } try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); // This parent has many children. They must be deleted before parent itself can. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_DESC_USERORG_PREFIX + 1, OrgUnit.Type.USER); // There are N User OrgUnits to process: for (int i = 2; i < TEST_NUMBER + 1; i++) { // Instantiate the child OrgUnit entity. The key is the name and type. OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_DESC_USERORG_PREFIX + i, OrgUnit.Type.USER); // Remove the relationship from the parent and child OrgUnit: delAdminMgr.deleteInheritance(parentOrgUnit, childOrgUnit); // Remove the child OrgUnit from directory: delAdminMgr.delete(childOrgUnit); } // Remove the parent OrgUnit from directory: delAdminMgr.delete(parentOrgUnit); LOG.info(szLocation + " success"); } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } /** * Demonstrate a parent to child OrgUnit structure of one-to-many. The parent OrgUnit must be created before * the call to addDescendant which will Add a new OrgUnit node and set a OrgUnit relationship with parent node. * <p> * <img src="./doc-files/HierUserOrgDescendants.png" alt=""> */ public static void testCreateDescendantUserOrgs() { String szLocation = ".testCreateDescendantUserOrgs"; try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); // Instantiate the parent User OrgUnit entity. This needs a name and type before it can be added to ldap. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_DESC_USERORG_PREFIX + 1, OrgUnit.Type.USER); // This parent will have many children: delAdminMgr.add(parentOrgUnit); // Create User OrgUnits, 'sampleHierUserOrgD2' - 'sampleHierUserOrgD6'. for (int i = 1; i < TEST_NUMBER; i++) { // Now add relationship to the directory between parent and child User OrgUnits. OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_DESC_USERORG_PREFIX + (i + 1), OrgUnit.Type.USER); // Now add child OrgUnit entity to directory and add relationship with existing parent OrgUnit. delAdminMgr.addDescendant(parentOrgUnit, childOrgUnit); } LOG.info(szLocation + " success"); } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } /** * This example demonstrates tear down of a child to parent represented as one-to-many. The parents must all * be removed from the child before the child can be removed. * User OrgUnit removal is not allowed (SecurityException will be thrown) if ou is assigned to Users in ldap. * <p> * <img src="./doc-files/HierUserOrgAscendants.png" alt=""> */ public static void testDeleteAscendantUserOrgs() { String szLocation = ".testDeleteAscendantUserOrgs"; if(AllSamplesJUnitTest.isFirstRun()) { return; } try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); // This child OrgUnit has many parents: OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_ASC_USERORG_PREFIX + 1, OrgUnit.Type.USER); for (int i = 2; i < TEST_NUMBER + 1; i++) { // Instantiate the parent. This needs a name and type before it can be used in operation. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_ASC_USERORG_PREFIX + i, OrgUnit.Type.USER); // Remove the relationship between parent and child OrgUnits: delAdminMgr.deleteInheritance(parentOrgUnit, childOrgUnit); // Remove the parent OrgUnit from directory: delAdminMgr.delete(parentOrgUnit); } // Remove the child OrgUnit from directory: delAdminMgr.delete(childOrgUnit); LOG.info(szLocation + " success"); } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } /** * Demonstrate a child to parent OrgUnit structure of one-to-many. To use this API, the child OrgUnit must be created before * the call to addAscendant which will Add a new OrgUnit node and set a OrgUnit relationship with child node. * <p> * <img src="./doc-files/HierUserOrgAscendants.png" alt=""> */ public static void testCreateAscendantUserOrgs() { String szLocation = ".testCreateAscendantUserOrgs"; try { // Instantiate the DelAdminMgr implementation which is used to provision ARBAC policies. DelAdminMgr delAdminMgr = DelAdminMgrFactory.createInstance(TestUtils.getContext()); // Instantiate the child OrgUnit. This needs a name and type. OrgUnit childOrgUnit = new OrgUnit(TEST_HIER_ASC_USERORG_PREFIX + 1, OrgUnit.Type.USER); // This child will have many parents: delAdminMgr.add(childOrgUnit); // Create OrgUnits, 'sampleHierUserOrgA2' - 'sampleHierUserOrgA6'. for (int i = 1; i < TEST_NUMBER; i++) { // Instantiate the parent OrgUnit. This needs a name and type before it can be added to ldap. OrgUnit parentOrgUnit = new OrgUnit(TEST_HIER_ASC_USERORG_PREFIX + (i + 1), OrgUnit.Type.USER); // Now add parent OrgUnit entity to directory and add relationship with existing child OrgUnit. delAdminMgr.addAscendant(childOrgUnit, parentOrgUnit); } } catch (SecurityException ex) { LOG.error(szLocation + " caught SecurityException rc=" + ex.getErrorId() + ", msg=" + ex.getMessage(), ex); fail(ex.getMessage()); } } }
PennState/directory-fortress-core-1
src/test/java/org/apache/directory/fortress/core/samples/CreateUserOrgHierarchySample.java
Java
apache-2.0
15,806
// // Copyright (c) 2014 Limit Point Systems, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package tools.viewer.user; import tools.viewer.common.*; import tools.viewer.render.*; import tools.common.gui.*; import java.awt.*; import java.awt.event.*; import java.util.*; import javax.swing.*; import javax.swing.event.*; import javax.swing.border.*; import java.text.*; import vtk.*; /** * Implementation of <code>G3DFieldActorPropertiesPanel</code> for editing the * values of a <code>HedgeHogFieldActorDescriptor</code>. */ public class HedgeHogFieldActorPropertiesPanel extends G3DFieldActorPropertiesPanel { // CONSTANTS FACET protected static final String[] VECTOR_MODES = { ViewerConstants.VECTOR_MAGNITUDE, ViewerConstants.VECTOR_NORMAL }; // GUI FACET protected JPanel hedgeHogPanel; protected JSpinner scaleFactorSpinner; protected JComboBox vectorModeComboBox; // CONSTRUCTORS /** * Constructor */ public HedgeHogFieldActorPropertiesPanel(G3DViewer xviewer, FieldActorDescriptor[] xdescriptors) { super(xviewer, xdescriptors); hedgeHogPanel = createHedgeHogPanel(); tabbedPane.addTab("Hedge Hog", hedgeHogPanel); initValues(); } // CREATE FACET /** * Create hedge hog panel */ protected JPanel createHedgeHogPanel() { JPanel result = new JPanel(); result.setLayout(new BoxLayout(result, BoxLayout.PAGE_AXIS)); result.setBorder( BorderFactory.createCompoundBorder( BorderFactory.createEmptyBorder(6, 12, 6, 12), BorderFactory.createTitledBorder("Hedge Hog:"))); //===== result.add(Box.createVerticalGlue()); JPanel panel = new JPanel(); JLabel scaleFactorLabel = new JLabel("Scale Factor: ", JLabel.RIGHT); scaleFactorLabel.setAlignmentX(Component.CENTER_ALIGNMENT); SpinnerModel scaleFactorModel = new SpinnerNumberModel(1.0, 0.0, 10000000.0, 0.01); scaleFactorSpinner = new JSpinner(scaleFactorModel); panel.add(scaleFactorLabel); panel.add(scaleFactorSpinner); result.add(panel); result.add(Box.createVerticalGlue()); //===== panel = new JPanel(); JLabel vectorModeLabel = new JLabel("Vector Mode:", JLabel.RIGHT); vectorModeLabel.setAlignmentX(Component.CENTER_ALIGNMENT); vectorModeComboBox = new JComboBox(VECTOR_MODES); panel.add(vectorModeLabel); panel.add(vectorModeComboBox); result.add(panel); result.add(Box.createVerticalGlue()); //===== return result; } // INITIALIZE FACET /** * */ public void initValues() { super.initValues(); // Use the first actor in the list to initialize the // user interface. HedgeHogFieldActorDescriptor actor = (HedgeHogFieldActorDescriptor) descriptors[0]; initHedgeHogPanel(actor); } /** * */ protected void initHedgeHogPanel(HedgeHogFieldActorDescriptor actor) { scaleFactorSpinner.setValue(actor.scaleFactor); vectorModeComboBox.setSelectedItem(actor.vectorMode); } // APPLY FACET /** * */ public void doApply() { // Set the wait state to true, it is restored by // UpdatePropertiesPanelEvent. setWaitState(true); synchronized (viewer.getScript()) { synchronized (viewer.getScene()) { // Apply the changed to the descriptors HedgeHogFieldActorDescriptor actor; for(int i=0; i<descriptors.length; i++) { actor = (HedgeHogFieldActorDescriptor) descriptors[i]; applyHedgeHog(actor); } } } super.doApply(false); } /** * */ public void applyHedgeHog(HedgeHogFieldActorDescriptor actor) { actor.scaleFactor = ((SpinnerNumberModel)scaleFactorSpinner.getModel()).getNumber().doubleValue(); actor.vectorMode = (String) vectorModeComboBox.getSelectedItem(); } }
LimitPointSystems/SheafSystem
tools/viewer/user/HedgeHogFieldActorPropertiesPanel.java
Java
apache-2.0
4,423
<p> Even - {{ number }} </p>
davidokun/Angular-js
cmp-databinding-assignment/src/app/even/even.component.html
HTML
apache-2.0
31
<link rel="import" href="../../bower_components/polymer/polymer-element.html"> <dom-module id="balance-text-field"> <script> class BalanceTextField extends Vaadin.TextFieldElement { static get is() { return 'balance-text-field'; } static get properties() { return { max: Number, min: Number, message: { type: String, computed: '_computedMessage(errorMessage)', } } } _computedMessage(errorMessage) { return errorMessage; } checkValidity() { if (super.checkValidity()) { if (this.max && Number(this.value) > this.max) { this.errorMessage = ' You have insufficient Balance'; return false; } if(this.min && Number(this.value) < this.min){ this.errorMessage = ' must be bigger than ' + this.min; return false; } return true; } else { this.errorMessage = this.message; return false; } } } window.customElements.define(BalanceTextField.is, BalanceTextField); </script> </dom-module>
Loopring/looper
src/utils/balance-text-field.html
HTML
apache-2.0
1,346
/*! * Copyright 2012 Sakai Foundation (SF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ require(['jquery', 'oae.core'], function($, oae) { // Get the group id from the URL. The expected URL is /group/<groupId> var groupId = document.location.pathname.split('/')[2]; if (!groupId) { oae.api.util.redirect().login(); } // Variable used to cache the requested user's profile var groupProfile = null; // Variable used to cache the group's base URL var baseUrl = '/group/' + groupId; /** * Get the group's basic profile and set up the screen. If the groups * can't be found or is private to the current user, the appropriate * error page will be shown */ var getGroupProfile = function() { oae.api.group.getGroup(groupId, function(err, profile) { if (err && err.code === 404) { oae.api.util.redirect().notfound(); } else if (err && err.code === 401) { oae.api.util.redirect().accessdenied(); } groupProfile = profile; setUpClip(); setUpNavigation(); // Set the browser title oae.api.util.setBrowserTitle(groupProfile.displayName); }); }; $(document).on('oae.context.get', function() { $(document).trigger('oae.context.send', groupProfile); }); $(document).trigger('oae.context.send', groupProfile); /** * Render the group's clip, containing the profile picture, display name as well as the * group's admin options */ var setUpClip = function() { oae.api.util.template().render($('#group-clip-template'), {'group': groupProfile}, $('#group-clip-container')); // Only show the create and upload clips to managers if (groupProfile.isManager) { $('#group-actions').show(); } }; /** * Set up the left hand navigation with the me space page structure */ var setUpNavigation = function() { // Structure that will be used to construct the left hand navigation var lhNavigation = [ { 'id': 'activity', 'title': oae.api.i18n.translate('__MSG__RECENT_ACTIVITY__'), 'icon': 'icon-dashboard', 'layout': [ { 'width': 'span8', 'widgets': [ { 'id': 'activity', 'settings': { 'principalId': groupProfile.id, 'canManage': groupProfile.isManager } } ] } ] }, { 'id': 'library', 'title': oae.api.i18n.translate('__MSG__LIBRARY__'), 'icon': 'icon-briefcase', 'layout': [ { 'width': 'span12', 'widgets': [ { 'id': 'library', 'settings': { 'principalId': groupProfile.id, 'canManage': groupProfile.isManager } } ] } ] }, { 'id': 'members', 'title': oae.api.i18n.translate('__MSG__MEMBERS__'), 'icon': 'icon-user', 'layout': [ { 'width': 'span12', 'widgets': [ { 'id': 'participants', 'settings': { 'principalId': groupProfile.id, 'canManage': groupProfile.isManager } } ] } ] } ]; $(window).trigger('oae.trigger.lhnavigation', [lhNavigation, baseUrl]); $(window).on('oae.ready.lhnavigation', function() { $(window).trigger('oae.trigger.lhnavigation', [lhNavigation, baseUrl]); }); }; getGroupProfile(); });
Coenego/3akai-ux
ui/js/group.js
JavaScript
apache-2.0
5,033
package cn.oeaom.CoolWeather; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Typeface; import android.media.Image; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.Snackbar; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import java.io.IOException; import cn.oeaom.CoolWeather.GSON.Weather; import cn.oeaom.CoolWeather.Util.Utility; import okhttp3.Call; import okhttp3.Callback; import cn.oeaom.CoolWeather.Util.HttpUtil; import okhttp3.Response; public class WeatherActivity extends AppCompatActivity { private static final String TAG = "WeatherActivity"; private static final String API_KEY = "bc0418b57b2d4918819d3974ac1285d9"; //鉴权码 //天气信息面板所要展现的东西 public DrawerLayout drawerLayout; //左侧滑动和点击小房子展现的界面 //public TextView tvTitle; //标题 *弃用 private TextView weatherTime; //天气信息的时间 private TextView weatherDegree; //天气信息的温度值 private TextView measure2; //天气信息的温度单位 private TextView weatherPlace; //天气信息的地点 private TextView weatherType; //天气信息的类型 private String mWeatherId; //城市的编号 private ImageView weatherStat; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_weather); //Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); //setSupportActionBar(toolbar); // FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab); // fab.setOnClickListener(new View.OnClickListener() { // @Override // public void onClick(View view) { // Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG) // .setAction("Action", null).show(); // } // }); Typeface fontFace = Typeface.createFromAsset(getAssets(), "fonts/AndroidClock.ttf"); // 字体文件必须是true type font的格式(ttf); // 当使用外部字体却又发现字体没有变化的时候(以 Droid Sans代替),通常是因为 // 这个字体android没有支持,而非你的程序发生了错误 weatherTime = (TextView)findViewById(R.id.weather_info_time); weatherTime.setTypeface(fontFace); // weatherDegree = (TextView)findViewById(R.id.degree_value); weatherDegree.setTypeface(fontFace); TextView measure = (TextView)findViewById(R.id.degree_measure); // measure.setTypeface(fontFace); measure2 = (TextView)findViewById(R.id.degree_measure2); //measure2.setTypeface(fontFace); weatherPlace = (TextView)findViewById(R.id.weather_info_place); //weatherPlace.setTypeface(fontFace); weatherType = (TextView)findViewById(R.id.weather_info_text); //weatherType.setTypeface(fontFace); weatherStat = (ImageView)findViewById(R.id.weatherIcon); // // TextView weatherInfo = (TextView)findViewById(R.id.weather_info_text); // // weatherInfo.setTypeface(fontFace); // //text.setTextSize(50); Intent intent=getIntent(); //获取这个Intent对象的Extra中对应键的值 String weatherId=intent.getStringExtra("weather_id"); String CountryName = intent.getStringExtra("CountryName"); // tvTitle = (TextView)findViewById(R.id.title_text_weather); // //tvTitle.setText(weatherId); // tvTitle.setText(CountryName); // // tvTitle.setTextSize(60); // tvTitle.setTypeface(fontFace); drawerLayout = (DrawerLayout)findViewById(R.id.drawer_layout); Button btnBack = (Button)findViewById(R.id.btn_home); btnBack.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { //Intent intent = new Intent(WeatherActivity.this,MainActivity.class); //startActivity(intent); // WeatherActivity.this.finish(); drawerLayout.openDrawer(GravityCompat.START); Log.v(TAG,"Clicked nav btn"); } }); SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); String weatherString = prefs.getString("weather", null); if (weatherString != null) { // 有缓存时直接解析天气数据 Weather weather = Utility.handleWeatherResponse(weatherString); mWeatherId = weather.basic.weatherId; showWeatherInfo(weather); } else { // 无缓存时去服务器查询天气 mWeatherId = getIntent().getStringExtra("weather_id"); // weatherLayout.setVisibility(View.INVISIBLE); requestWeather(mWeatherId); } // swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() { // @Override // public void onRefresh() { // requestWeather(mWeatherId); // } // }); } // public void requestWeather(final String weatherId){ // tvTitle.setText(weatherId); // } public void requestWeather(final String weatherId) { String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key="+API_KEY; HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() { @Override public void onResponse(Call call, Response response) throws IOException { final String responseText = response.body().string(); Log.v(TAG,"======================================================================="); Log.v(TAG,responseText); Log.v(TAG,"======================================================================="); final Weather weather = Utility.handleWeatherResponse(responseText); runOnUiThread(new Runnable() { @Override public void run() { if (weather != null && "ok".equals(weather.status)) { SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit(); editor.putString("weather", responseText); editor.apply(); //mWeatherId = weather.basic.weatherId; showWeatherInfo(weather); } else { Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show(); } //swipeRefresh.setRefreshing(false); } }); } @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show(); //swipeRefresh.setRefreshing(false); } }); } }); //loadBingPic(); } private int findWeatherIconByName(String weatherName) { switch(weatherName) { case "晴":return R.drawable.a044; case "多云":return R.drawable.a045; case "少云":return R.drawable.a046; case "晴间多云":return R.drawable.a047; case "阴":return R.drawable.a048; case "有风":return R.drawable.a049; case "平静":return R.drawable.a050; case "微风":return R.drawable.a000; case "和风":return R.drawable.a001; case "清风":return R.drawable.a002; case "强风":return R.drawable.a003; case "劲风":return R.drawable.a003; case "大风":return R.drawable.a004; case "烈风":return R.drawable.a005; case "风暴":return R.drawable.a006; case "狂爆风":return R.drawable.a007; case "龙卷风":return R.drawable.a008; case "热带风暴":return R.drawable.a009; case "阵雨":return R.drawable.a012; case "强阵雨":return R.drawable.a013; case "雷阵雨":return R.drawable.a014; case "强雷阵雨":return R.drawable.a015; case "雷阵雨伴有冰雹":return R.drawable.a016; case "小雨":return R.drawable.a017; case "中雨":return R.drawable.a018; case "大雨":return R.drawable.a019; case "极端降雨":return R.drawable.a020; case "毛毛雨":return R.drawable.a021; case "细雨":return R.drawable.a021; case "暴雨":return R.drawable.a022; case "大暴雨":return R.drawable.a023; case "特大暴雨":return R.drawable.a024; case "冻雨":return R.drawable.a025; case "小雪":return R.drawable.a026; case "中雪":return R.drawable.a027; case "大雪":return R.drawable.a028; case "暴雪":return R.drawable.a029; case "雨夹雪":return R.drawable.a030; case "雨雪天气":return R.drawable.a031; case "阵雨夹雪":return R.drawable.a032; case "阵雪":return R.drawable.a033; case "薄雾":return R.drawable.a034; case "雾":return R.drawable.a035; case "霾":return R.drawable.a036; case "扬沙":return R.drawable.a037; case "浮尘":return R.drawable.a038; case "沙尘暴":return R.drawable.a039; case "热":return R.drawable.a041; case "冷":return R.drawable.a042; case "强沙尘暴":return R.drawable.a040; case "未知":return R.drawable.a043; default:{ break; } } return -1; } private void showWeatherInfo(Weather weather) { String cityName = weather.basic.cityName; String updateTime = weather.basic.update.updateTime.split(" ")[1]; String degree = weather.now.temperature; String weatherInfo = weather.now.more.info; weatherPlace.setText(cityName); weatherTime.setText(updateTime); weatherDegree.setText(degree); weatherType.setText(weatherInfo); weatherStat.setImageResource(findWeatherIconByName(weatherInfo)); // forecastLayout.removeAllViews(); // for (Forecast forecast : weather.forecastList) { // View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false); // TextView dateText = (TextView) view.findViewById(R.id.date_text); // TextView infoText = (TextView) view.findViewById(R.id.info_text); // TextView maxText = (TextView) view.findViewById(R.id.max_text); // TextView minText = (TextView) view.findViewById(R.id.min_text); // dateText.setText(forecast.date); // infoText.setText(forecast.more.info); // maxText.setText(forecast.temperature.max); // minText.setText(forecast.temperature.min); // forecastLayout.addView(view); // } // if (weather.aqi != null) { // aqiText.setText(weather.aqi.city.aqi); // pm25Text.setText(weather.aqi.city.pm25); // } // String comfort = "舒适度:" + weather.suggestion.comfort.info; // String carWash = "洗车指数:" + weather.suggestion.carWash.info; // String sport = "运行建议:" + weather.suggestion.sport.info; // comfortText.setText(comfort); // carWashText.setText(carWash); // sportText.setText(sport); // weatherLayout.setVisibility(View.VISIBLE); // Intent intent = new Intent(this, AutoUpdateService.class); // startService(intent); } }
applicationsky/MyCoolWeather
app/src/main/java/cn/oeaom/CoolWeather/WeatherActivity.java
Java
apache-2.0
12,783
--- layout: "fluid/docs_base" version: "3.9.2" versionHref: "/docs/v3/3.9.2" path: "" category: api id: "toastcontroller" title: "ToastController" header_sub_title: "Ionic API Documentation" doc: "ToastController" docType: "class" show_preview_device: true preview_device_url: "/docs/v3/demos/src/toast/www/" angular_controller: APIDemoCtrl --- <h1 class="api-title"> <a class="anchor" name="toast-controller" href="#toast-controller"></a> ToastController </h1> <a class="improve-v2-docs" href="http://github.com/ionic-team/ionic/edit/v3/src/components/toast/toast-controller.ts#L5"> Improve this doc </a> <p>A Toast is a subtle notification commonly used in modern applications. It can be used to provide feedback about an operation or to display a system message. The toast appears on top of the app&#39;s content, and can be dismissed by the app to resume user interaction with the app.</p> <h3><a class="anchor" name="creating" href="#creating">Creating</a></h3> <p>All of the toast options should be passed in the first argument of the create method: <code>create(opts)</code>. The message to display should be passed in the <code>message</code> property. The <code>showCloseButton</code> option can be set to true in order to display a close button on the toast. See the <a href="#create">create</a> method below for all available options.</p> <h3><a class="anchor" name="positioning" href="#positioning">Positioning</a></h3> <p>Toasts can be positioned at the top, bottom or middle of the view port. The position can be passed to the <code>Toast.create(opts)</code> method. The position option is a string, and the values accepted are <code>top</code>, <code>bottom</code> and <code>middle</code>. If the position is not specified, the toast will be displayed at the bottom of the view port.</p> <h3><a class="anchor" name="dismissing" href="#dismissing">Dismissing</a></h3> <p>The toast can be dismissed automatically after a specific amount of time by passing the number of milliseconds to display it in the <code>duration</code> of the toast options. If <code>showCloseButton</code> is set to true, then the close button will dismiss the toast. To dismiss the toast after creation, call the <code>dismiss()</code> method on the Toast instance. The <code>onDidDismiss</code> function can be called to perform an action after the toast is dismissed.</p> <!-- @usage tag --> <h2><a class="anchor" name="usage" href="#usage">Usage</a></h2> <pre><code class="lang-ts">import { ToastController } from &#39;ionic-angular&#39;; constructor(private toastCtrl: ToastController) { } presentToast() { let toast = this.toastCtrl.create({ message: &#39;User was added successfully&#39;, duration: 3000, position: &#39;top&#39; }); toast.onDidDismiss(() =&gt; { console.log(&#39;Dismissed toast&#39;); }); toast.present(); } </code></pre> <!-- @property tags --> <!-- instance methods on the class --> <h2><a class="anchor" name="instance-members" href="#instance-members">Instance Members</a></h2> <div id="config"></div> <h3> <a class="anchor" name="config" href="#config"> <code>config</code> </a> </h3> <div id="create"></div> <h3> <a class="anchor" name="create" href="#create"> <code>create(opts)</code> </a> </h3> Create a new toast component. See options below <table class="table param-table" style="margin:0;"> <thead> <tr> <th>Param</th> <th>Type</th> <th>Details</th> </tr> </thead> <tbody> <tr> <td> opts </td> <td> <code>ToastOptions</code> </td> <td> <p>Toast options. See the below table for available options.</p> </td> </tr> </tbody> </table> <h2><a class="anchor" name="advanced" href="#advanced">Advanced</a></h2> <table> <thead> <tr> <th>Property</th> <th>Type</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td>message</td> <td><code>string</code></td> <td>-</td> <td>The message for the toast. Long strings will wrap and the toast container will expand.</td> </tr> <tr> <td>duration</td> <td><code>number</code></td> <td>-</td> <td>How many milliseconds to wait before hiding the toast. By default, it will show until <code>dismiss()</code> is called.</td> </tr> <tr> <td>position</td> <td><code>string</code></td> <td>&quot;bottom&quot;</td> <td>The position of the toast on the screen. Accepted values: &quot;top&quot;, &quot;middle&quot;, &quot;bottom&quot;.</td> </tr> <tr> <td>cssClass</td> <td><code>string</code></td> <td>-</td> <td>Additional classes for custom styles, separated by spaces.</td> </tr> <tr> <td>showCloseButton</td> <td><code>boolean</code></td> <td>false</td> <td>Whether or not to show a button to close the toast.</td> </tr> <tr> <td>closeButtonText</td> <td><code>string</code></td> <td>&quot;Close&quot;</td> <td>Text to display in the close button.</td> </tr> <tr> <td>dismissOnPageChange</td> <td><code>boolean</code></td> <td>false</td> <td>Whether to dismiss the toast when navigating to a new page.</td> </tr> </tbody> </table> <h2 id="sass-variable-header"><a class="anchor" name="sass-variables" href="#sass-variables">Sass Variables</a></h2> <div id="sass-variables" ng-controller="SassToggleCtrl"> <div class="sass-platform-toggle"> <a ng-init="setSassPlatform('base')" ng-class="{ active: active === 'base' }" ng-click="setSassPlatform('base')" >All</a> <a ng-class="{ active: active === 'ios' }" ng-click="setSassPlatform('ios')">iOS</a> <a ng-class="{ active: active === 'md' }" ng-click="setSassPlatform('md')">Material Design</a> <a ng-class="{ active: active === 'wp' }" ng-click="setSassPlatform('wp')">Windows Platform</a> </div> <table ng-show="active === 'base'" id="sass-base" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$toast-width</code></td> <td><code>100%</code></td> <td><p>Width of the toast</p> </td> </tr> <tr> <td><code>$toast-max-width</code></td> <td><code>700px</code></td> <td><p>Max width of the toast</p> </td> </tr> </tbody> </table> <table ng-show="active === 'ios'" id="sass-ios" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$toast-ios-background</code></td> <td><code>rgba(0, 0, 0, .9)</code></td> <td><p>Background of the toast wrapper</p> </td> </tr> <tr> <td><code>$toast-ios-border-radius</code></td> <td><code>.65rem</code></td> <td><p>Border radius of the toast wrapper</p> </td> </tr> <tr> <td><code>$toast-ios-title-color</code></td> <td><code>#fff</code></td> <td><p>Color of the toast title</p> </td> </tr> <tr> <td><code>$toast-ios-title-font-size</code></td> <td><code>1.4rem</code></td> <td><p>Font size of the toast title</p> </td> </tr> <tr> <td><code>$toast-ios-title-padding-top</code></td> <td><code>1.5rem</code></td> <td><p>Padding top of the toast title</p> </td> </tr> <tr> <td><code>$toast-ios-title-padding-end</code></td> <td><code>$toast-ios-title-padding-top</code></td> <td><p>Padding end of the toast title</p> </td> </tr> <tr> <td><code>$toast-ios-title-padding-bottom</code></td> <td><code>$toast-ios-title-padding-top</code></td> <td><p>Padding bottom of the toast title</p> </td> </tr> <tr> <td><code>$toast-ios-title-padding-start</code></td> <td><code>$toast-ios-title-padding-end</code></td> <td><p>Padding start of the toast title</p> </td> </tr> </tbody> </table> <table ng-show="active === 'md'" id="sass-md" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$toast-md-background</code></td> <td><code>#333</code></td> <td><p>Background of the toast wrapper</p> </td> </tr> <tr> <td><code>$toast-md-title-color</code></td> <td><code>#fff</code></td> <td><p>Color of the toast title</p> </td> </tr> <tr> <td><code>$toast-md-title-font-size</code></td> <td><code>1.5rem</code></td> <td><p>Font size of the toast title</p> </td> </tr> <tr> <td><code>$toast-md-title-padding-top</code></td> <td><code>19px</code></td> <td><p>Padding top of the toast title</p> </td> </tr> <tr> <td><code>$toast-md-title-padding-end</code></td> <td><code>16px</code></td> <td><p>Padding end of the toast title</p> </td> </tr> <tr> <td><code>$toast-md-title-padding-bottom</code></td> <td><code>17px</code></td> <td><p>Padding bottom of the toast title</p> </td> </tr> <tr> <td><code>$toast-md-title-padding-start</code></td> <td><code>$toast-md-title-padding-end</code></td> <td><p>Padding start of the toast title</p> </td> </tr> </tbody> </table> <table ng-show="active === 'wp'" id="sass-wp" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$toast-wp-background</code></td> <td><code>rgba(0, 0, 0, 1)</code></td> <td><p>Background of the toast wrapper</p> </td> </tr> <tr> <td><code>$toast-wp-border-radius</code></td> <td><code>0</code></td> <td><p>Border radius of the toast wrapper</p> </td> </tr> <tr> <td><code>$toast-wp-button-color</code></td> <td><code>#fff</code></td> <td><p>Color of the toast button</p> </td> </tr> <tr> <td><code>$toast-wp-title-color</code></td> <td><code>#fff</code></td> <td><p>Color of the toast title</p> </td> </tr> <tr> <td><code>$toast-wp-title-font-size</code></td> <td><code>1.4rem</code></td> <td><p>Font size of the toast title</p> </td> </tr> <tr> <td><code>$toast-wp-title-padding-top</code></td> <td><code>1.5rem</code></td> <td><p>Padding top of the toast title</p> </td> </tr> <tr> <td><code>$toast-wp-title-padding-end</code></td> <td><code>$toast-wp-title-padding-top</code></td> <td><p>Padding end of the toast title</p> </td> </tr> <tr> <td><code>$toast-wp-title-padding-bottom</code></td> <td><code>$toast-wp-title-padding-top</code></td> <td><p>Padding bottom of the toast title</p> </td> </tr> <tr> <td><code>$toast-wp-title-padding-start</code></td> <td><code>$toast-wp-title-padding-end</code></td> <td><p>Padding start of the toast title</p> </td> </tr> </tbody> </table> </div> <!-- related link --><!-- end content block --> <!-- end body block -->
driftyco/ionic-site
content/docs/v3/3.9.2/api/components/toast/ToastController/index.md
Markdown
apache-2.0
11,615
#include <Wire.h> #include "lis331dlh.h" #include "l3g4200d.h" #include "lis3mdl.h" #include "LPS331.h" #include "MadgwickAHRS.h" // Accelerometer #define ACCEL_ADDRESS_V1 LIS331DLH_TWI_ADDRESS #define ACCEL_ADDRESS_V2 LIS331DLH_TWI_ADDRESS_V2 // Gyroscope #define GYRO_ADDRESS_V1 L3G4200D_TWI_ADDRESS #define GYRO_ADDRESS_V2 L3G4200D_TWI_ADDRESS_V2 // Compass #define COMPASS_ADDRESS_V1 LIS3MDL_TWI_ADDRESS #define COMPASS_ADDRESS_V2 LIS3MDL_TWI_ADDRESS_V2 // Barometer #define BARO_ADDRESS_V1 LPS331AP_TWI_ADDRESS #define BARO_ADDRESS_V2 LPS331AP_TWI_ADDRESS_V2 class Accelerometer : public LIS331DLH_TWI { public: Accelerometer(uint8_t addr = ACCEL_ADDRESS_V1) : LIS331DLH_TWI(addr) {} }; class Gyroscope : public L3G4200D_TWI { public: Gyroscope(uint8_t addr = GYRO_ADDRESS_V1) : L3G4200D_TWI(addr) {} }; class Compass : public LIS3MDL_TWI { public: Compass(uint8_t addr = COMPASS_ADDRESS_V1) : LIS3MDL_TWI(addr) {} }; class Barometer : public LPS331 { public: Barometer(uint8_t addr = BARO_ADDRESS_V1) : LPS331(addr) {} };
Garuda-1/RealityGateway
ESP_SOFTWARE/IMU-LIB Fixed/Troyka-IMU-master/TroykaIMU.h
C
apache-2.0
1,103
# -*- coding:utf-8 -*- __author__ = 'q00222219@huawei' import time from heat.openstack.common import log as logging import heat.engine.resources.cloudmanager.commonutils as commonutils import heat.engine.resources.cloudmanager.constant as constant import heat.engine.resources.cloudmanager.exception as exception import pdb LOG = logging.getLogger(__name__) class CascadedConfiger(object): def __init__(self, public_ip_api, api_ip, domain, user, password, cascading_domain, cascading_api_ip, cascaded_domain, cascaded_api_ip, cascaded_api_subnet_gateway): self.public_ip_api = public_ip_api self.api_ip = api_ip self.domain = domain self.user = user self.password = password self.cascading_domain = cascading_domain self.cascading_api_ip = cascading_api_ip self.cascaded_domain = cascaded_domain self.cascaded_ip = cascaded_api_ip self.gateway = cascaded_api_subnet_gateway def do_config(self): start_time = time.time() #pdb.set_trace() LOG.info("start config cascaded, cascaded: %s" % self.domain) # wait cascaded tunnel can visit commonutils.check_host_status(host=self.public_ip_api, user=self.user, password=self.password, retry_time=500, interval=1) # config cascaded host self._config_az_cascaded() cost_time = time.time() - start_time LOG.info("first config success, cascaded: %s, cost time: %d" % (self.domain, cost_time)) # check config result for i in range(3): try: # check 90s commonutils.check_host_status( host=self.public_ip_api, user=constant.VcloudConstant.ROOT, password=constant.VcloudConstant.ROOT_PWD, retry_time=15, interval=1) LOG.info("cascaded api is ready..") break except exception.CheckHostStatusFailure: if i == 2: LOG.error("check cascaded api failed ...") break LOG.error("check cascaded api error, " "retry config cascaded ...") self._config_az_cascaded() cost_time = time.time() - start_time LOG.info("config cascaded success, cascaded: %s, cost_time: %d" % (self.domain, cost_time)) def _config_az_cascaded(self): LOG.info("start config cascaded host, host: %s" % self.api_ip) # modify dns server address address = "/%(cascading_domain)s/%(cascading_ip)s,/%(cascaded_domain)s/%(cascaded_ip)s" \ % {"cascading_domain": self.cascading_domain, "cascading_ip": self.cascading_api_ip, "cascaded_domain":self.cascaded_domain, "cascaded_ip":self.cascaded_ip} for i in range(30): try: commonutils.execute_cmd_without_stdout( host=self.public_ip_api, user=self.user, password=self.password, cmd='cd %(dir)s; source /root/adminrc; sh %(script)s replace %(address)s' % {"dir": constant.PublicConstant.SCRIPTS_DIR, "script": constant.PublicConstant. MODIFY_DNS_SERVER_ADDRESS, "address": address}) break except exception.SSHCommandFailure as e: LOG.error("modify cascaded dns address error, cascaded: " "%s, error: %s" % (self.domain, e.format_message())) time.sleep(1) LOG.info( "config cascaded dns address success, cascaded: %s" % self.public_ip_api) return True
hgqislub/hybird-orchard
code/cloudmanager/install/hws/hws_cascaded_configer.py
Python
apache-2.0
4,081
#ifdef WINDOWS_PLATFORM #include "WindowsInputService.hpp" #include "WindowsMouseInterface.hpp" #include "WindowsKeyboardInterface.hpp" namespace MPACK { namespace Input { WindowsInputService::WindowsInputService() { m_pMouse = new WindowsMouseInterface; m_pKeyboard = new WindowsKeyboardInterface; Reset(); } WindowsInputService::~WindowsInputService() { } void WindowsInputService::Update() { m_pMouse->Update(); m_pKeyboard->Update(); } void WindowsInputService::Reset() { m_pMouse->Reset(); m_pKeyboard->Reset(); } MouseInterface* WindowsInputService::GetMouse() const { return m_pMouse; } KeyboardInterface* WindowsInputService::GetKeyboard() const { return m_pKeyboard; } } } #endif
mirceamt/MPACK
jni/MPACK/Input/Desktop/Windows/WindowsInputService.cpp
C++
apache-2.0
764
angular.module('app.services', [ 'app.services.actions', 'app.services.connection', 'app.services.coverart', 'app.services.locale', 'app.services.logging', 'app.services.mopidy', 'app.services.paging', 'app.services.platform', 'app.services.router', 'app.services.servers', 'app.services.settings' ]);
tkem/mopidy-mobile
www/app/services/services.js
JavaScript
apache-2.0
324
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. Imports System.Collections.Immutable Imports System.Diagnostics Imports System.Runtime.InteropServices Imports Microsoft.CodeAnalysis.PooledObjects Imports Microsoft.CodeAnalysis.Text Imports Microsoft.CodeAnalysis.VisualBasic.Symbols Imports Microsoft.CodeAnalysis.VisualBasic.Syntax Imports TypeKind = Microsoft.CodeAnalysis.TypeKind Namespace Microsoft.CodeAnalysis.VisualBasic Partial Friend NotInheritable Class LocalRewriter Private Function WrapInNullable(expr As BoundExpression, nullableType As TypeSymbol) As BoundExpression Debug.Assert(nullableType.GetNullableUnderlyingType.IsSameTypeIgnoringAll(expr.Type)) Dim ctor = GetNullableMethod(expr.Syntax, nullableType, SpecialMember.System_Nullable_T__ctor) If ctor IsNot Nothing Then Return New BoundObjectCreationExpression(expr.Syntax, ctor, ImmutableArray.Create(expr), Nothing, nullableType) End If Return New BoundBadExpression(expr.Syntax, LookupResultKind.NotReferencable, ImmutableArray(Of Symbol).Empty, ImmutableArray.Create(expr), nullableType, hasErrors:=True) End Function ''' <summary> ''' Splits nullable operand into a hasValueExpression and an expression that represents underlying value (returned). ''' ''' Underlying value can be called after calling hasValueExpr without duplicated side-effects. ''' Note that hasValueExpr is guaranteed to have NO SIDE-EFFECTS, while result value is ''' expected to be called exactly ONCE. That is the normal pattern in operator lifting. ''' ''' All necessary temps and side-effecting initializations are appended to temps and inits ''' </summary> Private Function ProcessNullableOperand(operand As BoundExpression, <Out> ByRef hasValueExpr As BoundExpression, ByRef temps As ArrayBuilder(Of LocalSymbol), ByRef inits As ArrayBuilder(Of BoundExpression), doNotCaptureLocals As Boolean) As BoundExpression Return ProcessNullableOperand(operand, hasValueExpr, temps, inits, doNotCaptureLocals, HasValue(operand)) End Function Private Function ProcessNullableOperand(operand As BoundExpression, <Out> ByRef hasValueExpr As BoundExpression, ByRef temps As ArrayBuilder(Of LocalSymbol), ByRef inits As ArrayBuilder(Of BoundExpression), doNotCaptureLocals As Boolean, operandHasValue As Boolean) As BoundExpression Debug.Assert(Not HasNoValue(operand), "processing nullable operand when it is known to be null") If operandHasValue Then operand = NullableValueOrDefault(operand) End If Dim captured = CaptureNullableIfNeeded(operand, temps, inits, doNotCaptureLocals) If operandHasValue Then hasValueExpr = New BoundLiteral(operand.Syntax, ConstantValue.True, Me.GetSpecialType(SpecialType.System_Boolean)) Return captured End If hasValueExpr = NullableHasValue(captured) Return NullableValueOrDefault(captured) End Function ' Right operand could be a method that takes Left operand byref. Ex: " local And TakesArgByref(local) " ' So in general we must capture Left even if it is a local. ' however in many case we do not need that. Private Function RightCanChangeLeftLocal(left As BoundExpression, right As BoundExpression) As Boolean ' TODO: in most cases right operand does not change value of the left one ' we could be smarter than this. Return right.Kind = BoundKind.Local OrElse right.Kind = BoundKind.Parameter End Function ''' <summary> ''' Returns a NOT-SIDE-EFFECTING expression that represents results of the operand ''' If such transformation requires a temp, the temp and its initializing expression ''' are returned in temp/init ''' </summary> Private Function CaptureNullableIfNeeded(operand As BoundExpression, <Out> ByRef temp As SynthesizedLocal, <Out> ByRef init As BoundExpression, doNotCaptureLocals As Boolean) As BoundExpression temp = Nothing init = Nothing If operand.IsConstant Then Return operand End If If doNotCaptureLocals Then If operand.Kind = BoundKind.Local AndAlso Not DirectCast(operand, BoundLocal).LocalSymbol.IsByRef Then Return operand End If If operand.Kind = BoundKind.Parameter AndAlso Not DirectCast(operand, BoundParameter).ParameterSymbol.IsByRef Then Return operand End If End If ' capture into local. Return CaptureOperand(operand, temp, init) End Function Private Function CaptureOperand(operand As BoundExpression, <Out> ByRef temp As SynthesizedLocal, <Out> ByRef init As BoundExpression) As BoundExpression temp = New SynthesizedLocal(Me._currentMethodOrLambda, operand.Type, SynthesizedLocalKind.LoweringTemp) Dim localAccess = New BoundLocal(operand.Syntax, temp, True, temp.Type) init = New BoundAssignmentOperator(operand.Syntax, localAccess, operand, True, operand.Type) Return localAccess.MakeRValue End Function Private Function CaptureNullableIfNeeded( operand As BoundExpression, <[In], Out> ByRef temps As ArrayBuilder(Of LocalSymbol), <[In], Out> ByRef inits As ArrayBuilder(Of BoundExpression), doNotCaptureLocals As Boolean ) As BoundExpression Dim temp As SynthesizedLocal = Nothing Dim init As BoundExpression = Nothing Dim captured = CaptureNullableIfNeeded(operand, temp, init, doNotCaptureLocals) If temp IsNot Nothing Then temps = If(temps, ArrayBuilder(Of LocalSymbol).GetInstance) temps.Add(temp) Debug.Assert(init IsNot Nothing) inits = If(inits, ArrayBuilder(Of BoundExpression).GetInstance) inits.Add(init) Else Debug.Assert(captured Is operand) End If Return captured End Function ''' <summary> ''' Returns expression that - ''' a) evaluates the operand if needed ''' b) produces it's ValueOrDefault. ''' The helper is familiar with wrapping expressions and will go directly after the value ''' skipping wrap/unwrap steps. ''' </summary> Private Function NullableValueOrDefault(expr As BoundExpression) As BoundExpression Debug.Assert(expr.Type.IsNullableType) ' check if we are not getting value from freshly constructed nullable ' no need to wrap/unwrap it then. If expr.Kind = BoundKind.ObjectCreationExpression Then Dim objectCreation = DirectCast(expr, BoundObjectCreationExpression) ' passing one argument means we are calling New Nullable<T>(arg) If objectCreation.Arguments.Length = 1 Then Return objectCreation.Arguments(0) End If End If Dim getValueOrDefaultMethod = GetNullableMethod(expr.Syntax, expr.Type, SpecialMember.System_Nullable_T_GetValueOrDefault) If getValueOrDefaultMethod IsNot Nothing Then Return New BoundCall(expr.Syntax, getValueOrDefaultMethod, Nothing, expr, ImmutableArray(Of BoundExpression).Empty, Nothing, isLValue:=False, suppressObjectClone:=True, type:=getValueOrDefaultMethod.ReturnType) End If Return New BoundBadExpression(expr.Syntax, LookupResultKind.NotReferencable, ImmutableArray(Of Symbol).Empty, ImmutableArray.Create(expr), expr.Type.GetNullableUnderlyingType(), hasErrors:=True) End Function Private Function NullableValue(expr As BoundExpression) As BoundExpression Debug.Assert(expr.Type.IsNullableType) If HasValue(expr) Then Return NullableValueOrDefault(expr) End If Dim getValueMethod As MethodSymbol = GetNullableMethod(expr.Syntax, expr.Type, SpecialMember.System_Nullable_T_get_Value) If getValueMethod IsNot Nothing Then Return New BoundCall(expr.Syntax, getValueMethod, Nothing, expr, ImmutableArray(Of BoundExpression).Empty, Nothing, isLValue:=False, suppressObjectClone:=True, type:=getValueMethod.ReturnType) End If Return New BoundBadExpression(expr.Syntax, LookupResultKind.NotReferencable, ImmutableArray(Of Symbol).Empty, ImmutableArray.Create(expr), expr.Type.GetNullableUnderlyingType(), hasErrors:=True) End Function ''' <summary> ''' Evaluates expr and calls HasValue on it. ''' </summary> Private Function NullableHasValue(expr As BoundExpression) As BoundExpression Debug.Assert(expr.Type.IsNullableType) ' when we statically know if expr HasValue we may skip ' evaluation depending on context. Debug.Assert(Not HasValue(expr)) Debug.Assert(Not HasNoValue(expr)) Dim hasValueMethod As MethodSymbol = GetNullableMethod(expr.Syntax, expr.Type, SpecialMember.System_Nullable_T_get_HasValue) If hasValueMethod IsNot Nothing Then Return New BoundCall(expr.Syntax, hasValueMethod, Nothing, expr, ImmutableArray(Of BoundExpression).Empty, Nothing, isLValue:=False, suppressObjectClone:=True, type:=hasValueMethod.ReturnType) End If Return New BoundBadExpression(expr.Syntax, LookupResultKind.NotReferencable, ImmutableArray(Of Symbol).Empty, ImmutableArray.Create(expr), Me.Compilation.GetSpecialType(SpecialType.System_Boolean), hasErrors:=True) End Function Private Shared Function NullableNull(syntax As SyntaxNode, nullableType As TypeSymbol) As BoundExpression Debug.Assert(nullableType.IsNullableType) Return New BoundObjectCreationExpression(syntax, Nothing, ImmutableArray(Of BoundExpression).Empty, Nothing, nullableType) End Function ''' <summary> ''' Checks that candidate Null expression is a simple expression that produces Null of the desired type ''' (not a conversion or anything like that) and returns it. ''' Otherwise creates "New T?()" expression. ''' </summary> Private Shared Function NullableNull(candidateNullExpression As BoundExpression, type As TypeSymbol) As BoundExpression Debug.Assert(HasNoValue(candidateNullExpression)) ' in case if the expression is any more complicated than just creating a Null ' simplify it. This may happen if HasNoValue gets smarter and can ' detect situations other than "New T?()" If (Not type.IsSameTypeIgnoringAll(candidateNullExpression.Type)) OrElse candidateNullExpression.Kind <> BoundKind.ObjectCreationExpression Then Return NullableNull(candidateNullExpression.Syntax, type) End If Return candidateNullExpression End Function Private Function NullableFalse(syntax As SyntaxNode, nullableOfBoolean As TypeSymbol) As BoundExpression Debug.Assert(nullableOfBoolean.IsNullableOfBoolean) Dim booleanType = nullableOfBoolean.GetNullableUnderlyingType Return WrapInNullable(New BoundLiteral(syntax, ConstantValue.False, booleanType), nullableOfBoolean) End Function Private Function NullableTrue(syntax As SyntaxNode, nullableOfBoolean As TypeSymbol) As BoundExpression Debug.Assert(nullableOfBoolean.IsNullableOfBoolean) Dim booleanType = nullableOfBoolean.GetNullableUnderlyingType Return WrapInNullable(New BoundLiteral(syntax, ConstantValue.True, booleanType), nullableOfBoolean) End Function Private Function GetNullableMethod(syntax As SyntaxNode, nullableType As TypeSymbol, member As SpecialMember) As MethodSymbol Dim method As MethodSymbol = Nothing If TryGetSpecialMember(method, member, syntax) Then Dim substitutedType = DirectCast(nullableType, SubstitutedNamedType) Return DirectCast(substitutedType.GetMemberForDefinition(method), MethodSymbol) End If Return Nothing End Function Private Function NullableOfBooleanValue(syntax As SyntaxNode, isTrue As Boolean, nullableOfBoolean As TypeSymbol) As BoundExpression If isTrue Then Return NullableTrue(syntax, nullableOfBoolean) Else Return NullableFalse(syntax, nullableOfBoolean) End If End Function ''' <summary> ''' returns true when expression has NO SIDE-EFFECTS and is known to produce nullable NULL ''' </summary> Private Shared Function HasNoValue(expr As BoundExpression) As Boolean Debug.Assert(expr.Type.IsNullableType) If expr.Kind = BoundKind.ObjectCreationExpression Then Dim objCreation = DirectCast(expr, BoundObjectCreationExpression) ' Nullable<T> has only one ctor with parameters and only that one sets hasValue = true Return objCreation.Arguments.Length = 0 End If ' by default we do not know Return False End Function ''' <summary> ''' Returns true when expression is known to produce nullable NOT-NULL ''' NOTE: unlike HasNoValue case, HasValue expressions may have side-effects. ''' </summary> Private Shared Function HasValue(expr As BoundExpression) As Boolean Debug.Assert(expr.Type.IsNullableType) If expr.Kind = BoundKind.ObjectCreationExpression Then Dim objCreation = DirectCast(expr, BoundObjectCreationExpression) ' Nullable<T> has only one ctor with parameters and only that one sets hasValue = true Return objCreation.Arguments.Length <> 0 End If ' by default we do not know Return False End Function ''' <summary> ''' Helper to generate binary expressions. ''' Performs some trivial constant folding. ''' TODO: Perhaps belong to a different file ''' </summary> Private Function MakeBinaryExpression(syntax As SyntaxNode, binaryOpKind As BinaryOperatorKind, left As BoundExpression, right As BoundExpression, isChecked As Boolean, resultType As TypeSymbol) As BoundExpression Debug.Assert(Not left.Type.IsNullableType) Debug.Assert(Not right.Type.IsNullableType) Dim intOverflow As Boolean = False Dim divideByZero As Boolean = False Dim lengthOutOfLimit As Boolean = False Dim constant = OverloadResolution.TryFoldConstantBinaryOperator(binaryOpKind, left, right, resultType, intOverflow, divideByZero, lengthOutOfLimit) If constant IsNot Nothing AndAlso Not divideByZero AndAlso Not (intOverflow And isChecked) AndAlso Not lengthOutOfLimit Then Debug.Assert(Not constant.IsBad) Return New BoundLiteral(syntax, constant, resultType) End If Select Case binaryOpKind Case BinaryOperatorKind.Subtract If right.IsDefaultValueConstant Then Return left End If Case BinaryOperatorKind.Add, BinaryOperatorKind.Or, BinaryOperatorKind.OrElse ' if one of operands is trivial, return the other one If left.IsDefaultValueConstant Then Return right End If If right.IsDefaultValueConstant Then Return left End If ' if one of operands is True, evaluate the other and return the True one If left.IsTrueConstant Then Return MakeSequence(right, left) End If If right.IsTrueConstant Then Return MakeSequence(left, right) End If Case BinaryOperatorKind.And, BinaryOperatorKind.AndAlso, BinaryOperatorKind.Multiply ' if one of operands is trivial, evaluate the other and return the trivial one If left.IsDefaultValueConstant Then Return MakeSequence(right, left) End If If right.IsDefaultValueConstant Then Return MakeSequence(left, right) End If ' if one of operands is True, return the other one If left.IsTrueConstant Then Return right End If If right.IsTrueConstant Then Return left End If Case BinaryOperatorKind.Equals If left.IsTrueConstant Then Return right End If If right.IsTrueConstant Then Return left End If Case BinaryOperatorKind.NotEquals If left.IsFalseConstant Then Return right End If If right.IsFalseConstant Then Return left End If End Select Return TransformRewrittenBinaryOperator(New BoundBinaryOperator(syntax, binaryOpKind, left, right, isChecked, resultType)) End Function ''' <summary> ''' Simpler helper for binary expressions. ''' When operand are boolean, the result type is same as operand's and is never checked ''' so do not need to pass that in. ''' </summary> Private Function MakeBooleanBinaryExpression(syntax As SyntaxNode, binaryOpKind As BinaryOperatorKind, left As BoundExpression, right As BoundExpression) As BoundExpression Debug.Assert(TypeSymbol.Equals(left.Type, right.Type, TypeCompareKind.ConsiderEverything)) Debug.Assert(left.Type.IsBooleanType) Return MakeBinaryExpression(syntax, binaryOpKind, left, right, False, left.Type) End Function Private Shared Function MakeNullLiteral(syntax As SyntaxNode, type As TypeSymbol) As BoundLiteral Return New BoundLiteral(syntax, ConstantValue.Nothing, type) End Function ''' <summary> ''' Takes two expressions and makes sequence. ''' </summary> Private Shared Function MakeSequence(first As BoundExpression, second As BoundExpression) As BoundExpression Return MakeSequence(second.Syntax, first, second) End Function ''' <summary> ''' Takes two expressions and makes sequence. ''' </summary> Private Shared Function MakeSequence(syntax As SyntaxNode, first As BoundExpression, second As BoundExpression) As BoundExpression Dim sideeffects = GetSideeffects(first) If sideeffects Is Nothing Then Return second End If Return New BoundSequence(syntax, ImmutableArray(Of LocalSymbol).Empty, ImmutableArray.Create(sideeffects), second, second.Type) End Function ''' <summary> ''' Takes two expressions and makes sequence. ''' </summary> Private Function MakeTernaryConditionalExpression(syntax As SyntaxNode, condition As BoundExpression, whenTrue As BoundExpression, whenFalse As BoundExpression) As BoundExpression Debug.Assert(condition.Type.IsBooleanType, "ternary condition must be boolean") Debug.Assert(whenTrue.Type.IsSameTypeIgnoringAll(whenFalse.Type), "ternary branches must have same types") Dim ifConditionConst = condition.ConstantValueOpt If ifConditionConst IsNot Nothing Then Return MakeSequence(syntax, condition, If(ifConditionConst Is ConstantValue.True, whenTrue, whenFalse)) End If Return TransformRewrittenTernaryConditionalExpression(New BoundTernaryConditionalExpression(syntax, condition, whenTrue, whenFalse, Nothing, whenTrue.Type)) End Function ''' <summary> ''' Returns an expression that can be used instead of the original one when ''' we want to run the expression for side-effects only (i.e. we intend to ignore result). ''' </summary> Private Shared Function GetSideeffects(operand As BoundExpression) As BoundExpression If operand.IsConstant Then Return Nothing End If Select Case operand.Kind Case BoundKind.Local, BoundKind.Parameter Return Nothing Case BoundKind.ObjectCreationExpression If operand.Type.IsNullableType Then Dim objCreation = DirectCast(operand, BoundObjectCreationExpression) Dim args = objCreation.Arguments If args.Length = 0 Then Return Nothing Else Return GetSideeffects(args(0)) End If End If End Select Return operand End Function End Class End Namespace
aelij/roslyn
src/Compilers/VisualBasic/Portable/Lowering/LocalRewriter/LocalRewriter_NullableHelpers.vb
Visual Basic
apache-2.0
24,667
//----------------------------------------------------------------------- // <copyright file="FilterTreeDragDropArgs.cs" company="Development In Progress Ltd"> // Copyright © Development In Progress Ltd 2015. All rights reserved. // </copyright> //----------------------------------------------------------------------- namespace DevelopmentInProgress.WPFControls.FilterTree { /// <summary> /// Arguments for a drag and drop operation in the <see cref="XamlFilterTree"/>. /// </summary> public class FilterTreeDragDropArgs { /// <summary> /// Initialises a new instance of the FilterTreeDragDropArgs class. /// </summary> /// <param name="dragItem">The item being dragged.</param> /// <param name="dropTarget">The target where the dragged item will be dropped.</param> public FilterTreeDragDropArgs(object dragItem, object dropTarget) { DragItem = dragItem; DropTarget = dropTarget; } /// <summary> /// Gets the object being dragged. /// </summary> public object DragItem { get; private set; } /// <summary> /// Gets the drop target for the object being dragged. /// </summary> public object DropTarget { get; private set; } } }
grantcolley/wpfcontrols
DevelopmentInProgress.WPFControls/FilterTree/FilterTreeDragDropArgs.cs
C#
apache-2.0
1,316
package com.structurizr.view; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.structurizr.model.*; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Set; import java.util.stream.Collectors; @JsonIgnoreProperties(ignoreUnknown=true) public abstract class View implements Comparable<View> { private SoftwareSystem softwareSystem; private String softwareSystemId; private String description = ""; private PaperSize paperSize = PaperSize.A4_Portrait; private Set<ElementView> elementViews = new LinkedHashSet<>(); View() { } public View(SoftwareSystem softwareSystem) { this.softwareSystem = softwareSystem; } @JsonIgnore public Model getModel() { return softwareSystem.getModel(); } @JsonIgnore public SoftwareSystem getSoftwareSystem() { return softwareSystem; } public void setSoftwareSystem(SoftwareSystem softwareSystem) { this.softwareSystem = softwareSystem; } public String getSoftwareSystemId() { if (this.softwareSystem != null) { return this.softwareSystem.getId(); } else { return this.softwareSystemId; } } void setSoftwareSystemId(String softwareSystemId) { this.softwareSystemId = softwareSystemId; } public abstract ViewType getType(); public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public PaperSize getPaperSize() { return paperSize; } public void setPaperSize(PaperSize paperSize) { this.paperSize = paperSize; } /** * Adds all software systems in the model to this view. */ public void addAllSoftwareSystems() { getModel().getSoftwareSystems().forEach(this::addElement); } /** * Adds the given software system to this view. * * @param softwareSystem the SoftwareSystem to add */ public void addSoftwareSystem(SoftwareSystem softwareSystem) { addElement(softwareSystem); } /** * Adds all software systems in the model to this view. */ public void addAllPeople() { getModel().getPeople().forEach(this::addElement); } /** * Adds the given person to this view. * * @param person the Person to add */ public void addPerson(Person person) { addElement(person); } protected void addElement(Element element) { if (softwareSystem.getModel().contains(element)) { elementViews.add(new ElementView(element)); } } protected void removeElement(Element element) { ElementView elementView = new ElementView(element); elementViews.remove(elementView); } /** * Gets the set of elements in this view. * * @return a Set of ElementView objects */ public Set<ElementView> getElements() { return elementViews; } public Set<RelationshipView> getRelationships() { Set<Relationship> relationships = new HashSet<>(); Set<Element> elements = getElements().stream() .map(ElementView::getElement) .collect(Collectors.toSet()); elements.forEach(b -> relationships.addAll(b.getRelationships())); return relationships.stream() .filter(r -> elements.contains(r.getSource()) && elements.contains(r.getDestination())) .map(RelationshipView::new) .collect(Collectors.toSet()); } public void setRelationships(Set<RelationshipView> relationships) { // do nothing ... this are determined automatically } /** * Removes all elements that have no relationships * to other elements in this view. */ public void removeElementsWithNoRelationships() { Set<RelationshipView> relationships = getRelationships(); Set<String> elementIds = new HashSet<>(); relationships.forEach(rv -> elementIds.add(rv.getRelationship().getSourceId())); relationships.forEach(rv -> elementIds.add(rv.getRelationship().getDestinationId())); elementViews.removeIf(ev -> !elementIds.contains(ev.getId())); } public void removeElementsThatCantBeReachedFrom(Element element) { Set<String> elementIdsToShow = new HashSet<>(); findElementsToShow(element, elementIdsToShow, 1); elementViews.removeIf(ev -> !elementIdsToShow.contains(ev.getId())); } private void findElementsToShow(Element element, Set<String> elementIds, int depth) { if (elementViews.contains(new ElementView(element))) { elementIds.add(element.getId()); if (depth < 100) { element.getRelationships().forEach(r -> findElementsToShow(r.getDestination(), elementIds, depth + 1)); } } } public abstract String getName(); @Override public int compareTo(View view) { return getTitle().compareTo(view.getTitle()); } private String getTitle() { return getName() + " - " + getDescription(); } ElementView findElementView(Element element) { for (ElementView elementView : getElements()) { if (elementView.getElement().equals(element)) { return elementView; } } return null; } public void copyLayoutInformationFrom(View source) { this.setPaperSize(source.getPaperSize()); for (ElementView sourceElementView : source.getElements()) { ElementView destinationElementView = findElementView(sourceElementView.getElement()); if (destinationElementView != null) { destinationElementView.copyLayoutInformationFrom(sourceElementView); } } } }
JDriven/structurizr-java
structurizr-core/src/com/structurizr/view/View.java
Java
apache-2.0
5,965
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // <auto-generated/> #nullable disable using System.Text.Json; using Azure.Core; namespace Azure.Management.Network.Models { internal partial class ErrorDetails { internal static ErrorDetails DeserializeErrorDetails(JsonElement element) { string code = default; string target = default; string message = default; foreach (var property in element.EnumerateObject()) { if (property.NameEquals("code")) { if (property.Value.ValueKind == JsonValueKind.Null) { continue; } code = property.Value.GetString(); continue; } if (property.NameEquals("target")) { if (property.Value.ValueKind == JsonValueKind.Null) { continue; } target = property.Value.GetString(); continue; } if (property.NameEquals("message")) { if (property.Value.ValueKind == JsonValueKind.Null) { continue; } message = property.Value.GetString(); continue; } } return new ErrorDetails(code, target, message); } } }
stankovski/azure-sdk-for-net
sdk/testcommon/Azure.Management.Network.2020_04/src/Generated/Models/ErrorDetails.Serialization.cs
C#
apache-2.0
1,613
/* * Copyright (c) 2016 The original author or authors * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.vertx.ext.consul; import io.vertx.codegen.annotations.DataObject; import io.vertx.core.json.JsonObject; import java.util.List; /** * Holds network coordinates of node * * @author <a href="mailto:[email protected]">Ruslan Sennov</a> * @see <a href="https://www.consul.io/docs/internals/coordinates.html">Network coordinates</a> */ @DataObject(generateConverter = true) public class Coordinate { private String node; private float adj; private float err; private float height; private List<Float> vec; /** * Default constructor */ public Coordinate() {} /** * Copy constructor * * @param coordinate the one to copy */ public Coordinate(Coordinate coordinate) { this.node = coordinate.node; this.adj = coordinate.adj; this.err = coordinate.err; this.height = coordinate.height; this.vec = coordinate.vec; } /** * Constructor from JSON * * @param coordinate the JSON */ public Coordinate(JsonObject coordinate) { CoordinateConverter.fromJson(coordinate, this); } /** * Convert to JSON * * @return the JSON */ public JsonObject toJson() { JsonObject jsonObject = new JsonObject(); CoordinateConverter.toJson(this, jsonObject); return jsonObject; } /** * Get name of node * * @return name of node */ public String getNode() { return node; } /** * Get adjustment * * @return adjustment */ public float getAdj() { return adj; } /** * Get error * * @return error */ public float getErr() { return err; } /** * Get height * * @return height */ public float getHeight() { return height; } /** * Get vector * * @return vector */ public List<Float> getVec() { return vec; } /** * Set name of node * * @param node name of node * @return reference to this, for fluency */ public Coordinate setNode(String node) { this.node = node; return this; } /** * Set adjustment * * @param adj adjustment * @return reference to this, for fluency */ public Coordinate setAdj(float adj) { this.adj = adj; return this; } /** * Set error * * @param err error * @return reference to this, for fluency */ public Coordinate setErr(float err) { this.err = err; return this; } /** * Set height * * @param height height * @return reference to this, for fluency */ public Coordinate setHeight(float height) { this.height = height; return this; } /** * Set vector * * @param vec vector * @return reference to this, for fluency */ public Coordinate setVec(List<Float> vec) { this.vec = vec; return this; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Coordinate that = (Coordinate) o; if (Float.compare(that.adj, adj) != 0) return false; if (Float.compare(that.err, err) != 0) return false; if (Float.compare(that.height, height) != 0) return false; if (node != null ? !node.equals(that.node) : that.node != null) return false; return vec != null ? vec.equals(that.vec) : that.vec == null; } @Override public int hashCode() { int result = node != null ? node.hashCode() : 0; result = 31 * result + (adj != +0.0f ? Float.floatToIntBits(adj) : 0); result = 31 * result + (err != +0.0f ? Float.floatToIntBits(err) : 0); result = 31 * result + (height != +0.0f ? Float.floatToIntBits(height) : 0); result = 31 * result + (vec != null ? vec.hashCode() : 0); return result; } }
ruslansennov/vertx-consul-client
src/main/java/io/vertx/ext/consul/Coordinate.java
Java
apache-2.0
4,253
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xml.security.test.dom.transforms.implementations; import java.io.ByteArrayInputStream; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathFactory; import org.apache.xml.security.signature.XMLSignatureInput; import org.apache.xml.security.test.dom.DSNamespaceContext; import org.apache.xml.security.transforms.Transforms; import org.apache.xml.security.transforms.implementations.TransformBase64Decode; import org.apache.xml.security.utils.XMLUtils; import org.w3c.dom.Document; import org.w3c.dom.Node; /** * Unit test for {@link org.apache.xml.security.transforms.implementations.TransformBase64Decode} * * @author Christian Geuer-Pollmann */ public class TransformBase64DecodeTest extends org.junit.Assert { static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(TransformBase64DecodeTest.class); static { org.apache.xml.security.Init.init(); } @org.junit.Test public void test1() throws Exception { // base64 encoded String s1 = "VGhlIFVSSSBvZiB0aGUgdHJhbnNmb3JtIGlzIGh0dHA6Ly93d3cudzMub3JnLzIwMDAvMDkveG1s\n" + "ZHNpZyNiYXNlNjQ="; Document doc = TransformBase64DecodeTest.createDocument(); Transforms t = new Transforms(doc); doc.appendChild(t.getElement()); t.addTransform(TransformBase64Decode.implementedTransformURI); XMLSignatureInput in = new XMLSignatureInput(new ByteArrayInputStream(s1.getBytes())); XMLSignatureInput out = t.performTransforms(in); String result = new String(out.getBytes()); assertTrue( result.equals("The URI of the transform is http://www.w3.org/2000/09/xmldsig#base64") ); } @org.junit.Test public void test2() throws Exception { // base64 encoded twice String s2 = "VkdobElGVlNTU0J2WmlCMGFHVWdkSEpoYm5ObWIzSnRJR2x6SUdoMGRIQTZMeTkzZDNjdWR6TXVi\n" + "M0puTHpJd01EQXZNRGt2ZUcxcwpaSE5wWnlOaVlYTmxOalE9"; Document doc = TransformBase64DecodeTest.createDocument(); Transforms t = new Transforms(doc); doc.appendChild(t.getElement()); t.addTransform(TransformBase64Decode.implementedTransformURI); XMLSignatureInput in = new XMLSignatureInput(new ByteArrayInputStream(s2.getBytes())); XMLSignatureInput out = t.performTransforms(t.performTransforms(in)); String result = new String(out.getBytes()); assertTrue( result.equals("The URI of the transform is http://www.w3.org/2000/09/xmldsig#base64") ); } @org.junit.Test public void test3() throws Exception { //J- String input = "" + "<Object xmlns:signature='http://www.w3.org/2000/09/xmldsig#'>\n" + "<signature:Base64>\n" + "VGhlIFVSSSBvZiB0aGU gdHJhbn<RealText>Nmb 3JtIGlzIG<test/>h0dHA6</RealText>Ly93d3cudzMub3JnLzIwMDAvMDkveG1s\n" + "ZHNpZyNiYXNlNjQ=\n" + "</signature:Base64>\n" + "</Object>\n" ; //J+ DocumentBuilder db = XMLUtils.createDocumentBuilder(false); db.setErrorHandler(new org.apache.xml.security.utils.IgnoreAllErrorHandler()); Document doc = db.parse(new ByteArrayInputStream(input.getBytes())); //XMLUtils.circumventBug2650(doc); XPathFactory xpf = XPathFactory.newInstance(); XPath xpath = xpf.newXPath(); xpath.setNamespaceContext(new DSNamespaceContext()); String expression = "//ds:Base64"; Node base64Node = (Node) xpath.evaluate(expression, doc, XPathConstants.NODE); XMLSignatureInput xmlinput = new XMLSignatureInput(base64Node); Document doc2 = TransformBase64DecodeTest.createDocument(); Transforms t = new Transforms(doc2); doc2.appendChild(t.getElement()); t.addTransform(Transforms.TRANSFORM_BASE64_DECODE); XMLSignatureInput out = t.performTransforms(xmlinput); String result = new String(out.getBytes()); assertTrue( "\"" + result + "\"", result.equals("The URI of the transform is http://www.w3.org/2000/09/xmldsig#base64") ); } private static Document createDocument() throws ParserConfigurationException { DocumentBuilder db = XMLUtils.createDocumentBuilder(false); Document doc = db.newDocument(); if (doc == null) { throw new RuntimeException("Could not create a Document"); } else { log.debug("I could create the Document"); } return doc; } }
Legostaev/xmlsec-gost
src/test/java/org/apache/xml/security/test/dom/transforms/implementations/TransformBase64DecodeTest.java
Java
apache-2.0
5,574
/* * Copyright (C) 2010 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zzn.aeassistant.zxing.decoding; import android.app.Activity; import android.content.DialogInterface; /** * Simple listener used to exit the app in a few cases. * * @author Sean Owen */ public final class FinishListener implements DialogInterface.OnClickListener, DialogInterface.OnCancelListener, Runnable { private final Activity activityToFinish; public FinishListener(Activity activityToFinish) { this.activityToFinish = activityToFinish; } @Override public void onCancel(DialogInterface dialogInterface) { run(); } @Override public void onClick(DialogInterface dialogInterface, int i) { run(); } @Override public void run() { activityToFinish.finish(); } }
ShawnDongAi/AEASSISTANT
AEAssistant/src/com/zzn/aeassistant/zxing/decoding/FinishListener.java
Java
apache-2.0
1,311
import { Component, Input, EventEmitter, SimpleChanges, OnChanges } from '@angular/core'; import { ToasterService } from 'angular2-toaster'; import { TranslateService } from 'ng2-translate'; import { Notification } from '../notification.model'; import { NotificationService } from '../notification.service'; import { PaginationComponent } from '../../shared/pagination/pagination.component'; @Component({ moduleId: module.id, selector: 'hip-notifications-list', templateUrl: 'notifications-list.component.html', styleUrls: ['notifications-list.component.css'] }) export class NotificationsListComponent { @Input() notifications: Notification[]; // @Input() selectedStatus: String; // @Input() selectedNotificationType: String; translatedResponse: any; // pagination parameters currentPage = 1; pageSize = 10; totalItems: number; // will contain the notification satisfying the selected status and type filteredNotifications: Notification[] = []; constructor(private notificationService: NotificationService, private toasterService: ToasterService, private translateService: TranslateService) {} private markAsRead(notificationId: number) { this.notificationService.markNotificationAsRead(notificationId) .then( (response: any) => { let readNotification = this.notifications.filter( function (notification) { return notification.id === notificationId; } )[0]; readNotification.read = true; // notify change to the service which notifies the toolbar this.notificationService.announceUnreadNotificationCountDecrease(1); } ).catch( (error: any) => { this.toasterService.pop('error', this.getTranslatedString('Could not mark notification as read')); } ); } getTranslatedString(data: any) { this.translateService.get(data).subscribe( (value: any) => { this.translatedResponse = value; } ); return this.translatedResponse; } getPage(page: number) { this.currentPage = page; } }
HiP-App/HiP-CmsAngularApp
app/notifications/notifications-list/notifications-list.component.ts
TypeScript
apache-2.0
2,139
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * $Header:$ */ package org.apache.beehive.netui.util; import java.util.Map; import java.util.List; import java.lang.reflect.Array; import org.apache.beehive.netui.util.logging.Logger; /** * This class is used by NetUI tags that use parameters. */ public class ParamHelper { private static final Logger logger = Logger.getInstance(ParamHelper.class); /** * Add a new parameter or update an existing parameter's list of values. * <p/> * <em>Implementation Note:</em> in the case that a Map was provided for * the <code>value</code> parameter, the this returns without doing * anything; in any other case, params is updated (even in * <code>value</code> is null). * </p> * <p/> * If value is some object (not an array or list), the string * representation of that object is added as a value for name. If the * value is a list (or array) of objects, then the string representation * of each element is added as a value for name. When there are multiple * values for a name, then an array of Strings is used in Map. * </p> * * @param params an existing Map of names and values to update * @param name the name of the parameter to add or update * @param value an item or list of items to put into the map * @throws IllegalArgumentException in the case that either the params * <p/> * or name given was null */ public static void addParam(Map params, String name, Object value) { if (params == null) throw new IllegalArgumentException("Parameter map cannot be null"); if (name == null) throw new IllegalArgumentException("Parameter name cannot be null"); if (value instanceof Map) { logger.warn(Bundle.getString("Tags_BadParameterType", name)); return; } if (value == null) value = ""; // check to see if we are adding a new element // or if this is an existing element Object o = params.get(name); int length = 0; if (o != null) { assert (o instanceof String || o instanceof String[]); if (o.getClass().isArray()) { length = Array.getLength(o); } else { length++; } } // check how much size the output needs to be if (value.getClass().isArray()) { length += Array.getLength(value); } else if (value instanceof List) { length += ((List) value).size(); } else { length++; } if (length == 0) return; //System.err.println("Number of vaues:" + length); // if there is only a single value push it to the parameter table if (length == 1) { if (value.getClass().isArray()) { Object val = Array.get(value, 0); if (val != null) params.put(name,val.toString()); else params.put(name,""); } else if (value instanceof List) { List list = (List) value; Object val = list.get(0); if (val != null) params.put(name,val.toString()); else params.put(name,""); } else params.put(name,value.toString()); return; } // allocate the string for the multiple values String[] values = new String[length]; int offset = 0; // if we had old values, push them to the new array if (o != null) { if (o.getClass().isArray()) { String[] obs = (String[]) o; for (;offset<obs.length;offset++) { values[offset] = obs[offset]; } } else { values[0] = o.toString(); offset = 1; } } // now move the new values to the array starting at the offset // position if (value.getClass().isArray()) { //need to convert this array into a String[] int size = Array.getLength(value); for (int i=0; i < size; i++) { Object val = Array.get(value, i); if (val != null) values[i+offset] = val.toString(); else values[i+offset] = ""; } } else if (value instanceof List) { List list = (List) value; int size = list.size(); for (int i=0; i < size; i++) { if (list.get(i) != null) values[i+offset] = list.get(i).toString(); else values[i+offset] = ""; } } else { values[offset] = value.toString(); } // store the new values array params.put(name, values); } }
moparisthebest/beehive
beehive-netui-core/src/main/java/org/apache/beehive/netui/util/ParamHelper.java
Java
apache-2.0
5,988
package org.jboss.resteasy.spi; import org.jboss.resteasy.specimpl.MultivaluedMapImpl; import org.jboss.resteasy.specimpl.PathSegmentImpl; import org.jboss.resteasy.specimpl.ResteasyUriBuilder; import org.jboss.resteasy.util.Encode; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.PathSegment; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLDecoder; import java.util.ArrayList; import java.util.List; /** * UriInfo implementation with some added extra methods to help process requests * * @author <a href="mailto:[email protected]">Bill Burke</a> * @version $Revision: 1 $ */ public class ResteasyUriInfo implements UriInfo { private String path; private String encodedPath; private String matchingPath; private MultivaluedMap<String, String> queryParameters; private MultivaluedMap<String, String> encodedQueryParameters; private MultivaluedMap<String, String> pathParameters; private MultivaluedMap<String, String> encodedPathParameters; private MultivaluedMap<String, PathSegment[]> pathParameterPathSegments; private MultivaluedMap<String, PathSegment[]> encodedPathParameterPathSegments; private List<PathSegment> pathSegments; private List<PathSegment> encodedPathSegments; private URI absolutePath; private URI requestURI; private URI baseURI; private List<String> matchedUris; private List<String> encodedMatchedUris; private List<String> encodedMatchedPaths = new ArrayList<String>(); private List<Object> ancestors; public ResteasyUriInfo(URI base, URI relative) { String b = base.toString(); if (!b.endsWith("/")) b += "/"; String r = relative.getRawPath(); if (r.startsWith("/")) { encodedPath = r; path = relative.getPath(); } else { encodedPath = "/" + r; path = "/" + relative.getPath(); } requestURI = UriBuilder.fromUri(base).path(relative.getRawPath()).replaceQuery(relative.getRawQuery()).build(); baseURI = base; encodedPathSegments = PathSegmentImpl.parseSegments(encodedPath, false); this.pathSegments = new ArrayList<PathSegment>(encodedPathSegments.size()); for (PathSegment segment : encodedPathSegments) { pathSegments.add(new PathSegmentImpl(((PathSegmentImpl) segment).getOriginal(), true)); } extractParameters(requestURI.getRawQuery()); extractMatchingPath(encodedPathSegments); absolutePath = UriBuilder.fromUri(requestURI).replaceQuery(null).build(); } public ResteasyUriInfo(URI requestURI) { String r = requestURI.getRawPath(); if (r.startsWith("/")) { encodedPath = r; path = requestURI.getPath(); } else { encodedPath = "/" + r; path = "/" + requestURI.getPath(); } this.requestURI = requestURI; baseURI = UriBuilder.fromUri(requestURI).replacePath("").build(); encodedPathSegments = PathSegmentImpl.parseSegments(encodedPath, false); this.pathSegments = new ArrayList<PathSegment>(encodedPathSegments.size()); for (PathSegment segment : encodedPathSegments) { pathSegments.add(new PathSegmentImpl(((PathSegmentImpl) segment).getOriginal(), true)); } extractParameters(requestURI.getRawQuery()); extractMatchingPath(encodedPathSegments); absolutePath = UriBuilder.fromUri(requestURI).replaceQuery(null).build(); } /** * matching path without matrix parameters * * @param encodedPathSegments */ protected void extractMatchingPath(List<PathSegment> encodedPathSegments) { StringBuilder preprocessedPath = new StringBuilder(); for (PathSegment pathSegment : encodedPathSegments) { preprocessedPath.append("/").append(pathSegment.getPath()); } matchingPath = preprocessedPath.toString(); } /** * Encoded path without matrix parameters * * @return */ public String getMatchingPath() { return matchingPath; } /** * Create a UriInfo from the baseURI * * @param relative * @return */ public ResteasyUriInfo setRequestUri(URI relative) { String rel = relative.toString(); if (rel.startsWith(baseURI.toString())) { relative = URI.create(rel.substring(baseURI.toString().length())); } return new ResteasyUriInfo(baseURI, relative); } public String getPath() { return path; } public String getPath(boolean decode) { if (decode) return getPath(); return encodedPath; } public List<PathSegment> getPathSegments() { return pathSegments; } public List<PathSegment> getPathSegments(boolean decode) { if (decode) return getPathSegments(); return encodedPathSegments; } public URI getRequestUri() { return requestURI; } public UriBuilder getRequestUriBuilder() { return UriBuilder.fromUri(requestURI); } public URI getAbsolutePath() { return absolutePath; } public UriBuilder getAbsolutePathBuilder() { return UriBuilder.fromUri(absolutePath); } public URI getBaseUri() { return baseURI; } public UriBuilder getBaseUriBuilder() { return UriBuilder.fromUri(baseURI); } public MultivaluedMap<String, String> getPathParameters() { if (pathParameters == null) { pathParameters = new MultivaluedMapImpl<String, String>(); } return pathParameters; } public void addEncodedPathParameter(String name, String value) { getEncodedPathParameters().add(name, value); String value1 = Encode.decodePath(value); getPathParameters().add(name, value1); } private MultivaluedMap<String, String> getEncodedPathParameters() { if (encodedPathParameters == null) { encodedPathParameters = new MultivaluedMapImpl<String, String>(); } return encodedPathParameters; } public MultivaluedMap<String, PathSegment[]> getEncodedPathParameterPathSegments() { if (encodedPathParameterPathSegments == null) { encodedPathParameterPathSegments = new MultivaluedMapImpl<String, PathSegment[]>(); } return encodedPathParameterPathSegments; } public MultivaluedMap<String, PathSegment[]> getPathParameterPathSegments() { if (pathParameterPathSegments == null) { pathParameterPathSegments = new MultivaluedMapImpl<String, PathSegment[]>(); } return pathParameterPathSegments; } public MultivaluedMap<String, String> getPathParameters(boolean decode) { if (decode) return getPathParameters(); return getEncodedPathParameters(); } public MultivaluedMap<String, String> getQueryParameters() { if (queryParameters == null) { queryParameters = new MultivaluedMapImpl<String, String>(); } return queryParameters; } protected MultivaluedMap<String, String> getEncodedQueryParameters() { if (encodedQueryParameters == null) { this.encodedQueryParameters = new MultivaluedMapImpl<String, String>(); } return encodedQueryParameters; } public MultivaluedMap<String, String> getQueryParameters(boolean decode) { if (decode) return getQueryParameters(); else return getEncodedQueryParameters(); } protected void extractParameters(String queryString) { if (queryString == null || queryString.equals("")) return; String[] params = queryString.split("&"); for (String param : params) { if (param.indexOf('=') >= 0) { String[] nv = param.split("=", 2); try { String name = URLDecoder.decode(nv[0], "UTF-8"); String val = nv.length > 1 ? nv[1] : ""; getEncodedQueryParameters().add(name, val); getQueryParameters().add(name, URLDecoder.decode(val, "UTF-8")); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } } else { try { String name = URLDecoder.decode(param, "UTF-8"); getEncodedQueryParameters().add(name, ""); getQueryParameters().add(name, ""); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } } } } public List<String> getMatchedURIs(boolean decode) { if (decode) { if (matchedUris == null) matchedUris = new ArrayList<String>(); return matchedUris; } else { if (encodedMatchedUris == null) encodedMatchedUris = new ArrayList<String>(); return encodedMatchedUris; } } public List<String> getMatchedURIs() { return getMatchedURIs(true); } public List<Object> getMatchedResources() { if (ancestors == null) ancestors = new ArrayList<Object>(); return ancestors; } public void pushCurrentResource(Object resource) { if (ancestors == null) ancestors = new ArrayList<Object>(); ancestors.add(0, resource); } public void pushMatchedPath(String encoded) { encodedMatchedPaths.add(0, encoded); } public List<String> getEncodedMatchedPaths() { return encodedMatchedPaths; } public void popMatchedPath() { encodedMatchedPaths.remove(0); } public void pushMatchedURI(String encoded) { if (encoded.endsWith("/")) encoded = encoded.substring(0, encoded.length() - 1); if (encoded.startsWith("/")) encoded = encoded.substring(1); String decoded = Encode.decode(encoded); if (encodedMatchedUris == null) encodedMatchedUris = new ArrayList<String>(); encodedMatchedUris.add(0, encoded); if (matchedUris == null) matchedUris = new ArrayList<String>(); matchedUris.add(0, decoded); } @Override public URI resolve(URI uri) { return getBaseUri().resolve(uri); } @Override public URI relativize(URI uri) { URI from = getRequestUri(); URI to = uri; if (uri.getScheme() == null && uri.getHost() == null) { to = getBaseUriBuilder().replaceQuery(null).path(uri.getPath()).replaceQuery(uri.getQuery()).fragment(uri.getFragment()).build(); } return ResteasyUriBuilder.relativize(from, to); } }
raphaelning/resteasy-client-android
jaxrs/resteasy-jaxrs/src/main/java/org/jboss/resteasy/spi/ResteasyUriInfo.java
Java
apache-2.0
10,699
package com.cabinetms.client; import java.util.List; import com.google.common.collect.Lists; public class TacticMediaCommand { private String command; // 指令 private String clientIp; // 终端IP地址 private String destination; // 终端队列地址 private Integer startDate;// 策略开始日期 private Integer endDate;// 策略结束日期 private List<TacticDetailMediaCommand> detailList = Lists.newLinkedList(); public List<TacticDetailMediaCommand> getDetailList() { return detailList; } public void setDetailList(List<TacticDetailMediaCommand> detailList) { this.detailList = detailList; } public String getCommand() { return command; } public void setCommand(String command) { this.command = command; } public String getClientIp() { return clientIp; } public void setClientIp(String clientIp) { this.clientIp = clientIp; } public String getDestination() { return destination; } public void setDestination(String destination) { this.destination = destination; } public Integer getStartDate() { return startDate; } public void setStartDate(Integer startDate) { this.startDate = startDate; } public Integer getEndDate() { return endDate; } public void setEndDate(Integer endDate) { this.endDate = endDate; } }
wangning82/CabinetMS
src/main/java/com/cabinetms/client/TacticMediaCommand.java
Java
apache-2.0
1,292
# Leckenbya A.C. Seward, 1894 GENUS #### Status ACCEPTED #### According to Interim Register of Marine and Nonmarine Genera #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Leckenbya/README.md
Markdown
apache-2.0
191
# Uromyces ciceris-soongaricae S. Ahmad SPECIES #### Status ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Uromyces ciceris-soongaricae S. Ahmad ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Pucciniomycetes/Pucciniales/Pucciniaceae/Uromyces/Uromyces ciceris-soongaricae/README.md
Markdown
apache-2.0
203
/** * @license Highcharts JS v7.1.1 (2019-04-09) * * (c) 2014-2019 Highsoft AS * Authors: Jon Arild Nygard / Oystein Moseng * * License: www.highcharts.com/license */ 'use strict'; (function (factory) { if (typeof module === 'object' && module.exports) { factory['default'] = factory; module.exports = factory; } else if (typeof define === 'function' && define.amd) { define('highcharts/modules/treemap', ['highcharts'], function (Highcharts) { factory(Highcharts); factory.Highcharts = Highcharts; return factory; }); } else { factory(typeof Highcharts !== 'undefined' ? Highcharts : undefined); } }(function (Highcharts) { var _modules = Highcharts ? Highcharts._modules : {}; function _registerModule(obj, path, args, fn) { if (!obj.hasOwnProperty(path)) { obj[path] = fn.apply(null, args); } } _registerModule(_modules, 'mixins/tree-series.js', [_modules['parts/Globals.js']], function (H) { var extend = H.extend, isArray = H.isArray, isBoolean = function (x) { return typeof x === 'boolean'; }, isFn = function (x) { return typeof x === 'function'; }, isObject = H.isObject, isNumber = H.isNumber, merge = H.merge, pick = H.pick; // TODO Combine buildTree and buildNode with setTreeValues // TODO Remove logic from Treemap and make it utilize this mixin. var setTreeValues = function setTreeValues(tree, options) { var before = options.before, idRoot = options.idRoot, mapIdToNode = options.mapIdToNode, nodeRoot = mapIdToNode[idRoot], levelIsConstant = ( isBoolean(options.levelIsConstant) ? options.levelIsConstant : true ), points = options.points, point = points[tree.i], optionsPoint = point && point.options || {}, childrenTotal = 0, children = [], value; extend(tree, { levelDynamic: tree.level - (levelIsConstant ? 0 : nodeRoot.level), name: pick(point && point.name, ''), visible: ( idRoot === tree.id || (isBoolean(options.visible) ? options.visible : false) ) }); if (isFn(before)) { tree = before(tree, options); } // First give the children some values tree.children.forEach(function (child, i) { var newOptions = extend({}, options); extend(newOptions, { index: i, siblings: tree.children.length, visible: tree.visible }); child = setTreeValues(child, newOptions); children.push(child); if (child.visible) { childrenTotal += child.val; } }); tree.visible = childrenTotal > 0 || tree.visible; // Set the values value = pick(optionsPoint.value, childrenTotal); extend(tree, { children: children, childrenTotal: childrenTotal, isLeaf: tree.visible && !childrenTotal, val: value }); return tree; }; var getColor = function getColor(node, options) { var index = options.index, mapOptionsToLevel = options.mapOptionsToLevel, parentColor = options.parentColor, parentColorIndex = options.parentColorIndex, series = options.series, colors = options.colors, siblings = options.siblings, points = series.points, getColorByPoint, chartOptionsChart = series.chart.options.chart, point, level, colorByPoint, colorIndexByPoint, color, colorIndex; function variation(color) { var colorVariation = level && level.colorVariation; if (colorVariation) { if (colorVariation.key === 'brightness') { return H.color(color).brighten( colorVariation.to * (index / siblings) ).get(); } } return color; } if (node) { point = points[node.i]; level = mapOptionsToLevel[node.level] || {}; getColorByPoint = point && level.colorByPoint; if (getColorByPoint) { colorIndexByPoint = point.index % (colors ? colors.length : chartOptionsChart.colorCount ); colorByPoint = colors && colors[colorIndexByPoint]; } // Select either point color, level color or inherited color. if (!series.chart.styledMode) { color = pick( point && point.options.color, level && level.color, colorByPoint, parentColor && variation(parentColor), series.color ); } colorIndex = pick( point && point.options.colorIndex, level && level.colorIndex, colorIndexByPoint, parentColorIndex, options.colorIndex ); } return { color: color, colorIndex: colorIndex }; }; /** * Creates a map from level number to its given options. * * @private * @function getLevelOptions * * @param {object} params * Object containing parameters. * - `defaults` Object containing default options. The default options * are merged with the userOptions to get the final options for a * specific level. * - `from` The lowest level number. * - `levels` User options from series.levels. * - `to` The highest level number. * * @return {Highcharts.Dictionary<object>} * Returns a map from level number to its given options. */ var getLevelOptions = function getLevelOptions(params) { var result = null, defaults, converted, i, from, to, levels; if (isObject(params)) { result = {}; from = isNumber(params.from) ? params.from : 1; levels = params.levels; converted = {}; defaults = isObject(params.defaults) ? params.defaults : {}; if (isArray(levels)) { converted = levels.reduce(function (obj, item) { var level, levelIsConstant, options; if (isObject(item) && isNumber(item.level)) { options = merge({}, item); levelIsConstant = ( isBoolean(options.levelIsConstant) ? options.levelIsConstant : defaults.levelIsConstant ); // Delete redundant properties. delete options.levelIsConstant; delete options.level; // Calculate which level these options apply to. level = item.level + (levelIsConstant ? 0 : from - 1); if (isObject(obj[level])) { extend(obj[level], options); } else { obj[level] = options; } } return obj; }, {}); } to = isNumber(params.to) ? params.to : 1; for (i = 0; i <= to; i++) { result[i] = merge( {}, defaults, isObject(converted[i]) ? converted[i] : {} ); } } return result; }; /** * Update the rootId property on the series. Also makes sure that it is * accessible to exporting. * * @private * @function updateRootId * * @param {object} series * The series to operate on. * * @return {string} * Returns the resulting rootId after update. */ var updateRootId = function (series) { var rootId, options; if (isObject(series)) { // Get the series options. options = isObject(series.options) ? series.options : {}; // Calculate the rootId. rootId = pick(series.rootNode, options.rootId, ''); // Set rootId on series.userOptions to pick it up in exporting. if (isObject(series.userOptions)) { series.userOptions.rootId = rootId; } // Set rootId on series to pick it up on next update. series.rootNode = rootId; } return rootId; }; var result = { getColor: getColor, getLevelOptions: getLevelOptions, setTreeValues: setTreeValues, updateRootId: updateRootId }; return result; }); _registerModule(_modules, 'mixins/draw-point.js', [], function () { var isFn = function (x) { return typeof x === 'function'; }; /** * Handles the drawing of a component. * Can be used for any type of component that reserves the graphic property, and * provides a shouldDraw on its context. * * @private * @function draw * * @param {object} params * Parameters. * * TODO: add type checking. * TODO: export this function to enable usage */ var draw = function draw(params) { var component = this, graphic = component.graphic, animatableAttribs = params.animatableAttribs, onComplete = params.onComplete, css = params.css, renderer = params.renderer; if (component.shouldDraw()) { if (!graphic) { component.graphic = graphic = renderer[params.shapeType](params.shapeArgs).add(params.group); } graphic .css(css) .attr(params.attribs) .animate( animatableAttribs, params.isNew ? false : undefined, onComplete ); } else if (graphic) { var destroy = function () { component.graphic = graphic = graphic.destroy(); if (isFn(onComplete)) { onComplete(); } }; // animate only runs complete callback if something was animated. if (Object.keys(animatableAttribs).length) { graphic.animate(animatableAttribs, undefined, function () { destroy(); }); } else { destroy(); } } }; /** * An extended version of draw customized for points. * It calls additional methods that is expected when rendering a point. * * @param {object} params Parameters */ var drawPoint = function drawPoint(params) { var point = this, attribs = params.attribs = params.attribs || {}; // Assigning class in dot notation does go well in IE8 // eslint-disable-next-line dot-notation attribs['class'] = point.getClassName(); // Call draw to render component draw.call(point, params); }; return drawPoint; }); _registerModule(_modules, 'modules/treemap.src.js', [_modules['parts/Globals.js'], _modules['mixins/tree-series.js'], _modules['mixins/draw-point.js']], function (H, mixinTreeSeries, drawPoint) { /* * * (c) 2014-2019 Highsoft AS * * Authors: Jon Arild Nygard / Oystein Moseng * * License: www.highcharts.com/license */ var seriesType = H.seriesType, seriesTypes = H.seriesTypes, addEvent = H.addEvent, merge = H.merge, extend = H.extend, error = H.error, defined = H.defined, noop = H.noop, fireEvent = H.fireEvent, getColor = mixinTreeSeries.getColor, getLevelOptions = mixinTreeSeries.getLevelOptions, isArray = H.isArray, isBoolean = function (x) { return typeof x === 'boolean'; }, isNumber = H.isNumber, isObject = H.isObject, isString = H.isString, pick = H.pick, Series = H.Series, stableSort = H.stableSort, color = H.Color, eachObject = function (list, func, context) { context = context || this; H.objectEach(list, function (val, key) { func.call(context, val, key, list); }); }, // @todo find correct name for this function. // @todo Similar to reduce, this function is likely redundant recursive = function (item, func, context) { var next; context = context || this; next = func.call(context, item); if (next !== false) { recursive(next, func, context); } }, updateRootId = mixinTreeSeries.updateRootId; /** * @private * @class * @name Highcharts.seriesTypes.treemap * * @augments Highcharts.Series */ seriesType( 'treemap', 'scatter' /** * A treemap displays hierarchical data using nested rectangles. The data * can be laid out in varying ways depending on options. * * @sample highcharts/demo/treemap-large-dataset/ * Treemap * * @extends plotOptions.scatter * @excluding marker, jitter * @product highcharts * @optionparent plotOptions.treemap */ , { /** * When enabled the user can click on a point which is a parent and * zoom in on its children. Deprecated and replaced by * [allowTraversingTree](#plotOptions.treemap.allowTraversingTree). * * @sample {highcharts} highcharts/plotoptions/treemap-allowdrilltonode/ * Enabled * * @deprecated * @type {boolean} * @default false * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.allowDrillToNode */ /** * When enabled the user can click on a point which is a parent and * zoom in on its children. * * @sample {highcharts} highcharts/plotoptions/treemap-allowtraversingtree/ * Enabled * * @since 7.0.3 * @product highcharts */ allowTraversingTree: false, animationLimit: 250, /** * When the series contains less points than the crop threshold, all * points are drawn, event if the points fall outside the visible plot * area at the current zoom. The advantage of drawing all points * (including markers and columns), is that animation is performed on * updates. On the other hand, when the series contains more points than * the crop threshold, the series data is cropped to only contain points * that fall within the plot area. The advantage of cropping away * invisible points is to increase performance on large series. * * @type {number} * @default 300 * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.cropThreshold */ /** * Fires on a request for change of root node for the tree, before the * update is made. An event object is passed to the function, containing * additional properties `newRootId`, `previousRootId`, `redraw` and * `trigger`. * * @type {function} * @default undefined * @sample {highcharts} highcharts/plotoptions/treemap-events-setrootnode/ * Alert update information on setRootNode event. * @since 7.0.3 * @product highcharts * @apioption plotOptions.treemap.events.setRootNode */ /** * This option decides if the user can interact with the parent nodes * or just the leaf nodes. When this option is undefined, it will be * true by default. However when allowTraversingTree is true, then it * will be false by default. * * @sample {highcharts} highcharts/plotoptions/treemap-interactbyleaf-false/ * False * @sample {highcharts} highcharts/plotoptions/treemap-interactbyleaf-true-and-allowtraversingtree/ * InteractByLeaf and allowTraversingTree is true * * @type {boolean} * @since 4.1.2 * @product highcharts * @apioption plotOptions.treemap.interactByLeaf */ /** * The sort index of the point inside the treemap level. * * @sample {highcharts} highcharts/plotoptions/treemap-sortindex/ * Sort by years * * @type {number} * @since 4.1.10 * @product highcharts * @apioption plotOptions.treemap.sortIndex */ /** * When using automatic point colors pulled from the `options.colors` * collection, this option determines whether the chart should receive * one color per series or one color per point. * * @see [series colors](#plotOptions.treemap.colors) * * @type {boolean} * @default false * @since 2.0 * @product highcharts * @apioption plotOptions.treemap.colorByPoint */ /** * A series specific or series type specific color set to apply instead * of the global [colors](#colors) when * [colorByPoint](#plotOptions.treemap.colorByPoint) is true. * * @type {Array<Highcharts.ColorString|Highcharts.GradientColorObject|Highcharts.PatternObject>} * @since 3.0 * @product highcharts * @apioption plotOptions.treemap.colors */ /** * Whether to display this series type or specific series item in the * legend. */ showInLegend: false, /** * @ignore-option */ marker: false, colorByPoint: false, /** * @since 4.1.0 */ dataLabels: { /** @ignore-option */ defer: false, /** @ignore-option */ enabled: true, /** @ignore-option */ formatter: function () { var point = this && this.point ? this.point : {}, name = isString(point.name) ? point.name : ''; return name; }, /** @ignore-option */ inside: true, /** @ignore-option */ verticalAlign: 'middle' }, tooltip: { headerFormat: '', pointFormat: '<b>{point.name}</b>: {point.value}<br/>' }, /** * Whether to ignore hidden points when the layout algorithm runs. * If `false`, hidden points will leave open spaces. * * @since 5.0.8 */ ignoreHiddenPoint: true, /** * This option decides which algorithm is used for setting position * and dimensions of the points. * * @see [How to write your own algorithm](https://www.highcharts.com/docs/chart-and-series-types/treemap) * * @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-sliceanddice/ * SliceAndDice by default * @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-stripes/ * Stripes * @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-squarified/ * Squarified * @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-strip/ * Strip * * @since 4.1.0 * @validvalue ["sliceAndDice", "stripes", "squarified", "strip"] */ layoutAlgorithm: 'sliceAndDice', /** * Defines which direction the layout algorithm will start drawing. * * @since 4.1.0 * @validvalue ["vertical", "horizontal"] */ layoutStartingDirection: 'vertical', /** * Enabling this option will make the treemap alternate the drawing * direction between vertical and horizontal. The next levels starting * direction will always be the opposite of the previous. * * @sample {highcharts} highcharts/plotoptions/treemap-alternatestartingdirection-true/ * Enabled * * @since 4.1.0 */ alternateStartingDirection: false, /** * Used together with the levels and allowTraversingTree options. When * set to false the first level visible to be level one, which is * dynamic when traversing the tree. Otherwise the level will be the * same as the tree structure. * * @since 4.1.0 */ levelIsConstant: true, /** * Options for the button appearing when drilling down in a treemap. * Deprecated and replaced by * [traverseUpButton](#plotOptions.treemap.traverseUpButton). * * @deprecated */ drillUpButton: { /** * The position of the button. * * @deprecated */ position: { /** * Vertical alignment of the button. * * @deprecated * @type {Highcharts.VerticalAlignValue} * @default top * @product highcharts * @apioption plotOptions.treemap.drillUpButton.position.verticalAlign */ /** * Horizontal alignment of the button. * * @deprecated * @type {Highcharts.AlignValue} */ align: 'right', /** * Horizontal offset of the button. * * @deprecated */ x: -10, /** * Vertical offset of the button. * * @deprecated */ y: 10 } }, /** * Options for the button appearing when traversing down in a treemap. */ traverseUpButton: { /** * The position of the button. */ position: { /** * Vertical alignment of the button. * * @type {Highcharts.VerticalAlignValue} * @default top * @product highcharts * @apioption plotOptions.treemap.traverseUpButton.position.verticalAlign */ /** * Horizontal alignment of the button. * * @type {Highcharts.AlignValue} */ align: 'right', /** * Horizontal offset of the button. */ x: -10, /** * Vertical offset of the button. */ y: 10 } }, /** * Set options on specific levels. Takes precedence over series options, * but not point options. * * @sample {highcharts} highcharts/plotoptions/treemap-levels/ * Styling dataLabels and borders * @sample {highcharts} highcharts/demo/treemap-with-levels/ * Different layoutAlgorithm * * @type {Array<*>} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels */ /** * Can set a `borderColor` on all points which lies on the same level. * * @type {Highcharts.ColorString} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.borderColor */ /** * Set the dash style of the border of all the point which lies on the * level. See <a href"#plotoptions.scatter.dashstyle"> * plotOptions.scatter.dashStyle</a> for possible options. * * @type {string} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.borderDashStyle */ /** * Can set the borderWidth on all points which lies on the same level. * * @type {number} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.borderWidth */ /** * Can set a color on all points which lies on the same level. * * @type {Highcharts.ColorString|Highcharts.GradientColorObject|Highcharts.PatternObject} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.color */ /** * A configuration object to define how the color of a child varies from * the parent's color. The variation is distributed among the children * of node. For example when setting brightness, the brightness change * will range from the parent's original brightness on the first child, * to the amount set in the `to` setting on the last node. This allows a * gradient-like color scheme that sets children out from each other * while highlighting the grouping on treemaps and sectors on sunburst * charts. * * @sample highcharts/demo/sunburst/ * Sunburst with color variation * * @since 6.0.0 * @product highcharts * @apioption plotOptions.treemap.levels.colorVariation */ /** * The key of a color variation. Currently supports `brightness` only. * * @type {string} * @since 6.0.0 * @product highcharts * @validvalue ["brightness"] * @apioption plotOptions.treemap.levels.colorVariation.key */ /** * The ending value of a color variation. The last sibling will receive * this value. * * @type {number} * @since 6.0.0 * @product highcharts * @apioption plotOptions.treemap.levels.colorVariation.to */ /** * Can set the options of dataLabels on each point which lies on the * level. * [plotOptions.treemap.dataLabels](#plotOptions.treemap.dataLabels) for * possible values. * * @type {object} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.dataLabels */ /** * Can set the layoutAlgorithm option on a specific level. * * @type {string} * @since 4.1.0 * @product highcharts * @validvalue ["sliceAndDice", "stripes", "squarified", "strip"] * @apioption plotOptions.treemap.levels.layoutAlgorithm */ /** * Can set the layoutStartingDirection option on a specific level. * * @type {string} * @since 4.1.0 * @product highcharts * @validvalue ["vertical", "horizontal"] * @apioption plotOptions.treemap.levels.layoutStartingDirection */ /** * Decides which level takes effect from the options set in the levels * object. * * @sample {highcharts} highcharts/plotoptions/treemap-levels/ * Styling of both levels * * @type {number} * @since 4.1.0 * @product highcharts * @apioption plotOptions.treemap.levels.level */ // Presentational options /** * The color of the border surrounding each tree map item. * * @type {Highcharts.ColorString} */ borderColor: '#e6e6e6', /** * The width of the border surrounding each tree map item. */ borderWidth: 1, /** * The opacity of a point in treemap. When a point has children, the * visibility of the children is determined by the opacity. * * @since 4.2.4 */ opacity: 0.15, /** * A wrapper object for all the series options in specific states. * * @extends plotOptions.heatmap.states */ states: { /** * Options for the hovered series * * @extends plotOptions.heatmap.states.hover * @excluding halo */ hover: { /** * The border color for the hovered state. */ borderColor: '#999999', /** * Brightness for the hovered point. Defaults to 0 if the * heatmap series is loaded first, otherwise 0.1. * * @type {number} * @default undefined */ brightness: seriesTypes.heatmap ? 0 : 0.1, /** * @extends plotOptions.heatmap.states.hover.halo */ halo: false, /** * The opacity of a point in treemap. When a point has children, * the visibility of the children is determined by the opacity. * * @since 4.2.4 */ opacity: 0.75, /** * The shadow option for hovered state. */ shadow: false } } // Prototype members }, { pointArrayMap: ['value'], directTouch: true, optionalAxis: 'colorAxis', getSymbol: noop, parallelArrays: ['x', 'y', 'value', 'colorValue'], colorKey: 'colorValue', // Point color option key trackerGroups: ['group', 'dataLabelsGroup'], /** * Creates an object map from parent id to childrens index. * * @private * @function Highcharts.Series#getListOfParents * * @param {Highcharts.SeriesTreemapDataOptions} data * List of points set in options. * * @param {Array<string>} existingIds * List of all point ids. * * @return {object} * Map from parent id to children index in data. */ getListOfParents: function (data, existingIds) { var arr = isArray(data) ? data : [], ids = isArray(existingIds) ? existingIds : [], listOfParents = arr.reduce(function (prev, curr, i) { var parent = pick(curr.parent, ''); if (prev[parent] === undefined) { prev[parent] = []; } prev[parent].push(i); return prev; }, { '': [] // Root of tree }); // If parent does not exist, hoist parent to root of tree. eachObject(listOfParents, function (children, parent, list) { if ((parent !== '') && (ids.indexOf(parent) === -1)) { children.forEach(function (child) { list[''].push(child); }); delete list[parent]; } }); return listOfParents; }, // Creates a tree structured object from the series points getTree: function () { var series = this, allIds = this.data.map(function (d) { return d.id; }), parentList = series.getListOfParents(this.data, allIds); series.nodeMap = []; return series.buildNode('', -1, 0, parentList, null); }, // Define hasData function for non-cartesian series. // Returns true if the series has points at all. hasData: function () { return !!this.processedXData.length; // != 0 }, init: function (chart, options) { var series = this, colorSeriesMixin = H.colorSeriesMixin; // If color series logic is loaded, add some properties if (H.colorSeriesMixin) { this.translateColors = colorSeriesMixin.translateColors; this.colorAttribs = colorSeriesMixin.colorAttribs; this.axisTypes = colorSeriesMixin.axisTypes; } // Handle deprecated options. addEvent(series, 'setOptions', function (event) { var options = event.userOptions; if ( defined(options.allowDrillToNode) && !defined(options.allowTraversingTree) ) { options.allowTraversingTree = options.allowDrillToNode; delete options.allowDrillToNode; } if ( defined(options.drillUpButton) && !defined(options.traverseUpButton) ) { options.traverseUpButton = options.drillUpButton; delete options.drillUpButton; } }); Series.prototype.init.call(series, chart, options); if (series.options.allowTraversingTree) { addEvent(series, 'click', series.onClickDrillToNode); } }, buildNode: function (id, i, level, list, parent) { var series = this, children = [], point = series.points[i], height = 0, node, child; // Actions ((list[id] || [])).forEach(function (i) { child = series.buildNode( series.points[i].id, i, (level + 1), list, id ); height = Math.max(child.height + 1, height); children.push(child); }); node = { id: id, i: i, children: children, height: height, level: level, parent: parent, visible: false // @todo move this to better location }; series.nodeMap[node.id] = node; if (point) { point.node = node; } return node; }, setTreeValues: function (tree) { var series = this, options = series.options, idRoot = series.rootNode, mapIdToNode = series.nodeMap, nodeRoot = mapIdToNode[idRoot], levelIsConstant = ( isBoolean(options.levelIsConstant) ? options.levelIsConstant : true ), childrenTotal = 0, children = [], val, point = series.points[tree.i]; // First give the children some values tree.children.forEach(function (child) { child = series.setTreeValues(child); children.push(child); if (!child.ignore) { childrenTotal += child.val; } }); // Sort the children stableSort(children, function (a, b) { return a.sortIndex - b.sortIndex; }); // Set the values val = pick(point && point.options.value, childrenTotal); if (point) { point.value = val; } extend(tree, { children: children, childrenTotal: childrenTotal, // Ignore this node if point is not visible ignore: !(pick(point && point.visible, true) && (val > 0)), isLeaf: tree.visible && !childrenTotal, levelDynamic: ( tree.level - (levelIsConstant ? 0 : nodeRoot.level) ), name: pick(point && point.name, ''), sortIndex: pick(point && point.sortIndex, -val), val: val }); return tree; }, /** * Recursive function which calculates the area for all children of a * node. * * @private * @function Highcharts.Series#calculateChildrenAreas * * @param {object} node * The node which is parent to the children. * * @param {object} area * The rectangular area of the parent. */ calculateChildrenAreas: function (parent, area) { var series = this, options = series.options, mapOptionsToLevel = series.mapOptionsToLevel, level = mapOptionsToLevel[parent.level + 1], algorithm = pick( ( series[level && level.layoutAlgorithm] && level.layoutAlgorithm ), options.layoutAlgorithm ), alternate = options.alternateStartingDirection, childrenValues = [], children; // Collect all children which should be included children = parent.children.filter(function (n) { return !n.ignore; }); if (level && level.layoutStartingDirection) { area.direction = level.layoutStartingDirection === 'vertical' ? 0 : 1; } childrenValues = series[algorithm](area, children); children.forEach(function (child, index) { var values = childrenValues[index]; child.values = merge(values, { val: child.childrenTotal, direction: (alternate ? 1 - area.direction : area.direction) }); child.pointValues = merge(values, { x: (values.x / series.axisRatio), width: (values.width / series.axisRatio) }); // If node has children, then call method recursively if (child.children.length) { series.calculateChildrenAreas(child, child.values); } }); }, setPointValues: function () { var series = this, xAxis = series.xAxis, yAxis = series.yAxis; series.points.forEach(function (point) { var node = point.node, values = node.pointValues, x1, x2, y1, y2, crispCorr = 0; // Get the crisp correction in classic mode. For this to work in // styled mode, we would need to first add the shape (without x, // y, width and height), then read the rendered stroke width // using point.graphic.strokeWidth(), then modify and apply the // shapeArgs. This applies also to column series, but the // downside is performance and code complexity. if (!series.chart.styledMode) { crispCorr = ( (series.pointAttribs(point)['stroke-width'] || 0) % 2 ) / 2; } // Points which is ignored, have no values. if (values && node.visible) { x1 = Math.round( xAxis.translate(values.x, 0, 0, 0, 1) ) - crispCorr; x2 = Math.round( xAxis.translate(values.x + values.width, 0, 0, 0, 1) ) - crispCorr; y1 = Math.round( yAxis.translate(values.y, 0, 0, 0, 1) ) - crispCorr; y2 = Math.round( yAxis.translate(values.y + values.height, 0, 0, 0, 1) ) - crispCorr; // Set point values point.shapeArgs = { x: Math.min(x1, x2), y: Math.min(y1, y2), width: Math.abs(x2 - x1), height: Math.abs(y2 - y1) }; point.plotX = point.shapeArgs.x + (point.shapeArgs.width / 2); point.plotY = point.shapeArgs.y + (point.shapeArgs.height / 2); } else { // Reset visibility delete point.plotX; delete point.plotY; } }); }, // Set the node's color recursively, from the parent down. setColorRecursive: function ( node, parentColor, colorIndex, index, siblings ) { var series = this, chart = series && series.chart, colors = chart && chart.options && chart.options.colors, colorInfo, point; if (node) { colorInfo = getColor(node, { colors: colors, index: index, mapOptionsToLevel: series.mapOptionsToLevel, parentColor: parentColor, parentColorIndex: colorIndex, series: series, siblings: siblings }); point = series.points[node.i]; if (point) { point.color = colorInfo.color; point.colorIndex = colorInfo.colorIndex; } // Do it all again with the children (node.children || []).forEach(function (child, i) { series.setColorRecursive( child, colorInfo.color, colorInfo.colorIndex, i, node.children.length ); }); } }, algorithmGroup: function (h, w, d, p) { this.height = h; this.width = w; this.plot = p; this.direction = d; this.startDirection = d; this.total = 0; this.nW = 0; this.lW = 0; this.nH = 0; this.lH = 0; this.elArr = []; this.lP = { total: 0, lH: 0, nH: 0, lW: 0, nW: 0, nR: 0, lR: 0, aspectRatio: function (w, h) { return Math.max((w / h), (h / w)); } }; this.addElement = function (el) { this.lP.total = this.elArr[this.elArr.length - 1]; this.total = this.total + el; if (this.direction === 0) { // Calculate last point old aspect ratio this.lW = this.nW; this.lP.lH = this.lP.total / this.lW; this.lP.lR = this.lP.aspectRatio(this.lW, this.lP.lH); // Calculate last point new aspect ratio this.nW = this.total / this.height; this.lP.nH = this.lP.total / this.nW; this.lP.nR = this.lP.aspectRatio(this.nW, this.lP.nH); } else { // Calculate last point old aspect ratio this.lH = this.nH; this.lP.lW = this.lP.total / this.lH; this.lP.lR = this.lP.aspectRatio(this.lP.lW, this.lH); // Calculate last point new aspect ratio this.nH = this.total / this.width; this.lP.nW = this.lP.total / this.nH; this.lP.nR = this.lP.aspectRatio(this.lP.nW, this.nH); } this.elArr.push(el); }; this.reset = function () { this.nW = 0; this.lW = 0; this.elArr = []; this.total = 0; }; }, algorithmCalcPoints: function ( directionChange, last, group, childrenArea ) { var pX, pY, pW, pH, gW = group.lW, gH = group.lH, plot = group.plot, keep, i = 0, end = group.elArr.length - 1; if (last) { gW = group.nW; gH = group.nH; } else { keep = group.elArr[group.elArr.length - 1]; } group.elArr.forEach(function (p) { if (last || (i < end)) { if (group.direction === 0) { pX = plot.x; pY = plot.y; pW = gW; pH = p / pW; } else { pX = plot.x; pY = plot.y; pH = gH; pW = p / pH; } childrenArea.push({ x: pX, y: pY, width: pW, height: H.correctFloat(pH) }); if (group.direction === 0) { plot.y = plot.y + pH; } else { plot.x = plot.x + pW; } } i = i + 1; }); // Reset variables group.reset(); if (group.direction === 0) { group.width = group.width - gW; } else { group.height = group.height - gH; } plot.y = plot.parent.y + (plot.parent.height - group.height); plot.x = plot.parent.x + (plot.parent.width - group.width); if (directionChange) { group.direction = 1 - group.direction; } // If not last, then add uncalculated element if (!last) { group.addElement(keep); } }, algorithmLowAspectRatio: function (directionChange, parent, children) { var childrenArea = [], series = this, pTot, plot = { x: parent.x, y: parent.y, parent: parent }, direction = parent.direction, i = 0, end = children.length - 1, group = new this.algorithmGroup( // eslint-disable-line new-cap parent.height, parent.width, direction, plot ); // Loop through and calculate all areas children.forEach(function (child) { pTot = (parent.width * parent.height) * (child.val / parent.val); group.addElement(pTot); if (group.lP.nR > group.lP.lR) { series.algorithmCalcPoints( directionChange, false, group, childrenArea, plot ); } // If last child, then calculate all remaining areas if (i === end) { series.algorithmCalcPoints( directionChange, true, group, childrenArea, plot ); } i = i + 1; }); return childrenArea; }, algorithmFill: function (directionChange, parent, children) { var childrenArea = [], pTot, direction = parent.direction, x = parent.x, y = parent.y, width = parent.width, height = parent.height, pX, pY, pW, pH; children.forEach(function (child) { pTot = (parent.width * parent.height) * (child.val / parent.val); pX = x; pY = y; if (direction === 0) { pH = height; pW = pTot / pH; width = width - pW; x = x + pW; } else { pW = width; pH = pTot / pW; height = height - pH; y = y + pH; } childrenArea.push({ x: pX, y: pY, width: pW, height: pH }); if (directionChange) { direction = 1 - direction; } }); return childrenArea; }, strip: function (parent, children) { return this.algorithmLowAspectRatio(false, parent, children); }, squarified: function (parent, children) { return this.algorithmLowAspectRatio(true, parent, children); }, sliceAndDice: function (parent, children) { return this.algorithmFill(true, parent, children); }, stripes: function (parent, children) { return this.algorithmFill(false, parent, children); }, translate: function () { var series = this, options = series.options, // NOTE: updateRootId modifies series. rootId = updateRootId(series), rootNode, pointValues, seriesArea, tree, val; // Call prototype function Series.prototype.translate.call(series); // @todo Only if series.isDirtyData is true tree = series.tree = series.getTree(); rootNode = series.nodeMap[rootId]; series.renderTraverseUpButton(rootId); series.mapOptionsToLevel = getLevelOptions({ from: rootNode.level + 1, levels: options.levels, to: tree.height, defaults: { levelIsConstant: series.options.levelIsConstant, colorByPoint: options.colorByPoint } }); if ( rootId !== '' && (!rootNode || !rootNode.children.length) ) { series.setRootNode('', false); rootId = series.rootNode; rootNode = series.nodeMap[rootId]; } // Parents of the root node is by default visible recursive(series.nodeMap[series.rootNode], function (node) { var next = false, p = node.parent; node.visible = true; if (p || p === '') { next = series.nodeMap[p]; } return next; }); // Children of the root node is by default visible recursive( series.nodeMap[series.rootNode].children, function (children) { var next = false; children.forEach(function (child) { child.visible = true; if (child.children.length) { next = (next || []).concat(child.children); } }); return next; } ); series.setTreeValues(tree); // Calculate plotting values. series.axisRatio = (series.xAxis.len / series.yAxis.len); series.nodeMap[''].pointValues = pointValues = { x: 0, y: 0, width: 100, height: 100 }; series.nodeMap[''].values = seriesArea = merge(pointValues, { width: (pointValues.width * series.axisRatio), direction: ( options.layoutStartingDirection === 'vertical' ? 0 : 1 ), val: tree.val }); series.calculateChildrenAreas(tree, seriesArea); // Logic for point colors if (series.colorAxis) { series.translateColors(); } else if (!options.colorByPoint) { series.setColorRecursive(series.tree); } // Update axis extremes according to the root node. if (options.allowTraversingTree) { val = rootNode.pointValues; series.xAxis.setExtremes(val.x, val.x + val.width, false); series.yAxis.setExtremes(val.y, val.y + val.height, false); series.xAxis.setScale(); series.yAxis.setScale(); } // Assign values to points. series.setPointValues(); }, /** * Extend drawDataLabels with logic to handle custom options related to * the treemap series: * * - Points which is not a leaf node, has dataLabels disabled by * default. * * - Options set on series.levels is merged in. * * - Width of the dataLabel is set to match the width of the point * shape. * * @private * @function Highcharts.Series#drawDataLabels */ drawDataLabels: function () { var series = this, mapOptionsToLevel = series.mapOptionsToLevel, points = series.points.filter(function (n) { return n.node.visible; }), options, level; points.forEach(function (point) { level = mapOptionsToLevel[point.node.level]; // Set options to new object to avoid problems with scope options = { style: {} }; // If not a leaf, then label should be disabled as default if (!point.node.isLeaf) { options.enabled = false; } // If options for level exists, include them as well if (level && level.dataLabels) { options = merge(options, level.dataLabels); series._hasPointLabels = true; } // Set dataLabel width to the width of the point shape. if (point.shapeArgs) { options.style.width = point.shapeArgs.width; if (point.dataLabel) { point.dataLabel.css({ width: point.shapeArgs.width + 'px' }); } } // Merge custom options with point options point.dlOptions = merge(options, point.options.dataLabels); }); Series.prototype.drawDataLabels.call(this); }, // Over the alignment method by setting z index alignDataLabel: function (point, dataLabel, labelOptions) { var style = labelOptions.style; // #8160: Prevent the label from exceeding the point's // boundaries in treemaps by applying ellipsis overflow. // The issue was happening when datalabel's text contained a // long sequence of characters without a whitespace. if ( !H.defined(style.textOverflow) && dataLabel.text && dataLabel.getBBox().width > dataLabel.text.textWidth ) { dataLabel.css({ textOverflow: 'ellipsis', // unit (px) is required when useHTML is true width: style.width += 'px' }); } seriesTypes.column.prototype.alignDataLabel.apply(this, arguments); if (point.dataLabel) { // point.node.zIndex could be undefined (#6956) point.dataLabel.attr({ zIndex: (point.node.zIndex || 0) + 1 }); } }, // Get presentational attributes pointAttribs: function (point, state) { var series = this, mapOptionsToLevel = ( isObject(series.mapOptionsToLevel) ? series.mapOptionsToLevel : {} ), level = point && mapOptionsToLevel[point.node.level] || {}, options = this.options, attr, stateOptions = (state && options.states[state]) || {}, className = (point && point.getClassName()) || '', opacity; // Set attributes by precedence. Point trumps level trumps series. // Stroke width uses pick because it can be 0. attr = { 'stroke': (point && point.borderColor) || level.borderColor || stateOptions.borderColor || options.borderColor, 'stroke-width': pick( point && point.borderWidth, level.borderWidth, stateOptions.borderWidth, options.borderWidth ), 'dashstyle': (point && point.borderDashStyle) || level.borderDashStyle || stateOptions.borderDashStyle || options.borderDashStyle, 'fill': (point && point.color) || this.color }; // Hide levels above the current view if (className.indexOf('highcharts-above-level') !== -1) { attr.fill = 'none'; attr['stroke-width'] = 0; // Nodes with children that accept interaction } else if ( className.indexOf('highcharts-internal-node-interactive') !== -1 ) { opacity = pick(stateOptions.opacity, options.opacity); attr.fill = color(attr.fill).setOpacity(opacity).get(); attr.cursor = 'pointer'; // Hide nodes that have children } else if (className.indexOf('highcharts-internal-node') !== -1) { attr.fill = 'none'; } else if (state) { // Brighten and hoist the hover nodes attr.fill = color(attr.fill) .brighten(stateOptions.brightness) .get(); } return attr; }, // Override drawPoints drawPoints: function () { var series = this, chart = series.chart, renderer = chart.renderer, points = series.points, styledMode = chart.styledMode, options = series.options, shadow = styledMode ? {} : options.shadow, borderRadius = options.borderRadius, withinAnimationLimit = chart.pointCount < options.animationLimit, allowTraversingTree = options.allowTraversingTree; points.forEach(function (point) { var levelDynamic = point.node.levelDynamic, animate = {}, attr = {}, css = {}, groupKey = 'level-group-' + levelDynamic, hasGraphic = !!point.graphic, shouldAnimate = withinAnimationLimit && hasGraphic, shapeArgs = point.shapeArgs; // Don't bother with calculate styling if the point is not drawn if (point.shouldDraw()) { if (borderRadius) { attr.r = borderRadius; } merge( true, // Extend object // Which object to extend shouldAnimate ? animate : attr, // Add shapeArgs to animate/attr if graphic exists hasGraphic ? shapeArgs : {}, // Add style attribs if !styleMode styledMode ? {} : series.pointAttribs( point, point.selected && 'select' ) ); // In styled mode apply point.color. Use CSS, otherwise the // fill used in the style sheet will take precedence over // the fill attribute. if (series.colorAttribs && styledMode) { // Heatmap is loaded extend(css, series.colorAttribs(point)); } if (!series[groupKey]) { series[groupKey] = renderer.g(groupKey) .attr({ // @todo Set the zIndex based upon the number of // levels, instead of using 1000 zIndex: 1000 - levelDynamic }) .add(series.group); } } // Draw the point point.draw({ animatableAttribs: animate, attribs: attr, css: css, group: series[groupKey], renderer: renderer, shadow: shadow, shapeArgs: shapeArgs, shapeType: 'rect' }); // If setRootNode is allowed, set a point cursor on clickables & // add drillId to point if (allowTraversingTree && point.graphic) { point.drillId = options.interactByLeaf ? series.drillToByLeaf(point) : series.drillToByGroup(point); } }); }, // Add drilling on the suitable points onClickDrillToNode: function (event) { var series = this, point = event.point, drillId = point && point.drillId; // If a drill id is returned, add click event and cursor. if (isString(drillId)) { point.setState(''); // Remove hover series.setRootNode(drillId, true, { trigger: 'click' }); } }, /** * Finds the drill id for a parent node. Returns false if point should * not have a click event. * * @private * @function Highcharts.Series#drillToByGroup * * @param {object} point * * @return {boolean|string} * Drill to id or false when point should not have a click * event. */ drillToByGroup: function (point) { var series = this, drillId = false; if ((point.node.level - series.nodeMap[series.rootNode].level) === 1 && !point.node.isLeaf ) { drillId = point.id; } return drillId; }, /** * Finds the drill id for a leaf node. Returns false if point should not * have a click event * * @private * @function Highcharts.Series#drillToByLeaf * * @param {object} point * * @return {boolean|string} * Drill to id or false when point should not have a click * event. */ drillToByLeaf: function (point) { var series = this, drillId = false, nodeParent; if ((point.node.parent !== series.rootNode) && point.node.isLeaf ) { nodeParent = point.node; while (!drillId) { nodeParent = series.nodeMap[nodeParent.parent]; if (nodeParent.parent === series.rootNode) { drillId = nodeParent.id; } } } return drillId; }, drillUp: function () { var series = this, node = series.nodeMap[series.rootNode]; if (node && isString(node.parent)) { series.setRootNode( node.parent, true, { trigger: 'traverseUpButton' } ); } }, // TODO remove this function at a suitable version. drillToNode: function (id, redraw) { error( 'WARNING: treemap.drillToNode has been renamed to treemap.' + 'setRootNode, and will be removed in the next major version.' ); this.setRootNode(id, redraw); }, /** * Sets a new root node for the series. * * @private * @function Highcharts.Series#setRootNode * * @param {string} id The id of the new root node. * @param {boolean} [redraw=true] Wether to redraw the chart or not. * @param {object} [eventArguments] Arguments to be accessed in * event handler. * @param {string} [eventArguments.newRootId] Id of the new root. * @param {string} [eventArguments.previousRootId] Id of the previous * root. * @param {boolean} [eventArguments.redraw] Wether to redraw the * chart after. * @param {object} [eventArguments.series] The series to update the root * of. * @param {string} [eventArguments.trigger] The action which * triggered the event. Undefined if the setRootNode is called * directly. */ setRootNode: function (id, redraw, eventArguments) { var series = this, eventArgs = extend({ newRootId: id, previousRootId: series.rootNode, redraw: pick(redraw, true), series: series }, eventArguments); /** * The default functionality of the setRootNode event. * * @private * @param {object} args The event arguments. * @param {string} args.newRootId Id of the new root. * @param {string} args.previousRootId Id of the previous root. * @param {boolean} args.redraw Wether to redraw the chart after. * @param {object} args.series The series to update the root of. * @param {string} [args.trigger=undefined] The action which * triggered the event. Undefined if the setRootNode is called * directly. */ var defaultFn = function (args) { var series = args.series; // Store previous and new root ids on the series. series.idPreviousRoot = args.previousRootId; series.rootNode = args.newRootId; // Redraw the chart series.isDirty = true; // Force redraw if (args.redraw) { series.chart.redraw(); } }; // Fire setRootNode event. fireEvent(series, 'setRootNode', eventArgs, defaultFn); }, renderTraverseUpButton: function (rootId) { var series = this, nodeMap = series.nodeMap, node = nodeMap[rootId], name = node.name, buttonOptions = series.options.traverseUpButton, backText = pick(buttonOptions.text, name, '< Back'), attr, states; if (rootId === '') { if (series.drillUpButton) { series.drillUpButton = series.drillUpButton.destroy(); } } else if (!this.drillUpButton) { attr = buttonOptions.theme; states = attr && attr.states; this.drillUpButton = this.chart.renderer.button( backText, null, null, function () { series.drillUp(); }, attr, states && states.hover, states && states.select ) .addClass('highcharts-drillup-button') .attr({ align: buttonOptions.position.align, zIndex: 7 }) .add() .align( buttonOptions.position, false, buttonOptions.relativeTo || 'plotBox' ); } else { this.drillUpButton.placed = false; this.drillUpButton.attr({ text: backText }) .align(); } }, buildKDTree: noop, drawLegendSymbol: H.LegendSymbolMixin.drawRectangle, getExtremes: function () { // Get the extremes from the value data Series.prototype.getExtremes.call(this, this.colorValueData); this.valueMin = this.dataMin; this.valueMax = this.dataMax; // Get the extremes from the y data Series.prototype.getExtremes.call(this); }, getExtremesFromAll: true, bindAxes: function () { var treeAxis = { endOnTick: false, gridLineWidth: 0, lineWidth: 0, min: 0, dataMin: 0, minPadding: 0, max: 100, dataMax: 100, maxPadding: 0, startOnTick: false, title: null, tickPositions: [] }; Series.prototype.bindAxes.call(this); H.extend(this.yAxis.options, treeAxis); H.extend(this.xAxis.options, treeAxis); }, /** * Workaround for `inactive` state. Since `series.opacity` option is * already reserved, don't use that state at all by disabling * `inactiveOtherPoints` and not inheriting states by points. * * @private */ setState: function (state) { this.options.inactiveOtherPoints = true; Series.prototype.setState.call(this, state, false); this.options.inactiveOtherPoints = false; }, utils: { recursive: recursive } // Point class }, { draw: drawPoint, getClassName: function () { var className = H.Point.prototype.getClassName.call(this), series = this.series, options = series.options; // Above the current level if (this.node.level <= series.nodeMap[series.rootNode].level) { className += ' highcharts-above-level'; } else if ( !this.node.isLeaf && !pick(options.interactByLeaf, !options.allowTraversingTree) ) { className += ' highcharts-internal-node-interactive'; } else if (!this.node.isLeaf) { className += ' highcharts-internal-node'; } return className; }, /** * A tree point is valid if it has han id too, assume it may be a parent * item. * * @private * @function Highcharts.Point#isValid */ isValid: function () { return this.id || isNumber(this.value); }, setState: function (state) { H.Point.prototype.setState.call(this, state); // Graphic does not exist when point is not visible. if (this.graphic) { this.graphic.attr({ zIndex: state === 'hover' ? 1 : 0 }); } }, setVisible: seriesTypes.pie.prototype.pointClass.prototype.setVisible, shouldDraw: function () { var point = this; return isNumber(point.plotY) && point.y !== null; } } ); /** * A `treemap` series. If the [type](#series.treemap.type) option is * not specified, it is inherited from [chart.type](#chart.type). * * @extends series,plotOptions.treemap * @excluding dataParser, dataURL, stack * @product highcharts * @apioption series.treemap */ /** * An array of data points for the series. For the `treemap` series * type, points can be given in the following ways: * * 1. An array of numerical values. In this case, the numerical values will be * interpreted as `value` options. Example: * ```js * data: [0, 5, 3, 5] * ``` * * 2. An array of objects with named values. The following snippet shows only a * few settings, see the complete options set below. If the total number of * data points exceeds the series' * [turboThreshold](#series.treemap.turboThreshold), * this option is not available. * ```js * data: [{ * value: 9, * name: "Point2", * color: "#00FF00" * }, { * value: 6, * name: "Point1", * color: "#FF00FF" * }] * ``` * * @sample {highcharts} highcharts/chart/reflow-true/ * Numerical values * @sample {highcharts} highcharts/series/data-array-of-objects/ * Config objects * * @type {Array<number|null|*>} * @extends series.heatmap.data * @excluding x, y * @product highcharts * @apioption series.treemap.data */ /** * The value of the point, resulting in a relative area of the point * in the treemap. * * @type {number|null} * @product highcharts * @apioption series.treemap.data.value */ /** * Serves a purpose only if a `colorAxis` object is defined in the chart * options. This value will decide which color the point gets from the * scale of the colorAxis. * * @type {number} * @since 4.1.0 * @product highcharts * @apioption series.treemap.data.colorValue */ /** * Only for treemap. Use this option to build a tree structure. The * value should be the id of the point which is the parent. If no points * has a matching id, or this option is undefined, then the parent will * be set to the root. * * @sample {highcharts} highcharts/point/parent/ * Point parent * @sample {highcharts} highcharts/demo/treemap-with-levels/ * Example where parent id is not matching * * @type {string} * @since 4.1.0 * @product highcharts * @apioption series.treemap.data.parent */ }); _registerModule(_modules, 'masters/modules/treemap.src.js', [], function () { }); }));
Qesy/Q-Frame
Static/bootstrap/Highcharts-7.1.1/modules/treemap.src.js
JavaScript
apache-2.0
92,176
"""Neural network operations.""" from __future__ import absolute_import as _abs from . import _make def conv2d(data, weight, strides=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1, channels=None, kernel_size=None, data_layout="NCHW", weight_layout="OIHW", out_layout="", out_dtype=""): r"""2D convolution. This operator takes the weight as the convolution kernel and convolves it with data to produce an output. In the default case, where the data_layout is `NCHW` and weight_layout is `OIHW`, conv2d takes in a data Tensor with shape `(batch_size, in_channels, height, width)`, and a weight Tensor with shape `(channels, in_channels, kernel_size[0], kernel_size[1])` to produce an output Tensor with the following rule: .. math:: \mbox{out}[b, c, y, x] = \sum_{dy, dx, k} \mbox{data}[b, k, \mbox{strides}[0] * y + dy, \mbox{strides}[1] * x + dx] * \mbox{weight}[c, k, dy, dx] Padding and dilation are applied to data and weight respectively before the computation. This operator accepts data layout specification. Semantically, the operator will convert the layout to the canonical layout (`NCHW` for data and `OIHW` for weight), perform the computation, then convert to the out_layout. Parameters ---------- data : relay.Expr The input data to the operator. weight : relay.Expr The weight expressions. strides : tuple of int, optional The strides of convoltution. padding : tuple of int, optional The padding of convolution on both sides of inputs before convolution. dilation : tuple of int, optional Specifies the dilation rate to be used for dilated convolution. groups : int, optional Number of groups for grouped convolution. channels : int, optional Number of output channels of this convolution. kernel_size : tuple of int, optional The spatial of the convolution kernel. data_layout : str, optional Layout of the input. weight_layout : str, optional Layout of the weight. out_layout : str, optional Layout of the output, by default, out_layout is the same as data_layout out_dtype : str, optional Specifies the output data type for mixed precision conv2d. Returns ------- result : relay.Expr The computed result. """ return _make.conv2d(data, weight, strides, padding, dilation, groups, channels, kernel_size, data_layout, weight_layout, out_layout, out_dtype) def softmax(data, axis): r"""Computes softmax. .. math:: \text{softmax}(x)_i = \frac{exp(x_i)}{\sum_j exp(x_j)} .. note:: This operator can be optimized away for inference. Parameters ---------- data: relay.Expr The input data to the operator. axis: int The axis to sum over when computing softmax """ return _make.softmax(data, axis) def log_softmax(data, axis): r"""Computes log softmax. .. math:: \text{log_softmax}(x)_i = \log \frac{exp(x_i)}{\sum_j exp(x_j)} .. note:: This operator can be optimized away for inference. Parameters ---------- data: relay.Expr The input data to the operator. axis: int The axis to sum over when computing softmax """ return _make.log_softmax(data, axis) def max_pool2d(data, pool_size=(1, 1), strides=(1, 1), padding=(0, 0), layout="NCHW", ceil_mode=False): r"""2D maximum pooling operator. This operator takes data as input and does 2D max value calculation with in pool_size sized window by striding defined by stride In the default case, where the data_layout is `NCHW` a data Tensor with shape `(batch_size, in_channels, height, width)`, to produce an output Tensor with the following rule: with data of shape (b, c, h, w) and pool_size (kh, kw) .. math:: \mbox{out}(b, c, y, x) = \max_{m=0, \ldots, kh-1} \max_{n=0, \ldots, kw-1} \mbox{data}(b, c, \mbox{stride}[0] * y + m, \mbox{stride}[1] * x + n) Padding is applied to data before the computation. ceil_mode is used to take ceil or floor while computing out shape. This operator accepts data layout specification. Parameters ---------- data : relay.Expr The input data to the operator. strides : tuple of int, optional The strides of pooling. padding : tuple of int, optional The padding for pooling. layout : str, optional Layout of the input. ceil_mode : bool, optional To enable or disable ceil while pooling. Returns ------- result : relay.Expr The computed result. """ return _make.max_pool2d(data, pool_size, strides, padding, layout, ceil_mode) def avg_pool2d(data, pool_size=(1, 1), strides=(1, 1), padding=(0, 0), layout="NCHW", ceil_mode=False, count_include_pad=False): r"""2D average pooling operator. This operator takes data as input and does 2D average value calculation with in pool_size sized window by striding defined by stride In the default case, where the data_layout is `NCHW` a data Tensor with shape `(batch_size, in_channels, height, width)`, to produce an output Tensor with the following rule: with data of shape (b, c, h, w), pool_size (kh, kw) .. math:: \mbox{out}(b, c, y, x) = \frac{1}{kh * kw} \sum_{m=0}^{kh-1} \sum_{n=0}^{kw-1} \mbox{data}(b, c, \mbox{stride}[0] * y + m, \mbox{stride}[1] * x + n) Padding is applied to data before the computation. ceil_mode is used to take ceil or floor while computing out shape. count_include_pad indicates including or excluding padded input values in computation. This operator accepts data layout specification. Parameters ---------- data : relay.Expr The input data to the operator. strides : tuple of int, optional The strides of pooling. padding : tuple of int, optional The padding for pooling. layout : str, optional Layout of the input. ceil_mode : bool, optional To enable or disable ceil while pooling. count_include_pad : bool, optional To include padding to compute the average. Returns ------- result : relay.Expr The computed result. """ return _make.avg_pool2d(data, pool_size, strides, padding, layout, ceil_mode, count_include_pad) def global_max_pool2d(data, layout="NCHW"): r"""2D global maximum pooling operator. This operator takes data as input and does 2D max value calculation across each window represented by WxH. In the default case, where the data_layout is `NCHW` a data Tensor with shape `(batch_size, in_channels, height, width)`, to produce an output Tensor with the following rule: with data of shape (b, c, h, w) .. math:: \mbox{out}(b, c, 1, 1) = \max_{m=0, \ldots, h} \max_{n=0, \ldots, w} \mbox{data}(b, c, m, n) Parameters ---------- data : relay.Expr The input data to the operator. layout : str, optional Layout of the input. Returns ------- result : relay.Expr The computed result. """ return _make.global_max_pool2d(data, layout) def global_avg_pool2d(data, layout="NCHW"): r"""2D global average pooling operator. This operator takes data as input and does 2D average value calculation across each window represented by WxH. In the default case, where the data_layout is `NCHW` a data Tensor with shape `(batch_size, in_channels, height, width)`, to produce an output Tensor with the following rule: with data of shape (b, c, h, w) .. math:: \mbox{out}(b, c, 1, 1) = \frac{1}{h * w} \sum_{m=0}^{h-1} \sum_{n=0}^{w-1} \mbox{data}(b, c, m, n) Parameters ---------- data : relay.Expr The input data to the operator. layout : str, optional Layout of the input. Returns ------- result : relay.Expr The computed result. """ return _make.global_avg_pool2d(data, layout) def upsampling(data, scale=1, layout="NCHW", method="NEAREST_NEIGHBOR"): """Upsampling. This operator takes data as input and does 2D scaling to the given scale factor. In the default case, where the data_layout is `NCHW` with data of shape (n, c, h, w) out will have a shape (n, c, h*scale, w*scale) method indicates the algorithm to be used while calculating ghe out value and method can be one of ("BILINEAR", "NEAREST_NEIGHBOR") Parameters ---------- data : relay.Expr The input data to the operator. scale : relay.Expr The scale factor for upsampling. layout : str, optional Layout of the input. method : str, optional Scale method to used [NEAREST_NEIGHBOR, BILINEAR]. Returns ------- result : relay.Expr The computed result. """ return _make.upsampling(data, scale, layout, method) def batch_flatten(data): """BatchFlatten. This operator flattens all the dimensions except for the batch dimension. which results a 2D output. For data with shape ``(d1, d2, ..., dk)`` batch_flatten(data) returns reshaped output of shape ``(d1, d2*...*dk)``. Parameters ---------- data : relay.Expr The input data to the operator. Returns ------- result: relay.Expr The Flattened result. """ return _make.batch_flatten(data)
mlperf/training_results_v0.6
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/python/tvm/relay/op/nn/nn.py
Python
apache-2.0
10,085
<?php /** * Project Name: map-board * File Name: create_benchmark_fixture.php * Last modified: 2017/11/20 12:51 * Author: Hiroaki Goto * * Copyright (c) 2017 Hiroaki Goto. All rights reserved. */ require_once __DIR__.'/../vendor/autoload.php'; $db = connectDB(); $redis = connectRedis(); const USER_SIZE = 1000; const THREAD_NUM = 200; const MIN_POST = 0; const MAX_POST = 10000; const CONTENT_MIN = 3; const CONTENT_MAX = 1000; function unichr( $unicode , $encoding = 'UTF-8' ) { return mb_convert_encoding("&#{$unicode};", $encoding, 'HTML-ENTITIES'); } class RandomStringGenerator { private $seed; private $strtmp; private $seedSize; private $cnt = 0; public function __construct(bool $only_ascii = false, bool $multi_line = true) { $this->seed = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'; if(!$only_ascii) { // ひらがな for ($i = 12353; $i <= 12435; $i++) { $this->seed .= unichr($i); } // カタカナ for ($i = 12449; $i <= 12534; $i++) { $this->seed .= unichr($i); } // 常用漢字 $file_content = file_get_contents('joyo.csv'); $unicode_list = preg_split('/(\r\n)|[\r\n]/', $file_content); foreach ($unicode_list as $code_point) { $this->seed .= unichr(hexdec($code_point)); } } // 改行文字 if($multi_line) { $this->seed .= '\n'; } $this->seedSize = mb_strlen($this->seed); $this->shuffle(); } private function shuffle() { $this->strtmp = ''; for($i = 0; $i < $this->seedSize; $i++) { $this->strtmp .= mb_substr($this->seed, mt_rand(0, $this->seedSize - 1), 1); } } public function pseudo(int $length) { if(++$this->cnt > 1000) { $this->cnt = 0; $this->shuffle(); } $offset_max = $this->seedSize - $length; return mb_substr($this->strtmp, mt_rand(0, $offset_max), $length); } public function generate(int $length) { $str = ''; for($i = 0; $i < $length; $i++) { $str .= mb_substr($this->seed, mt_rand(0, $this->seedSize - 1), 1); } return $str; } } $single_gen = new RandomStringGenerator(true, false); $content_gen = new RandomStringGenerator(); $gen_content = function() use($content_gen) { return $content_gen->pseudo(mt_rand(CONTENT_MIN, CONTENT_MAX)); }; echo "Creating users...\n"; $user_ids = []; for($i = 0; $i < USER_SIZE; $i++) { $password = $single_gen->pseudo(mt_rand(7, 40)); $user_name = $single_gen->generate(mt_rand(4, 8)); $user = new mb\models\User($user_name, $user_name.'@example.com', $password, $password); if($user->create($db)) { $user_ids[] = $user->id; } } $user_count = count($user_ids); $gen_user_id = function() use($user_ids, $user_count) { return $user_ids[mt_rand(0, $user_count - 1)]; }; echo "End creating users.\n"; echo "Creating threads...\n"; for($i = 0; $i < THREAD_NUM; $i++) { $thread_owner = $gen_user_id(); $thread = new mb\models\Thread($db, $single_gen->generate(mt_rand(5, 80)), $thread_owner); $thread->create($db, $redis); $post_first = new mb\models\Post($db, $thread->id, $thread_owner, $gen_content()); $post_first->create($db); $post_num = mt_rand(MIN_POST, MAX_POST); for($j = 0; $j < $post_num; $j++) { $post = new mb\models\Post($db, $thread->id, $gen_user_id(), $gen_content()); $post->create($db); } } echo "End creating thread.\n";
StoneDot/map-board
database/create_benchmark_fixture.php
PHP
apache-2.0
3,675
package com.jason.showcase.lambdas; /** * Created by Qinjianf on 2016/7/19. */ public class Lambda { public void execute(Action action) { action.run("Hello Lambda!"); } public void test() { execute(System.out::println); } public static void main(String[] args) { new Lambda().test(); } }
fuyongde/jason
showcase/src/main/java/com/jason/showcase/lambdas/Lambda.java
Java
apache-2.0
342
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.action.hadoop; import com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.commons.lang.StringUtils; import org.apache.directory.api.util.Strings; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.Path; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringReader; import java.io.Writer; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; import static org.apache.oozie.action.hadoop.SparkActionExecutor.SPARK_DEFAULT_OPTS; @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "Properties file should be specified by user") class SparkArgsExtractor { private static final Pattern SPARK_DEFAULTS_FILE_PATTERN = Pattern.compile("spark-defaults.conf"); private static final String FILES_OPTION = "--files"; private static final String ARCHIVES_OPTION = "--archives"; private static final String LOG4J_CONFIGURATION_JAVA_OPTION = "-Dlog4j.configuration="; private static final String SECURITY_TOKENS_HADOOPFS = "spark.yarn.security.tokens.hadoopfs.enabled"; private static final String SECURITY_TOKENS_HIVE = "spark.yarn.security.tokens.hive.enabled"; private static final String SECURITY_TOKENS_HBASE = "spark.yarn.security.tokens.hbase.enabled"; private static final String SECURITY_CREDENTIALS_HADOOPFS = "spark.yarn.security.credentials.hadoopfs.enabled"; private static final String SECURITY_CREDENTIALS_HIVE = "spark.yarn.security.credentials.hive.enabled"; private static final String SECURITY_CREDENTIALS_HBASE = "spark.yarn.security.credentials.hbase.enabled"; private static final String PWD = "$PWD" + File.separator + "*"; private static final String MASTER_OPTION = "--master"; private static final String MODE_OPTION = "--deploy-mode"; private static final String JOB_NAME_OPTION = "--name"; private static final String CLASS_NAME_OPTION = "--class"; private static final String VERBOSE_OPTION = "--verbose"; private static final String DRIVER_CLASSPATH_OPTION = "--driver-class-path"; private static final String EXECUTOR_CLASSPATH = "spark.executor.extraClassPath="; private static final String DRIVER_CLASSPATH = "spark.driver.extraClassPath="; private static final String EXECUTOR_EXTRA_JAVA_OPTIONS = "spark.executor.extraJavaOptions="; private static final String DRIVER_EXTRA_JAVA_OPTIONS = "spark.driver.extraJavaOptions="; private static final Pattern SPARK_VERSION_1 = Pattern.compile("^1.*"); private static final String SPARK_YARN_JAR = "spark.yarn.jar"; private static final String SPARK_YARN_JARS = "spark.yarn.jars"; private static final String OPT_SEPARATOR = "="; private static final String OPT_VALUE_SEPARATOR = ","; private static final String CONF_OPTION = "--conf"; private static final String MASTER_OPTION_YARN_CLUSTER = "yarn-cluster"; private static final String MASTER_OPTION_YARN_CLIENT = "yarn-client"; private static final String MASTER_OPTION_YARN = "yarn"; private static final String DEPLOY_MODE_CLUSTER = "cluster"; private static final String DEPLOY_MODE_CLIENT = "client"; private static final String SPARK_YARN_TAGS = "spark.yarn.tags"; private static final String OPT_PROPERTIES_FILE = "--properties-file"; public static final String SPARK_DEFAULTS_GENERATED_PROPERTIES = "spark-defaults-oozie-generated.properties"; private boolean pySpark = false; private final Configuration actionConf; SparkArgsExtractor(final Configuration actionConf) { this.actionConf = actionConf; } boolean isPySpark() { return pySpark; } List<String> extract(final String[] mainArgs) throws OozieActionConfiguratorException, IOException, URISyntaxException { final List<String> sparkArgs = new ArrayList<>(); sparkArgs.add(MASTER_OPTION); final String master = actionConf.get(SparkActionExecutor.SPARK_MASTER); sparkArgs.add(master); // In local mode, everything runs here in the Launcher Job. // In yarn-client mode, the driver runs here in the Launcher Job and the // executor in Yarn. // In yarn-cluster mode, the driver and executor run in Yarn. final String sparkDeployMode = actionConf.get(SparkActionExecutor.SPARK_MODE); if (sparkDeployMode != null) { sparkArgs.add(MODE_OPTION); sparkArgs.add(sparkDeployMode); } final boolean yarnClusterMode = master.equals(MASTER_OPTION_YARN_CLUSTER) || (master.equals(MASTER_OPTION_YARN) && sparkDeployMode != null && sparkDeployMode.equals(DEPLOY_MODE_CLUSTER)); final boolean yarnClientMode = master.equals(MASTER_OPTION_YARN_CLIENT) || (master.equals(MASTER_OPTION_YARN) && sparkDeployMode != null && sparkDeployMode.equals(DEPLOY_MODE_CLIENT)); sparkArgs.add(JOB_NAME_OPTION); sparkArgs.add(actionConf.get(SparkActionExecutor.SPARK_JOB_NAME)); final String className = actionConf.get(SparkActionExecutor.SPARK_CLASS); if (className != null) { sparkArgs.add(CLASS_NAME_OPTION); sparkArgs.add(className); } appendOoziePropertiesToSparkConf(sparkArgs); String jarPath = actionConf.get(SparkActionExecutor.SPARK_JAR); if (jarPath != null && jarPath.endsWith(".py")) { pySpark = true; } boolean addedSecurityTokensHadoopFS = false; boolean addedSecurityTokensHive = false; boolean addedSecurityTokensHBase = false; boolean addedSecurityCredentialsHadoopFS = false; boolean addedSecurityCredentialsHive = false; boolean addedSecurityCredentialsHBase = false; boolean addedLog4jDriverSettings = false; boolean addedLog4jExecutorSettings = false; final StringBuilder driverClassPath = new StringBuilder(); final StringBuilder executorClassPath = new StringBuilder(); final StringBuilder userFiles = new StringBuilder(); final StringBuilder userArchives = new StringBuilder(); final String sparkOpts = actionConf.get(SparkActionExecutor.SPARK_OPTS); String propertiesFile = null; if (StringUtils.isNotEmpty(sparkOpts)) { final List<String> sparkOptions = SparkOptionsSplitter.splitSparkOpts(sparkOpts); for (int i = 0; i < sparkOptions.size(); i++) { String opt = sparkOptions.get(i); boolean addToSparkArgs = true; if (yarnClusterMode || yarnClientMode) { if (opt.startsWith(EXECUTOR_CLASSPATH)) { appendWithPathSeparator(opt.substring(EXECUTOR_CLASSPATH.length()), executorClassPath); addToSparkArgs = false; } if (opt.startsWith(DRIVER_CLASSPATH)) { appendWithPathSeparator(opt.substring(DRIVER_CLASSPATH.length()), driverClassPath); addToSparkArgs = false; } if (opt.equals(DRIVER_CLASSPATH_OPTION)) { // we need the next element after this option appendWithPathSeparator(sparkOptions.get(i + 1), driverClassPath); // increase i to skip the next element. i++; addToSparkArgs = false; } } if (opt.startsWith(SECURITY_TOKENS_HADOOPFS)) { addedSecurityTokensHadoopFS = true; } if (opt.startsWith(SECURITY_TOKENS_HIVE)) { addedSecurityTokensHive = true; } if (opt.startsWith(SECURITY_TOKENS_HBASE)) { addedSecurityTokensHBase = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HADOOPFS)) { addedSecurityCredentialsHadoopFS = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HIVE)) { addedSecurityCredentialsHive = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HBASE)) { addedSecurityCredentialsHBase = true; } if (opt.startsWith(OPT_PROPERTIES_FILE)){ i++; propertiesFile = sparkOptions.get(i); addToSparkArgs = false; } if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS) || opt.startsWith(DRIVER_EXTRA_JAVA_OPTIONS)) { if (!opt.contains(LOG4J_CONFIGURATION_JAVA_OPTION)) { opt += " " + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS; } else { System.out.println("Warning: Spark Log4J settings are overwritten." + " Child job IDs may not be available"); } if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS)) { addedLog4jExecutorSettings = true; } else { addedLog4jDriverSettings = true; } } if (opt.startsWith(FILES_OPTION)) { final String userFile; if (opt.contains(OPT_SEPARATOR)) { userFile = opt.substring(opt.indexOf(OPT_SEPARATOR) + OPT_SEPARATOR.length()); } else { userFile = sparkOptions.get(i + 1); i++; } if (userFiles.length() > 0) { userFiles.append(OPT_VALUE_SEPARATOR); } userFiles.append(userFile); addToSparkArgs = false; } if (opt.startsWith(ARCHIVES_OPTION)) { final String userArchive; if (opt.contains(OPT_SEPARATOR)) { userArchive = opt.substring(opt.indexOf(OPT_SEPARATOR) + OPT_SEPARATOR.length()); } else { userArchive = sparkOptions.get(i + 1); i++; } if (userArchives.length() > 0) { userArchives.append(OPT_VALUE_SEPARATOR); } userArchives.append(userArchive); addToSparkArgs = false; } if (addToSparkArgs) { sparkArgs.add(opt); } else if (sparkArgs.get(sparkArgs.size() - 1).equals(CONF_OPTION)) { sparkArgs.remove(sparkArgs.size() - 1); } } } if ((yarnClusterMode || yarnClientMode)) { // Include the current working directory (of executor container) // in executor classpath, because it will contain localized // files appendWithPathSeparator(PWD, executorClassPath); appendWithPathSeparator(PWD, driverClassPath); sparkArgs.add(CONF_OPTION); sparkArgs.add(EXECUTOR_CLASSPATH + executorClassPath.toString()); sparkArgs.add(CONF_OPTION); sparkArgs.add(DRIVER_CLASSPATH + driverClassPath.toString()); } if (actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS) != null) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_TAGS + OPT_SEPARATOR + actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS)); } if (!addedSecurityTokensHadoopFS) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityTokensHive) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HIVE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityTokensHBase) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HBASE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHadoopFS) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHive) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HIVE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHBase) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HBASE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedLog4jExecutorSettings) { sparkArgs.add(CONF_OPTION); sparkArgs.add(EXECUTOR_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS); } if (!addedLog4jDriverSettings) { sparkArgs.add(CONF_OPTION); sparkArgs.add(DRIVER_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS); } mergeAndAddPropertiesFile(sparkArgs, propertiesFile); if ((yarnClusterMode || yarnClientMode)) { final Map<String, URI> fixedFileUrisMap = SparkMain.fixFsDefaultUrisAndFilterDuplicates(DistributedCache.getCacheFiles(actionConf)); fixedFileUrisMap.put(SparkMain.SPARK_LOG4J_PROPS, new Path(SparkMain.SPARK_LOG4J_PROPS).toUri()); fixedFileUrisMap.put(SparkMain.HIVE_SITE_CONF, new Path(SparkMain.HIVE_SITE_CONF).toUri()); addUserDefined(userFiles.toString(), fixedFileUrisMap); final Collection<URI> fixedFileUris = fixedFileUrisMap.values(); final JarFilter jarFilter = new JarFilter(fixedFileUris, jarPath); jarFilter.filter(); jarPath = jarFilter.getApplicationJar(); final String cachedFiles = StringUtils.join(fixedFileUris, OPT_VALUE_SEPARATOR); if (cachedFiles != null && !cachedFiles.isEmpty()) { sparkArgs.add(FILES_OPTION); sparkArgs.add(cachedFiles); } final Map<String, URI> fixedArchiveUrisMap = SparkMain.fixFsDefaultUrisAndFilterDuplicates(DistributedCache. getCacheArchives(actionConf)); addUserDefined(userArchives.toString(), fixedArchiveUrisMap); final String cachedArchives = StringUtils.join(fixedArchiveUrisMap.values(), OPT_VALUE_SEPARATOR); if (cachedArchives != null && !cachedArchives.isEmpty()) { sparkArgs.add(ARCHIVES_OPTION); sparkArgs.add(cachedArchives); } setSparkYarnJarsConf(sparkArgs, jarFilter.getSparkYarnJar(), jarFilter.getSparkVersion()); } if (!sparkArgs.contains(VERBOSE_OPTION)) { sparkArgs.add(VERBOSE_OPTION); } sparkArgs.add(jarPath); sparkArgs.addAll(Arrays.asList(mainArgs)); return sparkArgs; } private void mergeAndAddPropertiesFile(final List<String> sparkArgs, final String userDefinedPropertiesFile) throws IOException { final Properties properties = new Properties(); loadServerDefaultProperties(properties); loadLocalizedDefaultPropertiesFile(properties); loadUserDefinedPropertiesFile(userDefinedPropertiesFile, properties); final boolean persisted = persistMergedProperties(properties); if (persisted) { sparkArgs.add(OPT_PROPERTIES_FILE); sparkArgs.add(SPARK_DEFAULTS_GENERATED_PROPERTIES); } } private boolean persistMergedProperties(final Properties properties) throws IOException { if (!properties.isEmpty()) { try (final Writer writer = new OutputStreamWriter( new FileOutputStream(new File(SPARK_DEFAULTS_GENERATED_PROPERTIES)), StandardCharsets.UTF_8.name())) { properties.store(writer, "Properties file generated by Oozie"); System.out.println(String.format("Persisted merged Spark configs in file %s. Merged properties are: %s", SPARK_DEFAULTS_GENERATED_PROPERTIES, Arrays.toString(properties.stringPropertyNames().toArray()))); return true; } catch (IOException e) { System.err.println(String.format("Could not persist derived Spark config file. Reason: %s", e.getMessage())); throw e; } } return false; } private void loadUserDefinedPropertiesFile(final String userDefinedPropertiesFile, final Properties properties) { if (userDefinedPropertiesFile != null) { System.out.println(String.format("Reading Spark config from %s %s...", OPT_PROPERTIES_FILE, userDefinedPropertiesFile)); loadProperties(new File(userDefinedPropertiesFile), properties); } } private void loadLocalizedDefaultPropertiesFile(final Properties properties) { final File localizedDefaultConfFile = SparkMain.getMatchingFile(SPARK_DEFAULTS_FILE_PATTERN); if (localizedDefaultConfFile != null) { System.out.println(String.format("Reading Spark config from file %s...", localizedDefaultConfFile.getName())); loadProperties(localizedDefaultConfFile, properties); } } private void loadServerDefaultProperties(final Properties properties) { final String sparkDefaultsFromServer = actionConf.get(SPARK_DEFAULT_OPTS, ""); if (!sparkDefaultsFromServer.isEmpty()) { System.out.println("Reading Spark config propagated from Oozie server..."); try (final StringReader reader = new StringReader(sparkDefaultsFromServer)) { properties.load(reader); } catch (IOException e) { System.err.println(String.format("Could not read propagated Spark config! Reason: %s", e.getMessage())); } } } private void loadProperties(final File file, final Properties target) { try (final Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8.name())) { final Properties properties = new Properties(); properties.load(reader); for(String key :properties.stringPropertyNames()) { Object prevProperty = target.setProperty(key, properties.getProperty(key)); if(prevProperty != null){ System.out.println(String.format("Value of %s was overwritten from %s", key, file.getName())); } } } catch (IOException e) { System.err.println(String.format("Could not read Spark configs from file %s. Reason: %s", file.getName(), e.getMessage())); } } private void appendWithPathSeparator(final String what, final StringBuilder to) { if (to.length() > 0) { to.append(File.pathSeparator); } to.append(what); } private void addUserDefined(final String userList, final Map<String, URI> urisMap) { if (userList != null) { for (final String file : userList.split(OPT_VALUE_SEPARATOR)) { if (!Strings.isEmpty(file)) { final Path p = new Path(file); urisMap.put(p.getName(), p.toUri()); } } } } /* * Get properties that needs to be passed to Spark as Spark configuration from actionConf. */ @VisibleForTesting void appendOoziePropertiesToSparkConf(final List<String> sparkArgs) { for (final Map.Entry<String, String> oozieConfig : actionConf .getValByRegex("^oozie\\.(?!launcher|spark).+").entrySet()) { sparkArgs.add(CONF_OPTION); sparkArgs.add(String.format("spark.%s=%s", oozieConfig.getKey(), oozieConfig.getValue())); } } /** * Sets spark.yarn.jars for Spark 2.X. Sets spark.yarn.jar for Spark 1.X. * * @param sparkArgs * @param sparkYarnJar * @param sparkVersion */ private void setSparkYarnJarsConf(final List<String> sparkArgs, final String sparkYarnJar, final String sparkVersion) { if (SPARK_VERSION_1.matcher(sparkVersion).find()) { // In Spark 1.X.X, set spark.yarn.jar to avoid // multiple distribution sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_JAR + OPT_SEPARATOR + sparkYarnJar); } else { // In Spark 2.X.X, set spark.yarn.jars sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_JARS + OPT_SEPARATOR + sparkYarnJar); } } }
cbaenziger/oozie
sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
Java
apache-2.0
22,247
package com.common.dao; import java.sql.PreparedStatement; import java.sql.SQLException; import java.util.logging.Logger; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceException; /** * La Clase BaseDAO implementa las operaciones básicas de acceso a datos DAO * utilizando usado por las clases DAO del módulo de ejecución de transacciones. * * @author Gestorinc S.A. * @version $Rev $ */ public class BaseDAO { /** * Constante que representa el character '%'. */ public static final String SYMBOLO_LIKE = "%"; /** * Constante que representa la cadena "'". */ public static final String SYMBOLO_APOSTROFE = "'"; /** * Creación del log de auditoría. */ protected static final Logger LOGGER = Logger.getLogger(BaseDAO.class.getName()); /** * Objeto que maneja las operaciones de persistencia. */ @PersistenceContext(name = "punit") private EntityManager em; /** * Constructor por defecto. */ public BaseDAO() { } /** * Retorna una referencia al objeto que maneja las operaciones de * persistencia definidas por JPA. * * @return Referencia al objeto que maneja las operaciones de persistencia. * En caso de que el objeto no este inicializado lanza la excepción * @see java.lang.IllegalStateException */ protected EntityManager getEntityManager() { if (em == null) { throw new IllegalStateException( "EntityManager no ha sido asignado a DAO antes del uso."); } else { return em; } } /** * Ejecuta una sentencia SQL obteniendo una conexión a la BD, referenciado * por la unidad de persistencia: <b>punit</b>.<br/> * No utilizar este método para ejecutar sentencias SELECT. * * @param sentencia Sentencia SQL que será ejecutada. */ public void ejecutarNativo(String sentencia) { try { java.sql.Connection connection = em.unwrap(java.sql.Connection.class); PreparedStatement ps = connection.prepareStatement(sentencia); ps.execute(); ps.close(); } catch (PersistenceException e) { LOGGER.info("Error al ejecutar sentencia"+ e.getMessage()); } catch (SQLException e) { LOGGER.info("Error al ejecutar sentencia"+ e.getMessage()); } } /** * Pone apóstrofes a una cadena de caracteres. * * @param cadena la cadena * @return la cadena con apóstrofes */ protected String comillar(String cadena) { return SYMBOLO_APOSTROFE + cadena + SYMBOLO_APOSTROFE; } }
ServicioReparaciones/ServicioReparaciones
ServicioReparaciones-ejb/src/main/java/com/common/dao/BaseDAO.java
Java
apache-2.0
2,856
#include <memory> #include "envoy/config/endpoint/v3/endpoint.pb.h" #include "envoy/config/endpoint/v3/endpoint.pb.validate.h" #include "envoy/service/discovery/v3/discovery.pb.h" #include "source/common/common/empty_string.h" #include "source/common/config/api_version.h" #include "source/common/config/grpc_mux_impl.h" #include "source/common/config/protobuf_link_hacks.h" #include "source/common/config/utility.h" #include "source/common/protobuf/protobuf.h" #include "source/common/stats/isolated_store_impl.h" #include "test/common/stats/stat_test_utility.h" #include "test/mocks/common.h" #include "test/mocks/config/mocks.h" #include "test/mocks/event/mocks.h" #include "test/mocks/grpc/mocks.h" #include "test/mocks/local_info/mocks.h" #include "test/mocks/runtime/mocks.h" #include "test/test_common/logging.h" #include "test/test_common/resources.h" #include "test/test_common/simulated_time_system.h" #include "test/test_common/test_time.h" #include "test/test_common/utility.h" #include "gmock/gmock.h" #include "gtest/gtest.h" using testing::_; using testing::AtLeast; using testing::InSequence; using testing::Invoke; using testing::IsSubstring; using testing::NiceMock; using testing::Return; using testing::ReturnRef; namespace Envoy { namespace Config { namespace { // We test some mux specific stuff below, other unit test coverage for singleton use of GrpcMuxImpl // is provided in [grpc_]subscription_impl_test.cc. class GrpcMuxImplTestBase : public testing::Test { public: GrpcMuxImplTestBase() : async_client_(new Grpc::MockAsyncClient()), control_plane_connected_state_( stats_.gauge("control_plane.connected_state", Stats::Gauge::ImportMode::NeverImport)), control_plane_pending_requests_( stats_.gauge("control_plane.pending_requests", Stats::Gauge::ImportMode::NeverImport)) {} void setup() { grpc_mux_ = std::make_unique<GrpcMuxImpl>( local_info_, std::unique_ptr<Grpc::MockAsyncClient>(async_client_), dispatcher_, *Protobuf::DescriptorPool::generated_pool()->FindMethodByName( "envoy.service.discovery.v3.AggregatedDiscoveryService.StreamAggregatedResources"), random_, stats_, rate_limit_settings_, true); } void setup(const RateLimitSettings& custom_rate_limit_settings) { grpc_mux_ = std::make_unique<GrpcMuxImpl>( local_info_, std::unique_ptr<Grpc::MockAsyncClient>(async_client_), dispatcher_, *Protobuf::DescriptorPool::generated_pool()->FindMethodByName( "envoy.service.discovery.v3.AggregatedDiscoveryService.StreamAggregatedResources"), random_, stats_, custom_rate_limit_settings, true); } void expectSendMessage(const std::string& type_url, const std::vector<std::string>& resource_names, const std::string& version, bool first = false, const std::string& nonce = "", const Protobuf::int32 error_code = Grpc::Status::WellKnownGrpcStatus::Ok, const std::string& error_message = "") { envoy::service::discovery::v3::DiscoveryRequest expected_request; if (first) { expected_request.mutable_node()->CopyFrom(local_info_.node()); } for (const auto& resource : resource_names) { expected_request.add_resource_names(resource); } if (!version.empty()) { expected_request.set_version_info(version); } expected_request.set_response_nonce(nonce); expected_request.set_type_url(type_url); if (error_code != Grpc::Status::WellKnownGrpcStatus::Ok) { ::google::rpc::Status* error_detail = expected_request.mutable_error_detail(); error_detail->set_code(error_code); error_detail->set_message(error_message); } EXPECT_CALL(async_stream_, sendMessageRaw_(Grpc::ProtoBufferEq(expected_request), false)); } NiceMock<Event::MockDispatcher> dispatcher_; NiceMock<Random::MockRandomGenerator> random_; NiceMock<LocalInfo::MockLocalInfo> local_info_; Grpc::MockAsyncClient* async_client_; Grpc::MockAsyncStream async_stream_; GrpcMuxImplPtr grpc_mux_; NiceMock<MockSubscriptionCallbacks> callbacks_; NiceMock<MockOpaqueResourceDecoder> resource_decoder_; Stats::TestUtil::TestStore stats_; Envoy::Config::RateLimitSettings rate_limit_settings_; Stats::Gauge& control_plane_connected_state_; Stats::Gauge& control_plane_pending_requests_; }; class GrpcMuxImplTest : public GrpcMuxImplTestBase { public: Event::SimulatedTimeSystem time_system_; }; // Validate behavior when multiple type URL watches are maintained, watches are created/destroyed // (via RAII). TEST_F(GrpcMuxImplTest, MultipleTypeUrlStreams) { setup(); InSequence s; auto foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); auto bar_sub = grpc_mux_->addWatch("bar", {}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"x", "y"}, "", true); expectSendMessage("bar", {}, ""); grpc_mux_->start(); EXPECT_EQ(1, control_plane_connected_state_.value()); expectSendMessage("bar", {"z"}, ""); auto bar_z_sub = grpc_mux_->addWatch("bar", {"z"}, callbacks_, resource_decoder_, {}); expectSendMessage("bar", {"zz", "z"}, ""); auto bar_zz_sub = grpc_mux_->addWatch("bar", {"zz"}, callbacks_, resource_decoder_, {}); expectSendMessage("bar", {"z"}, ""); expectSendMessage("bar", {}, ""); expectSendMessage("foo", {}, ""); } // Validate behavior when dynamic context parameters are updated. TEST_F(GrpcMuxImplTest, DynamicContextParameters) { setup(); InSequence s; auto foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); auto bar_sub = grpc_mux_->addWatch("bar", {}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"x", "y"}, "", true); expectSendMessage("bar", {}, ""); grpc_mux_->start(); // Unknown type, shouldn't do anything. local_info_.context_provider_.update_cb_handler_.runCallbacks("baz"); // Update to foo type should resend Node. expectSendMessage("foo", {"x", "y"}, "", true); local_info_.context_provider_.update_cb_handler_.runCallbacks("foo"); // Update to bar type should resend Node. expectSendMessage("bar", {}, "", true); local_info_.context_provider_.update_cb_handler_.runCallbacks("bar"); // Adding a new foo resource to the watch shouldn't send Node. expectSendMessage("foo", {"z", "x", "y"}, ""); auto foo_z_sub = grpc_mux_->addWatch("foo", {"z"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"x", "y"}, ""); expectSendMessage("foo", {}, ""); } // Validate behavior when multiple type URL watches are maintained and the stream is reset. TEST_F(GrpcMuxImplTest, ResetStream) { InSequence s; auto* timer = new Event::MockTimer(&dispatcher_); // TTL timers. new Event::MockTimer(&dispatcher_); new Event::MockTimer(&dispatcher_); new Event::MockTimer(&dispatcher_); setup(); auto foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); auto bar_sub = grpc_mux_->addWatch("bar", {}, callbacks_, resource_decoder_, {}); auto baz_sub = grpc_mux_->addWatch("baz", {"z"}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"x", "y"}, "", true); expectSendMessage("bar", {}, ""); expectSendMessage("baz", {"z"}, ""); grpc_mux_->start(); // Send another message for foo so that the node is cleared in the cached request. // This is to test that the node is set again in the first message below. expectSendMessage("foo", {"z", "x", "y"}, ""); auto foo_z_sub = grpc_mux_->addWatch("foo", {"z"}, callbacks_, resource_decoder_, {}); EXPECT_CALL(callbacks_, onConfigUpdateFailed(Envoy::Config::ConfigUpdateFailureReason::ConnectionFailure, _)) .Times(4); EXPECT_CALL(random_, random()); EXPECT_CALL(*timer, enableTimer(_, _)); grpc_mux_->grpcStreamForTest().onRemoteClose(Grpc::Status::WellKnownGrpcStatus::Canceled, ""); EXPECT_EQ(0, control_plane_connected_state_.value()); EXPECT_EQ(0, control_plane_pending_requests_.value()); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"z", "x", "y"}, "", true); expectSendMessage("bar", {}, ""); expectSendMessage("baz", {"z"}, ""); expectSendMessage("foo", {"x", "y"}, ""); timer->invokeCallback(); expectSendMessage("baz", {}, ""); expectSendMessage("foo", {}, ""); } // Validate pause-resume behavior. TEST_F(GrpcMuxImplTest, PauseResume) { setup(); InSequence s; GrpcMuxWatchPtr foo_sub; GrpcMuxWatchPtr foo_z_sub; GrpcMuxWatchPtr foo_zz_sub; foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); { ScopedResume a = grpc_mux_->pause("foo"); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); grpc_mux_->start(); expectSendMessage("foo", {"x", "y"}, "", true); } { ScopedResume a = grpc_mux_->pause("bar"); expectSendMessage("foo", {"z", "x", "y"}, ""); foo_z_sub = grpc_mux_->addWatch("foo", {"z"}, callbacks_, resource_decoder_, {}); } { ScopedResume a = grpc_mux_->pause("foo"); foo_zz_sub = grpc_mux_->addWatch("foo", {"zz"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"zz", "z", "x", "y"}, ""); } // When nesting, we only have a single resumption. { ScopedResume a = grpc_mux_->pause("foo"); ScopedResume b = grpc_mux_->pause("foo"); foo_zz_sub = grpc_mux_->addWatch("foo", {"zz"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"zz", "z", "x", "y"}, ""); } grpc_mux_->pause("foo")->cancel(); } // Validate behavior when type URL mismatches occur. TEST_F(GrpcMuxImplTest, TypeUrlMismatch) { setup(); auto invalid_response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); InSequence s; auto foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"x", "y"}, "", true); grpc_mux_->start(); { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url("bar"); response->set_version_info("bar-version"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } { invalid_response->set_type_url("foo"); invalid_response->set_version_info("foo-version"); invalid_response->mutable_resources()->Add()->set_type_url("bar"); EXPECT_CALL(callbacks_, onConfigUpdateFailed(_, _)) .WillOnce(Invoke([](Envoy::Config::ConfigUpdateFailureReason, const EnvoyException* e) { EXPECT_TRUE(IsSubstring( "", "", "bar does not match the message-wide type URL foo in DiscoveryResponse", e->what())); })); expectSendMessage( "foo", {"x", "y"}, "", false, "", Grpc::Status::WellKnownGrpcStatus::Internal, fmt::format("bar does not match the message-wide type URL foo in DiscoveryResponse {}", invalid_response->DebugString())); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(invalid_response)); } expectSendMessage("foo", {}, ""); } TEST_F(GrpcMuxImplTest, RpcErrorMessageTruncated) { setup(); auto invalid_response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); InSequence s; auto foo_sub = grpc_mux_->addWatch("foo", {"x", "y"}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage("foo", {"x", "y"}, "", true); grpc_mux_->start(); { // Large error message sent back to management server is truncated. const std::string very_large_type_url(1 << 20, 'A'); invalid_response->set_type_url("foo"); invalid_response->set_version_info("invalid"); invalid_response->mutable_resources()->Add()->set_type_url(very_large_type_url); EXPECT_CALL(callbacks_, onConfigUpdateFailed(_, _)) .WillOnce(Invoke([&very_large_type_url](Envoy::Config::ConfigUpdateFailureReason, const EnvoyException* e) { EXPECT_TRUE(IsSubstring( "", "", fmt::format("{} does not match the message-wide type URL foo in DiscoveryResponse", very_large_type_url), // Local error message is not truncated. e->what())); })); expectSendMessage("foo", {"x", "y"}, "", false, "", Grpc::Status::WellKnownGrpcStatus::Internal, fmt::format("{}...(truncated)", std::string(4096, 'A'))); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(invalid_response)); } expectSendMessage("foo", {}, ""); } envoy::service::discovery::v3::Resource heartbeatResource(std::chrono::milliseconds ttl, const std::string& name) { envoy::service::discovery::v3::Resource resource; resource.mutable_ttl()->CopyFrom(Protobuf::util::TimeUtil::MillisecondsToDuration(ttl.count())); resource.set_name(name); return resource; } envoy::service::discovery::v3::Resource resourceWithTtl(std::chrono::milliseconds ttl, envoy::config::endpoint::v3::ClusterLoadAssignment& cla) { envoy::service::discovery::v3::Resource resource; resource.mutable_resource()->PackFrom(cla); resource.mutable_ttl()->CopyFrom(Protobuf::util::TimeUtil::MillisecondsToDuration(ttl.count())); resource.set_name(cla.cluster_name()); return resource; } // Validates the behavior when the TTL timer expires. TEST_F(GrpcMuxImplTest, ResourceTTL) { setup(); time_system_.setSystemTime(std::chrono::seconds(0)); TestUtility::TestOpaqueResourceDecoderImpl<envoy::config::endpoint::v3::ClusterLoadAssignment> resource_decoder("cluster_name"); const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; InSequence s; auto* ttl_timer = new Event::MockTimer(&dispatcher_); auto eds_sub = grpc_mux_->addWatch(type_url, {"x"}, callbacks_, resource_decoder, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage(type_url, {"x"}, "", true); grpc_mux_->start(); { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); auto wrapped_resource = resourceWithTtl(std::chrono::milliseconds(1000), load_assignment); response->add_resources()->PackFrom(wrapped_resource); EXPECT_CALL(callbacks_, onConfigUpdate(_, "1")) .WillOnce(Invoke([](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); })); EXPECT_CALL(*ttl_timer, enabled()); EXPECT_CALL(*ttl_timer, enableTimer(std::chrono::milliseconds(1000), _)); expectSendMessage(type_url, {"x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } // Increase the TTL. { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); auto wrapped_resource = resourceWithTtl(std::chrono::milliseconds(10000), load_assignment); response->add_resources()->PackFrom(wrapped_resource); EXPECT_CALL(callbacks_, onConfigUpdate(_, "1")) .WillOnce(Invoke([](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); })); EXPECT_CALL(*ttl_timer, enabled()); EXPECT_CALL(*ttl_timer, enableTimer(std::chrono::milliseconds(10000), _)); // No update, just a change in TTL. expectSendMessage(type_url, {"x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } // Refresh the TTL with a heartbeat response. { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); auto wrapped_resource = heartbeatResource(std::chrono::milliseconds(10000), "x"); response->add_resources()->PackFrom(wrapped_resource); EXPECT_CALL(*ttl_timer, enabled()); // No update, just a change in TTL. expectSendMessage(type_url, {"x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } // Remove the TTL. { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); response->add_resources()->PackFrom(load_assignment); EXPECT_CALL(callbacks_, onConfigUpdate(_, "1")) .WillOnce(Invoke([](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); })); EXPECT_CALL(*ttl_timer, disableTimer()); expectSendMessage(type_url, {"x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } // Put the TTL back. { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); auto wrapped_resource = resourceWithTtl(std::chrono::milliseconds(10000), load_assignment); response->add_resources()->PackFrom(wrapped_resource); EXPECT_CALL(callbacks_, onConfigUpdate(_, "1")) .WillOnce(Invoke([](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); })); EXPECT_CALL(*ttl_timer, enabled()); EXPECT_CALL(*ttl_timer, enableTimer(std::chrono::milliseconds(10000), _)); // No update, just a change in TTL. expectSendMessage(type_url, {"x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } time_system_.setSystemTime(std::chrono::seconds(11)); EXPECT_CALL(callbacks_, onConfigUpdate(_, _, "")) .WillOnce(Invoke([](auto, const auto& removed, auto) { EXPECT_EQ(1, removed.size()); EXPECT_EQ("x", removed.Get(0)); })); // Fire the TTL timer. EXPECT_CALL(*ttl_timer, disableTimer()); ttl_timer->invokeCallback(); expectSendMessage(type_url, {}, "1"); } // Checks that the control plane identifier is logged TEST_F(GrpcMuxImplTest, LogsControlPlaneIndentifier) { setup(); std::string type_url = "foo"; auto foo_sub = grpc_mux_->addWatch(type_url, {}, callbacks_, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage(type_url, {}, "", true); grpc_mux_->start(); { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); response->mutable_control_plane()->set_identifier("control_plane_ID"); EXPECT_CALL(callbacks_, onConfigUpdate(_, _)); expectSendMessage(type_url, {}, "1"); EXPECT_LOG_CONTAINS("debug", "for foo from control_plane_ID", grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response))); } { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("2"); response->mutable_control_plane()->set_identifier("different_ID"); EXPECT_CALL(callbacks_, onConfigUpdate(_, _)); expectSendMessage(type_url, {}, "2"); EXPECT_LOG_CONTAINS("debug", "for foo from different_ID", grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response))); } } // Validate behavior when watches has an unknown resource name. TEST_F(GrpcMuxImplTest, WildcardWatch) { setup(); InSequence s; const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; TestUtility::TestOpaqueResourceDecoderImpl<envoy::config::endpoint::v3::ClusterLoadAssignment> resource_decoder("cluster_name"); auto foo_sub = grpc_mux_->addWatch(type_url, {}, callbacks_, resource_decoder, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage(type_url, {}, "", true); grpc_mux_->start(); { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); response->add_resources()->PackFrom(load_assignment); EXPECT_CALL(callbacks_, onConfigUpdate(_, "1")) .WillOnce(Invoke([&load_assignment](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); const auto& expected_assignment = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[0].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment, load_assignment)); })); expectSendMessage(type_url, {}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } } // Validate behavior when watches specify resources (potentially overlapping). TEST_F(GrpcMuxImplTest, WatchDemux) { setup(); InSequence s; TestUtility::TestOpaqueResourceDecoderImpl<envoy::config::endpoint::v3::ClusterLoadAssignment> resource_decoder("cluster_name"); const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; NiceMock<MockSubscriptionCallbacks> foo_callbacks; auto foo_sub = grpc_mux_->addWatch(type_url, {"x", "y"}, foo_callbacks, resource_decoder, {}); NiceMock<MockSubscriptionCallbacks> bar_callbacks; auto bar_sub = grpc_mux_->addWatch(type_url, {"y", "z"}, bar_callbacks, resource_decoder, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); // Should dedupe the "x" resource. expectSendMessage(type_url, {"y", "z", "x"}, "", true); grpc_mux_->start(); { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); response->add_resources()->PackFrom(load_assignment); EXPECT_CALL(bar_callbacks, onConfigUpdate(_, "1")).Times(0); EXPECT_CALL(foo_callbacks, onConfigUpdate(_, "1")) .WillOnce(Invoke([&load_assignment](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(1, resources.size()); const auto& expected_assignment = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[0].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment, load_assignment)); })); expectSendMessage(type_url, {"y", "z", "x"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("2"); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment_x; load_assignment_x.set_cluster_name("x"); response->add_resources()->PackFrom(load_assignment_x); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment_y; load_assignment_y.set_cluster_name("y"); response->add_resources()->PackFrom(load_assignment_y); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment_z; load_assignment_z.set_cluster_name("z"); response->add_resources()->PackFrom(load_assignment_z); EXPECT_CALL(bar_callbacks, onConfigUpdate(_, "2")) .WillOnce(Invoke([&load_assignment_y, &load_assignment_z]( const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(2, resources.size()); const auto& expected_assignment = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[0].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment, load_assignment_y)); const auto& expected_assignment_1 = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[1].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment_1, load_assignment_z)); })); EXPECT_CALL(foo_callbacks, onConfigUpdate(_, "2")) .WillOnce(Invoke([&load_assignment_x, &load_assignment_y]( const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_EQ(2, resources.size()); const auto& expected_assignment = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[0].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment, load_assignment_x)); const auto& expected_assignment_1 = dynamic_cast<const envoy::config::endpoint::v3::ClusterLoadAssignment&>( resources[1].get().resource()); EXPECT_TRUE(TestUtility::protoEqual(expected_assignment_1, load_assignment_y)); })); expectSendMessage(type_url, {"y", "z", "x"}, "2"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } expectSendMessage(type_url, {"x", "y"}, "2"); expectSendMessage(type_url, {}, "2"); } // Validate behavior when we have multiple watchers that send empty updates. TEST_F(GrpcMuxImplTest, MultipleWatcherWithEmptyUpdates) { setup(); InSequence s; const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; NiceMock<MockSubscriptionCallbacks> foo_callbacks; auto foo_sub = grpc_mux_->addWatch(type_url, {"x", "y"}, foo_callbacks, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage(type_url, {"x", "y"}, "", true); grpc_mux_->start(); auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); EXPECT_CALL(foo_callbacks, onConfigUpdate(_, "1")).Times(0); expectSendMessage(type_url, {"x", "y"}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); expectSendMessage(type_url, {}, "1"); } // Validate behavior when we have Single Watcher that sends Empty updates. TEST_F(GrpcMuxImplTest, SingleWatcherWithEmptyUpdates) { setup(); const std::string& type_url = Config::TypeUrl::get().Cluster; NiceMock<MockSubscriptionCallbacks> foo_callbacks; auto foo_sub = grpc_mux_->addWatch(type_url, {}, foo_callbacks, resource_decoder_, {}); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); expectSendMessage(type_url, {}, "", true); grpc_mux_->start(); auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_type_url(type_url); response->set_version_info("1"); // Validate that onConfigUpdate is called with empty resources. EXPECT_CALL(foo_callbacks, onConfigUpdate(_, "1")) .WillOnce(Invoke([](const std::vector<DecodedResourceRef>& resources, const std::string&) { EXPECT_TRUE(resources.empty()); })); expectSendMessage(type_url, {}, "1"); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } // Exactly one test requires a mock time system to provoke behavior that cannot // easily be achieved with a SimulatedTimeSystem. class GrpcMuxImplTestWithMockTimeSystem : public GrpcMuxImplTestBase { public: Event::DelegatingTestTimeSystem<MockTimeSystem> mock_time_system_; }; // Verifies that rate limiting is not enforced with defaults. TEST_F(GrpcMuxImplTestWithMockTimeSystem, TooManyRequestsWithDefaultSettings) { auto ttl_timer = new Event::MockTimer(&dispatcher_); // Retry timer, new Event::MockTimer(&dispatcher_); // Validate that rate limiter is not created. EXPECT_CALL(*mock_time_system_, monotonicTime()).Times(0); setup(); EXPECT_CALL(async_stream_, sendMessageRaw_(_, false)).Times(AtLeast(99)); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); const auto onReceiveMessage = [&](uint64_t burst) { for (uint64_t i = 0; i < burst; i++) { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_version_info("baz"); response->set_nonce("bar"); response->set_type_url("foo"); EXPECT_CALL(*ttl_timer, disableTimer()); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } }; auto foo_sub = grpc_mux_->addWatch("foo", {"x"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"x"}, "", true); grpc_mux_->start(); // Exhausts the limit. onReceiveMessage(99); // API calls go over the limit but we do not see the stat incremented. onReceiveMessage(1); EXPECT_EQ(0, stats_.counter("control_plane.rate_limit_enforced").value()); } // Verifies that default rate limiting is enforced with empty RateLimitSettings. TEST_F(GrpcMuxImplTest, TooManyRequestsWithEmptyRateLimitSettings) { // Validate that request drain timer is created. auto ttl_timer = new Event::MockTimer(&dispatcher_); Event::MockTimer* drain_request_timer = new Event::MockTimer(&dispatcher_); Event::MockTimer* retry_timer = new Event::MockTimer(&dispatcher_); RateLimitSettings custom_rate_limit_settings; custom_rate_limit_settings.enabled_ = true; setup(custom_rate_limit_settings); // Attempt to send 99 messages. One of them is rate limited (and we never drain). EXPECT_CALL(async_stream_, sendMessageRaw_(_, false)).Times(99); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); const auto onReceiveMessage = [&](uint64_t burst) { for (uint64_t i = 0; i < burst; i++) { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_version_info("baz"); response->set_nonce("bar"); response->set_type_url("foo"); EXPECT_CALL(*ttl_timer, disableTimer()); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } }; auto foo_sub = grpc_mux_->addWatch("foo", {"x"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"x"}, "", true); grpc_mux_->start(); // Validate that drain_request_timer is enabled when there are no tokens. EXPECT_CALL(*drain_request_timer, enableTimer(std::chrono::milliseconds(100), _)); // The drain timer enable is checked twice, once when we limit, again when the watch is destroyed. EXPECT_CALL(*drain_request_timer, enabled()).Times(11); onReceiveMessage(110); EXPECT_EQ(11, stats_.counter("control_plane.rate_limit_enforced").value()); EXPECT_EQ(11, control_plane_pending_requests_.value()); // Validate that when we reset a stream with pending requests, it reverts back to the initial // query (i.e. the queue is discarded). EXPECT_CALL(callbacks_, onConfigUpdateFailed(Envoy::Config::ConfigUpdateFailureReason::ConnectionFailure, _)); EXPECT_CALL(random_, random()); EXPECT_CALL(*retry_timer, enableTimer(_, _)); grpc_mux_->grpcStreamForTest().onRemoteClose(Grpc::Status::WellKnownGrpcStatus::Canceled, ""); EXPECT_EQ(11, control_plane_pending_requests_.value()); EXPECT_EQ(0, control_plane_connected_state_.value()); EXPECT_CALL(async_stream_, sendMessageRaw_(_, false)); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); time_system_.setMonotonicTime(std::chrono::seconds(30)); retry_timer->invokeCallback(); EXPECT_EQ(0, control_plane_pending_requests_.value()); // One more message on the way out when the watch is destroyed. EXPECT_CALL(async_stream_, sendMessageRaw_(_, false)); } // Verifies that rate limiting is enforced with custom RateLimitSettings. TEST_F(GrpcMuxImplTest, TooManyRequestsWithCustomRateLimitSettings) { // Validate that request drain timer is created. // TTL timer. auto ttl_timer = new Event::MockTimer(&dispatcher_); Event::MockTimer* drain_request_timer = new Event::MockTimer(&dispatcher_); // Retry timer. new Event::MockTimer(&dispatcher_); RateLimitSettings custom_rate_limit_settings; custom_rate_limit_settings.enabled_ = true; custom_rate_limit_settings.max_tokens_ = 250; custom_rate_limit_settings.fill_rate_ = 2; setup(custom_rate_limit_settings); EXPECT_CALL(async_stream_, sendMessageRaw_(_, false)).Times(AtLeast(260)); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); const auto onReceiveMessage = [&](uint64_t burst) { for (uint64_t i = 0; i < burst; i++) { auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_version_info("baz"); response->set_nonce("bar"); response->set_type_url("foo"); EXPECT_CALL(*ttl_timer, disableTimer()); grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); } }; auto foo_sub = grpc_mux_->addWatch("foo", {"x"}, callbacks_, resource_decoder_, {}); expectSendMessage("foo", {"x"}, "", true); grpc_mux_->start(); // Validate that rate limit is not enforced for 100 requests. onReceiveMessage(100); EXPECT_EQ(0, stats_.counter("control_plane.rate_limit_enforced").value()); // Validate that drain_request_timer is enabled when there are no tokens. EXPECT_CALL(*drain_request_timer, enableTimer(std::chrono::milliseconds(500), _)); EXPECT_CALL(*drain_request_timer, enabled()).Times(11); onReceiveMessage(160); EXPECT_EQ(11, stats_.counter("control_plane.rate_limit_enforced").value()); EXPECT_EQ(11, control_plane_pending_requests_.value()); // Validate that drain requests call when there are multiple requests in queue. time_system_.setMonotonicTime(std::chrono::seconds(10)); drain_request_timer->invokeCallback(); // Check that the pending_requests stat is updated with the queue drain. EXPECT_EQ(0, control_plane_pending_requests_.value()); } // Verifies that a message with no resources is accepted. TEST_F(GrpcMuxImplTest, UnwatchedTypeAcceptsEmptyResources) { setup(); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; grpc_mux_->start(); { // subscribe and unsubscribe to simulate a cluster added and removed expectSendMessage(type_url, {"y"}, "", true); auto temp_sub = grpc_mux_->addWatch(type_url, {"y"}, callbacks_, resource_decoder_, {}); expectSendMessage(type_url, {}, ""); } // simulate the server sending empty CLA message to notify envoy that the CLA was removed. auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_nonce("bar"); response->set_version_info("1"); response->set_type_url(type_url); // TODO(fredlas) the expectation of no discovery request here is against the xDS spec. // The upcoming xDS overhaul (part of/followup to PR7293) will fix this. // // This contains zero resources. No discovery request should be sent. grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response)); // when we add the new subscription version should be 1 and nonce should be bar expectSendMessage(type_url, {"x"}, "1", false, "bar"); // simulate a new cluster x is added. add CLA subscription for it. auto sub = grpc_mux_->addWatch(type_url, {"x"}, callbacks_, resource_decoder_, {}); expectSendMessage(type_url, {}, "1", false, "bar"); } // Verifies that a message with some resources is rejected when there are no watches. TEST_F(GrpcMuxImplTest, UnwatchedTypeRejectsResources) { setup(); EXPECT_CALL(*async_client_, startRaw(_, _, _, _)).WillOnce(Return(&async_stream_)); const std::string& type_url = Config::TypeUrl::get().ClusterLoadAssignment; grpc_mux_->start(); // subscribe and unsubscribe (by not keeping the return watch) so that the type is known to envoy expectSendMessage(type_url, {"y"}, "", true); expectSendMessage(type_url, {}, ""); grpc_mux_->addWatch(type_url, {"y"}, callbacks_, resource_decoder_, {}); // simulate the server sending CLA message to notify envoy that the CLA was added, // even though envoy doesn't expect it. Envoy should reject this update. auto response = std::make_unique<envoy::service::discovery::v3::DiscoveryResponse>(); response->set_nonce("bar"); response->set_version_info("1"); response->set_type_url(type_url); envoy::config::endpoint::v3::ClusterLoadAssignment load_assignment; load_assignment.set_cluster_name("x"); response->add_resources()->PackFrom(load_assignment); // The message should be rejected. expectSendMessage(type_url, {}, "", false, "bar"); EXPECT_LOG_CONTAINS("warning", "Ignoring unwatched type URL " + type_url, grpc_mux_->grpcStreamForTest().onReceiveMessage(std::move(response))); } TEST_F(GrpcMuxImplTest, BadLocalInfoEmptyClusterName) { EXPECT_CALL(local_info_, clusterName()).WillOnce(ReturnRef(EMPTY_STRING)); EXPECT_THROW_WITH_MESSAGE( GrpcMuxImpl( local_info_, std::unique_ptr<Grpc::MockAsyncClient>(async_client_), dispatcher_, *Protobuf::DescriptorPool::generated_pool()->FindMethodByName( "envoy.service.discovery.v3.AggregatedDiscoveryService.StreamAggregatedResources"), random_, stats_, rate_limit_settings_, true), EnvoyException, "ads: node 'id' and 'cluster' are required. Set it either in 'node' config or via " "--service-node and --service-cluster options."); } TEST_F(GrpcMuxImplTest, BadLocalInfoEmptyNodeName) { EXPECT_CALL(local_info_, nodeName()).WillOnce(ReturnRef(EMPTY_STRING)); EXPECT_THROW_WITH_MESSAGE( GrpcMuxImpl( local_info_, std::unique_ptr<Grpc::MockAsyncClient>(async_client_), dispatcher_, *Protobuf::DescriptorPool::generated_pool()->FindMethodByName( "envoy.service.discovery.v3.AggregatedDiscoveryService.StreamAggregatedResources"), random_, stats_, rate_limit_settings_, true), EnvoyException, "ads: node 'id' and 'cluster' are required. Set it either in 'node' config or via " "--service-node and --service-cluster options."); } } // namespace } // namespace Config } // namespace Envoy
lyft/envoy
test/common/config/grpc_mux_impl_test.cc
C++
apache-2.0
39,581
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.exec.planner.sql.parser; import java.util.List; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlSpecialOperator; import org.apache.calcite.sql.SqlWriter; import org.apache.calcite.sql.parser.SqlParserPos; import com.dremio.service.namespace.NamespaceKey; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; public class SqlTruncateTable extends SqlCall { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("TRUNCATE_TABLE", SqlKind.OTHER_DDL) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { Preconditions.checkArgument(operands.length == 3, "SqlTruncateTable.createCall() " + "has to get 3 operands!"); return new SqlTruncateTable(pos, (SqlIdentifier) operands[0], (SqlLiteral) operands[1], (SqlLiteral) operands[2]); } }; private SqlIdentifier tableName; private boolean tableExistenceCheck; private boolean tableKeywordPresent; public SqlTruncateTable(SqlParserPos pos, SqlIdentifier tableName, SqlLiteral tableExistenceCheck, SqlLiteral tableKeywordPresent) { this(pos, tableName, tableExistenceCheck.booleanValue(), tableKeywordPresent.booleanValue()); } public SqlTruncateTable(SqlParserPos pos, SqlIdentifier tableName, boolean tableExistenceCheck, boolean tableKeywordPresent) { super(pos); this.tableName = tableName; this.tableExistenceCheck = tableExistenceCheck; this.tableKeywordPresent = tableKeywordPresent; } @Override public SqlOperator getOperator() { return OPERATOR; } @Override public List<SqlNode> getOperandList() { return ImmutableList.of( tableName, SqlLiteral.createBoolean(tableExistenceCheck, SqlParserPos.ZERO), SqlLiteral.createBoolean(tableKeywordPresent, SqlParserPos.ZERO) ); } @Override public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("TRUNCATE"); if (tableKeywordPresent) { writer.keyword("TABLE"); } if (tableExistenceCheck) { writer.keyword("IF"); writer.keyword("EXISTS"); } tableName.unparse(writer, leftPrec, rightPrec); } public NamespaceKey getPath() { return new NamespaceKey(tableName.names); } public boolean checkTableExistence() { return tableExistenceCheck; } }
dremio/dremio-oss
sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlTruncateTable.java
Java
apache-2.0
3,280
# Inga lallensis Benth. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Inga/Inga lallensis/README.md
Markdown
apache-2.0
179
<!DOCTYPE HTML> <html> <head> <meta http-equiv="Content-type" content="text/html; charset=utf-8"> <title>Hovercards6 userオプション</title> <script src="http://platform.twitter.com/anywhere.js?id=[APIキー]&amp;v=1" type="text/javascript"></script> </head> <body> @twitterapi をフォローしましょう<br> <img src="./twitter4j.png" id="image" alt="t4j_news"/> <script type="text/javascript"> twttr.anywhere(function (T) { T.hovercards(); T("#image").hovercards( {username: function(node){return node.alt}}); }); </script> </body> </html>
yusuke/twtr-api-pocket-reference
at_anywhere/hovercards6.html
HTML
apache-2.0
626
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio; import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Unit tests for {@link StorageTierAssoc}. */ public class StorageTierAssocTest { private void checkStorageTierAssoc(StorageTierAssoc assoc, PropertyKey levelsProperty, PropertyKeyFormat aliasFormat) { int size = Configuration.getInt(levelsProperty); Assert.assertEquals(size, assoc.size()); List<String> expectedOrderedAliases = new ArrayList<>(); for (int i = 0; i < size; i++) { String alias = Configuration.get(aliasFormat.format(i)); Assert.assertEquals(i, assoc.getOrdinal(alias)); Assert.assertEquals(alias, assoc.getAlias(i)); expectedOrderedAliases.add(alias); } Assert.assertEquals(expectedOrderedAliases, assoc.getOrderedStorageAliases()); } /** * Tests the constructors of the {@link MasterStorageTierAssoc} and {@link WorkerStorageTierAssoc} * classes with a {@link Configuration}. */ @Test public void masterWorkerConfConstructor() { Configuration.set(PropertyKey.MASTER_TIERED_STORE_GLOBAL_LEVELS, "3"); Configuration.set( PropertyKeyFormat.MASTER_TIERED_STORE_GLOBAL_LEVEL_ALIAS_FORMAT.format(2), "BOTTOM"); Configuration.set(PropertyKey.WORKER_TIERED_STORE_LEVELS, "2"); Configuration.set( PropertyKeyFormat.WORKER_TIERED_STORE_LEVEL_ALIAS_FORMAT.format(1), "BOTTOM"); checkStorageTierAssoc(new MasterStorageTierAssoc(), PropertyKey.MASTER_TIERED_STORE_GLOBAL_LEVELS, PropertyKeyFormat.MASTER_TIERED_STORE_GLOBAL_LEVEL_ALIAS_FORMAT); checkStorageTierAssoc(new WorkerStorageTierAssoc(), PropertyKey.WORKER_TIERED_STORE_LEVELS, PropertyKeyFormat.WORKER_TIERED_STORE_LEVEL_ALIAS_FORMAT); ConfigurationTestUtils.resetConfiguration(); } /** * Tests the constructors of the {@link MasterStorageTierAssoc} and {@link WorkerStorageTierAssoc} * classes with different storage alias. */ @Test public void storageAliasListConstructor() { List<String> orderedAliases = Arrays.asList("MEM", "HDD", "SOMETHINGELSE", "SSD"); MasterStorageTierAssoc masterAssoc = new MasterStorageTierAssoc(orderedAliases); WorkerStorageTierAssoc workerAssoc = new WorkerStorageTierAssoc(orderedAliases); Assert.assertEquals(orderedAliases.size(), masterAssoc.size()); Assert.assertEquals(orderedAliases.size(), workerAssoc.size()); for (int i = 0; i < orderedAliases.size(); i++) { String alias = orderedAliases.get(i); Assert.assertEquals(alias, masterAssoc.getAlias(i)); Assert.assertEquals(i, masterAssoc.getOrdinal(alias)); Assert.assertEquals(alias, workerAssoc.getAlias(i)); Assert.assertEquals(i, workerAssoc.getOrdinal(alias)); } Assert.assertEquals(orderedAliases, masterAssoc.getOrderedStorageAliases()); Assert.assertEquals(orderedAliases, workerAssoc.getOrderedStorageAliases()); } }
bit-zyl/Alluxio-Nvdimm
core/server/src/test/java/alluxio/StorageTierAssocTest.java
Java
apache-2.0
3,480
/* * Copyright 2009-2013 by The Regents of the University of California * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * you may obtain a copy of the License from * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.uci.ics.asterix.lexergenerator.rules; public class RuleAnythingUntil implements Rule { private char expected; public RuleAnythingUntil clone() { return new RuleAnythingUntil(expected); } public RuleAnythingUntil(char expected) { this.expected = expected; } @Override public String toString() { return " .* " + String.valueOf(expected); } @Override public int hashCode() { return 10 * (int) expected; } @Override public boolean equals(Object o) { if (o == null) return false; if (o instanceof RuleAnythingUntil) { if (((RuleAnythingUntil) o).expected == this.expected) { return true; } } return false; } @Override public String javaAction() { return "currentChar = readNextChar();"; } @Override public String javaMatch(String action) { return "boolean escaped = false;\n" + "while (currentChar != '" + expected + "' || escaped) {\n" + "if(!escaped && currentChar == '\\\\\\\\') {\n" + "escaped = true;\n" + "containsEscapes = true;\n" + "} else {\n" + "escaped = false;\n" + "}\n" + "currentChar = readNextChar();\n" + "}\n" + "if (currentChar == '" + expected + "') {" + action + "}\n"; } }
parshimers/incubator-asterixdb
asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java
Java
apache-2.0
2,007
package com.netwebx.hackerrank.rpc.client; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.InetSocketAddress; import java.net.Socket; /** * Created by apple on 2017/2/26. */ public class RpcImporter<S> { public S importer(final Class<?> serviceClass, final InetSocketAddress addr) { return (S) Proxy.newProxyInstance( serviceClass.getClassLoader(), new Class<?>[]{serviceClass.getInterfaces()[0]}, new InvocationHandler() { @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { Socket socket = null; ObjectOutputStream output = null; ObjectInputStream input = null; try { socket = new Socket(); socket.connect(addr); output = new ObjectOutputStream(socket.getOutputStream()); output.writeUTF(serviceClass.getName()); output.writeUTF(method.getName()); output.writeObject(method.getParameterTypes()); output.writeObject(args); input = new ObjectInputStream(socket.getInputStream()); return input.readObject(); } finally { if (socket != null) { socket.close(); } if (output != null) { output.close(); } if (input != null) { input.close(); } } } } ); } }
WengJunFeng/hackerrank_java
src/main/java/com/netwebx/hackerrank/rpc/client/RpcImporter.java
Java
apache-2.0
2,053
require 'adrian/queue' require 'fileutils' module Adrian class DirectoryQueue < Adrian::Queue include Filters def self.create(options = {}) queue = new(options) FileUtils.mkdir_p(queue.available_path) FileUtils.mkdir_p(queue.reserved_path) queue end attr_reader :available_path, :reserved_path, :logger # Note: # There is the possibility of an item being consumed by multiple processes when its still in the queue after its lock expires. # The reason for allowing this is: # 1. It's much simpler than introducing a seperate monitoring process to handle lock expiry. # 2. This is an acceptable and rare event. e.g. it only happens when the process working on the item crashes without being able to release the lock def initialize(options = {}) super @available_path = options.fetch(:path) @reserved_path = options.fetch(:reserved_path, default_reserved_path) @logger = options[:logger] filters << Filters::FileLock.new(:duration => options[:lock_duration], :reserved_path => reserved_path) filters << Filters::Delay.new(:duration => options[:delay]) if options[:delay] end def pop_item items.each do |item| return item if reserve(item) end nil end def push_item(value) item = wrap_item(value) item.move(available_path) item.touch self end def length available_files.count { |file| File.file?(file) } end def include?(value) item = wrap_item(value) items.include?(item) end protected def wrap_item(value) item = value.is_a?(FileItem) ? value : FileItem.new(value) item.logger ||= logger item end def reserve(item) item.move(reserved_path) item.touch true rescue Errno::ENOENT => e false end def items items = files.map { |file| wrap_item(file) } items.reject! { |item| !item.exist? || filter?(item) } items.sort_by(&:updated_at) end def files (available_files + reserved_files).select { |file| File.file?(file) } end def available_files Dir.glob("#{available_path}/*") end def reserved_files Dir.glob("#{reserved_path}/*") end def default_reserved_path File.join(@available_path, 'cur') end end end
staugaard/adrian
lib/adrian/directory_queue.rb
Ruby
apache-2.0
2,384
// SERVER-4516 and SERVER-6913: test that update and findAndModify tolerate // an _id in the update document, as long as the _id will not be modified var t = db.jstests_server4516; var startingDoc = {_id: 1, a: 1}; function prepare() { t.drop(); t.save(startingDoc); } function update_succeeds(updateDoc, qid, resultDoc) { prepare(); t.update({_id: qid}, updateDoc, true); assert.eq(t.findOne({_id: qid}), resultDoc); prepare(); t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true}); assert.eq(t.findOne({_id: qid}), resultDoc); } update_succeeds({_id: 1, a: 2}, 1, {_id: 1, a: 2}); update_succeeds({$set: {_id: 1}}, 1, {_id: 1, a: 1}); update_succeeds({_id: 1, b: "a"}, 1, {_id: 1, b: "a"}); update_succeeds({_id: 2, a: 3}, 2, {_id: 2, a: 3}); function update_fails(updateDoc, qid) { prepare(); var res = t.update({_id: qid}, updateDoc, true); assert.writeError(res); assert.eq(t.count(), 1); assert.eq(t.findOne(), startingDoc); prepare(); assert.throws(function() { t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true}); }); assert.eq(t.count(), 1); assert.eq(t.findOne(), startingDoc); } update_fails({$set: {_id: 2}}, 1); update_fails({_id: 2, a: 3}, 1); update_fails({_id: 2, a: 3}, 3);
christkv/mongo-shell
test/jstests/core/update_find_and_modify_id.js
JavaScript
apache-2.0
1,313
package com.oath.cyclops.internal.stream.spliterators.push; import com.oath.cyclops.types.persistent.PersistentCollection; import java.util.Collection; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; /** * Created by johnmcclean on 12/01/2017. */ public class GroupedByTimeOperator<T,C extends PersistentCollection<? super T>,R> extends BaseOperator<T,R> { private final Supplier<? extends C> factory; private final Function<? super C, ? extends R> finalizer; private final long time; private final TimeUnit t; public GroupedByTimeOperator(Operator<T> source, Supplier<? extends C> factory, Function<? super C, ? extends R> finalizer,long time, TimeUnit t){ super(source); this.factory = factory; this.finalizer = finalizer; this.time = time; this.t = t; } @Override public StreamSubscription subscribe(Consumer<? super R> onNext, Consumer<? super Throwable> onError, Runnable onComplete) { long toRun = t.toNanos(time); PersistentCollection[] next = {factory.get()}; long[] start ={System.nanoTime()}; StreamSubscription[] upstream = {null}; StreamSubscription sub = new StreamSubscription(){ @Override public void request(long n) { if(n<=0) { onError.accept(new IllegalArgumentException("3.9 While the Subscription is not cancelled, Subscription.request(long n) MUST throw a java.lang.IllegalArgumentException if the argument is <= 0.")); return; } if(!isOpen) return; super.request(n); upstream[0].request(n); } @Override public void cancel() { upstream[0].cancel(); super.cancel(); } }; upstream[0] = source.subscribe(e-> { try { next[0] = next[0].plus(e); if(System.nanoTime()-start[0] > toRun){ onNext.accept(finalizer.apply((C)next[0])); sub.requested.decrementAndGet(); next[0] = factory.get(); start[0] = System.nanoTime(); } else{ request( upstream,1l); } } catch (Throwable t) { onError.accept(t); } } ,t->{onError.accept(t); sub.requested.decrementAndGet(); if(sub.isActive()) request( upstream,1); },()->{ if(next[0].size()>0) { try { onNext.accept(finalizer.apply((C) next[0])); } catch(Throwable t){ onError.accept(t); } sub.requested.decrementAndGet(); } sub.cancel(); onComplete.run(); }); return sub; } @Override public void subscribeAll(Consumer<? super R> onNext, Consumer<? super Throwable> onError, Runnable onCompleteDs) { long toRun = t.toNanos(time); PersistentCollection[] next = {factory.get()}; long[] start ={System.nanoTime()}; source.subscribeAll(e-> { try { next[0] = next[0].plus(e); if(System.nanoTime()-start[0] > toRun){ onNext.accept(finalizer.apply((C)next[0])); next[0] = factory.get(); start[0] = System.nanoTime(); } } catch (Throwable t) { onError.accept(t); } } ,onError,()->{ if(next[0].size()>0) { try { onNext.accept(finalizer.apply((C) next[0])); } catch(Throwable t){ onError.accept(t); } } onCompleteDs.run(); }); } }
aol/cyclops
cyclops/src/main/java/com/oath/cyclops/internal/stream/spliterators/push/GroupedByTimeOperator.java
Java
apache-2.0
4,567
<?php /*************************************************************************** * * * (c) 2004 Vladimir V. Kalynyak, Alexey V. Vinokurov, Ilya M. Shalnev * * * * This is commercial software, only users who have purchased a valid * * license and accept to the terms of the License Agreement can install * * and use this program. * * * **************************************************************************** * PLEASE READ THE FULL TEXT OF THE SOFTWARE LICENSE AGREEMENT IN THE * * "copyright.txt" FILE PROVIDED WITH THIS DISTRIBUTION PACKAGE. * ****************************************************************************/ namespace Tygh\Exceptions; class ClassNotFoundException extends AException { }
sandymariscal22/BrandsCsCart
public_html/app/Tygh/Exceptions/ClassNotFoundException.php
PHP
apache-2.0
1,012
// +build linux /* Copyright The containerd Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package linux import ( "context" "fmt" "io/ioutil" "os" "path/filepath" "time" "github.com/boltdb/bolt" eventstypes "github.com/containerd/containerd/api/events" "github.com/containerd/containerd/api/types" "github.com/containerd/containerd/containers" "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/events/exchange" "github.com/containerd/containerd/identifiers" "github.com/containerd/containerd/linux/proc" "github.com/containerd/containerd/linux/runctypes" shim "github.com/containerd/containerd/linux/shim/v1" "github.com/containerd/containerd/log" "github.com/containerd/containerd/metadata" "github.com/containerd/containerd/mount" "github.com/containerd/containerd/namespaces" "github.com/containerd/containerd/platforms" "github.com/containerd/containerd/plugin" "github.com/containerd/containerd/runtime" runc "github.com/containerd/go-runc" "github.com/containerd/typeurl" ptypes "github.com/gogo/protobuf/types" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" "github.com/sirupsen/logrus" "golang.org/x/sys/unix" ) var ( pluginID = fmt.Sprintf("%s.%s", plugin.RuntimePlugin, "linux") empty = &ptypes.Empty{} ) const ( configFilename = "config.json" defaultRuntime = "runc" defaultShim = "containerd-shim" ) func init() { plugin.Register(&plugin.Registration{ Type: plugin.RuntimePlugin, ID: "linux", InitFn: New, Requires: []plugin.Type{ plugin.TaskMonitorPlugin, plugin.MetadataPlugin, }, Config: &Config{ Shim: defaultShim, Runtime: defaultRuntime, }, }) } var _ = (runtime.Runtime)(&Runtime{}) // Config options for the runtime type Config struct { // Shim is a path or name of binary implementing the Shim GRPC API Shim string `toml:"shim"` // Runtime is a path or name of an OCI runtime used by the shim Runtime string `toml:"runtime"` // RuntimeRoot is the path that shall be used by the OCI runtime for its data RuntimeRoot string `toml:"runtime_root"` // NoShim calls runc directly from within the pkg NoShim bool `toml:"no_shim"` // Debug enable debug on the shim ShimDebug bool `toml:"shim_debug"` } // New returns a configured runtime func New(ic *plugin.InitContext) (interface{}, error) { ic.Meta.Platforms = []ocispec.Platform{platforms.DefaultSpec()} if err := os.MkdirAll(ic.Root, 0711); err != nil { return nil, err } if err := os.MkdirAll(ic.State, 0711); err != nil { return nil, err } monitor, err := ic.Get(plugin.TaskMonitorPlugin) if err != nil { return nil, err } m, err := ic.Get(plugin.MetadataPlugin) if err != nil { return nil, err } cfg := ic.Config.(*Config) r := &Runtime{ root: ic.Root, state: ic.State, monitor: monitor.(runtime.TaskMonitor), tasks: runtime.NewTaskList(), db: m.(*metadata.DB), address: ic.Address, events: ic.Events, config: cfg, } tasks, err := r.restoreTasks(ic.Context) if err != nil { return nil, err } // TODO: need to add the tasks to the monitor for _, t := range tasks { if err := r.tasks.AddWithNamespace(t.namespace, t); err != nil { return nil, err } } return r, nil } // Runtime for a linux based system type Runtime struct { root string state string address string monitor runtime.TaskMonitor tasks *runtime.TaskList db *metadata.DB events *exchange.Exchange config *Config } // ID of the runtime func (r *Runtime) ID() string { return pluginID } // Create a new task func (r *Runtime) Create(ctx context.Context, id string, opts runtime.CreateOpts) (_ runtime.Task, err error) { namespace, err := namespaces.NamespaceRequired(ctx) if err != nil { return nil, err } if err := identifiers.Validate(id); err != nil { return nil, errors.Wrapf(err, "invalid task id") } ropts, err := r.getRuncOptions(ctx, id) if err != nil { return nil, err } bundle, err := newBundle(id, filepath.Join(r.state, namespace), filepath.Join(r.root, namespace), opts.Spec.Value) if err != nil { return nil, err } defer func() { if err != nil { bundle.Delete() } }() shimopt := ShimLocal(r.config, r.events) if !r.config.NoShim { var cgroup string if opts.Options != nil { v, err := typeurl.UnmarshalAny(opts.Options) if err != nil { return nil, err } cgroup = v.(*runctypes.CreateOptions).ShimCgroup } exitHandler := func() { log.G(ctx).WithField("id", id).Info("shim reaped") t, err := r.tasks.Get(ctx, id) if err != nil { // Task was never started or was already sucessfully deleted return } lc := t.(*Task) // Stop the monitor if err := r.monitor.Stop(lc); err != nil { log.G(ctx).WithError(err).WithFields(logrus.Fields{ "id": id, "namespace": namespace, }).Warn("failed to stop monitor") } log.G(ctx).WithFields(logrus.Fields{ "id": id, "namespace": namespace, }).Warn("cleaning up after killed shim") if err = r.cleanupAfterDeadShim(context.Background(), bundle, namespace, id, lc.pid); err != nil { log.G(ctx).WithError(err).WithFields(logrus.Fields{ "id": id, "namespace": namespace, }).Warn("failed to clen up after killed shim") } } shimopt = ShimRemote(r.config, r.address, cgroup, exitHandler) } s, err := bundle.NewShimClient(ctx, namespace, shimopt, ropts) if err != nil { return nil, err } defer func() { if err != nil { if kerr := s.KillShim(ctx); kerr != nil { log.G(ctx).WithError(err).Error("failed to kill shim") } } }() rt := r.config.Runtime if ropts != nil && ropts.Runtime != "" { rt = ropts.Runtime } sopts := &shim.CreateTaskRequest{ ID: id, Bundle: bundle.path, Runtime: rt, Stdin: opts.IO.Stdin, Stdout: opts.IO.Stdout, Stderr: opts.IO.Stderr, Terminal: opts.IO.Terminal, Checkpoint: opts.Checkpoint, Options: opts.Options, } for _, m := range opts.Rootfs { sopts.Rootfs = append(sopts.Rootfs, &types.Mount{ Type: m.Type, Source: m.Source, Options: m.Options, }) } cr, err := s.Create(ctx, sopts) if err != nil { return nil, errdefs.FromGRPC(err) } t, err := newTask(id, namespace, int(cr.Pid), s, r.monitor, r.events, proc.NewRunc(ropts.RuntimeRoot, sopts.Bundle, namespace, rt, ropts.CriuPath, ropts.SystemdCgroup)) if err != nil { return nil, err } if err := r.tasks.Add(ctx, t); err != nil { return nil, err } // after the task is created, add it to the monitor if it has a cgroup // this can be different on a checkpoint/restore if t.cg != nil { if err = r.monitor.Monitor(t); err != nil { if _, err := r.Delete(ctx, t); err != nil { log.G(ctx).WithError(err).Error("deleting task after failed monitor") } return nil, err } } r.events.Publish(ctx, runtime.TaskCreateEventTopic, &eventstypes.TaskCreate{ ContainerID: sopts.ID, Bundle: sopts.Bundle, Rootfs: sopts.Rootfs, IO: &eventstypes.TaskIO{ Stdin: sopts.Stdin, Stdout: sopts.Stdout, Stderr: sopts.Stderr, Terminal: sopts.Terminal, }, Checkpoint: sopts.Checkpoint, Pid: uint32(t.pid), }) return t, nil } // Delete a task removing all on disk state func (r *Runtime) Delete(ctx context.Context, c runtime.Task) (*runtime.Exit, error) { namespace, err := namespaces.NamespaceRequired(ctx) if err != nil { return nil, err } lc, ok := c.(*Task) if !ok { return nil, fmt.Errorf("task cannot be cast as *linux.Task") } if err := r.monitor.Stop(lc); err != nil { return nil, err } bundle := loadBundle( lc.id, filepath.Join(r.state, namespace, lc.id), filepath.Join(r.root, namespace, lc.id), ) rsp, err := lc.shim.Delete(ctx, empty) if err != nil { if cerr := r.cleanupAfterDeadShim(ctx, bundle, namespace, c.ID(), lc.pid); cerr != nil { log.G(ctx).WithError(err).Error("unable to cleanup task") } return nil, errdefs.FromGRPC(err) } r.tasks.Delete(ctx, lc.id) if err := lc.shim.KillShim(ctx); err != nil { log.G(ctx).WithError(err).Error("failed to kill shim") } if err := bundle.Delete(); err != nil { log.G(ctx).WithError(err).Error("failed to delete bundle") } r.events.Publish(ctx, runtime.TaskDeleteEventTopic, &eventstypes.TaskDelete{ ContainerID: lc.id, ExitStatus: rsp.ExitStatus, ExitedAt: rsp.ExitedAt, Pid: rsp.Pid, }) return &runtime.Exit{ Status: rsp.ExitStatus, Timestamp: rsp.ExitedAt, Pid: rsp.Pid, }, nil } // Tasks returns all tasks known to the runtime func (r *Runtime) Tasks(ctx context.Context) ([]runtime.Task, error) { return r.tasks.GetAll(ctx) } func (r *Runtime) restoreTasks(ctx context.Context) ([]*Task, error) { dir, err := ioutil.ReadDir(r.state) if err != nil { return nil, err } var o []*Task for _, namespace := range dir { if !namespace.IsDir() { continue } name := namespace.Name() log.G(ctx).WithField("namespace", name).Debug("loading tasks in namespace") tasks, err := r.loadTasks(ctx, name) if err != nil { return nil, err } o = append(o, tasks...) } return o, nil } // Get a specific task by task id func (r *Runtime) Get(ctx context.Context, id string) (runtime.Task, error) { return r.tasks.Get(ctx, id) } func (r *Runtime) loadTasks(ctx context.Context, ns string) ([]*Task, error) { dir, err := ioutil.ReadDir(filepath.Join(r.state, ns)) if err != nil { return nil, err } var o []*Task for _, path := range dir { if !path.IsDir() { continue } id := path.Name() bundle := loadBundle( id, filepath.Join(r.state, ns, id), filepath.Join(r.root, ns, id), ) ctx = namespaces.WithNamespace(ctx, ns) pid, _ := runc.ReadPidFile(filepath.Join(bundle.path, proc.InitPidFile)) s, err := bundle.NewShimClient(ctx, ns, ShimConnect(r.config, func() { err := r.cleanupAfterDeadShim(ctx, bundle, ns, id, pid) if err != nil { log.G(ctx).WithError(err).WithField("bundle", bundle.path). Error("cleaning up after dead shim") } }), nil) if err != nil { log.G(ctx).WithError(err).WithFields(logrus.Fields{ "id": id, "namespace": ns, }).Error("connecting to shim") err := r.cleanupAfterDeadShim(ctx, bundle, ns, id, pid) if err != nil { log.G(ctx).WithError(err).WithField("bundle", bundle.path). Error("cleaning up after dead shim") } continue } ropts, err := r.getRuncOptions(ctx, id) if err != nil { log.G(ctx).WithError(err).WithField("id", id). Error("get runtime options") continue } t, err := newTask(id, ns, pid, s, r.monitor, r.events, proc.NewRunc(ropts.RuntimeRoot, bundle.path, ns, ropts.Runtime, ropts.CriuPath, ropts.SystemdCgroup)) if err != nil { log.G(ctx).WithError(err).Error("loading task type") continue } o = append(o, t) } return o, nil } func (r *Runtime) cleanupAfterDeadShim(ctx context.Context, bundle *bundle, ns, id string, pid int) error { ctx = namespaces.WithNamespace(ctx, ns) if err := r.terminate(ctx, bundle, ns, id); err != nil { if r.config.ShimDebug { return errors.Wrap(err, "failed to terminate task, leaving bundle for debugging") } log.G(ctx).WithError(err).Warn("failed to terminate task") } // Notify Client exitedAt := time.Now().UTC() r.events.Publish(ctx, runtime.TaskExitEventTopic, &eventstypes.TaskExit{ ContainerID: id, ID: id, Pid: uint32(pid), ExitStatus: 128 + uint32(unix.SIGKILL), ExitedAt: exitedAt, }) r.tasks.Delete(ctx, id) if err := bundle.Delete(); err != nil { log.G(ctx).WithError(err).Error("delete bundle") } r.events.Publish(ctx, runtime.TaskDeleteEventTopic, &eventstypes.TaskDelete{ ContainerID: id, Pid: uint32(pid), ExitStatus: 128 + uint32(unix.SIGKILL), ExitedAt: exitedAt, }) return nil } func (r *Runtime) terminate(ctx context.Context, bundle *bundle, ns, id string) error { rt, err := r.getRuntime(ctx, ns, id) if err != nil { return err } if err := rt.Delete(ctx, id, &runc.DeleteOpts{ Force: true, }); err != nil { log.G(ctx).WithError(err).Warnf("delete runtime state %s", id) } if err := mount.Unmount(filepath.Join(bundle.path, "rootfs"), 0); err != nil { log.G(ctx).WithError(err).WithFields(logrus.Fields{ "path": bundle.path, "id": id, }).Warnf("unmount task rootfs") } return nil } func (r *Runtime) getRuntime(ctx context.Context, ns, id string) (*runc.Runc, error) { ropts, err := r.getRuncOptions(ctx, id) if err != nil { return nil, err } var ( cmd = r.config.Runtime root = proc.RuncRoot ) if ropts != nil { if ropts.Runtime != "" { cmd = ropts.Runtime } if ropts.RuntimeRoot != "" { root = ropts.RuntimeRoot } } return &runc.Runc{ Command: cmd, LogFormat: runc.JSON, PdeathSignal: unix.SIGKILL, Root: filepath.Join(root, ns), }, nil } func (r *Runtime) getRuncOptions(ctx context.Context, id string) (*runctypes.RuncOptions, error) { var container containers.Container if err := r.db.View(func(tx *bolt.Tx) error { store := metadata.NewContainerStore(tx) var err error container, err = store.Get(ctx, id) return err }); err != nil { return nil, err } if container.Runtime.Options != nil { v, err := typeurl.UnmarshalAny(container.Runtime.Options) if err != nil { return nil, err } ropts, ok := v.(*runctypes.RuncOptions) if !ok { return nil, errors.New("invalid runtime options format") } return ropts, nil } return &runctypes.RuncOptions{}, nil }
mikebrow/cri-containerd
vendor/github.com/containerd/containerd/linux/runtime.go
GO
apache-2.0
14,104
package net.tcp.socket; import java.io.DataOutputStream; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; /** * 必须先启动服务器 后连接 1、创建服务器 指定端口 ServerSocket(int port) 2、接收客户端连接 3、发送数据+接收数据 * */ public class Server { /** * @param args * @throws IOException */ public static void main(String[] args) throws IOException { // 1、创建服务器 指定端口 ServerSocket(int port) ServerSocket server = new ServerSocket(8888); // 2、接收客户端连接 阻塞式 while (true) { Socket socket = server.accept(); System.out.println("一个客户端建立连接"); // 3、发送数据 String msg = "欢迎使用"; // 输出流 /* * BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( * socket.getOutputStream())); * * bw.write(msg); bw.newLine(); bw.flush(); */ DataOutputStream dos = new DataOutputStream(socket.getOutputStream()); dos.writeUTF(msg); dos.flush(); } } }
zhangxx0/Java_Topic_prictice
src/net/tcp/socket/Server.java
Java
apache-2.0
1,059
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.management.impl; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Hashtable; import java.util.List; import java.util.NoSuchElementException; import javax.management.MBeanServerConnection; import javax.management.MalformedObjectNameException; import javax.management.ObjectInstance; import javax.management.ObjectName; import org.neo4j.jmx.ManagementInterface; /** * Does not have any public methods - since the public interface of * {@link org.neo4j.management.Neo4jManager} should be defined completely in * that class. * * Does not have any (direct or transitive) dependencies on any part of the jmx * component - since this class is used in * {@link org.neo4j.management.impl.jconsole.Neo4jPlugin the JConsole plugin}, * and the jmx component is not on the class path in JConsole. * * @author Tobias Ivarsson <[email protected]> */ public abstract class KernelProxy { static final String KERNEL_BEAN_TYPE = "org.neo4j.jmx.Kernel"; protected static final String KERNEL_BEAN_NAME = "Kernel"; static final String MBEAN_QUERY = "MBeanQuery"; protected final MBeanServerConnection server; protected final ObjectName kernel; protected KernelProxy( MBeanServerConnection server, ObjectName kernel ) { String className = null; try { className = server.getMBeanInfo( kernel ).getClassName(); } catch ( Exception e ) { // fall through } if ( !KERNEL_BEAN_TYPE.equals( className ) ) { throw new IllegalArgumentException( "The specified ObjectName does not represent a Neo4j Kernel bean in the specified MBean server." ); } this.server = server; this.kernel = kernel; } protected List<Object> allBeans() { List<Object> beans = new ArrayList<Object>(); Iterable<ObjectInstance> mbeans; try { mbeans = server.queryMBeans( mbeanQuery(), null ); } catch ( IOException handled ) { return beans; } for ( ObjectInstance instance : mbeans ) { String className = instance.getClassName(); Class<?> beanType = null; try { if ( className != null ) beanType = Class.forName( className ); } catch ( Exception ignored ) { // fall through } catch ( LinkageError ignored ) { // fall through } if ( beanType != null ) { try { beans.add( BeanProxy.load( server, beanType, instance.getObjectName() ) ); } catch ( Exception ignored ) { // fall through } } } return beans; } private ObjectName assertExists( ObjectName name ) { try { if ( !server.queryNames( name, null ).isEmpty() ) { return name; } } catch ( IOException handled ) { // fall through } throw new NoSuchElementException( "No MBeans matching " + name ); } protected <T> T getBean( Class<T> beanInterface ) { return BeanProxy.load( server, beanInterface, createObjectName( beanInterface ) ); } protected <T> Collection<T> getBeans( Class<T> beanInterface ) { return BeanProxy.loadAll( server, beanInterface, createObjectNameQuery( beanInterface ) ); } private ObjectName createObjectNameQuery( Class<?> beanInterface ) { return createObjectNameQuery( mbeanQuery(), beanInterface ); } private ObjectName createObjectName( Class<?> beanInterface ) { return assertExists( createObjectName( mbeanQuery(), beanInterface ) ); } protected ObjectName createObjectName( String beanName ) { return assertExists( createObjectName( mbeanQuery(), beanName, false ) ); } protected ObjectName mbeanQuery() { try { return (ObjectName) server.getAttribute( kernel, MBEAN_QUERY ); } catch ( Exception cause ) { throw new IllegalStateException( "Could not get MBean query.", cause ); } } protected static ObjectName createObjectName( String kernelIdentifier, Class<?> beanInterface ) { return createObjectName( kernelIdentifier, beanName( beanInterface ) ); } protected static ObjectName createObjectName( String kernelIdentifier, String beanName, String... extraNaming ) { Hashtable<String, String> properties = new Hashtable<String, String>(); properties.put( "instance", "kernel#" + kernelIdentifier ); return createObjectName( "org.neo4j", properties, beanName, false, extraNaming ); } static ObjectName createObjectNameQuery( String kernelIdentifier, String beanName, String... extraNaming ) { Hashtable<String, String> properties = new Hashtable<String, String>(); properties.put( "instance", "kernel#" + kernelIdentifier ); return createObjectName( "org.neo4j", properties, beanName, true, extraNaming ); } static ObjectName createObjectName( ObjectName query, Class<?> beanInterface ) { return createObjectName( query, beanName( beanInterface ), false ); } static ObjectName createObjectNameQuery( ObjectName query, Class<?> beanInterface ) { return createObjectName( query, beanName( beanInterface ), true ); } private static ObjectName createObjectName( ObjectName query, String beanName, boolean isQuery ) { Hashtable<String, String> properties = new Hashtable<String, String>(query.getKeyPropertyList()); return createObjectName( query.getDomain(), properties, beanName, isQuery ); } static String beanName( Class<?> beanInterface ) { if ( beanInterface.isInterface() ) { ManagementInterface management = beanInterface.getAnnotation( ManagementInterface.class ); if ( management != null ) { return management.name(); } } throw new IllegalArgumentException( beanInterface + " is not a Neo4j Management Been interface" ); } private static ObjectName createObjectName( String domain, Hashtable<String, String> properties, String beanName, boolean query, String... extraNaming ) { properties.put( "name", beanName ); for ( int i = 0; i < extraNaming.length; i++ ) { properties.put( "name" + i, extraNaming[i] ); } ObjectName result; try { result = new ObjectName( domain, properties ); if ( query ) result = ObjectName.getInstance( result.toString() + ",*" ); } catch ( MalformedObjectNameException e ) { return null; } return result; } }
HuangLS/neo4j
advanced/management/src/main/java/org/neo4j/management/impl/KernelProxy.java
Java
apache-2.0
8,016
<!--//页头--> <div class="contanier"> <div class="link right"> <span>第一次使用微信?</span> <a href="#">立即注册</a> <a href="#">腾讯客服</a> </div> <div class="logo"> <img src="../img/talk_bg.png" alt=""> <span>微信,是一种生活方式</span> </div> </div>
wuhaoxiangfau/wechat
wechat_location/public/details/header2.html
HTML
apache-2.0
339
# AUTOGENERATED FILE FROM balenalib/bananapi-m1-plus-ubuntu:disco-build ENV NODE_VERSION 10.23.1 ENV YARN_VERSION 1.22.4 RUN for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && echo "8f965f2757efcf3077d655bfcea36f7a29c58958355e0eb23cfb725740c3ccbe node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@node" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu disco \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v10.23.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
nghiant2710/base-images
balena-base-images/node/bananapi-m1-plus/ubuntu/disco/10.23.1/build/Dockerfile
Dockerfile
apache-2.0
2,767
package de.mhus.cha.cao.action; import java.io.File; import de.mhus.lib.cao.CaoElement; import de.mhus.lib.cao.CaoException; import de.mhus.lib.cao.CaoList; import de.mhus.lib.cao.CaoMonitor; import de.mhus.lib.cao.CaoOperation; import de.mhus.cap.core.Access; import de.mhus.cha.cao.ChaConnection; import de.mhus.cha.cao.ChaElement; import de.mhus.lib.MFile; import de.mhus.lib.form.MForm; import de.mhus.lib.form.annotations.FormElement; import de.mhus.lib.form.annotations.FormSortId; @FormElement("name='cha_copy_to_folder' title='Copy'") public class CopyToOperation extends CaoOperation implements MForm { private CaoList<Access> sources; private ChaElement target; private ChaConnection connection; public CopyToOperation(ChaElement ChaElement) { target = ChaElement; } @Override public void dispose() throws CaoException { } @Override public void execute() throws CaoException { connection = (ChaConnection)target.getConnection(); //collect all affected entries monitor.beginTask("count", CaoMonitor.UNKNOWN); int cnt = 0; for (CaoElement<Access> element : sources.getElements()) { cnt = count( ((ChaElement)element).getFile(), cnt ); } monitor.beginTask("copy", cnt); cnt = 0; for (CaoElement<Access> element : sources.getElements()) { cnt = copy( target.getFile(), ((ChaElement)element).getFile(), cnt ); } } private int copy(File target, File file, int cnt) { // validate action if (monitor.isCanceled()) return cnt; if ( !file.isDirectory()) return cnt; // for secure // new path File newTarget = null; cnt++; monitor.worked(cnt); newTarget = new File(target,connection.createUID()); monitor.log().debug("Create Dir: " + newTarget.getAbsolutePath()); monitor.subTask(file.getAbsolutePath()); // validate path if ( newTarget.exists() ) { monitor.log().warn("Folder already exists: " + newTarget.getAbsolutePath()); return cnt; } // create if ( ! newTarget.mkdir() ) { newTarget = null; monitor.log().warn("Can't create folder: " + target.getAbsolutePath() + "/" + file.getName()); return cnt; } // set id connection.addIdPath(newTarget.getName(), newTarget.getAbsolutePath()); // events connection.fireElementCreated(newTarget.getName()); connection.fireElementLink(target.getName(), newTarget.getName()); // copy files for ( File sub : file.listFiles()) { if (sub.isFile()) { monitor.log().debug("Copy File: " + file.getAbsolutePath()); File targetFile = new File(target,file.getName()); if (targetFile.exists()) { monitor.log().warn("Can't overwrite file: " + file.getAbsolutePath()); } else if ( !MFile.copyFile(file, targetFile) ) { monitor.log().warn("Can't copy file: " + file.getAbsolutePath()); } } } // copy sub folders for ( File sub : file.listFiles(connection.getDefaultFileFilter())) { cnt = copy(newTarget, sub,cnt); } return cnt; } private int count(File file, int cnt) { if (monitor.isCanceled()) return cnt; if ( file.isDirectory() ) cnt++; if (!file.isDirectory()) return cnt; // for secure for ( File sub : file.listFiles(connection.getDefaultFileFilter())) { cnt = count(sub,cnt); } return cnt; } @Override public void initialize() throws CaoException { } public void setSources(CaoList<Access> list) { sources = list; } }
mhus/mhus-inka
de.mhus.hair/hair3/de.mhus.cha.app/src/de/mhus/cha/cao/action/CopyToOperation.java
Java
apache-2.0
3,411
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.util.csv; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Timestamp; import java.sql.Types; import java.util.Base64; import java.util.List; import java.util.Properties; import javax.annotation.Nullable; import org.apache.commons.csv.CSVRecord; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.expression.function.EncodeFormat; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; import org.apache.phoenix.schema.IllegalDataException; import org.apache.phoenix.schema.types.PBinary; import org.apache.phoenix.schema.types.PBoolean; import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.schema.types.PDataType.PDataCodec; import org.apache.phoenix.schema.types.PTimestamp; import org.apache.phoenix.schema.types.PVarbinary; import org.apache.phoenix.util.ColumnInfo; import org.apache.phoenix.util.DateUtil; import org.apache.phoenix.util.ReadOnlyProps; import org.apache.phoenix.util.UpsertExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; /** {@link UpsertExecutor} over {@link CSVRecord}s. */ public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> { private static final Logger LOG = LoggerFactory.getLogger(CsvUpsertExecutor.class); protected final String arrayElementSeparator; /** Testing constructor. Do not use in prod. */ @VisibleForTesting protected CsvUpsertExecutor(Connection conn, List<ColumnInfo> columnInfoList, PreparedStatement stmt, UpsertListener<CSVRecord> upsertListener, String arrayElementSeparator) { super(conn, columnInfoList, stmt, upsertListener); this.arrayElementSeparator = arrayElementSeparator; finishInit(); } public CsvUpsertExecutor(Connection conn, String tableName, List<ColumnInfo> columnInfoList, UpsertListener<CSVRecord> upsertListener, String arrayElementSeparator) { super(conn, tableName, columnInfoList, upsertListener); this.arrayElementSeparator = arrayElementSeparator; finishInit(); } @Override protected void execute(CSVRecord csvRecord) { try { if (csvRecord.size() < conversionFunctions.size()) { String message = String.format("CSV record does not have enough values (has %d, but needs %d)", csvRecord.size(), conversionFunctions.size()); throw new IllegalArgumentException(message); } for (int fieldIndex = 0; fieldIndex < conversionFunctions.size(); fieldIndex++) { Object sqlValue = conversionFunctions.get(fieldIndex).apply(csvRecord.get(fieldIndex)); if (sqlValue != null) { preparedStatement.setObject(fieldIndex + 1, sqlValue); } else { preparedStatement.setNull(fieldIndex + 1, dataTypes.get(fieldIndex).getSqlType()); } } preparedStatement.execute(); upsertListener.upsertDone(++upsertCount); } catch (Exception e) { if (LOG.isDebugEnabled()) { // Even though this is an error we only log it with debug logging because we're notifying the // listener, and it can do its own logging if needed LOG.debug("Error on CSVRecord " + csvRecord, e); } upsertListener.errorOnRecord(csvRecord, e); } } @Override protected Function<String, Object> createConversionFunction(PDataType dataType) { if (dataType.isArrayType()) { return new ArrayDatatypeConversionFunction( new StringToArrayConverter( conn, arrayElementSeparator, PDataType.fromTypeId(dataType.getSqlType() - PDataType.ARRAY_TYPE_BASE))); } else { return new SimpleDatatypeConversionFunction(dataType, this.conn); } } /** * Performs typed conversion from String values to a given column value type. */ static class SimpleDatatypeConversionFunction implements Function<String, Object> { private final PDataType dataType; private final PDataCodec codec; private final DateUtil.DateTimeParser dateTimeParser; private final String binaryEncoding; SimpleDatatypeConversionFunction(PDataType dataType, Connection conn) { ReadOnlyProps props; try { props = conn.unwrap(PhoenixConnection.class).getQueryServices().getProps(); } catch (SQLException e) { throw new RuntimeException(e); } this.dataType = dataType; PDataCodec codec = dataType.getCodec(); if(dataType.isCoercibleTo(PTimestamp.INSTANCE)) { codec = DateUtil.getCodecFor(dataType); // TODO: move to DateUtil String dateFormat; int dateSqlType = dataType.getResultSetSqlType(); if (dateSqlType == Types.DATE) { dateFormat = props.get(QueryServices.DATE_FORMAT_ATTRIB, DateUtil.DEFAULT_DATE_FORMAT); } else if (dateSqlType == Types.TIME) { dateFormat = props.get(QueryServices.TIME_FORMAT_ATTRIB, DateUtil.DEFAULT_TIME_FORMAT); } else { dateFormat = props.get(QueryServices.TIMESTAMP_FORMAT_ATTRIB, DateUtil.DEFAULT_TIMESTAMP_FORMAT); } String timeZoneId = props.get(QueryServices.DATE_FORMAT_TIMEZONE_ATTRIB, QueryServicesOptions.DEFAULT_DATE_FORMAT_TIMEZONE); this.dateTimeParser = DateUtil.getDateTimeParser(dateFormat, dataType, timeZoneId); } else { this.dateTimeParser = null; } this.codec = codec; this.binaryEncoding = props.get(QueryServices.UPLOAD_BINARY_DATA_TYPE_ENCODING, QueryServicesOptions.DEFAULT_UPLOAD_BINARY_DATA_TYPE_ENCODING); } @Nullable @Override public Object apply(@Nullable String input) { if (input == null || input.isEmpty()) { return null; } if (dataType == PTimestamp.INSTANCE) { return DateUtil.parseTimestamp(input); } if (dateTimeParser != null) { long epochTime = dateTimeParser.parseDateTime(input); byte[] byteValue = new byte[dataType.getByteSize()]; codec.encodeLong(epochTime, byteValue, 0); return dataType.toObject(byteValue); } else if (dataType == PBoolean.INSTANCE) { switch (input.toLowerCase()) { case "true": case "t": case "1": return Boolean.TRUE; case "false": case "f": case "0": return Boolean.FALSE; default: throw new RuntimeException("Invalid boolean value: '" + input + "', must be one of ['true','t','1','false','f','0']"); } }else if (dataType == PVarbinary.INSTANCE || dataType == PBinary.INSTANCE){ EncodeFormat format = EncodeFormat.valueOf(binaryEncoding.toUpperCase()); Object object = null; switch (format) { case BASE64: object = Base64.getDecoder().decode(input); if (object == null) { throw new IllegalDataException( "Input: [" + input + "] is not base64 encoded"); } break; case ASCII: object = Bytes.toBytes(input); break; default: throw new IllegalDataException("Unsupported encoding \"" + binaryEncoding + "\""); } return object; } return dataType.toObject(input); } } /** * Converts string representations of arrays into Phoenix arrays of the correct type. */ private static class ArrayDatatypeConversionFunction implements Function<String, Object> { private final StringToArrayConverter arrayConverter; private ArrayDatatypeConversionFunction(StringToArrayConverter arrayConverter) { this.arrayConverter = arrayConverter; } @Nullable @Override public Object apply(@Nullable String input) { try { return arrayConverter.toArray(input); } catch (SQLException e) { throw new RuntimeException(e); } } } }
ohadshacham/phoenix
phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
Java
apache-2.0
10,075
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.reteoo.common; import org.drools.core.SessionConfiguration; import org.drools.core.WorkingMemoryEntryPoint; import org.drools.core.base.DroolsQuery; import org.drools.core.common.BaseNode; import org.drools.core.common.InternalAgenda; import org.drools.core.common.InternalFactHandle; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.common.WorkingMemoryAction; import org.drools.core.event.AgendaEventSupport; import org.drools.core.event.RuleEventListenerSupport; import org.drools.core.event.RuleRuntimeEventSupport; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.StatefulKnowledgeSessionImpl; import org.drools.core.phreak.PropagationEntry; import org.drools.core.reteoo.LIANodePropagation; import org.drools.core.spi.FactHandleFactory; import org.drools.core.spi.PropagationContext; import org.kie.api.runtime.Environment; import org.kie.api.runtime.rule.AgendaFilter; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; public class ReteWorkingMemory extends StatefulKnowledgeSessionImpl { private List<LIANodePropagation> liaPropagations; private Queue<WorkingMemoryAction> actionQueue; private AtomicBoolean evaluatingActionQueue = new AtomicBoolean(false); /** Flag to determine if a rule is currently being fired. */ private volatile AtomicBoolean firing = new AtomicBoolean(false); public ReteWorkingMemory() { } public ReteWorkingMemory(long id, InternalKnowledgeBase kBase) { super(id, kBase); } public ReteWorkingMemory(long id, InternalKnowledgeBase kBase, boolean initInitFactHandle, SessionConfiguration config, Environment environment) { super(id, kBase, initInitFactHandle, config, environment); } public ReteWorkingMemory(long id, InternalKnowledgeBase kBase, FactHandleFactory handleFactory, long propagationContext, SessionConfiguration config, InternalAgenda agenda, Environment environment) { super(id, kBase, handleFactory, propagationContext, config, agenda, environment); } public ReteWorkingMemory(long id, InternalKnowledgeBase kBase, FactHandleFactory handleFactory, InternalFactHandle initialFactHandle, long propagationContext, SessionConfiguration config, Environment environment, RuleRuntimeEventSupport workingMemoryEventSupport, AgendaEventSupport agendaEventSupport, RuleEventListenerSupport ruleEventListenerSupport, InternalAgenda agenda) { super(id, kBase, handleFactory, false, propagationContext, config, environment, workingMemoryEventSupport, agendaEventSupport, ruleEventListenerSupport, agenda); } @Override protected void init() { this.actionQueue = new ConcurrentLinkedQueue<WorkingMemoryAction>(); this.propagationList = new RetePropagationList(this); } @Override public void reset() { super.reset(); actionQueue.clear(); } @Override public void reset(int handleId, long handleCounter, long propagationCounter) { super.reset(handleId, handleCounter, propagationCounter ); if (liaPropagations != null) liaPropagations.clear(); actionQueue.clear(); } @Override public WorkingMemoryEntryPoint getWorkingMemoryEntryPoint(String name) { WorkingMemoryEntryPoint ep = this.entryPoints.get(name); return ep != null ? new ReteWorkingMemoryEntryPoint( this, ep ) : null; } public void addLIANodePropagation(LIANodePropagation liaNodePropagation) { if (liaPropagations == null) liaPropagations = new ArrayList<LIANodePropagation>(); liaPropagations.add( liaNodePropagation ); } private final Object syncLock = new Object(); public void initInitialFact() { if ( initialFactHandle == null ) { synchronized ( syncLock ) { if ( initialFactHandle == null ) { // double check, inside of sync point incase some other thread beat us to it. initInitialFact(kBase, null); } } } } @Override public void fireUntilHalt(final AgendaFilter agendaFilter) { initInitialFact(); super.fireUntilHalt( agendaFilter ); } @Override public int fireAllRules(final AgendaFilter agendaFilter, int fireLimit) { checkAlive(); if ( this.firing.compareAndSet( false, true ) ) { initInitialFact(); try { startOperation(); return internalFireAllRules(agendaFilter, fireLimit); } finally { endOperation(); this.firing.set( false ); } } return 0; } private int internalFireAllRules(AgendaFilter agendaFilter, int fireLimit) { int fireCount = 0; try { kBase.readLock(); // If we're already firing a rule, then it'll pick up the firing for any other assertObject(..) that get // nested inside, avoiding concurrent-modification exceptions, depending on code paths of the actions. if ( liaPropagations != null && isSequential() ) { for ( LIANodePropagation liaPropagation : liaPropagations ) { ( liaPropagation ).doPropagation( this ); } } // do we need to call this in advance? executeQueuedActionsForRete(); fireCount = this.agenda.fireAllRules( agendaFilter, fireLimit ); } finally { kBase.readUnlock(); if (kBase.flushModifications()) { fireCount += internalFireAllRules(agendaFilter, fireLimit); } } return fireCount; } @Override public void closeLiveQuery(final InternalFactHandle factHandle) { try { startOperation(); this.kBase.readLock(); this.lock.lock(); final PropagationContext pCtx = pctxFactory.createPropagationContext(getNextPropagationIdCounter(), PropagationContext.INSERTION, null, null, factHandle, getEntryPoint()); getEntryPointNode().retractQuery( factHandle, pCtx, this ); pCtx.evaluateActionQueue(this); getFactHandleFactory().destroyFactHandle( factHandle ); } finally { this.lock.unlock(); this.kBase.readUnlock(); endOperation(); } } @Override protected BaseNode[] evalQuery(String queryName, DroolsQuery queryObject, InternalFactHandle handle, PropagationContext pCtx) { initInitialFact(); BaseNode[] tnodes = kBase.getReteooBuilder().getTerminalNodesForQuery( queryName ); // no need to call retract, as no leftmemory used. getEntryPointNode().assertQuery( handle, pCtx, this ); pCtx.evaluateActionQueue( this ); return tnodes; } public Collection<WorkingMemoryAction> getActionQueue() { return actionQueue; } @Override public void queueWorkingMemoryAction(final WorkingMemoryAction action) { try { startOperation(); actionQueue.add(action); notifyWaitOnRest(); } finally { endOperation(); } } public void addPropagation(PropagationEntry propagationEntry) { if (propagationEntry instanceof WorkingMemoryAction) { actionQueue.add((WorkingMemoryAction) propagationEntry); } else { super.addPropagation(propagationEntry); } } @Override public void executeQueuedActionsForRete() { try { startOperation(); if ( evaluatingActionQueue.compareAndSet( false, true ) ) { try { if ( actionQueue!= null && !actionQueue.isEmpty() ) { WorkingMemoryAction action; while ( (action = actionQueue.poll()) != null ) { try { action.execute( (InternalWorkingMemory) this ); } catch ( Exception e ) { throw new RuntimeException( "Unexpected exception executing action " + action.toString(), e ); } } } } finally { evaluatingActionQueue.compareAndSet( true, false ); } } } finally { endOperation(); } } @Override public Iterator<? extends PropagationEntry> getActionsIterator() { return actionQueue.iterator(); } }
mrietveld/drools
drools-reteoo/src/main/java/org/drools/reteoo/common/ReteWorkingMemory.java
Java
apache-2.0
9,985
# Senecillis veitchiana (Hemsl.) Kitam. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Ligularia/Ligularia veitchiana/ Syn. Senecillis veitchiana/README.md
Markdown
apache-2.0
194
/* Copyright (c) DataStax, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "integration.hpp" /** * Prepared metadata related tests */ class PreparedMetadataTests : public Integration { public: void SetUp() { Integration::SetUp(); session_.execute( format_string(CASSANDRA_KEY_VALUE_TABLE_FORMAT, table_name_.c_str(), "int", "int")); session_.execute( format_string(CASSANDRA_KEY_VALUE_INSERT_FORMAT, table_name_.c_str(), "1", "99")); } /** * Check the column count of a bound statement before and after adding a * column to a table. * * @param session * @param expected_column_count_after_update */ void prepared_check_column_count_after_alter(Session session, size_t expected_column_count_after_update) { Statement bound_statement = session.prepare(format_string("SELECT * FROM %s WHERE key = 1", table_name_.c_str())) .bind(); // Verify that the table has two columns in the metadata { Result result = session.execute(bound_statement); EXPECT_EQ(2u, result.column_count()); } // Add a column to the table session.execute(format_string("ALTER TABLE %s ADD value2 int", table_name_.c_str())); // The column count should match the expected after the alter { Result result = session.execute(bound_statement); EXPECT_EQ(expected_column_count_after_update, result.column_count()); } } }; /** * Verify that the column count of a bound statement's result metadata doesn't * change for older protocol versions (v4 and less) when a table's schema is altered. * * @since 2.8 */ CASSANDRA_INTEGRATION_TEST_F(PreparedMetadataTests, AlterDoesntUpdateColumnCount) { CHECK_FAILURE; // Ensure beta protocol is not set Session session = default_cluster() .with_beta_protocol(false) .with_protocol_version(CASS_PROTOCOL_VERSION_V4) .connect(keyspace_name_); // The column count will stay the same even after the alter prepared_check_column_count_after_alter(session, 2u); } /** * Verify that the column count of a bound statement's result metadata is * properly updated for newer protocol versions (v5 and greater) when a table's * schema is altered. * * @since 2.8 */ CASSANDRA_INTEGRATION_TEST_F(PreparedMetadataTests, AlterProperlyUpdatesColumnCount) { CHECK_FAILURE; CHECK_VERSION(4.0.0); // Ensure protocol v5 or greater Session session = default_cluster().with_beta_protocol(true).connect(keyspace_name_); // The column count will properly update after the alter prepared_check_column_count_after_alter(session, 3u); }
datastax/cpp-driver
tests/src/integration/tests/test_prepared_metadata.cpp
C++
apache-2.0
3,220
<!DOCTYPE html> <html> <!-- Copyright 2008 The Closure Library Authors. All Rights Reserved. Use of this source code is governed by the Apache License, Version 2.0. See the COPYING file for details. --> <!-- --> <head> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title>Closure Unit Tests - goog.ui.SliderBase</title> <script src="../base.js"></script> <script type="text/javascript"> goog.require('goog.dom'); goog.require('goog.a11y.aria'); goog.require('goog.a11y.aria.State'); goog.require('goog.dom.classes'); goog.require('goog.events'); goog.require('goog.events.EventType'); goog.require('goog.events.KeyCodes'); goog.require('goog.fx.Animation'); goog.require('goog.math.Coordinate'); goog.require('goog.style'); goog.require('goog.style.bidi'); goog.require('goog.testing.MockClock'); goog.require('goog.testing.MockControl'); goog.require('goog.testing.events'); goog.require('goog.testing.jsunit'); goog.require('goog.testing.mockmatchers'); goog.require('goog.testing.recordFunction'); goog.require('goog.ui.Component'); goog.require('goog.ui.SliderBase'); goog.require('goog.userAgent'); </script> <style type="text/css"> #oneThumbSlider { position: relative; width: 1000px; background: grey; height: 20px; } #oneThumbSlider.test-slider-vertical { height: 1000px; width: 20px; } #twoThumbSlider { position: relative; /* Extra 20px is so distance between thumb centers is 1000px */ width: 1020px; } #valueThumb, #extentThumb { position: absolute; width: 20px; } #thumb { position: absolute; width: 20px; height: 20px; background: black; top: 5px; } .test-slider-vertical > #thumb { left: 5px; top: auto; } #rangeHighlight { position: absolute; } </style> </head> <body> <div id="sandbox"></div> <script type="text/javascript"> var oneThumbSlider; var oneThumbSliderRtl; var oneChangeEventCount; var twoThumbSlider; var twoThumbSliderRtl; var twoChangeEventCount; var mockClock; var mockAnimation; /** * A basic class to implement the abstract goog.ui.SliderBase for testing. * @constructor * @extends {goog.ui.SliderBase} */ function OneThumbSlider() { goog.ui.SliderBase.call(this); } goog.inherits(OneThumbSlider, goog.ui.SliderBase); /** {@override} */ OneThumbSlider.prototype.createThumbs = function() { this.valueThumb = this.extentThumb = goog.dom.getElement('thumb'); }; /** {@override} */ OneThumbSlider.prototype.getCssClass = function(orientation) { return goog.getCssName('test-slider', orientation); }; /** * A basic class to implement the abstract goog.ui.SliderBase for testing. * @constructor * @extends {goog.ui.SliderBase} */ function TwoThumbSlider() { goog.ui.SliderBase.call(this); } goog.inherits(TwoThumbSlider, goog.ui.SliderBase); /** {@override} */ TwoThumbSlider.prototype.createThumbs = function() { this.valueThumb = goog.dom.getElement('valueThumb'); this.extentThumb = goog.dom.getElement('extentThumb'); this.rangeHighlight = goog.dom.getElement('rangeHighlight'); }; /** {@override} */ TwoThumbSlider.prototype.getCssClass = function(orientation) { return goog.getCssName('test-slider', orientation); }; /** * Basic class that implements the AnimationFactory interface for testing. * @param {!goog.fx.Animation|!Array.<!goog.fx.Animation>} testAnimations The * test animations to use. * @constructor * @implements {goog.ui.SliderBase.AnimationFactory} */ function AnimationFactory(testAnimations) { this.testAnimations = testAnimations; } /** @override */ AnimationFactory.prototype.createAnimations = function() { return this.testAnimations; }; function setUp() { var sandBox = goog.dom.getElement('sandbox'); mockClock = new goog.testing.MockClock(true); var oneThumbElem = goog.dom.createDom( 'div', {'id': 'oneThumbSlider'}, goog.dom.createDom('span', {'id': 'thumb'})); sandBox.appendChild(oneThumbElem); oneThumbSlider = new OneThumbSlider(); oneThumbSlider.decorate(oneThumbElem); oneChangeEventCount = 0; goog.events.listen(oneThumbSlider, goog.ui.Component.EventType.CHANGE, function() { oneChangeEventCount++; }); var twoThumbElem = goog.dom.createDom( 'div', {'id': 'twoThumbSlider'}, goog.dom.createDom('div', {'id': 'rangeHighlight'}), goog.dom.createDom('span', {'id': 'valueThumb'}), goog.dom.createDom('span', {'id': 'extentThumb'})); sandBox.appendChild(twoThumbElem); twoThumbSlider = new TwoThumbSlider(); twoThumbSlider.decorate(twoThumbElem); twoChangeEventCount = 0; goog.events.listen(twoThumbSlider, goog.ui.Component.EventType.CHANGE, function() { twoChangeEventCount++; }); var sandBoxRtl = goog.dom.createDom('div', {'dir': 'rtl', 'style': 'position:absolute;'}); sandBox.appendChild(sandBoxRtl); var oneThumbElemRtl = goog.dom.createDom( 'div', {'id': 'oneThumbSliderRtl'}, goog.dom.createDom('span', {'id': 'thumbRtl'})); sandBoxRtl.appendChild(oneThumbElemRtl); oneThumbSliderRtl = new OneThumbSlider(); oneThumbSliderRtl.enableFlipForRtl(true); oneThumbSliderRtl.decorate(oneThumbElemRtl); goog.events.listen(oneThumbSliderRtl, goog.ui.Component.EventType.CHANGE, function() { oneChangeEventCount++; }); var twoThumbElemRtl = goog.dom.createDom( 'div', {'id': 'twoThumbSliderRtl'}, goog.dom.createDom('div', {'id': 'rangeHighlightRtl'}), goog.dom.createDom('span', {'id': 'valueThumbRtl'}), goog.dom.createDom('span', {'id': 'extentThumbRtl'})); sandBoxRtl.appendChild(twoThumbElemRtl); twoThumbSliderRtl = new TwoThumbSlider(); twoThumbSliderRtl.enableFlipForRtl(true); twoThumbSliderRtl.decorate(twoThumbElemRtl); twoChangeEventCount = 0; goog.events.listen(twoThumbSliderRtl, goog.ui.Component.EventType.CHANGE, function() { twoChangeEventCount++; }); } function tearDown() { <<<<<<< HEAD goog.events.removeAll(); ======= goog.events.removeAllNativeListeners(); >>>>>>> newgitrepo oneThumbSlider.dispose(); twoThumbSlider.dispose(); oneThumbSliderRtl.dispose(); twoThumbSliderRtl.dispose(); mockClock.dispose(); goog.dom.getElement('sandbox').innerHTML = ''; } function testGetAndSetValue() { oneThumbSlider.setValue(30); assertEquals(30, oneThumbSlider.getValue()); assertEquals('Setting valid value must dispatch only a single change event.', 1, oneChangeEventCount); oneThumbSlider.setValue(30); assertEquals(30, oneThumbSlider.getValue()); assertEquals('Setting to same value must not dispatch change event.', 1, oneChangeEventCount); oneThumbSlider.setValue(-30); assertEquals('Setting invalid value must not change value.', 30, oneThumbSlider.getValue()); assertEquals('Setting invalid value must not dispatch change event.', 1, oneChangeEventCount); // Value thumb can't go past extent thumb, so we must move that first to // allow setting value. twoThumbSlider.setExtent(70); twoChangeEventCount = 0; twoThumbSlider.setValue(60); assertEquals(60, twoThumbSlider.getValue()); assertEquals('Setting valid value must dispatch only a single change event.', 1, twoChangeEventCount); twoThumbSlider.setValue(60); assertEquals(60, twoThumbSlider.getValue()); assertEquals('Setting to same value must not dispatch change event.', 1, twoChangeEventCount); twoThumbSlider.setValue(-60); assertEquals('Setting invalid value must not change value.', 60, twoThumbSlider.getValue()); assertEquals('Setting invalid value must not dispatch change event.', 1, twoChangeEventCount); } function testGetAndSetValueRtl() { var thumbElement = goog.dom.getElement('thumbRtl'); assertEquals(0, goog.style.bidi.getOffsetStart(thumbElement)); assertEquals('', thumbElement.style.left); assertTrue(thumbElement.style.right >= 0); oneThumbSliderRtl.setValue(30); assertEquals(30, oneThumbSliderRtl.getValue()); assertEquals('Setting valid value must dispatch only a single change event.', 1, oneChangeEventCount); assertEquals('', thumbElement.style.left); assertTrue(thumbElement.style.right >= 0); oneThumbSliderRtl.setValue(30); assertEquals(30, oneThumbSliderRtl.getValue()); assertEquals('Setting to same value must not dispatch change event.', 1, oneChangeEventCount); oneThumbSliderRtl.setValue(-30); assertEquals('Setting invalid value must not change value.', 30, oneThumbSliderRtl.getValue()); assertEquals('Setting invalid value must not dispatch change event.', 1, oneChangeEventCount); // Value thumb can't go past extent thumb, so we must move that first to // allow setting value. var valueThumbElement = goog.dom.getElement('valueThumbRtl'); var extentThumbElement = goog.dom.getElement('extentThumbRtl'); assertEquals(0, goog.style.bidi.getOffsetStart(valueThumbElement)); assertEquals(0, goog.style.bidi.getOffsetStart(extentThumbElement)); assertEquals('', valueThumbElement.style.left); assertTrue(valueThumbElement.style.right >= 0); assertEquals('', extentThumbElement.style.left); assertTrue(extentThumbElement.style.right >= 0); twoThumbSliderRtl.setExtent(70); twoChangeEventCount = 0; twoThumbSliderRtl.setValue(60); assertEquals(60, twoThumbSliderRtl.getValue()); assertEquals('Setting valid value must dispatch only a single change event.', 1, twoChangeEventCount); twoThumbSliderRtl.setValue(60); assertEquals(60, twoThumbSliderRtl.getValue()); assertEquals('Setting to same value must not dispatch change event.', 1, twoChangeEventCount); assertEquals('', valueThumbElement.style.left); assertTrue(valueThumbElement.style.right >= 0); assertEquals('', extentThumbElement.style.left); assertTrue(extentThumbElement.style.right >= 0); twoThumbSliderRtl.setValue(-60); assertEquals('Setting invalid value must not change value.', 60, twoThumbSliderRtl.getValue()); assertEquals('Setting invalid value must not dispatch change event.', 1, twoChangeEventCount); } function testGetAndSetExtent() { // Note(user): With a one thumb slider the API only really makes sense if you // always use setValue since there is no extent. twoThumbSlider.setExtent(7); assertEquals(7, twoThumbSlider.getExtent()); assertEquals('Setting valid value must dispatch only a single change event.', 1, twoChangeEventCount); twoThumbSlider.setExtent(7); assertEquals(7, twoThumbSlider.getExtent()); assertEquals('Setting to same value must not dispatch change event.', 1, twoChangeEventCount); twoThumbSlider.setExtent(-7); assertEquals('Setting invalid value must not change value.', 7, twoThumbSlider.getExtent()); assertEquals('Setting invalid value must not dispatch change event.', 1, twoChangeEventCount); } function testUpdateValueExtent() { twoThumbSlider.setValueAndExtent(30, 50); assertNotNull(twoThumbSlider.getElement()); assertEquals('Setting value results in updating aria-valuenow', '30', goog.a11y.aria.getState(twoThumbSlider.getElement(), goog.a11y.aria.State.VALUENOW)); assertEquals(30, twoThumbSlider.getValue()); assertEquals(50, twoThumbSlider.getExtent()); } function testRangeListener() { var slider = new goog.ui.SliderBase; slider.updateUi_ = slider.updateAriaStates = function() {}; slider.rangeModel.setValue(0); var f = goog.testing.recordFunction(); goog.events.listen(slider, goog.ui.Component.EventType.CHANGE, f); slider.rangeModel.setValue(50); assertEquals(1, f.getCallCount()); slider.exitDocument(); slider.rangeModel.setValue(0); assertEquals('The range model listener should not have been removed so we ' + 'should have gotten a second event dispatch', 2, f.getCallCount()); } /** * Verifies that rangeHighlight position and size are correct for the given * startValue and endValue. Assumes slider has default min/max values [0, 100], * width of 1020px, and thumb widths of 20px, with rangeHighlight drawn from * the centers of the thumbs. * @param {number} rangeHighlight The range highlight. * @param {number} startValue The start value. * @param {number} endValue The end value. */ function assertHighlightedRange(rangeHighlight, startValue, endValue) { var rangeStr = '[' + startValue + ', ' + endValue + ']'; var rangeStart = 10 + 10 * startValue; assertEquals('Range highlight for ' + rangeStr + ' should start at ' + rangeStart + 'px.', rangeStart, rangeHighlight.offsetLeft); var rangeSize = 10 * (endValue - startValue); assertEquals('Range highlight for ' + rangeStr + ' should have size ' + rangeSize + 'px.', rangeSize, rangeHighlight.offsetWidth); } function testKeyHandlingTests() { twoThumbSlider.setValue(0); twoThumbSlider.setExtent(100); assertEquals(0, twoThumbSlider.getValue()); assertEquals(100, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.RIGHT); assertEquals(1, twoThumbSlider.getValue()); assertEquals(99, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.RIGHT); assertEquals(2, twoThumbSlider.getValue()); assertEquals(98, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.LEFT); assertEquals(1, twoThumbSlider.getValue()); assertEquals(98, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.LEFT); assertEquals(0, twoThumbSlider.getValue()); assertEquals(98, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.RIGHT, { shiftKey: true }); assertEquals(10, twoThumbSlider.getValue()); assertEquals(90, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.RIGHT, { shiftKey: true }); assertEquals(20, twoThumbSlider.getValue()); assertEquals(80, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.LEFT, { shiftKey: true }); assertEquals(10, twoThumbSlider.getValue()); assertEquals(80, twoThumbSlider.getExtent()); goog.testing.events.fireKeySequence( twoThumbSlider.getElement(), goog.events.KeyCodes.LEFT, { shiftKey: true }); assertEquals(0, twoThumbSlider.getValue()); assertEquals(80, twoThumbSlider.getExtent()); } function testKeyHandlingRtl() { twoThumbSliderRtl.setValue(0); twoThumbSliderRtl.setExtent(100); assertEquals(0, twoThumbSliderRtl.getValue()); assertEquals(100, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.RIGHT); assertEquals(0, twoThumbSliderRtl.getValue()); assertEquals(99, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.RIGHT); assertEquals(0, twoThumbSliderRtl.getValue()); assertEquals(98, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.LEFT); assertEquals(1, twoThumbSliderRtl.getValue()); assertEquals(98, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.LEFT); assertEquals(2, twoThumbSliderRtl.getValue()); assertEquals(98, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.RIGHT, { shiftKey: true }); assertEquals(0, twoThumbSliderRtl.getValue()); assertEquals(90, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.RIGHT, { shiftKey: true }); assertEquals(0, twoThumbSliderRtl.getValue()); assertEquals(80, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.LEFT, { shiftKey: true }); assertEquals(10, twoThumbSliderRtl.getValue()); assertEquals(80, twoThumbSliderRtl.getExtent()); goog.testing.events.fireKeySequence( twoThumbSliderRtl.getElement(), goog.events.KeyCodes.LEFT, { shiftKey: true }); assertEquals(20, twoThumbSliderRtl.getValue()); assertEquals(80, twoThumbSliderRtl.getExtent()); } function testRangeHighlight() { var rangeHighlight = goog.dom.getElement('rangeHighlight'); // Test [0, 100] twoThumbSlider.setValue(0); twoThumbSlider.setExtent(100); assertHighlightedRange(rangeHighlight, 0, 100); // Test [25, 75] twoThumbSlider.setValue(25); twoThumbSlider.setExtent(50); assertHighlightedRange(rangeHighlight, 25, 75); // Test [50, 50] twoThumbSlider.setValue(50); twoThumbSlider.setExtent(0); assertHighlightedRange(rangeHighlight, 50, 50); } function testRangeHighlightAnimation() { var animationDelay = 160; // Delay in ms, is a bit higher than actual delay. if (goog.userAgent.IE) { // For some reason, (probably due to how timing works), IE7 and IE8 will not // stop if we don't wait for it. animationDelay = 250; } var rangeHighlight = goog.dom.getElement('rangeHighlight'); twoThumbSlider.setValue(0); twoThumbSlider.setExtent(100); // Animate right thumb, final range is [0, 75] twoThumbSlider.animatedSetValue(75); assertHighlightedRange(rangeHighlight, 0, 100); mockClock.tick(animationDelay); assertHighlightedRange(rangeHighlight, 0, 75); // Animate left thumb, final range is [25, 75] twoThumbSlider.animatedSetValue(25); assertHighlightedRange(rangeHighlight, 0, 75); mockClock.tick(animationDelay); assertHighlightedRange(rangeHighlight, 25, 75); } /** * Verifies that no error occurs and that the range highlight is sized correctly * for a zero-size slider (i.e. doesn't attempt to set a negative size). The * test tries to resize the slider from its original size to 0, then checks * that the range highlight's size is correctly set to 0. * * The size verification is needed because Webkit/Gecko outright ignore calls * to set negative sizes on an element, leaving it at its former size. IE * throws an error in the same situation. */ function testRangeHighlightForZeroSizeSlider() { // Make sure range highlight spans whole slider before zeroing width. twoThumbSlider.setExtent(100); twoThumbSlider.getElement().style.width = 0; // The setVisible call is used to force a UI update. twoThumbSlider.setVisible(true); assertEquals('Range highlight size should be 0 when slider size is 0', 0, goog.dom.getElement('rangeHighlight').offsetWidth); } function testAnimatedSetValueAnimatesFactoryCreatedAnimations() { // Create and set the factory. var ignore = goog.testing.mockmatchers.ignoreArgument; var mockControl = new goog.testing.MockControl(); var mockAnimation1 = mockControl.createLooseMock(goog.fx.Animation); var mockAnimation2 = mockControl.createLooseMock(goog.fx.Animation); var testAnimations = [mockAnimation1, mockAnimation2]; oneThumbSlider.setAdditionalAnimations(new AnimationFactory(testAnimations)); // Expect the animations to be played. mockAnimation1.play(false); mockAnimation2.play(false); mockAnimation1.addEventListener(ignore, ignore, ignore); mockAnimation2.addEventListener(ignore, ignore, ignore); // Animate and verify. mockControl.$replayAll(); oneThumbSlider.animatedSetValue(50); mockControl.$verifyAll(); mockControl.$resetAll(); mockControl.$tearDown(); } function testMouseWheelEventHandlerEnable() { // Mouse wheel handling should be enabled by default. assertTrue(oneThumbSlider.isHandleMouseWheel()); // Test disabling the mouse wheel handler oneThumbSlider.setHandleMouseWheel(false); assertFalse(oneThumbSlider.isHandleMouseWheel()); // Test that enabling again works fine. oneThumbSlider.setHandleMouseWheel(true); assertTrue(oneThumbSlider.isHandleMouseWheel()); // Test that mouse wheel handling can be disabled before rendering a slider. var wheelDisabledElem = goog.dom.createDom( 'div', {}, goog.dom.createDom('span')); var wheelDisabledSlider = new OneThumbSlider(); wheelDisabledSlider.setHandleMouseWheel(false); wheelDisabledSlider.decorate(wheelDisabledElem); assertFalse(wheelDisabledSlider.isHandleMouseWheel()); } function testDisabledAndEnabledSlider() { // Check that a slider is enabled by default assertTrue(oneThumbSlider.isEnabled()); var listenerCount = oneThumbSlider.getHandler().getListenerCount(); // Disable the slider and check its state oneThumbSlider.setEnabled(false); assertFalse(oneThumbSlider.isEnabled()); assertTrue(goog.dom.classes.has( oneThumbSlider.getElement(), 'goog-slider-disabled')); assertEquals(0, oneThumbSlider.getHandler().getListenerCount()); // setValue should work unaffected even when the slider is disabled. oneThumbSlider.setValue(30); assertEquals(30, oneThumbSlider.getValue()); assertEquals('Setting valid value must dispatch a change event ' + 'even when slider is disabled.', 1, oneChangeEventCount); // Test the transition from disabled to enabled oneThumbSlider.setEnabled(true); assertTrue(oneThumbSlider.isEnabled()); assertFalse(goog.dom.classes.has( oneThumbSlider.getElement(), 'goog-slider-disabled')); assertTrue(listenerCount == oneThumbSlider.getHandler().getListenerCount()); } function testBlockIncrementingWithEnableAndDisabled() { var doc = goog.dom.getOwnerDocument(oneThumbSlider.getElement()); // Case when slider is not disabled between the mouse down and up events. goog.testing.events.fireMouseDownEvent(oneThumbSlider.getElement()); assertEquals(1, goog.events.getListeners( oneThumbSlider.getElement(), goog.events.EventType.MOUSEMOVE, false).length); assertEquals(1, goog.events.getListeners( doc, goog.events.EventType.MOUSEUP, true).length); goog.testing.events.fireMouseUpEvent(oneThumbSlider.getElement()); assertEquals(0, goog.events.getListeners( oneThumbSlider.getElement(), goog.events.EventType.MOUSEMOVE, false).length); assertEquals(0, goog.events.getListeners( doc, goog.events.EventType.MOUSEUP, true).length); // Case when the slider is disabled between the mouse down and up events. goog.testing.events.fireMouseDownEvent(oneThumbSlider.getElement()); assertEquals(1, goog.events.getListeners( oneThumbSlider.getElement(), goog.events.EventType.MOUSEMOVE, false).length); assertEquals(1, goog.events.getListeners(doc, goog.events.EventType.MOUSEUP, true).length); oneThumbSlider.setEnabled(false); assertEquals(0, goog.events.getListeners( oneThumbSlider.getElement(), goog.events.EventType.MOUSEMOVE, false).length); assertEquals(0, goog.events.getListeners( doc, goog.events.EventType.MOUSEUP, true).length); assertEquals(1, oneThumbSlider.getHandler().getListenerCount()); goog.testing.events.fireMouseUpEvent(oneThumbSlider.getElement()); assertEquals(0, goog.events.getListeners( oneThumbSlider.getElement(), goog.events.EventType.MOUSEMOVE, false).length); assertEquals(0, goog.events.getListeners( doc, goog.events.EventType.MOUSEUP, true).length); } function testMouseClickWithMoveToPointEnabled() { var stepSize = 20; oneThumbSlider.setStep(stepSize); oneThumbSlider.setMoveToPointEnabled(true); var initialValue = oneThumbSlider.getValue(); // Figure out the number of pixels per step. var numSteps = Math.round( (oneThumbSlider.getMaximum() - oneThumbSlider.getMinimum()) / stepSize); var size = goog.style.getSize(oneThumbSlider.getElement()); var pixelsPerStep = Math.round(size.width / numSteps); var coords = goog.style.getClientPosition(oneThumbSlider.getElement()); coords.x += pixelsPerStep / 2; // Case when value is increased goog.testing.events.fireClickSequence(oneThumbSlider.getElement(), /* opt_button */ undefined, coords); assertEquals(oneThumbSlider.getValue(), initialValue + stepSize); // Case when value is decreased goog.testing.events.fireClickSequence(oneThumbSlider.getElement(), /* opt_button */ undefined, coords); assertEquals(oneThumbSlider.getValue(), initialValue); // Case when thumb is clicked goog.testing.events.fireClickSequence(oneThumbSlider.getElement()); assertEquals(oneThumbSlider.getValue(), initialValue); } <<<<<<< HEAD ======= function testNonIntegerStepSize() { var stepSize = 0.02; oneThumbSlider.setStep(stepSize); oneThumbSlider.setMinimum(-1); oneThumbSlider.setMaximum(1); oneThumbSlider.setValue(0.7); assertRoughlyEquals(0.7, oneThumbSlider.getValue(), 0.000001); oneThumbSlider.setValue(0.3); assertRoughlyEquals(0.3, oneThumbSlider.getValue(), 0.000001); } >>>>>>> newgitrepo /** * Tests getThumbCoordinateForValue method. */ function testThumbCoordinateForValueWithHorizontalSlider() { // Make sure the y-coordinate stays the same for the horizontal slider. var originalY = goog.style.getPosition(oneThumbSlider.valueThumb).y; var width = oneThumbSlider.getElement().clientWidth - oneThumbSlider.valueThumb.offsetWidth; var range = oneThumbSlider.getMaximum() - oneThumbSlider.getMinimum(); // Verify coordinate for a particular value. var value = 20; var expectedX = Math.round(value / range * width); var expectedCoord = new goog.math.Coordinate(expectedX, originalY); var coord = oneThumbSlider.getThumbCoordinateForValue(value); assertObjectEquals(expectedCoord, coord); // Verify this works regardless of current position. oneThumbSlider.setValue(value / 2); coord = oneThumbSlider.getThumbCoordinateForValue(value); assertObjectEquals(expectedCoord, coord); } function testThumbCoordinateForValueWithVerticalSlider() { // Make sure the x-coordinate stays the same for the vertical slider. oneThumbSlider.setOrientation(goog.ui.SliderBase.Orientation.VERTICAL); var originalX = goog.style.getPosition(oneThumbSlider.valueThumb).x; var height = oneThumbSlider.getElement().clientHeight - oneThumbSlider.valueThumb.offsetHeight; var range = oneThumbSlider.getMaximum() - oneThumbSlider.getMinimum(); // Verify coordinate for a particular value. var value = 20; var expectedY = height - Math.round(value / range * height); var expectedCoord = new goog.math.Coordinate(originalX, expectedY); var coord = oneThumbSlider.getThumbCoordinateForValue(value); assertObjectEquals(expectedCoord, coord); // Verify this works regardless of current position. oneThumbSlider.setValue(value / 2); coord = oneThumbSlider.getThumbCoordinateForValue(value); assertObjectEquals(expectedCoord, coord); } /** * Tests getValueFromMousePosition method. */ function testValueFromMousePosition() { var value = 30; oneThumbSlider.setValue(value); var offset = goog.style.getPageOffset(oneThumbSlider.valueThumb); var size = goog.style.getSize(oneThumbSlider.valueThumb); offset.x += size.width / 2; offset.y += size.height / 2; var e = null; goog.events.listen(oneThumbSlider, goog.events.EventType.MOUSEMOVE, function(evt) { e = evt; }); goog.testing.events.fireMouseMoveEvent(oneThumbSlider, offset); assertNotEquals(e, null); assertEquals( value, Math.round(oneThumbSlider.getValueFromMousePosition(e))); // Verify this works regardless of current position. oneThumbSlider.setValue(value / 2); assertEquals( value, Math.round(oneThumbSlider.getValueFromMousePosition(e))); } /** * Tests dragging events. */ function testDragEvents() { var offset = goog.style.getPageOffset(oneThumbSlider.valueThumb); var size = goog.style.getSize(oneThumbSlider.valueThumb); offset.x += size.width / 2; offset.y += size.height / 2; var event_types = []; var handler = function(evt) { event_types.push(evt.type); }; goog.events.listen(oneThumbSlider, [goog.ui.SliderBase.EventType.DRAG_START, goog.ui.SliderBase.EventType.DRAG_END, goog.ui.SliderBase.EventType.DRAG_VALUE_START, goog.ui.SliderBase.EventType.DRAG_VALUE_END, goog.ui.SliderBase.EventType.DRAG_EXTENT_START, goog.ui.SliderBase.EventType.DRAG_EXTENT_END, goog.ui.Component.EventType.CHANGE], handler); // Since the order of the events between value and extent is not guaranteed // accross browsers, we need to allow for both here and once we have // them all, make sure that they were different. function isValueOrExtentDragStart(type) { return type == goog.ui.SliderBase.EventType.DRAG_VALUE_START || type == goog.ui.SliderBase.EventType.DRAG_EXTENT_START; }; function isValueOrExtentDragEnd(type) { return type == goog.ui.SliderBase.EventType.DRAG_VALUE_END || type == goog.ui.SliderBase.EventType.DRAG_EXTENT_END; }; // Test that dragging the thumb calls all the correct events. goog.testing.events.fireMouseDownEvent(oneThumbSlider.valueThumb); offset.x += 100; goog.testing.events.fireMouseMoveEvent(oneThumbSlider.valueThumb, offset); goog.testing.events.fireMouseUpEvent(oneThumbSlider.valueThumb); assertEquals(9, event_types.length); assertEquals(goog.ui.SliderBase.EventType.DRAG_START, event_types[0]); assertTrue(isValueOrExtentDragStart(event_types[1])); assertEquals(goog.ui.SliderBase.EventType.DRAG_START, event_types[2]); assertTrue(isValueOrExtentDragStart(event_types[3])); assertEquals(goog.ui.Component.EventType.CHANGE, event_types[4]); assertEquals(goog.ui.SliderBase.EventType.DRAG_END, event_types[5]); assertTrue(isValueOrExtentDragEnd(event_types[6])); assertEquals(goog.ui.SliderBase.EventType.DRAG_END, event_types[7]); assertTrue(isValueOrExtentDragEnd(event_types[8])); assertFalse(event_types[1] == event_types[3]); assertFalse(event_types[6] == event_types[8]); // Test that clicking the thumb without moving the mouse does not cause a // CHANGE event between DRAG_START/DRAG_END. event_types = []; goog.testing.events.fireMouseDownEvent(oneThumbSlider.valueThumb); goog.testing.events.fireMouseUpEvent(oneThumbSlider.valueThumb); assertEquals(8, event_types.length); assertEquals(goog.ui.SliderBase.EventType.DRAG_START, event_types[0]); assertTrue(isValueOrExtentDragStart(event_types[1])); assertEquals(goog.ui.SliderBase.EventType.DRAG_START, event_types[2]); assertTrue(isValueOrExtentDragStart(event_types[3])); assertEquals(goog.ui.SliderBase.EventType.DRAG_END, event_types[4]); assertTrue(isValueOrExtentDragEnd(event_types[5])); assertEquals(goog.ui.SliderBase.EventType.DRAG_END, event_types[6]); assertTrue(isValueOrExtentDragEnd(event_types[7])); assertFalse(event_types[1] == event_types[3]); assertFalse(event_types[5] == event_types[7]); // Early listener removal, do not wait for tearDown, to avoid building up // arrays of events unnecessarilly in further tests. goog.events.removeAll(oneThumbSlider); } </script> </body> </html>
knutwalker/google-closure-library
closure/goog/ui/sliderbase_test.html
HTML
apache-2.0
31,256
# Verrucaria floerkeana f. congregata (Hepp) Zahlbr. FORM #### Status ACCEPTED #### According to Index Fungorum #### Published in Cat. Lich. Univers. 1: 40 (1921) #### Original name Verrucaria papillosa f. congregata Hepp ### Remarks null
mdoering/backbone
life/Fungi/Ascomycota/Eurotiomycetes/Verrucariales/Verrucariaceae/Verrucaria/Verrucaria floerkeana/Verrucaria floerkeana congregata/README.md
Markdown
apache-2.0
243
package jp.hashiwa.elasticsearch.authplugin; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.rest.*; import java.util.*; import java.util.regex.Pattern; import java.util.stream.Stream; public class AuthRestHandler implements RestHandler { private final Logger logger = Loggers.getLogger(AuthRestHandler.class); private final RestHandler originalHandler; private final RestResponse unauthorizedResponse = new RestResponse() { @Override public String contentType() { return "application/json"; } @Override public BytesReference content() { return new BytesArray(""); } @Override public RestStatus status() { return RestStatus.UNAUTHORIZED; } }; private final Map<RestRequest.Method, Stream<Pattern>> authPatterns = new HashMap<RestRequest.Method, Stream<Pattern>>() { { this.put(RestRequest.Method.POST, Stream.of( Pattern.compile("^/testindex(/.*)?$") )); this.put(RestRequest.Method.PUT, Stream.of( Pattern.compile("^/testindex(/.*)?$") )); // all methods this.put(null, Stream.of( Pattern.compile("^/adminindex(/.*)?$") )); } }; AuthRestHandler(RestHandler restHandler) { this.originalHandler = restHandler; } @Override public void handleRequest(RestRequest restRequest, RestChannel restChannel, NodeClient nodeClient) throws Exception { this.logger.debug(restRequest.path()); this.logger.debug(restRequest.rawPath()); if (isOk(restRequest)) { this.originalHandler.handleRequest(restRequest, restChannel, nodeClient); } else { restChannel.sendResponse(unauthorizedResponse); } } private boolean needAuth(RestRequest.Method method, String path) { if (authPatterns.containsKey(method)) { Stream<Pattern> patterns = authPatterns.get(method); boolean match = patterns.anyMatch( p -> p.matcher(path).matches() ); return match; } return false; } private boolean isOk(RestRequest restRequest) { RestRequest.Method method = restRequest.method(); String path = restRequest.path(); // use rawpath() ? boolean needAuth = needAuth(method, path) || needAuth(null, path); if (! needAuth) { return true; } for (java.util.Map.Entry<String, String> entry: restRequest.headers()) { String key = entry.getKey(); String value = entry.getValue(); if (key.equals("user") && value.equals("admin")) { return true; } } return false; // ES 5.4 // return restRequest.getHeaders().get("user").equals("admin"); } }
hashiwa000/Elasticsearch-Auth-Plugin
src/jp/hashiwa/elasticsearch/authplugin/AuthRestHandler.java
Java
apache-2.0
2,877
#pragma once #include "il2cpp-config.h" #ifndef _MSC_VER # include <alloca.h> #else # include <malloc.h> #endif #include <stdint.h> // System.Object struct Il2CppObject; // System.IAsyncResult struct IAsyncResult_t537683269; // System.AsyncCallback struct AsyncCallback_t1363551830; #include "mscorlib_System_MulticastDelegate2585444626.h" #include "mscorlib_System_Int322847414787.h" #ifdef __clang__ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Winvalid-offsetof" #pragma clang diagnostic ignored "-Wunused-variable" #endif // System.Collections.Generic.Dictionary`2/Transform`1<System.Int32,System.Object,System.Object> struct Transform_1_t4035712581 : public MulticastDelegate_t2585444626 { public: public: }; #ifdef __clang__ #pragma clang diagnostic pop #endif
moixxsyc/Unity3dLearningDemos
01Dongzuo/Temp/il2cppOutput/il2cppOutput/mscorlib_System_Collections_Generic_Dictionary_2_T4035712581.h
C
apache-2.0
801
/* * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the * License. See accompanying LICENSE file. */ package io.s4.persist; import io.s4.util.clock.Clock; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import org.apache.log4j.Logger; public class ConMapPersister implements Persister { private AtomicInteger persistCount = new AtomicInteger(0); private boolean selfClean = false; private int cleanWaitTime = 40; // 20 seconds by default private String loggerName = "s4"; ConcurrentHashMap<String, CacheEntry> cache; Clock s4Clock; private int startCapacity = 5000; public void setStartCapacity(int startCapacity) { this.startCapacity = startCapacity; } public int getStartCapacity() { return startCapacity; } public void setSelfClean(boolean selfClean) { this.selfClean = selfClean; } public void setCleanWaitTime(int cleanWaitTime) { this.cleanWaitTime = cleanWaitTime; } public void setLoggerName(String loggerName) { this.loggerName = loggerName; } public ConMapPersister(Clock s4Clock) { this.s4Clock = s4Clock; } public void setS4Clock(Clock s4Clock) { this.s4Clock = s4Clock; } public ConMapPersister() { } public void init() { cache = new ConcurrentHashMap<String, CacheEntry>(this.getStartCapacity()); if (selfClean) { Runnable r = new Runnable() { public void run() { while (!Thread.interrupted()) { int cleanCount = ConMapPersister.this.cleanOutGarbage(); Logger.getLogger(loggerName).info("Cleaned out " + cleanCount + " entries; Persister has " + cache.size() + " entries"); try { Thread.sleep(cleanWaitTime * 1000); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } } }; Thread t = new Thread(r); t.start(); t.setPriority(Thread.MIN_PRIORITY); } } public int getQueueSize() { return 0; } public int getPersistCount() { return persistCount.get(); } public int getCacheEntryCount() { return cache.size(); } public void setAsynch(String key, Object value, int period) { // there really is no asynch for the local cache set(key, value, period); } public void set(String key, Object value, int period) { persistCount.getAndIncrement(); CacheEntry ce = new CacheEntry(); ce.value = value; ce.period = period; ce.addTime = s4Clock.getCurrentTime(); cache.put(key, ce); } public Object get(String key) { CacheEntry ce = cache.get(key); if (ce == null) { return null; } if (ce.isExpired()) { return null; } return ce.value; } public Map<String, Object> getBulk(String[] keys) { HashMap map = new HashMap<String, Object>(); for (String key : keys) { Object value = get(key); if (value != null) { map.put(key, value); } } return map; } public Object getObject(String key) { return get(key); } public Map<String, Object> getBulkObjects(String[] keys) { return getBulk(keys); } public void remove(String key) { cache.remove(key); } public int cleanOutGarbage() { int count = 0; for (Enumeration en = cache.keys(); en.hasMoreElements();) { String key = (String) en.nextElement(); CacheEntry ce = cache.get(key); if (ce != null && ce.isExpired()) { count++; cache.remove(key); } } return count; } public Set<String> keySet() { return cache.keySet(); } public class CacheEntry { Object value; long addTime; int period; public boolean isExpired() { if (period > 0) { if ((addTime + (1000 * (long) period)) <= s4Clock.getCurrentTime()) { return true; } } return false; } } }
s4/core
src/main/java/io/s4/persist/ConMapPersister.java
Java
apache-2.0
5,403
--- layout: post title: Spring基础 subtitle: date: 2017-12-19 author: Felix header-img: img/home-bg-art.jpg catalog: true tags: - hibernate --- # 说明 以下内容为初学hibernate的体会,该框架的深入使用还未了解 ## 什么是hibernate 1.hibernate是一个对象关系映射框架,是对JDBC的轻量级封装。基于这两个问题,所以在以下的jar包使用中,要使用到的至少包括hibernate相关包,以及数据库处理相关包。 2.关系和对象的映射,就是把数据库中的关系映射到类的对象中。数据库中存储的数据实际上是一种实体与实体之间的关系。而当数据存储到数据库中时,就是持久化的过程。 ## jar包(-.-指版本号) 以下是学习中用到的jar包 ```swift 1.antlr-.-jar 2.classmate-.-jar 3.dom4j-.-jar 4.hibernate-commons-annotations-.-jar 5.hibernate-core-.-jar 6.hibernate-jpa-.-api-.-jar 7.jandex-.-jar 8.javassist-logging-.-jar 9.jboss-logging-.-jar 10.jboss-transaction-api-.-jar //操作mysql所需jar包 11.mysql-connector-jar-.-jar ``` ## .hbm.xml文件 该文件是用来映射关系与对象(一个pojo类和一个表的映射关系),与需要映射的对象放在同一文件夹下即可,命名方式为:*.hbm.xml,其中*为要映射的类名。 文件头: ```swift <!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd"> ``` 在`<hibernate-mapping>`标签中定义关系的`class,table,id,type,property`等属性 ## config文件 即hibernate.cfg.xml,该文件放置在class目录/src目录下即可, ```swift <!-- 当调用Configuration cfg = new Configuration().configure()时,自动搜索本文件,并将其读取到内存中,作为后续操作的基础配置 --> <!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD 3.0//EN" "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd"> <hibernate-configuration> <!-- 产生操作数据库的session工厂 --> <session-factory> <!-- 连接数据库的URL --> <property name="connection.url">jdbc:mysql://localhost:3306/hiberstudy</property> <!-- 连接数据库的用户名 --> <property name="connection.username">root</property> <!-- 连接数据库的密码 --> <property name="connection.password">123456</property> <!-- 连接数据库的驱动类 --> <property name="connection.diver_class">com.mysql.jdbc.Driver</property> <!-- 数据库方言 --> <property name="hibernate.dialect">org.hibernate.dialect.MySQLDialect</property> <!-- 显示hibernate对数据库操作语句 --> <property name="hibernate.show_sql">true</property> <!-- 自动创建/更新/验证数据库表结构 --> <property name="hibernate.hbm2ddl.auto">create</property> <!-- 批量操作 --> <property name="hibernate.jdbc.batch_size">50</property><!-- 设置批量尺寸 --> <property name="hibernate.cache.use_second_level_cache">false</property><!-- 关闭二级缓存 --> <property name="hibernate.query.factory_class"> org.hibernate.hql.ast.ASTQueryTranslatorFactory</property><!-- 设置HQL/SQL查询翻译器属性 更新/删除操作都需要设置 --> <!-- 类与表的注册文件,下面是自己在学习过程中的注册文件,--> <mapping resource="com/lzf/vo/User.hbm.xml"/> </session-factory> </hibernate-configuration> ``` ## 使用过程 ```swift //整个hibernate程序的启动类,如果config配置文件放置在默认路径下,会自动加载不需要带参数 Configuration cfg = new Configuration(); //获得session对象的工厂,保存了对应当前数据库配置的所用映射 SesstionFactory sessionFactory = cfg.configure().buildSessionFactory(); //准备应用session对象来操作数据库,该接口提供了众多持久化方法,如增删改查(非线程安全) Session session = sessionFactory.openSession(); //事务操作, Transaction t= session.beginTransaction(); //只有commit之后,才会在数据库中得到更新 t.commit(); ```
MichaelLZF/MichaelLZF.github.io
_posts/2018-01-17-Spring-Spring配置文件.md
Markdown
apache-2.0
4,111
# # Copyright 2015, SUSE Linux GmbH # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # shared_examples "a request class" do |with_body| before(:each) do Crowbar::Client::Config.configure( Crowbar::Client::Config.defaults.merge( server: "http://crowbar:80" ) ) end it "provides a method value" do expect(subject.method).to( eq(method) ) end it "provides a specific url" do expect(subject.url).to( eq(url) ) end it "provides a valid payload" do expect(subject.content).to( eq(params) ) end it "submits payload to an API" do content = if with_body params else "" end allow(Crowbar::Client::Request::Rest).to receive(:new).and_return( Crowbar::Client::Request::Rest.new( url: url, auth_type: nil ) ) stub_request( method, "http://crowbar:80/#{url}" ).to_return( status: 200, body: "", headers: {} ) subject.process expect( Crowbar::Client::Request::Rest.new(url: url).send( method, content ).code ).to eq(200) end end
crowbar/crowbar-client
spec/support/request_examples.rb
Ruby
apache-2.0
1,652
--TEST-- swoole_server/ssl: dtls --SKIPIF-- <?php require __DIR__ . '/../../include/skipif.inc'; ?> --FILE-- <?php require __DIR__ . '/../../include/bootstrap.php'; $pm = new SwooleTest\ProcessManager; $pm->parentFunc = function ($pid) use ($pm) { $client = new Swoole\Client(SWOOLE_SOCK_UDP | SWOOLE_SSL, SWOOLE_SOCK_SYNC); //同步阻塞 if (!$client->connect('127.0.0.1', $pm->getFreePort())) { exit("connect failed\n"); } $client->send("hello world"); Assert::same($client->recv(), "Swoole hello world"); $pm->kill(); }; $pm->childFunc = function () use ($pm) { $serv = new Swoole\Server('127.0.0.1', $pm->getFreePort(), SWOOLE_BASE, SWOOLE_SOCK_UDP | SWOOLE_SSL); $serv->set([ 'log_file' => '/dev/null', 'ssl_cert_file' => SSL_FILE_DIR . '/server.crt', 'ssl_key_file' => SSL_FILE_DIR . '/server.key', ]); $serv->on("workerStart", function ($serv) use ($pm) { $pm->wakeup(); }); $serv->on('receive', function ($serv, $fd, $tid, $data) { $serv->send($fd, "Swoole $data"); }); $serv->on('packet', function ($serv, $fd, $tid, $data) { $serv->send($fd, "Swoole $data"); }); $serv->start(); }; $pm->childFirst(); $pm->run(); ?> --EXPECT--
swoole/swoole-src
tests/swoole_server/ssl/dtls.phpt
PHP
apache-2.0
1,266
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orchestration/airflow/service/v1/environments.proto package com.google.cloud.orchestration.airflow.service.v1; /** * * * <pre> * The configuration of Cloud SQL instance that is used by the Apache Airflow * software. * </pre> * * Protobuf type {@code google.cloud.orchestration.airflow.service.v1.DatabaseConfig} */ public final class DatabaseConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1.DatabaseConfig) DatabaseConfigOrBuilder { private static final long serialVersionUID = 0L; // Use DatabaseConfig.newBuilder() to construct. private DatabaseConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DatabaseConfig() { machineType_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DatabaseConfig(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DatabaseConfig( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); machineType_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_DatabaseConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_DatabaseConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.class, com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.Builder.class); } public static final int MACHINE_TYPE_FIELD_NUMBER = 1; private volatile java.lang.Object machineType_; /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The machineType. */ @java.lang.Override public java.lang.String getMachineType() { java.lang.Object ref = machineType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); machineType_ = s; return s; } } /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for machineType. */ @java.lang.Override public com.google.protobuf.ByteString getMachineTypeBytes() { java.lang.Object ref = machineType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); machineType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(machineType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, machineType_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(machineType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, machineType_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig)) { return super.equals(obj); } com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig other = (com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig) obj; if (!getMachineType().equals(other.getMachineType())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + MACHINE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getMachineType().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The configuration of Cloud SQL instance that is used by the Apache Airflow * software. * </pre> * * Protobuf type {@code google.cloud.orchestration.airflow.service.v1.DatabaseConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1.DatabaseConfig) com.google.cloud.orchestration.airflow.service.v1.DatabaseConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_DatabaseConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_DatabaseConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.class, com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.Builder.class); } // Construct using com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); machineType_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1_DatabaseConfig_descriptor; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig getDefaultInstanceForType() { return com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig build() { com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig buildPartial() { com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig result = new com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig(this); result.machineType_ = machineType_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig) { return mergeFrom((com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig other) { if (other == com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig.getDefaultInstance()) return this; if (!other.getMachineType().isEmpty()) { machineType_ = other.machineType_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object machineType_ = ""; /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The machineType. */ public java.lang.String getMachineType() { java.lang.Object ref = machineType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); machineType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for machineType. */ public com.google.protobuf.ByteString getMachineTypeBytes() { java.lang.Object ref = machineType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); machineType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The machineType to set. * @return This builder for chaining. */ public Builder setMachineType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } machineType_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearMachineType() { machineType_ = getDefaultInstance().getMachineType(); onChanged(); return this; } /** * * * <pre> * Optional. Cloud SQL machine type used by Airflow database. * It has to be one of: db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 * or db-n1-standard-16. If not specified, db-n1-standard-2 will be used. * </pre> * * <code>string machine_type = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for machineType to set. * @return This builder for chaining. */ public Builder setMachineTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); machineType_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1.DatabaseConfig) } // @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1.DatabaseConfig) private static final com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig(); } public static com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DatabaseConfig> PARSER = new com.google.protobuf.AbstractParser<DatabaseConfig>() { @java.lang.Override public DatabaseConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DatabaseConfig(input, extensionRegistry); } }; public static com.google.protobuf.Parser<DatabaseConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DatabaseConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1.DatabaseConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/java-orchestration-airflow
proto-google-cloud-orchestration-airflow-v1/src/main/java/com/google/cloud/orchestration/airflow/service/v1/DatabaseConfig.java
Java
apache-2.0
23,250
/******************************************************************************* * Copyright 2016 Francesco Calimeri, Davide Fusca', Simona Perri and Jessica Zangari * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ /* * GroundingPreferences.h * * Created on: Mar 10, 2016 * Author: jessica */ #ifndef SRC_GROUNDER_STATEMENT_GROUNDINGPREFERENCES_H_ #define SRC_GROUNDER_STATEMENT_GROUNDINGPREFERENCES_H_ #include <unordered_map> #include "Rule.h" #include <iostream> #include <vector> #include <list> using namespace std; namespace DLV2 { namespace grounder { struct HashAtomPointer{ inline size_t operator()(Atom* obj) const { return size_t(obj); } inline bool operator()(Atom* obj1, Atom* obj2) const { return obj1==obj2; } }; enum AnnotationsError {OK, ATOM_NOT_PRESENT, ARITY_ERROR, CONFLICT_FOUND}; typedef unordered_map<Atom*,vector<unsigned>,HashAtomPointer,HashAtomPointer> unordered_map_pointers_atom_arguments; typedef unordered_map<Atom*,vector<unsigned>,HashForTable<Atom>,HashForTable<Atom>> unordered_map_atom_arguments; class GroundingPreferences { public: bool addRuleOrderingType(Rule* rule, unsigned orderingType); bool addRuleProjectionType(Rule* rule, unsigned pType){ rulesProjectionTypes.insert({rule->getIndex(), pType}); return true; } void addRuleRewArith(Rule* rule){ rulesRewArith.insert(rule->getIndex()); } void addRuleLookAhead(Rule* rule){ rulesLookAhead.insert(rule->getIndex()); } void addRuleAlignSubstitutions(Rule* rule){ rulesAlignSubstitutions.insert(rule->getIndex()); } AnnotationsError addRuleAtomIndexingSetting(Rule* rule, Atom* atom, vector<unsigned>& arguments); void addRulePartialOrder(Rule* rule){rulesPartialOrders[rule->getIndex()].emplace_back();rulesPartialOrdersAtoms[rule->getIndex()].emplace_back();} AnnotationsError addRulePartialOrderAtom(Rule* rule, Atom* atom); AnnotationsError checkRulePartialOrderConflicts(Rule* rule); AnnotationsError applyRulePartialOrder(Rule* rule); bool addGlobalOrderingType(unsigned orderingType); void addGlobalAtomIndexingSetting(Atom* atom, vector<unsigned>& arguments); void addGlobalPartialOrder(){ globalPartialOrdersAtoms.emplace_back();} void addGlobalPartialOrderAtomStart(Atom* atom); void addGlobalPartialOrderAtomEnd(Atom* atom); int getOrderingType(Rule* r) ; pair<bool,int> getProjectionType(Rule* r){ auto i =r->getIndex(); if(rulesProjectionTypes.count(i)) return {true,rulesProjectionTypes[i]}; return {false,-1}; } bool getRewArith(Rule* r){ return rulesRewArith.count(r->getIndex()); } bool getLookAhead(Rule* r){ return rulesLookAhead.count(r->getIndex()); } bool getAlignSubstitutions(Rule* r){ return rulesAlignSubstitutions.count(r->getIndex()); } bool checkPartialOrder(Rule* rule,unsigned atomPosition,const list<unsigned>& atoms) ; bool checkAtomIndexed(Rule* rule,Atom* atom,const vector<unsigned>& possibileArgs, vector<unsigned>& idxTerms) ; static GroundingPreferences* getGroundingPreferences() { if(groundingPreferences==0) groundingPreferences=new GroundingPreferences(); return groundingPreferences; } ~GroundingPreferences(){}; static void freeInstance(){ delete groundingPreferences;} static void checkIfAtomIsPresentInRule(Rule* rule, Atom* atom, vector<unsigned>& positions); void print(Rule* rule) const; private: unordered_map<unsigned,unsigned> rulesOrderingTypes; unordered_map<unsigned,unsigned> rulesProjectionTypes; unordered_set<unsigned> rulesRewArith; unordered_set<unsigned> rulesLookAhead; unordered_set<unsigned> rulesAlignSubstitutions; unordered_map<unsigned,unordered_map_pointers_atom_arguments> rulesAtomsIndexed; unordered_map<unsigned,vector<vector<bool>>> rulesPartialOrders; unordered_map<unsigned,vector<vector<Atom*>>> rulesPartialOrdersAtoms; int globalOrderingType; unordered_map_atom_arguments globalAtomsIndexed; vector<list<Atom*>> globalPartialOrdersAtoms; bool applayedGlobalAnnotations; bool applyGlobalAtomIndexingSetting(); bool applyGlobalPartialOrder(); void setGlobalAnnotations(); GroundingPreferences():globalOrderingType(-1),applayedGlobalAnnotations(false){}; static GroundingPreferences* groundingPreferences; }; } /* namespace grounder */ } /* namespace DLV2 */ #endif /* SRC_GROUNDER_STATEMENT_GROUNDINGPREFERENCES_H_ */
DeMaCS-UNICAL/I-DLV
src/grounder/statement/GroundingPreferences.h
C
apache-2.0
4,920
# Zymonema album C.W. Dodge, 1935 SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Fungi/Ascomycota/Saccharomycetes/Saccharomycetales/Candida/Candida albicans/ Syn. Zymonema album/README.md
Markdown
apache-2.0
188
package com.bagri.server.hazelcast.task.schema; import static com.bagri.core.Constants.pn_schema_password; import static com.bagri.server.hazelcast.serialize.TaskSerializationFactory.cli_UpdateSchemaTask; import static com.bagri.support.security.Encryptor.encrypt; import java.io.IOException; import java.util.Properties; import java.util.Map.Entry; import com.bagri.core.system.Schema; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; import com.hazelcast.nio.serialization.IdentifiedDataSerializable; public class SchemaUpdater extends SchemaProcessor implements IdentifiedDataSerializable { private boolean override; private Properties properties; public SchemaUpdater() { // } public SchemaUpdater(int version, String admin, boolean override, Properties properties) { super(version, admin); this.override = override; this.properties = properties; } @Override public Object process(Entry<String, Schema> entry) { logger.debug("process.enter; entry: {}", entry); if (entry.getValue() != null) { Schema schema = entry.getValue(); if (schema.getVersion() == getVersion()) { //if (schema.isActive()) { // if (denitSchemaInCluster(schema) > 0) { // don't go further // return null; // } //} if (override) { String pwd = properties.getProperty(pn_schema_password); if (pwd != null) { properties.setProperty(pn_schema_password, encrypt(pwd)); } schema.setProperties(properties); } else { for (String name: properties.stringPropertyNames()) { String value = properties.getProperty(name); if (pn_schema_password.equals(name)) { value = encrypt(value); } schema.setProperty(name, value); } } //if (schema.isActive()) { // if (initSchemaInCluster(schema) == 0) { // schema.setActive(false); // } //} schema.updateVersion(getAdmin()); entry.setValue(schema); auditEntity(AuditType.update, schema); return schema; } } return null; } @Override public int getId() { return cli_UpdateSchemaTask; } @Override public void readData(ObjectDataInput in) throws IOException { super.readData(in); override = in.readBoolean(); properties = in.readObject(); } @Override public void writeData(ObjectDataOutput out) throws IOException { super.writeData(out); out.writeBoolean(override); out.writeObject(properties); } }
dsukhoroslov/bagri
bagri-server/bagri-server-hazelcast/src/main/java/com/bagri/server/hazelcast/task/schema/SchemaUpdater.java
Java
apache-2.0
2,552
package uk.co.bluegecko.core.swing.table.rendering; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.awt.Color; import java.awt.Font; import org.junit.Before; import org.junit.Test; public class RenderingHintTest { private Font font; private Color color; @Before public final void setUp() { font = Font.decode( "Monospaced-12" ); color = new Color( 0x808080 ); } @Test public final void testWeightExceeds() { final FontHint min = new FontHint( HintWeight.MIN_WEIGHT ); final FontHint low = new FontHint( HintWeight.LOW_WEIGHT ); final FontHint def = new FontHint( HintWeight.DEFAULT_WEIGHT ); final FontHint selected = new FontHint( HintWeight.SELECTED_WEIGHT ); final FontHint high = new FontHint( HintWeight.HIGH_WEIGHT ); final FontHint focused = new FontHint( HintWeight.FOCUSED_WEIGHT ); final FontHint max = new FontHint( HintWeight.MAX_WEIGHT ); assertFalse( "min-min", min.exceeds( min ) ); assertFalse( "min-low", min.exceeds( low ) ); assertTrue( "low-min", low.exceeds( min ) ); assertTrue( "default-low", def.exceeds( low ) ); assertTrue( "selected-default", selected.exceeds( def ) ); assertTrue( "high-selected", high.exceeds( selected ) ); assertTrue( "focused-high", focused.exceeds( high ) ); assertTrue( "max-focused", max.exceeds( focused ) ); } @Test public final void testGetValueNone() { assertEquals( font, new FontHint( HintWeight.MAX_WEIGHT ).getValue( font ) ); assertNull( new FontHint( HintWeight.MAX_WEIGHT ).getValue() ); } @Test public final void testGetValueNonDerived() { final Font value = Font.decode( "Monospaced-BOLD-14" ); assertEquals( value, new FontHint( HintWeight.MAX_WEIGHT, value ).getValue( font ) ); assertEquals( value, new FontHint( HintWeight.MAX_WEIGHT, value ).getValue() ); } @Test public final void testGetValueDerived() { final Font value = Font.decode( "Monospaced-14" ); final FontHint fontHint = new FontHint( HintWeight.MAX_WEIGHT ) { private static final long serialVersionUID = 1L; @Override protected Font derive( final Font original ) { return original.deriveFont( 14.0f ); } }; assertEquals( value, fontHint.getValue( font ) ); assertNull( fontHint.getValue() ); } @Test public final void testFontHintSize() { final Font value = Font.decode( "Monospaced-14" ); assertEquals( value, FontHint.size( HintWeight.MAX_WEIGHT, 14 ) .getValue( font ) ); } @Test public final void testFontHintLarger() { final Font value = Font.decode( "Monospaced-14" ); assertEquals( value, FontHint.larger( HintWeight.MAX_WEIGHT, 2 ) .getValue( font ) ); } @Test public final void testFontHintSmaller() { final Font value = Font.decode( "Monospaced-10" ); assertEquals( value, FontHint.smaller( HintWeight.MAX_WEIGHT, 2 ) .getValue( font ) ); } @Test public final void testFontHintScaled() { final Font value = Font.decode( "Monospaced-6" ); assertEquals( value, FontHint.scaled( HintWeight.MAX_WEIGHT, 0.5f ) .getValue( font ) ); } @Test public final void testFontHintStyle() { final Font value = Font.decode( "Monospaced-BOLD-12" ); assertEquals( value, FontHint.style( HintWeight.MAX_WEIGHT, Font.BOLD ) .getValue( font ) ); } @Test public final void testFontHintStyleAndSize() { final Font value = Font.decode( "Monospaced-BOLD-14" ); assertEquals( value, FontHint.style( HintWeight.MAX_WEIGHT, Font.BOLD, 14 ) .getValue( font ) ); } @Test public final void testForegroundHintDarker() { final Color value = new Color( 0x595959 ); assertEquals( value, ForegroundHint.darker( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testForegroundHintBrighter() { final Color value = new Color( 0xB6B6B6 ); assertEquals( value, ForegroundHint.brighter( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testBackgroundHintDarker() { final Color value = new Color( 0x595959 ); assertEquals( value, BackgroundHint.darker( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } @Test public final void testBackgroundHintBrighter() { final Color value = new Color( 0xB6B6B6 ); assertEquals( value, BackgroundHint.brighter( HintWeight.MAX_WEIGHT ) .getValue( color ) ); } }
caveman-frak/java-core
core-swing/src/test/java/uk/co/bluegecko/core/swing/table/rendering/RenderingHintTest.java
Java
apache-2.0
4,586
# AUTOGENERATED FILE FROM balenalib/beaglebone-green-gateway-ubuntu:cosmic-build # remove several traces of debian python RUN apt-get purge -y python.* # http://bugs.python.org/issue19846 # > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. ENV LANG C.UTF-8 # key 63C7CC90: public key "Simon McVittie <[email protected]>" imported # key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \ && gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \ && gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059 ENV PYTHON_VERSION 3.5.10 # if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'" ENV PYTHON_PIP_VERSION 21.0.1 ENV SETUPTOOLS_VERSION 56.0.0 RUN set -x \ && curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" \ && echo "4abc87b995e08c143de14f26d8ab6ffd9017aad400bf91bc36a802efda7fe27a Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" | sha256sum -c - \ && tar -xzf "Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" --strip-components=1 \ && rm -rf "Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" \ && ldconfig \ && if [ ! -e /usr/local/bin/pip3 ]; then : \ && curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \ && echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \ && python3 get-pip.py \ && rm get-pip.py \ ; fi \ && pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \ && find /usr/local \ \( -type d -a -name test -o -name tests \) \ -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \ -exec rm -rf '{}' + \ && cd / \ && rm -rf /usr/src/python ~/.cache # install "virtualenv", since the vast majority of users of this image will want it RUN pip3 install --no-cache-dir virtualenv ENV PYTHON_DBUS_VERSION 1.2.8 # install dbus-python dependencies RUN apt-get update && apt-get install -y --no-install-recommends \ libdbus-1-dev \ libdbus-glib-1-dev \ && rm -rf /var/lib/apt/lists/* \ && apt-get -y autoremove # install dbus-python RUN set -x \ && mkdir -p /usr/src/dbus-python \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \ && gpg --verify dbus-python.tar.gz.asc \ && tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \ && rm dbus-python.tar.gz* \ && cd /usr/src/dbus-python \ && PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \ && make -j$(nproc) \ && make install -j$(nproc) \ && cd / \ && rm -rf /usr/src/dbus-python # make some useful symlinks that are expected to exist RUN cd /usr/local/bin \ && ln -sf pip3 pip \ && { [ -e easy_install ] || ln -s easy_install-* easy_install; } \ && ln -sf idle3 idle \ && ln -sf pydoc3 pydoc \ && ln -sf python3 python \ && ln -sf python3-config python-config # set PYTHONPATH to point to dist-packages ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@python" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu cosmic \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.5.10, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
nghiant2710/base-images
balena-base-images/python/beaglebone-green-gateway/ubuntu/cosmic/3.5.10/build/Dockerfile
Dockerfile
apache-2.0
4,847
/* * Copyright 2015-2016 DevCon5 GmbH, [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.devcon5.cli; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import org.junit.Test; /** */ public class CLIExample { @CliOption(value = "x", hasArg = true) private String example; @CliOptionGroup private Structured credentials; private String postProcessed; @PostInject private void init(){ postProcessed = "an " + example; } @Test public void example() { //arrange String[] exampleArgs = {"-u", "hans", "-p", "wurst", "-x", "example"}; //act CLI.parse(exampleArgs).into(this); run(); //assert assertEquals("an example", postProcessed); } public void run() { assertThat(example, is(not(nullValue()))); assertThat(credentials.user, is(not(nullValue()))); assertThat(credentials.password, is(not(nullValue()))); } static class Structured { @CliOption(value = "u", hasArg = true) private String user; @CliOption(value = "p", hasArg = true) private String password; } }
devcon5io/common
cli/src/test/java/io/devcon5/cli/CLIExample.java
Java
apache-2.0
1,919
# -*- coding: utf-8 -*- """ Authors: Tim Hessels UNESCO-IHE 2016 Contact: [email protected] Repository: https://github.com/wateraccounting/wa Module: Collect/MOD17 Description: This module downloads MOD17 GPP data from http://e4ftl01.cr.usgs.gov/. Use the MOD17.GPP_8daily function to download and create 8 daily GPP images in Gtiff format. The data is available between 2000-02-18 till present. Examples: from wa.Collect import MOD17 MOD17.GPP_8daily(Dir='C:/Temp3/', Startdate='2003-12-01', Enddate='2003-12-20', latlim=[41, 45], lonlim=[-8, -5]) MOD17.NPP_yearly(Dir='C:/Temp3/', Startdate='2003-12-01', Enddate='2003-12-20', latlim=[41, 45], lonlim=[-8, -5]) """ from .GPP_8daily import main as GPP_8daily from .NPP_yearly import main as NPP_yearly __all__ = ['GPP_8daily', 'NPP_yearly'] __version__ = '0.1'
wateraccounting/wa
Collect/MOD17/__init__.py
Python
apache-2.0
860
/* * Copyright 2015-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.docksidestage.app.web.product; import org.apache.commons.lang3.builder.ToStringBuilder; import org.docksidestage.dbflute.allcommon.CDef; import org.hibernate.validator.constraints.Length; /** * @author jflute */ public class ProductSearchForm { @Length(max = 10) // #simple_for_example just for validtion example public String productName; public CDef.ProductStatus productStatus; @Length(max = 5) // #simple_for_example just for validtion example public String purchaseMemberName; @Override public String toString() { return ToStringBuilder.reflectionToString(this); } }
dbflute-session/lastaflute-test-catalog
src/main/java/org/docksidestage/app/web/product/ProductSearchForm.java
Java
apache-2.0
1,252
/** * @fileoverview Defines compressors utility methods. * * @see https://google.github.io/styleguide/javascriptguide.xml * @see https://developers.google.com/closure/compiler/docs/js-for-compiler * @module glize/compressors */ import * as lzw from 'lzw-compressor'; /** * Enumeration of available compression types. * @enum {string} */ export const TYPE = { LZW: 'lzw' }; /** * Compress data string using specified compression type. * @param {string} data Data to compress. * @param {string=} [opt_type=TYPE.LZW] Optional compression type. * @return {string} Returns compressed data. * @method * @example * const result = compress( * 'Any string of any length. Any string of any length. Any string of any length.'); * console.log(result); * //> Any string of aā leĈth. ĀĂĄĆĈĊČĎĂđēĕėďĚćĉċčďġgĔ. */ export const compress = (data, opt_type = TYPE.LZW) => { let result = ''; if (TYPE.LZW === opt_type) { result = lzw.compress(data); } return result; }; /** * Decompress data string using specified compression type. * @param {string} data Data to compress. * @param {string=} [opt_type=TYPE.LZW] Optional compression type. * @return {string} Returns compressed data. * @method * @example * const result = decompress('Any string of aā leĈth. ĀĂĄĆĈĊČĎĂđēĕėďĚćĉċčďġgĔ.'); * console.log(result); * //> Any string of any length. Any string of any length. Any string of any length. */ export const decompress = (data, opt_type = TYPE.LZW) => { let result = ''; if (TYPE.LZW === opt_type) { result = lzw.decompress(data); } return result; };
Datamart/Glize
src/compressors/index.js
JavaScript
apache-2.0
1,646
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use thread::Mutex; use mem::{replace, transmute}; use kinds::{Freeze, Send, marker}; use clone::{Clone, DeepClone}; use ops::Drop; use cmp::{Eq, Ord}; use atomic::{atomic_fence_acq, atomic_xadd_relaxed, atomic_xsub_rel}; struct ArcBox<T> { value: T, count: int } #[unsafe_no_drop_flag] pub struct Arc<T> { ptr: *mut ArcBox<T> } impl<T: Send + Freeze> Arc<T> { #[inline(always)] pub fn new(value: T) -> Arc<T> { unsafe { Arc::new_unchecked(value) } } } impl<T> Arc<T> { pub unsafe fn new_unchecked(value: T) -> Arc<T> { Arc{ptr: transmute(~ArcBox{value: value, count: 1})} } } impl<T> Arc<T> { #[inline(always)] pub fn borrow<'a>(&'a self) -> &'a T { unsafe { &(*self.ptr).value } } } // Reasoning behind the atomic memory ordering: // http://www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html #[unsafe_destructor] impl<T> Drop for Arc<T> { fn drop(&mut self) { if self.ptr != 0 as *mut ArcBox<T> { unsafe { if atomic_xsub_rel(&mut (*self.ptr).count, 1) == 1 { atomic_fence_acq(); let _: ~ArcBox<T> = transmute(self.ptr); } } } } } impl<T> Clone for Arc<T> { fn clone(&self) -> Arc<T> { unsafe { atomic_xadd_relaxed(&mut (*self.ptr).count, 1); Arc { ptr: self.ptr } } } } impl<T: DeepClone> DeepClone for Arc<T> { fn deep_clone(&self) -> Arc<T> { unsafe { Arc::new_unchecked(self.borrow().deep_clone()) } } } impl<T: Eq> Eq for Arc<T> { #[inline(always)] fn eq(&self, other: &Arc<T>) -> bool { *self.borrow() == *other.borrow() } #[inline(always)] fn ne(&self, other: &Arc<T>) -> bool { *self.borrow() != *other.borrow() } } impl<T: Ord> Ord for Arc<T> { #[inline(always)] fn lt(&self, other: &Arc<T>) -> bool { *self.borrow() < *other.borrow() } #[inline(always)] fn le(&self, other: &Arc<T>) -> bool { *self.borrow() <= *other.borrow() } #[inline(always)] fn gt(&self, other: &Arc<T>) -> bool { *self.borrow() > *other.borrow() } #[inline(always)] fn ge(&self, other: &Arc<T>) -> bool { *self.borrow() >= *other.borrow() } } struct MutexArcBox<T> { mutex: Mutex, value: T, no_freeze: marker::NoFreeze } pub struct MutexArc<T> { ptr: Arc<MutexArcBox<T>> } impl<T: Send> MutexArc<T> { pub fn new(value: T) -> MutexArc<T> { let b = MutexArcBox { mutex: Mutex::new(), value: value, no_freeze: marker::NoFreeze }; unsafe { MutexArc { ptr: Arc::new_unchecked(b) } } } pub fn swap(&self, value: T) -> T { unsafe { let ptr: &mut MutexArcBox<T> = transmute(self.ptr.borrow()); let _guard = ptr.mutex.lock_guard(); replace(&mut ptr.value, value) } } } impl<T> Clone for MutexArc<T> { #[inline(always)] fn clone(&self) -> MutexArc<T> { MutexArc { ptr: self.ptr.clone() } } }
thestinger/rust-core
core/arc.rs
Rust
apache-2.0
3,527
# AUTOGENERATED FILE FROM balenalib/imx8mm-var-dart-fedora:33-build ENV NODE_VERSION 15.6.0 ENV YARN_VERSION 1.22.4 RUN for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \ && echo "b0660398fe590f8588431a787e9b032c7271a2fa88306c7a26e751571df998e4 node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@node" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 33 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
nghiant2710/base-images
balena-base-images/node/imx8mm-var-dart/fedora/33/15.6.0/build/Dockerfile
Dockerfile
apache-2.0
2,756
package com.flora.support; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.VelocityEngine; import org.apache.velocity.context.Context; import com.flora.Config; public class VelocityTemplate { private VelocityEngine velocityEngine; private Config config; public VelocityTemplate(){ } public String parseTemplate(String template, Map model){ model.putAll(Config.getPageTools()); Context context = new VelocityContext(model); Writer writer = new StringWriter(); try { velocityEngine.mergeTemplate(template, "UTF-8", context, writer); } catch (Exception e) { } return writer.toString(); } public void parseTemplate(String template, Map model, Writer writer){ model.putAll(Config.getPageTools()); Context context = new VelocityContext(model); try { velocityEngine.mergeTemplate(template, "UTF-8", context, writer); } catch (Exception e) { } } public void parseTemplate(String template, Map model, OutputStream os){ model.putAll(Config.getPageTools()); Context context = new VelocityContext(model); Writer writer = new OutputStreamWriter(os); try { velocityEngine.mergeTemplate(template, "UTF-8", context, writer); } catch (Exception e) { } } public void setVelocityEngine(VelocityEngine velocityEngine) { this.velocityEngine = velocityEngine; } public Config getConfig() { return config; } public void setConfig(Config config) { this.config = config; } }
liqilun/flora
src/main/java/com/flora/support/VelocityTemplate.java
Java
apache-2.0
1,644
package org.apache.lucene.search; /** * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; /** * Subclass of FilteredTermEnum for enumerating all terms that match the * specified wildcard filter term. * <p> * Term enumerations are always ordered by Term.compareTo(). Each term in * the enumeration is greater than all that precede it. * * @version $Id: WildcardTermEnum.java 329859 2005-10-31 17:05:36Z bmesser $ */ public class WildcardTermEnum extends FilteredTermEnum { Term searchTerm; String field = ""; String text = ""; String pre = ""; int preLen = 0; boolean endEnum = false; /** * Creates a new <code>WildcardTermEnum</code>. Passing in a * {@link org.apache.lucene.index.Term Term} that does not contain a * <code>WILDCARD_CHAR</code> will cause an exception to be thrown. * <p> * After calling the constructor the enumeration is already pointing to the first * valid term if such a term exists. */ public WildcardTermEnum(IndexReader reader, Term term) throws IOException { super(); searchTerm = term; field = searchTerm.field(); text = searchTerm.text(); int sidx = text.indexOf(WILDCARD_STRING); int cidx = text.indexOf(WILDCARD_CHAR); int idx = sidx; if (idx == -1) { idx = cidx; } else if (cidx >= 0) { idx = Math.min(idx, cidx); } pre = searchTerm.text().substring(0,idx); preLen = pre.length(); text = text.substring(preLen); setEnum(reader.terms(new Term(searchTerm.field(), pre))); } protected final boolean termCompare(Term term) { if (field == term.field()) { String searchText = term.text(); if (searchText.startsWith(pre)) { return wildcardEquals(text, 0, searchText, preLen); } } endEnum = true; return false; } public final float difference() { return 1.0f; } public final boolean endEnum() { return endEnum; } /******************************************** * String equality with support for wildcards ********************************************/ public static final char WILDCARD_STRING = '*'; public static final char WILDCARD_CHAR = '?'; /** * Determines if a word matches a wildcard pattern. * <small>Work released by Granta Design Ltd after originally being done on * company time.</small> */ public static final boolean wildcardEquals(String pattern, int patternIdx, String string, int stringIdx) { int p = patternIdx; for (int s = stringIdx; ; ++p, ++s) { // End of string yet? boolean sEnd = (s >= string.length()); // End of pattern yet? boolean pEnd = (p >= pattern.length()); // If we're looking at the end of the string... if (sEnd) { // Assume the only thing left on the pattern is/are wildcards boolean justWildcardsLeft = true; // Current wildcard position int wildcardSearchPos = p; // While we haven't found the end of the pattern, // and haven't encountered any non-wildcard characters while (wildcardSearchPos < pattern.length() && justWildcardsLeft) { // Check the character at the current position char wildchar = pattern.charAt(wildcardSearchPos); // If it's not a wildcard character, then there is more // pattern information after this/these wildcards. if (wildchar != WILDCARD_CHAR && wildchar != WILDCARD_STRING) { justWildcardsLeft = false; } else { // to prevent "cat" matches "ca??" if (wildchar == WILDCARD_CHAR) { return false; } // Look at the next character wildcardSearchPos++; } } // This was a prefix wildcard search, and we've matched, so // return true. if (justWildcardsLeft) { return true; } } // If we've gone past the end of the string, or the pattern, // return false. if (sEnd || pEnd) { break; } // Match a single character, so continue. if (pattern.charAt(p) == WILDCARD_CHAR) { continue; } // if (pattern.charAt(p) == WILDCARD_STRING) { // Look at the character beyond the '*'. ++p; // Examine the string, starting at the last character. for (int i = string.length(); i >= s; --i) { if (wildcardEquals(pattern, p, string, i)) { return true; } } break; } if (pattern.charAt(p) != string.charAt(s)) { break; } } return false; } public void close() throws IOException { super.close(); searchTerm = null; field = null; text = null; } }
lpxz/grail-lucene358684
src/java/org/apache/lucene/search/WildcardTermEnum.java
Java
apache-2.0
5,708
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 #![forbid(unsafe_code)] //! Test infrastructure for the Diem VM. //! //! This crate contains helpers for executing tests against the Diem VM. use diem_types::{transaction::TransactionStatus, vm_status::KeptVMStatus}; pub mod account; pub mod account_universe; pub mod common_transactions; pub mod compile; pub mod currencies; pub mod data_store; pub mod execution_strategies; pub mod executor; pub mod gas_costs; mod golden_outputs; pub mod keygen; mod proptest_types; pub fn assert_status_eq(s1: &KeptVMStatus, s2: &KeptVMStatus) -> bool { assert_eq!(s1, s2); true } pub fn transaction_status_eq(t1: &TransactionStatus, t2: &TransactionStatus) -> bool { match (t1, t2) { (TransactionStatus::Discard(s1), TransactionStatus::Discard(s2)) => { assert_eq!(s1, s2); true } (TransactionStatus::Keep(s1), TransactionStatus::Keep(s2)) => { assert_eq!(s1, s2); true } _ => false, } } #[macro_export] macro_rules! assert_prologue_parity { ($e1:expr, $e2:expr, $e3:expr) => { assert_eq!($e1.unwrap(), $e3); assert!(transaction_status_eq($e2, &TransactionStatus::Discard($e3))); }; } #[macro_export] macro_rules! assert_prologue_disparity { ($e1:expr => $e2:expr, $e3:expr => $e4:expr) => { assert_eq!($e1, $e2); assert!(transaction_status_eq($e3, &$e4)); }; } /// Returns the name of the current function. This macro is used to derive the name for the golden /// file of each test case. #[macro_export] macro_rules! current_function_name { () => {{ fn f() {} fn type_name_of<T>(_: T) -> &'static str { std::any::type_name::<T>() } let name = type_name_of(f); &name[..name.len() - 3] }}; }
libra/libra
language/testing-infra/e2e-tests/src/lib.rs
Rust
apache-2.0
1,883
# Carlina rhopalachyron Cadevall & Sallent SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Carlina/Carlina rhopalachyron/README.md
Markdown
apache-2.0
190
package com.github.ayltai.foscam.client; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import android.support.annotation.NonNull; import android.support.annotation.VisibleForTesting; import android.support.v4.util.Pair; import rx.Subscriber; import rx.Subscription; import rx.subjects.PublishSubject; import rx.subjects.SerializedSubject; import rx.subjects.Subject; public /* final */ class RxBus { private static final ThreadLocal<RxBus> INSTANCE = new ThreadLocal<>(); private final Map<Pair<Class, Subscriber>, Subscription> subscriptions = new HashMap<>(); private final Subject<Object, ?> bus = new SerializedSubject<>(PublishSubject.create()); public static RxBus getInstance() { final RxBus instance = RxBus.INSTANCE.get(); if (instance == null) { RxBus.INSTANCE.set(new RxBus()); return RxBus.INSTANCE.get(); } return instance; } @VisibleForTesting RxBus() { } public <T> void register(@NonNull final Class<T> eventType, @NonNull final Subscriber<T> subscriber) { final Pair<Class, Subscriber> key = Pair.create(eventType, subscriber); if (this.subscriptions.containsKey(key)) throw new IllegalArgumentException("The given subscriber is already registered"); this.subscriptions.put(key, this.bus.filter(event -> event != null && event.getClass().equals(eventType)).subscribe(value -> subscriber.onNext((T)value))); } public <T> void unregister(@NonNull final Class<T> eventType, @NonNull final Subscriber<T> subscriber) { final Pair<Class, Subscriber> key = Pair.create(eventType, subscriber); if (this.subscriptions.containsKey(key)) this.subscriptions.remove(key).unsubscribe(); } public void unregisterAll() { for (final Pair<Class, Subscriber> pair : new HashSet<>(this.subscriptions.keySet())) { this.unregister(pair.first, pair.second); } } public <T> void send(@NonNull final T event) { if (!this.subscriptions.isEmpty()) this.bus.onNext(event); } }
ayltai/Foscam-CGI-Client
app/src/main/java/com/github/ayltai/foscam/client/RxBus.java
Java
apache-2.0
2,141
# Copyright 2019 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Term aggregations.""" from __future__ import unicode_literals from timesketch.lib.aggregators import manager from timesketch.lib.aggregators import interface def get_spec(field, limit=10, query='', query_dsl=''): """Returns aggregation specs for a term of filtered events. The aggregation spec will summarize values of an attribute whose events fall under a filter. Args: field (str): this denotes the event attribute that is used for aggregation. limit (int): How many buckets to return, defaults to 10. query (str): the query field to run on all documents prior to aggregating the results. query_dsl (str): the query DSL field to run on all documents prior to aggregating the results (optional). Either a query string or a query DSL has to be present. Raises: ValueError: if neither query_string or query_dsl is provided. Returns: a dict value that can be used as an aggregation spec. """ if query: query_filter = { 'bool': { 'must': [ { 'query_string': { 'query': query } } ] } } elif query_dsl: query_filter = query_dsl else: raise ValueError('Neither query nor query DSL provided.') return { 'query': query_filter, 'aggs': { 'aggregation': { 'terms': { 'field': field, 'size': limit } } } } class FilteredTermsAggregation(interface.BaseAggregator): """Query Filter Term Aggregation.""" NAME = 'query_bucket' DISPLAY_NAME = 'Filtered Terms Aggregation' DESCRIPTION = 'Aggregating values of a field after applying a filter' SUPPORTED_CHARTS = frozenset( ['barchart', 'circlechart', 'hbarchart', 'linechart', 'table']) FORM_FIELDS = [ { 'type': 'ts-dynamic-form-select-input', 'name': 'supported_charts', 'label': 'Chart type to render', 'options': list(SUPPORTED_CHARTS), 'display': True }, { 'name': 'query_string', 'type': 'ts-dynamic-form-text-input', 'label': 'The filter query to narrow down the result set', 'placeholder': 'Query', 'default_value': '', 'display': True }, { 'name': 'query_dsl', 'type': 'ts-dynamic-form-text-input', 'label': 'The filter query DSL to narrow down the result', 'placeholder': 'Query DSL', 'default_value': '', 'display': False }, { 'name': 'field', 'type': 'ts-dynamic-form-text-input', 'label': 'What field to aggregate.', 'display': True }, { 'type': 'ts-dynamic-form-datetime-input', 'name': 'start_time', 'label': ( 'ISO formatted timestamp for the start time ' 'of the aggregated data'), 'placeholder': 'Enter a start date for the aggregation', 'default_value': '', 'display': True }, { 'type': 'ts-dynamic-form-datetime-input', 'name': 'end_time', 'label': 'ISO formatted end time for the aggregation', 'placeholder': 'Enter an end date for the aggregation', 'default_value': '', 'display': True }, { 'type': 'ts-dynamic-form-text-input', 'name': 'limit', 'label': 'Number of results to return', 'placeholder': 'Enter number of results to return', 'default_value': '10', 'display': True } ] @property def chart_title(self): """Returns a title for the chart.""" if self.field: return 'Top filtered results for "{0:s}"'.format(self.field) return 'Top results for an unknown field after filtering' # pylint: disable=arguments-differ def run( self, field, query_string='', query_dsl='', supported_charts='table', start_time='', end_time='', limit=10): """Run the aggregation. Args: field (str): this denotes the event attribute that is used for aggregation. query_string (str): the query field to run on all documents prior to aggregating the results. query_dsl (str): the query DSL field to run on all documents prior to aggregating the results. Either a query string or a query DSL has to be present. supported_charts: Chart type to render. Defaults to table. start_time: Optional ISO formatted date string that limits the time range for the aggregation. end_time: Optional ISO formatted date string that limits the time range for the aggregation. limit (int): How many buckets to return, defaults to 10. Returns: Instance of interface.AggregationResult with aggregation result. Raises: ValueError: if neither query_string or query_dsl is provided. """ if not (query_string or query_dsl): raise ValueError('Both query_string and query_dsl are missing') self.field = field formatted_field_name = self.format_field_by_type(field) aggregation_spec = get_spec( field=formatted_field_name, limit=limit, query=query_string, query_dsl=query_dsl) aggregation_spec = self._add_query_to_aggregation_spec( aggregation_spec, start_time=start_time, end_time=end_time) # Encoding information for Vega-Lite. encoding = { 'x': { 'field': field, 'type': 'nominal', 'sort': { 'op': 'sum', 'field': 'count', 'order': 'descending' } }, 'y': {'field': 'count', 'type': 'quantitative'}, 'tooltip': [ {'field': field, 'type': 'nominal'}, {'field': 'count', 'type': 'quantitative'}], } response = self.opensearch_aggregation(aggregation_spec) aggregations = response.get('aggregations', {}) aggregation = aggregations.get('aggregation', {}) buckets = aggregation.get('buckets', []) values = [] for bucket in buckets: d = { field: bucket.get('key', 'N/A'), 'count': bucket.get('doc_count', 0) } values.append(d) if query_string: extra_query_url = 'AND {0:s}'.format(query_string) else: extra_query_url = '' return interface.AggregationResult( encoding=encoding, values=values, chart_type=supported_charts, sketch_url=self._sketch_url, field=field, extra_query_url=extra_query_url) manager.AggregatorManager.register_aggregator(FilteredTermsAggregation)
google/timesketch
timesketch/lib/aggregators/term.py
Python
apache-2.0
7,953
<?xml version="1.0" encoding="ascii"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <title>gluon.validators.IS_IN_DB</title> <link rel="stylesheet" href="epydoc.css" type="text/css" /> <script type="text/javascript" src="epydoc.js"></script> </head> <body bgcolor="white" text="black" link="blue" vlink="#204080" alink="#204080"> <!-- ==================== NAVIGATION BAR ==================== --> <table class="navbar" border="0" width="100%" cellpadding="0" bgcolor="#a0c0ff" cellspacing="0"> <tr valign="middle"> <!-- Home link --> <th>&nbsp;&nbsp;&nbsp;<a href="gluon-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th> <!-- Tree link --> <th>&nbsp;&nbsp;&nbsp;<a href="module-tree.html">Trees</a>&nbsp;&nbsp;&nbsp;</th> <!-- Index link --> <th>&nbsp;&nbsp;&nbsp;<a href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th> <!-- Help link --> <th>&nbsp;&nbsp;&nbsp;<a href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th> <!-- Project homepage --> <th class="navbar" align="right" width="100%"> <table border="0" cellpadding="0" cellspacing="0"> <tr><th class="navbar" align="center" ><a class="navbar" target="_top" href="http://www.web2py.com">web2py Web Framework</a></th> </tr></table></th> </tr> </table> <table width="100%" cellpadding="0" cellspacing="0"> <tr valign="top"> <td width="100%"> <span class="breadcrumbs"> <a href="gluon-module.html">Package&nbsp;gluon</a> :: <a href="gluon.validators-module.html" onclick="show_private();">Module&nbsp;validators</a> :: Class&nbsp;IS_IN_DB </span> </td> <td> <table cellpadding="0" cellspacing="0"> <!-- hide/show private --> <tr><td align="right"><span class="options">[<a href="javascript:void(0);" class="privatelink" onclick="toggle_private();">hide&nbsp;private</a>]</span></td></tr> <tr><td align="right"><span class="options" >[<a href="frames.html" target="_top">frames</a >]&nbsp;|&nbsp;<a href="gluon.validators.IS_IN_DB-class.html" target="_top">no&nbsp;frames</a>]</span></td></tr> </table> </td> </tr> </table> <!-- ==================== CLASS DESCRIPTION ==================== --> <h1 class="epydoc">Class IS_IN_DB</h1><p class="nomargin-top"><span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB">source&nbsp;code</a></span></p> <pre class="base-tree"> object --+ | <a href="gluon.validators.Validator-class.html" onclick="show_private();">Validator</a> --+ | <strong class="uidshort">IS_IN_DB</strong> </pre> <hr /> <p>example:</p> <pre class="literalblock"> INPUT(_type='text', _name='name', requires=IS_IN_DB(db, db.mytable.myfield, zero='')) </pre> <p>used for reference fields, rendered as a dropbox</p> <!-- ==================== INSTANCE METHODS ==================== --> <a name="section-InstanceMethods"></a> <table class="summary" border="1" cellpadding="3" cellspacing="0" width="100%" bgcolor="white"> <tr bgcolor="#70b0f0" class="table-header"> <td colspan="2" class="table-header"> <table border="0" cellpadding="0" cellspacing="0" width="100%"> <tr valign="top"> <td align="left"><span class="table-header">Instance Methods</span></td> <td align="right" valign="top" ><span class="options">[<a href="#section-InstanceMethods" class="privatelink" onclick="toggle_private();" >hide private</a>]</span></td> </tr> </table> </td> </tr> <tr> <td width="15%" align="right" valign="top" class="summary"> <span class="summary-type">&nbsp;</span> </td><td class="summary"> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr> <td><span class="summary-sig"><a href="gluon.validators.IS_IN_DB-class.html#__init__" class="summary-sig-name">__init__</a>(<span class="summary-sig-arg">self</span>, <span class="summary-sig-arg">dbset</span>, <span class="summary-sig-arg">field</span>, <span class="summary-sig-arg">label</span>=<span class="summary-sig-default">None</span>, <span class="summary-sig-arg">error_message</span>=<span class="summary-sig-default"><code class="variable-quote">'</code><code class="variable-string">value not in database</code><code class="variable-quote">'</code></span>, <span class="summary-sig-arg">orderby</span>=<span class="summary-sig-default">None</span>, <span class="summary-sig-arg">groupby</span>=<span class="summary-sig-default">None</span>, <span class="summary-sig-arg">distinct</span>=<span class="summary-sig-default">None</span>, <span class="summary-sig-arg">cache</span>=<span class="summary-sig-default">None</span>, <span class="summary-sig-arg">multiple</span>=<span class="summary-sig-default">False</span>, <span class="summary-sig-arg">zero</span>=<span class="summary-sig-default"><code class="variable-quote">'</code><code class="variable-string"></code><code class="variable-quote">'</code></span>, <span class="summary-sig-arg">sort</span>=<span class="summary-sig-default">False</span>, <span class="summary-sig-arg">_and</span>=<span class="summary-sig-default">None</span>)</span><br /> x.__init__(...) initializes x; see help(type(x)) for signature</td> <td align="right" valign="top"> <span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.__init__">source&nbsp;code</a></span> </td> </tr> </table> </td> </tr> <tr> <td width="15%" align="right" valign="top" class="summary"> <span class="summary-type">&nbsp;</span> </td><td class="summary"> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr> <td><span class="summary-sig"><a name="set_self_id"></a><span class="summary-sig-name">set_self_id</span>(<span class="summary-sig-arg">self</span>, <span class="summary-sig-arg">id</span>)</span></td> <td align="right" valign="top"> <span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.set_self_id">source&nbsp;code</a></span> </td> </tr> </table> </td> </tr> <tr> <td width="15%" align="right" valign="top" class="summary"> <span class="summary-type">&nbsp;</span> </td><td class="summary"> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr> <td><span class="summary-sig"><a name="build_set"></a><span class="summary-sig-name">build_set</span>(<span class="summary-sig-arg">self</span>)</span></td> <td align="right" valign="top"> <span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.build_set">source&nbsp;code</a></span> </td> </tr> </table> </td> </tr> <tr> <td width="15%" align="right" valign="top" class="summary"> <span class="summary-type">&nbsp;</span> </td><td class="summary"> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr> <td><span class="summary-sig"><a name="options"></a><span class="summary-sig-name">options</span>(<span class="summary-sig-arg">self</span>, <span class="summary-sig-arg">zero</span>=<span class="summary-sig-default">True</span>)</span></td> <td align="right" valign="top"> <span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.options">source&nbsp;code</a></span> </td> </tr> </table> </td> </tr> <tr> <td width="15%" align="right" valign="top" class="summary"> <span class="summary-type">&nbsp;</span> </td><td class="summary"> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr> <td><span class="summary-sig"><a href="gluon.validators.IS_IN_DB-class.html#__call__" class="summary-sig-name">__call__</a>(<span class="summary-sig-arg">self</span>, <span class="summary-sig-arg">value</span>)</span></td> <td align="right" valign="top"> <span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.__call__">source&nbsp;code</a></span> </td> </tr> </table> </td> </tr> <tr> <td colspan="2" class="summary"> <p class="indent-wrapped-lines"><b>Inherited from <code><a href="gluon.validators.Validator-class.html" onclick="show_private();">Validator</a></code></b>: <code><a href="gluon.validators.Validator-class.html#formatter">formatter</a></code> </p> <p class="indent-wrapped-lines"><b>Inherited from <code>object</code></b>: <code>__delattr__</code>, <code>__format__</code>, <code>__getattribute__</code>, <code>__hash__</code>, <code>__new__</code>, <code>__reduce__</code>, <code>__reduce_ex__</code>, <code>__repr__</code>, <code>__setattr__</code>, <code>__sizeof__</code>, <code>__str__</code>, <code>__subclasshook__</code> </p> </td> </tr> </table> <!-- ==================== PROPERTIES ==================== --> <a name="section-Properties"></a> <table class="summary" border="1" cellpadding="3" cellspacing="0" width="100%" bgcolor="white"> <tr bgcolor="#70b0f0" class="table-header"> <td colspan="2" class="table-header"> <table border="0" cellpadding="0" cellspacing="0" width="100%"> <tr valign="top"> <td align="left"><span class="table-header">Properties</span></td> <td align="right" valign="top" ><span class="options">[<a href="#section-Properties" class="privatelink" onclick="toggle_private();" >hide private</a>]</span></td> </tr> </table> </td> </tr> <tr> <td colspan="2" class="summary"> <p class="indent-wrapped-lines"><b>Inherited from <code>object</code></b>: <code>__class__</code> </p> </td> </tr> </table> <!-- ==================== METHOD DETAILS ==================== --> <a name="section-MethodDetails"></a> <table class="details" border="1" cellpadding="3" cellspacing="0" width="100%" bgcolor="white"> <tr bgcolor="#70b0f0" class="table-header"> <td colspan="2" class="table-header"> <table border="0" cellpadding="0" cellspacing="0" width="100%"> <tr valign="top"> <td align="left"><span class="table-header">Method Details</span></td> <td align="right" valign="top" ><span class="options">[<a href="#section-MethodDetails" class="privatelink" onclick="toggle_private();" >hide private</a>]</span></td> </tr> </table> </td> </tr> </table> <a name="__init__"></a> <div> <table class="details" border="1" cellpadding="3" cellspacing="0" width="100%" bgcolor="white"> <tr><td> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr valign="top"><td> <h3 class="epydoc"><span class="sig"><span class="sig-name">__init__</span>(<span class="sig-arg">self</span>, <span class="sig-arg">dbset</span>, <span class="sig-arg">field</span>, <span class="sig-arg">label</span>=<span class="sig-default">None</span>, <span class="sig-arg">error_message</span>=<span class="sig-default"><code class="variable-quote">'</code><code class="variable-string">value not in database</code><code class="variable-quote">'</code></span>, <span class="sig-arg">orderby</span>=<span class="sig-default">None</span>, <span class="sig-arg">groupby</span>=<span class="sig-default">None</span>, <span class="sig-arg">distinct</span>=<span class="sig-default">None</span>, <span class="sig-arg">cache</span>=<span class="sig-default">None</span>, <span class="sig-arg">multiple</span>=<span class="sig-default">False</span>, <span class="sig-arg">zero</span>=<span class="sig-default"><code class="variable-quote">'</code><code class="variable-string"></code><code class="variable-quote">'</code></span>, <span class="sig-arg">sort</span>=<span class="sig-default">False</span>, <span class="sig-arg">_and</span>=<span class="sig-default">None</span>)</span> <br /><em class="fname">(Constructor)</em> </h3> </td><td align="right" valign="top" ><span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.__init__">source&nbsp;code</a></span>&nbsp; </td> </tr></table> <p>x.__init__(...) initializes x; see help(type(x)) for signature</p> <dl class="fields"> <dt>Overrides: object.__init__ <dd><em class="note">(inherited documentation)</em></dd> </dt> </dl> </td></tr></table> </div> <a name="__call__"></a> <div> <table class="details" border="1" cellpadding="3" cellspacing="0" width="100%" bgcolor="white"> <tr><td> <table width="100%" cellpadding="0" cellspacing="0" border="0"> <tr valign="top"><td> <h3 class="epydoc"><span class="sig"><span class="sig-name">__call__</span>(<span class="sig-arg">self</span>, <span class="sig-arg">value</span>)</span> <br /><em class="fname">(Call operator)</em> </h3> </td><td align="right" valign="top" ><span class="codelink"><a href="gluon.validators-pysrc.html#IS_IN_DB.__call__">source&nbsp;code</a></span>&nbsp; </td> </tr></table> <dl class="fields"> <dt>Overrides: <a href="gluon.validators.Validator-class.html#__call__">Validator.__call__</a> </dt> </dl> </td></tr></table> </div> <br /> <!-- ==================== NAVIGATION BAR ==================== --> <table class="navbar" border="0" width="100%" cellpadding="0" bgcolor="#a0c0ff" cellspacing="0"> <tr valign="middle"> <!-- Home link --> <th>&nbsp;&nbsp;&nbsp;<a href="gluon-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th> <!-- Tree link --> <th>&nbsp;&nbsp;&nbsp;<a href="module-tree.html">Trees</a>&nbsp;&nbsp;&nbsp;</th> <!-- Index link --> <th>&nbsp;&nbsp;&nbsp;<a href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th> <!-- Help link --> <th>&nbsp;&nbsp;&nbsp;<a href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th> <!-- Project homepage --> <th class="navbar" align="right" width="100%"> <table border="0" cellpadding="0" cellspacing="0"> <tr><th class="navbar" align="center" ><a class="navbar" target="_top" href="http://www.web2py.com">web2py Web Framework</a></th> </tr></table></th> </tr> </table> <table border="0" cellpadding="0" cellspacing="0" width="100%%"> <tr> <td align="left" class="footer"> Generated by Epydoc 3.0.1 on Thu Nov 28 13:54:45 2013 </td> <td align="right" class="footer"> <a target="mainFrame" href="http://epydoc.sourceforge.net" >http://epydoc.sourceforge.net</a> </td> </tr> </table> <script type="text/javascript"> <!-- // Private objects are initially displayed (because if // javascript is turned off then we want them to be // visible); but by default, we want to hide them. So hide // them unless we have a cookie that says to show them. checkCookie(); // --> </script> </body> </html>
elrafael/web2py-test
applications/examples/static/epydoc/gluon.validators.IS_IN_DB-class.html
HTML
apache-2.0
15,504
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.retail.v2; /** * Available OAuth 2.0 scopes for use with the Retail API. * * @since 1.4 */ public class CloudRetailScopes { /** See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account.. */ public static final String CLOUD_PLATFORM = "https://www.googleapis.com/auth/cloud-platform"; /** * Returns an unmodifiable set that contains all scopes declared by this class. * * @since 1.16 */ public static java.util.Set<String> all() { java.util.Set<String> set = new java.util.HashSet<String>(); set.add(CLOUD_PLATFORM); return java.util.Collections.unmodifiableSet(set); } private CloudRetailScopes() { } }
googleapis/google-api-java-client-services
clients/google-api-services-retail/v2/1.31.0/com/google/api/services/retail/v2/CloudRetailScopes.java
Java
apache-2.0
1,411
# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os from textwrap import dedent import unittest from eventlet.green import ssl import mock from six.moves.configparser import NoSectionError, NoOptionError from swift.common.middleware import memcache from swift.common.memcached import MemcacheRing from swift.common.swob import Request from swift.common.wsgi import loadapp from test.unit import with_tempdir, patch_policies class FakeApp(object): def __call__(self, env, start_response): return env class ExcConfigParser(object): def read(self, path): raise Exception('read called with %r' % path) class EmptyConfigParser(object): def read(self, path): return False def get_config_parser(memcache_servers='1.2.3.4:5', memcache_serialization_support='1', memcache_max_connections='4', section='memcache'): _srvs = memcache_servers _sers = memcache_serialization_support _maxc = memcache_max_connections _section = section class SetConfigParser(object): def items(self, section_name): if section_name != section: raise NoSectionError(section_name) return { 'memcache_servers': memcache_servers, 'memcache_serialization_support': memcache_serialization_support, 'memcache_max_connections': memcache_max_connections, } def read(self, path): return True def get(self, section, option): if _section == section: if option == 'memcache_servers': if _srvs == 'error': raise NoOptionError(option, section) return _srvs elif option == 'memcache_serialization_support': if _sers == 'error': raise NoOptionError(option, section) return _sers elif option in ('memcache_max_connections', 'max_connections'): if _maxc == 'error': raise NoOptionError(option, section) return _maxc else: raise NoOptionError(option, section) else: raise NoSectionError(option) return SetConfigParser def start_response(*args): pass class TestCacheMiddleware(unittest.TestCase): def setUp(self): self.app = memcache.MemcacheMiddleware(FakeApp(), {}) def test_cache_middleware(self): req = Request.blank('/something', environ={'REQUEST_METHOD': 'GET'}) resp = self.app(req.environ, start_response) self.assertTrue('swift.cache' in resp) self.assertTrue(isinstance(resp['swift.cache'], MemcacheRing)) def test_conf_default_read(self): with mock.patch.object(memcache, 'ConfigParser', ExcConfigParser): for d in ({}, {'memcache_servers': '6.7.8.9:10'}, {'memcache_serialization_support': '0'}, {'memcache_max_connections': '30'}, {'memcache_servers': '6.7.8.9:10', 'memcache_serialization_support': '0'}, {'memcache_servers': '6.7.8.9:10', 'memcache_max_connections': '30'}, {'memcache_serialization_support': '0', 'memcache_max_connections': '30'} ): with self.assertRaises(Exception) as catcher: memcache.MemcacheMiddleware(FakeApp(), d) self.assertEqual( str(catcher.exception), "read called with '/etc/swift/memcache.conf'") def test_conf_set_no_read(self): with mock.patch.object(memcache, 'ConfigParser', ExcConfigParser): exc = None try: memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '1.2.3.4:5', 'memcache_serialization_support': '2', 'memcache_max_connections': '30'}) except Exception as err: exc = err self.assertIsNone(exc) def test_conf_default(self): with mock.patch.object(memcache, 'ConfigParser', EmptyConfigParser): app = memcache.MemcacheMiddleware(FakeApp(), {}) self.assertEqual(app.memcache_servers, '127.0.0.1:11211') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, False) self.assertEqual( app.memcache._client_cache['127.0.0.1:11211'].max_size, 2) def test_conf_inline(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '6.7.8.9:10', 'memcache_serialization_support': '0', 'memcache_max_connections': '5'}) self.assertEqual(app.memcache_servers, '6.7.8.9:10') self.assertEqual(app.memcache._allow_pickle, True) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['6.7.8.9:10'].max_size, 5) def test_conf_inline_ratelimiting(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'error_suppression_limit': '5', 'error_suppression_interval': '2.5'}) self.assertEqual(app.memcache._error_limit_count, 5) self.assertEqual(app.memcache._error_limit_time, 2.5) self.assertEqual(app.memcache._error_limit_duration, 2.5) def test_conf_inline_tls(self): fake_context = mock.Mock() with mock.patch.object(ssl, 'create_default_context', return_value=fake_context): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): memcache.MemcacheMiddleware( FakeApp(), {'tls_enabled': 'true', 'tls_cafile': 'cafile', 'tls_certfile': 'certfile', 'tls_keyfile': 'keyfile'}) ssl.create_default_context.assert_called_with(cafile='cafile') fake_context.load_cert_chain.assert_called_with('certfile', 'keyfile') def test_conf_extra_no_section(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser(section='foobar')): app = memcache.MemcacheMiddleware(FakeApp(), {}) self.assertEqual(app.memcache_servers, '127.0.0.1:11211') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, False) self.assertEqual( app.memcache._client_cache['127.0.0.1:11211'].max_size, 2) def test_conf_extra_no_option(self): replacement_parser = get_config_parser( memcache_servers='error', memcache_serialization_support='error', memcache_max_connections='error') with mock.patch.object(memcache, 'ConfigParser', replacement_parser): app = memcache.MemcacheMiddleware(FakeApp(), {}) self.assertEqual(app.memcache_servers, '127.0.0.1:11211') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, False) self.assertEqual( app.memcache._client_cache['127.0.0.1:11211'].max_size, 2) def test_conf_inline_other_max_conn(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '6.7.8.9:10', 'memcache_serialization_support': '0', 'max_connections': '5'}) self.assertEqual(app.memcache_servers, '6.7.8.9:10') self.assertEqual(app.memcache._allow_pickle, True) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['6.7.8.9:10'].max_size, 5) def test_conf_inline_bad_max_conn(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '6.7.8.9:10', 'memcache_serialization_support': '0', 'max_connections': 'bad42'}) self.assertEqual(app.memcache_servers, '6.7.8.9:10') self.assertEqual(app.memcache._allow_pickle, True) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['6.7.8.9:10'].max_size, 4) def test_conf_from_extra_conf(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware(FakeApp(), {}) self.assertEqual(app.memcache_servers, '1.2.3.4:5') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['1.2.3.4:5'].max_size, 4) def test_conf_from_extra_conf_bad_max_conn(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser( memcache_max_connections='bad42')): app = memcache.MemcacheMiddleware(FakeApp(), {}) self.assertEqual(app.memcache_servers, '1.2.3.4:5') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['1.2.3.4:5'].max_size, 2) def test_conf_from_inline_and_maxc_from_extra_conf(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '6.7.8.9:10', 'memcache_serialization_support': '0'}) self.assertEqual(app.memcache_servers, '6.7.8.9:10') self.assertEqual(app.memcache._allow_pickle, True) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['6.7.8.9:10'].max_size, 4) def test_conf_from_inline_and_sers_from_extra_conf(self): with mock.patch.object(memcache, 'ConfigParser', get_config_parser()): app = memcache.MemcacheMiddleware( FakeApp(), {'memcache_servers': '6.7.8.9:10', 'memcache_max_connections': '42'}) self.assertEqual(app.memcache_servers, '6.7.8.9:10') self.assertEqual(app.memcache._allow_pickle, False) self.assertEqual(app.memcache._allow_unpickle, True) self.assertEqual( app.memcache._client_cache['6.7.8.9:10'].max_size, 42) def test_filter_factory(self): factory = memcache.filter_factory({'max_connections': '3'}, memcache_servers='10.10.10.10:10', memcache_serialization_support='1') thefilter = factory('myapp') self.assertEqual(thefilter.app, 'myapp') self.assertEqual(thefilter.memcache_servers, '10.10.10.10:10') self.assertEqual(thefilter.memcache._allow_pickle, False) self.assertEqual(thefilter.memcache._allow_unpickle, True) self.assertEqual( thefilter.memcache._client_cache['10.10.10.10:10'].max_size, 3) @patch_policies def _loadapp(self, proxy_config_path): """ Load a proxy from an app.conf to get the memcache_ring :returns: the memcache_ring of the memcache middleware filter """ with mock.patch('swift.proxy.server.Ring'): app = loadapp(proxy_config_path) memcache_ring = None while True: memcache_ring = getattr(app, 'memcache', None) if memcache_ring: break app = app.app return memcache_ring @with_tempdir def test_real_config(self, tempdir): config = """ [pipeline:main] pipeline = cache proxy-server [app:proxy-server] use = egg:swift#proxy [filter:cache] use = egg:swift#memcache """ config_path = os.path.join(tempdir, 'test.conf') with open(config_path, 'w') as f: f.write(dedent(config)) memcache_ring = self._loadapp(config_path) # only one server by default self.assertEqual(list(memcache_ring._client_cache.keys()), ['127.0.0.1:11211']) # extra options self.assertEqual(memcache_ring._connect_timeout, 0.3) self.assertEqual(memcache_ring._pool_timeout, 1.0) # tries is limited to server count self.assertEqual(memcache_ring._tries, 1) self.assertEqual(memcache_ring._io_timeout, 2.0) @with_tempdir def test_real_config_with_options(self, tempdir): config = """ [pipeline:main] pipeline = cache proxy-server [app:proxy-server] use = egg:swift#proxy [filter:cache] use = egg:swift#memcache memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211, 10.0.0.4:11211 connect_timeout = 1.0 pool_timeout = 0.5 tries = 4 io_timeout = 1.0 tls_enabled = true """ config_path = os.path.join(tempdir, 'test.conf') with open(config_path, 'w') as f: f.write(dedent(config)) memcache_ring = self._loadapp(config_path) self.assertEqual(sorted(memcache_ring._client_cache.keys()), ['10.0.0.%d:11211' % i for i in range(1, 5)]) # extra options self.assertEqual(memcache_ring._connect_timeout, 1.0) self.assertEqual(memcache_ring._pool_timeout, 0.5) # tries is limited to server count self.assertEqual(memcache_ring._tries, 4) self.assertEqual(memcache_ring._io_timeout, 1.0) self.assertEqual(memcache_ring._error_limit_count, 10) self.assertEqual(memcache_ring._error_limit_time, 60) self.assertEqual(memcache_ring._error_limit_duration, 60) self.assertIsInstance( list(memcache_ring._client_cache.values())[0]._tls_context, ssl.SSLContext) @with_tempdir def test_real_memcache_config(self, tempdir): proxy_config = """ [DEFAULT] swift_dir = %s [pipeline:main] pipeline = cache proxy-server [app:proxy-server] use = egg:swift#proxy [filter:cache] use = egg:swift#memcache connect_timeout = 1.0 """ % tempdir proxy_config_path = os.path.join(tempdir, 'test.conf') with open(proxy_config_path, 'w') as f: f.write(dedent(proxy_config)) memcache_config = """ [memcache] memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211, 10.0.0.4:11211 connect_timeout = 0.5 io_timeout = 1.0 error_suppression_limit = 0 error_suppression_interval = 1.5 """ memcache_config_path = os.path.join(tempdir, 'memcache.conf') with open(memcache_config_path, 'w') as f: f.write(dedent(memcache_config)) memcache_ring = self._loadapp(proxy_config_path) self.assertEqual(sorted(memcache_ring._client_cache.keys()), ['10.0.0.%d:11211' % i for i in range(1, 5)]) # proxy option takes precedence self.assertEqual(memcache_ring._connect_timeout, 1.0) # default tries are not limited by servers self.assertEqual(memcache_ring._tries, 3) # memcache conf options are defaults self.assertEqual(memcache_ring._io_timeout, 1.0) self.assertEqual(memcache_ring._error_limit_count, 0) self.assertEqual(memcache_ring._error_limit_time, 1.5) self.assertEqual(memcache_ring._error_limit_duration, 1.5) if __name__ == '__main__': unittest.main()
swiftstack/swift
test/unit/common/middleware/test_memcache.py
Python
apache-2.0
17,061
<?php /** * HiPay fullservice Magento2 * * NOTICE OF LICENSE * * This source file is subject to the Apache 2.0 Licence * that is bundled with this package in the file LICENSE.md. * It is also available through the world-wide-web at this URL: * http://www.apache.org/licenses/LICENSE-2.0 * * @copyright Copyright (c) 2019 - HiPay * @license http://www.apache.org/licenses/LICENSE-2.0 Apache 2.0 Licence */ namespace HiPay\FullserviceMagento\Model\Request\ThreeDS; use HiPay\FullserviceMagento\Model\Request\AbstractRequest; use HiPay\Fullservice\Gateway\Model\Request\ThreeDSTwo\BrowserInfo; /** * * @author HiPay <[email protected]> * @copyright Copyright (c) 2019 - HiPay * @license http://www.apache.org/licenses/LICENSE-2.0 Apache 2.0 Licence * @link https://github.com/hipay/hipay-fullservice-sdk-magento2 */ class BrowserInfoFormatter extends AbstractRequest { /** * @var \HiPay\FullserviceMagento\Helper\ThreeDSTwo */ protected $_threeDSHelper; /** * @var \Magento\Sales\Model\Order */ protected $_order; /** * BrowserInfoFormatter constructor. * * @param \Psr\Log\LoggerInterface $logger * @param \Magento\Checkout\Helper\Data $checkoutData * @param \Magento\Customer\Model\Session $customerSession * @param \Magento\Checkout\Model\Session $checkoutSession * @param \Magento\Framework\Locale\ResolverInterface $localeResolver * @param \HiPay\FullserviceMagento\Model\Request\Type\Factory $requestFactory * @param \Magento\Framework\UrlInterface $urlBuilder * @param \HiPay\FullserviceMagento\Helper\Data $helper * @param \HiPay\FullserviceMagento\Helper\ThreeDSTwo $threeDSHelper * @param array $params * @throws \Magento\Framework\Exception\LocalizedException */ public function __construct( \Psr\Log\LoggerInterface $logger, \Magento\Checkout\Helper\Data $checkoutData, \Magento\Customer\Model\Session $customerSession, \Magento\Checkout\Model\Session $checkoutSession, \Magento\Framework\Locale\ResolverInterface $localeResolver, \HiPay\FullserviceMagento\Model\Request\Type\Factory $requestFactory, \Magento\Framework\UrlInterface $urlBuilder, \HiPay\FullserviceMagento\Helper\Data $helper, \HiPay\FullserviceMagento\Helper\ThreeDSTwo $threeDSHelper, $params = [] ) { parent::__construct( $logger, $checkoutData, $customerSession, $checkoutSession, $localeResolver, $requestFactory, $urlBuilder, $helper, $params ); $this->_threeDSHelper = $threeDSHelper; $this->_order = $params["order"]; } /** * {@inheritDoc} * * @return BrowserInfo * @see \HiPay\FullserviceMagento\Model\Request\AbstractRequest::mapRequest() */ protected function mapRequest() { $browserInfo = new BrowserInfo(); $browserData = json_decode($this->_order->getPayment()->getAdditionalInformation('browser_info')); $browserInfo->ipaddr = $this->_order->getRemoteIp(); $browserInfo->http_accept = isset($_SERVER['HTTP_ACCEPT']) ? $_SERVER['HTTP_ACCEPT'] : null; $browserInfo->javascript_enabled = true; if ($browserData !== null) { $browserInfo->java_enabled = isset($browserData->java_enabled) ? $browserData->java_enabled : null; $browserInfo->language = isset($browserData->language) ? $browserData->language : null; $browserInfo->color_depth = isset($browserData->color_depth) ? $browserData->color_depth : null; $browserInfo->screen_height = isset($browserData->screen_height) ? $browserData->screen_height : null; $browserInfo->screen_width = isset($browserData->screen_width) ? $browserData->screen_width : null; $browserInfo->timezone = isset($browserData->timezone) ? $browserData->timezone : null; $browserInfo->http_user_agent = isset($browserData->http_user_agent) ? $browserData->http_user_agent : null; } return $browserInfo; } }
hipay/hipay-fullservice-sdk-magento2
Model/Request/ThreeDS/BrowserInfoFormatter.php
PHP
apache-2.0
4,402
package com.github.andriell.collection; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Created by Andrey on 13.02.2016 */ public class HashThreeTest { public static void main(String[] args) { HashThreeTest test = new HashThreeTest(); test.test1(); } @Test public void test1() { ObjectTest test1 = new ObjectTest(0x50000000); ObjectTest test2 = new ObjectTest(0x60000000); ObjectTest test3 = new ObjectTest(0x70000000); ObjectTest test4 = new ObjectTest(0x00000005); ObjectTest test5 = new ObjectTest(0x00000006); ObjectTest test6 = new ObjectTest(0x00000007); HashThree<ObjectTest> three = new HashThree<ObjectTest>(); assertEquals(0, three.getSize()); assertEquals(false, three.remove(test1)); assertEquals(true, three.add(test1)); assertEquals(1, three.getSize()); assertEquals(true, three.add(test2)); assertEquals(2, three.getSize()); assertEquals(true, three.add(test3)); assertEquals(3, three.getSize()); assertEquals(true, three.add(test4)); assertEquals(4, three.getSize()); assertEquals(true, three.add(test5)); assertEquals(5, three.getSize()); assertEquals(true, three.add(test6)); assertEquals(6, three.getSize()); assertEquals(false, three.add(test1)); assertEquals(false, three.add(test2)); assertEquals(false, three.add(test3)); assertEquals(false, three.add(test4)); assertEquals(true, three.replace(test1)); assertEquals(true, three.replace(test2)); assertEquals(true, three.replace(test3)); assertEquals(true, three.replace(test4)); System.out.println(three); assertEquals(true, three.exist(test2)); assertEquals(true, three.remove(test2)); //assertEquals(false, three.remove(test2)); //assertEquals(true, three.exist(test1)); //assertEquals(false, three.exist(test2)); //assertEquals(true, three.exist(test3)); //assertEquals(true, three.exist(test4)); System.out.println(three); } private class ObjectTest { private int hashCode; public ObjectTest(int hashCode) { this.hashCode = hashCode; } @Override public int hashCode() { return hashCode; } @Override public String toString() { return Integer.toString(hashCode); } } }
andriell/craftyfox
src/test/java/com/github/andriell/collection/HashThreeTest.java
Java
apache-2.0
2,540
--- layout: page title: "About" description: "你是你周围关系的反映 " header-img: "img/green.jpg" --- 我是Edward,通过建立博客,希望能够学习互联网语言,read 这个时代 现在正在学习**python** 。 ###remind - 正确的激励产生正确的行为 - 解释宇宙需要不仅仅一个真理 - 你是你周围关系的反映 - 广末凉子很漂亮 ###关注: - [Python](http://liaoxuefeng.com) ###代表作: - [《暂无,此为模板测试》](http://cnfeat.com/blog/2015/05/22/a-24-chinese-fonts/) ###我的朋友们 - [lomo](http://huangyafei.com) ###联系 - [知乎@ewadrd.lv](http://www.zhihu.com/people/yinsi) - 公众号:暂无nulltext <center> <p><img src="" align="center"></p> </center>
Edwardvi/edwardvi.github.io
about.md
Markdown
apache-2.0
846
nurikabe ======== This project is a qml graphical UI for playing the logic puzzle nurikabe. Requirements ------------ * golang >= 1.3 To install golang visit: https://golang.org/doc/install * Qt >= 5 To install Qt visit: http://qt-project.org/downloads * go-qml run 'go get github.com/gopkg.in/qml.v1' for documentation visit: http://github.com/go-qml/qml Building -------- Once all requirements have been met, you should be able to run 'go build' from the command line. This will build a binary which you can then execute. Note that you must run the binary in the same directory as the qml folder. Levels ---- Nurikabe uses json format for all its levels. You may also generate levels using the nurikabe/gen helper binary. The gen utility also allows for solving levels by piping the json level via stdin and issuing the 'solve' flag. ie. cat my_level.json | gen -solve Usage of ./gen: -base=2: minimum garden size -debug=false: enable debug output -growth=4: garden growth. base + growth is max garden size -height=5: grid height -min=3: minimum gardens count -smart=true: solve using smart algorithm -solve=false: solve generated grid -v=false: Verbose -width=5: grid width
ostlerc/nurikabe
README.md
Markdown
apache-2.0
1,265
// Copyright 2018 syzkaller project authors. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. //go:build !race // +build !race package testutil const RaceEnabled = false
google/syzkaller
pkg/testutil/norace.go
GO
apache-2.0
243