repo_name | path | size | content | license
---|---|---|---|---|
alefherrera/sisalud
|
SiSaludSRL/src/main/java/ar/edu/ungs/presentation/componente/DownloadButton.java
|
284
|
package ar.edu.ungs.presentation.componente;
public class DownloadButton extends IconButton {
/**
*
*/
private static final long serialVersionUID = 1L;
public DownloadButton() {
super();
initialize("images/downloadIcon.png");
setToolTipText("Descarga archivo");
}
}
|
apache-2.0
|
KodyKantor/p2p-gossip
|
id/id.go
|
876
|
//Package id provides an interface that defines a random N-byte id generator,
//and an implementation of the ID interface.
package id
import (
"github.com/Sirupsen/logrus"
)
const (
//DefaultSize is the default size of generated IDs.
DefaultSize int = 16
)
var log = logrus.New()
func init() {
log.Debugln("Initialized id")
}
//ID interface provides functionality for creating bufferizable IDs to send
// in packets.
type ID interface {
ServeIDs(chan ID) // sends IDs through the provided channel
Equals(ID) bool // tells whether or not two ids are equal
GetBytes() []byte // returns a byte-slice representation of the ID
GetLengthInBytes() int // returns the number of bytes needed for the ID
SetLength(int) // sets the number of bytes an ID uses
CreateFromBytes([]byte) (ID, error)
GetZeroID() (ID, error)
createID() (ID, error)
}
|
apache-2.0
|
vcdemon/wechat-framework
|
wechat-api/src/main/java/com/itfvck/wechatframework/api/coupon/location/model/CardInfo.java
|
2627
|
package com.itfvck.wechatframework.api.coupon.location.model;
import com.itfvck.wechatframework.core.common.BaseData;
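/**
 * Card information for the WeChat coupon API, holding one optional member per
 * supported card type (group-buy, gift, cash, discount, member card, tickets, etc.).
 */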
public class CardInfo extends BaseData {
private static final long serialVersionUID = 241391040579231164L;
// Card type
private String card_type;
// Basic coupon data common to all card types (see the general fields table)
private GeneralCoupon general_coupon;
// Group-buy coupon
private Groupon groupon;
// Gift coupon
private Gift gift;
// Cash coupon
private Cash cash;
// Discount coupon
private Discount discount;
// Member card
private MemberCard member_card;
// Scenic ticket
private ScenicTicket scenic_ticket;
// Movie ticket
private MovieTicket movie_ticket;
// Boarding pass
private BoardingPass boarding_pass;
// Lucky money (red packet)
private LuckyMoney lucky_money;
// Meeting ticket
private MeetingTicket meeting_ticket;
public String getCard_type() {
return card_type;
}
public void setCard_type(String card_type) {
this.card_type = card_type;
}
public GeneralCoupon getGeneral_coupon() {
return general_coupon;
}
public void setGeneral_coupon(GeneralCoupon general_coupon) {
this.general_coupon = general_coupon;
}
public Groupon getGroupon() {
return groupon;
}
public void setGroupon(Groupon groupon) {
this.groupon = groupon;
}
public Gift getGift() {
return gift;
}
public void setGift(Gift gift) {
this.gift = gift;
}
public Cash getCash() {
return cash;
}
public void setCash(Cash cash) {
this.cash = cash;
}
public Discount getDiscount() {
return discount;
}
public void setDiscount(Discount discount) {
this.discount = discount;
}
public MemberCard getMember_card() {
return member_card;
}
public void setMember_card(MemberCard member_card) {
this.member_card = member_card;
}
public ScenicTicket getScenic_ticket() {
return scenic_ticket;
}
public void setScenic_ticket(ScenicTicket scenic_ticket) {
this.scenic_ticket = scenic_ticket;
}
public MovieTicket getMovie_ticket() {
return movie_ticket;
}
public void setMovie_ticket(MovieTicket movie_ticket) {
this.movie_ticket = movie_ticket;
}
public BoardingPass getBoarding_pass() {
return boarding_pass;
}
public void setBoarding_pass(BoardingPass boarding_pass) {
this.boarding_pass = boarding_pass;
}
public LuckyMoney getLucky_money() {
return lucky_money;
}
public void setLucky_money(LuckyMoney lucky_money) {
this.lucky_money = lucky_money;
}
public MeetingTicket getMeeting_ticket() {
return meeting_ticket;
}
public void setMeeting_ticket(MeetingTicket meeting_ticket) {
this.meeting_ticket = meeting_ticket;
}
}
|
apache-2.0
|
google/tock-on-titan
|
kernel/h1/src/usb/types.rs
|
18631
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(dead_code)]
use core::ops::Deref;
use super::serialize::Serialize;
use crate::usb::constants::Descriptor;
use crate::usb::constants::MAX_PACKET_SIZE;
use crate::usb::constants::U2F_REPORT_SIZE;
/// A StaticRef is a pointer to statically allocated mutable data such
/// as memory mapped I/O registers.
///
/// It is a simple wrapper around a raw pointer that encapsulates an
/// unsafe dereference in a safe manner. It serves the role of
/// creating a `&'static T` given a raw address and acts similarly to
/// `extern` definitions, except `StaticRef` is subject to module and
/// crate boundaries, while `extern` definitions can be imported
/// anywhere.
///
/// TODO(alevy): move into `common` crate or replace with other mechanism.
pub struct StaticRef<T> {
ptr: *const T,
}
impl<T> StaticRef<T> {
/// Create a new `StaticRef` from a raw pointer
///
/// ## Safety
///
/// Callers must pass in a reference to statically allocated memory which
/// does not overlap with other values.
pub const unsafe fn new(ptr: *const T) -> StaticRef<T> {
StaticRef { ptr: ptr }
}
}
impl<T> Deref for StaticRef<T> {
type Target = T;
fn deref(&self) -> &'static T {
unsafe { &*self.ptr }
}
}
#[derive(Debug)]
#[repr(C)]
pub struct DeviceDescriptor {
pub b_length: u8,
pub b_descriptor_type: u8,
pub bcd_usb: u16,
pub b_device_class: u8,
pub b_device_sub_class: u8,
pub b_device_protocol: u8,
pub b_max_packet_size0: u8,
pub id_vendor: u16,
pub id_product: u16,
pub bcd_device: u16,
pub i_manufacturer: u8,
pub i_product: u8,
pub i_serial_number: u8,
pub b_num_configurations: u8,
}
impl DeviceDescriptor {
}
unsafe impl Serialize for DeviceDescriptor {}
#[derive(Debug)]
#[repr(C)]
pub struct ConfigurationDescriptor {
pub b_length: u8,
pub b_descriptor_type: u8,
pub w_total_length: u16,
pub b_num_interfaces: u8,
pub b_configuration_value: u8,
pub i_configuration: u8,
pub bm_attributes: u8,
pub b_max_power: u8,
}
const CONFIGURATION_DESCRIPTOR_LENGTH: u8 = 9;
impl ConfigurationDescriptor {
/// Creates a configuration with `num_interfaces` and whose string
/// descriptor is `i_configuration`. The value `b_max_power` sets
/// the maximum power of the device in 2mA increments. The
/// configuration has `bm_attributes` set to bus powered (not
/// remote wakeup).
pub fn new(num_interfaces: u8,
i_configuration: u8,
b_max_power: u8) -> ConfigurationDescriptor {
ConfigurationDescriptor {
b_length: CONFIGURATION_DESCRIPTOR_LENGTH,
b_descriptor_type: Descriptor::Configuration as u8,
w_total_length: CONFIGURATION_DESCRIPTOR_LENGTH as u16,
b_num_interfaces: num_interfaces,
b_configuration_value: 1,
i_configuration: i_configuration,
bm_attributes: 0b10000000,
b_max_power: b_max_power,
}
}
/// Take the configuration and write it out as bytes into
/// the u32 buffer, returning the number of bytes written.
pub fn into_u32_buf(&self, buf: &mut [u32; 64]) -> usize {
buf[0] = (self.b_length as u32) << 0 |
(self.b_descriptor_type as u32) << 8 |
(self.w_total_length as u32) << 16;
buf[1] = (self.b_num_interfaces as u32) << 0 |
(self.b_configuration_value as u32) << 8 |
(self.i_configuration as u32) << 16 |
(self.bm_attributes as u32) << 24;
buf[2] = (self.b_max_power as u32) << 0;
CONFIGURATION_DESCRIPTOR_LENGTH as usize
}
/// Take the configuration and write it out as bytes into
/// the u8 buffer, returning the number of bytes written.
pub fn into_u8_buf(&self, buf: &mut [u8]) -> usize {
buf[0] = self.b_length as u8;
buf[1] = self.b_descriptor_type as u8;
buf[2] = self.w_total_length as u8;
buf[3] = (self.w_total_length >> 8) as u8;
buf[4] = self.b_num_interfaces as u8;
buf[5] = self.b_configuration_value as u8;
buf[6] = self.i_configuration as u8;
buf[7] = self.bm_attributes as u8;
buf[8] = self.b_max_power as u8;
CONFIGURATION_DESCRIPTOR_LENGTH as usize
}
pub fn get_total_length(&self) -> u16 {
self.w_total_length
}
pub fn set_total_length(&mut self, len: u16) {
self.w_total_length = len;
}
pub fn length(&self) -> usize {
CONFIGURATION_DESCRIPTOR_LENGTH as usize
}
}
#[derive(Debug)]
pub struct StringDescriptor {
pub b_length: u8,
pub b_descriptor_type: u8,
pub b_string: &'static [u16],
}
impl StringDescriptor {
pub fn new(str: &'static [u16]) -> StringDescriptor {
StringDescriptor {
b_length: (str.len() * 2 + 2) as u8,
b_descriptor_type: Descriptor::String as u8,
b_string: str,
}
}
pub fn into_u32_buf(&self, buf: &mut [u32; 64]) -> usize {
let count = self.b_string.len();
if count == 0 {
buf[0] = (self.b_length as u32) << 0 |
(self.b_descriptor_type as u32) << 8;
2
} else {
buf[0] = (self.b_length as u32) << 0 |
(self.b_descriptor_type as u32) << 8 |
(self.b_string[0] as u32) << 16;
for i in 1..count {
// The first 16 bits of the message are the
// length and type. The next 16 bits of the message are the first
// wide character of the string (index 0). So this means that bits
// 16..31 of buf[0] are b_string[0], bits 0..15 of buf[1] are string[1],
// bits 16..31 of buf[1] are string[2].
if i % 2 == 1 {
buf[(i / 2) + 1] = self.b_string[i] as u32;
} else {
buf[i / 2] = buf[i / 2] | (self.b_string[i] as u32) << 16;
}
}
2 + 2 * count
}
}
pub fn length(&self) -> usize {
self.b_length as usize
}
}
unsafe impl Serialize for ConfigurationDescriptor {}
#[derive(Debug)]
pub struct InterfaceDescriptor {
pub b_length: u8,
pub b_descriptor_type: u8,
pub b_interface_number: u8,
pub b_alternate_setting: u8,
pub b_num_endpoints: u8,
pub b_interface_class: u8,
pub b_interface_sub_class: u8,
pub b_interface_protocol: u8,
pub i_interface: u8
}
impl InterfaceDescriptor {
// This is the interface descriptor for a FIDO U2F device.
// Taken from Section 3.1 of FIDO U2F HID protocol document.
pub fn new(interface_string: u8, which: u8, class: u8, sub_class: u8, protocol: u8) -> InterfaceDescriptor {
InterfaceDescriptor {
b_length: 9,
b_descriptor_type: 4, // Interface descriptor
b_interface_number: which,
b_alternate_setting: 0,
b_num_endpoints: 2,
b_interface_class: class,
b_interface_sub_class: sub_class,
b_interface_protocol: protocol,
i_interface: interface_string,
}
}
/// Take the interface and write it out as bytes into
/// the u32 buffer, returning the number of bytes written.
pub fn into_u32_buf(&self, buf: &mut [u32; 64]) -> usize {
buf[0] = (self.b_length as u32) << 0 |
(self.b_descriptor_type as u32) << 8 |
(self.b_interface_number as u32) << 16 |
(self.b_alternate_setting as u32) << 24;
buf[1] = (self.b_num_endpoints as u32) << 0 |
(self.b_interface_class as u32) << 8 |
(self.b_interface_sub_class as u32) << 16 |
(self.b_interface_protocol as u32) << 24;
buf[2] = (self.i_interface as u32) << 0;
9
}
/// Take the interface and write it out as bytes into the u8
/// buffer, returning the number of bytes written.
pub fn into_u8_buf(&self, buf: &mut [u8]) -> usize {
buf[0] = self.b_length;
buf[1] = self.b_descriptor_type;
buf[2] = self.b_interface_number;
buf[3] = self.b_alternate_setting;
buf[4] = self.b_num_endpoints;
buf[5] = self.b_interface_class;
buf[6] = self.b_interface_sub_class;
buf[7] = self.b_interface_protocol;
buf[8] = self.i_interface;
9
}
pub fn length(&self) -> usize {
9
}
}
#[repr(u8)]
#[derive(Debug)]
pub enum EndpointTransferType {
Control = 0b00,
Isochronous = 0b01,
Bulk = 0b10,
Interrupt = 0b11,
}
#[repr(u8)]
#[derive(Debug)]
pub enum EndpointSynchronizationType {
None = 0b00,
Asynchronous = 0b01,
Adaptive = 0b10,
Synchronous = 0b11
}
#[repr(u8)]
#[derive(Debug)]
pub enum EndpointUsageType {
Data = 0b00,
Feedback = 0b01,
ExplicitFeedback = 0b10,
Reserved = 0b11,
}
#[derive(Debug)]
pub struct EndpointAttributes {
pub transfer: EndpointTransferType,
pub synchronization: EndpointSynchronizationType,
pub usage: EndpointUsageType,
}
impl Into<u8> for EndpointAttributes {
fn into(self) -> u8 {
match self.transfer {
EndpointTransferType::Isochronous => self.transfer as u8,
_ => {
self.transfer as u8 |
(self.synchronization as u8) << 2 |
(self.usage as u8) << 4
}
}
}
}
impl From<u8> for EndpointAttributes {
fn from(val: u8) -> EndpointAttributes {
EndpointAttributes {
transfer: match val & 0b11 {
0b00 => EndpointTransferType::Control,
0b01 => EndpointTransferType::Isochronous,
0b10 => EndpointTransferType::Bulk,
0b11 => EndpointTransferType::Interrupt,
_ => EndpointTransferType::Control,
},
synchronization: match (val >> 2) & 0b11 {
0b00 => EndpointSynchronizationType::None,
0b01 => EndpointSynchronizationType::Asynchronous,
0b10 => EndpointSynchronizationType::Adaptive,
0b11 => EndpointSynchronizationType::Synchronous,
_ => EndpointSynchronizationType::None,
},
usage: match (val >> 4) & 0b11 {
0b00 => EndpointUsageType::Data,
0b01 => EndpointUsageType::Feedback,
0b10 => EndpointUsageType::ExplicitFeedback,
_ => EndpointUsageType::Reserved
}
}
}
}
#[derive(Debug)]
pub struct EndpointDescriptor {
pub b_length: u8,
pub b_descriptor_type: u8,
pub b_endpoint_address: u8,
pub bm_attributes: u8,
pub w_max_packet_size: u16,
pub b_interval: u8
}
impl EndpointDescriptor {
pub fn new(address: u8, attributes: EndpointAttributes, interval: u8) -> EndpointDescriptor {
EndpointDescriptor {
b_length: 7,
b_descriptor_type: Descriptor::Endpoint as u8,
b_endpoint_address: address,
bm_attributes: attributes.into(),
w_max_packet_size: MAX_PACKET_SIZE,
b_interval: interval,
}
}
pub fn into_u8_buf(&self, buf: &mut [u8]) -> usize {
buf[0] = 7;
buf[1] = Descriptor::Endpoint as u8;
buf[2] = self.b_endpoint_address;
buf[3] = self.bm_attributes;
buf[4] = self.w_max_packet_size as u8;
buf[5] = (self.w_max_packet_size >> 8) as u8;
buf[6] = self.b_interval;
7
}
pub fn length(&self) -> usize {
7
}
}
// This is a hardcoded HID device descriptor: a fully general one
// is out of scope right now. -plevis 9/27/18
#[derive(Debug)]
pub struct HidDeviceDescriptor {
b_length: u8,
b_descriptor_type: u8,
w_release: u16,
b_country: u8,
b_descriptors: u8,
b_sub_descriptor_type: u8,
w_sub_descriptor_length: u16,
}
impl HidDeviceDescriptor {
pub fn new() -> HidDeviceDescriptor {
HidDeviceDescriptor {
b_length: 9,
b_descriptor_type: Descriptor::HidDevice as u8,
w_release: 0x0100,
b_country: 0,
b_descriptors: 1,
b_sub_descriptor_type: 34, // Report
w_sub_descriptor_length: 34
}
}
pub fn into_u8_buf(&self, buf: &mut [u8]) -> usize {
buf[0] = self.b_length;
buf[1] = self.b_descriptor_type;
buf[2] = self.w_release as u8;
buf[3] = (self.w_release >> 8) as u8;
buf[4] = self.b_country;
buf[5] = self.b_descriptors;
buf[6] = self.b_sub_descriptor_type;
buf[7] = self.w_sub_descriptor_length as u8;
buf[8] = (self.w_sub_descriptor_length >> 8) as u8;
9
}
pub fn length(&self) -> usize {
9
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
#[repr(u8)]
pub enum SetupRequestType {
GetStatus = 0,
ClearFeature = 1,
Reserved = 2,
SetFeature = 3,
SetAddress = 5,
GetDescriptor = 6,
SetDescriptor = 7,
GetConfiguration = 8,
SetConfiguration = 9,
GetInterface = 10,
SetInterface = 11,
SynchFrame = 12,
Undefined = 15,
}
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
#[repr(u8)]
pub enum SetupClassRequestType {
Undefined = 0,
SetIdle = 10,
}
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
pub enum SetupDirection {
HostToDevice = 0,
DeviceToHost = 1,
}
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
pub enum SetupRequestClass {
Standard = 0,
Class = 1,
Vendor = 2,
Reserved = 3,
}
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(dead_code)]
pub enum SetupRecipient {
Device = 0,
Interface = 1,
Endpoint = 2,
Other = 3,
Reserved = 4,
}
#[derive(Debug)]
pub struct SetupRequest {
pub bm_request_type: u8,
pub b_request: u8,
pub w_value: u16,
pub w_index: u16,
pub w_length: u16,
}
impl SetupRequest {
pub fn new(buf: &[u32; 16]) -> SetupRequest {
SetupRequest {
bm_request_type: (buf[0] & 0xff) as u8,
b_request: ((buf[0] & 0x0000ff00) >> 8) as u8,
w_value: ((buf[0] & 0xffff0000) >> 16) as u16,
w_index: (buf[1] & 0x0000ffff) as u16,
w_length: ((buf[1] & 0xffff0000) >> 16) as u16,
}
}
#[allow(dead_code)]
pub fn parse(buf: &[u32; 16], req: &mut SetupRequest) {
req.bm_request_type = (buf[0] & 0xff) as u8;
req.b_request = ((buf[0] & 0x0000ff00) >> 8) as u8;
req.w_value = ((buf[0] & 0xffff0000) >> 16) as u16;
req.w_index = (buf[1] & 0x0000ffff) as u16;
req.w_length = ((buf[1] & 0xffff0000) >> 16) as u16
}
// 0 is Host-to-Device, 1 is Device-to-Host
pub fn data_direction(&self) -> SetupDirection {
let val = (self.bm_request_type & 0x80) >> 7;
match val {
0 => SetupDirection::HostToDevice,
_ => SetupDirection::DeviceToHost
}
}
// 0 is Standard, 1 is Class, 2 is Vendor, 3 is Reserved
pub fn req_type(&self) -> SetupRequestClass {
let val = (self.bm_request_type & 0x60) >> 5;
match val {
0 => SetupRequestClass::Standard,
1 => SetupRequestClass::Class,
2 => SetupRequestClass::Vendor,
_ => SetupRequestClass::Reserved,
}
}
// 0 is Device, 1 is Interface, 2 is Endpoint, 3 is Other
// 4..31 are Reserved
pub fn recipient(&self) -> SetupRecipient {
let val = self.bm_request_type & 0x1f;
match val {
0 => SetupRecipient::Device,
1 => SetupRecipient::Interface,
2 => SetupRecipient::Endpoint,
3 => SetupRecipient::Other,
_ => SetupRecipient::Reserved,
}
}
pub fn class_request(&self) -> SetupClassRequestType {
match self.b_request {
10 => SetupClassRequestType::SetIdle,
_ => SetupClassRequestType::Undefined,
}
}
pub fn request(&self) -> SetupRequestType {
match self.b_request {
0 => SetupRequestType::GetStatus,
1 => SetupRequestType::ClearFeature,
2 => SetupRequestType::Reserved,
3 => SetupRequestType::SetFeature,
4 => SetupRequestType::Reserved,
5 => SetupRequestType::SetAddress,
6 => SetupRequestType::GetDescriptor,
7 => SetupRequestType::SetDescriptor,
8 => SetupRequestType::GetConfiguration,
9 => SetupRequestType::SetConfiguration,
10 => SetupRequestType::GetInterface,
11 => SetupRequestType::SetInterface,
12 => SetupRequestType::SynchFrame,
_ => SetupRequestType::Undefined
}
}
pub fn value(&self) -> u16 {
self.w_value
}
pub fn index(&self) -> u16 {
self.w_index
}
pub fn length(&self) -> u16 {
self.w_length
}
}
pub struct U2fHidCommandFrame {
pub channel_id: u32,
pub command: u8,
pub bcount_high: u8,
pub bcount_low: u8,
pub data: [u8; U2F_REPORT_SIZE as usize - 7],
}
impl U2fHidCommandFrame {
pub fn into_u32_buf(&self, buf: &mut [u32; 16]) {
buf[0] = self.channel_id;
buf[1] = (self.command as u32) << 0 |
(self.bcount_high as u32) << 8 |
(self.bcount_low as u32) << 16 |
(self.data[0] as u32) << 24;
}
}
pub struct U2fHidSequenceFrame {
channel_id: u32,
frame_type: u8,
sequence_num: u8,
data: [u8; U2F_REPORT_SIZE as usize - 6],
}
|
apache-2.0
|
EspLight/EspLight-APP
|
app/src/main/java/android/widget/VerticalSeekBar.java
|
1851
|
package android.widget;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.widget.SeekBar;
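/**
 * A SeekBar rendered vertically: onMeasure/onSizeChanged swap width and height,
 * onDraw rotates the canvas by -90 degrees, and touch handling maps the Y
 * coordinate back onto the progress range.
 */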
public class VerticalSeekBar extends SeekBar {
public VerticalSeekBar(Context context) {
super(context);
}
public VerticalSeekBar(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
public VerticalSeekBar(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(h, w, oldh, oldw);
}
@Override
public synchronized void setProgress(int progress) // it is necessary for calling setProgress on click of a button
{
super.setProgress(progress);
onSizeChanged(getWidth(), getHeight(), 0, 0);
}
@Override
protected synchronized void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(heightMeasureSpec, widthMeasureSpec);
setMeasuredDimension(getMeasuredHeight(), getMeasuredWidth());
}
protected void onDraw(Canvas c) {
c.rotate(-90);
c.translate(-getHeight(), 0);
super.onDraw(c);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!isEnabled()) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_UP:
setProgress(getMax() - (int) (getMax() * event.getY() / getHeight()));
onSizeChanged(getWidth(), getHeight(), 0, 0);
break;
case MotionEvent.ACTION_CANCEL:
break;
}
return true;
}
}
|
apache-2.0
|
joewalnes/idea-community
|
java/java-impl/src/com/intellij/codeInsight/navigation/JavaGotoSuperHandler.java
|
4019
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.navigation;
import com.intellij.codeInsight.CodeInsightActionHandler;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.codeInsight.daemon.impl.PsiElementListNavigator;
import com.intellij.ide.util.MethodCellRenderer;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiSuperMethodUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class JavaGotoSuperHandler implements CodeInsightActionHandler {
public void invoke(@NotNull final Project project, @NotNull final Editor editor, @NotNull final PsiFile file) {
int offset = editor.getCaretModel().getOffset();
PsiElement[] superElements = findSuperElements(file, offset);
if (superElements == null || superElements.length == 0) return;
if (superElements.length == 1) {
PsiElement superElement = superElements[0].getNavigationElement();
OpenFileDescriptor descriptor = new OpenFileDescriptor(project, superElement.getContainingFile().getVirtualFile(), superElement.getTextOffset());
FileEditorManager.getInstance(project).openTextEditor(descriptor, true);
} else {
if (superElements[0] instanceof PsiMethod) {
boolean showMethodNames = !PsiUtil.allMethodsHaveSameSignature((PsiMethod[])superElements);
PsiElementListNavigator.openTargets(editor, (PsiMethod[])superElements,
CodeInsightBundle.message("goto.super.method.chooser.title"),
new MethodCellRenderer(showMethodNames));
}
else {
NavigationUtil.getPsiElementPopup(superElements, CodeInsightBundle.message("goto.super.class.chooser.title")).showInBestPositionFor(editor);
}
}
}
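/**
 * Collects the "go to super" targets at the given offset: for a class, its direct
 * supers excluding java.lang.Object; for a constructor, the constructor it delegates
 * to in the superclass; for any other method, its super methods. Returns null if
 * nothing applies.
 */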
@Nullable
private static PsiElement[] findSuperElements(PsiFile file, int offset) {
PsiElement element = file.findElementAt(offset);
if (element == null) return null;
PsiMember e = PsiTreeUtil.getParentOfType(element, PsiMethod.class, PsiClass.class);
if (e instanceof PsiClass) {
PsiClass aClass = (PsiClass) e;
List<PsiClass> allSupers = new ArrayList<PsiClass>(Arrays.asList(aClass.getSupers()));
for (Iterator<PsiClass> iterator = allSupers.iterator(); iterator.hasNext();) {
PsiClass superClass = iterator.next();
if ("java.lang.Object".equals(superClass.getQualifiedName())) iterator.remove();
}
return allSupers.toArray(new PsiClass[allSupers.size()]);
} else if (e instanceof PsiMethod) {
PsiMethod method = (PsiMethod) e;
if (method.isConstructor()) {
PsiMethod constructorInSuper = PsiSuperMethodUtil.findConstructorInSuper(method);
if (constructorInSuper != null) {
return new PsiMethod[]{constructorInSuper};
}
} else {
return method.findSuperMethods(false);
}
}
return null;
}
public boolean startInWriteAction() {
return false;
}
}
|
apache-2.0
|
be-plans/be
|
libcap-ng/plan.sh
|
813
|
pkg_name=libcap-ng
pkg_origin=core
pkg_version=0.7.8
pkg_source=http://people.redhat.com/sgrubb/$pkg_name/$pkg_name-$pkg_version.tar.gz
pkg_shasum=c21af997445cd4107a55d386f955c5ea6f6e96ead693e9151277c0ab5f97d05f
pkg_maintainer="The Habitat Maintainers <[email protected]>"
pkg_description="The libcap-ng library is intended to make programming with posix capabilities much easier than the traditional libcap library"
pkg_upstream_url="https://people.redhat.com/sgrubb/libcap-ng/"
pkg_license=('GPL-2.0' 'LGPL-2.1')
pkg_deps=(core/glibc lilian/python)
pkg_build_deps=(lilian/make lilian/gcc)
pkg_bin_dirs=(bin)
pkg_lib_dirs=(lib)
pkg_include_dirs=(include)
source ../defaults.sh
do_build() {
./configure --prefix="${pkg_prefix}" --enable-static=no --with-python="$(pkg_path_for python)"
make -j "$(nproc)"
}
|
apache-2.0
|
vsilaev/tascalate-javaflow
|
net.tascalate.javaflow.providers.asm5/src/main/java/org/apache/commons/javaflow/providers/asm5/ContinuableMethodVisitor.java
|
17135
|
/**
* Original work: copyright 1999-2004 The Apache Software Foundation
* (http://www.apache.org/)
*
* This project is based on the work licensed to the Apache Software
* Foundation (ASF) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Modified work: copyright 2013-2022 Valery Silaev (http://vsilaev.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.javaflow.providers.asm5;
import static org.objectweb.asm.Opcodes.*;
import java.util.List;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.InvokeDynamicInsnNode;
import org.objectweb.asm.tree.MethodInsnNode;
import org.objectweb.asm.tree.analysis.BasicValue;
import org.objectweb.asm.tree.analysis.Frame;
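/**
 * Emits the continuation support code for an instrumented method: visitCode writes
 * a restore prologue that switches on the frame index popped from the StackRecorder,
 * and each continuable call site gets capture code (visitCall) that pushes the live
 * stack and locals plus the frame index before returning a default value.
 */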
class ContinuableMethodVisitor extends MethodVisitor {
private static final String STACK_RECORDER = "org/apache/commons/javaflow/core/StackRecorder";
private static final String POP_METHOD = "pop";
private static final String PUSH_METHOD = "push";
private final ContinuableMethodNode methodNode;
private final Label startLabel = new Label();
private final List<Label> labels;
private final List<AbstractInsnNode> nodes;
private final int stackRecorderVar;
private int currentIndex = 0;
private Frame currentFrame = null;
ContinuableMethodVisitor(int api, ContinuableMethodNode a) {
super(api, a.mv);
this.methodNode = a;
this.labels = a.labels;
this.nodes = a.nodes;
this.stackRecorderVar = a.stackRecorderVar;
}
private static Type[] getArgumentTypes(AbstractInsnNode node) {
if (node instanceof MethodInsnNode) {
MethodInsnNode mnode = (MethodInsnNode)node;
return Type.getArgumentTypes(mnode.desc);
} else {
InvokeDynamicInsnNode mnode = (InvokeDynamicInsnNode)node;
return Type.getArgumentTypes(mnode.desc);
}
}
private static int getOwnerSize(AbstractInsnNode node) {
if (node instanceof MethodInsnNode) {
return node.getOpcode() == INVOKESTATIC ? 0 : 1;
} else {
// INVOKEDYNAMIC
return 0;
}
}
@Override
public void visitCode() {
mv.visitCode();
int fsize = labels.size();
Label[] restoreLabels = new Label[fsize];
for (int i = 0; i < restoreLabels.length; i++) {
restoreLabels[i] = new Label();
}
// verify if restoring
Label l0 = new Label();
// PC: StackRecorder stackRecorder = StackRecorder.get();
mv.visitMethodInsn(INVOKESTATIC, STACK_RECORDER, "get", "()L" + STACK_RECORDER + ";", false);
mv.visitInsn(DUP);
mv.visitVarInsn(ASTORE, stackRecorderVar);
mv.visitLabel(startLabel);
// PC: if (stackRecorder != null && !stackRecorder.isRestoring) {
mv.visitJumpInsn(IFNULL, l0);
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitFieldInsn(GETFIELD, STACK_RECORDER, "isRestoring", "Z");
mv.visitJumpInsn(IFEQ, l0);
mv.visitVarInsn(ALOAD, stackRecorderVar);
// PC: stackRecorder.popInt();
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, "popInt", "()I", false);
mv.visitTableSwitchInsn(0, fsize - 1, l0, restoreLabels);
// switch cases
for (int i = 0; i < fsize; i++) {
Label frameLabel = (Label) labels.get(i);
mv.visitLabel(restoreLabels[i]);
AbstractInsnNode mnode = (AbstractInsnNode) nodes.get(i);
//Frame frame = analyzer.getFrames()[methodNode.getIndex(mnode)];
Frame frame = methodNode.getFrameByNode(mnode);
// for each local variable store the value in locals popping it from the stack!
// locals
int lsize = frame.getLocals();
for (int j = lsize - 1; j >= 0; j--) {
BasicValue value = (BasicValue) frame.getLocal(j);
if (isNull(value)) {
mv.visitInsn(ACONST_NULL);
mv.visitVarInsn(ASTORE, j);
} else if (value == BasicValue.UNINITIALIZED_VALUE) {
// TODO ??
} else if (value == BasicValue.RETURNADDRESS_VALUE) {
// TODO ??
} else {
mv.visitVarInsn(ALOAD, stackRecorderVar);
Type type = value.getType();
if (value.isReference()) {
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Object", "()Ljava/lang/Object;", false);
Type t = value.getType();
String desc = t.getDescriptor();
if (desc.charAt(0) == '[') {
mv.visitTypeInsn(CHECKCAST, desc);
} else {
mv.visitTypeInsn(CHECKCAST, t.getInternalName());
}
mv.visitVarInsn(ASTORE, j);
} else {
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPopMethod(type), "()" + type.getDescriptor(), false);
mv.visitVarInsn(type.getOpcode(ISTORE), j);
}
}
}
if (frame instanceof MonitoringFrame) {
int[] monitoredLocals = ((MonitoringFrame) frame).getMonitored();
//System.out.println(System.identityHashCode(frame)+" AMonitored locals "+monitoredLocals.length);
for (int j = 0; j < monitoredLocals.length; j++) {
//System.out.println(System.identityHashCode(frame)+" AMonitored local "+monitoredLocals[j]);
mv.visitVarInsn(ALOAD, monitoredLocals[j]);
mv.visitInsn(MONITORENTER);
}
}
// stack
Type[] paramTypes = getArgumentTypes(mnode);
int argSize = paramTypes.length;
int ownerSize = getOwnerSize(mnode);
int initSize = mnode.getOpcode() == INVOKESPECIAL && MethodInsnNode.class.cast(mnode).name.equals("<init>") ? 2 : 0;
int ssize = frame.getStackSize();
for (int j = 0; j < ssize - argSize - ownerSize - initSize; j++) {
BasicValue value = (BasicValue) frame.getStack(j);
if (isNull(value)) {
mv.visitInsn(ACONST_NULL);
} else if (value == BasicValue.UNINITIALIZED_VALUE) {
// TODO ??
} else if (value == BasicValue.RETURNADDRESS_VALUE) {
// TODO ??
} else if (value.isReference()) {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Object", "()Ljava/lang/Object;", false);
mv.visitTypeInsn(CHECKCAST, value.getType().getInternalName());
} else {
Type type = value.getType();
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPopMethod(type), "()" + type.getDescriptor(), false);
}
}
if (ownerSize > 0) {
// Load the object whose method we are calling
BasicValue value = ((BasicValue) frame.getStack(ssize - argSize - 1));
if (isNull(value)) {
// If user code causes NPE, then we keep this behavior: load null to get NPE at runtime
mv.visitInsn(ACONST_NULL);
} else {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Reference", "()Ljava/lang/Object;", false);
mv.visitTypeInsn(CHECKCAST, value.getType().getInternalName());
}
}
// Create null types for the parameters of the method invocation
for (int j = 0; j < argSize; j++) {
pushDefault(paramTypes[j]);
}
// continue to the next method
mv.visitJumpInsn(GOTO, frameLabel);
}
// PC: }
// end of start block
mv.visitLabel(l0);
}
@Override
public void visitLabel(Label label) {
if (currentIndex < labels.size() && label == labels.get(currentIndex)) {
//int i = methodNode.getIndex((AbstractInsnNode)nodes.get(currentIndex));
//currentFrame = analyzer.getFrames()[i];
currentFrame = methodNode.getFrameByNode(nodes.get(currentIndex));
}
mv.visitLabel(label);
}
@Override
public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
visitCall(Opcodes.INVOKEDYNAMIC, desc);
}
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean intf) {
mv.visitMethodInsn(opcode, owner, name, desc, intf);
visitCall(opcode, desc);
}
@Deprecated
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc) {
mv.visitMethodInsn(opcode, owner, name, desc);
visitCall(opcode, desc);
}
private void visitCall(int opcode, String desc) {
if (currentFrame != null) {
Label fl = new Label();
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitJumpInsn(IFNULL, fl);
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitFieldInsn(GETFIELD, STACK_RECORDER, "isCapturing", "Z");
mv.visitJumpInsn(IFEQ, fl);
// save stack
Type returnType = Type.getReturnType(desc);
boolean hasReturn = returnType != Type.VOID_TYPE;
if (hasReturn) {
mv.visitInsn(returnType.getSize() == 1 ? POP : POP2);
}
Type[] params = Type.getArgumentTypes(desc);
int argSize = params.length;
int ownerSize = opcode == INVOKESTATIC || opcode == INVOKEDYNAMIC ? 0 : 1; // TODO
int ssize = currentFrame.getStackSize() - argSize - ownerSize;
for (int i = ssize - 1; i >= 0; i--) {
BasicValue value = (BasicValue) currentFrame.getStack(i);
if (isNull(value)) {
mv.visitInsn(POP);
} else if (value == BasicValue.UNINITIALIZED_VALUE) {
// TODO ??
} else if (value.isReference()) {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitInsn(SWAP);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Object", "(Ljava/lang/Object;)V", false);
} else {
Type type = value.getType();
if (type.getSize() > 1) {
mv.visitInsn(ACONST_NULL); // dummy stack entry
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitInsn(DUP2_X2); // swap2 for long/double
mv.visitInsn(POP2);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false);
mv.visitInsn(POP); // remove dummy stack entry
} else {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitInsn(SWAP);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false);
}
}
}
boolean isInstanceMethod = (methodNode.access & ACC_STATIC) == 0;
if (isInstanceMethod) {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitVarInsn(ALOAD, 0);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Reference", "(Ljava/lang/Object;)V", false);
}
// save locals
int fsize = currentFrame.getLocals();
for (int j = 0; j < fsize; j++) {
BasicValue value = (BasicValue) currentFrame.getLocal(j);
if (isNull(value)) {
// no need to save null
} else if (value == BasicValue.UNINITIALIZED_VALUE) {
// no need to save uninitialized objects
} else if (value.isReference()) {
mv.visitVarInsn(ALOAD, stackRecorderVar);
mv.visitVarInsn(ALOAD, j);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Object", "(Ljava/lang/Object;)V", false);
} else {
mv.visitVarInsn(ALOAD, stackRecorderVar);
Type type = value.getType();
mv.visitVarInsn(type.getOpcode(ILOAD), j);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false);
}
}
mv.visitVarInsn(ALOAD, stackRecorderVar);
if (currentIndex <= 5)
mv.visitInsn(ICONST_0 + currentIndex);
else
mv.visitIntInsn(SIPUSH, currentIndex);
mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, "pushInt", "(I)V", false);
if (currentFrame instanceof MonitoringFrame) {
int[] monitoredLocals = ((MonitoringFrame) currentFrame).getMonitored();
//System.out.println(System.identityHashCode(currentFrame)+" Monitored locals "+monitoredLocals.length);
for (int j = 0; j < monitoredLocals.length; j++) {
//System.out.println(System.identityHashCode(currentFrame)+" Monitored local "+monitoredLocals[j]);
mv.visitVarInsn(ALOAD, monitoredLocals[j]);
mv.visitInsn(MONITOREXIT);
}
}
Type methodReturnType = Type.getReturnType(methodNode.desc);
pushDefault(methodReturnType);
mv.visitInsn(methodReturnType.getOpcode(IRETURN));
mv.visitLabel(fl);
currentIndex++;
currentFrame = null;
}
}
@Override
public void visitMaxs(int maxStack, int maxLocals) {
Label endLabel = new Label();
mv.visitLabel(endLabel);
mv.visitLocalVariable("__stackRecorder", "L" + STACK_RECORDER + ";", null, startLabel, endLabel, stackRecorderVar);
mv.visitMaxs(maxStack, maxLocals + 1);
//was mv.visitMaxs(0, 0);
}
private static boolean isNull(BasicValue value) {
if (null == value)
return true;
if (!value.isReference())
return false;
Type type = value.getType();
return "Lnull;".equals(type.getDescriptor());
}
private void pushDefault(Type type) {
switch (type.getSort()) {
case Type.VOID:
break;
case Type.DOUBLE:
mv.visitInsn(DCONST_0);
break;
case Type.LONG:
mv.visitInsn(LCONST_0);
break;
case Type.FLOAT:
mv.visitInsn(FCONST_0);
break;
case Type.OBJECT:
case Type.ARRAY:
mv.visitInsn(ACONST_NULL);
break;
default:
mv.visitInsn(ICONST_0);
break;
}
}
private static String[] SUFFIXES = {
"Object", // 0 void
"Int", // 1 boolean
"Int", // 2 char
"Int", // 3 byte
"Int", // 4 short
"Int", // 5 int
"Float", // 6 float
"Long", // 7 long
"Double", // 8 double
"Object", // 9 array
"Object", // 10 object
};
private static String getPopMethod(Type type) {
return POP_METHOD + SUFFIXES[type.getSort()];
}
private static String getPushMethod(Type type) {
return PUSH_METHOD + SUFFIXES[type.getSort()];
}
}
|
apache-2.0
|
ppnathan/HMSHS
|
problems/DrowsyDriverModel/BeliefsCollection.cc
|
565
|
#include "DrowsyDriverModel.h"
#include "BeliefSet.h"
#include <iostream>
#include <list>
#include <fstream>
#include <cstdlib>
using namespace std;
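// Samples nBeliefs random beliefs for the DrowsyDriverModel, starting from an
// all-zero continuous state, discrete state 0, and all probability mass on mode 0.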
int main(int argc, char** argv) {
DrowsyDriverModel hmshsmodel;
int nBeliefs = 50000;
CState initCState(hmshsmodel.getNumCStateVar(), 0);
DState initDState = 0;
vector<double> initQprob(hmshsmodel.getNumDState(), 0);
initQprob[0] = 1.0;
BeliefSet RandomBeliefs(nBeliefs);
RandomBeliefs.SampleBelief(hmshsmodel, initCState, initDState, initQprob);
return 0;
}
|
apache-2.0
|
ilfate/laravel.ilfate.net
|
app/database/migrations/2014_11_20_152233_create_users_table.php
|
836
|
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
use Illuminate\Support\Facades\Schema;
class CreateUsersTable extends Migration {
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::dropIfExists('users');
Schema::create('users', function($table)
{
$table->increments('id', 5);
$table->string('email', 60);
$table->string('password', 60);
$table->string('name', 20)->nullable();
$table->string('remember_token', 100)->nullable();
$table->timestamp('last_visit');
$table->timestamps();
$table->unique('email');
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
//
}
}
|
apache-2.0
|
spinnaker/deck
|
packages/core/src/presentation/navigation/pageSection.component.ts
|
2078
|
import type { IChangesObject, IComponentOptions, IController, IOnChangesObject } from 'angular';
import { module } from 'angular';
import type { INavigationPage } from './PageNavigationState';
import { PageNavigationState } from './PageNavigationState';
interface IPageSectionOnChanges extends IOnChangesObject {
visible: IChangesObject<boolean>;
label: IChangesObject<string>;
badge: IChangesObject<string>;
}
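/**
 * Registers this page section with PageNavigationState on init and keeps the
 * section's visibility, label, and badge in sync when the bindings change.
 */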
class PageSectionController implements IController {
public key: string;
public label: string;
public badge: string;
public visible: boolean;
public noWrapper: boolean;
private pageConfig: INavigationPage;
public $onInit(): void {
this.visible = this.visible !== false;
this.pageConfig = {
key: this.key,
label: this.label,
visible: this.visible,
badge: this.badge,
};
PageNavigationState.registerPage(this.pageConfig);
}
public $onChanges(changes: IPageSectionOnChanges): void {
if (changes.visible && !changes.visible.isFirstChange()) {
this.pageConfig.visible = changes.visible.currentValue;
}
if (changes.label && !changes.label.isFirstChange()) {
this.pageConfig.label = changes.label.currentValue;
}
if (changes.badge && !changes.badge.isFirstChange()) {
this.pageConfig.badge = changes.badge.currentValue;
}
}
}
const pageSectionComponent: IComponentOptions = {
bindings: {
key: '@',
label: '@',
badge: '<',
visible: '<',
noWrapper: '<',
},
controller: PageSectionController,
transclude: true,
template: `
<div ng-if="$ctrl.pageConfig.visible" class="page-subheading flex-1" data-page-id="{{$ctrl.pageConfig.key}}">
<h4 class="sticky-header">{{$ctrl.pageConfig.label}}</h4>
<div ng-class="$ctrl.noWrapper ? 'no-wrapper' : 'section-body'" data-page-content="{{$ctrl.pageConfig.key}}" ng-transclude></div>
</div>
`,
};
export const PAGE_SECTION_COMPONENT = 'spinnaker.core.presentation.navigation.pageSection';
module(PAGE_SECTION_COMPONENT, []).component('pageSection', pageSectionComponent);
|
apache-2.0
|
sky15179/Debug
|
TestTabelController/elongframework/eLongFramework/eLongControls/eLongSegmentedControl.h
|
1004
|
//
// eLongSegmentedControl.h
// ElongClient
//
// Created by zhucuirong on 15/10/29.
// Copyright © 2015 elong. All rights reserved.
//
#import <UIKit/UIKit.h>
@class eLongSegmentedControl;
@protocol eLongSegmentedControlDelegate <NSObject>
- (void)eLongSegmentedControl:(eLongSegmentedControl *)segmentedControl clickAtIndex:(NSInteger)index;
@end
@interface eLongSegmentedControl : UIView
@property (nonatomic, assign) NSInteger selectedSegmentIndex;
@property (nonatomic, readonly) NSUInteger numberOfSegments;
@property (nonatomic, strong) UIFont *font;
@property (nonatomic, strong) UIColor *hairlineColor;
@property (nonatomic, weak) id<eLongSegmentedControlDelegate> delegate;
- (id)initWithFrame:(CGRect)frame titles:(NSArray *)titleArray normalColor:(UIColor *)normalColor selectedColor:(UIColor *)selectedColor indicatorView:(UIView *)indicatorView; // indicatorView.frame = {0, 0, width, height}
- (void)setSelectedSegmentIndex:(NSInteger)segment animated:(BOOL)animated;
@end
|
apache-2.0
|
yuananf/presto
|
presto-main/src/test/java/com/facebook/presto/execution/buffer/BufferTestUtils.java
|
9646
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.OutputBuffers.OutputBufferId;
import com.facebook.presto.block.BlockAssertions;
import com.facebook.presto.operator.PageAssertions;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.concurrent.MoreFutures.tryGetFutureValue;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
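/**
 * Shared helpers for output buffer tests: creating test pages and buffer results,
 * enqueueing pages, and asserting buffer/queue state.
 */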
public final class BufferTestUtils
{
private BufferTestUtils() {}
static final PagesSerde PAGES_SERDE = testingPagesSerde();
static final Duration NO_WAIT = new Duration(0, MILLISECONDS);
static final Duration MAX_WAIT = new Duration(1, SECONDS);
private static final DataSize BUFFERED_PAGE_SIZE = new DataSize(PAGES_SERDE.serialize(createPage(42)).getRetainedSizeInBytes(), BYTE);
static BufferResult getFuture(ListenableFuture<BufferResult> future, Duration maxWait)
{
Optional<BufferResult> bufferResult = tryGetFutureValue(future, (int) maxWait.toMillis(), MILLISECONDS);
checkArgument(bufferResult.isPresent(), "bufferResult is empty");
return bufferResult.get();
}
static void assertBufferResultEquals(List<? extends Type> types, BufferResult actual, BufferResult expected)
{
assertEquals(actual.getSerializedPages().size(), expected.getSerializedPages().size(), "page count");
assertEquals(actual.getToken(), expected.getToken(), "token");
for (int i = 0; i < actual.getSerializedPages().size(); i++) {
Page actualPage = PAGES_SERDE.deserialize(actual.getSerializedPages().get(i));
Page expectedPage = PAGES_SERDE.deserialize(expected.getSerializedPages().get(i));
assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount());
PageAssertions.assertPageEquals(types, actualPage, expectedPage);
}
assertEquals(actual.isBufferComplete(), expected.isBufferComplete(), "buffer complete");
}
static BufferResult createBufferResult(String bufferId, long token, List<Page> pages)
{
checkArgument(!pages.isEmpty(), "pages is empty");
return new BufferResult(
bufferId,
token,
token + pages.size(),
false,
pages.stream()
.map(PAGES_SERDE::serialize)
.collect(Collectors.toList()));
}
public static Page createPage(int i)
{
return new Page(BlockAssertions.createLongsBlock(i));
}
static DataSize sizeOfPages(int count)
{
return new DataSize(BUFFERED_PAGE_SIZE.toBytes() * count, BYTE);
}
static BufferResult getBufferResult(OutputBuffer buffer, OutputBufferId bufferId, long sequenceId, DataSize maxSize, Duration maxWait)
{
ListenableFuture<BufferResult> future = buffer.get(bufferId, sequenceId, maxSize);
return getFuture(future, maxWait);
}
// TODO: remove this after PR #7987 is landed
static void acknowledgeBufferResult(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, long sequenceId)
{
buffer.acknowledge(bufferId, sequenceId);
}
static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page)
{
ListenableFuture<?> future = buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page)));
assertFalse(future.isDone());
return future;
}
static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page, int partition)
{
ListenableFuture<?> future = buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page)));
assertFalse(future.isDone());
return future;
}
public static void addPage(OutputBuffer buffer, Page page)
{
assertTrue(buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page))).isDone(), "Expected add page to not block");
}
public static void addPage(OutputBuffer buffer, Page page, int partition)
{
assertTrue(buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page))).isDone(), "Expected add page to not block");
}
static void assertQueueState(
OutputBuffer buffer,
OutputBuffers.OutputBufferId bufferId,
int bufferedPages,
int pagesSent)
{
assertEquals(
getBufferInfo(buffer, bufferId),
new BufferInfo(
bufferId,
false,
bufferedPages,
pagesSent,
new PageBufferInfo(
bufferId.getId(),
bufferedPages,
sizeOfPages(bufferedPages).toBytes(),
bufferedPages + pagesSent, // every page has one row
bufferedPages + pagesSent)));
}
static void assertQueueState(
OutputBuffer buffer,
int unassignedPages,
OutputBuffers.OutputBufferId bufferId,
int bufferedPages,
int pagesSent)
{
OutputBufferInfo outputBufferInfo = buffer.getInfo();
long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
assertEquals(
outputBufferInfo.getTotalBufferedPages() - assignedPages,
unassignedPages,
"unassignedPages");
BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
.filter(info -> info.getBufferId().equals(bufferId))
.findAny()
.orElse(null);
assertEquals(
bufferInfo,
new BufferInfo(
bufferId,
false,
bufferedPages,
pagesSent,
new PageBufferInfo(
bufferId.getId(),
bufferedPages,
sizeOfPages(bufferedPages).toBytes(),
bufferedPages + pagesSent, // every page has one row
bufferedPages + pagesSent)));
}
@SuppressWarnings("ConstantConditions")
static void assertQueueClosed(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, int pagesSent)
{
BufferInfo bufferInfo = getBufferInfo(buffer, bufferId);
assertEquals(bufferInfo.getBufferedPages(), 0);
assertEquals(bufferInfo.getPagesSent(), pagesSent);
assertEquals(bufferInfo.isFinished(), true);
}
@SuppressWarnings("ConstantConditions")
static void assertQueueClosed(OutputBuffer buffer, int unassignedPages, OutputBuffers.OutputBufferId bufferId, int pagesSent)
{
OutputBufferInfo outputBufferInfo = buffer.getInfo();
long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
assertEquals(
outputBufferInfo.getTotalBufferedPages() - assignedPages,
unassignedPages,
"unassignedPages");
BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
.filter(info -> info.getBufferId().equals(bufferId))
.findAny()
.orElse(null);
assertEquals(bufferInfo.getBufferedPages(), 0);
assertEquals(bufferInfo.getPagesSent(), pagesSent);
assertEquals(bufferInfo.isFinished(), true);
}
static void assertFinished(OutputBuffer buffer)
{
assertTrue(buffer.isFinished());
for (BufferInfo bufferInfo : buffer.getInfo().getBuffers()) {
assertTrue(bufferInfo.isFinished());
assertEquals(bufferInfo.getBufferedPages(), 0);
}
}
static void assertFutureIsDone(Future<?> future)
{
tryGetFutureValue(future, 5, SECONDS);
assertTrue(future.isDone());
}
private static BufferInfo getBufferInfo(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId)
{
for (BufferInfo bufferInfo : buffer.getInfo().getBuffers()) {
if (bufferInfo.getBufferId().equals(bufferId)) {
return bufferInfo;
}
}
return null;
}
}
|
apache-2.0
|
liszt-code/liszt
|
README.md
|
1560
|
# liszt
[](https://travis-ci.org/liszt-code/liszt)
[](https://coveralls.io/github/liszt-code/liszt?branch=master)
Directory of people in a building.
## Motive
Liszt aims to provide an easy-to-use and secure interface for visitors to identify and communicate with residents.
### Design
#### Querying for residents
Typically, buildings assign identifiers to their units. This is one way visitors can query for a resident they wish to reach. Residents should attach their identities to these unit identifiers in order for visitors to query for them by name.
#### Communication
A simple phone call from a known number to a resident's registered phone number will cover the majority of use cases. However, it will be useful to be able to reach residents by other means, such as SMS or other third party messaging applications. Liszt supports many of these communication methods in order to make it as easy as possible for residents. Building managers will not have to do anything for this to work, as it is entirely self-service.
#### Features
* [ ] Opening the door via touch tone
* [ ] Opening the door via text message
* [ ] Opening the door from a resident's registered identity \(phone number / 3rd party ID\)
* [ ] Self-service website
* [ ] Management interface for enabling/disabling features
* [ ] Streaming video feed of front door camera
* [ ] Still shot of front door camera
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Trixis/Trixis inula/ Syn. Trixis frutescens latifolia/README.md
|
195
|
# Trixis frutescens var. latifolia Less. VARIETY
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
omarKhatib/social
|
fullStackAUTH/public/views/images.tpl.html
|
12572
|
<title>images</title>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!--
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
<link rel="stylesheet" href="https://www.w3schools.com/lib/w3-theme-blue-grey.css">
<link rel='stylesheet' href='https://fonts.googleapis.com/css?family=Open+Sans'>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
<style>
html,body,h1,h2,h3,h4,h5 {font-family: "Open Sans", sans-serif}
</style>
-->
<div class="w3-theme-l5">
<!-- Navbar -->
<div class="w3-top" ng-init='getImages();getProfileImage();loadConnection();getOldNotifications()'>
<div class="w3-bar w3-theme-d2 w3-left-align w3-large">
<a class="w3-bar-item w3-button w3-hide-medium w3-hide-large w3-right w3-padding-large w3-hover-white w3-large w3-theme-d2" href="javascript:void(0);" onclick="openNav()"><i class="fa fa-bars"></i></a>
<a href="#home" class="w3-bar-item w3-button w3-padding-large w3-theme-d4"><i class="fa fa-home w3-margin-right"></i>friends</a>
<a href="#feeds" class="w3-bar-item w3-button w3-hide-small w3-padding-large w3-hover-white" title="feeds"><i class="fa fa-globe"></i></a>
<a href="#home" class="w3-bar-item w3-button w3-hide-small w3-padding-large w3-hover-white" title="home"><i class="fa fa-user"></i></a>
<a href="#messenger" class="w3-bar-item w3-button w3-hide-small w3-padding-large w3-hover-white" title="messenger"><i class="fa fa-envelope"></i></a>
<div class="w3-dropdown-hover w3-hide-small">
<button class="w3-button w3-padding-large" title="Notifications"><i class="fa fa-bell"></i><span class="w3-badge w3-right w3-small w3-green">{{oldNotificationsNum+notificationNum}}</span></button>
<div class="w3-dropdown-content w3-card-4 w3-bar-block" style="width:300px">
<span ng-click='removeNotification(notification._id)' class="w3-bar-item w3-button" ng-repeat='notification in notifications | orderBy:"-"'>{{notification.from}} {{notification.action}}s you<span ng-if='notification.action!="follow" && notification.action!="unfollow" && notification.action!="message"'>r post : {{notification.post}}</span> <hr></span>
<span ng-click='removeNotification(notification._id)' class="w3-bar-item w3-button" ng-repeat='notification in oldNotifications | orderBy:"-"'>{{notification.from}} {{notification.action}}s you<span ng-if='notification.action!="follow" && notification.action!="unfollow" && notification.action!="message"'>r post : {{notification.post}}</span> <hr></span>
</div>
</div>
<div class="w3-dropdown-hover w3-right w3-padding-large">
<img ng-src="{{i}}" class="w3-circle" style="height:25px;width:25px" alt="profile image">
<div class="w3-dropdown-content w3-bar-block w3-border" style="right:0">
<a href="#settings" class="w3-bar-item w3-button">settings</a>
<a href="#" class="w3-bar-item w3-button" ng-click='signout()'>logout</a>
</div>
</div>
</div>
</div>
<!-- Navbar on small screens -->
<div id="navDemo" class="w3-bar-block w3-theme-d2 w3-hide w3-hide-large w3-hide-medium w3-large">
<a href="#home" class="w3-bar-item w3-button w3-padding-large">home</a>
<a href="#messenger" class="w3-bar-item w3-button w3-padding-large">messenger</a>
<a href="#home" class="w3-bar-item w3-button w3-padding-large">home</a>
<a href="#settings" class="w3-bar-item w3-button w3-padding-large">settings</a>
</div>
<!-- Page Container -->
<div class="w3-container w3-content" style="max-width:1400px;margin-top:80px">
<!-- The Grid -->
<div class="w3-row">
<!-- Left Column -->
<div class="w3-col m3">
<!-- Profile -->
<div class="w3-card-2 w3-round w3-white">
<div class="w3-container">
<h4 class="w3-center userName">{{username}}</h4>
<p class="w3-center"><img id="userImage" ng-src='{{i}}' alt="profile image">
</p>
<hr>
<center>
<p class="userData" ><i class="fa fa-pencil fa-fw w3-margin-right w3-text-theme"></i> {{job}}</p>
<p class="userData"><i class="fa fa-home fa-fw w3-margin-right w3-text-theme"></i> {{POB}} </p>
<p class="userData"><i class="fa fa-birthday-cake fa-fw w3-margin-right w3-text-theme"></i> {{DOB}}</p>
</center>
</div>
</div>
<br>
<!-- Accordion -->
<div class="w3-card-2 w3-round">
<div class="w3-white">
<a href='#settings' class="w3-button w3-block w3-theme-l1 w3-left-align"><i class="fa fa-circle-o-notch fa-fw w3-margin-right"></i> Settings</a>
<a href='#images' class="w3-button w3-block w3-theme-l1 w3-left-align"><i class="fa fa-file-image-o fa-fw w3-margin-right"></i> Photos</a>
<div id="Demo3" class="w3-hide w3-container">
<div class="w3-row-padding">
<br>
</div>
</div>
</div>
</div>
<br>
<!-- Interests -->
<div class="w3-card-2 w3-round w3-white w3-hide-small">
<div class="w3-container">
<ul class="nav nav-tabs">
<li class='followTabs' ><a data-toggle="tab" data-target="#followers">followers</a></li>
<li class='followTabs'><a data-toggle="tab" data-target="#following">following</a></li>
<li class='followTabs'><a data-toggle="tab" data-target="#suggessions">suggessions</a></li>
</ul>
<div class="tab-content" >
<div ng-init='getFollowers()' id="followers" class="tab-pane fade in active">
<h3>followers ({{followers.length}})</h3>
<hr>
<h3 ng-if='followers.length==0' style='color:#c1c5c5'>you don't have any followers yet</h3>
<div style='margin-top: 44px;' ng-repeat='t in followers'>
<p ><span>{{t}}</span></p>
<hr>
</div>
</div>
<div ng-init='getFollowing()' id="following" class="tab-pane fade">
<h3>following ({{following.length}})</h3>
<hr>
<h3 ng-if='following.length==0' style='color:#c1c5c5'>you haven't followed anyone yet</h3>
<div ng-repeat='f in following'>
{{f}} <button ng-click='unfollow(f)' class='btn btn-danger' style='margin-right:10px'>unfollow</button>
<hr>
</div>
</div>
<div ng-init='getUsers()' id="suggessions" class="tab-pane fade" >
<h3>suggessions</h3>
<hr>
<input ng-model='keySearch' class="form-control w3-bar-item" type='text' placeholder="user search" style='margin-left: 20%; margin-top: 7px; width: 80%; border-radius: 10px;'>
<div style='margin-top: 44px; width:100%;margin-left:auto;margin-right:auto' ng-repeat='x in allUsers | filter:keySearch' ng-if='x.username!=username && following.indexOf(x.username)==-1'>
<img style='height:50px;width:50px;border-radius:100px;margin-right:10px' ng-src='{{x.profileImage}}'><span style='margin-right:10px'>{{x.username}}</span><button ng-click='follow(x.username)' class='btn'>follow</button>
<hr>
</div>
</div>
</div>
</div>
</div>
<br>
<!-- Alert Box -->
<div class="w3-container w3-display-container w3-round w3-theme-l4 w3-border w3-theme-border w3-margin-bottom w3-hide-small">
<span onclick="this.parentElement.style.display='none'" class="w3-button w3-theme-l3 w3-display-topright">
<i class="fa fa-remove"></i>
</span>
<p><strong>Hey!</strong></p>
<p>People are looking at your profile. Find out who.</p>
</div>
<!-- End Left Column -->
</div>
<!-- Middle Column -->
<div class="w3-col m7">
<div class="w3-row-padding">
<div class="w3-col m12">
<div class='textAreaAndButton'>
<h2> Gallery </h2>
<input type='url' placeholder='image URL' ng-model='image' class="form-control postTextArea">
<center>
<button id='addbtn' ng-click='addImage()' class='btn btn-primary postBtn'>post</button>
</center>
</div>
<!-- </div>-->
</div>
</div>
<!--
<div ng-repeat = 'post in posts.data | orderBy:"-likes"'>
<div class="card w-75 post" >
<span id='remove' class='glyphicon glyphicon-remove' ng-click='remove(post._id)'></span>
<div class="card-block" ng-init='isEditible=false; showCommentField=false'>
<img id='postProfileImage' ng-src="{{i}}" alt="Avatar" class="w3-left w3-margin-right" style="width:60px">
<h4 id='postUsername' style='float:left'><b>{{post.username}}</b></h4><br>
<p contentEditable={{isEditible}} autofocus="autofocus" class="card-text" id='{{$index}}'>{{post.post}}</p>
<img id='postImg' ng-src='{{post.image}}' />
<span id='like' class="glyphicon glyphicon-thumbs-up postIcons" ng-click='like(post._id,post.post,post.likes,post.disLikes)'>{{post.likes}}</span>
<span id='dislike' class="glyphicon glyphicon-thumbs-down postIcons" ng-click='disLike(post._id,post.post,post.likes,post.disLikes)'>{{post.disLikes}}</span>
<span id='edit' class='glyphicon glyphicon-pencil postIcons' ng-click='isEditible=true' ng-show='!isEditible'></span>
<span id='comment' class='glyphicon glyphicon-comment postIcons' ng-click='showComments=!showComments;getComments(post._id)'></span>
<span id='applyEdits' ng-if='isEditible' ng-click='edit(post._id,$index,post.likes,post.disLikes)' class='glyphicon glyphicon-ok postIcons'></span>
</div>
<div ng-show='showComments' class='commentsBlock'>
<hr>
<input id='commentTextField' ng-model='comment' placeholder='comment' type='text' class='form-control'>
<button ng-click='addComment(post._id, comment)' class='commentbtn'>add</button>
<p ng-repeat='c in comments track by $index'>
<span>{{c}}</span>
</p>
</div>
</div>
</div>
-->
<div class='row' style='display:inline-block;margin-left:20%;margin-right:20%;width:60%'>
<h1 ng-if='images.length==0' style='color:#c4cad0'>No private images yet</h1>
<div ng-repeat='img in images' class='col-md-12' style='margin:5% auto;'>
<span id='remove' class='glyphicon glyphicon-remove' ng-click='removeImage(img)'></span>
<img ng-src='{{img}}' style='width: 90%;height:auto'>
<hr>
</div>
</div>
</div>
<!-- Right Column -->
<div class="w3-col m2">
<div class="widget">
<!-- weather widget start --><a target="_blank" href="http://www.booked.net/weather/beirut-977"><img style='width: 98.5%;' src="https://w.bookcdn.com/weather/picture/32_977_1_1_3498db_250_2980b9_ffffff_ffffff_1_2071c9_ffffff_0_6.png?scode=124&domid=w209&anc_id=37372" alt="booked.net"/></a><!-- weather widget end -->
</div>
<br>
<div class="w3-card-2 w3-round w3-white w3-padding-16 w3-center">
<p>ADS</p>
</div>
<br>
<div class="w3-card-2 w3-round w3-white w3-padding-16 w3-center">
<p>ADS</p>
</div>
<br>
<div class="w3-card-2 w3-round w3-white w3-padding-32 w3-center">
<p><i class="fa fa-bug w3-xxlarge"></i></p>
</div>
<!-- End Right Column -->
</div>
<!-- End Grid -->
</div>
<!-- End Page Container -->
</div>
<br>
<!-- Footer -->
<footer class="w3-container w3-theme-d3 w3-padding-16">
<h5>Footer</h5>
</footer>
<footer class="w3-container w3-theme-d5">
</footer>
<script>
// Accordion
function myFunction(id) {
var x = document.getElementById(id);
if (x.className.indexOf("w3-show") == -1) {
x.className += " w3-show";
x.previousElementSibling.className += " w3-theme-d1";
} else {
x.className = x.className.replace("w3-show", "");
x.previousElementSibling.className =
x.previousElementSibling.className.replace(" w3-theme-d1", "");
}
}
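// Example usage (an assumption for illustration, not wiring present in this fragment):
// an accordion trigger such as <a onclick="myFunction('Demo3')">Photos</a> would
// expand/collapse the #Demo3 panel by toggling the w3-show class.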
// Used to toggle the menu on smaller screens when clicking on the menu button
function openNav() {
var x = document.getElementById("navDemo");
if (x.className.indexOf("w3-show") == -1) {
x.className += " w3-show";
} else {
x.className = x.className.replace(" w3-show", "");
}
}
</script>
</div>
|
apache-2.0
|
krobertson/knife-xenserver
|
vendor/fog/spec/ecloud/models/backup_internet_service_spec.rb
|
2266
|
require 'ecloud/spec_helper'
if Fog.mocking?
describe "Fog::Compute::Ecloud::BackupInternetService", :type => :mock_tmrk_ecloud_model do
subject { @vcloud.vdcs[0].backup_internet_services[0] }
describe :class do
subject { Fog::Compute::Ecloud::BackupInternetService }
it { should have_identity(:href) }
it { should have_only_these_attributes([:href, :name, :id, :protocol, :enabled, :description, :timeout, :redirect_url, :monitor]) }
end
context "with no uri" do
subject { Fog::Compute::Ecloud::BackupInternetService.new() }
it { should have_all_attributes_be_nil }
end
context "as a collection member" do
subject { @vcloud.vdcs[0].backup_internet_services[0].reload }
let(:composed_service_data) { @vcloud.vdcs[0].backup_internet_services[0].send(:_compose_service_data) }
it { should be_an_instance_of(Fog::Compute::Ecloud::BackupInternetService) }
its(:href) { should == @mock_backup_service.href }
its(:identity) { should == @mock_backup_service.href }
its(:name) { should == @mock_backup_service.name }
its(:id) { should == @mock_backup_service.object_id.to_s }
its(:protocol) { should == @mock_backup_service.protocol }
its(:enabled) { should == @mock_backup_service.enabled.to_s }
its(:description) { should == @mock_backup_service.description }
its(:timeout) { should == @mock_backup_service.timeout.to_s }
its(:redirect_url) { should == (@mock_backup_service.redirect_url || "") }
its(:monitor) { should == nil }
specify { composed_service_data[:href].should == subject.href.to_s }
specify { composed_service_data[:name].should == subject.name }
specify { composed_service_data[:id].should == subject.id.to_s }
specify { composed_service_data[:protocol].should == subject.protocol }
specify { composed_service_data[:enabled].should == subject.enabled.to_s }
specify { composed_service_data[:description].should == subject.description }
specify { composed_service_data[:timeout].should == subject.timeout.to_s }
end
end
end
|
apache-2.0
|
shaeberling/retrostore
|
appengine/src/main/java/org/retrostore/resources/ImageServiceWrapper.java
|
1268
|
/*
* Copyright 2017, Sascha Häberling
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.retrostore.resources;
import java.util.Optional;
/**
* Functionality around the image service.
*/
public interface ImageServiceWrapper {
int DEFAULT_SCREENSHOT_SIZE = 800;
/**
* Returns a URL that serves the image with the given blob key.
*
* @param blobKey the blob key of the image to serve.
* @param imageSize the maximum size of the longest side.
* @return The URL to serve the image in the given size.
*/
Optional<String> getServingUrl(String blobKey, int imageSize);
/**
* Like {@link #getServingUrl(String, int)} but with a default size.
*/
Optional<String> getServingUrl(String blobKey);
}
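/*
 * A minimal usage sketch (illustrative only, not part of the RetroStore code base;
 * "screenshotBlobKey" and "FALLBACK_URL" are hypothetical names):
 *
 *   Optional<String> url = imageService.getServingUrl(screenshotBlobKey, ImageServiceWrapper.DEFAULT_SCREENSHOT_SIZE);
 *   String servingUrl = url.orElse(FALLBACK_URL);
 *
 * Callers that are happy with the default size can use the single-argument overload.
 */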
|
apache-2.0
|
jweixin/jwx
|
src/test/java/com/github/jweixin/jwx/WeixinProperConfigTest.java
|
591
|
package com.github.jweixin.jwx;
import org.junit.Assert;
import org.junit.Test;
import com.github.jweixin.jwx.context.InitialWeixinConfigureException;
import com.github.jweixin.jwx.context.WeixinContext;
import com.github.jweixin.jwx.context.WeixinContextConfigHelper;
public class WeixinProperConfigTest {
@Test
public void testProperSet() throws InitialWeixinConfigureException {
WeixinContext ctx = new WeixinContext();
ctx.setToken(" ");
WeixinContextConfigHelper.setFieldValue(ctx, "token", "foo");
Assert.assertEquals(ctx.getToken(), "foo");
}
}
|
apache-2.0
|
aeq/killbill
|
invoice/src/main/java/org/killbill/billing/invoice/InvoiceDispatcher.java
|
37120
|
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2015 Groupon, Inc
* Copyright 2014-2015 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.invoice;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountInternalApi;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.BillingMode;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.PlanPhasePriceOverride;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.Usage;
import org.killbill.billing.entitlement.api.SubscriptionEventType;
import org.killbill.billing.events.BusInternalEvent;
import org.killbill.billing.events.EffectiveSubscriptionInternalEvent;
import org.killbill.billing.events.InvoiceAdjustmentInternalEvent;
import org.killbill.billing.events.InvoiceInternalEvent;
import org.killbill.billing.events.InvoiceNotificationInternalEvent;
import org.killbill.billing.invoice.InvoiceDispatcher.FutureAccountNotifications.SubscriptionNotification;
import org.killbill.billing.invoice.api.DefaultInvoiceService;
import org.killbill.billing.invoice.api.DryRunArguments;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoiceItemType;
import org.killbill.billing.invoice.api.InvoiceNotifier;
import org.killbill.billing.invoice.api.user.DefaultInvoiceAdjustmentEvent;
import org.killbill.billing.invoice.api.user.DefaultInvoiceCreationEvent;
import org.killbill.billing.invoice.api.user.DefaultInvoiceNotificationInternalEvent;
import org.killbill.billing.invoice.api.user.DefaultNullInvoiceEvent;
import org.killbill.billing.invoice.dao.InvoiceDao;
import org.killbill.billing.invoice.dao.InvoiceItemModelDao;
import org.killbill.billing.invoice.dao.InvoiceModelDao;
import org.killbill.billing.invoice.generator.BillingIntervalDetail;
import org.killbill.billing.invoice.generator.InvoiceGenerator;
import org.killbill.billing.invoice.model.DefaultInvoice;
import org.killbill.billing.invoice.model.FixedPriceInvoiceItem;
import org.killbill.billing.invoice.model.InvoiceItemFactory;
import org.killbill.billing.invoice.model.RecurringInvoiceItem;
import org.killbill.billing.invoice.notification.DefaultNextBillingDateNotifier;
import org.killbill.billing.invoice.notification.NextBillingDateNotificationKey;
import org.killbill.billing.junction.BillingEvent;
import org.killbill.billing.junction.BillingEventSet;
import org.killbill.billing.junction.BillingInternalApi;
import org.killbill.billing.subscription.api.SubscriptionBaseInternalApi;
import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType;
import org.killbill.billing.subscription.api.user.SubscriptionBaseApiException;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.config.InvoiceConfig;
import org.killbill.billing.util.globallocker.LockerType;
import org.killbill.billing.util.timezone.DateAndTimeZoneContext;
import org.killbill.bus.api.PersistentBus;
import org.killbill.bus.api.PersistentBus.EventBusException;
import org.killbill.clock.Clock;
import org.killbill.commons.locker.GlobalLock;
import org.killbill.commons.locker.GlobalLocker;
import org.killbill.commons.locker.LockFailedException;
import org.killbill.notificationq.api.NotificationEventWithMetadata;
import org.killbill.notificationq.api.NotificationQueue;
import org.killbill.notificationq.api.NotificationQueueService;
import org.killbill.notificationq.api.NotificationQueueService.NoSuchNotificationQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
public class InvoiceDispatcher {
private static final Logger log = LoggerFactory.getLogger(InvoiceDispatcher.class);
private static final int NB_LOCK_TRY = 5;
private static final Ordering<DateTime> UPCOMING_NOTIFICATION_DATE_ORDERING = Ordering.natural();
private static final NullDryRunArguments NULL_DRY_RUN_ARGUMENTS = new NullDryRunArguments();
private final InvoiceGenerator generator;
private final BillingInternalApi billingApi;
private final AccountInternalApi accountApi;
private final SubscriptionBaseInternalApi subscriptionApi;
private final InvoiceDao invoiceDao;
private final InternalCallContextFactory internalCallContextFactory;
private final InvoiceNotifier invoiceNotifier;
private final InvoicePluginDispatcher invoicePluginDispatcher;
private final GlobalLocker locker;
private final PersistentBus eventBus;
private final Clock clock;
private final NotificationQueueService notificationQueueService;
private final InvoiceConfig invoiceConfig;
@Inject
public InvoiceDispatcher(final InvoiceGenerator generator,
final AccountInternalApi accountApi,
final BillingInternalApi billingApi,
final SubscriptionBaseInternalApi SubscriptionApi,
final InvoiceDao invoiceDao,
final InternalCallContextFactory internalCallContextFactory,
final InvoiceNotifier invoiceNotifier,
final InvoicePluginDispatcher invoicePluginDispatcher,
final GlobalLocker locker,
final PersistentBus eventBus,
final NotificationQueueService notificationQueueService,
final InvoiceConfig invoiceConfig,
final Clock clock) {
this.generator = generator;
this.billingApi = billingApi;
this.subscriptionApi = SubscriptionApi;
this.accountApi = accountApi;
this.invoiceDao = invoiceDao;
this.internalCallContextFactory = internalCallContextFactory;
this.invoiceNotifier = invoiceNotifier;
this.invoicePluginDispatcher = invoicePluginDispatcher;
this.locker = locker;
this.eventBus = eventBus;
this.clock = clock;
this.notificationQueueService = notificationQueueService;
this.invoiceConfig = invoiceConfig;
}
public void processSubscriptionForInvoiceGeneration(final EffectiveSubscriptionInternalEvent transition,
final InternalCallContext context) throws InvoiceApiException {
final UUID subscriptionId = transition.getSubscriptionId();
final DateTime targetDate = transition.getEffectiveTransitionTime();
processSubscriptionForInvoiceGeneration(subscriptionId, targetDate, context);
}
public void processSubscriptionForInvoiceGeneration(final UUID subscriptionId, final DateTime targetDate, final InternalCallContext context) throws InvoiceApiException {
processSubscriptionInternal(subscriptionId, targetDate, false, context);
}
public void processSubscriptionForInvoiceNotification(final UUID subscriptionId, final DateTime targetDate, final InternalCallContext context) throws InvoiceApiException {
final Invoice dryRunInvoice = processSubscriptionInternal(subscriptionId, targetDate, true, context);
if (dryRunInvoice != null && dryRunInvoice.getBalance().compareTo(BigDecimal.ZERO) > 0) {
final InvoiceNotificationInternalEvent event = new DefaultInvoiceNotificationInternalEvent(dryRunInvoice.getAccountId(), dryRunInvoice.getBalance(), dryRunInvoice.getCurrency(),
targetDate, context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken());
try {
eventBus.post(event);
} catch (EventBusException e) {
log.error("Failed to post event " + event, e);
}
}
}
private Invoice processSubscriptionInternal(final UUID subscriptionId, final DateTime targetDate, final boolean dryRunForNotification, final InternalCallContext context) throws InvoiceApiException {
try {
if (subscriptionId == null) {
log.error("Failed handling SubscriptionBase change.", new InvoiceApiException(ErrorCode.INVOICE_INVALID_TRANSITION));
return null;
}
final UUID accountId = subscriptionApi.getAccountIdFromSubscriptionId(subscriptionId, context);
final DryRunArguments dryRunArguments = dryRunForNotification ? NULL_DRY_RUN_ARGUMENTS : null;
return processAccount(accountId, targetDate, dryRunArguments, context);
} catch (final SubscriptionBaseApiException e) {
log.error("Failed handling SubscriptionBase change.",
new InvoiceApiException(ErrorCode.INVOICE_NO_ACCOUNT_ID_FOR_SUBSCRIPTION_ID, subscriptionId.toString()));
return null;
}
}
public Invoice processAccount(final UUID accountId, final DateTime targetDate,
@Nullable final DryRunArguments dryRunArguments, final InternalCallContext context) throws InvoiceApiException {
GlobalLock lock = null;
try {
lock = locker.lockWithNumberOfTries(LockerType.ACCNT_INV_PAY.toString(), accountId.toString(), NB_LOCK_TRY);
return processAccountWithLock(accountId, targetDate, dryRunArguments, context);
} catch (final LockFailedException e) {
// Not good!
log.error(String.format("Failed to process invoice for account %s, targetDate %s",
accountId.toString(), targetDate), e);
} finally {
if (lock != null) {
lock.release();
}
}
return null;
}
private Invoice processAccountWithLock(final UUID accountId, @Nullable final DateTime inputTargetDateTime,
@Nullable final DryRunArguments dryRunArguments, final InternalCallContext context) throws InvoiceApiException {
final boolean isDryRun = dryRunArguments != null;
// A null inputTargetDateTime is only allowed in dryRun mode, where the system computes the candidate target dates itself
Preconditions.checkArgument(inputTargetDateTime != null || isDryRun, "inputTargetDateTime is required in non dryRun mode");
try {
// Make sure to first set the BCD if needed then get the account object (to have the BCD set)
final BillingEventSet billingEvents = billingApi.getBillingEventsForAccountAndUpdateAccountBCD(accountId, dryRunArguments, context);
final List<DateTime> candidateDateTimes = (inputTargetDateTime != null) ? ImmutableList.of(inputTargetDateTime) : getUpcomingInvoiceCandidateDates(context);
for (final DateTime curTargetDateTime : candidateDateTimes) {
final Invoice invoice = processAccountWithLockAndInputTargetDate(accountId, curTargetDateTime, billingEvents, isDryRun, context);
if (invoice != null) {
return invoice;
}
}
return null;
} catch (CatalogApiException e) {
log.error("Failed handling SubscriptionBase change.", e);
return null;
}
}
private Invoice processAccountWithLockAndInputTargetDate(final UUID accountId, final DateTime targetDateTime,
final BillingEventSet billingEvents, final boolean isDryRun, final InternalCallContext context) throws InvoiceApiException {
try {
final Account account = accountApi.getAccountById(accountId, context);
final DateAndTimeZoneContext dateAndTimeZoneContext = billingEvents.iterator().hasNext() ?
new DateAndTimeZoneContext(billingEvents.iterator().next().getEffectiveDate(), account.getTimeZone(), clock) :
null;
final List<Invoice> invoices = billingEvents.isAccountAutoInvoiceOff() ?
ImmutableList.<Invoice>of() :
ImmutableList.<Invoice>copyOf(Collections2.transform(invoiceDao.getInvoicesByAccount(context),
new Function<InvoiceModelDao, Invoice>() {
@Override
public Invoice apply(final InvoiceModelDao input) {
return new DefaultInvoice(input);
}
}));
final Currency targetCurrency = account.getCurrency();
final LocalDate targetDate = (dateAndTimeZoneContext != null && targetDateTime != null) ? dateAndTimeZoneContext.computeTargetDate(targetDateTime) : null;
final Invoice invoice = targetDate != null ? generator.generateInvoice(account, billingEvents, invoices, targetDate, targetCurrency, context) : null;
//
// If invoice comes back null, there is nothing new to generate, we can bail early
//
if (invoice == null) {
log.info("Generated null invoice for accountId {} and targetDate {} (targetDateTime {})", new Object[]{accountId, targetDate, targetDateTime});
if (!isDryRun) {
final BusInternalEvent event = new DefaultNullInvoiceEvent(accountId, clock.getUTCToday(),
context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken());
postEvent(event, accountId, context);
}
return invoice;
}
// Generate missing credit (> 0 for generation and < 0 for use) prior we call the plugin
final InvoiceItem cbaItem = computeCBAOnExistingInvoice(invoice, context);
if (cbaItem != null) {
invoice.addInvoiceItem(cbaItem);
}
//
// Ask external invoice plugins if additional items (tax, etc) shall be added to the invoice
//
final CallContext callContext = buildCallContext(context);
invoice.addInvoiceItems(invoicePluginDispatcher.getAdditionalInvoiceItems(invoice, callContext));
if (!isDryRun) {
commitInvoiceStateAndNotifyAccountIfConfigured(account, invoice, billingEvents, dateAndTimeZoneContext, targetDate, context);
}
return invoice;
} catch (final AccountApiException e) {
log.error("Failed handling SubscriptionBase change.", e);
return null;
} catch (SubscriptionBaseApiException e) {
log.error("Failed handling SubscriptionBase change.", e);
return null;
}
}
private void commitInvoiceStateAndNotifyAccountIfConfigured(final Account account, final Invoice invoice, final BillingEventSet billingEvents, final DateAndTimeZoneContext dateAndTimeZoneContext, final LocalDate targetDate, final InternalCallContext context) throws SubscriptionBaseApiException, InvoiceApiException {
boolean isRealInvoiceWithNonEmptyItems = false;
// Extract the set of invoiceId for which we see items that don't belong to current generated invoice
final Set<UUID> adjustedUniqueOtherInvoiceId = new TreeSet<UUID>();
adjustedUniqueOtherInvoiceId.addAll(Collections2.transform(invoice.getInvoiceItems(), new Function<InvoiceItem, UUID>() {
@Nullable
@Override
public UUID apply(@Nullable final InvoiceItem input) {
return input.getInvoiceId();
}
}));
boolean isRealInvoiceWithItems = adjustedUniqueOtherInvoiceId.remove(invoice.getId());
if (isRealInvoiceWithItems) {
log.info("Generated invoice {} with {} items for accountId {} and targetDate {}", new Object[]{invoice.getId(), invoice.getNumberOfItems(), account.getId(), targetDate});
} else {
final Joiner joiner = Joiner.on(",");
final String adjustedInvoices = joiner.join(adjustedUniqueOtherInvoiceId.toArray(new UUID[adjustedUniqueOtherInvoiceId.size()]));
log.info("Adjusting existing invoices {} with {} items for accountId {} and targetDate {})", new Object[]{adjustedInvoices, invoice.getNumberOfItems(),
account.getId(), targetDate});
}
// Transformation to Invoice -> InvoiceModelDao
final InvoiceModelDao invoiceModelDao = new InvoiceModelDao(invoice);
final Iterable<InvoiceItemModelDao> invoiceItemModelDaos = Iterables.transform(invoice.getInvoiceItems(),
new Function<InvoiceItem, InvoiceItemModelDao>() {
@Override
public InvoiceItemModelDao apply(final InvoiceItem input) {
return new InvoiceItemModelDao(input);
}
});
final FutureAccountNotifications futureAccountNotifications = createNextFutureNotificationDate(invoiceItemModelDaos, billingEvents, dateAndTimeZoneContext, context);
// We filter out zero-amount USAGE items before persisting the invoice, which may leave us with an invoice with no items;
// we recompute the isRealInvoiceWithItems flag based on what is left (the DAO call is still necessary to set the future notifications).
final Iterable<InvoiceItemModelDao> filteredInvoiceItemModelDaos = Iterables.filter(invoiceItemModelDaos, new Predicate<InvoiceItemModelDao>() {
@Override
public boolean apply(@Nullable final InvoiceItemModelDao input) {
return (input.getType() != InvoiceItemType.USAGE || input.getAmount().compareTo(BigDecimal.ZERO) != 0);
}
});
final boolean isThereAnyItemsLeft = filteredInvoiceItemModelDaos.iterator().hasNext();
isRealInvoiceWithNonEmptyItems = isThereAnyItemsLeft ? isRealInvoiceWithItems : false;
if (isThereAnyItemsLeft) {
invoiceDao.createInvoice(invoiceModelDao, ImmutableList.copyOf(filteredInvoiceItemModelDaos), isRealInvoiceWithItems, futureAccountNotifications, context);
} else {
invoiceDao.setFutureAccountNotificationsForEmptyInvoice(account.getId(), futureAccountNotifications, context);
}
final List<InvoiceItem> fixedPriceInvoiceItems = invoice.getInvoiceItems(FixedPriceInvoiceItem.class);
final List<InvoiceItem> recurringInvoiceItems = invoice.getInvoiceItems(RecurringInvoiceItem.class);
setChargedThroughDates(dateAndTimeZoneContext, fixedPriceInvoiceItems, recurringInvoiceItems, context);
final List<InvoiceInternalEvent> events = new ArrayList<InvoiceInternalEvent>();
if (isRealInvoiceWithNonEmptyItems) {
events.add(new DefaultInvoiceCreationEvent(invoice.getId(), invoice.getAccountId(),
invoice.getBalance(), invoice.getCurrency(),
context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken()));
}
for (final UUID cur : adjustedUniqueOtherInvoiceId) {
final InvoiceAdjustmentInternalEvent event = new DefaultInvoiceAdjustmentEvent(cur, invoice.getAccountId(),
context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken());
events.add(event);
}
for (final InvoiceInternalEvent event : events) {
postEvent(event, account.getId(), context);
}
if (account.isNotifiedForInvoices() && isRealInvoiceWithNonEmptyItems) {
// Need to re-hydrate the invoice object to get the invoice number (record id)
// API_FIX InvoiceNotifier public API?
invoiceNotifier.notify(account, new DefaultInvoice(invoiceDao.getById(invoice.getId(), context)), buildTenantContext(context));
}
}
private InvoiceItem computeCBAOnExistingInvoice(final Invoice invoice, final InternalCallContext context) throws InvoiceApiException {
// Transformation to Invoice -> InvoiceModelDao
final InvoiceModelDao invoiceModelDao = new InvoiceModelDao(invoice);
final List<InvoiceItemModelDao> invoiceItemModelDaos = ImmutableList.copyOf(Collections2.transform(invoice.getInvoiceItems(),
new Function<InvoiceItem, InvoiceItemModelDao>() {
@Override
public InvoiceItemModelDao apply(final InvoiceItem input) {
return new InvoiceItemModelDao(input);
}
}));
invoiceModelDao.addInvoiceItems(invoiceItemModelDaos);
final InvoiceItemModelDao cbaItem = invoiceDao.doCBAComplexity(invoiceModelDao, context);
return cbaItem != null ? InvoiceItemFactory.fromModelDao(cbaItem) : null;
}
private TenantContext buildTenantContext(final InternalTenantContext context) {
return internalCallContextFactory.createTenantContext(context);
}
private CallContext buildCallContext(final InternalCallContext context) {
return internalCallContextFactory.createCallContext(context);
}
@VisibleForTesting
FutureAccountNotifications createNextFutureNotificationDate(final Iterable<InvoiceItemModelDao> invoiceItems, final BillingEventSet billingEvents, final DateAndTimeZoneContext dateAndTimeZoneContext, final InternalCallContext context) {
final Map<UUID, List<SubscriptionNotification>> result = new HashMap<UUID, List<SubscriptionNotification>>();
final Map<String, LocalDate> perSubscriptionUsage = new HashMap<String, LocalDate>();
// For each subscription that has a positive (amount) recurring item, create the date
// at which we should be called back for next invoice.
//
for (final InvoiceItemModelDao item : invoiceItems) {
List<SubscriptionNotification> perSubscriptionCallback = result.get(item.getSubscriptionId());
if (perSubscriptionCallback == null && (item.getType() == InvoiceItemType.RECURRING || item.getType() == InvoiceItemType.USAGE)) {
perSubscriptionCallback = new ArrayList<SubscriptionNotification>();
result.put(item.getSubscriptionId(), perSubscriptionCallback);
}
switch (item.getType()) {
case RECURRING:
if ((item.getEndDate() != null) &&
(item.getAmount() == null ||
item.getAmount().compareTo(BigDecimal.ZERO) >= 0)) {
perSubscriptionCallback.add(new SubscriptionNotification(dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(item.getEndDate()), true));
}
break;
case USAGE:
final String key = item.getSubscriptionId().toString() + ":" + item.getUsageName();
final LocalDate perSubscriptionUsageRecurringDate = perSubscriptionUsage.get(key);
if (perSubscriptionUsageRecurringDate == null || perSubscriptionUsageRecurringDate.compareTo(item.getEndDate()) < 0) {
perSubscriptionUsage.put(key, item.getEndDate());
}
break;
default:
// Ignore
}
}
for (final String key : perSubscriptionUsage.keySet()) {
final String[] parts = key.split(":");
final UUID subscriptionId = UUID.fromString(parts[0]);
final List<SubscriptionNotification> perSubscriptionCallback = result.get(subscriptionId);
final String usageName = parts[1];
final LocalDate endDate = perSubscriptionUsage.get(key);
final DateTime subscriptionUsageCallbackDate = getNextUsageBillingDate(subscriptionId, usageName, endDate, dateAndTimeZoneContext, billingEvents);
perSubscriptionCallback.add(new SubscriptionNotification(subscriptionUsageCallbackDate, true));
}
// If dryRunNotification is enabled we also need to fetch the upcoming PHASE dates (we add SubscriptionNotification with isForInvoiceNotificationTrigger = false)
final boolean isInvoiceNotificationEnabled = invoiceConfig.getDryRunNotificationSchedule().getMillis() > 0;
if (isInvoiceNotificationEnabled) {
final Map<UUID, DateTime> upcomingPhasesForSubscriptions = subscriptionApi.getNextFutureEventForSubscriptions(SubscriptionBaseTransitionType.PHASE, context);
for (UUID cur : upcomingPhasesForSubscriptions.keySet()) {
final DateTime curDate = upcomingPhasesForSubscriptions.get(cur);
List<SubscriptionNotification> resultValue = result.get(cur);
if (resultValue == null) {
resultValue = new ArrayList<SubscriptionNotification>();
}
resultValue.add(new SubscriptionNotification(curDate, false));
result.put(cur, resultValue);
}
}
return new FutureAccountNotifications(dateAndTimeZoneContext, result);
}
private DateTime getNextUsageBillingDate(final UUID subscriptionId, final String usageName, final LocalDate chargedThroughDate, final DateAndTimeZoneContext dateAndTimeZoneContext, final BillingEventSet billingEvents) {
final Usage usage = billingEvents.getUsages().get(usageName);
final BillingEvent billingEventSubscription = Iterables.tryFind(billingEvents, new Predicate<BillingEvent>() {
@Override
public boolean apply(@Nullable final BillingEvent input) {
return input.getSubscription().getId().equals(subscriptionId);
}
}).orNull();
final LocalDate nextCallbackUsageDate = (usage.getBillingMode() == BillingMode.IN_ARREAR) ? BillingIntervalDetail.alignProposedBillCycleDate(chargedThroughDate.plusMonths(usage.getBillingPeriod().getNumberOfMonths()), billingEventSubscription.getBillCycleDayLocal()) : chargedThroughDate;
return dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(nextCallbackUsageDate);
}
private void setChargedThroughDates(final DateAndTimeZoneContext dateAndTimeZoneContext,
final Collection<InvoiceItem> fixedPriceItems,
final Collection<InvoiceItem> recurringItems,
final InternalCallContext context) throws SubscriptionBaseApiException {
final Map<UUID, DateTime> chargeThroughDates = new HashMap<UUID, DateTime>();
addInvoiceItemsToChargeThroughDates(dateAndTimeZoneContext, chargeThroughDates, fixedPriceItems);
addInvoiceItemsToChargeThroughDates(dateAndTimeZoneContext, chargeThroughDates, recurringItems);
for (final UUID subscriptionId : chargeThroughDates.keySet()) {
if (subscriptionId != null) {
final DateTime chargeThroughDate = chargeThroughDates.get(subscriptionId);
subscriptionApi.setChargedThroughDate(subscriptionId, chargeThroughDate, context);
}
}
}
private void postEvent(final BusInternalEvent event, final UUID accountId, final InternalCallContext context) {
try {
eventBus.post(event);
} catch (final EventBusException e) {
log.error(String.format("Failed to post event %s for account %s", event.getBusEventType(), accountId), e);
}
}
private void addInvoiceItemsToChargeThroughDates(final DateAndTimeZoneContext dateAndTimeZoneContext,
final Map<UUID, DateTime> chargeThroughDates,
final Collection<InvoiceItem> items) {
for (final InvoiceItem item : items) {
final UUID subscriptionId = item.getSubscriptionId();
final LocalDate endDate = (item.getEndDate() != null) ? item.getEndDate() : item.getStartDate();
final DateTime proposedChargedThroughDate = dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(endDate);
if (chargeThroughDates.containsKey(subscriptionId)) {
if (chargeThroughDates.get(subscriptionId).isBefore(proposedChargedThroughDate)) {
chargeThroughDates.put(subscriptionId, proposedChargedThroughDate);
}
} else {
chargeThroughDates.put(subscriptionId, proposedChargedThroughDate);
}
}
}
public static class FutureAccountNotifications {
private final DateAndTimeZoneContext accountDateAndTimeZoneContext;
private final Map<UUID, List<SubscriptionNotification>> notifications;
public FutureAccountNotifications(final DateAndTimeZoneContext accountDateAndTimeZoneContext, final Map<UUID, List<SubscriptionNotification>> notifications) {
this.accountDateAndTimeZoneContext = accountDateAndTimeZoneContext;
this.notifications = notifications;
}
public DateAndTimeZoneContext getAccountDateAndTimeZoneContext() {
return accountDateAndTimeZoneContext;
}
public Map<UUID, List<SubscriptionNotification>> getNotifications() {
return notifications;
}
public static class SubscriptionNotification {
private final DateTime effectiveDate;
private final boolean isForNotificationTrigger;
public SubscriptionNotification(final DateTime effectiveDate, final boolean isForNotificationTrigger) {
this.effectiveDate = effectiveDate;
this.isForNotificationTrigger = isForNotificationTrigger;
}
public DateTime getEffectiveDate() {
return effectiveDate;
}
public boolean isForInvoiceNotificationTrigger() {
return isForNotificationTrigger;
}
}
}
private List<DateTime> getUpcomingInvoiceCandidateDates(final InternalCallContext internalCallContext) {
final Iterable<DateTime> nextScheduledInvoiceDates = getNextScheduledInvoiceEffectiveDate(internalCallContext);
final Iterable<DateTime> nextScheduledSubscriptionsEventDates = subscriptionApi.getFutureNotificationsForAccount(internalCallContext);
return UPCOMING_NOTIFICATION_DATE_ORDERING.sortedCopy(Iterables.concat(nextScheduledInvoiceDates, nextScheduledSubscriptionsEventDates));
}
private Iterable<DateTime> getNextScheduledInvoiceEffectiveDate(final InternalCallContext internalCallContext) {
try {
final NotificationQueue notificationQueue = notificationQueueService.getNotificationQueue(DefaultInvoiceService.INVOICE_SERVICE_NAME,
DefaultNextBillingDateNotifier.NEXT_BILLING_DATE_NOTIFIER_QUEUE);
final List<NotificationEventWithMetadata<NextBillingDateNotificationKey>> futureNotifications = notificationQueue.getFutureNotificationForSearchKeys(internalCallContext.getAccountRecordId(), internalCallContext.getTenantRecordId());
final Iterable<NotificationEventWithMetadata<NextBillingDateNotificationKey>> filtered = Iterables.filter(futureNotifications, new Predicate<NotificationEventWithMetadata<NextBillingDateNotificationKey>>() {
@Override
public boolean apply(@Nullable final NotificationEventWithMetadata<NextBillingDateNotificationKey> input) {
final boolean isEventDryRunForNotifications = input.getEvent().isDryRunForInvoiceNotification() != null ?
input.getEvent().isDryRunForInvoiceNotification() : false;
return !isEventDryRunForNotifications;
}
});
return Iterables.transform(filtered, new Function<NotificationEventWithMetadata<NextBillingDateNotificationKey>, DateTime>() {
@Nullable
@Override
public DateTime apply(@Nullable final NotificationEventWithMetadata<NextBillingDateNotificationKey> input) {
return input.getEffectiveDate();
}
});
} catch (final NoSuchNotificationQueue noSuchNotificationQueue) {
throw new IllegalStateException(noSuchNotificationQueue);
}
}
private final static class NullDryRunArguments implements DryRunArguments {
@Override
public PlanPhaseSpecifier getPlanPhaseSpecifier() {
return null;
}
@Override
public SubscriptionEventType getAction() {
return null;
}
@Override
public UUID getSubscriptionId() {
return null;
}
@Override
public DateTime getEffectiveDate() {
return null;
}
@Override
public UUID getBundleId() {
return null;
}
@Override
public BillingActionPolicy getBillingActionPolicy() {
return null;
}
@Override
public List<PlanPhasePriceOverride> getPlanPhasePriceoverrides() {
return null;
}
}
}
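/*
 * Minimal illustrative call (an assumed example for context, not taken from Kill Bill documentation):
 * the dispatcher is normally injected and driven by bus/notification events, but a direct run for an
 * account would look like
 *
 *   invoiceDispatcher.processAccount(accountId, clock.getUTCNow(), null, internalCallContext);
 *
 * where a null DryRunArguments means a real (non dry-run) invoice pass.
 */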
|
apache-2.0
|
great-expectations/great_expectations
|
great_expectations/expectations/row_conditions.py
|
3909
|
from pyparsing import (
CaselessLiteral,
Combine,
Literal,
ParseException,
Regex,
Suppress,
Word,
alphanums,
alphas,
)
from great_expectations.exceptions import GreatExpectationsError
try:
import pyspark.sql.functions as F
except ImportError:
F = None
try:
import sqlalchemy as sa
except ImportError:
sa = None
def _set_notnull(s, l, t):
t["notnull"] = True
column_name = Combine(
Suppress(Literal('col("'))
+ Word(alphas, f"{alphanums}_.").setResultsName("column")
+ Suppress(Literal('")'))
)
gt = Literal(">")
lt = Literal("<")
ge = Literal(">=")
le = Literal("<=")
eq = Literal("==")
ops = (gt ^ lt ^ ge ^ le ^ eq).setResultsName("op")
fnumber = Regex(r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?").setResultsName("fnumber")
condition_value = Suppress('"') + Word(f"{alphanums}.").setResultsName(
"condition_value"
) + Suppress('"') ^ Suppress("'") + Word(f"{alphanums}.").setResultsName(
"condition_value"
) + Suppress(
"'"
)
not_null = CaselessLiteral(".notnull()").setResultsName("notnull")
condition = (column_name + not_null).setParseAction(_set_notnull) ^ (
column_name + ops + (fnumber ^ condition_value)
)
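# Illustrative condition strings accepted by the grammar above (assumed examples,
# derived from the parser elements, not from project documentation):
#   'col("age") > 18'           -> column="age",    op=">",  fnumber="18"
#   'col("status") == "open"'   -> column="status", op="==", condition_value="open"
#   'col("name").notnull()'     -> column="name",   notnull=True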
class ConditionParserError(GreatExpectationsError):
pass
def _parse_great_expectations_condition(row_condition: str):
try:
return condition.parseString(row_condition)
except ParseException:
raise ConditionParserError(f"unable to parse condition: {row_condition}")
# noinspection PyUnresolvedReferences
def parse_condition_to_spark(row_condition: str) -> "pyspark.sql.Column":
parsed = _parse_great_expectations_condition(row_condition)
column = parsed["column"]
if "condition_value" in parsed:
if parsed["op"] == "==":
return F.col(column) == parsed["condition_value"]
else:
raise ConditionParserError(
f"Invalid operator: {parsed['op']} for string literal spark condition."
)
elif "fnumber" in parsed:
try:
num = int(parsed["fnumber"])
except ValueError:
num = float(parsed["fnumber"])
op = parsed["op"]
if op == ">":
return F.col(column) > num
elif op == "<":
return F.col(column) < num
elif op == ">=":
return F.col(column) >= num
elif op == "<=":
return F.col(column) <= num
elif op == "==":
return F.col(column) == num
elif "notnull" in parsed and parsed["notnull"] is True:
return F.col(column).isNotNull()
else:
raise ConditionParserError(f"unrecognized column condition: {row_condition}")
def parse_condition_to_sqlalchemy(
row_condition: str,
) -> "sqlalchemy.sql.expression.ColumnElement":
parsed = _parse_great_expectations_condition(row_condition)
column = parsed["column"]
if "condition_value" in parsed:
if parsed["op"] == "==":
return sa.column(column) == parsed["condition_value"]
else:
raise ConditionParserError(
f"Invalid operator: {parsed['op']} for string literal spark condition."
)
elif "fnumber" in parsed:
try:
num = int(parsed["fnumber"])
except ValueError:
num = float(parsed["fnumber"])
op = parsed["op"]
if op == ">":
return sa.column(column) > num
elif op == "<":
return sa.column(column) < num
elif op == ">=":
return sa.column(column) >= num
elif op == "<=":
return sa.column(column) <= num
elif op == "==":
return sa.column(column) == num
elif "notnull" in parsed and parsed["notnull"] is True:
return sa.not_(sa.column(column).is_(None))
else:
raise ConditionParserError(f"unrecognized column condition: {row_condition}")
|
apache-2.0
|
nedap/archie
|
src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java
|
13477
|
package com.nedap.archie.adlparser.treewalkers;
import com.nedap.archie.adlparser.ADLParserErrors;
import com.nedap.archie.adlparser.antlr.AdlParser.*;
import com.nedap.archie.aom.*;
import com.nedap.archie.base.MultiplicityInterval;
import com.nedap.archie.rules.Assertion;
import org.antlr.v4.runtime.tree.TerminalNode;
import java.util.ArrayList;
import java.util.List;
/**
* Parser for the definition part of an archetype
*
* Created by pieter.bos on 15/10/15.
*/
public class CComplexObjectParser extends BaseTreeWalker {
private final PrimitivesConstraintParser primitivesConstraintParser;
public CComplexObjectParser(ADLParserErrors errors) {
super(errors);
primitivesConstraintParser = new PrimitivesConstraintParser(errors);
}
public RulesSection parseRules(Rules_sectionContext context) {
RulesSection result = new RulesSection();
result.setContent(context.getText());
RulesParser rulesParser = new RulesParser(getErrors());
for(AssertionContext assertion:context.assertion_list().assertion()) {
result.addRule(rulesParser.parse(assertion));
}
return result;
}
public CComplexObject parseComplexObject(C_complex_objectContext context) {
CComplexObject object = new CComplexObject();
if(context.type_id() != null) {
object.setRmTypeName(context.type_id().getText());
}
if(context.ID_CODE() != null) {
object.setNodeId(context.ID_CODE().getText());
} else if (context.ROOT_ID_CODE() != null) {
object.setNodeId(context.ROOT_ID_CODE().getText());
}
//TODO: object.setDeprecated(context.) ?;
if (context.c_occurrences() != null) {
object.setOccurrences(parseMultiplicityInterval(context.c_occurrences()));
}
for (C_attribute_defContext attribute : context.c_attribute_def()) {
parseCAttribute(object, attribute);
}
return object;
}
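    /*
     * Illustrative ADL2 fragment (an assumed example, for context only) of the kind of
     * c_complex_object this method walks:
     *
     *   ELEMENT[id9] occurrences matches {0..1} matches {
     *       value matches {
     *           DV_TEXT[id10]
     *       }
     *   }
     */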
private void parseCAttribute(CComplexObject parent, C_attribute_defContext attributeDefContext) {
if (attributeDefContext.c_attribute() != null) {
CAttribute attribute = new CAttribute();
C_attributeContext attributeContext = attributeDefContext.c_attribute();
if(attributeContext.attribute_id() != null) {
attribute.setRmAttributeName(attributeContext.attribute_id().getText());
} else {
attribute.setDifferentialPath(attributeContext.ADL_PATH().getText());
attribute.setRmAttributeName(getLastAttributeFromPath(attribute.getDifferentialPath()));
}
if (attributeContext.c_existence() != null) {
attribute.setExistence(parseMultiplicityInterval(attributeContext.c_existence()));
}
if (attributeContext.c_cardinality() != null) {
attribute.setCardinality(this.parseCardinalityInterval(attributeContext.c_cardinality()));
}
if (attributeContext.c_objects() != null) {
attribute.setChildren(parseCObjects(attributeContext.c_objects()));
} else if (attributeContext.CONTAINED_REGEXP() != null) {
attribute.addChild(primitivesConstraintParser.parseRegex(attributeContext.CONTAINED_REGEXP()));
}
parent.addAttribute(attribute);
} else if (attributeDefContext.c_attribute_tuple() != null) {
parent.addAttributeTuple(parseAttributeTuple(parent, attributeDefContext.c_attribute_tuple()));
}
}
public static String getFirstAttributeOfPath(String path) {
return path.substring(0, path.indexOf('/'));
}
public static String getPathMinusFirstAttribute(String path) {
return path.substring(path.indexOf('/'));
}
public static String getLastAttributeFromPath(String path) {
return path.substring(path.lastIndexOf('/')+1);
}
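    /*
     * Illustrative behaviour of the path helpers above (assumed example path):
     * for "items/value/defining_code":
     *   getFirstAttributeOfPath(...)    -> "items"
     *   getPathMinusFirstAttribute(...) -> "/value/defining_code"
     *   getLastAttributeFromPath(...)   -> "defining_code"
     */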
private CAttributeTuple parseAttributeTuple(CComplexObject parent, C_attribute_tupleContext attributeTupleContext) {
List<Attribute_idContext> attributeIdList = attributeTupleContext.attribute_id();
CAttributeTuple tuple = new CAttributeTuple();
for(Attribute_idContext idContext:attributeIdList) {
CAttribute attribute = new CAttribute();
String id = idContext.getText();//TODO? parse odin string value?
attribute.setRmAttributeName(id);
tuple.addMember(attribute);
parent.addAttribute(attribute);
}
List<C_object_tupleContext> tupleContexts = attributeTupleContext.c_object_tuple();
for(C_object_tupleContext tupleContext:tupleContexts) {
CPrimitiveTuple primitiveTuple = new CPrimitiveTuple();
List<C_object_tuple_itemContext> primitiveObjectContexts = tupleContext.c_object_tuple_items().c_object_tuple_item();
int i = 0;
for(C_object_tuple_itemContext tupleObjectContext:primitiveObjectContexts) {
CPrimitiveObject primitiveObject = null;
if(tupleObjectContext.c_primitive_object() != null) {
primitiveObject = primitivesConstraintParser.parsePrimitiveObject(tupleObjectContext.c_primitive_object());
} else if (tupleObjectContext.CONTAINED_REGEXP() != null) {
primitiveObject = primitivesConstraintParser.parseRegex(tupleObjectContext.CONTAINED_REGEXP());
}
tuple.getMembers().get(i).addChild(primitiveObject);
primitiveTuple.addMember(primitiveObject);
i++;
}
tuple.addTuple(primitiveTuple);
}
return tuple;
}
private List<CObject> parseCObjects(C_objectsContext objectsContext) {
ArrayList<CObject> result = new ArrayList<>();
if (objectsContext.c_primitive_object() != null) {
result.add(primitivesConstraintParser.parsePrimitiveObject(objectsContext.c_primitive_object()));
} else {
List<C_non_primitive_object_orderedContext> nonPrimitiveObjectOrderedContext = objectsContext.c_non_primitive_object_ordered();
if (nonPrimitiveObjectOrderedContext != null) {
for (C_non_primitive_object_orderedContext object : nonPrimitiveObjectOrderedContext) {
CObject cobject = parseNonPrimitiveObject(object.c_non_primitive_object());
Sibling_orderContext siblingOrderContext = object.sibling_order();
if(siblingOrderContext != null) {
SiblingOrder siblingOrder = new SiblingOrder();
if(siblingOrderContext.SYM_AFTER() != null) {
siblingOrder.setBefore(false);
} else if (siblingOrderContext.SYM_BEFORE() != null) {
siblingOrder.setBefore(true);
}
siblingOrder.setSiblingNodeId(siblingOrderContext.ID_CODE().getText());
cobject.setSiblingOrder(siblingOrder);
}
result.add(cobject);
}
}
}
return result;
}
private CObject parseNonPrimitiveObject(C_non_primitive_objectContext objectContext) {
/*
c_complex_object
| c_archetype_root
| c_complex_object_proxy
| archetype_slot
*/
if (objectContext.c_complex_object() != null) {
return parseComplexObject(objectContext.c_complex_object());
} else if (objectContext.c_archetype_root() != null) {
return parseArchetypeRoot(objectContext.c_archetype_root());
} else if (objectContext.c_complex_object_proxy() != null) {
return parseCComplexObjectProxy(objectContext.c_complex_object_proxy());
} else if (objectContext.archetype_slot() != null) {
return parseArchetypeSlot(objectContext.archetype_slot());
}
return null;
}
private CComplexObjectProxy parseCComplexObjectProxy(C_complex_object_proxyContext proxyContext) {
CComplexObjectProxy proxy = new CComplexObjectProxy();
proxy.setOccurrences(this.parseMultiplicityInterval(proxyContext.c_occurrences()));
proxy.setTargetPath(proxyContext.adl_path().getText());
proxy.setRmTypeName(proxyContext.type_id().getText());
proxy.setNodeId(proxyContext.ID_CODE().getText());
return proxy;
}
private CArchetypeRoot parseArchetypeRoot(C_archetype_rootContext archetypeRootContext) {
CArchetypeRoot root = new CArchetypeRoot();
root.setRmTypeName(archetypeRootContext.type_id().getText());
root.setNodeId(archetypeRootContext.ID_CODE().getText());
if(archetypeRootContext.archetype_ref() != null) {
root.setArchetypeRef(archetypeRootContext.archetype_ref().getText());
}
root.setOccurrences(this.parseMultiplicityInterval(archetypeRootContext.c_occurrences()));
for (C_attribute_defContext attributeContext : archetypeRootContext.c_attribute_def()) {
parseCAttribute(root, attributeContext);
}
//((Archetype_slotContext) slotContext).start.getInputStream().getText(slotContext.getSourceInterval())
return root;
}
private ArchetypeSlot parseArchetypeSlot(Archetype_slotContext slotContext) {
ArchetypeSlot slot = new ArchetypeSlot();
C_archetype_slot_headContext headContext = slotContext.c_archetype_slot_head();
slot.setNodeId(headContext.c_archetype_slot_id().ID_CODE().getText());
slot.setRmTypeName(headContext.c_archetype_slot_id().type_id().getText());
if(headContext.c_archetype_slot_id().SYM_CLOSED() != null) {
slot.setClosed(true);
}
if (headContext.c_occurrences() != null) {
slot.setOccurrences(parseMultiplicityInterval(headContext.c_occurrences()));
}
RulesParser assertionParser = new RulesParser(getErrors());
if (slotContext.c_excludes() != null) {
for (AssertionContext assertionContext : slotContext.c_excludes().assertion()) {
slot.getExcludes().add((Assertion) assertionParser.parse(assertionContext));
}
}
if (slotContext.c_includes() != null) {
for (AssertionContext assertionContext : slotContext.c_includes().assertion()) {
slot.getIncludes().add((Assertion) assertionParser.parse(assertionContext));
}
}
return slot;
}
private Cardinality parseCardinalityInterval(C_cardinalityContext context) {
Cardinality cardinality = new Cardinality();
MultiplicityInterval interval = parseMultiplicity(context.cardinality().multiplicity());
cardinality.setInterval(interval);
List<Multiplicity_modContext> modContexts = context.cardinality().multiplicity_mod();
for(Multiplicity_modContext modContext:modContexts) {
if(modContext.ordering_mod() != null) {
cardinality.setOrdered(modContext.ordering_mod().SYM_ORDERED() != null);
}
if(modContext.unique_mod() != null) {
cardinality.setUnique(true);
}
}
return cardinality;
}
private MultiplicityInterval parseMultiplicityInterval(C_existenceContext existenceContext) {
MultiplicityInterval interval = new MultiplicityInterval();
List<TerminalNode> integers = existenceContext.existence().INTEGER();
if(integers.size() == 1) {
interval.setLower(Integer.parseInt(integers.get(0).getText()));
interval.setUpper(interval.getLower());
} else if (integers.size() == 2) {
interval.setLower(Integer.parseInt(integers.get(0).getText()));
interval.setUpper(Integer.parseInt(integers.get(1).getText()));
}
return interval;
}
private MultiplicityInterval parseMultiplicityInterval(C_occurrencesContext occurrencesContext) {
if(occurrencesContext == null) {
return null;
}
return parseMultiplicity(occurrencesContext.multiplicity());
}
private MultiplicityInterval parseMultiplicity(MultiplicityContext multiplicity) {
if(multiplicity == null) {
return null;
}
MultiplicityInterval interval = new MultiplicityInterval();
List<TerminalNode> integers = multiplicity.INTEGER();
if(multiplicity.SYM_INTERVAL_SEP() != null) {
if(multiplicity.getText().contains("*")) {
interval.setLower(Integer.parseInt(integers.get(0).getText()));
interval.setUpperUnbounded(true);
} else {
interval.setLower(Integer.parseInt(integers.get(0).getText()));
interval.setUpper(Integer.parseInt(integers.get(1).getText()));
}
} else {
//one integer or *
if(multiplicity.getText().contains("*")) {
interval.setLowerUnbounded(false);
interval.setLower(0);
interval.setUpperUnbounded(true);
} else {
interval.setLower(Integer.parseInt(integers.get(0).getText()));
interval.setUpper(interval.getLower());
}
}
return interval;
}
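    /*
     * Illustrative occurrences/multiplicity inputs (assumed examples):
     * "0..1" -> [0,1], "1..*" -> [1, unbounded), "*" -> [0, unbounded), "3" -> [3,3].
     */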
}
|
apache-2.0
|
Gugli/Openfire
|
src/java/org/jivesoftware/openfire/audit/spi/AuditorImpl.java
|
18482
|
/*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.audit.spi;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.TimerTask;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.jivesoftware.openfire.audit.AuditManager;
import org.jivesoftware.openfire.audit.Auditor;
import org.jivesoftware.openfire.session.Session;
import org.jivesoftware.util.FastDateFormat;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.LocaleUtils;
import org.jivesoftware.util.StringUtils;
import org.jivesoftware.util.TaskEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.packet.IQ;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.xmpp.packet.Presence;
public class AuditorImpl implements Auditor {
private static final Logger Log = LoggerFactory.getLogger(AuditorImpl.class);
private AuditManager auditManager;
private File currentAuditFile;
private Writer writer;
private org.jivesoftware.util.XMLWriter xmlWriter;
/**
* Limit date used to detect when we need to rollover files. This date will be
* configured as the last second of the day.
*/
private Date currentDateLimit;
/**
     * Max total size in bytes that all audit log files may occupy. When the limit is reached,
     * the oldest audit log files will be removed until the total size is under the limit.
*/
private long maxTotalSize;
/**
* Max size in bytes that each audit log file may have. Once the limit has been
* reached a new audit file will be created.
*/
private long maxFileSize;
/**
     * Max number of days to keep audit information. Once the limit has been reached,
     * audit files that contain information older than the limit will be deleted.
*/
private int maxDays;
/**
* Flag that indicates if packets can still be accepted to be saved to the audit log.
*/
private boolean closed = false;
/**
     * Directory (absolute path) where the audit files will be saved.
*/
private String logDir;
/**
     * File handle of the directory that contains the audit logs.
*/
private File baseFolder;
/**
* Queue that holds the audited packets that will be later saved to an XML file.
*/
private BlockingQueue<AuditPacket> logQueue = new LinkedBlockingQueue<>();
/**
* Allow only a limited number of files for each day, max. three digits (000-999)
*/
private final int maxTotalFilesDay = 1000;
/**
* Track the current index number `...-nnn.log´
*/
private int filesIndex = 0;
/**
* Timer to save queued logs to the XML file.
*/
private SaveQueuedPacketsTask saveQueuedPacketsTask;
private FastDateFormat dateFormat;
private static FastDateFormat auditFormat;
public AuditorImpl(AuditManager manager) {
auditManager = manager;
dateFormat = FastDateFormat.getInstance("yyyyMMdd", TimeZone.getTimeZone("UTC"));
auditFormat = FastDateFormat.getInstance("MMM dd, yyyy hh:mm:ss:SSS a", JiveGlobals.getLocale());
}
protected void setMaxValues(int totalSize, int fileSize, int days) {
maxTotalSize = (long) totalSize * 1024l * 1024l;
maxFileSize = (long) fileSize * 1024l * 1024l;
maxDays = days;
}
public void setLogTimeout(int logTimeout) {
// Cancel any existing task because the timeout has changed
if (saveQueuedPacketsTask != null) {
saveQueuedPacketsTask.cancel();
}
// Create a new task and schedule it with the new timeout
saveQueuedPacketsTask = new SaveQueuedPacketsTask();
TaskEngine.getInstance().schedule(saveQueuedPacketsTask, logTimeout, logTimeout);
}
public void setLogDir(String logDir) {
this.logDir = logDir;
        // Create and cache the File of the base folder that will contain audit files
baseFolder = new File(logDir);
// Create the folder if it does not exist
if (!baseFolder.exists()) {
if ( !baseFolder.mkdir() ) {
Log.error( "Unable to create log directory: {}", baseFolder );
}
}
}
@Override
public int getQueuedPacketsNumber() {
return logQueue.size();
}
@Override
public void audit(Packet packet, Session session) {
if (auditManager.isEnabled()) {
if (packet instanceof Message) {
if (auditManager.isAuditMessage()) {
writePacket(packet, session);
}
}
else if (packet instanceof Presence) {
if (auditManager.isAuditPresence()) {
writePacket(packet, session);
}
}
else if (packet instanceof IQ) {
if (auditManager.isAuditIQ()) {
writePacket(packet, session);
}
}
}
}
private void writePacket(Packet packet, Session session) {
if (!closed) {
// Add to the logging queue this new entry that will be saved later
logQueue.add(new AuditPacket(packet.createCopy(), session));
}
}
@Override
public void stop() {
// Stop queuing packets since we are being stopped
closed = true;
// Save all remaining queued packets to the XML file
saveQueuedPackets();
close();
}
private void close() {
if (xmlWriter != null) {
try {
xmlWriter.flush();
writer.write("</jive>");
xmlWriter.close();
writer = null;
xmlWriter = null;
}
catch (Exception e) {
Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
}
}
}
private void prepareAuditFile(Date auditDate) throws IOException {
ensureMaxTotalSize();
// Rotate file if: we just started, current file size exceeded limit or date has changed
if (currentAuditFile == null || currentAuditFile.length() > maxFileSize ||
xmlWriter == null || currentDateLimit == null || auditDate.after(currentDateLimit))
{
createAuditFile(auditDate);
}
}
/**
     * Ensures that the max total size limit is not exceeded. If the total size of audit files
     * exceeds the limit, then the oldest audit files will be removed until the total size no
     * longer exceeds the limit.
*/
private void ensureMaxTotalSize() {
// Get list of existing audit files
FilenameFilter filter = new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("jive.audit-") && name.endsWith(".log");
}
};
File[] files = baseFolder.listFiles(filter);
if (files == null) {
Log.debug( "Path '{}' does not denote a directory, or an IO exception occured while trying to list its content.", baseFolder );
return;
}
long totalLength = 0;
for (File file : files) {
totalLength = totalLength + file.length();
}
// Check if total size has been exceeded
if (totalLength > maxTotalSize) {
// Sort files by name (chronological order)
List<File> sortedFiles = new ArrayList<>(Arrays.asList(files));
Collections.sort(sortedFiles, new Comparator<File>() {
@Override
public int compare(File o1, File o2) {
return o1.getName().compareTo(o2.getName());
}
});
// Delete as many old files as required to be under the limit
while (totalLength > maxTotalSize && !sortedFiles.isEmpty()) {
File fileToDelete = sortedFiles.remove(0);
totalLength = totalLength - fileToDelete.length();
if (fileToDelete.equals(currentAuditFile)) {
// Close current file
close();
}
// Delete oldest file
if ( !fileToDelete.delete() )
{
Log.warn( "Unable to delete file '{}' as part of regular log rotation based on size of files (Openfire failed to clean up after itself)!", fileToDelete );
}
}
}
}
/**
* Deletes old audit files that exceeded the max number of days limit.
*/
private void ensureMaxDays() {
if (maxDays == -1) {
// Do nothing since we don't have any limit
return;
}
// Set limit date after which we need to delete old audit files
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.DATE, maxDays * -1);
final String oldestFile =
"jive.audit-" + dateFormat.format(calendar.getTime()) + "-000.log";
// Get list of audit files to delete
FilenameFilter filter = new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("jive.audit-") && name.endsWith(".log") &&
name.compareTo(oldestFile) < 0;
}
};
File[] files = baseFolder.listFiles(filter);
// Delete old audit files
for (File fileToDelete : files) {
if (fileToDelete.equals(currentAuditFile)) {
// Close current file
close();
}
if ( !fileToDelete.delete() )
{
Log.warn( "Unable to delete file '{}' as part of regular log rotation based on age of file. (Openfire failed to clean up after itself)!", fileToDelete );
}
}
}
/* if this new logic still causes problems one may want to
* use log4j or change the file format from YYYYmmdd-nnn to YYYYmmdd-HHMM */
/**
* Sets <b>xmlWriter</b> so this class can use it to write audit logs<br>
* The audit filename <b>currentAuditFile</b> will be `jive.audit-YYYYmmdd-nnn.log´<br>
* `nnn´ will be reset to `000´ when a new log file is created the next day <br>
* `nnn´ will be increased for log files which belong to the same day<br>
* <b>WARNING:</b> If log files of the current day are deleted and the server is restarted then
* the value of `nnn´ may be random (it's calculated by `Math.max(files.length, filesIndex);´
* with `filesIndex=0´ and `files.length=nr(existing jive.audit-YYYYmmdd-???.log files)´ -
     * if there are 11 audit files (033-043) then nnn will be 11 instead of 044).<br>
* If `nnn=999´ then all audit data will be written to this file till the next day.<br>
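     * For illustration (hypothetical example dates): with two same-day rollovers on 20150307, the files<br>
     * written would be `jive.audit-20150307-000.log´, `jive.audit-20150307-001.log´ and<br>
     * `jive.audit-20150307-002.log´; the next day starts again at `jive.audit-20150308-000.log´.<br>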
* @param auditDate
* @throws IOException
*/
private void createAuditFile(Date auditDate) throws IOException {
final String filePrefix = "jive.audit-" + dateFormat.format(auditDate) + "-";
if (currentDateLimit == null || auditDate.after(currentDateLimit)) {
// Set limit date after which we need to rollover the audit file (based on the date)
Calendar calendar = Calendar.getInstance();
calendar.setTime(auditDate);
calendar.set(Calendar.HOUR_OF_DAY, 23);
calendar.set(Calendar.MINUTE, 59);
calendar.set(Calendar.SECOND, 59);
calendar.set(Calendar.MILLISECOND, 999);
currentDateLimit = calendar.getTime();
filesIndex = 0;
}
// Get list of existing audit files
FilenameFilter filter = new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith(filePrefix) && name.endsWith(".log");
}
};
File[] files = baseFolder.listFiles(filter);
// if some daily files were already deleted then files.length will be smaller than filesIndex
// see also WARNING above
filesIndex = Math.max(files.length, filesIndex);
if (filesIndex >= maxTotalFilesDay)
{
// don't close this file, continue auditing to it
return;
}
File tmpAuditFile = new File(logDir, filePrefix + StringUtils.zeroPadString(Integer.toString(filesIndex), 3) + ".log");
if ( (filesIndex == maxTotalFilesDay-1) && !tmpAuditFile.exists() )
{
Log.warn("Creating last audit file for this date: " + dateFormat.format(auditDate));
}
while ( (filesIndex<(maxTotalFilesDay-1)) && (tmpAuditFile.exists()) )
{
Log.debug("Audit file '"+ tmpAuditFile.getName() +"' does already exist.");
filesIndex++;
tmpAuditFile = new File(logDir, filePrefix + StringUtils.zeroPadString(Integer.toString(filesIndex), 3) + ".log");
}
currentAuditFile = tmpAuditFile;
close();
// always append to an existing file (after restart)
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(currentAuditFile, true), StandardCharsets.UTF_8));
writer.write("<jive xmlns=\"http://www.jivesoftware.org\">");
xmlWriter = new org.jivesoftware.util.XMLWriter(writer);
}
/**
* Saves the queued entries to an XML file and checks that very old files are deleted.
*/
private class SaveQueuedPacketsTask extends TimerTask {
@Override
public void run() {
try {
// Ensure that saved audit logs are not too old
ensureMaxDays();
// Save queued packets to the audit logs
saveQueuedPackets();
}
catch (Throwable e) {
Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
}
}
}
private void saveQueuedPackets() {
List<AuditPacket> packets = new ArrayList<>(logQueue.size());
logQueue.drainTo(packets);
for (AuditPacket auditPacket : packets) {
try {
prepareAuditFile(auditPacket.getCreationDate());
Element element = auditPacket.getElement();
// Protect against null elements.
if (element != null) {
xmlWriter.write(element);
}
}
catch (IOException e) {
Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
// Add again the entry to the queue to save it later
if (xmlWriter != null) {
logQueue.add(auditPacket);
}
}
}
try {
if (xmlWriter != null) {
xmlWriter.flush();
}
}
catch (IOException ioe) {
Log.error(ioe.getMessage(), ioe);
}
}
/**
* Wrapper on a Packet with information about the packet's status at the moment
* when the message was queued.<p>
*
* The idea is to wrap every packet that is needed to be audited and then add the
* wrapper to a queue that will be later processed (i.e. saved to the XML file).
*/
private static class AuditPacket {
private static DocumentFactory docFactory = DocumentFactory.getInstance();
private Element element;
private Date creationDate;
public AuditPacket(Packet packet, Session session) {
element = docFactory.createElement("packet", "http://www.jivesoftware.org");
creationDate = new Date();
if (session != null && session.getStreamID() != null) {
element.addAttribute("streamID", session.getStreamID().toString());
}
switch (session == null ? 0 : session.getStatus()) {
case Session.STATUS_AUTHENTICATED:
element.addAttribute("status", "auth");
break;
case Session.STATUS_CLOSED:
element.addAttribute("status", "closed");
break;
case Session.STATUS_CONNECTED:
element.addAttribute("status", "connected");
// This is a workaround. Since we don't want to have an incorrect FROM attribute
// value we need to clean up the FROM attribute. The FROM attribute will contain
// an incorrect value since we are setting a fake JID until the user actually
// authenticates with the server.
packet.setFrom((String) null);
break;
default:
element.addAttribute("status", "unknown");
break;
}
element.addAttribute("timestamp", auditFormat.format(creationDate));
element.add(packet.getElement());
}
/**
* Returns the Element associated with this audit packet.
*
* @return the Element.
*/
public Element getElement() {
return element;
}
/**
* Returns the date when the packet was audited. This is the time when the
* packet was queued to be saved.
*
* @return the date when the packet was audited.
*/
public Date getCreationDate() {
return creationDate;
}
}
}
|
apache-2.0
|
google/gvisor
|
pkg/sentry/platform/kvm/testutil/testutil.go
|
2747
|
// Copyright 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package testutil provides common assembly stubs for testing.
package testutil
import (
"fmt"
"strings"
)
// Getpid executes a trivial system call.
func Getpid()
// AddrOfGetpid returns the address of Getpid.
//
// In Go 1.17+, Go references to assembly functions resolve to an ABIInternal
// wrapper function rather than the function itself. We must reference from
// assembly to get the ABI0 (i.e., primary) address.
func AddrOfGetpid() uintptr
// AddrOfTouch returns the address of a function that touches the value in the
// first register.
func AddrOfTouch() uintptr
func touch()
// AddrOfSyscallLoop returns the address of a function that executes a syscall
// and loops.
func AddrOfSyscallLoop() uintptr
func syscallLoop()
// AddrOfSpinLoop returns the address of a function that spins on the CPU.
func AddrOfSpinLoop() uintptr
func spinLoop()
// AddrOfHaltLoop returns the address of a function that immediately halts and
// loops.
func AddrOfHaltLoop() uintptr
func haltLoop()
// AddrOfTwiddleRegsFault returns the address of a function that twiddles
// registers then faults.
func AddrOfTwiddleRegsFault() uintptr
func twiddleRegsFault()
// AddrOfTwiddleRegsSyscall returns the address of a function that twiddles
// registers then executes a syscall.
func AddrOfTwiddleRegsSyscall() uintptr
func twiddleRegsSyscall()
// FloatingPointWorks is a floating point test.
//
// It returns true or false.
func FloatingPointWorks() bool
// RegisterMismatchError is used for checking registers.
type RegisterMismatchError []string
// Error returns a human-readable error.
func (r RegisterMismatchError) Error() string {
return strings.Join([]string(r), ";")
}
// addRegisterMismatch allows simple chaining of register mismatches.
func addRegisterMismatch(err error, reg string, got, expected interface{}) error {
errStr := fmt.Sprintf("%s got %08x, expected %08x", reg, got, expected)
switch r := err.(type) {
case nil:
// Return a new register mismatch.
return RegisterMismatchError{errStr}
case RegisterMismatchError:
// Append the error.
r = append(r, errStr)
return r
default:
// Leave as is.
return err
}
}
|
apache-2.0
|
tempbottle/jsimpledb
|
src/java/org/jsimpledb/core/CoreIndex3.java
|
4089
|
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.core;
import java.util.NavigableMap;
import java.util.NavigableSet;
import org.jsimpledb.index.Index;
import org.jsimpledb.index.Index2;
import org.jsimpledb.index.Index3;
import org.jsimpledb.kv.KeyFilter;
import org.jsimpledb.tuple.Tuple2;
import org.jsimpledb.tuple.Tuple3;
import org.jsimpledb.tuple.Tuple4;
/**
* Core API {@link Index} implementation representing a composite index on three fields.
*
* <p>
* Instances are immutable.
* </p>
*
* @param <V1> first index value type
* @param <V2> second index value type
* @param <V3> third index value type
* @param <T> index target type
*/
public class CoreIndex3<V1, V2, V3, T> extends AbstractCoreIndex implements Index3<V1, V2, V3, T> {
// Constructors
CoreIndex3(Transaction tx, Index3View<V1, V2, V3, T> indexView) {
super(tx, 4, indexView);
}
// Methods
@Override
public CoreIndex3<V1, V2, V3, T> filter(int index, KeyFilter filter) {
return new CoreIndex3<V1, V2, V3, T>(this.tx, this.getIndex3View().filter(index, filter));
}
@SuppressWarnings("unchecked")
Index3View<V1, V2, V3, T> getIndex3View() {
return (Index3View<V1, V2, V3, T>)this.indexView;
}
// Index3
@Override
public NavigableSet<Tuple4<V1, V2, V3, T>> asSet() {
// Get index view
final Index3View<V1, V2, V3, T> iv = this.getIndex3View();
// Create field type for Tuple4<V1, V2, V3, T>
final Tuple4FieldType<V1, V2, V3, T> fieldType = new Tuple4FieldType<V1, V2, V3, T>(
iv.getValue1Type(), iv.getValue2Type(), iv.getValue3Type(), iv.getTargetType());
// Build set and apply filtering
IndexSet<Tuple4<V1, V2, V3, T>> indexSet = new IndexSet<Tuple4<V1, V2, V3, T>>(this.tx,
fieldType, iv.prefixMode, iv.prefix);
if (iv.hasFilters())
indexSet = indexSet.filterKeys(new IndexKeyFilter(this.tx, iv, 4));
// Done
return indexSet;
}
@Override
public NavigableMap<Tuple3<V1, V2, V3>, NavigableSet<T>> asMap() {
// Get index view
final Index3View<V1, V2, V3, T> iv = this.getIndex3View();
// Create new IndexView
final IndexView<Tuple3<V1, V2, V3>, T> tupleIV = iv.asTuple3IndexView();
// Build map and apply filtering
IndexMap<Tuple3<V1, V2, V3>, NavigableSet<T>> indexMap = new IndexMap.OfValues<Tuple3<V1, V2, V3>, T>(this.tx, tupleIV);
if (tupleIV.hasFilters())
indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, tupleIV, 1));
// Done
return indexMap;
}
@Override
public NavigableMap<Tuple2<V1, V2>, Index<V3, T>> asMapOfIndex() {
// Get index view
final Index3View<V1, V2, V3, T> iv = this.getIndex3View();
// Create new IndexView
final Index2View<Tuple2<V1, V2>, V3, T> tupleIV = iv.asTuple2Index2View();
// Build map and apply filtering
IndexMap<Tuple2<V1, V2>, Index<V3, T>> indexMap = new IndexMap.OfIndex<Tuple2<V1, V2>, V3, T>(this.tx, tupleIV);
if (iv.hasFilters())
indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, tupleIV, 1));
// Done
return indexMap;
}
@Override
public NavigableMap<V1, Index2<V2, V3, T>> asMapOfIndex2() {
// Get index view
final Index3View<V1, V2, V3, T> iv = this.getIndex3View();
// Build map and apply filtering
IndexMap<V1, Index2<V2, V3, T>> indexMap = new IndexMap.OfIndex2<V1, V2, V3, T>(this.tx, iv);
if (iv.hasFilters())
indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, iv, 1));
// Done
return indexMap;
}
@Override
public CoreIndex2<V1, V2, V3> asIndex2() {
return new CoreIndex2<V1, V2, V3>(this.tx, this.getIndex3View().asIndex2View());
}
@Override
public CoreIndex<V1, V2> asIndex() {
return new CoreIndex<V1, V2>(this.tx, this.getIndex3View().asIndex2View().asIndexView());
}
}
|
apache-2.0
|
F5Networks/f5-cloud-libs
|
test/lib/localCryptoUtilTests.js
|
7520
|
/**
* Copyright 2016-2018 F5 Networks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const q = require('q');
const assert = require('assert');
describe('local crypto util tests', () => {
let localKeyUtilMock;
let cryptoUtilMock;
let utilMock;
let localCryptoUtil;
let childProcessMock;
let dataSent;
let optionsSent;
let encryptedKeySent;
let dataToDecrypt;
/* eslint-disable global-require */
beforeEach(() => {
utilMock = require('../../lib/util');
localKeyUtilMock = require('../../lib/localKeyUtil');
cryptoUtilMock = require('../../lib/cryptoUtil');
localCryptoUtil = require('../../lib/localCryptoUtil');
encryptedKeySent = undefined;
localKeyUtilMock.getPrivateKeyFilePath = function getPrivateKeyFilePath() {
return q('/foo/bar');
};
localKeyUtilMock.getPrivateKeyMetadata = function getPrivateKeyMetadata() {
return q({});
};
localKeyUtilMock.getExistingPrivateKeyName = function getExistingPrivateKeyName(folder, name) {
return q(name);
};
utilMock.readDataFromFile = function readDataFromFile() {
return q(dataToDecrypt);
};
cryptoUtilMock.decrypt = function decrypt(privateKey, data, options) {
dataSent = data;
optionsSent = options;
return q('hello, world');
};
cryptoUtilMock.symmetricDecrypt = function symmetricDecrypt(
privateKey,
encryptedKey,
iv,
data,
options
) {
dataSent = data;
optionsSent = options;
encryptedKeySent = encryptedKey;
return q('hello, world');
};
});
afterEach(() => {
Object.keys(require.cache).forEach((key) => {
delete require.cache[key];
});
});
describe('decrypt data tests', () => {
it('no file test', () => {
assert.throws(() => {
localCryptoUtil.decryptData(null, 'foo', 'bar');
}, /data is required/);
});
it('no private key folder test', () => {
assert.throws(() => {
localCryptoUtil.decryptData('foo', null, 'bar');
}, /privateKeyFolder is required/);
});
it('no private key name test', () => {
assert.throws(() => {
localCryptoUtil.decryptData('foo', 'bar');
}, /privateKeyName is required/);
});
it('basic test', () => {
dataToDecrypt = 'abcd';
return localCryptoUtil.decryptData(dataToDecrypt, 'foo', 'bar')
.then((response) => {
assert.strictEqual(dataSent, 'abcd');
assert.strictEqual(response, 'hello, world');
});
});
it('no passphrase test', () => {
return localCryptoUtil.decryptData('foo', 'foo', 'bar')
.then(() => {
assert.strictEqual(optionsSent.passphrase, undefined);
assert.strictEqual(optionsSent.passphraseEncrypted, false);
});
});
it('passphrase test', () => {
localKeyUtilMock.getPrivateKeyMetadata = function getPrivateKeyMetadata() {
return q({ passphrase: 'mypassphrase' });
};
return localCryptoUtil.decryptData('foo', 'foo', 'bar')
.then(() => {
assert.strictEqual(optionsSent.passphrase, 'mypassphrase');
assert.strictEqual(optionsSent.passphraseEncrypted, true);
});
});
});
describe('decrypt password tests', () => {
it('basic test', () => {
return localCryptoUtil.decryptPassword('secret')
.then((decryptedSecret) => {
assert.deepStrictEqual(decryptedSecret, 'hello, world');
});
});
});
describe('symmetric decrypt password tests', () => {
it('basic test', () => {
dataToDecrypt = {
encryptedData: 'secret',
encryptedKey: 'key',
iv: 'foo',
privateKey: {
folder: 'foo',
name: 'bar'
}
};
return localCryptoUtil.symmetricDecryptPassword(dataToDecrypt)
.then((decryptedSecret) => {
assert.deepStrictEqual(decryptedSecret, 'hello, world');
});
});
});
describe('decrypt data from file tests', () => {
it('basic test', () => {
dataToDecrypt = 'abcd';
return localCryptoUtil.decryptDataFromFile('/foo/bar')
.then((response) => {
assert.strictEqual(dataSent, dataToDecrypt);
assert.strictEqual(response, 'hello, world');
});
});
it('symmetric test', () => {
const encryptedData = 'secret';
const encryptedKey = 'key';
dataToDecrypt = JSON.stringify({
encryptedData,
encryptedKey,
iv: 'foo',
privateKey: {
folder: 'foo',
name: 'bar'
}
});
return localCryptoUtil.decryptDataFromFile('/foo/bar', { symmetric: true })
.then((response) => {
assert.strictEqual(dataSent, encryptedData);
assert.strictEqual(response, 'hello, world');
assert.strictEqual(encryptedKeySent, encryptedKey);
});
});
it('error test', () => {
assert.throws(() => {
localCryptoUtil.decryptDataFromFile(null);
}, /dataFile is required/);
});
});
describe('decrypt conf value tests', () => {
beforeEach(() => {
childProcessMock = require('child_process');
});
it('basic test', () => {
childProcessMock.execFile = function execFile(file, params, cb) {
cb(null, 'hello, world', null);
};
return localCryptoUtil.decryptConfValue('foo')
.then((response) => {
assert.strictEqual(response, 'hello, world');
});
});
it('error test', () => {
childProcessMock.execFile = function execFile(file, params, cb) {
cb(new Error('foo'), null, 'bar');
};
return localCryptoUtil.decryptConfValue('foo')
.then(() => {
assert.ok(false, 'decryptConfValue should have thrown');
})
.catch((err) => {
assert.notStrictEqual(err.message.indexOf('bar'), -1);
});
});
});
});
|
apache-2.0
|
Chanven/CommonPullToRefresh
|
cptr/src/com/chanven/lib/cptr/utils/PtrLocalDisplay.java
|
1526
|
package com.chanven.lib.cptr.utils;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.View;
import android.view.WindowManager;
public class PtrLocalDisplay {
public static int SCREEN_WIDTH_PIXELS;
public static int SCREEN_HEIGHT_PIXELS;
public static float SCREEN_DENSITY;
public static int SCREEN_WIDTH_DP;
public static int SCREEN_HEIGHT_DP;
public static void init(Context context) {
if (context == null) {
return;
}
DisplayMetrics dm = new DisplayMetrics();
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
wm.getDefaultDisplay().getMetrics(dm);
SCREEN_WIDTH_PIXELS = dm.widthPixels;
SCREEN_HEIGHT_PIXELS = dm.heightPixels;
SCREEN_DENSITY = dm.density;
SCREEN_WIDTH_DP = (int) (SCREEN_WIDTH_PIXELS / dm.density);
SCREEN_HEIGHT_DP = (int) (SCREEN_HEIGHT_PIXELS / dm.density);
}
public static int dp2px(float dp) {
final float scale = SCREEN_DENSITY;
return (int) (dp * scale + 0.5f);
}
public static int designedDP2px(float designedDp) {
if (SCREEN_WIDTH_DP != 320) {
designedDp = designedDp * SCREEN_WIDTH_DP / 320f;
}
return dp2px(designedDp);
}
public static void setPadding(final View view, float left, float top, float right, float bottom) {
view.setPadding(designedDP2px(left), dp2px(top), designedDP2px(right), dp2px(bottom));
}
}
|
apache-2.0
|
rlon008/testamation
|
testamation-test-common/src/main/java/nz/co/testamation/testcommon/fixture/SomeFixture.java
|
5010
|
/*
* Copyright 2016 Ratha Long
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.co.testamation.testcommon.fixture;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.RandomStringUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.concurrent.ThreadLocalRandom;
public class SomeFixture {
public static String someString( int length ) {
return RandomStringUtils.randomAlphabetic( length );
}
public static String someString() {
return RandomStringUtils.randomAlphabetic( 7 );
}
public static int someInt() {
return ThreadLocalRandom.current().nextInt();
}
public static long someLong() {
return ThreadLocalRandom.current().nextLong();
}
public static int someIntLessThan( int maxExclusive ) {
return ThreadLocalRandom.current().nextInt( maxExclusive );
}
public static int someYear() {
return 2000 + someIntLessThan( 11 );
}
public static Duration someDuration() {
return Duration.ofSeconds( someLong() );
}
public static String someEmail() {
return String.format( "%s@%s.com", RandomStringUtils.randomAlphabetic( 5 ), RandomStringUtils.randomAlphabetic( 5 ) ).toLowerCase();
}
public static LocalDate someLocalDate() {
return LocalDate.of(
someYear(),
someIntLessThan( 12 ) + 1,
someIntLessThan( 28 ) + 1
);
}
public static LocalDateTime someDateTime() {
return LocalDateTime.of(
someYear(),
someIntLessThan( 12 ) + 1,
someIntLessThan( 28 ) + 1,
someIntBetween( 4, 24 ), // avoid sometimes invalid 2am due to daylight savings
someIntLessThan( 60 ),
someIntLessThan( 60 )
);
}
public static Boolean someBoolean() {
return ThreadLocalRandom.current().nextBoolean();
}
public static <E extends Enum> E someEnum( Class<E> enumClazz ) {
return someValue( enumClazz.getEnumConstants() );
}
public static <E extends Enum> E someEnum( Class<E> enumClazz, Predicate<E> predicate ) {
E result;
do {
result = someEnum( enumClazz );
} while ( !predicate.apply( result ) );
return result;
}
public static <T extends Enum> T someEnumOtherThan( Class<T> enumClazz, T... excluded ) {
return someValue( Sets.difference( ImmutableSet.copyOf( enumClazz.getEnumConstants() ), ImmutableSet.copyOf( excluded ) ) );
}
private static int someInt( int length ) {
        return Integer.parseInt( RandomStringUtils.randomNumeric( length ) );
}
public static Integer someIntBetween( int minInclusive, int maxExclusive ) {
return ThreadLocalRandom.current().nextInt( minInclusive, maxExclusive );
}
public static String someEmail( String prefix ) {
return prefix + "_" + someEmail();
}
public static BigDecimal someBigDecimal() {
return new BigDecimal( somePositiveInt() );
}
public static BigDecimal someBigDecimalPercentage() {
return new BigDecimal( someDouble() );
}
public static double someDouble() {
return ThreadLocalRandom.current().nextDouble();
}
public static byte[] someBytes() {
return someString().getBytes();
}
public static int somePositiveInt() {
return Math.abs( someInt() );
}
public static <T> T someValue( T... values ) {
return values[ someIntLessThan( values.length ) ];
}
public static <T> T someValue( Collection<T> values ) {
return Iterables.get( values, someIntLessThan( values.size() ) );
}
    public static String someString( String... choices ) {
        return choices[ SomeFixture.someIntBetween( 0, choices.length ) ];
}
public static String someString( Iterable<String> choices ) {
return someString( Iterables.toArray( choices, String.class ) );
}
public static BigInteger someBigInteger() {
return new BigInteger( String.valueOf( somePositiveInt() ) );
}
public static <T> T someThing( T... things ) {
return things[ someIntBetween( 0, things.length ) ];
}
}
|
apache-2.0
|
wmh-demos/HookDemo
|
binder-hook/src/main/java/me/com/hookdemo/hook/HookHelper.java
|
1128
|
package me.com.hookdemo.hook;
import android.app.Instrumentation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
public class HookHelper {
/**
     * Uses reflection to locate the ActivityThread class and replaces its
     * Instrumentation instance with our own implementation.
*/
public static void attachContext() throws Exception {
Class<?> activityThreadClass = Class.forName("android.app.ActivityThread");
Method currentActivityThreadMethod = activityThreadClass.getDeclaredMethod("currentActivityThread");
currentActivityThreadMethod.setAccessible(true);
Object currentActivityThread = currentActivityThreadMethod.invoke(null);
Field mInstrumentationField = activityThreadClass.getDeclaredField("mInstrumentation");
mInstrumentationField.setAccessible(true);
Instrumentation mInstrumentation = (Instrumentation) mInstrumentationField.get(currentActivityThread);
Instrumentation demonInstrumentation = new DemonInstrumentation(mInstrumentation);
mInstrumentationField.set(currentActivityThread, demonInstrumentation);
}
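    /*
     * Usage sketch (illustrative addition, not part of the original demo): a typical call site is an
     * Application subclass that installs the hook before any Activity starts. "MyApplication" is a
     * hypothetical example class name, and failures are simply logged.
     *
     * public class MyApplication extends android.app.Application {
     *     @Override
     *     protected void attachBaseContext(android.content.Context base) {
     *         super.attachBaseContext(base);
     *         try {
     *             HookHelper.attachContext();
     *         } catch (Exception e) {
     *             android.util.Log.e("HookDemo", "failed to hook Instrumentation", e);
     *         }
     *     }
     * }
     */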
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Amaranthaceae/Desmochaeta/Desmochaeta uncinata/README.md
|
186
|
# Desmochaeta uncinata Roem. & Schult. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
danihegglin/DynDCO
|
src/main/scala/ch/uzh/dyndco/algorithms/maxsum/VariableVertex.scala
|
4517
|
package ch.uzh.dyndco.algorithms.maxsum
import collection.mutable.Map
import collection.mutable.Set
import scala.collection.SortedMap
import scala.util.Random
import ch.uzh.dyndco.stack.vertex.DynamicVertex
import ch.uzh.dyndco.problems.MeetingConstraints
import ch.uzh.dyndco.util.Monitoring
import scala.collection.mutable.MutableList
import ch.uzh.dyndco.problems.MeetingSchedulingFactory
class VariableVertex (id: Any, initialState: MaxSumMessage)
extends DynamicVertex(id, initialState) {
/**
* This avoids type-checks/-casts.
*/
type Signal = MaxSumMessage
/**
* Build Utilities for all Function Vertices
*/
def buildUtilities(allMarginalUtilities : Map[Any, Map[Any, Map[Int, Double]]]): Map[Any,Map[Int, Double]] = {
val utilities = Map[Any,Map[Int, Double]]() // Keeps track which meeting has which assignment
for(functionVertex <- allMarginalUtilities.keys){
// build set of other functionVertices for the particular functionVertex target
var allUtilities = Set[Map[Any,Map[Int,Double]]]() // holds functions, all variables and their costs
for(currFunctionVertex <- allMarginalUtilities.keys){
if(currFunctionVertex != functionVertex || allMarginalUtilities.size == 1){
var variableUtility = allMarginalUtilities.apply(currFunctionVertex)
allUtilities += variableUtility
}
}
// build assignment -> costs for the particular functionVertex target
var assignmentMap : Map[Int,Double] = Map[Int,Double]()
for(assignment : Int <- 1 to TIMESLOTS){
var utility : Double = 0
// process every utility map
for(functions <- allUtilities){
for(currVariableVertexId <- functions.keys){
var variableAssignments = functions.apply(currVariableVertexId)
if(variableAssignments.contains(assignment)){
utility += variableAssignments.apply(assignment)
}
}
}
// normalize utility
utility = normalize(utility)
assignmentMap += (assignment -> utility)
}
// Add assignment costs to map for all functionVertices
utilities += (functionVertex -> assignmentMap)
}
utilities
}
/**
* Find Best Value
*/
def findBestValueAssignment(marginalUtilities : Map[Any,Map[Int, Double]]) : Int = {
// Sum of all utilities
var allUtilities = Map[Int,Double]()
for(utilities <- marginalUtilities.values){
for(timeslot <- utilities.keys){
var utility = utilities.apply(timeslot)
if(allUtilities.contains(timeslot)){
utility += allUtilities.apply(timeslot)
}
allUtilities += (timeslot -> utility)
}
}
// Find max value
findMaxValue(allUtilities)
}
/**
* Collect Signals
*/
def collect() = {
newRound()
if(initialized){
// check if finished
convergenceCheck()
// build all Utilities: function -> variable -> timeslot -> utility
var isNull : Boolean = true
val receivedUtilities = Map[Any, Map[Any, Map[Int, Double]]]()
for (signal <- signals.iterator) {
try {
var message : MaxSumMessage = signal
for(utilities <- message.utilities.values){
for(utility <- utilities.values){
if(utility < 0 || utility > 0){
isNull = false
}
}
}
receivedUtilities += (message.sender -> message.utilities)
}
catch {
case e : Exception =>
//println("signal was null")
}
}
// prepare utilities
val allUtilities = buildUtilities(receivedUtilities)
if(!isNull){
// find best assignments for all requirements
if(!converged){
var maxValue = findBestValueAssignment(allUtilities)
registerValue(maxValue)
}
      // store current utility
storeAgentUtility()
}
new MaxSumMessage(id, allUtilities)
}
else {
// initialize
initialized = true
// add pref to index
value = CONSTRAINTS_CURRENT.preferred.apply(MEETING_ID)
var currentUtilities = calculateAllUtilities(CONSTRAINTS_CURRENT)
var utilities = Map[Any, Map[Int, Double]]()
utilities += (id -> currentUtilities)
new MaxSumMessage(id, utilities)
}
}
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Orobanchaceae/Pedicularis/Pedicularis ornithorhyncha/ Syn. Pedicularis pedicellata/README.md
|
186
|
# Pedicularis pedicellata Bunge SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Asparagaceae/Pseudomuscari/Pseudomuscari forniculatum/ Syn. Bellevalia forniculata/README.md
|
196
|
# Bellevalia forniculata (Fomin) Delaunay SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Bryophyta/Bryopsida/Bryales/Bryaceae/Bryum/Bryum pachytheca/README.md
|
191
|
# Bryum pachytheca C. Müller, 1848 SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Fungi/Basidiomycota/Agaricomycetes/Cantharellales/Cantharellaceae/Cantharellus/Cantharellus retirugis/ Syn. Cantharellus retirugus marginatus/README.md
|
216
|
# Cantharellus retirugus var. marginatus Maire VARIETY
#### Status
SYNONYM
#### According to
Index Fungorum
#### Published in
null
#### Original name
Cantharellus retirugus var. marginatus Maire
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Poales/Cyperaceae/Carex/Carex argunensis/ Syn. Carex argunensis alticola/README.md
|
192
|
# Carex argunensis subsp. alticola SUBSPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Hieracium/Hieracium pallescens/Hieracium pallescens jugicola/README.md
|
225
|
# Hieracium pallescens subsp. jugicola (Zahn) Greuter SUBSPECIES
#### Status
ACCEPTED
#### According to
Euro+Med Plantbase
#### Published in
null
#### Original name
Hieracium maureri subsp. jugicola Zahn
### Remarks
null
|
apache-2.0
|
terraform-google-modules/docs-examples
|
data_catalog_entry_group_tag/tutorial.md
|
1859
|
# Data Catalog Entry Group Tag - Terraform
## Setup
<walkthrough-author name="[email protected]" analyticsId="UA-125550242-1" tutorialName="data_catalog_entry_group_tag" repositoryUrl="https://github.com/terraform-google-modules/docs-examples"></walkthrough-author>
Welcome to Terraform in Google Cloud Shell! We need you to let us know what project you'd like to use with Terraform.
<walkthrough-project-billing-setup></walkthrough-project-billing-setup>
Terraform provisions real GCP resources, so anything you create in this session will be billed against this project.
## Terraforming!
Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command
to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up
the project name from the environment variable.
```bash
export GOOGLE_CLOUD_PROJECT={{project-id}}
```
After that, let's get Terraform started. Run the following to pull in the providers.
```bash
terraform init
```
With the providers downloaded and a project set, you're ready to use Terraform. Go ahead!
```bash
terraform apply
```
Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan.
```bash
yes
```
## Post-Apply
### Editing your config
Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed.
```bash
terraform plan
```
So let's make a change! Try editing a number, or appending a value to the name in the editor. Then,
run a 'plan' again.
```bash
terraform plan
```
Afterwards you can run an apply, which implicitly does a plan and shows you the intended changes
at the 'yes' prompt.
```bash
terraform apply
```
```bash
yes
```
## Cleanup
Run the following to remove the resources Terraform provisioned:
```bash
terraform destroy
```
```bash
yes
```
|
apache-2.0
|
CSCSI/Triana
|
triana-core/src/main/java/org/trianacode/taskgraph/proxy/java/JavaConstants.java
|
3506
|
/*
* The University of Wales, Cardiff Triana Project Software License (Based
* on the Apache Software License Version 1.1)
*
* Copyright (c) 2007 University of Wales, Cardiff. All rights reserved.
*
* Redistribution and use of the software in source and binary forms, with
* or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if any,
* must include the following acknowledgment: "This product includes
* software developed by the University of Wales, Cardiff for the Triana
* Project (http://www.trianacode.org)." Alternately, this
* acknowledgment may appear in the software itself, if and wherever
* such third-party acknowledgments normally appear.
*
* 4. The names "Triana" and "University of Wales, Cardiff" must not be
* used to endorse or promote products derived from this software
* without prior written permission. For written permission, please
* contact [email protected].
*
* 5. Products derived from this software may not be called "Triana," nor
* may Triana appear in their name, without prior written permission of
* the University of Wales, Cardiff.
*
* 6. This software may not be sold, used or incorporated into any product
* for sale to third parties.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
* NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*
* ------------------------------------------------------------------------
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Triana Project. For more information on the
* Triana Project, please see. http://www.trianacode.org.
*
* This license is based on the BSD license as adopted by the Apache
* Foundation and is governed by the laws of England and Wales.
*
*/
package org.trianacode.taskgraph.proxy.java;
/**
* Constants used by java units.
*
* @author Ian Wang
* @version $Revision: 4048 $
*/
public interface JavaConstants {
/**
* the proxy type for java units
*/
public static final String JAVA_PROXY_TYPE = "Java";
// the unit name for java units
public static final String UNIT_NAME = "unitName";
// the unit package for java units
public static final String UNIT_PACKAGE = "unitPackage";
/**
* the rendering hint for java units
*/
public static final String JAVA_RENDERING_HINT = "Java";
}
|
apache-2.0
|
vteco/cfin
|
apps/frontend/modules/standalone/templates/_footer_fr.php
|
4252
|
<aside class="alignLeft">
<div class="container_24 alignLeft">
<div class="wrapper">
<article class="grid_5">
<h6>Nos services</h6>
<ul>
<li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit immobilier</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_2') ?>">Crédit patrimonial</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_3') ?>">Mobilisation du patrimoine</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_4') ?>">Financement Entreprise</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_5') ?>">French Mortgage</a></li>
</ul>
</article>
<article class="grid_5">
<h6>Nos solutions</h6>
<ul>
<li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit Immobilier résidentiel</a></li>
<li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit Immobilier commercial</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_3') ?>">Crédit hypothécaire</a></li>
<li><a href="<?php echo href_by_pagename('page_gestion_4') ?>">Crédit lombard</a></li>
<li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement de fond de commerce</a></li>
<li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Acquisition de titres</a></li>
<li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement LBO</a></li>
<li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement de BFR</a></li>
<li>Affacturage</li>
</ul>
</article>
<article class="grid_5">
<h6>Qui sommes-nous?</h6>
<ul>
<li><a href="<?php echo href_by_pagename('page_quisommesnous_2') ?>">Courtier en immobilier</a></li>
<li><a href="<?php echo href_by_pagename('page_quisommesnous_4') ?>">Équipe</a></li>
<li><a href="<?php echo href_by_pagename('page_partenaires') ?>">Partenaires</a></li>
<li><a href="<?php echo href_by_pagename('page_mentionslegales') ?>">Mentions légales</a></li>
<li><a href="<?php echo url_for('@contact') ?>">Nous contacter</a></li>
<li><a href="<?php echo url_for('@news-list') ?>">Presse</a></li>
<li><a href="<?php echo url_for('@faq-list') ?>">FAQ</a></li>
<li><a href="<?php echo href_by_pagename('page_infospratiques_fr') ?>">Informations pratiques</a></li>
</ul>
</article>
<article class="grid_9">
<h6>S'inscrire à la newsletter</h6>
<form id="newsletter-form">
<input type="text" value="e-mail address" onFocus="if(this.value=='e-mail address'){this.value=''}" onBlur="if(this.value==''){this.value='e-mail address'}">
<a class="button1" onClick="document.getElementById('newsletter-form').submit()">S'inscrire</a>
</form>
<h6>Nous contacter</h6>
<div class="icons">
<a class="normaltip" target="_blank" title="Facebook" href="http://www.facebook.com/pages/Carte-Financement/103833006320861"><img src="/images/socialIcons/facebook-grey.png" alt=""><img src="/images/socialIcons/facebook.png" alt=""></a>
<a class="normaltip" target="_blank" title="Twitter" href="http://twitter.com/courtiercredit"><img src="/images/socialIcons/twitter-grey.png" alt=""><img src="/images/socialIcons/twitter.png" alt=""></a>
<a class="normaltip" target="_blank" title="LinkedIn" href="http://www.linkedin.com/company/carte-financement?trk=cp_followed_name_carte-financement"><img src="/images/socialIcons/linkedin-grey.png" alt=""><img src="/images/socialIcons/linkedin.png" alt=""></a>
<a class="normaltip" target="_blank" title="Google plus" href="https://plus.google.com/111303088006101634610" rel="publisher"><img src="/images/socialIcons/google-plus-grey.png" alt=""><img src="/images/socialIcons/google-plus.png" alt=""></a>
</div>
</article>
</div>
</div>
</aside>
<!--==============================footer=================================-->
<footer>
<span>CarteFinancement © 2012 | </span>
</footer>
<script type="text/javascript">
Cufon.now();
</script>
|
apache-2.0
|
soajs/soajs.dashboard
|
test/unit/lib/environment/drivers/infra.test.js
|
49180
|
"use strict";
var async = require("async");
var helper = require("../../../../helper.js");
var config = require("../../../../../config.js");
var utils = helper.requireModule('./lib/environment/drivers/infra.js');
const nock = require('nock');
var req = {
soajs: {
registry: {
coreDB: {
provision: {
name: 'core_provision',
prefix: '',
servers: [
{ host: '127.0.0.1', port: 27017 }
],
credentials: null,
streaming: {
batchSize: 10000,
colName: {
batchSize: 10000
}
},
URLParam: {
maxPoolSize: 2, bufferMaxEntries: 0
},
registryLocation: {
l1: 'coreDB',
l2: 'provision',
env: 'dev'
},
timeConnected: 1491861560912
}
},
services: {
controller : {
port : 80
}
}
},
log: {
debug: function (data) {
},
error: function (data) {
},
info: function (data) {
}
},
inputmaskData: {
specs : {}
},
validator: {
Validator: function () {
return {
validate: function () {
return {
errors: []
};
}
};
}
},
awareness: {
getHost: function (service, cb) {
return cb ("dashboard.com");
}
}
},
headers: {
key : "key",
soajsauth: "auth",
},
query: {
"access_token": "token"
}
};
var mongoStub = {
findEntry: function (soajs, opts, cb) {
if (opts.collection === 'infra') {
cb (null, {'_id' : 123123})
} else {
cb(null, {
"productize": {
"modifyTemplateStatus": true
},
"cluster": {},
"controller": {},
"urac": {},
"oauth": {},
"nginx": {},
"user": {}
});
}
},
updateEntry: function (soajs, opts, cb) {
cb(null, true);
},
saveEntry: function (soajs, opts, cb) {
cb(null, true);
},
removeEntry: function (soajs, opts, cb) {
cb(null, true);
},
closeConnection: function (soajs) {
return true;
},
validateCustomId : function(soajs, id) {
return true
},
onboardVM : function(soajs, id) {
return true
}
};
var BL = {
customRegistry :{
module : {}
},
model: mongoStub,
cd : {
module : {}
},
cloud :{
deploy :{
module :{}
},
services :{
module :{}
},
resources :{
module :{}
},
infra : {
module : {}
}
},
resources: {
module : {}
}
};
var template = {
"type": "_template",
"name": "MGTT",
"description": "Mike Generic Test Template",
"link": "",
"content": {
"custom_registry": {
"data": [
{
"name": "ciConfig",
"value": {
"apiPrefix": "cloud-api",
"domain": "herrontech.com",
"protocol": "https",
"port": 443.0
}
},
{
"name": "ciConfig2",
"value": "string value here ..."
},
{
"name": "ciConfig3",
"value": {
"apiPrefix": "dashboard-api",
"domain": "soajs.org",
"protocol": "https",
"port": 443.0
}
}
]
},
"productization": {
"data": [
{
"code": "MIKE",
"name": "Mike Product",
"description": "Mike Product Description",
"packages": [
{
"code": "BASIC",
"name": "Basic Package",
"description": "Basic Package Description",
"TTL": 2160000.0,
"acl": {
"oauth": {},
"urac": {},
"daas": {}
}
},
{
"code": "MAIN",
"name": "Main Package",
"description": "Main Package Description",
"TTL": 2160000.0,
"acl": {}
}
]
}
]
},
"tenant": {
"data": [
{
"code": "MIKE",
"name": "Mike Tenant",
"description": "Mike Tenant Description",
"applications": [
{
"product": "MIKE",
"package": "MIKE_MAIN",
"description": "Mike main application",
"_TTL": 2160000.0,
"keys": [
{
"extKeys": [
{
"device": {},
"geo": {},
"dashboardAccess": false,
"expDate": null
}
],
"config": {
"a": "b"
}
}
]
},
{
"product": "MIKE",
"package": "MIKE_USER",
"description": "Mike Logged In user Application",
"_TTL": 2160000.0,
"keys": [
{
"extKeys": [
{
"device": {},
"geo": {},
"dashboardAccess": true,
"expDate": null
}
],
"config": {
"c": "d"
}
}
]
}
]
}
]
},
"secrets": {
"data": [
{
"name": "mike"
}
]
},
"deployments": {
"repo": {
"controller": {
"label": "SOAJS API Gateway",
"name": "controller",
"type": "service",
"category": "soajs",
"deploy": {
"memoryLimit": 500.0,
"mode": "replicated",
"replicas": 1.0
}
}
},
"resources": {
"nginx": {
"label": "Nginx",
"type": "server",
"category": "nginx",
"ui": "${REF:resources/drivers/server/nginx}",
"deploy": {
"memoryLimit": 500.0,
"mode": "global",
"secrets": "mike"
}
},
"external": {
"label": "External Mongo",
"type": "cluster",
"category": "mongo",
"limit": 1.0,
"ui": "${REF:resources/drivers/cluster/mongo}",
"deploy": null
}
}
}
},
"deploy": {
database: {
pre: {
custom_registry: {
imfv: [
{
name: 'ciConfig',
locked: true,
plugged: false,
shared: true,
value: {
test1: true
}
},
{
name: 'ciConfig2',
locked: true,
plugged: false,
shared: true,
value: {
test2: true
}
},
{
name: 'ciConfig3',
locked: true,
plugged: false,
shared: true,
value: {
test3: true
}
}
]
}
},
steps: {
productization: {
ui: {
readOnly: true
}
},
tenant: {
ui: {
readOnly: true
}
}
},
post: {
'deployments__dot__resources__dot__external': {
imfv: [
{
name: 'external',
type: 'cluster',
category: 'mongo',
locked: false,
shared: false,
plugged: false,
config: {
username: 'username',
password: 'pwd'
}
}
],
"status":{
"done": true,
"data":[
{
"db": "mongo id of this resource"
}
]
}
}
}
},
deployments: {
pre: {
"infra.cluster.deploy": {
"imfv" : [
{
"command":{
"method" : "post",
"routeName" : "/bridge/executeDriver", //change the path
"data" : {
"type" : "infra",
"name" : "google",
"driver" : "google",
"command" : "deployCluster",
"project" : "demo",
"options" : {
"region" : "us-east1-b",
"workernumber" : 3,
"workerflavor" : "n1-standard-2",
"regionLabel" : "us-east1-b",
"technology" : "kubernetes",
"envCode" : "PORTAL"
}
}
},
"check" : {
"id" : {
"type" : "string",
"required": true
}
}
},
{
"recursive" : {
"max" : 5,
"delay": 300
},
"check" : {
"id" : {
"type" : "string",
"required": true
},
"ip" : {
"type" : "string",
"required": true
}
},
"command": {
"method" : "post",
"routeName" : "/bridge/executeDriver",
"data" : {
"type" : "infra",
"name" : "google",
"driver" : "google",
"command" : "getDeployClusterStatus",
"project" : "demo",
"options" : {
"envCode" : "PORTAL"
}
}
}
}
],
"status": {
"done": true,
"data": {
"id": "kaza",
"ip": "kaza",
"dns": { "a":"b" }
},
"rollback" : {
"command":{
"method" : "post",
"routeName" : "/bridge/executeDriver",
"params": {},
"data" : {
"type" : "infra",
"name" : "google",
"driver" : "google",
"command" : "deleteCluster",
"project" : "demo",
"options" : {
"envCode" : "PORTAL",
"force" : true
}
}
}
}
},
}
},
steps: {
secrets: {
imfv: [
{
name: 'mike',
type: 'Generic',
data: 'something in secret'
}
]
},
'deployments.repo.controller': {
imfv: [
{
name: 'controller',
options: {
deployConfig: {
replication: {
mode: 'replicated',
replicas: 1
},
memoryLimit: 524288000
},
gitSource: {
owner: 'soajs',
repo: 'soajs.controller',
branch: 'master',
commit: '468588b0a89e55020f26b805be0ff02e0f31a7d8'
},
custom: {
sourceCode: {},
name: 'controller',
type: 'service'
},
recipe: '5ab4d65bc261bdb38a9fe363',
env: 'MIKE'
},
deploy: true,
type: 'custom'
}
],
"status": {
}
},
'deployments.resources.nginx': {
imfv: [
{
name: 'nginx',
type: 'server',
category: 'nginx',
locked: false,
shared: false,
plugged: false,
config: null,
deploy: {
options: {
deployConfig: {
replication: {
mode: 'global'
},
memoryLimit: 524288000
},
custom: {
sourceCode: {},
secrets: [
{
name: 'mike',
mountPath: '/etc/soajs/certs',
type: 'certificate'
}
],
name: 'mynginx',
type: 'server'
},
recipe: '5ab4d65bc261bdb38a9fe363',
env: 'MIKE'
},
deploy: true,
type: 'custom'
}
}
]
}
},
post: {
"infra.dns": {
"imfv": [
{
"recursive" : {
"max" : 5,
"delay": 300
},
"check" : {
"dns" : {
"type" : "object",
"required": true
},
"ip" : {
"type" : "string",
"required": true
}
},
"command": {
"method" : "post",
"routeName" : "/bridge/executeDriver",
"data" : {
"type" : "infra",
"name" : "google",
"driver" : "google",
"command" : "getDNSInfo",
"project" : "demo",
"options" : {
"envCode" : "PORTAL"
}
}
}
}
],
"status": {
"done": true,
"data": {
"ip": "kaza",
"dns": { "a":"b" }
}
},
}
}
}
},
soajs_project: "soajs_project"
};
var environmentRecord = {
_id: '5a58d942ace01a5325fa3e4c',
code: 'DASHBORAD',
deployer: {
"type": "container",
"selected": "container.docker.local",
"container": {
"docker": {
"local": {
"socketPath": "/var/run/docker.sock"
},
"remote": {
"nodes": ""
}
},
"kubernetes": {
"local": {
"nginxDeployType": "",
"namespace": {},
"auth": {
"token": ""
}
},
"remote": {
"nginxDeployType": "",
"namespace": {},
"auth": {
"token": ""
}
}
}
}
},
dbs: {
clusters: {
oneCluster: {
servers: {}
}
},
config: {
session: {
cluster: 'oneCluster'
}
}
},
services: {},
profile: '',
"restriction":{
"1231231":{
"eastus": {
group: "grouptest",
network: "networktest"
}
}
}
};
var infraRecord = {
"_id":'5af2b621a0e17acc56000001',
"name": "test",
"technologies": [
"test"
],
"templates": [
"local"
],
"label": "test",
"deployments": []
};
var lib = {
initBLModel : function(module, modelName, cb){
return cb(null, {
add : function (context, req, data, cb) {
return cb(null, true);
},
delete : function (context, req, data, cb) {
return cb(true);
},
saveConfig : function (context, req, data, cb) {
return cb(null, true);
},
deployService : function (context, req, data, cb) {
return cb(null, {service: {
id: "1"
}});
},
deleteService : function (context, req, data, cb) {
return cb(null, true);
},
addResource: function (context, req, data, cb) {
return cb(null, {_id: "1"});
},
setConfig: function (context, req, data, cb) {
return cb(null, true);
},
deleteResource: function (context, req, data, cb) {
return cb(true);
},
list : function (config, soajs, deployer, cb) {
return cb(null, true);
},
activate : function (config, soajs, deployer, cb) {
return cb(true);
},
modify : function (config, soajs, deployer, cb) {
return cb(null, true);
},
deactivate : function (config, soajs, deployer, cb) {
return cb(null, {service: {
id: "1"
}});
},
removeDeployment : function (config, soajs, deployer, cb) {
return cb(null, true);
},
getDeployClusterStatus: function (config, soajs, req ,deployer, cbMain) {
return cbMain(null, true);
},
deployCluster: function (config, soajs, deployer,req, cb) {
return cb(null, true);
},
scaleCluster: function (config, soajs, deployer, cb) {
return cb(true);
},
removeEnvFromDeployment: function (config, soajs, req, deployer, cb) {
return cb(true);
},
getCluster: function (config, soajs, deployer, cb) {
return cb(true);
},
updateCluster: function (config, soajs, deployer, cb) {
return cb(true);
},
getDNSInfo: function (config, req, soajs, deployer, cb) {
return cb(true);
},
removeTemplate: function (config, soajs, deployer, cb) {
return cb(true);
},
addTemplate: function (config, soajs, deployer, cb) {
return cb(true);
},
updateTemplate: function (config, soajs, deployer, cb) {
return cb(true);
},
uploadTemplate: function (config, soajs, deployer, cb) {
return cb(true);
},
uploadTemplateInputsFile: function (config, soajs, deployer, cb) {
return cb(true);
},
downloadTemplate: function (config, soajs, deployer, cb) {
return cb(true);
},
getDeployVMStatus: function (config, req, soajs, deployer, cb) {
return cb(true);
},
onboardVM: function (config, req, soajs, deployer, cb) {
return cb(true);
},
destroyVM: function (config, req, soajs, deployer, cb) {
return cb(true);
},
deployVM: function (config, req, soajs, deployer, cb) {
return cb(true);
},
});
},
checkReturnError: function(req, {}, {}, cb){
return cb(null, true);
}
};
var context = {};
describe("testing infra.js", function () {
describe("testing validate", function () {
it("success", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": {
"method": "post",
"routeName": "/bridge/executeDriver",
"data": {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}
},
"check": {
"id": {
"type": "string",
"required": true
}
}
},
{
"recursive": {
"max": 5,
"delay": 300
},
"check": {
"id": {
"type": "string",
"required": true
},
"ip": {
"type": "string",
"required": true
}
},
"command": {
"method": "post",
"routeName": "/bridge/executeDriver",
"data": {
"type": "infra",
"name": "google",
"driver": "google",
"command": "getDeployClusterStatus",
"project": "demo",
"options": {
"envCode": "PORTAL"
}
}
}
}
]
}
};
utils.validate(req, context, lib, async, BL, 'mongo', function (err, body) {
done();
})
});
it("success with errors", function (done) {
req.soajs.validator = {
Validator: function () {
return {
validate: function () {
return {
errors: [{err: "msg"}]
};
}
};
}
};
utils.validate(req, context, lib, async, BL, 'mongo', function (err, body) {
done();
})
});
});
describe("testing deploy", function () {
it("success infra already deployed", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
template: JSON.parse(JSON.stringify(template)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "getDeployClusterStatus",
"check": {
"id": {
"type": "string",
"required": true
}
}
},
{
"recursive": {
"max": 0,
"delay": 0
},
"check": {
"id": {
"type": "string",
"required": true
},
"ip": {
"type": "string",
"required": true
}
},
"command": "getDeployClusterStatus",
}
]
}
};
context.template.deploy.deployments.pre["infra.cluster.deploy"].status = {
done: true
};
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
done();
})
});
it("success infra with error", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
}
},
{
// "recursive": {
// "max": 5,
// "delay": 1
// },
"check": {
"id": {
"type": "string",
"required": true
},
"ip": {
"type": "string",
"required": true
}
},
"command": "deployCluster"
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: {}
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra without command", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": '',
"check": {
"id": {
"type": "string",
"required": true
}
}
},
{
"recursive": {
"max": 5,
"delay": 300
},
"check": {
"id": {
"type": "string",
"required": true
},
"ip": {
"type": "string",
"required": true
}
},
"command": 'deployCluster'
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: false
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra without response", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
}
},
{
"recursive": {
"max": 5,
"delay": 300
},
"check": {
"id": {
"type": "string",
"required": true
},
"ip": {
"type": "string",
"required": true
}
},
"command": {
"method": "post",
"routeName": "/bridge/executeDriver",
"data": {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"envCode": "PORTAL"
}
}
}
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: false
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: true
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case getDeployVMStatus", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "getDeployVMStatus",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : ['test'],
data : ['test']
}
},
]
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case onBoard", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "onboardVM",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : ['test'],
data : ['test']
}
},
]
}
};
context.infraProvider.command = 'onboardVM';
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case dnsInfo", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "getDNSInfo",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : ['test'],
data : ['test']
}
},
]
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case deployVm", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployVM",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : {'specs' : {}},
data: [{"test": 'test', "specs": {}}],
}
},
]
}
};
req.soajs.inputmaskData.specs = {
layerName : ''
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case releaseVm", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "releaseVM",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : ['test'],
data : ['test']
}
},
]
}
};
context.infraProvider.command = 'onboardVM';
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with response case destroyVm", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "destroyVM",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
options : {
params : ['test'],
data : ['test']
}
},
]
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra ", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: true
});
req.soajs.validator = {
Validator: function () {
return {
validate: function () {
return {
valid: true
};
}
};
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra max count", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 0,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: false
});
req.soajs.validator = {
Validator: function () {
return {
validate: function () {
return {
valid: true
};
}
};
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra inputs object", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs":
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 5,
"delay": 300
},
}
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: false
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra inputs empty", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": []
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: false
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra no steps ", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
null
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deployCluster",
"project": "demo",
"options": {
"region": "us-east1-b",
"workernumber": 3,
"workerflavor": "n1-standard-2",
"regionLabel": "us-east1-b",
"technology": "kubernetes",
"envCode": "PORTAL"
}
}).reply(200, {
result: true,
data: true
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
});
describe("testing rollback", function () {
it("success infra no status", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status;
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra done false", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
context.template.deploy.deployments.pre["infra.cluster.deploy"].status = {
done: false
};
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra rollback emtpy", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
context.template.deploy.deployments.pre["infra.cluster.deploy"].status = {
done: true,
rollback: {}
};
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with rolback", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deleteCluster",
"project": "demo",
"options": {
"envCode": "PORTAL",
"force": true
}
}).reply(200, {
result: true,
data: true
});
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra with bad rollback", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deleteCluster",
"project": "demo",
"options": {
"envCode": "PORTAL",
"force": true
}
}).reply(200, {
result: false,
errors: {
details: [{
message: "err"
}]
}
});
context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = [template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback];
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra no rollback", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deleteCluster",
"project": "demo",
"options": {
"envCode": "PORTAL",
"force": true
}
}).reply(200, {
result: false,
errors: {
details: [{
message: "err"
}]
}
});
context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = [];
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra rollback null", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deleteCluster",
"project": "demo",
"options": {
"envCode": "PORTAL",
"force": true
}
}).reply(200, {
result: false,
errors: {
details: [{
message: "err"
}]
}
});
context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = [null];
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
it("success infra no rollback", function (done) {
context = {
BL: BL,
environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)),
infraProvider : JSON.parse(JSON.stringify(infraRecord)),
config: config,
errors: [],
opts: {
"stage": "deployments",
"group": "pre",
"stepPath": "infra.cluster.deploy",
"section": [
"infra",
"cluster",
"deploy"
],
"inputs": [
{
"command": "deployCluster",
"check": {
"id": {
"type": "string",
"required": true
}
},
"recursive": {
"max": 1,
"delay": 300
},
}
]
}
};
nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', {
"type": "infra",
"name": "google",
"driver": "google",
"command": "deleteCluster",
"project": "demo",
"options": {
"envCode": "PORTAL",
"force": true
}
}).reply(200, {
result: false,
errors: {
details: [{
message: "err"
}]
}
});
delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback;
utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) {
nock.cleanAll();
done();
})
});
})
});
|
apache-2.0
|
gitpan/GOOGLE-ADWORDS-PERL-CLIENT
|
lib/Google/Ads/AdWords/v201402/LogicalUserList.pm
|
4522
|
package Google::Ads::AdWords::v201402::LogicalUserList;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/rm/v201402' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201402::UserList);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %id_of :ATTR(:get<id>);
my %isReadOnly_of :ATTR(:get<isReadOnly>);
my %name_of :ATTR(:get<name>);
my %description_of :ATTR(:get<description>);
my %status_of :ATTR(:get<status>);
my %integrationCode_of :ATTR(:get<integrationCode>);
my %accessReason_of :ATTR(:get<accessReason>);
my %accountUserListStatus_of :ATTR(:get<accountUserListStatus>);
my %membershipLifeSpan_of :ATTR(:get<membershipLifeSpan>);
my %size_of :ATTR(:get<size>);
my %sizeRange_of :ATTR(:get<sizeRange>);
my %sizeForSearch_of :ATTR(:get<sizeForSearch>);
my %sizeRangeForSearch_of :ATTR(:get<sizeRangeForSearch>);
my %UserList__Type_of :ATTR(:get<UserList__Type>);
my %rules_of :ATTR(:get<rules>);
__PACKAGE__->_factory(
[ qw( id
isReadOnly
name
description
status
integrationCode
accessReason
accountUserListStatus
membershipLifeSpan
size
sizeRange
sizeForSearch
sizeRangeForSearch
UserList__Type
rules
) ],
{
'id' => \%id_of,
'isReadOnly' => \%isReadOnly_of,
'name' => \%name_of,
'description' => \%description_of,
'status' => \%status_of,
'integrationCode' => \%integrationCode_of,
'accessReason' => \%accessReason_of,
'accountUserListStatus' => \%accountUserListStatus_of,
'membershipLifeSpan' => \%membershipLifeSpan_of,
'size' => \%size_of,
'sizeRange' => \%sizeRange_of,
'sizeForSearch' => \%sizeForSearch_of,
'sizeRangeForSearch' => \%sizeRangeForSearch_of,
'UserList__Type' => \%UserList__Type_of,
'rules' => \%rules_of,
},
{
'id' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
'isReadOnly' => 'SOAP::WSDL::XSD::Typelib::Builtin::boolean',
'name' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'description' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'status' => 'Google::Ads::AdWords::v201402::UserListMembershipStatus',
'integrationCode' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'accessReason' => 'Google::Ads::AdWords::v201402::AccessReason',
'accountUserListStatus' => 'Google::Ads::AdWords::v201402::AccountUserListStatus',
'membershipLifeSpan' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
'size' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
'sizeRange' => 'Google::Ads::AdWords::v201402::SizeRange',
'sizeForSearch' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
'sizeRangeForSearch' => 'Google::Ads::AdWords::v201402::SizeRange',
'UserList__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'rules' => 'Google::Ads::AdWords::v201402::UserListLogicalRule',
},
{
'id' => 'id',
'isReadOnly' => 'isReadOnly',
'name' => 'name',
'description' => 'description',
'status' => 'status',
'integrationCode' => 'integrationCode',
'accessReason' => 'accessReason',
'accountUserListStatus' => 'accountUserListStatus',
'membershipLifeSpan' => 'membershipLifeSpan',
'size' => 'size',
'sizeRange' => 'sizeRange',
'sizeForSearch' => 'sizeForSearch',
'sizeRangeForSearch' => 'sizeRangeForSearch',
'UserList__Type' => 'UserList.Type',
'rules' => 'rules',
}
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201402::LogicalUserList
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
LogicalUserList from the namespace https://adwords.google.com/api/adwords/rm/v201402.
Represents a user list that is a custom combination of user lists and user interests.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * rules
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
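A minimal illustrative sketch, inferred from the factory definition above rather than copied from the generated documentation (inherited UserList fields omitted):
 { # Google::Ads::AdWords::v201402::LogicalUserList
   rules => $a_reference_to, # see Google::Ads::AdWords::v201402::UserListLogicalRule
 },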
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
|
apache-2.0
|
xorware/android_frameworks_base
|
packages/SystemUI/src/com/android/systemui/settings/BrightnessController.java
|
11052
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.settings;
import android.content.ContentResolver;
import android.content.Context;
import android.database.ContentObserver;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.IPowerManager;
import android.os.PowerManager;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.UserHandle;
import android.provider.Settings;
import android.widget.ImageView;
import com.android.internal.logging.MetricsLogger;
import com.android.internal.logging.MetricsProto.MetricsEvent;
import java.util.ArrayList;
public class BrightnessController implements ToggleSlider.Listener {
private static final String TAG = "StatusBar.BrightnessController";
private static final boolean SHOW_AUTOMATIC_ICON = false;
/**
* {@link android.provider.Settings.System#SCREEN_AUTO_BRIGHTNESS_ADJ} uses the range [-1, 1].
* Using this factor, it is converted to [0, BRIGHTNESS_ADJ_RESOLUTION] for the SeekBar.
*/
public static final float BRIGHTNESS_ADJ_RESOLUTION = 2048;
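// Worked example of the conversion described above (illustrative comment, not part
// of the original source): a slider position v in [0, BRIGHTNESS_ADJ_RESOLUTION]
// maps to an adjustment adj = v / (BRIGHTNESS_ADJ_RESOLUTION / 2f) - 1, so
// v = 0 -> adj = -1, v = 1024 -> adj = 0, and v = 2048 -> adj = +1. The reverse
// mapping, used when updating the slider, is v = (adj + 1) * BRIGHTNESS_ADJ_RESOLUTION / 2f.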
private final int mMinimumBacklight;
private final int mMaximumBacklight;
private final Context mContext;
private final ImageView mIcon;
private final ToggleSlider mControl;
private final boolean mAutomaticAvailable;
private final IPowerManager mPower;
private final CurrentUserTracker mUserTracker;
private final Handler mHandler;
private final BrightnessObserver mBrightnessObserver;
private ArrayList<BrightnessStateChangeCallback> mChangeCallbacks =
new ArrayList<BrightnessStateChangeCallback>();
private boolean mAutomatic;
private boolean mListening;
private boolean mExternalChange;
public interface BrightnessStateChangeCallback {
public void onBrightnessLevelChanged();
}
/** ContentObserver to watch brightness **/
private class BrightnessObserver extends ContentObserver {
private final Uri BRIGHTNESS_MODE_URI =
Settings.System.getUriFor(Settings.System.SCREEN_BRIGHTNESS_MODE);
private final Uri BRIGHTNESS_URI =
Settings.System.getUriFor(Settings.System.SCREEN_BRIGHTNESS);
private final Uri BRIGHTNESS_ADJ_URI =
Settings.System.getUriFor(Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ);
public BrightnessObserver(Handler handler) {
super(handler);
}
@Override
public void onChange(boolean selfChange) {
onChange(selfChange, null);
}
@Override
public void onChange(boolean selfChange, Uri uri) {
if (selfChange) return;
try {
mExternalChange = true;
if (BRIGHTNESS_MODE_URI.equals(uri)) {
updateMode();
updateSlider();
} else if (BRIGHTNESS_URI.equals(uri) && !mAutomatic) {
updateSlider();
} else if (BRIGHTNESS_ADJ_URI.equals(uri) && mAutomatic) {
updateSlider();
} else {
updateMode();
updateSlider();
}
for (BrightnessStateChangeCallback cb : mChangeCallbacks) {
cb.onBrightnessLevelChanged();
}
} finally {
mExternalChange = false;
}
}
public void startObserving() {
final ContentResolver cr = mContext.getContentResolver();
cr.unregisterContentObserver(this);
cr.registerContentObserver(
BRIGHTNESS_MODE_URI,
false, this, UserHandle.USER_ALL);
cr.registerContentObserver(
BRIGHTNESS_URI,
false, this, UserHandle.USER_ALL);
cr.registerContentObserver(
BRIGHTNESS_ADJ_URI,
false, this, UserHandle.USER_ALL);
}
public void stopObserving() {
final ContentResolver cr = mContext.getContentResolver();
cr.unregisterContentObserver(this);
}
}
public BrightnessController(Context context, ImageView icon, ToggleSlider control) {
mContext = context;
mIcon = icon;
mControl = control;
mHandler = new Handler();
mUserTracker = new CurrentUserTracker(mContext) {
@Override
public void onUserSwitched(int newUserId) {
updateMode();
updateSlider();
}
};
mBrightnessObserver = new BrightnessObserver(mHandler);
PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE);
mMinimumBacklight = pm.getMinimumScreenBrightnessSetting();
mMaximumBacklight = pm.getMaximumScreenBrightnessSetting();
mAutomaticAvailable = context.getResources().getBoolean(
com.android.internal.R.bool.config_automatic_brightness_available);
mPower = IPowerManager.Stub.asInterface(ServiceManager.getService("power"));
}
public void addStateChangedCallback(BrightnessStateChangeCallback cb) {
mChangeCallbacks.add(cb);
}
public boolean removeStateChangedCallback(BrightnessStateChangeCallback cb) {
return mChangeCallbacks.remove(cb);
}
@Override
public void onInit(ToggleSlider control) {
// Do nothing
}
public void registerCallbacks() {
if (mListening) {
return;
}
mBrightnessObserver.startObserving();
mUserTracker.startTracking();
// Update the slider and mode before attaching the listener so we don't
// receive the onChanged notifications for the initial values.
updateMode();
updateSlider();
mControl.setOnChangedListener(this);
mListening = true;
}
/** Unregister all call backs, both to and from the controller */
public void unregisterCallbacks() {
if (!mListening) {
return;
}
mBrightnessObserver.stopObserving();
mUserTracker.stopTracking();
mControl.setOnChangedListener(null);
mListening = false;
}
@Override
public void onChanged(ToggleSlider view, boolean tracking, boolean automatic, int value,
boolean stopTracking) {
updateIcon(mAutomatic);
if (mExternalChange) return;
if (!mAutomatic) {
final int val = value + mMinimumBacklight;
if (stopTracking) {
MetricsLogger.action(mContext, MetricsEvent.ACTION_BRIGHTNESS, val);
}
setBrightness(val);
if (!tracking) {
AsyncTask.execute(new Runnable() {
public void run() {
Settings.System.putIntForUser(mContext.getContentResolver(),
Settings.System.SCREEN_BRIGHTNESS, val,
UserHandle.USER_CURRENT);
}
});
}
} else {
final float adj = value / (BRIGHTNESS_ADJ_RESOLUTION / 2f) - 1;
if (stopTracking) {
MetricsLogger.action(mContext, MetricsEvent.ACTION_BRIGHTNESS_AUTO, value);
}
setBrightnessAdj(adj);
if (!tracking) {
AsyncTask.execute(new Runnable() {
public void run() {
Settings.System.putFloatForUser(mContext.getContentResolver(),
Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ, adj,
UserHandle.USER_CURRENT);
}
});
}
}
for (BrightnessStateChangeCallback cb : mChangeCallbacks) {
cb.onBrightnessLevelChanged();
}
}
private void setMode(int mode) {
Settings.System.putIntForUser(mContext.getContentResolver(),
Settings.System.SCREEN_BRIGHTNESS_MODE, mode,
mUserTracker.getCurrentUserId());
}
private void setBrightness(int brightness) {
try {
mPower.setTemporaryScreenBrightnessSettingOverride(brightness);
} catch (RemoteException ex) {
}
}
private void setBrightnessAdj(float adj) {
try {
mPower.setTemporaryScreenAutoBrightnessAdjustmentSettingOverride(adj);
} catch (RemoteException ex) {
}
}
private void updateIcon(boolean automatic) {
if (mIcon != null) {
mIcon.setImageResource(automatic && SHOW_AUTOMATIC_ICON ?
com.android.systemui.R.drawable.ic_qs_brightness_auto_on :
com.android.systemui.R.drawable.ic_qs_brightness_auto_off);
}
}
/** Fetch the brightness mode from the system settings and update the icon */
private void updateMode() {
if (mAutomaticAvailable) {
int automatic;
automatic = Settings.System.getIntForUser(mContext.getContentResolver(),
Settings.System.SCREEN_BRIGHTNESS_MODE,
Settings.System.SCREEN_BRIGHTNESS_MODE_MANUAL,
UserHandle.USER_CURRENT);
mAutomatic = automatic != Settings.System.SCREEN_BRIGHTNESS_MODE_MANUAL;
updateIcon(mAutomatic);
} else {
mControl.setChecked(false);
updateIcon(false /*automatic*/);
}
}
/** Fetch the brightness from the system settings and update the slider */
private void updateSlider() {
if (mAutomatic) {
float value = Settings.System.getFloatForUser(mContext.getContentResolver(),
Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ, 0,
UserHandle.USER_CURRENT);
mControl.setMax((int) BRIGHTNESS_ADJ_RESOLUTION);
mControl.setValue((int) ((value + 1) * BRIGHTNESS_ADJ_RESOLUTION / 2f));
} else {
int value;
value = Settings.System.getIntForUser(mContext.getContentResolver(),
Settings.System.SCREEN_BRIGHTNESS, mMaximumBacklight,
UserHandle.USER_CURRENT);
mControl.setMax(mMaximumBacklight - mMinimumBacklight);
mControl.setValue(value - mMinimumBacklight);
}
}
}
|
apache-2.0
|
lambdastackio/aws-sdk-rust
|
docs/aws_sdk_rust/aws/s3/object/struct.HeadObjectOutputWriter.html
|
6025
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc">
<meta name="description" content="API documentation for the Rust `HeadObjectOutputWriter` struct in crate `aws_sdk_rust`.">
<meta name="keywords" content="rust, rustlang, rust-lang, HeadObjectOutputWriter">
<title>aws_sdk_rust::aws::s3::object::HeadObjectOutputWriter - Rust</title>
<link rel="stylesheet" type="text/css" href="../../../../normalize.css">
<link rel="stylesheet" type="text/css" href="../../../../rustdoc.css">
<link rel="stylesheet" type="text/css" href="../../../../main.css">
<link rel="shortcut icon" href="https://lambdastackio.github.io/static/images/favicon.ico">
</head>
<body class="rustdoc struct">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
<nav class="sidebar">
<a href='../../../../aws_sdk_rust/index.html'><img src='https://lambdastackio.github.io/static/images/lambdastack-200x200.png' alt='logo' width='100'></a>
<p class='location'>Struct HeadObjectOutputWriter</p><div class="block items"><ul><li><a href="#methods">Methods</a></li></ul></div><p class='location'><a href='../../../index.html'>aws_sdk_rust</a>::<wbr><a href='../../index.html'>aws</a>::<wbr><a href='../index.html'>s3</a>::<wbr><a href='index.html'>object</a></p><script>window.sidebarCurrent = {name: 'HeadObjectOutputWriter', ty: 'struct', relpath: ''};</script><script defer src="sidebar-items.js"></script>
</nav>
<nav class="sub">
<form class="search-form js-only">
<div class="search-container">
<input class="search-input" name="search"
autocomplete="off"
placeholder="Click or press ‘S’ to search, ‘?’ for more options…"
type="search">
</div>
</form>
</nav>
<section id='main' class="content">
<h1 class='fqn'><span class='in-band'>Struct <a href='../../../index.html'>aws_sdk_rust</a>::<wbr><a href='../../index.html'>aws</a>::<wbr><a href='../index.html'>s3</a>::<wbr><a href='index.html'>object</a>::<wbr><a class="struct" href=''>HeadObjectOutputWriter</a></span><span class='out-of-band'><span id='render-detail'>
<a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs">
[<span class='inner'>−</span>]
</a>
</span><a class='srclink' href='../../../../src/aws_sdk_rust/aws/s3/object.rs.html#348' title='goto source code'>[src]</a></span></h1>
<pre class='rust struct'>pub struct HeadObjectOutputWriter;</pre><div class='docblock'><p>Write <code>HeadObjectOutput</code> contents to a <code>SignedRequest</code></p>
</div><h2 id='methods'>Methods</h2><h3 class='impl'><span class='in-band'><code>impl <a class="struct" href="../../../../aws_sdk_rust/aws/s3/object/struct.HeadObjectOutputWriter.html" title="struct aws_sdk_rust::aws::s3::object::HeadObjectOutputWriter">HeadObjectOutputWriter</a></code></span><span class='out-of-band'><div class='ghost'></div><a class='srclink' href='../../../../src/aws_sdk_rust/aws/s3/object.rs.html#3537-3566' title='goto source code'>[src]</a></span></h3>
<div class='impl-items'><h4 id='method.write_params' class="method"><span id='write_params.v' class='invisible'><code>fn <a href='#method.write_params' class='fnname'>write_params</a>(params: &mut <a class="type" href="../../../../aws_sdk_rust/aws/common/params/type.Params.html" title="type aws_sdk_rust::aws::common::params::Params">Params</a>, name: &<a class="primitive" href="https://doc.rust-lang.org/nightly/std/primitive.str.html">str</a>, obj: &<a class="struct" href="../../../../aws_sdk_rust/aws/s3/object/struct.HeadObjectOutput.html" title="struct aws_sdk_rust::aws::s3::object::HeadObjectOutput">HeadObjectOutput</a>)</code></span></h4>
</div></section>
<section id='search' class="content hidden"></section>
<section class="footer"></section>
<aside id="help" class="hidden">
<div>
<h1 class="hidden">Help</h1>
<div class="shortcuts">
<h2>Keyboard Shortcuts</h2>
<dl>
<dt>?</dt>
<dd>Show this help dialog</dd>
<dt>S</dt>
<dd>Focus the search field</dd>
<dt>⇤</dt>
<dd>Move up in search results</dd>
<dt>⇥</dt>
<dd>Move down in search results</dd>
<dt>⏎</dt>
<dd>Go to active search result</dd>
<dt>+</dt>
<dd>Collapse/expand all sections</dd>
</dl>
</div>
<div class="infos">
<h2>Search Tricks</h2>
<p>
Prefix searches with a type followed by a colon (e.g.
<code>fn:</code>) to restrict the search to a given type.
</p>
<p>
Accepted types are: <code>fn</code>, <code>mod</code>,
<code>struct</code>, <code>enum</code>,
<code>trait</code>, <code>type</code>, <code>macro</code>,
and <code>const</code>.
</p>
<p>
Search functions by type signature (e.g.
<code>vec -> usize</code> or <code>* -> vec</code>)
</p>
</div>
</div>
</aside>
<script>
window.rootPath = "../../../../";
window.currentCrate = "aws_sdk_rust";
</script>
<script src="../../../../main.js"></script>
<script defer src="../../../../search-index.js"></script>
</body>
</html>
|
apache-2.0
|
Nemo157/hamlet
|
tests/macros.rs
|
1035
|
//! Having these as an integration test checks that the visibility of the macros
//! and their dependencies is valid for use by external libraries.
#[macro_use]
// Rename the import to check that the macros don't use the literal crate name
extern crate hamlet as willy;
// I would really like to do:
//
// ```
// #![no_std]
// #![no_implicit_prelude]
// #[macro_use]
// extern crate std as bob;
// ```
//
// but macros are too unhygienic :(
// We don't `use` anything here to check the macros only use fully qualified
// paths.
#[test]
fn empty_attrs() {
assert_eq!(&*attrs!().into_vec(), &[]);
}
#[test]
fn single_attr() {
assert_eq!(&*attrs!(id = "foo").into_vec(),
&[
willy::attr::Attribute::new("id", "foo"),
]);
}
#[test]
fn multi_attr() {
assert_eq!(&*attrs!(id = "foo", class = "bar").into_vec(),
&[
willy::attr::Attribute::new("id", "foo"),
willy::attr::Attribute::new("class", "bar"),
]);
}
|
apache-2.0
|
shorton3/dashingplatforms
|
src/platform/msgmgr/MessageBlockWrapper.h
|
3263
|
/******************************************************************************
*
* File name: MessageBlockWrapper.h
* Subsystem: Platform Services
* Description: This class creates a wrapper around the ACE Message Block
* class so that we can pool ACE Message Blocks inside the OPM.
*
* Name Date Release
* -------------------- ---------- ---------------------------------------------
* Stephen Horton 01/01/2014 Initial release
*
*
******************************************************************************/
#ifndef _PLAT_MESSAGE_BLOCK_WRAPPER_H_
#define _PLAT_MESSAGE_BLOCK_WRAPPER_H_
//-----------------------------------------------------------------------------
// System include files, includes 3rd party libraries.
//-----------------------------------------------------------------------------
#include <ace/Message_Block.h>
//-----------------------------------------------------------------------------
// Component includes, includes elements of our system.
//-----------------------------------------------------------------------------
#include "platform/opm/OPMBase.h"
//-----------------------------------------------------------------------------
// Forward Declarations.
//-----------------------------------------------------------------------------
// For C++ class declarations, we have one (and only one) of these access
// blocks per class in this order: public, protected, and then private.
//
// Inside each block, we declare class members in this order:
// 1) nested classes (if applicable)
// 2) static methods
// 3) static data
// 4) instance methods (constructors/destructors first)
// 5) instance data
//
/**
* MessageBlockWrapper is a wrapper class around the ACE_Message_Block class.
* <p>
* This simple wrapper exists so that we can create a pool of
* ACE_Message_Blocks inside the OPM.
* <p>
* $Author: Stephen Horton$
* $Revision: 1$
*/
class MessageBlockWrapper : public OPMBase, ACE_Message_Block
{
public:
/** Constructor */
MessageBlockWrapper(int blockSize = 0);
/** Virtual Destructor */
virtual ~MessageBlockWrapper();
/** OPMBase static initializer method for bootstrapping the objects */
static OPMBase* initialize(int initializer);
/** OPMBase clean method gets called when the object gets released back
into its pool */
void clean();
/** Overridden ACE_Message_Block Method */
void base(char *data, size_t size);
/** Overridden ACE_Message_Block Method */
char *base (void) const;
/** Overridden ACE_Message_Block Method */
void msg_priority (unsigned long priority);
/**
* String'ized debugging method
* @return string representation of the contents of this object
*/
string toString();
protected:
private:
/**
* Copy Constructor declared private so that default automatic
* methods aren't used.
*/
MessageBlockWrapper(const MessageBlockWrapper& rhs);
/**
* Assignment operator declared private so that default automatic
* methods aren't used.
*/
MessageBlockWrapper& operator= (const MessageBlockWrapper& rhs);
};
#endif
|
apache-2.0
|
jaowl/practice-java
|
calc/slf4j-1.7.23/site/apidocs/org/slf4j/helpers/package-use.html
|
8473
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_17) on Tue Feb 14 23:32:46 CET 2017 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Package org.slf4j.helpers (SLF4J 1.7.23 API)</title>
<meta name="date" content="2017-02-14">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.slf4j.helpers (SLF4J 1.7.23 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/slf4j/helpers/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Uses of Package org.slf4j.helpers" class="title">Uses of Package<br>org.slf4j.helpers</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../org/slf4j/helpers/package-summary.html">org.slf4j.helpers</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.slf4j.event">org.slf4j.event</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.slf4j.helpers">org.slf4j.helpers</a></td>
<td class="colLast">
<div class="block">Helper classes.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#org.slf4j.impl">org.slf4j.impl</a></td>
<td class="colLast">
<div class="block">Implementations of core logging interfaces defined in the <a href="../../../org/slf4j/package-summary.html"><code>org.slf4j</code></a> package.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.slf4j.event">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../org/slf4j/helpers/package-summary.html">org.slf4j.helpers</a> used by <a href="../../../org/slf4j/event/package-summary.html">org.slf4j.event</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/SubstituteLogger.html#org.slf4j.event">SubstituteLogger</a>
<div class="block">A logger implementation which logs via a delegate logger.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.slf4j.helpers">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../org/slf4j/helpers/package-summary.html">org.slf4j.helpers</a> used by <a href="../../../org/slf4j/helpers/package-summary.html">org.slf4j.helpers</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/FormattingTuple.html#org.slf4j.helpers">FormattingTuple</a>
<div class="block">Holds the results of formatting done by <a href="../../../org/slf4j/helpers/MessageFormatter.html" title="class in org.slf4j.helpers"><code>MessageFormatter</code></a>.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/MarkerIgnoringBase.html#org.slf4j.helpers">MarkerIgnoringBase</a>
<div class="block">This class serves as base for adapters or native implementations of logging systems
lacking Marker support.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/NOPLogger.html#org.slf4j.helpers">NOPLogger</a>
<div class="block">A direct NOP (no operation) implementation of <a href="../../../org/slf4j/Logger.html" title="interface in org.slf4j"><code>Logger</code></a>.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/SubstituteLogger.html#org.slf4j.helpers">SubstituteLogger</a>
<div class="block">A logger implementation which logs via a delegate logger.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.slf4j.impl">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../org/slf4j/helpers/package-summary.html">org.slf4j.helpers</a> used by <a href="../../../org/slf4j/impl/package-summary.html">org.slf4j.impl</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../org/slf4j/helpers/class-use/MarkerIgnoringBase.html#org.slf4j.impl">MarkerIgnoringBase</a>
<div class="block">This class serves as base for adapters or native implementations of logging systems
lacking Marker support.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/slf4j/helpers/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2005–2017 <a href="http://www.qos.ch">QOS.ch</a>. All rights reserved.</small></p>
</body>
</html>
|
apache-2.0
|
xamarin/Rivets
|
Rivets/Utility.cs
|
8263
|
//
// System.Web.HttpUtility
//
// Authors:
// Patrik Torstensson ([email protected])
// Wictor Wilén (decode/encode functions) ([email protected])
// Tim Coleman ([email protected])
// Gonzalo Paniagua Javier ([email protected])
//
// Copyright (C) 2005-2010 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System;
namespace Rivets
{
public sealed class Utility
{
#region Constructors
public Utility ()
{
}
#endregion // Constructors
#region Methods
public static Dictionary<string, string> ParseQueryString(string query)
{
var queryDict = new Dictionary<string, string>();
if (string.IsNullOrEmpty(query))
return queryDict;
foreach (string token in query.TrimStart(new char[] { '?' }).Split(new char[] { '&' }, StringSplitOptions.RemoveEmptyEntries))
{
string[] parts = token.Split(new char[] { '=' }, StringSplitOptions.RemoveEmptyEntries);
if (parts.Length == 2)
queryDict[parts[0].Trim()] = Utility.UrlDecode(parts[1]).Trim();
else
queryDict[parts[0].Trim()] = "";
}
return queryDict;
}
public static string UrlDecode (string str)
{
return UrlDecode(str, Encoding.UTF8);
}
static void WriteCharBytes (IList buf, char ch, Encoding e)
{
if (ch > 255) {
foreach (byte b in e.GetBytes (new char[] { ch }))
buf.Add (b);
} else
buf.Add ((byte)ch);
}
public static string UrlDecode (string s, Encoding e)
{
if (null == s)
return null;
if (s.IndexOf ('%') == -1 && s.IndexOf ('+') == -1)
return s;
if (e == null)
e = Encoding.UTF8;
long len = s.Length;
var bytes = new List <byte> ();
int xchar;
char ch;
for (int i = 0; i < len; i++) {
ch = s [i];
if (ch == '%' && i + 2 < len && s [i + 1] != '%') {
if (s [i + 1] == 'u' && i + 5 < len) {
// unicode hex sequence
xchar = GetChar (s, i + 2, 4);
if (xchar != -1) {
WriteCharBytes (bytes, (char)xchar, e);
i += 5;
} else
WriteCharBytes (bytes, '%', e);
} else if ((xchar = GetChar (s, i + 1, 2)) != -1) {
WriteCharBytes (bytes, (char)xchar, e);
i += 2;
} else {
WriteCharBytes (bytes, '%', e);
}
continue;
}
if (ch == '+')
WriteCharBytes (bytes, ' ', e);
else
WriteCharBytes (bytes, ch, e);
}
byte[] buf = bytes.ToArray ();
bytes = null;
return e.GetString (buf, 0, buf.Length);
}
static int GetInt (byte b)
{
char c = (char) b;
if (c >= '0' && c <= '9')
return c - '0';
if (c >= 'a' && c <= 'f')
return c - 'a' + 10;
if (c >= 'A' && c <= 'F')
return c - 'A' + 10;
return -1;
}
static int GetChar (byte [] bytes, int offset, int length)
{
int value = 0;
int end = length + offset;
for (int i = offset; i < end; i++) {
int current = GetInt (bytes [i]);
if (current == -1)
return -1;
value = (value << 4) + current;
}
return value;
}
static int GetChar (string str, int offset, int length)
{
int val = 0;
int end = length + offset;
for (int i = offset; i < end; i++) {
char c = str [i];
if (c > 127)
return -1;
int current = GetInt ((byte) c);
if (current == -1)
return -1;
val = (val << 4) + current;
}
return val;
}
internal static bool NotEncoded (char c)
{
return (c == '!' || c == '(' || c == ')' || c == '*' || c == '-' || c == '.' || c == '_'
#if !NET_4_0
|| c == '\''
#endif
);
}
public static string UrlEncode(string str)
{
return UrlEncode(str, Encoding.UTF8);
}
public static string UrlEncode (string s, Encoding Enc)
{
if (s == null)
return null;
if (s == String.Empty)
return String.Empty;
bool needEncode = false;
int len = s.Length;
for (int i = 0; i < len; i++) {
char c = s [i];
if ((c < '0') || (c < 'A' && c > '9') || (c > 'Z' && c < 'a') || (c > 'z')) {
if (NotEncoded (c))
continue;
needEncode = true;
break;
}
}
if (!needEncode)
return s;
// avoided GetByteCount call
byte [] bytes = new byte[Enc.GetMaxByteCount(s.Length)];
int realLen = Enc.GetBytes (s, 0, s.Length, bytes, 0);
#if PORTABLE || WINDOWS_PHONE || WINRT || UWP
var strData = UrlEncodeToBytes (bytes, 0, realLen);
return Encoding.UTF8.GetString (strData, 0, strData.Length);
#else
return Encoding.ASCII.GetString (UrlEncodeToBytes (bytes, 0, realLen));
#endif
}
public static byte [] UrlEncodeToBytes (string str)
{
return UrlEncodeToBytes (str, Encoding.UTF8);
}
public static byte [] UrlEncodeToBytes (string str, Encoding e)
{
if (str == null)
return null;
if (str.Length == 0)
return new byte [0];
byte [] bytes = e.GetBytes (str);
return UrlEncodeToBytes (bytes, 0, bytes.Length);
}
public static byte [] UrlEncodeToBytes (byte [] bytes)
{
if (bytes == null)
return null;
if (bytes.Length == 0)
return new byte [0];
return UrlEncodeToBytes (bytes, 0, bytes.Length);
}
internal static byte[] UrlEncodeToBytes (byte[] bytes, int offset, int count)
{
if (bytes == null)
throw new ArgumentNullException ("bytes");
int blen = bytes.Length;
if (blen == 0)
return new byte [0];
if (offset < 0 || offset >= blen)
throw new ArgumentOutOfRangeException("offset");
if (count < 0 || count > blen - offset)
throw new ArgumentOutOfRangeException("count");
MemoryStream result = new MemoryStream (count);
int end = offset + count;
for (int i = offset; i < end; i++)
UrlEncodeChar ((char)bytes [i], result, false);
return result.ToArray();
}
static char [] hexChars = "0123456789abcdef".ToCharArray ();
internal static void UrlEncodeChar (char c, Stream result, bool isUnicode) {
if (c > 255) {
//FIXME: what happens when there is an internal error?
//if (!isUnicode)
// throw new ArgumentOutOfRangeException ("c", c, "c must be less than 256");
int idx;
int i = (int) c;
result.WriteByte ((byte)'%');
result.WriteByte ((byte)'u');
idx = i >> 12;
result.WriteByte ((byte)hexChars [idx]);
idx = (i >> 8) & 0x0F;
result.WriteByte ((byte)hexChars [idx]);
idx = (i >> 4) & 0x0F;
result.WriteByte ((byte)hexChars [idx]);
idx = i & 0x0F;
result.WriteByte ((byte)hexChars [idx]);
return;
}
if (c > ' ' && NotEncoded (c)) {
result.WriteByte ((byte)c);
return;
}
if (c==' ') {
result.WriteByte ((byte)'+');
return;
}
if ( (c < '0') ||
(c < 'A' && c > '9') ||
(c > 'Z' && c < 'a') ||
(c > 'z')) {
if (isUnicode && c > 127) {
result.WriteByte ((byte)'%');
result.WriteByte ((byte)'u');
result.WriteByte ((byte)'0');
result.WriteByte ((byte)'0');
}
else
result.WriteByte ((byte)'%');
int idx = ((int) c) >> 4;
result.WriteByte ((byte)hexChars [idx]);
idx = ((int) c) & 0x0F;
result.WriteByte ((byte)hexChars [idx]);
}
else
result.WriteByte ((byte)c);
}
#endregion // Methods
}
}
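A minimal usage sketch for the class above (not part of the original file; the query string and values are invented). ParseQueryString splits a raw query into a dictionary with URL-decoded values, and UrlEncode/UrlDecode round-trip arbitrary strings.

using System;
using Rivets;

class UtilityUsageSketch
{
	static void Main()
	{
		// Decode a query string into key/value pairs (values are URL-decoded, keys trimmed).
		var query = Utility.ParseQueryString("?name=J%C3%BCrgen&note=hello+world&flag");
		foreach (var pair in query)
			Console.WriteLine(pair.Key + " = " + pair.Value);

		// Round-trip a value through UrlEncode/UrlDecode.
		var encoded = Utility.UrlEncode("a value with spaces & symbols");
		Console.WriteLine(encoded);                    // e.g. "a+value+with+spaces+%26+symbols"
		Console.WriteLine(Utility.UrlDecode(encoded)); // back to the original string
	}
}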
|
apache-2.0
|
bvn13/LicenseServerJClient
|
src/main/java/ru/bvn13/licenseserverjclient/soap/CheckClientLicense.java
|
5102
|
package ru.bvn13.licenseserverjclient.soap;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for checkClientLicense complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="checkClientLicense">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="request" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="clientId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* <element name="systemId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* <element name="properties" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "checkClientLicense", propOrder = {
"request"
})
public class CheckClientLicense {
@XmlElement(namespace = "")
protected CheckClientLicense.Request request;
/**
* Gets the value of the request property.
*
* @return
* possible object is
* {@link CheckClientLicense.Request }
*
*/
public CheckClientLicense.Request getRequest() {
return request;
}
/**
* Sets the value of the request property.
*
* @param value
* allowed object is
* {@link CheckClientLicense.Request }
*
*/
public void setRequest(CheckClientLicense.Request value) {
this.request = value;
}
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="clientId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* <element name="systemId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* <element name="properties" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"clientId",
"systemId",
"properties"
})
public static class Request {
@XmlElement(required = true)
protected String clientId;
@XmlElement(required = true)
protected String systemId;
@XmlElement(required = true)
protected String properties;
/**
* Gets the value of the clientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getClientId() {
return clientId;
}
/**
* Sets the value of the clientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setClientId(String value) {
this.clientId = value;
}
/**
* Gets the value of the systemId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getSystemId() {
return systemId;
}
/**
* Sets the value of the systemId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setSystemId(String value) {
this.systemId = value;
}
/**
* Gets the value of the properties property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getProperties() {
return properties;
}
/**
* Sets the value of the properties property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setProperties(String value) {
this.properties = value;
}
}
}
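A hedged usage sketch, not taken from the project: the wrapper and its nested Request are plain JAXB beans, so a caller populates them with setters before handing the object to the generated SOAP port. The identifier values below are placeholders.

package ru.bvn13.licenseserverjclient.soap;

public class CheckClientLicenseUsageSketch {
    public static void main(String[] args) {
        CheckClientLicense check = new CheckClientLicense();
        CheckClientLicense.Request request = new CheckClientLicense.Request();
        request.setClientId("client-42");            // placeholder value
        request.setSystemId("system-7");             // placeholder value
        request.setProperties("os=linux;arch=x64");  // placeholder value
        check.setRequest(request);
        // The populated object would then be passed to the generated web-service port (not shown).
        System.out.println(check.getRequest().getClientId());
    }
}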
|
apache-2.0
|
oehme/analysing-gradle-performance
|
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p111/Production2220.java
|
1891
|
package org.gradle.test.performance.mediummonolithicjavaproject.p111;
public class Production2220 {
private String property0;
public String getProperty0() {
return property0;
}
public void setProperty0(String value) {
property0 = value;
}
private String property1;
public String getProperty1() {
return property1;
}
public void setProperty1(String value) {
property1 = value;
}
private String property2;
public String getProperty2() {
return property2;
}
public void setProperty2(String value) {
property2 = value;
}
private String property3;
public String getProperty3() {
return property3;
}
public void setProperty3(String value) {
property3 = value;
}
private String property4;
public String getProperty4() {
return property4;
}
public void setProperty4(String value) {
property4 = value;
}
private String property5;
public String getProperty5() {
return property5;
}
public void setProperty5(String value) {
property5 = value;
}
private String property6;
public String getProperty6() {
return property6;
}
public void setProperty6(String value) {
property6 = value;
}
private String property7;
public String getProperty7() {
return property7;
}
public void setProperty7(String value) {
property7 = value;
}
private String property8;
public String getProperty8() {
return property8;
}
public void setProperty8(String value) {
property8 = value;
}
private String property9;
public String getProperty9() {
return property9;
}
public void setProperty9(String value) {
property9 = value;
}
}
|
apache-2.0
|
manashmndl/dfvfs
|
tests/path/zip_path_spec.py
|
1347
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the zip path specification implementation."""
import unittest
from dfvfs.path import zip_path_spec
from tests.path import test_lib
class ZipPathSpecTest(test_lib.PathSpecTestCase):
"""Tests for the zip path specification implementation."""
def testInitialize(self):
"""Tests the path specification initialization."""
path_spec = zip_path_spec.ZipPathSpec(
location=u'/test', parent=self._path_spec)
self.assertNotEqual(path_spec, None)
with self.assertRaises(ValueError):
_ = zip_path_spec.ZipPathSpec(location=u'/test', parent=None)
with self.assertRaises(ValueError):
_ = zip_path_spec.ZipPathSpec(location=None, parent=self._path_spec)
with self.assertRaises(ValueError):
_ = zip_path_spec.ZipPathSpec(
location=u'/test', parent=self._path_spec, bogus=u'BOGUS')
def testComparable(self):
"""Tests the path specification comparable property."""
path_spec = zip_path_spec.ZipPathSpec(
location=u'/test', parent=self._path_spec)
self.assertNotEqual(path_spec, None)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: ZIP, location: /test',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
samepage-labs/ruby-management-client
|
CHANGELOG.md
|
354
|
### Development
[Full Changelog](http://github.com/samepage/ruby-management-client/compare/v1.0.1...master)
* Fix build status url in README file
* Fix email in copyright file
### 1.0.1 / 2015-09-17
[Full Changelog](http://github.com/samepage/ruby-management-client/compare/v1.0.0...v1.0.1)
Housekeeping Fixes:
* Fix deployment to Gemfury repository
|
apache-2.0
|
dcarbone/php-fhir-generated
|
src/DCarbone/PHPFHIRGenerated/R4/FHIRDecimalPrimitive.php
|
9630
|
<?php
namespace DCarbone\PHPFHIRGenerated\R4;
/*!
* This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using
* class definitions from HL7 FHIR (https://www.hl7.org/fhir/)
*
* Class creation date: December 26th, 2019 15:44+0000
*
* PHPFHIR Copyright:
*
* Copyright 2016-2019 Daniel Carbone ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* FHIR Copyright Notice:
*
* Copyright (c) 2011+, HL7, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of HL7 nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* Generated on Fri, Nov 1, 2019 09:29+1100 for FHIR v4.0.1
*
* Note: the schemas & schematrons do not contain all of the rules about what makes resources
* valid. Implementers will still need to be familiar with the content of the specification and with
* any profiles that apply to the resources in order to make a conformant implementation.
*
*/
/**
* Class FHIRDecimalPrimitive
* @package \DCarbone\PHPFHIRGenerated\R4
*/
class FHIRDecimalPrimitive implements PHPFHIRTypeInterface
{
use PHPFHIRValidationAssertionsTrait;
// name of FHIR type this class describes
const FHIR_TYPE_NAME = PHPFHIRConstants::TYPE_NAME_DECIMAL_HYPHEN_PRIMITIVE;
const FIELD_VALUE = 'value';
/** @var string */
private $_xmlns = 'http://hl7.org/fhir';
/**
* @var null|double
*/
protected $value = null;
/**
* Validation map for fields in type decimal-primitive
* @var array
*/
private static $_validationRules = [ ];
/**
* FHIRDecimalPrimitive Constructor
* @param null|double $value
*/
public function __construct($value = null)
{
$this->setValue($value);
}
/**
* @return string
*/
public function _getFHIRTypeName()
{
return self::FHIR_TYPE_NAME;
}
/**
* @return string|null
*/
public function _getFHIRXMLNamespace()
{
return '' === $this->_xmlns ? null : $this->_xmlns;
}
/**
* @param null|string $xmlNamespace
* @return static
*/
public function _setFHIRXMLNamespace($xmlNamespace)
{
if (null === $xmlNamespace || is_string($xmlNamespace)) {
$this->_xmlns = (string)$xmlNamespace;
return $this;
}
throw new \InvalidArgumentException(sprintf(
'$xmlNamespace must be a null or string value, %s seen.',
gettype($xmlNamespace)
));
}
/**
* @return string
*/
public function _getFHIRXMLElementDefinition()
{
$xmlns = $this->_getFHIRXMLNamespace();
if (null !== $xmlns) {
$xmlns = " xmlns=\"{$xmlns}\"";
}
return "<decimal_primitive{$xmlns}></decimal_primitive>";
}
/**
* @return null|double
*/
public function getValue()
{
return $this->value;
}
/**
* @param null|float|string $value
* @return static
*/
public function setValue($value)
{
if (null === $value) {
$this->value = null;
} elseif (is_scalar($value)) {
$this->value = floatval($value);
} else {
throw new \InvalidArgumentException(sprintf('decimal-primitive value must be null, float, or numeric string, %s seen.', gettype($value)));
}
return $this;
}
/**
* Returns the validation rules that this type's fields must comply with to be considered "valid"
* The returned array is in ["fieldname[.offset]" => ["rule" => {constraint}]]
*
* @return array
*/
public function _getValidationRules()
{
return self::$_validationRules;
}
/**
* Validates that this type conforms to the specifications set forth for it by FHIR. An empty array must be seen as
* passing.
*
* @return array
*/
public function _getValidationErrors()
{
$errs = [];
$validationRules = $this->_getValidationRules();
if (isset($validationRules[self::FIELD_VALUE]) && null !== ($v = $this->getValue())) {
foreach($validationRules[self::FIELD_VALUE] as $rule => $constraint) {
$err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_DECIMAL_HYPHEN_PRIMITIVE, self::FIELD_VALUE, $rule, $constraint, $v);
if (null !== $err) {
if (!isset($errs[self::FIELD_VALUE])) {
$errs[self::FIELD_VALUE] = [];
}
$errs[self::FIELD_VALUE][$rule] = $err;
}
}
}
return $errs;
}
/**
* @param \SimpleXMLElement|string|null $sxe
* @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive $type
* @param null|int $libxmlOpts
* @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive
*/
public static function xmlUnserialize($sxe = null, PHPFHIRTypeInterface $type = null, $libxmlOpts = 591872)
{
if (null === $sxe) {
return null;
}
if (is_string($sxe)) {
libxml_use_internal_errors(true);
$sxe = new \SimpleXMLElement($sxe, $libxmlOpts, false);
if ($sxe === false) {
throw new \DomainException(sprintf('FHIRDecimalPrimitive::xmlUnserialize - String provided is not parseable as XML: %s', implode(', ', array_map(function(\libXMLError $err) { return $err->message; }, libxml_get_errors()))));
}
libxml_use_internal_errors(false);
}
if (!($sxe instanceof \SimpleXMLElement)) {
throw new \InvalidArgumentException(sprintf('FHIRDecimalPrimitive::xmlUnserialize - $sxe value must be null, \\SimpleXMLElement, or valid XML string, %s seen', gettype($sxe)));
}
if (null === $type) {
$type = new FHIRDecimalPrimitive;
} elseif (!is_object($type) || !($type instanceof FHIRDecimalPrimitive)) {
throw new \RuntimeException(sprintf(
'FHIRDecimalPrimitive::xmlUnserialize - $type must be instance of \DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive or null, %s seen.',
is_object($type) ? get_class($type) : gettype($type)
));
}
$xmlNamespaces = $sxe->getDocNamespaces(false, false);
if ([] !== $xmlNamespaces) {
$ns = reset($xmlNamespaces);
if (false !== $ns && '' !== $ns) {
$type->_xmlns = $ns;
}
}
$attributes = $sxe->attributes();
$children = $sxe->children();
if (isset($attributes->value)) {
$type->setValue((string)$attributes->value);
} elseif (isset($children->value)) {
$type->setValue((string)$children->value);
} elseif ('' !== ($v = (string)$sxe)) {
$type->setValue($v);
}
return $type;
}
/**
* @param null|\SimpleXMLElement $sxe
* @param null|int $libxmlOpts
* @return \SimpleXMLElement
*/
public function xmlSerialize(\SimpleXMLElement $sxe = null, $libxmlOpts = 591872)
{
if (null === $sxe) {
$sxe = new \SimpleXMLElement($this->_getFHIRXMLElementDefinition(), $libxmlOpts, false);
}
$sxe->addAttribute(self::FIELD_VALUE, (string)$this);
return $sxe;
}
/**
* @return null|double
*/
public function jsonSerialize()
{
return $this->getValue();
}
/**
* @return string
*/
public function __toString()
{
return (string)$this->getValue();
}
}
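A brief usage sketch (not part of the generated file; the value is arbitrary) exercising only the constructor, accessor and string conversion defined above. It assumes the package's autoloader is available.

<?php
use DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive;

$decimal = new FHIRDecimalPrimitive('3.14');   // numeric strings are coerced to float
var_dump($decimal->getValue());                // float(3.14)
echo (string)$decimal, PHP_EOL;                // "3.14"
$decimal->setValue(null);                      // null clears the value
var_dump($decimal->getValue());                // NULL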
|
apache-2.0
|
cgcardona/bitcoin-cookbook
|
cookbooks/nginx/files/default/index.html
|
122
|
<html>
<head>
<title>Hello World</title>
</head>
<body>
<h1>This is a test</h1>
<p>Please work!</p>
</body>
</html>
|
apache-2.0
|
Tataraovoleti/Struts2App
|
src/com/java/struts/fazalcode/LoginAction.java
|
873
|
/**
* Copyright 2013 @ Fazal Code
* All Rights Reserved to Fazal Code
*/
package com.java.struts.fazalcode;
import com.opensymphony.xwork2.ActionSupport;
/**
* @author Tatarao voleti
* @date Nov 11, 2013
*/
public class LoginAction extends ActionSupport {
private String username;
private String password;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
@Override
public String toString() {
return "LoginAction [username=" + username + ", password=" + password
+ "]";
}
public String execute(){
if (username.equalsIgnoreCase("user") && password.equalsIgnoreCase("pass")) {
return "success";
} else {
return "failure";
}
}
}
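For context, a typical struts.xml mapping would route the "success" and "failure" strings returned by execute() to result pages. This is a hedged sketch, not taken from the project; the action name and JSP paths are invented.

<package name="default" extends="struts-default">
    <action name="login" class="com.java.struts.fazalcode.LoginAction">
        <result name="success">/welcome.jsp</result>
        <result name="failure">/login.jsp</result>
    </action>
</package>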
|
apache-2.0
|
Netflix/hollow
|
hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/FlatRecordExtractor.java
|
8517
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper.flatrecords;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.copy.HollowRecordCopier;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import java.util.HashMap;
import java.util.Map;
/**
* Warning: Experimental. the FlatRecord feature is subject to breaking changes.
*/
public class FlatRecordExtractor {
private final HollowReadStateEngine extractFrom;
private final FlatRecordWriter writer;
private final ExtractorOrdinalRemapper ordinalRemapper;
private final Map<String, HollowRecordCopier> recordCopiersByType;
public FlatRecordExtractor(HollowReadStateEngine extractFrom, HollowSchemaIdentifierMapper schemaIdMapper) {
this.extractFrom = extractFrom;
this.writer = new FlatRecordWriter(extractFrom, schemaIdMapper);
this.ordinalRemapper = new ExtractorOrdinalRemapper();
this.recordCopiersByType = new HashMap<>();
}
public FlatRecord extract(String type, int ordinal) {
ordinalRemapper.clear();
writer.reset();
HollowTypeReadState typeState = extractFrom.getTypeState(type);
extractHollowRecord(typeState, ordinal);
return writer.generateFlatRecord();
}
private void extractHollowRecord(HollowTypeReadState typeState, int ordinal) {
traverse(typeState, ordinal);
String type = typeState.getSchema().getName();
HollowRecordCopier recordCopier = recordCopier(type);
HollowWriteRecord rec = recordCopier.copy(ordinal);
int flatOrdinal = writer.write(typeState.getSchema(), rec);
ordinalRemapper.remapOrdinal(type, ordinal, flatOrdinal);
}
private void traverse(HollowTypeReadState typeState, int ordinal) {
switch(typeState.getSchema().getSchemaType()) {
case OBJECT:
traverseObject((HollowObjectTypeReadState)typeState, ordinal);
break;
case LIST:
traverseList((HollowListTypeReadState)typeState, ordinal);
break;
case SET:
traverseSet((HollowSetTypeReadState)typeState, ordinal);
break;
case MAP:
traverseMap((HollowMapTypeReadState)typeState, ordinal);
break;
}
}
private void traverseObject(HollowObjectTypeReadState typeState, int ordinal) {
HollowObjectSchema schema = typeState.getSchema();
for(int i=0;i<schema.numFields();i++) {
if(schema.getFieldType(i) == FieldType.REFERENCE) {
HollowTypeReadState refTypeState = schema.getReferencedTypeState(i);
int refOrdinal = typeState.readOrdinal(ordinal, i);
extractHollowRecord(refTypeState, refOrdinal);
}
}
}
private void traverseList(HollowListTypeReadState typeState, int ordinal) {
HollowListSchema schema = typeState.getSchema();
int size = typeState.size(ordinal);
for(int i=0;i<size;i++) {
int refOrdinal = typeState.getElementOrdinal(ordinal, i);
if(refOrdinal != HollowConstants.ORDINAL_NONE)
extractHollowRecord(schema.getElementTypeState(), refOrdinal);
}
}
private void traverseSet(HollowSetTypeReadState typeState, int ordinal) {
HollowSetSchema schema = typeState.getSchema();
HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal);
int refOrdinal = iter.next();
while(refOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
if(refOrdinal != HollowConstants.ORDINAL_NONE)
extractHollowRecord(schema.getElementTypeState(), refOrdinal);
refOrdinal = iter.next();
}
}
private void traverseMap(HollowMapTypeReadState typeState, int ordinal) {
HollowMapSchema schema = typeState.getSchema();
HollowMapEntryOrdinalIterator iter = typeState.ordinalIterator(ordinal);
while(iter.next()) {
if(iter.getKey() != HollowConstants.ORDINAL_NONE)
extractHollowRecord(schema.getKeyTypeState(), iter.getKey());
if(iter.getValue() != HollowConstants.ORDINAL_NONE)
extractHollowRecord(schema.getValueTypeState(), iter.getValue());
}
}
private HollowRecordCopier recordCopier(String type) {
HollowRecordCopier recordCopier = recordCopiersByType.get(type);
if(recordCopier == null) {
recordCopier = HollowRecordCopier.createCopier(extractFrom.getTypeState(type), ordinalRemapper, false);
recordCopiersByType.put(type, recordCopier);
}
return recordCopier;
}
private static class ExtractorOrdinalRemapper implements OrdinalRemapper {
private final Map<TypedOrdinal, Integer> mappedFlatOrdinals = new HashMap<>();
@Override
public int getMappedOrdinal(String type, int originalOrdinal) {
return mappedFlatOrdinals.get(new TypedOrdinal(type, originalOrdinal));
}
@Override
public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
mappedFlatOrdinals.put(new TypedOrdinal(type, originalOrdinal), mappedOrdinal);
}
@Override
public boolean ordinalIsMapped(String type, int originalOrdinal) {
throw new UnsupportedOperationException();
}
public void clear() {
mappedFlatOrdinals.clear();
}
private static class TypedOrdinal {
private final String type;
private final int ordinal;
public TypedOrdinal(String type, int ordinal) {
this.type = type;
this.ordinal = ordinal;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ordinal;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
TypedOrdinal other = (TypedOrdinal) obj;
if (ordinal != other.ordinal)
return false;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
}
}
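A hedged usage sketch, not taken from the Hollow sources: given a populated HollowReadStateEngine and an application-supplied HollowSchemaIdentifierMapper, one FlatRecord is produced per top-level ordinal. The type name and ordinal below are placeholders.

// Hypothetical helper; the caller supplies the read state engine and schema id mapper.
static FlatRecord extractOne(HollowReadStateEngine readEngine,
                             HollowSchemaIdentifierMapper schemaIdMapper) {
    FlatRecordExtractor extractor = new FlatRecordExtractor(readEngine, schemaIdMapper);
    // "Movie" and ordinal 0 stand in for a real type name and ordinal from the read state.
    return extractor.extract("Movie", 0);
}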
|
apache-2.0
|
skoba/openehr-ruby
|
spec/lib/openehr/rm/composition/content/entry/action_spec.rb
|
2444
|
require File.dirname(__FILE__) + '/../../../../../../spec_helper'
#require File.dirname(__FILE__) + '/shared_examples_spec'
include OpenEHR::RM::Composition::Content::Entry
include OpenEHR::RM::DataTypes::Quantity::DateTime
include OpenEHR::RM::DataTypes::Text
include OpenEHR::RM::DataStructures::ItemStructure
describe Action do
let(:name) {DvText.new(:value => 'entry package')}
let(:language) { double('language',:code_string => 'ja')}
let(:encoding) { double('encoding', :code_string => 'UTF-8')}
let(:subject) { double('PartyProxy')}
# it_should_behave_like 'entry'
before(:each) do
time = DvDateTime.new(:value => '2009-11-18T20:17:18')
description = double(ItemStructure, :archetype_node_id => 'at0002')
current_state = double(DvCodedText, :value => 'planned')
ism_transition = double(IsmTransition, :current_state => current_state)
instruction_details = double(InstructionDetails, :activity_id => 'at0003')
@action= Action.new(:archetype_node_id => 'at0001',
:name => name,
:language => language,
:encoding => encoding,
:subject => subject,
:time => time,
:description => description,
:ism_transition => ism_transition,
:instruction_details => instruction_details)
end
it 'should be an instance of Action' do
expect(@action).to be_an_instance_of Action
end
it 'time should be assigned properly' do
expect(@action.time.value).to eq('2009-11-18T20:17:18')
end
it 'should raise ArgumentError with nil assigned to time' do
expect {
@action.time = nil
}.to raise_error ArgumentError
end
it 'description should be assigned properly' do
expect(@action.description.archetype_node_id).to eq('at0002')
end
it 'should raise ArgumentError with nil description' do
expect {
@action.description = nil
}.to raise_error ArgumentError
end
it 'ism_transition should be assigned properly' do
expect(@action.ism_transition.current_state.value).to eq('planned')
end
it 'should raise ArgumentError with nil ism_transition' do
expect {
@action.ism_transition = nil
}.to raise_error ArgumentError
end
it 'instruction_details should be assigned properly' do
expect(@action.instruction_details.activity_id).to eq('at0003')
end
end
|
apache-2.0
|
maveriklko9719/Coffee-Spills
|
docs/en/3.0.0rc1/cordova/camera/camera.md
|
3023
|
---
license: Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---
Camera
======
> The `camera` object provides access to the device's default camera application.
**Important privacy note:** Collection and use of images from a device's camera raises important privacy issues. Your app's privacy policy should discuss how the app uses the camera and whether the images recorded are shared with any other parties. In addition, if the app's use of the camera is not apparent in the user interface, you should provide a just-in-time notice prior to your app accessing the camera (if the device operating system doesn't do so already). That notice should provide the same information noted above, as well as obtaining the user's permission (e.g., by presenting choices for "OK" and "No Thanks"). For more information, please see the Privacy Guide.
Methods
-------
- camera.getPicture
- camera.cleanup
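A minimal call sketch (the option values here are illustrative, not mandated by the API):

    navigator.camera.getPicture(onSuccess, onFail, {
        quality: 50,
        destinationType: Camera.DestinationType.DATA_URL
    });

    function onSuccess(imageData) {
        console.log("Image received, base64 length: " + imageData.length);
    }

    function onFail(message) {
        console.log("Camera failed: " + message);
    }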
Permissions
-----------
### Android
#### app/res/xml/config.xml
<plugin name="Camera" value="org.apache.cordova.CameraLauncher" />
#### app/AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
### BlackBerry WebWorks
#### www/plugins.xml
<plugin name="Camera" value="org.apache.cordova.camera.Camera" />
#### www/config.xml
<feature id="blackberry.media.camera" />
<rim:permissions>
<rim:permit>use_camera</rim:permit>
</rim:permissions>
### iOS
#### config.xml
<plugin name="Camera" value="CDVCamera" />
### Windows Phone
#### Properties/WPAppManifest.xml
<Capabilities>
<Capability Name="ID_CAP_ISV_CAMERA" />
<Capability Name="ID_HW_FRONTCAMERA" />
</Capabilities>
Reference: [Application Manifest for Windows Phone](http://msdn.microsoft.com/en-us/library/ff769509%28v=vs.92%29.aspx)
### Tizen
#### config.xml
<feature name="http://tizen.org/api/application" required="true"/>
<feature name="http://tizen.org/api/application.launch" required="true"/>
Reference: [Application Manifest for Tizen Web Application](https://developer.tizen.org/help/topic/org.tizen.help.gs/Creating%20a%20Project.html?path=0_1_1_3#8814682_CreatingaProject-EditingconfigxmlFeatures)
|
apache-2.0
|
Algoryx/agxUnity
|
Editor/AGXUnityEditor/Tools/RigidBodyTool.cs
|
10413
|
using System.Linq;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using UnityEditor.SceneManagement;
using AGXUnity;
using AGXUnity.Collide;
using GUI = AGXUnity.Utils.GUI;
namespace AGXUnityEditor.Tools
{
[CustomTool( typeof( RigidBody ) )]
public class RigidBodyTool : CustomTargetTool
{
private List<Constraint> m_constraints = new List<Constraint>();
public RigidBody RigidBody
{
get
{
return Targets[ 0 ] as RigidBody;
}
}
public bool FindTransformGivenPointTool
{
get { return GetChild<FindPointTool>() != null; }
set
{
if ( value && !FindTransformGivenPointTool ) {
RemoveAllChildren();
var pointTool = new FindPointTool();
pointTool.OnPointFound = data =>
{
Undo.RecordObject( RigidBody.transform, "Rigid body transform" );
RigidBody.transform.position = data.RaycastResult.Point;
RigidBody.transform.rotation = data.Rotation;
EditorUtility.SetDirty( RigidBody );
};
AddChild( pointTool );
}
else if ( !value )
RemoveChild( GetChild<FindPointTool>() );
}
}
public bool FindTransformGivenEdgeTool
{
get { return GetChild<EdgeDetectionTool>() != null; }
set
{
if ( value && !FindTransformGivenEdgeTool ) {
RemoveAllChildren();
var edgeTool = new EdgeDetectionTool();
edgeTool.OnEdgeFound = data =>
{
Undo.RecordObject( RigidBody.transform, "Rigid body transform" );
RigidBody.transform.position = data.Position;
RigidBody.transform.rotation = data.Rotation;
EditorUtility.SetDirty( RigidBody );
};
AddChild( edgeTool );
}
else if ( !value )
RemoveChild( GetChild<EdgeDetectionTool>() );
}
}
public bool ShapeCreateTool
{
get { return GetChild<ShapeCreateTool>() != null; }
set
{
if ( value && !ShapeCreateTool ) {
RemoveAllChildren();
var shapeCreateTool = new ShapeCreateTool( RigidBody.gameObject );
AddChild( shapeCreateTool );
}
else if ( !value )
RemoveChild( GetChild<ShapeCreateTool>() );
}
}
public bool ConstraintCreateTool
{
get { return GetChild<ConstraintCreateTool>() != null; }
set
{
if ( value && !ConstraintCreateTool ) {
RemoveAllChildren();
var constraintCreateTool = new ConstraintCreateTool( RigidBody.gameObject,
false,
newConstraint => m_constraints.Add( newConstraint ) );
AddChild( constraintCreateTool );
}
else if ( !value )
RemoveChild( GetChild<ConstraintCreateTool>() );
}
}
public bool DisableCollisionsTool
{
get { return GetChild<DisableCollisionsTool>() != null; }
set
{
if ( value && !DisableCollisionsTool ) {
RemoveAllChildren();
var disableCollisionsTool = new DisableCollisionsTool( RigidBody.gameObject );
AddChild( disableCollisionsTool );
}
else if ( !value )
RemoveChild( GetChild<DisableCollisionsTool>() );
}
}
public bool RigidBodyVisualCreateTool
{
get { return GetChild<RigidBodyVisualCreateTool>() != null; }
set
{
if ( value && !RigidBodyVisualCreateTool ) {
RemoveAllChildren();
var createRigidBodyVisualTool = new RigidBodyVisualCreateTool( RigidBody );
AddChild( createRigidBodyVisualTool );
}
else if ( !value )
RemoveChild( GetChild<RigidBodyVisualCreateTool>() );
}
}
public bool ToolsActive = true;
public RigidBodyTool( Object[] targets )
: base( targets )
{
#if UNITY_2019_1_OR_NEWER
var allConstraints = StageUtility.GetCurrentStageHandle().Contains( RigidBody.gameObject ) ?
StageUtility.GetCurrentStageHandle().FindComponentsOfType<Constraint>() :
Object.FindObjectsOfType<Constraint>();
#else
var allConstraints = Object.FindObjectsOfType<Constraint>();
#endif
foreach ( var constraint in allConstraints ) {
foreach ( var rb in GetTargets<RigidBody>() )
if ( constraint.AttachmentPair.Contains( rb ) )
m_constraints.Add( constraint );
}
}
public override void OnAdd()
{
foreach ( var rb in GetTargets<RigidBody>() )
rb.MassProperties.OnForcedMassInertiaUpdate();
}
public override void OnSceneViewGUI( SceneView sceneView )
{
int rbIndex = 0;
foreach ( var rb in GetTargets<RigidBody>() ) {
var cmPosition = rb.transform.position +
rb.transform.TransformDirection( rb.MassProperties.CenterOfMassOffset.Value );
var cmTransformToolVisible = !rb.MassProperties.CenterOfMassOffset.UseDefault;
if ( cmTransformToolVisible ) {
var newPosition = PositionTool( cmPosition, rb.transform.rotation, 0.6f, 1.0f );
if ( Vector3.SqrMagnitude( cmPosition - newPosition ) > 1.0E-6 ) {
Undo.RecordObject( rb.MassProperties, "Center of mass changed" );
cmPosition = newPosition;
rb.MassProperties.CenterOfMassOffset.UserValue = rb.transform.InverseTransformDirection( newPosition -
rb.transform.position );
EditorUtility.SetDirty( rb );
}
}
var rbId = "rb_vis_" + (rbIndex++).ToString();
var vp = GetOrCreateVisualPrimitive<Utils.VisualPrimitiveSphere>( rbId, "GUI/Text Shader" );
vp.Color = new Color( 0, 0, 1, 0.25f );
vp.Visible = true;
vp.Pickable = false;
vp.SetTransform( cmPosition,
rb.transform.rotation,
0.05f,
true,
0.0f,
0.25f );
//var shapes = rb.Shapes;
//if ( shapes.Length < 2 )
// continue;
//int shapeIndex = 0;
//foreach ( var shape in shapes ) {
// var shapeLine = GetOrCreateVisualPrimitive<Utils.VisualPrimitiveCylinder>( rbId + "_shape_" + (shapeIndex++).ToString(),
// "GUI/Text Shader" );
// shapeLine.Color = new Color( 0, 1, 0, 0.05f );
// shapeLine.Visible = true;
// shapeLine.Pickable = false;
// shapeLine.SetTransform( cmPosition, shape.transform.position, 0.015f );
//}
}
}
public override void OnPreTargetMembersGUI()
{
var skin = InspectorEditor.Skin;
bool toggleShapeCreate = false;
bool toggleConstraintCreate = false;
bool toggleDisableCollisions = false;
bool toggleRigidBodyVisualCreate = false;
if ( !IsMultiSelect && ToolsActive ) {
InspectorGUI.ToolButtons( InspectorGUI.ToolButtonData.Create( ToolIcon.CreateConstraint,
ConstraintCreateTool,
"Create new constraint to this rigid body.",
() => toggleConstraintCreate = true ),
InspectorGUI.ToolButtonData.Create( ToolIcon.DisableCollisions,
DisableCollisionsTool,
"Disable collisions against other objects.",
() => toggleDisableCollisions = true ),
InspectorGUI.ToolButtonData.Create( ToolIcon.CreateShapeGivenVisual,
ShapeCreateTool,
"Create shape from child visual object.",
() => toggleShapeCreate = true ),
InspectorGUI.ToolButtonData.Create( ToolIcon.CreateVisual,
RigidBodyVisualCreateTool,
"Create visual representation of each physical shape in this body.",
() => toggleRigidBodyVisualCreate = true,
Tools.RigidBodyVisualCreateTool.ValidForNewShapeVisuals( RigidBody ) ) );
}
if ( ConstraintCreateTool ) {
GetChild<ConstraintCreateTool>().OnInspectorGUI();
}
if ( DisableCollisionsTool ) {
GetChild<DisableCollisionsTool>().OnInspectorGUI();
}
if ( ShapeCreateTool ) {
GetChild<ShapeCreateTool>().OnInspectorGUI();
}
if ( RigidBodyVisualCreateTool ) {
GetChild<RigidBodyVisualCreateTool>().OnInspectorGUI();
}
EditorGUILayout.LabelField( GUI.MakeLabel( "Mass properties", true ), skin.Label );
using ( InspectorGUI.IndentScope.Single )
InspectorEditor.DrawMembersGUI( GetTargets<RigidBody>().Select( rb => rb.MassProperties ).ToArray() );
if ( toggleConstraintCreate )
ConstraintCreateTool = !ConstraintCreateTool;
if ( toggleDisableCollisions )
DisableCollisionsTool = !DisableCollisionsTool;
if ( toggleShapeCreate )
ShapeCreateTool = !ShapeCreateTool;
if ( toggleRigidBodyVisualCreate )
RigidBodyVisualCreateTool = !RigidBodyVisualCreateTool;
}
public override void OnPostTargetMembersGUI()
{
if ( IsMultiSelect )
return;
InspectorGUI.ToolArrayGUI( this, RigidBody.Shapes, "Shapes" );
InspectorGUI.ToolArrayGUI( this, m_constraints.ToArray(), "Constraints" );
}
}
}
|
apache-2.0
|
beniz/hmdp
|
src/hmdpsim/rational.cc
|
7168
|
/* -*-C++-*- */
/*
* Copyright (C) 2003 Carnegie Mellon University and Rutgers University
*
* Permission is hereby granted to distribute this software for
* non-commercial research purposes, provided that this copyright
* notice is included with any such distribution.
*
* THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
* EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
* SOFTWARE IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
* ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
*
*/
#include "rational.h"
#include "exceptions.h"
#include <limits>
#include <math.h>
#include <stdlib.h>
namespace ppddl_parser
{
/* ====================================================================== */
/* Rational */
int Rational::iterations_ = 100;
/* Returns the greatest common divisor of the two integers. */
static int gcd(int n, int m) {
int a = abs(n);
int b = abs(m);
while (b > 0) {
int c = b;
b = a % b;
a = c;
}
return a;
}
/* Returns the least common multiplier of the two integers. */
static int lcm(int n, int m) {
return n/gcd(n, m)*m;
}
/* Returns the multipliers for the two integers. */
std::pair<int, int> Rational::multipliers(int n, int m) {
int f = lcm(n, m);
return std::make_pair(f/n, f/m);
}
/* Constructs a rational number. */
Rational::Rational(int n, int m) {
if (m == 0) {
throw Exception("division by zero");
} else {
int d = gcd(n, m);
numerator_ = n/d;
denominator_ = m/d;
if (denominator_ < 0) {
numerator_ *= -1;
denominator_ *= -1;
}
}
}
/* Constructs a rational number. */
Rational::Rational (const double &x)
{
Rational r = Rational::toRational (x, Rational::iterations_);
numerator_ = r.numerator ();
denominator_ = r.denominator ();
}
Rational Rational::toRational (const double &x, int iterations)
{
if (x == 0.0
|| x < std::numeric_limits<long>::min ()
|| x > std::numeric_limits<long>::max ())
return Rational (0,1);
else
{
int sign = x < 0.0 ? -1 : 1;
return sign * Rational::toRational (sign * x, 1.0e9, iterations); // was 1.0e12
}
}
Rational Rational::toRational (const double &x, const double &limit, int iterations)
{
double intpart;
//std::cout << "x: " << x << std::endl;
double fractpart = modf(x, &intpart);
//debug
//std::cout << "fractpart: " << fractpart << std::endl;
//debug
double d = 1.0 / fractpart;
int left = static_cast<int> (intpart);
//debug
/* std::cout << "left: " << left << " -- d: " << d
<< " -- limit: " << limit << std::endl; */
//debug
if (d > limit || iterations == 0)
return Rational (left, 1);
else
{
Rational tempr = Rational::toRational (d, limit * 0.1, --iterations);
Rational r = Rational (tempr.denominator (), tempr.numerator ());
return Rational (left, 1) + r;
}
}
/* Constructs a rational number. */
/*Rational::Rational(const char* s)
: numerator_(0) {
std::cout << "rational from char: " << s << std::endl;
const char* si = s;
for (; *si != '\0' && *si != '.' && *si != '/'; si++) {
numerator_ = 10*numerator_ + (*si - '0');
}
std::cout << "numerator: " << numerator_ << std::endl;
if (*si == '/') {
denominator_ = 0;
for (si++; *si != '\0'; si++) {
denominator_ = 10*denominator_ + (*si - '0');
}
if (denominator_ == 0) {
throw Exception("division by zero");
}
int d = gcd(numerator_, denominator_);
numerator_ /= d;
denominator_ /= d;
} else if (*si == '.') {
int a = numerator_;
numerator_ = 0;
denominator_ = 1;
for (si++; *si != '\0'; si++) {
numerator_ = 10*numerator_ + (*si - '0');
denominator_ *= 10;
}
int d = gcd(numerator_, denominator_);
numerator_ /= d;
denominator_ /= d;
numerator_ += a*denominator_;
} else {
denominator_ = 1;
}
}*/
/* Less-than comparison operator for rational numbers. */
bool operator<(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first < p.numerator()*m.second;
}
/* Less-than-or-equal comparison operator for rational numbers. */
bool operator<=(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first <= p.numerator()*m.second;
}
/* Equality comparison operator for rational numbers. */
bool operator==(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first == p.numerator()*m.second;
}
/* Inequality comparison operator for rational numbers. */
bool operator!=(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first != p.numerator()*m.second;
}
/* Greater-than-or-equal comparison operator for rational numbers. */
bool operator>=(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first >= p.numerator()*m.second;
}
/* Greater-than comparison operator for rational numbers. */
bool operator>(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return q.numerator()*m.first > p.numerator()*m.second;
}
/* Addition operator for rational numbers. */
Rational operator+(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return Rational(q.numerator()*m.first + p.numerator()*m.second,
q.denominator()*m.first);
}
/* Subtraction operator for rational numbers. */
Rational operator-(const Rational& q, const Rational& p) {
std::pair<int, int> m =
Rational::multipliers(q.denominator(), p.denominator());
return Rational(q.numerator()*m.first - p.numerator()*m.second,
q.denominator()*m.first);
}
/* Multiplication operator for rational numbers. */
Rational operator*(const Rational& q, const Rational& p) {
int d1 = gcd(q.numerator(), p.denominator());
int d2 = gcd(p.numerator(), q.denominator());
return Rational((q.numerator()/d1)*(p.numerator()/d2),
(q.denominator()/d2)*(p.denominator()/d1));
}
/* Division operator for rational numbers. */
Rational operator/(const Rational& q, const Rational& p) {
if (p == 0) {
throw Exception("division by zero");
}
int d1 = gcd(q.numerator(), p.numerator());
int d2 = gcd(p.denominator(), q.denominator());
return Rational((q.numerator()/d1)*(p.denominator()/d2),
(q.denominator()/d2)*(p.numerator()/d1));
}
/* Output operator for rational numbers. */
std::ostream& operator<<(std::ostream& os, const Rational& q) {
os << q.numerator();
if (q.denominator() != 1) {
os << '/' << q.denominator();
}
return os;
}
} /* end of namespace */
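A short usage sketch (not part of the original file; the numbers are arbitrary) exercising the constructors plus the arithmetic and stream operators defined above.

#include <iostream>
#include "rational.h"

int main() {
  using ppddl_parser::Rational;
  Rational a(1, 3);        // exact fraction 1/3
  Rational b(0.25);        // converted from a double via toRational -> 1/4
  Rational sum = a + b;    // 1/3 + 1/4 = 7/12
  std::cout << a << " + " << b << " = " << sum << std::endl;
  std::cout << (a < b ? "a < b" : "a >= b") << std::endl;
  return 0;
}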
|
apache-2.0
|
migesok/jaxb-java-time-adapters
|
threeten-jaxb-core/src/test/java/io/github/threetenjaxb/core/OffsetTimeXmlAdapterTest.java
|
605
|
package io.github.threetenjaxb.core;
import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.util.HashMap;
import java.util.Map;
class OffsetTimeXmlAdapterTest extends AbstractXmlAdapterTest<String, OffsetTime, OffsetTimeXmlAdapter> {
private static final Map<String, OffsetTime> STRING_OFFSET_TIME_MAP = new HashMap<>();
static {
STRING_OFFSET_TIME_MAP.put("10:15:30+01:00", OffsetTime
.of(10, 15, 30, 0, ZoneOffset.ofHours(1))
);
}
OffsetTimeXmlAdapterTest() {
super(new OffsetTimeXmlAdapter(), STRING_OFFSET_TIME_MAP);
}
}
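For context, a hedged sketch of how the adapter under test is typically wired into a JAXB-annotated class; only the adapter itself comes from this library, and the class and field names are invented.

import java.time.OffsetTime;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import io.github.threetenjaxb.core.OffsetTimeXmlAdapter;

@XmlRootElement
public class OpeningHours {
    @XmlJavaTypeAdapter(OffsetTimeXmlAdapter.class)
    public OffsetTime opensAt;   // marshalled as e.g. "10:15:30+01:00"
}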
|
apache-2.0
|
unloop/lastbackend
|
internal/cli/command/cluster/service.go
|
11710
|
//
// Last.Backend LLC CONFIDENTIAL
// __________________
//
// [2014] - [2020] Last.Backend LLC
// All Rights Reserved.
//
// NOTICE: All information contained herein is, and remains
// the property of Last.Backend LLC and its suppliers,
// if any. The intellectual and technical concepts contained
// herein are proprietary to Last.Backend LLC
// and its suppliers and may be covered by Russian Federation and Foreign Patents,
// patents in process, and are protected by trade secret or copyright law.
// Dissemination of this information or reproduction of this material
// is strictly forbidden unless prior written permission is obtained
// from Last.Backend LLC.
//
package cluster
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/lastbackend/lastbackend/tools/logger"
"io"
"os"
"strings"
"github.com/lastbackend/lastbackend/internal/cli/views"
"github.com/lastbackend/lastbackend/internal/pkg/models"
"github.com/lastbackend/lastbackend/pkg/api/types/v1/request"
"github.com/spf13/cobra"
)
const serviceListExample = `
# Get all services for 'ns-demo' namespace
lb service ls ns-demo
`
const serviceInspectExample = `
# Get information for 'redis' service in 'ns-demo' namespace
lb service inspect ns-demo redis
`
const serviceCreateExample = `
# Create new redis service with description and 256 MB limit memory
lb service create ns-demo redis --desc "Example description" -m 256mib
`
const serviceRemoveExample = `
# Remove 'redis' service in 'ns-demo' namespace
lb service remove ns-demo redis
`
const serviceUpdateExample = `
# Update info for 'redis' service in 'ns-demo' namespace
lb service update ns-demo redis --desc "Example new description" -m 128
`
const serviceLogsExample = `
# Get 'redis' service logs for 'ns-demo' namespace
lb service logs ns-demo redis
`
func (c *command) NewServiceCmd() *cobra.Command {
log := logger.WithContext(context.Background())
cmd := &cobra.Command{
Use: "service",
Short: "Manage your service",
Run: func(cmd *cobra.Command, args []string) {
if err := cmd.Help(); err != nil {
log.Error(err.Error())
return
}
},
}
cmd.AddCommand(c.serviceListCmd())
cmd.AddCommand(c.serviceInspectCmd())
cmd.AddCommand(c.serviceCreateCmd())
cmd.AddCommand(c.serviceRemoveCmd())
cmd.AddCommand(c.serviceUpdateCmd())
cmd.AddCommand(c.serviceLogsCmd())
return cmd
}
func (c *command) serviceListCmd() *cobra.Command {
return &cobra.Command{
Use: "ls [NAMESPACE]",
Short: "Display the services list",
Example: serviceListExample,
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
namespace := args[0]
response, err := c.client.cluster.V1().Namespace(namespace).Service().List(context.Background())
if err != nil {
fmt.Println(err)
return
}
if response == nil || len(*response) == 0 {
fmt.Println("no services available")
return
}
list := views.FromApiServiceListView(response)
list.Print()
},
}
}
func (c *command) serviceInspectCmd() *cobra.Command {
return &cobra.Command{
Use: "inspect [NAMESPACE]/[NAME]",
Short: "Service info by name",
Example: serviceInspectExample,
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
namespace, name, err := serviceParseSelfLink(args[0])
checkError(err)
svc, err := c.client.cluster.V1().Namespace(namespace).Service(name).Get(context.Background())
if err != nil {
fmt.Println(err)
return
}
routes, err := c.client.cluster.V1().Namespace(namespace).Route().List(context.Background())
if err != nil {
fmt.Println(err)
return
}
for _, r := range *routes {
for _, rule := range r.Spec.Rules {
if rule.Service == svc.Meta.Name {
fmt.Println("exposed:", r.Status.State, r.Spec.Domain, r.Spec.Port)
}
}
}
ss := views.FromApiServiceView(svc)
ss.Print()
},
}
}
func (c *command) serviceCreateCmd() *cobra.Command {
return &cobra.Command{
Use: "create [NAMESPACE]/[NAME] [IMAGE]",
Short: "Create service",
Example: serviceCreateExample,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
namespace, name, err := serviceParseSelfLink(args[0])
checkError(err)
image := args[1]
opts, err := serviceParseManifest(cmd, name, image)
checkError(err)
response, err := c.client.cluster.V1().Namespace(namespace).Service().Create(context.Background(), opts)
if err != nil {
fmt.Println(err)
return
}
fmt.Println(fmt.Sprintf("Service `%s` is created", name))
service := views.FromApiServiceView(response)
service.Print()
},
}
}
func (c *command) serviceRemoveCmd() *cobra.Command {
return &cobra.Command{
Use: "remove [NAMESPACE] [NAME]",
Short: "Remove service by name",
Example: serviceRemoveExample,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
namespace := args[0]
name := args[1]
opts := &request.ServiceRemoveOptions{Force: false}
if err := opts.Validate(); err != nil {
fmt.Println(err.Err())
return
}
c.client.cluster.V1().Namespace(namespace).Service(name).Remove(context.Background(), opts)
fmt.Println(fmt.Sprintf("Service `%s` is successfully removed", name))
},
}
}
func (c *command) serviceUpdateCmd() *cobra.Command {
return &cobra.Command{
Use: "update [NAMESPACE]/[NAME]",
Short: "Change configuration of the service",
Example: serviceUpdateExample,
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
namespace, name, err := serviceParseSelfLink(args[0])
checkError(err)
opts, err := serviceParseManifest(cmd, name, models.EmptyString)
checkError(err)
response, err := c.client.cluster.V1().Namespace(namespace).Service(name).Update(context.Background(), opts)
if err != nil {
fmt.Println(err)
return
}
fmt.Println(fmt.Sprintf("Service `%s` is updated", name))
ss := views.FromApiServiceView(response)
ss.Print()
},
}
}
func (c *command) serviceLogsCmd() *cobra.Command {
return &cobra.Command{
Use: "logs [NAMESPACE]/[NAME]",
Short: "Get service logs",
Example: serviceLogsExample,
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
opts := new(request.ServiceLogsOptions)
var err error
opts.Tail, err = cmd.Flags().GetInt("tail")
if err != nil {
fmt.Println(err.Error())
return
}
opts.Follow, err = cmd.Flags().GetBool("follow")
if err != nil {
fmt.Println(err.Error())
return
}
namespace, name, err := serviceParseSelfLink(args[0])
checkError(err)
reader, _, err := c.client.cluster.V1().Namespace(namespace).Service(name).Logs(context.Background(), opts)
if err != nil {
fmt.Println(err)
return
}
dec := json.NewDecoder(reader)
for {
var doc models.LogMessage
err := dec.Decode(&doc)
if err == io.EOF {
// all done
break
}
if err != nil {
fmt.Println(err.Error())
os.Exit(1)
}
fmt.Println(">", doc.Selflink, doc.Data)
}
},
}
}
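// serviceParseSelfLink splits a "namespace/name" selflink into its parts;
// when only a name is given, the default namespace is used.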
func serviceParseSelfLink(selflink string) (string, string, error) {
match := strings.Split(selflink, "/")
var (
namespace, name string
)
switch len(match) {
case 2:
namespace = match[0]
name = match[1]
case 1:
fmt.Println("Use default namespace:", models.DEFAULT_NAMESPACE)
namespace = models.DEFAULT_NAMESPACE
name = match[0]
default:
return "", "", errors.New("invalid service name provided")
}
return namespace, name, nil
}
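// serviceManifestFlags registers the manifest-related flags shared by the service create and update commands.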
func serviceManifestFlags(cmd *cobra.Command) {
cmd.Flags().StringP("name", "n", "", "set service name")
cmd.Flags().StringP("desc", "d", "", "set service description")
cmd.Flags().StringP("memory", "m", "128MIB", "set service spec memory")
cmd.Flags().IntP("replicas", "r", 0, "set service replicas")
cmd.Flags().StringArrayP("port", "p", make([]string, 0), "set service ports")
cmd.Flags().StringArrayP("env", "e", make([]string, 0), "set service env")
cmd.Flags().StringArray("env-from-secret", make([]string, 0), "set service env from secret")
cmd.Flags().StringArray("env-from-config", make([]string, 0), "set service env from config")
cmd.Flags().StringP("image", "i", "", "set service image")
cmd.Flags().String("image-secret-name", "", "set service image auth secret name")
cmd.Flags().String("image-secret-key", "", "set service image auth secret key")
}
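// serviceParseManifest builds a service manifest from the command flags; explicit name and
// image arguments take precedence over the corresponding flags.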
func serviceParseManifest(cmd *cobra.Command, name, image string) (*request.ServiceManifest, error) {
var err error
description, err := cmd.Flags().GetString("desc")
checkFlagParseError(err)
memory, err := cmd.Flags().GetString("memory")
checkFlagParseError(err)
if name == models.EmptyString {
name, err = cmd.Flags().GetString("name")
checkFlagParseError(err)
}
if image == models.EmptyString {
image, err = cmd.Flags().GetString("image")
checkFlagParseError(err)
}
ports, err := cmd.Flags().GetStringArray("ports")
checkFlagParseError(err)
env, err := cmd.Flags().GetStringArray("env")
checkFlagParseError(err)
senv, err := cmd.Flags().GetStringArray("env-from-secret")
checkFlagParseError(err)
cenv, err := cmd.Flags().GetStringArray("env-from-config")
checkFlagParseError(err)
replicas, err := cmd.Flags().GetInt("replicas")
checkFlagParseError(err)
authName, err := cmd.Flags().GetString("image-secret-name")
checkFlagParseError(err)
authKey, err := cmd.Flags().GetString("image-secret-key")
checkFlagParseError(err)
opts := new(request.ServiceManifest)
css := make([]request.ManifestSpecTemplateContainer, 0)
cs := request.ManifestSpecTemplateContainer{}
if len(name) != 0 {
opts.Meta.Name = &name
}
if len(description) != 0 {
opts.Meta.Description = &description
}
if memory != models.EmptyString {
cs.Resources.Request.RAM = memory
}
if replicas != 0 {
opts.Spec.Replicas = &replicas
}
if len(ports) > 0 {
opts.Spec.Network = new(request.ManifestSpecNetwork)
opts.Spec.Network.Ports = make([]string, 0)
opts.Spec.Network.Ports = ports
}
es := make(map[string]request.ManifestSpecTemplateContainerEnv)
if len(env) > 0 {
for _, e := range env {
kv := strings.SplitN(e, "=", 2)
eo := request.ManifestSpecTemplateContainerEnv{
Name: kv[0],
}
if len(kv) > 1 {
eo.Value = kv[1]
}
es[eo.Name] = eo
}
}
if len(senv) > 0 {
for _, e := range senv {
kv := strings.SplitN(e, "=", 3)
eo := request.ManifestSpecTemplateContainerEnv{
Name: kv[0],
}
if len(kv) < 3 {
return nil, errors.New("Service env from secret is in wrong format, should be [NAME]=[SECRET NAME]=[SECRET STORAGE KEY]")
}
if len(kv) == 3 {
eo.Secret.Name = kv[1]
eo.Secret.Key = kv[2]
}
es[eo.Name] = eo
}
}
if len(cenv) > 0 {
for _, e := range cenv {
kv := strings.SplitN(e, "=", 3)
eo := request.ManifestSpecTemplateContainerEnv{
Name: kv[0],
}
if len(kv) < 3 {
return nil, errors.New("Service env from config is in wrong format, should be [NAME]=[CONFIG NAME]=[CONFIG KEY]")
}
if len(kv) == 3 {
eo.Config.Name = kv[1]
eo.Config.Key = kv[2]
}
es[eo.Name] = eo
}
}
if len(es) > 0 {
senvs := make([]request.ManifestSpecTemplateContainerEnv, 0)
for _, e := range es {
senvs = append(senvs, e)
}
cs.Env = senvs
}
cs.Image.Name = image
if authName != models.EmptyString {
cs.Image.Secret.Name = authName
}
if authKey != models.EmptyString {
cs.Image.Secret.Key = authKey
}
css = append(css, cs)
if err := opts.Validate(); err != nil {
return nil, err.Err()
}
return opts, nil
}
|
apache-2.0
|
resin-io-library/base-images
|
balena-base-images/python/beagleboard-xm/alpine/3.13/3.9.7/run/Dockerfile
|
4130
|
# AUTOGENERATED FILE
FROM balenalib/beagleboard-xm-alpine:3.13-run
# remove several traces of python
RUN apk del python*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
RUN apk add --no-cache ca-certificates libffi \
&& apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1
# key 63C7CC90: public key "Simon McVittie <[email protected]>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
# point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED.
# https://www.python.org/dev/peps/pep-0476/#trust-database
ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
ENV PYTHON_VERSION 3.9.7
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.2.4
ENV SETUPTOOLS_VERSION 58.0.0
RUN set -x \
&& buildDeps=' \
curl \
gnupg \
' \
&& apk add --no-cache --virtual .build-deps $buildDeps \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \
&& echo "ac2bb1a87f649ab92d472e5fa6899205dc4a49d5ada39bb6a6a0702c1b8b1cfa Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" | sha256sum -c - \
&& tar -xzf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" --strip-components=1 \
&& rm -rf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \
&& if [ ! -e /usr/local/bin/pip3 ]; then : \
&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
&& python3 get-pip.py \
&& rm get-pip.py \
; fi \
&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
&& find /usr/local \
\( -type d -a -name test -o -name tests \) \
-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
-exec rm -rf '{}' + \
&& cd / \
&& rm -rf /usr/src/python ~/.cache
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
&& ln -sf pip3 pip \
&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
&& ln -sf idle3 idle \
&& ln -sf pydoc3 pydoc \
&& ln -sf python3 python \
&& ln -sf python3-config python-config
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@python" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux 3.13 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.9.7, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& ln -f /bin/sh /bin/sh.real \
&& ln -f /bin/sh-shim /bin/sh
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Salvia/Salvia punctata/README.md
|
177
|
# Salvia punctata Ruiz & Pav. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
#### Remarks
null
|
apache-2.0
|
wim-agency/wim-agency.github.io
|
Payps/payps_2017_04_21_obnovlenie_cepochki/payps_2017_03_01_reactivate_3_2.html
|
27421
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<!--[if !mso]><!-->
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<!--<![endif]-->
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title></title>
<style type="text/css">
html {width: 100%;}
body {width:100% !important;}
.ReadMsgBody, .ExternalClass {width:100%; display:block !important;}
table td {border-collapse: collapse; mso-table-lspace:0pt; mso-table-rspace:0pt;}
.ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {
line-height: 100%;
}
body {-webkit-text-size-adjust:none; -ms-text-size-adjust:none;}
body {margin:0; padding:0;}
table td {border-collapse:collapse;}
p {margin:0; padding:0; margin-bottom:0;}
h1, h2, h3, h4, h5, h6 {
color: black;
line-height: 100%;
}
a, a:link {
color:#0066cc;
text-decoration: none;
}
html, body {
height:100%;
}
body, #body_style {
background:#273659;
color:#000000;
font-family:Arial, Helvetica, sans-serif;
font-size:12px;
}
span.yshortcuts { color:#000; background-color:none; border:none;}
span.yshortcuts:hover,
span.yshortcuts:active,
span.yshortcuts:focus {color:#000; background-color:none; border:none;}
a:visited { color: #3c96e2; text-decoration: none}
a:focus { color: #3c96e2; text-decoration: underline}
a:hover { color: #3c96e2; text-decoration: underline}
@media only screen and (max-device-width: 480px) {
body[yahoo] #container1 {display:block !important}
body[yahoo] p {font-size: 10px}
}
@media only screen and (min-device-width: 768px) and (max-device-width: 1024px) {
body[yahoo] #container1 {display:block !important}
body[yahoo] p {font-size: 12px}
}
@media only screen and (max-width: 640px), only screen and (max-device-width: 640px) {
td[class=w640-full], table[class=w640-full], div[class=w640-full], img[class=w640-full] {
width:100% !important;
height:auto !important;
}
td[class=w640-align-center], table[class=w640-align-center], div[class=w640-align-center] {
text-align: center !important;
}
}
.preheader {display:none !important; visibility:hidden; opacity:0; color:transparent; height:0; width:0;}
</style>
</head>
<body style="color:#000000; font-family:Arial, Helvetica, sans-serif; font-size:12px; background:#273659; padding: 0px; margin: 0px" alink="#FF0000" link="#FF0000" bgcolor="#273659" text="#000000" yahoo="fix">
<table height="100%" width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td bgcolor="#273659" valign="top" align="center" width="640" style="width: 640px;">
<!--[if gte mso 10]>
<table align="center" width="640" border="0" cellspacing="0" cellpadding="0">
<tr><td><![endif]-->
<table width="640" cellspacing="0" cellpadding="0" style="width: 100%; max-width: 640px;">
<tbody>
<tr>
<td style="font-size: 1px; line-height: 1px; font-family: Arial, Helvetica, sans-serif; color:#273659; text-align: left;">
Не упустите свой шанс
</td>
</tr>
<tr>
<td align="center" height="40" style="padding-top: 10px; padding-bottom: 10px;">
<table width="88%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td height="21" align="left" style="padding-right: 5px; font-size: 14px; line-height: 20px; font-family: Arial, Helvetica, sans-serif; color:#abaeb8;">
<a target="_blank" href="http://$pers_7848$" style="color: #ffffff; text-decoration: none;"><img width="94" alt="pay p.s." src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_1a/logo.png" style="vertical-align: top; text-align: center; border: none;"></a>
</td>
<td align="right" height="21" style="font-size: 14px; line-height: 20px; font-family: Arial, Helvetica, sans-serif; color:#abaeb8; text-align: right;">
<a href="https://link.payps.ru/u/gm.php?UID=$uid$&ID=$ident$" target="_blank" style="color: #abaeb8; text-decoration: underline;">Онлайн-версия</a>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td align="center" style="font-size: 18px; line-height: 24px; font-family: cursive; color:#000000;">
<a target="_blank" href="http://$pers_7848$" style="color: #273659; text-decoration: underline;"><img width="640" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/banner-top.jpg" style="display: block; border:none; width: 100% !important; max-width: 640px !important; height: auto !important; max-height: 250px !important; text-align: center;" alt="Улучшенные условия — сейчас и всегда" title="Улучшенные условия — сейчас и всегда"></a>
</td>
</tr>
<tr>
<td valign="top" align="center" bgcolor="#ffffff" style="padding-top: 15px;">
<table width="88%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td style="padding-bottom: 20px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
Прошли те времена, когда, для того чтобы получить займ, вам нужно было идти в банк, собирать документы и ждать решения несколько дней.
</td>
</tr>
<tr>
<td style="padding-bottom: 10px; font-size: 28px; line-height: 33px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
С Pay P.S. всё гораздо проще:
</td>
</tr>
<tr>
<td style="padding-bottom: 12px;">
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td width="30" valign="top" style="padding-right: 10px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
<img style="display: block; border: none; font-weight: bold;" width="30" height="30" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico01.png" alt="1">
</td>
<td valign="top" style="padding-top: 4px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
Вы заходите на сайт и выбираете сумму и срок.
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td style="padding-bottom: 12px;">
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td width="30" valign="top" style="padding-right: 10px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
<img style="display: block; border: none; font-weight: bold;" width="30" height="30" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico02.png" alt="2">
</td>
<td valign="top" style="padding-top: 4px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
Регистрируетесь и заполняете анкету в онлайне.
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td style="padding-bottom: 12px;">
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td width="30" valign="top" style="padding-right: 10px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
<img style="display: block; border: none; font-weight: bold;" width="30" height="30" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico03.png" alt="3">
</td>
<td valign="top" style="padding-top: 4px; font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
Мы принимаем решение в течение нескольких минут — вуаля, деньги ваши!
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td align="left" style="padding-bottom: 25px; font-size: 19px; line-height: 22px; font-family: Arial, Helvetica, sans-serif; color:#000000; text-transform: uppercase;">
<div><!--[if mso]>
<v:rect href="http://$pers_7848$" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" style="height:60px;v-text-anchor:middle;width:220px;" stroke="f" fillcolor="#f4d724">
<w:anchorlock/>
<center>
<![endif]-->
<a href="http://$pers_7848$" style="background-color:#f4d724;color:#2d3d61;display:inline-block;font-family: Arial, Helvetica, sans-serif; text-transform: uppercase; font-size:16px;font-weight:bold;line-height:60px;text-align:center;text-decoration:none;width:220px;-webkit-text-size-adjust:none;"><strong>получить деньги</strong></a>
<!--[if mso]>
</center>
</v:rect>
<![endif]--></div>
</td>
</tr>
</tbody>
</table>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="font-size: 16px; line-height: 21px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
<img style="display: block; border: none; width: 100%; max-width: 640px; height: auto; max-height: 35px;" width="640" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/img-decor-top.gif" alt="">
</td>
</tr>
<tr>
<td align="center" bgcolor="#f9eb91" style="padding-top: 11px; padding-bottom: 12px;">
<table width="88%" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="padding-bottom: 15px; font-size: 16px; line-height: 20px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
Попробуйте, это удобно: иметь под рукой деньги, когда они так нужны.
<br>Тем более, что ещё 3 дня для вас действуют улучшенные условия:
</td>
</tr>
<tr>
<td style="font-size: 0; text-align: left;">
<!--[if (gte mso 9)|(IE)]>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td valign="top" width="200">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 200px;">
<table align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="padding-bottom: 5px; font-size: 22px; line-height: 26px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
До 11 000 рублей
</td>
</tr>
<tr>
<td style="padding-bottom: 10px; font-size: 16px; line-height: 20px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
вместо 8000
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
<td valign="top" width="200">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 200px;">
<table align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="padding-bottom: 5px; font-size: 22px; line-height: 26px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
1,8% в день
</td>
</tr>
<tr>
<td style="padding-bottom: 10px; font-size: 16px; line-height: 20px; font-family: Arial, Helvetica, sans-serif; color:#2d3d61; text-align: left;">
вместо 1,9%
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td align="center" style="padding-top: 20px; padding-bottom: 40px;">
<table width="88%" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td style="font-size: 0; text-align: center;">
<!--[if (gte mso 9)|(IE)]>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td valign="top" width="50%" align="left">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 50%; min-width: 270px;">
<table align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td valign="top" style="padding-right: 10px; padding-bottom: 12px; font-size: 30px; line-height: 35px; font-family: Arial, Helvetica, sans-serif; color:#ffffff; text-align: left;">
<a target="_blank" href="http://$pers_7848$" style="color: #ffffff; text-decoration: none;"><img width="95" alt="pay p.s." src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/logo-bottom.png" style="display: block; text-align: center; border: none;"></a>
</td>
<td valign="top" style="font-size: 12px; line-height: 16px; font-family: Arial, Helvetica, sans-serif; color:#ffffff; text-align: left;">
Бери сейчас, плати потом
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
<td valign="top" width="50%" align="left">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 50%; min-width: 260px;">
<table width="100%" align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td align="left" style="padding-bottom: 12px; font-size: 20px; line-height: 25px; font-family: Arial, Helvetica, sans-serif; color:#f7f8fa;">
<img style="display: block; border: none;" height="20" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico-visa.png" alt="VISA">
</td>
<td align="left" style="padding-bottom: 12px; font-size: 20px; line-height: 25px; font-family: Arial, Helvetica, sans-serif; color:#f7f8fa;">
<img style="display: block; border: none;" height="26" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico-mastercard.png" alt="VISA">
</td>
<td align="left" style="padding-bottom: 12px; font-size: 20px; line-height: 25px; font-family: Arial, Helvetica, sans-serif; color:#f7f8fa;">
<img style="display: block; border: none;" height="28" src="http://link.payps.ru/custloads/765352314/20170419_Reactivation_abandoned_order_3_2/ico-thawte.png" alt="thawte">
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
<tr>
<td style="font-size: 0; text-align: center;">
<!--[if (gte mso 9)|(IE)]>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td valign="top" width="50%" align="left">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 50%; min-width: 270px;">
<table align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="padding-bottom: 18px; font-size: 12px; line-height: 18px; font-family: Arial, Helvetica, sans-serif; color:#abaeb8; text-align: left;">
Все права защищены © 2017 Pay P.S.
<br>ООО МФО «Займ Онлайн», Россия, <a href="#" style="pointer-events: none; color: #abaeb8 !important; text-decoration: none !important;">Мoсквa, 123001, ул. Cпиpиднoвкa д.27/24</a>
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
<td valign="top" width="50%" align="left">
<![endif]-->
<div style="display: inline-block; vertical-align: top; width: 50%; min-width: 260px;">
<table width="100%" align="left" cellspacing="0" cellpadding="0" border="0">
<tbody>
<tr>
<td style="padding-bottom: 18px; font-size: 12px; line-height: 18px; font-family: Arial, Helvetica, sans-serif; color:#abaeb8; text-align: left;">
<a href="http://www.payps.ru/static/docs/conditions-of-use_2016-10-24.pdf" target="_blank" style="color: #abaeb8; text-decoration: underline;">Пользовательское соглашение</a>
<br><a href="http://www.payps.ru/static/docs/credit_rules_2015-12-29.pdf" target="_blank" style="color: #abaeb8; text-decoration: underline;">Правила предоставления микрозаймов</a>
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (gte mso 9)|(IE)]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
<tr>
<td style="font-size: 12px; line-height: 18px; font-family: Arial, Helvetica, sans-serif; color:#abaeb8; text-align: center;">
Я больше не хочу получать письма с лучшими предложениями от PayPS – <a href="https://link.payps.ru/u/un.php?par=$uid$_$cid$_$llid$_$sid$" target="_blank" style="color: #abaeb8; text-decoration: underline;">отписаться</a>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if gte mso 10]>
</td></tr>
</table><![endif]-->
</td>
</tr>
</table>
</body>
</html>
|
apache-2.0
|
cowthan/AyoWeibo
|
ayoview/src/main/java/org/ayo/app/tmpl/AyoJigsawActivityAttacher.java
|
1539
|
package org.ayo.app.tmpl;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import org.ayo.app.common.AyoSwipeBackActivityAttacher;
import genius.android.view.R;
/**
* An Activity that hosts a single Fragment.
* The Fragment is responsible for the UI.
*
*/
public abstract class AyoJigsawActivityAttacher extends AyoSwipeBackActivityAttacher {
protected abstract Fragment getFragment();
protected abstract View getTopView();
protected abstract View getBottomView();
protected abstract View getCoverView();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.ayo_tmpl_ac_fragment_container);
View topView = getTopView();
if(topView != null) {
ViewGroup top = (ViewGroup) findViewById(R.id.top);
top.addView(topView);
}
View bottomView = getBottomView();
if(bottomView != null){
ViewGroup bottom = (ViewGroup) findViewById(R.id.bottom);
bottom.addView(bottomView);
}
View coverView = getCoverView();
if(coverView != null){
ViewGroup root = (ViewGroup) findViewById(R.id.root);
root.addView(coverView);
}
FrameLayout fl_root = (FrameLayout) findViewById(R.id.fl_root);
getSupportFragmentManager().beginTransaction().replace(fl_root.getId(), getFragment()).commit();
}
}
|
apache-2.0
|
kiranw06/synthapp
|
README.md
|
278
|
# synthapp
This project is generated with [yo angular generator](https://github.com/yeoman/generator-angular)
version 0.15.1.
## Build & development
Run `grunt` for building and `grunt serve` for preview.
## Testing
Running `grunt test` will run the unit tests with karma.
|
apache-2.0
|
mattiamascia/jrank
|
src/test/java/com/f1000/rank/journal/RankJournalsTest.java
|
1989
|
/**
*
*/
package com.f1000.rank.journal;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.f1000.rank.journal.model.Journal;
/**
* The Class RankJournalsTest.
*
* @author mattiam
* @version $Revision: 1.0 $
*/
public class RankJournalsTest {
/** The appRank. */
private AppRank appRank;
/** The stub. */
private RankJournalsStub stub;
/**
* Initialize.
*/
@Before
public void init() {
this.stub = new RankJournalsStub();
this.appRank = new AppRank();
}
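/**
* Test that null and empty journal lists are returned without ranking.
*/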
@Test
public final void testEmptyJournals() {
List<Journal> ranked = this.appRank.rankJournals(null);
assertEquals(null, ranked);
ranked = this.appRank.rankJournals(new ArrayList<Journal>());
assertEquals(new ArrayList<Journal>(), ranked);
}
/**
* Test rank journals.
*/
@Test
public final void testRankJournals() {
Journal[] output = new Journal[3];
output[0] = new Journal("Journal A", 5.6f);
output[1] = new Journal("Journal C", 3.1f);
output[2] = new Journal("Journal B", 2.4f);
List<Journal> ranked = this.appRank.rankJournals(this.stub
.getJournals());
assertArrayEquals(output, ranked.toArray());
}
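/**
* Test rank journals that share the same score.
*/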
@Test
public final void testSharedRankJournals() {
Journal[] output = new Journal[3];
output[0] = new Journal("Journal B", 6.2f);
output[1] = new Journal("Journal C", 6.2f);
output[2] = new Journal("Journal A", 2.2f);
List<Journal> ranked = this.appRank.rankJournals(this.stub
.getSharedRankJournals());
assertArrayEquals(output, ranked.toArray());
}
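/**
* Test rank of the review journal set.
*/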
@Test
public final void testReviewRankJournals() {
Journal[] output = new Journal[2];
output[0] = new Journal("Journal C", 3.1f);
output[1] = new Journal("Journal B", 2.4f);
List<Journal> ranked = this.appRank.rankJournals(this.stub
.getReviewRankJournals());
assertArrayEquals(output, ranked.toArray());
}
}
|
apache-2.0
|
utwente/lisa-telefonie
|
client/app/tel/kpn/kpnDelete.controller.js
|
339
|
'use strict';
angular.module('ictsAppApp')
.controller('TelKpnDeleteModalCtrl', ['$scope', '$modalInstance', 'record',
function ($scope, $modalInstance, record) {
$scope.record = record;
$scope.delete = function () {
$modalInstance.close();
};
$scope.cancel = function () {
$modalInstance.dismiss('cancel');
};
}]);
|
apache-2.0
|
Top-Q/jsystem
|
jsystem-core-projects/jsystemCore/src/main/java/jsystem/utils/ExtentionFilter.java
|
773
|
/*
* Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
*/
package jsystem.utils;
import java.io.File;
import java.io.FilenameFilter;
/**
* This filter is used to filter file names by their ending (suffix).
*/
public class ExtentionFilter implements FilenameFilter {
String endWith = null;
/**
* Create instance of ExtentionFilter
*
* @param endWith
* the suffix that accepted file names must end with
*/
public ExtentionFilter(String endWith) {
this.endWith = endWith;
}
/**
* Filter files.
*
* @param name
* the file name to test
* @return true if the name ends with the suffix set in the constructor (case-insensitive)
*/
public boolean accept(File dir, String name) {
if (endWith == null) {
return true;
}
return (name.toLowerCase().endsWith(endWith.toLowerCase()));
}
}
|
apache-2.0
|
domiworks/TDA-Landscape-Generator
|
jogl-2.0/javadoc/gluegen/javadoc/com/jogamp/common/util/locks/Lock.html
|
14654
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_45) on Thu Jan 30 13:49:11 CET 2014 -->
<title>Lock (GlueGen Runtime Documentation)</title>
<meta name="date" content="2014-01-30">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Lock (GlueGen Runtime Documentation)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="../../../../../com/jogamp/common/util/locks/LockFactory.html" title="class in com.jogamp.common.util.locks"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/jogamp/common/util/locks/Lock.html" target="_top">Frames</a></li>
<li><a href="Lock.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field_summary">Field</a> | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field_detail">Field</a> | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">com.jogamp.common.util.locks</div>
<h2 title="Interface Lock" class="title">Interface Lock</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Known Subinterfaces:</dt>
<dd><a href="../../../../../com/jogamp/common/util/locks/RecursiveLock.html" title="interface in com.jogamp.common.util.locks">RecursiveLock</a>, <a href="../../../../../com/jogamp/common/util/locks/RecursiveThreadGroupLock.html" title="interface in com.jogamp.common.util.locks">RecursiveThreadGroupLock</a>, <a href="../../../../../com/jogamp/common/util/locks/ThreadLock.html" title="interface in com.jogamp.common.util.locks">ThreadLock</a></dd>
</dl>
<dl>
<dt>All Known Implementing Classes:</dt>
<dd><a href="../../../../../com/jogamp/common/util/locks/SingletonInstance.html" title="class in com.jogamp.common.util.locks">SingletonInstance</a></dd>
</dl>
<hr>
<br>
<pre>public interface <span class="strong">Lock</span></pre>
<div class="block">Specifying a thread blocking lock implementation</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field_summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
<caption><span>Fields</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#DEBUG">DEBUG</a></strong></code>
<div class="block">Enable via the property <code>jogamp.debug.Lock</code></div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#DEFAULT_TIMEOUT">DEFAULT_TIMEOUT</a></strong></code>
<div class="block">The default <a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT"><code>TIMEOUT</code></a> value, of 5000L ms</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static long</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT">TIMEOUT</a></strong></code>
<div class="block">The <code>TIMEOUT</code> for <a href="../../../../../com/jogamp/common/util/locks/Lock.html#lock()"><code>lock()</code></a> in ms,
defaults to <a href="../../../../../com/jogamp/common/util/locks/Lock.html#DEFAULT_TIMEOUT"><code>DEFAULT_TIMEOUT</code></a>.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#TRACE_LOCK">TRACE_LOCK</a></strong></code>
<div class="block">Enable via the property <code>jogamp.debug.Lock.TraceLock</code></div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#isLocked()">isLocked</a></strong>()</code>
<div class="block">Query if locked</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#lock()">lock</a></strong>()</code>
<div class="block">Blocking until the lock is acquired by this Thread or <a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT"><code>TIMEOUT</code></a> is reached.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#tryLock(long)">tryLock</a></strong>(long timeout)</code>
<div class="block">Blocking until the lock is acquired by this Thread or <code>maxwait</code> in ms is reached.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../../com/jogamp/common/util/locks/Lock.html#unlock()">unlock</a></strong>()</code>
<div class="block">Release the lock.</div>
</td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ FIELD DETAIL =========== -->
<ul class="blockList">
<li class="blockList"><a name="field_detail">
<!-- -->
</a>
<h3>Field Detail</h3>
<a name="DEBUG">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>DEBUG</h4>
<pre>static final boolean DEBUG</pre>
<div class="block">Enable via the property <code>jogamp.debug.Lock</code></div>
</li>
</ul>
<a name="TRACE_LOCK">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>TRACE_LOCK</h4>
<pre>static final boolean TRACE_LOCK</pre>
<div class="block">Enable via the property <code>jogamp.debug.Lock.TraceLock</code></div>
</li>
</ul>
<a name="DEFAULT_TIMEOUT">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>DEFAULT_TIMEOUT</h4>
<pre>static final long DEFAULT_TIMEOUT</pre>
<div class="block">The default <a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT"><code>TIMEOUT</code></a> value, of 5000L ms</div>
<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#com.jogamp.common.util.locks.Lock.DEFAULT_TIMEOUT">Constant Field Values</a></dd></dl>
</li>
</ul>
<a name="TIMEOUT">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>TIMEOUT</h4>
<pre>static final long TIMEOUT</pre>
<div class="block">The <code>TIMEOUT</code> for <a href="../../../../../com/jogamp/common/util/locks/Lock.html#lock()"><code>lock()</code></a> in ms,
defaults to <a href="../../../../../com/jogamp/common/util/locks/Lock.html#DEFAULT_TIMEOUT"><code>DEFAULT_TIMEOUT</code></a>.
<p>
It can be overridden via the system property <code>jogamp.common.utils.locks.Lock.timeout</code>.
</p></div>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="lock()">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>lock</h4>
<pre>void lock()
throws <a href="http://docs.oracle.com/javase/6/docs/api/java/lang/RuntimeException.html?is-external=true" title="class or interface in java.lang">RuntimeException</a></pre>
<div class="block">Blocking until the lock is acquired by this Thread or <a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT"><code>TIMEOUT</code></a> is reached.</div>
<dl><dt><span class="strong">Throws:</span></dt>
<dd><code><a href="http://docs.oracle.com/javase/6/docs/api/java/lang/RuntimeException.html?is-external=true" title="class or interface in java.lang">RuntimeException</a></code> - in case of <a href="../../../../../com/jogamp/common/util/locks/Lock.html#TIMEOUT"><code>TIMEOUT</code></a></dd></dl>
</li>
</ul>
<a name="tryLock(long)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>tryLock</h4>
<pre>boolean tryLock(long timeout)
throws <a href="http://docs.oracle.com/javase/6/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
<div class="block">Blocking until the lock is acquired by this Thread or <code>maxwait</code> in ms is reached.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>timeout</code> - Maximum time in ms to wait to acquire the lock. If this value is zero,
the call returns immediately either without being able
to acquire the lock, or with acquiring the lock directly while ignoring any scheduling order.</dd>
<dt><span class="strong">Returns:</span></dt><dd>true if the lock has been acquired within <code>maxwait</code>, otherwise false</dd>
<dt><span class="strong">Throws:</span></dt>
<dd><code><a href="http://docs.oracle.com/javase/6/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code></dd></dl>
</li>
</ul>
<a name="unlock()">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>unlock</h4>
<pre>void unlock()
throws <a href="http://docs.oracle.com/javase/6/docs/api/java/lang/RuntimeException.html?is-external=true" title="class or interface in java.lang">RuntimeException</a></pre>
<div class="block">Release the lock.</div>
<dl><dt><span class="strong">Throws:</span></dt>
<dd><code><a href="http://docs.oracle.com/javase/6/docs/api/java/lang/RuntimeException.html?is-external=true" title="class or interface in java.lang">RuntimeException</a></code> - in case the lock is not acquired by this thread.</dd></dl>
</li>
</ul>
<a name="isLocked()">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>isLocked</h4>
<pre>boolean isLocked()</pre>
<div class="block">Query if locked</div>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="../../../../../com/jogamp/common/util/locks/LockFactory.html" title="class in com.jogamp.common.util.locks"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/jogamp/common/util/locks/Lock.html" target="_top">Frames</a></li>
<li><a href="Lock.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field_summary">Field</a> | </li>
<li>Constr | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field_detail">Field</a> | </li>
<li>Constr | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
apache-2.0
|
freddiedfre/google_bing_ads
|
examples/Dfp/v201502/ProposalLineItemService/CreateProposalLineItems.php
|
4966
|
<?php
/**
* This example creates a new proposal line item that targets the whole network.
* To determine which proposal line items exist, run
* GetAllProposalLineItems.php.
*
* Tags: NetworkService.getCurrentNetwork
* Tags: ProposalLineItemService.createProposalLineItems
*
* PHP version 5
*
* Copyright 2014, Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @package GoogleApiAdsDfp
* @subpackage v201502
* @category WebServices
* @copyright 2014, Google Inc. All Rights Reserved.
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License,
* Version 2.0
* @author Vincent Tsao
*/
error_reporting(E_STRICT | E_ALL);
// You can set the include path to src directory or reference
// DfpUser.php directly via require_once.
// $path = '/path/to/dfp_api_php_lib/src';
$path = dirname(__FILE__) . '/../../../../src';
set_include_path(get_include_path() . PATH_SEPARATOR . $path);
require_once 'Google/Api/Ads/Dfp/Lib/DfpUser.php';
require_once 'Google/Api/Ads/Dfp/Util/DateTimeUtils.php';
require_once dirname(__FILE__) . '/../../../Common/ExampleUtils.php';
// Set the ID of the proposal that the proposal line items will belong to.
$proposalId = 'INSERT_PROPOSAL_ID_HERE';
// Set the ID of the product that the proposal line items should be created
// from.
$productId = 'INSERT_PRODUCT_ID_HERE';
// Set the ID of the rate card that the proposal line items should be priced
// with.
$rateCardId = 'INSERT_RATE_CARD_ID_HERE';
try {
// Get DfpUser from credentials in "../auth.ini"
// relative to the DfpUser.php file's directory.
$user = new DfpUser();
// Log SOAP XML request and response.
$user->LogDefaults();
// Get the ProposalLineItemService.
$proposalLineItemService = $user->GetService('ProposalLineItemService',
'v201502');
// Get the NetworkService.
$networkService = $user->GetService('NetworkService', 'v201502');
// Get the root ad unit ID used to target the whole site.
$rootAdUnitId = $networkService->getCurrentNetwork()->effectiveRootAdUnitId;
// Create inventory targeting.
$inventoryTargeting = new InventoryTargeting();
// Create ad unit targeting for the root ad unit (i.e. the whole network).
$adUnitTargeting = new AdUnitTargeting();
$adUnitTargeting->adUnitId = $rootAdUnitId;
$adUnitTargeting->includeDescendants = true;
$inventoryTargeting->targetedAdUnits = array($adUnitTargeting);
// Create targeting.
$targeting = new Targeting();
$targeting->inventoryTargeting = $inventoryTargeting;
// Create a proposal line item.
$proposalLineItem = new ProposalLineItem();
$proposalLineItem->name = sprintf('Proposal line item #%s', uniqid());
$proposalLineItem->proposalId = $proposalId;
$proposalLineItem->rateCardId = $rateCardId;
$proposalLineItem->productId = $productId;
$proposalLineItem->targeting = $targeting;
// Set the length of the proposal line item to run.
$proposalLineItem->startDateTime =
DateTimeUtils::GetDfpDateTime(new DateTime());
$proposalLineItem->endDateTime =
DateTimeUtils::GetDfpDateTime(new DateTime('+1 month'));
// Set delivery specifications for the proposal line item.
$proposalLineItem->deliveryRateType = 'EVENLY';
$proposalLineItem->creativeRotationType = 'OPTIMIZED';
// Set billing specifications for the proposal line item.
$proposalLineItem->billingCap = 'CAPPED_CUMULATIVE';
$proposalLineItem->billingSource = 'THIRD_PARTY_VOLUME';
// Set pricing for the proposal line item for 1000 impressions at a CPM of $2
// for a total value of $2.
$goal = new Goal();
$goal->units = 1000;
$goal->unitType = 'IMPRESSIONS';
$proposalLineItem->goal = $goal;
$proposalLineItem->cost = new Money('USD', 2000000);
$proposalLineItem->costPerUnit = new Money('USD', 2000000);
$proposalLineItem->rateType = 'CPM';
// Create the proposal line item on the server.
$proposalLineItems = $proposalLineItemService->createProposalLineItems(
array($proposalLineItem));
foreach ($proposalLineItems as $createdProposalLineItem) {
printf("A proposal line item with ID %d and name '%s' was created.\n",
$createdProposalLineItem->id, $createdProposalLineItem->name);
}
} catch (OAuth2Exception $e) {
ExampleUtils::CheckForOAuth2Errors($e);
} catch (ValidationException $e) {
ExampleUtils::CheckForOAuth2Errors($e);
} catch (Exception $e) {
printf("%s\n", $e->getMessage());
}
|
apache-2.0
|
koshelev/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Listener.scala
|
9301
|
package com.twitter.finagle.netty4
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.finagle._
import com.twitter.finagle.netty4.channel.{Netty4FramedServerChannelInitializer, Netty4RawServerChannelInitializer, ServerBridge}
import com.twitter.finagle.netty4.transport.ChannelTransport
import com.twitter.finagle.param.Timer
import com.twitter.finagle.server.Listener
import com.twitter.finagle.transport.Transport
import com.twitter.util._
import io.netty.bootstrap.ServerBootstrap
import io.netty.channel._
import io.netty.channel.epoll.{EpollEventLoopGroup, EpollServerSocketChannel}
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.util.concurrent.{FutureListener, Future => NettyFuture}
import java.lang.{Boolean => JBool, Integer => JInt}
import java.net.SocketAddress
import java.util.concurrent.TimeUnit
private[finagle] object Netty4Listener {
val TrafficClass: ChannelOption[JInt] = ChannelOption.newInstance("trafficClass")
/**
* A [[com.twitter.finagle.Stack.Param]] used to configure the ability to
* exert back pressure by only reading from the Channel when the [[Transport]] is
* read.
*/
private[finagle] case class BackPressure(enabled: Boolean) {
def mk(): (BackPressure, Stack.Param[BackPressure]) = (this, BackPressure.param)
}
private[finagle] object BackPressure {
implicit val param: Stack.Param[BackPressure] =
Stack.Param(BackPressure(enabled = true))
}
}
/**
* Constructs a `Listener[In, Out]` given a ``pipelineInit`` function
* responsible for framing a [[Transport]] stream. The [[Listener]] is configured
* via the passed in [[com.twitter.finagle.Stack.Param Params]].
*
* @see [[com.twitter.finagle.server.Listener]]
* @see [[com.twitter.finagle.transport.Transport]]
* @see [[com.twitter.finagle.param]]
*/
private[finagle] case class Netty4Listener[In, Out](
pipelineInit: ChannelPipeline => Unit,
params: Stack.Params,
transportFactory: Channel => Transport[Any, Any] = { ch: Channel => new ChannelTransport(ch) },
setupMarshalling: ChannelInitializer[Channel] => ChannelHandler = identity
)(implicit mIn: Manifest[In], mOut: Manifest[Out])
extends Listener[In, Out] {
import Netty4Listener.BackPressure
// Exports N4-related metrics under `finagle/netty4`.
exportNetty4MetricsAndRegistryEntries()
trackReferenceLeaks.init
private[this] val Timer(timer) = params[Timer]
// transport params
private[this] val Transport.Liveness(_, _, keepAlive) = params[Transport.Liveness]
private[this] val Transport.BufferSizes(sendBufSize, recvBufSize) = params[Transport.BufferSizes]
private[this] val Transport.Options(noDelay, reuseAddr) = params[Transport.Options]
// listener params
private[this] val Listener.Backlog(backlog) = params[Listener.Backlog]
private[this] val BackPressure(backPressureEnabled) = params[BackPressure]
// netty4 params
private[this] val param.Allocator(allocator) = params[param.Allocator]
/**
* Listen for connections and apply the `serveTransport` callback on
* connected [[Transport transports]].
*
* @param addr socket address for listening.
* @param serveTransport a call-back for newly created transports which in turn are
* created for new connections.
* @note the ``serveTransport`` implementation is responsible for calling
* [[Transport.close() close]] on [[Transport transports]].
*/
def listen(addr: SocketAddress)(serveTransport: Transport[In, Out] => Unit): ListeningServer =
new ListeningServer with CloseAwaitably {
private[this] val bridge = new ServerBridge(
transportFactory.andThen(Transport.cast[In, Out]),
serveTransport
)
private[this] val bossLoop: EventLoopGroup =
if (nativeEpoll.enabled)
new EpollEventLoopGroup(
1 /*nThreads*/ ,
new NamedPoolThreadFactory("finagle/netty4/boss", makeDaemons = true)
)
else
new NioEventLoopGroup(
1 /*nThreads*/ ,
new NamedPoolThreadFactory("finagle/netty4/boss", makeDaemons = true)
)
private[this] val bootstrap = new ServerBootstrap()
if (nativeEpoll.enabled)
bootstrap.channel(classOf[EpollServerSocketChannel])
else
bootstrap.channel(classOf[NioServerSocketChannel])
bootstrap.group(bossLoop, params[param.WorkerPool].eventLoopGroup)
bootstrap.childOption[JBool](ChannelOption.TCP_NODELAY, noDelay)
bootstrap.option(ChannelOption.ALLOCATOR, allocator)
bootstrap.childOption(ChannelOption.ALLOCATOR, allocator)
bootstrap.option[JBool](ChannelOption.SO_REUSEADDR, reuseAddr)
backlog.foreach(bootstrap.option[JInt](ChannelOption.SO_BACKLOG, _))
sendBufSize.foreach(bootstrap.childOption[JInt](ChannelOption.SO_SNDBUF, _))
recvBufSize.foreach(bootstrap.childOption[JInt](ChannelOption.SO_RCVBUF, _))
keepAlive.foreach(bootstrap.childOption[JBool](ChannelOption.SO_KEEPALIVE, _))
bootstrap.childOption[JBool](ChannelOption.AUTO_READ, !backPressureEnabled)
params[Listener.TrafficClass].value.foreach { tc =>
bootstrap.option[JInt](Netty4Listener.TrafficClass, tc)
bootstrap.childOption[JInt](Netty4Listener.TrafficClass, tc)
}
private[this] val rawInitializer = new Netty4RawServerChannelInitializer(params)
private[this] val framedInitializer = new Netty4FramedServerChannelInitializer(params)
// our netty pipeline is divided into four chunks:
// raw => marshalling => framed => bridge
// `pipelineInit` sets up the marshalling handlers
// `rawInitializer` adds the raw handlers to the beginning
// `framedInitializer` adds the framed handlers to the end
// `bridge` adds the bridging handler to the end.
//
// This order is necessary because the bridge must be at the end, raw must
// be before marshalling, and marshalling must be before framed. This
// creates an ordering:
//
// raw => marshalling
// marshalling => framed
// raw => bridge
// marshalling => bridge
// framed => bridge
//
// The only way to satisfy this ordering is
//
// raw => marshalling => framed => bridge.
bootstrap.childHandler(new ChannelInitializer[Channel] {
def initChannel(ch: Channel): Unit = {
// pipelineInit comes first so that implementors can put whatever they
// want in pipelineInit, without having to worry about clobbering any
// of the other handlers.
pipelineInit(ch.pipeline)
ch.pipeline.addLast(rawInitializer)
// we use `setupMarshalling` to support protocols where the
// connection is multiplexed over child channels in the
// netty layer
ch.pipeline.addLast("marshalling", setupMarshalling(new ChannelInitializer[Channel] {
def initChannel(ch: Channel): Unit = {
ch.pipeline.addLast("framedInitializer", framedInitializer)
// The bridge handler must be last in the pipeline to ensure
// that the bridging code sees all encoding and transformations
// of inbound messages.
ch.pipeline.addLast("finagleBridge", bridge)
}
}))
}
})
// Block until listening socket is bound. `ListeningServer`
// represents a bound server and if we don't block here there's
// a race between #listen and #boundAddress being available.
private[this] val bound = bootstrap.bind(addr).awaitUninterruptibly()
if (!bound.isSuccess)
throw new java.net.BindException(
s"Failed to bind to ${addr.toString}: ${bound.cause().getMessage}")
private[this] val ch = bound.channel()
/**
* Immediately close the listening socket then shutdown the netty
* boss threadpool with ``deadline`` timeout for existing tasks.
*
* @return a [[Future]] representing the shutdown of the boss threadpool.
*/
def closeServer(deadline: Time): Future[Unit] = closeAwaitably {
// note: this ultimately calls close(2) on
// a non-blocking socket so it should not block.
ch.close().awaitUninterruptibly()
val p = new Promise[Unit]
val timeout = deadline - Time.now
val timeoutMs = timeout.inMillis
// The boss loop immediately starts refusing new work.
// Existing tasks have ``timeoutMs`` time to finish executing.
bossLoop
.shutdownGracefully(0 /* quietPeriod */, timeoutMs.max(0), TimeUnit.MILLISECONDS)
.addListener(new FutureListener[Any] {
def operationComplete(future: NettyFuture[Any]): Unit = p.setDone()
})
// Don't rely on netty to satisfy the promise and transform all results to
// success because we don't want the non-deterministic lifecycle of external
// resources to affect application success.
p.raiseWithin(timeout)(timer).transform { _ => Future.Done }
}
def boundAddress: SocketAddress = ch.localAddress()
}
override def toString: String = "Netty4Listener"
}
|
apache-2.0
|
karmab/kcli
|
kvirt/web/static/js/imageaction.js
|
1212
|
function imagecreate(image, pool, url, cmd){
if ( image === undefined ) {
image = $("#image").val();
}
if ( pool === undefined ) {
pool = $("#pool").val();
}
if ( url === undefined ) {
url = $("#url").val();
}
if ( cmd === undefined ) {
cmd = $("#cmd").val();
}
$("#wheel").show();
var data = {'image': image, 'action': 'create', 'pool': pool, 'url': url, 'cmd': cmd};
$.ajax({
type: "POST",
url: '/imageaction',
data: data,
success: function(data) {
$("#wheel").hide();
$("#urllabel").hide();
$("#url").hide();
if (data.result == 'success') {
$('.top-right').notify({message: { text: "Image "+image+" created!!!" }, type: 'success'}).show();
} else {
$('.top-right').notify({message: { text: "Image "+image+" not created because "+data.reason }, type: 'danger'}).show();
};
}
});
}
function imageurl(){
image = $( "#image option:selected" ).text();
if (~image.indexOf("rhel")) {
$("#url").show();
$("#urllabel").show();
url = $( "#image option:selected" ).attr("url");
window.open(url, "_blank");
}
}
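// Hypothetical wiring sketch (not part of the original file): imagecreate() falls back to
// the #image/#pool/#url/#cmd form fields for any argument left undefined, so a click
// handler only needs to pass the values it wants to override. The button id below is invented.
//
// $("#imagecreatebutton").on("click", function() {
//     imagecreate(undefined, "default", undefined, undefined);
// });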
|
apache-2.0
|
resin-io-library/base-images
|
balena-base-images/node/artik530/debian/buster/17.6.0/run/Dockerfile
|
2932
|
# AUTOGENERATED FILE
FROM balenalib/artik530-debian:buster-run
ENV NODE_VERSION 17.6.0
ENV YARN_VERSION 1.22.4
RUN buildDeps='curl libatomic1' \
&& set -x \
&& for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& echo "31786cf6387c85a34f1eb85be5838facaad40f50f61030557e42a4af4bb31294 node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@node" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Buster \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v17.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh
|
apache-2.0
|
consulo/consulo
|
modules/base/ui-api/src/main/java/consulo/ui/layout/FoldoutLayout.java
|
1731
|
/*
* Copyright 2013-2020 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.ui.layout;
import consulo.disposer.Disposable;
import consulo.localize.LocalizeValue;
import consulo.ui.Component;
import consulo.ui.internal.UIInternal;
import consulo.ui.annotation.RequiredUIAccess;
import javax.annotation.Nonnull;
import java.util.EventListener;
/**
* @author VISTALL
* @since 2020-05-29
*/
public interface FoldoutLayout extends Layout {
@FunctionalInterface
static interface StateListener extends EventListener {
@RequiredUIAccess
void stateChanged(boolean state);
}
@Nonnull
static FoldoutLayout create(@Nonnull LocalizeValue titleValue, @Nonnull Component component) {
return create(titleValue, component, true);
}
@Nonnull
static FoldoutLayout create(@Nonnull LocalizeValue titleValue, @Nonnull Component component, boolean state) {
return UIInternal.get()._Layouts_foldout(titleValue, component, state);
}
@Nonnull
@RequiredUIAccess
FoldoutLayout setState(boolean showing);
@Nonnull
@RequiredUIAccess
FoldoutLayout setTitle(@Nonnull LocalizeValue title);
@Nonnull
Disposable addStateListener(@Nonnull StateListener stateListener);
}
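// Illustrative usage sketch (not part of the original interface). It assumes `component`
// is any consulo.ui.Component and that a plain-text LocalizeValue factory such as
// LocalizeValue.of(...) is available; both are stand-ins for brevity here.
//
// FoldoutLayout layout = FoldoutLayout.create(LocalizeValue.of("Advanced"), component, false);
// Disposable subscription = layout.addStateListener(state -> layout.setTitle(
//         LocalizeValue.of(state ? "Advanced (expanded)" : "Advanced")));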
|
apache-2.0
|
dapregi/cellbase
|
cellbase-app/src/main/java/org/opencb/cellbase/app/cli/DownloadCommandExecutor.java
|
52004
|
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.cellbase.app.cli;
import com.beust.jcommander.ParameterException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.opencb.cellbase.core.config.Species;
import org.opencb.commons.utils.FileUtils;
import java.io.*;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* Created by imedina on 03/02/15.
*/
public class DownloadCommandExecutor extends CommandExecutor {
private CliOptionsParser.DownloadCommandOptions downloadCommandOptions;
private Path output = null;
private Path common = null;
private File ensemblScriptsFolder;
private String ensemblVersion;
private String ensemblRelease;
private Species species;
private static final String[] VARIATION_FILES = {"variation.txt.gz", "variation_feature.txt.gz",
"transcript_variation.txt.gz", "variation_synonym.txt.gz", "seq_region.txt.gz", "source.txt.gz",
"attrib.txt.gz", "attrib_type.txt.gz", "seq_region.txt.gz", "structural_variation_feature.txt.gz",
"study.txt.gz", "phenotype.txt.gz", "phenotype_feature.txt.gz", "phenotype_feature_attrib.txt.gz",
"motif_feature_variation.txt.gz", "genotype_code.txt.gz", "allele_code.txt.gz",
"population_genotype.txt.gz", "population.txt.gz", "allele.txt.gz", };
private static final String[] REGULATION_FILES = {"AnnotatedFeatures.gff.gz", "MotifFeatures.gff.gz",
"RegulatoryFeatures_MultiCell.gff.gz", };
private static final Map<String, String> GENE_UNIPROT_XREF_FILES = new HashMap() {
{
put("Homo sapiens", "HUMAN_9606_idmapping_selected.tab.gz");
put("Mus musculus", "MOUSE_10090_idmapping_selected.tab.gz");
put("Rattus norvegicus", "RAT_10116_idmapping_selected.tab.gz");
put("Danio rerio", "DANRE_7955_idmapping_selected.tab.gz");
put("Drosophila melanogaster", "DROME_7227_idmapping_selected.tab.gz");
put("Saccharomyces cerevisiae", "YEAST_559292_idmapping_selected.tab.gz");
}
};
private static final String ENSEMBL_NAME = "ENSEMBL";
private static final String GENE_EXPRESSION_ATLAS_NAME = "Gene Expression Atlas";
private static final String HPO_NAME = "HPO";
private static final String DISGENET_NAME = "DisGeNET";
private static final String DGIDB_NAME = "DGIdb";
private static final String UNIPROT_NAME = "UniProt";
private static final String CADD_NAME = "CADD";
private static final String MIRBASE_NAME = "miRBase";
private static final String MIRTARBASE_NAME = "miRTarBase";
private static final String TARGETSCAN_NAME = "TargetScan";
private static final String INTACT_NAME = "IntAct";
private static final String INTERPRO_NAME = "InterPro";
private static final String GERP_NAME = "GERP++";
private static final String PHASTCONS_NAME = "PhastCons";
private static final String PHYLOP_NAME = "PhyloP";
private static final String CLINVAR_NAME = "ClinVar";
private static final String GWAS_NAME = "Gwas Catalog";
// private static final String DBSNP_NAME = "dbSNP";
private static final String REACTOME_NAME = "Reactome";
private static final String TRF_NAME = "Tandem repeats finder";
private static final String GSD_NAME = "Genomic super duplications";
private static final String WM_NAME = "WindowMasker";
public DownloadCommandExecutor(CliOptionsParser.DownloadCommandOptions downloadCommandOptions) {
super(downloadCommandOptions.commonOptions.logLevel, downloadCommandOptions.commonOptions.verbose,
downloadCommandOptions.commonOptions.conf);
this.downloadCommandOptions = downloadCommandOptions;
if (downloadCommandOptions.output != null) {
output = Paths.get(downloadCommandOptions.output);
}
if (downloadCommandOptions.common != null) {
common = Paths.get(downloadCommandOptions.common);
} else {
common = output.resolve("common");
}
this.ensemblScriptsFolder = new File(System.getProperty("basedir") + "/bin/ensembl-scripts/");
}
/**
* Execute specific 'download' command options.
*/
public void execute() {
try {
if (downloadCommandOptions.species != null && !downloadCommandOptions.species.isEmpty()) {
// We need to get the Species object from the CLI name
// This can be the scientific or common name, or the ID
// Species speciesToDownload = null;
for (Species sp : configuration.getAllSpecies()) {
if (downloadCommandOptions.species.equalsIgnoreCase(sp.getScientificName())
|| downloadCommandOptions.species.equalsIgnoreCase(sp.getCommonName())
|| downloadCommandOptions.species.equalsIgnoreCase(sp.getId())) {
species = sp;
break;
}
}
// If everything is right we launch the download
if (species != null) {
processSpecies(species);
} else {
logger.error("Species '{}' not valid", downloadCommandOptions.species);
}
} else {
logger.error("--species parameter '{}' not valid", downloadCommandOptions.species);
}
} catch (ParameterException e) {
logger.error("Error in 'download' command line: " + e.getMessage());
} catch (IOException | InterruptedException e) {
logger.error("Error downloading '" + downloadCommandOptions.species + "' files: " + e.getMessage());
}
}
private void processSpecies(Species sp) throws IOException, InterruptedException {
logger.info("Processing species " + sp.getScientificName());
// We need to find which is the correct Ensembl host URL.
// This can be different depending on whether it is a vertebrate species.
String ensemblHostUrl;
if (configuration.getSpecies().getVertebrates().contains(sp)) {
ensemblHostUrl = configuration.getDownload().getEnsembl().getUrl().getHost();
} else {
ensemblHostUrl = configuration.getDownload().getEnsemblGenomes().getUrl().getHost();
}
// Getting the assembly.
// By default the first assembly in the configuration.json
Species.Assembly assembly = null;
if (downloadCommandOptions.assembly == null || downloadCommandOptions.assembly.isEmpty()) {
assembly = sp.getAssemblies().get(0);
} else {
for (Species.Assembly assembly1 : sp.getAssemblies()) {
if (downloadCommandOptions.assembly.equalsIgnoreCase(assembly1.getName())) {
assembly = assembly1;
break;
}
}
}
// Checking that the species and assembly are correct
if (ensemblHostUrl == null || assembly == null) {
logger.error("Something is not correct, check the species '{}' or the assembly '{}'",
downloadCommandOptions.species, downloadCommandOptions.assembly);
return;
}
// Output folder creation
String spShortName = sp.getScientificName().toLowerCase()
.replaceAll("\\.", "")
.replaceAll("\\)", "")
.replaceAll("\\(", "")
.replaceAll("[-/]", " ")
.replaceAll("\\s+", "_");
String spAssembly = assembly.getName().toLowerCase();
Path spFolder = output.resolve(spShortName + "_" + spAssembly);
makeDir(spFolder);
makeDir(common);
ensemblVersion = assembly.getEnsemblVersion();
ensemblRelease = "release-" + ensemblVersion.split("_")[0];
if (downloadCommandOptions.data != null && !downloadCommandOptions.data.isEmpty()) {
List<String> dataList;
if (downloadCommandOptions.data.equals("all")) {
dataList = sp.getData();
} else {
dataList = Arrays.asList(downloadCommandOptions.data.split(","));
}
for (String data : dataList) {
switch (data) {
case EtlCommons.GENOME_DATA:
downloadReferenceGenome(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
break;
case EtlCommons.GENE_DATA:
downloadEnsemblGene(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
break;
case EtlCommons.GENE_DISEASE_ASSOCIATION_DATA:
if (speciesHasInfoToDownload(sp, "gene_disease_association")) {
downloadGeneDiseaseAssociation(sp, spFolder);
}
break;
case EtlCommons.VARIATION_DATA:
if (speciesHasInfoToDownload(sp, "variation")) {
downloadVariation(sp, spShortName, spFolder, ensemblHostUrl);
}
break;
case EtlCommons.VARIATION_FUNCTIONAL_SCORE_DATA:
if (speciesHasInfoToDownload(sp, "variation_functional_score")) {
downloadCaddScores(sp, assembly.getName(), spFolder);
}
break;
case EtlCommons.REGULATION_DATA:
if (speciesHasInfoToDownload(sp, "regulation")) {
downloadRegulation(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
}
break;
case EtlCommons.PROTEIN_DATA:
if (speciesHasInfoToDownload(sp, "protein")) {
downloadProtein();
}
break;
case EtlCommons.CONSERVATION_DATA:
if (speciesHasInfoToDownload(sp, "conservation")) {
downloadConservation(sp, assembly.getName(), spFolder);
}
break;
case EtlCommons.CLINICAL_DATA:
if (speciesHasInfoToDownload(sp, "clinical")) {
downloadClinical(sp, spFolder);
}
break;
case EtlCommons.REPEATS_DATA:
if (speciesHasInfoToDownload(sp, "repeats")) {
downloadRepeats(sp, assembly.getName(), spFolder);
}
break;
default:
System.out.println("This data parameter is not allowed");
break;
}
}
}
}
private boolean speciesHasInfoToDownload(Species sp, String info) {
boolean hasInfo = true;
if (sp.getData() == null || !sp.getData().contains(info)) {
logger.warn("Species '{}' has no '{}' information available to download", sp.getScientificName(), info);
hasInfo = false;
}
return hasInfo;
}
private String getPhylo(Species sp) {
if (configuration.getSpecies().getVertebrates().contains(sp)) {
return "vertebrates";
} else if (configuration.getSpecies().getMetazoa().contains(sp)) {
return "metazoa";
} else if (configuration.getSpecies().getFungi().contains(sp)) {
return "fungi";
} else if (configuration.getSpecies().getProtist().contains(sp)) {
return "protists";
} else if (configuration.getSpecies().getPlants().contains(sp)) {
return "plants";
} else if (configuration.getSpecies().getVirus().contains(sp)) {
return "virus";
} else if (configuration.getSpecies().getBacteria().contains(sp)) {
return "bacteria";
} else {
throw new ParameterException("Species " + sp.getScientificName() + " not associated to any phylo in the configuration file");
}
}
private void downloadReferenceGenome(Species sp, String shortName, String assembly, Path spFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading genome information ...");
Path sequenceFolder = spFolder.resolve("genome");
makeDir(sequenceFolder);
// Reference genome sequences are downloaded from Ensembl
String url = host + "/" + ensemblRelease;
if (sp.getScientificName().equals("Homo sapiens")) {
// New Homo sapiens assemblies contain too many ALT regions,
// so we download 'primary_assembly' file
url = url + "/fasta/" + shortName + "/dna/*.dna.primary_assembly.fa.gz";
} else {
if (!configuration.getSpecies().getVertebrates().contains(sp)) {
url = host + "/" + ensemblRelease + "/" + getPhylo(sp);
}
url = url + "/fasta/";
if (configuration.getSpecies().getBacteria().contains(sp)) {
// WARN: assuming there's just one assembly
url = url + sp.getAssemblies().get(0).getEnsemblCollection() + "/";
}
url = url + shortName + "/dna/*.dna.toplevel.fa.gz";
}
String outputFileName = StringUtils.capitalize(shortName) + "." + assembly + ".fa.gz";
Path outputPath = sequenceFolder.resolve(outputFileName);
downloadFile(url, outputPath.toString());
logger.info("Saving reference genome version data at {}", sequenceFolder.resolve("genomeVersion.json"));
saveVersionData(EtlCommons.GENOME_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(),
Collections.singletonList(url), sequenceFolder.resolve("genomeVersion.json"));
}
private String getTimeStamp() {
return new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime());
}
private void saveVersionData(String data, String source, String version, String date, List<String> url,
Path outputFilePath) {
Map versionData = new HashedMap();
versionData.put("data", data);
versionData.put("source", source);
versionData.put("version", version);
versionData.put("downloadDate", date);
versionData.put("uRL", url);
writeVersionDataFile(versionData, outputFilePath);
}
private void writeVersionDataFile(Map versionData, Path outputFilePath) {
try {
OutputStream os = Files.newOutputStream(outputFilePath);
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(os));
ObjectMapper jsonObjectMapper = new ObjectMapper();
ObjectWriter jsonObjectWriter = jsonObjectMapper.writer();
bw.write(jsonObjectWriter.writeValueAsString(versionData) + "\n");
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
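// For reference, writeVersionDataFile emits the map filled in saveVersionData as a single
// JSON line, e.g. (illustrative values only):
// {"data":"genome","source":"ENSEMBL","version":"79_37","downloadDate":"20160510_120000","uRL":["ftp://ftp.ensembl.org/..."]}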
private void downloadEnsemblGene(Species sp, String spShortName, String assembly, Path speciesFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading gene information ...");
Path geneFolder = speciesFolder.resolve("gene");
makeDir(geneFolder);
downloadEnsemblData(sp, spShortName, geneFolder, host);
downloadDrugData(sp, speciesFolder);
downloadGeneUniprotXref(sp, geneFolder);
downloadGeneExpressionAtlas();
downloadGeneDiseaseAnnotation(geneFolder);
runGeneExtraInfo(sp, assembly, geneFolder);
}
private void downloadDrugData(Species species, Path speciesFolder) throws IOException, InterruptedException {
if (species.getScientificName().equals("Homo sapiens")) {
logger.info("Downloading drug-gene data...");
Path geneDrugFolder = speciesFolder.resolve("gene/geneDrug");
makeDir(geneDrugFolder);
String url = configuration.getDownload().getDgidb().getHost();
downloadFile(url, geneDrugFolder.resolve("dgidb.tsv").toString());
saveVersionData(EtlCommons.GENE_DATA, DGIDB_NAME, null, getTimeStamp(), Collections.singletonList(url),
geneDrugFolder.resolve("dgidbVersion.json"));
}
}
private void downloadEnsemblData(Species sp, String spShortName, Path geneFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading gene Ensembl data (gtf, pep, cdna, motifs) ...");
List<String> downloadedUrls = new ArrayList<>(4);
String ensemblHost = host + "/" + ensemblRelease;
if (!configuration.getSpecies().getVertebrates().contains(sp)) {
ensemblHost = host + "/" + ensemblRelease + "/" + getPhylo(sp);
}
String bacteriaCollectionPath = "";
if (configuration.getSpecies().getBacteria().contains(sp)) {
// WARN: assuming there's just one assembly
bacteriaCollectionPath = sp.getAssemblies().get(0).getEnsemblCollection() + "/";
}
// Ensembl now leaves several GTF files in the FTP folder, so we need to build a more precise URL
// to download the correct GTF file.
String version = ensemblRelease.split("-")[1];
String url = ensemblHost + "/gtf/" + bacteriaCollectionPath + spShortName + "/*" + version + ".gtf.gz";
String fileName = geneFolder.resolve(spShortName + ".gtf.gz").toString();
downloadFile(url, fileName);
downloadedUrls.add(url);
url = ensemblHost + "/fasta/" + bacteriaCollectionPath + spShortName + "/pep/*.pep.all.fa.gz";
fileName = geneFolder.resolve(spShortName + ".pep.all.fa.gz").toString();
downloadFile(url, fileName);
downloadedUrls.add(url);
url = ensemblHost + "/fasta/" + bacteriaCollectionPath + spShortName + "/cdna/*.cdna.all.fa.gz";
fileName = geneFolder.resolve(spShortName + ".cdna.all.fa.gz").toString();
downloadFile(url, fileName);
downloadedUrls.add(url);
url = ensemblHost + "/regulation/" + spShortName + "/MotifFeatures.gff.gz";
Path outputFile = geneFolder.resolve("MotifFeatures.gff.gz");
downloadFile(url, outputFile.toString());
downloadedUrls.add(url);
saveVersionData(EtlCommons.GENE_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls,
geneFolder.resolve("ensemblCoreVersion.json"));
}
private void downloadGeneUniprotXref(Species sp, Path geneFolder) throws IOException, InterruptedException {
logger.info("Downloading UniProt ID mapping ...");
if (GENE_UNIPROT_XREF_FILES.containsKey(sp.getScientificName())) {
String geneGtfUrl = configuration.getDownload().getGeneUniprotXref().getHost() + "/"
+ GENE_UNIPROT_XREF_FILES.get(sp.getScientificName());
downloadFile(geneGtfUrl, geneFolder.resolve("idmapping_selected.tab.gz").toString());
downloadFile(getUniProtReleaseNotesUrl(), geneFolder.resolve("uniprotRelnotes.txt").toString());
saveVersionData(EtlCommons.GENE_DATA, UNIPROT_NAME,
getUniProtRelease(geneFolder.resolve("uniprotRelnotes.txt").toString()), getTimeStamp(),
Collections.singletonList(geneGtfUrl), geneFolder.resolve("uniprotXrefVersion.json"));
}
}
private String getUniProtRelease(String relnotesFilename) {
Path path = Paths.get(relnotesFilename);
Files.exists(path);
try {
// The first line at the relnotes.txt file contains the UniProt release
BufferedReader reader = Files.newBufferedReader(path, Charset.defaultCharset());
String release = reader.readLine().split(" ")[2];
reader.close();
return release;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private String getUniProtReleaseNotesUrl() {
return URI.create(configuration.getDownload().getGeneUniprotXref().getHost()).resolve("../../../").toString()
+ "/relnotes.txt";
}
private void downloadGeneExpressionAtlas() throws IOException, InterruptedException {
logger.info("Downloading gene expression atlas ...");
Path expression = common.resolve("expression");
if (!Files.exists(expression)) {
makeDir(expression);
String geneGtfUrl = configuration.getDownload().getGeneExpressionAtlas().getHost();
downloadFile(geneGtfUrl, expression.resolve("allgenes_updown_in_organism_part.tab.gz").toString());
saveVersionData(EtlCommons.GENE_DATA, GENE_EXPRESSION_ATLAS_NAME, getGeneExpressionAtlasVersion(), getTimeStamp(),
Collections.singletonList(geneGtfUrl), expression.resolve("geneExpressionAtlasVersion.json"));
}
}
private String getGeneExpressionAtlasVersion() {
return FilenameUtils.getBaseName(configuration.getDownload().getGeneExpressionAtlas().getHost())
.split("_")[5].replace(".tab", "");
}
private void downloadGeneDiseaseAnnotation(Path geneFolder) throws IOException, InterruptedException {
logger.info("Downloading gene disease annotation ...");
String host = configuration.getDownload().getHpo().getHost();
String fileName = StringUtils.substringAfterLast(host, "/");
downloadFile(host, geneFolder.resolve(fileName).toString());
saveVersionData(EtlCommons.GENE_DATA, HPO_NAME, null, getTimeStamp(), Collections.singletonList(host),
geneFolder.resolve("hpoVersion.json"));
host = configuration.getDownload().getDisgenet().getHost();
String readme = configuration.getDownload().getDisgenetReadme().getHost();
fileName = StringUtils.substringAfterLast(host, "/");
downloadFile(host, geneFolder.resolve(fileName).toString());
downloadFile(readme, geneFolder.resolve("disgenetReadme.txt").toString());
saveVersionData(EtlCommons.GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME,
getDisgenetVersion(geneFolder.resolve("disgenetReadme.txt")), getTimeStamp(),
Collections.singletonList(host), geneFolder.resolve("disgenetVersion.json"));
}
private String getDisgenetVersion(Path path) {
Files.exists(path);
try {
BufferedReader reader = Files.newBufferedReader(path, Charset.defaultCharset());
String line = reader.readLine();
// There should be a line in the README.txt containing the version.
// e.g. The files in the current directory contain the data corresponding to the latest release (version 4.0, April 2016). ...
while (line != null) {
if (line.contains("(version")) {
String version = line.split("\\(")[1].split("\\)")[0];
reader.close();
return version;
}
line = reader.readLine();
}
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private void runGeneExtraInfo(Species sp, String assembly, Path geneFolder) throws IOException, InterruptedException {
logger.info("Downloading gene extra info ...");
String geneExtraInfoLogFile = geneFolder.resolve("gene_extra_info.log").toString();
List<String> args = new ArrayList<>();
if (sp.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) {
args.addAll(Arrays.asList("--species", sp.getScientificName(), "--outdir", geneFolder.toAbsolutePath().toString(),
"--ensembl-libs", configuration.getDownload().getEnsembl().getLibs()
.replace("79", "75")));
} else {
args.addAll(Arrays.asList("--species", sp.getScientificName(), "--outdir", geneFolder.toAbsolutePath().toString(),
"--ensembl-libs", configuration.getDownload().getEnsembl().getLibs()));
}
if (!configuration.getSpecies().getVertebrates().contains(species)
&& !species.getScientificName().equals("Drosophila melanogaster")) {
args.add("--phylo");
args.add("no-vertebrate");
}
// run gene_extra_info.pl
boolean geneExtraInfoDownloaded = EtlCommons.runCommandLineProcess(ensemblScriptsFolder,
"./gene_extra_info.pl",
args,
geneExtraInfoLogFile);
// check output
if (geneExtraInfoDownloaded) {
logger.info("Gene extra files created OK");
} else {
logger.error("Gene extra info for " + sp.getScientificName() + " cannot be downloaded");
}
}
private void downloadVariation(Species sp, String shortName, Path spFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading variation information ...");
Path variationFolder = spFolder.resolve("variation");
makeDir(variationFolder);
String variationUrl = host + "/" + ensemblRelease;
if (!configuration.getSpecies().getVertebrates().contains(sp)) {
variationUrl = host + "/" + ensemblRelease + "/" + getPhylo(sp);
}
variationUrl = variationUrl + "/mysql/" + shortName + "_variation_" + ensemblVersion;
List<String> downloadedUrls = new ArrayList<>(VARIATION_FILES.length);
for (String variationFile : VARIATION_FILES) {
Path outputFile = variationFolder.resolve(variationFile);
downloadFile(variationUrl + "/" + variationFile, outputFile.toString());
downloadedUrls.add(variationUrl + "/" + variationFile);
}
saveVersionData(EtlCommons.VARIATION_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls,
variationFolder.resolve("ensemblVariationVersion.json"));
}
private void downloadRegulation(Species species, String shortName, String assembly, Path speciesFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading regulation information ...");
Path regulationFolder = speciesFolder.resolve("regulation");
makeDir(regulationFolder);
// Downloading Ensembl Regulation
String regulationUrl = host + "/" + ensemblRelease;
if (!configuration.getSpecies().getVertebrates().contains(species)) {
regulationUrl = host + "/" + ensemblRelease + "/" + getPhylo(species);
}
regulationUrl = regulationUrl + "/regulation/" + shortName;
List<String> downloadedUrls = new ArrayList<>(REGULATION_FILES.length);
for (String regulationFile : REGULATION_FILES) {
Path outputFile = regulationFolder.resolve(regulationFile);
downloadFile(regulationUrl + "/" + regulationFile, outputFile.toString());
downloadedUrls.add(regulationUrl + "/" + regulationFile);
}
saveVersionData(EtlCommons.REGULATION_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls,
regulationFolder.resolve("ensemblRegulationVersion.json"));
// Downloading miRNA info
String url;
Path mirbaseFolder = common.resolve("mirbase");
if (!Files.exists(mirbaseFolder)) {
makeDir(mirbaseFolder);
downloadedUrls = new ArrayList<>(2);
url = configuration.getDownload().getMirbase().getHost() + "/miRNA.xls.gz";
downloadFile(url, mirbaseFolder.resolve("miRNA.xls.gz").toString());
downloadedUrls.add(url);
url = configuration.getDownload().getMirbase().getHost() + "/aliases.txt.gz";
downloadFile(url, mirbaseFolder.resolve("aliases.txt.gz").toString());
downloadedUrls.add(url);
String readmeUrl = configuration.getDownload().getMirbaseReadme().getHost();
downloadFile(readmeUrl, mirbaseFolder.resolve("mirbaseReadme.txt").toString());
saveVersionData(EtlCommons.REGULATION_DATA, MIRBASE_NAME,
getLine(mirbaseFolder.resolve("mirbaseReadme.txt"), 1), getTimeStamp(),
Collections.singletonList(url), mirbaseFolder.resolve("mirbaseVersion.json"));
}
if (species.getScientificName().equals("Homo sapiens")) {
if (assembly.equalsIgnoreCase("GRCh37")) {
url = configuration.getDownload().getTargetScan().getHost() + "/hg19/database/targetScanS.txt.gz";
downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString());
String readmeUrl = configuration.getDownload().getTargetScan().getHost() + "/hg19/database/README.txt";
downloadFile(readmeUrl, regulationFolder.resolve("targetScanReadme.txt").toString());
saveVersionData(EtlCommons.REGULATION_DATA, TARGETSCAN_NAME, null, getTimeStamp(),
Collections.singletonList(url), regulationFolder.resolve("targetScanVersion.json"));
url = configuration.getDownload().getMiRTarBase().getHost() + "/hsa_MTI.xls";
downloadFile(url, regulationFolder.resolve("hsa_MTI.xls").toString());
saveVersionData(EtlCommons.REGULATION_DATA, MIRTARBASE_NAME, url.split("/")[5], getTimeStamp(),
Collections.singletonList(url), regulationFolder.resolve("miRTarBaseVersion.json"));
}
}
if (species.getScientificName().equals("Mus musculus")) {
url = configuration.getDownload().getTargetScan().getHost() + "/mm9/database/targetScanS.txt.gz";
downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString());
String readmeUrl = configuration.getDownload().getTargetScan().getHost() + "/mm9/database/README.txt";
downloadFile(readmeUrl, regulationFolder.resolve("targetScanReadme.txt").toString());
saveVersionData(EtlCommons.REGULATION_DATA, TARGETSCAN_NAME, null, getTimeStamp(),
Collections.singletonList(url), regulationFolder.resolve("targetScanVersion.json"));
url = configuration.getDownload().getMiRTarBase().getHost() + "/mmu_MTI.xls";
downloadFile(url, regulationFolder.resolve("mmu_MTI.xls").toString());
saveVersionData(EtlCommons.REGULATION_DATA, MIRTARBASE_NAME, url.split("/")[5], getTimeStamp(),
Collections.singletonList(url),
regulationFolder.resolve("miRTarBaseVersion.json"));
}
}
private String getLine(Path readmePath, int lineNumber) {
Files.exists(readmePath);
try {
BufferedReader reader = Files.newBufferedReader(readmePath, Charset.defaultCharset());
String line = null;
for (int i = 0; i < lineNumber; i++) {
line = reader.readLine();
}
reader.close();
return line;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/**
* This method downloads UniProt, IntAct and Interpro data from EMBL-EBI.
*
* @throws IOException
* @throws InterruptedException
*/
private void downloadProtein() throws IOException, InterruptedException {
logger.info("Downloading protein information ...");
Path proteinFolder = common.resolve("protein");
if (!Files.exists(proteinFolder)) {
makeDir(proteinFolder);
String url = configuration.getDownload().getUniprot().getHost();
downloadFile(url, proteinFolder.resolve("uniprot_sprot.xml.gz").toString());
String relNotesUrl = configuration.getDownload().getUniprotRelNotes().getHost();
downloadFile(relNotesUrl, proteinFolder.resolve("uniprotRelnotes.txt").toString());
saveVersionData(EtlCommons.PROTEIN_DATA, UNIPROT_NAME, getLine(proteinFolder.resolve("uniprotRelnotes.txt"), 1),
getTimeStamp(), Collections.singletonList(url), proteinFolder.resolve("uniprotVersion.json"));
makeDir(proteinFolder.resolve("uniprot_chunks"));
splitUniprot(proteinFolder.resolve("uniprot_sprot.xml.gz"), proteinFolder.resolve("uniprot_chunks"));
url = configuration.getDownload().getIntact().getHost();
downloadFile(url, proteinFolder.resolve("intact.txt").toString());
saveVersionData(EtlCommons.PROTEIN_DATA, INTACT_NAME, null, getTimeStamp(), Collections.singletonList(url),
proteinFolder.resolve("intactVersion.json"));
url = configuration.getDownload().getInterpro().getHost();
downloadFile(url, proteinFolder.resolve("protein2ipr.dat.gz").toString());
relNotesUrl = configuration.getDownload().getInterproRelNotes().getHost();
downloadFile(relNotesUrl, proteinFolder.resolve("interproRelnotes.txt").toString());
saveVersionData(EtlCommons.PROTEIN_DATA, INTERPRO_NAME, getLine(proteinFolder.resolve("interproRelnotes.txt"), 5),
getTimeStamp(), Collections.singletonList(url), proteinFolder.resolve("interproVersion.json"));
} else {
logger.info("Protein: skipping this since it is already downloaded. Delete 'protein' folder to force download");
}
}
private void splitUniprot(Path uniprotFilePath, Path splitOutdirPath) throws IOException {
BufferedReader br = FileUtils.newBufferedReader(uniprotFilePath);
PrintWriter pw = null;
StringBuilder header = new StringBuilder();
boolean beforeEntry = true;
boolean inEntry = false;
int count = 0;
int chunk = 0;
String line;
while ((line = br.readLine()) != null) {
if (line.trim().startsWith("<entry ")) {
inEntry = true;
beforeEntry = false;
if (count % 10000 == 0) {
pw = new PrintWriter(new FileOutputStream(splitOutdirPath.resolve("chunk_" + chunk + ".xml").toFile()));
pw.println(header.toString().trim());
}
count++;
}
if (beforeEntry) {
header.append(line).append("\n");
}
if (inEntry) {
pw.println(line);
}
if (line.trim().startsWith("</entry>")) {
inEntry = false;
if (count % 10000 == 0) {
pw.print("</uniprot>");
pw.close();
chunk++;
}
}
}
pw.print("</uniprot>");
pw.close();
br.close();
}
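// Note on splitUniprot above: it writes chunk_0.xml, chunk_1.xml, ... into the given output
// directory, each holding up to 10000 <entry> elements, prefixed with the XML header captured
// before the first entry and terminated with a closing </uniprot> tag.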
/**
* This method downloads both PhastCons and PhyloP data from UCSC for Human and Mouse species.
*
* @param species The Species object to download the data
* @param assembly The assembly required
* @param speciesFolder Output folder to download the data
* @throws IOException
* @throws InterruptedException
*/
private void downloadConservation(Species species, String assembly, Path speciesFolder)
throws IOException, InterruptedException {
logger.info("Downloading conservation information ...");
Path conservationFolder = speciesFolder.resolve("conservation");
if (species.getScientificName().equals("Homo sapiens")) {
makeDir(conservationFolder);
makeDir(conservationFolder.resolve("phastCons"));
makeDir(conservationFolder.resolve("phylop"));
makeDir(conservationFolder.resolve("gerp"));
String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
"15", "16", "17", "18", "19", "20", "21", "22", "X", "Y", "M", };
if (assembly.equalsIgnoreCase("GRCh37")) {
logger.debug("Downloading GERP++ ...");
downloadFile(configuration.getDownload().getGerp().getHost(),
conservationFolder.resolve(EtlCommons.GERP_SUBDIRECTORY + "/" + EtlCommons.GERP_FILE).toAbsolutePath().toString());
saveVersionData(EtlCommons.CONSERVATION_DATA, GERP_NAME, null, getTimeStamp(),
Collections.singletonList(configuration.getDownload().getGerp().getHost()),
conservationFolder.resolve("gerpVersion.json"));
String url = configuration.getDownload().getConservation().getHost() + "/hg19";
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons46way/primates/chr" + chromosomes[i] + ".phastCons46way.primates.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons46way.primates.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP46way/primates/chr" + chromosomes[i] + ".phyloP46way.primate.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP46way.primate.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phyloPVersion.json"));
}
if (assembly.equalsIgnoreCase("GRCh38")) {
String url = configuration.getDownload().getConservation().getHost() + "/hg38";
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons100way/hg38.100way.phastCons/chr" + chromosomes[i]
+ ".phastCons100way.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons100way.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP100way/hg38.100way.phyloP100way/chr" + chromosomes[i] + ".phyloP100way.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP100way.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phastConsVersion.json"));
// String phastConsUrl = url + "/phastCons7way/hg38.phastCons100way.wigFix.gz";
// Path outFile = conservationFolder.resolve("phastCons").resolve("hg38.phastCons100way.wigFix.gz");
// downloadFile(phastConsUrl, outFile.toString());
//
// String phyloPUrl = url + "/phyloP7way/hg38.phyloP100way.wigFix.gz";
// outFile = conservationFolder.resolve("phylop").resolve("hg38.phyloP100way.wigFix.gz");
// downloadFile(phyloPUrl, outFile.toString());
}
}
if (species.getScientificName().equals("Mus musculus")) {
makeDir(conservationFolder);
makeDir(conservationFolder.resolve("phastCons"));
makeDir(conservationFolder.resolve("phylop"));
String url = configuration.getDownload().getConservation().getHost() + "/mm10";
String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
"15", "16", "17", "18", "19", "X", "Y", "M", };
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons60way/mm10.60way.phastCons/chr" + chromosomes[i] + ".phastCons60way.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons60way.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP60way/mm10.60way.phyloP60way/chr" + chromosomes[i] + ".phyloP60way.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP60way.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phastConsVersion.json"));
}
}
private void downloadClinical(Species species, Path speciesFolder)
throws IOException, InterruptedException {
if (species.getScientificName().equals("Homo sapiens")) {
logger.info("Downloading clinical information ...");
Path clinicalFolder = speciesFolder.resolve("clinical");
makeDir(clinicalFolder);
List<String> clinvarUrls = new ArrayList<>(3);
String url = configuration.getDownload().getClinvar().getHost();
downloadFile(url, clinicalFolder.resolve("ClinVar.xml.gz").toString());
clinvarUrls.add(url);
url = configuration.getDownload().getClinvarEfoTerms().getHost();
downloadFile(url, clinicalFolder.resolve("ClinVar_Traits_EFO_Names.csv").toString());
clinvarUrls.add(url);
url = configuration.getDownload().getClinvarSummary().getHost();
downloadFile(url, clinicalFolder.resolve("variant_summary.txt.gz").toString());
clinvarUrls.add(url);
saveVersionData(EtlCommons.CLINICAL_DATA, CLINVAR_NAME, getClinVarVersion(), getTimeStamp(), clinvarUrls,
clinicalFolder.resolve("clinvarVersion.json"));
url = configuration.getDownload().getGwasCatalog().getHost();
downloadFile(url, clinicalFolder.resolve("gwas_catalog.tsv").toString());
saveVersionData(EtlCommons.CLINICAL_DATA, GWAS_NAME, getGwasVersion(), getTimeStamp(), Collections.singletonList(url),
clinicalFolder.resolve("gwasVersion.json"));
// List<String> dbsnpUrls = new ArrayList<>(2);
// url = configuration.getDownload().getDbsnp().getHost();
// downloadFile(url, clinicalFolder.resolve("All.vcf.gz").toString());
// dbsnpUrls.add(url);
//
// url = url + ".tbi";
// downloadFile(url, clinicalFolder.resolve("All.vcf.gz.tbi").toString());
// dbsnpUrls.add(url);
// saveVersionData(EtlCommons.CLINICAL_DATA, DBSNP_NAME, getDbsnpVersion(), getTimeStamp(), dbsnpUrls,
// clinicalFolder.resolve("dbsnpVersion.json"));
}
}
// private String getDbsnpVersion() {
// // ftp://ftp.ncbi.nih.gov/snp/organisms/human_9606_b144_GRCh37p13/VCF/All_20150605.vcf.gz
// return configuration.getDownload().getDbsnp().getHost().split("_")[2];
// }
private String getGwasVersion() {
// ftp://ftp.ebi.ac.uk/pub/databases/gwas/releases/2016/05/10/gwas-catalog-associations.tsv
String[] pathParts = configuration.getDownload().getGwasCatalog().getHost().split("/");
return pathParts[9] + "/" + pathParts[8] + "/" + pathParts[7];
}
private String getClinVarVersion() {
// ftp://ftp.ncbi.nlm.nih.gov/pub/clinvar/xml/ClinVarFullRelease_2015-12.xml.gz
return configuration.getDownload().getClinvar().getHost().split("_")[1].split("\\.")[0];
}
private void downloadGeneDiseaseAssociation(Species species, Path speciesFolder) throws IOException, InterruptedException {
if (species.getScientificName().equals("Homo sapiens")) {
logger.info("Downloading gene to disease information ...");
Path gene2diseaseFolder = speciesFolder.resolve("gene_disease_association");
makeDir(gene2diseaseFolder);
// Downloads DisGeNET
String url = configuration.getDownload().getDisgenet().getHost();
String readmeUrl = configuration.getDownload().getDisgenetReadme().getHost();
downloadFile(url, gene2diseaseFolder.resolve("all_gene_disease_associations.txt.gz").toString());
downloadFile(readmeUrl, gene2diseaseFolder.resolve("disgenetReadme.txt").toString());
saveVersionData(EtlCommons.GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME,
getDisgenetVersion(gene2diseaseFolder.resolve("disgenetReadme.txt")), getTimeStamp(),
Collections.singletonList(url), gene2diseaseFolder.resolve("disgenetVersion.json"));
// Downloads HPO
url = configuration.getDownload().getHpo().getHost();
downloadFile(url, gene2diseaseFolder.resolve("ALL_SOURCES_ALL_FREQUENCIES_diseases_to_genes_to_phenotypes.txt").toString());
saveVersionData(EtlCommons.GENE_DISEASE_ASSOCIATION_DATA, HPO_NAME, null, getTimeStamp(), Collections.singletonList(url),
gene2diseaseFolder.resolve("hpoVersion.json"));
}
}
private void downloadCaddScores(Species species, String assembly, Path speciesFolder) throws IOException, InterruptedException {
if (species.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) {
logger.info("Downloading CADD scores information ...");
Path variationFunctionalScoreFolder = speciesFolder.resolve("variation_functional_score");
makeDir(variationFunctionalScoreFolder);
// Downloads CADD scores
String url = configuration.getDownload().getCadd().getHost();
downloadFile(url, variationFunctionalScoreFolder.resolve("whole_genome_SNVs.tsv.gz").toString());
saveVersionData(EtlCommons.VARIATION_FUNCTIONAL_SCORE_DATA, CADD_NAME, url.split("/")[5], getTimeStamp(),
Collections.singletonList(url), variationFunctionalScoreFolder.resolve("caddVersion.json"));
}
}
private void downloadReactomeData() throws IOException, InterruptedException {
Path proteinFolder = common.resolve("protein");
String url = configuration.getDownload().getReactome().getHost();
downloadFile(url, proteinFolder.resolve("biopax.zip").toString());
saveVersionData(EtlCommons.PROTEIN_DATA, REACTOME_NAME, null, getTimeStamp(), Collections.singletonList(url),
proteinFolder.resolve("reactomeVersion.json"));
}
private void downloadRepeats(Species species, String assembly, Path speciesFolder)
throws IOException, InterruptedException {
if (species.getScientificName().equals("Homo sapiens")) {
logger.info("Downloading repeats data ...");
Path repeatsFolder = speciesFolder.resolve(EtlCommons.REPEATS_FOLDER);
makeDir(repeatsFolder);
String pathParam;
if (assembly.equalsIgnoreCase("grch37")) {
pathParam = "hg19";
} else if (assembly.equalsIgnoreCase("grch38")) {
pathParam = "hg38";
} else {
logger.error("Please provide a valid human assembly {GRCh37, GRCh38)");
throw new ParameterException("Assembly '" + assembly + "' is not valid. Please provide a valid human "
+ "assembly {GRCh37, GRCh38)");
}
// Download tandem repeat finder
String url = configuration.getDownload().getSimpleRepeats().getHost() + "/" + pathParam
+ "/database/simpleRepeat.txt.gz";
downloadFile(url, repeatsFolder.resolve(EtlCommons.TRF_FILE).toString());
saveVersionData(EtlCommons.REPEATS_DATA, TRF_NAME, null, getTimeStamp(), Collections.singletonList(url),
repeatsFolder.resolve(EtlCommons.TRF_VERSION_FILE));
// Download genomic super duplications
url = configuration.getDownload().getGenomicSuperDups().getHost() + "/" + pathParam
+ "/database/genomicSuperDups.txt.gz";
downloadFile(url, repeatsFolder.resolve(EtlCommons.GSD_FILE).toString());
saveVersionData(EtlCommons.REPEATS_DATA, GSD_NAME, null, getTimeStamp(), Collections.singletonList(url),
repeatsFolder.resolve(EtlCommons.GSD_VERSION_FILE));
// Download WindowMasker
if (!pathParam.equalsIgnoreCase("hg19")) {
url = configuration.getDownload().getWindowMasker().getHost() + "/" + pathParam
+ "/database/windowmaskerSdust.txt.gz";
downloadFile(url, repeatsFolder.resolve(EtlCommons.WM_FILE).toString());
saveVersionData(EtlCommons.REPEATS_DATA, WM_NAME, null, getTimeStamp(), Collections.singletonList(url),
repeatsFolder.resolve(EtlCommons.WM_VERSION_FILE));
}
}
}
private void downloadFile(String url, String outputFileName) throws IOException, InterruptedException {
List<String> wgetArgs = Arrays.asList("--tries=10", url, "-O", outputFileName, "-o", outputFileName + ".log");
boolean downloaded = EtlCommons.runCommandLineProcess(null, "wget", wgetArgs, null);
if (downloaded) {
logger.info(outputFileName + " created OK");
} else {
logger.warn(url + " cannot be downloaded");
}
}
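// For reference, the argument list assembled in downloadFile is equivalent to running
//   wget --tries=10 <url> -O <outputFileName> -o <outputFileName>.log
// so every download leaves a per-file wget log next to the downloaded file.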
}
|
apache-2.0
|
lhfei/hadoop-in-action
|
src/main/java/cn/lhfei/hadoop/hbase/crud/DeleteApp.java
|
3599
|
/*
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.lhfei.hadoop.hbase.crud;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import cn.lhfei.hadoop.hbase.common.HBaseHelper;
/**
* @version 1.0.0
*
* @author Hefei Li
*
* @since Dec 15, 2015
*/
public class DeleteApp {
private static final Logger log = LoggerFactory.getLogger(DeleteApp.class);
public static void main(String[] args) {
try {
Configuration conf = HBaseConfiguration.create();
HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropTable("testtable");
helper.createTable("testtable", 100, "colfam1", "colfam2");
helper.put("testtable",
new String[] { "row1" },
new String[] { "colfam1", "colfam2" },
new String[] { "qual1", "qual1", "qual2", "qual2", "qual3", "qual3" },
new long[] { 1, 2, 3, 4, 5, 6 },
new String[] { "val1", "val1", "val2", "val2", "val3", "val3" });
log.info("Before delete call...");
helper.dump("testtable", new String[]{ "row1" }, null, null);
Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(TableName.valueOf("testtable"));
// DeleteExample
Delete delete = new Delete(Bytes.toBytes("row1")); // DeleteExample-1-NewDel Create delete with specific row.
delete.setTimestamp(1); // DeleteExample-2-SetTS Set timestamp for row deletes.
delete.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); // DeleteExample-3-DelColNoTS Delete the latest version only in one column.
delete.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual3"), 3); // DeleteExample-4-DelColTS Delete specific version in one column.
delete.addColumns(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); // DeleteExample-5-DelColsNoTS Delete all versions in one column.
delete.addColumns(Bytes.toBytes("colfam1"), Bytes.toBytes("qual3"), 2); // DeleteExample-6-DelColsTS Delete the given and all older versions in one column.
delete.addFamily(Bytes.toBytes("colfam1")); // DeleteExample-7-AddCol Delete entire family, all columns and versions.
delete.addFamily(Bytes.toBytes("colfam1"), 3); // DeleteExample-8-AddCol Delete the given and all older versions in the entire column family, i.e., from all columns therein.
table.delete(delete); // DeleteExample-9-DoDel Delete the data from the HBase table.
// ^^ DeleteExample
table.close();
connection.close();
log.info("After delete call...");
helper.dump("testtable", new String[] { "row1" }, null, null);
helper.close();
} catch (IOException e) {
log.error(e.getMessage(), e);
}
}
}
|
apache-2.0
|
hongtaocai/code_interview
|
cpp/FindMinimuminRotatedSortedArrayII.cpp
|
831
|
class Solution {
public:
int findMin(vector<int> &num) {
return findMin(num, 0, num.size()-1);
}
inline int min(int a, int b) {
if (a > b) {
return b;
}
return a;
}
int findMin(vector<int> &num, int start, int end) {
if(start == end) {
return num[start];
}
if(start + 1 == end) {
if(num[start] < num[end]) {
return num[start];
}
return num[end];
}
int mid = ((end-start)>>1) + start;
if(num[start] < num[mid]) {
return min(num[start], findMin(num, mid, end));
}
if(num[mid] < num[end]) {
return findMin(num, start, mid);
}
return min(findMin(num, mid+1, end), findMin(num, start, mid));
}
};
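// Rough usage sketch (not part of the original solution; the driver below is an assumption):
//
// #include <iostream>
// #include <vector>
// using namespace std;
// int main() {
//     vector<int> num = {4, 5, 6, 7, 0, 1, 2};
//     cout << Solution().findMin(num) << endl; // prints 0
//     // With duplicates (e.g. {1, 1, 1, 0, 1}) both halves may have to be searched,
//     // so the worst case degrades from O(log n) to O(n).
//     return 0;
// }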
|
apache-2.0
|
SU-ECE-17-7/ibeis
|
ibeis/scripts/getshark_old.py
|
22458
|
import utool as ut  # assumed module-level import: `ut` is the standard utool alias used throughout ibeis
def get_injured_sharks():
"""
>>> from ibeis.scripts.getshark import * # NOQA
"""
import requests
url = 'http://www.whaleshark.org/getKeywordImages.jsp'
resp = requests.get(url)
assert resp.status_code == 200
keywords = resp.json()['keywords']
key_list = ut.take_column(keywords, 'indexName')
key_to_nice = {k['indexName']: k['readableName'] for k in keywords}
injury_patterns = [
'injury', 'net', 'hook', 'trunc', 'damage', 'scar', 'nicks', 'bite',
]
injury_keys = [key for key in key_list if any([pat in key for pat in injury_patterns])]
noninjury_keys = ut.setdiff(key_list, injury_keys)
injury_nice = ut.lmap(lambda k: key_to_nice[k], injury_keys) # NOQA
noninjury_nice = ut.lmap(lambda k: key_to_nice[k], noninjury_keys) # NOQA
key_list = injury_keys
keyed_images = {}
for key in ut.ProgIter(key_list, lbl='reading index', bs=True):
key_url = url + '?indexName={indexName}'.format(indexName=key)
key_resp = requests.get(key_url)
assert key_resp.status_code == 200
key_imgs = key_resp.json()['images']
keyed_images[key] = key_imgs
key_hist = {key: len(imgs) for key, imgs in keyed_images.items()}
key_hist = ut.sort_dict(key_hist, ut.identity)
print(ut.repr3(key_hist))
nice_key_hist = ut.map_dict_keys(lambda k: key_to_nice[k], key_hist)
nice_key_hist = ut.sort_dict(nice_key_hist, ut.identity)
print(ut.repr3(nice_key_hist))
key_to_urls = {key: ut.take_column(vals, 'url') for key, vals in keyed_images.items()}
overlaps = {}
import itertools
overlap_img_list = []
for k1, k2 in itertools.combinations(key_to_urls.keys(), 2):
overlap_imgs = ut.isect(key_to_urls[k1], key_to_urls[k2])
num_overlap = len(overlap_imgs)
overlaps[(k1, k2)] = num_overlap
overlaps[(k1, k1)] = len(key_to_urls[k1])
if num_overlap > 0:
#print('[%s][%s], overlap=%r' % (k1, k2, num_overlap))
overlap_img_list.extend(overlap_imgs)
all_img_urls = list(set(ut.flatten(key_to_urls.values())))
num_all = len(all_img_urls) # NOQA
print('num_all = %r' % (num_all,))
# Determine super-categories
categories = ['nicks', 'scar', 'trunc']
# Force these keys into these categories
key_to_cat = {'scarbite': 'other_injury'}
cat_to_keys = ut.ddict(list)
for key in key_to_urls.keys():
flag = 1
if key in key_to_cat:
cat = key_to_cat[key]
cat_to_keys[cat].append(key)
continue
for cat in categories:
if cat in key:
cat_to_keys[cat].append(key)
flag = 0
if flag:
cat = 'other_injury'
cat_to_keys[cat].append(key)
cat_urls = ut.ddict(list)
for cat, keys in cat_to_keys.items():
for key in keys:
cat_urls[cat].extend(key_to_urls[key])
cat_hist = {}
for cat in list(cat_urls.keys()):
cat_urls[cat] = list(set(cat_urls[cat]))
cat_hist[cat] = len(cat_urls[cat])
print(ut.repr3(cat_to_keys))
print(ut.repr3(cat_hist))
key_to_cat = dict([(val, key) for key, vals in cat_to_keys.items() for val in vals])
#ingestset = {
# '__class__': 'ImageSet',
# 'images': ut.ddict(dict)
#}
#for key, key_imgs in keyed_images.items():
# for imgdict in key_imgs:
# url = imgdict['url']
# encid = imgdict['correspondingEncounterNumber']
# # Make structure
# encdict = encounters[encid]
# encdict['__class__'] = 'Encounter'
# imgdict = ut.delete_keys(imgdict.copy(), ['correspondingEncounterNumber'])
# imgdict['__class__'] = 'Image'
# cat = key_to_cat[key]
# annotdict = {'relative_bbox': [.01, .01, .98, .98], 'tags': [cat, key]}
# annotdict['__class__'] = 'Annotation'
# # Ensure structures exist
# encdict['images'] = encdict.get('images', [])
# imgdict['annots'] = imgdict.get('annots', [])
# # Add an image to this encounter
# encdict['images'].append(imgdict)
# # Add an annotation to this image
# imgdict['annots'].append(annotdict)
##http://springbreak.wildbook.org/rest/org.ecocean.Encounter/1111
#get_enc_url = 'http://www.whaleshark.org/rest/org.ecocean.Encounter/%s' % (encid,)
#resp = requests.get(get_enc_url)
#print(ut.repr3(encdict))
#print(ut.repr3(encounters))
# Download the files to the local disk
#fpath_list =
all_urls = ut.unique(ut.take_column(
ut.flatten(
ut.dict_subset(keyed_images, ut.flatten(cat_to_keys.values())).values()
), 'url'))
dldir = ut.truepath('~/tmpsharks')
from os.path import commonprefix, basename # NOQA
prefix = commonprefix(all_urls)
suffix_list = [url_[len(prefix):] for url_ in all_urls]
fname_list = [suffix.replace('/', '--') for suffix in suffix_list]
fpath_list = []
for url, fname in ut.ProgIter(zip(all_urls, fname_list), lbl='downloading imgs', freq=1):
fpath = ut.grab_file_url(url, download_dir=dldir, fname=fname, verbose=False)
fpath_list.append(fpath)
# Make sure we keep orig info
#url_to_keys = ut.ddict(list)
url_to_info = ut.ddict(dict)
for key, imgdict_list in keyed_images.items():
for imgdict in imgdict_list:
url = imgdict['url']
info = url_to_info[url]
for k, v in imgdict.items():
info[k] = info.get(k, [])
info[k].append(v)
info['keys'] = info.get('keys', [])
info['keys'].append(key)
#url_to_keys[url].append(key)
info_list = ut.take(url_to_info, all_urls)
for info in info_list:
if len(set(info['correspondingEncounterNumber'])) > 1:
assert False, 'url with two different encounter nums'
# Combine tags from duplicate images (detected by file content hash)
hashid_list = [ut.get_file_uuid(fpath_, stride=8) for fpath_ in ut.ProgIter(fpath_list, bs=True)]
groupxs = ut.group_indices(hashid_list)[1]
# Group properties by duplicate images
#groupxs = [g for g in groupxs if len(g) > 1]
fpath_list_ = ut.take_column(ut.apply_grouping(fpath_list, groupxs), 0)
url_list_ = ut.take_column(ut.apply_grouping(all_urls, groupxs), 0)
info_list_ = [ut.map_dict_vals(ut.flatten, ut.dict_accum(*info_))
for info_ in ut.apply_grouping(info_list, groupxs)]
encid_list_ = [ut.unique(info_['correspondingEncounterNumber'])[0]
for info_ in info_list_]
keys_list_ = [ut.unique(info_['keys']) for info_ in info_list_]
cats_list_ = [ut.unique(ut.take(key_to_cat, keys)) for keys in keys_list_]
clist = ut.ColumnLists({
'gpath': fpath_list_,
'url': url_list_,
'encid': encid_list_,
'key': keys_list_,
'cat': cats_list_,
})
#for info_ in ut.apply_grouping(info_list, groupxs):
# info = ut.dict_accum(*info_)
# info = ut.map_dict_vals(ut.flatten, info)
# x = ut.unique(ut.flatten(ut.dict_accum(*info_)['correspondingEncounterNumber']))
# if len(x) > 1:
# info = info.copy()
# del info['keys']
# print(ut.repr3(info))
flags = ut.lmap(ut.fpath_has_imgext, clist['gpath'])
clist = clist.compress(flags)
import ibeis
ibs = ibeis.opendb('WS_Injury', allow_newdir=True)
gid_list = ibs.add_images(clist['gpath'])
clist['gid'] = gid_list
failed_flags = ut.flag_None_items(clist['gid'])
print('# failed %s' % (sum(failed_flags)),)
passed_flags = ut.not_list(failed_flags)
clist = clist.compress(passed_flags)
ut.assert_all_not_None(clist['gid'])
#ibs.get_image_uris_original(clist['gid'])
ibs.set_image_uris_original(clist['gid'], clist['url'], overwrite=True)
#ut.zipflat(clist['cat'], clist['key'])
if False:
# Can run detection instead
clist['tags'] = ut.zipflat(clist['cat'])
aid_list = ibs.use_images_as_annotations(clist['gid'], adjust_percent=0.01,
tags_list=clist['tags'])
aid_list
import plottool as pt
from ibeis import core_annots
pt.qt4ensure()
#annots = ibs.annots()
#aids = [1, 2]
#ibs.depc_annot.get('hog', aids , 'hog')
#ibs.depc_annot.get('chip', aids, 'img')
for aid in ut.InteractiveIter(ibs.get_valid_aids()):
hogs = ibs.depc_annot.d.get_hog_hog([aid])
chips = ibs.depc_annot.d.get_chips_img([aid])
chip = chips[0]
hogimg = core_annots.make_hog_block_image(hogs[0])
pt.clf()
pt.imshow(hogimg, pnum=(1, 2, 1))
pt.imshow(chip, pnum=(1, 2, 2))
fig = pt.gcf()
fig.show()
fig.canvas.draw()
#print(len(groupxs))
#if False:
#groupxs = ut.find_duplicate_items(ut.lmap(basename, suffix_list)).values()
#print(ut.repr3(ut.apply_grouping(all_urls, groupxs)))
# # FIX
# for fpath, fname in zip(fpath_list, fname_list):
# if ut.checkpath(fpath):
# ut.move(fpath, join(dirname(fpath), fname))
# print('fpath = %r' % (fpath,))
#import ibeis
#from ibeis.dbio import ingest_dataset
#dbdir = ibeis.sysres.lookup_dbdir('WS_ALL')
#self = ingest_dataset.Ingestable2(dbdir)
if False:
# Show overlap matrix
import plottool as pt
import pandas as pd
import numpy as np
dict_ = overlaps
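# Build a symmetric overlap matrix from the pairwise dict: unstack into an
# upper-triangular DataFrame, then add it to its transpose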
s = pd.Series(dict_, index=pd.MultiIndex.from_tuples(overlaps))
df = s.unstack()
lhs, rhs = df.align(df.T)
df = lhs.add(rhs, fill_value=0).fillna(0)
label_texts = df.columns.values
def label_ticks(label_texts):
import plottool as pt
truncated_labels = [repr(lbl[0:100]) for lbl in label_texts]
ax = pt.gca()
ax.set_xticks(list(range(len(label_texts))))
ax.set_xticklabels(truncated_labels)
[lbl.set_rotation(-55) for lbl in ax.get_xticklabels()]
[lbl.set_horizontalalignment('left') for lbl in ax.get_xticklabels()]
#xgrid, ygrid = np.meshgrid(range(len(label_texts)), range(len(label_texts)))
#pt.plot_surface3d(xgrid, ygrid, disjoint_mat)
ax.set_yticks(list(range(len(label_texts))))
ax.set_yticklabels(truncated_labels)
[lbl.set_horizontalalignment('right') for lbl in ax.get_yticklabels()]
[lbl.set_verticalalignment('center') for lbl in ax.get_yticklabels()]
#[lbl.set_rotation(20) for lbl in ax.get_yticklabels()]
#df = df.sort(axis=0)
#df = df.sort(axis=1)
sortx = np.argsort(df.sum(axis=1).values)[::-1]
df = df.take(sortx, axis=0)
df = df.take(sortx, axis=1)
fig = pt.figure(fnum=1)
fig.clf()
mat = df.values.astype(np.int32)
mat[np.diag_indices(len(mat))] = 0
vmax = mat[(1 - np.eye(len(mat))).astype(np.bool)].max()
import matplotlib.colors
norm = matplotlib.colors.Normalize(vmin=0, vmax=vmax, clip=True)
pt.plt.imshow(mat, cmap='hot', norm=norm, interpolation='none')
pt.plt.colorbar()
pt.plt.grid('off')
label_ticks(label_texts)
fig.tight_layout()
#overlap_df = pd.DataFrame.from_dict(overlap_img_list)
class TmpImage(ut.NiceRepr):
pass
from skimage.feature import hog
from skimage import data, color, exposure
import plottool as pt
image2 = color.rgb2gray(data.astronaut()) # NOQA
fpath = './GOPR1120.JPG'
import vtool as vt
for fpath in [fpath]:
"""
http://scikit-image.org/docs/dev/auto_examples/plot_hog.html
"""
image = vt.imread(fpath, grayscale=True)
image = pt.color_funcs.to_base01(image)
fig = pt.figure(fnum=2)
fd, hog_image = hog(image, orientations=8, pixels_per_cell=(16, 16),
cells_per_block=(1, 1), visualise=True)
fig, (ax1, ax2) = pt.plt.subplots(1, 2, figsize=(8, 4), sharex=True, sharey=True)
ax1.axis('off')
ax1.imshow(image, cmap=pt.plt.cm.gray)
ax1.set_title('Input image')
ax1.set_adjustable('box-forced')
# Rescale histogram for better display
hog_image_rescaled = exposure.rescale_intensity(hog_image, in_range=(0, 0.02))
ax2.axis('off')
ax2.imshow(hog_image_rescaled, cmap=pt.plt.cm.gray)
ax2.set_title('Histogram of Oriented Gradients')
ax1.set_adjustable('box-forced')
pt.plt.show()
#for
def detect_sharks(ibs, gids):
#import ibeis
#ibs = ibeis.opendb('WS_ALL')
config = {
'algo' : 'yolo',
'sensitivity' : 0.2,
'config_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg'),
'weight_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.39000.weights'),
'class_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg.classes'),
}
depc = ibs.depc_image
#imgsets = ibs.imagesets(text='Injured Sharks')
#images = ibs.images(imgsets.gids[0])
images = ibs.images(gids)
images = images.compress([ext not in ['.gif'] for ext in images.exts])
gid_list = images.gids
# result is a tuple:
# (score, bbox_list, theta_list, conf_list, class_list)
results_list = depc.get_property('localizations', gid_list, None, config=config)
results_list2 = []
multi_gids = []
failed_gids = []
for gid, res in zip(gid_list, results_list):
score, bbox_list, theta_list, conf_list, class_list = res
if len(bbox_list) == 0:
failed_gids.append(gid)
elif len(bbox_list) == 1:
results_list2.append((gid, bbox_list, theta_list))
elif len(bbox_list) > 1:
multi_gids.append(gid)
idx = conf_list.argmax()
res2 = (gid, bbox_list[idx:idx + 1], theta_list[idx:idx + 1])
results_list2.append(res2)
# Tag problem images only after the loop has populated the lists
#ibs.set_image_imagesettext(failed_gids, ['Fixme'] * len(failed_gids))
ibs.set_image_imagesettext(multi_gids, ['Fixme2'] * len(multi_gids))
ut.dict_hist(([t[1].shape[0] for t in results_list]))
localized_imgs = ibs.images(ut.take_column(results_list2, 0))
assert all([len(a) == 1 for a in localized_imgs.aids])
old_annots = ibs.annots(ut.flatten(localized_imgs.aids))
#old_tags = old_annots.case_tags
# Override old bboxes
import numpy as np
bboxes = np.array(ut.take_column(results_list2, 1))[:, 0, :]
ibs.set_annot_bboxes(old_annots.aids, bboxes)
if False:
import plottool as pt
pt.qt4ensure()
inter = pt.MultiImageInteraction(
ibs.get_image_paths(ut.take_column(results_list2, 0)),
bboxes_list=ut.take_column(results_list2, 1)
)
inter.dump_to_disk('shark_loc', num=50, prefix='shark_loc')
inter.start()
inter = pt.MultiImageInteraction(ibs.get_image_paths(failed_gids))
inter.start()
inter = pt.MultiImageInteraction(ibs.get_image_paths(multi_gids))
inter.start()
def train_part_detector():
"""
Problem:
healthy sharks usually have a mostly whole-body shot
injured sharks usually have a close-up shot.
This distribution of images is likely what the injured-shark net is picking up on.
The goal is to train a detector that looks for things that look
like the distribution of injured sharks.
We will run this on healthy sharks to find the parts of the images
that most resemble the injured examples.
"""
import ibeis
ibs = ibeis.opendb('WS_ALL')
imgset = ibs.imagesets(text='Injured Sharks')
injured_annots = imgset.annots[0] # NOQA
#config = {
# 'dim_size': (224, 224),
# 'resize_dim': 'wh'
#}
from pydarknet import Darknet_YOLO_Detector
data_path = ibs.export_to_xml()
output_path = join(ibs.get_cachedir(), 'training', 'localizer')
ut.ensuredir(output_path)
dark = Darknet_YOLO_Detector()
results = dark.train(data_path, output_path)
del dark
localizer_weight_path, localizer_config_path, localizer_class_path = results
classifier_model_path = ibs.classifier_train()
labeler_model_path = ibs.labeler_train()
output_path = join(ibs.get_cachedir(), 'training', 'detector')
ut.ensuredir(output_path)
ut.copy(localizer_weight_path, join(output_path, 'localizer.weights'))
ut.copy(localizer_config_path, join(output_path, 'localizer.config'))
ut.copy(localizer_class_path, join(output_path, 'localizer.classes'))
ut.copy(classifier_model_path, join(output_path, 'classifier.npy'))
ut.copy(labeler_model_path, join(output_path, 'labeler.npy'))
# ibs.detector_train()
def purge_ensure_one_annot_per_images(ibs):
"""
pip install Pipe
"""
# Purge all but one annotation
images = ibs.images()
#images.aids
groups = images._annot_groups
import numpy as np
# Take all but the largest annotation per image
large_masks = [ut.index_to_boolmask([np.argmax(x)], len(x)) for x in groups.bbox_area]
small_masks = ut.lmap(ut.not_list, large_masks)
# Remove all but the largest annotation
small_aids = ut.zipcompress(groups.aid, small_masks)
small_aids = ut.flatten(small_aids)
# Fix any empty images
images = ibs.images()
empty_images = ut.where(np.array(images.num_annotations) == 0)
print('empty_images = %r' % (empty_images,))
#list(map(basename, map(dirname, images.uris_original)))
def VecPipe(func):
import pipe
@pipe.Pipe
def wrapped(sequence):
return map(func, sequence)
#return (None if item is None else func(item) for item in sequence)
return wrapped
name_list = list(images.uris_original | VecPipe(dirname) | VecPipe(basename))
aids_list = images.aids
ut.assert_all_eq(list(aids_list | VecPipe(len)))
annots = ibs.annots(ut.flatten(aids_list))
annots.names = name_list
def shark_misc():
import ibeis
ibs = ibeis.opendb('WS_ALL')
aid_list = ibs.get_valid_aids()
flag_list = ibs.get_annot_been_adjusted(aid_list)
adjusted_aids = ut.compress(aid_list, flag_list)
return adjusted_aids
#if False:
# # TRY TO FIGURE OUT WHY URLS ARE MISSING IN STEP 1
# encounter_to_parsed1 = parsed1.group_items('encounter')
# encounter_to_parsed2 = parsed2.group_items('encounter')
# url_to_parsed1 = parsed1.group_items('img_url')
# url_to_parsed2 = parsed2.group_items('img_url')
# def set_overlap(set1, set2):
# set1 = set(set1)
# set2 = set(set2)
# return ut.odict([
# ('s1', len(set1)),
# ('s2', len(set2)),
# ('isect', len(set1.intersection(set2))),
# ('union', len(set1.union(set2))),
# ('s1 - s2', len(set1.difference(set2))),
# ('s2 - s1', len(set2.difference(set1))),
# ])
# print('encounter overlap: ' + ut.repr3(set_overlap(encounter_to_parsed1, encounter_to_parsed2)))
# print('url overlap: ' + ut.repr3(set_overlap(url_to_parsed1, url_to_parsed2)))
# url1 = list(url_to_parsed1.keys())
# url2 = list(url_to_parsed2.keys())
# # remove common prefixes
# from os.path import commonprefix, basename # NOQA
# cp1 = commonprefix(url1)
# cp2 = commonprefix(url2)
# #suffix1 = sorted([u[len(cp1):].lower() for u in url1])
# #suffix2 = sorted([u[len(cp2):].lower() for u in url2])
# suffix1 = sorted([u[len(cp1):] for u in url1])
# suffix2 = sorted([u[len(cp2):] for u in url2])
# print('suffix overlap: ' + ut.repr3(set_overlap(suffix1, suffix2)))
# set1 = set(suffix1)
# set2 = set(suffix2)
# only1 = list(set1 - set1.intersection(set2))
# only2 = list(set2 - set1.intersection(set2))
# import numpy as np
# for suf in ut.ProgIter(only2, bs=True):
# dist = np.array(ut.edit_distance(suf, only1))
# idx = ut.argsort(dist)[0:3]
# if dist[idx][0] < 3:
# close = ut.take(only1, idx)
# print('---')
# print('suf = %r' % (join(cp2, suf),))
# print('close = %s' % (ut.repr3([join(cp1, c) for c in close]),))
# print('---')
# break
# # Associate keywords with original images
# #lower_urls = [x.lower() for x in parsed['img_url']]
# url_to_idx = ut.make_index_lookup(parsed1['img_url'])
# parsed1['keywords'] = [[] for _ in range(len(parsed1))]
# for url, keys in url_to_keys.items():
# hack because urls are not in the same format
# url = url.replace('wildbook_data_dir', 'shepherd_data_dir')
# url = url.lower()
# if url in url_to_idx:
# idx = url_to_idx[url]
# parsed1['keywords'][idx].extend(keys)
#healthy_annots = ibs.annots(ibs.imagesets(text='Non-Injured Sharks').aids[0])
#ibs.set_annot_prop('healthy', healthy_annots.aids, [True] * len(healthy_annots))
#['healthy' in t and len(t) > 0 for t in single_annots.case_tags]
#healthy_tags = []
#ut.find_duplicate_items(cur_img_uuids)
#ut.find_duplicate_items(new_img_uuids)
#cur_uuids = set(cur_img_uuids)
#new_uuids = set(new_img_uuids)
#both_uuids = new_uuids.intersection(cur_uuids)
#only_cur = cur_uuids - both_uuids
#only_new = new_uuids - both_uuids
#print('len(cur_uuids) = %r' % (len(cur_uuids)))
#print('len(new_uuids) = %r' % (len(new_uuids)))
#print('len(both_uuids) = %r' % (len(both_uuids)))
#print('len(only_cur) = %r' % (len(only_cur)))
#print('len(only_new) = %r' % (len(only_new)))
# Ensure that data in both sets is synchronized
#images_both = []
#if False:
# print('Removing small images')
# import numpy as np
# import vtool as vt
# imgsize_list = np.array([vt.open_image_size(gpath) for gpath in parsed['new_fpath']])
# sqrt_area_list = np.sqrt(np.prod(imgsize_list, axis=1))
# areq_flags_list = sqrt_area_list >= 750
# parsed = parsed.compress(areq_flags_list)
|
apache-2.0
|
nycJSorg/angular-presentation
|
libs/code-demos/src/lib/shared/monaco-replay.ts
|
488
|
declare const monaco: any;
export function replay(editor: any) {
const keys = [];
editor.addCommand(monaco.KeyCode.F9, () => {
console.log('starting recording');
editor.onKeyDown(key => {
keys.push({
timestamp: new Date(),
key
});
});
editor.trigger('keyboard', 'type', { text: 'test' });
editor.trigger('keyboard', monaco.editor.Handler.CursorLeft);
});
editor.addCommand(monaco.KeyCode.F8, () => {
console.log(keys);
});
}
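// Hypothetical usage sketch (not part of the original file): attach the replay
// helper to an editor created with the standard Monaco API.
//   const editor = monaco.editor.create(document.getElementById('host'), { language: 'typescript' });
//   replay(editor); // F9 starts capturing key events, F8 logs the captured list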
|
apache-2.0
|
geekdos/Personal_Labs
|
MidelwarLab/GestionDesNotes/src/com/geekdos/app/App.java
|
9128
|
package com.geekdos.app;
import com.geekdos.midelwar.interfaces.GestionDesNotesInterface;
import com.geekdos.model.*;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by theXuser on 11/12/2016.
*/
public class App {
public static void main(String[] args){
System.out.println("-----------------------------------------------------");
System.out.println("Bienvenue dans l'application Gestion des Notes Client");
System.out.println("-----------------------------------------------------");
Etudiant oussama = new Etudiant();
Etudiant dina = new Etudiant();
Etudiant ouail = new Etudiant();
Etudiant ayoub = new Etudiant();
Etudiant yassin = new Etudiant();
Etudiant halima = new Etudiant();
oussama.setNom("KHACHIAI");oussama.setPrenom("Oussama");oussama.setCne("1128764379");oussama.setAge(25);
dina.setNom("BEN HALIMA");dina.setPrenom("Dina");dina.setCne("1228764379");dina.setAge(21);
ouail.setNom("KERDAD");ouail.setPrenom("Ouail");ouail.setCne("1028764379");ouail.setAge(25);
ayoub.setNom("BOUCHAREB");ayoub.setPrenom("Ayoub");ayoub.setCne("1228764380");ayoub.setAge(25);
yassin.setNom("AKESBI");yassin.setPrenom("Yassin");yassin.setCne("1228764385");yassin.setAge(23);
halima.setNom("BOUJRA");halima.setPrenom("Halima");halima.setCne("1228764386");halima.setAge(23);
Note note1 = new Note();Note note2 = new Note();
Note note3 = new Note();Note note4 = new Note();
Note note5 = new Note();Note note6 = new Note();
Note note7 = new Note();Note note8 = new Note();
Note note9 = new Note();Note note10 = new Note();
Note note11 = new Note();Note note12 = new Note();
Note note13 = new Note();Note note14 = new Note();
note1.setName("M1");note2.setName("M2");
note3.setName("M3");note4.setName("M4");
note5.setName("M5");note6.setName("M6");
note7.setName("M7");note8.setName("M8");
note9.setName("M9");note10.setName("M10");
note11.setName("M11");note12.setName("M12");
note13.setName("M13");note14.setName("M14");
note1.setValue(18);note2.setValue(18);
note3.setValue(17);note4.setValue(17);
note5.setValue(15);note6.setValue(15);
note7.setValue(15);note8.setValue(10);
note9.setValue(10);note10.setValue(10);
note11.setValue(8);note12.setValue(5);
note13.setValue(12);note14.setValue(7);
List<Note> oussamaNotes = new ArrayList<Note>();
List<Note> dinaNotes = new ArrayList<Note>();
List<Note> ouailNotes = new ArrayList<Note>();
List<Note> ayoubNotes = new ArrayList<Note>();
oussamaNotes.add(note1);
oussamaNotes.add(note2);
dinaNotes.add(note3);
dinaNotes.add(note4);
ouailNotes.add(note5);
ouailNotes.add(note6);
ayoubNotes.add(note7);
ayoubNotes.add(note8);
oussama.setNotes(oussamaNotes);
dina.setNotes(dinaNotes);
ouail.setNotes(ouailNotes);
ayoub.setNotes(ayoubNotes);
List<Etudiant> etudiants = new ArrayList<>();
etudiants.add(oussama);
etudiants.add(dina);
etudiants.add(ouail);
etudiants.add(ayoub);
try {
Remote gestionDesNotes = Naming.lookup("rmi://169.254.12.27/GestionDesNotes");
((GestionDesNotesInterface) gestionDesNotes).setLes_etudiants(etudiants);
String messsageOussama = "La moyenne des note de : "+ oussama.getNom() +" "+ oussama.getPrenom();
messsageOussama += " Qui porte le CNE "+oussama.getCne();
messsageOussama += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("KHACHIAI");
String messsageOuail = "La moyenne des note de : "+ ouail.getNom() +" "+ ouail.getPrenom();
messsageOuail += " Qui porte le CNE "+ouail.getCne();
messsageOuail += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("KERDAD");
String messsageDina = "La moyenne des note de : "+ dina.getNom() +" "+ dina.getPrenom();
messsageDina += " Qui porte le CNE "+dina.getCne();
messsageDina += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("BEN HALIMA");
String messsageAyoub = "La moyenne des note de : "+ ayoub.getNom() +" "+ ayoub.getPrenom();
messsageAyoub += " Qui porte le CNE "+ayoub.getCne();
messsageAyoub += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("BOUCHAREB");
/**
* Display the list of registered students with their average grade
*/
System.out.println("-----------------------------------------------------");
System.out.println("La liste des étudiants enregistrer");
System.out.println("-----------------------------------------------------");
System.out.println(messsageOussama);
System.out.println(messsageDina);
System.out.println(messsageOuail);
System.out.println(messsageAyoub);
System.out.println("-----------------------------------------------------");
System.out.println("L'etudiant magoron est:");
System.out.println("-----------------------------------------------------");
String nomMagoron = ((GestionDesNotesInterface) gestionDesNotes).getMajoran(etudiants).getNom();
System.out.println("L'etudiant Majoran est : "+nomMagoron);
/**-------------------------------
* Display the list of students who passed their modules with an average above 12
*/
System.out.println("-----------------------------------------------------");
System.out.println("La liste des étudiant qu'en valider les modules");
System.out.println("-----------------------------------------------------");
for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getvalidation()) {
String message = "L'étudiant : "+etudiant.getNom();
message += " à validé les modules ";
for (int i = 0; i < etudiant.getNotes().size() ;i++) {
message += "<<" + etudiant.getNotes().get(i).getName();
message += ", " + etudiant.getNotes().get(i).getValue()+">> ";
}
System.out.println(message);
System.out.println("-----------------------------------------------------");
}
/**
* Display the list of students who have a resit in some modules, with an average between 7 (inclusive) and 12 (exclusive)
*/
System.out.println("-----------------------------------------------------");
System.out.println("La liste des étudiant qu'en ratrapage");
System.out.println("-----------------------------------------------------");
for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getRat()) {
String message = "L'étudiant : "+etudiant.getNom();
message += " à un ratrapage dans les modules ";
for (int i = 0; i < etudiant.getNotes().size() ;i++) {
message += "<<" + etudiant.getNotes().get(i).getName();
message += ", " + etudiant.getNotes().get(i).getValue()+">> ";
}
System.out.println(message);
System.out.println("-----------------------------------------------------");
}
/**
* Display the list of students who failed modules with an average below 7
*/
System.out.println("-----------------------------------------------------");
System.out.println("La liste des étudiant qu'en non validé");
System.out.println("-----------------------------------------------------");
for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getNonValidation()) {
String message = "L'étudiant : "+etudiant.getNom();
message += " à eu une non validé dans les modules ";
for (int i = 0; i < etudiant.getNotes().size() ;i++) {
message += "<<" + etudiant.getNotes().get(i).getName();
message += ", " + etudiant.getNotes().get(i).getValue()+">> ";
}
System.out.println(message);
System.out.println("-----------------------------------------------------");
}
} catch (NotBoundException e) {
e.printStackTrace();
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
|
apache-2.0
|
Gallio/mbunit-v2
|
src/refly/Refly/CodeDom/FieldDeclaration.cs
|
2091
|
/// Refly License
///
/// Copyright (c) 2004 Jonathan de Halleux, http://www.dotnetwiki.org
///
/// This software is provided 'as-is', without any express or implied warranty.
/// In no event will the authors be held liable for any damages arising from
/// the use of this software.
///
/// Permission is granted to anyone to use this software for any purpose,
/// including commercial applications, and to alter it and redistribute it
/// freely, subject to the following restrictions:
///
/// 1. The origin of this software must not be misrepresented;
/// you must not claim that you wrote the original software.
/// If you use this software in a product, an acknowledgment in the product
/// documentation would be appreciated but is not required.
///
/// 2. Altered source versions must be plainly marked as such,
/// and must not be misrepresented as being the original software.
///
/// 3. This notice may not be removed or altered from any source distribution.
using System;
using System.CodeDom;
namespace Refly.CodeDom
{
using Refly.CodeDom.Expressions;
/// <summary>
/// A field declaration
/// </summary>
public class FieldDeclaration : MemberDeclaration
{
private ITypeDeclaration type;
private Expression initExpression = null;
internal FieldDeclaration(string name, Declaration declaringType, ITypeDeclaration type)
:base(name,declaringType)
{
if (type==null)
throw new ArgumentNullException("type");
this.type = type;
}
public ITypeDeclaration Type
{
get
{
return this.type;
}
}
public Expression InitExpression
{
get
{
return this.initExpression;
}
set
{
this.initExpression = value;
}
}
public override CodeTypeMember ToCodeDom()
{
CodeMemberField f = new CodeMemberField(
this.Type.TypeReference,
this.Name
);
if (this.initExpression!=null)
{
f.InitExpression = this.initExpression.ToCodeDom();
}
// comments
base.ToCodeDom(f);
return f;
}
}
}
|
apache-2.0
|
lyndonnixon/annotationtool
|
libraries/graphite/Graphite/Relation.php
|
106
|
<?php
class Graphite_Relation extends Graphite_Resource
{
function nodeType() { return "#relation"; }
}
|
apache-2.0
|
getsocial-im/getsocial-unity-sdk
|
example/GetSocialSdkDemo/Assets/GetSocial/GetSocialNative/Scripts/Core/Internal/Generated/DDGetAnnouncementsRequest.cs
|
8839
|
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR
/**
* Autogenerated by Thrift Compiler ()
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.IO;
using Thrift;
using Thrift.Collections;
using System.Runtime.Serialization;
using Thrift.Protocol;
using Thrift.Transport;
namespace GetSocialSdk.Core
{
#if !SILVERLIGHT
[Serializable]
#endif
public partial class DDGetAnnouncementsRequest : TBase
{
private string _sessionId;
private string _appId;
private string _searchTerm;
private Pagination _pagination;
private SGEntity _target;
private string _orderBy;
private AFPollFilterType _withPolls;
public string SessionId
{
get
{
return _sessionId;
}
set
{
__isset.sessionId = true;
this._sessionId = value;
}
}
public string AppId
{
get
{
return _appId;
}
set
{
__isset.appId = true;
this._appId = value;
}
}
public string SearchTerm
{
get
{
return _searchTerm;
}
set
{
__isset.searchTerm = true;
this._searchTerm = value;
}
}
public Pagination Pagination
{
get
{
return _pagination;
}
set
{
__isset.pagination = true;
this._pagination = value;
}
}
/// <summary>
/// for all announcements: keep target empty
/// </summary>
public SGEntity Target
{
get
{
return _target;
}
set
{
__isset.target = true;
this._target = value;
}
}
public string OrderBy
{
get
{
return _orderBy;
}
set
{
__isset.@orderBy = true;
this._orderBy = value;
}
}
/// <summary>
/// options: [-]status
///
/// <seealso cref="AFPollFilterType"/>
/// </summary>
public AFPollFilterType WithPolls
{
get
{
return _withPolls;
}
set
{
__isset.withPolls = true;
this._withPolls = value;
}
}
public Isset __isset;
#if !SILVERLIGHT
[Serializable]
#endif
public struct Isset {
public bool sessionId;
public bool appId;
public bool searchTerm;
public bool pagination;
public bool target;
public bool @orderBy;
public bool withPolls;
}
public DDGetAnnouncementsRequest() {
}
public void Read (TProtocol iprot)
{
iprot.IncrementRecursionDepth();
try
{
TField field;
iprot.ReadStructBegin();
while (true)
{
field = iprot.ReadFieldBegin();
if (field.Type == TType.Stop) {
break;
}
switch (field.ID)
{
case 1:
if (field.Type == TType.String) {
SessionId = iprot.ReadString();
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 2:
if (field.Type == TType.String) {
AppId = iprot.ReadString();
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 3:
if (field.Type == TType.String) {
SearchTerm = iprot.ReadString();
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 4:
if (field.Type == TType.Struct) {
Pagination = new Pagination();
Pagination.Read(iprot);
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 5:
if (field.Type == TType.Struct) {
Target = new SGEntity();
Target.Read(iprot);
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 6:
if (field.Type == TType.String) {
OrderBy = iprot.ReadString();
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
case 7:
if (field.Type == TType.I32) {
WithPolls = (AFPollFilterType)iprot.ReadI32();
} else {
TProtocolUtil.Skip(iprot, field.Type);
}
break;
default:
TProtocolUtil.Skip(iprot, field.Type);
break;
}
iprot.ReadFieldEnd();
}
iprot.ReadStructEnd();
}
finally
{
iprot.DecrementRecursionDepth();
}
}
public void Write(TProtocol oprot) {
oprot.IncrementRecursionDepth();
try
{
TStruct struc = new TStruct("DDGetAnnouncementsRequest");
oprot.WriteStructBegin(struc);
TField field = new TField();
if (SessionId != null && __isset.sessionId) {
field.Name = "sessionId";
field.Type = TType.String;
field.ID = 1;
oprot.WriteFieldBegin(field);
oprot.WriteString(SessionId);
oprot.WriteFieldEnd();
}
if (AppId != null && __isset.appId) {
field.Name = "appId";
field.Type = TType.String;
field.ID = 2;
oprot.WriteFieldBegin(field);
oprot.WriteString(AppId);
oprot.WriteFieldEnd();
}
if (SearchTerm != null && __isset.searchTerm) {
field.Name = "searchTerm";
field.Type = TType.String;
field.ID = 3;
oprot.WriteFieldBegin(field);
oprot.WriteString(SearchTerm);
oprot.WriteFieldEnd();
}
if (Pagination != null && __isset.pagination) {
field.Name = "pagination";
field.Type = TType.Struct;
field.ID = 4;
oprot.WriteFieldBegin(field);
Pagination.Write(oprot);
oprot.WriteFieldEnd();
}
if (Target != null && __isset.target) {
field.Name = "target";
field.Type = TType.Struct;
field.ID = 5;
oprot.WriteFieldBegin(field);
Target.Write(oprot);
oprot.WriteFieldEnd();
}
if (OrderBy != null && __isset.@orderBy) {
field.Name = "orderBy";
field.Type = TType.String;
field.ID = 6;
oprot.WriteFieldBegin(field);
oprot.WriteString(OrderBy);
oprot.WriteFieldEnd();
}
if (__isset.withPolls) {
field.Name = "withPolls";
field.Type = TType.I32;
field.ID = 7;
oprot.WriteFieldBegin(field);
oprot.WriteI32((int)WithPolls);
oprot.WriteFieldEnd();
}
oprot.WriteFieldStop();
oprot.WriteStructEnd();
}
finally
{
oprot.DecrementRecursionDepth();
}
}
public override string ToString() {
StringBuilder __sb = new StringBuilder("DDGetAnnouncementsRequest(");
bool __first = true;
if (SessionId != null && __isset.sessionId) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("SessionId: ");
__sb.Append(SessionId);
}
if (AppId != null && __isset.appId) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("AppId: ");
__sb.Append(AppId);
}
if (SearchTerm != null && __isset.searchTerm) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("SearchTerm: ");
__sb.Append(SearchTerm);
}
if (Pagination != null && __isset.pagination) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("Pagination: ");
__sb.Append(Pagination== null ? "<null>" : Pagination.ToString());
}
if (Target != null && __isset.target) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("Target: ");
__sb.Append(Target== null ? "<null>" : Target.ToString());
}
if (OrderBy != null && __isset.@orderBy) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("OrderBy: ");
__sb.Append(OrderBy);
}
if (__isset.withPolls) {
if(!__first) { __sb.Append(", "); }
__first = false;
__sb.Append("WithPolls: ");
__sb.Append(WithPolls);
}
__sb.Append(")");
return __sb.ToString();
}
}
}
#endif
|
apache-2.0
|
Gamesjiazhi/rwproject
|
ResourceWar/undeadClient2D/frameworks/runtime-src/proj.android/assets/src/FrameAnimUnit.lua
|
3142
|
ScriptHandler_FrameAnimEventCallFun = 9000000 -- frame event callback
-- Frame animation config list
local FrameAnimInfoList = {
["WorkerSquat"] = { fileName="human_worker.png", plistName="human_worker.plist", fameName="human_worker_squat", frameCount=6, delay=0.1 }, -- worker squatting to build
["WorkerStand"] = { fileName="human_worker.png", plistName="human_worker.plist", fameName="human_worker_stand", frameCount=6, delay=0.1 }, -- worker standing
["DragonShadow"] = { fileName="dragon_shadow.pvr.ccz", plistName="dragon_shadow.plist", fameName="dragon_shadow", frameCount=6, delay=0.3 }, -- dragon shadow
}
-- Frame animation unit, wraps a frame-by-frame animation
FrameAnimUnit = class("FrameAnimUnit",function()
return cc.Sprite:create()
end)
-- constructor
function FrameAnimUnit:ctor()
self._animation = nil;
self._animate = nil;
self._action = nil;
end
-- initialization
function FrameAnimUnit:init( name, eventListener )
-- look up the config info
local info = FrameAnimInfoList[name]
-- cache the sprite atlas
cc.SpriteFrameCache:getInstance():addSpriteFrames( info["plistName"], info["fileName"] )
-- check the animation cache
self._animation = cc.AnimationCache:getInstance():getAnimation( name )
if self._animation == nil then
-- create the frame animation
local animFrames = {}
for i=1,info["frameCount"], 1 do
local frameName = string.format( info["fameName"].."_%02d.png", i )
animFrames[i] = cc.SpriteFrameCache:getInstance():getSpriteFrame( frameName )
end
self._animation = cc.Animation:createWithSpriteFrames( animFrames )
self._animation:setDelayPerUnit( info["delay"] )
self._animation:setRestoreOriginalFrame( true )
-- cache the frame animation
cc.AnimationCache:getInstance():addAnimation( self._animation, name )
end
-- create the action; if an event listener is provided, use the custom animate class with event-callback support
if eventListener then
self._animate = EventAnimate:create( self._animation )
self._animate:registerScriptHandlerSelf( eventListener, ScriptHandler_FrameAnimEventCallFun );
else
self._animate = cc.Animate:create( self._animation )
end
return true
end
-- play once
function FrameAnimUnit:playOnce( isRemove )
self:playCount( 1, isRemove );
end
-- play a given number of times
function FrameAnimUnit:playCount( count, isRemove )
if self._action and self._action:isDone() == false then
return;
end
if isRemove then
self._action = cc.Sequence:create( cc.Repeat:create( self._animate, count ),
cc.CallFunc:create( function(sender) sender:removeFromParent() end) )
else
self._action = cc.Repeat:create( self._animate, count )
end
self:runAction( self._action );
end
-- play in a loop
function FrameAnimUnit:playLoop()
if self._action and self._action:isDone() == false then
return;
end
self._action = cc.RepeatForever:create( self._animate )
self:runAction( self._action );
end
-- stop playback
function FrameAnimUnit:stop()
self:stopAllActions();
end
-- create
function FrameAnimUnit.create( name, eventListener )
local object = FrameAnimUnit.new();
if object:init( name, eventListener ) == false then
return nil;
end
return object;
end
return FrameAnimUnit
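-- Hypothetical usage sketch (not part of the original file); assumes this module
-- is required from another scene script in the same cocos2d-x Lua project:
--   local FrameAnimUnit = require("FrameAnimUnit")
--   local anim = FrameAnimUnit.create("WorkerSquat") -- key from FrameAnimInfoList above
--   scene:addChild(anim)
--   anim:playLoop()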
|
apache-2.0
|
etirelli/drools
|
drools-scenario-simulation/drools-scenario-simulation-backend/src/main/java/org/drools/scenariosimulation/backend/runner/DMNScenarioRunner.java
|
2033
|
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.scenariosimulation.backend.runner;
import java.util.List;
import org.drools.scenariosimulation.api.model.ScenarioWithIndex;
import org.drools.scenariosimulation.api.model.Simulation;
import org.drools.scenariosimulation.api.model.SimulationDescriptor;
import org.drools.scenariosimulation.backend.expression.DMNFeelExpressionEvaluator;
import org.kie.api.runtime.KieContainer;
public class DMNScenarioRunner extends AbstractScenarioRunner {
public DMNScenarioRunner(KieContainer kieContainer, Simulation simulation) {
this(kieContainer, simulation, null);
}
public DMNScenarioRunner(KieContainer kieContainer, Simulation simulation, String fileName) {
this(kieContainer, simulation.getSimulationDescriptor(), simulation.getScenarioWithIndex(), fileName);
}
public DMNScenarioRunner(KieContainer kieContainer, SimulationDescriptor simulationDescriptor, List<ScenarioWithIndex> scenarios) {
this(kieContainer, simulationDescriptor, scenarios, null);
}
public DMNScenarioRunner(KieContainer kieContainer, SimulationDescriptor simulationDescriptor, List<ScenarioWithIndex> scenarios, String fileName) {
super(kieContainer, simulationDescriptor, scenarios, fileName, DMNFeelExpressionEvaluator::new);
}
@Override
protected AbstractRunnerHelper newRunnerHelper() {
return new DMNScenarioRunnerHelper();
}
}
|
apache-2.0
|
bytedance/fedlearner
|
deploy/scripts/aliyun/upgrade-add-on.sh
|
7606
|
#!/bin/bash
# Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ACCESS_KEY_ID=$1
ACCESS_KEY_SECRET=$2
IMAGE_HUB_URL=$3
IMAGE_HUB_USERNAME=$4
IMAGE_HUB_PASSWORD=$5
EXTERNAL_NAME=$6
GRPC_SSL_NAME=$7
DB_PASSWORD=$8
DOMAIN_URL=$9
REGION="cn-beijing"
ZONE_ID="cn-beijing-h"
GENERATER_NAME="fedlearnerwins"
function echo_exit {
echo $1
exit 1
}
function echo_log {
msg=$1
echo $msg
echo $msg >> upgrade.log
}
function json2yaml {
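# Despite its name, this extracts the kubeconfig stored in the "config" field of the JSON saved to ./tmp and writes it to ./config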
python -c 'import json; open("config", "w").write(json.load(open("./tmp","r"))["config"]);'
}
function install_cli {
# Download kubectl
kubectl help >/dev/null 2>&1
if [ $? -ne 0 ]
then
echo_log "Download kubectl."
curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin/darwin/amd64/kubectl
mv kubectl /usr/local/bin/
chmod 755 /usr/local/bin/kubectl
fi
# Download helm
helm version | grep Version:\"v3 >/dev/null 2>&1
if [ $? -ne 0 ]
then
echo_log "Download helm."
curl -LO https://get.helm.sh/helm-v3.2.3-darwin-amd64.tar.gz
tar -zxvf helm-v3.2.3-darwin-amd64.tar.gz
mv darwin-amd64/helm /usr/local/bin/
chmod 755 /usr/local/bin/helm
rm -rf darwin-amd64 helm-v3.2.3-darwin-amd64.tar.gz
fi
# Download aliyun cli
aliyun version >/dev/null 2>&1
if [ $? -ne 0 ]
then
echo_log "Download aliyun cli."
curl -LO https://aliyuncli.alicdn.com/aliyun-cli-macosx-3.0.32-amd64.tgz
tar -zxvf aliyun-cli-macosx-3.0.32-amd64.tgz
mv aliyun /usr/local/bin
chmod 755 /usr/local/bin/aliyun
rm -rf aliyun-cli-macosx-3.0.32-amd64.tgz
fi
# Configure aliyun cli
aliyun auto-completion
aliyun configure set --profile akProfile --region $REGION --access-key-id $ACCESS_KEY_ID --access-key-secret $ACCESS_KEY_SECRET --language en
if [ $? -ne 0 ]
then
echo_exit "Failed to initiate aliyun cli."
fi
}
function upgrade {
cat ../../charts/fedlearner-add-on/configuration-snippet.txt | grep grpc_set_header >/dev/null 2>&1
if [ $? -ne 0 ]
then
echo "grpc_set_header Host $GRPC_SSL_NAME;" >> ../../charts/fedlearner-add-on/configuration-snippet.txt
fi
cat ../../charts/fedlearner-add-on/server-snippet.txt | grep grpc_ssl_name >/dev/null 2>&1
if [ $? -ne 0 ]
then
echo "grpc_ssl_name $GRPC_SSL_NAME;" >> ../../charts/fedlearner-add-on/server-snippet.txt
fi
CLUSTER_ID=`aliyun cs DescribeClusters | grep -A 1 name | grep -A 1 $GENERATER_NAME | grep cluster_id | awk -F "\"" '{print $4}'`
if [ $? -ne 0 ]
then
echo_exit "Failed to get k8s cluster."
fi
rm -rf tmp config
echo "Creating config file in current dir, you can move it to ~/.kube/config."
aliyun cs GET /k8s/$CLUSTER_ID/user_config > ./tmp
if [ $? -ne 0 ]
then
echo_exit "Failed to get k8s cluster config."
fi
json2yaml
CURRENT_DIR=`pwd`
export KUBECONFIG="$CURRENT_DIR/config"
helm upgrade fedlearner-add-on ../../charts/fedlearner-add-on \
--set imageCredentials.registry=$IMAGE_HUB_URL \
--set imageCredentials.username=$IMAGE_HUB_USERNAME \
--set imageCredentials.password=$IMAGE_HUB_PASSWORD \
--set service.externalName=$EXTERNAL_NAME
FILE_SYSTEM_ID=`aliyun nas DescribeFileSystems --Description $GENERATER_NAME | grep FileSystemId | awk -F "\"" '{print $4}'`
if [ -n "$FILE_SYSTEM_ID" ]
then
MOUNT_TARGET_DOMAIN=`aliyun nas DescribeMountTargets --FileSystemId $FILE_SYSTEM_ID | grep MountTargetDomain | awk -F "\"" '{print $4}'`
helm upgrade fedlearner-stack ../../charts/fedlearner-stack --set nfs-server-provisioner.enabled=false \
--set nfs-client-provisioner.enabled=true \
--set nfs-client-provisioner.nfs.server=$MOUNT_TARGET_DOMAIN \
--set mariadb.enabled=false \
--set 'ingress-nginx.controller.extraVolumeMounts[0].name=fedlearner-proxy-client' \
--set 'ingress-nginx.controller.extraVolumeMounts[0].mountPath=/etc/ingress-nginx/client/' \
--set 'ingress-nginx.controller.extraVolumes[0].name=fedlearner-proxy-client' \
--set 'ingress-nginx.controller.extraVolumes[0].secret.secretName=fedlearner-proxy-client'
else
echo_exit "Failed to update fedlearner-stack since missing MOUNT_TARGET_DOMAIN."
fi
VPC_ID=`aliyun vpc DescribeVpcs --VpcName $GENERATER_NAME | grep VpcId | awk -F "\"" '{print $4}'`
if [[ $VPC_ID == "vpc"* ]]
then
DB_INSTANCE_ID=`aliyun rds DescribeDBInstances --VpcId $VPC_ID | grep \"DBInstanceId\" | awk -F "\"" '{print $4}'`
if [ -n "$DB_INSTANCE_ID" ]
then
DB_URL=`aliyun rds DescribeDBInstanceNetInfo --DBInstanceId $DB_INSTANCE_ID | grep ConnectionString\" | awk -F "\"" '{print $4}'`
helm upgrade fedlearner ../../charts/fedlearner \
--set fedlearner-web-console.cluster.env.DB_USERNAME=fedlearner \
--set fedlearner-web-console.cluster.env.DB_PASSWORD=$DB_PASSWORD \
--set fedlearner-web-console.cluster.env.DB_HOST=$DB_URL \
--set fedlearner-web-console.cluster.env.DB_PORT=3306 \
--set fedlearner-operator.extraArgs.ingress-extra-host-suffix=$DOMAIN_URL \
--set fedlearner-operator.extraArgs.ingress-client-auth-secret-name="default/ca-secret" \
--set fedlearner-operator.extraArgs.ingress-enabled-client-auth=true \
--set fedlearner-operator.extraArgs.ingress-secret-name=fedlearner-proxy-server
else
echo_exit "Failed to update fedlearner-stack since missing DB_INSTANCE_ID."
fi
else
echo_exit "Failed to update fedlearner-stack since missing VPC_ID."
fi
}
function usage {
echo "Usage: "
echo " ./upgrade-add-on.sh access_key_id access_key_secret image_hub_url image_hub_username image_hub_password external_name grpc_ssl_name db_password domain_url"
echo ""
echo "Params:"
echo ""
echo " access_key_id: the access key id provided by aliyun, required"
echo " access_key_secret: the access key secret provided by aliyun, required"
echo " image_hub_url: the docker image hub url, required"
echo " image_hub_username: the docker image hub username, required"
echo " image_hub_password: the docker image hub password, required"
echo " external_name: the ip address for external service, required"
echo " grpc_ssl_name: the grpc ssl name, required"
echo " db_password: the database password, required"
echo " domain_url: the domain url, required"
}
if [[ -z $ACCESS_KEY_ID ]] || [[ -z $ACCESS_KEY_SECRET ]] || [[ -z $IMAGE_HUB_URL ]] || [[ -z $IMAGE_HUB_USERNAME ]] || [[ -z $IMAGE_HUB_PASSWORD ]] || [[ -z $EXTERNAL_NAME ]] || [[ -z $GRPC_SSL_NAME ]] || [[ -z $DOMAIN_URL ]]
then
usage
exit 1
else
install_cli
upgrade
fi
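# Hypothetical example invocation (placeholder values, not real credentials):
#   ./upgrade-add-on.sh <access_key_id> <access_key_secret> registry.example.com hub-user hub-pass 1.2.3.4 fl-demo.example.com <db_password> example.com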
|
apache-2.0
|
google/nomulus
|
core/src/test/java/google/registry/testing/InjectExtension.java
|
7738
|
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.testing;
import static com.google.common.base.Preconditions.checkState;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
* JUnit extension for overriding {@code private static} fields during a test.
*
* <p>This extension uses reflection to change the value of a field while your test is running and
* then restore it to its original value after it's done (even if the test fails). The injection
* will work even if the field is marked {@code private} (but not if it's {@code final}). The
* downside is that if you rename the field in the future, IDE refactoring won't be smart enough to
* update the injection site.
*
* <p>We encourage you to consider using {@link google.registry.util.NonFinalForTesting
* @NonFinalForTesting} to document your injected fields.
*
* <p>This class is a horrible evil hack, but it alleviates you of the toil of having to break
* encapsulation by making your fields non-{@code private}, using the {@link
* com.google.common.annotations.VisibleForTesting @VisibleForTesting} annotation to document
* why you've reduced visibility, creating a temporary field to store the old value, and then
* writing an {@link org.junit.After @After} method to restore it. So sometimes it feels good
* to be evil; but hopefully one day we'll be able to delete this class and do things
* <i>properly</i> with <a href="http://square.github.io/dagger/">Dagger</a> dependency injection.
*
* <p>You use this class by declaring it as an {@link
* org.junit.jupiter.api.extension.RegisterExtension @RegisterExtension} field and then call
* {@link #setStaticField} from either your {@link org.junit.jupiter.api.Test @Test} or {@link
* org.junit.jupiter.api.BeforeEach @BeforeEach} methods. For example:
*
* <pre>
* // Doomsday.java
* public class Doomsday {
*
* private static Clock clock = new SystemClock();
*
* public long getTime() {
* return clock.currentTimeMillis();
* }
* }
*
* // DoomsdayTest.java
* public class DoomsdayTest {
*
* @RegisterExtension
* public InjectExtension inject = new InjectExtension();
*
* private final FakeClock clock = new FakeClock();
*
* @BeforeEach
* public void beforeEach() {
* inject.setStaticField(Doomsday.class, "clock", clock);
* }
*
* @Test
* public void test() {
* clock.advanceBy(666L);
* Doomsday doom = new Doomsday();
* assertEquals(666L, doom.getTime());
* }
* }
* </pre>
*
* @see google.registry.util.NonFinalForTesting
*/
public class InjectExtension implements AfterEachCallback, BeforeEachCallback {
private static class Change {
private final Field field;
@Nullable private Object oldValue;
@Nullable private final Object newValue;
private boolean active;
Change(Field field, @Nullable Object oldValue, @Nullable Object newValue, boolean active) {
this.field = field;
this.oldValue = oldValue;
this.newValue = newValue;
this.active = active;
}
}
private final List<Change> changes = new ArrayList<>();
private final Set<Field> injected = new HashSet<>();
/** Adds the specified field override to those set by the extension. */
public InjectExtension withStaticFieldOverride(
Class<?> clazz, String fieldName, @Nullable Object newValue) {
changes.add(new Change(getField(clazz, fieldName), null, newValue, false));
return this;
}
/**
* Sets a static field and restores its current value after the test completes.
*
* <p>Prefer to use withStaticFieldOverride(), which is more consistent with the extension
* pattern.
*
* <p>The field is allowed to be {@code private}, but it must not be {@code final}.
*
* <p>This method may be called either from either your {@link
* org.junit.jupiter.api.BeforeEach @BeforeEach} method or from the {@link
* org.junit.jupiter.api.Test @Test} method itself. However, you may not inject the same field
* multiple times during the same test.
*
* @throws IllegalArgumentException if the static field could not be found or modified.
* @throws IllegalStateException if the field has already been injected during this test.
*/
public void setStaticField(Class<?> clazz, String fieldName, @Nullable Object newValue) {
Field field = getField(clazz, fieldName);
Change change = new Change(field, null, newValue, true);
activateChange(change);
changes.add(change);
injected.add(field);
}
@Override
public void beforeEach(ExtensionContext context) {
for (Change change : changes) {
if (!change.active) {
activateChange(change);
}
}
}
@Override
public void afterEach(ExtensionContext context) {
RuntimeException thrown = null;
for (Change change : changes) {
if (change.active) {
try {
checkState(
change.field.get(null).equals(change.newValue),
"Static field value was changed post-injection: %s.%s",
change.field.getDeclaringClass().getSimpleName(),
change.field.getName());
change.field.set(null, change.oldValue);
} catch (IllegalArgumentException | IllegalStateException | IllegalAccessException e) {
if (thrown == null) {
thrown = new RuntimeException(e);
} else {
thrown.addSuppressed(e);
}
}
}
}
changes.clear();
injected.clear();
if (thrown != null) {
throw thrown;
}
}
private Field getField(Class<?> clazz, String fieldName) {
try {
return clazz.getDeclaredField(fieldName);
} catch (SecurityException | NoSuchFieldException e) {
throw new IllegalArgumentException(
String.format("Static field not found: %s.%s", clazz.getSimpleName(), fieldName), e);
}
}
private void activateChange(Change change) {
Class<?> clazz = change.field.getDeclaringClass();
try {
change.field.setAccessible(true);
change.oldValue = change.field.get(null);
} catch (IllegalArgumentException | IllegalAccessException e) {
throw new IllegalArgumentException(
String.format(
"Static field not gettable: %s.%s", clazz.getSimpleName(), change.field.getName()),
e);
}
checkState(
!injected.contains(change.field),
"Static field already injected: %s.%s",
clazz.getSimpleName(),
change.field.getName());
try {
change.field.set(null, change.newValue);
} catch (IllegalArgumentException | IllegalAccessException e) {
throw new IllegalArgumentException(
String.format(
"Static field not settable: %s.%s", clazz.getSimpleName(), change.field.getName()),
e);
}
change.active = true;
}
}
|
apache-2.0
|
victormejia/d3-workshop-playground
|
modules/geomapping/geomap.js
|
703
|
(function() {
'use strict';
// set up margins
var el = d3.select('.geomap'),
elWidth = parseInt(el.style('width'), 10),
elHeight = parseInt(el.style('height'), 10),
margin = {top: 20, right: 20, bottom: 30, left: 50},
width = elWidth - margin.left - margin.right,
height = elHeight - margin.top - margin.bottom;
// create svg element
var svg = el.append("svg")
.attr("width", elWidth)
.attr("height", elHeight)
.append("g")
.attr('transform', 'translate(' + margin.left + "," + margin.top + ')');
d3.json("/data/us-states.json", function(error, data) {
visualize(data);
});
function visualize(data) {
// code here
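// Hypothetical sketch of one way this workshop step could be completed, kept
// commented out so the exercise stays open-ended (assumes D3 v4+, where
// d3.geoAlbersUsa and d3.geoPath exist; D3 v3 names them d3.geo.albersUsa / d3.geo.path):
//   var projection = d3.geoAlbersUsa().translate([width / 2, height / 2]);
//   var path = d3.geoPath().projection(projection);
//   svg.selectAll('path')
//     .data(data.features)
//     .enter().append('path')
//     .attr('d', path);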
}
}());
|
apache-2.0
|
pvo99i/hessdroid
|
tests/org/ast/tests/api/TestPrimitiveTypes.java
|
934
|
/*
* Copyright (C) 2009 [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ast.tests.api;
import java.io.Serializable;
/**
* Simple interface for testing primitive data types.
*
* @author [email protected]
*/
public interface TestPrimitiveTypes extends Serializable {
public int getInt();
public Integer getInteger();
public String getString();
public boolean getBoolean();
}
|
apache-2.0
|
capitalone/Hygieia
|
src/app/shared/modals/form-modal/form-modal.component.html
|
342
|
<div class="modal-content">
<div class="modal-header">
<h4>{{title}}</h4>
<button type="button" class="close" aria-label="Close" (click)="activeModal.dismiss('Cross click')">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<ng-template appFormModal></ng-template>
</div>
</div>
|
apache-2.0
|
clarkyzl/flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/LogicalTypeCastAvoidanceTest.java
|
14234
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.types;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.BinaryType;
import org.apache.flink.table.types.logical.BooleanType;
import org.apache.flink.table.types.logical.CharType;
import org.apache.flink.table.types.logical.DateType;
import org.apache.flink.table.types.logical.DayTimeIntervalType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.DistinctType;
import org.apache.flink.table.types.logical.DoubleType;
import org.apache.flink.table.types.logical.FloatType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.MultisetType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.RowType.RowField;
import org.apache.flink.table.types.logical.SmallIntType;
import org.apache.flink.table.types.logical.StructuredType;
import org.apache.flink.table.types.logical.TimeType;
import org.apache.flink.table.types.logical.TimestampKind;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.TinyIntType;
import org.apache.flink.table.types.logical.TypeInformationRawType;
import org.apache.flink.table.types.logical.VarBinaryType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.YearMonthIntervalType;
import org.apache.flink.table.types.logical.ZonedTimestampType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/** Tests for {@link LogicalTypeCasts#supportsAvoidingCast(LogicalType, LogicalType)}. */
@RunWith(Parameterized.class)
public class LogicalTypeCastAvoidanceTest {
@Parameters(name = "{index}: [{0} COMPATIBLE {1} => {2}")
public static List<Object[]> testData() {
return Arrays.asList(
new Object[][] {
{new CharType(), new CharType(5), false},
{new VarCharType(30), new VarCharType(10), false},
{new VarCharType(10), new VarCharType(30), true},
{new CharType(10), new VarCharType(30), true},
{new BinaryType(10), new VarBinaryType(30), true},
{new CharType(false, 10), new VarCharType(30), true},
{new BinaryType(false, 10), new VarBinaryType(30), true},
{new VarCharType(30), new CharType(10), false},
{new VarBinaryType(30), new BinaryType(10), false},
{new BooleanType(), new BooleanType(false), false},
{new BinaryType(10), new BinaryType(30), false},
{new VarBinaryType(10), new VarBinaryType(30), true},
{new VarBinaryType(30), new VarBinaryType(10), false},
{new DecimalType(), new DecimalType(10, 2), false},
{new TinyIntType(), new TinyIntType(false), false},
{new SmallIntType(), new SmallIntType(false), false},
{new IntType(), new IntType(false), false},
{new IntType(false), new IntType(), true},
{new BigIntType(), new BigIntType(false), false},
{new FloatType(), new FloatType(false), false},
{new DoubleType(), new DoubleType(false), false},
{new DateType(), new DateType(false), false},
{new TimeType(), new TimeType(9), false},
{new TimestampType(9), new TimestampType(3), false},
{new ZonedTimestampType(9), new ZonedTimestampType(3), false},
{
new ZonedTimestampType(false, TimestampKind.ROWTIME, 9),
new ZonedTimestampType(3),
false
},
{
new YearMonthIntervalType(
YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH, 2),
new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.MONTH),
false
},
{
new DayTimeIntervalType(
DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND, 2, 6),
new DayTimeIntervalType(
DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND, 2, 7),
false
},
{
new ArrayType(new TimestampType()),
new ArrayType(new SmallIntType()),
false,
},
{
new MultisetType(new TimestampType()),
new MultisetType(new SmallIntType()),
false
},
{
new MapType(new VarCharType(10), new TimestampType()),
new MapType(new VarCharType(30), new TimestampType()),
true
},
{
new MapType(new VarCharType(30), new TimestampType()),
new MapType(new VarCharType(10), new TimestampType()),
false
},
{
new RowType(
Arrays.asList(
new RowType.RowField("a", new VarCharType()),
new RowType.RowField("b", new VarCharType()),
new RowType.RowField("c", new VarCharType()),
new RowType.RowField("d", new TimestampType()))),
new RowType(
Arrays.asList(
new RowType.RowField("_a", new VarCharType()),
new RowType.RowField("_b", new VarCharType()),
new RowType.RowField("_c", new VarCharType()),
new RowType.RowField("_d", new TimestampType()))),
// field name doesn't matter
true
},
{
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new VarCharType()))),
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new BooleanType()))),
false
},
{
new ArrayType(
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new IntType())))),
new ArrayType(
new RowType(
Arrays.asList(
new RowField("f3", new IntType()),
new RowField("f4", new IntType())))),
true
},
{
new MapType(
new IntType(),
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new IntType())))),
new MapType(
new IntType(),
new RowType(
Arrays.asList(
new RowField("f3", new IntType()),
new RowField("f4", new IntType())))),
true
},
{
new MultisetType(
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new IntType())))),
new MultisetType(
new RowType(
Arrays.asList(
new RowField("f1", new IntType()),
new RowField("f2", new IntType())))),
true
},
{
new TypeInformationRawType<>(Types.GENERIC(LogicalTypesTest.class)),
new TypeInformationRawType<>(Types.GENERIC(Object.class)),
false
},
{
createUserType("User", new IntType(), new VarCharType()),
createUserType("User", new IntType(), new VarCharType()),
true
},
{
createUserType("User", new IntType(), new VarCharType()),
createUserType("User2", new IntType(), new VarCharType()),
false
},
{
createDistinctType("Money", new DecimalType(10, 2)),
createDistinctType("Money", new DecimalType(10, 2)),
true
},
{
createDistinctType("Money", new DecimalType(10, 2)),
createDistinctType("Money2", new DecimalType(10, 2)),
true
},
// row and structure type
{
RowType.of(new IntType(), new VarCharType()),
createUserType("User2", new IntType(), new VarCharType()),
true
},
{
RowType.of(new BigIntType(), new VarCharType()),
createUserType("User2", new IntType(), new VarCharType()),
false
},
{
createUserType("User2", new IntType(), new VarCharType()),
RowType.of(new IntType(), new VarCharType()),
true
},
{
createUserType("User2", new IntType(), new VarCharType()),
RowType.of(new BigIntType(), new VarCharType()),
false
},
});
}
@Parameter public LogicalType sourceType;
@Parameter(1)
public LogicalType targetType;
@Parameter(2)
public boolean equals;
@Test
public void testSupportsAvoidingCast() {
assertThat(supportsAvoidingCast(sourceType, targetType), equalTo(equals));
assertTrue(supportsAvoidingCast(sourceType, sourceType.copy()));
assertTrue(supportsAvoidingCast(targetType, targetType.copy()));
}
private static DistinctType createDistinctType(String name, LogicalType sourceType) {
return DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", name), sourceType)
.description("Money type desc.")
.build();
}
private static StructuredType createUserType(String name, LogicalType... children) {
return StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", name), User.class)
.attributes(
Arrays.stream(children)
.map(lt -> new StructuredType.StructuredAttribute("field", lt))
.collect(Collectors.toList()))
.description("User type desc.")
.setFinal(true)
.setInstantiable(true)
.build();
}
private static final class User {
public int setting;
}
}
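/*
 * A minimal standalone sketch, not part of the original test: it calls the same statically
 * imported supportsAvoidingCast helper directly to show that the check is asymmetric. The
 * VARCHAR lengths mirror two rows of the parameterized data above; the class and method
 * names here are assumed purely for illustration.
 */
class LogicalTypeCastAvoidanceSketch {
    static void demo() {
        // Widening VARCHAR(10) -> VARCHAR(30) needs no explicit cast.
        boolean widening = supportsAvoidingCast(new VarCharType(10), new VarCharType(30));
        // Narrowing VARCHAR(30) -> VARCHAR(10) does.
        boolean narrowing = supportsAvoidingCast(new VarCharType(30), new VarCharType(10));
        System.out.println("widening=" + widening + ", narrowing=" + narrowing);
    }
}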
|
apache-2.0
|
ThilankaBowala/andes
|
modules/andes-core/broker/src/main/java/org/wso2/andes/mqtt/MQTTLocalSubscription.java
|
6839
|
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.mqtt;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dna.mqtt.wso2.QOSLevel;
import org.wso2.andes.kernel.*;
import org.wso2.andes.mqtt.utils.MQTTUtils;
import org.wso2.andes.subscription.OutboundSubscription;
import java.nio.ByteBuffer;
import java.util.UUID;
/**
* Cluster-wide subscriptions relevant per topic will be maintained through this class.
* Per topic there will be only one subscription, just to indicate that the subscription relies on the specific node.
* Each time a message is published to a specific node, the Andes kernel will call this subscription object.
* The subscriber holds a reference to the relevant bridge connection; the bridge notifies the protocol
* engine, which in turn informs the relevant channel-bound subscriptions.
*/
public class MQTTLocalSubscription implements OutboundSubscription {
//Will log the flows relevant to this class
private static Log log = LogFactory.getLog(MQTTLocalSubscription.class);
//The reference to the bridge object
private MQTTopicManager mqqtServerChannel;
//Will store the MQTT channel id
private String mqttSubscriptionID;
//Will set a unique uuid as the channel of the subscription; this will be used to track the delivery of messages
private UUID channelID;
//The QOS level the subscription is bound to
private int subscriberQOS;
private String wildcardDestination;
//keep if the underlying subscription is active
private boolean isActive;
/**
* Track messages sent as retained messages
*/
private ConcurrentTrackingList<Long> retainedMessageList = new ConcurrentTrackingList<Long>();
/**
* Will allow retrieval of the qos the subscription is bound to
*
* @return the level of qos the subscription is bound to
*/
public int getSubscriberQOS() {
return subscriberQOS;
}
/**
* Will specify the level of the qos the subscription is bound to
*
* @param subscriberQOS the qos could be either 0,1 or 2
*/
public void setSubscriberQOS(int subscriberQOS) {
this.subscriberQOS = subscriberQOS;
}
/**
* Retrieval of the subscription id
*
* @return the id of the subscriber
*/
public String getMqttSubscriptionID() {
return mqttSubscriptionID;
}
/**
* Sets an id to the subscriber which will be unique
*
* @param mqttSubscriptionID the unique id of the subscriber
*/
public void setMqttSubscriptionID(String mqttSubscriptionID) {
this.mqttSubscriptionID = mqttSubscriptionID;
}
/**
* The relevant subscription will be registered
*
* @param wildCardDestination wildcard destination (topic filter) the subscription is bound to
* @param channelID ID of the underlying subscription channel
* @param isActive true if subscription is active (TCP connection is live)
*/
public MQTTLocalSubscription(String wildCardDestination, UUID channelID, boolean isActive) {
this.channelID = channelID;
this.isActive = isActive;
this.wildcardDestination = wildCardDestination;
}
/**
* Will set the server channel that will maintain the connectivity between the mqtt protocol realm and andes
*
* @param mqqtServerChannel the bridge connection that will be maintained between the protocol and andes
*/
public void setMqqtServerChannel(MQTTopicManager mqqtServerChannel) {
this.mqqtServerChannel = mqqtServerChannel;
}
/**
* {@inheritDoc}
*/
@Override
public boolean sendMessageToSubscriber(ProtocolMessage protocolMessage, AndesContent content)
throws AndesException {
boolean sendSuccess;
DeliverableAndesMetadata messageMetadata = protocolMessage.getMessage();
if(messageMetadata.isRetain()) {
recordRetainedMessage(messageMetadata.getMessageID());
}
//Should get the message from the list
ByteBuffer message = MQTTUtils.getContentFromMetaInformation(content);
//Will publish the message to the respective queue
if (null != mqqtServerChannel) {
try {
//TODO:review - instead of getSubscribedDestination() used message destination
mqqtServerChannel.distributeMessageToSubscriber(wildcardDestination, message,
messageMetadata.getMessageID(), messageMetadata.getQosLevel(),
messageMetadata.isPersistent(), getMqttSubscriptionID(),
getSubscriberQOS(), messageMetadata);
//We will indicate the ack to the kernel at this stage
//For MQTT QOS 0 we do not get ack from subscriber, hence will be implicitly creating an ack
if (QOSLevel.AT_MOST_ONCE.getValue() == getSubscriberQOS() ||
QOSLevel.AT_MOST_ONCE.getValue() == messageMetadata.getQosLevel()) {
mqqtServerChannel.implicitAck(messageMetadata.getMessageID(), getChannelID());
}
sendSuccess = true;
} catch (MQTTException e) {
final String error = "Error occurred while delivering message to the subscriber for message :" +
messageMetadata.getMessageID();
log.error(error, e);
throw new AndesException(error, e);
}
} else {
sendSuccess = false;
}
return sendSuccess;
}
/**
* Record the given message ID as a retained message in the tracker.
*
* @param messageID
* Message ID of the retained message
*/
public void recordRetainedMessage(long messageID) {
retainedMessageList.add(messageID);
}
@Override
public boolean isActive() {
return isActive;
}
@Override
public UUID getChannelID() {
return channelID;
}
//TODO: decide how to call this
public void ackReceived(long messageID) {
// Remove if received acknowledgment message id contains in retained message list.
retainedMessageList.remove(messageID);
}
}
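/*
 * A minimal usage sketch, not part of the original file: it shows how such a subscription is
 * typically assembled before the kernel starts delivering messages. The topic filter, the
 * subscription id and the MQTTopicManager instance passed in are assumed values used purely
 * for illustration.
 */
class MQTTLocalSubscriptionUsageSketch {
    static MQTTLocalSubscription wireSubscription(MQTTopicManager topicManager) {
        // One subscription per wildcard destination and node; the channel UUID tracks delivery.
        MQTTLocalSubscription subscription =
                new MQTTLocalSubscription("sensors/+/temperature", UUID.randomUUID(), true);
        subscription.setMqttSubscriptionID("client-42:sensors");         // hypothetical subscription id
        subscription.setSubscriberQOS(QOSLevel.AT_MOST_ONCE.getValue()); // QOS 0: deliveries are implicitly acked
        subscription.setMqqtServerChannel(topicManager);                 // bridge supplied by the protocol engine
        return subscription;
    }
}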
|
apache-2.0
|
ryleyherrington/grammatics
|
build/iphone/Classes/TiFacebookLoginButtonProxy.h
|
753
|
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2011 by Grammatics, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*
* WARNING: This is generated code. Modify at your own risk and without support.
*/
#ifdef USE_TI_FACEBOOK
#import "TiViewProxy.h"
#import "FacebookModule.h"
#import "TiFacebookLoginButton.h"
@interface TiFacebookLoginButtonProxy : TiViewProxy {
FacebookModule *module;
}
-(id)_initWithPageContext:(id<TiEvaluator>)context_ args:(id)args module:(FacebookModule*)module_;
@property(nonatomic,readonly) FacebookModule *_module;
-(void)internalSetWidth:(id)width;
-(void)internalSetHeight:(id)height;
@end
#endif
|
apache-2.0
|
bigtester/automation-test-engine
|
org.bigtester.ate.core/src/main/java/org/bigtester/ate/model/page/elementaction/ITestObjectActionImpl.java
|
1433
|
/*******************************************************************************
* ATE, Automation Test Engine
*
* Copyright 2015, Montreal PROT, or individual contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Montreal PROT.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.bigtester.ate.model.page.elementaction;
import org.eclipse.jdt.annotation.Nullable;
// TODO: Auto-generated Javadoc
/**
* This interface ITestObjectActionImpl defines the capability lookup contract for test object actions.
* @author Peidong Hu
*
*/
public interface ITestObjectActionImpl {
/**
* Gets the capability.
*
* @param <T> the generic type
* @param type the type
* @return the capability
*/
@Nullable
<T> T getCapability (Class<T> type);
}
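/*
 * A minimal sketch, not part of the original source: one way an implementation could satisfy the
 * interface by wrapping an arbitrary object and handing it back only when it matches the requested
 * type. The class and field names are assumed purely for illustration.
 */
class SimpleTestObjectAction implements ITestObjectActionImpl {

    private final Object wrapped;

    SimpleTestObjectAction(final Object wrapped) {
        this.wrapped = wrapped;
    }

    @Override
    @Nullable
    public <T> T getCapability(final Class<T> type) {
        // Return the wrapped object only when it is assignable to the requested capability type.
        return type.isInstance(wrapped) ? type.cast(wrapped) : null;
    }
}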
|
apache-2.0
|
yahoojapan/multiple-dimension-spread
|
src/common/src/test/java/jp/co/yahoo/dataplatform/mds/stats/TestSpreadSummaryStats.java
|
3580
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.yahoo.dataplatform.mds.stats;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.Arguments;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.params.provider.Arguments.arguments;
public class TestSpreadSummaryStats {
@Test
public void T_newInstance_1(){
SpreadSummaryStats stats = new SpreadSummaryStats();
assertEquals( 0 , stats.getLineCount() );
SummaryStats summary = stats.getSummaryStats();
assertEquals( 0 , summary.getRowCount() );
assertEquals( 0 , summary.getRawDataSize() );
assertEquals( 0 , summary.getRealDataSize() );
}
@Test
public void T_newInstance_2(){
SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) );
assertEquals( 5 , stats.getLineCount() );
SummaryStats summary = stats.getSummaryStats();
assertEquals( 10 , summary.getRowCount() );
assertEquals( 100 , summary.getRawDataSize() );
assertEquals( 50 , summary.getRealDataSize() );
System.out.println( stats.toString() );
}
@Test
public void T_merge_1(){
SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) );
stats.merge( new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ) );
assertEquals( 10 , stats.getLineCount() );
SummaryStats summary = stats.getSummaryStats();
assertEquals( 20 , summary.getRowCount() );
assertEquals( 200 , summary.getRawDataSize() );
assertEquals( 100 , summary.getRealDataSize() );
}
@Test
public void T_merge_2(){
SpreadSummaryStats stats = new SpreadSummaryStats();
stats.merge( new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ) );
assertEquals( 5 , stats.getLineCount() );
SummaryStats summary = stats.getSummaryStats();
assertEquals( 10 , summary.getRowCount() );
assertEquals( 100 , summary.getRawDataSize() );
assertEquals( 50 , summary.getRealDataSize() );
}
@Test
public void T_average_1(){
SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) );
assertEquals( 5 , stats.getLineCount() );
SummaryStats summary = stats.getSummaryStats();
assertEquals( 10 , summary.getRowCount() );
assertEquals( 100 , summary.getRawDataSize() );
assertEquals( 50 , summary.getRealDataSize() );
assertEquals( (double)20 , stats.getAverageRecordSize() );
assertEquals( (double)10 , stats.getAverageRecordRealSize() );
assertEquals( (double)2 , stats.getAverageRecordPerField() );
}
}
|
apache-2.0
|
daniellavoie/spring-cloud-netflix
|
spring-cloud-netflix-core/src/test/java/org/springframework/cloud/netflix/rx/SingleReturnValueHandlerTest.java
|
4114
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.netflix.rx;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.IntegrationTest;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.TestRestTemplate;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import rx.Single;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
* Tests the {@link SingleReturnValueHandler} class.
*
* @author Spencer Gibb
* @author Jakub Narloch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = SingleReturnValueHandlerTest.Application.class)
@WebAppConfiguration
@IntegrationTest({"server.port=0"})
@DirtiesContext
public class SingleReturnValueHandlerTest {
@Value("${local.server.port}")
private int port = 0;
private TestRestTemplate restTemplate = new TestRestTemplate();
@Configuration
@EnableAutoConfiguration
@RestController
protected static class Application {
// tag::rx_single[]
@RequestMapping(method = RequestMethod.GET, value = "/single")
public Single<String> single() {
return Single.just("single value");
}
@RequestMapping(method = RequestMethod.GET, value = "/singleWithResponse")
public ResponseEntity<Single<String>> singleWithResponse() {
return new ResponseEntity<>(Single.just("single value"), HttpStatus.NOT_FOUND);
}
@RequestMapping(method = RequestMethod.GET, value = "/throw")
public Single<Object> error() {
return Single.error(new RuntimeException("Unexpected"));
}
// end::rx_single[]
}
@Test
public void shouldRetrieveSingleValue() {
// when
ResponseEntity<String> response = restTemplate.getForEntity(path("/single"), String.class);
// then
assertNotNull(response);
assertEquals(HttpStatus.OK, response.getStatusCode());
assertEquals("single value", response.getBody());
}
@Test
public void shouldRetrieveSingleValueWithStatusCode() {
// when
ResponseEntity<String> response = restTemplate.getForEntity(path("/singleWithResponse"), String.class);
// then
assertNotNull(response);
assertEquals(HttpStatus.NOT_FOUND, response.getStatusCode());
assertEquals("single value", response.getBody());
}
@Test
public void shouldRetrieveErrorResponse() {
// when
ResponseEntity<Object> response = restTemplate.getForEntity(path("/throw"), Object.class);
// then
assertNotNull(response);
assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, response.getStatusCode());
}
private String path(String context) {
return String.format("http://localhost:%d%s", port, context);
}
}
|
apache-2.0
|
djsilenceboy/LearnTest
|
DB_Test/DB2/HADR/Enable_ROS.sh
|
1417
|
#!/bin/sh
#
# Purpose:
# Enable Reads on standby (ROS) for Database using HADR.
#
# Usage:
# ThisScript <DbName>
#
# exit: 0
# 1 - Input parameter error.
#
# Notice:
# 1. When calling this script manually after logging in as the DB2 user.
# a. This is an optional way to apply the change ad hoc.
# b. It is ok to set up the environment with "db2profile" or not, because the environment has already been set up by logging in as the DB2 user.
#
# 2. When calling this script from a cron job created for the DB2 user.
# a. This is the preferred way to apply the change unattended.
# b. It must call "db2profile" to set up the environment.
#
# 3. When calling this script from a cron job created for the root user.
# a. It must "su - <DB2 user>" first.
# b. It must call "db2profile" to set up the environment.
#
# Update log: (date / version / author : comments)
# 2015-03-25 / 1.0.0 / Du Jiang : Creation
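#
# Example invocation (a minimal sketch; the database name, script location and schedule below
# are assumed for illustration only):
# As the DB2 instance owner:
#   ./Enable_ROS.sh SAMPLE
# From a root cron job, switching to the instance owner first:
#   0 2 * * * su - db2inst1 -c "/home/db2inst1/scripts/Enable_ROS.sh SAMPLE" >> /tmp/enable_ros.log 2>&1
#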
. /home/db2inst1/sqllib/db2profile
echo "============================================================"
echo "Begin set DB config: `date`"
echo "----------------------------------------"
echo "Before:"
echo
db2set -all
echo "----------------------------------------"
db2set DB2_HADR_ROS=ON
db2set DB2_STANDBY_ISO=UR
echo "----------------------------------------"
echo "After:"
echo
db2set -all
echo "----------------------------------------"
echo "End set DB config: `date`"
echo "============================================================"
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Scaphyglottis/Scaphyglottis subulata/ Syn. Reichenbachanthus subulatus/README.md
|
203
|
# Reichenbachanthus subulatus (Schltr.) Dressler SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
#### Remarks
null
|
apache-2.0
|