/**
 * Handles mouse events when the user enters, exits, or presses the random button,
 * making the dice shake or stop shaking.
*
* @author David Bahr
*/
private class ShakeDiceListener extends MouseAdapter
{
/**
* Make the dice start shaking.
*/
public void mouseEntered(MouseEvent e)
{
// make the dice shake continuously with 100 milliseconds per shake
randomButton.startShaking(100);
}
/**
* Make the dice stop shaking.
*/
public void mouseExited(MouseEvent e)
{
randomButton.stopShaking();
}
/**
 * Make the dice stop shaking when the button is pressed.
*/
public void mousePressed(MouseEvent e)
{
randomButton.stopShaking();
}
} |
package service.configuration;
import java.util.Properties;
/**
* Created by nitina on 8/26/17.
*/
public interface IConfiguration {
    Properties getProperties();
    String getErrorMessage();
}
|
import sys

# Count how many groups are needed when the input string is split into
# runs of at most 5 consecutive identical characters.
A = sys.stdin.readline().strip()
res = 0
while A:
    t = A[0]; A = A[1:]
    # absorb up to 4 more copies of the same character into this group
    for i in range(1, 5):
        if A and A[0] == t:
            A = A[1:]
    res += 1
print(res)
|
/* Inline helper to place the tv image */
static inline void
put_image (void)
{
blit(bmp,screen,0,0,0,0,320,200);
} |
package cn.xcom.banjing.activity;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.widget.LinearLayoutManager;
import android.view.View;
import android.view.Window;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.jcodecraeer.xrecyclerview.ProgressStyle;
import com.jcodecraeer.xrecyclerview.XRecyclerView;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.loopj.android.http.RequestParams;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import cn.xcom.banjing.R;
import cn.xcom.banjing.adapter.IncomeRecordsAdapter;
import cn.xcom.banjing.bean.UserInfo;
import cn.xcom.banjing.constant.NetConstant;
import cn.xcom.banjing.net.HelperAsyncHttpClient;
import cn.xcom.banjing.utils.LogUtils;
import cn.xcom.banjing.view.DividerItemDecoration;
import cz.msebera.android.httpclient.Header;
/**
* Created by zhuchongkun on 16/6/12.
 * Income and expense records page
*/
public class IncomeRecordsActivity extends BaseActivity implements View.OnClickListener{
private String TAG="IncomeRecordsActivity";
private Context mContext;
private RelativeLayout rl_back;
private XRecyclerView mRecyclerView;
private IncomeRecordsAdapter incomeRecordsAdapter;
private UserInfo userInfo;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_income_records);
mContext=this;
initView();
}
private void initView(){
rl_back= (RelativeLayout) findViewById(R.id.rl_income_records_back);
rl_back.setOnClickListener(this);
mRecyclerView= (XRecyclerView) findViewById(R.id.lv_income_records);
LinearLayoutManager layoutManager = new LinearLayoutManager(mContext);
layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
mRecyclerView.setLayoutManager(layoutManager);
mRecyclerView.setRefreshProgressStyle(ProgressStyle.BallSpinFadeLoader);
mRecyclerView.setLoadingMoreProgressStyle(ProgressStyle.BallRotate);
mRecyclerView.setLoadingListener(new XRecyclerView.LoadingListener() {
@Override
public void onRefresh() {
getIncomeRecordsList();
}
@Override
public void onLoadMore() {
mRecyclerView.loadMoreComplete();
}
});
mRecyclerView.addItemDecoration(new DividerItemDecoration(this,LinearLayoutManager.VERTICAL));
userInfo=new UserInfo(mContext);
getIncomeRecordsList();
}
private void getIncomeRecordsList() {
RequestParams requestParams = new RequestParams();
requestParams.put("userid", userInfo.getUserId());
HelperAsyncHttpClient.get(NetConstant.NET_GET_WALLET_LOG, requestParams, new JsonHttpResponseHandler(){
@Override
public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
super.onSuccess(statusCode, headers, response);
if (response != null){
LogUtils.e(TAG,"--statusCode->"+statusCode+"==>"+response.toString());
try {
String state=response.getString("status");
if (state.equals("success")){
JSONArray jsonArray = response.getJSONArray("data");
incomeRecordsAdapter = new IncomeRecordsAdapter(jsonArray);
mRecyclerView.setAdapter(incomeRecordsAdapter);
}else if (state.equals("error")){
String data = response.getString("data");
Toast.makeText(mContext, data, Toast.LENGTH_SHORT).show();
}
} catch (JSONException e) {
e.printStackTrace();
}
}
mRecyclerView.refreshComplete();
}
});
}
@Override
public void onClick(View v) {
switch (v.getId()){
case R.id.rl_income_records_back:
finish();
break;
}
}
}
|
class Machine:
"""The class that will send data to the machine."""
def __init__(self, *args, **kargs):
self.parent = args[0]
self.update_status = self.parent.sb.SetStatusText
self.read_status = self.parent.sb.GetStatusText
self.com = Communicate(self)
self.steps_per_pixel = 600
self.z_steps_per_pixel = 960
self.p1 = None
self.p2 = None
self.p3 = None
self.base = None
self.height = None
self.normal = None
self.units = 'in'
self.xyz = (0,0,0)
self.pan_angle = .5
self.tilt_angle = 0
def all_points_defined(self):
return self.base and self.height
def draw(self, use_solenoid, img=None):
"""Sends the machine the command to draw the image"""
pan = self._constrain_num(convert.angle_to_percent(self.normal.phi(),
'pan'), (0,1))
tilt = self._constrain_num(convert.angle_to_percent(self.normal.theta(),
'tilt'), (0,1))
xyz = self.p1.tuple()
xyz = self._tuple_to_step(xyz)
base = self.base.tuple()
base = self._tuple_to_step(base)
height = self.height.tuple()
height = self._tuple_to_step(height)
self.com.send_g01(xyz + (pan, tilt), blocking=True)
if use_solenoid:
self.com.send_g03(base, height, img.resize(self.get_pic_pixel_count()))
else:
self.com.send_g02(base, height)
def goto_airbrush_change_position(self):
"""Move the servos to where the airbrush can easily be changed"""
pan = 0.5
tilt = 1
self.com.send_g04(pan, blocking=True)
self.com.send_g05(tilt, blocking=True)
def get_longest_side_size(self):
"""Returns the size in steps of the picture."""
base = Vector(self._tuple_to_step(self.base.tuple()))
height = Vector(self._tuple_to_step(self.height.tuple()))
return (base.longest(), height.longest())
def get_pic_pixel_count(self):
"""Returns the size in pixels of the picture"""
        # integer division keeps the pixel counts integral (matches the
        # original Python 2 "/" semantics on ints)
        return [x // self.steps_per_pixel for x in self.get_longest_side_size()]
def get_pic_size(self):
"""Get the size of the picture to be drawn"""
if self.all_points_defined():
return (self.base.length(), self.height.length(), self.units)
else:
return None
def set_points(self, p1=None, p2=None, p3=None):
"""Set the points for the machine.
p1 and p2 determine the base, p3 determines the height
"""
        if p1 is not None:
            self.p1 = Vector(p1)
        if p2 is not None:
            self.p2 = Vector(p2)
        if p3 is not None:
            self.p3 = Vector(p3)
if self._plane_points_defined():
self.base = self.p2 - self.p1
self.height = self.p3 - self.p1
if self.height.length() != 0:
self.height -= self.base.proj(self.height)
self.parent.plane_points.update_size_text()
self.normal = self.base.cross(self.height)
if self.normal.theta() > 90:
self.normal = -self.normal
print "Size: " + repr(self.base) + ", " + repr(self.height)
print "Tilt:" + repr(self.normal.theta())
print "Pan:" + repr(self.normal.phi())
else:
self.height = None
def jog(self, axis, num, unit = 'step'):
"""Moves num units in axis direction"""
if axis in 'xX':
num = convert.unit_to_step(num, unit, axis)
self.com.send_g00((num, 0, 0))
elif axis in 'yY':
num = convert.unit_to_step(num, unit, axis)
self.com.send_g00((0, num, 0))
elif axis in 'zZ':
num = convert.unit_to_step(num, unit, axis)
self.com.send_g00((0, 0, num))
elif axis in 'Panpan':
oldang = convert.percent_to_angle(self.pan_angle, 'pan')
num = convert.angle_to_percent(oldang + num, axis)
num = self._constrain_num(num, (0,1))
self.com.send_g04(num)
self.pan_angle = num
elif axis in 'Tiltilt':
oldang = convert.percent_to_angle(self.tilt_angle, 'tilt')
num = convert.angle_to_percent(oldang + num, axis)
num = self._constrain_num(num, (0,1))
self.com.send_g05(num)
self.tilt_angle = num
def move(self, values):
"""Moves machine in absolute, rather than relative"""
xyz = values[:3]
xyz = self._tuple_to_step(xyz)
pan = convert.angle_to_percent(values[3], 'pan')
pan = self._constrain_num(pan, (0,1))
self.pan_angle = pan
tilt = convert.angle_to_percent(values[4], 'tilt')
tilt = self._constrain_num(tilt, (0,1))
self.tilt_angle = tilt
self.com.send_g01(xyz + (pan, tilt))
def pause(self):
"""Tells the machine to pause"""
        self.com.send_g09()
def run_solenoid(self, run_solenoid):
"""run the solenoid"""
if run_solenoid:
self.com.send_g07(1)
else:
self.com.send_g07(0)
def send_settings(self, spp, id, sb, fr):
self.steps_per_pixel = spp
self.com.send_g06([spp, id, sb, fr])
def set_status_function(self, f):
"""Set the function that allows you to change the status bar"""
self.update_status = f
def set_units(self, units):
"""Change the units to a new type"""
self.units = units
#send event
def set_bitmap(self, pic):
self.pic = pic
def stop(self):
"""Tells the machine to stop moving the stepper motors"""
        self.com.send_g0a()
def update_positions(self, values):
"""Tells the position panel to update everything"""
        print('We got ' + repr(values))
self.pan_angle = values[3]
self.tilt_angle = values[4]
self.xyz = self._step_to_tuple(values[:3])
x, y, z = self.xyz
pan = convert.percent_to_angle(self.pan_angle, 'pan')
tilt = convert.percent_to_angle(self.tilt_angle, 'tilt')
self.parent.positions.update((x, y, z, pan, tilt))
    def _constrain_num(self, x, bounds):
        """Clamp x to the closed interval [bounds[0], bounds[1]]."""
        if x < bounds[0]:
            return bounds[0]
        elif x > bounds[1]:
            return bounds[1]
        else:
            return x
def _get_img_data(self):
return self.pic.resize(self.get_pic_size()[:2]).convert('1')
def _plane_points_defined(self):
return self.p1 and self.p2 and self.p3
    def _tuple_to_step(self, xyz):
        """Convert an (x, y, z) tuple from the current units into steps."""
        return (convert.unit_to_step(xyz[0], self.units, 'x'),
                convert.unit_to_step(xyz[1], self.units, 'y'),
                convert.unit_to_step(xyz[2], self.units, 'z'))
    def _step_to_tuple(self, steps):
        """Convert an (x, y, z) tuple from steps back into the current units."""
        return (convert.step_to_unit(steps[0], self.units, 'x'),
                convert.step_to_unit(steps[1], self.units, 'y'),
                convert.step_to_unit(steps[2], self.units, 'z')) |
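A note on the geometry in `set_points` above: `base` and `height` span the drawing plane, `height` is made orthogonal to `base` by subtracting its projection, and their cross product gives the plane normal used for pan/tilt. A minimal numpy sketch of that computation (numpy stands in for the project's `Vector` class, which is an assumption here):

import numpy as np

p1, p2, p3 = np.array([0., 0., 0.]), np.array([10., 0., 0.]), np.array([3., 8., 0.])
base = p2 - p1                                     # first plane axis
height = p3 - p1
# subtract the projection of height onto base so the two axes are orthogonal
height = height - (height @ base) / (base @ base) * base
normal = np.cross(base, height)                    # plane orientation for pan/tilt
print(base, height, normal)                        # [10. 0. 0.] [0. 8. 0.] [0. 0. 80.]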
/**
 * Refreshes the list of online users.
 * @param message the chat message carrying the current set of online users
 */
private void refreshOnlines(ChatMessage message) {
sys.EscreveLogConsole("ClienteInterface - Metodo refreshOnlines", "***INICIO***");
sys.EscreveLogConsole("ClienteInterface - Metodo refreshOnlines", "Usuario : " + message.getName());
sys.EscreveLogConsole("ClienteInterface - Metodo refreshOnlines", "Usuarios : " + message.getSetOnlines().toString());
Set<String> names = message.getSetOnlines();
String[] array = names.toArray(new String[0]);
this.listOnlines.setListData(array);
this.listOnlines.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
this.listOnlines.setLayoutOrientation(JList.VERTICAL);
sys.EscreveLogConsole("ClienteInterface - Metodo refreshOnlines", "***FIM***");
} |
/*
* Copyright (C) 2015-2017 Alibaba Group Holding Limited
*/
#include <string.h>
#include "yunit.h"
#include "core/router_mgr.h"
#include "core/mesh_mgmt.h"
#include "hal/interfaces.h"
void test_uradar_sid_router_case(void)
{
network_context_t network = {
.meshnetid = 0x100,
.attach_state = ATTACH_DONE
};
netids_t netids;
YUNIT_ASSERT(UR_ERROR_NONE == ur_router_set_default_router(SID_ROUTER));
network.router = ur_get_router_by_id(SID_ROUTER);
network.router->cb.start();
netids.meshnetid = network.meshnetid;
netids.sid = 0x1000;
network.router->cb.handle_subscribe_event(EVENT_SID_UPDATED, (uint8_t *)&netids, sizeof(netids_t));
YUNIT_ASSERT(0x1000 == ur_router_get_next_hop(&network, 0x1000));
YUNIT_ASSERT(0x0000 == ur_router_get_next_hop(&network, 0x0000));
YUNIT_ASSERT(0x0000 == ur_router_get_next_hop(&network, 0x2000));
YUNIT_ASSERT(0x0000 == ur_router_get_next_hop(&network, 0x2100));
YUNIT_ASSERT(0x1100 == ur_router_get_next_hop(&network, 0x1100));
YUNIT_ASSERT(0x1100 == ur_router_get_next_hop(&network, 0x1110));
YUNIT_ASSERT(0x1100 == ur_router_get_next_hop(&network, 0x1120));
}
|
# Imports required by MultilabelPredictor below (module paths follow the
# AutoGluon multilabel tutorial; exact locations may differ across versions).
import os
from autogluon.tabular import TabularDataset, TabularPredictor
from autogluon.common.utils.utils import setup_outputdir
from autogluon.core.utils.loaders import load_pkl
from autogluon.core.utils.savers import save_pkl
class MultilabelPredictor:
""" Tabular Predictor for predicting multiple columns in table.
Creates multiple TabularPredictor objects which you can also use individually.
You can access the TabularPredictor for a particular label via: `multilabel_predictor.get_predictor(label_i)`
Parameters
----------
labels : List[str]
The ith element of this list is the column (i.e. `label`) predicted by the ith TabularPredictor stored in this object.
path : str
Path to directory where models and intermediate outputs should be saved.
If unspecified, a time-stamped folder called "AutogluonModels/ag-[TIMESTAMP]" will be created in the working directory to store all models.
Note: To call `fit()` twice and save all results of each fit, you must either specify different `path` locations or not specify `path` at all.
Otherwise files from first `fit()` will be overwritten by second `fit()`.
Caution: when predicting many labels, this directory may grow large as it needs to store many TabularPredictors.
problem_types : List[str]
The ith element is the `problem_type` for the ith TabularPredictor stored in this object.
eval_metrics : List[str]
The ith element is the `eval_metric` for the ith TabularPredictor stored in this object.
consider_labels_correlation : bool
Whether the predictions of multiple labels should account for label correlations or predict each label independently of the others.
If True, the ordering of `labels` may affect resulting accuracy as each label is predicted conditional on the previous labels appearing earlier in this list (i.e. in an auto-regressive fashion).
Set to False if during inference you may want to individually use just the ith TabularPredictor without predicting all the other labels.
kwargs :
Arguments passed into the initialization of each TabularPredictor.
"""
multi_predictor_file = "multilabel_predictor.pkl"
def __init__(
self,
labels,
path,
problem_types=None,
eval_metrics=None,
consider_labels_correlation=True,
**kwargs,
):
if len(labels) < 2:
raise ValueError(
"MultilabelPredictor is only intended for predicting MULTIPLE labels (columns), use TabularPredictor for predicting one label (column)."
)
self.path = setup_outputdir(path, warn_if_exist=False)
self.labels = labels
self.consider_labels_correlation = consider_labels_correlation
self.predictors = (
{}
) # key = label, value = TabularPredictor or str path to the TabularPredictor for this label
if eval_metrics is None:
self.eval_metrics = {}
else:
self.eval_metrics = {labels[i]: eval_metrics[i] for i in range(len(labels))}
problem_type = None
eval_metric = None
for i in range(len(labels)):
label = labels[i]
path_i = self.path + "Predictor_" + label
if problem_types is not None:
problem_type = problem_types[i]
if eval_metrics is not None:
eval_metric = self.eval_metrics[label]
self.predictors[label] = TabularPredictor(
label=label,
problem_type=problem_type,
eval_metric=eval_metric,
path=path_i,
**kwargs,
)
def fit(self, train_data, tuning_data=None, **kwargs):
""" Fits a separate TabularPredictor to predict each of the labels.
Parameters
----------
train_data, tuning_data : str or autogluon.tabular.TabularDataset or pd.DataFrame
See documentation for `TabularPredictor.fit()`.
kwargs :
Arguments passed into the `fit()` call for each TabularPredictor.
"""
if isinstance(train_data, str):
train_data = TabularDataset(train_data)
if tuning_data is not None and isinstance(tuning_data, str):
tuning_data = TabularDataset(tuning_data)
train_data_og = train_data.copy()
if tuning_data is not None:
tuning_data_og = tuning_data.copy()
else:
tuning_data_og = None
save_metrics = len(self.eval_metrics) == 0
for i in range(len(self.labels)):
label = self.labels[i]
predictor = self.get_predictor(label)
if not self.consider_labels_correlation:
labels_to_drop = [l for l in self.labels if l != label]
else:
labels_to_drop = [
self.labels[j] for j in range(i + 1, len(self.labels))
]
train_data = train_data_og.drop(labels_to_drop, axis=1)
if tuning_data is not None:
tuning_data = tuning_data_og.drop(labels_to_drop, axis=1)
print(f"Fitting TabularPredictor for label: {label} ...")
predictor.fit(train_data=train_data, tuning_data=tuning_data, **kwargs)
self.predictors[label] = predictor.path
if save_metrics:
self.eval_metrics[label] = predictor.eval_metric
self.save()
def predict(self, data, **kwargs):
""" Returns DataFrame with label columns containing predictions for each label.
Parameters
----------
data : str or autogluon.tabular.TabularDataset or pd.DataFrame
Data to make predictions for. If label columns are present in this data, they will be ignored. See documentation for `TabularPredictor.predict()`.
kwargs :
Arguments passed into the predict() call for each TabularPredictor.
"""
return self._predict(data, as_proba=False, **kwargs)
def predict_proba(self, data, **kwargs):
""" Returns dict where each key is a label and the corresponding value is the `predict_proba()` output for just that label.
Parameters
----------
data : str or autogluon.tabular.TabularDataset or pd.DataFrame
Data to make predictions for. See documentation for `TabularPredictor.predict()` and `TabularPredictor.predict_proba()`.
kwargs :
Arguments passed into the `predict_proba()` call for each TabularPredictor (also passed into a `predict()` call).
"""
return self._predict(data, as_proba=True, **kwargs)
def evaluate(self, data, **kwargs):
""" Returns dict where each key is a label and the corresponding value is the `evaluate()` output for just that label.
Parameters
----------
data : str or autogluon.tabular.TabularDataset or pd.DataFrame
Data to evaluate predictions of all labels for; must contain all labels as columns. See documentation for `TabularPredictor.evaluate()`.
kwargs :
Arguments passed into the `evaluate()` call for each TabularPredictor (also passed into the `predict()` call).
"""
data = self._get_data(data)
eval_dict = {}
for label in self.labels:
print(f"Evaluating TabularPredictor for label: {label} ...")
predictor = self.get_predictor(label)
eval_dict[label] = predictor.evaluate(data, **kwargs)
if self.consider_labels_correlation:
data[label] = predictor.predict(data, **kwargs)
return eval_dict
def save(self):
""" Save MultilabelPredictor to disk. """
for label in self.labels:
if not isinstance(self.predictors[label], str):
self.predictors[label] = self.predictors[label].path
save_pkl.save(path=self.path + self.multi_predictor_file, object=self)
print(
f"MultilabelPredictor saved to disk. Load with: MultilabelPredictor.load('{self.path}')"
)
@classmethod
def load(cls, path):
""" Load MultilabelPredictor from disk `path` previously specified when creating this MultilabelPredictor. """
path = os.path.expanduser(path)
if path[-1] != os.path.sep:
path = path + os.path.sep
return load_pkl.load(path=path + cls.multi_predictor_file)
def get_predictor(self, label):
""" Returns TabularPredictor which is used to predict this label. """
predictor = self.predictors[label]
if isinstance(predictor, str):
return TabularPredictor.load(path=predictor)
return predictor
def _get_data(self, data):
if isinstance(data, str):
return TabularDataset(data)
return data.copy()
def _predict(self, data, as_proba=False, **kwargs):
data = self._get_data(data)
if as_proba:
predproba_dict = {}
for label in self.labels:
print(f"Predicting with TabularPredictor for label: {label} ...")
predictor = self.get_predictor(label)
if as_proba:
predproba_dict[label] = predictor.predict_proba(
data, as_multiclass=True, **kwargs
)
data[label] = predictor.predict(data, **kwargs)
if not as_proba:
return data[self.labels]
else:
return predproba_dict |
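A short usage sketch for the class above; the file names and label columns are illustrative assumptions, not part of the original:

train_data = TabularDataset('train.csv')   # hypothetical file with feature columns plus 'y1' and 'y2'
multi_predictor = MultilabelPredictor(labels=['y1', 'y2'], path='agModels-multilabel')
multi_predictor.fit(train_data, time_limit=60)   # kwargs are forwarded to each TabularPredictor.fit()
predictions = multi_predictor.predict(TabularDataset('test.csv'))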
import time
from backend.utilities.hash import Hash
from backend.utilities.hex_bin import hex_to_bin
from backend.config import MINE_RATE
GENESIS_DATA = {
"timestamp": 1,
"last_hash": "genesis_last_hash",
"data": "genesis_data",
"hash": str(Hash.hash("genesis_hash")),
"difficulty": 3,
"nonce": 0
}
class Block:
"""
Block: This is a unit of storage of a transaction
contains: data, last_hash, hash
"""
def __init__(self, timestamp, last_hash, data, hash, difficulty, nonce):
self.timestamp = timestamp
self.last_hash = last_hash
self.data = data
self.hash = hash
self.difficulty = difficulty
self.nonce = nonce
def __repr__(self):
return (
"Block( "
f"timestamp: {self.timestamp}, "
f"last_hash: {self.last_hash}, "
f"data: {self.data}, "
f"hash: {self.hash}, "
f"difficulty: {self.difficulty}, "
f"nonce: {self.nonce} )"
)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def to_json(self):
return self.__dict__
@staticmethod
def from_json(json_block):
"""
deserialises the json data
"""
return Block(**json_block)
@staticmethod
def mine_block(last_block, data):
"""
This is used to mine/validate our blocks
It is responsible for doing the proof of work using the leading 0's proof of work
"""
block_timestamp = time.time_ns()
block_last_hash = last_block.hash
block_data = data
block_difficulty = Block.dynamic_difficulty(last_block, block_timestamp)
block_nonce = 0
block_hash = str(Hash.hash(block_timestamp, block_last_hash, block_data, block_difficulty, block_nonce))
while hex_to_bin(block_hash)[0:block_difficulty] != '0' * block_difficulty:
block_nonce += 1
block_timestamp = time.time_ns()
block_difficulty = Block.dynamic_difficulty(last_block, block_timestamp)
block_hash = str(Hash.hash(block_timestamp, block_last_hash, block_data, block_difficulty, block_nonce))
return Block(block_timestamp, block_last_hash, block_data, block_hash, block_difficulty, block_nonce)
@staticmethod
def genesis():
return Block(**GENESIS_DATA) #unpacks the genesis data dictionary
@staticmethod
def dynamic_difficulty(last_block, new_timestamp):
"""
        Adjusts the block difficulty rate dynamically
"""
        if (new_timestamp - last_block.timestamp) < MINE_RATE:
            return last_block.difficulty + 1
        if (last_block.difficulty - 1) > 0:
            return last_block.difficulty - 1
        return 1
@staticmethod
def block_validity(last_block, block):
"""
        Ensures that the block to be added meets the following requirements:
-The last hash of the block is equal to the last-block hash
-The block meets the proof of work requirements
-The block's difficulty adjusts by 1
-The block hash is the same as the result of the hashed data
"""
if block.last_hash != last_block.hash:
raise Exception("The last hash is invalid !!!")
if hex_to_bin(block.hash)[0:block.difficulty] != '0' * block.difficulty:
raise Exception("The proof of work requirement was not met !!!")
if abs(block.difficulty - last_block.difficulty) > 1:
raise Exception("The block difficulty adjustment is not 1 !!!")
hashed_data = str(Hash.hash(
block.timestamp,
block.last_hash,
block.data,
block.difficulty,
block.nonce
))
if hashed_data != block.hash:
raise Exception("The block hash is invalid !!!")
def main():
"""
This will be executed when the block.py file is run
"""
# genesis_block = Block.genesis()
# block = Block.mine_block(genesis_block, "One")
# print(block)
last_block = Block.genesis()
bad_block = Block.mine_block(last_block, "foo-bar")
# bad_block.last_hash = "bad_hash"
bad_block.difficulty = 5
bad_block.data = "bar-foo"
try:
Block.block_validity(last_block, bad_block)
except Exception as e:
print(f"Error: {e}")
if __name__ == "__main__":
main() |
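`mine_block` and `block_validity` above compare `hex_to_bin(block_hash)[0:difficulty]` against a run of '0' characters, so the imported `hex_to_bin` must expand every hex digit to four bits. A minimal sketch, assuming the project's helper behaves this way:

def hex_to_bin(hex_string):
    # expand each hex character to its zero-padded 4-bit binary form so
    # leading zeros survive and the difficulty comparison lines up
    return ''.join(bin(int(char, 16))[2:].zfill(4) for char in hex_string)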
MILWAUKEE - Not surprisingly, Heat interest appears to be rising significantly during this 11-game winning streak. Some tangible signs:
• Five of Fox Sports Sun's 10 most watched Heat games this season have come during the winning streak, and viewership of Heat games has risen 33 percent over the full season average.
Monday's win drew a 4.1 rating (equal to 4.1 percent of homes in the Miami-Fort Lauderdale market) and was the most watched English language program in South Florida from 8 to 10:30 p.m., beating The Bachelor on ABC (3.7) and Apprentice on NBC (1.6).
• Meanwhile, ticket prices on the secondary market "have definitely gone up" during the winning streak, according to Michael Lipman, CEO of Tickets of America and White Glove International. "It's a monster."
Lipman said the prices of courtside seats, on the secondary market, have risen from the $1,500-to-$2,500 range to $3,000. For seats 10 rows up, prices have risen from about $250 to between $400 and $500. And upper-level tickets have soared from the $25-to-$30 range to $75, Lipman said.
A couple of other quick things:
• One reason why Heat center Willie Reed could get something approaching an $8 million midlevel exception this summer (or at the very least, more than $1.5 million that he would be owed next season if he doesn’t opt out of his contract): He’s averaging 17.5 points and 9.75 rebounds and shooting 68.8 percent in his last four starts filling in for Hassan Whiteside.
• Consider this about the Goran Dragic/Dion Waiters backcourt: Miami is 15-9 when they start together and has outscored opponents by 48 points when they’re on the court together during this winning streak.
Waiters is doubtful for Wednesday’s game with an ankle sprain. Please click here for details about that after today’s morning shootaround at Bradley Center.
<gh_stars>100-1000
'''
A collection of utility functions.
'''
import numpy as np
import scipy.stats as stats
from typing import Iterable
def sim_seasonal_data(n_series, timesteps, measure_noise,
freq=None, level=None, amp=None):
"""Generate sinusoidal data with periodic patterns.
Parameters
----------
n_series : int
Number of timeseries to generate.
timesteps : int
How many timesteps every generated series must have.
measure_noise : int
The noise present in the signals.
freq : int or 1D array-like, default=None
The frequencies of the sinusoidal timeseries to generate.
If a single integer is passed, all the series generated have
the same frequencies. If a 1D array-like is passed, the
frequencies of timeseries are random sampled from the iterable
passed. If None, the frequencies are random generated.
level : int or 1D array-like, default=None
The levels of the sinusoidal timeseries to generate.
If a single integer is passed, all the series generated have
the same levels. If a 1D array-like is passed, the levels
of timeseries are random sampled from the iterable passed.
If None, the levels are random generated.
amp : int or 1D array-like, default=None
The amplitudes of the sinusoidal timeseries to generate.
If a single integer is passed, all the series generated have
the same amplitudes. If a 1D array-like is passed, the amplitudes
of timeseries are random sampled from the iterable passed.
If None, the amplitudes are random generated.
Returns
-------
data : array of shape (series, timesteps)
The generated sinusoidal timeseries.
"""
if freq is None:
freq = np.random.randint(3, int(np.sqrt(timesteps)), (n_series, 1))
elif isinstance(freq, Iterable):
freq = np.random.choice(freq, size=n_series)[:, None]
else:
freq = np.asarray([[freq]] * n_series)
if level is None:
level = np.random.uniform(-100, 100, (n_series, 1))
elif isinstance(level, Iterable):
level = np.random.choice(level, size=n_series)[:, None]
else:
level = np.asarray([[level]] * n_series)
if amp is None:
amp = np.random.uniform(3, 100, (n_series, 1))
elif isinstance(amp, Iterable):
amp = np.random.choice(amp, size=n_series)[:, None]
else:
amp = np.asarray([[amp]] * n_series)
t = np.repeat([np.arange(timesteps)], n_series, axis=0)
e = np.random.normal(0, measure_noise, (n_series, timesteps))
data = level + amp * np.sin(t * (2 * np.pi / freq)) + e
return data
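# Usage sketch (arbitrary values): returns an array of shape (n_series, timesteps).
#
#   sims = sim_seasonal_data(n_series=4, timesteps=200, measure_noise=5,
#                            freq=24, level=10, amp=20)
#   sims.shape  # (4, 200)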
def sim_randomwalk(n_series, timesteps, process_noise, measure_noise,
level=None):
"""Generate randomwalks.
Parameters
----------
n_series : int
Number of randomwalks to generate.
timesteps : int
How many timesteps every generated randomwalks must have.
process_noise : int
The noise present in randomwalks creation.
measure_noise : int
The noise present in the signals.
level : int or 1D array-like, default=None
The levels of the randomwalks to generate.
If a single integer is passed, all the randomwalks have
the same levels. If a 1D array-like is passed, the levels
of the randomwalks are random sampled from the iterable
passed. If None, the levels are set to 0 for all the series.
Returns
-------
data : array of shape (series, timesteps)
The generated randomwalks.
"""
if level is None:
level = 0
if isinstance(level, Iterable):
level = np.random.choice(level, size=n_series)[:, None]
else:
level = np.asarray([[level]] * n_series)
data = np.random.normal(0, process_noise, size=(n_series, timesteps))
e = np.random.normal(0, measure_noise, size=(n_series, timesteps))
data = level + np.cumsum(data, axis=1) + e
return data
def create_windows(data, window_shape, step=1,
start_id=None, end_id=None):
"""Create sliding windows of the same length from the series
received as input.
create_windows vectorizes, in an efficient way, the windows creation
on all the series received.
Parameters
----------
    data : 2D array of shape (timesteps, series)
Timeseries to slide into equal size windows.
window_shape : int
        Greater than or equal to 1. The length of the sliding windows used to divide
the input series.
step : int, default=1
The step used to generate the sliding windows. The overlapping
portion of two adjacent windows can be defined as
(window_shape - step).
start_id : int, default=None
The starting position from where operate slicing. The same for
all the series. If None, the windows are generated from the index 0.
end_id : int, default=None
The ending position of the slicing operation. The same for all the
series. If None, the windows end on the last position available.
Returns
-------
window_data : 3D array of shape (window_slices, window_shape, series)
The input data sliced into windows of the same lengths.
"""
data = np.asarray(data)
if data.ndim != 2:
raise ValueError(
"Pass a 2D array-like in the format (timestemps, series)")
if window_shape < 1:
raise ValueError("window_shape must be >= 1")
if start_id is None:
start_id = 0
if end_id is None:
end_id = data.shape[0]
data = data[int(start_id):int(end_id), :]
window_shape = (int(window_shape), data.shape[-1])
step = (int(step),) * data.ndim
slices = tuple(slice(None, None, st) for st in step)
indexing_strides = data[slices].strides
win_indices_shape = ((np.array(data.shape) - window_shape) // step) + 1
new_shape = tuple(list(win_indices_shape) + list(window_shape))
strides = tuple(list(indexing_strides) + list(data.strides))
window_data = np.lib.stride_tricks.as_strided(
data, shape=new_shape, strides=strides)
return np.squeeze(window_data, 1)
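# Usage sketch (arbitrary sizes): with 100 timesteps, window_shape=20 and
# step=5 there are (100 - 20) // 5 + 1 = 17 windows.
#
#   series = np.random.randn(100, 3)                       # (timesteps, series)
#   create_windows(series, window_shape=20, step=5).shape  # (17, 20, 3)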
def sigma_interval(true, prediction, n_sigma):
"""Compute smoothing intervals as n_sigma times the residuals of the
smoothing process.
Returns
-------
low : array
Lower bands.
up : array
Upper bands.
"""
std = np.nanstd(true - prediction, axis=1, keepdims=True)
low = prediction - n_sigma * std
up = prediction + n_sigma * std
return low, up
def kalman_interval(true, prediction, cov, confidence=0.05):
"""Compute smoothing intervals from a Kalman smoothing process.
Returns
-------
low : array
Lower bands.
up : array
Upper bands.
"""
g = stats.norm.ppf(1 - confidence / 2)
resid = true - prediction
std_err = np.sqrt(np.nanmean(np.square(resid), axis=1, keepdims=True))
low = prediction - g * (std_err * cov)
up = prediction + g * (std_err * cov)
return low, up
def confidence_interval(true, prediction, exog, confidence,
add_intercept=True):
"""Compute confidence intervals for regression tasks.
Returns
-------
low : array
Lower bands.
up : array
Upper bands.
"""
if exog.ndim == 1:
exog = exog[:, None]
if add_intercept:
exog = np.concatenate([np.ones((len(exog), 1)), exog], axis=1)
N = exog.shape[0]
d_free = exog.shape[1]
t = stats.t.ppf(1 - confidence / 2, N - d_free)
resid = true - prediction
mse = (np.square(resid).sum(axis=1, keepdims=True) / (N - d_free)).T
hat_matrix_diag = (exog * np.linalg.pinv(exog).T).sum(axis=1, keepdims=True)
predict_mean_se = np.sqrt(hat_matrix_diag * mse).T
low = prediction - t * predict_mean_se
up = prediction + t * predict_mean_se
return low, up
def prediction_interval(true, prediction, exog, confidence,
add_intercept=True):
"""Compute prediction intervals for regression tasks.
Returns
-------
low : array
Lower bands.
up : array
Upper bands.
"""
if exog.ndim == 1:
exog = exog[:, None]
if add_intercept:
exog = np.concatenate([np.ones((len(exog), 1)), exog], axis=1)
N = exog.shape[0]
d_free = exog.shape[1]
t = stats.t.ppf(1 - confidence / 2, N - d_free)
resid = true - prediction
mse = (np.square(resid).sum(axis=1, keepdims=True) / (N - d_free)).T
covb = np.linalg.pinv(np.dot(exog.T, exog))[..., None] * mse
predvar = mse + (exog[..., None] *
np.dot(covb.transpose(2, 0, 1), exog.T).T).sum(1)
predstd = np.sqrt(predvar).T
low = prediction - t * predstd
up = prediction + t * predstd
return low, up
def _check_noise_dict(noise_dict, component):
"""Ensure noise compatibility for the noises of the components
provided when building a state space model.
Returns
-------
noise_dict : dict
Checked input.
"""
sub_component = component.split('_')
if isinstance(noise_dict, dict):
for c in sub_component:
if c not in noise_dict:
raise ValueError(
"You need to provide noise for '{}' component".format(c))
if noise_dict[c] < 0:
raise ValueError(
"noise for '{}' must be >= 0".format(c))
return noise_dict
else:
raise ValueError(
"noise should be a dict. Received {}".format(type(noise_dict)))
def _check_knots(knots, min_n_knots):
"""Ensure knots compatibility for the knots provided when building
bases for linear regression.
Returns
-------
knots : array
Checked input.
"""
knots = np.asarray(knots, dtype=np.float64)
if np.prod(knots.shape) == np.max(knots.shape):
knots = knots.ravel()
if knots.ndim != 1:
raise ValueError("knots must be a list or 1D array")
knots = np.unique(knots)
min_k, max_k = knots[0], knots[-1]
if min_k < 0 or max_k > 1:
raise ValueError("Every knot must be in the range [0,1]")
if min_k > 0:
knots = np.append(0., knots)
if max_k < 1:
knots = np.append(knots, 1.)
if knots.shape[0] < min_n_knots + 2:
raise ValueError(
"Provide at least {} knots in the range (0,1)".format(min_n_knots))
return knots
def _check_weights(weights, basis_len):
"""Ensure weights compatibility for the weights provided in
linear regression applications.
Returns
-------
weights : array
Checked input.
"""
if weights is None:
return np.ones(basis_len, dtype=np.float64)
weights = np.asarray(weights, dtype=np.float64)
if np.prod(weights.shape) == np.max(weights.shape):
weights = weights.ravel()
if weights.ndim != 1:
raise ValueError("Sample weights must be a list or 1D array")
if weights.shape[0] != basis_len:
raise ValueError(
"Sample weights length must be equal to timesteps "
"dimension of the data received")
if np.any(weights < 0):
raise ValueError("weights must be >= 0")
if np.logical_or(np.isnan(weights), np.isinf(weights)).any():
raise ValueError("weights must not contain NaNs or Inf")
return weights
def _check_data(data):
"""Ensure data compatibility for the series received by the smoother.
Returns
-------
data : array
Checked input.
"""
data = np.asarray(data)
if np.prod(data.shape) == np.max(data.shape):
data = data.ravel()
if data.ndim > 2:
raise ValueError(
"The format of data received is not appropriate. "
"Pass an object with data in this format (series, timesteps)")
if data.ndim == 0:
raise ValueError(
"Pass an object with data in this format (series, timesteps)")
if data.dtype not in [np.float16, np.float32, np.float64,
np.int8, np.int16, np.int32, np.int64]:
raise ValueError("data contains not numeric types")
if np.logical_or(np.isnan(data), np.isinf(data)).any():
raise ValueError("data must not contain NaNs or Inf")
return data.T
def _check_data_nan(data):
"""Ensure data compatibility for the series received by the smoother.
(Without checking for inf and nans).
Returns
-------
data : array
Checked input.
"""
data = np.asarray(data)
if np.prod(data.shape) == np.max(data.shape):
data = data.ravel()
if data.ndim > 2:
raise ValueError(
"The format of data received is not appropriate. "
"Pass an objet with data in this format (series, timesteps)")
if data.ndim == 0:
raise ValueError(
"Pass an object with data in this format (series, timesteps)")
if data.dtype not in [np.float16, np.float32, np.float64,
np.int8, np.int16, np.int32, np.int64]:
raise ValueError("data contains not numeric types")
return data
def _check_output(output, transpose=True):
"""Ensure output compatibility for the series returned by the smoother.
Returns
-------
output : array
Checked input.
"""
if transpose:
output = output.T
if output.ndim == 1:
output = output[None, :]
return output
def _id_nb_bootstrap(n_obs, block_length):
"""Create bootstrapped indexes with the none overlapping block bootstrap
('nbb') strategy given the number of observations in a timeseries and
the length of the blocks.
Returns
-------
_id : array
Bootstrapped indexes.
"""
n_blocks = int(np.ceil(n_obs / block_length))
nexts = np.repeat([np.arange(0, block_length)], n_blocks, axis=0)
blocks = np.random.permutation(
np.arange(0, n_obs, block_length)
).reshape(-1, 1)
_id = (blocks + nexts).ravel()[:n_obs]
return _id
def _id_mb_bootstrap(n_obs, block_length):
"""Create bootstrapped indexes with the moving block bootstrap
('mbb') strategy given the number of observations in a timeseries
and the length of the blocks.
Returns
-------
_id : array
Bootstrapped indexes.
"""
n_blocks = int(np.ceil(n_obs / block_length))
nexts = np.repeat([np.arange(0, block_length)], n_blocks, axis=0)
last_block = n_obs - block_length
blocks = np.random.randint(0, last_block, (n_blocks, 1))
_id = (blocks + nexts).ravel()[:n_obs]
return _id
def _id_cb_bootstrap(n_obs, block_length):
"""Create bootstrapped indexes with the circular block bootstrap
('cbb') strategy given the number of observations in a timeseries
and the length of the blocks.
Returns
-------
_id : array
Bootstrapped indexes.
"""
n_blocks = int(np.ceil(n_obs / block_length))
nexts = np.repeat([np.arange(0, block_length)], n_blocks, axis=0)
last_block = n_obs
blocks = np.random.randint(0, last_block, (n_blocks, 1))
_id = np.mod((blocks + nexts).ravel(), n_obs)[:n_obs]
return _id
def _id_s_bootstrap(n_obs, block_length):
"""Create bootstrapped indexes with the stationary bootstrap
('sb') strategy given the number of observations in a timeseries
and the length of the blocks.
Returns
-------
_id : array
Bootstrapped indexes.
"""
random_block_length = np.random.poisson(block_length, n_obs)
random_block_length[random_block_length < 3] = 3
random_block_length[random_block_length >= n_obs] = n_obs
random_block_length = random_block_length[random_block_length.cumsum() <= n_obs]
residual_block = n_obs - random_block_length.sum()
if residual_block > 0:
random_block_length = np.append(random_block_length, residual_block)
n_blocks = random_block_length.shape[0]
nexts = np.zeros((n_blocks, random_block_length.max() + 1))
nexts[np.arange(n_blocks), random_block_length] = 1
nexts = np.flip(nexts, 1).cumsum(1).cumsum(1).ravel()
nexts = (nexts[nexts > 1] - 2).astype(int)
last_block = n_obs - random_block_length.max()
blocks = np.zeros(n_obs, dtype=int)
if last_block > 0:
blocks = np.random.randint(0, last_block, n_blocks)
blocks = np.repeat(blocks, random_block_length)
_id = blocks + nexts
return _id |
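Each of the four bootstrap helpers above returns an index vector of length n_obs built from blocks, which can be used to resample a timeseries. A quick check of the moving block variant, assuming the module above is in scope:

import numpy as np

np.random.seed(0)                                   # reproducible demo
print(_id_mb_bootstrap(n_obs=12, block_length=4))   # 12 indexes: 3 contiguous blocks of length 4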
<filename>src/components/dashboard/DashboardItem/DashboardItem.tsx
import React from "react";
import { DashboardItemProps } from "./DashboardItem.types";
import { CloseOutlined } from "@ant-design/icons";
export const DashboardItem = (props: DashboardItemProps) => {
const { id, title, children } = props;
return (
<div
style={{
backgroundColor: "#efefef",
width: "100%",
height: "100%",
borderRadius: "0.5em",
overflow: "hidden",
display: "flex",
flexDirection: "column",
}}
>
{/* <div style={{ padding: "0.25rem", alignSelf: "end" }}>
<CloseOutlined
onClick={(e) => {
e.stopPropagation();
onCloseTap(id);
}}
/>
</div> */}
<div style={{ padding: "2em" }}>{children}</div>
</div>
);
};
|
// Imports needed by TransfersRequest below (Jackson annotation package assumed).
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import com.fasterxml.jackson.annotation.JsonTypeName;
/**
* Request structure for transferring assets (unique, accounts or quantities).
*/
@JsonTypeName("TransfersRequest")
public class TransfersRequest extends BaseCommandRequest {
private final Collection<TransferItem> transfers;
/**
* Initializes a new request object.
* A random request identifier will be used.
*
* @param transfers collection of {@link TransferItem}
*/
public TransfersRequest(List<TransferItem> transfers) {
this(null, transfers);
}
/**
* Initializes a new request object.
*
* @param requestId a specific request identifier
* @param transfers collection of {@link TransferItem}
*/
public TransfersRequest(String requestId, Collection<TransferItem> transfers) {
super(requestId);
this.transfers = transfers;
}
/**
* Creates a new transfers request object.
*
* @param requestId a specific request identifier
* @param items collection of {@link TransferItem}
* @return the {@link TransfersRequest}
*/
public static TransfersRequest of(String requestId, TransferItem... items) {
return new TransfersRequest(requestId, Arrays.asList(items));
}
/**
* Creates a new transfers request object.
* A random request identifier will be used.
*
* @param items collection of {@link TransferItem}
* @return the {@link TransfersRequest}
*/
public static TransfersRequest of(TransferItem... items) {
return of(null, items);
}
/**
* Returns a collection of {@link TransferItem}.
*
* @return a collection of {@link TransferItem}
*/
public Collection<TransferItem> getTransfers() {
return transfers;
}
} |
import Control.Monad
import Data.Maybe (fromJust)
import qualified Data.ByteString.Char8 as B
main :: IO ()
main = do
_ <- readInt
ts <- readInts
m <- readInt
px <- replicateM m readInts
forM_ px $ \[p, x] -> print $ sum $ replace ts (p - 1) x
where
readB = fst . fromJust . B.readInt
readInt = readB <$> B.getLine
readInts = map readB . B.words <$> B.getLine
replace xs i v = take i xs ++ [v] ++ drop (i + 1) xs
|
from hashlib import md5
from typing import AnyStr, Optional, cast

def hash_context(context: Optional[AnyStr] = None) -> str:
if not context:
return ""
try:
context_bytes = cast(str, context).encode("utf8")
except AttributeError:
context_bytes = cast(bytes, context)
try:
return md5(context_bytes).hexdigest()
except TypeError:
return hash_context(str(context)) |
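A quick sketch of the behavior above: str and bytes inputs produce the same digest, and falsy input short-circuits to an empty string.

print(hash_context())         # ''
print(hash_context("abc"))    # '900150983cd24fb0d6963f7d28e17f72'
print(hash_context(b"abc"))   # same digest as the str form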
<gh_stars>100-1000
package com.ironz.binaryprefs.task;
import com.ironz.binaryprefs.task.barrier.FutureBarrier;
import java.util.concurrent.Callable;
/**
* Abstraction for task running. You should guarantee sequential task execution.
*/
public interface TaskExecutor {
/**
 * Submits a runnable into the task executor.
 * After submission the executor adds this task to
 * a queue and runs it later; tasks are guaranteed
 * to be executed sequentially.
*
* @param runnable instance for task execution
* @return future barrier for task blocking
*/
FutureBarrier<?> submit(Runnable runnable);
/**
 * Submits a callable into the task executor.
 * After submission the executor adds this task to
 * a queue and runs it later; tasks are guaranteed
 * to be executed sequentially.
*
* @param callable instance for task execution
* @return future barrier for task blocking
*/
<T> FutureBarrier<T> submit(Callable<T> callable);
} |
/*
* Copyright 2013 University of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package edu.usc.goffish.gofs.formats.gml;
import java.io.*;
import java.util.*;
import org.apache.commons.lang.*;
final class GMLWriter implements Closeable, Flushable {
private static final int DEFAULT_BUFFER_SIZE = 16384;
private static final String TABULATOR = " ";
private static final String KEY_VALUE_SEPARATOR = " ";
private static final String NEWLINE = System.lineSeparator();
private static final String OPEN_LIST = "[";
private static final String CLOSE_LIST = "]";
private final BufferedWriter _output;
private final boolean _writeIndentation;
private int _indentation;
public GMLWriter(OutputStream output) {
this(output, false, DEFAULT_BUFFER_SIZE);
}
public GMLWriter(OutputStream output, int bufferSize) {
this(output, false, bufferSize);
}
public GMLWriter(OutputStream output, boolean writeIndentation, int bufferSize) {
_output = new BufferedWriter(new OutputStreamWriter(output, GMLParser.GML_CHARSET), bufferSize);
_writeIndentation = writeIndentation;
_indentation = 0;
}
public static String classTypeToGMLType(Class<? extends Object> type) {
if (type == String.class) {
return GMLParser.STRING_TYPE;
} else if (type == Integer.class) {
return GMLParser.INTEGER_TYPE;
} else if (type == Long.class) {
return GMLParser.LONG_TYPE;
} else if (type == Float.class) {
return GMLParser.FLOAT_TYPE;
} else if (type == Double.class) {
return GMLParser.DOUBLE_TYPE;
} else if (type == Boolean.class) {
return GMLParser.BOOLEAN_TYPE;
} else if (type == List.class) {
return GMLParser.LIST_TYPE;
}
// type was not recognized
throw new ClassCastException();
}
public static Object classValueToGMLValue(Object value) {
        if (value instanceof Integer) {
            return Long.valueOf(((Integer)value).longValue());
        } else if (value instanceof Float) {
            return Double.valueOf(((Float)value).doubleValue());
        } else if (value instanceof Boolean) {
            return Long.valueOf(((Boolean)value).booleanValue() ? 1L : 0L);
} else if (value instanceof List) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>)value;
List<KeyValuePair> children = new ArrayList<>(list.size());
for (Object v : list) {
children.add(KeyValuePair.createKVP("value", v));
}
return children;
}
return value;
}
public void write(KeyValuePair kvp) throws IOException {
if (kvp == null) {
throw new IllegalArgumentException();
}
write(_indentation, kvp);
}
public void write(String key, Iterable<KeyValuePair> value) throws IOException {
if (key == null) {
throw new IllegalArgumentException();
}
if (value == null) {
throw new IllegalArgumentException();
}
writeKVPStart(_indentation, key);
writeKVPValue(_indentation, value);
writeKVPEnd(_indentation);
}
public void write(String key, String value) throws IOException {
if (key == null) {
throw new IllegalArgumentException();
}
if (value == null) {
throw new IllegalArgumentException();
}
writeKVPStart(_indentation, key);
writeKVPValue(_indentation, value);
writeKVPEnd(_indentation);
}
public void write(String key, long value) throws IOException {
if (key == null) {
throw new IllegalArgumentException();
}
writeKVPStart(_indentation, key);
writeKVPValue(_indentation, value);
writeKVPEnd(_indentation);
}
public void write(String key, double value) throws IOException {
if (key == null) {
throw new IllegalArgumentException();
}
writeKVPStart(_indentation, key);
writeKVPValue(_indentation, value);
writeKVPEnd(_indentation);
}
public void writeListOpen(String key) throws IOException {
if (key == null) {
throw new IllegalArgumentException();
}
if (_writeIndentation) {
for (int i = 0; i < _indentation; i++) {
_output.write(TABULATOR);
}
}
_output.write(key);
_output.write(KEY_VALUE_SEPARATOR);
_output.write(OPEN_LIST);
_output.write(NEWLINE);
_indentation++;
}
public void writeListClose() throws IOException {
if (_indentation <= 0) {
throw new IllegalStateException();
}
_indentation--;
if (_writeIndentation) {
for (int i = 0; i < _indentation; i++) {
_output.write(TABULATOR);
}
}
_output.write(CLOSE_LIST);
_output.write(NEWLINE);
}
protected void write(int indentation, KeyValuePair kvp) throws IOException {
        if (kvp instanceof ListKeyValuePair) {
            // use the indentation parameter, not the writer's current level,
            // so nested lists written recursively are indented correctly
            writeKVPStart(indentation, kvp.Key());
            writeKVPValue(indentation, kvp.ValueAsList());
            writeKVPEnd(indentation);
} else {
if (_writeIndentation) {
for (int i = 0; i < indentation; i++) {
_output.write(TABULATOR);
}
}
kvp.write(_output);
_output.write(System.lineSeparator());
}
}
private void writeKVPStart(int indentation, String key) throws IOException {
if (_writeIndentation) {
for (int i = 0; i < indentation; i++) {
_output.write(TABULATOR);
}
}
_output.write(key);
_output.write(KEY_VALUE_SEPARATOR);
}
private void writeKVPValue(int indentation, Iterable<KeyValuePair> value) throws IOException {
_output.write(OPEN_LIST);
_output.write(NEWLINE);
for (KeyValuePair childKvp : value) {
write(indentation + 1, childKvp);
}
if (_writeIndentation) {
for (int i = 0; i < indentation; i++) {
_output.write(TABULATOR);
}
}
_output.write(CLOSE_LIST);
}
private void writeKVPValue(int indentation, String value) throws IOException {
_output.write("\"");
_output.write(StringEscapeUtils.escapeHtml(value));
_output.write("\"");
}
private void writeKVPValue(int indentation, long value) throws IOException {
_output.write(Long.toString(value));
}
private void writeKVPValue(int indentation, double value) throws IOException {
_output.write(Double.toString(value));
}
private void writeKVPEnd(int indentation) throws IOException {
_output.write(NEWLINE);
}
@Override
public void flush() throws IOException {
_output.flush();
}
@Override
public void close() throws IOException {
if (_indentation != 0) {
throw new GMLFormatException("unbalanced open/close of gml lists (" + _indentation + " remain open)");
}
_output.close();
}
}
|
<gh_stars>100-1000
"""
Defines database related utilities.
"""
import os
import pkgutil
import datetime
from ..exceptions import DatabaseError
def get_database(dialect):
"""
Returns requested database package with modules that provide additional functionality.
:param string dialect: (required). Database dialect name.
"""
aliases = {
('mysql',): 'mysql',
('sqlite',): 'sqlite',
('pgsql', 'postgres', 'postgresql'): 'postgresql',
}
dialect = next((aliases[alias] for alias in aliases if dialect in alias), dialect)
names = [str(mod[1]) for mod in pkgutil.iter_modules([os.path.join(os.path.dirname(__file__), dialect)])]
try:
return __import__('{0}'.format(dialect), globals(), level=1, fromlist=names)
except ImportError:
raise DatabaseError(
current=dialect,
allowed=[name for _, name, is_pkg in pkgutil.iter_modules([os.path.dirname(__file__)]) if is_pkg])
class DateTime(object):
"""
Provides date and time calculations for some database backends.
"""
def __init__(self, now, template='%Y-%m-%d %H:%M:%S'):
"""
:param object now: (required). Date/Datetime object to work with.
:param string template: (optional). Format of datetime string representation.
"""
self.now = datetime.datetime(now.year, now.month, now.day) if type(now) is datetime.date else now
self.template = template
def get_period(self, period):
"""
        Dynamically returns the beginning and end of the given period.
:param string period: (required). Name of the period.
"""
return getattr(self, '_get_{0}_period'.format(period))()
def _get_day_period(self):
"""
        Returns the beginning and end of a day period.
"""
start = self.now.replace(hour=0, minute=0, second=0, microsecond=0)
end = self.now.replace(hour=23, minute=59, second=59, microsecond=999999)
return start.strftime(self.template), end.strftime(self.template)
def _get_week_period(self):
"""
        Returns the beginning and end of a week period.
"""
dt = datetime.datetime(self.now.year, 1, 1)
if dt.weekday() > 3:
dt += datetime.timedelta(7 - dt.weekday())
else:
dt -= datetime.timedelta(dt.weekday())
days = datetime.timedelta((int(self.now.strftime('%V')) - 1) * 7)
start = (dt + days)
end = (dt + days + datetime.timedelta(6)).replace(hour=23, minute=59, second=59, microsecond=999999)
return start.strftime(self.template), end.strftime(self.template)
def _get_month_period(self):
"""
        Returns the beginning and end of a month period.
"""
start = datetime.datetime(self.now.year, self.now.month, 1)
if self.now.month == 12:
end = datetime.datetime(self.now.year, self.now.month, 31, 23, 59, 59, 999999)
else:
end = datetime.datetime(self.now.year, self.now.month + 1, 1, 23, 59, 59, 999999) - datetime.timedelta(1)
return start.strftime(self.template), end.strftime(self.template)
def _get_year_period(self):
"""
        Returns the beginning and end of a year period.
"""
start = datetime.datetime(self.now.year, 1, 1)
end = datetime.datetime(self.now.year + 1, 1, 1, 23, 59, 59, 999999) - datetime.timedelta(1)
return start.strftime(self.template), end.strftime(self.template)
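# A usage sketch with the class above (date chosen arbitrarily); guarded so
# nothing runs on import.
if __name__ == '__main__':
    dt = DateTime(datetime.date(2017, 8, 26))
    print(dt.get_period('month'))  # ('2017-08-01 00:00:00', '2017-08-31 23:59:59')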
|
/**
* UI Flow for checking the software version property in a resource
*/
public class CheckSoftwareVersionPropertyFlow extends AbstractUiTestFlow {
private final List<String> expectedSoftwareVersionList;
public CheckSoftwareVersionPropertyFlow(final WebDriver webDriver, final List<String> expectedSoftwareVersionList) {
super(webDriver);
this.expectedSoftwareVersionList = expectedSoftwareVersionList;
}
@Override
public Optional<PageObject> run(final PageObject... pageObjects) {
final ResourceLeftSideMenu resourceLeftSideMenu = new ResourceLeftSideMenu(webDriver);
resourceLeftSideMenu.isLoaded();
final ResourcePropertiesAssignmentPage resourcePropertiesAssignmentPage = accessPropertiesAssignmentPage();
checkSoftwareVersionProperty(resourcePropertiesAssignmentPage);
return Optional.empty();
}
/**
* Checks if the software_version property values are as expected by the {@link #expectedSoftwareVersionList}.
*
* @param resourcePropertiesAssignmentPage the resource properties assignment page
*/
private void checkSoftwareVersionProperty(final ResourcePropertiesAssignmentPage resourcePropertiesAssignmentPage) {
extendTest.log(Status.INFO,
String.format("Checking the presence of software versions '%s' in 'software_versions' property",
getSoftwareVersionListAsString())
);
final List<String> actualSoftwareVersionList = resourcePropertiesAssignmentPage.getSoftwareVersionProperty();
assertThat("Software Version should have the expected size", actualSoftwareVersionList,
hasSize(expectedSoftwareVersionList.size()));
assertThat("Software Version should be as expected", actualSoftwareVersionList,
containsInAnyOrder(expectedSoftwareVersionList.toArray(new String[0])));
}
/**
* Accesses the properties assignment page by clicking in the resource left side menu.
*
* @return the resulting resource properties assignment page
*/
private ResourcePropertiesAssignmentPage accessPropertiesAssignmentPage() {
final ResourceLeftSideMenu resourceLeftSideMenu = new ResourceLeftSideMenu(webDriver);
resourceLeftSideMenu.isLoaded();
extendTest.log(Status.INFO,
String.format("Accessing the Properties Assignment page to check the software versions '%s'",
getSoftwareVersionListAsString())
);
final ResourcePropertiesAssignmentPage resourcePropertiesAssignmentPage =
resourceLeftSideMenu.clickOnPropertiesAssignmentMenuItem();
resourcePropertiesAssignmentPage.isLoaded();
return resourcePropertiesAssignmentPage;
}
/**
* Converts the {@link #expectedSoftwareVersionList} in a comma + space separated string.
*
* @return the software version list as a comma + space separated string
*/
private String getSoftwareVersionListAsString() {
return String.join(", ", expectedSoftwareVersionList);
}
} |
<gh_stars>1-10
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SVGDocumentWrapper.h"
#include "mozilla/dom/Element.h"
#include "nsIAtom.h"
#include "nsICategoryManager.h"
#include "nsIChannel.h"
#include "nsIContentViewer.h"
#include "nsIDocument.h"
#include "nsIDocumentLoaderFactory.h"
#include "nsIDOMSVGAnimatedLength.h"
#include "nsIDOMSVGLength.h"
#include "nsIHttpChannel.h"
#include "nsIObserverService.h"
#include "nsIParser.h"
#include "nsIPresShell.h"
#include "nsIRequest.h"
#include "nsIStreamListener.h"
#include "nsIXMLContentSink.h"
#include "nsNetCID.h"
#include "nsComponentManagerUtils.h"
#include "nsSMILAnimationController.h"
#include "nsServiceManagerUtils.h"
#include "nsSize.h"
#include "gfxRect.h"
#include "nsSVGSVGElement.h"
#include "nsSVGLength2.h"
#include "nsSVGEffects.h"
using namespace mozilla::dom;
namespace mozilla {
namespace image {
NS_IMPL_ISUPPORTS4(SVGDocumentWrapper,
nsIStreamListener,
nsIRequestObserver,
nsIObserver,
nsISupportsWeakReference)
SVGDocumentWrapper::SVGDocumentWrapper()
: mIgnoreInvalidation(false),
mRegisteredForXPCOMShutdown(false)
{
}
SVGDocumentWrapper::~SVGDocumentWrapper()
{
DestroyViewer();
if (mRegisteredForXPCOMShutdown) {
UnregisterForXPCOMShutdown();
}
}
void
SVGDocumentWrapper::DestroyViewer()
{
if (mViewer) {
mViewer->GetDocument()->OnPageHide(false, nullptr);
mViewer->Close(nullptr);
mViewer->Destroy();
mViewer = nullptr;
}
}
bool
SVGDocumentWrapper::GetWidthOrHeight(Dimension aDimension,
int32_t& aResult)
{
nsSVGSVGElement* rootElem = GetRootSVGElem();
NS_ABORT_IF_FALSE(rootElem, "root elem missing or of wrong type");
nsresult rv;
// Get the width or height SVG object
nsRefPtr<nsIDOMSVGAnimatedLength> domAnimLength;
if (aDimension == eWidth) {
rv = rootElem->GetWidth(getter_AddRefs(domAnimLength));
} else {
NS_ABORT_IF_FALSE(aDimension == eHeight, "invalid dimension");
rv = rootElem->GetHeight(getter_AddRefs(domAnimLength));
}
NS_ENSURE_SUCCESS(rv, false);
NS_ENSURE_TRUE(domAnimLength, false);
// Get the animated value from the object
nsRefPtr<nsIDOMSVGLength> domLength;
rv = domAnimLength->GetAnimVal(getter_AddRefs(domLength));
NS_ENSURE_SUCCESS(rv, false);
NS_ENSURE_TRUE(domLength, false);
// Check if it's a percent value (and fail if so)
uint16_t unitType;
rv = domLength->GetUnitType(&unitType);
NS_ENSURE_SUCCESS(rv, false);
if (unitType == nsIDOMSVGLength::SVG_LENGTHTYPE_PERCENTAGE) {
return false;
}
// Non-percent value - woot! Grab it & return it.
float floatLength;
rv = domLength->GetValue(&floatLength);
NS_ENSURE_SUCCESS(rv, false);
aResult = nsSVGUtils::ClampToInt(floatLength);
return true;
}
nsIFrame*
SVGDocumentWrapper::GetRootLayoutFrame()
{
Element* rootElem = GetRootSVGElem();
return rootElem ? rootElem->GetPrimaryFrame() : nullptr;
}
void
SVGDocumentWrapper::UpdateViewportBounds(const nsIntSize& aViewportSize)
{
NS_ABORT_IF_FALSE(!mIgnoreInvalidation, "shouldn't be reentrant");
mIgnoreInvalidation = true;
mViewer->SetBounds(nsIntRect(nsIntPoint(0, 0), aViewportSize));
FlushLayout();
mIgnoreInvalidation = false;
}
void
SVGDocumentWrapper::FlushImageTransformInvalidation()
{
NS_ABORT_IF_FALSE(!mIgnoreInvalidation, "shouldn't be reentrant");
nsSVGSVGElement* svgElem = GetRootSVGElem();
if (!svgElem)
return;
mIgnoreInvalidation = true;
svgElem->FlushImageTransformInvalidation();
FlushLayout();
mIgnoreInvalidation = false;
}
bool
SVGDocumentWrapper::IsAnimated()
{
nsIDocument* doc = mViewer->GetDocument();
return doc && doc->HasAnimationController() &&
doc->GetAnimationController()->HasRegisteredAnimations();
}
void
SVGDocumentWrapper::StartAnimation()
{
// Can be called for animated images during shutdown, after we've
// already Observe()'d XPCOM shutdown and cleared out our mViewer pointer.
if (!mViewer)
return;
nsIDocument* doc = mViewer->GetDocument();
if (doc) {
nsSMILAnimationController* controller = doc->GetAnimationController();
if (controller) {
controller->Resume(nsSMILTimeContainer::PAUSE_IMAGE);
}
doc->SetImagesNeedAnimating(true);
}
}
void
SVGDocumentWrapper::StopAnimation()
{
// Can be called for animated images during shutdown, after we've
// already Observe()'d XPCOM shutdown and cleared out our mViewer pointer.
if (!mViewer)
return;
nsIDocument* doc = mViewer->GetDocument();
if (doc) {
nsSMILAnimationController* controller = doc->GetAnimationController();
if (controller) {
controller->Pause(nsSMILTimeContainer::PAUSE_IMAGE);
}
doc->SetImagesNeedAnimating(false);
}
}
void
SVGDocumentWrapper::ResetAnimation()
{
nsSVGSVGElement* svgElem = GetRootSVGElem();
if (!svgElem)
return;
#ifdef DEBUG
nsresult rv =
#endif
svgElem->SetCurrentTime(0.0f);
NS_WARN_IF_FALSE(NS_SUCCEEDED(rv), "SetCurrentTime failed");
}
/** nsIStreamListener methods **/
/* void onDataAvailable (in nsIRequest request, in nsISupports ctxt,
in nsIInputStream inStr, in unsigned long sourceOffset,
in unsigned long count); */
NS_IMETHODIMP
SVGDocumentWrapper::OnDataAvailable(nsIRequest* aRequest, nsISupports* ctxt,
nsIInputStream* inStr,
uint64_t sourceOffset,
uint32_t count)
{
return mListener->OnDataAvailable(aRequest, ctxt, inStr,
sourceOffset, count);
}
/** nsIRequestObserver methods **/
/* void onStartRequest (in nsIRequest request, in nsISupports ctxt); */
NS_IMETHODIMP
SVGDocumentWrapper::OnStartRequest(nsIRequest* aRequest, nsISupports* ctxt)
{
nsresult rv = SetupViewer(aRequest,
getter_AddRefs(mViewer),
getter_AddRefs(mLoadGroup));
if (NS_SUCCEEDED(rv) &&
NS_SUCCEEDED(mListener->OnStartRequest(aRequest, nullptr))) {
mViewer->GetDocument()->SetIsBeingUsedAsImage();
StopAnimation(); // otherwise animations start automatically in helper doc
rv = mViewer->Init(nullptr, nsIntRect(0, 0, 0, 0));
if (NS_SUCCEEDED(rv)) {
rv = mViewer->Open(nullptr, nullptr);
}
}
return rv;
}
/* void onStopRequest (in nsIRequest request, in nsISupports ctxt,
in nsresult status); */
NS_IMETHODIMP
SVGDocumentWrapper::OnStopRequest(nsIRequest* aRequest, nsISupports* ctxt,
nsresult status)
{
if (mListener) {
mListener->OnStopRequest(aRequest, ctxt, status);
// A few levels up the stack, imgRequest::OnStopRequest is about to tell
// all of its observers that we know our size and are ready to paint. That
// might not be true at this point, though -- so here, we synchronously
// finish parsing & layout in our helper-document to make sure we can hold
// up to this promise.
nsCOMPtr<nsIParser> parser = do_QueryInterface(mListener);
while (!parser->IsComplete()) {
parser->CancelParsingEvents();
parser->ContinueInterruptedParsing();
}
FlushLayout();
mListener = nullptr;
// In a normal document, this would be called by nsDocShell - but we don't
// have a nsDocShell. So we do it ourselves. (If we don't, painting will
// stay suppressed for a little while longer, for no good reason).
mViewer->LoadComplete(NS_OK);
}
return NS_OK;
}
/** nsIObserver Methods **/
NS_IMETHODIMP
SVGDocumentWrapper::Observe(nsISupports* aSubject,
const char* aTopic,
const PRUnichar *aData)
{
if (!strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID)) {
// Sever ties from rendering observers to helper-doc's root SVG node
nsSVGSVGElement* svgElem = GetRootSVGElem();
if (svgElem) {
nsSVGEffects::RemoveAllRenderingObservers(svgElem);
}
// Clean up at XPCOM shutdown time.
DestroyViewer();
if (mListener)
mListener = nullptr;
if (mLoadGroup)
mLoadGroup = nullptr;
// Turn off "registered" flag, or else we'll try to unregister when we die.
// (No need for that now, and the try would fail anyway -- it's too late.)
mRegisteredForXPCOMShutdown = false;
} else {
NS_ERROR("Unexpected observer topic.");
}
return NS_OK;
}
/** Private helper methods **/
// This method is largely cribbed from
// nsExternalResourceMap::PendingLoad::SetupViewer.
nsresult
SVGDocumentWrapper::SetupViewer(nsIRequest* aRequest,
nsIContentViewer** aViewer,
nsILoadGroup** aLoadGroup)
{
nsCOMPtr<nsIChannel> chan(do_QueryInterface(aRequest));
NS_ENSURE_TRUE(chan, NS_ERROR_UNEXPECTED);
// Check for HTTP error page
nsCOMPtr<nsIHttpChannel> httpChannel(do_QueryInterface(aRequest));
if (httpChannel) {
bool requestSucceeded;
if (NS_FAILED(httpChannel->GetRequestSucceeded(&requestSucceeded)) ||
!requestSucceeded) {
return NS_ERROR_FAILURE;
}
}
// Give this document its own loadgroup
nsCOMPtr<nsILoadGroup> loadGroup;
chan->GetLoadGroup(getter_AddRefs(loadGroup));
nsCOMPtr<nsILoadGroup> newLoadGroup =
do_CreateInstance(NS_LOADGROUP_CONTRACTID);
NS_ENSURE_TRUE(newLoadGroup, NS_ERROR_OUT_OF_MEMORY);
newLoadGroup->SetLoadGroup(loadGroup);
nsCOMPtr<nsICategoryManager> catMan =
do_GetService(NS_CATEGORYMANAGER_CONTRACTID);
NS_ENSURE_TRUE(catMan, NS_ERROR_NOT_AVAILABLE);
nsXPIDLCString contractId;
nsresult rv = catMan->GetCategoryEntry("Gecko-Content-Viewers", SVG_MIMETYPE,
getter_Copies(contractId));
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIDocumentLoaderFactory> docLoaderFactory =
do_GetService(contractId);
NS_ENSURE_TRUE(docLoaderFactory, NS_ERROR_NOT_AVAILABLE);
nsCOMPtr<nsIContentViewer> viewer;
nsCOMPtr<nsIStreamListener> listener;
rv = docLoaderFactory->CreateInstance("external-resource", chan,
newLoadGroup,
SVG_MIMETYPE, nullptr, nullptr,
getter_AddRefs(listener),
getter_AddRefs(viewer));
NS_ENSURE_SUCCESS(rv, rv);
NS_ENSURE_TRUE(viewer, NS_ERROR_UNEXPECTED);
nsCOMPtr<nsIParser> parser = do_QueryInterface(listener);
NS_ENSURE_TRUE(parser, NS_ERROR_UNEXPECTED);
// XML-only, because this is for SVG content
nsIContentSink* sink = parser->GetContentSink();
nsCOMPtr<nsIXMLContentSink> xmlSink = do_QueryInterface(sink);
NS_ENSURE_TRUE(sink, NS_ERROR_UNEXPECTED);
listener.swap(mListener);
viewer.forget(aViewer);
newLoadGroup.forget(aLoadGroup);
RegisterForXPCOMShutdown();
return NS_OK;
}
void
SVGDocumentWrapper::RegisterForXPCOMShutdown()
{
NS_ABORT_IF_FALSE(!mRegisteredForXPCOMShutdown,
"re-registering for XPCOM shutdown");
// Listen for xpcom-shutdown so that we can drop references to our
// helper-document at that point. (Otherwise, we won't get cleaned up
// until imgLoader::Shutdown, which can happen after the JAR service
// and RDF service have been unregistered.)
nsresult rv;
nsCOMPtr<nsIObserverService> obsSvc = do_GetService(OBSERVER_SVC_CID, &rv);
if (NS_FAILED(rv) ||
NS_FAILED(obsSvc->AddObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID,
true))) {
NS_WARNING("Failed to register as observer of XPCOM shutdown");
} else {
mRegisteredForXPCOMShutdown = true;
}
}
void
SVGDocumentWrapper::UnregisterForXPCOMShutdown()
{
NS_ABORT_IF_FALSE(mRegisteredForXPCOMShutdown,
"unregistering for XPCOM shutdown w/out being registered");
nsresult rv;
nsCOMPtr<nsIObserverService> obsSvc = do_GetService(OBSERVER_SVC_CID, &rv);
if (NS_FAILED(rv) ||
NS_FAILED(obsSvc->RemoveObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID))) {
NS_WARNING("Failed to unregister as observer of XPCOM shutdown");
} else {
mRegisteredForXPCOMShutdown = false;
}
}
void
SVGDocumentWrapper::FlushLayout()
{
nsCOMPtr<nsIPresShell> presShell;
mViewer->GetPresShell(getter_AddRefs(presShell));
if (presShell) {
presShell->FlushPendingNotifications(Flush_Layout);
}
}
nsSVGSVGElement*
SVGDocumentWrapper::GetRootSVGElem()
{
if (!mViewer)
return nullptr; // Can happen during destruction
nsIDocument* doc = mViewer->GetDocument();
if (!doc)
return nullptr; // Can happen during destruction
Element* rootElem = mViewer->GetDocument()->GetRootElement();
if (!rootElem || !rootElem->IsSVG(nsGkAtoms::svg)) {
return nullptr;
}
return static_cast<nsSVGSVGElement*>(rootElem);
}
} // namespace image
} // namespace mozilla
|
/**
 * A like given by a user to a post
*/
@Entity
@Data
@NamedQueries({
@NamedQuery(name = "Likes.all",
query = "SELECT l FROM Likes l"),
@NamedQuery(name="Likes.byId",
query="SELECT l FROM Likes l "
+ "WHERE l.post.id = :postId"),
@NamedQuery(name="Likes.byUser",
query="SELECT l FROM Likes l "
+ "WHERE l.usuario.id = :userId"),
@NamedQuery(name="Likes.byuserpost",
query="SELECT l FROM Likes l "
+ "WHERE l.usuario.id = :userId and l.post.id=:postId"),
})
public class Likes implements Transferable<Likes.Transfer> {
    private static Logger log = LogManager.getLogger(Likes.class);
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "gen")
@SequenceGenerator(name = "gen", sequenceName = "gen")
private Long id;
@ManyToOne
    private User usuario; // the user who gave the like
@ManyToOne
    private Post post; // the post that was liked
/**
     * Object used to persist to/from JSON
* @author mfreire
*/
@Getter
@AllArgsConstructor
public static class Transfer {
private User usuario;
private Post post;
private long id;
public Transfer(Likes m) {
this.usuario = m.getUsuario();
this.post = m.getPost();
this.id = m.getId();
}
}
@Override
public Transfer toTransfer() {
        return new Transfer(usuario, post, id);
}
} |
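A minimal usage sketch for the named queries declared above, assuming a JPA EntityManager is available to the caller; the helper class LikesQueries and its method name are illustrative and not part of the original source:

import java.util.List;
import javax.persistence.EntityManager;

// Hypothetical helper showing how the "Likes.byuserpost" named query
// declared on the entity might be invoked.
class LikesQueries {
    // Returns the likes a given user has placed on a given post.
    static List<Likes> byUserAndPost(EntityManager em, long userId, long postId) {
        return em.createNamedQuery("Likes.byuserpost", Likes.class)
                .setParameter("userId", userId)
                .setParameter("postId", postId)
                .getResultList();
    }
}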
Increased succinate dehydrogenase activity of lymphocytes in eczema.
Succinate dehydrogenase activity has been studied in the peripheral blood lymphocytes from controls and patients with a variety of skin and other diseases. Increased activity has been found in eczematous dermatitis and dermographism, and also in one patient with chronic lymphatic leukemia. Normal levels were found in psoriasis. The enzyme activity is broadly correlated with the extent and activity of the disease process. |
// Adds the data to the cert as an extension, formatted as a single
// ASN.1 octet string.
static void AddOctetExtension(X509* cert, int nid, const unsigned char* data,
int data_len, int critical) {
ScopedASN1_OCTET_STRING inner(ASN1_OCTET_STRING_new());
CHECK_NOTNULL(inner.get());
CHECK_EQ(1, ASN1_OCTET_STRING_set(inner.get(), data, data_len));
int buf_len = i2d_ASN1_OCTET_STRING(inner.get(), NULL);
CHECK_GT(buf_len, 0);
unsigned char buf[buf_len];
unsigned char* p = buf;
CHECK_EQ(buf_len, i2d_ASN1_OCTET_STRING(inner.get(), &p));
ScopedASN1_OCTET_STRING asn1_data(ASN1_OCTET_STRING_new());
CHECK_NOTNULL(asn1_data.get());
CHECK_EQ(1, ASN1_OCTET_STRING_set(asn1_data.get(), buf, buf_len));
X509_EXTENSION* ext =
X509_EXTENSION_create_by_NID(NULL, nid, critical, asn1_data.get());
CHECK_EQ(1, X509_add_ext(cert, ext, -1));
} |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.android.apps.mytracks.maps;
import com.google.android.apps.mytracks.Constants;
import com.google.android.apps.mytracks.util.PreferencesUtils;
import com.google.android.maps.mytracks.R;
import android.content.Context;
import android.content.SharedPreferences;
import android.test.AndroidTestCase;
/**
 * Tests for the {@link FixedSpeedTrackPathDescriptor}.
*
* @author <NAME>
*/
public class FixedSpeedTrackPathDescriptorTest extends AndroidTestCase {
private Context context;
private SharedPreferences sharedPreferences;
@Override
protected void setUp() throws Exception {
super.setUp();
context = getContext();
sharedPreferences = context.getSharedPreferences(Constants.SETTINGS_NAME, Context.MODE_PRIVATE);
}
/**
   * Tests the initialization of slowSpeed and normalSpeed in
   * {@link FixedSpeedTrackPathDescriptor#FixedSpeedTrackPathDescriptor(Context)}.
*/
public void testConstructor() {
int[] slowSpeedExpectations = { 0, 1, 99, PreferencesUtils.TRACK_COLOR_MODE_SLOW_DEFAULT };
int[] normalSpeedExpectations = { 0, 1, 99, PreferencesUtils.TRACK_COLOR_MODE_MEDIUM_DEFAULT };
for (int i = 0; i < slowSpeedExpectations.length; i++) {
PreferencesUtils.setInt(
context, R.string.track_color_mode_slow_key, slowSpeedExpectations[i]);
PreferencesUtils.setInt(
context, R.string.track_color_mode_medium_key, normalSpeedExpectations[i]);
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
assertEquals(slowSpeedExpectations[i], fixedSpeedTrackPathDescriptor.getSlowSpeed());
assertEquals(normalSpeedExpectations[i], fixedSpeedTrackPathDescriptor.getNormalSpeed());
}
}
/**
   * Tests {@link FixedSpeedTrackPathDescriptor#onSharedPreferenceChanged(SharedPreferences,
* String)} when the key is null.
*/
public void testOnSharedPreferenceChanged_null_key() {
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
int slowSpeed = fixedSpeedTrackPathDescriptor.getSlowSpeed();
int normalSpeed = fixedSpeedTrackPathDescriptor.getNormalSpeed();
// Change value in shared preferences
PreferencesUtils.setInt(context, R.string.track_color_mode_slow_key, slowSpeed + 2);
PreferencesUtils.setInt(context, R.string.track_color_mode_medium_key, normalSpeed + 2);
fixedSpeedTrackPathDescriptor.onSharedPreferenceChanged(sharedPreferences, null);
assertEquals(slowSpeed, fixedSpeedTrackPathDescriptor.getSlowSpeed());
assertEquals(normalSpeed, fixedSpeedTrackPathDescriptor.getNormalSpeed());
}
/**
   * Tests {@link FixedSpeedTrackPathDescriptor#onSharedPreferenceChanged(SharedPreferences,
* String)} when the key is not null, and not slowSpeed and not normalSpeed.
*/
public void testOnSharedPreferenceChanged_other_key() {
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
int slowSpeed = fixedSpeedTrackPathDescriptor.getSlowSpeed();
int normalSpeed = fixedSpeedTrackPathDescriptor.getNormalSpeed();
// Change value in shared preferences
PreferencesUtils.setInt(context, R.string.track_color_mode_slow_key, slowSpeed + 2);
PreferencesUtils.setInt(context, R.string.track_color_mode_medium_key, normalSpeed + 2);
fixedSpeedTrackPathDescriptor.onSharedPreferenceChanged(sharedPreferences, "anyKey");
assertEquals(slowSpeed, fixedSpeedTrackPathDescriptor.getSlowSpeed());
assertEquals(normalSpeed, fixedSpeedTrackPathDescriptor.getNormalSpeed());
}
/**
   * Tests {@link FixedSpeedTrackPathDescriptor#onSharedPreferenceChanged(SharedPreferences,
* String)} when the key is slowSpeed.
*/
public void testOnSharedPreferenceChanged_slowSpeedKey() {
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
int slowSpeed = fixedSpeedTrackPathDescriptor.getSlowSpeed();
// Change value in shared preferences
PreferencesUtils.setInt(context, R.string.track_color_mode_slow_key, slowSpeed + 2);
fixedSpeedTrackPathDescriptor.onSharedPreferenceChanged(
sharedPreferences, context.getString(R.string.track_color_mode_slow_key));
assertEquals(slowSpeed + 2, fixedSpeedTrackPathDescriptor.getSlowSpeed());
}
/**
   * Tests {@link FixedSpeedTrackPathDescriptor#onSharedPreferenceChanged(SharedPreferences,
* String)} when the key is normalSpeed.
*/
public void testOnSharedPreferenceChanged_normalSpeedKey() {
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
int normalSpeed = fixedSpeedTrackPathDescriptor.getNormalSpeed();
PreferencesUtils.setInt(context, R.string.track_color_mode_medium_key, normalSpeed + 4);
fixedSpeedTrackPathDescriptor.onSharedPreferenceChanged(
sharedPreferences, context.getString(R.string.track_color_mode_medium_key));
assertEquals(normalSpeed + 4, fixedSpeedTrackPathDescriptor.getNormalSpeed());
}
/**
   * Tests {@link FixedSpeedTrackPathDescriptor#onSharedPreferenceChanged(SharedPreferences,
* String)} when the values of slowSpeed and normalSpeed in SharedPreference
* are the default values.
*/
public void testOnSharedPreferenceChanged_defaultValue() {
FixedSpeedTrackPathDescriptor fixedSpeedTrackPathDescriptor = new FixedSpeedTrackPathDescriptor(
context);
PreferencesUtils.setInt(context, R.string.track_color_mode_slow_key,
PreferencesUtils.TRACK_COLOR_MODE_SLOW_DEFAULT);
PreferencesUtils.setInt(context, R.string.track_color_mode_medium_key,
PreferencesUtils.TRACK_COLOR_MODE_MEDIUM_DEFAULT);
fixedSpeedTrackPathDescriptor.onSharedPreferenceChanged(
sharedPreferences, context.getString(R.string.track_color_mode_medium_key));
assertEquals(PreferencesUtils.TRACK_COLOR_MODE_SLOW_DEFAULT,
fixedSpeedTrackPathDescriptor.getSlowSpeed());
assertEquals(PreferencesUtils.TRACK_COLOR_MODE_MEDIUM_DEFAULT,
fixedSpeedTrackPathDescriptor.getNormalSpeed());
}
} |
/**
* Service request content when used with JSON-accepting endpoints
*
* Current status: NOT USED
*
* NB: for the moment, documents are sent either as:
 * - octet stream
* - multi-part files
 * since encoding binary document content into JSON may be overkill,
 * but this may be revisited when going forward with gRPC
*
* [keeping for now as a placeholder]
*/
@Data
public class ServiceRequestContent {
TikaBinaryDocument document;
// TODO: footer as in NLP
} |
import { Component } from '@angular/core';
@Component({
selector: 'nz-demo-layout-fixed',
template: `
<nz-layout class="layout">
<nz-header>
<div class="logo"></div>
<ul nz-menu nzTheme="dark" nzMode="horizontal">
<li nz-menu-item>nav 1</li>
<li nz-menu-item>nav 2</li>
<li nz-menu-item>nav 3</li>
</ul>
</nz-header>
<nz-content>
<nz-breadcrumb>
<nz-breadcrumb-item>Home</nz-breadcrumb-item>
<nz-breadcrumb-item>List</nz-breadcrumb-item>
<nz-breadcrumb-item>App</nz-breadcrumb-item>
</nz-breadcrumb>
<div class="inner-content">Content</div>
</nz-content>
<nz-footer>Ant Design ©2020 Implement By Angular</nz-footer>
</nz-layout>
`,
styles: [
`
.layout {
min-height: 100vh;
}
.logo {
width: 120px;
height: 31px;
background: rgba(255, 255, 255, 0.2);
margin: 16px 24px 16px 0;
float: left;
}
nz-header {
position: fixed;
width: 100%;
}
[nz-menu] {
line-height: 64px;
}
nz-content {
padding: 0 50px;
margin-top: 64px;
}
nz-breadcrumb {
margin: 16px 0;
}
.inner-content {
background: #fff;
padding: 24px;
min-height: 380px;
}
nz-footer {
text-align: center;
}
`
]
})
export class NzDemoLayoutFixedComponent {}
|
package edu.ucla.library.bucketeer;
import static edu.ucla.library.bucketeer.Constants.MESSAGES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import info.freelibrary.util.Logger;
import info.freelibrary.util.LoggerFactory;
import info.freelibrary.util.StringUtils;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
/**
* Test for the JobFactory class.
*/
public class JobFactoryTest {
private static final Logger LOGGER = LoggerFactory.getLogger(JobFactoryTest.class, MESSAGES);
private static final File CSV_FILE = new File("src/test/resources/csv/live-test.csv");
private static final File MISSING_FAILED_FILE = new File("src/test/resources/csv/missing-failed.csv");
private static final File BAD_HEADERS = new File("src/test/resources/csv/dupe-headers.csv");
private static final File FILE_WITH_SPACES = new File("src/test/resources/csv/spaces-file.csv");
private static final File JSON_FILE = new File("src/test/resources/json/job.json");
private static final File TEST_TIFF_FILE = new File("src/test/resources/images/test.tif");
private static final File TEST_FAIL_FILE = new File("src/test/resources/images/fail.tif");
private static final String TEST_JOB_NAME = "test-job";
private static final String FILE_PATH = "filePath";
private static final String SLACK_HANDLE = "ksclarke";
private static final String ITEMS = "items";
@Rule
public ExpectedException myThrown = ExpectedException.none();
/**
* Test JobFactory.createJob().
*/
@Test
public final void testCreateJob() throws ProcessingException, IOException {
final Job job = JobFactory.getInstance().createJob(TEST_JOB_NAME, CSV_FILE);
final JsonObject expected = new JsonObject(StringUtils.read(JSON_FILE));
final JsonArray items = expected.getJsonArray(ITEMS);
for (int index = 0; index < items.size(); index++) {
final JsonObject item = items.getJsonObject(index);
if (index != 7) {
item.put(FILE_PATH, TEST_TIFF_FILE.getPath());
} else {
item.put(FILE_PATH, TEST_FAIL_FILE.getPath());
}
}
assertEquals(expected, job.setSlackHandle(SLACK_HANDLE).toJSON());
}
/**
* Test JobFactory rejects files with duplicate headers.
*/
@Test
public final void testDupeHeadersThrowsException() throws ProcessingException, IOException {
myThrown.expect(ProcessingException.class);
myThrown.expectMessage("has one or more duplicate column headers");
final Job job = JobFactory.getInstance().createJob(TEST_JOB_NAME, BAD_HEADERS);
}
/**
* Test JobFactory rejects files with spaces in "File Name" entries.
*/
@Test
public final void testSpacesThrowsException() throws ProcessingException, IOException {
myThrown.expect(ProcessingException.class);
myThrown.expectMessage("There are spaces (\" \")");
final Job job = JobFactory.getInstance().createJob(TEST_JOB_NAME, FILE_WITH_SPACES);
}
/**
* Test JobFactory records failed/missing images.
*/
@Test
public final void testFailedMissingCount() throws ProcessingException, IOException {
final String iiifURL = "unit.test.com";
final String slackUserHandle = "fake.user";
final long expectedFailed = 2;
final long expectedMissing = 1;
final Job job = JobFactory.getInstance().createJob(TEST_JOB_NAME, MISSING_FAILED_FILE);
final String slackMessage =
LOGGER.getMessage(MessageCodes.BUCKETEER_111, slackUserHandle, job.size(),
job.failedItems(), job.missingItems(), iiifURL);
assertEquals(expectedFailed, job.failedItems());
assertEquals(expectedMissing, job.missingItems());
assertTrue(slackMessage.contains("2 failed"));
assertTrue(slackMessage.contains("1 missing images"));
}
}
|
export class Global {
    static readonly GRIDSIZE = 800; // width and height of the grid
    static readonly CELLSIZE = 32; // width and height of one cell in the grid
static readonly CELLBORDER = 2;
}
export class GameHelper {
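    // Converts a zero-based grid cell index into the pixel coordinate of that cell's center.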
static GetCell(coordinate: number): number {
return coordinate * Global.CELLSIZE + Global.CELLSIZE / 2;
}
} |
def _get_app_exec(self):
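    # Resolve the executable name of the process that owns this audio
    # session; a PID of 0 indicates the system sounds session rather
    # than a regular user process.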
self.pid = self._ctl2.GetProcessId()
if self.pid != 0:
return psutil.Process(self.pid).name()
returned_HRESULT = self._ctl2.IsSystemSoundsSession()
S_OK = 0
if returned_HRESULT == S_OK:
return "SndVol.exe"
else:
warn = ("unidentified app! "
f"pid: {self.pid}, is system sound: {returned_HRESULT}")
log.critical(warn)
raise ValueError(warn) |
// cargo-deps: bindgen = "0.51.1", pkg-config = "0.3.17"
use std::path::PathBuf;
fn main() {
let mut lib = pkg_config::Config::new()
.atleast_version("3.2.2")
.probe("libarchive")
.expect("Fail to detect the libarchive library");
let include_path = lib
.include_paths
.pop()
.unwrap_or(PathBuf::from("usr/include"));
let bindings = bindgen::Builder::default()
// Set rustfmt setting
.rustfmt_configuration_file(Some(".rustfmt.toml".into()))
// Set include path
.header(format!("{}/archive.h", include_path.display()))
.header(format!("{}/archive_entry.h", include_path.display()))
        // We need to add this as a raw_line to silence the cargo clippy
        // warning about converting to upper camel case
.raw_line("#![allow(non_camel_case_types)]\n")
        // We need to add these as raw_lines; otherwise bindgen generates them
        // as u32, causing a type mismatch
.raw_line("pub const ARCHIVE_EOF: i32 = 1;")
.raw_line("pub const ARCHIVE_OK: i32 = 0;")
// Binding whitelist
.whitelist_var("ARCHIVE_EXTRACT_TIME")
.whitelist_var("ARCHIVE_EXTRACT_PERM")
.whitelist_var("ARCHIVE_EXTRACT_ACL")
.whitelist_var("ARCHIVE_EXTRACT_FFLAGS")
.whitelist_var("ARCHIVE_EXTRACT_OWNER")
.whitelist_var("ARCHIVE_EXTRACT_FFLAGS")
.whitelist_var("ARCHIVE_EXTRACT_XATTR")
.whitelist_function("archive_read_new")
.whitelist_function("archive_read_support_filter_all")
.whitelist_function("archive_read_support_format_all")
.whitelist_function("archive_read_support_format_raw")
.whitelist_function("archive_read_close")
.whitelist_function("archive_read_free")
.whitelist_function("archive_read_data_block")
.whitelist_function("archive_read_next_header")
.whitelist_function("archive_read_open")
.whitelist_function("archive_write_disk_new")
.whitelist_function("archive_write_disk_set_options")
.whitelist_function("archive_write_disk_set_standard_lookup")
.whitelist_function("archive_write_header")
.whitelist_function("archive_write_finish_entry")
.whitelist_function("archive_write_data_block")
.whitelist_function("archive_write_close")
.whitelist_function("archive_write_free")
.whitelist_function("archive_entry_pathname")
.whitelist_function("archive_entry_free")
.whitelist_function("archive_entry_set_pathname")
.whitelist_function("archive_entry_set_hardlink")
.whitelist_function("archive_entry_hardlink")
.whitelist_function("archive_set_error")
.whitelist_function("archive_error_string")
.whitelist_function("archive_errno")
.generate()
.expect("Unable to generate bindings");
bindings
.write_to_file(PathBuf::from("src/ffi.rs"))
.expect("Couldn't write bindings!");
println!("Sucessfully generated bindings for libarchive.");
}
|
Historic Markers Across Alabama
The Rooster Bridge
Marker ID: AHC Location: on U.S. 80 3 miles east of State Road 28, Demopolis, Alabama County: Sumter Coordinates: N 32° 27.127 W 088° 0.344 32.45211666 -88.00573333 Style: Free Standing ** Waymark: None
Text:
Side 1:
In 1919 a rooster sale organized by Frank Derby raised money to begin construction of a bridge over the Tombigbee River at Moscow Ferry. This was the last link in the completion of the Dixie Overland Highway between Savannah and San Diego.
The idea was"to bridge the 'Bigbee with cocks”: Roosters would be solicited from world-famous persons and an auction and barbeque held in the city of Demopolis for the benefit of the bridge.
Congressmen"Buck” Oliver, Admiral William S. Benson, and Secretary of Navy Josephus Daniels helped sell President Wilson on the idea. He and the others of the Big Four, Lloyd George, Clemenceau, and Orlando, who were meeting at the Versailles Conference, shipped roosters on the USS Northern Pacific. Governor Kilby sent 27 prominent Alabamians to the White House to receive the roosters from President Wilson.
Side 2:
By August 14, 1919, 600 roosters (and one hen from Helen Keller) had arrived in Demopolis. President Wilson's rooster auctioned for $44,000. Over $200,000 was pledged, but most was not collected. The names of 88 donors appear on markers at the original site one mile downstream.
With the addition of state and federal funds the bridge was constructed and opened in 1925 as Memorial Bridge. Always known locally as Rooster Bridge, the name was officially changed in 1959 when a bill sponsored by Sen. E. O. Eddins passed the State Legislature. In July, 1971, a bill sponsored by State Representatives I. D. Pruitt and R. S. Manley was approved, which decreed that all future bridges over the Tombigbee at Moscow would be named Rooster Bridge and bear plaques relating the unique plan devised by Frank Derby in building the first bridge.
Erected by Alabama Historical Commission and the Alabama Highway Department.
End of The Rooster Bridge |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe, erpnext
import json
from frappe import _
from frappe.utils import flt
from erpnext.setup.doctype.setup_progress.setup_progress import update_domain_actions, get_domain_actions_state
@frappe.whitelist()
def set_sales_target(args_data):
args = json.loads(args_data)
defaults = frappe.defaults.get_defaults()
frappe.db.set_value("Company", defaults.get("company"), "monthly_sales_target", args.get('monthly_sales_target'))
@frappe.whitelist()
def create_customers(args_data):
args = json.loads(args_data)
defaults = frappe.defaults.get_defaults()
for i in range(1,4):
customer = args.get("customer_" + str(i))
if customer:
try:
doc = frappe.get_doc({
"doctype":"Customer",
"customer_name": customer,
"customer_type": "Company",
"customer_group": _("Commercial"),
"territory": defaults.get("country"),
"company": defaults.get("company")
}).insert()
if args.get("customer_contact_" + str(i)):
create_contact(args.get("customer_contact_" + str(i)),
"Customer", doc.name)
except frappe.NameError:
pass
@frappe.whitelist()
def create_letterhead(args_data):
args = json.loads(args_data)
letterhead = args.get("letterhead")
if letterhead:
try:
frappe.get_doc({
"doctype":"Letter Head",
"content":"""<div><img src="{0}" style='max-width: 100%%;'><br></div>""".format(letterhead.encode('utf-8')),
"letter_head_name": _("Standard"),
"is_default": 1
}).insert()
except frappe.NameError:
pass
@frappe.whitelist()
def create_suppliers(args_data):
args = json.loads(args_data)
defaults = frappe.defaults.get_defaults()
for i in range(1,4):
supplier = args.get("supplier_" + str(i))
if supplier:
try:
doc = frappe.get_doc({
"doctype":"Supplier",
"supplier_name": supplier,
"supplier_type": _("Local"),
"company": defaults.get("company")
}).insert()
if args.get("supplier_contact_" + str(i)):
create_contact(args.get("supplier_contact_" + str(i)),
"Supplier", doc.name)
except frappe.NameError:
pass
def create_contact(contact, party_type, party):
"""Create contact based on given contact name"""
	contact = contact.split(" ")
contact = frappe.get_doc({
"doctype":"Contact",
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
})
contact.append('links', dict(link_doctype=party_type, link_name=party))
contact.insert()
@frappe.whitelist()
def create_items(args_data):
args = json.loads(args_data)
defaults = frappe.defaults.get_defaults()
for i in range(1,4):
item = args.get("item_" + str(i))
if item:
default_warehouse = ""
default_warehouse = frappe.db.get_value("Warehouse", filters={
"warehouse_name": _("Finished Goods"),
"company": defaults.get("company_name")
})
try:
frappe.get_doc({
"doctype":"Item",
"item_code": item,
"item_name": item,
"description": item,
"show_in_website": 1,
"is_sales_item": 1,
"is_purchase_item": 1,
"is_stock_item": 1,
"item_group": _("Products"),
"stock_uom": _(args.get("item_uom_" + str(i))),
"default_warehouse": default_warehouse
}).insert()
except frappe.NameError:
pass
else:
if args.get("item_price_" + str(i)):
item_price = flt(args.get("item_price_" + str(i)))
price_list_name = frappe.db.get_value("Price List", {"selling": 1})
make_item_price(item, price_list_name, item_price)
price_list_name = frappe.db.get_value("Price List", {"buying": 1})
make_item_price(item, price_list_name, item_price)
def make_item_price(item, price_list_name, item_price):
frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list_name,
"item_code": item,
"price_list_rate": item_price
}).insert()
# Education
@frappe.whitelist()
def create_program(args_data):
args = json.loads(args_data)
for i in range(1,4):
if args.get("program_" + str(i)):
program = frappe.new_doc("Program")
program.program_code = args.get("program_" + str(i))
program.program_name = args.get("program_" + str(i))
try:
program.save()
except frappe.DuplicateEntryError:
pass
@frappe.whitelist()
def create_course(args_data):
args = json.loads(args_data)
for i in range(1,4):
if args.get("course_" + str(i)):
course = frappe.new_doc("Course")
course.course_code = args.get("course_" + str(i))
course.course_name = args.get("course_" + str(i))
try:
course.save()
except frappe.DuplicateEntryError:
pass
@frappe.whitelist()
def create_instructor(args_data):
args = json.loads(args_data)
for i in range(1,4):
if args.get("instructor_" + str(i)):
instructor = frappe.new_doc("Instructor")
instructor.instructor_name = args.get("instructor_" + str(i))
try:
instructor.save()
except frappe.DuplicateEntryError:
pass
@frappe.whitelist()
def create_room(args_data):
args = json.loads(args_data)
for i in range(1,4):
if args.get("room_" + str(i)):
room = frappe.new_doc("Room")
room.room_name = args.get("room_" + str(i))
room.seating_capacity = args.get("room_capacity_" + str(i))
try:
room.save()
except frappe.DuplicateEntryError:
pass
@frappe.whitelist()
def create_users(args_data):
if frappe.session.user == 'Administrator':
return
args = json.loads(args_data)
defaults = frappe.defaults.get_defaults()
for i in range(1,4):
email = args.get("user_email_" + str(i))
fullname = args.get("user_fullname_" + str(i))
if email:
if not fullname:
fullname = email.split("@")[0]
parts = fullname.split(" ", 1)
user = frappe.get_doc({
"doctype": "User",
"email": email,
"first_name": parts[0],
"last_name": parts[1] if len(parts) > 1 else "",
"enabled": 1,
"user_type": "System User"
})
# default roles
user.append_roles("Projects User", "Stock User", "Support Team")
user.flags.delay_emails = True
if not frappe.db.get_value("User", email):
user.insert(ignore_permissions=True)
# create employee
emp = frappe.get_doc({
"doctype": "Employee",
"employee_name": fullname,
"user_id": email,
"status": "Active",
"company": defaults.get("company")
})
emp.flags.ignore_mandatory = True
emp.insert(ignore_permissions = True)
# Enumerate the setup hooks you're going to need, apart from the slides
@frappe.whitelist()
def update_default_domain_actions_and_get_state():
domain = frappe.db.get_value('Company', erpnext.get_default_company(), 'domain')
update_domain_actions(domain)
return get_domain_actions_state(domain)
|
package main;
import java.util.Scanner;
public class Matrizes {
public static void main(String[] args) {
Scanner entrada = new Scanner(System.in);
System.out.print("Digite quantos campos ira adicionar: ");
int n = entrada.nextInt();
System.out.println();
        // creating the matrix
int[][] mat = new int[n][n];
        // loop to traverse the entire matrix
for (int i = 0; i < n; i++) {
            // iterating over the columns
for (int j = 0; j < n; j++) {
// System.out.print("Na fileira " + i + " da Matriz e na coluna " + j + " digite o valor: ");
mat[i][j] = entrada.nextInt();
}
System.out.println();
}
//---------------------------------------------------------------------------------------------------------
        // printing the numbers on the main diagonal of the matrix
System.out.println("Main diagonal");
for (int i = 0; i < n; i++) {
System.out.print(mat[i][i] + " ");
}
//---------------------------------------------------------------------------------------------------------
int count = 0;
        // iterating to count how many negative numbers there are
for (int i = 0; i < mat.length; i++) {
for (int j = 0; j < mat[i].length; j++) {
if (mat[i][j] < 0) {
count++;
}
}
}
System.out.println();
System.out.println("Numbers negative: " + count);
entrada.close();
}
}
|
ctDNA analysis reveals different molecular patterns upon disease progression in patients treated with osimertinib
Background Several clinical trials have demonstrated the efficacy and safety of osimertinib in advanced non-small-cell lung cancer (NSCLC). However, there is significant unexplained variability in treatment outcome. Methods Observational prospective cohort of 22 pre-treated patients with stage IV NSCLC harboring the epidermal growth factor receptor (EGFR) p.T790M resistance mutation and who were treated with osimertinib. Three hundred and twenty-six serial plasma samples were collected and analyzed by digital PCR (dPCR) and next-generation sequencing (NGS). Results The median progression-free survival (PFS), since the start of osimertinib, was 8.9 months. The median treatment durations of sequential gefitinib + osimertinib, afatinib + osimertinib and erlotinib + osimertinib treatments were 30.1, 24.6 and 21.1 months, respectively. The p.T790M mutation was detected in 19 (86%) pre-treatment blood samples. Undetectable levels of the original EGFR-sensitizing mutation after 3 months of treatment were associated with superior PFS (HR: 0.2, 95% CI: 0.05–0.7). Likewise, re-emergence of the original EGFR mutation, alone or together with the p.T790M mutation, was significantly associated with shorter PFS (HR: 8.8, 95% CI: 1.1–70.7 and HR: 5.9, 95% CI: 1.2–27.9, respectively). Blood-based monitoring revealed three molecular patterns upon progression to osimertinib: sensitizing+/T790M+/C797S+, sensitizing+/T790M+/C797S–, and sensitizing+/T790M–/C797S–. Median time to progression in patients showing the triplet pattern (sensitizing+/T790M+/C797S+) was 12.27 months compared with 4.87 months in patients in whom only the original EGFR-sensitizing mutation was detected, and 2.17 months in patients showing the duplet pattern (sensitizing+/T790M+). Finally, we found that mutations in codon 545 of the PIK3CA gene were the most frequent alteration detected upon disease progression in patients without acquired EGFR-resistance mutations. Conclusions Different molecular patterns identified by plasma genotyping may be of prognostic significance, suggesting that the use of liquid biopsy is a valuable approach for tumor monitoring.
Introduction
Epidermal growth factor receptor (EGFR) tyrosine kinase inhibitors (TKIs) have been the standard of care for patients with advanced EGFR-mutant non-small-cell lung cancer (NSCLC) (1,2). However, most patients progress within 1 to 2 years (3). The EGFR p.T790M mutation is the most common resistance mechanism to first- and second-generation EGFR TKIs (4). Osimertinib, a third-generation TKI, has demonstrated its clinical efficacy in NSCLC tumors harboring the p.T790M mutation at disease progression after treatment with first- or second-generation EGFR TKIs (5). Moreover, in the randomized phase III FLAURA trial, osimertinib exceeded the standard of care gefitinib or erlotinib in treatment-naive NSCLC patients harboring EGFR exon 19 deletions and the p.L858R point mutation, giving rise to a significant improvement in median progression-free survival (PFS) compared with standard TKIs (6).
Nevertheless, acquired EGFR mutations conferring osimertinib resistance invariably emerge, such as the p.C797S mutation, which accounts for approximately 20-40% of the cases (7,8). Other resistance mechanisms have also been described (9,10). A better understanding of the diversity of mechanisms by which tumors acquire resistance to third-generation EGFR inhibitors is of particular relevance to the better clinical management of patients, making the analysis of circulating tumor DNA (ctDNA) during disease progression an attractive means of deriving new insights into tumor biology at different stages of the disease. In this paper, we describe an observational prospective cohort of 22 unselected patients treated with osimertinib with a median follow-up of 62 months. In addition, ctDNA analysis was performed on 326 samples collected throughout the course of disease.
Study cohort
The present observational study was conducted on 22 prospectively enrolled patients. Patients were followed from their diagnosis of stage IV disease. The study was approved by the Hospital Puerta de Hierro Ethics Committee and was conducted in accordance with the precepts of the Code of Ethics of The World Medical Association (Declaration of Helsinki). Written informed consent was obtained from all patients. Briefly, eligible patients were males and females with a pathologically confirmed diagnosis of a stage IIIB-IV NSCLC tumor harboring an EGFR mutation, who were treated with a TKI, and who were candidates for receiving osimertinib. A complete staging workup was performed prior to recruitment. Data on demographic characteristics, clinicopathological features, tumor mutational status, vital status, disease status, drug dose adjustments and discontinuation of medication were collected in the study's electronic database. Computed tomography (CT) measurements and magnetic resonance imaging (MRI) were obtained as clinically indicated. The clinical response was evaluated according to RECIST v1.1 criteria combined with a blinded medical judgment about the benefits of the treatment. Additionally, whole-body 18F-fluoro-2-deoxy-D-glucose positron emission tomography (18FDG-PET) CT scans were performed as clinically indicated.
Laboratory procedures
Three hundred and twenty-six whole blood samples were collected in an 8.5 mL PPT™ tube (Becton Dickinson Franklin Lakes, NJ, USA) containing a gel barrier to separate the plasma after centrifugation. Samples were processed as previously described (11)(12)(13). Briefly, after two consecutive centrifugations, cfDNA was isolated from plasma using the Maxwell ® RSC (MR) ccfDNA Plasma Kit (Promega Corporation, Madison, WI, USA). The original EGFR-sensitizing mutation, and the p.T790M and p.C797S resistant mutations were analyzed by digital PCR (dPCR). Specifically, cfDNA was analyzed using commercially available predesigned TaqMan ® Liquid Biopsy dPCR assays as well as custom TaqMan ® assays in a QuantStudio ® 3D Digital PCR System (Applied Biosystems, South San Francisco, CA, USA). dPCR reactions were carried out in a final volume of 18 μL and using 8.55 μL of cfDNA template. Subsequently, 14.5 μL were loaded into a QuantStudio 3D Digital PCR 20K chip. The cycling conditions were as follows: initial denaturation at 96 ℃ for 10 min, followed by 40 cycles at 56 ℃ for 2 min, and 98 ℃ for 30 s, a step of 72 ℃ for 10 min, and finally samples were maintained at 22 ℃ for at least 30 min. Chip fluorescence was measured twice. Results were analyzed with QuantStudio ® 3D AnalysisSuite TM Cloud Software. The automatic call assignments for each data cluster were manually adjusted when needed. The result of the assay is reported as the ratio of mutant DNA molecules relative to the sum of mutant and wild-type (wt) DNA molecules. A negative and a positive control DNA were included in every run.
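As a worked restatement of the quantity just described (notation ours, not taken from the assay documentation), the reported allele fraction is

\[ \mathrm{AF} = \frac{N_{\mathrm{mut}}}{N_{\mathrm{mut}} + N_{\mathrm{wt}}} \]

so that, for example, 41 mutant molecules counted against 959 wild-type molecules give AF = 41/1,000 = 4.1%, on the order of the median baseline p.T790M AF reported in the Results.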
Libraries were prepared using the Oncomine™ Pan- (14). Mutations with an allele frequency (AF) greater than or equal to 0.1% were considered positive.
Statistical analysis
Discrete variables are presented as frequencies and proportions, and continuous variables as means and standard deviations (SDs), unless otherwise specified. The median follow-up was estimated by the reverse Kaplan-Meier method (15). Overall survival (OS) and PFS were evaluated using the Kaplan-Meier survival function and Cox proportional hazards models. For OS analysis, time from the start of treatment with osimertinib to death or last follow-up was measured. PFS was defined as the time between the start of osimertinib treatment and disease progression, as assessed by RECIST criteria, or all-cause death. Patients who were alive on the last date of assessment and who had not experienced any event were censored at that time. Time to treatment discontinuation (TTD) of targeted therapy was defined as the time from the date when first-line treatment with a TKI began to the date of osimertinib discontinuation or death. Similarly, time to osimertinib discontinuation was also analyzed. Hazard ratios (HRs) were calculated from univariate Cox models. Significance was concluded for P values less than 0.05. Statistical analyses were performed using Stata 15.1 and R 3.1.2 software.
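For reference, the two standard tools named above take the following forms (notation ours). The Kaplan-Meier estimate of the survival function and the Cox proportional hazards model are

\[ \hat{S}(t) = \prod_{t_i \le t} \left( 1 - \frac{d_i}{n_i} \right), \qquad h(t \mid x) = h_0(t)\, e^{\beta^{\top} x}, \]

where d_i is the number of events at time t_i, n_i is the number of patients at risk just before t_i, and h_0(t) is the baseline hazard; each reported HR corresponds to e^{\beta} for a one-unit difference in the corresponding covariate.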
Clinical outcomes
The study cohort included 22 patients. Clinico-pathological characteristics of the study population are presented in Table 1.
The median age at diagnosis was 65 (range, 41-75) years. We found an unusually high prevalence of tobacco consumption, whereby 41% (9/22) of the patients were smokers (3/22) or former-smokers (6/22), with a mean consumption of 35 (SD: 28.5) pack-years. According to the pathologist's report, 54.5% of the cases (12/22) harbored exon 19 deletions. In one case, a deletion in exon 19 co-occurred with the p.S768I mutation in exon 20. In addition, 45.5% (10/22) harbored the point mutation p.L858R in exon 21. These frequencies are as expected, based on previously published data. No significant differences were observed in OS and PFS with respect to the original EGFR-sensitizing mutation. At the start of osimertinib treatment, patients had a median of three metastatic sites, the most frequent locations being the lung (73%), the bone (64%), the pleura (59%), the central nervous system (23%) and the peritoneum (14%). The ECOG Performance Status varied from 0 to 2. Patients with an ECOG Performance Status of 0 exhibited improved OS (P=0.026). The median follow-up was 62 months. During the study, 12 deaths were recorded and progressive disease (PD) to osimertinib was observed in 16 patients (73%). Interestingly, in one patient, a transformation from NSCLC to small-cell lung cancer (SCLC) was observed upon disease progression. Median PFS, since the start of osimertinib treatment, was 8.9 months, whereas median OS, since osimertinib initiation, was 20.7 (IQR: 8.8-27.7) months. Osimertinib was used as a second-line treatment in 11 (50%) patients, while 11 (50%) patients had received two or more lines of treatment prior to that with osimertinib. As expected, the latter group of patients had a significantly poorer outcome in terms of PFS and OS than the former (P<0.004 and 0.020, respectively). Objective responses (RECIST criteria) were observed in 14 (64%) patients. Oligoprogressive disease (oligo-PD) was noted in 9 (41%) patients, and in 7 of these (78%) osimertinib was maintained for a median of 3.8 (IQR: 1.2-9.1) months beyond oligo-PD.
Median treatment durations of sequential gefitinib + osimertinib, afatinib + osimertinib and erlotinib + osimertinib were 30.1, 24.6 and 21.1 months, respectively, indicating that time on targeted therapy was longest in patients treated with the gefitinib + osimertinib combination. However, no significant differences were observed in OS and PFS according to first TKI treatment (afatinib, gefitinib, erlotinib). Figure 1 shows the times on targeted therapy and the time under osimertinib treatment for each patient.
Longitudinal ctDNA monitoring
To analyze the evolution of these tumors throughout the course of treatment, EGFR somatic mutations within ctDNA were prospectively collected from 326 samples and analyzed by dPCR. A blood sample obtained before starting osimertinib treatment was available for all patients. At baseline, the p.T790M mutation was detected in 19 (86%) patients, with a median AF of 4.11% (minimum 0.1%; maximum 37.7%). In the other three cases, the p.T790M mutation was detected only in the re-biopsy (N=2) and in the cerebrospinal fluid (N=1). Notably, two of these plasma-negative T790M patients each had metastases exclusively at the brain level. The original EGFR-sensitizing mutation was detected in all pre-treatment samples. Neither p.T790M AF nor the original EGFR-sensitizing mutation AF at the start of treatment predicted a survival benefit from osimertinib. Nevertheless, ctDNA levels across serial plasma samples were correlated with treatment responses. Specifically, undetectable levels of the original EGFR-sensitizing mutation after 3 months of osimertinib treatment were associated with improved PFS (HR: 0.19, 95% CI: 0.05-0.7). Similarly, patients in whom plasma levels of the original EGFR-sensitizing mutation decreased after 3 months had a better prognosis in terms of PFS (HR: 0.14, 95% CI: 0.23-0.86). On the other hand, re-emergence of the original EGFR mutation, alone or together with the p.T790M mutation, was significantly associated with shorter PFS (HR: 8.8, 95% CI: 1.1-70.7 and HR: 5.9, 95% CI: 1.2-27.9, respectively), indicating that ctDNA quantification is informative in terms of prognosis also in this group of patients.
Molecular patterns upon disease progression
In order to assess the frequency of the p.C797S (c.2389T>A and c.2390G>C) mutation at the time of osimertinib progression in our population, dPCR was performed in all samples collected at osimertinib progression (N=16) (Figure 2A). At this time, the p.C797S mutation was found along with the p.T790M mutation as well as the original EGFR-sensitizing mutation in 3 (19%) patients (two cases with the p.L858R mutation and one with a deletion in exon 19). Specifically, two cases harbored the c.2390G>C mutation and one featured the c.2389T>A mutation. Remarkably, dPCR analysis did not identify the p.C797S mutation in any of the previously collected samples, indicating that cells with this mutation were positively selected over the course of therapy. The p.C797S mutation was detected at a lower AF than p.T790M mutation levels, which, at the same time, were lower than the sensitizing mutation AF (Figure 2B). Interestingly enough, patients showing this "triplet pattern" (sensitizing+/T790M+/C797S+) tended to exhibit longer PFS and OS than patients who did not (P=0.1, Figure S1). In 2 patients (12.5%), plasma levels of the original EGFR-sensitizing mutation were again detected at the time of disease progression alongside the p.T790M mutation (Figure 2C). This "duplet pattern" (sensitizing+/T790M+) was detected in patients with a high tumor load. Finally, in the other 11 (69%) cases, there was a prominent increase in the original EGFR-sensitizing mutation, with null or residual levels of the p.T790M mutation detected (Figure 2D), suggesting that osimertinib was able to eliminate the p.T790M-mutated clone in this subset of patients (sensitizing+), even though the tumor was able to become resistant to treatment. The median time to progression in patients showing the triplet pattern (sensitizing+/T790M+/C797S+) was 12.27 months, compared with 4.87 months in patients in whom only the original EGFR-sensitizing mutation was detected, and 2.17 months in patients with the duplet pattern (sensitizing+/T790M+). Figure S2 shows how early the appearance of the resistance mechanism was detected during ctDNA monitoring.
Next-generation sequencing (NGS) analysis upon osimertinib progression
ctDNA collected at the time of disease progression was available from seven patients for NGS analysis. In this subset of patients, PIK3CA mutations were the alterations most frequently detected upon disease progression, being found in four patients. Specifically, we identified the p.E545K mutation in one patient (Table 2). The analysis of previous plasma samples by dPCR revealed that this mutation was not present at the start of the treatment (Figure 3). Likewise, the mutation p.E545A was detected at disease progression in three patients. Curiously, we detected the p.S464L mutation in the EGFR gene in a patient who was treated with cetuximab plus afatinib prior to osimertinib therapy. In addition, the p.A750P mutation in the EGFR gene was found in another patient who harbored the deletion in exon 19 p.L747_A750>P. Retrospective analysis of plasma samples revealed that the A750P mutation was also present at the start of osimertinib treatment, although at a very low AF. Finally, an EGFR copy-number gain was detected by NGS in one case. However, this alteration could not be confirmed by any other alternative technique.
On the other hand, we found that the median TTD was 8.7 (IQR: 2.8-10.1) months in patients whose tumors harbored co-mutations in TP53, compared with 18 (IQR: 7.8-28.2) months in patients whose tumors were negative for TP53 mutations. However, the difference noted was not statistically significant, given the small sample size.
Discussion
There is growing evidence of the usefulness of liquid biopsy as an effective tool for biomarker testing and treatment monitoring. In the present study, the p.T790M mutation was detected in the plasma of 19 (86%) patients at baseline, supporting the clinical utility of liquid biopsies for decision-making about treatment. Nevertheless, the possibility of a false-negative result should be ruled out using tumor tissue obtained by biopsy (16). The reported sensitivities of the different assays for EGFR mutation detection using cfDNA from advanced NSCLC patients vary as much as from 30% to 100% (17). Although the cohort presented in this study is rather limited, our results support the usefulness of dPCR for plasma p.T790M testing. On the other hand, levels of the original EGFR-sensitizing mutation after 3 months of osimertinib treatment were of prognostic significance. Notably, the effect size was substantial (HR: 0.19, 95% CI: 0.05-0.7). Several studies have reported that EGFR mutation tracking correlates with treatment outcome (11,12). However, it is important to mention that in the case of NSCLC patients resistant to first/second-generation EGFR-TKIs, treated with osimertinib, only the original EGFR-sensitizing mutation is informative for monitoring purposes. According to our data, a complete clearance of the p.T790M mutation was found in 69% of the patients with PD, and therefore, the p.T790M mutation is not useful in monitoring the response to osimertinib. In the same way, previous studies have reported similar results (18,19). According to plasma genotyping, we were able to define three molecular patterns upon disease progression in patients treated with osimertinib, highlighting the importance of heterogeneity in advanced disease. These patterns were also reported in a study cohort of 22 patients who became resistant to osimertinib and from whom cfDNA was collected during the phase I AURA study (7). Similarly, other studies have shown that the p.C797S mutation is always detected in conjunction with the p.T790M mutation as well as the original EGFR-sensitizing mutation (9,12,20). According to our data, these patterns may determine different prognoses. In our study, patients showing the "triplet pattern" (sensitizing+/T790M+/C797S+) tended to have better PFS and OS (P=0.1), suggesting that tumors that become resistant to osimertinib through p.T790M loss may have a poorer outcome. Likewise, Oxnard et al. reported that acquired resistance to osimertinib mediated by loss of the p.T790M mutation was associated with early treatment failure (21). However, despite its pertinence in this context, this observation requires confirmation in larger cohorts. NGS profiling of plasma samples has proved to be a valuable approach for identifying resistance mutations. In our hands, activating mutations in codon 545 of the PIK3CA gene were frequently observed upon osimertinib progression. Likewise, other researchers have proposed that mutations in codon 545 of the PIK3CA gene constitute a common resistance mechanism to third-generation TKIs (22). Similarly, Yang et al. reported that mutations in PIK3CA potentially contribute to osimertinib resistance in patients without secondary EGFR mutations (23). In addition, we found the p.S464L mutation in the EGFR gene in the tumor of a patient treated with cetuximab plus afatinib prior to osimertinib therapy. Remarkably, this mutation has been reported in colorectal tumors that are refractory to cetuximab (24).
On the other hand, our results show that the efficacy of osimertinib in real-world practice was similar to that observed in clinical trials, with a favorable adverse-effect profile. Similar results have recently been reported in a large real-world study (25). Strikingly, time on targeted therapy was longer in patients treated with the gefitinib + osimertinib combination than in those who received one of the other two combinations, although no significant difference in PFS according to first-line TKI was found.
It is important to mention that the small sample size of the present study is an important limitation; therefore, although our results are of particular interest, they need to be validated in appropriately sized cohorts.
Conclusions
In summary, we report a comprehensive descriptive study of a real-world cohort of patients treated with osimertinib as second-line treatment. Analysis of ctDNA during the course of the disease revealed three molecular patterns that might confer different prognoses. Besides the p.C797S mutation, putative PIK3CA mutations might underlie osimertinib resistance in patients without secondary EGFR mutations. |
import com.jnape.palatable.lambda.adt.hlist.Tuple2;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;

import static com.jnape.palatable.lambda.functions.builtin.fn2.Map.map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;

/**
* An immutable heterogeneous mapping from a parametrized type-safe key to any value, supporting a minimal mapping
* interface.
*
* @see TypeSafeKey
* @see com.jnape.palatable.lambda.adt.hlist.HList
*/
public class HMap implements Iterable<Tuple2<TypeSafeKey, Object>> {
private static final HMap EMPTY = new HMap(emptyMap());
private final Map<TypeSafeKey, Object> table;
private HMap(Map<TypeSafeKey, Object> table) {
this.table = table;
}
/**
* Retrieve the value at this key.
*
* @param key the key
* @param <T> the value type
* @return the value at this key wrapped in an {@link Optional}, or {@link Optional#empty}.
*/
@SuppressWarnings("unchecked")
public <T> Optional<T> get(TypeSafeKey<T> key) {
return Optional.ofNullable((T) table.get(key));
}
/**
* Retrieve the value at this key, throwing a {@link NoSuchElementException} if this key is unmapped.
*
* @param key the key
* @param <V> the value type
* @return the value at this key
* @throws NoSuchElementException if the key is unmapped
*/
public <V> V demand(TypeSafeKey<V> key) throws NoSuchElementException {
return get(key).orElseThrow(() -> new NoSuchElementException("Demanded value for key " + key + ", but couldn't find one."));
}
/**
* Store a value for the given key.
*
* @param key the key
* @param value the value
* @param <V> the value type
* @return the updated HMap
*/
public <V> HMap put(TypeSafeKey<V> key, V value) {
return alter(t -> t.put(key, value));
}
/**
* Store all the key/value mappings in <code>hMap</code> in this HMap.
*
* @param hMap the other HMap
* @return the updated HMap
*/
public HMap putAll(HMap hMap) {
return alter(t -> t.putAll(hMap.table));
}
/**
* Determine if a key is mapped.
*
* @param key the key
* @return true if the key is mapped; false otherwise
*/
public boolean containsKey(TypeSafeKey key) {
return table.containsKey(key);
}
/**
* Remove a mapping from this HMap.
*
* @param key the key
* @return the updated HMap
*/
public HMap remove(TypeSafeKey key) {
return alter(t -> t.remove(key));
}
/**
* Remove all the key/value mappings in <code>hMap</code> from this HMap.
*
* @param hMap the other HMap
* @return the updated HMap
*/
public HMap removeAll(HMap hMap) {
return alter(t -> t.keySet().removeAll(hMap.table.keySet()));
}
/**
* Retrieve all the mapped keys.
*
* @return an Iterable of all the mapped keys
*/
public Iterable<TypeSafeKey> keys() {
return map(Tuple2::_1, this);
}
/**
* Retrieve all the mapped values.
*
* @return an Iterable of all the mapped values
*/
public Iterable<Object> values() {
return map(Tuple2::_2, this);
}
/**
* Return a standard {@link Map} view of the current snapshot of this {@link HMap}. Note that updates to either the
* {@link Map} view or to the original {@link HMap} do not propagate to the other.
*
* @return the map view
*/
public Map<TypeSafeKey, Object> toMap() {
return new HashMap<>(table);
}
@Override
public Iterator<Tuple2<TypeSafeKey, Object>> iterator() {
return map(Tuple2::fromEntry, table.entrySet()).iterator();
}
@Override
public boolean equals(Object other) {
if (other instanceof HMap) {
HMap that = (HMap) other;
return Objects.equals(this.table, that.table);
}
return false;
}
@Override
public int hashCode() {
return 31 * Objects.hashCode(table);
}
@Override
public String toString() {
return "HMap{" +
"table=" + table +
'}';
}
private HMap alter(Consumer<Map<TypeSafeKey, Object>> alterFn) {
HashMap<TypeSafeKey, Object> copy = new HashMap<>(table);
alterFn.accept(copy);
return new HMap(copy);
}
/**
* Static factory method for creating an empty HMap.
*
* @return an empty HMap
*/
public static HMap emptyHMap() {
return EMPTY;
}
/**
* Static factory method for creating a singleton HMap.
*
* @param key the only mapped key
* @param value the only mapped value
* @param <V> the only mapped value type
* @return a singleton HMap
*/
public static <V> HMap singletonHMap(TypeSafeKey<V> key, V value) {
return new HMap(singletonMap(key, value));
}
/**
* Static factory method for creating an HMap from two given associations.
*
* @param key1 the first mapped key
* @param value1 the value mapped at key1
* @param key2 the second mapped key
* @param value2 the value mapped at key2
* @param <V1> value1's type
* @param <V2> value2's type
* @return an HMap with the given associations
*/
public static <V1, V2> HMap hMap(TypeSafeKey<V1> key1, V1 value1,
TypeSafeKey<V2> key2, V2 value2) {
return singletonHMap(key1, value1).put(key2, value2);
}
/**
* Static factory method for creating an HMap from three given associations.
*
* @param key1 the first mapped key
* @param value1 the value mapped at key1
* @param key2 the second mapped key
* @param value2 the value mapped at key2
* @param key3 the third mapped key
* @param value3 the value mapped at key3
* @param <V1> value1's type
* @param <V2> value2's type
* @param <V3> value3's type
* @return an HMap with the given associations
*/
public static <V1, V2, V3> HMap hMap(TypeSafeKey<V1> key1, V1 value1,
TypeSafeKey<V2> key2, V2 value2,
TypeSafeKey<V3> key3, V3 value3) {
return hMap(key1, value1, key2, value2).put(key3, value3);
}
} |
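For orientation, a minimal usage sketch of the HMap API above. TypeSafeKey construction varies by library version, so the static typeSafeKey() factory used here is an assumption, flagged again in the comments:

import java.util.Optional;

// Assumes HMap and TypeSafeKey from the same package are on the classpath;
// TypeSafeKey.typeSafeKey() is a factory present in some versions of this
// library and is an assumption here.
public class HMapExample {
    public static void main(String[] args) {
        TypeSafeKey<String> name = TypeSafeKey.typeSafeKey();
        TypeSafeKey<Integer> age = TypeSafeKey.typeSafeKey();

        HMap m = HMap.emptyHMap()
                .put(name, "Alice")
                .put(age, 42);

        Optional<String> maybeName = m.get(name); // Optional.of("Alice"), statically typed
        int a = m.demand(age);                    // 42; throws NoSuchElementException if unmapped

        HMap smaller = m.remove(age);             // persistent update: m itself is unchanged
        System.out.println(maybeName.get() + " " + a + " " + smaller.containsKey(age));
        // prints: Alice 42 false
    }
}

Because every update goes through the copy-on-write alter() helper, earlier references such as m are never mutated.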
//
// FCWKErrorCode.h
// FitCloudWFKit
//
// Created by pcjbird on 7/10/20.
// Copyright © 2020 Zero Status. All rights reserved.
//
#ifndef FCWKErrorCode_h
#define FCWKErrorCode_h
/**
 * @brief Error code definitions
 */
typedef NS_ENUM(NSInteger, FCWKERRORCODE)
{
    FCWKERRORCODE_UNKNOWN = 1000,                   //unknown error
    FCWKERRORCODE_TEMPLATEBINFILENOTEXIST = 2000,   //watch face template bin file does not exist
    FCWKERRORCODE_BKIMAGENOTVALID = 2001,           //invalid watch face background image
    FCWKERRORCODE_PREVIEWNOTVALID = 2002,           //invalid watch face preview image
    FCWKERRORCODE_TEMPLATEBINCOPYFAILURE = 2003,    //failed to copy the watch face template bin file
    FCWKERRORCODE_BKIMAGEBADSIZE = 2004,            //wrong watch face background image dimensions
    FCWKERRORCODE_PREVIEWBADSIZE = 2005,            //wrong watch face preview image dimensions
    FCWKERRORCODE_BKIMAGERESIZEFAILURE = 2006,      //failed to resize the watch face background image
    FCWKERRORCODE_PREVIEWRESIZEFAILURE = 2007,      //failed to resize the watch face preview image
    FCWKERRORCODE_COMMONEXCEPTION = 2008,           //exception raised while generating the watch face
    FCWKERRORCODE_BADPUSHINDEX = 3001,              //invalid watch face push slot (index)
    FCWKERRORCODE_TEMPLATEBINFILEBADSIZE = 3002,    //wrong watch face template bin file length
    FCWKERRORCODE_BADWATCHFACENO = 3003,            //illegal watch face number
};
#define FCWKERRORDOMAIN @"FCWKERRORDOMAIN"
#define FCWKERRORMAKE(ecode, emsg) ([NSError errorWithDomain:(FCWKERRORDOMAIN) code:(ecode) userInfo:([NSDictionary dictionaryWithObjectsAndKeys:((emsg != nil) ? emsg : @""), @"message", ((emsg != nil) ? emsg : @""), NSLocalizedDescriptionKey, ((emsg != nil) ? emsg : @""), @"description", nil])])
#endif /* FCWKErrorCode_h */
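For orientation, a one-line usage sketch of the FCWKERRORMAKE macro above (the message text is illustrative):

// Illustrative only:
NSError *error = FCWKERRORMAKE(FCWKERRORCODE_BKIMAGEBADSIZE, @"watch face background image has the wrong size");
// error.domain == FCWKERRORDOMAIN, error.code == 2004, and the message is
// stored under @"message", NSLocalizedDescriptionKey and @"description".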
|
s = int(input())
mod = 1000000007
# dp[n] counts the ordered sums of integers >= 3 that total n
# (recurrence: dp[n] = dp[n-1] + dp[n-3]; e.g. dp[6] = 2 for [6] and [3,3]).
dp = [0] * max(s + 1, 6)
dp[3] = dp[4] = dp[5] = 1
for i in range(6, s + 1):
    dp[i] = (dp[i - 1] + dp[i - 3]) % mod
print(dp[s])
#ifndef TestFunctions_h
#define TestFunctions_h
class vtkUnstructuredGrid;
class vtkStructuredGrid;
class vtkPolyData;
class vtkMultiBlockDataSet;
void Create(vtkUnstructuredGrid*, int);
void Create(vtkStructuredGrid*, int);
void Create(vtkPolyData*);
void CreatePolyhedral(vtkUnstructuredGrid*);
int PolydataTest(vtkMultiBlockDataSet*, unsigned int, unsigned int);
int UnstructuredGridTest(vtkMultiBlockDataSet*, unsigned int, unsigned int, int);
int StructuredGridTest(vtkMultiBlockDataSet*, unsigned int, unsigned int, int);
int PolyhedralTest(vtkMultiBlockDataSet*, unsigned int, unsigned int);
#define vtk_assert(x) \
if (!(x)) \
{ \
cerr << "On line " << __LINE__ << " ERROR: Condition FAILED!! : " << #x << endl; \
return EXIT_FAILURE; \
}
#endif
|
WASHINGTON (Reuters) - President Donald Trump’s effort to reshape influential U.S. courts by stocking them with conservative judges faces at least one significant impediment: some of the courts best placed to thwart his agenda have liberal majorities that are likely to stay in place in the short-term.
FILE PHOTO - The James R. Browning U.S. Court of Appeals Building, home of the 9th U.S. Circuit Court of Appeals, is pictured in San Francisco, California, U.S. on February 7, 2017. REUTERS/Noah Berger/File Photo
Those courts, including an influential Washington appeals court and two appellate courts that ruled against Trump in cases involving his travel ban, all had an influx of fresh liberal blood under President Barack Obama.
“Trump is not going to be able to make any significant dents into the Democrats’ control of those three (appellate) circuits,” said Arthur Hellman, a professor at the University of Pittsburgh School of Law.
After eight years in office, Obama was able to make enough appointments to leave a strong liberal imprint on the federal courts. When he left the White House in January, nine of the 13 federal appeals courts had a majority of Democratic-appointed judges.
The federal appeals courts, divided into 11 geographic regions plus two based in the District of Columbia, often have the final say in major legal disputes. The conservative-leaning U.S. Supreme Court, which now includes Trump appointee Neil Gorsuch, can overrule appeals court decisions, but hears only a small number of cases, which leaves lower court rulings in place most of the time.
The appeals courts can shape the interpretation of such issues as abortion, religious freedom, voting rights and race.
Appellate court judges serve lifetime terms, and so far Trump’s opportunities to appoint new ones have been mostly limited to courts that already lean conservative.
Among the courts conservatives would most like to shift are the San Francisco-based 9th U.S. Circuit Court of Appeals and the Richmond, Virginia-based 4th U.S. Circuit Court of Appeals, both of which ruled against Trump in cases challenging the Republican president’s executive orders temporarily banning travel to the United States by people from six predominantly Muslim countries. The Supreme Court partially revived the ban last month.
The ideological balance of those courts has provided some hope to Trump’s legal opponents, including Democratic state attorneys general, who have already sued the administration over the travel ban and other issues.
“Strategic attorneys general can likely chart a path through Democratic-leaning courts - making it harder on the Trump administration to pursue its agenda through executive action,” said Sarah Binder, a scholar at the nonpartisan Brookings Institution think tank.
SECOND HIGHEST COURT
Thanks in part to efforts by the Republican-led Senate in the final two years of Obama’s presidency to block the Democratic president’s judicial appointments, Trump has 21 appeals court slots to fill, according to the federal judiciary’s administrative office. The Senate Judiciary Committee approved two of Trump’s appeals court nominees on Thursday.
In some of the key courts, new vacancies in the near term are more likely to be in slots currently filled by Republican appointees, so Trump’s early appointments are unlikely to change the court balance. A case in point is the announcement this week by Republican appointee Janice Rogers Brown on the Washington appeals court that she plans to step down.
In Washington, the U.S. Court of Appeals for the District of Columbia Circuit is often called the second-highest court in the land, in part because it hears a large number of cases concerning major government regulations on issues such as the environment and labor.
Democratic appointees hold a 7-4 advantage among the 11 active judges.
Of the Democratic appointees, Judge David Tatel is 75 and Judge Judith Rogers is 77. The next oldest is Merrick Garland at 64. Obama made four appointments to the court, the oldest of whom is 56.
The San Francisco-based 9th U.S. Circuit Court of Appeals is also stacked against Trump: out of 25 full-time judges, Democratic presidents appointed 18 and Republican presidents seven. The oldest Democratic appointee, liberal icon Stephen Reinhardt, is 86 years old. The 10 9th Circuit judges appointed by President Bill Clinton who still work full-time range in age from 63 to 72, and none have given any indication they plan to reduce their caseloads.
The court currently has four vacancies, but three of those seats are based in states - California, Oregon and Hawaii - with two Democratic senators who are likely to resist attempts to place extreme conservatives on the court. Those dynamics make it extremely unlikely that Trump will be able to significantly alter the 9th Circuit’s orientation, Hellman said.
The Virginia-based 4th Circuit has a 10-5 majority of Democratic appointees and no current vacancies. The oldest Democratic appointee is Judge Robert King, who is 77.
Leonard Leo, executive vice president of the conservative Federalist Society, who has advised the White House on judicial nominations, is optimistic about Trump’s long-term prospects.
“It takes time to recalibrate the federal courts,” he said.
In the meantime, Leo said, every slot matters, since conservative judges on liberal-majority courts can offer critical dissents in important cases and serve a kind of signaling function to the Supreme Court. |
import re
import warnings

import sqlparse
from sqlparse import sql
from sqlparse.tokens import DDL, DML, Keyword

# NOTE: unix_timestamp is this project's own date helper; its import path is
# not shown in this excerpt.


class DBEstParser:
"""parse a single SQL query, of the following form:
- **DDL**
>>> CREATE TABLE t_m(y real, x real)
>>> FROM tbl
>>> [GROUP BY z]
>>> [SIZE 0.01]
    >>> [METHOD UNIFORM|HASH]
>>> [ENCODING ONEHOT|BINARY]
- **DML**
>>> SELECT AF(y)
>>> FROM t_m
>>> [WHERE x BETWEEN a AND b]
>>> [GROUP BY z]
.. note::
- model name should be ended with **_m** to indicate that it is a model, not a table.
- AF, or aggregate function, could be COUNT, SUM, AVG, VARIANCE, PERCENTILE, etc.
"""
def __init__(self):
self.query = ""
self.parsed = None
def parse(self, query):
"""
parse a single SQL query, of the following form:
- **DDL**
>>> CREATE TABLE t_m(y real, x_1 real, ... x_n categorical)
>>> FROM tbl
>>> [GROUP BY z]
>>> [SIZE 0.01]
        >>> [METHOD UNIFORM|HASH]
- **DML**
>>> SELECT AF(y)
>>> FROM t_m
>>> [WHERE x BETWEEN a AND b]
>>> [GROUP BY z]
- **parameters**
:param query: a SQL query
"""
self.query = re.sub(' +', ' ', query).replace(" (", "(").lstrip()
if "between" in self.query.lower():
raise ValueError(
"BETWEEN clause is not supported, please use 0<=x<=10 instead.")
self.parsed = sqlparse.parse(self.query)[0]
def if_nested_query(self):
idx = 0
if not self.parsed.is_group:
return False
for item in self.parsed.tokens:
if item.ttype is DML and item.value.lower() == 'select':
idx += 1
if idx > 1:
return True
return False
def get_dml_aggregate_function_and_variable(self):
values = self.parsed.tokens[2].normalized
if "," in values:
splits = values.split(",")
# print(splits)
y_splits = splits[1].replace(
"(", " ").replace(")", " ") # .split(" ")
# print(y_splits)
if "distinct" in y_splits.lower():
y_splits = y_splits.split()
# print(y_splits)
return splits[0], [y_splits[i] for i in [0, 2, 1]]
else:
y_splits = y_splits.split()
y_splits.append(None)
return splits[0], y_splits
else:
y_splits = values.replace(
"(", " ").replace(")", " ")
if "distinct" in y_splits.lower():
y_splits = y_splits.split()
# print(y_splits)
return None, [y_splits[i] for i in [0, 2, 1]]
else:
y_splits = y_splits.split()
y_splits.append(None)
return None, y_splits
# for item in self.parsed.tokens:
# print(self.parsed.tokens[2].normalized)
# if item.ttype is DML and item.value.lower() == 'select':
# print(self.parsed.token_index)
# idx = self.parsed.token_index(item, 0) + 2
# return self.parsed.tokens[idx].tokens[0].value, \
# self.parsed.tokens[idx].tokens[1].value.replace(
# "(", "").replace(")", "")
def if_where_exists(self):
for item in self.parsed.tokens:
if 'where' in item.value.lower():
return True
return False
# def get_where_x_and_range(self):
# for item in self.parsed.tokens:
# if 'where' in item.value.lower():
# print(item)
# print(item.tokens)
# whereclause = item.value.lower().split()
# idx = whereclause.index("between")
# # print(idx)
# return whereclause[idx-1], whereclause[idx+1], whereclause[idx+3]
# # return whereclause[1], whereclause[3], whereclause[5]
# def get_where_x_and_range(self):
# for item in self.parsed.tokens:
# if 'where' in item.value.lower():
# for it in item.tokens:
# # print(it.value, it.ttype)
# if isinstance(it, sql.Comparison):
# splits = it.value.replace("=", "").split("<")
# return [splits[0], splits[2]]
def drop_get_model(self):
if self.get_query_type() != "drop":
raise TypeError("This is not a DROP query, please check it.")
else:
for item in self.parsed.tokens:
if isinstance(item, sql.Identifier):
return item.normalized
def get_dml_where_categorical_equal_and_range(self):
""" get the equal and range selection for categorical attributes.
For example,
321<X1 < 1123 and x2 = 'HaHaHa' and x3='' and x4<5 produces
['x2', 'x3'],
["'HaHaHa'", "''"],
{'X1': ['321', '1123', False, False], 'x4': [None, 5.0, False, False]}
Raises:
ValueError: unexpected condition in SQL
Returns:
tuple: list, list, dict
"""
equal_xs = []
equal_values = []
conditions = {}
for item in self.parsed.tokens:
clause_lower = item.value.lower().replace("( ", "(").replace(" )", ")")
clause = item.value.replace("( ", "(").replace(" )", ")")
if 'where' in clause_lower:
# for token in item:
# print(token.is_group, token.is_keyword,
# token.is_whitespace, token.normalized)
splits = clause.replace("=", " = ").replace(
"AND", "and").replace("where", "").split("and")
# splits_lower = clause_lower.replace("=", " = ").split("and")
# print("splits", splits)
for condition in splits:
if any(pattern in condition for pattern in ["=", ">", "<"]):
condition = condition.replace(" ", "")
                        # first check whether the condition is bi-directional (e.g. 1<x<2) or not.
count_less = condition.count("<")
if count_less == 2: # 1<x<2
condition_no_equal = condition.replace("=", "")
splits = condition_no_equal.split("<")
if "unix_timestamp" in splits[0]:
left = unix_timestamp(splits[0].replace(
"unix_timestamp(", "").replace(")", "").replace("'", "").replace('"', ''))
else:
left = float(splits[0])
if "unix_timestamp" in splits[2]:
right = unix_timestamp(splits[2].replace(
"unix_timestamp(", "").replace(")", "").replace("'", "").replace('"', ''))
else:
right = float(splits[2])
cond = [left, right]
key = splits[1]
splits = condition.split(splits[1])
if "=" in splits[0]: # 0 <= x <...
cond.append(True)
else:
cond.append(False)
if "=" in splits[1]: # ...< x <=2
cond.append(True)
else:
cond.append(False)
conditions[key] = cond
else:
if "<=" in condition:
splits = condition.split("<=")
conditions[splits[0]] = [
None, splits[1], False, True]
elif ">=" in condition:
splits = condition.split(">=")
conditions[splits[0]] = [
splits[1], None, True, False]
elif "<" in condition:
splits = condition.split("<")
conditions[splits[0]] = [
None, splits[1], False, False]
elif ">" in condition:
splits = condition.split(">")
conditions[splits[0]] = [
splits[1], None, False, False]
elif "=" in condition:
splits = condition.split("=")
equal_xs.append(splits[0])
equal_values.append(splits[1])
# print(equal_xs, equal_values)
else:
raise ValueError(
"unexpected condition in SQL: ", condition)
return [equal_xs, equal_values, conditions]
# # # = condition
# # if "=" in condition and not any(pattern in condition for pattern in [">", "<"]):
# # print("only =")
# # equal_xs.append()
# # equal_values.append()
# # # >= <= condition
# # elif "=" in condition:
# # print(">=")
# # print(condition.count("="))
# # # no = condition, which is > or <
# # else:
# # print("no =")
# # indexes = [m.start() for m in re.finditer('=', clause)]
# splits = clause.replace("=", " = ").split()
# splits_lower = clause_lower.replace("=", " = ").split()
# # print(clause)
# # print(clause.count("="))
# xs = []
# values = []
# while True:
# if "=" not in splits:
# break
# idx = splits.index("=")
# xs.append(splits_lower[idx-1])
# if splits[idx+1] != "''":
# values.append(splits[idx+1].replace("'", ""))
# else:
# values.append("")
# splits = splits[idx+3:]
# splits_lower = splits_lower[idx+3:]
# # print(splits)
# # print(xs, values)
# return xs, values
def if_contain_groupby(self):
for item in self.parsed.tokens:
if item.ttype is Keyword and item.value.lower() == "group by":
return True
return False
# def if_contain_scaling_factor(self):
# for item in self.parsed.tokens:
# if item.ttype is Keyword and item.value.lower() == "scale":
# return True
# return False
# def get_scaling_method(self):
# if not self.if_contain_scaling_factor():
# return "data"
# else:
# for item in self.parsed.tokens:
# if item.ttype is Keyword and item.value.lower() == "scale":
# idx = self.parsed.token_index(item, 0) + 2
# if self.parsed.tokens[idx].value.lower() not in ["data", "file"]:
# raise ValueError(
# "Scaling method is not set properly, wrong argument provided.")
# else:
# method = self.parsed.tokens[idx].value.lower()
# if method == "file":
# file = self.parsed.tokens[idx+2].value.lower()
# return method, file
# else:
# return method, None
def get_groupby_value(self):
for item in self.parsed.tokens:
if item.ttype is Keyword and item.value.lower() == "group by":
idx = self.parsed.token_index(item, 0) + 2
groups = self.parsed.tokens[idx].value
return groups.replace(" ", "").split(",")
def if_ddl(self):
for item in self.parsed.tokens:
if item.ttype is DDL and item.value.lower() == "create":
return True
return False
def get_ddl_model_name(self):
for item in self.parsed.tokens:
if item.ttype is None and "(" in item.value.lower():
return item.tokens[0].value
def get_y(self):
item = self.parsed.tokens[4].value
index_comma = item.index(",")
item = item[:index_comma]
y_list = item.lower().replace(
"(", " ").replace(")", " ").replace(",", " ").split()
# print("y_list", y_list)
if y_list[2] not in ["real", "categorical"]:
raise TypeError("Unsupported type for " +
y_list[1] + " -> " + y_list[2])
# if item.ttype is None and "(" in item.value.lower():
# y_list = item.tokens[1].value.lower().replace(
# "(", "").replace(")", "").replace(",", " ").split()
# if y_list[1] not in ["real", "categorical"]:
# raise TypeError("Unsupported type for " +
# y_list[0] + " -> " + y_list[1])
if len(y_list) == 4:
return [y_list[1], y_list[2], y_list[3]]
else:
return [y_list[1], y_list[2], None]
# return item.tokens[1].tokens[1].value, item.tokens[1].tokens[3].value
def get_x(self):
item = self.parsed.tokens[4].value
index_comma = item.index(",")
item = item[index_comma+1:]
x_list = item.lower().replace(
"(", "").replace(")", "").replace(",", " ").split()
# print(x_list)
        continuous = []
        categorical = []
        for idx in range(1, len(x_list), 2):
            if x_list[idx] == "real":
                continuous.append(x_list[idx-1])
            if x_list[idx] == "categorical":
                categorical.append(x_list[idx-1])
        if len(continuous) > 1:
            raise SyntaxError(
                "Only one continuous independent variable is supported at "
                "this moment, please modify your SQL query accordingly.")
        # print("continuous,", continuous)
        # print("categorical,", categorical)
        return continuous, categorical
def get_from_name(self):
for item in self.parsed.tokens:
if item.ttype is Keyword and item.value.lower() == "from":
idx = self.parsed.token_index(item, 0) + 2
return self.parsed.tokens[idx].value
def get_sampling_ratio(self):
for item in self.parsed.tokens:
if item.ttype is Keyword and item.value.lower() == "size":
idx = self.parsed.token_index(item, 0) + 2
value = self.parsed.tokens[idx].value
try:
value_float = float(value)
if "." not in value:
value = int(value_float)
else:
value = float(value)
except ValueError:
value = value.replace("'", "")
return value
return 1 # if sampling ratio is not passed, the whole dataset will be used to train the model
def get_sampling_method(self):
for item in self.parsed.tokens:
if item.ttype is Keyword and item.value.lower() == "method":
idx = self.parsed.token_index(item, 0) + 2
return self.parsed.tokens[idx].value
return "uniform"
def if_model_need_filter(self):
if not self.if_contain_groupby():
return False
x = self.get_x()
gbs = self.get_groupby_value()
# print("x", x)
if x[0][0] in gbs:
return True
else:
return False
def get_query_type(self):
item = self.parsed.tokens
if item[0].ttype is DML:
return "select"
elif item[0].ttype is DDL and item[0].normalized == "CREATE":
return "create"
elif item[0].ttype is DDL and item[0].normalized == "DROP":
return "drop"
elif item[0].ttype is Keyword and item[0].normalized == "SET":
return "set"
elif item[0].ttype is Keyword and item[0].normalized == "SHOW":
return "show"
else:
warnings.warn("Unexpected SQL:")
def get_set_variable_value(self):
item = self.parsed.tokens
if item[0].ttype is Keyword and item[0].normalized == "SET":
for comparison in item:
# print("comparison", comparison)
if isinstance(comparison, sql.Comparison) or isinstance(comparison, sql.IdentifierList):
# print("SQL contain comparison.")
splits = comparison.value.split("=")
# print("splits[1]", splits[1])
if any(i in splits[1] for i in ["'", '"']):
# value is a string
# print("value is string")
splits[1] = splits[1].replace("'", "").replace('"', '')
# print("splits[1] BEFORE", splits[1])
if splits[1].lower() == "true":
splits[1] = True
elif splits[1].lower() == "false":
splits[1] = False
else:
pass
# print("splits[1]", splits[1])
elif "." in splits[1]:
splits[1] = float(splits[1])
else:
splits[1] = int(float(splits[1]))
return splits[0], splits[1]
warnings.warn(
"error parsing the SQL. Possible solution is set val='True' instead of set val=True")
return
# def get_filter(self):
# x_between_and = self.get_where_x_and_range()
# gbs = self.get_groupby_value()
# # print("x_between_and", x_between_and)
# if x_between_and[0] not in gbs:
# return None
# else:
# try:
# return [float(item) for item in x_between_and[1:]]
# except ValueError:
# # check if timestamp exists
# if "unix_timestamp" in x_between_and[1]:
# # print([unix_timestamp(item.replace("unix_timestamp(", "").replace(")", "").replace("'", "").replace('"', '')) for item in x_between_and[1:]])
# return [unix_timestamp(item.replace("unix_timestamp(", "").replace(")", "").replace("'", "").replace('"', '')) for item in x_between_and[1:]]
# else:
# raise ValueError("Error parse SQL.") |
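A short usage sketch of the parser above, on a hypothetical lowercase query (the WHERE handling strips the literal lowercase keyword "where", so lowercase input is assumed); expected values are noted in comments:

if __name__ == "__main__":
    p = DBEstParser()
    p.parse("select count(y) from t_m where 1<=x<=10 group by z")
    print(p.get_query_type())        # expected: select
    print(p.if_contain_groupby())    # expected: True
    print(p.get_groupby_value())     # expected: ['z']
    print(p.get_dml_where_categorical_equal_and_range())
    # expected: [[], [], {'x': [1.0, 10.0, True, True]}]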
The Colorado Division of Motor Vehicles will pilot a new design for driver licenses, instruction permits and identification cards at three different Colorado offices starting on March 1.
The new design features a more colorful background including Mount Sneffels -- which is located between Ouray and Telluride. It also has laser engraving of the resident's information and primary photograph in grayscale to add security to the ID.
Front and back of the new Colorado license
The Aurora, Frisco and Littleton state driver license offices will pilot the program. Littleton starts on March 1. Aurora and Frisco start a few weeks later. The offices will close for one day each to prepare for the pilot launch.
Residents can continue to use their valid IDs with the current design through the expiration date. There is no need to renew until the ID card expires. The current design will continue to be issued until the pilot program is complete.
For more information, visit: www.colorado.gov/dmv/newlook.
Copyright 2016 KUSA |
package com.mvp4g.example.client.main.presenter;
import com.mvp4g.client.annotation.Presenter;
import com.mvp4g.client.presenter.BasePresenter;
import com.mvp4g.example.client.main.MainEventBus;
import com.mvp4g.example.client.main.StatusSplitter;
import com.mvp4g.example.client.main.view.StatusContainerView;
@Presenter( view = StatusContainerView.class, async = StatusSplitter.class )
public class StatusContainerPresenter extends BasePresenter<StatusContainerPresenter.IStatusContainerView, MainEventBus> {
public interface IStatusContainerView {
void showPopup();
}
    public void onShowStatus(String info) {
view.showPopup();
}
}
|
def calcular_total_mensajes(self, user):
    """Return the total number of messages `user` has sent across all of
    their groups, or None if there is no data for that user."""
    Almacenamiento.__checc(user)  # presumably validates `user` (name-mangled private helper)
    self.c.execute("SELECT SUM(message_number) as total_messages FROM user_group WHERE userid = ?", (user.userid,))
    res = self.c.fetchall()
    return res[0][0] if res else None
import EC from "./ec";
import Container from "./container";
/**
 * @description Circuit components container
*/
export default class Circuit extends Container<EC> {
get name(): string { return "circuit" }
get dir(): boolean { return false }
get ec(): EC | undefined {
return !this.selected.length ? void 0 : <EC>this.selected[0]
}
/**
	 * @description creates a circuit component
* @param options dictionary of options
*/
public createItem(options: { [x: string]: any; }): EC {
return new EC(this, <any>options);
}
}
|
// For now, just use the TCS simulator.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import gov.aps.jca.CAException;

// CaService, CaStatusAcceptor, CaAttribute, CaAttributeListener, CaException
// and TestSimulator are assumed to live in this test's own package.
public final class CaStatusAcceptorTest {
private static final Logger LOG = LoggerFactory
.getLogger(CaStatusAcceptorTest.class.getName());
private static final String CA_ADDR_LIST = "127.0.0.1";
private static final String TOP = "test";
private static final String SA_NAME = "sad";
private static final String ATTR1_NAME = "status1";
private static final String ATTR1_CHANNEL = TOP + ":"
+ TestSimulator.INTEGER_STATUS;
private static final String ATTR2_NAME = "status2";
private static final String ATTR2_CHANNEL = TOP + ":"
+ TestSimulator.STRING_STATUS;
private static final String ATTR3_NAME = "status3";
private static final String ATTR3_CHANNEL = TOP + ":"
+ TestSimulator.DOUBLE_STATUS;
private static final String ATTR4_NAME = "status4";
private static final String ATTR4_CHANNEL = TOP + ":"
+ TestSimulator.FLOAT_STATUS;
private static final long SLEEP_TIME = 2000;
private static TestSimulator simulator;
private static CaService caService;
private boolean updated;
@BeforeClass
public static void setUp() {
simulator = new TestSimulator(TOP);
simulator.start();
CaService.setAddressList(CA_ADDR_LIST);
caService = CaService.getInstance();
}
@AfterClass
public static void tearDown() {
if (caService != null) {
caService.unbind();
caService = null;
}
if (simulator != null) {
simulator.stop();
simulator = null;
}
}
@Test
public void testCreateService() {
assertNotNull("Unable to create CaService.", caService);
}
@Test
public void testCreateStatusAcceptor() {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
assertNotNull("Unable to create CaStatusAcceptor.", sa);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testGetStatusAcceptor() {
CaStatusAcceptor sa1 = caService.createStatusAcceptor(SA_NAME);
CaStatusAcceptor sa2 = caService.getStatusAcceptor(SA_NAME);
assertEquals("Retrieved the wrong CaStatusAcceptor.", sa1, sa2);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testCreateIntegerAttribute() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<Integer> attr = sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
assertNotNull("Unable to create status acceptor attribute.", attr);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testCreateStringAttribute() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<String> attr = sa.addString(ATTR2_NAME, ATTR2_CHANNEL);
assertNotNull("Unable to create status acceptor attribute.", attr);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testCreateFloatAttribute() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<Float> attr = sa.addFloat(ATTR4_NAME, ATTR4_CHANNEL);
assertNotNull("Unable to create status acceptor attribute.", attr);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testCreateDoubleAttribute() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<Double> attr = sa.addDouble(ATTR3_NAME, ATTR3_CHANNEL);
assertNotNull("Unable to create status acceptor attribute.", attr);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test(expected = CaException.class)
public void testRejectAttributeCreationWithDifferentType()
throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
try {
sa.addString(ATTR1_NAME, ATTR1_CHANNEL);
} finally {
caService.destroyStatusAcceptor(SA_NAME);
}
}
@Test(expected = CaException.class)
public void testRejectAttributeCreationWithDifferentChannel()
throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
try {
sa.addInteger(ATTR1_NAME, ATTR2_CHANNEL);
} finally {
caService.destroyStatusAcceptor(SA_NAME);
}
}
@Test
public void testGetAttribute() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<Integer> attr1 = sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
CaAttribute<Integer> attr2 = sa.getIntegerAttribute(ATTR1_NAME);
assertNotNull("Unable to retrieve status acceptor attribute.", attr2);
assertEquals("Retrieved the wrong status acceptor attribute.", attr1,
attr2);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testGetInfo() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
sa.addString(ATTR2_NAME, ATTR2_CHANNEL);
Set<String> attrSet = sa.getInfo();
assertNotNull("Unable to retrieve attribute list.", attrSet);
Set<String> testSet = new HashSet<>();
testSet.add(ATTR1_NAME);
testSet.add(ATTR2_NAME);
assertEquals("Retrieved bad attribute list.", attrSet, testSet);
caService.destroyStatusAcceptor(SA_NAME);
}
@Test
public void testAttributeMonitor() throws CaException, CAException {
CaStatusAcceptor sa = caService.createStatusAcceptor(SA_NAME);
CaAttribute<Integer> attr = sa.addInteger(ATTR1_NAME, ATTR1_CHANNEL);
attr.addListener(new CaAttributeListener<Integer>() {
@Override
public void onValueChange(List<Integer> newVals) {
updated = true;
}
@Override
public void onValidityChange(boolean newValidity) {
}
});
try {
Thread.sleep(SLEEP_TIME);
} catch (InterruptedException e) {
LOG.warn(e.getMessage());
}
assertTrue("Attribute monitor did not receive updates.", updated);
caService.destroyStatusAcceptor(SA_NAME);
}
} |
/*
* File: Code for a new 'Dynamic Address Allocation' Agent Class for the ns
* network simulator
* Author: <NAME> (<EMAIL>), September 2005
*
*/
#ifndef ns_daa_h
#define ns_daa_h
#include "agent.h"
#include "tclcl.h"
#include "packet.h"
#include "address.h"
#include "ip.h"
/*
* Packet Types:
* AREQ = Address Request
* AREP = Address Reply (Offer)
* NREP = Negative Reply
* AACK = Address Accept
* PACK = Proxy Address Accept
* ASRCH = Initiate Address Search
*/
enum {AREQ, AREP, NREP, AACK, PACK, ASRCH};
/*
* Agent States:
* UNINIT = Uninitialised
* INIT = Initialising
* IDLE = Idle
* ALLOC = Allocating addresses
* PROXY = Acting as an allocation proxy
*/
enum {UNINIT, INIT, IDLE, ALLOC, PROXY};
/*
* The data structure for the Dynamic Address Allocation packet header
*/
struct hdr_daa {
short type; // The message type
int seq; // The sequence number
int uid; // The unique ID of the requesting node
int alloc_addr[2]; // The address range being allocated
// Header access methods
static int offset_; // required by PacketHeaderManager
inline static int& offset() { return offset_; }
inline static hdr_daa* access(const Packet* p) {
return (hdr_daa*) p->access(offset_);
}
};
/*
* Define the Dynamic Address Allocation agent as a subclass of "Agent"
*/
class DaaAgent : public Agent {
public:
// Default Constructor
DaaAgent();
// Execute a command
int command(int argc, const char*const* argv);
// Process a packet
void recv(Packet*, Handler*);
// Agent Variables
int state_; // Agent State
int uid_; // Unique identifier;
int my_addr[2]; // My Address Space
  // Variables for Initialisation
  int areq_retry_;                  // AREQ retry counter
  static const int AREQ_LIMIT_;     // AREQ retry limit
  static const int AREQ_TIMEOUT_;   // AREQ retry timeout
  int arep_counter_;                // AREP retry/received counter
  int nrep_counter_;                // NREP received counter
  Packet* best_offer_;              // Best offer
  int best_offer_size_;             // Size of the best offer
  // Variables for Allocation
  int arep_retry_;                  // AREP retry counter
  static const int AREP_LIMIT_;     // AREP retry limit
  static const int AREP_TIMEOUT_;   // AREP retry timeout
  int alloc_addr[2];                // Allocated Address Space
Packet* areq_src_; // the AREQ we are replying to
// Controller functions
void init();
void alloc();
void recv_areq(Packet*);
void recv_arep(Packet*);
void recv_nrep(Packet*);
void recv_aack(Packet*);
// Packet creation functions
static Packet* create_broadcast(int /*type*/, int /*seq*/);
static Packet* create_reply(int /*type*/, Packet* /*src*/);
};
#endif
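For orientation, a minimal sketch of how an ns-2 agent typically dispatches on this header inside recv(); the body below is illustrative, not taken from the original implementation:

/* Illustrative dispatch sketch:

   void DaaAgent::recv(Packet* pkt, Handler*)
   {
       hdr_daa* dh = hdr_daa::access(pkt);   // locate our header in the packet
       switch (dh->type) {
       case AREQ: recv_areq(pkt); break;     // a node is requesting addresses
       case AREP: recv_arep(pkt); break;     // an offer of address space
       case NREP: recv_nrep(pkt); break;     // a negative reply
       case AACK: recv_aack(pkt); break;     // our offer was accepted
       default:   Packet::free(pkt); break;  // drop anything we don't handle
       }
   }
*/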
|
package com.andyadc.codeblocks.serialization.json;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.fastjson.serializer.SimpleDateFormatSerializer;
public class FastjsonSerializer {
private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
private static final SerializeConfig SERIALIZE_CONFIG;
    private static final SerializerFeature[] SERIALIZER_FEATURE = {
        SerializerFeature.WriteMapNullValue, // serialize fields whose value is null
        // SerializerFeature.WriteNullStringAsEmpty, // serialize null String fields as "" instead of null
        // SerializerFeature.WriteNullNumberAsZero, // serialize null numeric fields as 0 instead of null
        // SerializerFeature.WriteNullBooleanAsFalse, // serialize null Boolean fields as false instead of null
        SerializerFeature.WriteNullListAsEmpty, // serialize null List fields as [] instead of null
        SerializerFeature.DisableCircularReferenceDetect // disable circular-reference detection
    };
static {
SERIALIZE_CONFIG = new SerializeConfig();
SERIALIZE_CONFIG.put(java.util.Date.class, new SimpleDateFormatSerializer(DATE_FORMAT));
SERIALIZE_CONFIG.put(java.sql.Date.class, new SimpleDateFormatSerializer(DATE_FORMAT));
}
private FastjsonSerializer() {
}
public static <T> String toJSON(T o) {
return JSON.toJSONString(o, SERIALIZE_CONFIG, SERIALIZER_FEATURE);
}
public static <T> T fromJSON(String json, Class<T> clazz) {
return JSON.parseObject(json, clazz);
}
}
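A small usage sketch of the serializer above; the User POJO is hypothetical:

public class FastjsonSerializerExample {
    // Hypothetical POJO for illustration.
    public static class User {
        public String name;                                   // left null below
        public java.util.Date created = new java.util.Date();
    }

    public static void main(String[] args) {
        String json = FastjsonSerializer.toJSON(new User());
        // WriteMapNullValue keeps "name":null in the output, and the date is
        // rendered as "yyyy-MM-dd HH:mm:ss.SSS" by the SimpleDateFormatSerializer.
        System.out.println(json);

        User back = FastjsonSerializer.fromJSON("{\"name\":\"Bob\"}", User.class);
        System.out.println(back.name);                        // Bob
    }
}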
|
Reducing NOx emissions from a nitric acid plant of domestic petrochemical complex: enhanced conversion in conventional radial-flow reactor of selective catalytic reduction process
The nitric acid plant of a domestic petrochemical complex is designed to produce 56,400 metric tons annually (based on 100% nitric acid). In the present work, a radial-flow spherical bed reactor (RFSBR) for the selective catalytic reduction of nitrogen oxides (NOx) from the stack of this plant was modelled and compared with the conventional radial-flow reactor (CRFR). Moreover, the performance of a radial-flow (water or nitrogen) membrane reactor was also compared with that of the CRFR; the membrane reactor was found to be inefficient under identical process conditions. In the RFSBR, the space between the two concentric spheres is filled with catalyst. A mathematical model based on the conservation of mass was developed to investigate the performance of the configurations. The model was validated against the CRFR of a nitric acid plant located at the domestic petrochemical complex, and good agreement was observed between the modelling results and the plant data. The effects of important parameters such as pressure and temperature on NOx conversion were analysed. Results show a 14% decrease in annual NOx emissions for the RFSBR compared with the CRFR, which is beneficial for preventing NOx emission, global warming and acid rain.
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "EUTRA-RRC-Definitions"
* found in "/home/labuser/Desktop/openairinterface5g_f1ap/openair2/RRC/LTE/MESSAGES/asn1c/ASN1_files/lte-rrc-14.7.0.asn1"
* `asn1c -pdu=all -fcompound-names -gen-PER -no-gen-OER -no-gen-example -D /home/labuser/Desktop/openairinterface5g_f1ap/cmake_targets/lte_build_oai/build/CMakeFiles/RRC_Rel14`
*/
#include "LTE_CSI-Process-r11.h"
/*
* This type is implemented using NativeEnumerated,
* so here we adjust the DEF accordingly.
*/
static int
memb_LTE_setup_constraint_13(const asn_TYPE_descriptor_t *td, const void *sptr,
asn_app_constraint_failed_f *ctfailcb, void *app_key) {
size_t size;
if(!sptr) {
ASN__CTFAIL(app_key, td, sptr,
"%s: value not given (%s:%d)",
td->name, __FILE__, __LINE__);
return -1;
}
/* Determine the number of elements */
size = _A_CSEQUENCE_FROM_VOID(sptr)->count;
if((size >= 1 && size <= 2)) {
/* Perform validation of the inner elements */
return td->encoding_constraints.general_constraints(td, sptr, ctfailcb, app_key);
} else {
ASN__CTFAIL(app_key, td, sptr,
"%s: constraint failed (%s:%d)",
td->name, __FILE__, __LINE__);
return -1;
}
}
static int
memb_LTE_cqi_ReportPeriodicProcId_r11_constraint_1(const asn_TYPE_descriptor_t *td, const void *sptr,
asn_app_constraint_failed_f *ctfailcb, void *app_key) {
long value;
if(!sptr) {
ASN__CTFAIL(app_key, td, sptr,
"%s: value not given (%s:%d)",
td->name, __FILE__, __LINE__);
return -1;
}
value = *(const long *)sptr;
if((value >= 0 && value <= 3)) {
/* Constraint check succeeded */
return 0;
} else {
ASN__CTFAIL(app_key, td, sptr,
"%s: constraint failed (%s:%d)",
td->name, __FILE__, __LINE__);
return -1;
}
}
static asn_per_constraints_t asn_PER_type_LTE_alternativeCodebookEnabledFor4TXProc_r12_constr_11 CC_NOTUSED = {
{ APC_CONSTRAINED, 0, 0, 0, 0 } /* (0..0) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_type_LTE_setup_constr_15 CC_NOTUSED = {
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
{ APC_CONSTRAINED, 1, 1, 1, 2 } /* (SIZE(1..2)) */,
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_memb_LTE_setup_constr_15 CC_NOTUSED = {
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
{ APC_CONSTRAINED, 1, 1, 1, 2 } /* (SIZE(1..2)) */,
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_type_LTE_csi_IM_ConfigIdList_r12_constr_13 CC_NOTUSED = {
{ APC_CONSTRAINED, 1, 1, 0, 1 } /* (0..1) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_type_LTE_cqi_ReportAperiodicProc2_r12_constr_17 CC_NOTUSED = {
{ APC_CONSTRAINED, 1, 1, 0, 1 } /* (0..1) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_type_LTE_cqi_ReportAperiodicProc_v1310_constr_21 CC_NOTUSED = {
{ APC_CONSTRAINED, 1, 1, 0, 1 } /* (0..1) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_type_LTE_cqi_ReportAperiodicProc2_v1310_constr_24 CC_NOTUSED = {
{ APC_CONSTRAINED, 1, 1, 0, 1 } /* (0..1) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static asn_per_constraints_t asn_PER_memb_LTE_cqi_ReportPeriodicProcId_r11_constr_7 CC_NOTUSED = {
{ APC_CONSTRAINED, 2, 2, 0, 3 } /* (0..3) */,
{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
0, 0 /* No PER value map */
};
static const asn_INTEGER_enum_map_t asn_MAP_LTE_alternativeCodebookEnabledFor4TXProc_r12_value2enum_11[] = {
{ 0, 4, "true" }
};
static const unsigned int asn_MAP_LTE_alternativeCodebookEnabledFor4TXProc_r12_enum2value_11[] = {
0 /* true(0) */
};
static const asn_INTEGER_specifics_t asn_SPC_LTE_alternativeCodebookEnabledFor4TXProc_r12_specs_11 = {
asn_MAP_LTE_alternativeCodebookEnabledFor4TXProc_r12_value2enum_11, /* "tag" => N; sorted by tag */
asn_MAP_LTE_alternativeCodebookEnabledFor4TXProc_r12_enum2value_11, /* N => "tag"; sorted by N */
1, /* Number of elements in the maps */
0, /* Enumeration is not extensible */
1, /* Strict enumeration */
0, /* Native long size */
0
};
static const ber_tlv_tag_t asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11[] = {
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
(ASN_TAG_CLASS_UNIVERSAL | (10 << 2))
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_11 = {
"alternativeCodebookEnabledFor4TXProc-r12",
"alternativeCodebookEnabledFor4TXProc-r12",
&asn_OP_NativeEnumerated,
asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11,
sizeof(asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11)
/sizeof(asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11[0]) - 1, /* 1 */
asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11, /* Same as above */
sizeof(asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11)
/sizeof(asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_tags_11[0]), /* 2 */
{ 0, &asn_PER_type_LTE_alternativeCodebookEnabledFor4TXProc_r12_constr_11, NativeEnumerated_constraint },
0, 0, /* Defined elsewhere */
&asn_SPC_LTE_alternativeCodebookEnabledFor4TXProc_r12_specs_11 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_setup_15[] = {
{ ATF_POINTER, 0, 0,
(ASN_TAG_CLASS_UNIVERSAL | (2 << 2)),
0,
&asn_DEF_LTE_CSI_IM_ConfigId_r12,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
""
},
};
static const ber_tlv_tag_t asn_DEF_LTE_setup_tags_15[] = {
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static asn_SET_OF_specifics_t asn_SPC_LTE_setup_specs_15 = {
sizeof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12__setup),
offsetof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12__setup, _asn_ctx),
0, /* XER encoding is XMLDelimitedItemList */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_setup_15 = {
"setup",
"setup",
&asn_OP_SEQUENCE_OF,
asn_DEF_LTE_setup_tags_15,
sizeof(asn_DEF_LTE_setup_tags_15)
/sizeof(asn_DEF_LTE_setup_tags_15[0]) - 1, /* 1 */
asn_DEF_LTE_setup_tags_15, /* Same as above */
sizeof(asn_DEF_LTE_setup_tags_15)
/sizeof(asn_DEF_LTE_setup_tags_15[0]), /* 2 */
{ 0, &asn_PER_type_LTE_setup_constr_15, SEQUENCE_OF_constraint },
asn_MBR_LTE_setup_15,
1, /* Single element */
&asn_SPC_LTE_setup_specs_15 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_csi_IM_ConfigIdList_r12_13[] = {
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12, choice.release),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_NULL,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"release"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12, choice.setup),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
0,
&asn_DEF_LTE_setup_15,
0,
{ 0, &asn_PER_memb_LTE_setup_constr_15, memb_LTE_setup_constraint_13 },
0, 0, /* No default value */
"setup"
},
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_csi_IM_ConfigIdList_r12_tag2el_13[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* release */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* setup */
};
static asn_CHOICE_specifics_t asn_SPC_LTE_csi_IM_ConfigIdList_r12_specs_13 = {
sizeof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12),
offsetof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12, _asn_ctx),
offsetof(struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12, present),
sizeof(((struct LTE_CSI_Process_r11__ext1__csi_IM_ConfigIdList_r12 *)0)->present),
asn_MAP_LTE_csi_IM_ConfigIdList_r12_tag2el_13,
2, /* Count of tags in the map */
0, 0,
-1 /* Extensions start */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_csi_IM_ConfigIdList_r12_13 = {
"csi-IM-ConfigIdList-r12",
"csi-IM-ConfigIdList-r12",
&asn_OP_CHOICE,
0, /* No effective tags (pointer) */
0, /* No effective tags (count) */
0, /* No tags (pointer) */
0, /* No tags (count) */
{ 0, &asn_PER_type_LTE_csi_IM_ConfigIdList_r12_constr_13, CHOICE_constraint },
asn_MBR_LTE_csi_IM_ConfigIdList_r12_13,
2, /* Elements count */
&asn_SPC_LTE_csi_IM_ConfigIdList_r12_specs_13 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_cqi_ReportAperiodicProc2_r12_17[] = {
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12, choice.release),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_NULL,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"release"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12, choice.setup),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CQI_ReportAperiodicProc_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"setup"
},
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_cqi_ReportAperiodicProc2_r12_tag2el_17[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* release */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* setup */
};
static asn_CHOICE_specifics_t asn_SPC_LTE_cqi_ReportAperiodicProc2_r12_specs_17 = {
sizeof(struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12),
offsetof(struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12, _asn_ctx),
offsetof(struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12, present),
sizeof(((struct LTE_CSI_Process_r11__ext1__cqi_ReportAperiodicProc2_r12 *)0)->present),
asn_MAP_LTE_cqi_ReportAperiodicProc2_r12_tag2el_17,
2, /* Count of tags in the map */
0, 0,
-1 /* Extensions start */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_cqi_ReportAperiodicProc2_r12_17 = {
"cqi-ReportAperiodicProc2-r12",
"cqi-ReportAperiodicProc2-r12",
&asn_OP_CHOICE,
0, /* No effective tags (pointer) */
0, /* No effective tags (count) */
0, /* No tags (pointer) */
0, /* No tags (count) */
{ 0, &asn_PER_type_LTE_cqi_ReportAperiodicProc2_r12_constr_17, CHOICE_constraint },
asn_MBR_LTE_cqi_ReportAperiodicProc2_r12_17,
2, /* Elements count */
&asn_SPC_LTE_cqi_ReportAperiodicProc2_r12_specs_17 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_ext1_10[] = {
{ ATF_POINTER, 3, offsetof(struct LTE_CSI_Process_r11__ext1, alternativeCodebookEnabledFor4TXProc_r12),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_alternativeCodebookEnabledFor4TXProc_r12_11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"alternativeCodebookEnabledFor4TXProc-r12"
},
{ ATF_POINTER, 2, offsetof(struct LTE_CSI_Process_r11__ext1, csi_IM_ConfigIdList_r12),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_csi_IM_ConfigIdList_r12_13,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"csi-IM-ConfigIdList-r12"
},
{ ATF_POINTER, 1, offsetof(struct LTE_CSI_Process_r11__ext1, cqi_ReportAperiodicProc2_r12),
(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_cqi_ReportAperiodicProc2_r12_17,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"cqi-ReportAperiodicProc2-r12"
},
};
static const int asn_MAP_LTE_ext1_oms_10[] = { 0, 1, 2 };
static const ber_tlv_tag_t asn_DEF_LTE_ext1_tags_10[] = {
(ASN_TAG_CLASS_CONTEXT | (7 << 2)),
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_ext1_tag2el_10[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* alternativeCodebookEnabledFor4TXProc-r12 */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* csi-IM-ConfigIdList-r12 */
{ (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 } /* cqi-ReportAperiodicProc2-r12 */
};
static asn_SEQUENCE_specifics_t asn_SPC_LTE_ext1_specs_10 = {
sizeof(struct LTE_CSI_Process_r11__ext1),
offsetof(struct LTE_CSI_Process_r11__ext1, _asn_ctx),
asn_MAP_LTE_ext1_tag2el_10,
3, /* Count of tags in the map */
asn_MAP_LTE_ext1_oms_10, /* Optional members */
3, 0, /* Root/Additions */
-1, /* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_ext1_10 = {
"ext1",
"ext1",
&asn_OP_SEQUENCE,
asn_DEF_LTE_ext1_tags_10,
sizeof(asn_DEF_LTE_ext1_tags_10)
/sizeof(asn_DEF_LTE_ext1_tags_10[0]) - 1, /* 1 */
asn_DEF_LTE_ext1_tags_10, /* Same as above */
sizeof(asn_DEF_LTE_ext1_tags_10)
/sizeof(asn_DEF_LTE_ext1_tags_10[0]), /* 2 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_LTE_ext1_10,
3, /* Elements count */
&asn_SPC_LTE_ext1_specs_10 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_cqi_ReportAperiodicProc_v1310_21[] = {
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310, choice.release),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_NULL,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"release"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310, choice.setup),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CQI_ReportAperiodicProc_v1310,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"setup"
},
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_cqi_ReportAperiodicProc_v1310_tag2el_21[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* release */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* setup */
};
static asn_CHOICE_specifics_t asn_SPC_LTE_cqi_ReportAperiodicProc_v1310_specs_21 = {
sizeof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310),
offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310, _asn_ctx),
offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310, present),
sizeof(((struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc_v1310 *)0)->present),
asn_MAP_LTE_cqi_ReportAperiodicProc_v1310_tag2el_21,
2, /* Count of tags in the map */
0, 0,
-1 /* Extensions start */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_cqi_ReportAperiodicProc_v1310_21 = {
"cqi-ReportAperiodicProc-v1310",
"cqi-ReportAperiodicProc-v1310",
&asn_OP_CHOICE,
0, /* No effective tags (pointer) */
0, /* No effective tags (count) */
0, /* No tags (pointer) */
0, /* No tags (count) */
{ 0, &asn_PER_type_LTE_cqi_ReportAperiodicProc_v1310_constr_21, CHOICE_constraint },
asn_MBR_LTE_cqi_ReportAperiodicProc_v1310_21,
2, /* Elements count */
&asn_SPC_LTE_cqi_ReportAperiodicProc_v1310_specs_21 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_cqi_ReportAperiodicProc2_v1310_24[] = {
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310, choice.release),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_NULL,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"release"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310, choice.setup),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CQI_ReportAperiodicProc_v1310,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"setup"
},
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_cqi_ReportAperiodicProc2_v1310_tag2el_24[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* release */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* setup */
};
static asn_CHOICE_specifics_t asn_SPC_LTE_cqi_ReportAperiodicProc2_v1310_specs_24 = {
sizeof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310),
offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310, _asn_ctx),
offsetof(struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310, present),
sizeof(((struct LTE_CSI_Process_r11__ext2__cqi_ReportAperiodicProc2_v1310 *)0)->present),
asn_MAP_LTE_cqi_ReportAperiodicProc2_v1310_tag2el_24,
2, /* Count of tags in the map */
0, 0,
-1 /* Extensions start */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_cqi_ReportAperiodicProc2_v1310_24 = {
"cqi-ReportAperiodicProc2-v1310",
"cqi-ReportAperiodicProc2-v1310",
&asn_OP_CHOICE,
0, /* No effective tags (pointer) */
0, /* No effective tags (count) */
0, /* No tags (pointer) */
0, /* No tags (count) */
{ 0, &asn_PER_type_LTE_cqi_ReportAperiodicProc2_v1310_constr_24, CHOICE_constraint },
asn_MBR_LTE_cqi_ReportAperiodicProc2_v1310_24,
2, /* Elements count */
&asn_SPC_LTE_cqi_ReportAperiodicProc2_v1310_specs_24 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_ext2_20[] = {
{ ATF_POINTER, 3, offsetof(struct LTE_CSI_Process_r11__ext2, cqi_ReportAperiodicProc_v1310),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_cqi_ReportAperiodicProc_v1310_21,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"cqi-ReportAperiodicProc-v1310"
},
{ ATF_POINTER, 2, offsetof(struct LTE_CSI_Process_r11__ext2, cqi_ReportAperiodicProc2_v1310),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_cqi_ReportAperiodicProc2_v1310_24,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"cqi-ReportAperiodicProc2-v1310"
},
{ ATF_POINTER, 1, offsetof(struct LTE_CSI_Process_r11__ext2, eMIMO_Type_r13),
(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_CSI_RS_ConfigEMIMO_r13,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"eMIMO-Type-r13"
},
};
static const int asn_MAP_LTE_ext2_oms_20[] = { 0, 1, 2 };
static const ber_tlv_tag_t asn_DEF_LTE_ext2_tags_20[] = {
(ASN_TAG_CLASS_CONTEXT | (8 << 2)),
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_ext2_tag2el_20[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* cqi-ReportAperiodicProc-v1310 */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* cqi-ReportAperiodicProc2-v1310 */
{ (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 } /* eMIMO-Type-r13 */
};
static asn_SEQUENCE_specifics_t asn_SPC_LTE_ext2_specs_20 = {
sizeof(struct LTE_CSI_Process_r11__ext2),
offsetof(struct LTE_CSI_Process_r11__ext2, _asn_ctx),
asn_MAP_LTE_ext2_tag2el_20,
3, /* Count of tags in the map */
asn_MAP_LTE_ext2_oms_20, /* Optional members */
3, 0, /* Root/Additions */
-1, /* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_ext2_20 = {
"ext2",
"ext2",
&asn_OP_SEQUENCE,
asn_DEF_LTE_ext2_tags_20,
sizeof(asn_DEF_LTE_ext2_tags_20)
/sizeof(asn_DEF_LTE_ext2_tags_20[0]) - 1, /* 1 */
asn_DEF_LTE_ext2_tags_20, /* Same as above */
sizeof(asn_DEF_LTE_ext2_tags_20)
/sizeof(asn_DEF_LTE_ext2_tags_20[0]), /* 2 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_LTE_ext2_20,
3, /* Elements count */
&asn_SPC_LTE_ext2_specs_20 /* Additional specs */
};
static asn_TYPE_member_t asn_MBR_LTE_ext3_28[] = {
{ ATF_POINTER, 3, offsetof(struct LTE_CSI_Process_r11__ext3, eMIMO_Type_v1430),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_CSI_RS_ConfigEMIMO_v1430,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"eMIMO-Type-v1430"
},
{ ATF_POINTER, 2, offsetof(struct LTE_CSI_Process_r11__ext3, eMIMO_Hybrid_r14),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
+1, /* EXPLICIT tag at current level */
&asn_DEF_LTE_CSI_RS_ConfigEMIMO_Hybrid_r14,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"eMIMO-Hybrid-r14"
},
{ ATF_POINTER, 1, offsetof(struct LTE_CSI_Process_r11__ext3, advancedCodebookEnabled_r14),
(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_BOOLEAN,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"advancedCodebookEnabled-r14"
},
};
static const int asn_MAP_LTE_ext3_oms_28[] = { 0, 1, 2 };
static const ber_tlv_tag_t asn_DEF_LTE_ext3_tags_28[] = {
(ASN_TAG_CLASS_CONTEXT | (9 << 2)),
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_ext3_tag2el_28[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* eMIMO-Type-v1430 */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* eMIMO-Hybrid-r14 */
{ (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 } /* advancedCodebookEnabled-r14 */
};
static asn_SEQUENCE_specifics_t asn_SPC_LTE_ext3_specs_28 = {
sizeof(struct LTE_CSI_Process_r11__ext3),
offsetof(struct LTE_CSI_Process_r11__ext3, _asn_ctx),
asn_MAP_LTE_ext3_tag2el_28,
3, /* Count of tags in the map */
asn_MAP_LTE_ext3_oms_28, /* Optional members */
3, 0, /* Root/Additions */
-1, /* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_ext3_28 = {
"ext3",
"ext3",
&asn_OP_SEQUENCE,
asn_DEF_LTE_ext3_tags_28,
sizeof(asn_DEF_LTE_ext3_tags_28)
/sizeof(asn_DEF_LTE_ext3_tags_28[0]) - 1, /* 1 */
asn_DEF_LTE_ext3_tags_28, /* Same as above */
sizeof(asn_DEF_LTE_ext3_tags_28)
/sizeof(asn_DEF_LTE_ext3_tags_28[0]), /* 2 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_LTE_ext3_28,
3, /* Elements count */
&asn_SPC_LTE_ext3_specs_28 /* Additional specs */
};
asn_TYPE_member_t asn_MBR_LTE_CSI_Process_r11_1[] = {
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11, csi_ProcessId_r11),
(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CSI_ProcessId_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"csi-ProcessId-r11"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11, csi_RS_ConfigNZPId_r11),
(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CSI_RS_ConfigNZPId_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"csi-RS-ConfigNZPId-r11"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11, csi_IM_ConfigId_r11),
(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CSI_IM_ConfigId_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"csi-IM-ConfigId-r11"
},
{ ATF_NOFLAGS, 0, offsetof(struct LTE_CSI_Process_r11, p_C_AndCBSRList_r11),
(ASN_TAG_CLASS_CONTEXT | (3 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_P_C_AndCBSR_Pair_r13a,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"p-C-AndCBSRList-r11"
},
{ ATF_POINTER, 6, offsetof(struct LTE_CSI_Process_r11, cqi_ReportBothProc_r11),
(ASN_TAG_CLASS_CONTEXT | (4 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CQI_ReportBothProc_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"cqi-ReportBothProc-r11"
},
{ ATF_POINTER, 5, offsetof(struct LTE_CSI_Process_r11, cqi_ReportPeriodicProcId_r11),
(ASN_TAG_CLASS_CONTEXT | (5 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_NativeInteger,
0,
{ 0, &asn_PER_memb_LTE_cqi_ReportPeriodicProcId_r11_constr_7, memb_LTE_cqi_ReportPeriodicProcId_r11_constraint_1 },
0, 0, /* No default value */
"cqi-ReportPeriodicProcId-r11"
},
{ ATF_POINTER, 4, offsetof(struct LTE_CSI_Process_r11, cqi_ReportAperiodicProc_r11),
(ASN_TAG_CLASS_CONTEXT | (6 << 2)),
-1, /* IMPLICIT tag at current level */
&asn_DEF_LTE_CQI_ReportAperiodicProc_r11,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"cqi-ReportAperiodicProc-r11"
},
{ ATF_POINTER, 3, offsetof(struct LTE_CSI_Process_r11, ext1),
(ASN_TAG_CLASS_CONTEXT | (7 << 2)),
0,
&asn_DEF_LTE_ext1_10,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"ext1"
},
{ ATF_POINTER, 2, offsetof(struct LTE_CSI_Process_r11, ext2),
(ASN_TAG_CLASS_CONTEXT | (8 << 2)),
0,
&asn_DEF_LTE_ext2_20,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"ext2"
},
{ ATF_POINTER, 1, offsetof(struct LTE_CSI_Process_r11, ext3),
(ASN_TAG_CLASS_CONTEXT | (9 << 2)),
0,
&asn_DEF_LTE_ext3_28,
0,
{ 0, 0, 0 },
0, 0, /* No default value */
"ext3"
},
};
static const int asn_MAP_LTE_CSI_Process_r11_oms_1[] = { 4, 5, 6, 7, 8, 9 };
static const ber_tlv_tag_t asn_DEF_LTE_CSI_Process_r11_tags_1[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static const asn_TYPE_tag2member_t asn_MAP_LTE_CSI_Process_r11_tag2el_1[] = {
{ (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* csi-ProcessId-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* csi-RS-ConfigNZPId-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 }, /* csi-IM-ConfigId-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (3 << 2)), 3, 0, 0 }, /* p-C-AndCBSRList-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (4 << 2)), 4, 0, 0 }, /* cqi-ReportBothProc-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (5 << 2)), 5, 0, 0 }, /* cqi-ReportPeriodicProcId-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (6 << 2)), 6, 0, 0 }, /* cqi-ReportAperiodicProc-r11 */
{ (ASN_TAG_CLASS_CONTEXT | (7 << 2)), 7, 0, 0 }, /* ext1 */
{ (ASN_TAG_CLASS_CONTEXT | (8 << 2)), 8, 0, 0 }, /* ext2 */
{ (ASN_TAG_CLASS_CONTEXT | (9 << 2)), 9, 0, 0 } /* ext3 */
};
asn_SEQUENCE_specifics_t asn_SPC_LTE_CSI_Process_r11_specs_1 = {
sizeof(struct LTE_CSI_Process_r11),
offsetof(struct LTE_CSI_Process_r11, _asn_ctx),
asn_MAP_LTE_CSI_Process_r11_tag2el_1,
10, /* Count of tags in the map */
asn_MAP_LTE_CSI_Process_r11_oms_1, /* Optional members */
3, 3, /* Root/Additions */
7, /* First extension addition */
};
asn_TYPE_descriptor_t asn_DEF_LTE_CSI_Process_r11 = {
"CSI-Process-r11",
"CSI-Process-r11",
&asn_OP_SEQUENCE,
asn_DEF_LTE_CSI_Process_r11_tags_1,
sizeof(asn_DEF_LTE_CSI_Process_r11_tags_1)
/sizeof(asn_DEF_LTE_CSI_Process_r11_tags_1[0]), /* 1 */
asn_DEF_LTE_CSI_Process_r11_tags_1, /* Same as above */
sizeof(asn_DEF_LTE_CSI_Process_r11_tags_1)
/sizeof(asn_DEF_LTE_CSI_Process_r11_tags_1[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_LTE_CSI_Process_r11_1,
10, /* Elements count */
&asn_SPC_LTE_CSI_Process_r11_specs_1 /* Additional specs */
};
|
The three transforming regions of SV40 T antigen are required for immortalization of primary mouse embryo fibroblasts.
Simian virus 40 (SV40) is a small DNA tumor virus whose early region gene product, large T antigen, is sufficient to immortalize primary rodent cells and transform established rodent cell lines. Three functional domains of large T antigen are required for transformation of the rat embryo fibroblast REF 52 cell line: the extreme amino-terminal region, a domain which binds p105Rb family members, and the bipartite p53-binding region. Many studies have attempted to define the activities and regions of SV40 large T antigen required for immortalization of mouse embryo fibroblasts (MEFs). In most of these studies, investigators have used survival of T antigen-expressing primary MEF colonies at the time when control MEFs undergo senescence as a measure of 'immortalization' and concluded that immortalization of MEFs is correlated with large T antigen's ability to sequester the tumor suppressor gene product p53 and is separable from its p105Rb-binding or N-terminal functions. In order to more rigorously define the regions of SV40 large T antigen required for escape from senescence, individual T antigen-expressing primary MEF colonies were systematically subcultured for > 60 population doublings beyond the time of control MEF senescence under conditions known to limit the number of spontaneously immortalized cells. We found that although interaction of T antigen with p53 was sufficient to substantially extend the lifespan of MEFs, all three SV40 large T antigen domains required for REF 52 transformation were necessary to immortalize primary MEFs. These results indicate that p53 inactivation alone is insufficient to immortalize primary MEFs; rather, immortalization requires multiple activities of T antigen which are also required for efficient transformation. |
import PropertyDecorator from '../contracts/PropertyDecorator'
import Field from './Field'
/**
 * Create an attribute decorator.
*/
export default function Attribute (value: any = null): PropertyDecorator {
return Field(model => model.attr(value))
}
|
//
// Copyright 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "hci_packetizer.h"
#define LOG_TAG "android.hardware.bluetooth.hci_packetizer.universal8890"
#include <dlfcn.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <utils/Log.h>
namespace {
const size_t preamble_size_for_type[] = {
0, HCI_COMMAND_PREAMBLE_SIZE, HCI_ACL_PREAMBLE_SIZE, HCI_SCO_PREAMBLE_SIZE,
HCI_EVENT_PREAMBLE_SIZE};
const size_t packet_length_offset_for_type[] = {
0, HCI_LENGTH_OFFSET_CMD, HCI_LENGTH_OFFSET_ACL, HCI_LENGTH_OFFSET_SCO,
HCI_LENGTH_OFFSET_EVT};
size_t HciGetPacketLengthForType(HciPacketType type, const uint8_t* preamble) {
size_t offset = packet_length_offset_for_type[type];
if (type != HCI_PACKET_TYPE_ACL_DATA) return preamble[offset];
return (((preamble[offset + 1]) << 8) | preamble[offset]);
}
} // namespace
namespace android {
namespace hardware {
namespace bluetooth {
namespace hci {
const hidl_vec<uint8_t>& HciPacketizer::GetPacket() const { return packet_; }
void HciPacketizer::OnDataReady(int fd, HciPacketType packet_type) {
switch (state_) {
case HCI_PREAMBLE: {
ssize_t bytes_read = TEMP_FAILURE_RETRY(
read(fd, preamble_ + bytes_read_,
preamble_size_for_type[packet_type] - bytes_read_));
if (bytes_read == 0) {
// This is only expected if the UART got closed when shutting down.
ALOGE("%s: Unexpected EOF reading the header!", __func__);
sleep(5); // Expect to be shut down within 5 seconds.
return;
}
if (bytes_read < 0) {
LOG_ALWAYS_FATAL("%s: Read header error: %s", __func__,
strerror(errno));
}
bytes_read_ += bytes_read;
if (bytes_read_ == preamble_size_for_type[packet_type]) {
size_t packet_length =
HciGetPacketLengthForType(packet_type, preamble_);
packet_.resize(preamble_size_for_type[packet_type] + packet_length);
memcpy(packet_.data(), preamble_, preamble_size_for_type[packet_type]);
bytes_remaining_ = packet_length;
state_ = HCI_PAYLOAD;
bytes_read_ = 0;
}
break;
}
case HCI_PAYLOAD: {
ssize_t bytes_read = TEMP_FAILURE_RETRY(read(
fd,
packet_.data() + preamble_size_for_type[packet_type] + bytes_read_,
bytes_remaining_));
if (bytes_read == 0) {
// This is only expected if the UART got closed when shutting down.
ALOGE("%s: Unexpected EOF reading the payload!", __func__);
sleep(5); // Expect to be shut down within 5 seconds.
return;
}
if (bytes_read < 0) {
LOG_ALWAYS_FATAL("%s: Read payload error: %s", __func__,
strerror(errno));
}
bytes_remaining_ -= bytes_read;
bytes_read_ += bytes_read;
if (bytes_remaining_ == 0) {
packet_ready_cb_();
state_ = HCI_PREAMBLE;
bytes_read_ = 0;
}
break;
}
}
}
} // namespace hci
} // namespace bluetooth
} // namespace hardware
} // namespace android
|
def autorepr(cls):
    """Class decorator: adds a __repr__ that lists the instance's attributes."""
    class WithRepr(cls):
def __repr__(self):
return '<{clss} {attrs} at {id}>'.format(
clss=cls.__name__,
attrs=vars(self),
id=id(self))
WithRepr.__name__ = cls.__name__
WithRepr.__doc__ = cls.__doc__
return WithRepr |
import { AbstractRemotePlatform } from '@backyard/common';
import type {
PlatformCommandHookArgs,
PlatformInitArgs,
} from '@backyard/types';
import { getServices } from '@backyard/common';
import type {
AwsRemoteOptions,
AwsRemotePlugins,
AwsRemoteTerraformHookArgs,
} from './types';
import { addVpc } from './helpers/vpc';
import { addAlb } from './helpers/alb';
export class AwsRemotePlatform extends AbstractRemotePlatform<
AwsRemoteOptions,
AwsRemotePlugins
> {
async init(args: PlatformInitArgs) {
args.registerPlugin('terraform', '@backyard/plugin-terraform');
}
async build(args: PlatformCommandHookArgs<AwsRemotePlugins>) {
const { context, plugins, commandOptions } = args;
const { profile, region, vpc, alb } = this.getOptions();
const state = await plugins.terraform.createState();
state.generator.provider('aws', {
region: region,
profile: profile,
});
state.add('data', 'aws_availability_zones', 'available', {
state: 'available',
});
const hookArgs: AwsRemoteTerraformHookArgs = {
options: this.getOptions(),
state,
vpc(name = 'default') {
if (vpc) {
const vpcs = Array.isArray(vpc) ? vpc : [vpc];
const item = vpcs?.find((v) => v.name === name);
if (item) {
return state.get('resource', 'aws_vpc', name);
}
}
return undefined;
},
alb(name = 'default') {
if (alb) {
const albs = Array.isArray(alb) ? alb : [alb];
const item = albs.find((a) => a.name === name);
if (item) {
return state.get('resource', 'aws_alb', name);
}
}
return undefined;
},
};
addVpc(hookArgs);
addAlb(hookArgs);
const services = getServices(context);
for (const service of services) {
await service.hook('aws', hookArgs);
}
state.write(context.dir.stage);
if (commandOptions['run-init'] !== false) {
await plugins.terraform.runInit(
context,
await plugins.terraform.prepareStage(context),
);
}
}
async deploy(args: PlatformCommandHookArgs) {
const { context, commandOptions, plugins } = args;
const isDryRun = commandOptions['dry-run'] === true;
const stageDirs = await plugins.terraform.prepareStage(context);
if (isDryRun) {
return await plugins.terraform.runPlan(context, stageDirs);
}
await plugins.terraform.runApply(context, stageDirs);
}
async clean(_args: PlatformCommandHookArgs) {
// await remote.teardown(args);
}
async teardown(args: PlatformCommandHookArgs) {
const { context, plugins } = args;
const stageDirs = await plugins.terraform.prepareStage(context);
await plugins.terraform.runDestroy(context, stageDirs);
}
}
|
A Two-Level Architecture for a High-Throughput DCT Processor and Its Implementation on FPGA
Frequency analysis using the discrete cosine transform (DCT) is used in a large variety of algorithms, such as image-processing algorithms. This paper proposes a new high-throughput architecture for a DCT processor. The system has a two-level architecture that uses parallelism and pipelining, and it has been synthesized on a Xilinx Virtex-5 FPGA. Synthesis results show that the system works at 150 MHz. Applying the DCT to each 8x8 block of an image takes 67 clock cycles; in other words, applying the DCT to each pixel takes approximately one clock cycle.
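A quick back-of-the-envelope check of the quoted figures (all numbers come from the abstract above; the per-pixel claim follows from the 64 pixels in an 8x8 block):

# Throughput sanity check for the quoted DCT-processor figures.
clock_hz = 150e6          # synthesized clock frequency: 150 MHz
cycles_per_block = 67     # clock cycles per 8x8 DCT block
pixels_per_block = 8 * 8  # 64 pixels per block

blocks_per_s = clock_hz / cycles_per_block              # ~2.24e6 blocks/s
pixels_per_s = blocks_per_s * pixels_per_block          # ~1.43e8 pixels/s
cycles_per_pixel = cycles_per_block / pixels_per_block  # ~1.05

print(f"{blocks_per_s:.3e} blocks/s, {pixels_per_s:.3e} pixels/s, "
      f"{cycles_per_pixel:.2f} cycles/pixel")
|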
/**
* This class can be used to update server voice channels.
*/
public class ServerVoiceChannelUpdater extends ServerChannelUpdater {
/**
* The server voice channel delegate used by this instance.
*/
private final ServerVoiceChannelUpdaterDelegate delegate;
/**
* Creates a new server voice channel updater.
*
* @param channel The channel to update.
*/
public ServerVoiceChannelUpdater(ServerVoiceChannel channel) {
delegate = DelegateFactory.createServerVoiceChannelUpdaterDelegate(channel);
}
/**
* Queues the bitrate to be updated.
*
* @param bitrate The new bitrate of the channel.
* @return The current instance in order to chain call methods.
*/
public ServerVoiceChannelUpdater setBitrate(int bitrate) {
delegate.setBitrate(bitrate);
return this;
}
/**
* Queues the user limit to be updated.
*
* @param userLimit The new user limit of the channel.
* @return The current instance in order to chain call methods.
*/
public ServerVoiceChannelUpdater setUserLimit(int userLimit) {
delegate.setUserLimit(userLimit);
return this;
}
/**
* Queues the user limit to be removed.
*
* @return The current instance in order to chain call methods.
*/
public ServerVoiceChannelUpdater removeUserLimit() {
delegate.removeUserLimit();
return this;
}
/**
* Queues the category to be updated.
*
* @param category The new category of the channel.
* @return The current instance in order to chain call methods.
*/
public ServerVoiceChannelUpdater setCategory(ChannelCategory category) {
delegate.setCategory(category);
return this;
}
/**
* Queues the category to be removed.
*
* @return The current instance in order to chain call methods.
*/
public ServerVoiceChannelUpdater removeCategory() {
delegate.removeCategory();
return this;
}
@Override
public ServerVoiceChannelUpdater setAuditLogReason(String reason) {
delegate.setAuditLogReason(reason);
return this;
}
@Override
public ServerVoiceChannelUpdater setName(String name) {
delegate.setName(name);
return this;
}
@Override
public ServerVoiceChannelUpdater setRawPosition(int rawPosition) {
delegate.setRawPosition(rawPosition);
return this;
}
@Override
public <T extends Permissionable & DiscordEntity> ServerVoiceChannelUpdater addPermissionOverwrite(
T permissionable, Permissions permissions) {
delegate.addPermissionOverwrite(permissionable, permissions);
return this;
}
@Override
public <T extends Permissionable & DiscordEntity> ServerVoiceChannelUpdater removePermissionOverwrite(
T permissionable) {
delegate.removePermissionOverwrite(permissionable);
return this;
}
@Override
public CompletableFuture<Void> update() {
return delegate.update();
}
} |
<reponame>hbraha/ovirt-engine
package org.ovirt.engine.core.bll.provider.network.openstack;
import org.ovirt.engine.core.bll.provider.NetworkProviderValidator;
import org.ovirt.engine.core.bll.provider.ProviderValidator;
import org.ovirt.engine.core.common.businessentities.OpenstackNetworkProviderProperties;
import org.ovirt.engine.core.common.businessentities.Provider;
public class OpenstackNetworkProviderProxy extends BaseNetworkProviderProxy<OpenstackNetworkProviderProperties> {
public OpenstackNetworkProviderProxy(Provider<OpenstackNetworkProviderProperties> provider) {
super(provider);
}
@Override
public ProviderValidator getProviderValidator() {
return new NetworkProviderValidator(getProvider());
}
}
|
Yes, we skipped 10.2 in favor of 10.3: since FreeBSD 10.3 was coming, we thought we should wait for it. This is the first ALPHA development release of GhostBSD 10.3, for testing and debugging. Only the MATE edition has been released so far; it is available on SourceForge for the amd64 and i386 architectures.
What's new
GhostBSD now supports ZFS and UFS.
The installer supports encryption for ZFS.
GhostBSD software will be updated quarterly, which will bring more stability to GhostBSD; users will still be able to switch to the latest branch to get the latest software updates.
What changed
The installer has had a major refacing, plus a new slideshow.
There have been some important fixes to Networkmgr.
There have been some speed improvements in Networkmgr.
Some UI and speed improvements in Update Station.
Mate 1.12
New Grub theme
What has been fixed
The language in which GhostBSD is installed should now be fixed.
The Wi-Fi problem has been fixed with the update of Networkmgr.
It is time to report issues
We encourage you to use our issue system http://issues.ghostbsd.org/main_page.php to report bugs. The more issues are reported, the more will be fixed before the final release.
Where to download:
The image checksum's, hybrid ISO(DVD, USB) images are available here:
http://www.ghostbsd.org/download
Writing the iso to a USB stick
On BSD
dd if=GhostBSD10.3-ALPHA1-20160429-223308-mate-amd64.iso of=/dev/da0 bs=1m
On Linux
dd if=GhostBSD10.3-ALPHA1-20160429-223308-mate-amd64.iso of=/dev/sdf bs=1M
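To verify the downloaded image against the published checksum before writing it, a minimal sketch in Python (the filename is taken from the dd examples above; which digest algorithm the project publishes is an assumption here, so match whatever the download page lists):

import hashlib

iso_path = "GhostBSD10.3-ALPHA1-20160429-223308-mate-amd64.iso"
expected = "<digest from the download page>"  # hypothetical placeholder

h = hashlib.sha256()  # assumed algorithm; swap for md5/sha512 if that is what is published
with open(iso_path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

print("OK" if h.hexdigest() == expected else "Checksum mismatch!")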
Feedback
Is there anything you can help with or want to be involved in? Do you have a lot of free time? Maybe you just want to discuss your experiences or ask us some questions. Please come talk to us on IRC Freenode #ghostbsd or on GhostBSD Forums. |
import keras

def load_gan_model(cls):
    # Load the trained GAN for the given class label from the models directory.
    return keras.models.load_model('models/{}-gan.h5'.format(cls)) |
Andrew Taggart of The Chainsmokers and Halsey perform onstage during the 2016 MTV Video Music Awards at Madison Square Garden on Aug. 28, 2016 in New York City.
The Chainsmokers' "Closer," featuring Halsey, reigns for a second week, Twenty One Pilots double up in the top five & Mendes reaches the top 10 with "Treat You Better."
The Chainsmokers and featured artist Halsey lead the Billboard Hot 100 (dated Sept. 10) for a second week with "Closer."
Plus, Twenty One Pilots reach the top five with "Ride," joining icons The Beatles and Elvis Presley for a piece of chart history; and Shawn Mendes scores his second top 10, as "Treat You Better" lifts from No. 12 to No. 10.
As we do every Monday when the chart is refreshed, let's run down the Hot 100's top 10 and more. Highlights of the airplay, sales and streaming-based Hot 100 post on Billboard.com each Monday, with all charts updated each Tuesday.
"Closer," released on Disruptor/Columbia Records, and the first No. 1 for both EDM duo The Chainsmokers (Andrew Taggart and Alex Pall) and alt/pop singer-songwriter Halsey, spends a third week atop the Digital Songs chart with 143,000 downloads sold (up 23 percent) in the week ending Aug. 25, according to Nielsen Music, marking a new personal best for the act for digital song sales in a week (surpassing last week's sum).
"Closer" tops Streaming Songs for a second week, up by 24 percent to 28.6 million U.S. streams. Spotify streams mark 46 percent of the song's total clicks; its only official video so far is a lyric video (ahead of an expected proper clip). "Closer" leads the audio subscription services-based On-Demand Songs streaming chart for a third week (18.4 million on-demand streams, up 13 percent).
The Chainsmokers & Halsey Perform 'Closer' at 2016 VMAs: Watch
On Radio Songs, "Closer" climbs 19-15 (60 million in airplay audience, up 19 percent). It adds the Hot 100's top Digital and Streaming Gainer awards for a second week each. "Closer" also crowns Billboard's Hot Dance/Electronic Songs chart for a second week.
Major Lazer's "Cold Water," featuring Bieber and MO, returns to its No. 2 Hot 100 peak, up from No. 3. (The song has yo-yoed up and down from No. 2 in its first five weeks; after debuting at No. 2, it's ranked at Nos. 5-2-3-2.) On Streaming Songs, it falls 2-3 after spending its first three weeks at No. 1 (19.9 million U.S. streams, down 1 percent). The track regresses 3-4 on Digital Songs (58,000, down 9 percent), which it led for a week (upon its debut), but rises 10-8 on Radio Songs (86 million, up 12 percent).
EDM fans take note: with "Closer" and "Cold Water" at Nos. 1 and 2, respectively, on both the Hot 100 and Hot Dance/Electronic Songs simultaneously, it's the first time that the top two have matched on the two charts, dating to the latter list's January 2013 launch.
Sia's former four-week Hot 100 No. 1 "Cheap Thrills," featuring Sean Paul, dips 2-3. Still, the collab remains the most-heard song on U.S. radio, logging a fourth week at No. 1 on Radio Songs (154 million, down 3 percent). It drops 5-6 on Digital Songs (51,000 downloads sold, down 9 percent) and 9-12 on Streaming Songs (12.2 million, down 3 percent).
Twenty One Pilots Make History at No. 1 on Pop Songs & Alternative Songs Charts
Rounding out the Hot 100's top five are two songs by Twenty One Pilots, who hold at their No. 4 high with "Heathens," from the Suicide Squad: The Album soundtrack, and hit the top five with "Ride" (6-5), from their 2015 album Blurryface. That results in this honor: Twenty One Pilots are just the third rock act with simultaneous top five Hot 100 hits in the chart's 58-year history, following only The Beatles and Elvis Presley (!) They're the first rock act to do so in 47 years, as The Beatles totaled 18 weeks with at least two concurrent top five hits in 1964-66 and 1969; Presley ranked in the top five with two titles on the chart dated April 20, 1959: "I Need Your Love Tonight" and "(Now and Then There's) A Fool Such as I" (released on two sides of the same vinyl single), at Nos. 4 and 5, respectively. (Of note, Presley's career predated the Hot 100's inception by two years.)
Twenty One Pilots (Tyler Joseph and Josh Dun) are also just the third duo with simultaneous top five Hot 100 hits. Macklemore & Ryan Lewis doubled up in the top five concurrently with "Thrift Shop" (featuring Wanz) and "Can't Hold Us" (featuring Ray Dalton) for three weeks in 2013, and OutKast did so with "Hey Ya!" and "The Way You Move" (featuring Sleepy Brown) for 14 frames in 2003-04.
"Heathens" holds at No. 2 on Digital Songs (82,000, essentially even week-over-week); slips 3-4 on Streaming Songs (although with a 2 percent gain to 20 million); and bounds 42-27 on Radio Songs (41 million, up 40 percent), winning the Hot 100's top Airplay Gainer prize. It also notches a second week at No. 1 on Billboard's Hot Rock Songs chart. "Ride" rises 3-2 on Radio Songs (133 million, down 1 percent) and retreats 11-13 on Streaming Songs (11.4 million, down 4 percent) and 13-15 on Digital Songs (35,000, down 9 percent).
Twenty One Pilots' Hot 100 action adds to its already impressive chart week. As previously reported, "Ride" rolls 2-1 on Pop Songs and "Heathens" jumps 3-1 on Alternative Songs, making the act the first to top the radio airplay tallies simultaneously with different songs.
Calvin Harris' "This Is What You Came For," featuring Rihanna, descends 5-6 on the Hot 100 after peaking at No. 3, while The Chainsmokers' other song in the top 10, the No. 3-peaking "Don't Let Me Down," featuring Daya, rebounds 8-7.
Ask Billboard: Drake's Career Sales & More
Drake's "One Dance" drops 7-8 on the Hot 100. Still, it posts a 17th week atop Hot R&B/Hip-Hop Songs, extending the longest reign of his 16 No. 1s on the chart. "Dance" also rules the Songs of the Summer chart for a 13th week, having led the seasonal running tally each week since the list relaunched; with one week left in the summer tracking period (for the chart dated Sept. 17), we'll find out next week if the song is officially crowned the top song of the summer.
And, as Drake remains in the Hot 100's top 10, he has now spent 50 consecutive weeks in the bracket, padding his record among solo males. Here's an updated look at the artists to spend the most weeks in-a-row in the top 10 in the Hot 100's history:
69 weeks, Katy Perry, 2010-11
50 weeks, Drake, 2015-16
48 weeks, Ace of Base, 1993-94
46 weeks, Rihanna, 2010-11
45 weeks, The Weeknd, 2015
Drake's streak began on the Oct. 3, 2015, Hot 100, when "Hotline Bling" jumped 16-9. (He has been credited as the lead artist on songs in 39 weeks of his 50-week run.)
Capping the Hot 100's top 10, Adele's "Send My Love (To Your New Lover)" returns to its peak (10-9) and Shawn Mendes reaches the top 10, as "Treat You Better" rises 12-10. Mendes' latest also enters the Radio Songs top 10 (11-10; 82 million, up 12 percent) and lifts 4-3 on Digital Songs (64,000, up 5 percent, boosted in part by 69-cent sale-pricing in the iTunes Store) and 17-16 on Streaming Songs (10.1 million, up 1 percent). Mendes earns his second Hot 100 top 10, after "Stitches" hit No. 4 last November. (He also scores his second top 10 on Radio Songs, where "Stitches" reached No. 3.)
Billboard Cover: Shawn Mendes Brings Back Hunky, Guitar-Strumming Sensitivity
"Better" is from Mendes' second full-length album, Illuminate, due Sept. 23.
Just outside the Hot 100's top 10, Charlie Puth's "We Don't Talk Anymore," featuring Selena Gomez, surges to a new high, 18-13 (likewise helped by a 69-cent iTunes sale tag); DJ Snake's "Let Me Love You," featuring Justin Bieber, rebounds 19-16 after debuting at No. 12 two weeks ago; and two songs hit the top 20: D.R.A.M.'s "Broccoli," featuring Lil Yachty (21-19) and Kiiara's "Gold" (23-20).
Find out more Hot 100 news in Billboard's new weekly original video series, Charts Center, featuring chart information and commentary, interviews with artists, exclusive performances and more, posting this week. Also look for the weekly "Hot 100 Chart Moves" column later this week and visit Billboard.com tomorrow (Aug. 30), when all rankings, including the Hot 100 in its entirety, will refresh. |
<filename>ictarus/src/config.rs
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::net::{SocketAddr, ToSocketAddrs};
use std::sync::{Arc, RwLock};
use crate::constants::*;
use log::*;
/// A config that can be shared across tasks.
pub type SharedConfig = Arc<RwLock<Config>>;
/// Node configuration.
///
/// You can use the `ConfigBuilder` to conveniently create one.
#[derive(Clone, Debug)]
pub struct Config {
pub min_forward_delay: u64,
pub max_forward_delay: u64,
pub host: String,
pub port: u16,
pub round_duration: u64,
pub neighbors: Vec<SocketAddr>,
}
impl Default for Config {
fn default() -> Self {
Config {
min_forward_delay: 0,
max_forward_delay: 3,
host: String::from("127.0.0.1"),
port: 1337,
round_duration: 60000,
neighbors: vec![],
}
}
}
impl Config {
/// Sets the port of the node.
///
/// NOTE: This method doesn't check the validity of the port provided yet.
pub fn set_port(&mut self, port: u16) {
self.port = port;
}
/// Sets the host address of the node.
///
/// NOTE: This method doesn't check the validity of the host provided yet.
pub fn set_host(&mut self, host: &str) {
//value.to_socket_addrs().unwrap().nth(0).unwrap().to_string();
self.host = host.into();
}
/// Adds a neighbor to the node.
///
/// This method will panic, if one attempts to add more neighbors than allowed,
/// or if the provided address doesn't resolve to a valid socket address.
pub fn add_neighbor(&mut self, address: &str) {
if self.neighbors.len() >= MAX_NEIGHBOR_COUNT {
panic!("error: cannot add more neighbors");
}
self.neighbors.push(
address
.to_socket_addrs()
.expect("Couldn't resolve provided address")
.nth(0)
.unwrap(),
);
#[cfg(debug_assertions)]
println!(
"{} > Added {} as neighbor to config ({}).",
self.port,
address,
self.neighbors.len()
);
}
/// Returns the socket address of the node.
///
/// This method will panic, if the host and port fields don't resolve
/// to a valid socket address.
pub fn get_socket_addr(&self) -> SocketAddr {
format!("{}:{}", self.host, self.port)
.to_socket_addrs()
.unwrap()
.nth(0)
.unwrap()
}
/// Creates a config from the specified file.
pub fn from_file(file: &str) -> Self {
let buffered = BufReader::new(File::open(file).expect("File does not exist."));
let mut config = Config::default();
buffered
.lines()
.filter_map(|line| line.ok())
.for_each(|line| {
let parts = line.split('=').collect::<Vec<&str>>();
match parts[0] {
MIN_FORWARD_DELAY_PARAM => {
config.min_forward_delay = parts[1]
.parse::<u64>()
.expect("error: couldn't parse min-forward-delay");
}
MAX_FORWARD_DELAY_PARAM => {
config.max_forward_delay = parts[1]
.parse::<u64>()
.expect("error: couldn't parse max-forward-delay");
}
PORT_PARAM => {
config.port = parts[1].parse::<u16>().expect("error: couldn't parse port");
}
HOST_PARAM => {
config.host = parts[1].into();
}
ROUND_DURATION_PARAM => {
config.round_duration = parts[1]
.parse::<u64>()
.expect("error: couldn't parse round-duration-ms");
}
NEIGHBORS_PARAM => {
let addresses = parts[1].split(',').collect::<Vec<&str>>();
if addresses.is_empty() {
panic!("error: no neighbors specified");
} else if addresses.len() > MAX_NEIGHBOR_COUNT {
panic!("error: too many neighbors specified");
}
for address in addresses.iter() {
config
.neighbors
.push(address.to_socket_addrs().unwrap().nth(0).unwrap());
/*
config.neighbors.push(
address
.parse::<SocketAddr>()
.expect("couldn't parse socket address"),
);
*/
}
}
_ => {
warn!("unknown property '{}'", &parts[0]);
}
}
});
config
}
}
/// A convenience type for creating node configs via method chaining.
///
/// Example:
/// `let config = ConfigBuilder::default().host("localhost").port(1337).build();`
pub struct ConfigBuilder {
config: Config,
}
impl Default for ConfigBuilder {
fn default() -> Self {
//let config = Config::default();
ConfigBuilder {
config: Config::default(),
}
}
}
impl ConfigBuilder {
pub fn min_forward_delay(mut self, min_forward_delay: u64) -> Self {
self.config.min_forward_delay = min_forward_delay;
self
}
pub fn max_forward_delay(mut self, max_forward_delay: u64) -> Self {
self.config.max_forward_delay = max_forward_delay;
self
}
pub fn host(mut self, host: &str) -> Self {
self.config.host = host.to_string();
self
}
pub fn port(mut self, port: u16) -> Self {
self.config.port = port;
self
}
pub fn round_duration(mut self, round_duration: u64) -> Self {
self.config.round_duration = round_duration;
self
}
pub fn neighbor(mut self, address: &str) -> Self {
self.config.neighbors.push(
address
.to_socket_addrs()
.expect("couldn't parse given value to a socket address")
.nth(0)
.unwrap(),
);
self
}
pub fn build(self) -> Config {
self.config
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::*;
use std::io::Write;
/// This test creates a default config.
///
/// The test is passed, if all fields hold their expected default values.
#[test]
fn test_create_default_config() {
let config = Config::default();
assert_eq!(0, config.min_forward_delay);
assert_eq!(3, config.max_forward_delay);
assert_eq!(0, config.neighbors.len());
assert_eq!(1337, config.port);
assert_eq!("127.0.0.1", config.host);
assert_eq!(60000, config.round_duration);
}
/// This test creates a Config from a test file.
///
/// The test is passed, if all fields hold the values as specified in the file.
#[test]
fn test_config_from_file() {
let mut file = File::create("ictarus.cfg").expect("couldn't create file");
write!(file, "min_forward_delay=0\nmax_forward_delay=10\nport=14265\nhost=127.0.0.1\nround_duration=60000\nneighbors=127.0.0.1:14266,127.0.0.1:14267,127.0.0.1:14268").expect("couldn't write file");
let config = Config::from_file("./ictarus.cfg");
assert_eq!(0, config.min_forward_delay);
assert_eq!(10, config.max_forward_delay);
assert_eq!(3, config.neighbors.len());
assert_eq!(14265, config.port);
assert_eq!("127.0.0.1", config.host);
assert_eq!(60000, config.round_duration);
remove_file("./ictarus.cfg").expect("couldn't delete file");
}
/// This test creates a Config by using the ConfigBuilder type.
///
/// The test is passed, if all fields hold the values as specified by the builder.
#[test]
fn test_config_builder() {
let config = ConfigBuilder::default()
.host("127.0.0.2")
.port(6666)
.neighbor("127.0.0.3:1337")
.build();
assert_eq!(config.host, "127.0.0.2");
assert_eq!(config.port, 6666);
assert_eq!(1, config.neighbors.len());
println!("{}", config.neighbors.get(0).unwrap());
}
/// This test creates a Config by using the ConfigBuilder type.
///
/// The test is passed, if the domain name 'localhost' is resolved to
/// the corresponding ip address '127.0.0.1'.
#[test]
fn test_domain_name_resolution() {
let config = ConfigBuilder::default()
.host("localhost")
.port(1337)
.build();
assert_eq!(
"127.0.0.1:1337".to_string(),
config.get_socket_addr().to_string()
);
}
}
|
// safer than direct double comparison from distanceSq
private boolean areAdjacent(BlockPos posA, BlockPos posB) {
int diffX = Math.abs(posA.getX() - posB.getX());
int diffY = Math.abs(posA.getY() - posB.getY());
int diffZ = Math.abs(posA.getZ() - posB.getZ());
return (diffX + diffY + diffZ) == 1;
}
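The comment above is the point worth keeping: on integer block coordinates, the Manhattan-distance test is exact, whereas comparing a floating-point distanceSq against an exact constant can misfire once the values come out of other arithmetic. The same check sketched standalone (hypothetical integer positions as tuples):

def are_adjacent(a, b):
    # Exact integer arithmetic: adjacent iff the Manhattan distance is 1.
    return sum(abs(x - y) for x, y in zip(a, b)) == 1

print(are_adjacent((0, 0, 0), (1, 0, 0)))  # True
print(are_adjacent((0, 0, 0), (1, 1, 0)))  # False: diagonal neighbors don't count
|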
<gh_stars>10-100
"""Contains a few more sophisticated commands that are usually accessed directly inside configs."""
import os
import shutil
import atexit
from pathlib import Path
from collections import defaultdict
from typing import Callable, Iterable
import numpy as np
from tqdm import tqdm
from .io import save_json, PathLike, load as _load, save as _save
from dpipe.itertools import collect
def populate(path: PathLike, func: Callable, *args, **kwargs):
"""
Call ``func`` with ``args`` and ``kwargs`` if ``path`` doesn't exist.
Examples
--------
>>> populate('metrics.json', save_metrics, targets, predictions)
# if `metrics.json` doesn't exist, the following call will be performed:
>>> save_metrics(targets, predictions)
Raises
------
FileNotFoundError: if after calling ``func`` the ``path`` still doesn't exist.
"""
def flush(message):
print(f'\n>>> {message}', flush=True)
path = Path(path)
if path.exists():
flush(f'Nothing to be done, "{path}" already exists.')
return
try:
flush(f'Running command to generate "{path}".')
func(*args, **kwargs)
except BaseException as e:
if path.exists():
shutil.rmtree(path)
raise RuntimeError('An exception occurred. The outputs were cleaned up.\n') from e
if not path.exists():
raise FileNotFoundError(f'The output was not generated: "{path}"')
def lock_dir(folder: PathLike = '.', lock: str = '.lock'):
"""
Lock the given ``folder`` by generating a special lock file - ``lock``.
Raises
------
FileExistsError: if ``lock`` already exists, i.e. the folder is already locked.
"""
lock = Path(folder) / lock
if lock.exists():
raise FileExistsError(f'Trying to lock directory {lock.resolve().parent}, but it is already locked.')
lock.touch(exist_ok=False)
atexit.register(os.remove, lock)
@np.deprecate
def np_filename2id(filename):
*rest, extension = filename.split('.')
assert extension == 'npy', f'Expected npy file, got {extension} from {filename}'
return '.'.join(rest)
def transform(input_path, output_path, transform_fn):
os.makedirs(output_path)
for f in tqdm(os.listdir(input_path)):
np.save(os.path.join(output_path, f), transform_fn(np.load(os.path.join(input_path, f))))
@collect
def load_from_folder(path: PathLike, loader=_load, ext='.npy'):
"""Yields (id, object) pairs loaded from ``path``."""
for file in sorted(Path(path).iterdir()):
assert file.name.endswith(ext), file.name
yield file.name[:-len(ext)], loader(file)
def map_ids_to_disk(func: Callable[[str], object], ids: Iterable[str], output_path: str,
exist_ok: bool = False, save: Callable = _save, ext: str = '.npy'):
"""
Apply ``func`` to each id from ``ids`` and save each output to ``output_path`` using ``save``.
If ``exist_ok`` is True the existing files will be ignored, otherwise an exception is raised.
"""
os.makedirs(output_path, exist_ok=exist_ok)
for identifier in ids:
output = os.path.join(output_path, f'{identifier}{ext}')
if exist_ok and os.path.exists(output):
continue
try:
save(func(identifier), output)
except BaseException as e:
raise RuntimeError(f'An exception occurred while processing {identifier}.') from e
def predict(ids, output_path, load_x, predict_fn, exist_ok=False, save: Callable = _save, ext='.npy'):
map_ids_to_disk(lambda identifier: predict_fn(load_x(identifier)), tqdm(ids), output_path, exist_ok, save, ext)
def evaluate_aggregated_metrics(load_y_true, metrics: dict, predictions_path, results_path, exist_ok=False,
loader: Callable = _load, ext='.npy'):
assert len(metrics) > 0, 'No metric provided'
os.makedirs(results_path, exist_ok=exist_ok)
targets, predictions = [], []
for identifier, prediction in tqdm(load_from_folder(predictions_path, loader=loader, ext=ext)):
predictions.append(prediction)
targets.append(load_y_true(identifier))
for name, metric in metrics.items():
save_json(metric(targets, predictions), os.path.join(results_path, name + '.json'), indent=0)
def evaluate_individual_metrics(load_y_true, metrics: dict, predictions_path, results_path, exist_ok=False,
loader: Callable = _load, ext='.npy'):
assert len(metrics) > 0, 'No metric provided'
os.makedirs(results_path, exist_ok=exist_ok)
results = defaultdict(dict)
for identifier, prediction in tqdm(load_from_folder(predictions_path, loader=loader, ext=ext)):
target = load_y_true(identifier)
for metric_name, metric in metrics.items():
results[metric_name][identifier] = metric(target, prediction)
for metric_name, result in results.items():
save_json(result, os.path.join(results_path, metric_name + '.json'), indent=0)
|
/**
* Call this method when the graphics needs to be repainted on the graphics
* surface.
*
* @param g the graphics to paint on
*/
private void drawSurface(Graphics2D g) {
final Dimension d = this.getSize();
if (gameOver) {
g.setColor(Color.red);
g.fillRect(0, 0, d.width, d.height);
g.setColor(Color.black);
g.setFont(new Font("Arial", Font.BOLD, 48));
            g.drawString("Game over!", 20, d.height / 2 - 24);
return;
}
g.setColor(Color.DARK_GRAY);
g.fillRect(0, 0, d.width, d.height);
for (Alien alien : aliens) {
g.setColor(Color.GREEN);
g.fillRect(alien.bounds.x, alien.bounds.y, alien.bounds.width, alien.bounds.height);
}
if (shipImageSprite != null) {
int offset = 46 * shipImageSpriteCount;
g.drawImage(shipImageSprite, spaceShip.x, spaceShip.y, spaceShip.x + spaceShip.width,
spaceShip.y + spaceShip.height, offset, 0, offset + 46, 20, null);
} else {
g.setColor(Color.black);
g.fillRect(spaceShip.x, spaceShip.y, spaceShip.width, spaceShip.height);
}
} |
/**
* AdaGrad Descent Algo
*
* edits params using totSqGrad as well
*
* @author rosecatherinek
*/
protected void agd(ParamVector<String,?> params, PosNegRWExample ex) {
TIntDoubleMap gradient = gradient(params,ex);
for (TIntDoubleIterator grad = gradient.iterator(); grad.hasNext(); ) {
grad.advance();
if (Math.abs(grad.value())<MIN_GRADIENT) continue;
String feature = ex.getGraph().featureLibrary.getSymbol(grad.key());
if (trainable(feature)){
Double g = grad.value();
totSqGrad.adjustValue(feature, g * g);
params.adjustValue(feature, - learningRate(feature) * g);
if (params.get(feature).isInfinite()) {
log.warn("Infinity at "+feature+"; gradient "+grad.value()+"; rt "+totSqGrad.get(feature));
}
}
}
}
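The method above is standard AdaGrad: each feature accumulates a running sum of squared gradients (totSqGrad), and the effective step size shrinks as that sum grows (the learningRate(feature) helper presumably derives from it). A minimal standalone sketch of the same rule, using plain Python dicts rather than the project's ParamVector API:

import math

def adagrad_step(params, sq_grad_sums, gradient,
                 base_lr=0.1, eps=1e-8, min_gradient=1e-10):
    """One AdaGrad update over a sparse gradient (feature -> value)."""
    for feature, g in gradient.items():
        if abs(g) < min_gradient:  # mirrors the MIN_GRADIENT skip above
            continue
        sq_grad_sums[feature] = sq_grad_sums.get(feature, 0.0) + g * g
        # Per-feature step size decays with the accumulated squared gradient.
        lr = base_lr / (math.sqrt(sq_grad_sums[feature]) + eps)
        params[feature] = params.get(feature, 0.0) - lr * g
|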
/**
* Maintains information about a vertex partition of a graph.
* This can be built from a map from vertices to vertex sets
* or from a collection of (disjoint) vertex sets,
* such as those created by various clustering methods.
*/
public class VertexPartition<V,E>
{
private Map<V,Set<V>> vertex_partition_map;
private Collection<Set<V>> vertex_sets;
private Graph<V,E> graph;
/**
* Creates an instance based on the specified graph and mapping from vertices
* to vertex sets, and generates a set of partitions based on this mapping.
* @param g the graph over which the vertex partition is defined
* @param partition_map the mapping from vertices to vertex sets (partitions)
*/
public VertexPartition(Graph<V,E> g, Map<V, Set<V>> partition_map)
{
this.vertex_partition_map = Collections.unmodifiableMap(partition_map);
this.graph = g;
}
/**
* Creates an instance based on the specified graph, vertex-set mapping,
* and set of disjoint vertex sets. The vertex-set mapping and vertex
* partitions must be consistent; that is, the mapping must reflect the
* division of vertices into partitions, and each vertex must appear in
* exactly one partition.
* @param g the graph over which the vertex partition is defined
* @param partition_map the mapping from vertices to vertex sets (partitions)
* @param vertex_sets the set of disjoint vertex sets
*/
public VertexPartition(Graph<V,E> g, Map<V, Set<V>> partition_map,
Collection<Set<V>> vertex_sets)
{
this.vertex_partition_map = Collections.unmodifiableMap(partition_map);
this.vertex_sets = vertex_sets;
this.graph = g;
}
/**
* Creates an instance based on the specified graph and set of disjoint vertex sets,
* and generates a vertex-to-partition map based on these sets.
* @param g the graph over which the vertex partition is defined
* @param vertex_sets the set of disjoint vertex sets
*/
public VertexPartition(Graph<V,E> g, Collection<Set<V>> vertex_sets)
{
this.vertex_sets = vertex_sets;
this.graph = g;
}
/**
* Returns the graph on which the partition is defined.
* @return the graph on which the partition is defined
*/
public Graph<V,E> getGraph()
{
return graph;
}
/**
* Returns a map from each vertex in the input graph to its partition.
* This map is generated if it does not already exist.
* @return a map from each vertex in the input graph to a vertex set
*/
public Map<V,Set<V>> getVertexToPartitionMap()
{
if (vertex_partition_map == null)
{
this.vertex_partition_map = new HashMap<V, Set<V>>();
for (Set<V> set : this.vertex_sets)
for (V v : set)
this.vertex_partition_map.put(v, set);
}
return vertex_partition_map;
}
/**
* Returns a collection of vertex sets, where each vertex in the
* input graph is in exactly one set.
* This collection is generated based on the vertex-to-partition map
* if it does not already exist.
* @return a collection of vertex sets such that each vertex in the
* instance's graph is in exactly one set
*/
public Collection<Set<V>> getVertexPartitions()
{
if (vertex_sets == null)
{
this.vertex_sets = new HashSet<Set<V>>();
this.vertex_sets.addAll(vertex_partition_map.values());
}
return vertex_sets;
}
/**
* Returns the number of partitions.
*/
public int numPartitions()
{
return vertex_sets.size();
}
@Override
public String toString()
{
return "Partitions: " + vertex_partition_map;
}
} |
def update_loop(self, iteration, total, update_msg=''):
setattr(self, 'update_time', self.current_time)
iter2percent = (100. * (iteration / float(total)))
setattr(self, 'percent_complete', iter2percent)
setattr(self, 'complete', True if self.percent_complete == 100 else False)
        if update_msg != '':  # equality, not identity: 'is not' on string literals is unreliable
setattr(self, 'update_msg', '(%s)' % update_msg)
self._force_write() |
import numpy as np
import scipy.integrate

def complex_quad(fun, a, b, **kwargs):
    # scipy.integrate.quad only handles real-valued integrands, so integrate
    # the real and imaginary parts separately and recombine them.
    real_integral = scipy.integrate.quad(lambda x: np.real(fun(x)), a, b,
                                         **kwargs)
    imag_integral = scipy.integrate.quad(lambda x: np.imag(fun(x)), a, b,
                                         **kwargs)
    return (real_integral[0] + 1j * imag_integral[0],
            real_integral[1:],
            imag_integral[1:])
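A quick sanity check of the helper above: ∫₀^π e^{ix} dx = (e^{iπ} − 1)/i = 2i, so the first element of the returned tuple should be close to 2j.

value, real_err_info, imag_err_info = complex_quad(lambda x: np.exp(1j * x), 0, np.pi)
print(value)  # approximately 2j
|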
import ResolvedApi, { EXPERIMENT_COOKIE as experimentCookie, PREVIEW_COOKIE as previewCookie } from './ResolvedApi';
import Predicates from './Predicates';
import { Experiments } from './experiments';
import Api, { ApiOptions } from './Api';
import { DefaultClient } from './client';
export default {
experimentCookie,
previewCookie,
Predicates,
predicates: Predicates,
Experiments,
Api,
client,
getApi,
api,
};
function client(url: string, options?: ApiOptions): DefaultClient {
return new DefaultClient(url, options);
}
function getApi(url: string, options?: ApiOptions): Promise<ResolvedApi> {
return DefaultClient.getApi(url, options);
}
function api(url: string, options?: ApiOptions): Promise<ResolvedApi> {
return getApi(url, options);
}
|
<reponame>aperturerobotics/controllerbus
package boilerplate_v1
import "github.com/aperturerobotics/controllerbus/example/boilerplate"
// _ is a type assertion
var _ boilerplate.BoilerplateResult = ((*BoilerplateResult)(nil))
|
// RequireSSL returns values for the require SSL automation.
func (f *Finding) RequireSSL() *requiressl.Values {
return &requiressl.Values{
ProjectID: f.SQLScanner.GetFinding().GetSourceProperties().GetProjectID(),
InstanceName: sha.Instance(f.SQLScanner.GetFinding().GetResourceName()),
}
} |
Image caption: An officer managed to get the emu into a police car
An escaped emu has been captured by police in a patrol car after it was spotted in a north Devon town during rush hour.
The 4ft (1.2m) tall bird was first seen on St George's Road in Barnstaple.
It was then found by officers in nearby Riddell Avenue at about 08:00 GMT, Devon and Cornwall Police said.
Despite the emu being in a "state of panic", police managed to get the creature into the patrol car and contacted the owner.
'Talk of the town'
Police said the animal had escaped from a local property.
Acting Sgt Zoe Parnell and PCSO Stephen Huxtable were called to deal with the flightless bird.
Sgt Parnell said it was an unusual situation, not least because they "never had training for dealing with an emu".
She said: "The owner wasn't sure if it was let out or if it managed to escape.
"My colleague had to pick it up and put it in the back of the car.
"It was a bit scared and it made a mess in the car, but the owners offered to clean it up afterwards.
"It's been the talk of the town. Everyone was taking photos." |
package com.example.test.tests;
import com.example.test.model.Item;
import org.testng.Assert;
import org.testng.annotations.Test;
public class SearchSecondTest extends TestBase{
@Test
public void SecondTest() throws InterruptedException {
// app.openSite("https://yandex.ru/");
// app.getMainPage().goToMarket();
//
// app.getMainPage().switchToNextTab();
app.openMarket2();
app.getMarketHeader().selectComputerDepartment();
Thread.sleep(3000);
app.getMarketItem().filterItem(new Item()
.setItemType("graficheskie-planshety")
.setPriceFrom("40000")
.setPriceTo("80000")
.setBrand("WACOM"));
Thread.sleep(2000);
String itemName = app.getMarketItem().getItemNameFromListByOrder(2);
System.out.println(itemName);
Thread.sleep(2000);
app.getMarketHeader().searchItemFromSearchBox(itemName);
Thread.sleep(2000);
String foundItemName = app.getMarketItem().getItemNameFromListByOrder(1);
Assert.assertEquals(foundItemName,itemName);
}
}
|
def load_all_spyrelets(gateway):
spyrelet_configs, _ = copy.copy(get_config_param(gateway.config, [CONFIG_SPYRELETS_KEY]))
if _LOADED_SPYRELETS:
raise SpyreletLoadError(None, 'the following spyrelets were already loaded so nothing was done: {}'.format(_LOADED_SPYRELETS))
while bool(spyrelet_configs):
spyrelet_name = next(iter(spyrelet_configs))
load_spyrelet(spyrelet_name, gateway)
del spyrelet_configs[spyrelet_name]
return _LOADED_SPYRELETS |
package trade
import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
	"time"
	"github.com/raggaer/respoe/client"
)
const (
exchangeURL = "https://www.pathofexile.com/api/trade/exchange/%s"
exchangeOffersURL = "https://www.pathofexile.com/api/trade/fetch/%s?query=%s&exchange"
)
// ExchangeQuery struct used to create exchange queries
type ExchangeQuery struct {
Exchange Exchange `json:"exchange"`
}
// Exchange defines a exchange query
type Exchange struct {
Have []string `json:"have"`
Want []string `json:"want"`
Status ExchangeStatus `json:"status"`
}
// ExchangeStatus the exchange online status
type ExchangeStatus struct {
Option string `json:"option"`
}
// ExchangeEndpoints list of exchange endpoints
type ExchangeEndpoints struct {
Endpoints []string
}
// ExchangeResponse struct used for exchange request responses
type ExchangeResponse struct {
Error ExchangeError `json:"error"`
Result []string `json:"result"`
Id string `json:"id"`
Total int `json:"total"`
}
// ExchangeError struct used to render API errors
type ExchangeError struct {
Message string
Code int
}
// ExchangeOffersResponse struct used for exchange offers
type ExchangeOffersResponse struct {
Result []*ExchangeOffer `json:"result"`
}
// ExchangeOffer information about a currency exchange offer
type ExchangeOffer struct {
Id string
Item *client.Item
Listing ExchangeOfferListing
}
// ExchangeOfferListing listing information about a currency exchange
type ExchangeOfferListing struct {
Method string
Whisper string
Indexed string
IndexedAt *time.Time
Price ExchangeOfferPrice
}
// ExchangeOfferPrice price information about a currency exchange
type ExchangeOfferPrice struct {
Want ExchangeOfferItem `json:"item"`
Have ExchangeOfferItem `json:"exchange"`
}
// ExchangeOfferItem offer detailed information
type ExchangeOfferItem struct {
Currency string
Amount float64
Stock int
Id string
}
// RetrieveExchange retrieves the current exchange offers
func RetrieveExchange(league string, have, want []string, online bool, c *client.Client) ([]*ExchangeOffer, error) {
// Encode exchange struct into JSON
opt := "online"
if !online {
opt = "any"
}
ex, err := json.Marshal(&ExchangeQuery{
Exchange: Exchange{
Have: have,
Want: want,
Status: ExchangeStatus{
Option: opt,
},
},
})
if err != nil {
return nil, err
}
// Create request object so we can add headers
req, err := http.NewRequest(http.MethodPost, fmt.Sprintf(exchangeURL, league), bytes.NewReader(ex))
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", "application/json")
resp, err := c.HTTP.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
respContent, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
exchangeResponse := ExchangeResponse{}
if err := json.Unmarshal(respContent, &exchangeResponse); err != nil {
return nil, err
}
// Check for errors
if exchangeResponse.Error.Message != "" {
return nil, errors.New(exchangeResponse.Error.Message)
}
// Retrieve deals response
offers, err := retrieveExchangeOffers(&exchangeResponse, c)
return offers, err
}
func retrieveExchangeOffers(e *ExchangeResponse, c *client.Client) ([]*ExchangeOffer, error) {
	// The fetch endpoint accepts a limited number of ids per request,
	// so join at most the first 19 results with commas.
	ids := e.Result
	if len(ids) > 19 {
		ids = ids[:19]
	}
	d := strings.Join(ids, ",")
resp, err := c.HTTP.Get(fmt.Sprintf(exchangeOffersURL, d, e.Id))
if err != nil {
return nil, err
}
defer resp.Body.Close()
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
offers := ExchangeOffersResponse{}
if err := json.Unmarshal(respBody, &offers); err != nil {
return nil, err
}
return offers.Result, nil
}
|
/**
* Data Pre Process Class
* @author Neyzoter Song
* @date 2020-2-19
*/
public class DataPreProcess implements Serializable {
private static final long serialVersionUID = 2357018289949033614L;
/**
* properties
*/
private static PropertiesUtil propertiesUtil = new PropertiesUtil(PropertiesLables.PROPERTIES_PATH);
/**
     * Compacts two packs into one.
     * @apiNote requires that the Year, Month and Day of both packs are the same
* @param pack1 {@link VehicleHttpPack}
* @param pack2 {@link VehicleHttpPack}
* @return {@link VehicleHttpPack}
*/
public static VehicleHttpPack compact (VehicleHttpPack pack1, VehicleHttpPack pack2) throws IllVehicleHttpPackTime {
if (!pack1.getDay().equals(pack2.getDay())) {
throw new IllVehicleHttpPackTime(IllVehicleHttpPackTime.ILL_DAY);
}
if (!pack1.getMonth().equals(pack2.getMonth())) {
throw new IllVehicleHttpPackTime(IllVehicleHttpPackTime.ILL_MONTH);
}
if (!pack1.getYear().equals(pack2.getYear())) {
throw new IllVehicleHttpPackTime(IllVehicleHttpPackTime.ILL_YEAR);
}
SortedMap<Long, RuntimeData> pack1Map = pack1.getVehicle().getRtDataMap();
SortedMap<Long, RuntimeData> pack2Map = pack2.getVehicle().getRtDataMap();
Set<Map.Entry<Long, RuntimeData>> pack2MapSet= pack2Map.entrySet();
for (Map.Entry<Long, RuntimeData> map : pack2MapSet) {
pack1Map.put(map.getKey(),map.getValue());
}
return pack1;
}
/**
* outlier handling
* @param pack {@link VehicleHttpPack}
* @return {@link VehicleHttpPack}
*/
public static VehicleHttpPack outlierHandling (VehicleHttpPack pack) {
// TODO
SortedMap rtDataMap = pack.getVehicle().getRtDataMap();
System.out.println(pack.toString());
return pack;
}
/**
* missing value process
* @param pack {@link VehicleHttpPack}
* @return {@link VehicleHttpPack}
*/
    public static VehicleHttpPack missingValueProcess (VehicleHttpPack pack) {
        // Reflect over RuntimeData (not VehicleHttpPack): the fields are read
        // from the per-timestamp RuntimeData records below.
        Class<?> clazz = RuntimeData.class;
        Field[] fs = clazz.getDeclaredFields();
        SortedMap<Long, RuntimeData> rtDataMap = pack.getVehicle().getRtDataMap();
        Iterator<Map.Entry<Long, RuntimeData>> iter = rtDataMap.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<Long, RuntimeData> entry = iter.next();
            RuntimeData rtData = entry.getValue();
            for (Field field : fs) {
                field.setAccessible(true);
                try {
                    // this field is null, so fill it
                    if (field.get(rtData) == null) {
                        // TODO: use a proper imputation strategy;
                        // for now set the (assumed Double) field to zero
                        field.set(rtData, 0.0);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        return pack;
    }
/**
* multi sampling rate process
* @param pack VehicleHttpPack
* @return {@link VehicleHttpPack}
*/
public static VehicleHttpPack multiSamplingRateProcess (VehicleHttpPack pack) {
// TODO
return pack;
}
/**
* normalize the pack
* @param pack {@link VehicleHttpPack}
     * @param minRtD {@link RuntimeData} element-wise minimum values
     * @param maxRtD {@link RuntimeData} element-wise maximum values
     * @param e small constant (presumably to guard against division by zero)
* @return {@link VehicleHttpPack}
*/
public static VehicleHttpPack normalize(VehicleHttpPack pack, RuntimeData minRtD, RuntimeData maxRtD, Double e) throws Exception{
        Iterator<Map.Entry<Long, RuntimeData>> iter = pack.getVehicle().getRtDataMap().entrySet().iterator();
        while (iter.hasNext()) {
            iter.next().getValue().normalize(minRtD, maxRtD, e);
        }
return pack;
}
    /**
     * transform the pack into a {@link DataMatrix}
     * @param pack VehicleHttpPack
     * @return {@link DataMatrix} wrapping the transposed data (Double[][]), start time and vehicle type
     */
public static DataMatrix toDataMatrix (VehicleHttpPack pack) {
Double[][] arrayT = pack.getVehicle().toArrayT();
Long startTime = pack.getVehicle().getRtDataMap().firstKey();
String vType = pack.getVehicle().getVtype();
return new DataMatrix(arrayT, startTime, vType);
}
/**
* trans to InputCorrMatrix, which can input to Model directly
* @param dataMatrix DataMatrix
     * @return {@link InputCorrMatrix} correlation matrices built from the input features
*/
public static InputCorrMatrix toInputCorrMatrix (DataMatrix dataMatrix) throws IllWinNum{
// feature num
int featureNum = Integer.parseInt(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_FEATURE_NUM));
// win num
int winNum = Integer.parseInt(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_WIN_NUM));
// max step
int maxStep = Integer.parseInt(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_STEP));
// gap time between [X][feature num][feature num][i] and [X][feature num][feature num][i + 1]
int gapTime = Integer.parseInt(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_GAP_NUM));
Double[][][][] matrix = new Double[maxStep][featureNum][featureNum][winNum];
// get data
int dataNum = dataMatrix.getMatrix()[0].length;
Double[][] data = dataMatrix.getMatrix();
// get win
String[] winStrs = propertiesUtil.getPropertiesList(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_WIN));
if (winStrs.length != winNum) {
throw new IllWinNum(winNum, winStrs.length);
}
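        // For each window size w, feature pair (i, j) and step s (windows start
        // gapTime samples apart), compute the windowed (unnormalized) correlation
        //   matrix[s][i][j][w] = (1 / win_w) * sum_{t = s*gapTime}^{s*gapTime + win_w - 1} data[i][t] * data[j][t]
        // The result is symmetric in (i, j), so only the upper triangle is
        // computed and then mirrored.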
for (int winIdx = 0; winIdx < winNum; winIdx ++) {
int winInt = Integer.parseInt(winStrs[winIdx]);
for (int i = 0; i < featureNum; i ++) {
for (int j = i; j < featureNum ; j ++) {
                // the window must fit entirely inside the data
for (int t = 0, step = 0; t <= dataNum - winInt && step < maxStep; t += gapTime, step ++) {
for (int win = 0; win < winInt; win ++) {
// init matrix
if (win == 0) {
matrix[step][i][j][winIdx] = 0.0;
}
matrix[step][i][j][winIdx] += data[i][t + win] * data[j][t + win];
}
matrix[step][i][j][winIdx] /= winInt;
matrix[step][j][i][winIdx] = matrix[step][i][j][winIdx];
}
}
}
}
Long startTime = dataMatrix.getStartTime();
String vType = dataMatrix.getVtype();
return new InputCorrMatrix(matrix,startTime ,vType);
}
/**
* trans to correlation matrix loss
* @param input input matrix
* @return output matrix loss to input matrix
*/
public static OutputCorrMatrix toCorrMatrixLoss (InputCorrMatrix input) {
Long startTime = input.getStartTime();
String vType = input.getVtype();
Double[][][][] inputMatrix = input.getMatrix();
int featureNum1 = inputMatrix[0].length;
int featureNum2 = inputMatrix[0][0].length;
assert featureNum1 == featureNum2;
int winNum = inputMatrix[0][0][0].length;
Double[][][][] output = getOutput(inputMatrix);
        // Step used to compute the loss; configured 1-based (1..maxStep), hence the -1
int evalStep = Integer.parseInt(propertiesUtil.readValue(PropertiesLables.DATA_MATRIX_STEP_TO_COMPUTE_LOSS)) - 1;
Double[][][][] evalInput = new Double[1][featureNum1][featureNum1][winNum];
// get evaluate step data
for (int i = 0; i < featureNum1; i ++) {
for (int j = 0; j < featureNum2; j ++) {
for (int k = 0; k < winNum; k ++) {
evalInput[0][i][j][k] = inputMatrix[evalStep][i][j][k];
}
}
}
Double[][][][] loss = getMatrixLoss(evalInput, output);
return new OutputCorrMatrix(loss, startTime, vType);
}
    /**
     * call TensorFlow Serving to run the model
     * @param input model input
     * @return model output
     */
    public static Double[][][][] getOutput (Double[][][][] input) {
        // TODO: wire up the real TensorFlow Serving call; for now return the
        // input perturbed with alternating-sign random noise as a stand-in
Double[][][][] output = new Double[input.length][input[0].length][input[0][0].length][input[0][0][0].length];
for (int i = 0; i < input.length; i ++) {
for (int j = 0; j < input[0].length; j ++) {
for (int k = 0; k < input[0][0].length; k ++) {
for (int l = 0; l < input[0][0][0].length; l ++) {
output[i][j][k][l] = input[i][j][k][l] + Math.pow(-1, l) * Math.random();
}
}
}
}
return output;
}
    /**
     * get the element-wise loss between input and output; values are squared so the loss is non-negative
     * @param input input
     * @param output output
     * @return Double[][][][] squared differences (computed in place in the output array)
     */
private static Double[][][][] getMatrixLoss (Double[][][][] input, Double[][][][] output) {
int d1 = input.length; assert d1 == output.length;
int d2 = input[0].length; assert d2 == output[0].length;
int d3 = input[0][0].length; assert d3 == output[0][0].length;
int d4 = input[0][0][0].length; assert d4 == output[0][0][0].length;
        Double[][][][] loss = output; // reuses (and mutates) the output array in place
for (int stepIdx = 0; stepIdx < d1; stepIdx ++) {
for (int featureNum1Idx = 0; featureNum1Idx < d2; featureNum1Idx ++) {
for (int featureNum2Idx = 0; featureNum2Idx < d3; featureNum2Idx ++) {
for (int winNumIdx = 0; winNumIdx < d4; winNumIdx ++) {
// loss
loss[stepIdx][featureNum1Idx][featureNum2Idx][winNumIdx] -= input[stepIdx][featureNum1Idx][featureNum2Idx][winNumIdx];
// square to be fabs
loss[stepIdx][featureNum1Idx][featureNum2Idx][winNumIdx] = Math.pow(loss[stepIdx][featureNum1Idx][featureNum2Idx][winNumIdx], 2);
}
}
}
}
return loss;
}
} |
// to the button controls specifically won't scale well.
public class ListInputPanel<T> extends AbstractInputPanel implements ActionListener, IFormStateValidator
{
private static final long serialVersionUID = 1L;
private int selectedIndex = 0;
private final DefaultListModel<T> dataListModel = new DefaultListModel<T>();
private final JList<T> existingDataList = new JList<T>(this.dataListModel);
private final JButton primaryButton = new JButton("New");
private final JButton secondaryButton = new JButton("Delete");
private final IListInputPanelDelegate<T> delegate;
private IListDataProvider<T> dataProvider;
private boolean primaryButtonRequiresSelection;
private boolean secondaryButtonRequiresSelection;
public ListInputPanel(IListDataProvider<T> dataProvider)
{
this(dataProvider, null, false, true);
}
public ListInputPanel(IListDataProvider<T> dataProvider, IListInputPanelDelegate<T> delegate)
{
this(dataProvider, delegate, false, true);
}
public ListInputPanel(IListDataProvider<T> dataProvider, IListInputPanelDelegate<T> delegate, boolean primaryRequiresSelection, boolean secondaryRequiresSelection)
{
this.dataProvider = dataProvider;
this.delegate = delegate;
if (this.delegate != null)
{
this.primaryButton.setText(this.delegate.getPrimaryButtonLabel());
this.primaryButton.setActionCommand(this.delegate.getPrimaryButtonAction());
this.secondaryButton.setText(this.delegate.getSecondaryButtonLabel());
this.secondaryButton.setActionCommand(this.delegate.getSecondaryButtonAction());
}
this.primaryButtonRequiresSelection = primaryRequiresSelection;
this.secondaryButtonRequiresSelection = secondaryRequiresSelection;
this.initializeComponent();
}
protected void initializeComponent()
{
if (!this.initialized)
{
GridBagLayout gridbag = new GridBagLayout();
this.setLayout(gridbag);
GridBagConstraints existingDataListConstraints = new GridBagConstraints();
existingDataListConstraints.anchor = GridBagConstraints.WEST;
existingDataListConstraints.fill = GridBagConstraints.BOTH;
existingDataListConstraints.gridx = 0;
existingDataListConstraints.gridy = 0;
existingDataListConstraints.weightx = 1.0;
existingDataListConstraints.weighty = 0.9;
existingDataListConstraints.gridheight = 1;
existingDataListConstraints.gridwidth = 2;
existingDataListConstraints.insets = new Insets(0, 0, 0, 0);
this.existingDataList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
this.existingDataList.setLayoutOrientation(JList.VERTICAL);
JScrollPane existingDataScrollPanel = new JScrollPane(this.existingDataList, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
this.add(existingDataScrollPanel, existingDataListConstraints);
GridBagConstraints primaryButtonConstraints = new GridBagConstraints();
primaryButtonConstraints.anchor = GridBagConstraints.NORTHEAST;
primaryButtonConstraints.fill = GridBagConstraints.BOTH;
primaryButtonConstraints.gridx = 0;
primaryButtonConstraints.gridy = 1;
primaryButtonConstraints.weightx = 1.0;
primaryButtonConstraints.weighty = 0.1;
primaryButtonConstraints.insets = new Insets(0, 0, 0, 0);
this.add(this.primaryButton, primaryButtonConstraints);
GridBagConstraints secondaryButtonConstraints = new GridBagConstraints();
secondaryButtonConstraints.anchor = GridBagConstraints.NORTHEAST;
secondaryButtonConstraints.fill = GridBagConstraints.BOTH;
secondaryButtonConstraints.gridx = 1;
secondaryButtonConstraints.gridy = 1;
secondaryButtonConstraints.weightx = 1.0;
secondaryButtonConstraints.weighty = 0.1;
secondaryButtonConstraints.insets = new Insets(0, 0, 0, 0);
this.add(this.secondaryButton, secondaryButtonConstraints);
this.primaryButton.addActionListener(this);
this.secondaryButton.addActionListener(this);
			if (this.primaryButtonRequiresSelection)
			{
				this.primaryButton.setEnabled(false);
			}
			if (this.secondaryButtonRequiresSelection)
			{
				this.secondaryButton.setEnabled(false);
			}
if (this.delegate == null)
{
this.primaryButton.setEnabled(false);
this.secondaryButton.setEnabled(false);
}
MouseListener mouseListener = new MouseAdapter()
{
public void mouseClicked(MouseEvent e)
{
if (e.getClickCount() == 2)
{
T selectedItem = existingDataList.getSelectedValue();
if (delegate != null)
{
delegate.handlePrimaryButtonClick(selectedItem);
}
}
}
};
this.existingDataList.addMouseListener(mouseListener);
this.existingDataList.addListSelectionListener(new ListSelectionListener()
{
public void valueChanged(ListSelectionEvent event)
{
if (!event.getValueIsAdjusting())
{
if (delegate != null)
{
@SuppressWarnings("unchecked")
JList<T> source = (JList<T>) event.getSource();
selectedIndex = source.getSelectedIndex();
							boolean hasSelection = selectedIndex >= 0;
							if (primaryButtonRequiresSelection)
							{
								primaryButton.setEnabled(hasSelection);
							}
							if (secondaryButtonRequiresSelection)
							{
								secondaryButton.setEnabled(hasSelection);
							}
							delegate.listValueChanged(hasSelection);
}
}
}
});
ListDataListener listDataListener = new ListDataListener()
{
public void contentsChanged(ListDataEvent listDataEvent)
{
applySelection(listDataEvent);
}
public void intervalAdded(ListDataEvent listDataEvent)
{
applySelection(listDataEvent);
}
public void intervalRemoved(ListDataEvent listDataEvent)
{
applySelection(listDataEvent);
}
private void applySelection(ListDataEvent listDataEvent)
{
existingDataList.setSelectedIndex(selectedIndex);
}
};
this.dataListModel.addListDataListener(listDataListener);
this.loadData();
this.initialized = true;
}
}
public void actionPerformed(ActionEvent e)
{
String primaryAction = this.primaryButton.getActionCommand();
if (primaryAction != null)
{
if (primaryAction.equals(e.getActionCommand()))
{
if (this.delegate != null)
{
					if (this.primaryButtonRequiresSelection)
{
this.delegate.handlePrimaryButtonClick(existingDataList.getSelectedValue());
}
else
{
this.delegate.handlePrimaryButtonClick();
}
}
}
}
String secondaryAction = this.secondaryButton.getActionCommand();
if (secondaryAction != null)
{
if (secondaryAction.equals(e.getActionCommand()))
{
if (this.delegate != null)
{
					if (this.secondaryButtonRequiresSelection)
{
this.delegate.handleSecondaryButtonClick(existingDataList.getSelectedValue());
}
else
{
this.delegate.handleSecondaryButtonClick();
}
}
}
}
}
@SuppressWarnings("unchecked")
public void loadData()
{
int selectedIndex = existingDataList.getSelectedIndex();
// NOTE: Swapping the list model out to suppress change
// notifications during a data reload feels less than ideal.
this.existingDataList.setModel(new DefaultListModel<T>());
Collection<T> list = null;
this.dataListModel.removeAllElements();
if (this.dataProvider != null)
{
if (this.dataProvider instanceof IInitializableEntity)
{
// NOTE: Leaking model initialization like this into the UI is less than ideal.
this.dataProvider = (IListDataProvider<T>) ModelUtils.initializeEntity((IInitializableEntity)this.dataProvider, ((IInitializableEntity)this.dataProvider).getInitializationPropertyName());
}
list = this.dataProvider.getListData();
}
		if (list != null)
		{
			for (T item : list)
			{
				if (!this.dataListModel.contains(item))
				{
					if (selectedIndex < 0)
					{
						selectedIndex = 0;
					}
					this.dataListModel.addElement(item);
				}
			}
			this.existingDataList.setModel(this.dataListModel);
			if ((selectedIndex >= 0) && (this.dataListModel.getSize() > selectedIndex))
			{
				this.existingDataList.setSelectedIndex(selectedIndex);
			}
		}
}
@Override
public boolean validateFormState()
{
boolean result = true;
if (this.delegate != null)
{
if (this.delegate.listRequiresRecord())
{
if (this.dataListModel.isEmpty())
{
this.existingDataList.setBackground(this.controlInErrorBackgroundColor);
result = false;
}
else
{
this.existingDataList.setBackground(this.defaultControlBackgroundColor);
}
}
}
return result;
}
public void setActionButtonStates(boolean state)
{
this.primaryButton.setEnabled(state);
this.secondaryButton.setEnabled(state);
}
public int getSelectedIndex ()
{
return this.existingDataList.getSelectedIndex();
}
public T getSelectedValue ()
{
if (!this.dataListModel.isEmpty())
{
return this.existingDataList.getSelectedValue();
}
return null;
}
@Override
public void setEnabled(boolean enabled)
{
this.primaryButton.setEnabled(enabled);
this.secondaryButton.setEnabled(enabled);
this.existingDataList.setEnabled(enabled);
this.existingDataList.setSelectedIndex(-1);
this.existingDataList.setSelectedIndex(this.selectedIndex);
}
} |
import heapq

N, K = map(int, input().split())
V = list(map(int, input().split()))

turn = min(N, K)  # we can take at most this many gems
m = 0
# A = gems taken from the left end, B = gems taken from the right end
for A in range(turn + 1):
    for B in range(turn + 1 - A):
        h = V[:A] + V[len(V) - B:]  # V[len(V):] is empty, so B == 0 needs no special case
        heapq.heapify(h)
        ans = sum(h)
        # Spend the remaining K - A - B operations putting back the most
        # negative gems (heappop yields the smallest values first).
        for _ in range(K - A - B):
            if h:
                f = heapq.heappop(h)
                if f < 0:
                    ans -= f
        m = max(ans, m)
print(m) |
//Even though this class is effectively static, it needs to be a service so that reset() gets fired at the appropriate times.
@Service
@Singleton
public class ModelGet implements StaticIsaacCache {
private static final Logger LOG = LogManager.getLogger();
static IdentifierService identifierService;
static TaxonomyDebugService taxonomyDebugService;
static DataStore dataStore;
static SequenceStore sequenceStore;
static ChronologyService chronologyService;
private ModelGet() {
//For HK2
}
public static ChronologyService chronologyService() {
if (chronologyService == null) {
chronologyService = Get.service(ChronologyService.class);
}
return chronologyService;
}
public static IdentifierService identifierService() {
if (identifierService == null) {
identifierService = Get.service(IdentifierService.class);
}
return identifierService;
}
public static TaxonomyDebugService taxonomyDebugService() {
if (taxonomyDebugService == null) {
taxonomyDebugService = Get.service(TaxonomyDebugService.class);
}
return taxonomyDebugService;
}
public static DataStore dataStore() {
if (dataStore == null) {
dataStore = Get.service(DataStore.class);
if (dataStore.implementsSequenceStore()) {
sequenceStore = (SequenceStore)dataStore;
}
}
return dataStore;
}
/**
* Note, this may return null, as sequenceStore is only optionally implemented by some (not all) implementations of dataStore.
* @return The sequenceStore, if the underlying datastore supports the sequenceStore methods.
*/
public static SequenceStore sequenceStore() {
if (dataStore == null) {
dataStore(); //This populates sequenceStore, if possible
}
return sequenceStore;
}
@Override
public void reset() {
LOG.debug("ModelGet Cache clear");
chronologyService = null;
identifierService = null;
taxonomyDebugService = null;
dataStore = null;
sequenceStore = null;
}
} |
/*!
 * This module contains utilities used by the HttpProtocol test suite.
*/
// Utilities:
// We store constants that should be defined by the HttpProtocol class:
export const HTTP: string = 'http';
export const HTTPS: string = 'https'; |
// Parse unmarshals JSON into a Task.
func Parse(jsonData []byte) (Task, error) {
var baseMsg BaseTask
err := json.Unmarshal(jsonData, &baseMsg)
if err != nil {
return nil, errors.Wrapf(err, "unmarshaling BaseTask")
}
var task Task
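	// Dispatch on the declared type to choose the concrete task struct,
	// then unmarshal the full payload into it below.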
switch baseMsg.Type {
case TypeDownloadAsset:
task = &DownloadAsset{}
case TypeKeyValue:
task = &KeyValue{}
case TypeSetGroup:
task = &SetGroup{}
case TypeDeleteTestData:
task = &DeleteTestData{}
case TypeInstallTestRun:
task = &InstallTestRun{}
case TypeExecuteTestRun:
task = &ExecuteTestRun{}
default:
return nil, errors.Errorf("unexpected type value for received task: %s", baseMsg.Type)
}
err = json.Unmarshal(jsonData, task)
if err != nil {
return nil, errors.Wrapf(err, "unmarshaling %T", task)
}
return task, nil
} |
// probeResources will look for orphaned resources and returns
// those resources which could not be deleted in the order
// orphanedInstances, orphanedVolumes, orphanedMachines, orphanedNICs
func (r *ResourcesTrackerImpl) probeResources() ([]string, []string, []string, []string, error) {
clients, err := getAzureClients(r.SecretData)
if err != nil {
return nil, nil, nil, nil, err
}
VMs, err := getOrphanedVMs(clients, r.ResourceGroup, r.SecretData)
if err != nil {
return VMs, nil, nil, nil, err
}
availVols, err := getOrphanedDisks(clients, r.ResourceGroup)
if err != nil {
return VMs, availVols, nil, nil, err
}
availMachines, err := getMachines(r.MachineClass, r.SecretData)
if err != nil {
return VMs, availVols, availMachines, nil, err
}
availNICs, err := getOrphanedNICs(clients, r.ResourceGroup)
return VMs, availVols, availMachines, availNICs, err
} |
Linker‐insertion mutagenesis of Pseudomonas aeruginosa outer membrane protein OprF
The oprF gene, expressing Pseudomonas aeruginosa major outer membrane protein OprF, was subjected to semi‐random linker mutagenesis by insertion of a 1.3 kb HincII kanamycin‐resistance fragment from plasmid pUC4KAPA into multiple blunt‐ended restriction sites in the oprF gene. The kanamycin‐resistance gene was then removed by PstI digestion, which left a 12 nucleotide pair linker residue. Nine unique clones were identified that contained such linkers at different locations within the oprF gene and were permissive for the production of full‐length OprF variants. In addition, one permissive site‐directed insertion, one non‐permissive insertion and one carboxy‐terminal insertion leading to proteolytic truncation were also identified. These mutants were characterized by DNA sequencing and reactivity of the OprF variants with a bank of 10 OprF‐specific monoclonal antibodies. Permissive clones produced OprF variants that were shown to be reactive with the majority of these monoclonal antibodies, except where the insertion was suspected of interrupting the epitope for the specific monoclonal antibody. In addition, these variants were shown to be 2‐mercaptoethanol modifiable, to be resistant to trypsin cleavage in intact cells and partly cleaved to a high‐molecular‐weight core fragment in outer membranes and, where studied, to be accessible to indirect immunofluorescence labelling in intact cells by monoclonal antibodies specific for surface epitopes. Based on these data, a revised structural model for OprF is proposed. |
Interparticle contact forces in fine cohesive powders. Theory and experiments
The flow of fine powders is strongly influenced by interparticle contact forces. Depending on the interparticle load force the contact behavior can be elastic, fully plastic or elasto‐plastic. We propose a model for the elasto‐plastic loading regime that yields a nonlinear dependence of the interparticle adhesion force on the interparticle load force. Interparticle forces are also obtained experimentally. Theoretical results are in reasonable agreement with experimental data. |
import events.OnStartGame;
import events.StartGameEventListeners;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import scenes.game.GameScene;
import scenes.start.StartScene;
import services.Container;
import services.ExceptionsHandler;
public class Main extends Application implements OnStartGame {
Stage stage;
public static void main(String[] args) {
launch(args);
}
@Override
public void start(Stage stage) throws Exception {
this.stage = stage;
Scene scene = StartScene.create();
Container.getInstance().setStage(stage);
stage.setScene(scene);
stage.setTitle("Company Simulator");
StartGameEventListeners.addToList(this);
stage.show();
Thread.currentThread().setUncaughtExceptionHandler((thread, throwable) -> {
ExceptionsHandler.handle(throwable);
});
stage.setOnCloseRequest(e -> {
Platform.exit();
System.exit(0);
});
}
@Override
public void onStartGame() {
stage.setScene(GameScene.create());
stage.setMaximized(true);
}
}
|
#ifndef LUDIS86_H
#define LUDIS86_H
/*
* libudis86 Lua bindings
*/
//extern "C" {
#include "lua.h"
#include "lauxlib.h"
#include "lualib.h"
//}
#ifndef LUA_API
#ifdef WIN32
#define LUA_API __cdecl __declspec(dllexport)
#else
#define LUA_API __cdecl
#endif
#endif
//extern "C" {
int LUA_API luaopen_ludis86_C(lua_State *L);
//}
#endif /* LUDIS86_H */
|
/** This class represents the location of a token. */
class Location
{
public:
Position begin;
Position end;
	/** Reset the location: the new begin is the previous end. */
	void step ()
	{
		begin = end;
	}

	/** Extend the location by the given number of columns. */
	Location & operator+= (size_t const offset)
	{
		end.column += offset;
		return *this;
	}

	/** Advance the end position to the next line. */
	void lines ()
	{
		end.line++;
	}
}; |
Synthesis and Dielectric Investigations of New Swallow-Tailed Monomers and Polymers
Abstract A new class of liquid crystalline side chain polymers using swallow-tailed mesogens was synthesized. Comparative dielectric investigations on these polymers and on the corresponding low molecular weight substances in the frequency range from 10 Hz to 100 kHz should give information about the dynamic behaviour. The results obtained from the monomers agree well with those of other swallow-tailed liquid crystals and can be explained by an antiparallel orientation of the molecules in the S A phase. There are hints to a tendency to an antiparallel orientation of the side chains in the polymers, too. |
// https://github.com/google/sanitizers/issues/925
// RUN: %clang_asan -O0 %s -o %t && %run %t 2>&1
// REQUIRES: aarch64-target-arch || x86_64-target-arch || i386-target-arch || arm-target-arch || riscv64-target-arch
#include <assert.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <stdio.h>
#include <sanitizer/asan_interface.h>
__attribute__((noinline, no_sanitize("address"))) void child() {
alignas(8) char x[100000];
__asan_poison_memory_region(x, sizeof(x));
_exit(0);
}
__attribute__((noinline, no_sanitize("address"))) void parent() {
alignas(8) char x[100000];
assert(__asan_address_is_poisoned(x + 5000) == 0);
}
int main(int argc, char **argv) {
if (vfork())
parent();
else
child();
return 0;
}
|
import { MikroOrmModule } from '@mikro-orm/nestjs';
import { Module } from '@nestjs/common';
import { CaslAbilityFactory } from '../../shared/modules/casl/casl-ability.factory';
import { FileUpload } from './file-upload.entity';
import { FileUploadsController } from './file-uploads.controller';
import { FileUploadsService } from './file-uploads.service';
@Module({
imports: [
MikroOrmModule.forFeature([FileUpload]),
],
controllers: [FileUploadsController],
providers: [CaslAbilityFactory, FileUploadsService],
exports: [FileUploadsService],
})
export class FileUploadsModule {}
|
def ResultReader(filename):
    warnings.warn('This function will be deprecated in 0.36.0\n' +
                  'Please use "open_result" instead.')
    return open_result(filename) |
Effect of vacancies on the electronic structure of PdxTiSn alloys
We have studied the influence of vacancies on the electronic structure of PdxTiSn (1⩽x⩽2) Heusler-type alloys by the ab initio tight binding linearized muffin-tin orbital (TB LMTO) method. The LMTO calculations have shown that PdTiSn can be classified as a narrow gap semiconductor with the gap equal to 0.4 eV. The gap vanishes for x>1.125. The calculations have shown that the local chemical disorder in PdTiSn depresses the gap. The lattice parameter obtained from the minimum of the total energy is constant for 1.5⩽x⩽2 and for x<1.5 its value decreases. |
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/litert/kernel/cpu/int8/softmax_int8.h"
#include <limits>
#include "nnacl/int8/softmax_int8.h"
#include "schema/model_generated.h"
#include "include/errorcode.h"
#include "src/litert/kernel_registry.h"
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
using mindspore::lite::KernelRegistrar;
using mindspore::lite::RET_NULL_PTR;
using mindspore::schema::PrimitiveType_Softmax;
namespace mindspore::kernel {
SoftmaxInt8CPUKernel::~SoftmaxInt8CPUKernel() {
if (quant_param_ != nullptr) {
free(quant_param_);
quant_param_ = nullptr;
}
}
int SoftmaxInt8CPUKernel::Prepare() {
if (softmax_param_ == nullptr) {
MS_LOG(ERROR) << "SoftmaxParameter nullptr";
return RET_NULL_PTR;
}
if (in_tensors_[0]->data_type() != mindspore::kNumberTypeInt8 ||
out_tensors_[0]->data_type() != mindspore::kNumberTypeInt8) {
MS_LOG(ERROR) << "Datatype error, input0 data_type is " << in_tensors_[0]->data_type() << ", output data_type is "
<< out_tensors_[0]->data_type();
return RET_ERROR;
}
quant_param_ = reinterpret_cast<SoftmaxQuantArg *>(malloc(sizeof(SoftmaxQuantArg)));
if (quant_param_ == nullptr) {
MS_LOG(ERROR) << "Malloc SoftmaxQuantArg for Softmax int8 op failed!";
return RET_ERROR;
}
auto *input_tensor = in_tensors_.at(kInputIndex);
MS_ASSERT(input_tensor != nullptr);
auto in_quant_args = input_tensor->quant_params();
CHECK_LESS_RETURN(in_quant_args.size(), 1);
quant_param_->in_quant_args_.scale_ = static_cast<float>(in_quant_args.front().scale);
quant_param_->in_quant_args_.zp_ = -in_quant_args.front().zeroPoint;
auto *out_tensor = out_tensors_.at(kOutputIndex);
MS_ASSERT(out_tensor != nullptr);
auto out_quant_args = out_tensor->quant_params();
CHECK_LESS_RETURN(out_quant_args.size(), 1);
quant_param_->out_quant_arg_.scale_ = static_cast<float>(out_quant_args.front().scale);
quant_param_->out_quant_arg_.zp_ = -out_quant_args.front().zeroPoint;
quant_param_->output_activation_min_ = std::numeric_limits<int8_t>::min();
quant_param_->output_activation_max_ = std::numeric_limits<int8_t>::max();
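  // Convert the floating-point input scale into a fixed-point multiplier and
  // shift pair, as required by the integer softmax kernel.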
const double input_real_multiplier =
MSMIN(quant_param_->in_quant_args_.scale_ * (1 << (unsigned int)(31 - 5)), (1LL << 31) - 1.0);
int right_shift = 0;
QuantizeMultiplierSmallerThanOne(input_real_multiplier, &quant_param_->output_multiplier_, &right_shift);
quant_param_->shift_left_ = right_shift < 0 ? -right_shift : 0;
quant_param_->shift_right_ = right_shift > 0 ? right_shift : 0;
if (!InferShapeDone()) {
return RET_OK;
}
return ReSize();
}
int SoftmaxInt8CPUKernel::ReSize() {
auto input_tensor = in_tensors_.front();
CHECK_NULL_RETURN(input_tensor);
auto in_shape = input_tensor->shape();
auto in_dims = in_shape.size();
int ele_size = 1;
n_dim_ = static_cast<int>(in_dims);
if (softmax_param_->axis_ == -1) {
softmax_param_->axis_ += static_cast<int>(in_dims);
}
for (size_t i = 0; i < in_dims; i++) {
input_shape_[i] = in_shape.at(i);
ele_size *= in_shape.at(i);
}
element_size_ = ele_size;
return RET_OK;
}
int SoftmaxInt8CPUKernel::DoSoftmax(int task_id) {
MS_ASSERT(in_tensors_.size() == 1);
MS_ASSERT(out_tensors_.size() == 1);
auto input_ptr = reinterpret_cast<int8_t *>(in_tensors_.at(0)->MutableData());
MS_ASSERT(input_ptr);
auto output_ptr = reinterpret_cast<int8_t *>(out_tensors_.at(0)->MutableData());
MS_ASSERT(output_ptr);
int outter_size = 1;
int inner_size = 1;
for (int i = 0; i < softmax_param_->axis_; i++) {
outter_size *= input_shape_[i];
}
for (int i = softmax_param_->axis_; i < n_dim_; i++) {
inner_size *= input_shape_[i];
}
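  // Partition the outer dimension across threads: each task handles up to
  // `stride` outer rows starting at stride * task_id.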
int stride = UP_DIV(outter_size, softmax_param_->op_parameter_.thread_num_);
if (INT_MUL_OVERFLOW(task_id, stride)) {
MS_LOG(ERROR) << "int mul overflow.";
return RET_ERROR;
}
int count = MSMIN(stride, outter_size - stride * task_id);
int stride_size = stride * task_id * inner_size;
auto error_code = SoftmaxInt8(input_ptr + stride_size, output_ptr + stride_size, count, exp_data_ + stride_size,
sum_data_, input_shape_, n_dim_, softmax_param_->axis_, quant_param_);
if (error_code != RET_OK) {
MS_LOG(ERROR) << "DoSoftmax error task_id[" << task_id << "] error_code[" << error_code << "]";
return RET_ERROR;
}
return RET_OK;
}
int SoftmaxRun(void *cdata, int task_id, float, float) {
CHECK_NULL_RETURN(cdata);
auto softmax_kernel = reinterpret_cast<SoftmaxInt8CPUKernel *>(cdata);
auto error_code = softmax_kernel->DoSoftmax(task_id);
if (error_code != RET_OK) {
MS_LOG(ERROR) << "SoftmaxRun error task_id[" << task_id << "] error_code[" << error_code << "]";
return RET_ERROR;
}
return RET_OK;
}
int SoftmaxInt8CPUKernel::Run() {
CHECK_LESS_RETURN(MAX_MALLOC_SIZE, element_size_ * sizeof(int));
exp_data_ = reinterpret_cast<int *>(ms_context_->allocator->Malloc(element_size_ * sizeof(int)));
int inner_size = 1;
for (int i = softmax_param_->axis_ + 1; i < n_dim_; i++) {
if (INT_MUL_OVERFLOW(inner_size, input_shape_[i])) {
MS_LOG(ERROR) << "int mul overflow.";
return RET_ERROR;
}
inner_size *= input_shape_[i];
}
sum_data_ = reinterpret_cast<int *>(ms_context_->allocator->Malloc(inner_size * sizeof(int)));
if (exp_data_ == nullptr || sum_data_ == nullptr) {
MS_LOG(ERROR) << "Memory allocation failed";
ms_context_->allocator->Free(exp_data_);
ms_context_->allocator->Free(sum_data_);
return RET_ERROR;
}
auto ret = ParallelLaunch(this->ms_context_, SoftmaxRun, this, softmax_param_->op_parameter_.thread_num_);
ms_context_->allocator->Free(exp_data_);
ms_context_->allocator->Free(sum_data_);
if (ret != RET_OK) {
MS_LOG(ERROR) << "Softmax function error error_code[" << ret << "]";
}
return ret;
}
REG_KERNEL(kCPU, kNumberTypeInt8, PrimitiveType_Softmax, LiteKernelCreator<SoftmaxInt8CPUKernel>)
} // namespace mindspore::kernel
|
/**
* Translate this bounding box in a specific direction
*
* @param direction
* The direction to move this BB
* @return A new BB representing this BB translated by direction
*/
	public BoundingBox translate(BlockVector direction)
	{
		// TODO: translation is not implemented yet; despite the Javadoc, the
		// box is currently returned unchanged.
		return this;
	} |
In an interview with Rolling Stone, Ryan Adams looked back on the recording sessions for Heartbreaker, his debut solo album, saying, “I felt at the time that I needed to say goodbye to my career.” Having retreated from New York to Nashville, and trying to salvage something from the dissolution of his band, alt.country heroes Whiskeytown, Adams recalled, “I was fully humbled and prepared to sort of go, ‘OK, I had my shot and it was over.’”
Which is what makes Heartbreaker, released on 5 September 2000, all the more astonishing. Not only did its perfectly realised 14 songs establish Adams as a fully formed songwriter at the very beginning of what would become an enviable, ever-shifting career, but, as the 20 bonus tracks included on the deluxe edition reissue (released in 4LP+DVD and 2CD+DVD editions) reveal, Adams was, knowingly or otherwise, sowing the seeds for everything he would explore through the 14 studio albums – and counting – that he would subsequently release.
Stylistically, that’s an astonishingly wide range – after all, Adams has recorded everything from barroom country to bratty punk, and even a “sci-fi metal concept album”. Regardless, the demos and outtakes on show point to all these things – and more.
In his sleevenotes to the reissue, producer Ethan Johns alludes to Adams’ unstoppable work rate, noting that “two hours with a typewriter and a guitar produced a list of 26 songs” for the album. The outtakes disc Johns compiled is “designed to give the listener the experience of what it may have been like to be sitting in the [recording] room as we rolled through the days and nights”.
Johns likens his role as producer to trying “to find the beginning, the middle and the end” of the record. As the below selection of Heartbreaker outtakes show, they captured a number of songs that point to the many new beginnings in Adams’ career:
‘Hairdresser On Fire Jam’
“Eth’s got a mouthful of cookies!” Adams teases during ‘(Argument With Dave Rawlings Concerning Morrissey)’, the spoken-word intro to Heartbreaker. Yet he’s embroiled in a very serious discussion concerning an unnamed Moz song: did it appear on both Bona Drag and Viva Hate? Adams bets five dollars it did.
Fans have long wondered what song caused such hot contest. Solving the mystery, the Heartbreaker bonus disc opens with ‘Hairdresser On Fire Jam’, a loosey goosey take on the UK B-side to Morrissey’s ‘Suedehead’ single, but a song that was included as a bonus track on the US edition of his solo debut, Viva Hate. With Adams pulling out his best Morrissey impression, it shows the goofy, light-hearted side he often reveals during gigs and also makes clear Adams’ life-long devotion to The Smiths. On later outings, such as Love Is Hell and his 2015 Pax-Am single ‘Blue Light’, Adams would up the reverb to explore unmistakably Smiths-y guitar textures.
‘Petal In A Rainstorm’
It’s unsurprising that an artist as prolific as Adams has, at various points in his career, reached into a bag of unreleased material to rescue old gems from obscurity. Fans who pre-ordered the deluxe edition of his 2011 album, Ashes And Fire, received a flexi disc containing ‘Petal In A Rainstorm’, a song that dates back to the Heartbreaker sessions. That intimate version was given the subtle, warm production that producer Glyn Johns brought to Ashes And Fire. However, the Heartbreaker demos harbour an early, anguished solo arrangement, captured before Adams toyed with giving it the full-band treatment. “Play for your life – let’s just beat the s__t out of this thing,” he encourages on a take that’s less raw than the demo, but charges like a runaway train.
‘When The Rope Gets Tight’
One of three albums that Adams released in 2005, Jacksonville City Nights closes with a gloriously ragged ‘Don’t Fail Me Now’, replete with Rolling Thunder violin and barroom piano. Yet this was another song that dated back to 2000. Initially masquerading under the name ‘When The Rope Gets Tight’, Adams attempted to capture it in two different arrangements during the Heartbreaker sessions: one particularly spirited uptempo take, and a more downbeat, doomy version, both with Gillian Welch contributing vocals.
‘War Horse’
Captured in both solo and lightly embellished arrangements, ‘War Horse’ fully suits the Heartbreaker vibe, but, in being one of the more straight-up country outtakes from the sessions, points towards Adams’ fully fledged embrace of country music on 2005’s Jacksonville City Nights.
‘Come Pick Me Up’
A timeless classic that remains one of Adams’ most definitive songs, ‘Come Pick Me Up’ is so firmly set in the minds of fans that it’s hard to imagine it ever existing in any other form. An early demo, however, is so heavily echoed as to bring to mind Adams’ later reverb-happy recordings. Captured in the studio a short time later, Adams and co run through an ostensibly faithful rendition – until around the 4.30 mark, when they break into a heavy metal outro (by way of snare and acoustic guitar) on which Adams fully unleashes his inner fanboy. A long-term metal aficionado, Adams would self-release Orion in 2010, a sci-fi metal album that further confirmed his love of bands such as Voivod and Metallica.
‘Punk Jam’
One of the few times Adams cranked up the electric guitars during the Heartbreaker sessions, this 50-second outburst prefigured the material he would record in 2002, under the short-lived The Finger banner with Jesse Malin, and, later, on his Replacements-indebted 1984 EP, on which he stormed through 10 songs in just over as many minutes.
‘Bartering Lines’
Already one of the more offbeat moments on Heartbreaker, the gloomy finished version is, in demo form, an even stranger beast, thanks to the whirring keyboard effect that haunts the song. Adams’ series of low-key Pax-Am singles released across 2014-15 would pick up on this experimental thread, as he took the opportunity to make a variety of stylistic switches from each 7” to the next – an anything-goes leaning that’s helped make Adams one of the most exciting, and consistently surprising, artists of his generation.
‘Locked Away’
The final track included on the outtakes disc, ‘Locked Away’ is a little gem. Once again embracing a Smiths-y guitar jangle, it’s also a reminder that, throughout the many stylistic shifts in Adams’ career, he remains, first and foremost, a stunning songwriter. Deceptively simple, ‘Locked Away’ boasts an earworm melody that, in an alternative universe, would have confidently helped the song become a pop hit.
The Heartbreaker: Deluxe Edition box set can be bought here. |
// --------------------------------------------------------------------------
//
// CMenuItem::accLocation()
//
// Sometimes we are asked for the location of a peer object. This is
// kinda screwy. This happens when we are asked to navigate next or prev,
// and then let our parent navigate for us. The caller then starts thinking
// we know about our peers.
// Since this is the only case where something like this happens, we'll
// have to do some sort of hack.
// Problem is, when they ask for a child 0 (self) we are OK.
// But when we are asked for child 1, is it the popup or peer 1?
// I am going to assume that it is always the peer.
//
// --------------------------------------------------------------------------
STDMETHODIMP CMenuItem::accLocation(long* pxLeft, long* pyTop,
long* pcxWidth, long* pcyHeight, VARIANT varChild)
{
ValidateChild (&varChild);
if (varChild.lVal == CHILDID_SELF)
varChild.lVal = m_ItemID;
return(m_paccParent->accLocation(pxLeft, pyTop, pcxWidth, pcyHeight, varChild));
} |
#pragma once
#include <memory>
#include <string>
// Note: the unqualified string / unique_ptr names below assume project-wide
// using declarations (e.g. using std::string; using std::unique_ptr;).

class RandomStringMaker;
class BinaryFileBytesMaker
{
friend class BinaryFileBytesMakerTests;
private:
// Constant Components
unique_ptr<const RandomStringMaker> _randomStringMaker;
public:
BinaryFileBytesMaker();
virtual ~BinaryFileBytesMaker();
virtual string MakeBytesString(size_t numberOfBytesPerFile, bool randomBytes) const;
};
|
/**
* Google Cloud Storage Service Account App Engine sample.
*
* @author Marc Cohen
*/
public class StorageSample extends HttpServlet {
/** HTTP status code for a resource that wasn't found. */
private static final int HTTP_NOT_FOUND = 404;
/** HTTP status code for a resource that was found. */
private static final int HTTP_OK = 200;
/** The base endpoint for Google Cloud Storage api calls. */
private static final String GCS_URI = "http://commondatastorage.googleapis.com";
/** Global configuration of Google Cloud Storage OAuth 2.0 scope. */
private static final String STORAGE_SCOPE =
"https://www.googleapis.com/auth/devstorage.read_write";
/** Global instance of the HTTP transport. */
private static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();
/** Global instance of HTML reference to XSL style sheet. */
private static final String XSL =
"\n<?xml-stylesheet href=\"/xsl/listing.xsl\" type=\"text/xsl\"?>\n";
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
throws IOException {
try {
AppIdentityCredential credential = new AppIdentityCredential(Arrays.asList(STORAGE_SCOPE));
// Set up and execute Google Cloud Storage request.
String bucketName = req.getRequestURI();
if (bucketName.equals("/")) {
resp.sendError(
HTTP_NOT_FOUND, "No bucket specified - append /bucket-name to the URL and retry.");
return;
}
// Remove any trailing slashes, if found.
// [START snippet]
String cleanBucketName = bucketName.replaceAll("/$", "");
String uri = GCS_URI + cleanBucketName;
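      // e.g. a request for "/my-bucket/" yields "http://commondatastorage.googleapis.com/my-bucket"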
HttpRequestFactory requestFactory = HTTP_TRANSPORT.createRequestFactory(credential);
GenericUrl url = new GenericUrl(uri);
HttpRequest request = requestFactory.buildGetRequest(url);
HttpResponse response = request.execute();
String content = response.parseAsString();
// [END snippet]
// Display the output XML.
resp.setContentType("text/xml");
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(resp.getOutputStream()));
String formattedContent = content.replaceAll("(<ListBucketResult)", XSL + "$1");
writer.append(formattedContent);
writer.flush();
resp.setStatus(HTTP_OK);
} catch (Throwable e) {
resp.sendError(HTTP_NOT_FOUND, e.getMessage());
}
}
} |
void node_insert_at_head(node *n) {
if (head) {
head->prev = n;
n->next = head;
}
head = n;
}
void node_insert_at_tail(node *n) {
if (head) {
node *tmp = head;
while (tmp->next) tmp = tmp->next;
tmp->next = n;
n->prev = tmp;
} else {
head = n;
}
}
void node_remove(node *n) {
if (n == head) head = n->next;
if (n->next) n->next->prev = n->prev;
if (n->prev) n->prev->next = n->next;
}
// Swap only the window payloads; the list links stay in place.
void node_swap(node *a, node *b) {
	xcb_window_t tmp;
	tmp = a->window;
	a->window = b->window;
	b->window = tmp;
}
void create_window(xcb_window_t window) {
	node *w = (node*)calloc(1, sizeof(node));
	w->window = window;
	w->next = NULL;
	w->prev = NULL;
	// NOTE: the node is not linked into the list here; only `current` is updated.
	//node_insert_at_tail(w);
	current = w;
}
void destroy_window(xcb_window_t window) {
node *w;
int8_t i;
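	// Search every tag's window list for the node that wraps this window.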
for (i = TAGS; i; --i) {
w = tagset[i - 1]->head;
while (w) {
if (w->window == window)
goto rest;
w = w->next;
}
}
if (!w) return;
rest:
node_remove(w);
if (w == current) {
if (w->next)
current = w->next;
else
current = w->prev;
}
if (w == tagset[i - 1]->head)
tagset[i - 1]->head = NULL;
	if (w == tagset[i - 1]->current)
		tagset[i - 1]->current = NULL;
free(w);
}