code
stringlengths 5
1.04M
| repo_name
stringlengths 7
108
| path
stringlengths 6
299
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1.04M
|
---|---|---|---|---|---|
/*****************************************************************************
*
* HOPERUN PROPRIETARY INFORMATION
*
* The information contained herein is proprietary to HopeRun
* and shall not be reproduced or disclosed in whole or in part
* or used for any design or manufacture
* without direct written authorization from HopeRun.
*
* Copyright (c) 2013 by HopeRun. All rights reserved.
*
*****************************************************************************/
package com.hoperun.feiying.framework.task.callback;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import com.hoperun.feiying.framework.enums.ECommandType;
import com.hoperun.feiying.framework.task.JoyplusScheduleService;
import com.hoperun.feiying.framework.task.MailSenderService;
import com.hoperun.feiying.framework.task.ScheduleStatusManager;
import com.hoperun.feiying.framework.task.TimerService;
import com.hoperun.feiying.joyplus.service.entity.ResourceType;
import com.hoperun.feiying.joyplus.service.service.ProgramService;
import com.hoperun.feiying.joyplus.service.service.ResourceTypeService;
import com.hoperun.feiying.joyplus.service.service.SettingService;
import com.hoperun.framework.core.spring.ApplicationContextHolder;
import com.hoperun.framework.task.ERemoteScheduleResult;
import com.hoperun.framework.task.IRemoteSchedule;
import com.hoperun.framework.task.IRemoteScheduleCompleteCallback;
import com.hoperun.framework.task.IRemoteScheduleTimeoutCallback;
import com.hoperun.framework.task.IRemoteTaskTimeoutCallback;
import com.hoperun.framework.task.RemoteScheduleManager;
import com.hoperun.framework.task.RemoteTaskDefine;
import com.hoperun.framework.task.utils.LogUtils;
import com.hoperun.framework.task.utils.ScheduleVOLogFormatter;
/**
* ClassName:RemoteTimerScheduleCompleteCallback
*
* @description
* @author zhu_yingjie
* @Date2014-2-17
*
*/
/**
 * Schedule-completion callback that drives a crawl/refresh pipeline. Each time one
 * remote schedule finishes, this callback records the finished schedule's status and
 * then starts the next schedule in a hard-coded chain:
 *
 *   Movie list -> Teleplay list -> Variety list -> Cartoon list
 *   -> ProgramView (Movie, Teleplay, Variety, Cartoon)
 *   -> ProgramRelatedVideos (Movie, Teleplay, Variety, Cartoon)
 *   -> ProgramReviews (Movie, Teleplay, Variety, Cartoon)
 *   -> SubType (one schedule per entry of {@link #subTypeList}, by index)
 *   -> ProgramHotKeyword, after which a summary mail is sent.
 *
 * @author zhu_yingjie
 */
public class RemoteTimerScheduleCompleteCallback implements IRemoteScheduleCompleteCallback {

    private static final Logger log = Logger.getLogger(RemoteTimerScheduleCompleteCallback.class);

    // Collaborators are resolved eagerly from the Spring context by bean name.
    private JoyplusScheduleService joyplusScheduleService = (JoyplusScheduleService) ApplicationContextHolder
            .getApplicationContext().getBean("joyplusScheduleService");

    private SettingService settingService = (SettingService) ApplicationContextHolder.getApplicationContext().getBean(
            "settingServiceImpl");

    private ProgramService programService = (ProgramService) ApplicationContextHolder.getApplicationContext().getBean(
            "programServiceImpl");

    private ResourceTypeService resourceTypeService = (ResourceTypeService) ApplicationContextHolder
            .getApplicationContext().getBean("resourceTypeServiceImpl");

    private MailSenderService mailSenderService = (MailSenderService) ApplicationContextHolder.getApplicationContext()
            .getBean("mailSenderService");

    // Level-2 resource types, loaded once at construction time. SubType schedules
    // walk this list by index via the PARAMS_SUBTYPE_INDEX schedule parameter.
    private List<ResourceType> subTypeList;

    public RemoteTimerScheduleCompleteCallback() {
        subTypeList = resourceTypeService.findResourceByTypeLevel(2);
    }

    /**
     * Records the finished schedule and starts the next stage of the pipeline.
     * Dispatches on the finished schedule's command type and, for the per-category
     * detail stages, on its PARAMS_COMMAND_SUB_CATEGORY parameter.
     * Any exception is logged and reported by mail instead of being rethrown.
     *
     * @param schedule        the schedule that just completed
     * @param scheduleResult  completion result (not inspected here; the chain always advances)
     */
    @Override
    public void callback(IRemoteSchedule schedule, ERemoteScheduleResult scheduleResult) {
        try {
            log.debug("=======IN CALLBACK:");
            Object category;
            ECommandType commandType = ECommandType.valueOf(schedule.getParams()
                    .get(RemoteTaskDefine.SCHEDULE_COMMAND_TYPE_KEY).toString());
            // Record a human-readable completion summary for the final report mail.
            Map<String, String> recordMap = new HashMap<String, String>();
            recordMap.put(schedule.getIdentity().getIdentity(), ScheduleVOLogFormatter.getFinishDetailInfo(schedule));
            ScheduleStatusManager.getInstance().updateInstance(recordMap);
            switch (commandType) {
            case Movie:
                // doViewDetailSchedule(ECommandType.ProgramView,
                // ECommandType.Movie);
                doListSchedule(ECommandType.Teleplay);
                // doHotKeySchedule(ECommandType.ProgramHotKeyword);
                break;
            case Teleplay:
                doListSchedule(ECommandType.Variety);
                break;
            case Variety:
                doListSchedule(ECommandType.Cartoon);
                break;
            case Cartoon:
                // doSubTypeSchedule();
                // Last list stage done; switch to the ProgramView detail stage.
                log.debug("===>In find programView of Movie:");
                doViewDetailSchedule(ECommandType.ProgramView, ECommandType.Movie);
                break;
            case ProgramView:
                // Advance ProgramView through the category sequence; after the last
                // category, move on to the ProgramRelatedVideos stage.
                category = schedule.getParams().get(RemoteTaskDefine.PARAMS_COMMAND_SUB_CATEGORY);
                if (category.equals(ECommandType.Movie.toString())) {
                    log.debug("===>In find programView of Teleplay:");
                    doViewDetailSchedule(ECommandType.ProgramView, ECommandType.Teleplay);
                } else if (category.equals(ECommandType.Teleplay.toString())) {
                    log.debug("===>In find programView of Variety:");
                    doViewDetailSchedule(ECommandType.ProgramView, ECommandType.Variety);
                } else if (category.equals(ECommandType.Variety.toString())) {
                    log.debug("===>In find programView of Cartoon:");
                    doViewDetailSchedule(ECommandType.ProgramView, ECommandType.Cartoon);
                } else {
                    log.debug("===>In find ProgramRelatedVideos of Movie:");
                    doViewDetailSchedule(ECommandType.ProgramRelatedVideos, ECommandType.Movie);
                }
                break;
            case ProgramRelatedVideos:
                // doDetailSchedule(ECommandType.ProgramReviews);
                // Same category walk as ProgramView, then move to ProgramReviews.
                category = schedule.getParams().get(RemoteTaskDefine.PARAMS_COMMAND_SUB_CATEGORY);
                if (category.equals(ECommandType.Movie.toString())) {
                    log.debug("===>In find ProgramRelatedVideos of Teleplay:");
                    doViewDetailSchedule(ECommandType.ProgramRelatedVideos, ECommandType.Teleplay);
                } else if (category.equals(ECommandType.Teleplay.toString())) {
                    log.debug("===>In find ProgramRelatedVideos of Variety:");
                    doViewDetailSchedule(ECommandType.ProgramRelatedVideos, ECommandType.Variety);
                } else if (category.equals(ECommandType.Variety.toString())) {
                    log.debug("===>In find ProgramRelatedVideos of Cartoon:");
                    doViewDetailSchedule(ECommandType.ProgramRelatedVideos, ECommandType.Cartoon);
                } else {
                    log.debug("===>In find ProgramReviews of Movie:");
                    doViewDetailSchedule(ECommandType.ProgramReviews, ECommandType.Movie);
                }
                break;
            case ProgramReviews:
                // doSubTypeSchedule();
                // Same category walk; after the last category, start the SubType stage.
                category = schedule.getParams().get(RemoteTaskDefine.PARAMS_COMMAND_SUB_CATEGORY);
                if (category.equals(ECommandType.Movie.toString())) {
                    log.debug("===>In find ProgramReviews of Teleplay:");
                    doViewDetailSchedule(ECommandType.ProgramReviews, ECommandType.Teleplay);
                } else if (category.equals(ECommandType.Teleplay.toString())) {
                    log.debug("===>In find ProgramReviews of Variety:");
                    doViewDetailSchedule(ECommandType.ProgramReviews, ECommandType.Variety);
                } else if (category.equals(ECommandType.Variety.toString())) {
                    log.debug("===>In find ProgramReviews of Cartoon:");
                    doViewDetailSchedule(ECommandType.ProgramReviews, ECommandType.Cartoon);
                } else {
                    doSubTypeSchedule(0);
                }
                break;
            case SubType:
                // Walk subTypeList one index per schedule; after the last entry,
                // start the hot-keyword stage.
                Integer subtypeIndex = (Integer) (schedule.getParams().get(RemoteTaskDefine.PARAMS_SUBTYPE_INDEX));
                if (subtypeIndex < subTypeList.size() - 1) {
                    doSubTypeSchedule(subtypeIndex + 1);
                } else {
                    doHotKeySchedule(ECommandType.ProgramHotKeyword);
                }
                break;
            case ProgramHotKeyword:
                // Final stage: assemble all recorded schedule summaries into one mail.
                log.debug("=======IN SENDMAIL:");
                StringBuffer mailContent = new StringBuffer();
                for (Map<String, String> record : ScheduleStatusManager.getInstance().getRecordList()) {
                    Iterator<Entry<String, String>> iterator = record.entrySet().iterator();
                    while (iterator.hasNext()) {
                        Map.Entry<String, String> entry = (Entry<String, String>) iterator.next();
                        String key = entry.getKey();
                        String value = entry.getValue();
                        // NOTE(review): "\n\r" looks like it was meant to be CRLF
                        // ("\r\n") — confirm against the mail template before changing.
                        mailContent.append("\n\r");
                        mailContent.append("-------------------------------------------------------------------");
                        mailContent.append("Schedule identity: \n\r");
                        mailContent.append(key);
                        mailContent.append("\n\r");
                        mailContent.append("Summary: \n\r");
                        mailContent.append(value);
                        mailContent.append("\n\r");
                    }
                }
                mailSenderService
                        .sendWithTemplate(String.format("The summary message is %s ", mailContent.toString()));
                LogUtils.info("The Schedule is finished, all task is done, please check email for detail info.");
                break;
            default:
                break;
            }
        } catch (Exception e) {
            // Never propagate: report the failure by log + mail so the timer thread survives.
            log.error(String.format("Exception in command controller when execute request, message is %s",
                    e.getMessage()), e);
            mailSenderService.sendWithTemplate(String.format(
                    "Exception in command controller when execute request, message is %s", TimerService.getTrace(e)));
        }
    }

    /**
     * Starts a paged "list" schedule for the given category, page 1 through the
     * configured maximum, and chains this callback onto it so the pipeline continues.
     *
     * @param type the list category to crawl
     */
    public void doListSchedule(ECommandType type) {
        IRemoteScheduleCompleteCallback scheduleCompleteCallback = new RemoteScheduleCompleteCallback();
        IRemoteScheduleTimeoutCallback scheduleTimeoutCallback = new RemoteScheduleTimeoutCallback();
        IRemoteTaskTimeoutCallback taskTimeoutCallback = new RemoteTaskTimeoutCallback();
        HashMap<String, Object> apiParams = new HashMap<String, Object>();
        IRemoteSchedule schedule = null;
        schedule = RemoteScheduleManager.getInstance().createListSchedule(type, 1, getMaxValueByECommandType(type),
                apiParams, scheduleCompleteCallback, scheduleTimeoutCallback, taskTimeoutCallback);
        schedule.addCompleteCallback(new RemoteTimerScheduleCompleteCallback());
        joyplusScheduleService.startSchedule(schedule);
    }

    /**
     * Starts one detail schedule of the given type for EVERY category (Movie,
     * Teleplay, Variety, Cartoon), using the program ids currently stored for each
     * category.
     *
     * NOTE(review): appears unused by {@link #callback}; the per-category variant
     * {@link #doViewDetailSchedule} is used instead.
     *
     * @param type the detail command type
     */
    public void doDetailSchedule(ECommandType type) {
        IRemoteScheduleCompleteCallback scheduleCompleteCallback = new RemoteScheduleCompleteCallback();
        IRemoteScheduleTimeoutCallback scheduleTimeoutCallback = new RemoteScheduleTimeoutCallback();
        IRemoteTaskTimeoutCallback taskTimeoutCallback = new RemoteTaskTimeoutCallback();
        ECommandType[] options = { ECommandType.Movie, ECommandType.Teleplay, ECommandType.Variety,
                ECommandType.Cartoon };
        HashMap<String, Object> apiParams = new HashMap<String, Object>();
        IRemoteSchedule schedule = null;
        for (ECommandType commandType : options) {
            List<Long> prodIds = programService.findProIds(1, getMaxValueByECommandType(commandType)
                    - TimerService.ADD_VALUE, getDetailTypeByECommandType(commandType));
            List<Object> prodIdList = new ArrayList<Object>();
            if (prodIds != null) {
                for (Long id : prodIds) {
                    prodIdList.add(id.toString());
                }
            }
            schedule = RemoteScheduleManager.getInstance().createDetailSchedule(type, prodIdList, apiParams,
                    scheduleCompleteCallback, scheduleTimeoutCallback, taskTimeoutCallback);
            schedule.addCompleteCallback(new RemoteTimerScheduleCompleteCallback());
            joyplusScheduleService.startSchedule(schedule);
        }
    }

    /**
     * Starts a detail schedule of the given type for a single category. The category
     * is stored in the schedule params (PARAMS_COMMAND_SUB_CATEGORY) so that
     * {@link #callback} can tell which category just finished and pick the next one.
     *
     * @param type     the detail command type (ProgramView / ProgramRelatedVideos / ProgramReviews)
     * @param category the program category whose ids are crawled
     */
    public void doViewDetailSchedule(ECommandType type, ECommandType category) {
        IRemoteScheduleCompleteCallback scheduleCompleteCallback = new RemoteScheduleCompleteCallback();
        IRemoteScheduleTimeoutCallback scheduleTimeoutCallback = new RemoteScheduleTimeoutCallback();
        IRemoteTaskTimeoutCallback taskTimeoutCallback = new RemoteTaskTimeoutCallback();
        // ECommandType[] options = { ECommandType.Movie, ECommandType.Teleplay,
        // ECommandType.Variety,
        // ECommandType.Cartoon };
        HashMap<String, Object> apiParams = new HashMap<String, Object>();
        apiParams.put(RemoteTaskDefine.PARAMS_COMMAND_SUB_CATEGORY, category.toString());
        IRemoteSchedule schedule = null;
        List<Long> prodIds = programService.findProIds(1, getMaxValueByECommandType(category) - TimerService.ADD_VALUE,
                getDetailTypeByECommandType(category));
        List<Object> prodIdList = new ArrayList<Object>();
        if (prodIds != null) {
            for (Long id : prodIds) {
                prodIdList.add(id.toString());
            }
        }
        schedule = RemoteScheduleManager.getInstance().createDetailSchedule(type, prodIdList, apiParams,
                scheduleCompleteCallback, scheduleTimeoutCallback, taskTimeoutCallback);
        schedule.addCompleteCallback(new RemoteTimerScheduleCompleteCallback());
        joyplusScheduleService.startSchedule(schedule);
    }

    /**
     * Starts a SubType list schedule for subTypeList[index]. The index is carried in
     * the schedule params so {@link #callback} can advance to index + 1.
     *
     * @param index position in {@link #subTypeList}; callers must keep it in range
     */
    public void doSubTypeSchedule(int index) {
        IRemoteScheduleCompleteCallback scheduleCompleteCallback = new RemoteScheduleCompleteCallback();
        IRemoteScheduleTimeoutCallback scheduleTimeoutCallback = new RemoteScheduleTimeoutCallback();
        IRemoteTaskTimeoutCallback taskTimeoutCallback = new RemoteTaskTimeoutCallback();
        HashMap<String, Object> apiParams = new HashMap<String, Object>();
        apiParams.put(RemoteTaskDefine.PARAMS_SUBTYPE_INDEX, index);
        IRemoteSchedule schedule = null;
        int maxValue = 0;
        ResourceType resourceType = subTypeList.get(index);
        apiParams.put(RemoteTaskDefine.PARAMS_SUBTYPE_SUBTYPE_NAME_KEY, resourceType.getTypeName());
        apiParams.put(RemoteTaskDefine.PARAMS_SUBTYPE_TYPE_NAME_KEY, resourceType.getType().toString());
        // Page limit comes from the per-type setting plus a configured margin.
        maxValue = Integer.parseInt(settingService.findByKey(
                SettingService.SETTING_MAX_INDEX_PRIX + resourceType.getType()).getSettingValue())
                + TimerService.SUB_TYPE_ADD_VALUE;
        schedule = RemoteScheduleManager.getInstance().createListSchedule(ECommandType.SubType, 1, maxValue, apiParams,
                scheduleCompleteCallback, scheduleTimeoutCallback, taskTimeoutCallback);
        schedule.addCompleteCallback(new RemoteTimerScheduleCompleteCallback());
        joyplusScheduleService.startSchedule(schedule);
    }

    /**
     * Starts the hot-keyword schedule. A single placeholder id ("00000000") is used
     * because the hot-keyword API is not keyed by a real program id.
     *
     * @param type expected to be ECommandType.ProgramHotKeyword
     */
    public void doHotKeySchedule(ECommandType type) {
        IRemoteScheduleCompleteCallback scheduleCompleteCallback = new RemoteScheduleCompleteCallback();
        IRemoteScheduleTimeoutCallback scheduleTimeoutCallback = new RemoteScheduleTimeoutCallback();
        IRemoteTaskTimeoutCallback taskTimeoutCallback = new RemoteTaskTimeoutCallback();
        HashMap<String, Object> apiParams = new HashMap<String, Object>();
        IRemoteSchedule schedule = null;
        List<Object> list = new ArrayList<Object>();
        list.add("00000000");
        schedule = RemoteScheduleManager.getInstance().createDetailSchedule(type, list, apiParams,
                scheduleCompleteCallback, scheduleTimeoutCallback, taskTimeoutCallback);
        schedule.addCompleteCallback(new RemoteTimerScheduleCompleteCallback());
        joyplusScheduleService.startSchedule(schedule);
    }

    /**
     * Looks up the configured maximum page index for a category and adds a margin.
     * The numeric codes (1/2/3/131) are the stored setting-key suffixes for each
     * category.
     *
     * NOTE(review): for any command type outside the four cases, {@code type} stays
     * null and the setting key becomes "...null" — confirm that cannot happen.
     *
     * @param commandType one of Movie / Teleplay / Variety / Cartoon
     * @return configured max index plus TimerService.ADD_VALUE
     */
    public int getMaxValueByECommandType(ECommandType commandType) {
        Integer type = null;
        int maxValue = 0;
        switch (commandType) {
        case Movie:
            type = 1;
            break;
        case Teleplay:
            type = 2;
            break;
        case Variety:
            type = 3;
            break;
        case Cartoon:
            type = 131;
            break;
        default:
            break;
        }
        maxValue = Integer.parseInt(settingService.findByKey(SettingService.SETTING_MAX_INDEX_PRIX + type)
                .getSettingValue()) + TimerService.ADD_VALUE;
        return maxValue;
    }

    /**
     * Maps a category command type to the detail-type string used by
     * programService.findProIds.
     *
     * @param commandType one of Movie / Teleplay / Variety / Cartoon
     * @return the detail-type name, or null for any other command type
     */
    public String getDetailTypeByECommandType(ECommandType commandType) {
        String detailType = null;
        switch (commandType) {
        case Movie:
            detailType = "Movie";
            break;
        case Teleplay:
            detailType = "Television";
            break;
        case Variety:
            detailType = "Show";
            break;
        case Cartoon:
            detailType = "Cartoon";
            break;
        default:
            break;
        }
        return detailType;
    }
}
| yangjun2/android | Feiying/joyplus-service/trunk/src/main/java/com/hoperun/feiying/framework/task/callback/RemoteTimerScheduleCompleteCallback.java | Java | unlicense | 14,906 |
package abstractFactory;
/**
 * Concrete sports-car product for the abstract-factory example: the "EOS" model.
 */
public class EOS implements CarroEsportivo {

    /** Model name reported by {@link #getNome()}. */
    private static final String NOME = "EOS";

    /** Acceleration time reported by {@link #getTempoAceleracao()} — presumably seconds; confirm against the CarroEsportivo contract. */
    private static final int TEMPO_ACELERACAO = 12;

    @Override
    public String getNome() {
        return NOME;
    }

    @Override
    public int getTempoAceleracao() {
        return TEMPO_ACELERACAO;
    }
}
| maiconmichelon/aula-design-patterns | DesignPatterns/src/abstractFactory/EOS.java | Java | unlicense | 209 |
/*
* Pactera Technology Co. Ltd. Copyright 2013, All rights reserved.
* 文件名 :SoapHeader.java
* 创建人 :zhangxurong
* 创建时间:2013-6-17
*/
package com.changhong.sdk.httpbean;
import java.util.LinkedHashMap;
import java.util.Map;
import com.changhong.sdk.baseapi.StringUtils;
import com.changhong.sdk.http.SeqCache;
import com.changhong.sdk.http.TransferConstants;
/**
* [简要描述]:通信消息头
*
* @author zhangxurong
* @version 1.0, 2013-6-17
*/
/**
 * Communication message header: a thin, ordered name/value map with typed
 * accessors for the well-known header fields (to, from, messageType, seq,
 * statusCode). Field names come from {@link TransferConstants}; insertion
 * order is preserved by the backing LinkedHashMap.
 *
 * @author zhangxurong
 * @version 1.0, 2013-6-17
 */
public class Header
{
    // Backing store; never null. Keys are the COMM_HEADER_NAME_* constants.
    private Map<String, String> params = new LinkedHashMap<String, String>();

    /**
     * Returns the backing parameter map (live view, not a copy).
     *
     * @return the header parameters
     */
    public Map<String, String> getParams()
    {
        return params;
    }

    /**
     * Copies all entries of the given map into this header.
     * Existing entries are kept; duplicate keys are overwritten.
     *
     * @param params entries to add; must not be null
     */
    public void setParams(Map<String, String> params)
    {
        this.params.putAll(params);
    }

    /**
     * Returns the value for the given header name, or null when the name is
     * empty/null or absent.
     *
     * @param name header field name
     * @return the stored value, or null
     */
    public String get(String name)
    {
        if (StringUtils.isNotEmpty(name))
        {
            return this.params.get(name);
        }
        return null;
    }

    /**
     * Stores a value under the given header name. Silently ignored when the
     * name is empty/null.
     *
     * @param name  header field name
     * @param value value to store
     */
    public void put(String name, String value)
    {
        if (StringUtils.isNotEmpty(name))
        {
            this.params.put(name, value);
        }
    }

    /**
     * Returns the "to" field (message destination).
     *
     * @return the destination, or null if unset
     */
    public String getTo()
    {
        return get(TransferConstants.COMM_HEADER_NAME_TO);
    }

    /**
     * Sets the "to" field (message destination).
     *
     * @param to the destination
     */
    public void setTo(String to)
    {
        put(TransferConstants.COMM_HEADER_NAME_TO, to);
    }

    /**
     * Returns the "toType" field (destination type).
     *
     * @return the destination type, or null if unset
     */
    public String getToType()
    {
        return get(TransferConstants.COMM_HEADER_NAME_TOTYPE);
    }

    /**
     * Sets the "toType" field (destination type).
     *
     * @param toType the destination type
     */
    public void setToType(String toType)
    {
        put(TransferConstants.COMM_HEADER_NAME_TOTYPE, toType);
    }

    /**
     * Returns the "from" field (message source).
     *
     * @return the source, or null if unset
     */
    public String getFrom()
    {
        return get(TransferConstants.COMM_HEADER_NAME_FROM);
    }

    /**
     * Sets the "from" field (message source).
     *
     * @param from the source
     */
    public void setFrom(String from)
    {
        put(TransferConstants.COMM_HEADER_NAME_FROM, from);
    }

    /**
     * Returns the "fromType" field (source type).
     *
     * @return the source type, or null if unset
     */
    public String getFromType()
    {
        return get(TransferConstants.COMM_HEADER_NAME_FROMTYPE);
    }

    /**
     * Sets the "fromType" field (source type).
     *
     * @param fromType the source type
     */
    public void setFromType(String fromType)
    {
        put(TransferConstants.COMM_HEADER_NAME_FROMTYPE, fromType);
    }

    /**
     * Returns the "messageType" field.
     *
     * @return the message type, or null if unset
     */
    public String getMessageType()
    {
        return get(TransferConstants.COMM_HEADER_NAME_MESSAGETYPE);
    }

    /**
     * Sets the "messageType" field.
     *
     * @param messageType the message type
     */
    public void setMessageType(String messageType)
    {
        put(TransferConstants.COMM_HEADER_NAME_MESSAGETYPE, messageType);
    }

    /**
     * Returns the "seq" field (message sequence number).
     *
     * @return the sequence number, or null if unset
     */
    public String getSeq()
    {
        return get(TransferConstants.COMM_HEADER_NAME_SEQ);
    }

    /**
     * Sets the "seq" field (message sequence number).
     *
     * @param seq the sequence number
     */
    public void setSeq(String seq)
    {
        put(TransferConstants.COMM_HEADER_NAME_SEQ, seq);
    }

    /**
     * Generates a fresh sequence number via {@link SeqCache#newSeq()} and
     * stores it as the "seq" field.
     */
    public void newSeq()
    {
        put(TransferConstants.COMM_HEADER_NAME_SEQ, SeqCache.newSeq());
    }

    /**
     * Returns the "statusCode" field.
     *
     * @return the status code, or null if unset
     */
    public String getStatusCode()
    {
        return get(TransferConstants.COMM_HEADER_NAME_STATUSCODE);
    }

    /**
     * Sets the "statusCode" field.
     *
     * @param statusCode the status code
     */
    public void setStatusCode(String statusCode)
    {
        put(TransferConstants.COMM_HEADER_NAME_STATUSCODE, statusCode);
    }

    /**
     * Renders the header as the backing map's string form.
     * (The null check is defensive; {@link #params} is initialized inline.)
     *
     * @return string form of the parameter map
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString()
    {
        if (null == params)
        {
            return null;
        }
        return params.toString();
    }
}
| yangjun2/android | changhong/changhong_sdk/src/com/changhong/sdk/httpbean/Header.java | Java | unlicense | 4,403 |
package com.scottmobleyschreibman.ribbit;
import java.util.ArrayList;
import java.util.List;
import android.app.AlertDialog;
import android.app.ListActivity;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ParseObject;
import com.parse.ParseQuery;
import com.parse.ParseRelation;
import com.parse.ParseUser;
import com.parse.SaveCallback;
/**
 * Screen for picking one or more friends as message recipients. Friends come
 * from the current Parse user's "friends" relation; the media to send (URI and
 * file type) is handed in via the launching Intent. Tapping "send" builds a
 * Parse message object and saves it in the background.
 */
public class RecipientsActivity extends ListActivity {

    public static final String TAG = RecipientsActivity.class.getSimpleName();

    protected ParseRelation<ParseUser> mFriendsRelation;
    protected ParseUser mCurrentUser;
    // Friends backing the list; positions match ListView positions (see getRecipientIds).
    protected List<ParseUser> mFriends;
    // "Send" action item; shown only while at least one recipient is checked.
    protected MenuItem mSendMenuItem;
    // Media to attach, passed via the launching Intent's data URI and extras.
    protected Uri mMediaUri;
    protected String mFileType;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
        setContentView(R.layout.activity_recipients);
        // Show the Up button in the action bar.
        setupActionBar();
        // Allow checking several recipients at once.
        getListView().setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
        mMediaUri = getIntent().getData();
        mFileType = getIntent().getExtras().getString(ParseConstants.KEY_FILE_TYPE);
    }

    /**
     * Reloads the friend list from Parse every time the screen becomes visible,
     * showing an indeterminate progress indicator while the query runs.
     */
    @Override
    public void onResume() {
        super.onResume();
        mCurrentUser = ParseUser.getCurrentUser();
        mFriendsRelation = mCurrentUser.getRelation(ParseConstants.KEY_FRIENDS_RELATION);
        setProgressBarIndeterminateVisibility(true);
        ParseQuery<ParseUser> query = mFriendsRelation.getQuery();
        query.addAscendingOrder(ParseConstants.KEY_USERNAME);
        query.findInBackground(new FindCallback<ParseUser>() {
            @Override
            public void done(List<ParseUser> friends, ParseException e) {
                setProgressBarIndeterminateVisibility(false);
                if (e == null) {
                    // Success: show usernames in a checkable list.
                    mFriends = friends;
                    String[] usernames = new String[mFriends.size()];
                    int i = 0;
                    for(ParseUser user : mFriends) {
                        usernames[i] = user.getUsername();
                        i++;
                    }
                    ArrayAdapter<String> adapter = new ArrayAdapter<String>(
                            getListView().getContext(),
                            android.R.layout.simple_list_item_checked,
                            usernames);
                    setListAdapter(adapter);
                }
                else {
                    // Query failed: log and surface the Parse error message.
                    Log.e(TAG, e.getMessage());
                    AlertDialog.Builder builder = new AlertDialog.Builder(RecipientsActivity.this);
                    builder.setMessage(e.getMessage())
                            .setTitle(R.string.error_title)
                            .setPositiveButton(android.R.string.ok, null);
                    AlertDialog dialog = builder.create();
                    dialog.show();
                }
            }
        });
    }

    /**
     * Set up the {@link android.app.ActionBar}.
     */
    private void setupActionBar() {
        getActionBar().setDisplayHomeAsUpEnabled(true);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_recipients, menu);
        // Assumes "send" is the first item of menu_recipients — keep the XML in sync.
        mSendMenuItem = menu.getItem(0);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case android.R.id.home:
            // This ID represents the Home or Up button. In the case of this
            // activity, the Up button is shown. Use NavUtils to allow users
            // to navigate up one level in the application structure. For
            // more details, see the Navigation pattern on Android Design:
            //
            // http://developer.android.com/design/patterns/navigation.html#up-vs-back
            //
            NavUtils.navigateUpFromSameTask(this);
            return true;
        case R.id.action_send:
            ParseObject message = createMessage();
            if (message == null) {
                // error — the media file could not be read.
                AlertDialog.Builder builder = new AlertDialog.Builder(this);
                builder.setMessage(R.string.error_selecting_file)
                        .setTitle(R.string.error_selecting_file_title)
                        .setPositiveButton(android.R.string.ok, null);
                AlertDialog dialog = builder.create();
                dialog.show();
            }
            else {
                send(message);
                // Close this screen once the save has been queued.
                finish();
            }
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Toggles the "send" action's visibility based on whether any recipient is
     * currently checked.
     */
    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        super.onListItemClick(l, v, position, id);
        if (l.getCheckedItemCount() > 0) {
            mSendMenuItem.setVisible(true);
        }
        else {
            mSendMenuItem.setVisible(false);
        }
    }

    /**
     * Builds the Parse message object: sender, recipients, file type and the
     * attached media file (images are downscaled before upload).
     *
     * @return the message ready to save, or null when the media bytes cannot be read
     */
    protected ParseObject createMessage() {
        ParseObject message = new ParseObject(ParseConstants.CLASS_MESSAGES);
        message.put(ParseConstants.KEY_SENDER_ID, ParseUser.getCurrentUser().getObjectId());
        message.put(ParseConstants.KEY_SENDER_NAME, ParseUser.getCurrentUser().getUsername());
        message.put(ParseConstants.KEY_RECIPIENT_IDS, getRecipientIds());
        message.put(ParseConstants.KEY_FILE_TYPE, mFileType);
        byte[] fileBytes = FileHelper.getByteArrayFromFile(this, mMediaUri);
        if (fileBytes == null) {
            return null;
        }
        else {
            if (mFileType.equals(ParseConstants.TYPE_IMAGE)) {
                fileBytes = FileHelper.reduceImageForUpload(fileBytes);
            }
            String fileName = FileHelper.getFileName(this, mMediaUri, mFileType);
            ParseFile file = new ParseFile(fileName, fileBytes);
            message.put(ParseConstants.KEY_FILE, file);
            return message;
        }
    }

    /**
     * Collects the object ids of every checked friend. Relies on list positions
     * matching {@link #mFriends} indexes.
     *
     * @return object ids of the selected recipients (possibly empty)
     */
    protected ArrayList<String> getRecipientIds() {
        ArrayList<String> recipientIds = new ArrayList<String>();
        for (int i = 0; i < getListView().getCount(); i++) {
            if (getListView().isItemChecked(i)) {
                recipientIds.add(mFriends.get(i).getObjectId());
            }
        }
        return recipientIds;
    }

    /**
     * Saves the message in the background, toasting on success or showing an
     * error dialog on failure.
     *
     * @param message the message built by {@link #createMessage()}
     */
    protected void send(ParseObject message) {
        message.saveInBackground(new SaveCallback() {
            @Override
            public void done(ParseException e) {
                if (e == null) {
                    // success!
                    Toast.makeText(RecipientsActivity.this, R.string.success_message, Toast.LENGTH_LONG).show();
                }
                else {
                    AlertDialog.Builder builder = new AlertDialog.Builder(RecipientsActivity.this);
                    builder.setMessage(R.string.error_sending_message)
                            .setTitle(R.string.error_selecting_file_title)
                            .setPositiveButton(android.R.string.ok, null);
                    AlertDialog dialog = builder.create();
                    dialog.show();
                }
            }
        });
    }
}
| smobley62/Ribbit | app/src/main/java/com/scottmobleyschreibman/ribbit/RecipientsActivity.java | Java | unlicense | 7,826 |
package K_Empty_Slots;
import java.util.ArrayList;
import java.util.List;
/**
 * LeetCode 683 "K Empty Slots".
 *
 * Given {@code flowers}, where {@code flowers[i]} is the (1-based) garden position
 * that blooms on day {@code i + 1}, find the earliest day on which two bloomed
 * positions have EXACTLY {@code k} positions between them, all still empty.
 */
public class Solution {

    /**
     * Sliding-window solution, O(n) time / O(n) space.
     *
     * Invert the input into {@code days[p]} = day position {@code p} blooms, then
     * slide a window of width {@code k + 2} over positions: a pair
     * {@code (lo, hi = lo + k + 1)} is valid iff every interior position blooms
     * LATER than both endpoints; the answer for that pair is
     * {@code max(days[lo], days[hi])}.
     *
     * @param flowers bloom order; 1-based positions, each appearing exactly once
     * @param k       required number of empty slots between the two flowers
     * @return the earliest qualifying day (1-based), or -1 if none exists
     *         (also -1 for a null/empty input)
     */
    public int kEmptySlots(int[] flowers, int k) {
        if (flowers == null || flowers.length == 0) return -1;
        int n = flowers.length;
        // days[p] = the day (1-based) on which position p blooms.
        int[] days = new int[n];
        for (int i = 0; i < n; i++) {
            days[flowers[i] - 1] = i + 1;
        }
        int lo = 0, hi = k + 1, res = Integer.MAX_VALUE;
        for (int i = 0; hi < n; i++) {
            // While interior positions bloom later than both endpoints, keep scanning.
            if (days[lo] < days[i] && days[hi] < days[i]) continue;
            // Reaching i == hi means every interior position of [lo, hi] bloomed
            // later than both endpoints, so the pair becomes valid on the day the
            // later endpoint blooms.
            if (i == hi) {
                res = Math.min(res, Math.max(days[lo], days[hi]));
            }
            // Restart the window at the position that broke (or closed) it.
            lo = i;
            hi = i + k + 1;
        }
        return res == Integer.MAX_VALUE ? -1 : res;
    }

    /** Tiny demo driver; expected outputs are shown in the trailing comments. */
    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.kEmptySlots(new int[]{1, 3, 2}, 1)); // 2
        System.out.println(s.kEmptySlots(new int[]{1, 2, 3}, 1)); // -1
    }
}
/*
* Software Name : ATK
*
* Copyright (C) 2007 - 2012 France Télécom
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ------------------------------------------------------------------
* File Name : XMLParser.java
*
* Created : 19/02/2007
* Author(s) : Aurore PENAULT
*/
package com.orange.atk.atkUI.corecli.utils;
import java.io.File;
import java.util.List;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import com.orange.atk.atkUI.corecli.Alert;
/**
* Tools to parse xml files.
*
* @author Aurore PENAULT
* @since JDK5.0
*/
public class XMLParser {
private Document doc;
private Element root;
private File file;
public XMLParser(File f, String dtdURL, String dtdDirectory) {
this.file = f;
SAXReader reader = new SAXReader();
if (dtdURL != null && dtdDirectory != null) {
reader.setEntityResolver(new MatosResolver(dtdURL,dtdDirectory));
}
try {
doc = reader.read(f);
root = doc.getRootElement();
} catch (DocumentException e) {
Alert.raise(e, "Error with file '"+f.getAbsolutePath()+"': \n"
+ e.getMessage() );
}
}
/**
* get a given category of entries from the file (for anasoot mainly)
* @param kind the kind of elements analyzed
* @return an array of XML elements representing the rules
*/
public Element [] getKind(String kind) {
return getElements(root,kind);
}
/**
* Extracts the set of XML elements having a given name in a given XML
* element.
* @param e the element to explore
* @param name the name of the elements searched
* @return an array of elements
*/
public Element [] getElements(Element e, String name) {
List<?> list = e.elements(name);
int l = list.size();
Element r [] = new Element [l];
for (int i=0; i<l; i++) {
r[i] = (Element) list.get(i);
}
return r;
}
/**
* Extracts a given XML element having a given name son of a given
* XML element. There should be only one such element:
* @param e the element to explore
* @param name the name of the elements searched
* @return an array of elements
*/
public Element getElement(Element e, String name) {
List<?> list = e.elements(name);
if (list.size()==1) return (Element) list.get(0);
else return null;
}
public Document getDoc() {
return doc;
}
public void setDoc(Document doc) {
this.doc = doc;
}
public Element getRoot() {
return root;
}
public void setRoot(Element root) {
this.root = root;
}
public File getFile() {
return file;
}
}
| Orange-OpenSource/ATK | gui/src/main/java/com/orange/atk/atkUI/corecli/utils/XMLParser.java | Java | apache-2.0 | 3,063 |
package ecologylab.bigsemantics.compiler;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ecologylab.bigsemantics.FileUtils;
import ecologylab.bigsemantics.collecting.MetaMetadataRepositoryLocator;
import ecologylab.bigsemantics.metadata.MetadataClassDescriptor;
import ecologylab.bigsemantics.metametadata.Build;
import ecologylab.bigsemantics.metametadata.FileTools;
import ecologylab.bigsemantics.metametadata.MetaMetadata;
import ecologylab.bigsemantics.metametadata.MetaMetadataRepository;
import ecologylab.bigsemantics.metametadata.RepositoryOrderingByGeneration;
import ecologylab.bigsemantics.namesandnums.SemanticsNames;
import ecologylab.generic.Debug;
import ecologylab.serialization.ClassDescriptor;
import ecologylab.serialization.SIMPLTranslationException;
import ecologylab.serialization.SimplTypesScope;
import ecologylab.serialization.SimplTypesScope.GRAPH_SWITCH;
import ecologylab.serialization.formatenums.Format;
import ecologylab.serialization.formatenums.StringFormat;
import ecologylab.translators.CodeTranslationException;
import ecologylab.translators.CodeTranslator;
/**
*
* @author quyin
*
*/
public class MetaMetadataCompiler extends Debug // ApplicationEnvironment
{
public static final String BUILTINS_CLASS_PACKAGE = ".builtins";
public static final String DECLARATION_CLASS_PACKAGE = ".declarations";
public static final String DECLARATION_CLASS_SUFFIX = "Declaration";
private static final String META_METADATA_COMPILER_TSCOPE_NAME = "meta-metadata-compiler-tscope";
private static final String META_METADATA_COMPILER_BUILTIN_DECLARATIONS_SCOPE = "meta-metadata-compiler-builtin-declarations-scope";
/**
 * Runs the full meta-metadata compilation pipeline:
 * load the repository, derive its SIMPL type scope, generate "declaration"
 * classes for built-in metadata types, translate all non-built-in metadata
 * classes to source code, then emit build info, the repository file list, and
 * the post-inheritance repository serialization.
 *
 * @param config compiler configuration (repository location, output dirs, target translator)
 * @throws IOException              on file-system failures
 * @throws SIMPLTranslationException on (de)serialization failures
 * @throws CodeTranslationException on code-generation failures
 */
public void compile(CompilerConfig config) throws IOException, SIMPLTranslationException,
        CodeTranslationException
{
    debug("\n\n loading repository ...\n\n");
    SimplTypesScope.enableGraphSerialization();
    MetaMetadataRepository.initializeTypes();
    MetaMetadataRepository repository = config.loadRepository();
    SimplTypesScope tscope = repository.traverseAndGenerateTranslationScope(META_METADATA_COMPILER_TSCOPE_NAME);
    CodeTranslator codeTranslator = config.getCompiler();
    File generatedSemanticsLocation = config.getGeneratedSemanticsLocation();
    // Wipe any previously generated "Library" output so stale classes don't linger.
    File libraryDir = new File(generatedSemanticsLocation, "Library");
    if (libraryDir.exists() && libraryDir.isDirectory())
    {
        FileUtils.deleteDir(libraryDir);
    }
    // generate declaration classes and scope:
    // for each built-in metadata class, clone its descriptor into a parallel
    // "*Declaration" class in a ".declarations" sub-package.
    SimplTypesScope builtinDeclarationsScope = SimplTypesScope.get(META_METADATA_COMPILER_BUILTIN_DECLARATIONS_SCOPE, new Class[] {});
    String builtinPackage = null; // NOTE(review): assigned below but otherwise unused.
    for (ClassDescriptor mdCD : tscope.getClassDescriptors())
    {
        MetaMetadata definingMmd = ((MetadataClassDescriptor) mdCD).getDefiningMmd();
        if (definingMmd.isBuiltIn())
        {
            ClassDescriptor declCD = (ClassDescriptor) mdCD.clone();
            String packageName = mdCD.getDescribedClassPackageName();
            String classSimpleName = mdCD.getDescribedClassSimpleName();
            if (definingMmd.isRootMetaMetadata())
            {
                // Root built-in: append ".builtins.declarations" to its package.
                packageName += BUILTINS_CLASS_PACKAGE + DECLARATION_CLASS_PACKAGE;
                classSimpleName += DECLARATION_CLASS_SUFFIX;
            }
            else
            {
                builtinPackage = packageName; // essentially, the old package name
                packageName = packageName.replace(BUILTINS_CLASS_PACKAGE, BUILTINS_CLASS_PACKAGE + DECLARATION_CLASS_PACKAGE);
                classSimpleName += DECLARATION_CLASS_SUFFIX;
            }
            declCD.setDescribedClassPackageName(packageName);
            declCD.setDescribedClassSimpleName(classSimpleName);
            builtinDeclarationsScope.addTranslation(declCD);
        }
    }
    // compiler.translate(cd, config.getGeneratedBuiltinDeclarationsLocation(), config,
    // newPackageName, newSimpleName, GenerateAbstractClass.TRUE);
    // Clone the config and retarget it at the built-in declarations scope/package.
    CompilerConfig newConfig = (CompilerConfig) config.clone();
    newConfig.setLibraryTScopeClassPackage("ecologylab.bigsemantics.metadata.builtins.declarations");
    newConfig.setLibraryTScopeClassSimpleName("MetadataBuiltinDeclarationsTranslationScope");
    // newConfig.setGenerateAbstractClass(true);
    newConfig.setBuiltinDeclarationScopeName(SemanticsNames.REPOSITORY_BUILTIN_DECLARATIONS_TYPE_SCOPE);
    newConfig.getClassesExcludedFromGeneratedTScopeClass()
            .add("ecologylab.bigsemantics.metadata.builtins.InformationComposition");
    // InformationComposition is excluded from translation entirely.
    ClassDescriptor infoCompCD = builtinDeclarationsScope.getClassDescriptorBySimpleName("InformationCompositionDeclaration");
    codeTranslator.excludeClassFromTranslation(infoCompCD);
    if (config.getGeneratedBuiltinDeclarationsLocation() != null)
        codeTranslator.translate(config.getGeneratedBuiltinDeclarationsLocation(), builtinDeclarationsScope, newConfig);
    // generate normal metadata classes (built-ins were handled above, so exclude them).
    for (ClassDescriptor mdCD : tscope.getClassDescriptors())
    {
        MetaMetadata definingMmd = ((MetadataClassDescriptor) mdCD).getDefiningMmd();
        if (definingMmd.isBuiltIn())
            codeTranslator.excludeClassFromTranslation(mdCD);
    }
    debug("\n\n compiling to " + generatedSemanticsLocation + " ...\n\n");
    codeTranslator.translate(generatedSemanticsLocation, tscope, config);
    // generate repository build info:
    generateRepositoryBuildInfo(config);
    repository.build().copyFrom(this.build);
    // generate repository file list:
    generateRepositoryFileList(config);
    // serialize post-inheritance repository files:
    serializePostInheritanceRepository(config.getRepositoryLocation(), repository);
    debug("\n\n compiler finished.");
}
  /** Name of the generated file listing all repository files, placed in the repository root. */
  static private String REPOSITORY_FILES_LST = "repositoryFiles.lst";

  /** Name of the generated XML file holding build metadata (date, host, user). */
  static private String REPOSITORY_BUILD_FILE = "buildInfo.xml";

  // Build info captured by generateRepositoryBuildInfo(); compile() copies it into the repository.
  private Build build;

  /**
   * Writes buildInfo.xml into the repository root, recording the build date, the
   * local host name, and the current user. Also stashes the Build object in the
   * {@code build} field for later use by {@link #compile(CompilerConfig)}.
   *
   * @param config compiler configuration; its repository location must exist and be a directory
   */
  public void generateRepositoryBuildInfo(CompilerConfig config)
  {
    File repositoryLocation = config.getRepositoryLocation();
    // NOTE: asserts are no-ops unless the JVM runs with -ea.
    assert repositoryLocation != null;
    assert repositoryLocation.exists();
    assert repositoryLocation.isDirectory();
    debug("Repository location: " + repositoryLocation);
    MetaMetadataRepository repoBuild = new MetaMetadataRepository();
    build = repoBuild.build();
    build.date = new Date();
    try
    {
      build.host = InetAddress.getLocalHost().getHostName();
    }
    catch (UnknownHostException e1)
    {
      // Host name is informational only; record a placeholder and carry on.
      build.host = "<unable to get host name>";
      e1.printStackTrace();
    }
    build.user = System.getProperty("user.name");
    File repoBuildFile = new File(repositoryLocation, REPOSITORY_BUILD_FILE);
    try
    {
      SimplTypesScope.serialize(repoBuild, repoBuildFile, Format.XML);
    }
    catch (SIMPLTranslationException e2)
    {
      error("Error saving build info!");
      e2.printStackTrace();
    }
  }
public void generateRepositoryFileList(CompilerConfig config)
{
File repositoryLocation = config.getRepositoryLocation();
assert repositoryLocation != null;
assert repositoryLocation.exists();
assert repositoryLocation.isDirectory();
debug("Repository location: " + repositoryLocation);
List<String> items = getRepositoryFileItems(repositoryLocation, config.getRepositoryFormat());
File repoFilesLst = new File(repositoryLocation, REPOSITORY_FILES_LST);
PrintWriter pw;
try
{
pw = new PrintWriter(repoFilesLst);
for (String item : items)
{
debug(" Repository file: " + item);
pw.println(item);
}
pw.close();
}
catch (FileNotFoundException e)
{
error("Cannot write to " + repoFilesLst);
}
}
private List<String> getRepositoryFileItems(File repositoryLocation, Format repositoryFormat)
{
String repositoryPath = repositoryLocation.getAbsolutePath();
List<File> files = MetaMetadataRepositoryLocator.listRepositoryFiles(repositoryLocation,
repositoryFormat);
List<String> items = new ArrayList<String>();
for (File file : files)
{
String item = FileTools.getRelativePath(repositoryPath, file.getAbsolutePath());
item = item.replace('\\', '/'); // when specifying java resources use '/'
debug(" Repository files list item: " + item);
items.add(item);
}
return items;
}
  /** Sibling directory (next to the repository) that receives post-inheritance copies. */
  static private String POST_INHERITANCE_REPOSITORY_DIR = "PostInheritanceRepository";

  /** Base file name (without extension) for the post-inheritance repository copies. */
  static private String POST_INHERITANCE_REPOSITORY_FILE_NAME = "post-inheritance-repository";

  /**
   * Serializes the repository, after inheritance processing, into XML and JSON files
   * under the PostInheritanceRepository directory next to the repository root.
   *
   * NOTE(review): if that directory does not exist, this method silently does nothing —
   * presumably the directory is expected to be created beforehand; confirm with callers.
   *
   * @param repositoryLocation the repository root directory; must exist and be a directory
   * @param repository         the loaded repository to serialize
   */
  public static void serializePostInheritanceRepository(File repositoryLocation, MetaMetadataRepository repository)
  {
    // NOTE: asserts are no-ops unless the JVM runs with -ea.
    assert repositoryLocation != null;
    assert repositoryLocation.exists();
    assert repositoryLocation.isDirectory();
    Debug.debugT(MetaMetadataCompiler.class, "Repository location: " + repositoryLocation);
    File postInheritanceRepositoryDir = new File(repositoryLocation.getParentFile(),
                                                 POST_INHERITANCE_REPOSITORY_DIR);
    File xmlPostInheritanceRepositoryFile = null;
    File jsonPostInheritanceRepositoryFile = null;
    if (postInheritanceRepositoryDir.exists() && postInheritanceRepositoryDir.isDirectory())
    {
      xmlPostInheritanceRepositoryFile = new File(postInheritanceRepositoryDir,
                                                  POST_INHERITANCE_REPOSITORY_FILE_NAME + ".xml");
      jsonPostInheritanceRepositoryFile = new File(postInheritanceRepositoryDir,
                                                   POST_INHERITANCE_REPOSITORY_FILE_NAME + ".json");
      Map<StringFormat, String> strs = new HashMap<StringFormat, String>();
      serializeRepositoryIntoFormats(repository, strs);
      // Either value may be null if serialization failed for that format.
      writeStringToFile(xmlPostInheritanceRepositoryFile, strs.get(StringFormat.XML));
      writeStringToFile(jsonPostInheritanceRepositoryFile, strs.get(StringFormat.JSON));
    }
  }
  /**
   * Serializes the repository into XML and JSON strings, storing them in {@code strs}.
   * Side effects: globally enables SIMPL graph serialization and runs inheritance
   * processing on the repository before serializing — both must happen first, so
   * the statement order here matters.
   *
   * @param repository the repository to serialize
   * @param strs       out-parameter; receives one entry per format (value may be null on failure)
   */
  static void serializeRepositoryIntoFormats(MetaMetadataRepository repository,
                                             Map<StringFormat, String> strs)
  {
    SimplTypesScope.graphSwitch = GRAPH_SWITCH.ON;
    repository.traverseAndInheritMetaMetadata();
    strs.put(StringFormat.XML, trySerializeRepository(repository, StringFormat.XML));
    strs.put(StringFormat.JSON, trySerializeRepository(repository, StringFormat.JSON));
  }
private static String trySerializeRepository(MetaMetadataRepository repository, StringFormat format)
{
try
{
return SimplTypesScope.serialize(repository, format).toString();
}
catch (SIMPLTranslationException e)
{
Debug.error(MetaMetadataCompiler.class,
"Cannot serialize post-inheritance repository in format " + format);
e.printStackTrace();
}
return null;
}
private static void writeStringToFile(File file, String str)
{
BufferedWriter bw;
try
{
bw = new BufferedWriter(new FileWriter(file));
bw.write(str);
bw.close();
}
catch (IOException e)
{
Debug.error(MetaMetadataCompiler.class, "Cannot write to " + file);
e.printStackTrace();
}
}
  /** Logs an error message attributed to this compiler class. */
  private static void error(String msg)
  {
    Debug.error(MetaMetadataCompiler.class, msg);
  }
/**
* @param args
*
* @throws IOException
* @throws SIMPLTranslationException
* @throws CodeTranslationException
*/
public static void main(String[] args) throws IOException, SIMPLTranslationException,
CodeTranslationException
{
if (args.length < 2 || args.length > 3)
{
error("args: <target-language> <generated-semantics-location> [<generated-builtin-declarations-location>]");
error(" - <target-language>: e.g. java or csharp (cs, c#).");
error(" - <generated-semantics-location>: the path to the location for generated semantics.");
error(" - <generated-builtin-declarations-location>: the path to the location for generated builtin declarations.");
System.exit(-1);
}
String lang = args[0].toLowerCase();
String semanticsLoc = args[1];
String builtinDeclarationsLoc = args.length == 3 ? args[2] : null;
File generatedSemanticsLocation = new File(semanticsLoc);
File generatedBuiltinSemanticsLocation = builtinDeclarationsLoc == null ? null : new File(builtinDeclarationsLoc);
CompilerConfig config = new CompilerConfig(lang, generatedSemanticsLocation, generatedBuiltinSemanticsLocation);
MetaMetadataCompiler compiler = new MetaMetadataCompiler();
compiler.compile(config);
}
}
| ecologylab/BigSemanticsJava | BigSemanticsSDK/src/ecologylab/bigsemantics/compiler/MetaMetadataCompiler.java | Java | apache-2.0 | 12,980 |
package example.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
@Entity
public class Customer132 {

	// Auto-generated primary key; GenerationType.AUTO lets the JPA provider pick the strategy.
	@Id @GeneratedValue(strategy = GenerationType.AUTO) private long id;
	// NOTE: field names double as persistence mapping names — do not rename casually.
	private String firstName;
	private String lastName;

	// No-arg constructor required by the JPA specification; not for application use.
	protected Customer132() {}

	/**
	 * Creates a customer with the given names; the id is assigned when persisted.
	 */
	public Customer132(String firstName, String lastName) {
		this.firstName = firstName;
		this.lastName = lastName;
	}

	@Override
	public String toString() {
		return String.format("Customer132[id=%d, firstName='%s', lastName='%s']", id, firstName, lastName);
	}
}
| spring-projects/spring-data-examples | jpa/deferred/src/main/java/example/model/Customer132.java | Java | apache-2.0 | 624 |
/*
* Copyright (C) 2014 Lucas Rocha
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lsjwzh.widget.recyclerviewpagerdeomo;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.lsjwzh.widget.recyclerviewpager.RecyclerViewPager;
/**
 * Demo fragment showing a {@link RecyclerViewPager} whose children are scaled
 * (down to 90%) as they move away from the center, producing a carousel effect.
 *
 * NOTE: despite the class name, the layout manager is configured VERTICAL,
 * so the pager actually scrolls vertically.
 */
public class HorizontalLayoutFragment extends Fragment {

    private View mViewRoot;
    private RecyclerViewPager mRecyclerView;
    private TextView mCountText;
    private TextView mStateText;
    private Toast mToast;
    private TextView mPositionText;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Reuse the previously inflated root across view re-creations; detach it
        // from any stale parent before handing it back to the framework.
        if (mViewRoot == null) {
            mViewRoot = inflater.inflate(R.layout.layout_horizontal, container, false);
        } else if (mViewRoot.getParent() != null) {
            ((ViewGroup) mViewRoot.getParent()).removeView(mViewRoot);
        }
        return mViewRoot;
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        final Activity activity = getActivity();
        mToast = Toast.makeText(activity, "", Toast.LENGTH_SHORT);
        mToast.setGravity(Gravity.CENTER, 0, 0);
        mRecyclerView = (RecyclerViewPager) view.findViewById(R.id.list);
        LinearLayoutManager layout = new LinearLayoutManager(getActivity(), LinearLayoutManager.VERTICAL, false);
        mRecyclerView.setLayoutManager(layout);
        mRecyclerView.setAdapter(new LayoutAdapter(activity, mRecyclerView));
        mRecyclerView.setHasFixedSize(true);
        mRecyclerView.setLongClickable(true);
        mPositionText = (TextView) view.getRootView().findViewById(R.id.position);
        mCountText = (TextView) view.getRootView().findViewById(R.id.count);
        mStateText = (TextView) view.getRootView().findViewById(R.id.state);
        updateState(RecyclerView.SCROLL_STATE_IDLE);
        mRecyclerView.setOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int scrollState) {
                updateState(scrollState);
            }

            @Override
            public void onScrolled(RecyclerView recyclerView, int i, int i2) {
                // mPositionText.setText("First: " + mRecyclerView.getFirstVisiblePosition());
                int childCount = mRecyclerView.getChildCount();
                // BUG FIX: with an empty adapter getChildAt(0) is null and the
                // original code crashed with a NullPointerException here.
                if (childCount == 0) {
                    return;
                }
                int height = mRecyclerView.getChildAt(0).getHeight();
                int padding = (mRecyclerView.getHeight() - height) / 2;
                mCountText.setText("Count: " + childCount);
                for (int j = 0; j < childCount; j++) {
                    View v = recyclerView.getChildAt(j);
                    // Moving up: as the child's top goes from `padding` down to
                    // `padding - height`, rate grows 0 -> 1 and the child shrinks
                    // from full size towards 90%.
                    float rate = 0;
                    if (v.getTop() <= padding) {
                        if (v.getTop() >= padding - v.getHeight()) {
                            rate = (padding - v.getTop()) * 1f / v.getHeight();
                        } else {
                            rate = 1;
                        }
                        v.setScaleY(1 - rate * 0.1f);
                        v.setScaleX(1 - rate * 0.1f);
                    } else {
                        // Moving down: as the top goes from (pager height - padding)
                        // up to `padding`, rate grows 0 -> 1 and the child grows
                        // from 90% back to full size.
                        if (v.getTop() <= recyclerView.getHeight() - padding) {
                            rate = (recyclerView.getHeight() - padding - v.getTop()) * 1f / v.getHeight();
                        }
                        v.setScaleY(0.9f + rate * 0.1f);
                        v.setScaleX(0.9f + rate * 0.1f);
                    }
                }
            }
        });
        // After (re)layout, pre-shrink the off-center children so the carousel
        // looks correct before the first scroll event arrives.
        mRecyclerView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
            @Override
            public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
                if (mRecyclerView.getChildCount() < 3) {
                    // Fewer than 3 children: only one neighbor of the current page is visible.
                    if (mRecyclerView.getChildAt(1) != null) {
                        if (mRecyclerView.getCurrentPosition() == 0) {
                            View v1 = mRecyclerView.getChildAt(1);
                            v1.setScaleY(0.9f);
                            v1.setScaleX(0.9f);
                        } else {
                            View v1 = mRecyclerView.getChildAt(0);
                            v1.setScaleY(0.9f);
                            v1.setScaleX(0.9f);
                        }
                    }
                } else {
                    // Child 1 is centered; shrink the neighbors at indices 0 and 2.
                    if (mRecyclerView.getChildAt(0) != null) {
                        View v0 = mRecyclerView.getChildAt(0);
                        v0.setScaleY(0.9f);
                        v0.setScaleX(0.9f);
                    }
                    if (mRecyclerView.getChildAt(2) != null) {
                        View v2 = mRecyclerView.getChildAt(2);
                        v2.setScaleY(0.9f);
                        v2.setScaleX(0.9f);
                    }
                }
            }
        });
    }

    /** Mirrors the pager's scroll state and current position into the status TextViews. */
    private void updateState(int scrollState) {
        String stateName = "Undefined";
        switch (scrollState) {
            case RecyclerView.SCROLL_STATE_IDLE:
                stateName = "Idle";
                break;
            case RecyclerView.SCROLL_STATE_DRAGGING:
                stateName = "Dragging";
                break;
            case RecyclerView.SCROLL_STATE_SETTLING:
                stateName = "Flinging";
                break;
        }
        mPositionText.setText("currentPosition:" + mRecyclerView.getCurrentPosition());
        mStateText.setText(stateName);
    }
}
| qiuxuhao/RecyclerViewPager-master | app/src/main/java/com/lsjwzh/widget/recyclerviewpagerdeomo/HorizontalLayoutFragment.java | Java | apache-2.0 | 6,808 |
/*
* Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensingular.lib.wicket.util.model;
import java.util.Collection;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.Component;
import org.apache.wicket.model.IModel;
/**
 * Boolean model that reports whether a wrapped model's value is null or "empty",
 * where emptiness is defined per type by {@link #nullOrEmpty(Object)}.
 */
public class NullOrEmptyModel implements IBooleanModel {

    private final IModel<?> model;

    public NullOrEmptyModel(IModel<?> model) {
        this.model = model;
    }

    @Override
    public Boolean getObject() {
        return nullOrEmpty(model);
    }

    @Override
    public void detach() {
        model.detach();
    }

    /**
     * Decides emptiness by type: blank Strings, empty Collections/Maps, and —
     * recursively — the contents of IModels and Component default models count
     * as empty. Any other non-null object is considered non-empty.
     *
     * @param obj value to inspect; may be null
     * @return true if the value is null or empty per the rules above
     */
    public static boolean nullOrEmpty(Object obj) {
        if (obj == null) {
            return true;
        }
        if (obj instanceof String) {
            return StringUtils.isBlank((String) obj);
        }
        if (obj instanceof Collection<?>) {
            return ((Collection<?>) obj).isEmpty();
        }
        if (obj instanceof Map<?, ?>) {
            return ((Map<?, ?>) obj).isEmpty();
        }
        if (obj instanceof IModel<?>) {
            return nullOrEmpty(((IModel<?>) obj).getObject());
        }
        if (obj instanceof Component) {
            return nullOrEmpty(((Component) obj).getDefaultModel());
        }
        return false;
    }
}
| opensingular/singular-core | lib/wicket-utils/src/main/java/org/opensingular/lib/wicket/util/model/NullOrEmptyModel.java | Java | apache-2.0 | 1,873 |
package com.shivamb7.sachinapp;
import java.io.IOException;
import android.app.ProgressDialog;
import android.app.WallpaperManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Toast;
/**
 * Fragment showing a single image; its options menu offers a "set as wallpaper"
 * action, performed off the UI thread by {@link ImageTask6}.
 */
public class ImageFrag6 extends Fragment {

    // Read by ImageTask6 on a background thread to set the wallpaper.
    static Bitmap bmg1;
    static ImageView iv1;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        setHasOptionsMenu(true);
        View v = inflater.inflate(R.layout.imagefrag6, container, false);
        iv1 = (ImageView) v.findViewById(R.id.imageView1);
        // Capture the displayed bitmap so the background task can use it.
        bmg1 = ((BitmapDrawable) iv1.getDrawable()).getBitmap();
        return v;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.image1, menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_wall:
                ImageTask6 it1 = new ImageTask6(getActivity());
                it1.execute();
                // BUG FIX: the original returned false even when the item was
                // handled; the Android contract requires true to consume the event.
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
}
class ImageTask6 extends AsyncTask<Void, Void, Void>
{
Context c;
ProgressDialog pd;
public ImageTask6(Context ctx)
{
this.c=ctx;
}
@Override
protected void onPreExecute()
{
pd=ProgressDialog.show(c, "Please Wait", "Setting Wallpaper...");
}
protected void onPostExecute(Void result)
{
pd.dismiss();
Toast.makeText(c, "Wallpaper set successfully", Toast.LENGTH_SHORT).show();
}
protected Void doInBackground(Void... params) {
WallpaperManager wm1=WallpaperManager.getInstance(c);
try {
wm1.setBitmap(ImageFrag6.bmg1);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// TODO Auto-generated method stub
return null;
}
} | Shivam101/SachinApp-Android | src/com/shivamb7/sachinapp/ImageFrag6.java | Java | apache-2.0 | 2,597 |
package org.drools.kproject.memory;
import java.util.Collection;
import org.drools.core.util.StringUtils;
import org.drools.kproject.File;
import org.drools.kproject.Folder;
import org.drools.kproject.Path;
import org.drools.kproject.Resource;
/**
 * In-memory {@link Folder} implementation backed by a {@link MemoryFileSystem}.
 * A folder is identified purely by its '/'-separated path string; all content
 * queries delegate to the owning file system.
 */
public class MemoryFolder
    implements
    Folder {

    private MemoryFileSystem mfs;

    private String path;

    public MemoryFolder(MemoryFileSystem mfs,
                        String path) {
        this.mfs = mfs;
        this.path = path;
    }

    /** Returns the last path segment, or the whole path if it contains no '/'. */
    public String getName() {
        int lastSlash = path.lastIndexOf( '/' );
        if ( lastSlash >= 0 ) {
            return path.substring( lastSlash + 1 );
        } else {
            return path;
        }
    }

    public Path getPath() {
        return new MemoryPath( path );
    }

    /** Looks up a file directly under this folder (does not create it). */
    public File getFile(String name) {
        if ( !StringUtils.isEmpty( path ) ) {
            return mfs.getFile( path + "/" + name );
        } else {
            return mfs.getFile( name );
        }
    }

    /** Looks up a sub-folder directly under this folder (does not create it). */
    public Folder getFolder(String name) {
        if ( !StringUtils.isEmpty( path ) ) {
            return mfs.getFolder( path + "/" + name );
        } else {
            return mfs.getFolder( name );
        }
    }

    /**
     * Returns the parent folder, rebuilding its path from all segments but the
     * last (skipping empty segments). The root folder is its own parent.
     */
    public Folder getParent() {
        String[] elements = path.split( "/" );
        if ( elements.length == 0 ) {
            // we are at root
            return this;
        }

        String newPath = "";
        boolean first = true;
        for ( int i = 0; i < elements.length - 1; i++ ) {
            if ( !StringUtils.isEmpty( elements[i] ) ) {
                if ( !first ) {
                    newPath = newPath + "/"; // FIX: removed stray second semicolon
                }
                newPath = newPath + elements[i];
                first = false;
            }
        }

        if ( StringUtils.isEmpty( newPath ) ) {
            // we are at root
            newPath = "";
        }

        return new MemoryFolder( mfs,
                                 newPath );
    }

    public Collection<? extends Resource> getMembers() {
        return mfs.getMembers( this );
    }

    public boolean exists() {
        return mfs.existsFolder( path );
    }

    /** Creates this folder (and any missing ancestors); returns true always. */
    public boolean create() {
        if ( !exists() ) {
            createFolder( this );
        }
        return true;
    }

    // Recursively creates missing ancestors before registering this folder.
    private void createFolder(MemoryFolder folder) {
        if ( !folder.exists() ) {
            createFolder( (MemoryFolder) folder.getParent() );
            mfs.createFolder( folder );
        }
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((path == null) ? 0 : path.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj ) return true;
        if ( obj == null ) return false;
        if ( getClass() != obj.getClass() ) return false;
        MemoryFolder other = (MemoryFolder) obj;
        if ( path == null ) {
            if ( other.path != null ) return false;
        } else if ( !path.equals( other.path ) ) return false;
        return true;
    }

    @Override
    public String toString() {
        return "MemoryFolder [path=" + path + "]";
    }
}
| psiroky/drools | drools-compiler/src/test/java/org/drools/kproject/memory/MemoryFolder.java | Java | apache-2.0 | 3,280 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.filestructurefinder;
import com.ibm.icu.text.CharsetDetector;
import com.ibm.icu.text.CharsetMatch;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.TimeValue;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.Collectors;
/**
* Runs the high-level steps needed to create ingest configs for the specified file. In order:
* 1. Determine the most likely character set (UTF-8, UTF-16LE, ISO-8859-2, etc.)
* 2. Load a sample of the file, consisting of the first 1000 lines of the file
* 3. Determine the most likely file structure - one of ND-JSON, XML, delimited or semi-structured text
* 4. Create an appropriate structure object and delegate writing configs to it
*/
public final class FileStructureFinderManager {
public static final int MIN_SAMPLE_LINE_COUNT = 2;
public static final int DEFAULT_IDEAL_SAMPLE_LINE_COUNT = 1000;
static final Set<String> FILEBEAT_SUPPORTED_ENCODINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
"866", "ansi_x3.4-1968", "arabic", "ascii", "asmo-708", "big5", "big5-hkscs", "chinese", "cn-big5", "cp1250", "cp1251", "cp1252",
"cp1253", "cp1254", "cp1255", "cp1256", "cp1257", "cp1258", "cp819", "cp866", "csbig5", "cseuckr", "cseucpkdfmtjapanese",
"csgb2312", "csibm866", "csiso2022jp", "csiso2022kr", "csiso58gb231280", "csiso88596e", "csiso88596i", "csiso88598e", "csiso88598i",
"csisolatin1", "csisolatin2", "csisolatin3", "csisolatin4", "csisolatin5", "csisolatin6", "csisolatin9", "csisolatinarabic",
"csisolatincyrillic", "csisolatingreek", "csisolatinhebrew", "cskoi8r", "csksc56011987", "csmacintosh", "csshiftjis", "cyrillic",
"dos-874", "ecma-114", "ecma-118", "elot_928", "euc-jp", "euc-kr", "gb18030", "gb2312", "gb_2312", "gb_2312-80", "gbk", "greek",
"greek8", "hebrew", "hz-gb-2312", "ibm819", "ibm866", "iso-2022-cn", "iso-2022-cn-ext", "iso-2022-jp", "iso-2022-kr", "iso-8859-1",
"iso-8859-10", "iso-8859-11", "iso-8859-13", "iso-8859-14", "iso-8859-15", "iso-8859-16", "iso-8859-2", "iso-8859-3", "iso-8859-4",
"iso-8859-5", "iso-8859-6", "iso-8859-6-e", "iso-8859-6-i", "iso-8859-7", "iso-8859-8", "iso-8859-8-e", "iso-8859-8-i",
"iso-8859-9", "iso-ir-100", "iso-ir-101", "iso-ir-109", "iso-ir-110", "iso-ir-126", "iso-ir-127", "iso-ir-138", "iso-ir-144",
"iso-ir-148", "iso-ir-149", "iso-ir-157", "iso-ir-58", "iso8859-1", "iso8859-10", "iso8859-11", "iso8859-13", "iso8859-14",
"iso8859-15", "iso8859-2", "iso8859-3", "iso8859-4", "iso8859-5", "iso8859-6", "iso8859-6e", "iso8859-6i", "iso8859-7", "iso8859-8",
"iso8859-8e", "iso8859-8i", "iso8859-9", "iso88591", "iso885910", "iso885911", "iso885913", "iso885914", "iso885915", "iso88592",
"iso88593", "iso88594", "iso88595", "iso88596", "iso88597", "iso88598", "iso88599", "iso_8859-1", "iso_8859-15", "iso_8859-1:1987",
"iso_8859-2", "iso_8859-2:1987", "iso_8859-3", "iso_8859-3:1988", "iso_8859-4", "iso_8859-4:1988", "iso_8859-5", "iso_8859-5:1988",
"iso_8859-6", "iso_8859-6:1987", "iso_8859-7", "iso_8859-7:1987", "iso_8859-8", "iso_8859-8:1988", "iso_8859-9", "iso_8859-9:1989",
"koi", "koi8", "koi8-r", "koi8-ru", "koi8-u", "koi8_r", "korean", "ks_c_5601-1987", "ks_c_5601-1989", "ksc5601", "ksc_5601", "l1",
"l2", "l3", "l4", "l5", "l6", "l9", "latin1", "latin2", "latin3", "latin4", "latin5", "latin6", "logical", "mac", "macintosh",
"ms932", "ms_kanji", "shift-jis", "shift_jis", "sjis", "sun_eu_greek", "tis-620", "unicode-1-1-utf-8", "us-ascii", "utf-16",
"utf-16-bom", "utf-16be", "utf-16be-bom", "utf-16le", "utf-16le-bom", "utf-8", "utf8", "visual", "windows-1250", "windows-1251",
"windows-1252", "windows-1253", "windows-1254", "windows-1255", "windows-1256", "windows-1257", "windows-1258", "windows-31j",
"windows-874", "windows-949", "x-cp1250", "x-cp1251", "x-cp1252", "x-cp1253", "x-cp1254", "x-cp1255", "x-cp1256", "x-cp1257",
"x-cp1258", "x-euc-jp", "x-gbk", "x-mac-cyrillic", "x-mac-roman", "x-mac-ukrainian", "x-sjis", "x-x-big5"
)));
/**
* These need to be ordered so that the more generic formats come after the more specific ones
*/
private static final List<FileStructureFinderFactory> ORDERED_STRUCTURE_FACTORIES = Collections.unmodifiableList(Arrays.asList(
new JsonFileStructureFinderFactory(),
new XmlFileStructureFinderFactory(),
// ND-JSON will often also be valid (although utterly weird) CSV, so JSON must come before CSV
new DelimitedFileStructureFinderFactory(',', '"', 2, false),
new DelimitedFileStructureFinderFactory('\t', '"', 2, false),
new DelimitedFileStructureFinderFactory(';', '"', 4, false),
new DelimitedFileStructureFinderFactory('|', '"', 5, true),
new TextLogFileStructureFinderFactory()
));
    /** Buffer size (bytes) used for charset sniffing and mark/reset on the input stream. */
    private static final int BUFFER_SIZE = 8192;

    // Used solely to enforce analysis timeouts via TimeoutChecker.
    private final ScheduledExecutorService scheduler;

    /**
     * Create the file structure manager.
     * @param scheduler Used for checking timeouts. Must not be null.
     */
    public FileStructureFinderManager(ScheduledExecutorService scheduler) {
        this.scheduler = Objects.requireNonNull(scheduler);
    }
    /**
     * Convenience overload: analyzes with no structure overrides and no timeout.
     *
     * @param idealSampleLineCount ideal number of sample lines, or null for the default
     * @param fromFile             stream to sample
     */
    public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile) throws Exception {
        return findFileStructure(idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null);
    }
    /**
     * Given a stream of data from some file, determine its structure.
     * @param idealSampleLineCount Ideally, how many lines from the stream will be read to determine the structure?
     *                             If the stream has fewer lines then an attempt will still be made, providing at
     *                             least {@link #MIN_SAMPLE_LINE_COUNT} lines can be read.  If <code>null</code>
     *                             the value of {@link #DEFAULT_IDEAL_SAMPLE_LINE_COUNT} will be used.
     * @param fromFile A stream from which the sample will be read.
     * @param overrides Aspects of the file structure that are known in advance.  These take precedence over
     *                  values determined by structure analysis.  An exception will be thrown if the file structure
     *                  is incompatible with an overridden value.
     * @param timeout The maximum time the analysis is permitted to take.  If it takes longer than this an
     *                {@link ElasticsearchTimeoutException} may be thrown (although not necessarily immediately
     *                the timeout is exceeded).
     * @return A {@link FileStructureFinder} object from which the structure and messages can be queried.
     * @throws Exception A variety of problems could occur at various stages of the structure finding process.
     */
    public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile, FileStructureOverrides overrides,
                                                 TimeValue timeout)
        throws Exception {
        // Substitute the default sample size here so downstream overloads always get a concrete value.
        return findFileStructure(new ArrayList<>(), (idealSampleLineCount == null) ? DEFAULT_IDEAL_SAMPLE_LINE_COUNT : idealSampleLineCount,
            fromFile, overrides, timeout);
    }
    /**
     * Convenience overload taking a caller-supplied explanation list, with no overrides and no timeout.
     *
     * @param explanation accumulates human-readable reasoning about the analysis decisions
     */
    public FileStructureFinder findFileStructure(List<String> explanation, int idealSampleLineCount, InputStream fromFile)
        throws Exception {
        return findFileStructure(explanation, idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null);
    }
    /**
     * Main implementation: optionally honors a charset override, otherwise detects the
     * character set; samples the stream; then picks the best-matching structure finder.
     * The whole process runs under a {@link TimeoutChecker} scoped to this call.
     *
     * @param explanation accumulates human-readable reasoning about the analysis decisions
     */
    public FileStructureFinder findFileStructure(List<String> explanation, int idealSampleLineCount, InputStream fromFile,
                                                 FileStructureOverrides overrides, TimeValue timeout) throws Exception {

        try (TimeoutChecker timeoutChecker = new TimeoutChecker("structure analysis", timeout, scheduler)) {

            String charsetName = overrides.getCharset();
            Reader sampleReader;
            if (charsetName != null) {
                // Creating the reader will throw if the specified character set does not exist
                sampleReader = new InputStreamReader(fromFile, charsetName);
                explanation.add("Using specified character encoding [" + charsetName + "]");
            } else {
                // No override: sniff the charset from the stream content.
                CharsetMatch charsetMatch = findCharset(explanation, fromFile, timeoutChecker);
                charsetName = charsetMatch.getName();
                sampleReader = charsetMatch.getReader();
            }

            // v1 = the sample text; v2 = whether a byte order marker was present.
            Tuple<String, Boolean> sampleInfo = sampleFile(sampleReader, charsetName, MIN_SAMPLE_LINE_COUNT,
                Math.max(MIN_SAMPLE_LINE_COUNT, idealSampleLineCount), timeoutChecker);

            return makeBestStructureFinder(explanation, sampleInfo.v1(), charsetName, sampleInfo.v2(), overrides, timeoutChecker);
        }
    }
    /**
     * Detects the most likely character set of the input using ICU4J, then filters the
     * candidates by: (a) preferring UTF-8 for pure-ASCII input, (b) requiring support in
     * both the JVM and Filebeat, and (c) rejecting encodings inconsistent with the
     * presence of zero bytes (a strong binary-data signal).
     *
     * @param explanation accumulates human-readable reasoning about the choice
     * @throws IllegalArgumentException if no usable character encoding can be determined
     */
    CharsetMatch findCharset(List<String> explanation, InputStream inputStream, TimeoutChecker timeoutChecker) throws Exception {

        // We need an input stream that supports mark and reset, so wrap the argument
        // in a BufferedInputStream if it doesn't already support this feature
        if (inputStream.markSupported() == false) {
            inputStream = new BufferedInputStream(inputStream, BUFFER_SIZE);
        }

        // This is from ICU4J
        CharsetDetector charsetDetector = new CharsetDetector().setText(inputStream);
        CharsetMatch[] charsetMatches = charsetDetector.detectAll();
        timeoutChecker.check("character set detection");

        // Determine some extra characteristics of the input to compensate for some deficiencies of ICU4J
        boolean pureAscii = true;
        boolean containsZeroBytes = false;
        inputStream.mark(BUFFER_SIZE);
        byte[] workspace = new byte[BUFFER_SIZE];
        int remainingLength = BUFFER_SIZE;
        do {
            int bytesRead = inputStream.read(workspace, 0, remainingLength);
            if (bytesRead <= 0) {
                break;
            }
            for (int i = 0; i < bytesRead && containsZeroBytes == false; ++i) {
                if (workspace[i] == 0) {
                    containsZeroBytes = true;
                    pureAscii = false;
                } else {
                    // Bytes 1..127 are ASCII; anything negative (high bit set) is not.
                    pureAscii = pureAscii && workspace[i] > 0 && workspace[i] < 128;
                }
            }
            remainingLength -= bytesRead;
        } while (containsZeroBytes == false && remainingLength > 0);
        // Rewind so the caller can re-read the sampled bytes.
        inputStream.reset();
        timeoutChecker.check("character set detection");

        if (pureAscii) {
            // If the input is pure ASCII then many single byte character sets will match.  We want to favour
            // UTF-8 in this case, as it avoids putting a bold declaration of a dubious character set choice
            // in the config files.
            Optional<CharsetMatch> utf8CharsetMatch = Arrays.stream(charsetMatches)
                .filter(charsetMatch -> StandardCharsets.UTF_8.name().equals(charsetMatch.getName())).findFirst();
            if (utf8CharsetMatch.isPresent()) {
                explanation.add("Using character encoding [" + StandardCharsets.UTF_8.name() +
                    "], which matched the input with [" + utf8CharsetMatch.get().getConfidence() + "%] confidence - first [" +
                    (BUFFER_SIZE / 1024) + "kB] of input was pure ASCII");
                return utf8CharsetMatch.get();
            }
        }

        // Input wasn't pure ASCII, so use the best matching character set that's supported by both Java and Go.
        // Additionally, if the input contains zero bytes then avoid single byte character sets, as ICU4J will
        // suggest these for binary files, but then the subsequent structure analysis would effectively run on
        // binary data (the zero-byte consistency check below guards against this).
        for (CharsetMatch charsetMatch : charsetMatches) {
            String name = charsetMatch.getName();
            if (Charset.isSupported(name) && FILEBEAT_SUPPORTED_ENCODINGS.contains(name.toLowerCase(Locale.ROOT))) {

                // This extra test is to avoid trying to read binary files as text.  Running the structure
                // finding algorithms on binary files is very slow as the binary files generally appear to
                // have very long lines.
                boolean spaceEncodingContainsZeroByte = false;
                Charset charset = Charset.forName(name);
                // Some character sets cannot be encoded.  These are extremely rare so it's likely that
                // they've been chosen based on incorrectly provided binary data.  Therefore, err on
                // the side of rejecting binary data.
                if (charset.canEncode()) {
                    // If a space encodes with a zero byte (e.g. UTF-16), zero bytes in the input are expected.
                    byte[] spaceBytes = " ".getBytes(charset);
                    for (int i = 0; i < spaceBytes.length && spaceEncodingContainsZeroByte == false; ++i) {
                        spaceEncodingContainsZeroByte = (spaceBytes[i] == 0);
                    }
                }
                if (containsZeroBytes && spaceEncodingContainsZeroByte == false) {
                    explanation.add("Character encoding [" + name + "] matched the input with [" + charsetMatch.getConfidence() +
                        "%] confidence but was rejected as the input contains zero bytes and the [" + name + "] encoding does not");
                } else {
                    explanation.add("Using character encoding [" + name + "], which matched the input with [" +
                        charsetMatch.getConfidence() + "%] confidence");
                    return charsetMatch;
                }
            } else {
                explanation.add("Character encoding [" + name + "] matched the input with [" + charsetMatch.getConfidence() +
                    "%] confidence but was rejected as it is not supported by [" +
                    (Charset.isSupported(name) ? "Filebeat" : "the JVM") + "]");
            }
        }

        throw new IllegalArgumentException("Could not determine a usable character encoding for the input" +
            (containsZeroBytes ? " - could it be binary data?" : ""));
    }
/**
 * Picks the first structure-finder factory whose format matches the sample and uses it
 * to build a {@link FileStructureFinder}.  User-supplied overrides narrow (or fully
 * determine) the set of candidate factories before the sample is tested.
 *
 * @param explanation        accumulates human-readable notes about the decisions taken
 * @param sample             the text sample to analyse
 * @param charsetName        name of the character set the sample was decoded with
 * @param hasByteOrderMarker whether the input began with a byte order marker (may be null if unknown)
 * @param overrides          user-specified hints such as delimiter, quote or format
 * @param timeoutChecker     aborts the operation if it runs for too long
 * @return a structure finder created from the first factory that accepts the sample
 * @throws IllegalArgumentException if no candidate factory matches the sample
 */
FileStructureFinder makeBestStructureFinder(List<String> explanation, String sample, String charsetName, Boolean hasByteOrderMarker,
                                            FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws Exception {

    Character delimiter = overrides.getDelimiter();
    Character quote = overrides.getQuote();
    Boolean shouldTrimFields = overrides.getShouldTrimFields();

    List<FileStructureFinderFactory> candidateFactories;
    if (delimiter != null) {
        // An explicit delimiter pins us to a single delimited-format factory, and we
        // will tolerate as little as one column in the input.
        char quoteChar = (quote != null) ? quote : '"';
        boolean trimFields = (shouldTrimFields != null) ? shouldTrimFields : (delimiter.charValue() == '|');
        candidateFactories =
            Collections.singletonList(new DelimitedFileStructureFinderFactory(delimiter, quoteChar, 1, trimFields));
    } else if (quote != null || shouldTrimFields != null) {
        // No delimiter override, but some other delimited-file setting was supplied, so
        // derive variants of the default delimited factories honouring those settings.
        candidateFactories = ORDERED_STRUCTURE_FACTORIES.stream()
            .filter(DelimitedFileStructureFinderFactory.class::isInstance)
            .map(DelimitedFileStructureFinderFactory.class::cast)
            .map(delimitedFactory -> delimitedFactory.makeSimilar(quote, shouldTrimFields))
            .collect(Collectors.toList());
    } else {
        // No delimited-specific overrides at all: consider every default factory,
        // narrowed to the explicitly requested format if one was given.
        candidateFactories = ORDERED_STRUCTURE_FACTORIES.stream()
            .filter(candidate -> candidate.canFindFormat(overrides.getFormat()))
            .collect(Collectors.toList());
    }

    for (FileStructureFinderFactory candidate : candidateFactories) {
        timeoutChecker.check("high level format detection");
        if (candidate.canCreateFromSample(explanation, sample)) {
            return candidate.createFromSample(explanation, sample, charsetName, hasByteOrderMarker, overrides, timeoutChecker);
        }
    }

    throw new IllegalArgumentException("Input did not match " +
        ((overrides.getFormat() == null) ? "any known formats" : "the specified format [" + overrides.getFormat() + "]"));
}
/**
 * Reads up to {@code maxLines} lines from the supplied reader and concatenates them
 * (newline-terminated) into a single sample string.
 *
 * @param reader         source of the text to sample; assumed to be positioned at the start of the input
 * @param charsetName    name of the character set the reader decodes; used only to decide
 *                       whether to look for a byte order marker
 * @param minLines       minimum number of lines required for a meaningful sample
 * @param maxLines       maximum number of lines to include in the sample
 * @param timeoutChecker aborts the operation if it runs for too long
 * @return a tuple of (sample text, hasByteOrderMarker); the Boolean is {@code null} when
 *         the charset is not a UTF variant, so no BOM check was performed
 * @throws IOException              if reading fails
 * @throws IllegalArgumentException if fewer than {@code minLines} lines were available
 */
private Tuple<String, Boolean> sampleFile(Reader reader, String charsetName, int minLines, int maxLines, TimeoutChecker timeoutChecker)
    throws IOException {

    int lineCount = 0;
    BufferedReader bufferedReader = new BufferedReader(reader);
    StringBuilder sample = new StringBuilder();

    // Don't include any byte-order-marker in the sample. (The logic to skip it works for both
    // UTF-8 and UTF-16 assuming the character set of the reader was correctly detected.)
    Boolean hasByteOrderMarker = null;
    if (charsetName.toUpperCase(Locale.ROOT).startsWith("UTF")) {
        // NOTE: this reads from the underlying reader rather than bufferedReader.  That is
        // only safe because the BufferedReader created above has not buffered anything yet;
        // all subsequent reads go through bufferedReader.
        int maybeByteOrderMarker = reader.read();
        hasByteOrderMarker = ((char) maybeByteOrderMarker == '\uFEFF');
        // Keep the first character unless it was a BOM, EOF (read() == -1), or '\r'
        // (presumably dropped to avoid splitting a leading CRLF pair -- TODO confirm).
        if (maybeByteOrderMarker >= 0 && hasByteOrderMarker == false && (char) maybeByteOrderMarker != '\r')
        {
            sample.appendCodePoint(maybeByteOrderMarker);
            if ((char) maybeByteOrderMarker == '\n') {
                ++lineCount;
            }
        }
    }

    // Accumulate whole lines until EOF or the maxLines cap; the cap check increments
    // lineCount, so a line read beyond the cap is discarded rather than appended.
    String line;
    while ((line = bufferedReader.readLine()) != null && ++lineCount <= maxLines) {
        sample.append(line).append('\n');
        timeoutChecker.check("sample line splitting");
    }

    if (lineCount < minLines) {
        throw new IllegalArgumentException("Input contained too few lines [" + lineCount + "] to obtain a meaningful sample");
    }

    return new Tuple<>(sample.toString(), hasByteOrderMarker);
}
}
| gfyoung/elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java | Java | apache-2.0 | 18,837 |
package ca.firstvoices.simpleapi.security;
import ca.firstvoices.simpleapi.nuxeo.SimpleAPINuxeoConfiguration;
import com.auth0.jwk.JwkException;
import com.auth0.jwk.JwkProvider;
import com.auth0.jwk.JwkProviderBuilder;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.JwsHeader;
import io.jsonwebtoken.SigningKeyResolver;
import java.security.Key;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.inject.Singleton;
import org.nuxeo.runtime.api.Framework;
@Singleton
public class JWKSKeyResolver implements SigningKeyResolver {

  private static final Logger log = Logger.getLogger(JWKSKeyResolver.class.getCanonicalName());

  // JWKS provider built from the configured URL.  Left empty when configuration fails,
  // in which case every key lookup returns null and token verification will fail.
  private Optional<JwkProvider> keyStore = Optional.empty();

  /**
   * Builds the JWKS provider from the URL supplied by {@link SimpleAPINuxeoConfiguration}.
   * A configuration failure is logged (with stack trace) rather than propagated, so the
   * resolver can still be constructed; it will simply resolve no keys.
   */
  public JWKSKeyResolver() {
    SimpleAPINuxeoConfiguration config = Framework.getService(SimpleAPINuxeoConfiguration.class);
    try {
      keyStore = Optional.of(new JwkProviderBuilder(config.getJwksUrl()).build());
      log.info("JWT keystore created using jwks url: " + config.getJwksUrl());
    } catch (Exception e) {
      // Log the full throwable so the stack trace is preserved (string concatenation
      // with the exception, as before, dropped it).
      log.log(Level.SEVERE, "JWT Keystore configuration failed. Token verification will fail.", e);
    }
  }

  /**
   * Looks up the public key for the given JWKS key id.
   *
   * @param keyId the "kid" value from a JWS header
   * @return the matching public key, or {@code null} if the keystore is unavailable
   *         or holds no key with that id
   */
  private Key keyForKeyID(String keyId) {
    return keyStore.map(ks -> {
      try {
        return ks.get(keyId).getPublicKey();
      } catch (JwkException e) {
        log.warning("No key could be returned\n" + e.toString());
        return null;
      }
    }).orElse(null);
  }

  @Override
  public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) {
    return keyForKeyID(jwsHeader.getKeyId());
  }

  @Override
  public Key resolveSigningKey(JwsHeader jwsHeader, String claims) {
    return keyForKeyID(jwsHeader.getKeyId());
  }
}
| First-Peoples-Cultural-Council/fv-web-ui | modules/api/firstvoices-rest-api-simple/src/main/java/ca/firstvoices/simpleapi/security/JWKSKeyResolver.java | Java | apache-2.0 | 1,699 |
/*
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.security.oauth2.sso;
import static org.hamcrest.Matchers.startsWith;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import javax.servlet.Filter;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.security.oauth2.OAuth2AutoConfiguration;
import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso;
import org.springframework.boot.autoconfigure.security.oauth2.sso.CustomOAuth2SsoConfigurationTests.TestConfiguration;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.WebApplicationContext;
/**
 * Integration tests verifying that a user-supplied {@link WebSecurityConfigurerAdapter}
 * can customize security for selected paths while {@link EnableOAuth2Sso} protects the
 * rest of the application: "/" falls back to basic auth, "/ui/" redirects to the SSO
 * login, and "/ui/test" is explicitly permitted to all.
 *
 * @author Dave Syer
 *
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = TestConfiguration.class)
@WebAppConfiguration
@TestPropertySource(properties = { "spring.oauth2.client.clientId=client",
		"spring.oauth2.client.clientSecret=secret",
		"spring.oauth2.client.authorizationUri=http://example.com/oauth/authorize",
		"spring.oauth2.client.tokenUri=http://example.com/oauth/token",
		"spring.oauth2.resource.jwt.keyValue=SSSSHHH" })
public class CustomOAuth2SsoConfigurationTests {

	@Autowired
	private WebApplicationContext context;

	// The assembled Spring Security filter chain; applied to MockMvc so requests
	// are actually filtered by the security configuration under test.
	@Autowired
	@Qualifier("springSecurityFilterChain")
	private Filter filter;

	private MockMvc mvc;

	@Before
	public void init() {
		mvc = MockMvcBuilders.webAppContextSetup(context).addFilters(filter).build();
	}

	// Paths outside the custom "/ui/**" matcher are protected by basic auth.
	@Test
	public void homePageIsBasicAuth() throws Exception {
		mvc.perform(get("/")).andExpect(status().isUnauthorized())
				.andExpect(header().string("WWW-Authenticate", startsWith("Basic")));
	}

	// Paths under "/ui/**" (other than the permitted one) redirect to the SSO login page.
	@Test
	public void uiPageIsSecure() throws Exception {
		mvc.perform(get("/ui/")).andExpect(status().isFound())
				.andExpect(header().string("location", "http://localhost/login"));
	}

	// "/ui/test" is permitAll() in the custom configuration and must be reachable anonymously.
	@Test
	public void uiTestPageIsAccessible() throws Exception {
		mvc.perform(get("/ui/test")).andExpect(status().isOk())
				.andExpect(content().string("test"));
	}

	/**
	 * Test application: OAuth2 auto-configuration plus a custom adapter securing
	 * "/ui/**" with one permitted endpoint, backed by a trivial controller.
	 */
	@Configuration
	@EnableOAuth2Sso
	@Import(OAuth2AutoConfiguration.class)
	@MinimalSecureWebConfiguration
	protected static class TestConfiguration extends WebSecurityConfigurerAdapter {

		@Override
		public void configure(HttpSecurity http) throws Exception {
			http.antMatcher("/ui/**").authorizeRequests().antMatchers("/ui/test")
					.permitAll().anyRequest().authenticated();
		}

		@RestController
		public static class TestController {

			@RequestMapping(value = "/ui/test")
			public String test() {
				return "test";
			}

		}

	}

}
| tan9/spring-boot | spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/security/oauth2/sso/CustomOAuth2SsoConfigurationTests.java | Java | apache-2.0 | 4,390 |
package com.shop.data.entity;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
/**
 * JPA entity holding a named composition record.
 *
 * NOTE(review): the class is named {@code CompositionEntity} but is mapped to the
 * "equipment" table -- confirm the mapping is intentional.
 */
@Entity
@Table(name = "equipment")
public class CompositionEntity {

    // Surrogate primary key, generated by the persistence provider.
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    // Display name; bean validation rejects null values.
    @NotNull
    private String name;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
| TorosyanV/shop | src/main/java/com/shop/data/entity/CompositionEntity.java | Java | apache-2.0 | 585 |
/**
* Created by stb on 1/18/2017.
*/
package com.stb.www.proxifier.client.initializer.tcp; | thebagchi/heimdall-proxy | src/main/java/com/stb/www/proxifier/client/initializer/tcp/package-info.java | Java | apache-2.0 | 93 |
package org.jboss.hal.testsuite.page.configuration;
import org.jboss.hal.meta.token.NameTokens;
import org.jboss.hal.testsuite.fragment.FormFragment;
import org.jboss.hal.testsuite.fragment.TableFragment;
import org.jboss.hal.testsuite.fragment.TabsFragment;
import org.jboss.hal.testsuite.page.BasePage;
import org.jboss.hal.testsuite.page.Place;
import org.openqa.selenium.support.FindBy;
/**
 * Page object for the "Messaging - Remote ActiveMQ" configuration page.  Exposes the
 * tables and forms for connectors, discovery groups, connection factories and external
 * JMS destinations; getters for the pooled connection factory forms first select the
 * appropriate tab so the returned form is visible.
 */
@Place(NameTokens.MESSAGING_REMOTE_ACTIVEMQ)
public class MessagingRemoteActiveMQPage extends BasePage {

    @FindBy(id = "msg-remote-connector-table_wrapper")
    private TableFragment genericConnectorTable;

    @FindBy(id = "msg-remote-connector-form")
    private FormFragment genericConnectorForm;

    @FindBy(id = "msg-remote-in-vm-connector-table_wrapper")
    private TableFragment inVMConnectorTable;

    @FindBy(id = "msg-remote-in-vm-connector-form")
    private FormFragment inVMConnectorForm;

    @FindBy(id = "msg-remote-http-connector-table_wrapper")
    private TableFragment httpConnectorTable;

    @FindBy(id = "msg-remote-http-connector-form")
    private FormFragment httpConnectorForm;

    @FindBy(id = "msg-remote-remote-connector-table_wrapper")
    private TableFragment remoteConnectorTable;

    @FindBy(id = "msg-remote-remote-connector-form")
    private FormFragment remoteConnectorForm;

    @FindBy(id = "msg-remote-discovery-group-table_wrapper")
    private TableFragment discoveryGroupTable;

    @FindBy(id = "msg-remote-discovery-group-form")
    private FormFragment discoveryGroupForm;

    @FindBy(id = "msg-remote-connection-factory-table_wrapper")
    private TableFragment connectionFactoryTable;

    @FindBy(id = "msg-remote-connection-factory-form")
    private FormFragment connectionFactoryForm;

    @FindBy(id = "msg-remote-activemq-pooled-connection-factory-table_wrapper")
    private TableFragment pooledConnectionFactoryTable;

    @FindBy(id = "msg-remote-activemq-pooled-connection-factory-tab-container")
    private TabsFragment pooledConnectionFactoryTab;

    @FindBy(id = "messaging-pooled-connection-factory-form")
    private FormFragment pooledConnectionFactoryForm;

    @FindBy(id = "msg-remote-activemq-credential-reference-form")
    private FormFragment pooledConnectionFactoryCredentialReferenceForm;

    @FindBy(id = "msg-remote-external-queue-table_wrapper")
    private TableFragment externalJMSQueueTable;

    @FindBy(id = "msg-remote-external-queue-form")
    private FormFragment externalJMSQueueForm;

    @FindBy(id = "msg-remote-external-topic-table_wrapper")
    private TableFragment externalJMSTopicTable;

    @FindBy(id = "msg-remote-external-topic-form")
    private FormFragment externalJMSTopicForm;

    public TableFragment getGenericConnectorTable() {
        return genericConnectorTable;
    }

    public FormFragment getGenericConnectorForm() {
        return genericConnectorForm;
    }

    public TableFragment getInVMConnectorTable() {
        return inVMConnectorTable;
    }

    public FormFragment getInVMConnectorForm() {
        return inVMConnectorForm;
    }

    public TableFragment getHttpConnectorTable() {
        return httpConnectorTable;
    }

    public FormFragment getHttpConnectorForm() {
        return httpConnectorForm;
    }

    public TableFragment getRemoteConnectorTable() {
        return remoteConnectorTable;
    }

    public FormFragment getRemoteConnectorForm() {
        return remoteConnectorForm;
    }

    public TableFragment getDiscoveryGroupTable() {
        return discoveryGroupTable;
    }

    public FormFragment getDiscoveryGroupForm() {
        return discoveryGroupForm;
    }

    /**
     * @deprecated misspelled accessor kept for backward compatibility;
     *             use {@link #getDiscoveryGroupForm()} instead.
     */
    @Deprecated
    public FormFragment getDicoveryGroupForm() {
        return getDiscoveryGroupForm();
    }

    public TableFragment getConnectionFactoryTable() {
        return connectionFactoryTable;
    }

    public FormFragment getConnectionFactoryForm() {
        return connectionFactoryForm;
    }

    public TableFragment getPooledConnectionFactoryTable() {
        return pooledConnectionFactoryTable;
    }

    public TabsFragment getPooledConnectionFactoryTab() {
        return pooledConnectionFactoryTab;
    }

    // Selects the attributes tab first so the form is actually displayed.
    public FormFragment getPooledConnectionFactoryForm() {
        getPooledConnectionFactoryTab().select("msg-remote-activemq-pooled-connection-factory-attributes-tab");
        return pooledConnectionFactoryForm;
    }

    // Selects the credential-reference tab first so the form is actually displayed.
    public FormFragment getPooledConnectionFactoryCredentialReferenceForm() {
        getPooledConnectionFactoryTab().select("msg-remote-activemq-pooled-connection-factory-credential-reference-tab");
        return pooledConnectionFactoryCredentialReferenceForm;
    }

    public TableFragment getExternalJMSQueueTable() {
        return externalJMSQueueTable;
    }

    public FormFragment getExternalJMSQueueForm() {
        return externalJMSQueueForm;
    }

    public TableFragment getExternalJMSTopicTable() {
        return externalJMSTopicTable;
    }

    public FormFragment getExternalJMSTopicForm() {
        return externalJMSTopicForm;
    }
}
| hpehl/testsuite.next | common/src/main/java/org/jboss/hal/testsuite/page/configuration/MessagingRemoteActiveMQPage.java | Java | apache-2.0 | 4,929 |
package org.mediameter.cliff.test;
import com.bericotech.clavin.extractor.LocationOccurrence;
import edu.mit.ll.mitie.NamedEntityExtractor;
import org.kohsuke.MetaInfServices;
import org.mediameter.cliff.CliffConfig;
import org.mediameter.cliff.extractor.*;
import org.mediameter.cliff.places.substitutions.Blacklist;
import org.mediameter.cliff.places.substitutions.CustomSubstitutionMap;
import org.mediameter.cliff.places.substitutions.WikipediaDemonymMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.mit.ll.mitie.*;
import java.io.IOException;
import java.util.Map;
/**
 * {@link EntityExtractor} implementation backed by the MIT-LL MITIE named-entity
 * recognizer.  Raw NER results are post-processed with demonym, custom-substitution
 * and blacklist tables before being returned.
 */
@MetaInfServices(EntityExtractor.class)
public class MitieEntityExtractor implements EntityExtractor {

    public final static Logger logger = LoggerFactory.getLogger(MitieEntityExtractor.class);

    @Override
    public String getName() {
        return "MITIE";
    }

    public static final String CUSTOM_SUBSTITUTION_FILE = "custom-substitutions.csv";
    public static final String LOCATION_BLACKLIST_FILE = "location-blacklist.txt";
    public static final String PERSON_TO_PLACE_FILE = "person-to-place-replacements.csv";

    // the actual named entity recognizer (NER) object
    private NamedEntityExtractor namedEntityRecognizer;

    // lookup tables used to clean up the raw NER output
    private WikipediaDemonymMap demonyms;
    private CustomSubstitutionMap customSubstitutions;
    private CustomSubstitutionMap personToPlaceSubstitutions;
    private Blacklist locationBlacklist;

    /**
     * Loads the MITIE model named by the {@code mer.mitiePath} configuration property and
     * the substitution/blacklist tables.  Must be called before any extraction method.
     *
     * @param config source of the {@code mer.mitiePath} property
     * @throws IOException if a substitution or blacklist resource cannot be read
     */
    public void initialize(CliffConfig config) throws ClassCastException, IOException, ClassNotFoundException{
        String model = config.getPropertyByName("mer.mitiePath");
        if(model == null){
            // NOTE(review): we log but still pass the null path to NamedEntityExtractor
            // below, which will most likely fail -- consider failing fast here instead.
            logger.error("No MITIE model configured at {}", model);
        }
        namedEntityRecognizer = new NamedEntityExtractor(model);
        demonyms = new WikipediaDemonymMap();
        customSubstitutions = new CustomSubstitutionMap(CUSTOM_SUBSTITUTION_FILE);
        locationBlacklist = new Blacklist(LOCATION_BLACKLIST_FILE);
        personToPlaceSubstitutions = new CustomSubstitutionMap(PERSON_TO_PLACE_FILE,false);
    }

    /**
     * Get extracted entities from a plain-text body.
     *
     * @param textToParse Text content to perform extraction on.
     * @param manuallyReplaceDemonyms Can slow down performance quite a bit
     * @return All the entities mentioned
     */
    public ExtractedEntities extractEntities(String textToParse, boolean manuallyReplaceDemonyms) {
        ExtractedEntities entities = new ExtractedEntities();

        if (textToParse==null || textToParse.length()==0){
            logger.warn("input to extractEntities was null or zero!");
            return entities;
        }

        String text = textToParse;
        if(manuallyReplaceDemonyms){    // this is a noticeable performance hit
            logger.debug("Replacing all demonyms by hand");
            text = demonyms.replaceAll(textToParse);
        }

        // tokenizeWithOffsets is invoked statically elsewhere in this class, so call it
        // statically here too instead of instantiating a throwaway `global` object.
        TokenIndexVector words = global.tokenizeWithOffsets(text);
        StringVector possibleTags = namedEntityRecognizer.getPossibleNerTags();

        EntityMentionVector extractedEntities = namedEntityRecognizer.extractEntities(words);
        if (extractedEntities != null) {
            assignExtractedEntities(entities, words, possibleTags, extractedEntities);
        }

        return entities;
    }

    // Lots of entities had these characters for some reason; remove them so names are valid.
    private String cleanName(String name){
        return name.replace("\\n", " ").replace("“", "").replace("”", "");
    }

    /**
     * Routes each raw MITIE mention into the appropriate bucket of {@code entities},
     * applying person-to-place substitutions and the location blacklist on the way.
     */
    private void assignExtractedEntities(ExtractedEntities entities, TokenIndexVector words, StringVector possibleTags,
                                         EntityMentionVector extractedEntities) {
        for (int i=0; i < extractedEntities.size(); i++){
            EntityMention extractedEntity = extractedEntities.get(i);
            String entityName = cleanName(getEntityString(words, extractedEntity));
            String tag = possibleTags.get(extractedEntity.getTag());
            double score = extractedEntity.getScore();
            int position = extractedEntity.getStart();

            switch(tag){
                case "PERSON":
                    // Some "people" (e.g. place names mis-tagged as persons) are remapped to places.
                    if(personToPlaceSubstitutions.contains(entityName)){
                        entities.addLocation( getLocationOccurrence(personToPlaceSubstitutions.getSubstitution(entityName), position) );
                        logger.debug("Changed person "+entityName+" to a place");
                    } else {
                        PersonOccurrence person = new PersonOccurrence(entityName, position, score);
                        entities.addPerson( person );
                    }
                    break;
                case "LOCATION":
                    if(!locationBlacklist.contains(entityName)){
                        entities.addLocation( getLocationOccurrence(entityName, position) );
                    } else {
                        logger.debug("Ignored blacklisted location "+entityName);
                    }
                    break;
                case "ORGANIZATION":
                    OrganizationOccurrence organization = new OrganizationOccurrence(entityName, position, score);
                    entities.addOrganization( organization );
                    break;
                default:
                    logger.debug("Unknown NER type :"+ tag);
            }
        }
    }

    /**
     * Get extracted entities from pre-split sentences.
     *
     * @param sentences sentence maps; each must contain "story_sentences_id" and "sentence" keys
     * @param manuallyReplaceDemonyms Can slow down performance quite a bit
     * @return All the entities mentioned
     */
    @Override
    public ExtractedEntities extractEntitiesFromSentences(Map[] sentences, boolean manuallyReplaceDemonyms) {
        ExtractedEntities entities = new ExtractedEntities();

        if (sentences.length==0){
            logger.warn("input to extractEntities was null or zero!");
            return entities;
        }

        if(manuallyReplaceDemonyms){    // this is a noticeable performance hit
            logger.debug("Replacing all demonyms by hand");
        }

        StringVector possibleTags = namedEntityRecognizer.getPossibleNerTags();
        for(Map s:sentences){
            String storySentencesId = s.get("story_sentences_id").toString();
            String text = s.get("sentence").toString();
            if(manuallyReplaceDemonyms){    // this is a noticeable performance hit
                text = demonyms.replaceAll(text);
            }
            TokenIndexVector words = global.tokenizeWithOffsets(text);
            EntityMentionVector extractedEntities = namedEntityRecognizer.extractEntities(words);
            if (extractedEntities != null) {
                assignExtractedEntities(entities, words, possibleTags, extractedEntities);
            }
        }

        return entities;
    }

    // Reassembles the token span [start, end) of a mention into a space-separated string.
    private String getEntityString(TokenIndexVector words, EntityMention ent){
        StringBuilder builder = new StringBuilder();
        for(int i = ent.getStart(); i < ent.getEnd(); i++){
            builder.append(words.get(i).getToken());
            if(i + 1 < ent.getEnd()){
                builder.append(" ");
            }
        }
        return builder.toString();
    }

    // Applies demonym and custom substitutions before wrapping the name in a LocationOccurrence.
    private LocationOccurrence getLocationOccurrence(String entityName, int position){
        String fixedName = entityName;
        if (demonyms.contains(entityName)) {
            fixedName = demonyms.getSubstitution(entityName);
            logger.debug("Demonym substitution: "+entityName+" to "+fixedName);
        } else if(customSubstitutions.contains(entityName)) {
            fixedName = customSubstitutions.getSubstitution(entityName);
            logger.debug("Custom substitution: "+entityName+" to "+fixedName);
        }
        return new LocationOccurrence(fixedName, position);
    }

}
| OpenWhere/CLIFF | mitie/src/main/java/org/mediameter/cliff/test/MitieEntityExtractor.java | Java | apache-2.0 | 8,155 |
/***************************************************************************
* Copyright 2021 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.tools.trace.analysis.repository;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import kieker.analysis.IProjectContext;
import kieker.analysis.plugin.annotation.Property;
import kieker.analysis.repository.AbstractRepository;
import kieker.analysis.repository.annotation.Repository;
import kieker.common.configuration.Configuration;
import kieker.tools.trace.analysis.filter.visualization.graph.Color;
/**
* Implementation of a trace color repository, which associates colors to traces. These colors can, for instance,
* be used to highlight traces in graph renderings.
*
* @author Holger Knoche
*
* @since 1.6
*/
@Repository(name = "Trace color repository",
		description = "Provides color information for trace coloring",
		configuration = {
			@Property(name = TraceColorRepository.CONFIG_PROPERTY_NAME_TRACE_COLOR_FILE_NAME, defaultValue = "")
		})
public class TraceColorRepository extends AbstractRepository {

	/**
	 * Name of the configuration property that contains the file name of the trace color file.
	 */
	public static final String CONFIG_PROPERTY_NAME_TRACE_COLOR_FILE_NAME = "traceColorFileName";

	// Keywords recognized on the left-hand side of a color-file line instead of a trace id.
	private static final String DEFAULT_KEYWORD = "default";
	private static final String COLLISION_KEYWORD = "collision";

	// Colors are written as 0x followed by six hex digits, e.g. 0xff0000.
	private static final String COLOR_REGEX = "0x([0-9|a-f]{6})";
	private static final Pattern COLOR_PATTERN = Pattern.compile(COLOR_REGEX);

	// Each line has the form <traceId|keyword>=<color>.
	private static final String DELIMITER_REGEX = "=";

	private static final String ENCODING = "UTF-8";

	private final ConcurrentMap<Long, Color> colorMap;
	private final Color defaultColor;
	private final Color collisionColor;

	/**
	 * Creates a new description repository using the given configuration.
	 *
	 * @param configuration
	 *            The configuration to use
	 * @param projectContext
	 *            The project context for this plugin.
	 *
	 * @throws IOException
	 *             If an I/O error occurs during initialization
	 */
	public TraceColorRepository(final Configuration configuration, final IProjectContext projectContext) throws IOException {
		this(configuration, TraceColorRepository.readDataFromFile(configuration.getStringProperty(CONFIG_PROPERTY_NAME_TRACE_COLOR_FILE_NAME)), projectContext);
	}

	/**
	 * Creates a new color repository with the given data.
	 *
	 * @param configuration
	 *            The configuration to use
	 * @param colorData
	 *            The color data to use for this repository
	 * @param projectContext
	 *            The project context to use for this repository.
	 */
	public TraceColorRepository(final Configuration configuration, final TraceColorRepositoryData colorData, final IProjectContext projectContext) {
		super(configuration, projectContext);

		this.colorMap = colorData.getColorMap();
		this.defaultColor = colorData.getDefaultColor();
		this.collisionColor = colorData.getCollisionColor();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Configuration getCurrentConfiguration() {
		return this.configuration;
	}

	/**
	 * Returns the color map stored in this repository.
	 *
	 * @return An unmodifiable view of the trace-id-to-color map
	 */
	public Map<Long, Color> getColorMap() {
		return Collections.unmodifiableMap(this.colorMap);
	}

	/**
	 * Returns the color to use for elements which are not defined in the color map.
	 *
	 * @return See above
	 */
	public Color getDefaultColor() {
		return this.defaultColor;
	}

	/**
	 * Returns the color to use for elements for which no unique color can be determined.
	 *
	 * @return See above
	 */
	public Color getCollisionColor() {
		return this.collisionColor;
	}

	// Parses a trace id, returning null (rather than throwing) on malformed input so
	// the offending line can simply be skipped.
	private static Long parseTraceId(final String input) {
		try {
			return Long.parseLong(input);
		} catch (final NumberFormatException e) {
			return null;
		}
	}

	// Parses a 0xRRGGBB color specification, returning null if it does not match.
	private static Color parseColor(final String input) {
		final Matcher matcher = COLOR_PATTERN.matcher(input);
		if (!matcher.matches()) {
			return null;
		}

		final int rgbValue = Integer.parseInt(matcher.group(1), 16);
		return new Color(rgbValue);
	}

	/**
	 * Initializes a trace color repository from a given file.
	 *
	 * @param fileName
	 *            The name of the file to read from
	 * @param projectContext
	 *            The project context to use.
	 * @return The initialized trace color repository
	 *
	 * @throws IOException
	 *             If an I/O error occurs
	 */
	public static TraceColorRepository createFromFile(final String fileName, final IProjectContext projectContext) throws IOException {
		final Configuration configuration = new Configuration();
		configuration.setProperty(CONFIG_PROPERTY_NAME_TRACE_COLOR_FILE_NAME, fileName);
		return new TraceColorRepository(configuration, TraceColorRepository.readDataFromFile(fileName), projectContext);
	}

	/**
	 * Reads a trace color file.  Malformed lines (wrong shape, bad id, bad color) are
	 * silently skipped; "default" and "collision" keyword lines override the fallback colors.
	 */
	private static TraceColorRepositoryData readDataFromFile(final String fileName) throws IOException {
		// try-with-resources replaces the previous manual close-in-finally handling
		try (BufferedReader reader = Files.newBufferedReader(Paths.get(fileName), Charset.forName(ENCODING))) {
			final ConcurrentMap<Long, Color> colorMap = new ConcurrentHashMap<>();
			Color defaultColor = Color.BLACK;
			Color collisionColor = Color.GRAY;

			String currentLine;
			while ((currentLine = reader.readLine()) != null) {
				final String[] parts = currentLine.split(DELIMITER_REGEX);
				if (parts.length != 2) {
					continue;
				}

				final String traceName = parts[0];
				final String colorSpecification = parts[1];

				final Color traceColor = TraceColorRepository.parseColor(colorSpecification);

				if (DEFAULT_KEYWORD.equals(traceName)) {
					if (traceColor != null) {
						defaultColor = traceColor;
					}
				} else if (COLLISION_KEYWORD.equals(traceName)) {
					if (traceColor != null) {
						collisionColor = traceColor;
					}
				} else {
					final Long traceId = TraceColorRepository.parseTraceId(traceName);
					if ((traceId != null) && (traceColor != null)) {
						colorMap.put(traceId, traceColor);
					}
				}
			}

			return new TraceColorRepositoryData(colorMap, defaultColor, collisionColor);
		}
	}

	/**
	 * This class groups the data required for a {@link TraceColorRepository}.
	 *
	 * @author Holger Knoche
	 *
	 * @since 1.6
	 */
	public static class TraceColorRepositoryData {
		private final ConcurrentMap<Long, Color> colorMap;
		private final Color defaultColor;
		private final Color collisionColor;

		/**
		 * Creates a new data object using the given data.
		 *
		 * @param colorMap
		 *            The color map (trace id -> color) to use
		 * @param defaultColor
		 *            The default color to use
		 * @param collisionColor
		 *            The collision color to use
		 */
		public TraceColorRepositoryData(final ConcurrentMap<Long, Color> colorMap, final Color defaultColor, final Color collisionColor) {
			this.colorMap = colorMap;
			this.defaultColor = defaultColor;
			this.collisionColor = collisionColor;
		}

		ConcurrentMap<Long, Color> getColorMap() { // NOPMD package for outer class
			return this.colorMap;
		}

		Color getDefaultColor() { // NOPMD package for outer class
			return this.defaultColor;
		}

		Color getCollisionColor() { // NOPMD package for outer class
			return this.collisionColor;
		}
	}

}
| kieker-monitoring/kieker | kieker-tools/src/kieker/tools/trace/analysis/repository/TraceColorRepository.java | Java | apache-2.0 | 8,310 |
/**********************************************************************
Copyright (c) 2012 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.maven;
import java.util.List;
import org.codehaus.plexus.util.cli.Commandline;
/**
* Drop and create the Schema defined by the input files.
* @goal schema-deletecreate
* @requiresDependencyResolution runtime
* @description Drops and creates the datastore Schema for the specified input files
*/
public class SchemaToolDeleteCreateMojo extends AbstractSchemaToolMojo
{
private static final String OPERATION_MODE_DELETECREATE = "-deletecreate";
/**
 * {@inheritDoc}
 * @see org.datanucleus.maven.AbstractSchemaToolMojo#prepareModeSpecificCommandLineArguments(org.codehaus.plexus.util.cli.Commandline, java.util.List)
 */
protected void prepareModeSpecificCommandLineArguments(Commandline cl, List args)
{
    // Assemble the SchemaTool argument tokens once; the fork flag only decides
    // whether they go onto the external command line or the in-process list.
    java.util.List<String> tokens = new java.util.ArrayList<String>();
    tokens.add(OPERATION_MODE_DELETECREATE);
    if (ddlFile != null && ddlFile.trim().length() > 0)
    {
        tokens.add("-ddlFile");
        tokens.add(ddlFile);
    }
    if (completeDdl)
    {
        tokens.add("-completeDdl");
    }
    if (includeAutoStart)
    {
        tokens.add("-includeAutoStart");
    }

    if (fork)
    {
        for (String token : tokens)
        {
            cl.createArg().setValue(token);
        }
    }
    else
    {
        args.addAll(tokens);
    }
}
} | datanucleus/datanucleus-maven-plugin | src/main/java/org/datanucleus/maven/SchemaToolDeleteCreateMojo.java | Java | apache-2.0 | 2,523 |
/*
* $Id: HiddenDirective.java 651946 2008-04-27 13:41:38Z apetrelli $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.views.velocity.components;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.components.Component;
import org.apache.struts2.components.Hidden;
import com.opensymphony.xwork2.util.ValueStack;
/**
* @see Hidden
*/
/**
 * Velocity directive backed by the {@link Hidden} tag component.
 *
 * @see Hidden
 */
public class HiddenDirective extends AbstractDirective {

    /** Creates the {@link Hidden} component instance that renders this directive. */
    protected Component getBean(ValueStack stack, HttpServletRequest req, HttpServletResponse res) {
        Hidden hidden = new Hidden(stack, req, res);
        return hidden;
    }

    /** @return the name under which this directive is registered ("hidden"). */
    public String getBeanName() {
        return "hidden";
    }
}
| xiaguangme/struts2-src-study | src/org/apache/struts2/views/velocity/components/HiddenDirective.java | Java | apache-2.0 | 1,514 |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import com.android.builder.core.VariantConfiguration;
import com.android.builder.dependency.SymbolFileProvider;
import com.android.utils.StdLogger;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.devtools.build.android.Converters.DependencySymbolFileProviderListConverter;
import com.google.devtools.build.android.Converters.PathConverter;
import com.google.devtools.build.android.resources.ResourceSymbols;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
/**
* Provides an entry point for the compiling resource classes using a custom compiler (simply parse
* R.txt and make a jar, which is simpler than parsing R.java and running errorprone, etc.).
*
* For now, we assume this is only worthwhile for android_binary and not libraries.
*
* <pre>
* Example Usage:
* java/com/google/build/android/RClassGeneratorAction\
* --primaryRTxt path/to/R.txt\
* --primaryManifest path/to/AndroidManifest.xml\
* --libraries p/t/1/AndroidManifest.txt:p/t/1/R.txt,\
* p/t/2/AndroidManifest.txt:p/t/2/R.txt\
* --classJarOutput path/to/write/archive_resources.jar
* </pre>
*/
public class RClassGeneratorAction {

  // Logger handed to the Android tooling classes; warnings and above only.
  private static final StdLogger STD_LOGGER =
      new StdLogger(StdLogger.Level.WARNING);

  private static final Logger logger = Logger.getLogger(RClassGeneratorAction.class.getName());

  /**
   * Flag specifications for this action.
   */
  public static final class Options extends OptionsBase {
    @Option(name = "primaryRTxt",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "input",
        help = "The path to the binary's R.txt file")
    public Path primaryRTxt;

    @Option(name = "primaryManifest",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "input",
        help = "The path to the binary's AndroidManifest.xml file. This helps provide the package.")
    public Path primaryManifest;

    @Option(name = "packageForR",
        defaultValue = "null",
        category = "config",
        help = "Custom java package to generate the R class files.")
    public String packageForR;

    @Option(name = "libraries",
        defaultValue = "",
        converter = DependencySymbolFileProviderListConverter.class,
        category = "input",
        help = "R.txt and manifests for the libraries in this binary's deps. We will write "
            + "class files for the libraries as well. Expected format: lib1/R.txt[:lib2/R.txt]")
    public List<DependencySymbolFileProvider> libraries;

    @Option(name = "classJarOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for the generated jar of R.class files.")
    public Path classJarOutput;
  }

  /**
   * Entry point: parses the flags, writes R.class files for the binary and its
   * library deps into a temporary directory, then jars them to --classJarOutput.
   */
  public static void main(String[] args) throws Exception {
    final Stopwatch timer = Stopwatch.createStarted();
    OptionsParser optionsParser = OptionsParser.newOptionsParser(Options.class);
    optionsParser.enableParamsFileSupport(FileSystems.getDefault());
    optionsParser.parseAndExitUponError(args);
    Options options = optionsParser.getOptions(Options.class);
    Preconditions.checkNotNull(options.classJarOutput);
    final AndroidResourceProcessor resourceProcessor = new AndroidResourceProcessor(STD_LOGGER);
    try (ScopedTemporaryDirectory scopedTmp =
        new ScopedTemporaryDirectory("android_res_compile_tmp")) {
      Path tmp = scopedTmp.getPath();
      Path classOutPath = tmp.resolve("compiled_classes");
      logger.fine(String.format("Setup finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
      // Widen to the List<SymbolFileProvider> view the resource processor expects.
      List<SymbolFileProvider> libraries = new ArrayList<>();
      for (DependencySymbolFileProvider library : options.libraries) {
        libraries.add(library);
      }
      // Note that we need to write the R class for the main binary (so proceed even if there
      // are no libraries).
      if (options.primaryRTxt != null) {
        String appPackageName = options.packageForR;
        if (appPackageName == null) {
          // No explicit package given: fall back to the one declared in the manifest.
          appPackageName = VariantConfiguration
              .getManifestPackage(options.primaryManifest.toFile());
        }
        Multimap<String, ResourceSymbols> libSymbolMap = ArrayListMultimap.create();
        ResourceSymbols fullSymbolValues =
            resourceProcessor.loadResourceSymbolTable(
                libraries, appPackageName, options.primaryRTxt, libSymbolMap);
        logger.fine(
            String.format("Load symbols finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
        // For now, assuming not used for libraries and setting final access for fields.
        fullSymbolValues.writeClassesTo(
            libSymbolMap, appPackageName, classOutPath, true /* finalFields */);
        logger.fine(
            String.format("Finished R.class at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
      } else if (!libraries.isEmpty()) {
        // No R.txt for the binary itself: only write classes for the library deps.
        Multimap<String, ResourceSymbols> libSymbolMap = ArrayListMultimap.create();
        ResourceSymbols fullSymbolValues =
            resourceProcessor.loadResourceSymbolTable(libraries, null, null, libSymbolMap);
        logger.fine(
            String.format("Load symbols finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
        // For now, assuming not used for libraries and setting final access for fields.
        fullSymbolValues.writeClassesTo(libSymbolMap, null, classOutPath, true /* finalFields */);
        logger.fine(
            String.format("Finished R.class at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
      } else {
        // Nothing to compile; still create the (empty) directory so the jar step succeeds.
        Files.createDirectories(classOutPath);
      }
      // We write .class files to temp, then jar them up after (we create a dummy jar, even if
      // there are no class files).
      AndroidResourceOutputs.createClassJar(classOutPath, options.classJarOutput);
      logger.fine(
          String.format("createClassJar finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
    } finally {
      resourceProcessor.shutdown();
    }
    logger.fine(String.format("Compile action done in %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
  }
}
| juhalindfors/bazel-patches | src/tools/android/java/com/google/devtools/build/android/RClassGeneratorAction.java | Java | apache-2.0 | 7,271 |
package germ.model.event;
import java.util.EventListener;
public interface UpdateListener extends EventListener{
    /**
     * Invoked when an update has been performed.
     *
     * @param e the event describing the update
     */
    public void updatePerformed(UpdateEvent e);
}
| delicb/Germ | germ/model/event/UpdateListener.java | Java | apache-2.0 | 162 |
package com.github.skittlesdev.kubrick.asyncs;
import android.os.AsyncTask;
import com.github.skittlesdev.kubrick.KubrickApplication;
import com.github.skittlesdev.kubrick.R;
import com.github.skittlesdev.kubrick.interfaces.MediaListener;
import info.movito.themoviedbapi.TmdbApi;
import info.movito.themoviedbapi.TmdbMovies;
import info.movito.themoviedbapi.model.MovieDb;
/**
 * Background task that fetches a movie (with credits and similar movies)
 * from TMDb and hands it to a {@link MediaListener}.
 */
public class GetMovieTask extends AsyncTask<Integer, Void, MovieDb> {

    /** Callback notified once the movie has been retrieved. */
    private MediaListener listener;

    public GetMovieTask(MediaListener listener) {
        this.listener = listener;
    }

    @Override
    protected MovieDb doInBackground(Integer... params) {
        Integer movieId = params[0];
        if (movieId == null) {
            return null;
        }
        String apiKey = KubrickApplication.getContext().getString(R.string.tmdb_api_key);
        TmdbApi api = new TmdbApi(apiKey);
        return api.getMovies()
                .getMovie(movieId, "en", TmdbMovies.MovieMethod.credits, TmdbMovies.MovieMethod.similar_movies);
    }

    @Override
    protected void onPostExecute(MovieDb movieDb) {
        super.onPostExecute(movieDb);
        // A null result (missing id or failed lookup) is silently dropped.
        if (movieDb == null) {
            return;
        }
        this.listener.onMediaRetrieved(movieDb);
    }
}
| skittlesdev/kubrick | app/src/main/java/com/github/skittlesdev/kubrick/asyncs/GetMovieTask.java | Java | apache-2.0 | 1,158 |
/*
* JPPF.
* Copyright (C) 2005-2013 JPPF Team.
* http://www.jppf.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.distributedexternalsort;
import java.util.ArrayList;
import java.util.List;
import org.jppf.client.JPPFClient;
import org.jppf.client.JPPFJob;
import org.jppf.client.JPPFResultCollector;
import org.jppf.server.protocol.JPPFTask;
/**
* This is a JPPF application runner.
* It will create tasks per piece of the input file and sort those on individual nodes.
*
* @author Aniket Kokate
*/
public class SortJPPFRunner {

    /**
     * The JPPF client, handles all communications with the server.
     * It is recommended to only use one JPPF client per JVM, so it
     * should generally be created and used as a singleton.
     */
    private static JPPFClient jppfClient = null;

    // Collected task outputs; rebuilt on every sortFiles(...) call.
    // NOTE(review): static mutable state -- concurrent calls to sortFiles would
    // interfere with each other; confirm single-threaded usage.
    private static List<String> resultList = null;

    /**
     * Submits one sort task per input file as a single blocking JPPF job and
     * returns the per-task results.
     *
     * @param fileNames paths of the file pieces to sort on the grid nodes
     * @return the collected task results, or null if the job failed before collection
     */
    public static List<String> sortFiles(String[] fileNames) {
        try {
            System.out.println("SortJPPFRunner: Started");
            long startDate = System.currentTimeMillis();
            // create the JPPFClient. This constructor call causes JPPF to read the configuration file
            // and connect with one or multiple JPPF drivers.
            jppfClient = new MyJPPFClient();
            // Create a job
            JPPFJob job = createJob();
            System.out.println("SortJPPFRunner: Job created");
            System.out.println("SortJPPFRunner: Files to sort:"+fileNames.length);
            for(int i=0; i<fileNames.length; i++) {
                System.out.println("SortJPPFRunner: File["+i+"]: "+fileNames[i]);
                SortJPPFTask s = (SortJPPFTask) job.addTask(new SortJPPFTask(fileNames[i]));
                s.setName("SortJPPFTask "+i);
                s.setId("SortJPPFTask "+i);
            }
            //job.getSLA().setSuspended(true);
            // execute a blocking job
            executeBlockingJob(job);
            System.out.println("SortJPPFRunner: ---------Output:"+resultList);
            // execute a non-blocking job
            //runner.executeNonBlockingJob(job);
            long endDate = System.currentTimeMillis();
            System.out.println("SortJPPFRunner: Completed");
            System.out.println("SortJPPFRunner: Total Time (ms): "+(endDate-startDate));
        } catch(Exception e) {
            System.out.println("SortJPPFRunner: Exception");
            e.printStackTrace();
        } finally {
            if (jppfClient != null) jppfClient.close();
        }
        return resultList;
    }

    /**
     * The entry point for this application runner to be run from a Java command line.
     * @param args by default, we do not use the command line arguments,
     * however nothing prevents us from using them if need be.
     */
    /*public static void main(final String...args) {
        SortJPPFRunner runner = new SortJPPFRunner(null);
    }*/

    /**
     * Create a JPPF job that can be submitted for execution.
     * @return an instance of the {@link org.jppf.client.JPPFJob JPPFJob} class.
     * @throws Exception if an error occurs while creating the job or adding tasks.
     */
    private static JPPFJob createJob() throws Exception {
        // create a JPPF job
        JPPFJob job = new JPPFJob();
        // give this job a readable unique id that we can use to monitor and manage it.
        job.setName("DistributedExternalSort");
        return job;
    }

    /**
     * Execute a job in blocking mode. The application will be blocked until the job
     * execution is complete.
     * @param job the JPPF job to execute.
     * @throws Exception if an error occurs while executing the job.
     */
    private static void executeBlockingJob(final JPPFJob job) throws Exception {
        // set the job in blocking mode.
        job.setBlocking(true);
        // Submit the job and wait until the results are returned.
        // The results are returned as a list of JPPFTask instances,
        // in the same order as the one in which the tasks were initially added to the job.
        List<JPPFTask> results = jppfClient.submit(job);
        // process the results
        processExecutionResults(results);
    }

    /**
     * Execute a job in non-blocking mode. The application has the responsibility
     * for handling the notification of job completion and collecting the results.
     * @param job the JPPF job to execute.
     * @throws Exception if an error occurs while executing the job.
     */
    private static void executeNonBlockingJob(final JPPFJob job) throws Exception {
        // set the job in non-blocking (or asynchronous) mode.
        job.setBlocking(false);
        // this call returns immediately. We will use the collector at a later time
        // to obtain the execution results asynchronously
        JPPFResultCollector collector = submitNonBlockingJob(job);
        // the non-blocking job execution is asynchronous, we can do anything else in the meantime
        System.out.println("Doing something while the job is executing ...");
        // ...
        // We are now ready to get the results of the job execution.
        // We use JPPFResultCollector.waitForResults() for this. This method returns immediately with
        // the results if the job has completed, otherwise it waits until the job execution is complete.
        List<JPPFTask> results = collector.waitForResults();
        // process the results
        processExecutionResults(results);
    }

    /**
     * Execute a job in non-blocking mode. The application has the responsibility
     * for handling the notification of job completion and collecting the results.
     * @param job the JPPF job to execute.
     * @return a JPPFResultCollector used to obtain the execution results at a later time.
     * @throws Exception if an error occurs while executing the job.
     */
    private static JPPFResultCollector submitNonBlockingJob(final JPPFJob job) throws Exception {
        // set the job in non-blocking (or asynchronous) mode.
        job.setBlocking(false);
        // We need to be notified of when the job execution has completed.
        // To this effect, we define an instance of the TaskResultListener interface,
        // which we will register with the job.
        // Here, we use an instance of JPPFResultCollector, conveniently provided by the JPPF API.
        // JPPFResultCollector implements TaskResultListener and has a constructor that takes
        // the number of tasks in the job as a parameter.
        JPPFResultCollector collector = new JPPFResultCollector(job);
        job.setResultListener(collector);
        // Submit the job. This call returns immediately without waiting for the execution of
        // the job to complete. As a consequence, the object returned for a non-blocking job is
        // always null. Note that we are calling the exact same method as in the blocking case.
        jppfClient.submit(job);
        // finally return the result collector, so it can be used to collect the execution results
        // at a time of our choosing. The collector can also be obtained at any time by calling
        // (JPPFResultCollector) job.getResultListener()
        return collector;
    }

    /**
     * Process the execution results of each submitted task.
     * @param results the tasks results after execution on the grid.
     */
    private static void processExecutionResults(final List<JPPFTask> results) {
        resultList = new ArrayList<String>();
        // process the results
        for (JPPFTask task: results) {
            // if the task execution resulted in an exception
            if (task.getException() != null) {
                // process the exception here ...
                task.getException().printStackTrace();
                System.out.println("Task Id:"+task.getId()+": An exception was raised: " + task.getException().getMessage());
            }
            else {
                // process the result here ...
                System.out.println("Task Id:"+task.getId()+": Execution result: " + task.getResult());
                if(task.getResult() != null)
                    resultList.add((String) task.getResult());
            }
        }
    }
}
| wkapil/distributedexternalsort | src/main/java/org/distributedexternalsort/SortJPPFRunner.java | Java | apache-2.0 | 7,913 |
/*
* Copyright 2013-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.info;
import java.util.HashMap;
import java.util.Map;
import com.jcabi.github.Github;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cloud.info.exceptions.InitializrParseException;
import org.springframework.cloud.info.exceptions.SpringCloudVersionNotFoundException;
import org.springframework.web.client.RestTemplate;
/**
* @author Ryan Baxter
*/
public class InitializrSpringCloudInfoService extends SpringCloudRelease {

    // Initializr actuator endpoint exposing the supported BOM version ranges.
    private static final String INITIALIZR_URL = "https://start.spring.io/actuator/info";

    private RestTemplate rest;

    public InitializrSpringCloudInfoService(RestTemplate rest, Github github, GithubPomReader reader) {
        super(github, reader);
        this.rest = rest;
    }

    /**
     * Resolves the Spring Cloud release train compatible with the given Spring Boot
     * version by parsing the "bom-ranges"/"spring-cloud" section of the Initializr
     * info endpoint.
     *
     * @param springBootVersion the Spring Boot version to match
     * @throws SpringCloudVersionNotFoundException if the endpoint payload cannot be
     *     parsed or no range matches the given Boot version
     */
    @Override
    @Cacheable("springCloudViaBoot")
    public SpringCloudVersion getSpringCloudVersion(String springBootVersion)
            throws SpringCloudVersionNotFoundException {
        Map<String, SpringBootAndCloudVersion> cache = new HashMap<>();
        Map<String, Object> response = rest.getForObject(INITIALIZR_URL, Map.class);
        if (!response.containsKey("bom-ranges")) {
            throw new SpringCloudVersionNotFoundException(
                    new InitializrParseException("bom-ranges key not found in Initializr info endpoint"));
        }
        Map<String, Object> bomRanges = (Map<String, Object>) response.get("bom-ranges");
        if (!bomRanges.containsKey("spring-cloud")) {
            throw new SpringCloudVersionNotFoundException(
                    new InitializrParseException("spring-cloud key not found in Initializr info endpoint"));
        }
        // Keys are Spring Cloud train names, values are Boot range strings.
        Map<String, String> springCloud = (Map<String, String>) bomRanges.get("spring-cloud");
        for (String key : springCloud.keySet()) {
            String rangeString = springCloud.get(key);
            cache.put(key, parseRangeString(rangeString, key));
        }
        // First matching range wins; HashMap iteration order is unspecified, so the
        // ranges are presumably disjoint -- NOTE(review): confirm.
        for (String key : cache.keySet()) {
            if (cache.get(key).matchesSpringBootVersion(springBootVersion)) {
                return new SpringCloudVersion(key);
            }
        }
        throw new SpringCloudVersionNotFoundException(springBootVersion);
    }

    private SpringBootAndCloudVersion parseRangeString(String rangeString, String springCloudVersion) {
        // Example of rangeString Spring Boot >=2.0.0.M3 and <2.0.0.M5
        // The 13-character prefix "Spring Boot >" is skipped here.
        String versions = rangeString.substring(13);
        boolean startVersionInclusive = true;
        // A remaining leading '=' means the lower bound was ">=" (inclusive).
        if (versions.charAt(0) == '=') {
            versions = versions.substring(1);
        }
        else {
            startVersionInclusive = false;
        }
        // Example of versions 2.0.0.M3 and <2.0.0.M5 or 2.0.0.M3 and <=2.0.0.M5
        String[] cleanedVersions;
        boolean endVersionInclusive = true;
        if (versions.contains("=")) {
            cleanedVersions = versions.split(" and <=");
        }
        else {
            endVersionInclusive = false;
            cleanedVersions = versions.split(" and <");
        }
        // No upper bound present: use a sentinel "infinite" version as the end.
        if (cleanedVersions.length == 1) {
            return new SpringBootAndCloudVersion(cleanedVersions[0], startVersionInclusive, "99999.99999.99999.RELEASE",
                    endVersionInclusive, springCloudVersion);
        }
        return new SpringBootAndCloudVersion(cleanedVersions[0], startVersionInclusive, cleanedVersions[1],
                endVersionInclusive, springCloudVersion);
    }
}
| spring-cloud/spring-cloud-release-tools | spring-cloud-info/src/main/java/org/springframework/cloud/info/InitializrSpringCloudInfoService.java | Java | apache-2.0 | 3,688 |
package org.openestate.io.immobiliare_it.xml;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
import org.jvnet.jaxb2_commons.lang.CopyStrategy2;
import org.jvnet.jaxb2_commons.lang.CopyTo2;
import org.jvnet.jaxb2_commons.lang.Equals2;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy2;
import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString2;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy2;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
* <p>Java class for properties complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="properties">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://feed.immobiliare.it}property" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "properties", propOrder = {
"property"
})
@Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
public class Properties implements Serializable, Cloneable, CopyTo2, Equals2, ToString2
{
    // Auto-generated JAXB class (see class javadoc): do not edit the logic by hand.

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    protected List<Property> property;

    /**
     * Gets the value of the property property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the property property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getProperty().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Property }
     *
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public List<Property> getProperty() {
        if (property == null) {
            property = new ArrayList<Property>();
        }
        return this.property;
    }

    // Renders this object via the shared JAXB toString strategy.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public String toString() {
        final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        {
            // An empty list is treated the same as an absent one throughout this class.
            List<Property> theProperty;
            theProperty = (((this.property!= null)&&(!this.property.isEmpty()))?this.getProperty():null);
            strategy.appendField(locator, this, "property", buffer, theProperty, ((this.property!= null)&&(!this.property.isEmpty())));
        }
        return buffer;
    }

    // Deep copy implemented via copyTo(...).
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public Object clone() {
        return copyTo(createNewInstance());
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(Object target) {
        final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2;
        return copyTo(null, target, strategy);
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) {
        final Object draftCopy = ((target == null)?createNewInstance():target);
        if (draftCopy instanceof Properties) {
            final Properties copy = ((Properties) draftCopy);
            {
                Boolean propertyShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, ((this.property!= null)&&(!this.property.isEmpty())));
                if (propertyShouldBeCopiedAndSet == Boolean.TRUE) {
                    List<Property> sourceProperty;
                    sourceProperty = (((this.property!= null)&&(!this.property.isEmpty()))?this.getProperty():null);
                    @SuppressWarnings("unchecked")
                    List<Property> copyProperty = ((List<Property> ) strategy.copy(LocatorUtils.property(locator, "property", sourceProperty), sourceProperty, ((this.property!= null)&&(!this.property.isEmpty()))));
                    copy.property = null;
                    if (copyProperty!= null) {
                        List<Property> uniquePropertyl = copy.getProperty();
                        uniquePropertyl.addAll(copyProperty);
                    }
                } else {
                    if (propertyShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.property = null;
                    }
                }
            }
        }
        return draftCopy;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public Object createNewInstance() {
        return new Properties();
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) {
        if ((object == null)||(this.getClass()!= object.getClass())) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final Properties that = ((Properties) object);
        {
            List<Property> lhsProperty;
            lhsProperty = (((this.property!= null)&&(!this.property.isEmpty()))?this.getProperty():null);
            List<Property> rhsProperty;
            rhsProperty = (((that.property!= null)&&(!that.property.isEmpty()))?that.getProperty():null);
            if (!strategy.equals(LocatorUtils.property(thisLocator, "property", lhsProperty), LocatorUtils.property(thatLocator, "property", rhsProperty), lhsProperty, rhsProperty, ((this.property!= null)&&(!this.property.isEmpty())), ((that.property!= null)&&(!that.property.isEmpty())))) {
                return false;
            }
        }
        return true;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-15T07:05:08+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(Object object) {
        final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2;
        return equals(null, null, object, strategy);
    }
}
| OpenEstate/OpenEstate-IO | ImmobiliareIT/src/main/jaxb/org/openestate/io/immobiliare_it/xml/Properties.java | Java | apache-2.0 | 7,811 |
package com.vendingontime.backend.initializers.sparkplugins;
import spark.Service;
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
public interface SparkPlugin {
    /**
     * Enables this plugin on the given Spark HTTP service instance.
     *
     * @param http the Spark service to configure
     */
    void enable(Service http);
}
| VendingOnTime/server-vot | src/main/java/com/vendingontime/backend/initializers/sparkplugins/SparkPlugin.java | Java | apache-2.0 | 953 |
package br.com.caelum.designpatterns.state.investimento;
import br.com.caelum.designpatterns.modelo.Conta;
/**
 * Account state for a negative balance: withdrawals are rejected and
 * deposits are credited with a 5% penalty.
 */
public class Negativo implements EstadoDeUmaConta{

    /** Withdrawing from a negative account is never allowed. */
    @Override
    public void sacar(Conta conta, double valor) {
        throw new RuntimeException("Não pode sacar de conta negativada");
    }

    /**
     * Credits the deposit minus a 5% penalty and transitions the account to
     * the positive state once its balance rises above zero.
     */
    @Override
    public void depositar(Conta conta, double valor) {
        double valorComPenalidade = valor * 0.95;
        conta.mudarSaldo(conta.getSaldo() + valorComPenalidade);
        boolean saiuDoNegativo = conta.getSaldo() > 0;
        if (saiuDoNegativo) {
            conta.mudarEstadoAtualPara(new Positivo());
        }
    }

    @Override
    public String toString() {
        return "NEGATIVO";
    }
}
| sedesdev/design-patterns | src/br/com/caelum/designpatterns/state/investimento/Negativo.java | Java | apache-2.0 | 563 |
package org.web3j.abi.datatypes.generated;
import java.math.BigInteger;
import org.web3j.abi.datatypes.Uint;
/**
* Auto generated code.
* <p><strong>Do not modifiy!</strong>
* <p>Please use org.web3j.codegen.AbiTypesGenerator in the
* <a href="https://github.com/web3j/web3j/tree/master/codegen">codegen module</a> to update.
*/
public class Uint64 extends Uint {
    // Shared constant for the zero value.
    public static final Uint64 DEFAULT = new Uint64(BigInteger.ZERO);

    public Uint64(BigInteger value) {
        // Fixed bit width of 64 is enforced by the Uint superclass.
        super(64, value);
    }

    public Uint64(long value) {
        // Convenience overload for Java longs; negative longs become negative
        // BigIntegers and are presumably rejected by Uint's range check -- TODO confirm.
        this(BigInteger.valueOf(value));
    }
}
| web3j/web3j | abi/src/main/java/org/web3j/abi/datatypes/generated/Uint64.java | Java | apache-2.0 | 595 |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Asakusa Vanilla engine implementations.
*/
package com.asakusafw.vanilla.core.engine; | ashigeru/asakusafw-compiler | vanilla/runtime/core/src/main/java/com/asakusafw/vanilla/core/engine/package-info.java | Java | apache-2.0 | 706 |
package net.floodlightcontroller.packet;
import java.nio.ByteBuffer;
import java.util.ArrayList;
/**
 * A single DHCPv6 option in the RFC 3315 wire format: a 2-byte option code,
 * a 2-byte length, followed by {@code length} bytes of opaque option data.
 */
public class DHCPv6Option {
    /** Number of header bytes (code + length) preceding the option data. */
    protected static final int HEADER_LENGTH = 4;

    protected short code;
    protected short length;
    protected byte[] data;

    /** Option code of the Client Identifier option (RFC 3315, section 22.2). */
    public static final short CLIENT_IDENTIFIER = 1;
    /** Option code of the Identity Association (IA_NA) option (RFC 3315, section 22.4). */
    public static final short IDENTITY_ASSOCIATION = 3;

    /**
     * Creates an option with the given code and a zero-filled data buffer of
     * {@code length} bytes.
     */
    public DHCPv6Option(short code, short length) {
        this.code = code;
        this.length = length;
        this.data = new byte[length];
    }

    public short getCode() {
        return code;
    }

    public void setCode(short code) {
        this.code = code;
    }

    /** Replaces the payload; callers must keep it consistent with {@link #getLength()}. */
    public void setData(byte[] data) {
        this.data = data;
    }

    public byte[] getData() {
        return data;
    }

    public short getLength() {
        return length;
    }

    /**
     * Retained byte-typed overload for source compatibility; it cannot express
     * lengths above 127 even though {@code length} is a short — prefer
     * {@link #setLength(short)}.
     */
    public void setLength(byte length) {
        this.length = length;
    }

    /** Sets the payload length using the field's full short range. */
    public void setLength(short length) {
        this.length = length;
    }

    /**
     * Parses one option starting at {@code offset}.
     *
     * @return the parsed option, or {@code null} when fewer than 4 header
     *         bytes remain or the 0xff00 end-marker code is encountered.
     */
    public static DHCPv6Option getOption(byte[] data, int offset) {
        // Bounds fix: the old check (offset >= data.length) still let a
        // truncated header raise an IndexOutOfBoundsException in wrap().
        if (offset < 0 || offset + HEADER_LENGTH > data.length) {
            return null;
        }
        ByteBuffer bb = ByteBuffer.wrap(data, offset, HEADER_LENGTH);
        short code = bb.getShort();
        short length = bb.getShort();
        // Bug fix: "code == 0xff00" compared a sign-extended short against the
        // int 65280 and could never be true; mask to the raw 16-bit value so
        // the end-marker check works as intended.
        if ((code & 0xffff) == 0xff00) {
            return null;
        }
        DHCPv6Option option = new DHCPv6Option(code, length);
        System.arraycopy(data, offset + HEADER_LENGTH, option.data, 0, length);
        return option;
    }

    /**
     * Parses consecutive options starting at {@code offset} until the buffer
     * is exhausted or an end marker is found.
     */
    public static ArrayList<DHCPv6Option> getOptions(byte[] data, int offset) {
        ArrayList<DHCPv6Option> options = new ArrayList<DHCPv6Option>();
        DHCPv6Option option = getOption(data, offset);
        while (option != null) {
            options.add(option);
            offset += option.length + HEADER_LENGTH;
            option = getOption(data, offset);
        }
        return options;
    }

    /**
     * Serializes this option to its wire format. The method name keeps the
     * historical misspelling for caller compatibility; see {@link #serialize()}.
     */
    public byte[] serilize() {
        byte[] out = new byte[HEADER_LENGTH + length];
        ByteBuffer bb = ByteBuffer.wrap(out);
        bb.putShort(code);
        bb.putShort(length);
        bb.put(this.data);
        return out;
    }

    /** Correctly-spelled alias for {@link #serilize()}. */
    public byte[] serialize() {
        return serilize();
    }
}
| zy-sdn/savi-floodlight | src/main/java/net/floodlightcontroller/packet/DHCPv6Option.java | Java | apache-2.0 | 1,728 |
/**
* IHostGroupDao.java
*/
package com.skycloud.jkb.dao;
/**
 * Persistence (DAO) interface for host groups.
 *
 * @creation 2014-01-03 10:56:43
 * @modification 2014-01-03 10:56:43
 * @company Skycloud
 * @author xiweicheng
 * @version 1.0
 *
 */
public interface IHostGroupDao {
    // Marker interface for now; host-group persistence operations will be declared here.
}
| xiwc/jkb | src/main/java/com/skycloud/jkb/dao/IHostGroupDao.java | Java | apache-2.0 | 290 |
/**
* Java API for Elasticsearch Spark.
*/
package org.elasticsearch.spark.api.java;
| nfouka/elasticsearch-hadoop | spark/src/main/scala/org/elasticsearch/spark/api/java/package-info.java | Java | apache-2.0 | 87 |
package model;
public class Funcionario extends Usuario {
private int matricula;
private String nome;
private String telefone;
private String email;
private String sexo;
private String cpf;
private String rg;
private String nascimento;
private double salario;
public Funcionario() {
super();
}
public Funcionario(String login, String senha, int tipo, String descricao, int matricula, String nome,
String telefone, String email, String sexo, String cpf, String rg, String nascimento, double salario) {
super(login, senha, tipo, descricao);
this.matricula = matricula;
this.nome = nome;
this.telefone = telefone;
this.email = email;
this.sexo = sexo;
this.cpf = cpf;
this.rg = rg;
this.nascimento = nascimento;
this.salario = salario;
}
public int getMatricula() {
return matricula;
}
public void setMatricula(int matricula) {
this.matricula = matricula;
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
this.nome = nome;
}
public String getTelefone() {
return telefone;
}
public void setTelefone(String telefone) {
this.telefone = telefone;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getSexo() {
return sexo;
}
public void setSexo(String sexo) {
this.sexo = sexo;
}
public String getCpf() {
return cpf;
}
public void setCpf(String cpf) {
this.cpf = cpf;
}
public String getRg() {
return rg;
}
public void setRg(String rg) {
this.rg = rg;
}
public String getNascimento() {
return nascimento;
}
public void setNascimento(String nascimento) {
this.nascimento = nascimento;
}
public double getSalario() {
return salario;
}
public void setSalario(double salario) {
this.salario = salario;
}
public void mostrar(){
System.out.println("Dados do funcionário:");
System.out.println("Matrícula: "+this.getMatricula());
System.out.println("Nome: "+this.getNome());
System.out.println("Nascimento: "+this.getNascimento());
System.out.println("Sexo: "+this.getSexo());
System.out.println("CPF: "+this.getCpf());
System.out.println("RG: "+this.getRg());
System.out.println("E-mail: "+this.getEmail());
System.out.println("Telefone: "+this.getTelefone());
System.out.println("Salario fixo: "+this.getSalario());
}
} | adriano003/Turma-2016023-Sistema-de-Farmacia | WorkSpaceFarmacia/Farmacia/src/model/Funcionario.java | Java | apache-2.0 | 2,478 |
/*
* Copyright (c) 2016 Nova Ordis LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.novaordis.utilities.os;
/**
* An instance that give access to underlying O/S configuration values that do not change as long as the system is
* not rebooted (an example is the memory page size). The instance reads those values at initialization, and then it
* keeps returned the cached values. Instances implementing this interface can only be obtained through the
* getConfiguration() method of the OS instance.
*
* @see OS#getConfiguration()
*
* @author Ovidiu Feodorov <[email protected]>
* @since 8/1/16
*/
public interface OSConfiguration {

    // Constants -------------------------------------------------------------------------------------------------------

    // Static ----------------------------------------------------------------------------------------------------------

    // Public ----------------------------------------------------------------------------------------------------------

    /**
     * Returns the memory page size read at initialization time; per the class contract the value is
     * cached and does not change until reboot.
     *
     * @return the underlying O/S memory page size, in bytes. On Linux, that value is obtained by running
     * getconf PAGESIZE.
     */
    int getMemoryPageSize();
}
| NovaOrdis/novaordis-utilities | src/main/java/io/novaordis/utilities/os/OSConfiguration.java | Java | apache-2.0 | 1,718 |
/**
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 Eric Haddad Koenig
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.all.twitter.impl;
import java.net.URL;
import twitter4j.User;
import com.all.twitter.UserProfile;
/**
 * Adapts a twitter4j {@link User} into the application's {@link UserProfile}
 * representation.
 */
public class UserProfileAdapter {

    /**
     * Builds a {@code UserProfileImpl} from the given twitter4j user. A missing
     * profile URL is mapped to the empty string; the final boolean constructor
     * argument is always {@code false}.
     */
    public UserProfile convertFrom(User user) {
        URL profileUrl = user.getURL();
        String homepage;
        if (profileUrl == null) {
            homepage = "";
        } else {
            homepage = profileUrl.toString();
        }
        return new UserProfileImpl(user.getName(), user.getScreenName(),
                user.getDescription(), user.getLocation(), homepage,
                user.getFollowersCount(), user.getFriendsCount(),
                user.getStatusesCount(), user.getId(), user.getProfileImageURL(), false);
    }
}
| josdem/client-extra | twitter/src/main/java/com/all/twitter/impl/UserProfileAdapter.java | Java | apache-2.0 | 12,137 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.FSDataset;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Level;
/**
* This class tests that a file need not be closed before its data can be read
* by another client.
*/
public class TestFileCreation extends junit.framework.TestCase {
static final String DIR = "/" + TestFileCreation.class.getSimpleName()
+ "/";
{
// ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) LeaseManager.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
}
static final long seed = 0xDEADBEEFL;
static final int blockSize = 8192;
static final int numBlocks = 2;
static final int fileSize = numBlocks * blockSize + 1;
boolean simulatedStorage = false;
// The test file is 2 times the blocksize plus one. This means that when the
// entire file is written, the first two blocks definitely get flushed to
// the datanodes.
// creates a file but does not close it
static FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl)
throws IOException {
System.out.println("createFile: Created " + name + " with " + repl
+ " replica.");
FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf()
.getInt("io.file.buffer.size", 4096), (short) repl,
(long) blockSize);
return stm;
}
//
// writes to file but does not close it
//
    // Writes the standard test payload (fileSize bytes) to an already-open
    // stream; the stream is intentionally left open by this helper.
    static void writeFile(FSDataOutputStream stm) throws IOException {
        writeFile(stm, fileSize);
    }
//
// writes specified bytes to file.
//
    // Writes `size` bytes of the deterministic pseudo-random pattern (derived
    // from the fixed `seed`) to the stream without closing it.
    static void writeFile(FSDataOutputStream stm, int size) throws IOException {
        byte[] buffer = AppendTestUtil.randomBytes(seed, size);
        stm.write(buffer, 0, size);
    }
//
// verify that the data written to the full blocks are sane
//
private void checkFile(FileSystem fileSys, Path name, int repl)
throws IOException {
boolean done = false;
// wait till all full blocks are confirmed by the datanodes.
while (!done) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
}
done = true;
BlockLocation[] locations = fileSys.getFileBlockLocations(fileSys
.getFileStatus(name), 0, fileSize);
if (locations.length < numBlocks) {
done = false;
continue;
}
for (int idx = 0; idx < locations.length; idx++) {
if (locations[idx].getHosts().length < repl) {
done = false;
break;
}
}
}
FSDataInputStream stm = fileSys.open(name);
final byte[] expected;
if (simulatedStorage) {
expected = new byte[numBlocks * blockSize];
for (int i = 0; i < expected.length; i++) {
expected[i] = SimulatedFSDataset.DEFAULT_DATABYTE;
}
} else {
expected = AppendTestUtil.randomBytes(seed, numBlocks * blockSize);
}
// do a sanity check. Read the file
byte[] actual = new byte[numBlocks * blockSize];
stm.readFully(0, actual);
stm.close();
checkData(actual, 0, expected, "Read 1");
}
static private void checkData(byte[] actual, int from, byte[] expected,
String message) {
for (int idx = 0; idx < actual.length; idx++) {
assertEquals(message + " byte " + (from + idx)
+ " differs. expected " + expected[from + idx] + " actual "
+ actual[idx], expected[from + idx], actual[idx]);
actual[idx] = 0;
}
}
static void checkFullFile(FileSystem fs, Path name) throws IOException {
FileStatus stat = fs.getFileStatus(name);
BlockLocation[] locations = fs.getFileBlockLocations(stat, 0, fileSize);
for (int idx = 0; idx < locations.length; idx++) {
String[] hosts = locations[idx].getNames();
for (int i = 0; i < hosts.length; i++) {
System.out.print(hosts[i] + " ");
}
System.out.println(" off " + locations[idx].getOffset() + " len "
+ locations[idx].getLength());
}
byte[] expected = AppendTestUtil.randomBytes(seed, fileSize);
FSDataInputStream stm = fs.open(name);
byte[] actual = new byte[fileSize];
stm.readFully(0, actual);
checkData(actual, 0, expected, "Read 2");
stm.close();
}
/**
* Test that file data becomes available before file is closed.
*/
    public void testFileCreation() throws IOException {
        Configuration conf = new Configuration();
        if (simulatedStorage) {
            conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
        }
        // single-datanode cluster; shut down in the finally block below
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        FileSystem fs = cluster.getFileSystem();
        try {
            //
            // check that / exists
            //
            Path path = new Path("/");
            System.out.println("Path : \"" + path.toString() + "\"");
            System.out.println(fs.getFileStatus(path).isDir());
            assertTrue("/ should be a directory", fs.getFileStatus(path)
                    .isDir() == true);
            //
            // Create a directory inside /, then try to overwrite it
            // (expected to fail with "already exists as a directory.")
            //
            Path dir1 = new Path("/test_dir");
            fs.mkdirs(dir1);
            System.out.println("createFile: Creating " + dir1.getName()
                    + " for overwrite of existing directory.");
            try {
                fs.create(dir1, true); // Create path, overwrite=true
                fs.close();
                assertTrue("Did not prevent directory from being overwritten.",
                        false);
            } catch (IOException ie) {
                // only the expected "directory exists" failure is tolerated
                if (!ie.getMessage().contains("already exists as a directory."))
                    throw ie;
            }
            // create a new file in home directory. Do not close it.
            //
            Path file1 = new Path("filestatus.dat");
            FSDataOutputStream stm = createFile(fs, file1, 1);
            // verify that file exists in FS namespace
            assertTrue(file1 + " should be a file", fs.getFileStatus(file1)
                    .isDir() == false);
            System.out.println("Path : \"" + file1 + "\"");
            // write to file
            writeFile(stm);
            // Make sure a client can read it before it is closed.
            checkFile(fs, file1, 1);
            // verify that file size has changed: before close, only the two
            // full blocks (not the final partial byte) are visible
            long len = fs.getFileStatus(file1).getLen();
            assertTrue(file1 + " should be of size " + (numBlocks * blockSize)
                    + " but found to be of size " + len, len == numBlocks
                    * blockSize);
            stm.close();
            // verify that file size has changed to the full size
            len = fs.getFileStatus(file1).getLen();
            assertTrue(file1 + " should be of size " + fileSize
                    + " but found to be of size " + len, len == fileSize);
            // Check storage usage
            // can't check capacities for real storage since the OS file system
            // may be changing under us.
            if (simulatedStorage) {
                DataNode dn = cluster.getDataNodes().get(0);
                assertEquals(fileSize, dn.getFSDataset().getDfsUsed());
                assertEquals(SimulatedFSDataset.DEFAULT_CAPACITY - fileSize, dn
                        .getFSDataset().getRemaining());
            }
        } finally {
            cluster.shutdown();
        }
    }
/**
* Test deleteOnExit
*/
    public void testDeleteOnExit() throws IOException {
        Configuration conf = new Configuration();
        if (simulatedStorage) {
            conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
        }
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        FileSystem fs = cluster.getFileSystem();
        FileSystem localfs = FileSystem.getLocal(conf);
        try {
            // Creates files in HDFS (file1, file2) and local file system (file3).
            //
            Path file1 = new Path("filestatus.dat");
            Path file2 = new Path("filestatus2.dat");
            Path file3 = new Path("filestatus3.dat");
            FSDataOutputStream stm1 = createFile(fs, file1, 1);
            FSDataOutputStream stm2 = createFile(fs, file2, 1);
            FSDataOutputStream stm3 = createFile(localfs, file3, 1);
            System.out.println("DeleteOnExit: Created files.");
            // write to file1 and file3, then close all three streams.
            // Purposely, file2 is never written to (stm2 is closed empty),
            // so deleteOnExit is exercised on both written and empty files.
            writeFile(stm1);
            writeFile(stm3);
            stm1.close();
            stm2.close();
            stm3.close();
            // set delete on exit flag on files.
            fs.deleteOnExit(file1);
            fs.deleteOnExit(file2);
            localfs.deleteOnExit(file3);
            // close the file system. This should make the above files
            // disappear.
            fs.close();
            localfs.close();
            fs = null;
            localfs = null;
            // reopen file system and verify that file does not exist.
            fs = cluster.getFileSystem();
            localfs = FileSystem.getLocal(conf);
            assertTrue(file1 + " still exists inspite of deletOnExit set.", !fs
                    .exists(file1));
            assertTrue(file2 + " still exists inspite of deletOnExit set.", !fs
                    .exists(file2));
            assertTrue(file3 + " still exists inspite of deletOnExit set.",
                    !localfs.exists(file3));
            System.out.println("DeleteOnExit successful.");
        } finally {
            // closeStream tolerates the nulled-out references above
            IOUtils.closeStream(fs);
            IOUtils.closeStream(localfs);
            cluster.shutdown();
        }
    }
/**
* Test that file data does not become corrupted even in the face of errors.
*/
    public void testFileCreationError1() throws IOException {
        Configuration conf = new Configuration();
        // fast heartbeat/recheck intervals so the dead datanode is detected
        // quickly by the polling loop below
        conf.setInt("heartbeat.recheck.interval", 1000);
        conf.setInt("dfs.heartbeat.interval", 1);
        if (simulatedStorage) {
            conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
        }
        // create cluster
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        FileSystem fs = cluster.getFileSystem();
        cluster.waitActive();
        InetSocketAddress addr = new InetSocketAddress("localhost", cluster
                .getNameNodePort());
        DFSClient client = new DFSClient(addr, conf);
        try {
            // create a new file.
            //
            Path file1 = new Path("/filestatus.dat");
            FSDataOutputStream stm = createFile(fs, file1, 1);
            // verify that file exists in FS namespace
            assertTrue(file1 + " should be a file", fs.getFileStatus(file1)
                    .isDir() == false);
            System.out.println("Path : \"" + file1 + "\"");
            // kill the datanode
            cluster.shutdownDataNodes();
            // wait for the datanode to be declared dead (poll the namenode's
            // LIVE report until it is empty)
            while (true) {
                DatanodeInfo[] info = client
                        .datanodeReport(FSConstants.DatanodeReportType.LIVE);
                if (info.length == 0) {
                    break;
                }
                System.out
                        .println("testFileCreationError1: waiting for datanode "
                                + " to die.");
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                }
            }
            // write 1 byte to file.
            // This should fail because all datanodes are dead.
            byte[] buffer = AppendTestUtil.randomBytes(seed, 1);
            try {
                stm.write(buffer);
                stm.close();
            } catch (Exception e) {
                // the write/close failure is the expected outcome here
                System.out.println("Encountered expected exception");
            }
            // verify that no blocks are associated with this file
            // bad block allocations were cleaned up earlier.
            LocatedBlocks locations = client.namenode.getBlockLocations(file1
                    .toString(), 0, Long.MAX_VALUE);
            System.out.println("locations = " + locations.locatedBlockCount());
            assertTrue("Error blocks were not cleaned up", locations
                    .locatedBlockCount() == 0);
        } finally {
            cluster.shutdown();
            client.close();
        }
    }
/**
* Test that the filesystem removes the last block from a file if its lease
* expires.
*/
    public void testFileCreationError2() throws IOException {
        long leasePeriod = 1000;
        System.out.println("testFileCreationError2 start");
        Configuration conf = new Configuration();
        conf.setInt("heartbeat.recheck.interval", 1000);
        conf.setInt("dfs.heartbeat.interval", 1);
        if (simulatedStorage) {
            conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
        }
        // create cluster
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        DistributedFileSystem dfs = null;
        try {
            cluster.waitActive();
            dfs = (DistributedFileSystem) cluster.getFileSystem();
            DFSClient client = dfs.dfs;
            // create a new file (the stream is deliberately left open so the
            // lease stays held by this client).
            //
            Path file1 = new Path("/filestatus.dat");
            createFile(dfs, file1, 1);
            System.out.println("testFileCreationError2: "
                    + "Created file filestatus.dat with one replicas.");
            LocatedBlocks locations = client.namenode.getBlockLocations(file1
                    .toString(), 0, Long.MAX_VALUE);
            System.out.println("testFileCreationError2: " + "The file has "
                    + locations.locatedBlockCount() + " blocks.");
            // add another block to the file directly via the namenode protocol
            LocatedBlock location = client.namenode.addBlock(file1.toString(),
                    client.clientName);
            System.out.println("testFileCreationError2: " + "Added block "
                    + location.getBlock());
            locations = client.namenode.getBlockLocations(file1.toString(), 0,
                    Long.MAX_VALUE);
            int count = locations.locatedBlockCount();
            System.out.println("testFileCreationError2: " + "The file now has "
                    + count + " blocks.");
            // set the soft and hard limit to be 1 second so that the
            // namenode triggers lease recovery
            cluster.setLeasePeriod(leasePeriod, leasePeriod);
            // wait for the lease to expire (5x the period for safety margin)
            try {
                Thread.sleep(5 * leasePeriod);
            } catch (InterruptedException e) {
            }
            // verify that the last block was synchronized: lease recovery
            // should have removed the never-written block added above.
            locations = client.namenode.getBlockLocations(file1.toString(), 0,
                    Long.MAX_VALUE);
            System.out.println("testFileCreationError2: " + "locations = "
                    + locations.locatedBlockCount());
            assertEquals(0, locations.locatedBlockCount());
            System.out.println("testFileCreationError2 successful");
        } finally {
            IOUtils.closeStream(dfs);
            cluster.shutdown();
        }
    }
  /**
   * Test that file leases are persisted across namenode restarts. This test
   * is currently not triggered (note the "xxx" prefix, which hides it from
   * JUnit) because more HDFS work is needed to handle persistent leases.
   */
  public void xxxtestFileCreationNamenodeRestart() throws IOException {
    Configuration conf = new Configuration();
    final int MAX_IDLE_TIME = 2000; // 2s
    conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
    conf.setInt("heartbeat.recheck.interval", 1000);
    conf.setInt("dfs.heartbeat.interval", 1);
    if (simulatedStorage) {
      conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
    }
    // create cluster
    MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
    FileSystem fs = null;
    try {
      cluster.waitActive();
      fs = cluster.getFileSystem();
      final int nnport = cluster.getNameNodePort();
      // create a new file.
      Path file1 = new Path("/filestatus.dat");
      FSDataOutputStream stm = createFile(fs, file1, 1);
      System.out.println("testFileCreationNamenodeRestart: "
          + "Created file " + file1);
      // write two full blocks.
      writeFile(stm, numBlocks * blockSize);
      stm.sync();
      // rename file while keeping it open.
      Path fileRenamed = new Path("/filestatusRenamed.dat");
      fs.rename(file1, fileRenamed);
      System.out.println("testFileCreationNamenodeRestart: "
          + "Renamed file " + file1 + " to " + fileRenamed);
      file1 = fileRenamed;
      // create another new file.
      //
      Path file2 = new Path("/filestatus2.dat");
      FSDataOutputStream stm2 = createFile(fs, file2, 1);
      System.out.println("testFileCreationNamenodeRestart: "
          + "Created file " + file2);
      // create yet another new file with full path name.
      // rename it while open
      //
      Path file3 = new Path("/user/home/fullpath.dat");
      FSDataOutputStream stm3 = createFile(fs, file3, 1);
      System.out.println("testFileCreationNamenodeRestart: "
          + "Created file " + file3);
      Path file4 = new Path("/user/home/fullpath4.dat");
      FSDataOutputStream stm4 = createFile(fs, file4, 1);
      System.out.println("testFileCreationNamenodeRestart: "
          + "Created file " + file4);
      // Renaming the parent directory /user/home moves file3 and file4
      // under /bin/home while their streams are still open.
      fs.mkdirs(new Path("/bin"));
      fs.rename(new Path("/user/home"), new Path("/bin"));
      Path file3new = new Path("/bin/home/fullpath.dat");
      System.out.println("testFileCreationNamenodeRestart: "
          + "Renamed file " + file3 + " to " + file3new);
      Path file4new = new Path("/bin/home/fullpath4.dat");
      System.out.println("testFileCreationNamenodeRestart: "
          + "Renamed file " + file4 + " to " + file4new);
      // restart cluster with the same namenode port as before.
      // This ensures that leases are persisted in fsimage.
      cluster.shutdown();
      try {
        Thread.sleep(2 * MAX_IDLE_TIME);
      } catch (InterruptedException e) {
      }
      cluster = new MiniDFSCluster(nnport, conf, 1, false, true, null,
          null, null);
      cluster.waitActive();
      // restart cluster yet again. This triggers the code to read in
      // persistent leases from fsimage.
      cluster.shutdown();
      try {
        Thread.sleep(5000);
      } catch (InterruptedException e) {
      }
      cluster = new MiniDFSCluster(nnport, conf, 1, false, true, null,
          null, null);
      cluster.waitActive();
      fs = cluster.getFileSystem();
      // instruct the dfsclient to use a new filename when it requests
      // new blocks for files that were renamed.
      DFSClient.DFSOutputStream dfstream = (DFSClient.DFSOutputStream) (stm
          .getWrappedStream());
      dfstream.setTestFilename(file1.toString());
      dfstream = (DFSClient.DFSOutputStream) (stm3.getWrappedStream());
      dfstream.setTestFilename(file3new.toString());
      dfstream = (DFSClient.DFSOutputStream) (stm4.getWrappedStream());
      dfstream.setTestFilename(file4new.toString());
      // write 1 byte to file. This should succeed because the
      // namenode should have persisted leases.
      byte[] buffer = AppendTestUtil.randomBytes(seed, 1);
      stm.write(buffer);
      stm.close();
      stm2.write(buffer);
      stm2.close();
      stm3.close();
      stm4.close();
      // verify that new block is associated with this file
      DFSClient client = ((DistributedFileSystem) fs).dfs;
      LocatedBlocks locations = client.namenode.getBlockLocations(file1
          .toString(), 0, Long.MAX_VALUE);
      System.out.println("locations = " + locations.locatedBlockCount());
      // Two full blocks were written before the restarts plus one byte
      // afterwards, so the renamed file must now have exactly 3 blocks.
      assertTrue("Error blocks were not cleaned up for file " + file1,
          locations.locatedBlockCount() == 3);
      // verify filestatus2.dat
      locations = client.namenode.getBlockLocations(file2.toString(), 0,
          Long.MAX_VALUE);
      System.out.println("locations = " + locations.locatedBlockCount());
      assertTrue("Error blocks were not cleaned up for file " + file2,
          locations.locatedBlockCount() == 1);
    } finally {
      IOUtils.closeStream(fs);
      cluster.shutdown();
    }
  }
/**
* Test that all open files are closed when client dies abnormally.
*/
public void testDFSClientDeath() throws IOException {
Configuration conf = new Configuration();
System.out.println("Testing adbornal client death.");
if (simulatedStorage) {
conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
}
MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
FileSystem fs = cluster.getFileSystem();
DistributedFileSystem dfs = (DistributedFileSystem) fs;
DFSClient dfsclient = dfs.dfs;
try {
// create a new file in home directory. Do not close it.
//
Path file1 = new Path("/clienttest.dat");
FSDataOutputStream stm = createFile(fs, file1, 1);
System.out.println("Created file clienttest.dat");
// write to file
writeFile(stm);
// close the dfsclient before closing the output stream.
// This should close all existing file.
dfsclient.close();
// reopen file system and verify that file exists.
assertTrue(file1 + " does not exist.", AppendTestUtil
.createHdfsWithDifferentUsername(conf).exists(file1));
} finally {
cluster.shutdown();
}
}
/**
* Test that file data becomes available before file is closed.
*/
public void testFileCreationSimulated() throws IOException {
simulatedStorage = true;
testFileCreation();
simulatedStorage = false;
}
/**
* Test creating two files at the same time.
*/
public void testConcurrentFileCreation() throws IOException {
Configuration conf = new Configuration();
MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
try {
FileSystem fs = cluster.getFileSystem();
Path[] p = { new Path("/foo"), new Path("/bar") };
// write 2 files at the same time
FSDataOutputStream[] out = { fs.create(p[0]), fs.create(p[1]) };
int i = 0;
for (; i < 100; i++) {
out[0].write(i);
out[1].write(i);
}
out[0].close();
for (; i < 200; i++) {
out[1].write(i);
}
out[1].close();
// verify
FSDataInputStream[] in = { fs.open(p[0]), fs.open(p[1]) };
for (i = 0; i < 100; i++) {
assertEquals(i, in[0].read());
}
for (i = 0; i < 200; i++) {
assertEquals(i, in[1].read());
}
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
* Create a file, write something, fsync but not close. Then change lease
* period and wait for lease recovery. Finally, read the block directly from
* each Datanode and verify the content.
*/
public void testLeaseExpireHardLimit() throws Exception {
System.out.println("testLeaseExpireHardLimit start");
final long leasePeriod = 1000;
final int DATANODE_NUM = 3;
Configuration conf = new Configuration();
conf.setInt("heartbeat.recheck.interval", 1000);
conf.setInt("dfs.heartbeat.interval", 1);
// create cluster
MiniDFSCluster cluster = new MiniDFSCluster(conf, DATANODE_NUM, true,
null);
DistributedFileSystem dfs = null;
try {
cluster.waitActive();
dfs = (DistributedFileSystem) cluster.getFileSystem();
// create a new file.
final String f = DIR + "foo";
final Path fpath = new Path(f);
FSDataOutputStream out = TestFileCreation.createFile(dfs, fpath,
DATANODE_NUM);
out.write("something".getBytes());
out.sync();
// set the soft and hard limit to be 1 second so that the
// namenode triggers lease recovery
cluster.setLeasePeriod(leasePeriod, leasePeriod);
// wait for the lease to expire
try {
Thread.sleep(5 * leasePeriod);
} catch (InterruptedException e) {
}
LocatedBlocks locations = dfs.dfs.namenode.getBlockLocations(f, 0,
Long.MAX_VALUE);
assertEquals(1, locations.locatedBlockCount());
LocatedBlock locatedblock = locations.getLocatedBlocks().get(0);
int successcount = 0;
for (DatanodeInfo datanodeinfo : locatedblock.getLocations()) {
DataNode datanode = cluster.getDataNode(datanodeinfo.ipcPort);
FSDataset dataset = (FSDataset) datanode.data;
Block b = dataset.getStoredBlock(locatedblock.getBlock()
.getBlockId());
File blockfile = dataset.findBlockFile(b.getBlockId());
System.out.println("blockfile=" + blockfile);
if (blockfile != null) {
BufferedReader in = new BufferedReader(new FileReader(
blockfile));
assertEquals("something", in.readLine());
in.close();
successcount++;
}
}
System.out.println("successcount=" + successcount);
assertTrue(successcount > 0);
} finally {
IOUtils.closeStream(dfs);
cluster.shutdown();
}
System.out.println("testLeaseExpireHardLimit successful");
}
// test closing file system before all file handles are closed.
public void testFsClose() throws Exception {
System.out.println("test file system close start");
final int DATANODE_NUM = 3;
Configuration conf = new Configuration();
// create cluster
MiniDFSCluster cluster = new MiniDFSCluster(conf, DATANODE_NUM, true,
null);
DistributedFileSystem dfs = null;
try {
cluster.waitActive();
dfs = (DistributedFileSystem) cluster.getFileSystem();
// create a new file.
final String f = DIR + "foofs";
final Path fpath = new Path(f);
FSDataOutputStream out = TestFileCreation.createFile(dfs, fpath,
DATANODE_NUM);
out.write("something".getBytes());
// close file system without closing file
dfs.close();
} finally {
System.out.println("testFsClose successful");
}
}
}
| shot/hadoop-source-reading | src/test/org/apache/hadoop/hdfs/TestFileCreation.java | Java | apache-2.0 | 25,044 |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.auth.ldap;
import static com.google.gerrit.reviewdb.client.AccountExternalId.SCHEME_GERRIT;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.gerrit.common.data.ParameterizedString;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountExternalId;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.AuthType;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.EmailExpander;
import com.google.gerrit.server.account.Realm;
import com.google.gerrit.server.auth.AuthenticationUnavailableException;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.naming.CompositeName;
import javax.naming.Name;
import javax.naming.NamingException;
import javax.naming.directory.DirContext;
import javax.security.auth.login.LoginException;
/**
 * LDAP-backed implementation of {@link Realm}.
 * <p>
 * Authenticates users against a directory server, fills in account
 * attributes (full name, ssh user name, email) from the directory, and
 * caches username-to-account-id and username-to-group-membership lookups.
 */
@Singleton
public class LdapRealm implements Realm {
  static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
  // JNDI context factory class name used to open LDAP connections.
  static final String LDAP = "com.sun.jndi.ldap.LdapCtxFactory";
  static final String USERNAME = "username";
  private final Helper helper;
  private final AuthConfig authConfig;
  private final EmailExpander emailExpander;
  // Maps a local username to the Account.Id recorded in ReviewDb.
  private final LoadingCache<String, Optional<Account.Id>> usernameCache;
  // Account fields whose values come from LDAP and must not be edited in Gerrit.
  private final Set<Account.FieldName> readOnlyAccountFields;
  private final boolean fetchMemberOfEagerly;
  private final Config config;
  // Maps a local username to the set of LDAP group UUIDs it belongs to.
  private final LoadingCache<String, Set<AccountGroup.UUID>> membershipCache;
  @Inject
  LdapRealm(
      final Helper helper,
      final AuthConfig authConfig,
      final EmailExpander emailExpander,
      @Named(LdapModule.GROUP_CACHE) final LoadingCache<String, Set<AccountGroup.UUID>> membershipCache,
      @Named(LdapModule.USERNAME_CACHE) final LoadingCache<String, Optional<Account.Id>> usernameCache,
      @GerritServerConfig final Config config) {
    this.helper = helper;
    this.authConfig = authConfig;
    this.emailExpander = emailExpander;
    this.usernameCache = usernameCache;
    this.membershipCache = membershipCache;
    this.config = config;
    this.readOnlyAccountFields = new HashSet<>();
    // A non-null mapping expression means the field is sourced from LDAP,
    // so it becomes read-only inside Gerrit.
    if (optdef(config, "accountFullName", "DEFAULT") != null) {
      readOnlyAccountFields.add(Account.FieldName.FULL_NAME);
    }
    if (optdef(config, "accountSshUserName", "DEFAULT") != null) {
      readOnlyAccountFields.add(Account.FieldName.USER_NAME);
    }
    fetchMemberOfEagerly = optional(config, "fetchMemberOfEagerly", true);
  }
  /** Reads {@code ldap.<setting>} as a search scope, defaulting to SUBTREE. */
  static SearchScope scope(final Config c, final String setting) {
    return ConfigUtil.getEnum(c, "ldap", null, setting, SearchScope.SUBTREE);
  }
  /** Reads {@code ldap.<name>}, or null when unset. */
  static String optional(final Config config, final String name) {
    return config.getString("ldap", null, name);
  }
  /** Reads {@code ldap.<name>} as an int, falling back to the given default. */
  static int optional(Config config, String name, int defaultValue) {
    return config.getInt("ldap", name, defaultValue);
  }
  /** Reads {@code ldap.<name>}, falling back when unset or empty. */
  static String optional(Config config, String name, String defaultValue) {
    final String v = optional(config, name);
    if (Strings.isNullOrEmpty(v)) {
      return defaultValue;
    }
    return v;
  }
  /** Reads {@code ldap.<name>} as a boolean, falling back to the given default. */
  static boolean optional(Config config, String name, boolean defaultValue) {
    return config.getBoolean("ldap", name, defaultValue);
  }
  /**
   * Reads {@code ldap.<name>}, failing when unset or empty.
   *
   * @throws IllegalArgumentException if the setting is missing.
   */
  static String required(final Config config, final String name) {
    final String v = optional(config, name);
    if (v == null || "".equals(v)) {
      throw new IllegalArgumentException("No ldap." + name + " configured");
    }
    return v;
  }
  /** Reads all values of {@code ldap.<name>} (possibly empty). */
  static List<String> optionalList(final Config config,
      final String name) {
    String s[] = config.getStringList("ldap", null, name);
    return Arrays.asList(s);
  }
  /**
   * Reads all values of {@code ldap.<name>}, failing when none are set.
   *
   * @throws IllegalArgumentException if the setting is missing.
   */
  static List<String> requiredList(final Config config,
      final String name) {
    List<String> vlist = optionalList(config, name);
    if (vlist.isEmpty()) {
      throw new IllegalArgumentException("No ldap " + name + " configured");
    }
    return vlist;
  }
  /**
   * Reads {@code ldap.<n>} with a three-way result: the default {@code d}
   * when the key is absent, null when explicitly set to empty (meaning
   * "disabled"), otherwise the first configured value.
   */
  static String optdef(final Config c, final String n, final String d) {
    final String[] v = c.getStringList("ldap", null, n);
    if (v == null || v.length == 0) {
      return d;
    } else if (v[0] == null || "".equals(v[0])) {
      return null;
    } else {
      return v[0];
    }
  }
  /** Like {@link #optdef} but fails when the value resolves to null. */
  static String reqdef(final Config c, final String n, final String d) {
    final String v = optdef(c, n, d);
    if (v == null) {
      throw new IllegalArgumentException("No ldap." + n + " configured");
    }
    return v;
  }
  /**
   * Builds a {@link ParameterizedString} from {@code ldap.<n>}. A plain
   * attribute name (no "${") is wrapped as "${name}" so it still expands.
   */
  static ParameterizedString paramString(Config c, String n, String d) {
    String expression = optdef(c, n, d);
    if (expression == null) {
      return null;
    } else if (expression.contains("${")) {
      return new ParameterizedString(expression);
    } else {
      return new ParameterizedString("${" + expression + "}");
    }
  }
  @Override
  public boolean allowsEdit(final Account.FieldName field) {
    // Fields mirrored from LDAP (see constructor) are not editable.
    return !readOnlyAccountFields.contains(field);
  }
  /**
   * Expands the template against the LDAP search result's attributes.
   * Returns null when the template is null or the expansion is empty.
   */
  static String apply(ParameterizedString p, LdapQuery.Result m)
      throws NamingException {
    if (p == null) {
      return null;
    }
    final Map<String, String> values = new HashMap<>();
    for (final String name : m.attributes()) {
      values.put(name, m.get(name));
    }
    String r = p.replace(values);
    return r.isEmpty() ? null : r;
  }
  @Override
  public AuthRequest authenticate(final AuthRequest who)
      throws AccountException {
    if (config.getBoolean("ldap", "localUsernameToLowerCase", false)) {
      who.setLocalUser(who.getLocalUser().toLowerCase(Locale.US));
    }
    final String username = who.getLocalUser();
    try {
      final DirContext ctx;
      if (authConfig.getAuthType() == AuthType.LDAP_BIND) {
        // LDAP_BIND: authenticate by binding as the user directly.
        ctx = helper.authenticate(username, who.getPassword());
      } else {
        // Otherwise open a (possibly anonymous/service) connection first.
        ctx = helper.open();
      }
      try {
        final Helper.LdapSchema schema = helper.getSchema(ctx);
        final LdapQuery.Result m = helper.findAccount(schema, ctx, username,
            fetchMemberOfEagerly);
        if (authConfig.getAuthType() == AuthType.LDAP && !who.isSkipAuthentication()) {
          // We found the user account, but we need to verify
          // the password matches it before we can continue.
          //
          helper.authenticate(m.getDN(), who.getPassword()).close();
        }
        who.setDisplayName(apply(schema.accountFullName, m));
        who.setUserName(apply(schema.accountSshUserName, m));
        if (schema.accountEmailAddress != null) {
          who.setEmailAddress(apply(schema.accountEmailAddress, m));
        } else if (emailExpander.canExpand(username)) {
          // If LDAP cannot give us a valid email address for this user
          // try expanding it through the older email expander code which
          // assumes a user name within a domain.
          //
          who.setEmailAddress(emailExpander.expand(username));
        }
        // Fill the cache with the user's current groups. We've already
        // spent the cost to open the LDAP connection, we might as well
        // do one more call to get their group membership. Since we are
        // in the middle of authenticating the user, its likely we will
        // need to know what access rights they have soon.
        //
        if (fetchMemberOfEagerly) {
          membershipCache.put(username, helper.queryForGroups(ctx, username, m));
        }
        return who;
      } finally {
        try {
          ctx.close();
        } catch (NamingException e) {
          log.warn("Cannot close LDAP query handle", e);
        }
      }
    } catch (NamingException e) {
      log.error("Cannot query LDAP to authenticate user", e);
      throw new AuthenticationUnavailableException("Cannot query LDAP for account", e);
    } catch (LoginException e) {
      log.error("Cannot authenticate server via JAAS", e);
      throw new AuthenticationUnavailableException("Cannot query LDAP for account", e);
    }
  }
  @Override
  public AuthRequest link(ReviewDb db, Account.Id to, AuthRequest who) {
    // Linking identities requires no extra work for LDAP.
    return who;
  }
  @Override
  public AuthRequest unlink(ReviewDb db, Account.Id from, AuthRequest who) {
    // Unlinking identities requires no extra work for LDAP.
    return who;
  }
  @Override
  public void onCreateAccount(final AuthRequest who, final Account account) {
    // Prime the username cache so the freshly created account resolves
    // without another database lookup.
    usernameCache.put(who.getLocalUser(), Optional.of(account.getId()));
  }
  @Override
  public Account.Id lookup(String accountName) {
    if (Strings.isNullOrEmpty(accountName)) {
      return null;
    }
    try {
      Optional<Account.Id> id = usernameCache.get(accountName);
      return id != null ? id.orNull() : null;
    } catch (ExecutionException e) {
      // Treat cache-loader failures as "not found" but keep a record.
      log.warn(String.format("Cannot lookup account %s in LDAP", accountName), e);
      return null;
    }
  }
  /** Cache loader resolving a username to an Account.Id via ReviewDb. */
  static class UserLoader extends CacheLoader<String, Optional<Account.Id>> {
    private final SchemaFactory<ReviewDb> schema;
    @Inject
    UserLoader(SchemaFactory<ReviewDb> schema) {
      this.schema = schema;
    }
    @Override
    public Optional<Account.Id> load(String username) throws Exception {
      final ReviewDb db = schema.open();
      try {
        final AccountExternalId extId =
            db.accountExternalIds().get(
                new AccountExternalId.Key(SCHEME_GERRIT, username));
        if (extId != null) {
          return Optional.of(extId.getAccountId());
        }
        return Optional.absent();
      } finally {
        db.close();
      }
    }
  }
  /** Cache loader resolving a username to its LDAP group memberships. */
  static class MemberLoader extends CacheLoader<String, Set<AccountGroup.UUID>> {
    private final Helper helper;
    @Inject
    MemberLoader(final Helper helper) {
      this.helper = helper;
    }
    @Override
    public Set<AccountGroup.UUID> load(String username) throws Exception {
      final DirContext ctx = helper.open();
      try {
        return helper.queryForGroups(ctx, username, null);
      } finally {
        try {
          ctx.close();
        } catch (NamingException e) {
          log.warn("Cannot close LDAP query handle", e);
        }
      }
    }
  }
  /** Cache loader checking whether a group DN exists in the directory. */
  static class ExistenceLoader extends CacheLoader<String, Boolean> {
    private final Helper helper;
    @Inject
    ExistenceLoader(final Helper helper) {
      this.helper = helper;
    }
    @Override
    public Boolean load(final String groupDn) throws Exception {
      final DirContext ctx = helper.open();
      try {
        // Wrap the DN in a CompositeName so JNDI does not re-parse
        // special characters embedded in the DN string.
        Name compositeGroupName = new CompositeName().add(groupDn);
        try {
          ctx.getAttributes(compositeGroupName);
          return true;
        } catch (NamingException e) {
          // Lookup failure is interpreted as "group does not exist".
          return false;
        }
      } finally {
        try {
          ctx.close();
        } catch (NamingException e) {
          log.warn("Cannot close LDAP query handle", e);
        }
      }
    }
  }
}
| Team-OctOS/host_gerrit | gerrit-server/src/main/java/com/google/gerrit/server/auth/ldap/LdapRealm.java | Java | apache-2.0 | 12,221 |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.tuple;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
/**
* An immutable pair consisting of two {@code Object} elements.
* <p>
* Although the implementation is immutable, there is no restriction on the objects
* that may be stored. If mutable objects are stored in the pair, then the pair
* itself effectively becomes mutable.
* <p>
* This class is immutable and thread-safe if the stored objects are immutable.
*
* @param <A> the type of the first side of the pair
* @param <B> the type of the second side of the pair
*/
public final class ObjectsPair<A, B>
    extends Pair<A, B> {
  // this ImmutableBean is not auto-generated
  /** Serialization version. */
  private static final long serialVersionUID = 1L;
  /** The first element. */
  public final A first; // CSIGNORE
  /** The second element. */
  public final B second; // CSIGNORE
  //-------------------------------------------------------------------------
  /**
   * Creates a pair inferring the types.
   *
   * @param <A> the first element type
   * @param <B> the second element type
   * @param first the first element, may be null
   * @param second the second element, may be null
   * @return a pair formed from the two parameters, not null
   */
  public static <A, B> ObjectsPair<A, B> of(final A first, final B second) {
    return new ObjectsPair<>(first, second);
  }
  //-------------------------------------------------------------------------
  /**
   * Constructs a pair.
   *
   * @param first the first element, may be null
   * @param second the second element, may be null
   * @deprecated Use of(first, second)
   */
  @Deprecated
  public ObjectsPair(final A first, final B second) { // CSIGNORE
    this.first = first;
    this.second = second;
  }
  //-------------------------------------------------------------------------
  /**
   * Gets the first element, may be null.
   *
   * @return the first element
   */
  @Override
  public A getFirst() {
    return first;
  }
  /**
   * Gets the second element, may be null.
   *
   * @return the second element
   */
  @Override
  public B getSecond() {
    return second;
  }
  // CSOFF
  //-------------------------------------------------------------------------
  /**
   * The meta-bean for {@code ObjectsPair}.
   * @return the meta-bean, not null
   */
  @SuppressWarnings("rawtypes")
  public static ObjectsPair.Meta meta() {
    return ObjectsPair.Meta.INSTANCE;
  }
  // Register the meta-bean with Joda-Beans on class load.
  static {
    JodaBeanUtils.registerMetaBean(ObjectsPair.Meta.INSTANCE);
  }
  @Override
  public ObjectsPair.Meta<A, B> metaBean() {
    return ObjectsPair.Meta.INSTANCE;
  }
  @Override
  public <R> Property<R> property(final String propertyName) {
    return metaBean().<R>metaProperty(propertyName).createProperty(this);
  }
  @Override
  public Set<String> propertyNames() {
    return metaBean().metaPropertyMap().keySet();
  }
  /**
   * Returns this pair itself: the class is immutable, so no copy is needed.
   *
   * @return this pair, not null
   */
  @Override
  public ObjectsPair<A, B> clone() {
    return this;
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code ObjectsPair}.
   */
  public static final class Meta<A, B> extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    @SuppressWarnings("rawtypes")
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code first} property.
     */
    private final MetaProperty<Object> _first = DirectMetaProperty.ofImmutable(
        this, "first", ObjectsPair.class, Object.class);
    /**
     * The meta-property for the {@code second} property.
     */
    private final MetaProperty<Object> _second = DirectMetaProperty.ofImmutable(
        this, "second", ObjectsPair.class, Object.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "first",
        "second");
    /**
     * Restricted constructor.
     */
    Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(final String propertyName) {
      switch (propertyName) {
        case "first":
          return _first;
        case "second":
          return _second;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public ObjectsPair.Builder builder() {
      return new ObjectsPair.Builder();
    }
    @SuppressWarnings("rawtypes")
    @Override
    public Class<? extends ObjectsPair> beanType() {
      return ObjectsPair.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code first} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Object> first() {
      return _first;
    }
    /**
     * The meta-property for the {@code second} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Object> second() {
      return _second;
    }
    //-----------------------------------------------------------------------
    @Override
    @SuppressWarnings("rawtypes")
    protected Object propertyGet(final Bean bean, final String propertyName, final boolean quiet) {
      switch (propertyName) {
        case "first":
          return ((ObjectsPair) bean).getFirst();
        case "second":
          return ((ObjectsPair) bean).getSecond();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @Override
    protected void propertySet(final Bean bean, final String propertyName, final Object newValue, final boolean quiet) {
      // The bean is immutable: validate the property name, then either
      // silently ignore (quiet) or reject the write.
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }
  }
  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code ObjectsPair}.
   */
  @SuppressWarnings({"rawtypes" })
  private static final class Builder extends DirectFieldsBeanBuilder<ObjectsPair> {
    /** The first element. */
    private Object _first;
    /** The second element. */
    private Object _second;
    /**
     * Restricted constructor.
     */
    private Builder() {
      super();
    }
    //-----------------------------------------------------------------------
    @Override
    public Builder set(final String propertyName, final Object newValue) {
      switch (propertyName) {
        case "first":
          _first = newValue;
          break;
        case "second":
          _second = newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }
    @Override
    public Builder setString(final String propertyName, final String value) {
      // Delegate to the meta-property aware overload in the superclass.
      setString(meta().metaProperty(propertyName), value);
      return this;
    }
    @Override
    public ObjectsPair build() {
      return ObjectsPair.of(_first, _second);
    }
  }
}
| McLeodMoores/starling | projects/util/src/main/java/com/opengamma/util/tuple/ObjectsPair.java | Java | apache-2.0 | 7,448 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.dev.js.ast;
import com.google.gwt.dev.jjs.SourceInfo;
/**
* A JavaScript string literal expression.
*/
public abstract class JsValueLiteral extends JsLiteral {
  /**
   * Creates a value literal at the given source position.
   *
   * @param sourceInfo source location of this literal
   */
  protected JsValueLiteral(SourceInfo sourceInfo) {
    super(sourceInfo);
  }
  /** Evaluating a value literal never has side effects. */
  @Override
  public final boolean hasSideEffects() {
    return false;
  }
  /** A value literal is always a leaf node. */
  @Override
  public final boolean isLeaf() {
    return true;
  }
}
| syntelos/gwtcc | src/com/google/gwt/dev/js/ast/JsValueLiteral.java | Java | apache-2.0 | 1,002 |
package ru.job4j.tree;
/**
* Class бинарного дерева.
* @author karetskiy
* @since 25.06.2018
* @version 1
* @param <E> тип значений дерева.
*/
public class Bst<E extends Comparable<E>> {
/**
* Корневая ветка.
*/
private NodeBST<E> node = new NodeBST<>();
/**
* Добавляет значение в дерево.
* @param e добавляемое значение.
*/
public void add(E e) {
node.add(e);
}
/**
* Возвращает итератор дерева.
* @return итератор.
*/
public IteratorBST iterator() {
return (IteratorBST<E>) node.iterator();
}
/**
* Возвращает Представление дерева.
* @return представление дерева.
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
IteratorBST<E> iteratorBST = iterator();
while (iteratorBST.hasNext()) {
sb.append(iteratorBST.next().toString());
}
return sb.toString();
}
} | silberRus/karetskiy | chapter_004/src/main/java/ru/job4j/tree/Bst.java | Java | apache-2.0 | 1,140 |
package ru.r2cloud.util;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
/**
 * Default {@link ThreadPoolFactory} implementation backed by
 * {@link Executors}.
 */
public class ThreadPoolFactoryImpl implements ThreadPoolFactory {
	// Shutdown grace period in milliseconds, exposed via getThreadPoolShutdownMillis().
	private final long threadPoolShutdownMillis;
	/**
	 * @param threadPoolShutdownMillis shutdown grace period in milliseconds
	 */
	public ThreadPoolFactoryImpl(long threadPoolShutdownMillis) {
		this.threadPoolShutdownMillis = threadPoolShutdownMillis;
	}
	/**
	 * Creates a scheduled pool of the given size using the supplied
	 * naming thread factory.
	 */
	@Override
	public ScheduledExecutorService newScheduledThreadPool(int i, NamingThreadFactory namingThreadFactory) {
		return Executors.newScheduledThreadPool(i, namingThreadFactory);
	}
	/**
	 * Returns the configured shutdown grace period in milliseconds.
	 */
	@Override
	public long getThreadPoolShutdownMillis() {
		return threadPoolShutdownMillis;
	}
}
| dernasherbrezon/r2cloud | src/main/java/ru/r2cloud/util/ThreadPoolFactoryImpl.java | Java | apache-2.0 | 646 |
package org.aries.common.entity;
import java.lang.String;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import org.aries.common.Country;
import org.aries.common.PhoneLocation;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
* Generated by Nam.
*
*/
@Entity(name = "PhoneNumber")
@Table(name = "phone_number")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class PhoneNumberEntity {
	@Id
	@GeneratedValue
	@Column(name = "id")
	private Long id;
	// NOTE(review): this column has no @Enumerated annotation; if
	// PhoneLocation is an enum, JPA defaults to ordinal storage, which
	// breaks if enum constants are reordered -- confirm the schema intent.
	@Column(name = "type")
	private PhoneLocation type;
	// Area code portion of the number.
	@Column(name = "area")
	private String area;
	// NOTE(review): same @Enumerated concern as "type" if Country is an enum.
	@Column(name = "country")
	private Country country;
	// Extension dialed after the main number, if any.
	@Column(name = "extension")
	private String extension;
	// Subscriber number portion.
	@Column(name = "number")
	private String number;
	// Full formatted value of the phone number.
	@Column(name = "value")
	private String value;
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public PhoneLocation getType() {
		return type;
	}
	public void setType(PhoneLocation type) {
		this.type = type;
	}
	public String getArea() {
		return area;
	}
	public void setArea(String area) {
		this.area = area;
	}
	public Country getCountry() {
		return country;
	}
	public void setCountry(Country country) {
		this.country = country;
	}
	public String getExtension() {
		return extension;
	}
	public void setExtension(String extension) {
		this.extension = extension;
	}
	public String getNumber() {
		return number;
	}
	public void setNumber(String number) {
		this.number = number;
	}
	public String getValue() {
		return value;
	}
	public void setValue(String value) {
		this.value = value;
	}
}
/**
* Copyright 2017 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package io.confluent.ksql.structured;
import io.confluent.ksql.analyzer.AggregateAnalysis;
import io.confluent.ksql.analyzer.AggregateAnalyzer;
import io.confluent.ksql.analyzer.Analysis;
import io.confluent.ksql.analyzer.AnalysisContext;
import io.confluent.ksql.analyzer.Analyzer;
import io.confluent.ksql.function.FunctionRegistry;
import io.confluent.ksql.metastore.KsqlStream;
import io.confluent.ksql.metastore.MetaStore;
import io.confluent.ksql.parser.KsqlParser;
import io.confluent.ksql.parser.tree.Expression;
import io.confluent.ksql.parser.tree.Statement;
import io.confluent.ksql.planner.LogicalPlanner;
import io.confluent.ksql.planner.plan.FilterNode;
import io.confluent.ksql.planner.plan.PlanNode;
import io.confluent.ksql.util.MetaStoreFixture;
import io.confluent.ksql.util.SerDeUtil;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for {@link SqlPredicate}: verifies that the WHERE clause of a parsed
 * query is turned into the expected filter expression and that the predicate
 * reports the expected set of referenced column indexes.
 */
public class SqlPredicateTest {

  private SchemaKStream initialSchemaKStream;
  private static final KsqlParser KSQL_PARSER = new KsqlParser();
  MetaStore metaStore;
  KStream kStream;
  KsqlStream ksqlStream;
  FunctionRegistry functionRegistry;

  @Before
  public void init() {
    // Fixture: a metastore containing the TEST1 stream and a KStream backed by its topic.
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
    ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
    StreamsBuilder builder = new StreamsBuilder();
    kStream = builder.stream(
        ksqlStream.getKsqlTopic().getKafkaTopicName(),
        Consumed.with(
            Serdes.String(),
            SerDeUtil.getRowSerDe(ksqlStream.getKsqlTopic().getKsqlTopicSerDe(), null)));
  }

  /**
   * Parses the given query string, analyzes it and builds the logical plan.
   *
   * @param queryStr a single KSQL statement, terminated by ';'
   * @return the root node of the logical plan
   */
  private PlanNode buildLogicalPlan(String queryStr) {
    List<Statement> statements = KSQL_PARSER.buildAst(queryStr, metaStore);
    // Analyze the query to resolve the references and extract operations
    Analysis analysis = new Analysis();
    Analyzer analyzer = new Analyzer(analysis, metaStore);
    analyzer.process(statements.get(0), new AnalysisContext(null));
    AggregateAnalysis aggregateAnalysis = new AggregateAnalysis();
    AggregateAnalyzer aggregateAnalyzer =
        new AggregateAnalyzer(aggregateAnalysis, analysis, functionRegistry);
    for (Expression expression : analysis.getSelectExpressions()) {
      aggregateAnalyzer.process(expression, new AnalysisContext(null));
    }
    // Build a logical plan from the (aggregate) analysis
    PlanNode logicalPlan =
        new LogicalPlanner(analysis, aggregateAnalysis, functionRegistry).buildPlan();
    return logicalPlan;
  }

  @Test
  public void testFilter() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(),
        kStream,
        ksqlStream.getKeyField(), new ArrayList<>(),
        SchemaKStream.Type.SOURCE, functionRegistry);
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(),
        initialSchemaKStream.getSchema(), false, functionRegistry);
    String filterExpression = predicate.getFilterExpression().toString();
    // Message added so a failure shows the actual expression, not just "false".
    Assert.assertTrue("unexpected filter expression: " + filterExpression,
        filterExpression.equalsIgnoreCase("(TEST1.COL0 > 100)"));
    // Only COL0 is referenced by the predicate; assertEquals reports expected vs actual.
    Assert.assertEquals(1, predicate.getColumnIndexes().length);
  }

  @Test
  public void testFilterBiggerExpression() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100 AND LEN(col2) = 5;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(),
        kStream,
        ksqlStream.getKeyField(), new ArrayList<>(),
        SchemaKStream.Type.SOURCE, functionRegistry);
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(),
        initialSchemaKStream.getSchema(), false, functionRegistry);
    String filterExpression = predicate.getFilterExpression().toString();
    Assert.assertTrue("unexpected filter expression: " + filterExpression,
        filterExpression.equalsIgnoreCase("((TEST1.COL0 > 100) AND"
            + " (LEN(TEST1.COL2) = 5))"));
    // COL0 and COL2 are referenced (plus the implicit row-key column).
    Assert.assertEquals(3, predicate.getColumnIndexes().length);
  }
}
| TheRealHaui/ksql | ksql-engine/src/test/java/io/confluent/ksql/structured/SqlPredicateTest.java | Java | apache-2.0 | 5,493 |
package com.specmate.persistency.cdo.internal;
import java.util.List;
import org.eclipse.emf.cdo.common.id.CDOID;
import org.eclipse.emf.cdo.common.id.CDOIDUtil;
import org.eclipse.emf.cdo.view.CDOQuery;
import org.eclipse.emf.cdo.view.CDOView;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.osgi.service.log.LogService;
import com.specmate.persistency.IView;
/**
 * {@link IView} implementation backed by a CDO view. Provides resource access,
 * id-based object lookup and OCL queries against a named CDO resource.
 */
public class ViewImpl implements IView {

	private CDOView view;
	private String resourceName;
	protected CDOPersistencyService persistency;

	public ViewImpl(CDOPersistencyService persistency, CDOView view, String resourceName, LogService logService) {
		this.persistency = persistency;
		this.view = view;
		this.resourceName = resourceName;
	}

	@Override
	public Resource getResource() {
		return view.getResource(resourceName);
	}

	@Override
	public EObject getObjectById(Object originId) {
		// Only CDOID instances and their string form are accepted as identifiers.
		if (!(originId instanceof CDOID) && !(originId instanceof String)) {
			return null;
		}
		CDOID id = (originId instanceof CDOID)
				? (CDOID) originId
				: CDOIDUtil.read((String) originId);
		return view.getObject(id);
	}

	public String getResourceName() {
		return resourceName;
	}

	public void setResourceName(String resourceName) {
		this.resourceName = resourceName;
	}

	@Override
	public List<Object> query(String queryString, Object context) {
		// Queries are evaluated with the OCL query engine of CDO.
		return view.createQuery("ocl", queryString, context).getResult();
	}

	/** Replaces the underlying CDO view (used when the connection is re-established). */
	public void update(CDOView view) {
		this.view = view;
	}

	/** Closes the underlying CDO view and deregisters this wrapper from the service. */
	public void close() {
		view.close();
		persistency.closedView(this);
	}
}
| junkerm/specmate | bundles/specmate-persistency-cdo/src/com/specmate/persistency/cdo/internal/ViewImpl.java | Java | apache-2.0 | 1,690 |
/*
Copyright 2013 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.extensions.sap;
import java.util.Map;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.core.IMessageHandler;
import nl.nn.adapterframework.core.IPushingListener;
import nl.nn.adapterframework.core.IbisExceptionListener;
import nl.nn.adapterframework.core.ListenerException;
import nl.nn.adapterframework.core.PipeLineResult;
/**
* Depending on the JCo version found (see {@link JCoVersion}) delegate to
* {@link nl.nn.adapterframework.extensions.sap.jco3.SapListener jco3.SapListener} or
* {@link nl.nn.adapterframework.extensions.sap.jco2.SapListener jco2.SapListener}
* Don't use the jco3 or jco2 class in your Ibis configuration, use this one
* instead.
*
* @author Jaco de Groot
* @since 5.0
* @version $Id$
*/
public class SapListener implements IPushingListener {

	// JCo version detected at construction time; 3 or 2 once the constructor succeeds.
	private int jcoVersion = 3;
	// Exactly one of these delegates is instantiated, depending on jcoVersion.
	private nl.nn.adapterframework.extensions.sap.jco3.SapListener sapListener3;
	private nl.nn.adapterframework.extensions.sap.jco2.SapListener sapListener2;

	/**
	 * Detects the available JCo version and instantiates the matching delegate.
	 *
	 * @throws ConfigurationException when no supported JCo version could be detected
	 */
	public SapListener() throws ConfigurationException {
		jcoVersion = JCoVersion.getInstance().getJCoVersion();
		if (jcoVersion == -1) {
			throw new ConfigurationException(JCoVersion.getInstance().getErrorMessage());
		}
		if (jcoVersion == 3) {
			sapListener3 = new nl.nn.adapterframework.extensions.sap.jco3.SapListener();
		} else {
			sapListener2 = new nl.nn.adapterframework.extensions.sap.jco2.SapListener();
		}
	}

	/** @return true when the JCo3 delegate is the active one */
	private boolean jco3() {
		return jcoVersion == 3;
	}

	public void configure() throws ConfigurationException {
		if (jco3()) {
			sapListener3.configure();
		} else {
			sapListener2.configure();
		}
	}

	public void open() throws ListenerException {
		if (jco3()) {
			sapListener3.open();
		} else {
			sapListener2.open();
		}
	}

	public void close() throws ListenerException {
		if (jco3()) {
			sapListener3.close();
		} else {
			sapListener2.close();
		}
	}

	public String getIdFromRawMessage(Object rawMessage, Map context) throws ListenerException {
		return jco3()
				? sapListener3.getIdFromRawMessage(rawMessage, context)
				: sapListener2.getIdFromRawMessage(rawMessage, context);
	}

	public String getStringFromRawMessage(Object rawMessage, Map context) throws ListenerException {
		return jco3()
				? sapListener3.getStringFromRawMessage(rawMessage, context)
				: sapListener2.getStringFromRawMessage(rawMessage, context);
	}

	public void afterMessageProcessed(PipeLineResult processResult, Object rawMessage, Map context) throws ListenerException {
		if (jco3()) {
			sapListener3.afterMessageProcessed(processResult, rawMessage, context);
		} else {
			sapListener2.afterMessageProcessed(processResult, rawMessage, context);
		}
	}

	public String getName() {
		return jco3() ? sapListener3.getName() : sapListener2.getName();
	}

	public void setName(String name) {
		if (jco3()) {
			sapListener3.setName(name);
		} else {
			sapListener2.setName(name);
		}
	}

	public void setHandler(IMessageHandler handler) {
		if (jco3()) {
			sapListener3.setHandler(handler);
		} else {
			sapListener2.setHandler(handler);
		}
	}

	public void setExceptionListener(IbisExceptionListener listener) {
		if (jco3()) {
			sapListener3.setExceptionListener(listener);
		} else {
			sapListener2.setExceptionListener(listener);
		}
	}

	public void setSapSystemName(String string) {
		if (jco3()) {
			sapListener3.setSapSystemName(string);
		} else {
			sapListener2.setSapSystemName(string);
		}
	}

	public void setProgid(String string) {
		if (jco3()) {
			sapListener3.setProgid(string);
		} else {
			sapListener2.setProgid(string);
		}
	}

	// Connection count only exists in the JCo3 delegate; silently ignored for JCo2
	// (same behavior as before).
	public void setConnectionCount(String connectionCount) {
		if (jco3()) {
			sapListener3.setConnectionCount(connectionCount);
		}
	}

	public void setCorrelationIdFieldIndex(int i) {
		if (jco3()) {
			sapListener3.setCorrelationIdFieldIndex(i);
		} else {
			sapListener2.setCorrelationIdFieldIndex(i);
		}
	}

	public void setCorrelationIdFieldName(String string) {
		if (jco3()) {
			sapListener3.setCorrelationIdFieldName(string);
		} else {
			sapListener2.setCorrelationIdFieldName(string);
		}
	}

	public void setReplyFieldIndex(int i) {
		if (jco3()) {
			sapListener3.setReplyFieldIndex(i);
		} else {
			sapListener2.setReplyFieldIndex(i);
		}
	}

	public void setReplyFieldName(String string) {
		if (jco3()) {
			sapListener3.setReplyFieldName(string);
		} else {
			sapListener2.setReplyFieldName(string);
		}
	}

	public void setRequestFieldIndex(int i) {
		if (jco3()) {
			sapListener3.setRequestFieldIndex(i);
		} else {
			sapListener2.setRequestFieldIndex(i);
		}
	}

	public void setRequestFieldName(String string) {
		if (jco3()) {
			sapListener3.setRequestFieldName(string);
		} else {
			sapListener2.setRequestFieldName(string);
		}
	}
}
| smhoekstra/iaf | JavaSource/nl/nn/adapterframework/extensions/sap/SapListener.java | Java | apache-2.0 | 5,611 |
package org.araqne.logstorage.dump.engine;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.araqne.api.Io;
import org.araqne.codec.EncodingRule;
import org.araqne.logstorage.Log;
import org.araqne.logstorage.LogStorage;
import org.araqne.logstorage.dump.DumpManifest;
import org.araqne.logstorage.dump.DumpService;
import org.araqne.logstorage.dump.DumpTabletEntry;
import org.araqne.logstorage.dump.DumpTabletKey;
import org.araqne.logstorage.dump.ImportRequest;
import org.araqne.logstorage.dump.ImportTabletTask;
import org.araqne.logstorage.dump.ImportTask;
import org.araqne.logstorage.dump.ImportWorker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Imports logs from a local dump (zip) file into log storage.
 * <p>
 * One worker executes a single {@link ImportRequest}; progress and cancellation
 * are tracked through the associated {@link ImportTask}.
 */
public class LocalImportWorker implements ImportWorker {
	private final Logger slog = LoggerFactory.getLogger(LocalImportWorker.class);
	// The import request this worker executes.
	private ImportRequest req;
	// Progress/cancellation state (one ImportTabletTask per requested tablet).
	private ImportTask task;
	private DumpService dumpService;
	private LogStorage storage;
	// Dump zip file location, taken from the request's "path" parameter.
	private File path;
	public LocalImportWorker(ImportRequest req, DumpService dumpService, LogStorage storage) {
		this.req = req;
		this.dumpService = dumpService;
		this.storage = storage;
		this.task = new ImportTask(req);
		// Pre-register one tablet task per requested (table, day) so progress can be reported.
		for (DumpTabletEntry e : req.getEntries()) {
			DumpTabletKey key = new DumpTabletKey(e.getTableName(), e.getDay());
			ImportTabletTask val = new ImportTabletTask(e.getTableName(), e.getDay(), e.getCount());
			task.getTabletTasks().put(key, val);
		}
		this.path = new File(req.getParams().get("path"));
	}
	@Override
	public ImportTask getTask() {
		return task;
	}
	@Override
	public void run() {
		slog.info("araqne logstorage: start import job [{}]", req.getGuid());
		ZipFile zipFile = null;
		try {
			// The manifest lists which (table, day) tablets the dump actually contains.
			DumpManifest manifest = dumpService.readManifest("local", req.getParams());
			Map<String, Integer> tables = manifest.getTables();
			Set<DumpTabletKey> keys = new HashSet<DumpTabletKey>();
			for (DumpTabletEntry e : manifest.getEntries()) {
				keys.add(new DumpTabletKey(e.getTableName(), e.getDay()));
			}
			zipFile = new ZipFile(path);
			for (DumpTabletKey key : task.getTabletTasks().keySet()) {
				// Honor cancellation between tablets.
				if (task.isCancelled())
					return;
				ImportTabletTask e = task.getTabletTasks().get(key);
				// Skip tablets that were requested but are not present in the dump.
				if (!keys.contains(key))
					continue;
				Integer tableId = tables.get(e.getTableName());
				if (tableId == null)
					continue;
				loadFile(zipFile, e, tableId);
			}
		} catch (InterruptedException e) {
			task.setCancelled();
		} catch (Throwable t) {
			task.setFailureException(t);
			slog.error("araqne logstorage: import job [" + req.getGuid() + "] failed", t);
		} finally {
			if (zipFile != null) {
				try {
					zipFile.close();
				} catch (IOException e) {
					// best-effort close; nothing meaningful to do on failure
				}
			}
			slog.info("araqne logstorage: import job [{}] completed", req.getGuid());
		}
	}
	/**
	 * Reads one tablet entry ("&lt;tableId&gt;/&lt;yyyy-MM-dd&gt;.dmp") from the zip and
	 * writes its decoded log blocks into storage, updating the tablet's import count.
	 * The entry is a sequence of blocks: a 4-byte length prefix (decoded by
	 * {@code Io.decodeInt}) followed by an encoded array of log maps.
	 */
	@SuppressWarnings("unchecked")
	private void loadFile(ZipFile zipFile, ImportTabletTask dumpEntry, int tableId) throws IOException, InterruptedException {
		long total = 0;
		InputStream is = null;
		try {
			String tableName = dumpEntry.getTableName();
			SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
			ZipEntry zipEntry = new ZipEntry(tableId + "/" + df.format(dumpEntry.getDay()) + ".dmp");
			is = zipFile.getInputStream(zipEntry);
			while (true) {
				// Honor cancellation between blocks.
				if (task.isCancelled())
					break;
				byte[] blen = new byte[4];
				int readBytes = Io.ensureRead(is, blen, 4);
				if (readBytes <= 0)
					break;
				int len = Io.decodeInt(blen);
				byte[] b = new byte[len];
				readBytes = Io.ensureRead(is, b, len);
				if (readBytes <= 0)
					break;
				if (slog.isDebugEnabled())
					slog.debug("araqne logstorage: importing table [{}] block len [{}] actual len [{}]", new Object[] {
							tableName, len, readBytes });
				// Each decoded element is a map representing one log; "_time" carries its timestamp.
				Object[] arr = (Object[]) EncodingRule.decode(ByteBuffer.wrap(b));
				List<Log> logs = new ArrayList<Log>();
				for (Object o : arr) {
					Map<String, Object> m = (Map<String, Object>) o;
					Date d = (Date) m.get("_time");
					Log log = new Log(tableName, d, m);
					logs.add(log);
				}
				total += logs.size();
				storage.write(logs);
				dumpEntry.setImportCount(total);
			}
		} catch (EOFException ex) {
			// End of entry stream: treated as normal termination of the block loop.
		} finally {
			if (is != null) {
				is.close();
			}
			// Mark the tablet finished even on partial reads, so progress reporting completes.
			dumpEntry.setCompleted(true);
		}
	}
}
| araqne/logdb | araqne-logstorage/src/main/java/org/araqne/logstorage/dump/engine/LocalImportWorker.java | Java | apache-2.0 | 4,560 |
/*
* Copyright (c) 2010-2015 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.task.quartzimpl;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.query.AndFilter;
import com.evolveum.midpoint.prism.query.EqualFilter;
import com.evolveum.midpoint.prism.query.ObjectFilter;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.query.builder.QueryBuilder;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResultStatus;
import com.evolveum.midpoint.task.quartzimpl.execution.JobExecutor;
import com.evolveum.midpoint.task.quartzimpl.handlers.NoOpTaskHandler;
import com.evolveum.midpoint.test.Checker;
import com.evolveum.midpoint.test.IntegrationTestTools;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.SystemException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.opends.server.types.Attribute;
import org.opends.server.types.SearchResultEntry;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.repo.api.RepositoryService;
import com.evolveum.midpoint.schema.MidPointPrismContextFactory;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.schema.util.ResourceTypeUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskBinding;
import com.evolveum.midpoint.task.api.TaskExecutionStatus;
import com.evolveum.midpoint.task.api.TaskRecurrence;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.JAXBUtil;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import static com.evolveum.midpoint.test.IntegrationTestTools.*;
import static org.testng.AssertJUnit.*;
/**
* @author Radovan Semancik
*/
@ContextConfiguration(locations = {"classpath:ctx-task.xml",
"classpath:ctx-task-test.xml",
"classpath:ctx-repo-cache.xml",
"classpath*:ctx-repository-test.xml",
"classpath:ctx-audit.xml",
"classpath:ctx-security.xml",
"classpath:ctx-common.xml",
"classpath:ctx-configuration-test.xml"})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class TestQuartzTaskManagerContract extends AbstractTestNGSpringContextTests {
private static final transient Trace LOGGER = TraceManager.getTrace(TestQuartzTaskManagerContract.class);
private static final String TASK_OWNER_FILENAME = "src/test/resources/repo/owner.xml";
private static final String TASK_OWNER2_FILENAME = "src/test/resources/repo/owner2.xml";
private static final String TASK_OWNER2_OID = "c0c010c0-d34d-b33f-f00d-111111111112";
private static final String NS_WHATEVER = "http://myself.me/schemas/whatever";
/** Builds the path of the test task definition file for the given test id. */
private static String taskFilename(String test) {
    return String.format("src/test/resources/repo/task-%s.xml", test);
}
/** Derives a deterministic task OID from the test id (first 3 chars) and a sub-id. */
private static String taskOid(String test, String subId) {
    final String prefix = "91919191-76e0-59e2-86d6-55665566";
    return prefix + subId + test.substring(0, 3);
}
/** Convenience overload: sub-id "0" denotes the primary task of a test. */
private static String taskOid(String test) {
    return taskOid(test, "0");
}
/** Prints/logs a test banner and creates a fresh operation result for the test. */
private static OperationResult createResult(String test) {
    String banner = "===[ " + test + " ]===";
    System.out.println(banner);
    LOGGER.info(banner);
    return new OperationResult(TestQuartzTaskManagerContract.class.getName() + ".test" + test);
}
private static final String CYCLE_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/cycle-task-handler";
private static final String CYCLE_FINISHING_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/cycle-finishing-task-handler";
public static final String SINGLE_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/single-task-handler";
public static final String SINGLE_TASK_HANDLER_2_URI = "http://midpoint.evolveum.com/test/single-task-handler-2";
public static final String SINGLE_TASK_HANDLER_3_URI = "http://midpoint.evolveum.com/test/single-task-handler-3";
public static final String L1_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/l1-task-handler";
public static final String L2_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/l2-task-handler";
public static final String L3_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/l3-task-handler";
public static final String WAIT_FOR_SUBTASKS_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/wait-for-subtasks-task-handler";
public static final String PARALLEL_TASK_HANDLER_URI = "http://midpoint.evolveum.com/test/parallel-task-handler";
@Autowired(required = true)
private RepositoryService repositoryService;
private static boolean repoInitialized = false;
@Autowired(required = true)
private TaskManagerQuartzImpl taskManager;
@Autowired(required = true)
private PrismContext prismContext;
@BeforeSuite
public void setup() throws SchemaException, SAXException, IOException {
    // One-time prism/schema bootstrap for the whole suite: register the default
    // namespace prefix and (re)create the global prism context.
    PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
    PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
}
// We need this complicated init as we want to initialize repo only once.
// JUnit will
// create new class instance for every test, so @Before and @PostInit will
// not work
// directly. We also need to init the repo after spring autowire is done, so
// @BeforeClass won't work either.
/** One-time repository initialization guard (runs before each test method). */
@BeforeMethod
public void initRepository() throws Exception {
    if (repoInitialized) {
        return;
    }
    // addObjectFromFile(SYSTEM_CONFIGURATION_FILENAME);
    repoInitialized = true;
}
MockSingleTaskHandler singleHandler1, singleHandler2, singleHandler3;
MockSingleTaskHandler l1Handler, l2Handler, l3Handler;
MockSingleTaskHandler waitForSubtasksTaskHandler;
MockCycleTaskHandler cycleFinishingHandler;
MockParallelTaskHandler parallelTaskHandler;
@PostConstruct
public void initHandlers() throws Exception {
    // Register every mock task handler URI used by the tests, then load the two
    // task-owner users into the repository.
    MockCycleTaskHandler cycleHandler = new MockCycleTaskHandler(false); // ordinary recurring task
    taskManager.registerHandler(CYCLE_TASK_HANDLER_URI, cycleHandler);
    cycleFinishingHandler = new MockCycleTaskHandler(true); // finishes the handler
    taskManager.registerHandler(CYCLE_FINISHING_TASK_HANDLER_URI, cycleFinishingHandler);
    singleHandler1 = new MockSingleTaskHandler("1", taskManager);
    taskManager.registerHandler(SINGLE_TASK_HANDLER_URI, singleHandler1);
    singleHandler2 = new MockSingleTaskHandler("2", taskManager);
    taskManager.registerHandler(SINGLE_TASK_HANDLER_2_URI, singleHandler2);
    singleHandler3 = new MockSingleTaskHandler("3", taskManager);
    taskManager.registerHandler(SINGLE_TASK_HANDLER_3_URI, singleHandler3);
    // Three-level handler stack (L1 -> L2 -> L3) used by the handler-stack tests.
    l1Handler = new MockSingleTaskHandler("L1", taskManager);
    l2Handler = new MockSingleTaskHandler("L2", taskManager);
    l3Handler = new MockSingleTaskHandler("L3", taskManager);
    taskManager.registerHandler(L1_TASK_HANDLER_URI, l1Handler);
    taskManager.registerHandler(L2_TASK_HANDLER_URI, l2Handler);
    taskManager.registerHandler(L3_TASK_HANDLER_URI, l3Handler);
    waitForSubtasksTaskHandler = new MockSingleTaskHandler("WFS", taskManager);
    taskManager.registerHandler(WAIT_FOR_SUBTASKS_TASK_HANDLER_URI, waitForSubtasksTaskHandler);
    parallelTaskHandler = new MockParallelTaskHandler("1", taskManager);
    taskManager.registerHandler(PARALLEL_TASK_HANDLER_URI, parallelTaskHandler);
    addObjectFromFile(TASK_OWNER_FILENAME);
    addObjectFromFile(TASK_OWNER2_FILENAME);
}
/**
 * Tests the integrity of the test setup: verifies that Spring autowiring
 * supplied both the repository service and the task manager.
 */
@Test(enabled = true)
public void test000Integrity() {
    // Sanity check: Spring wiring delivered both collaborators this suite depends on.
    AssertJUnit.assertNotNull(repositoryService);
    AssertJUnit.assertNotNull(taskManager);
}
/**
 * Here we only test reading the initial task progress of a freshly imported task.
 */
@Test(enabled = true)
public void test003GetProgress() throws Exception {
    String test = "003GetProgress";
    OperationResult result = createResult(test);
    // Import the task definition for this test, then read it back.
    addObjectFromFile(taskFilename(test));
    logger.trace("Retrieving the task and getting its progress...");
    TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
    // A never-executed task must report zero progress.
    AssertJUnit.assertEquals("Progress is not 0", 0, task.getProgress());
}
// Round-trips a long (300-char), non-indexed dynamic extension property through the
// repository twice. Disabled because the "indexed" flag of the dynamic definition
// is lost on repo retrieval (see in-code notes), so the second round fails.
@Test(enabled=false) // this is probably OK to fail, so do not enable it (at least for now)
public void test004aTaskBigProperty() throws Exception {
    String test = "004aTaskBigProperty";
    OperationResult result = createResult(test);
    // Two 300-character values; the second differs only in the first 9 characters.
    String string300 = "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-";
    String string300a = "AAAAAAAAA-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-";
    addObjectFromFile(taskFilename(test));
    TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
    // property definition: a dynamic, non-indexed single-valued string property
    QName bigStringQName = new QName("http://midpoint.evolveum.com/repo/test", "bigString");
    PrismPropertyDefinitionImpl bigStringDefinition = new PrismPropertyDefinitionImpl(bigStringQName, DOMUtil.XSD_STRING, taskManager.getPrismContext());
    bigStringDefinition.setIndexed(false);
    bigStringDefinition.setMinOccurs(0);
    bigStringDefinition.setMaxOccurs(1);
    System.out.println("bigstring property definition = " + bigStringDefinition);
    PrismProperty<String> bigStringProperty = (PrismProperty<String>) bigStringDefinition.instantiate();
    bigStringProperty.setRealValue(string300);
    task.setExtensionProperty(bigStringProperty);
    task.savePendingModifications(result);
    System.out.println("1st round: Task = " + task.debugDump());
    logger.trace("Retrieving the task and comparing its properties...");
    Task task001 = taskManager.getTask(taskOid(test), result);
    System.out.println("1st round: Task from repo: " + task001.debugDump());
    PrismProperty<String> bigString001 = task001.getExtensionProperty(bigStringQName);
    assertEquals("Big string not retrieved correctly (1st round)", bigStringProperty.getRealValue(), bigString001.getRealValue());
    // second round: overwrite with the changed value and round-trip again
    bigStringProperty.setRealValue(string300a);
    task001.setExtensionProperty(bigStringProperty);
    // brutal hack, because task extension property has no "indexed" flag when retrieved from repo
    ((PrismPropertyDefinitionImpl) task001.getExtensionProperty(bigStringQName).getDefinition()).setIndexed(false);
    System.out.println("2nd round: Task before save = " + task001.debugDump());
    task001.savePendingModifications(result); // however, this does not work, because 'modifyObject' in repo first reads object, overwriting any existing definitions ...
    Task task002 = taskManager.getTask(taskOid(test), result);
    System.out.println("2nd round: Task from repo: " + task002.debugDump());
    PrismProperty<String> bigString002 = task002.getExtensionProperty(bigStringQName);
    assertEquals("Big string not retrieved correctly (2nd round)", bigStringProperty.getRealValue(), bigString002.getRealValue());
}
// Same long-string round trip as test004a, but using the statically registered
// "shipState" property definition instead of a dynamically built one.
// NOTE(review): the test id reuses "004aTaskBigProperty" — it loads
// task-004aTaskBigProperty.xml and derives the same OID prefix. Presumably
// intentional (test004a is disabled, so there is no collision), but confirm.
@Test(enabled = true)
public void test004bTaskBigProperty() throws Exception {
    String test = "004aTaskBigProperty";
    OperationResult result = createResult(test);
    // Two 300-character values; the second differs only in the first 9 characters.
    String string300 = "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-";
    String string300a = "AAAAAAAAA-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-"
            + "123456789-123456789-123456789-123456789-123456789-";
    addObjectFromFile(taskFilename(test));
    TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
    // property definition: look up the registered "shipState" property in the schema registry
    QName shipStateQName = new QName("http://myself.me/schemas/whatever", "shipState");
    PrismPropertyDefinition shipStateDefinition = prismContext.getSchemaRegistry().findPropertyDefinitionByElementName(shipStateQName);
    assertNotNull("Cannot find property definition for shipState", shipStateDefinition);
    PrismProperty<String> shipStateProperty = (PrismProperty<String>) shipStateDefinition.instantiate();
    shipStateProperty.setRealValue(string300);
    task.setExtensionProperty(shipStateProperty);
    task.savePendingModifications(result);
    System.out.println("1st round: Task = " + task.debugDump());
    logger.trace("Retrieving the task and comparing its properties...");
    Task task001 = taskManager.getTask(taskOid(test), result);
    System.out.println("1st round: Task from repo: " + task001.debugDump());
    PrismProperty<String> shipState001 = task001.getExtensionProperty(shipStateQName);
    assertEquals("Big string not retrieved correctly (1st round)", shipStateProperty.getRealValue(), shipState001.getRealValue());
    // second round: overwrite with the changed value and round-trip again
    shipStateProperty.setRealValue(string300a);
    task001.setExtensionProperty(shipStateProperty);
    System.out.println("2nd round: Task before save = " + task001.debugDump());
    task001.savePendingModifications(result);
    Task task002 = taskManager.getTask(taskOid(test), result);
    System.out.println("2nd round: Task from repo: " + task002.debugDump());
    PrismProperty<String> bigString002 = task002.getExtensionProperty(shipStateQName);
    assertEquals("Big string not retrieved correctly (2nd round)", shipStateProperty.getRealValue(), bigString002.getRealValue());
}
// Disabled: storing an object reference in a task extension. Only the extension
// dump remains active; the reference round-trip below is commented out because
// saving it crashes (see in-code note).
@Test(enabled = false)
public void test004cReferenceInExtension() throws Exception { // ok to fail
    String test = "004cReferenceInExtension";
    OperationResult result = createResult(test);
    addObjectFromFile(taskFilename(test));
    TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
    System.out.println("Task extension = " + task.getExtension());
    //PrismObject<UserType> requestee = task.getOwner();
    //task.setRequesteeRef(requestee);
    //logger.trace("Saving modifications...");
    //task.savePendingModifications(result); // here it crashes
    //logger.trace("Retrieving the task and comparing its properties...");
    //Task task001 = taskManager.getTask(taskOid(test), result);
    //logger.trace("Task from repo: " + task001.debugDump());
    //AssertJUnit.assertEquals("RequesteeRef was not stored/retrieved correctly", requestee.getOid(), task001.getRequesteeRef().getOid());
}
@Test(enabled = false)
public void test004TaskProperties() throws Exception {
// Disabled round-trip test for task properties. Flow:
//   1. add a task fixture and load it;
//   2. apply an extension delta (noop delay = 100) plus a batch of property
//      changes (binding, name, progress, run timestamps, execution status,
//      handler URI, recurrence, result, objectRef) and two pushHandlerUri
//      calls that build a three-deep handler stack with distinct schedules,
//      bindings and extension values;
//   3. re-read the task from the repository and assert every property
//      survived storage;
//   4. pop the handler stack three times via finishHandler and assert the
//      handler URI / schedule / binding / extension value restored after
//      each pop, ending with the task CLOSED.
String test = "004TaskProperties";
OperationResult result = createResult(test);
addObjectFromFile(taskFilename(test));
TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
System.out.println("Task extension = " + task.getExtension());
// Build a property delta for the extension: noop delay = 100.
PrismPropertyDefinition delayDefinition = new PrismPropertyDefinitionImpl(SchemaConstants.NOOP_DELAY_QNAME, DOMUtil.XSD_INT, taskManager.getPrismContext());
System.out.println("property definition = " + delayDefinition);
PrismProperty<Integer> property = (PrismProperty<Integer>) delayDefinition.instantiate();
property.setRealValue(100);
PropertyDelta delta = new PropertyDelta(new ItemPath(TaskType.F_EXTENSION, property.getElementName()), property.getDefinition(), prismContext);
//delta.addV(property.getValues());
delta.setValuesToReplace(PrismValue.cloneCollection(property.getValues()));
Collection<ItemDelta<?,?>> modifications = new ArrayList<>(1);
modifications.add(delta);
// TODO fix this code
// Collection<ItemDeltaType> idts = DeltaConvertor.toPropertyModificationTypes(delta);
// for (ItemDeltaType idt : idts) {
// String idtxml = prismContext.getParserDom().marshalElementToString(idt, new QName("http://a/", "A"));
// System.out.println("item delta type = " + idtxml);
//
// ItemDeltaType idt2 = prismContext.getPrismJaxbProcessor().unmarshalObject(idtxml, ItemDeltaType.class);
// ItemDelta id2 = DeltaConvertor.createItemDelta(idt2, TaskType.class, prismContext);
// System.out.println("unwrapped item delta = " + id2.debugDump());
//
// task.modifyExtension(id2);
// }
task.savePendingModifications(result);
System.out.println("Task = " + task.debugDump());
PrismObject<UserType> owner2 = repositoryService.getObject(UserType.class, TASK_OWNER2_OID, null, result);
// Binding is changed immediately; the remaining setters below are batched
// and only persisted by the savePendingModifications call further down.
task.setBindingImmediate(TaskBinding.LOOSE, result);
// other properties will be set in batched mode
String newname = "Test task, name changed";
task.setName(PrismTestUtil.createPolyStringType(newname));
task.setProgress(10);
long currentTime = System.currentTimeMillis();
long currentTime1 = currentTime + 10000;
long currentTime2 = currentTime + 25000;
task.setLastRunStartTimestamp(currentTime);
task.setLastRunFinishTimestamp(currentTime1);
task.setExecutionStatus(TaskExecutionStatus.SUSPENDED);
task.setHandlerUri("http://no-handler.org/");
//task.setOwner(owner2);
// Push two more handlers so the stack is: "/" (bottom), "/1", "/2" (top),
// each with its own schedule, binding and extension delta (delay 1 / 2).
ScheduleType st0 = task.getSchedule();
ScheduleType st1 = new ScheduleType();
st1.setInterval(1);
st1.setMisfireAction(MisfireActionType.RESCHEDULE);
task.pushHandlerUri("http://no-handler.org/1", st1, TaskBinding.TIGHT, ((TaskQuartzImpl) task).createExtensionDelta(delayDefinition, 1));
ScheduleType st2 = new ScheduleType();
st2.setInterval(2);
st2.setMisfireAction(MisfireActionType.EXECUTE_IMMEDIATELY);
task.pushHandlerUri("http://no-handler.org/2", st2, TaskBinding.LOOSE, ((TaskQuartzImpl) task).createExtensionDelta(delayDefinition, 2));
task.setRecurrenceStatus(TaskRecurrence.RECURRING);
OperationResultType ort = result.createOperationResultType(); // to be compared with later
task.setResult(result);
//PrismObject<UserType> requestee = task.getOwner();
//task.setRequesteeRef(requestee); does not work
//task.setRequesteeOid(requestee.getOid());
ObjectReferenceType objectReferenceType = new ObjectReferenceType();
objectReferenceType.setType(UserType.COMPLEX_TYPE);
String objectOid = "some-oid...";
objectReferenceType.setOid(objectOid);
task.setObjectRef(objectReferenceType);
logger.trace("Saving modifications...");
task.savePendingModifications(result);
logger.trace("Retrieving the task (second time) and comparing its properties...");
Task task001 = taskManager.getTask(taskOid(test), result);
logger.trace("Task from repo: " + task001.debugDump());
AssertJUnit.assertEquals(TaskBinding.LOOSE, task001.getBinding());
PrismAsserts.assertEqualsPolyString("Name not", newname, task001.getName());
// AssertJUnit.assertEquals(newname, task001.getName());
AssertJUnit.assertTrue(10 == task001.getProgress());
AssertJUnit.assertNotNull(task001.getLastRunStartTimestamp());
AssertJUnit.assertEquals("Start time is not correct", (Long) (currentTime / 1000L), (Long) (task001.getLastRunStartTimestamp() / 1000L)); // e.g. MySQL cuts off millisecond information
AssertJUnit.assertNotNull(task001.getLastRunFinishTimestamp());
AssertJUnit.assertEquals("Finish time is not correct", (Long) (currentTime1 / 1000L), (Long) (task001.getLastRunFinishTimestamp() / 1000L));
// AssertJUnit.assertEquals(TaskExclusivityStatus.CLAIMED, task001.getExclusivityStatus());
AssertJUnit.assertEquals(TaskExecutionStatus.SUSPENDED, task001.getExecutionStatus());
AssertJUnit.assertEquals("Handler after 2xPUSH is not OK", "http://no-handler.org/2", task001.getHandlerUri());
AssertJUnit.assertEquals("Schedule after 2xPUSH is not OK", st2, task001.getSchedule());
AssertJUnit.assertEquals("Number of handlers is not OK", 3, task.getHandlersCount());
UriStack us = task.getOtherHandlersUriStack();
AssertJUnit.assertEquals("First handler from the handler stack does not match", "http://no-handler.org/", us.getUriStackEntry().get(0).getHandlerUri());
AssertJUnit.assertEquals("First schedule from the handler stack does not match", st0, us.getUriStackEntry().get(0).getSchedule());
AssertJUnit.assertEquals("Second handler from the handler stack does not match", "http://no-handler.org/1", us.getUriStackEntry().get(1).getHandlerUri());
AssertJUnit.assertEquals("Second schedule from the handler stack does not match", st1, us.getUriStackEntry().get(1).getSchedule());
AssertJUnit.assertTrue(task001.isCycle());
OperationResult r001 = task001.getResult();
AssertJUnit.assertNotNull(r001);
//AssertJUnit.assertEquals("Owner OID is not correct", TASK_OWNER2_OID, task001.getOwner().getOid());
PrismProperty<?> d = task001.getExtensionProperty(SchemaConstants.NOOP_DELAY_QNAME);
AssertJUnit.assertNotNull("delay extension property was not found", d);
AssertJUnit.assertEquals("delay extension property has wrong value", (Integer) 100, d.getRealValue(Integer.class));
OperationResultType ort1 = r001.createOperationResultType();
// handling of operation result in tasks is extremely fragile now...
// in case of problems, just uncomment the following line ;)
AssertJUnit.assertEquals(ort, ort1);
//AssertJUnit.assertEquals("RequesteeRef was not stored/retrieved correctly", requestee.getOid(), task001.getRequesteeRef().getOid());
//AssertJUnit.assertEquals("RequesteeOid was not stored/retrieved correctly", requestee.getOid(), task001.getRequesteeOid());
AssertJUnit.assertEquals("ObjectRef OID was not stored/retrieved correctly", objectReferenceType.getOid(), task001.getObjectRef().getOid());
AssertJUnit.assertEquals("ObjectRef ObjectType was not stored/retrieved correctly", objectReferenceType.getType(), task001.getObjectRef().getType());
// now pop the handlers
// First pop: "/2" is removed, "/1" with st1/TIGHT/delay=2 becomes current.
((TaskQuartzImpl) task001).finishHandler(result);
task001.refresh(result);
AssertJUnit.assertEquals("Handler URI after first POP is not correct", "http://no-handler.org/1", task001.getHandlerUri());
AssertJUnit.assertEquals("Schedule after first POP is not correct", st1, task001.getSchedule());
AssertJUnit.assertEquals("Binding after first POP is not correct", TaskBinding.TIGHT, task001.getBinding());
AssertJUnit.assertNotSame("Task state after first POP should not be CLOSED", TaskExecutionStatus.CLOSED, task001.getExecutionStatus());
AssertJUnit.assertEquals("Extension element value is not correct after first POP", (Integer) 2, task001.getExtensionProperty(SchemaConstants.NOOP_DELAY_QNAME).getRealValue(Integer.class));
// Second pop: "/1" is removed, original "/" with st0/LOOSE/delay=1 returns.
((TaskQuartzImpl) task001).finishHandler(result);
task001.refresh(result);
AssertJUnit.assertEquals("Handler URI after second POP is not correct", "http://no-handler.org/", task001.getHandlerUri());
AssertJUnit.assertEquals("Schedule after second POP is not correct", st0, task001.getSchedule());
AssertJUnit.assertEquals("Binding after second POP is not correct", TaskBinding.LOOSE, task001.getBinding());
AssertJUnit.assertNotSame("Task state after second POP should not be CLOSED", TaskExecutionStatus.CLOSED, task001.getExecutionStatus());
AssertJUnit.assertEquals("Extension element value is not correct after second POP", (Integer) 1, task001.getExtensionProperty(SchemaConstants.NOOP_DELAY_QNAME).getRealValue(Integer.class));
// Third pop empties the stack: the last handler URI is kept, task closes.
((TaskQuartzImpl) task001).finishHandler(result);
task001.refresh(result);
//AssertJUnit.assertNull("Handler URI after third POP is not null", task001.getHandlerUri());
AssertJUnit.assertEquals("Handler URI after third POP is not correct", "http://no-handler.org/", task001.getHandlerUri());
AssertJUnit.assertEquals("Task state after third POP is not CLOSED", TaskExecutionStatus.CLOSED, task001.getExecutionStatus());
}
/*
* Execute a single-run task.
*/
@Test(enabled = true)
public void test005Single() throws Exception {
// Executes a single-run task end-to-end: imports the fixture, waits (up to
// 10 s, polling every 1 s) for the scheduler to pick it up and close it,
// then asserts the task is CLOSED, recorded non-zero run timestamps, made
// progress, produced a success result, kept its handler URI, and that the
// registered singleHandler1 actually ran.
final String test = "005Single";
final OperationResult result = createResult(test);
// reset 'has run' flag on the handler
singleHandler1.resetHasRun();
// Add single task. This will get picked by task scanner and executed
addObjectFromFile(taskFilename(test));
logger.trace("Retrieving the task...");
TaskQuartzImpl task = (TaskQuartzImpl) taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task);
logger.trace("Task retrieval OK.");
// We need to wait for a sync interval, so the task scanner has a chance
// to pick up this
// task
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
}
@Override
public void timeout() {
}
}, 10000, 1000);
logger.info("... done");
// Check task status
Task task1 = taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task1);
System.out.println("getTask returned: " + task1.debugDump());
PrismObject<TaskType> po = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
System.out.println("getObject returned: " + po.debugDump());
// .. it should be closed
AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task1.getExecutionStatus());
// .. and released
// AssertJUnit.assertEquals(TaskExclusivityStatus.RELEASED, task1.getExclusivityStatus());
// .. and last run should not be zero
AssertJUnit.assertNotNull("LastRunStartTimestamp is null", task1.getLastRunStartTimestamp());
assertFalse("LastRunStartTimestamp is 0", task1.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull("LastRunFinishTimestamp is null", task1.getLastRunFinishTimestamp());
assertFalse("LastRunFinishTimestamp is 0", task1.getLastRunFinishTimestamp().longValue() == 0);
// The progress should be more than 0 as the task has run at least once
AssertJUnit.assertTrue("Task reported no progress", task1.getProgress() > 0);
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task1.getResult();
AssertJUnit.assertNotNull("Task result is null", taskResult);
AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
// Test for no presence of handlers
// NOTE(review): despite the comment above, the current contract keeps the
// last handler URI on a closed task (see commented assertNull alternative).
//AssertJUnit.assertNull("Handler is still present", task1.getHandlerUri())
AssertJUnit.assertNotNull("Handler is gone", task1.getHandlerUri());
AssertJUnit.assertTrue("Other handlers are still present",
task1.getOtherHandlersUriStack() == null || task1.getOtherHandlersUriStack().getUriStackEntry().isEmpty());
// Test whether handler has really run
AssertJUnit.assertTrue("Handler1 has not run", singleHandler1.hasRun());
}
/*
* Executes a cyclic task
*/
@Test(enabled = true)
public void test006Cycle() throws Exception {
    // Executes a tightly-bound recurring (cyclic) task: imports the fixture,
    // sanity-checks that the typed extension property "dead" survives both the
    // add operation and a repository read (a known xsi:type problem), waits for
    // the scheduler to run the task at least once, asserts it is RUNNABLE with
    // non-zero run timestamps, progress >= 1 and a success result, and finally
    // suspends it to keep the logs clean.
    final String test = "006Cycle";
    final OperationResult result = createResult(test);
    // But before that check sanity ... a known problem with xsi:type
    PrismObject<? extends ObjectType> object = addObjectFromFile(taskFilename(test));
    ObjectType objectType = object.asObjectable();
    TaskType addedTask = (TaskType) objectType;
    System.out.println("Added task");
    System.out.println(object.debugDump());
    // The "dead" extension property must keep its xsd:int definition right
    // after the add...
    PrismContainer<?> extensionContainer = object.getExtension();
    PrismProperty<Object> deadProperty = extensionContainer.findProperty(new QName(NS_WHATEVER, "dead"));
    assertEquals("Bad typed of 'dead' property (add result)", DOMUtil.XSD_INT, deadProperty.getDefinition().getTypeName());
    // ...and also when the task is read back from the repository.
    // (The unused repoTaskType local from the original version was removed.)
    PrismObject<TaskType> repoTask = repositoryService.getObject(TaskType.class, addedTask.getOid(), null, result);
    extensionContainer = repoTask.getExtension();
    deadProperty = extensionContainer.findProperty(new QName(NS_WHATEVER, "dead"));
    assertEquals("Bad typed of 'dead' property (from repo)", DOMUtil.XSD_INT, deadProperty.getDefinition().getTypeName());
    // We need to wait for a sync interval, so the task scanner has a chance
    // to pick up this
    // task
    waitFor("Waiting for task manager to execute the task", new Checker() {
        public boolean check() throws ObjectNotFoundException, SchemaException {
            Task task = taskManager.getTask(taskOid(test), result);
            IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
            return task.getProgress() > 0;
        }
        @Override
        public void timeout() {
        }
    }, 10000, 2000);
    // Check task status
    Task task = taskManager.getTask(taskOid(test), result);
    AssertJUnit.assertNotNull(task);
    System.out.println(task.debugDump());
    PrismObject<TaskType> t = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
    System.out.println(t.debugDump());
    // .. it should be running
    AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
    // .. and claimed
    // AssertJUnit.assertEquals(TaskExclusivityStatus.CLAIMED, task.getExclusivityStatus());
    // .. and last run should not be zero
    AssertJUnit.assertNotNull("LastRunStartTimestamp is null", task.getLastRunStartTimestamp());
    assertFalse("LastRunStartTimestamp is 0", task.getLastRunStartTimestamp().longValue() == 0);
    AssertJUnit.assertNotNull("LastRunFinishTimestamp is null", task.getLastRunFinishTimestamp());
    assertFalse("LastRunFinishTimestamp is 0", task.getLastRunFinishTimestamp().longValue() == 0);
    // The progress should be more at least 1 - so small because of lazy testing machine ... (wait time before task runs is 2 seconds)
    AssertJUnit.assertTrue("Task progress is too small (should be at least 1)", task.getProgress() >= 1);
    // Test for presence of a result. It should be there and it should
    // indicate success
    OperationResult taskResult = task.getResult();
    AssertJUnit.assertNotNull("Task result is null", taskResult);
    AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
    // Suspend the task (in order to keep logs clean), without much waiting
    taskManager.suspendTask(task, 100, result);
}
/*
* Single-run task with more handlers.
*/
@Test(enabled = true)
public void test008MoreHandlers() throws Exception {
// Executes a single-run task whose fixture defines a three-deep handler
// stack. Waits for the task to close, then verifies: CLOSED state, non-zero
// run timestamps, progress > 0, a success result, the handler stack drained
// down to the last handler URI, and that all three registered handlers ran.
final String test = "008MoreHandlers";
final OperationResult result = createResult(test);
// reset 'has run' flag on handlers
singleHandler1.resetHasRun();
singleHandler2.resetHasRun();
singleHandler3.resetHasRun();
addObjectFromFile(taskFilename(test));
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
}
@Override
public void timeout() {
}
}, 15000, 2000);
// Check task status
Task task = taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task);
System.out.println(task.debugDump());
PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
System.out.println(ObjectTypeUtil.dump(o.getValue().getValue()));
// .. it should be closed
AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task.getExecutionStatus());
// .. and released
// AssertJUnit.assertEquals(TaskExclusivityStatus.RELEASED, task.getExclusivityStatus());
// .. and last run should not be zero
AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull("Last run finish timestamp not set", task.getLastRunFinishTimestamp());
assertFalse("Last run finish timestamp is 0", task.getLastRunFinishTimestamp().longValue() == 0);
// The progress should be more than 0 as the task has run at least once
AssertJUnit.assertTrue("Task reported no progress", task.getProgress() > 0);
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task.getResult();
AssertJUnit.assertNotNull("Task result is null", taskResult);
AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
// Test for no presence of handlers
// (the last handler URI is kept on a closed task; only the stack empties)
AssertJUnit.assertNotNull("Handler is gone", task.getHandlerUri());
AssertJUnit.assertTrue("Other handlers are still present",
task.getOtherHandlersUriStack() == null || task.getOtherHandlersUriStack().getUriStackEntry().isEmpty());
// Test if all three handlers were run
AssertJUnit.assertTrue("Handler1 has not run", singleHandler1.hasRun());
AssertJUnit.assertTrue("Handler2 has not run", singleHandler2.hasRun());
AssertJUnit.assertTrue("Handler3 has not run", singleHandler3.hasRun());
}
@Test(enabled = true)
public void test009CycleLoose() throws Exception {
    // Executes a loosely-bound recurring task: imports the fixture, waits for
    // at least one execution, then asserts the task is RUNNABLE with non-zero
    // run timestamps, a progress between 1 and 7 (too-high progress would
    // indicate a scheduling fault), and a success result; finally suspends the
    // task to keep the logs clean.
    final String test = "009CycleLoose";
    final OperationResult result = createResult(test);
    // Import the fixture; the returned object was never used, so the unused
    // local from the original version was dropped.
    addObjectFromFile(taskFilename(test));
    // We need to wait for a sync interval, so the task scanner has a chance
    // to pick up this task
    waitFor("Waiting for task manager to execute the task", new Checker() {
        public boolean check() throws ObjectNotFoundException, SchemaException {
            Task task = taskManager.getTask(taskOid(test), result);
            IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
            return task.getProgress() >= 1;
        }
        @Override
        public void timeout() {
        }
    }, 15000, 2000);
    // Check task status
    Task task = taskManager.getTask(taskOid(test), result);
    AssertJUnit.assertNotNull(task);
    System.out.println(task.debugDump());
    PrismObject<TaskType> t = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
    System.out.println(t.debugDump());
    // .. it should be running
    AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
    // .. and last run should not be zero
    AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
    assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
    AssertJUnit.assertNotNull(task.getLastRunFinishTimestamp());
    assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
    // The progress should be more at least 1 - lazy neptunus... (wait time before task runs is 2 seconds)
    AssertJUnit.assertTrue("Progress is none or too small", task.getProgress() >= 1);
    // The progress should not be too big (indicates fault in scheduling)
    AssertJUnit.assertTrue("Progress is too big (fault in scheduling?)", task.getProgress() <= 7);
    // Test for presence of a result. It should be there and it should
    // indicate success
    OperationResult taskResult = task.getResult();
    AssertJUnit.assertNotNull("Task result is null", taskResult);
    AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
    // Suspend the task (in order to keep logs clean), without much waiting
    taskManager.suspendTask(task, 100, result);
}
@Test(enabled = true)
public void test010CycleCronLoose() throws Exception {
// Executes a loosely-bound recurring task driven by a cron-style schedule:
// waits for at least two executions (progress >= 2), asserts RUNNABLE
// state, non-zero run timestamps and a success result, then suspends the
// task to keep the logs clean.
final String test = "010CycleCronLoose";
final OperationResult result = createResult(test);
addObjectFromFile(taskFilename(test));
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getProgress() >= 2;
}
@Override
public void timeout() {
}
}, 15000, 2000);
// Check task status
Task task = taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task);
System.out.println(task.debugDump());
TaskType t = repositoryService.getObject(TaskType.class, taskOid(test), null, result).getValue().getValue();
System.out.println(ObjectTypeUtil.dump(t));
AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
// .. and last run should not be zero
AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull(task.getLastRunFinishTimestamp());
assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
// The progress should be at least 2 as the task has run at least twice
AssertJUnit.assertTrue("Task has not been executed at least twice", task.getProgress() >= 2);
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task.getResult();
AssertJUnit.assertNotNull("Task result is null", taskResult);
AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
// Suspend the task (in order to keep logs clean), without much waiting
taskManager.suspendTask(task, 100, result);
}
@Test(enabled = true)
public void test011MoreHandlersAndSchedules() throws Exception {
// Executes a task whose fixture defines a three-level handler stack
// (L1/L2/L3) with per-handler schedules. Waits (up to 30 s) for the task
// to close, then verifies the exact expected progress (8 — see the step
// trace further below), run timestamps, success result, drained handler
// stack, and that each of the three handlers ran.
final String test = "011MoreHandlersAndSchedules";
final OperationResult result = createResult(test);
// reset 'has run' flag on handlers
l1Handler.resetHasRun();
l2Handler.resetHasRun();
l3Handler.resetHasRun();
addObjectFromFile(taskFilename(test));
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
}
@Override
public void timeout() {
}
}, 30000, 2000);
// Check task status
Task task = taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task);
System.out.println(task.debugDump());
PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
System.out.println(ObjectTypeUtil.dump(o.getValue().getValue()));
// .. it should be closed
AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task.getExecutionStatus());
// .. and last run should not be zero
AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull("Last run finish timestamp not set", task.getLastRunFinishTimestamp());
assertFalse("Last run finish timestamp is 0", task.getLastRunFinishTimestamp().longValue() == 0);
/*
* Here the execution should be as follows:
* progress: 0->1 on first execution of L1 handler
* progress: 1->2 on first execution of L2 handler (ASAP after finishing L1)
* progress: 2->3 on second execution of L2 handler (2 seconds later)
* progress: 3->4 on third execution of L2 handler (2 seconds later)
* progress: 4->5 on fourth execution of L2 handler (2 seconds later)
* progress: 5->6 on first (and therefore last) execution of L3 handler
* progress: 6->7 on last execution of L2 handler (2 seconds later, perhaps)
* progress: 7->8 on last execution of L1 handler
*/
AssertJUnit.assertEquals("Task reported wrong progress", 8, task.getProgress());
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task.getResult();
AssertJUnit.assertNotNull("Task result is null", taskResult);
AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
// Test for no presence of handlers
// (the last handler URI is kept on a closed task; only the stack empties)
AssertJUnit.assertNotNull("Handler is gone", task.getHandlerUri());
AssertJUnit.assertTrue("Other handlers are still present",
task.getOtherHandlersUriStack() == null || task.getOtherHandlersUriStack().getUriStackEntry().isEmpty());
// Test if all three handlers were run
AssertJUnit.assertTrue("L1 handler has not run", l1Handler.hasRun());
AssertJUnit.assertTrue("L2 handler has not run", l2Handler.hasRun());
AssertJUnit.assertTrue("L3 handler has not run", l3Handler.hasRun());
}
/*
* Suspends a running task.
*/
@Test(enabled = true)
public void test012Suspend() throws Exception {
// Suspends a running task: the fixture runs 5 iterations of 2000 ms each.
// First sanity-checks the noop delay extension property (xsi:type issue),
// waits until the task made progress, asserts it is RUNNABLE, then suspends
// it with suspendTask(..., 0, ...) — wait indefinitely for the handler to
// stop — and asserts SUSPENDED state with non-zero start time and progress.
final String test = "012Suspend";
final OperationResult result = createResult(test);
addObjectFromFile(taskFilename(test));
// check if we can read the extension (xsi:type issue)
Task taskTemp = taskManager.getTask(taskOid(test), result);
PrismProperty delay = taskTemp.getExtensionProperty(SchemaConstants.NOOP_DELAY_QNAME);
AssertJUnit.assertEquals("Delay was not read correctly", 2000, delay.getRealValue());
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getProgress() >= 1;
}
@Override
public void timeout() {
}
}, 10000, 2000);
// Check task status (task is running 5 iterations where each takes 2000 ms)
Task task = taskManager.getTask(taskOid(test), result);
AssertJUnit.assertNotNull(task);
System.out.println(task.debugDump());
AssertJUnit.assertEquals("Task is not running", TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
// Now suspend the task
boolean stopped = taskManager.suspendTask(task, 0, result);
task.refresh(result);
System.out.println("After suspend and refresh: " + task.debugDump());
AssertJUnit.assertTrue("Task is not stopped", stopped);
AssertJUnit.assertEquals("Task is not suspended", TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
AssertJUnit.assertNotNull("Task last start time is null", task.getLastRunStartTimestamp());
assertFalse("Task last start time is 0", task.getLastRunStartTimestamp().longValue() == 0);
// The progress should be more than 0
AssertJUnit.assertTrue("Task has not reported any progress", task.getProgress() > 0);
// Thread.sleep(200); // give the scheduler a chance to release the task
// task.refresh(result);
// AssertJUnit.assertEquals("Task is not released", TaskExclusivityStatus.RELEASED, task.getExclusivityStatus());
}
@Test(enabled = true)
public void test013ReleaseAndSuspendLooselyBound() throws Exception {
// Resume-then-suspend cycle for a loosely-bound task that is imported in a
// suspended state: sanity-checks the noop delay extension (1000 ms cycle),
// resumes the task, waits for at least one execution, asserts RUNNABLE with
// non-zero timestamps and progress, then suspends with a short 300 ms wait
// (the suspend should land during the task's idle wait cycle) and asserts
// SUSPENDED with the run statistics preserved.
final String test = "013ReleaseAndSuspendLooselyBound";
final OperationResult result = createResult(test);
addObjectFromFile(taskFilename(test));
Task task = taskManager.getTask(taskOid(test), result);
System.out.println("After setup: " + task.debugDump());
// check if we can read the extension (xsi:type issue)
PrismProperty delay = task.getExtensionProperty(SchemaConstants.NOOP_DELAY_QNAME);
AssertJUnit.assertEquals("Delay was not read correctly", 1000, delay.getRealValue());
// let us resume (i.e. start the task)
taskManager.resumeTask(task, result);
// task is executing for 1000 ms, so we need to wait slightly longer, in order for the execution to be done
waitFor("Waiting for task manager to execute the task", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(taskOid(test), result);
IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
return task.getProgress() >= 1;
}
@Override
public void timeout() {
}
}, 10000, 2000);
task.refresh(result);
System.out.println("After refresh: " + task.debugDump());
AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
// AssertJUnit.assertEquals(TaskExclusivityStatus.RELEASED, task.getExclusivityStatus()); // task cycle is 1000 ms, so it should be released now
AssertJUnit.assertNotNull("LastRunStartTimestamp is null", task.getLastRunStartTimestamp());
assertFalse("LastRunStartTimestamp is 0", task.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull(task.getLastRunFinishTimestamp());
assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
AssertJUnit.assertTrue(task.getProgress() > 0);
// now let us suspend it (occurs during wait cycle, so we can put short timeout here)
boolean stopped = taskManager.suspendTask(task, 300, result);
task.refresh(result);
AssertJUnit.assertTrue("Task is not stopped", stopped);
AssertJUnit.assertEquals(TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
// AssertJUnit.assertEquals(TaskExclusivityStatus.RELEASED, task.getExclusivityStatus());
AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
AssertJUnit.assertNotNull(task.getLastRunFinishTimestamp());
assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
AssertJUnit.assertTrue(task.getProgress() > 0);
// Thread.sleep(200); // give the scheduler a chance to release the task
// task.refresh(result);
// AssertJUnit.assertEquals("Task is not released", TaskExclusivityStatus.RELEASED, task.getExclusivityStatus());
}
@Test(enabled = true)
public void test014SuspendLongRunning() throws Exception {
    // Suspends a long-running task in two phases:
    //   1. suspendTask with a 1000 ms wait must return stopped == false (the
    //      handler is still executing), yet the task state already switches to
    //      SUSPENDED; no finish timestamp and no progress are expected yet;
    //   2. suspendTask with wait == 0 (wait indefinitely) must return
    //      stopped == true, after which the finish timestamp and progress
    //      must be recorded.
    final String test = "014SuspendLongRunning";
    final OperationResult result = createResult(test);
    addObjectFromFile(taskFilename(test));
    Task task = taskManager.getTask(taskOid(test), result);
    System.out.println("After setup: " + task.debugDump());
    waitFor("Waiting for task manager to start the task", new Checker() {
        public boolean check() throws ObjectNotFoundException, SchemaException {
            Task task = taskManager.getTask(taskOid(test), result);
            IntegrationTestTools.display("Task while waiting for task manager to start the task", task);
            return task.getLastRunStartTimestamp() != null;
        }
        @Override
        public void timeout() {
        }
    }, 10000, 2000);
    task.refresh(result);
    System.out.println("After refresh: " + task.debugDump());
    AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
    // AssertJUnit.assertEquals(TaskExclusivityStatus.CLAIMED, task.getExclusivityStatus());
    AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
    assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
    // now let us suspend it, without long waiting
    boolean stopped = taskManager.suspendTask(task, 1000, result);
    task.refresh(result);
    assertFalse("Task is stopped (it should be running for now)", stopped);
    AssertJUnit.assertEquals("Task is not suspended", TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
    // AssertJUnit.assertEquals("Task should be still claimed, as it is not definitely stopped", TaskExclusivityStatus.CLAIMED, task.getExclusivityStatus());
    AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
    assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
    AssertJUnit.assertNull(task.getLastRunFinishTimestamp());
    AssertJUnit.assertEquals("There should be no progress reported", 0, task.getProgress());
    // now let us wait for the finish
    stopped = taskManager.suspendTask(task, 0, result);
    task.refresh(result);
    AssertJUnit.assertTrue("Task is not stopped", stopped);
    AssertJUnit.assertEquals("Task is not suspended", TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
    AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
    assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
    // BUGFIX: the original asserted getLastRunStartTimestamp() here (a
    // copy-paste of the two lines above) although the messages talk about the
    // finish time; check the finish timestamp, as intended.
    AssertJUnit.assertNotNull("Last run finish time is null", task.getLastRunFinishTimestamp());
    assertFalse("Last run finish time is zero", task.getLastRunFinishTimestamp().longValue() == 0);
    AssertJUnit.assertTrue("Progress is not reported", task.getProgress() > 0);
    // Thread.sleep(200); // give the scheduler a chance to release the task
    // task.refresh(result);
    // AssertJUnit.assertEquals("Task is not released", TaskExclusivityStatus.RELEASED, task.getExclusivityStatus());
}
@Test(enabled = true)
public void test015DeleteTaskFromRepo() throws Exception {
    // Verifies that deleting a task object directly from the repository also
    // makes the corresponding Quartz job disappear: imports the fixture,
    // asserts the Quartz job exists, deletes the task via the repository
    // service (bypassing the task manager), and waits for the scheduler sync
    // to remove the orphaned job from the Quartz job store.
    final String test = "015DeleteTaskFromRepo";
    final OperationResult result = createResult(test);
    // Import the fixture; the returned object was never used, so the unused
    // local from the original version was dropped.
    addObjectFromFile(taskFilename(test));
    String oid = taskOid(test);
    // is the task in Quartz?
    final JobKey key = TaskQuartzImplUtil.createJobKeyForTaskOid(oid);
    AssertJUnit.assertTrue("Job in Quartz does not exist", taskManager.getExecutionManager().getQuartzScheduler().checkExists(key));
    // Remove task from repo
    repositoryService.deleteObject(TaskType.class, taskOid(test), result);
    // We need to wait for a sync interval, so the task scanner has a chance
    // to pick up this task
    waitFor("Waiting for the job to disappear from Quartz Job Store", new Checker() {
        public boolean check() throws ObjectNotFoundException, SchemaException {
            try {
                return !taskManager.getExecutionManager().getQuartzScheduler().checkExists(key);
            } catch (SchedulerException e) {
                // Checker.check cannot declare SchedulerException; rethrow as
                // the project's generic runtime SystemException.
                throw new SystemException(e);
            }
        }
        @Override
        public void timeout() {
        }
    }, 10000, 2000);
}
    /**
     * Tests the "waiting for other tasks" mechanism: a root task waits for two child
     * tasks and two prerequisite tasks; after all of them close, the root task is
     * expected to close as well. Also verifies that parent/dependent links are set
     * correctly on the child and prerequisite tasks.
     */
    @Test(enabled = true)
    public void test016WaitForSubtasks() throws Exception {
        final String test = "016WaitForSubtasks";
        final OperationResult result = createResult(test);
        //taskManager.getClusterManager().startClusterManagerThread();
        try {
            // root + first child come from files; second child is created programmatically
            Task rootTask = taskManager.createTaskInstance((PrismObject<TaskType>) (PrismObject) addObjectFromFile(taskFilename(test)), result);
            Task firstChildTask = taskManager.createTaskInstance((PrismObject<TaskType>) (PrismObject) addObjectFromFile(taskFilename(test + "-child-1")), result);
            Task firstReloaded = taskManager.getTaskByIdentifier(firstChildTask.getTaskIdentifier(), result);
            assertEquals("Didn't get correct task by identifier", firstChildTask.getOid(), firstReloaded.getOid());
            Task secondChildTask = rootTask.createSubtask();
            secondChildTask.setName("Second child");
            secondChildTask.setOwner(rootTask.getOwner());
            secondChildTask.pushHandlerUri(SINGLE_TASK_HANDLER_URI, new ScheduleType(), null);
            secondChildTask.setInitialExecutionStatus(TaskExecutionStatus.SUSPENDED); // will resume it after root starts waiting for tasks
            taskManager.switchToBackground(secondChildTask, result);
            // first prerequisite comes from a file; second is created programmatically
            Task firstPrerequisiteTask = taskManager.createTaskInstance((PrismObject<TaskType>) (PrismObject) addObjectFromFile(taskFilename(test + "-prerequisite-1")), result);
            List<Task> prerequisities = rootTask.listPrerequisiteTasks(result);
            assertEquals("Wrong # of prerequisite tasks", 1, prerequisities.size());
            assertEquals("Wrong OID of prerequisite task", firstPrerequisiteTask.getOid(), prerequisities.get(0).getOid());
            Task secondPrerequisiteTask = taskManager.createTaskInstance();
            secondPrerequisiteTask.setName("Second prerequisite");
            secondPrerequisiteTask.setOwner(rootTask.getOwner());
            secondPrerequisiteTask.addDependent(rootTask.getTaskIdentifier());
            secondPrerequisiteTask.pushHandlerUri(NoOpTaskHandler.HANDLER_URI, new ScheduleType(), null);
            secondPrerequisiteTask.setExtensionPropertyValue(SchemaConstants.NOOP_DELAY_QNAME, 1500);
            secondPrerequisiteTask.setExtensionPropertyValue(SchemaConstants.NOOP_STEPS_QNAME, 1);
            secondPrerequisiteTask.setInitialExecutionStatus(TaskExecutionStatus.SUSPENDED); // will resume it after root starts waiting for tasks
            // NOTE(review): addDependent(rootTask...) is called twice on this task - presumably
            // harmless, but looks accidental; confirm against Task.addDependent semantics
            secondPrerequisiteTask.addDependent(rootTask.getTaskIdentifier());
            taskManager.switchToBackground(secondPrerequisiteTask, result);
            LOGGER.info("Starting waiting for child/prerequisite tasks");
            rootTask.startWaitingForTasksImmediate(result);
            // verify parent links on children and dependent links on prerequisites
            firstChildTask.refresh(result);
            assertEquals("Parent is not set correctly on 1st child task", rootTask.getTaskIdentifier(), firstChildTask.getParent());
            secondChildTask.refresh(result);
            assertEquals("Parent is not set correctly on 2nd child task", rootTask.getTaskIdentifier(), secondChildTask.getParent());
            firstPrerequisiteTask.refresh(result);
            assertEquals("Dependents are not set correctly on 1st prerequisite task (count differs)", 1, firstPrerequisiteTask.getDependents().size());
            assertEquals("Dependents are not set correctly on 1st prerequisite task (value differs)", rootTask.getTaskIdentifier(), firstPrerequisiteTask.getDependents().get(0));
            List<Task> deps = firstPrerequisiteTask.listDependents(result);
            assertEquals("Dependents are not set correctly on 1st prerequisite task - listDependents - (count differs)", 1, deps.size());
            assertEquals("Dependents are not set correctly on 1st prerequisite task - listDependents - (value differs)", rootTask.getOid(), deps.get(0).getOid());
            secondPrerequisiteTask.refresh(result);
            assertEquals("Dependents are not set correctly on 2nd prerequisite task (count differs)", 1, secondPrerequisiteTask.getDependents().size());
            assertEquals("Dependents are not set correctly on 2nd prerequisite task (value differs)", rootTask.getTaskIdentifier(), secondPrerequisiteTask.getDependents().get(0));
            deps = secondPrerequisiteTask.listDependents(result);
            assertEquals("Dependents are not set correctly on 2nd prerequisite task - listDependents - (count differs)", 1, deps.size());
            assertEquals("Dependents are not set correctly on 2nd prerequisite task - listDependents - (value differs)", rootTask.getOid(), deps.get(0).getOid());
            LOGGER.info("Resuming suspended child/prerequisite tasks");
            taskManager.resumeTask(secondChildTask, result);
            taskManager.resumeTask(secondPrerequisiteTask, result);
            // wait until all children/prerequisites finish and the root task closes
            final String rootOid = taskOid(test);
            waitFor("Waiting for task manager to execute the task", new Checker() {
                public boolean check() throws ObjectNotFoundException, SchemaException {
                    Task task = taskManager.getTask(rootOid, result);
                    IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                    return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
                }
                @Override
                public void timeout() {
                }
            }, 30000, 3000);
            firstChildTask.refresh(result);
            secondChildTask.refresh(result);
            firstPrerequisiteTask.refresh(result);
            secondPrerequisiteTask.refresh(result);
            assertEquals("1st child task should be closed", TaskExecutionStatus.CLOSED, firstChildTask.getExecutionStatus());
            assertEquals("2nd child task should be closed", TaskExecutionStatus.CLOSED, secondChildTask.getExecutionStatus());
            assertEquals("1st prerequisite task should be closed", TaskExecutionStatus.CLOSED, firstPrerequisiteTask.getExecutionStatus());
            assertEquals("2nd prerequisite task should be closed", TaskExecutionStatus.CLOSED, secondPrerequisiteTask.getExecutionStatus());
        } finally {
            // taskManager.getClusterManager().stopClusterManagerThread(10000L, result);
        }
    }
    /**
     * Tests that a task waiting for subtasks closes on its own when it has no
     * subtasks at all (the cluster manager thread performs the check).
     */
    @Test(enabled = true)
    public void test017WaitForSubtasksEmpty() throws Exception {
        final String test = "017WaitForSubtasksEmpty";
        final OperationResult result = createResult(test);
        taskManager.getClusterManager().startClusterManagerThread();
        try {
            Task rootTask = taskManager.createTaskInstance((PrismObject<TaskType>) (PrismObject) addObjectFromFile(taskFilename(test)), result);
            display("root task", rootTask);
            final String rootOid = taskOid(test);
            waitFor("Waiting for task manager to execute the task", new Checker() {
                public boolean check() throws ObjectNotFoundException, SchemaException {
                    Task task = taskManager.getTask(rootOid, result);
                    IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                    return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
                }
                @Override
                public void timeout() {
                }
            }, 40000, 3000);
        } finally {
            // always stop the cluster manager thread, even if the wait fails
            taskManager.getClusterManager().stopClusterManagerThread(10000L, result);
        }
    }
    /**
     * Tests the lifecycle of the task's operation result: UNKNOWN for a fresh task,
     * IN_PROGRESS after switching to background (both in memory and in the repository),
     * and that refresh() overwrites any locally-made result changes with the repo state.
     */
    @Test(enabled = true)
    public void test018TaskResult() throws Exception {
        // NOTE(review): test label "018RefreshingResult" differs from the method name
        // test018TaskResult - presumably intentional (it names the task file); confirm
        final String test = "018RefreshingResult";
        final OperationResult result = createResult(test);
        Task task = taskManager.createTaskInstance();
        task.setInitialExecutionStatus(TaskExecutionStatus.SUSPENDED);
        PrismObject<UserType> owner2 = repositoryService.getObject(UserType.class, TASK_OWNER2_OID, null, result);
        task.setOwner(owner2);
        AssertJUnit.assertEquals("Task result for new task is not correct", OperationResultStatus.UNKNOWN, task.getResult().getStatus());
        taskManager.switchToBackground(task, result);
        AssertJUnit.assertEquals("Background task result is not correct (in memory)", OperationResultStatus.IN_PROGRESS, task.getResult().getStatus());
        PrismObject<TaskType> task1 = repositoryService.getObject(TaskType.class, task.getOid(), null, result);
        AssertJUnit.assertEquals("Background task result is not correct (in repo)", OperationResultStatusType.IN_PROGRESS, task1.asObjectable().getResult().getStatus());
        // now change task's result and check the refresh() method w.r.t. result handling
        task.getResult().recordFatalError("");
        AssertJUnit.assertEquals(OperationResultStatus.FATAL_ERROR, task.getResult().getStatus());
        task.refresh(result);
        AssertJUnit.assertEquals("Refresh does not update task's result", OperationResultStatus.IN_PROGRESS, task.getResult().getStatus());
    }
    /*
     * Recurring task returning FINISHED_HANDLER code: when the inner handler finishes,
     * the outer handler on the URI stack must run as well, and the task must close.
     */
    @Test(enabled = true)
    public void test019FinishedHandler() throws Exception {
        final String test = "019FinishedHandler";
        final OperationResult result = createResult(test);
        // reset 'has run' flag on handlers
        singleHandler1.resetHasRun();
        addObjectFromFile(taskFilename(test));
        waitFor("Waiting for task manager to execute the task", new Checker() {
            public boolean check() throws ObjectNotFoundException, SchemaException {
                Task task = taskManager.getTask(taskOid(test), result);
                IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
            }
            @Override
            public void timeout() {
            }
        }, 15000, 2000);
        // Check task status
        Task task = taskManager.getTask(taskOid(test), result);
        AssertJUnit.assertNotNull(task);
        System.out.println(task.debugDump());
        PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, taskOid(test), null, result);
        System.out.println(ObjectTypeUtil.dump(o.getValue().getValue()));
        // .. it should be closed
        AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task.getExecutionStatus());
        // .. and last run should not be zero
        AssertJUnit.assertNotNull(task.getLastRunStartTimestamp());
        assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
        AssertJUnit.assertNotNull("Last run finish timestamp not set", task.getLastRunFinishTimestamp());
        assertFalse("Last run finish timestamp is 0", task.getLastRunFinishTimestamp().longValue() == 0);
        // The progress should be at least 2 as the task has run at least twice (once in each handler)
        AssertJUnit.assertTrue("Task reported progress lower than 2", task.getProgress() >= 2);
        // Test for presence of a result. It should be there and it should
        // indicate success
        OperationResult taskResult = task.getResult();
        AssertJUnit.assertNotNull("Task result is null", taskResult);
        AssertJUnit.assertTrue("Task did not yield 'success' status", taskResult.isSuccess());
        // Test for no presence of handlers
        AssertJUnit.assertNotNull("Handler is gone", task.getHandlerUri());
        AssertJUnit.assertTrue("Other handlers are still present",
                task.getOtherHandlersUriStack() == null || task.getOtherHandlersUriStack().getUriStackEntry().isEmpty());
        // Test if "outer" handler has run as well
        AssertJUnit.assertTrue("Handler1 has not run", singleHandler1.hasRun());
    }
    /**
     * Tests repository queries on task state: searching by executionStatus == WAITING,
     * by waitingReason == WORKFLOW, and by the conjunction of both must each return
     * at least the task added here.
     */
    @Test
    public void test020QueryByExecutionStatus() throws Exception {
        final String test = "020QueryByExecutionStatus";
        final OperationResult result = createResult(test);
        taskManager.getClusterManager().startClusterManagerThread();
        Task rootTask = taskManager.createTaskInstance((PrismObject<TaskType>) (PrismObject) addObjectFromFile(taskFilename(test)), result);
        // NOTE(review): 'oid' is unused and the cluster manager thread is never stopped
        // here (unlike test017) - presumably relies on later tests/teardown; confirm
        String oid = rootTask.getOid();
        ObjectFilter filter1 = QueryBuilder.queryFor(TaskType.class, prismContext).item(TaskType.F_EXECUTION_STATUS).eq(TaskExecutionStatusType.WAITING).buildFilter();
        ObjectFilter filter2 = QueryBuilder.queryFor(TaskType.class, prismContext).item(TaskType.F_WAITING_REASON).eq(TaskWaitingReasonType.WORKFLOW).buildFilter();
        ObjectFilter filter3 = AndFilter.createAnd(filter1, filter2);
        List<PrismObject<TaskType>> prisms1 = repositoryService.searchObjects(TaskType.class, ObjectQuery.createObjectQuery(filter1), null, result);
        List<PrismObject<TaskType>> prisms2 = repositoryService.searchObjects(TaskType.class, ObjectQuery.createObjectQuery(filter2), null, result);
        List<PrismObject<TaskType>> prisms3 = repositoryService.searchObjects(TaskType.class, ObjectQuery.createObjectQuery(filter3), null, result);
        assertFalse("There were no tasks with executionStatus == WAITING found", prisms1.isEmpty());
        assertFalse("There were no tasks with waitingReason == WORKFLOW found", prisms2.isEmpty());
        assertFalse("There were no tasks with executionStatus == WAITING and waitingReason == WORKFLOW found", prisms3.isEmpty());
    }
@Test(enabled = true)
public void test021DeleteTaskTree() throws Exception {
final String test = "021DeleteTaskTree";
final OperationResult result = createResult(test);
PrismObject<TaskType> parentTaskPrism = (PrismObject<TaskType>) addObjectFromFile(taskFilename(test));
PrismObject<TaskType> childTask1Prism = (PrismObject<TaskType>) addObjectFromFile(taskFilename(test+"-child1"));
PrismObject<TaskType> childTask2Prism = (PrismObject<TaskType>) addObjectFromFile(taskFilename(test+"-child2"));
AssertJUnit.assertEquals(TaskExecutionStatusType.WAITING, parentTaskPrism.asObjectable().getExecutionStatus());
AssertJUnit.assertEquals(TaskExecutionStatusType.SUSPENDED, childTask1Prism.asObjectable().getExecutionStatus());
AssertJUnit.assertEquals(TaskExecutionStatusType.SUSPENDED, childTask2Prism.asObjectable().getExecutionStatus());
Task parentTask = taskManager.createTaskInstance(parentTaskPrism, result);
Task childTask1 = taskManager.createTaskInstance(childTask1Prism, result);
Task childTask2 = taskManager.createTaskInstance(childTask2Prism, result);
IntegrationTestTools.display("parent", parentTask);
IntegrationTestTools.display("child1", childTask1);
IntegrationTestTools.display("child2", childTask2);
taskManager.resumeTask(childTask1, result);
taskManager.resumeTask(childTask2, result);
parentTask.startWaitingForTasksImmediate(result);
LOGGER.info("Deleting task {} and its subtasks", parentTask);
taskManager.suspendAndDeleteTasks(Arrays.asList(parentTask.getOid()), 2000L, true, result);
IntegrationTestTools.display("after suspendAndDeleteTasks", result.getLastSubresult());
TestUtil.assertSuccessOrWarning("suspendAndDeleteTasks result is not success/warning", result.getLastSubresult());
try {
repositoryService.getObject(TaskType.class, childTask1.getOid(), null, result);
assertTrue("Task " + childTask1 + " was not deleted from the repository", false);
} catch (ObjectNotFoundException e) {
// ok!
}
try {
repositoryService.getObject(TaskType.class, childTask2.getOid(), null, result);
assertTrue("Task " + childTask2 + " was not deleted from the repository", false);
} catch (ObjectNotFoundException e) {
// ok!
}
try {
repositoryService.getObject(TaskType.class, parentTask.getOid(), null, result);
assertTrue("Task " + parentTask + " was not deleted from the repository", false);
} catch (ObjectNotFoundException e) {
// ok!
}
}
    /**
     * Tests a recurring task that is bound to on-demand scheduling: it must NOT start
     * on its own, must run when scheduleRunnableTaskNow is called, and must be
     * suspendable afterwards.
     */
    @Test(enabled = true)
    public void test022ExecuteRecurringOnDemand() throws Exception {
        final String test = "022ExecuteRecurringOnDemand";
        final OperationResult result = createResult(test);
        addObjectFromFile(taskFilename(test));
        Task task = taskManager.getTask(taskOid(test), result);
        System.out.println("After setup: " + task.debugDump());
        System.out.println("Waiting to see if the task would not start...");
        Thread.sleep(5000L);
        // check the task HAS NOT started
        task.refresh(result);
        System.out.println("After initial wait: " + task.debugDump());
        assertEquals("task is not RUNNABLE", TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
        assertNull("task was started", task.getLastRunStartTimestamp());
        assertEquals("task was achieved some progress", 0L, task.getProgress());
        // now let's start the task
        taskManager.scheduleRunnableTaskNow(task, result);
        // task is executing for 1000 ms, so we need to wait slightly longer, in order for the execution to be done
        waitFor("Waiting for task manager to execute the task", new Checker() {
            public boolean check() throws ObjectNotFoundException, SchemaException {
                Task task = taskManager.getTask(taskOid(test), result);
                IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                return task.getProgress() >= 1;
            }
            @Override
            public void timeout() {
            }
        }, 10000, 2000);
        task.refresh(result);
        System.out.println("After refresh: " + task.debugDump());
        // the task ran at least once but stays RUNNABLE (it is recurring)
        AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
        AssertJUnit.assertNotNull("LastRunStartTimestamp is null", task.getLastRunStartTimestamp());
        assertFalse("LastRunStartTimestamp is 0", task.getLastRunStartTimestamp().longValue() == 0);
        AssertJUnit.assertNotNull(task.getLastRunFinishTimestamp());
        assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
        AssertJUnit.assertTrue("no progress", task.getProgress() > 0);
        // now let us suspend it (occurs during wait cycle, so we can put short timeout here)
        boolean stopped = taskManager.suspendTask(task, 10000L, result);
        task.refresh(result);
        AssertJUnit.assertTrue("Task is not stopped", stopped);
        AssertJUnit.assertEquals("Task is not suspended", TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
    }
    /**
     * Tests lightweight asynchronous subtasks: after the parent task closes, all
     * MockParallelTaskHandler subtasks must be CLOSED and their handlers must have
     * both run and exited.
     */
    @Test(enabled = true)
    public void test100LightweightSubtasks() throws Exception {
        final String test = "100LightweightSubtasks";
        final OperationResult result = createResult(test);
        addObjectFromFile(taskFilename(test));
        Task task = taskManager.getTask(taskOid(test), result);
        System.out.println("After setup: " + task.debugDump());
        waitFor("Waiting for task manager to execute the task", new Checker() {
            public boolean check() throws ObjectNotFoundException, SchemaException {
                Task task = taskManager.getTask(taskOid(test), result);
                IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                return task.getExecutionStatus() == TaskExecutionStatus.CLOSED;
            }
            @Override
            public void timeout() {
            }
        }, 15000, 500);
        task.refresh(result);
        System.out.println("After refresh (task was executed): " + task.debugDump());
        // inspect the subtasks of the task instance that actually executed
        Collection<? extends Task> subtasks = parallelTaskHandler.getLastTaskExecuted().getLightweightAsynchronousSubtasks();
        assertEquals("Wrong number of subtasks", MockParallelTaskHandler.NUM_SUBTASKS, subtasks.size());
        for (Task subtask : subtasks) {
            assertEquals("Wrong subtask state", TaskExecutionStatus.CLOSED, subtask.getExecutionStatus());
            MockParallelTaskHandler.MyLightweightTaskHandler handler = (MockParallelTaskHandler.MyLightweightTaskHandler) subtask.getLightweightTaskHandler();
            assertTrue("Handler has not run", handler.hasRun());
            assertTrue("Handler has not exited", handler.hasExited());
        }
    }
    /**
     * Tests that suspending a task with lightweight asynchronous subtasks stops the
     * main handler AND all subtasks: after suspension every subtask must be CLOSED
     * and its handler must have run and exited.
     */
    @Test(enabled = true)
    public void test105LightweightSubtasksSuspension() throws Exception {
        final String test = "105LightweightSubtasksSuspension";
        final OperationResult result = createResult(test);
        addObjectFromFile(taskFilename(test));
        Task task = taskManager.getTask(taskOid(test), result);
        System.out.println("After setup: " + task.debugDump());
        waitFor("Waiting for task manager to start the task", new Checker() {
            public boolean check() throws ObjectNotFoundException, SchemaException {
                Task task = taskManager.getTask(taskOid(test), result);
                IntegrationTestTools.display("Task while waiting for task manager to execute the task", task);
                // started == last run start timestamp is set and non-zero
                return task.getLastRunStartTimestamp() != null && task.getLastRunStartTimestamp() != 0L;
            }
            @Override
            public void timeout() {
            }
        }, 15000, 500);
        task.refresh(result);
        System.out.println("After refresh (task was started; and it should run now): " + task.debugDump());
        AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
        // check the thread
        List<JobExecutionContext> jobExecutionContexts = taskManager.getExecutionManager().getQuartzScheduler().getCurrentlyExecutingJobs();
        JobExecutionContext found = null;
        for (JobExecutionContext jobExecutionContext : jobExecutionContexts) {
            // Quartz job key name is the task OID
            if (task.getOid().equals(jobExecutionContext.getJobDetail().getKey().getName())) {
                found = jobExecutionContext; break;
            }
        }
        assertNotNull("Job for the task was not found", found);
        JobExecutor executor = (JobExecutor) found.getJobInstance();
        assertNotNull("No job executor", executor);
        Thread thread = executor.getExecutingThread();
        assertNotNull("No executing thread", thread);
        // now let us suspend it - the handler should stop, as well as the subtasks
        boolean stopped = taskManager.suspendTask(task, 10000L, result);
        task.refresh(result);
        AssertJUnit.assertTrue("Task is not stopped", stopped);
        AssertJUnit.assertEquals("Task is not suspended", TaskExecutionStatus.SUSPENDED, task.getExecutionStatus());
        Collection<? extends Task> subtasks = parallelTaskHandler.getLastTaskExecuted().getLightweightAsynchronousSubtasks();
        assertEquals("Wrong number of subtasks", MockParallelTaskHandler.NUM_SUBTASKS, subtasks.size());
        for (Task subtask : subtasks) {
            assertEquals("Wrong subtask state", TaskExecutionStatus.CLOSED, subtask.getExecutionStatus());
            MockParallelTaskHandler.MyLightweightTaskHandler handler = (MockParallelTaskHandler.MyLightweightTaskHandler) subtask.getLightweightTaskHandler();
            assertTrue("Handler has not run", handler.hasRun());
            assertTrue("Handler has not exited", handler.hasExited());
        }
    }
@Test(enabled = true)
public void test999CheckingLeftovers() throws Exception {
String test = "999CheckingLeftovers";
OperationResult result = createResult(test);
ArrayList<String> leftovers = new ArrayList<String>();
checkLeftover(leftovers, "005", result);
checkLeftover(leftovers, "006", result);
checkLeftover(leftovers, "008", result);
checkLeftover(leftovers, "009", result);
checkLeftover(leftovers, "010", result);
checkLeftover(leftovers, "011", result);
checkLeftover(leftovers, "012", result);
checkLeftover(leftovers, "013", result);
checkLeftover(leftovers, "014", result);
checkLeftover(leftovers, "015", result);
checkLeftover(leftovers, "016", result);
checkLeftover(leftovers, "017", result);
checkLeftover(leftovers, "019", result);
checkLeftover(leftovers, "021", result);
checkLeftover(leftovers, "021", "1", result);
checkLeftover(leftovers, "021", "2", result);
checkLeftover(leftovers, "022", result);
checkLeftover(leftovers, "100", result);
checkLeftover(leftovers, "105", result);
String message = "Leftover task(s) found:";
for (String leftover : leftovers) {
message += " " + leftover;
}
AssertJUnit.assertTrue(message, leftovers.isEmpty());
}
    /**
     * Convenience overload: checks for a leftover task with the default sub-id "0".
     */
    private void checkLeftover(ArrayList<String> leftovers, String testNumber, OperationResult result) throws Exception {
        checkLeftover(leftovers, testNumber, "0", result);
    }
private void checkLeftover(ArrayList<String> leftovers, String testNumber, String subId, OperationResult result) throws Exception {
String oid = taskOid(testNumber, subId);
Task t;
try {
t = taskManager.getTask(oid, result);
} catch (ObjectNotFoundException e) {
// this is OK, test probably did not start
LOGGER.info("Check leftovers: Task " + oid + " does not exist.");
return;
}
LOGGER.info("Check leftovers: Task " + oid + " state: " + t.getExecutionStatus());
if (t.getExecutionStatus() == TaskExecutionStatus.RUNNABLE) {
LOGGER.info("Leftover task: {}", t);
leftovers.add(t.getOid());
}
}
// UTILITY METHODS
// TODO: maybe we should move them to a common utility class
    /**
     * Convenience overload: qualifies the attribute name with the resource's namespace
     * before delegating to the QName-based variant.
     */
    private void assertAttribute(ShadowType repoShadow, ResourceType resource, String name, String value) {
        assertAttribute(repoShadow, new QName(ResourceTypeUtil.getResourceNamespace(resource), name), value);
    }
private void assertAttribute(ShadowType repoShadow, QName name, String value) {
boolean found = false;
List<Object> xmlAttributes = repoShadow.getAttributes().getAny();
for (Object element : xmlAttributes) {
if (name.equals(JAXBUtil.getElementQName(element))) {
if (found) {
Assert.fail("Multiple values for " + name + " attribute in shadow attributes");
} else {
AssertJUnit.assertEquals(value, ((Element) element).getTextContent());
found = true;
}
}
}
}
protected void assertAttribute(SearchResultEntry response, String name, String value) {
AssertJUnit.assertNotNull(response.getAttribute(name.toLowerCase()));
AssertJUnit.assertEquals(1, response.getAttribute(name.toLowerCase()).size());
Attribute attribute = response.getAttribute(name.toLowerCase()).get(0);
AssertJUnit.assertEquals(value, attribute.iterator().next().getValue().toString());
}
    /**
     * Parses a prism object from the given file.
     * NOTE(review): the 'clazz' parameter (and the type parameter T beyond the return
     * type) is unused - the object type is determined by the parser; kept for
     * signature compatibility.
     */
    private <T extends ObjectType> PrismObject<T> unmarshallJaxbFromFile(String filePath, Class<T> clazz) throws IOException, JAXBException, SchemaException {
        File file = new File(filePath);
        return PrismTestUtil.parseObject(file);
    }
    /**
     * Convenience overload: adds the object without deleting a pre-existing one first.
     */
    private PrismObject<? extends ObjectType> addObjectFromFile(String filePath) throws Exception {
        return addObjectFromFile(filePath, false);
    }
    /**
     * Parses an object from the given file and adds it to the repository (via the task
     * manager for tasks). If the object already exists, it is deleted and re-added.
     * NOTE(review): the 'deleteIfExists' flag is not consulted - the delete-and-retry
    * happens unconditionally on ObjectAlreadyExistsException; confirm intent.
     */
    private PrismObject<? extends ObjectType> addObjectFromFile(String filePath, boolean deleteIfExists) throws Exception {
        PrismObject<ObjectType> object = unmarshallJaxbFromFile(filePath, ObjectType.class);
        System.out.println("obj: " + object.getElementName());
        OperationResult result = new OperationResult(TestQuartzTaskManagerContract.class.getName() + ".addObjectFromFile");
        try {
            add(object, result);
        } catch(ObjectAlreadyExistsException e) {
            // object from a previous run is still around - replace it
            delete(object, result);
            add(object, result);
        }
        // NOTE(review): uses 'logger' while other methods use 'LOGGER' - presumably two
        // logger fields exist on this class/superclass; consider unifying
        logger.trace("Object from " + filePath + " added to repository.");
        return object;
    }
    /**
     * Adds the object to the system: tasks go through the task manager (so they get
     * scheduled), everything else goes directly to the repository.
     */
    private void add(PrismObject<ObjectType> object, OperationResult result)
            throws ObjectAlreadyExistsException, SchemaException {
        if (object.canRepresent(TaskType.class)) {
            taskManager.addTask((PrismObject)object, result);
        } else {
            repositoryService.addObject(object, null, result);
        }
    }
    /**
     * Deletes the object: tasks via the task manager (unschedules them), everything
     * else directly from the repository.
     */
    private void delete(PrismObject<ObjectType> object, OperationResult result) throws ObjectNotFoundException, SchemaException {
        if (object.canRepresent(TaskType.class)) {
            taskManager.deleteTask(object.getOid(), result);
        } else {
            repositoryService.deleteObject(ObjectType.class, object.getOid(), result); // correct?
        }
    }
// private void display(SearchResultEntry response) {
// // TODO Auto-generated method stub
// System.out.println(response.toLDIFString());
// }
}
| PetrGasparik/midpoint | repo/task-quartz-impl/src/test/java/com/evolveum/midpoint/task/quartzimpl/TestQuartzTaskManagerContract.java | Java | apache-2.0 | 85,932 |
package de.tu_darmstadt.smastra.generator.extras;
import de.tu_darmstadt.smastra.markers.elements.extras.ExtraBroadcast;
import de.tu_darmstadt.smastra.markers.elements.extras.ExtraLibrary;
import de.tu_darmstadt.smastra.markers.elements.extras.ExtraPermission;
import de.tu_darmstadt.smastra.markers.elements.extras.ExtraService;
/**
* Created by Toby on 03.10.2016.
*/
/**
 * Factory that translates SmaSTra "extra" annotation instances into their
 * corresponding {@link AbstractSmaSTraExtra} representations.
 */
public class ExtraFactory {

    /**
     * Creates an extra element from the annotation passed.
     *
     * @param element the annotation instance to convert; may be null.
     * @return the created extra, or null if the element is null or of an unknown type.
     */
    public static AbstractSmaSTraExtra buildFromExtra(Object element) {
        if (element == null) {
            return null;
        }

        // Broadcast receiver requirement
        if (element instanceof ExtraBroadcast) {
            ExtraBroadcast broadcast = (ExtraBroadcast) element;
            return new NeedsBroadcast(broadcast.clazz().getCanonicalName(), broadcast.exported());
        }

        // Android service requirement
        if (element instanceof ExtraService) {
            ExtraService service = (ExtraService) element;
            return new NeedsService(service.clazz().getCanonicalName(), service.exported());
        }

        // Library dependency requirement
        if (element instanceof ExtraLibrary) {
            return new NeedsLibrary(((ExtraLibrary) element).libName());
        }

        // Permission requirement
        if (element instanceof ExtraPermission) {
            return new NeedsPermission(((ExtraPermission) element).permission());
        }

        // unknown annotation type
        return null;
    }
}
| SmaSTra/SmaSTra | AndroidCodeSnippets/SmaSTraGenerator/src/main/java/de/tu_darmstadt/smastra/generator/extras/ExtraFactory.java | Java | apache-2.0 | 1,637 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.io;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.util.TimeoutUtil;
import com.sun.net.httpserver.HttpServer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;
import static java.net.HttpURLConnection.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class HttpRequestsTest {
private static final String LOCALHOST = "127.0.0.1";
private HttpServer myServer;
private String myUrl;
@Before
public void setUp() throws IOException {
myServer = HttpServer.create();
myServer.bind(new InetSocketAddress(LOCALHOST, 0), 1);
myServer.start();
myUrl = "http://" + LOCALHOST + ":" + myServer.getAddress().getPort();
}
@After
public void tearDown() {
myServer.stop(0);
}
@Test(timeout = 5000)
public void redirectLimit() {
try {
HttpRequests.request("").redirectLimit(0).readString(null);
fail();
}
catch (IOException e) {
assertEquals(IdeBundle.message("error.connection.failed.redirects"), e.getMessage());
}
}
@Test(timeout = 5000, expected = SocketTimeoutException.class)
public void readTimeout() throws IOException {
myServer.createContext("/", ex -> {
TimeoutUtil.sleep(1000);
ex.sendResponseHeaders(HTTP_OK, 0);
ex.close();
});
HttpRequests.request(myUrl).readTimeout(50).readString(null);
fail();
}
@Test(timeout = 5000)
public void readContent() throws IOException {
myServer.createContext("/", ex -> {
ex.getResponseHeaders().add("Content-Type", "text/plain; charset=koi8-r");
ex.sendResponseHeaders(HTTP_OK, 0);
ex.getResponseBody().write("hello кодировочки".getBytes("koi8-r"));
ex.close();
});
assertThat(HttpRequests.request(myUrl).readString(null)).isEqualTo("hello кодировочки");
}
@Test(timeout = 5000)
public void gzippedContent() throws IOException {
myServer.createContext("/", ex -> {
ex.getResponseHeaders().add("Content-Type", "text/plain; charset=koi8-r");
ex.getResponseHeaders().add("Content-Encoding", "gzip");
ex.sendResponseHeaders(HTTP_OK, 0);
try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(ex.getResponseBody())) {
gzipOutputStream.write("hello кодировочки".getBytes("koi8-r"));
}
ex.close();
});
assertThat(HttpRequests.request(myUrl).readString(null)).isEqualTo("hello кодировочки");
byte[] bytes = HttpRequests.request(myUrl).gzip(false).readBytes(null);
assertThat(bytes).startsWith(0x1f, 0x8b); // GZIP magic
}
@Test(timeout = 5000)
public void tuning() throws IOException {
myServer.createContext("/", ex -> {
ex.sendResponseHeaders("HEAD".equals(ex.getRequestMethod()) ? HTTP_NO_CONTENT : HTTP_NOT_IMPLEMENTED, -1);
ex.close();
});
assertEquals(HTTP_NO_CONTENT, HttpRequests.request(myUrl)
.tuner((c) -> ((HttpURLConnection)c).setRequestMethod("HEAD"))
.tryConnect());
}
@Test(timeout = 5000, expected = AssertionError.class)
public void putNotAllowed() throws IOException {
HttpRequests.request(myUrl)
.tuner((c) -> ((HttpURLConnection)c).setRequestMethod("PUT"))
.tryConnect();
fail();
}
@Test(timeout = 5000)
public void post() throws IOException {
Ref<String> receivedData = Ref.create();
myServer.createContext("/", ex -> {
receivedData.set(StreamUtil.readText(ex.getRequestBody(), StandardCharsets.UTF_8));
ex.sendResponseHeaders(HTTP_OK, -1);
ex.close();
});
HttpRequests.post(myUrl, null).write("hello");
assertThat(receivedData.get()).isEqualTo("hello");
}
@Test(timeout = 5000)
public void postNotFound() throws IOException {
myServer.createContext("/", ex -> {
ex.sendResponseHeaders(HTTP_NOT_FOUND, -1);
ex.close();
});
//noinspection SpellCheckingInspection
try {
HttpRequests
.post(myUrl, null)
.write("hello");
fail();
}
catch (SocketException e) {
// java.net.SocketException: Software caused connection abort: recv failed
//noinspection SpellCheckingInspection
assertThat(e.getMessage()).contains("recv failed");
}
catch (HttpRequests.HttpStatusException e) {
assertThat(e.getMessage()).isEqualTo("Request failed with status code 404");
assertThat(e.getStatusCode()).isEqualTo(HTTP_NOT_FOUND);
}
}
@Test(timeout = 5000)
public void postNotFoundWithResponse() throws IOException {
String serverErrorText = "use another url";
myServer.createContext("/", ex -> {
byte[] bytes = serverErrorText.getBytes(StandardCharsets.UTF_8);
ex.sendResponseHeaders(HTTP_UNAVAILABLE, bytes.length);
ex.getResponseBody().write(bytes);
ex.close();
});
try {
HttpRequests
.post(myUrl, null)
.isReadResponseOnError(true)
.write("hello");
fail();
}
catch (HttpRequests.HttpStatusException e) {
assertThat(e.getMessage()).isEqualTo(serverErrorText);
}
}
@Test(timeout = 5000)
public void notModified() throws IOException {
  myServer.createContext("/", exchange -> {
    exchange.sendResponseHeaders(HTTP_NOT_MODIFIED, -1);
    exchange.close();
  });

  // A 304 response carries no body; readBytes must yield an empty array
  // rather than fail.
  byte[] content = HttpRequests.request(myUrl).readBytes(null);
  assertThat(content).isEmpty();
}
@Test(timeout = 5000)
public void permissionDenied() throws IOException {
  // Consistency fix: every sibling test registers the server context outside
  // the try block; keeping the setup there ensures only the HTTP request
  // itself can trigger (or miss) the expected HttpStatusException.
  myServer.createContext("/", exchange -> {
    exchange.sendResponseHeaders(HTTP_UNAUTHORIZED, -1);
    exchange.close();
  });
  try {
    HttpRequests.request(myUrl).productNameAsUserAgent().readString(null);
    fail();
  }
  catch (HttpRequests.HttpStatusException e) {
    assertThat(e.getStatusCode()).isEqualTo(HTTP_UNAUTHORIZED);
  }
}
@Test(timeout = 5000)
public void invalidHeader() throws IOException {
  try {
    HttpRequests.request(myUrl)
      .tuner(connection -> connection.setRequestProperty("X-Custom", "c-str\0"))
      .readString(null);
    fail();
  }
  catch (AssertionError e) {
    // The JDK's HTTP client rejects header values containing NUL bytes.
    assertThat(e.getMessage()).contains("value contains NUL bytes");
  }
}
} | goodwinnk/intellij-community | platform/platform-tests/testSrc/com/intellij/util/io/HttpRequestsTest.java | Java | apache-2.0 | 6,747 |
package home.manager.system.hardware.modules;
/**
* **********************************************************************
*
* @author : OKAFOR AKACHUKWU
* @email : [email protected]
* @date : 10/17/2014
* This file was created by the said author as written above
* see http://www.kaso.co/
* **********************************************************************
* %%
* Copyright (C) 2012 - 2014 OKAFOR AKACHUKWU
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class RaspberryPiHubModule extends HubModule {

    /** Logs the state change to the console, then delegates switch-off to {@link HubModule}. */
    @Override
    public boolean turnOff() {
        logTransition("OFF");
        return super.turnOff();
    }

    /** Logs the state change to the console, then delegates switch-on to {@link HubModule}. */
    @Override
    public boolean turnOn() {
        logTransition("ON");
        return super.turnOn();
    }

    // Emits exactly the same console line as before:
    // "Turning <STATE>. => Module Id: <id>"
    private void logTransition(String state) {
        System.out.println("Turning " + state + ". => Module Id: " + this.getId());
    }
}
| kasoprecede47/HomeManagementSystem | src/home/manager/system/hardware/modules/RaspberryPiHubModule.java | Java | apache-2.0 | 1,370 |
/*
* Copyright 2014 Kevin Quan ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kevinquan.cwa.model.creatures.sandy;
public class GreenPartyOgre extends AbstractSandyCreature {

    @SuppressWarnings("unused")
    private static final String TAG = GreenPartyOgre.class.getSimpleName();

    /** Builds the "Green Party Ogre" card (cost 0, Algebraic Rare) and sets its floop text. */
    public GreenPartyOgre() {
        super("Green Party Ogre", 0, Rarity.AlgebraicRare);
        mDescription = "Choose a friendly creature and raise its Defense by 4 for every creature you Flooped this turn";
    }

    @Override
    public int getBaseAttack() {
        return 11;
    }

    @Override
    public int getBaseDefense() {
        return 19;
    }

    @Override
    public int getFloopCost() {
        return 2;
    }
}
| kquan/card-wars-almanac | java/src/com/kevinquan/cwa/model/creatures/sandy/GreenPartyOgre.java | Java | apache-2.0 | 1,230 |
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ui.stylemenu;
import java.util.ArrayList;
import java.util.List;
import com.cats.ui.alertdialog.AlertDialog;
import com.cats.ui.alertdialog.AlertDialogOptions;
/**
* @author xblia
* 2015年9月18日
*/
public class StartSubProcess
{
    /**
     * Launches the jar at {@code uri} as a detached child JVM, then terminates
     * this process. Exits with status 0 when the child was started, or status 1
     * when starting it failed (after alerting the user).
     *
     * @param uri path to the executable jar to start
     */
    protected static void startUp(String uri)
    {
        List<String> cmd = new ArrayList<String>();
        cmd.add("java");
        cmd.add("-jar");
        cmd.add(uri);
        ProcessBuilder builder = new ProcessBuilder(cmd);
        try
        {
            builder.start();
        } catch (Exception e)
        {
            e.printStackTrace();
            alertError(e.toString());
            // Bug fix: previously the JVM exited with status 0 even when the
            // child process could not be started, so callers (e.g. wrapper
            // scripts) saw a successful upgrade. Report the failure instead.
            System.exit(1);
        }
        System.exit(0);
    }

    /**
     * Shows a warning dialog describing an upgrade failure.
     *
     * @param info short description of the underlying error
     */
    protected static void alertError(String info)
    {
        // Typo fixes in the user-facing text:
        // "tray again. thks" -> "try again. Thanks.", "Upragde" -> "Upgrade".
        String message = "Sorry, Upgrade internal error(" + info + "), you can contact the developer or try again. Thanks.";
        String title = "Upgrade error.";
        AlertDialog.show(title, message, AlertDialogOptions.OPTION_OK, AlertDialog.ALERT_TYPE_WARNING);
    }
}
| xblia/Upgrade-service-for-java-application | CommonsDefUI/src/com/ui/stylemenu/StartSubProcess.java | Java | apache-2.0 | 1,564 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest}
*/
public final class GlobalNetworkEndpointGroupsAttachEndpointsRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)
GlobalNetworkEndpointGroupsAttachEndpointsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GlobalNetworkEndpointGroupsAttachEndpointsRequest.newBuilder() to construct.
private GlobalNetworkEndpointGroupsAttachEndpointsRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GlobalNetworkEndpointGroupsAttachEndpointsRequest() {
networkEndpoints_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GlobalNetworkEndpointGroupsAttachEndpointsRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private GlobalNetworkEndpointGroupsAttachEndpointsRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1198802282:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
networkEndpoints_ =
new java.util.ArrayList<com.google.cloud.compute.v1.NetworkEndpoint>();
mutable_bitField0_ |= 0x00000001;
}
networkEndpoints_.add(
input.readMessage(
com.google.cloud.compute.v1.NetworkEndpoint.parser(), extensionRegistry));
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
networkEndpoints_ = java.util.Collections.unmodifiableList(networkEndpoints_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GlobalNetworkEndpointGroupsAttachEndpointsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GlobalNetworkEndpointGroupsAttachEndpointsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest.class,
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest.Builder
.class);
}
public static final int NETWORK_ENDPOINTS_FIELD_NUMBER = 149850285;
private java.util.List<com.google.cloud.compute.v1.NetworkEndpoint> networkEndpoints_;
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.compute.v1.NetworkEndpoint> getNetworkEndpointsList() {
return networkEndpoints_;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.compute.v1.NetworkEndpointOrBuilder>
getNetworkEndpointsOrBuilderList() {
return networkEndpoints_;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
@java.lang.Override
public int getNetworkEndpointsCount() {
return networkEndpoints_.size();
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.NetworkEndpoint getNetworkEndpoints(int index) {
return networkEndpoints_.get(index);
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.NetworkEndpointOrBuilder getNetworkEndpointsOrBuilder(
int index) {
return networkEndpoints_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < networkEndpoints_.size(); i++) {
output.writeMessage(149850285, networkEndpoints_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < networkEndpoints_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
149850285, networkEndpoints_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest other =
(com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest) obj;
if (!getNetworkEndpointsList().equals(other.getNetworkEndpointsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getNetworkEndpointsCount() > 0) {
hash = (37 * hash) + NETWORK_ENDPOINTS_FIELD_NUMBER;
hash = (53 * hash) + getNetworkEndpointsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GlobalNetworkEndpointGroupsAttachEndpointsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GlobalNetworkEndpointGroupsAttachEndpointsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest.class,
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest.Builder
.class);
}
// Construct using
// com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getNetworkEndpointsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (networkEndpointsBuilder_ == null) {
networkEndpoints_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
networkEndpointsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GlobalNetworkEndpointGroupsAttachEndpointsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest build() {
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
buildPartial() {
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest result =
new com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest(this);
int from_bitField0_ = bitField0_;
if (networkEndpointsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
networkEndpoints_ = java.util.Collections.unmodifiableList(networkEndpoints_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.networkEndpoints_ = networkEndpoints_;
} else {
result.networkEndpoints_ = networkEndpointsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest) {
return mergeFrom(
(com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest other) {
if (other
== com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
.getDefaultInstance()) return this;
if (networkEndpointsBuilder_ == null) {
if (!other.networkEndpoints_.isEmpty()) {
if (networkEndpoints_.isEmpty()) {
networkEndpoints_ = other.networkEndpoints_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNetworkEndpointsIsMutable();
networkEndpoints_.addAll(other.networkEndpoints_);
}
onChanged();
}
} else {
if (!other.networkEndpoints_.isEmpty()) {
if (networkEndpointsBuilder_.isEmpty()) {
networkEndpointsBuilder_.dispose();
networkEndpointsBuilder_ = null;
networkEndpoints_ = other.networkEndpoints_;
bitField0_ = (bitField0_ & ~0x00000001);
networkEndpointsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getNetworkEndpointsFieldBuilder()
: null;
} else {
networkEndpointsBuilder_.addAllMessages(other.networkEndpoints_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest parsedMessage =
null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.compute.v1.NetworkEndpoint> networkEndpoints_ =
java.util.Collections.emptyList();
private void ensureNetworkEndpointsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
networkEndpoints_ =
new java.util.ArrayList<com.google.cloud.compute.v1.NetworkEndpoint>(networkEndpoints_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.NetworkEndpoint,
com.google.cloud.compute.v1.NetworkEndpoint.Builder,
com.google.cloud.compute.v1.NetworkEndpointOrBuilder>
networkEndpointsBuilder_;
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public java.util.List<com.google.cloud.compute.v1.NetworkEndpoint> getNetworkEndpointsList() {
if (networkEndpointsBuilder_ == null) {
return java.util.Collections.unmodifiableList(networkEndpoints_);
} else {
return networkEndpointsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public int getNetworkEndpointsCount() {
if (networkEndpointsBuilder_ == null) {
return networkEndpoints_.size();
} else {
return networkEndpointsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public com.google.cloud.compute.v1.NetworkEndpoint getNetworkEndpoints(int index) {
if (networkEndpointsBuilder_ == null) {
return networkEndpoints_.get(index);
} else {
return networkEndpointsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder setNetworkEndpoints(
int index, com.google.cloud.compute.v1.NetworkEndpoint value) {
if (networkEndpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkEndpointsIsMutable();
networkEndpoints_.set(index, value);
onChanged();
} else {
networkEndpointsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder setNetworkEndpoints(
int index, com.google.cloud.compute.v1.NetworkEndpoint.Builder builderForValue) {
if (networkEndpointsBuilder_ == null) {
ensureNetworkEndpointsIsMutable();
networkEndpoints_.set(index, builderForValue.build());
onChanged();
} else {
networkEndpointsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder addNetworkEndpoints(com.google.cloud.compute.v1.NetworkEndpoint value) {
if (networkEndpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkEndpointsIsMutable();
networkEndpoints_.add(value);
onChanged();
} else {
networkEndpointsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder addNetworkEndpoints(
int index, com.google.cloud.compute.v1.NetworkEndpoint value) {
if (networkEndpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkEndpointsIsMutable();
networkEndpoints_.add(index, value);
onChanged();
} else {
networkEndpointsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder addNetworkEndpoints(
com.google.cloud.compute.v1.NetworkEndpoint.Builder builderForValue) {
if (networkEndpointsBuilder_ == null) {
ensureNetworkEndpointsIsMutable();
networkEndpoints_.add(builderForValue.build());
onChanged();
} else {
networkEndpointsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder addNetworkEndpoints(
int index, com.google.cloud.compute.v1.NetworkEndpoint.Builder builderForValue) {
if (networkEndpointsBuilder_ == null) {
ensureNetworkEndpointsIsMutable();
networkEndpoints_.add(index, builderForValue.build());
onChanged();
} else {
networkEndpointsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder addAllNetworkEndpoints(
java.lang.Iterable<? extends com.google.cloud.compute.v1.NetworkEndpoint> values) {
if (networkEndpointsBuilder_ == null) {
ensureNetworkEndpointsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, networkEndpoints_);
onChanged();
} else {
networkEndpointsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder clearNetworkEndpoints() {
if (networkEndpointsBuilder_ == null) {
networkEndpoints_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
networkEndpointsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public Builder removeNetworkEndpoints(int index) {
if (networkEndpointsBuilder_ == null) {
ensureNetworkEndpointsIsMutable();
networkEndpoints_.remove(index);
onChanged();
} else {
networkEndpointsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public com.google.cloud.compute.v1.NetworkEndpoint.Builder getNetworkEndpointsBuilder(
int index) {
return getNetworkEndpointsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of network endpoints to be attached.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
*/
public com.google.cloud.compute.v1.NetworkEndpointOrBuilder getNetworkEndpointsOrBuilder(
int index) {
if (networkEndpointsBuilder_ == null) {
return networkEndpoints_.get(index);
} else {
return networkEndpointsBuilder_.getMessageOrBuilder(index);
}
}
    /**
     *
     *
     * <pre>
     * The list of network endpoints to be attached.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
     */
    public java.util.List<? extends com.google.cloud.compute.v1.NetworkEndpointOrBuilder>
        getNetworkEndpointsOrBuilderList() {
      if (networkEndpointsBuilder_ != null) {
        return networkEndpointsBuilder_.getMessageOrBuilderList();
      } else {
        // No builder yet: hand out a read-only view so callers cannot mutate our list.
        return java.util.Collections.unmodifiableList(networkEndpoints_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of network endpoints to be attached.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
     */
    public com.google.cloud.compute.v1.NetworkEndpoint.Builder addNetworkEndpointsBuilder() {
      // Appends a new element (initialized to the default instance) and returns its builder.
      return getNetworkEndpointsFieldBuilder()
          .addBuilder(com.google.cloud.compute.v1.NetworkEndpoint.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of network endpoints to be attached.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
     */
    public com.google.cloud.compute.v1.NetworkEndpoint.Builder addNetworkEndpointsBuilder(
        int index) {
      // Inserts a new element (initialized to the default instance) at the given
      // position and returns its builder.
      return getNetworkEndpointsFieldBuilder()
          .addBuilder(index, com.google.cloud.compute.v1.NetworkEndpoint.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of network endpoints to be attached.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.NetworkEndpoint network_endpoints = 149850285;</code>
     */
    public java.util.List<com.google.cloud.compute.v1.NetworkEndpoint.Builder>
        getNetworkEndpointsBuilderList() {
      return getNetworkEndpointsFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder on first use. Once it exists, the
    // plain list reference is nulled out and every subsequent read/write for this
    // field is routed through the builder instead.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.NetworkEndpoint,
            com.google.cloud.compute.v1.NetworkEndpoint.Builder,
            com.google.cloud.compute.v1.NetworkEndpointOrBuilder>
        getNetworkEndpointsFieldBuilder() {
      if (networkEndpointsBuilder_ == null) {
        networkEndpointsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.compute.v1.NetworkEndpoint,
                com.google.cloud.compute.v1.NetworkEndpoint.Builder,
                com.google.cloud.compute.v1.NetworkEndpointOrBuilder>(
                networkEndpoints_,
                // bit 0 of bitField0_ records whether the list is already mutable.
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        networkEndpoints_ = null;
      }
      return networkEndpointsBuilder_;
    }
    // Unknown-field handling is entirely delegated to the generated superclass;
    // these overrides exist only to narrow the return type to this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest)
  // Shared immutable default instance plus the parser plumbing the protobuf
  // runtime uses to deserialize this message type.
  private static final com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest();
  }

  /** Returns the singleton default (all-fields-unset) instance of this message. */
  public static com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GlobalNetworkEndpointGroupsAttachEndpointsRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<
              GlobalNetworkEndpointGroupsAttachEndpointsRequest>() {
            @java.lang.Override
            public GlobalNetworkEndpointGroupsAttachEndpointsRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              // Delegates to the parsing constructor of the enclosing message type.
              return new GlobalNetworkEndpointGroupsAttachEndpointsRequest(
                  input, extensionRegistry);
            }
          };

  /** Returns the stateless, thread-safe parser for this message type. */
  public static com.google.protobuf.Parser<GlobalNetworkEndpointGroupsAttachEndpointsRequest>
      parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GlobalNetworkEndpointGroupsAttachEndpointsRequest>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.GlobalNetworkEndpointGroupsAttachEndpointsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| googleapis/java-compute | proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GlobalNetworkEndpointGroupsAttachEndpointsRequest.java | Java | apache-2.0 | 34,557 |
// Copyright 2006, 2007, 2008, 2009 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.dom;
import org.apache.tapestry5.ioc.internal.util.CollectionFactory;
import java.util.Set;
/**
* Default implementation of {@link org.apache.tapestry5.dom.MarkupModel} that is appropriate for traditional (X)HTML
* markup. Assumes that all tags are lower-case. The majority of elements will be "expanded" (meaning a complete start
* and end tag); this is for compatibility with web browsers, especially when the content type of a response indicates
 * HTML, not true XML. Only the "hr", "br", "img", "link", and "meta" tags will be rendered abbreviated (i.e., "&lt;img/&gt;").
*/
public class DefaultMarkupModel extends AbstractMarkupModel
{
    /**
     * The (X)HTML elements that are always rendered in abbreviated form. The set is immutable
     * in practice, so it is shared across all instances rather than rebuilt per instance
     * (the original declared it as a per-instance field, allocating one set per model).
     */
    private static final Set<String> ALWAYS_EMPTY = CollectionFactory.newSet("hr", "br", "img", "link", "meta");

    public DefaultMarkupModel()
    {
        this(false);
    }

    public DefaultMarkupModel(boolean useApostropheForAttributes)
    {
        super(useApostropheForAttributes);
    }

    /**
     * Returns {@link EndTagStyle#ABBREVIATE} for the always-empty elements ("hr", "br", "img",
     * "link", "meta") and {@link EndTagStyle#REQUIRE} for every other element, for
     * compatibility with web browsers consuming the output as HTML.
     *
     * @param element lower-case element name
     */
    public EndTagStyle getEndTagStyle(String element)
    {
        return ALWAYS_EMPTY.contains(element) ? EndTagStyle.ABBREVIATE : EndTagStyle.REQUIRE;
    }

    /**
     * Returns false.
     */
    public boolean isXML()
    {
        return false;
    }
}
| agileowl/tapestry-5 | tapestry-core/src/main/java/org/apache/tapestry5/dom/DefaultMarkupModel.java | Java | apache-2.0 | 1,904 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.opsworks.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.opsworks.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DescribeOperatingSystemsResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeOperatingSystemsResultJsonUnmarshaller implements Unmarshaller<DescribeOperatingSystemsResult, JsonUnmarshallerContext> {

    /**
     * Streams the JSON response for DescribeOperatingSystems into a result object.
     * Walks tokens one at a time, copying the "OperatingSystems" array when it appears
     * at the expected nesting depth, and stops once the parser climbs back out of the
     * result object.
     */
    public DescribeOperatingSystemsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeOperatingSystemsResult describeOperatingSystemsResult = new DescribeOperatingSystemsResult();
        // Remember where we started so we can tell when the result object is fully consumed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole payload yields an empty result object.
        if (token == VALUE_NULL) {
            return describeOperatingSystemsResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Only one top-level field is expected: the list of operating systems.
                if (context.testExpression("OperatingSystems", targetDepth)) {
                    context.nextToken();
                    describeOperatingSystemsResult.setOperatingSystems(new ListUnmarshaller<OperatingSystem>(OperatingSystemJsonUnmarshaller.getInstance())
                            .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Terminate once we have closed back out to (or above) the depth we entered at.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return describeOperatingSystemsResult;
    }

    // Lazily created singleton. NOTE(review): the unsynchronized check-then-assign is the
    // standard pattern in this generated SDK; a race can at worst create a duplicate
    // stateless instance -- presumably considered benign, but confirm before reusing elsewhere.
    private static DescribeOperatingSystemsResultJsonUnmarshaller instance;

    public static DescribeOperatingSystemsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeOperatingSystemsResultJsonUnmarshaller();
        return instance;
    }
}
| aws/aws-sdk-java | aws-java-sdk-opsworks/src/main/java/com/amazonaws/services/opsworks/model/transform/DescribeOperatingSystemsResultJsonUnmarshaller.java | Java | apache-2.0 | 3,029 |
/*
* Copyright 2015 Felipe Santos <fralph at ic.uff.br>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package br.uff.labtempo.tlauncher.data;
import br.uff.labtempo.osiris.to.common.definitions.ValueType;
/**
*
* @author Felipe Santos <fralph at ic.uff.br>
*/
/**
 * Shared constants for the test launcher: the test network identity, the result output
 * folder, the data attributes it publishes, and the OMCP resource addresses of the
 * VirtualSensorNet services it addresses.
 */
public class DataBase {

    /** Identifier of the sensor network used by the tests. */
    public static final String NETWORK_ID = "test";//NETWORK_ID
    /** Folder where test results are written. */
    public static final String RESULT_FOLDER = "results";
    /** Name of the published data field. */
    public static final String DATA_NAME = "message";
    /** Unit of the published data field. */
    public static final String DATA_UNIT = "identifier";
    /** Symbol of the published data field. */
    public static final String DATA_SYMBOL = "ID";
    /** Value type of the published data field. */
    public static final ValueType DATA_TYPE = ValueType.NUMBER;
    // OMCP addresses of the VirtualSensorNet resources the launcher talks to.
    public static final String RESOURCE_DATATYPE = "omcp://virtualsensornet/datatype/";
    public static final String RESOURCE_CONVERTER = "omcp://virtualsensornet/converter/";
    public static final String RESOURCE_LINK = "omcp://virtualsensornet/link/";
    public static final String RESOURCE_COMPOSITE = "omcp://virtualsensornet/composite/";
    public static final String RESOURCE_BLENDING = "omcp://virtualsensornet/blending/";
    public static final String RESOURCE_FUNCTION = "omcp://virtualsensornet/function/";
    public static final String RESOURCE_UPDATE_VSN = "omcp://update.messagegroup/virtualsensornet/#";

    private DataBase() {
        // Constants holder; never instantiated (Effective Java, Item 4).
    }
}
| println/TLauncher | src/main/java/br/uff/labtempo/tlauncher/data/DataBase.java | Java | apache-2.0 | 1,786 |
package com.future.service;
import com.future.entity.TaskEntity;
import java.util.List;
/**
* @author shuaiqi.xsq, 15/8/29
*/
public interface TaskService {

    /** Loads task data into the service; returns {@code true} on success. */
    boolean loadData();

    /** Persists the given task; returns {@code true} on success. */
    boolean saveTask(TaskEntity taskEntity);

    /** Returns the tasks published by the given user (presumably; confirm against impl). */
    List<TaskEntity> getPublishedTask(String userName);

    /** Returns the next task available to the given user, or {@code null} if none. */
    TaskEntity getNextTask(String userName);

    /**
     * Records the completion state of a task for a user.
     *
     * @param isFinish completion flag encoded as an int -- TODO confirm expected values
     * @return {@code true} on success
     */
    boolean finishTask(String username, int taskId, int isFinish);
}
| mrdanding/future | src/com/future/service/TaskService.java | Java | apache-2.0 | 433 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.tez;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.tez.runtime.api.TezProcessorContext;
/**
* This interface is implemented by PhysicalOperators that can need to access
* TezProcessorContext of a Tez task.
*/
public interface TezTaskConfigurable {

    /**
     * Supplies the {@link TezProcessorContext} of the running Tez task to the operator.
     *
     * @param processorContext the context of the Tez task this operator runs in
     * @throws ExecException if the operator cannot be initialized with the context
     */
    public void initialize(TezProcessorContext processorContext) throws ExecException;
}
| daijyc/pig | src/org/apache/pig/backend/hadoop/executionengine/tez/TezTaskConfigurable.java | Java | apache-2.0 | 1,240 |
package com.iservport.swimit.config;
import javax.servlet.ServletContext;
import org.springframework.security.web.context.AbstractSecurityWebApplicationInitializer;
import org.springframework.web.multipart.support.MultipartFilter;
/**
* Automatically register the springSecurityFilterChain Filter for every URL.
*
* @author mauriciofernandesdecastro
*/
public class SecurityWebApplicationInitializer
    extends AbstractSecurityWebApplicationInitializer
{
    @Override
    protected void beforeSpringSecurityFilterChain(ServletContext servletContext) {
        // Register the MultipartFilter ahead of the springSecurityFilterChain --
        // presumably so multipart (file upload) requests are parsed before the
        // security filters run; NOTE(review): confirm against the app's CSRF setup.
        insertFilters(servletContext, new MultipartFilter());
    }
}
| chmulato/helianto-seed | temp/SecurityWebApplicationInitializer.java | Java | apache-2.0 | 653 |
/**
* Copyright 2013-present febit.org ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.febit.service;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author zqq90
* @param <T>
*/
/**
 * Immutable carrier for the outcome of a service call: a numeric status code,
 * an optional message with formatting arguments, an optional typed value, and
 * an optional map of extra data.
 *
 * @author zqq90
 * @param <T> type of the value carried on success
 */
public class ServiceResult<T> implements Serializable {

    private static final long serialVersionUID = 1L;

    public static final int OK = 0;
    public static final int SUCCESS = OK;
    public static final int ERROR_SYS = 100;
    public static final int REDIRECT = 3020;
    public static final int ERROR = 5000;
    // Parameter errors:
    public static final int ERROR_PARAM = 5100;
    // Parameter error: required value missing
    public static final int ERROR_PARAM_REQUIRED = 5101;
    // Parameter error: malformed value
    public static final int ERROR_PARAM_FORMAT = 5102;
    // File upload errors
    public static final int ERROR_UPLOAD = 5200;
    // File upload error: file too large
    public static final int ERROR_UPLOAD_TOOBIG = 5201;
    // File upload error: file cannot be written
    public static final int ERROR_UPLOAD_CANTWRITE = 5202;
    // File upload error: invalid file type
    public static final int ERROR_UPLOAD_TYPE = 5203;
    // Permission errors
    public static final int ERROR_RIGHT = 5400;
    // XSRF token error
    public static final int ERROR_XSRF = 5401;
    // Verification-code (CAPTCHA) error
    public static final int ERROR_VERCODE = 5402;
    // Not logged in
    public static final int ERROR_NOT_LOGIN = 5403;
    public static final int ERROR_ADD = 6100;
    public static final int ERROR_DEL = 6200;
    public static final int ERROR_DEL_NOTFOUND = 6201;
    public static final int ERROR_DEL_UNABLE = 6202;
    public static final int ERROR_MODIFY = 6300;
    public static final int ERROR_MODIFY_NOTFOUND = 6301;
    public static final int ERROR_MODIFY_UNABLE = 6302;
    public static final int ERROR_QUERY = 6400;
    public static final int ERROR_QUERY_NOTFOUND = 6404;

    // Shared value-less success result.
    public static final ServiceResult SUCCESS_RESULT = new ServiceResult(OK);

    public final int code;
    public final String msg;
    public final T value;
    private final Object[] args;
    // Lazily created bag of extra data; null until first put().
    private Map<Object, Object> datas;

    // Successful result carrying a value.
    protected ServiceResult(T value) {
        this.code = OK;
        this.msg = null;
        this.args = null;
        this.value = value;
    }

    // Failure result with a message and optional formatting arguments.
    protected ServiceResult(int code, String message, Object... arguments) {
        this.code = code;
        this.msg = message;
        this.args = arguments;
        this.value = null;
    }

    // Result with a code only (no message, no value).
    protected ServiceResult(int code) {
        this.code = code;
        this.msg = null;
        this.args = null;
        this.value = null;
    }

    /** Returns true when the code equals {@link #OK}. */
    public boolean success() {
        return code == OK;
    }

    /** Returns true when the code differs from {@link #OK}. */
    public boolean failed() {
        return code != OK;
    }

    /** Attaches an extra key/value pair; returns this result for chaining. */
    public ServiceResult put(Object key, Object value) {
        if (this.datas == null) {
            this.datas = new HashMap<>();
        }
        this.datas.put(key, value);
        return this;
    }

    /** Returns the extra value for the key, or null if absent (or nothing was ever put). */
    public Object get(Object key) {
        return datas != null ? datas.get(key) : null;
    }

    public int getCode() {
        return code;
    }

    public String getMessage() {
        return msg;
    }

    public T getValue() {
        return value;
    }

    public Map<Object, Object> getDatas() {
        return datas;
    }

    public Object[] getArgs() {
        return args;
    }

    @Override
    public String toString() {
        return "ServiceResult{" + "code=" + code + ", message=" + msg + ", arguments=" + Arrays.toString(args) + '}';
    }

    /** Creates a successful result carrying the given value. */
    public static <T> ServiceResult<T> success(T val) {
        return new ServiceResult<>(val);
    }

    /** Creates a value-less successful result. */
    public static ServiceResult successResult() {
        return new ServiceResult(OK);
    }

    /** Creates a failure result with the given code. */
    public static ServiceResult error(int code) {
        return new ServiceResult(code);
    }

    /** Creates a generic {@link #ERROR} result with a message. */
    public static ServiceResult error(String msg) {
        return new ServiceResult(ERROR, msg);
    }

    /** Creates a generic {@link #ERROR} result with a message and formatting arguments. */
    public static ServiceResult error(String msg, Object... args) {
        return new ServiceResult(ERROR, msg, args);
    }

    /** Creates a failure result with an explicit code, message and formatting arguments. */
    public static ServiceResult error(int code, String msg, Object... args) {
        return new ServiceResult(code, msg, args);
    }
}
| febit/febit-common | src/main/java/org/febit/service/ServiceResult.java | Java | apache-2.0 | 4,766 |
/*
* #%L
* Wikidata Toolkit Data Model
* %%
* Copyright (C) 2014 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wikidata.wdtk.datamodel.helpers;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import org.wikidata.wdtk.datamodel.interfaces.FormDocument;
import org.wikidata.wdtk.datamodel.interfaces.FormIdValue;
import org.wikidata.wdtk.datamodel.interfaces.FormUpdate;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
/**
 * Unit tests for {@code FormUpdateBuilder}: factory-method validation, statement and
 * representation updates (both "blind" and against a base revision), grammatical-feature
 * changes, and merging of two updates.
 */
public class FormUpdateBuilderTest {

    // Shared fixtures reused from the sibling builder tests.
    private static final FormIdValue F1 = EntityUpdateBuilderTest.F1;
    private static final FormDocument FORM = EntityUpdateBuilderTest.FORM;
    private static final Statement F1_DESCRIBES_SOMETHING = StatementBuilder
            .forSubjectAndProperty(F1, Datamodel.makeWikidataPropertyIdValue("P1"))
            .withValue(Datamodel.makeStringValue("something"))
            .build();
    private static final Statement F1_EVOKES_FEELING = StatementBuilder
            .forSubjectAndProperty(F1, Datamodel.makeWikidataPropertyIdValue("P2"))
            .withValue(Datamodel.makeStringValue("feeling"))
            .build();
    private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN;
    private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK;
    private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1;
    private static final ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2");
    private static final ItemIdValue Q3 = Datamodel.makeWikidataItemIdValue("Q3");

    // Factory must reject null and placeholder (NULL) form IDs.
    @Test
    public void testForEntityId() {
        assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(null));
        assertThrows(IllegalArgumentException.class, () -> FormUpdateBuilder.forEntityId(FormIdValue.NULL));
        FormUpdateBuilder.forEntityId(F1);
    }

    @Test
    public void testForBaseRevisionId() {
        assertEquals(123, FormUpdateBuilder.forBaseRevisionId(F1, 123).getBaseRevisionId());
    }

    @Test
    public void testForBaseRevision() {
        assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forBaseRevision(null));
        assertThrows(IllegalArgumentException.class,
                () -> FormUpdateBuilder.forBaseRevision(FORM.withEntityId(FormIdValue.NULL)));
        FormUpdateBuilder.forBaseRevision(FORM);
    }

    @Test
    public void testStatementUpdate() {
        FormUpdate update = FormUpdateBuilder.forEntityId(F1)
                .updateStatements(StatementUpdateBuilder.create().add(F1_DESCRIBES_SOMETHING).build())
                .build();
        assertThat(update.getStatements().getAdded(), containsInAnyOrder(F1_DESCRIBES_SOMETHING));
    }

    // "Blind" update: no base revision, so successive updates simply accumulate.
    @Test
    public void testBlindRepresentationUpdate() {
        assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(F1).updateRepresentations(null));
        FormUpdate update = FormUpdateBuilder.forEntityId(F1)
                .updateRepresentations(TermUpdateBuilder.create().remove("en").build())
                .updateRepresentations(TermUpdateBuilder.create().remove("sk").build())
                .build();
        assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en", "sk"));
    }

    // With a base revision, no-op changes are dropped and removals are checked
    // against the terms actually present on the base document.
    @Test
    public void testBaseRepresentationUpdate() {
        FormUpdate update = FormUpdateBuilder
                .forBaseRevision(FORM
                        .withRepresentation(EN)
                        .withRepresentation(SK))
                .updateRepresentations(TermUpdateBuilder.create()
                        .put(SK) // ignored
                        .remove("en") // checked
                        .build())
                .build();
        assertThat(update.getRepresentations().getModified(), is(anEmptyMap()));
        assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en"));
    }

    // Grammatical features: nulls, placeholder IDs, and duplicates are rejected;
    // the feature set is absent until explicitly assigned.
    @Test
    public void testBlindFeatureChange() {
        FormUpdateBuilder builder = FormUpdateBuilder.forEntityId(F1);
        assertThrows(NullPointerException.class, () -> builder.setGrammaticalFeatures(null));
        assertThrows(NullPointerException.class, () -> builder.setGrammaticalFeatures(Arrays.asList(Q1, null)));
        assertThrows(IllegalArgumentException.class,
                () -> builder.setGrammaticalFeatures(Arrays.asList(ItemIdValue.NULL)));
        assertThrows(IllegalArgumentException.class, () -> builder.setGrammaticalFeatures(Arrays.asList(Q1, Q1)));
        assertFalse(builder.build().getGrammaticalFeatures().isPresent());
        FormUpdate update = builder.setGrammaticalFeatures(Arrays.asList(Q1, Q2)).build();
        assertThat(update.getGrammaticalFeatures().get(), containsInAnyOrder(Q1, Q2));
    }

    // Assigning the same feature set as the base revision is a no-op; only a
    // genuinely different set is recorded on the update.
    @Test
    public void testBaseFeatureChange() {
        FormDocument base = FORM
                .withGrammaticalFeature(Q1)
                .withGrammaticalFeature(Q2);
        assertFalse(FormUpdateBuilder.forBaseRevision(base).build().getGrammaticalFeatures().isPresent());
        assertFalse(FormUpdateBuilder.forBaseRevision(base)
                .setGrammaticalFeatures(Arrays.asList(Q1, Q2))
                .build()
                .getGrammaticalFeatures().isPresent());
        FormUpdate update = FormUpdateBuilder.forBaseRevision(base)
                .setGrammaticalFeatures(Arrays.asList(Q2, Q3))
                .build();
        assertThat(update.getGrammaticalFeatures().get(), containsInAnyOrder(Q2, Q3));
    }

    // append() merges two updates for the same entity into one combined update.
    @Test
    public void testMerge() {
        assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(F1).append(null));
        FormUpdate update = FormUpdateBuilder.forEntityId(F1)
                .updateStatements(StatementUpdateBuilder.create().add(F1_DESCRIBES_SOMETHING).build())
                .updateRepresentations(TermUpdateBuilder.create().remove("en").build())
                .append(FormUpdateBuilder.forEntityId(F1)
                        .updateStatements(StatementUpdateBuilder.create().add(F1_EVOKES_FEELING).build())
                        .updateRepresentations(TermUpdateBuilder.create().remove("sk").build())
                        .build())
                .build();
        assertThat(update.getStatements().getAdded(),
                containsInAnyOrder(F1_DESCRIBES_SOMETHING, F1_EVOKES_FEELING));
        assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en", "sk"));
    }
}
| Wikidata/Wikidata-Toolkit | wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/FormUpdateBuilderTest.java | Java | apache-2.0 | 6,686 |
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.api;
/**
 * Basic bean implementation of {@code CreateContainerMetadata}: holds the container
 * name, the created container, and the failure (if any) of the creation attempt.
 */
public class CreateContainerBasicMetadata implements CreateContainerMetadata {

    private String containerName;
    private Throwable failure;
    private Container container;

    /** A creation attempt is considered successful when no failure was recorded. */
    public boolean isSuccess() {
        return this.failure == null;
    }

    public String getContainerName() {
        return this.containerName;
    }

    public void setContainerName(String containerName) {
        this.containerName = containerName;
    }

    public Container getContainer() {
        return this.container;
    }

    public void setContainer(Container container) {
        this.container = container;
    }

    public Throwable getFailure() {
        return this.failure;
    }

    public void setFailure(Throwable failure) {
        this.failure = failure;
    }
}
| Jitendrakry/fuse | fabric/fabric-core/src/main/scala/org/fusesource/fabric/api/CreateContainerBasicMetadata.java | Java | apache-2.0 | 1,417 |
/**
* Copyright 2015-2016 Marcel Piestansky (http://marpies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marpies.ane.twitter.functions;
import com.adobe.fre.FREContext;
import com.adobe.fre.FREObject;
import com.marpies.ane.twitter.data.AIRTwitterEvent;
import com.marpies.ane.twitter.data.TwitterAPI;
import com.marpies.ane.twitter.utils.AIR;
import com.marpies.ane.twitter.utils.FREObjectUtils;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
public class ApplicationOpenURLFunction extends BaseFunction {

    /**
     * Handles the app being (re)opened via the Twitter OAuth callback URL: extracts the
     * {@code oauth_verifier} from the query string and exchanges it for access tokens,
     * or dispatches a cancel event when the user denied access.
     */
    @Override
    public FREObject call( FREContext context, FREObject[] args ) {
        super.call( context, args );

        try {
            String url = FREObjectUtils.getString( args[0] );
            /* java.net.URL does not parse custom protocols, so swap the scheme for http.
             * BUGFIX: the previous pattern ".*:" was greedy and matched up to the LAST
             * colon in the string, corrupting any URL whose path or query contained a
             * further ':'. Replace only the scheme prefix. */
            url = url.replaceFirst( "^[^:]+:", "http:" );
            String query = new URL( url ).getQuery();
            Map<String, String> parameters = parametersFromQuery( query );
            String verifier = parameters.get( "oauth_verifier" );
            String denied = parameters.get( "denied" );
            if( denied != null || verifier == null ) {
                AIR.log( "App was launched after cancelled attempt to login" );
                AIR.dispatchEvent( AIRTwitterEvent.LOGIN_CANCEL );
                return null;
            } else {
                AIR.log( "App launched with PIN" );
            }
            TwitterAPI.getAccessTokensForPIN( verifier );
        } catch( MalformedURLException e ) {
            AIR.dispatchEvent( AIRTwitterEvent.LOGIN_ERROR, e.getMessage() );
        }
        return null;
    }

    /**
     * Parses a URL query string into a name/value map.
     * Pairs without exactly one '=' are skipped; no URL-decoding is performed
     * (OAuth verifier values are URL-safe -- NOTE(review): confirm if other params are used).
     *
     * @param query the raw query string, may be null (treated as no parameters;
     *              previously this caused a NullPointerException)
     */
    private Map<String, String> parametersFromQuery( String query ) {
        Map<String, String> map = new HashMap<String, String>();
        if( query == null ) return map;
        String[] params = query.split( "&" );
        for( String param : params ) {
            String[] pair = param.split( "=" );
            if( pair.length != 2 ) continue;
            String name = pair[0];
            String value = pair[1];
            map.put( name, value );
        }
        return map;
    }
}
| marpies/AIRTwitter-ANE | android/src/com/marpies/ane/twitter/functions/ApplicationOpenURLFunction.java | Java | apache-2.0 | 2,462 |
package com.vladmihalcea.book.hpjp.hibernate.mapping;
import com.vladmihalcea.book.hpjp.util.AbstractPostgreSQLIntegrationTest;
import org.hibernate.annotations.JoinFormula;
import org.junit.Test;
import javax.persistence.*;
import java.sql.Statement;
import java.util.Locale;
import java.util.Objects;
import static org.junit.Assert.assertEquals;
/**
* @author Vlad Mihalcea
*/
/**
 * Verifies that a Hibernate {@code @JoinFormula} association can resolve each monthly
 * {@code Salary} row to the same employee's salary of the previous month, including
 * the December-to-January year rollover.
 *
 * @author Vlad Mihalcea
 */
public class JoinFormulaLastMonthSalaryTest extends AbstractPostgreSQLIntegrationTest {

    @Override
    protected Class<?>[] entities() {
        return new Class<?>[] {
            Employee.class,
            Salary.class
        };
    }

    // Seeds two employees with four consecutive monthly salaries each
    // (2015-11 .. 2016-02), then sanity-checks the expected previous-salary
    // chain using a native-SQL oracle query.
    @Override
    protected void afterInit() {
        Employee alice = new Employee();
        alice.setId(1L);
        alice.setName("Alice");
        alice.setTitle("CEO");

        Employee bob = new Employee();
        bob.setId(2L);
        bob.setName("Bob");
        bob.setTitle("Developer");

        doInJPA( entityManager -> {
            entityManager.persist(alice);
            entityManager.persist(bob);
        } );

        doInJPA( entityManager -> {
            Salary aliceSalary201511 = new Salary();
            aliceSalary201511.setId(1L);
            aliceSalary201511.setEmployee(alice);
            aliceSalary201511.setYear(2015);
            aliceSalary201511.setMonth(11);
            aliceSalary201511.setAmountCents(10_000);
            entityManager.persist(aliceSalary201511);

            Salary bobSalary201511 = new Salary();
            bobSalary201511.setId(2L);
            bobSalary201511.setEmployee(bob);
            bobSalary201511.setYear(2015);
            bobSalary201511.setMonth(11);
            bobSalary201511.setAmountCents(7_000);
            entityManager.persist(bobSalary201511);

            Salary aliceSalary201512 = new Salary();
            aliceSalary201512.setId(3L);
            aliceSalary201512.setEmployee(alice);
            aliceSalary201512.setYear(2015);
            aliceSalary201512.setMonth(12);
            aliceSalary201512.setAmountCents(11_000);
            entityManager.persist(aliceSalary201512);

            Salary bobSalary201512 = new Salary();
            bobSalary201512.setId(4L);
            bobSalary201512.setEmployee(bob);
            bobSalary201512.setYear(2015);
            bobSalary201512.setMonth(12);
            bobSalary201512.setAmountCents(7_500);
            entityManager.persist(bobSalary201512);

            Salary aliceSalary201601 = new Salary();
            aliceSalary201601.setId(5L);
            aliceSalary201601.setEmployee(alice);
            aliceSalary201601.setYear(2016);
            aliceSalary201601.setMonth(1);
            aliceSalary201601.setAmountCents(11_500);
            entityManager.persist(aliceSalary201601);

            Salary bobSalary201601 = new Salary();
            bobSalary201601.setId(6L);
            bobSalary201601.setEmployee(bob);
            bobSalary201601.setYear(2016);
            bobSalary201601.setMonth(1);
            bobSalary201601.setAmountCents(7_900);
            entityManager.persist(bobSalary201601);

            Salary aliceSalary201602 = new Salary();
            aliceSalary201602.setId(7L);
            aliceSalary201602.setEmployee(alice);
            aliceSalary201602.setYear(2016);
            aliceSalary201602.setMonth(2);
            aliceSalary201602.setAmountCents(11_900);
            entityManager.persist(aliceSalary201602);

            Salary bobSalary201602 = new Salary();
            bobSalary201602.setId(8L);
            bobSalary201602.setEmployee(bob);
            bobSalary201602.setYear(2016);
            bobSalary201602.setMonth(2);
            bobSalary201602.setAmountCents(8_500);
            entityManager.persist(bobSalary201602);
        } );

        // Oracle check: odd ids are Alice's salaries, even ids are Bob's;
        // each month's previous salary is the row two ids earlier.
        assertEquals(Long.valueOf(1L), getPreviousSalaryId(3L));
        assertEquals(Long.valueOf(2L), getPreviousSalaryId(4L));
        assertEquals(Long.valueOf(3L), getPreviousSalaryId(5L));
        assertEquals(Long.valueOf(4L), getPreviousSalaryId(6L));
        assertEquals(Long.valueOf(5L), getPreviousSalaryId(7L));
        assertEquals(Long.valueOf(6L), getPreviousSalaryId(8L));
    }

    // The actual assertion: the @JoinFormula association must agree with the oracle.
    @Test
    public void test() {
        doInJPA( entityManager -> {
            assertEquals(
                Long.valueOf(1L),
                entityManager.find(Salary.class, 3L)
                    .getPreviousMonthSalary().getId()
            );
            assertEquals(
                Long.valueOf(2L),
                entityManager.find(Salary.class, 4L)
                    .getPreviousMonthSalary().getId()
            );
            assertEquals(
                Long.valueOf(3L),
                entityManager.find(Salary.class, 5L)
                    .getPreviousMonthSalary().getId()
            );
            assertEquals(
                Long.valueOf(4L),
                entityManager.find(Salary.class, 6L)
                    .getPreviousMonthSalary().getId()
            );
            assertEquals(
                Long.valueOf(5L),
                entityManager.find(Salary.class, 7L)
                    .getPreviousMonthSalary().getId()
            );
            assertEquals(
                Long.valueOf(6L),
                entityManager.find(Salary.class, 8L)
                    .getPreviousMonthSalary().getId()
            );
        } );
    }

    // Native-SQL reference implementation of "previous month's salary for the same
    // employee", with explicit handling of the January -> previous-December rollover.
    private Long getPreviousSalaryId(long salaryId) {
        return doInJPA( entityManager -> {
            Salary salary = entityManager.find(Salary.class, salaryId);

            Number prevSalaryId = (Number) entityManager.createNativeQuery(
                "SELECT prev_salary.id " +
                "FROM salary prev_salary " +
                "WHERE " +
                "   prev_salary.employee_id = :employeeId AND " +
                "   ( CASE WHEN :month = 1 " +
                "   THEN prev_salary.year + 1 = :year AND " +
                "        prev_salary.month = 12 " +
                "   ELSE prev_salary.year = :year AND " +
                "        prev_salary.month + 1 = :month " +
                "   END ) = true ")
            .setParameter("employeeId", salary.getEmployee().getId())
            .setParameter("year", salary.getYear())
            .setParameter("month", salary.getMonth())
            .getSingleResult();
            return prevSalaryId.longValue();
        } );
    }

    @Entity(name = "Employee")
    @Table(name = "employee")
    public static class Employee {

        @Id
        private Long id;

        private String name;

        private String title;

        public Long getId() {
            return id;
        }

        public void setId(Long id) {
            this.id = id;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }
    }

    @Entity(name = "Salary")
    @Table(name = "salary")
    public static class Salary {

        @Id
        private Long id;

        @ManyToOne(fetch = FetchType.LAZY)
        private Employee employee;

        private int month;

        private int year;

        @Column(name = "amount_cents")
        private long amountCents;

        // Correlated-subquery join formula mirroring getPreviousSalaryId(): resolves
        // to the id of the same employee's salary for the preceding calendar month.
        @ManyToOne(fetch = FetchType.LAZY)
        @JoinFormula(
            "( " +
            "   SELECT prev_salary.id " +
            "   FROM salary prev_salary " +
            "   WHERE " +
            "       prev_salary.employee_id = employee_id AND " +
            "       ( " +
            "           CASE WHEN month = 1 " +
            "           THEN prev_salary.year + 1 = year AND " +
            "                prev_salary.month = 12 " +
            "           ELSE prev_salary.year = year AND " +
            "                prev_salary.month + 1 = month " +
            "           END " +
            "       ) = true " +
            ")"
        )
        private Salary previousMonthSalary;

        public Long getId() {
            return id;
        }

        public void setId(Long id) {
            this.id = id;
        }

        public Employee getEmployee() {
            return employee;
        }

        public void setEmployee(Employee employee) {
            this.employee = employee;
        }

        public int getMonth() {
            return month;
        }

        public void setMonth(int month) {
            this.month = month;
        }

        public int getYear() {
            return year;
        }

        public void setYear(int year) {
            this.year = year;
        }

        public long getAmountCents() {
            return amountCents;
        }

        public void setAmountCents(long amountCents) {
            this.amountCents = amountCents;
        }

        public Salary getPreviousMonthSalary() {
            return previousMonthSalary;
        }
    }
}
| vladmihalcea/high-performance-java-persistence | core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/mapping/JoinFormulaLastMonthSalaryTest.java | Java | apache-2.0 | 7,083 |
/*
* Copyright (c) 2017. Jan Wiemer
*/
package org.jacis.plugin;
import org.jacis.JacisApi;
import org.jacis.container.JacisObjectTypeSpec;
import org.jacis.container.JacisTransactionHandle;
import org.jacis.store.JacisStoreImpl;
/**
* Listener that gets notified on each modification during commit.
*
* A listener implementing this interface can be registered at a transactional store
* by passing it to the method {@link JacisStoreImpl#registerModificationListener(JacisModificationListener)}.
* Once registered the method {@link #onModification(Object, Object, Object, JacisTransactionHandle)} of the listener is called
* for each modification on the committed values in the store. The callback method is called during the commit phase
* of the transaction when the modified values in the transactional view are written back to the store.
* Note that a modification listener can only be registered for a store if this is configured to track the
* original values of a transaction (see {@link JacisObjectTypeSpec#isTrackOriginalValueEnabled()}).
*
* @param <K> Key type of the store entry
* @param <V> Value type of the store entry
* @author Jan Wiemer
*/
@JacisApi
public interface JacisModificationListener<K, V> {

    /**
     * Callback method called during the commit phase of a transaction for each modified value written back
     * from the transactional view to the store of committed values.
     * Note that implementing methods should not throw an exception since the original transaction could be broken by this.
     *
     * @param key      The key of the modified object
     * @param oldValue The original value of the modified object when it was copied to the transactional view.
     * @param newValue The new modified value that is written back to the committed values.
     * @param tx       The transaction that is currently committed.
     */
    void onModification(K key, V oldValue, V newValue, JacisTransactionHandle tx);

    /**
     * @return whether this modification listener implementation is thread-safe. Defaults to
     *         {@code false}; override this method to declare a listener implementation thread-safe.
     */
    default boolean isThreadSafe() {
        return false;
    }
}
| JanWiemer/jacis | src/main/java/org/jacis/plugin/JacisModificationListener.java | Java | apache-2.0 | 2,239 |
package com.jlnu.lang.realtimeinfo.db;
import org.litepal.crud.DataSupport;
/**
* Created by qn on 2017/4/5.
*/
/**
 * Province record persisted through LitePal ({@code DataSupport} subclasses
 * are mapped to database tables by the library).
 */
public class Province extends DataSupport {

    // Local database primary key — presumably managed by LitePal; confirm.
    private int id;

    private String provinceName;

    // Numeric code identifying the province — presumably a weather-service
    // area code; confirm against the code that queries by it.
    private int provinceCode;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getProvinceName() {
        return provinceName;
    }

    public void setProvinceName(String provinceName) {
        this.provinceName = provinceName;
    }

    public int getProvinceCode() {
        return provinceCode;
    }

    public void setProvinceCode(int provinceCode) {
        this.provinceCode = provinceCode;
    }
}
| 897190301/RealTimeInfo | app/src/main/java/com/jlnu/lang/realtimeinfo/db/Province.java | Java | apache-2.0 | 710 |
package com.loveboyuan.smarttrader.elastic_search;
/**
 * Top-level shape of an Elasticsearch search response, parameterized by the
 * document type {@code T}. Fields are presumably populated reflectively by a
 * JSON mapper (e.g. Gson) — which would explain why {@code took},
 * {@code timed_out} and {@code _shards} have no accessors; confirm at the
 * call site.
 */
public class SearchResponse<T> {

    private int took;
    private boolean timed_out;
    private Shard _shards;
    private Hits<T> hits;

    // No-arg constructor, presumably required for reflective deserialization.
    public SearchResponse() {}

    /** @return the matched-documents section of the response */
    public Hits<T> getHits() {
        return hits;
    }
}
/**
 * Shard-accounting section ({@code _shards}) of an Elasticsearch search
 * response. Fields are presumably filled reflectively by the JSON mapper and
 * are never read by callers, hence no accessors — confirm.
 */
class Shard {

    private int total;
    private int successful;
    private int failed;

    // No-arg constructor, presumably required for reflective deserialization.
    public Shard() {}
}
package com.jivesoftware.os.jive.utils.collections.bah;
import com.jivesoftware.os.jive.utils.collections.KeyValueStream;
import java.util.concurrent.Semaphore;
/**
* @author jonathan.colt
*/
/**
 * Hash table from {@code byte[]} keys to values of type {@code V}, implemented
 * with open addressing and linear probing over a pluggable backing
 * {@link BAHState}. Hashing and key equality are delegated to the supplied
 * {@link BAHasher} / {@link BAHEqualer}.
 *
 * <p>Deleted slots are marked with the state's {@code skipped()} sentinel so
 * probe chains stay intact; {@code remove} opportunistically scrubs a trailing
 * run of tombstones when it can prove the chain ends.
 *
 * <p>NOTE(review): {@code state} is volatile, which makes table replacement
 * (grow/clear) visible to readers, but the mutating operations are not
 * synchronized here — writers appear to require external synchronization
 * (cf. the {@link Semaphore} passed to {@link #stream}); confirm against callers.
 *
 * @author jonathan.colt
 */
public class BAHash<V> implements BAH<V> {

    private final BAHasher hasher;
    private final BAHEqualer equaler;
    /** Current backing table; swapped wholesale on {@link #clear()} and on growth. */
    private volatile BAHState<V> state;

    /**
     * @param state   initial backing table
     * @param hasher  computes hash codes for byte-array keys
     * @param equaler compares stored keys against lookup keys
     */
    public BAHash(BAHState<V> state, BAHasher hasher, BAHEqualer equaler) {
        this.hasher = hasher;
        this.equaler = equaler;
        this.state = state;
    }

    /** @return number of live entries in the table */
    @Override
    public long size() {
        return state.size();
    }

    /** Discards all entries by allocating a fresh empty table. */
    @Override
    public void clear() {
        state = state.allocate(0);
    }

    /** Maps a hash code onto a slot index in {@code [0, capacity)}. */
    private long hash(BAHState<V> state, long keyShuffle) {
        keyShuffle += keyShuffle >> 8; // shuffle bits to avoid worst case clustering
        if (keyShuffle < 0) {
            // Bug fix: -Long.MIN_VALUE overflows back to Long.MIN_VALUE, which
            // previously produced a negative slot index; pin that one value to 0.
            keyShuffle = (keyShuffle == Long.MIN_VALUE) ? 0 : -keyShuffle;
        }
        return keyShuffle % state.capacity();
    }

    /**
     * @return the value stored in the table's first linked slot, or
     *         {@code null} if that slot is empty or a skipped tombstone
     */
    V firstValue() {
        BAHState<V> s = state;
        byte[] skipped = s.skipped();
        long i = s.first();
        if (i != -1) {
            byte[] key = s.key(i);
            if (key != null && key != skipped) {
                return s.value(i);
            }
        }
        return null;
    }

    /**
     * Removes and returns the value in the table's first linked slot, or
     * {@code null} if there is none.
     */
    V removeFirstValue() {
        BAHState<V> s = state;
        byte[] skipped = s.skipped();
        long i = s.first();
        if (i != -1) {
            byte[] key = s.key(i);
            if (key != null && key != skipped) {
                V v = s.value(i);
                remove(key, 0, key.length);
                return v;
            }
        }
        return null;
    }

    @Override
    public V get(byte[] key, int keyOffset, int keyLength) {
        return get(hasher.hashCode(key, keyOffset, keyLength), key, keyOffset, keyLength);
    }

    /**
     * Linear-probes from the key's home slot. A {@code null} stored key ends
     * the probe chain (miss); a skipped tombstone is stepped over.
     */
    @Override
    public V get(long hashCode, byte[] key, int keyOffset, int keyLength) {
        BAHState<V> s = state;
        byte[] skipped = s.skipped();
        if (key == null || key == skipped) {
            return null;
        }
        if (s.size() == 0) {
            return null;
        }
        long capacity = s.capacity();
        long start = hash(s, hashCode);
        for (long i = start, j = 0, k = capacity; // stack vars for efficiency
            j < k; // max search for key
            i = (i + 1) % k, j++) { // wraps around table
            byte[] storedKey = s.key(i);
            if (storedKey == skipped) {
                continue;
            }
            if (storedKey == null) {
                return null;
            }
            if (equaler.equals(storedKey, key, keyOffset, keyLength)) {
                return s.value(i);
            }
        }
        return null;
    }

    @Override
    public void remove(byte[] key, int keyOffset, int keyLength) {
        remove(hasher.hashCode(key, keyOffset, keyLength), key, keyOffset, keyLength);
    }

    /**
     * Removes the entry for {@code key} if present, leaving a skipped
     * tombstone so longer probe chains remain reachable. When the slot after
     * the removed one is empty (the chain provably ends there), walks
     * backwards clearing the trailing run of tombstones.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void remove(long hashCode, byte[] key, int keyOffset, int keyLength) {
        BAHState<V> s = state;
        byte[] skipped = s.skipped();
        if (key == null || key == skipped) {
            return;
        }
        if (s.size() == 0) {
            return;
        }
        long capacity = s.capacity();
        long start = hash(s, hashCode);
        for (long i = start, j = 0, k = capacity; // stack vars for efficiency
            j < k; // max search for key
            i = (i + 1) % k, j++) { // wraps around table
            byte[] storedKey = s.key(i);
            if (storedKey == skipped) {
                continue;
            }
            if (storedKey == null) {
                return;
            }
            if (equaler.equals(storedKey, key, keyOffset, keyLength)) {
                s.remove(i, skipped, null);
                long next = (i + 1) % k;
                if (s.key(next) == null) {
                    // Probe chain ends right after i, so trailing tombstones
                    // can never belong to a live chain: scrub them backwards.
                    for (long z = i, y = 0; y < capacity; z = (z + capacity - 1) % k, y++) {
                        if (s.key(z) != skipped) {
                            break;
                        }
                        s.clear(z);
                    }
                }
                return;
            }
        }
    }

    @Override
    public void put(byte[] key, V value) {
        put(hasher.hashCode(key, 0, key.length), key, value);
    }

    /**
     * Inserts or updates {@code key}. Grows the table (doubling capacity) once
     * the load factor reaches one half.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void put(long hashCode, byte[] key, V value) {
        BAHState<V> s = state;
        long capacity = s.capacity();
        if (s.size() * 2 >= capacity) {
            BAHState<V> to = s.allocate(capacity * 2);
            rehash(s, to);
            state = to;
            s = to;
        }
        internalPut(s, hashCode, key, value);
    }

    private void internalPut(BAHState<V> s, long hashCode, byte[] key, V value) {
        long capacity = s.capacity();
        long start = hash(s, hashCode);
        byte[] skipped = s.skipped();
        for (long i = start, j = 0, k = capacity; // stack vars for efficiency
            j < k; // max search for available slot
            i = (i + 1) % k, j++) {
            // wraps around table
            byte[] storedKey = s.key(i);
            if (storedKey == key) {
                // same array reference: overwrite in place, no equality test needed
                s.update(i, key, value);
                return;
            }
            if (storedKey == null || storedKey == skipped) {
                s.link(i, key, value);
                return;
            }
            if (equaler.equals(storedKey, key, 0, key.length)) {
                s.update(i, key, value);
                return;
            }
        }
    }

    /**
     * Re-inserts every live entry of {@code from} into {@code to}.
     * NOTE(review): the tombstone sentinel is taken from {@code to} but
     * compared against keys read from {@code from}; this assumes both states
     * share the same {@code skipped()} instance — confirm in BAHState.
     */
    private void rehash(BAHState<V> from, BAHState<V> to) {
        long i = from.first();
        byte[] skipped = to.skipped();
        while (i != -1) {
            byte[] storedKey = from.key(i);
            if (storedKey != null && storedKey != skipped) {
                long hash = hasher.hashCode(storedKey, 0, storedKey.length);
                internalPut(to, hash, storedKey, from.value(i));
            }
            i = from.next(i);
        }
    }

    /**
     * Streams all live entries to {@code stream}, acquiring {@code semaphore}
     * around each slot read so readers and writers can coordinate.
     *
     * @return {@code false} if the stream callback aborted the iteration
     */
    @Override
    public boolean stream(Semaphore semaphore, KeyValueStream<byte[], V> stream) throws Exception {
        BAHState<V> s = state;
        long c = s.capacity();
        if (c <= 0) {
            return true;
        }
        byte[] skipped = s.skipped();
        long i = s.first();
        while (i != -1) {
            byte[] key;
            V value = null;
            semaphore.acquire();
            try {
                key = s.key(i);
                if (key != null && key != skipped) {
                    value = s.value(i);
                }
            } finally {
                semaphore.release();
            }
            if (key != null && key != skipped) {
                if (!stream.keyValue(key, value)) {
                    return false;
                }
            }
            i = s.next(i);
        }
        return true;
    }

    @Override
    public String toString() {
        return "BAHash{" + "hasher=" + hasher + ", equaler=" + equaler + ", state=" + state + '}';
    }
}
| jivesoftware/jive-utils | collections/src/main/java/com/jivesoftware/os/jive/utils/collections/bah/BAHash.java | Java | apache-2.0 | 7,297 |
package jp.aegif.android.cmis.asynctask;
import java.util.ArrayList;
import java.util.Map;
import org.dom4j.Element;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.AsyncTask;
import jp.aegif.android.cmis.R;
import jp.aegif.android.cmis.ServerInfoActivity;
import jp.aegif.android.cmis.model.Server;
import jp.aegif.android.cmis.repo.CmisProperty;
import jp.aegif.android.cmis.utils.FeedLoadException;
import jp.aegif.android.cmis.utils.FeedUtils;
public class ServerInfoLoadingTask extends AsyncTask<String, Void, Map<String, ArrayList<CmisProperty>>> {
private final Activity activity;
private Server server;
private ProgressDialog pg;
public ServerInfoLoadingTask(Activity activity, Server server) {
super();
this.activity = activity;
this.server = server;
}
@Override
protected void onPreExecute() {
pg = ProgressDialog.show(activity, "", activity.getText(R.string.loading), true);
}
@Override
protected Map<String, ArrayList<CmisProperty>> doInBackground(String... params) {
try {
Element workspace;
Map<String, ArrayList<CmisProperty>> properties = null;
try {
workspace = FeedUtils.getWorkspace(server.getWorkspace(), server.getUrl(), server.getUsername(), server.getPassword());
properties = FeedUtils.getCmisRepositoryProperties(workspace);
} catch (Exception e) {
e.printStackTrace();
}
return properties;
} catch (FeedLoadException fle) {
return null;
}
}
@Override
protected void onPostExecute(Map<String, ArrayList<CmisProperty>> properties) {
//Init View
Intent intent = new Intent(activity, ServerInfoActivity.class);
intent.putExtra("title", "Info " + server.getName());
intent.putParcelableArrayListExtra(Server.INFO_GENERAL, properties.get(Server.INFO_GENERAL));
intent.putParcelableArrayListExtra(Server.INFO_ACL_CAPABILITIES, properties.get(Server.INFO_ACL_CAPABILITIES));
intent.putParcelableArrayListExtra(Server.INFO_CAPABILITIES, properties.get(Server.INFO_CAPABILITIES));
activity.startActivity(intent);
pg.dismiss();
}
@Override
protected void onCancelled() {
pg.dismiss();
}
} | aegif/nemakiware-android-client | app/src/main/java/jp/aegif/android/cmis/asynctask/ServerInfoLoadingTask.java | Java | apache-2.0 | 2,168 |
/*Licensed to The Apereo Foundation under one or more contributor license
agreements. See the NOTICE file distributed with this work for
additional information regarding copyright ownership.
The Apereo Foundation licenses this file to you under the Apache License,
Version 2.0 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package edu.wfu.inotado.marshalobj.sc;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="comment_id" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="created_at" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="grade_id" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="grade_scale_id" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="rubric_criterion" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="rubric_criterion_id" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="score" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="updated_at" type="{http://www.w3.org/2001/XMLSchema}string"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "comment_id",
    "created_at",
    "grade_id",
    "grade_scale_id",
    "id",
    "rubric_criterion",
    "rubric_criterion_id",
    "score",
    "updated_at"
})
@XmlRootElement(name = "GradePart")
public class ScGradePart {

    // NOTE(review): this class follows generated-JAXB conventions (snake_case
    // accessors like getcomment_id). Field/accessor names are part of the
    // binding contract — do not rename them.

    @XmlElement(required = true)
    protected String comment_id;
    @XmlElement(required = true)
    protected String created_at;
    protected int grade_id;
    protected int grade_scale_id;
    protected int id;
    @XmlElement(required = true)
    protected ScRubricCriterion rubric_criterion;
    protected int rubric_criterion_id;
    // NOTE(review): declared double although the schema fragment in the class
    // javadoc says xs:int — confirm which one is authoritative.
    protected double score;
    @XmlElement(required = true)
    protected String updated_at;

    /**
     * Gets the value of the comment_id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getcomment_id() {
        return comment_id;
    }

    /**
     * Sets the value of the comment_id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setcomment_id(String value) {
        this.comment_id = value;
    }

    /**
     * Gets the value of the created_at property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getcreated_at() {
        return created_at;
    }

    /**
     * Sets the value of the created_at property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setcreated_at(String value) {
        this.created_at = value;
    }

    /**
     * Gets the value of the grade_id property.
     *
     */
    public int getgrade_id() {
        return grade_id;
    }

    /**
     * Sets the value of the grade_id property.
     *
     */
    public void setgrade_id(int value) {
        this.grade_id = value;
    }

    /**
     * Gets the value of the grade_scale_id property.
     *
     */
    public int getgrade_scale_id() {
        return grade_scale_id;
    }

    /**
     * Sets the value of the grade_scale_id property.
     *
     */
    public void setgrade_scale_id(int value) {
        this.grade_scale_id = value;
    }

    /**
     * Gets the value of the id property.
     *
     */
    public int getid() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     */
    public void setid(int value) {
        this.id = value;
    }

    /**
     * Gets the value of the rubric_criterion property.
     *
     * @return
     *     possible object is
     *     {@link ScRubricCriterion }
     *
     */
    public ScRubricCriterion getrubric_criterion() {
        return rubric_criterion;
    }

    /**
     * Sets the value of the rubric_criterion property.
     *
     * @param value
     *     allowed object is
     *     {@link ScRubricCriterion }
     *
     */
    public void setrubric_criterion(ScRubricCriterion value) {
        this.rubric_criterion = value;
    }

    /**
     * Gets the value of the rubric_criterion_id property.
     *
     */
    public int getrubric_criterion_id() {
        return rubric_criterion_id;
    }

    /**
     * Sets the value of the rubric_criterion_id property.
     *
     */
    public void setrubric_criterion_id(int value) {
        this.rubric_criterion_id = value;
    }

    /**
     * Gets the value of the score property.
     *
     */
    public double getscore() {
        return score;
    }

    /**
     * Sets the value of the score property.
     *
     */
    public void setscore(double value) {
        this.score = value;
    }

    /**
     * Gets the value of the updated_at property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getupdated_at() {
        return updated_at;
    }

    /**
     * Sets the value of the updated_at property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setupdated_at(String value) {
        this.updated_at = value;
    }
}
| wfuedu/Inotado | inotado-api/api/src/java/edu/wfu/inotado/marshalobj/sc/ScGradePart.java | Java | apache-2.0 | 6,447 |
package com.izhbg.typz.sso.auth.dto;
/**
 * Query bean used when looking up the functions (permission resources)
 * assigned to a role. Field names are pinyin abbreviations: jsDm is presumably
 * the role code, gnzyDm the function-resource code and gnMc the function
 * name — confirm against the SSO auth service that populates this DTO.
 */
public class RoleFunQuery {

    private String jsDm;

    private String gnzyDm;

    private String gnMc;

    private String currentAppId;

    public String getJsDm() {
        return this.jsDm;
    }

    public void setJsDm(String jsDm) {
        this.jsDm = jsDm;
    }

    public String getGnzyDm() {
        return this.gnzyDm;
    }

    public void setGnzyDm(String gnzyDm) {
        this.gnzyDm = gnzyDm;
    }

    public String getGnMc() {
        return this.gnMc;
    }

    public void setGnMc(String gnMc) {
        this.gnMc = gnMc;
    }

    public String getCurrentAppId() {
        return this.currentAppId;
    }

    public void setCurrentAppId(String currentAppId) {
        this.currentAppId = currentAppId;
    }
}
| izhbg/typz | typz-all/typz-sso/src/main/java/com/izhbg/typz/sso/auth/dto/RoleFunQuery.java | Java | apache-2.0 | 743 |
/**
* Copyright 2017-2018 Gregory Moyer and contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.syphr.lametrictime.api.impl;
/**
 * Icon backed by an in-memory byte array, fully initialized at construction
 * time (unlike lazy variants, nothing is computed in the template hooks).
 */
public class DataIcon extends AbstractDataIcon
{
    /**
     * @param mimeType MIME type of the icon payload
     * @param data raw icon bytes; NOTE(review): handed straight to the base
     *             class — unless AbstractDataIcon copies it, callers should
     *             not mutate the array afterwards; confirm.
     */
    public DataIcon(String mimeType, byte[] data)
    {
        setType(mimeType);
        setData(data);
    }

    @Override
    protected void configure()
    {
        // noop - type and data were already set eagerly in the constructor
    }

    @Override
    protected void populateFields()
    {
        // noop - nothing to derive lazily
    }
}
| syphr42/liblametrictime-java | src/main/java/org/syphr/lametrictime/api/impl/DataIcon.java | Java | apache-2.0 | 979 |
/*
* Copyright 2009 Jiemamy Project and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.jiemamy.utils.enhancer;
/**
* {@code public}でないプロダクトのメンバを参照するファクトリ。
* @version $Date$
* @author Suguru ARAKAWA (Gluegent, Inc.)
*/
public class AccessProtectedConstructor implements SingularFactory {

    /**
     * References a non-{@code public} member of the product class, so the
     * enhancement itself is expected to fail.
     */
    public Object newInstance() {
        return new TargetProduct1(true);
    }
}
| Jiemamy/factory-enhancer | src/test/java/org/jiemamy/utils/enhancer/AccessProtectedConstructor.java | Java | apache-2.0 | 1,115 |
package ar.edu.ungs.presentation;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import javax.inject.Inject;
import javax.inject.Named;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import ar.edu.ungs.commons.utils.MessageBox;
import ar.edu.ungs.commons.utils.RolEnum;
import ar.edu.ungs.presentation.componente.DeleteButton;
import ar.edu.ungs.presentation.componente.ImageFrame;
import ar.edu.ungs.presentation.componente.menu.JMenuItemPermission;
import ar.edu.ungs.presentation.componente.menu.JMenuPermission;
import ar.edu.ungs.presentation.controller.BackupDatosController;
import ar.edu.ungs.presentation.controller.CancelarTurnoProfesinalController;
import ar.edu.ungs.presentation.controller.CitasController;
import ar.edu.ungs.presentation.controller.ConfigurarDepositosController;
import ar.edu.ungs.presentation.controller.ConfigurarMailsController;
import ar.edu.ungs.presentation.controller.CrearInternacionUrgenciaController;
import ar.edu.ungs.presentation.controller.CrearPacienteController;
import ar.edu.ungs.presentation.controller.CrearProfesionalController;
import ar.edu.ungs.presentation.controller.CrearTurnoInternacionController;
import ar.edu.ungs.presentation.controller.FacturacionOSController;
import ar.edu.ungs.presentation.controller.HabilitarTurnoController;
import ar.edu.ungs.presentation.controller.ImportadorNomencladorController;
import ar.edu.ungs.presentation.controller.ListadoDepositosController;
import ar.edu.ungs.presentation.controller.ListadoInternacionesController;
import ar.edu.ungs.presentation.controller.ListadoPacientesController;
import ar.edu.ungs.presentation.controller.ListadoProfesionalesController;
import ar.edu.ungs.presentation.controller.ListadoSensoresController;
import ar.edu.ungs.presentation.controller.ListadoTurnoInternacionController;
import ar.edu.ungs.presentation.controller.ListadoUsuariosController;
import ar.edu.ungs.presentation.controller.PlanillaTurnosController;
import ar.edu.ungs.presentation.controller.RecepcionPacienteController;
import ar.edu.ungs.presentation.controller.RecepcionPacienteUrgenciaController;
import ar.edu.ungs.presentation.controller.TurnosAReasignarController;
import ar.edu.ungs.presentation.controller.TurnosProfesionalController;
import ar.edu.ungs.presentation.controller.VisualizarStatusController;
@Named
public class MainPage extends JFrame {
private static final long serialVersionUID = 1L;
@Inject
private PlanillaTurnosController planillaTurnosController;
@Inject
private CancelarTurnoProfesinalController cancelarTurnoProfesinalController;
@Inject
private HabilitarTurnoController habilitarTurnoController;
@Inject
private TurnosAReasignarController pacientesAReasignarController;
@Inject
private ListadoPacientesController listadoPacientesController;
@Inject
private CrearPacienteController crearPacienteController;
@Inject
private ListadoProfesionalesController listadoProfesionalesController;
@Inject
private CrearProfesionalController crearProfesiolanController;
@Inject
private RecepcionPacienteController recepcionPacienteController;
@Inject
private RecepcionPacienteUrgenciaController recepcionPacienteUrgenciaController;
@Inject
private CitasController citasController;
@Inject
private TurnosProfesionalController turnosProfesionalController;
@Inject
private CrearTurnoInternacionController turnoInternacionController;
@Inject
private ListadoTurnoInternacionController listadoTurnoInternacionController;
@Inject
private ListadoInternacionesController listadoInternacionesController;
@Inject
private ListadoSensoresController listadoSensoresController;
@Inject
private ListadoDepositosController listadoDepositosController;
@Inject
private ConfigurarDepositosController confgDepositosController;
@Inject
private ImportadorNomencladorController importadorNomencladorController;
@Inject
private ListadoUsuariosController listadoUsuariosController;
@Inject
private FacturacionOSController facturacionOSController;
@Inject
private VisualizarStatusController visualizarStatusController;
@Inject
private BackupDatosController backupDatosController;
@Inject
private CrearInternacionUrgenciaController crearInternacionUrgenciaController;
@Inject
protected ConfigurarMailsController configurarMailsController;
private JMenuBar barraMenu;
private JButton btnCerrarSesion;
private JLabel lblUsuario;
private JLabel lblRol;
private JPanel panel_1;
private ImageFrame pnlAvatar;
private JPanel pnlUsuarioLogueado;
public MainPage() {
setMinimumSize(new Dimension(700, 500));
setTitle("SiSalud SRL");
setBounds(100, 100, 785, 490);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setExtendedState(getExtendedState() | JFrame.MAXIMIZED_BOTH);
getContentPane().setLayout(null);
setBarraMenu(new JMenuBar());
getContentPane().add(getBarraMenu());
JMenuPermission mnTurnos = new JMenuPermission("Turnos");
mnTurnos.addAcceptedRole(RolEnum.ADMIN.getValue());
mnTurnos.addAcceptedRole(RolEnum.RECEPCION.getValue());
getBarraMenu().add(mnTurnos);
JMenuItemPermission mnHabilitarTurnos = new JMenuItemPermission("Habilitar Turnos");
mnHabilitarTurnos.addAcceptedRole(RolEnum.ADMIN.getValue());
mnHabilitarTurnos.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnHabilitarTurnos.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
habilitarTurnoController.showView();
}
});
mnTurnos.add(mnHabilitarTurnos);
JMenuItemPermission mntmNewMenuItem = new JMenuItemPermission("Gestion Turnos");
mntmNewMenuItem.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmNewMenuItem.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmNewMenuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
planillaTurnosController.showView();
}
});
mnTurnos.add(mntmNewMenuItem);
JMenuItemPermission mnTurnosProfesional = new JMenuItemPermission("Turnos por Profesional");
mnTurnosProfesional.addAcceptedRole(RolEnum.ADMIN.getValue());
mnTurnosProfesional.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnTurnosProfesional.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
turnosProfesionalController.showView();
}
});
mnTurnos.add(mnTurnosProfesional);
JMenuItemPermission mnCancelarTurnoProf = new JMenuItemPermission("Cancelar Turno por Profesional");
mnCancelarTurnoProf.addAcceptedRole(RolEnum.ADMIN.getValue());
mnCancelarTurnoProf.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnCancelarTurnoProf.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
cancelarTurnoProfesinalController.showView();
}
});
mnTurnos.add(mnCancelarTurnoProf);
JMenuItemPermission mntmPacientesReasignar = new JMenuItemPermission("Pacientes a reasignar");
mntmPacientesReasignar.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmPacientesReasignar.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmPacientesReasignar.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
pacientesAReasignarController.showView();
}
});
mnTurnos.add(mntmPacientesReasignar);
JMenuPermission mnInternaciones = new JMenuPermission("Internaciones");
mnInternaciones.addAcceptedRole(RolEnum.ADMIN.getValue());
mnInternaciones.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
getBarraMenu().add(mnInternaciones);
JMenuItemPermission mntmCrearTurnosInternacion = new JMenuItemPermission("Crear Turno de Internación");
mntmCrearTurnosInternacion.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmCrearTurnosInternacion.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmCrearTurnosInternacion.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
turnoInternacionController.showView();
}
});
mnInternaciones.add(mntmCrearTurnosInternacion);
JMenuItemPermission mntmListadoTurnosInternacion = new JMenuItemPermission("Turnos de Internación");
mntmListadoTurnosInternacion.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoTurnosInternacion.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmListadoTurnosInternacion.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoTurnoInternacionController.showView();
}
});
mnInternaciones.add(mntmListadoTurnosInternacion);
JMenuItemPermission mntmListadoInternaciones = new JMenuItemPermission("Internaciones Activas");
mntmListadoInternaciones.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoInternaciones.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmListadoInternaciones.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoInternacionesController.showView();
}
});
mnInternaciones.add(mntmListadoInternaciones);
JMenuItemPermission mntmCrearInternacionUrgencia = new JMenuItemPermission("Crear Internacion de Urgencia");
mntmCrearInternacionUrgencia.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmCrearInternacionUrgencia.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmCrearInternacionUrgencia.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
crearInternacionUrgenciaController.showView();
}
});
mnInternaciones.add(mntmCrearInternacionUrgencia);
JMenuPermission mnPacientes = new JMenuPermission("Pacientes");
mnPacientes.addAcceptedRole(RolEnum.ADMIN.getValue());
mnPacientes.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnPacientes.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
getBarraMenu().add(mnPacientes);
JMenuItemPermission mntmListadoPacientes = new JMenuItemPermission("Listado de pacientes");
mntmListadoPacientes.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoPacientes.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmListadoPacientes.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmListadoPacientes.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoPacientesController.showView();
}
});
mnPacientes.add(mntmListadoPacientes);
JMenuItemPermission mntmCrearPaciente = new JMenuItemPermission("Crear un paciente");
mntmCrearPaciente.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmCrearPaciente.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmCrearPaciente.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmCrearPaciente.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
crearPacienteController.showView();
}
});
mnPacientes.add(mntmCrearPaciente);
JMenuPermission mnProfesionales = new JMenuPermission("Profesionales");
mnProfesionales.addAcceptedRole(RolEnum.ADMIN.getValue());
mnProfesionales.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnProfesionales.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
getBarraMenu().add(mnProfesionales);
JMenuItemPermission mntmListadoProfesionales = new JMenuItemPermission("Listado de profesionales");
mntmListadoProfesionales.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoProfesionales.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmListadoProfesionales.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmListadoProfesionales.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoProfesionalesController.showView();
}
});
mnProfesionales.add(mntmListadoProfesionales);
JMenuItemPermission mntmCrearProfesional = new JMenuItemPermission("Crear un profesional");
mntmCrearProfesional.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmCrearProfesional.addAcceptedRole(RolEnum.RECEPCION.getValue());
mntmCrearProfesional.addAcceptedRole(RolEnum.RECEPCIONINTERNACION.getValue());
mntmCrearProfesional.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
crearProfesiolanController.showView(null);
}
});
mnProfesionales.add(mntmCrearProfesional);
JMenuPermission mnRecepcion = new JMenuPermission("Recepcion");
mnRecepcion.addAcceptedRole(RolEnum.ADMIN.getValue());
mnRecepcion.addAcceptedRole(RolEnum.RECEPCION.getValue());
getBarraMenu().add(mnRecepcion);
JMenuItemPermission mnRegistrarRecepcion = new JMenuItemPermission("Registrar recepcion");
mnRegistrarRecepcion.addAcceptedRole(RolEnum.ADMIN.getValue());
mnRegistrarRecepcion.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnRegistrarRecepcion.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
recepcionPacienteController.showView();
}
});
mnRecepcion.add(mnRegistrarRecepcion);
JMenuItemPermission mnRegistrarUrgencia = new JMenuItemPermission("Registrar urgencia");
mnRegistrarUrgencia.addAcceptedRole(RolEnum.ADMIN.getValue());
mnRegistrarUrgencia.addAcceptedRole(RolEnum.RECEPCION.getValue());
mnRegistrarUrgencia.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
recepcionPacienteUrgenciaController.showView();
}
});
mnRecepcion.add(mnRegistrarUrgencia);
JMenuPermission mnDepositos = new JMenuPermission("Depósitos");
mnDepositos.addAcceptedRole(RolEnum.ADMIN.getValue());
getBarraMenu().add(mnDepositos);
JMenuItemPermission mntmListadoSensores = new JMenuItemPermission("Listado Sensores");
mntmListadoSensores.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoSensores.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoSensoresController.showView();
}
});
mnDepositos.add(mntmListadoSensores);
JMenuItemPermission mntmListadoDepositos = new JMenuItemPermission("Listado Depositos");
mntmListadoDepositos.addAcceptedRole(RolEnum.ADMIN.getValue());
mntmListadoDepositos.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoDepositosController.showView();
}
});
mnDepositos.add(mntmListadoDepositos);
JMenuItemPermission mnitmMailAlerta = new JMenuItemPermission("Configurar EMails de Alerta");
mnitmMailAlerta.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmMailAlerta.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
configurarMailsController.showView();
}
});
mnDepositos.add(mnitmMailAlerta);
// -------------------------------------------------------------
JMenuPermission mnCitas = new JMenuPermission("Citas");
mnCitas.addAcceptedRole(RolEnum.PROFESIONAL.getValue());
getBarraMenu().add(mnCitas);
JMenuItemPermission mntmPantallaCitas = new JMenuItemPermission("Lista de citas");
mntmPantallaCitas.addAcceptedRole(RolEnum.PROFESIONAL.getValue());
mntmPantallaCitas.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
citasController.showView();
}
});
mnCitas.add(mntmPantallaCitas);
// -------------------------------------------------------------
JMenuPermission mnHerramientas = new JMenuPermission("Herramientas");
mnHerramientas.addAcceptedRole(RolEnum.ADMIN.getValue());
getBarraMenu().add(mnHerramientas);
JMenuItemPermission mnitmNomenclador = new JMenuItemPermission("Importacion Nomenclador");
mnitmNomenclador.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmNomenclador.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
importadorNomencladorController.showView();
}
});
mnHerramientas.add(mnitmNomenclador);
JMenuItemPermission mnitmUsuarios = new JMenuItemPermission("Usuarios");
mnitmUsuarios.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmUsuarios.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
listadoUsuariosController.showView();
}
});
mnHerramientas.add(mnitmUsuarios);
JMenuItemPermission mnitmFacturacion = new JMenuItemPermission("Facturacion OS");
mnitmFacturacion.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmFacturacion.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
facturacionOSController.showView();
}
});
mnHerramientas.add(mnitmFacturacion);
JMenuItemPermission mnitmRefrigeracion = new JMenuItemPermission("Refrigeracion");
mnitmRefrigeracion.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmRefrigeracion.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
visualizarStatusController.showView();
}
});
mnHerramientas.add(mnitmRefrigeracion);
JMenuItemPermission mnitmBackup = new JMenuItemPermission("Backup datos");
mnitmBackup.addAcceptedRole(RolEnum.ADMIN.getValue());
mnitmBackup.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
backupDatosController.showView();
}
});
mnHerramientas.add(mnitmBackup);
// -------------------------------------------------------------
JMenu mnAyuda = new JMenu("Ayuda");
getBarraMenu().add(mnAyuda);
JMenuItem mntmAcercaDe = new JMenuItem("Acerca de...");
mntmAcercaDe.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
MessageBox.showInfo("En construccion");
}
});
mnAyuda.add(mntmAcercaDe);
pnlUsuarioLogueado = new JPanel();
pnlUsuarioLogueado.setBounds(10, 57, 347, 321);
getContentPane().add(pnlUsuarioLogueado);
pnlUsuarioLogueado.setLayout(null);
panel_1 = new JPanel();
panel_1.setBounds(44, 234, 247, 33);
pnlUsuarioLogueado.add(panel_1);
panel_1.setLayout(null);
JLabel label1 = new JLabel("Usuario:");
label1.setBounds(0, 0, 86, 33);
panel_1.add(label1);
label1.setFont(new Font("Tahoma", Font.PLAIN, 20));
setLblUsuario(new JLabel(""));
getLblUsuario().setFont(new Font("Tahoma", Font.PLAIN, 16));
setLblRol(new JLabel(""));
getLblRol().setFont(new Font("Tahoma", Font.ITALIC, 18));
pnlUsuarioLogueado.add(getLblRol());
setBtnCerrarSesion(new DeleteButton());
getBtnCerrarSesion().setToolTipText("Cerrar Sesion");
pnlUsuarioLogueado.add(getBtnCerrarSesion());
setPnlAvatar(new ImageFrame("images/profile.png"));
getPnlAvatar().setBounds(98, 28, 150, 150);
pnlUsuarioLogueado.add(getPnlAvatar());
addComponentListener(new ComponentListener() {
@Override
public void componentShown(ComponentEvent arg0) {
// TODO Auto-generated method stub
barraMenu.setSize(arg0.getComponent().getWidth(), barraMenu.getHeight());
pnlUsuarioLogueado.setBounds(arg0.getComponent().getWidth() - pnlUsuarioLogueado.getWidth() - 24,
pnlUsuarioLogueado.getY(), pnlUsuarioLogueado.getWidth(), pnlUsuarioLogueado.getHeight());
}
@Override
public void componentResized(ComponentEvent arg0) {
// TODO Auto-generated method stub
barraMenu.setSize(arg0.getComponent().getWidth(), barraMenu.getHeight());
pnlUsuarioLogueado.setBounds(arg0.getComponent().getWidth() - pnlUsuarioLogueado.getWidth() - 24,
pnlUsuarioLogueado.getY(), pnlUsuarioLogueado.getWidth(), pnlUsuarioLogueado.getHeight());
}
@Override
public void componentMoved(ComponentEvent arg0) {
// TODO Auto-generated method stub
}
@Override
public void componentHidden(ComponentEvent arg0) {
// TODO Auto-generated method stub
}
});
}
public JMenuBar getBarraMenu() {
return barraMenu;
}
public void setBarraMenu(JMenuBar barraMenu) {
this.barraMenu = barraMenu;
barraMenu.setBounds(0, 0, 3812, 50);
}
public JButton getBtnCerrarSesion() {
return btnCerrarSesion;
}
public void setBtnCerrarSesion(JButton btnCerrarSesion) {
this.btnCerrarSesion = btnCerrarSesion;
btnCerrarSesion.setBounds(161, 282, 25, 25);
}
public JLabel getLblUsuario() {
return lblUsuario;
}
public void setLblUsuario(JLabel lblUsuario) {
this.lblUsuario = lblUsuario;
lblUsuario.setBounds(89, 0, 158, 33);
panel_1.add(lblUsuario);
}
public JLabel getLblRol() {
return lblRol;
}
public void setLblRol(JLabel lblRol) {
this.lblRol = lblRol;
lblRol.setHorizontalAlignment(SwingConstants.CENTER);
lblRol.setBounds(98, 181, 150, 33);
}
public ImageFrame getPnlAvatar() {
return pnlAvatar;
}
public void setPnlAvatar(ImageFrame pnlAvatar) {
this.pnlAvatar = pnlAvatar;
}
}
| alefherrera/sisalud | SiSaludSRL/src/main/java/ar/edu/ungs/presentation/MainPage.java | Java | apache-2.0 | 20,799 |
package com.bluespacetech;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.loadbalancer.LoadBalanced;
import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.context.annotation.Bean;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.integration.annotation.Gateway;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.annotation.MessagingGateway;
import org.springframework.messaging.MessageChannel;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
/**
 * Spring Boot entry point for the collaborator web application: an Eureka
 * client that also acts as a Zuul API gateway, hosts Feign clients and scans
 * for Spring Integration messaging gateways.
 */
@SpringBootApplication
@EnableEurekaClient
@EnableZuulProxy
@EnableFeignClients
@IntegrationComponentScan
public class CollaboratorWebAppApplication {
	/**
	 * A {@link RestTemplate} marked for client-side load balancing, so HTTP
	 * calls can address registered services by their logical name.
	 *
	 * @return a new load-balanced RestTemplate bean
	 */
	@LoadBalanced
	@Bean
	RestTemplate restTemplate() {
		return new RestTemplate();
	}
	/**
	 * Boots the Spring application context.
	 *
	 * @param args command-line arguments forwarded to Spring Boot
	 */
	public static void main(final String[] args) {
		SpringApplication.run(CollaboratorWebAppApplication.class, args);
	}
}
/**
 * Spring Integration messaging gateway that publishes contact data to the
 * "output" message channel.
 */
@MessagingGateway
interface ContactWriter {
	/**
	 * Sends the given value to the "output" channel.
	 *
	 * @param rn the value to publish (callers pass the contact's first name)
	 */
	@Gateway(requestChannel = "output")
	void write(String rn);
}
/**
 * Declares the outbound message channel used to publish contact data.
 */
interface ContactChannels {
	/** @return the "output" channel binding */
	@Output
	MessageChannel output();
}
/**
 * Feign client bound to the service registered as "contactservice".
 */
@FeignClient("contactservice")
interface ContactReader {
	/**
	 * Fetches all contacts from the remote service's GET /contacts endpoint.
	 *
	 * @return the list of contacts
	 */
	@RequestMapping(method = RequestMethod.GET, value = "/contacts")
	List<Contact> read();
}
/**
 * API-gateway REST endpoint for contacts: reads them through the
 * {@link ContactReader} Feign client and publishes new contact names through
 * the {@link ContactWriter} messaging gateway.
 */
@RestController
@RequestMapping("/contacts")
class ContactApiGatewayRestController {
	private final ContactReader contactReader;
	private final ContactWriter contactWriter;
	/**
	 * @param contactReader Feign client used to fetch contacts
	 * @param contactWriter messaging gateway used to publish contact names
	 */
	@Autowired
	public ContactApiGatewayRestController(final ContactReader contactReader, final ContactWriter contactWriter) {
		this.contactReader = contactReader;
		this.contactWriter = contactWriter;
	}
	/** Fallback result: an empty collection (kept for the Hystrix command below). */
	public Collection<String> fallback() {
		return new ArrayList<>();
	}
	// @HystrixCommand(fallbackMethod = "fallback")
	/**
	 * Fetches every contact from the remote contact service.
	 *
	 * @return 200 OK carrying the list of contacts
	 */
	@RequestMapping(method = RequestMethod.GET, value = "/names")
	public ResponseEntity<List<Contact>> names() {
		final List<Contact> allContacts = this.contactReader.read();
		return new ResponseEntity<List<Contact>>(allContacts, HttpStatus.OK);
	}
	/**
	 * Publishes the first name of the posted contact to the output channel.
	 *
	 * @param contact the contact supplied in the request body
	 */
	@RequestMapping(method = RequestMethod.POST)
	public void write(@RequestBody final Contact contact) {
		this.contactWriter.write(contact.getFirstName());
	}
}
/**
 * Plain data-transfer object mirroring a contact record from the contact
 * service, including audit fields (creating/updating user and timestamps).
 */
class Contact {
	private Long id;
	private Long version;
	private Timestamp lastUpdatedDate;
	private String lastUpdatedUser;
	private Timestamp creationDate;
	private String createdUser;
	private String firstName;
	private String lastName;
	private String email;
	/** @return the contact's first name */
	public String getFirstName() {
		return this.firstName;
	}
	/** @param firstName the first name to set */
	public void setFirstName(final String firstName) {
		this.firstName = firstName;
	}
	/** @return the contact's last name */
	public String getLastName() {
		return this.lastName;
	}
	/** @param lastName the last name to set */
	public void setLastName(final String lastName) {
		this.lastName = lastName;
	}
	/** @return the contact's e-mail address */
	public String getEmail() {
		return this.email;
	}
	/** @param email the e-mail address to set */
	public void setEmail(final String email) {
		this.email = email;
	}
	/** @return the record identifier */
	public Long getId() {
		return this.id;
	}
	/** @param id the record identifier to set */
	public void setId(final Long id) {
		this.id = id;
	}
	/** @return the optimistic-locking version */
	public Long getVersion() {
		return this.version;
	}
	/** @param version the optimistic-locking version to set */
	public void setVersion(final Long version) {
		this.version = version;
	}
	/** @return the last-update timestamp */
	public Timestamp getLastUpdatedDate() {
		return this.lastUpdatedDate;
	}
	/** @param lastUpdatedDate the last-update timestamp to set */
	public void setLastUpdatedDate(final Timestamp lastUpdatedDate) {
		this.lastUpdatedDate = lastUpdatedDate;
	}
	/** @return the user who last updated the record */
	public String getLastUpdatedUser() {
		return this.lastUpdatedUser;
	}
	/** @param lastUpdatedUser the last-updating user to set */
	public void setLastUpdatedUser(final String lastUpdatedUser) {
		this.lastUpdatedUser = lastUpdatedUser;
	}
	/** @return the creation timestamp */
	public Timestamp getCreationDate() {
		return this.creationDate;
	}
	/** @param creationDate the creation timestamp to set */
	public void setCreationDate(final Timestamp creationDate) {
		this.creationDate = creationDate;
	}
	/** @return the user who created the record */
	public String getCreatedUser() {
		return this.createdUser;
	}
	/** @param createdUser the creating user to set */
	public void setCreatedUser(final String createdUser) {
		this.createdUser = createdUser;
	}
}
package edu.berkeley.cs.succinct.dictionary;
import edu.berkeley.cs.succinct.bitmap.BitMap;
import junit.framework.TestCase;
import java.nio.ByteBuffer;
public class DictionaryTest extends TestCase {

    /**
     * Set up test: initializes the lookup tables the dictionary depends on.
     *
     * @throws Exception
     */
    public void setUp() throws Exception {
        super.setUp();
        Tables.init();
    }

    /**
     * Builds a 2048-bit bitmap whose bits are set independently at random
     * (roughly half of the bits end up set). Extracted because the same
     * construction was duplicated in every test method.
     */
    private static BitMap createRandomBitMap() {
        BitMap bitMap = new BitMap(2048);
        for (int i = 0; i < 2048; i++) {
            if ((int) (Math.random() * 2) == 1) {
                bitMap.setBit(i);
            }
        }
        return bitMap;
    }

    /**
     * Test method: long getRank1(int i)
     * Dictionary rank1 must agree with the reference BitMap implementation
     * at every position.
     *
     * @throws Exception
     */
    public void testGetRank1() throws Exception {
        System.out.println("getRank1");
        BitMap B = createRandomBitMap();
        Dictionary instance = new Dictionary(B);
        for (int i = 0; i < 2048; i++) {
            assertEquals(B.getRank1(i), instance.getRank1(i));
        }
    }

    /**
     * Test method: long getRank0(int i)
     * Dictionary rank0 must agree with the reference BitMap implementation
     * at every position.
     *
     * @throws Exception
     */
    public void testGetRank0() throws Exception {
        System.out.println("getRank0");
        BitMap B = createRandomBitMap();
        Dictionary instance = new Dictionary(B);
        for (int i = 0; i < 2048; i++) {
            assertEquals(B.getRank0(i), instance.getRank0(i));
        }
    }

    /**
     * Test method: ByteBuffer getByteBuffer()
     * The serialized form of a dictionary must never be null.
     *
     * @throws Exception
     */
    public void testGetByteBuffer() throws Exception {
        System.out.println("getByteBuffer");
        ByteBuffer instance = new Dictionary(createRandomBitMap()).getByteBuffer();
        assertNotNull(instance);
    }
}
| SwathiMystery/succinct | core/src/test/java/edu/berkeley/cs/succinct/dictionary/DictionaryTest.java | Java | apache-2.0 | 1,931 |
package markpeng.kaggle.mmc;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
public class InformationGainComputer {

	/** Flush the output buffer to disk once it grows beyond this many chars. */
	private static final int BUFFER_LENGTH = 1000;
	/** Platform line separator used when writing the output file. */
	private static final String newLine = System.getProperty("line.separator");

	/**
	 * Reads the train-label CSV (header skipped) into a map from class label
	 * to the list of document ids that carry that label.
	 *
	 * @param trainLabelFile path of the label CSV ("docId,label" per line)
	 * @return map of label -> document ids
	 * @throws Exception if the file cannot be read
	 */
	public static Hashtable<String, List<String>> readTrainLabel(
			String trainLabelFile) throws Exception {
		// <label, list<doc_ids>>
		Hashtable<String, List<String>> output = new Hashtable<String, List<String>>();
		BufferedReader in = new BufferedReader(new InputStreamReader(
				new FileInputStream(trainLabelFile), "UTF-8"));
		try {
			String aLine = null;
			// skip header line
			in.readLine();
			while ((aLine = in.readLine()) != null) {
				String[] sp = aLine.split(",");
				if (sp != null && sp.length > 0) {
					String fileName = sp[0].replaceAll("\"", "");
					String label = sp[1];
					List<String> docs = output.get(label);
					if (docs == null) {
						docs = new ArrayList<String>();
						output.put(label, docs);
					}
					docs.add(fileName);
				}
			}
		} finally {
			in.close();
		}
		return output;
	}

	/**
	 * Returns the entries of the given map sorted by value in descending
	 * order. All entries are retained even when values tie.
	 *
	 * @param map source map
	 * @return entries sorted by descending value
	 */
	public static <K, V extends Comparable<? super V>> SortedSet<Map.Entry<K, V>> entriesSortedByValues(
			Map<K, V> map) {
		SortedSet<Map.Entry<K, V>> sortedEntries = new TreeSet<Map.Entry<K, V>>(
				new Comparator<Map.Entry<K, V>>() {
					@Override
					public int compare(Map.Entry<K, V> e1, Map.Entry<K, V> e2) {
						int res = e1.getValue().compareTo(e2.getValue());
						// Descending order. BUG FIX: never report two distinct
						// entries as equal (0), otherwise the backing TreeSet
						// silently drops every entry whose value ties with an
						// earlier one.
						return (res != 0) ? -res : 1;
					}
				});
		sortedEntries.addAll(map.entrySet());
		return sortedEntries;
	}

	/**
	 * CLI entry point: computes the information gain of every binary feature
	 * in the n-gram CSV against the 9 malware classes, then writes the topN
	 * highest-scoring features (name,score per line) to the output file.
	 *
	 * Arguments: [label file] [ngram csv file] [output file] [topN]
	 */
	public static void main(String[] args) throws Exception {
		if (args.length < 4) {
			System.out
					.println("Arguments: [label file] [ngram csv file] [output file] [topN]");
			return;
		}
		String trainLabelFile = args[0];
		String csvFile = args[1];
		String outputFile = args[2];
		int topN = Integer.parseInt(args[3]);
		// Total number of training documents in the dataset.
		int trainN = 10868;
		StringBuffer outputStr = new StringBuffer();
		BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
				new FileOutputStream(outputFile, false), "UTF-8"));
		try {
			Hashtable<String, List<String>> labels = readTrainLabel(trainLabelFile);
			// Size and prior probability of each of the 9 classes.
			double[] classSize = new double[9];
			double[] classProb = new double[9];
			for (int i = 0; i < 9; i++) {
				classSize[i] = labels.get(Integer.toString(i + 1)).size();
				classProb[i] = (double) classSize[i] / trainN;
			}
			// read csv file
			BufferedReader in = new BufferedReader(new InputStreamReader(
					new FileInputStream(csvFile), "UTF-8"));
			try {
				String aLine = null;
				// Header layout: docId,<feature names...>,classLabel.
				String header = in.readLine();
				String[] splitted = header.split(",");
				String[] featureNames = Arrays.copyOfRange(splitted, 1,
						splitted.length - 1);
				int featureN = featureNames.length;
				System.out.println("featureN: " + featureN);
				// Per-feature document counts, split by feature value
				// (present / absent) and by class.
				int[] trueCount = new int[featureN];
				int[] falseCount = new int[featureN];
				int[][] classTrueCount = new int[featureN][9];
				int[][] classFalseCount = new int[featureN][9];
				TreeMap<Integer, Double> infoGainTable = new TreeMap<Integer, Double>();
				while ((aLine = in.readLine()) != null) {
					String[] tmp = aLine.trim().split(",");
					String raw = tmp[tmp.length - 1];
					// Tolerate labels written as "classN" instead of "N".
					if (raw.contains("class"))
						tmp[tmp.length - 1] = tmp[tmp.length - 1].substring(5);
					int label = Integer.parseInt(tmp[tmp.length - 1]) - 1;
					int index = 0;
					for (int j = 1; j < tmp.length - 1; j++) {
						int value = Integer.parseInt(tmp[j]);
						if (value > 0) {
							trueCount[index] = trueCount[index] + 1;
							classTrueCount[index][label] = classTrueCount[index][label] + 1;
						} else {
							falseCount[index] = falseCount[index] + 1;
							classFalseCount[index][label] = classFalseCount[index][label] + 1;
						}
						index++;
					}
				}
				// Compute the information gain of each feature over its two
				// possible values (present / absent).
				for (int n = 0; n < featureN; n++) {
					double infoGain = 0.0;
					for (int i = 0; i < 2; i++) {
						if (i == 0) {
							double trueProb = (double) trueCount[n] / trainN;
							System.out.println("trueProb: " + trueProb);
							for (int j = 0; j < 9; j++) {
								double probVC = (double) classTrueCount[n][j]
										/ classSize[j];
								System.out.println("probVC: " + probVC);
								double value = probVC
										* Math.log((double) probVC
												/ (trueProb * classProb[j]));
								// Skip degenerate terms (log of 0, or 0/0).
								if (!Double.isInfinite(value)
										&& !Double.isNaN(value))
									infoGain += value;
							} // end of class loop
						} else {
							double falseProb = (double) falseCount[n] / trainN;
							System.out.println("falseProb: " + falseProb);
							for (int j = 0; j < 9; j++) {
								double probVC = (double) classFalseCount[n][j]
										/ classSize[j];
								System.out.println("probVC: " + probVC);
								double value = probVC
										* Math.log((double) probVC
												/ (falseProb * classProb[j]));
								if (!Double.isInfinite(value)
										&& !Double.isNaN(value))
									infoGain += value;
							} // end of class loop
						}
					} // end of value loop
					System.out.println("Completed feature " + n + ": "
							+ infoGain);
					infoGainTable.put(n, infoGain);
				} // end of ngram loop
				// Rank features by information gain and keep the top N.
				SortedSet<Map.Entry<Integer, Double>> sortedFeatures = entriesSortedByValues(infoGainTable);
				int validN = 0;
				for (Map.Entry<Integer, Double> m : sortedFeatures) {
					int index = m.getKey();
					double infoGain = m.getValue();
					if (!Double.isInfinite(infoGain) && !Double.isNaN(infoGain)) {
						if (validN < topN) {
							outputStr.append(featureNames[index] + ","
									+ infoGain);
							outputStr.append(newLine);
							System.out.println(featureNames[index] + ","
									+ infoGain);
							// Flush periodically to bound memory use.
							if (outputStr.length() >= BUFFER_LENGTH) {
								out.write(outputStr.toString());
								out.flush();
								outputStr.setLength(0);
							}
						} else
							break;
						validN++;
					}
				} // end of feature loop
				System.out.println("Total # of features: " + validN);
			} finally {
				in.close();
			}
		} finally {
			out.write(outputStr.toString());
			out.flush();
			out.close();
		}
	}
}
| guitarmind/play-mahout | src/main/java/markpeng/kaggle/mmc/InformationGainComputer.java | Java | apache-2.0 | 7,921 |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.volley.toolbox;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.http.AndroidHttpClient;
import android.os.Build;
import com.android.volley.Network;
import com.android.volley.RequestQueue;
import java.io.File;
public class Volley {

    /** Default on-disk cache directory. */
    private static final String DEFAULT_CACHE_DIR = "com/android/volley";

    /** Static utility class; not instantiable. */
    private Volley() {
    }

    /**
     * Creates a default instance of the worker pool and calls {@link RequestQueue#start()} on it.
     *
     * @param context A {@link Context} to use for creating the cache dir.
     * @param stack An {@link com.android.volley.toolbox.HttpStack} to use for the network, or null for default.
     * @return A started {@link RequestQueue} instance.
     */
    public static RequestQueue newRequestQueue(Context context, HttpStack stack) {
        File cacheDir = new File(context.getCacheDir(), DEFAULT_CACHE_DIR);

        // Build a "<package>/<versionCode>" user agent for the pre-Gingerbread
        // HttpClient path below.
        String userAgent = "com/android/volley/0";
        try {
            String packageName = context.getPackageName();
            PackageInfo info = context.getPackageManager().getPackageInfo(packageName, 0);
            userAgent = packageName + "/" + info.versionCode;
        } catch (NameNotFoundException ignored) {
            // Our own package should always resolve; if it does not, fall back
            // to the generic default user agent above.
        }

        if (stack == null) {
            if (Build.VERSION.SDK_INT >= 9) {
                stack = new HurlStack();
            } else {
                // Prior to Gingerbread, HttpUrlConnection was unreliable.
                // See: http://android-developers.blogspot.com/2011/09/androids-http-clients.html
                stack = new HttpClientStack(AndroidHttpClient.newInstance(userAgent));
            }
        }

        Network network = new BasicNetwork(stack);

        RequestQueue queue = new RequestQueue(new DiskBasedCache(cacheDir), network);
        queue.start();
        return queue;
    }

    /**
     * Creates a default instance of the worker pool and calls {@link RequestQueue#start()} on it.
     *
     * @param context A {@link Context} to use for creating the cache dir.
     * @return A started {@link RequestQueue} instance.
     */
    public static RequestQueue newRequestQueue(Context context) {
        return newRequestQueue(context, null);
    }
}
| kingslou/CrossBow | Crossbow-Core/src/com/android/volley/toolbox/Volley.java | Java | apache-2.0 | 2,947 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.application.query.editing;
import com.eas.designer.application.query.editing.riddle.RiddleTask;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import net.sf.jsqlparser.expression.NamedParameter;
import net.sf.jsqlparser.schema.Column;
/**
 * Riddle task that never deletes anything: it only walks the expression tree,
 * collecting the named parameters and columns it encounters.
 *
 * @author mg
 */
public class GatherRelationsSubjectsRiddleTask implements RiddleTask {

    // Parameter names seen so far; used to de-duplicate "parameters" by name.
    protected Set<String> parametersNames = new HashSet<>();
    // First NamedParameter instance per name, in encounter order.
    protected Set<NamedParameter> parameters = new LinkedHashSet<>();
    // All columns encountered.
    protected Set<Column> columns = new HashSet<>();

    /** @return the named parameters gathered so far, in encounter order */
    public Set<NamedParameter> getParameters() {
        return this.parameters;
    }

    /** @return the columns gathered so far */
    public Set<Column> getColumns() {
        return this.columns;
    }

    /** Records the expression, then reports that nothing should be deleted. */
    @Override
    public boolean needToDelete(Object aExpression) {
        observe(aExpression);
        return false;
    }

    /** No-op: this task never marks expressions as deleted. */
    @Override
    public void markAsDeleted(Object aExpression) {
    }

    /** Always false: this task never deletes expressions. */
    @Override
    public boolean markedAsDeleted(Object aExpression) {
        return false;
    }

    /**
     * Collects the expression if it is a named parameter (keeping only the
     * first occurrence of each name) or a column; anything else is ignored.
     */
    @Override
    public void observe(Object aExpression) {
        if (aExpression instanceof NamedParameter) {
            NamedParameter parameter = (NamedParameter) aExpression;
            // Set.add reports whether the name was new, so the contains-check
            // and the insert happen in one step.
            if (parametersNames.add(parameter.getName())) {
                parameters.add(parameter);
            }
        } else if (aExpression instanceof Column) {
            columns.add((Column) aExpression);
        }
    }
}
| jskonst/PlatypusJS | designer/PlatypusQueries/src/com/eas/designer/application/query/editing/GatherRelationsSubjectsRiddleTask.java | Java | apache-2.0 | 1,581 |
package org.docksidestage.hanger.simpleflute.dto.customize;
import org.docksidestage.hanger.simpleflute.dto.bs.customize.BsDoubleByteOnSqlDto;
/**
* The entity of DoubleByteOnSql.
* <p>
* You can implement your original methods here.
* This class remains when re-generating.
* </p>
* @author DBFlute(AutoGenerator)
*/
public class DoubleByteOnSqlDto extends BsDoubleByteOnSqlDto {

    /** Serial version UID. (Default) */
    private static final long serialVersionUID = 1L;
    // Intentionally empty otherwise: hand-written extensions of the generated
    // base class go here and survive re-generation.
}
| dbflute-test/dbflute-test-active-hanger | src/main/java/org/docksidestage/hanger/simpleflute/dto/customize/DoubleByteOnSqlDto.java | Java | apache-2.0 | 487 |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.money;
import java.io.Serializable;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.joda.convert.FromString;
import org.joda.convert.ToString;
import com.google.common.collect.ImmutableSet;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.UniqueId;
import com.opengamma.id.UniqueIdentifiable;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.NamedInstance;
import com.opengamma.util.PublicAPI;
/**
* A unit of currency.
* <p>
* This class represents a unit of currency such as the British Pound, Euro or US Dollar.
* <p>
* This class is immutable and thread-safe.
*/
@PublicAPI
public final class Currency implements ObjectIdentifiable, UniqueIdentifiable, Comparable<Currency>, NamedInstance, Serializable {
/** Serialization version. */
private static final long serialVersionUID = 1L;
/**
* A cache of instances.
*/
private static final ConcurrentMap<String, Currency> INSTANCE_MAP = new ConcurrentHashMap<>();
/**
* The scheme to use in object identifiers.
*/
public static final String OBJECT_SCHEME = "CurrencyISO";
// a selection of commonly traded, stable currencies
/**
* The currency 'USD' - United States Dollar.
*/
public static final Currency USD = of("USD");
/**
* The currency 'EUR' - Euro.
*/
public static final Currency EUR = of("EUR");
/**
* The currency 'JPY' - Japanese Yen.
*/
public static final Currency JPY = of("JPY");
/**
* The currency 'GBP' - British pound.
*/
public static final Currency GBP = of("GBP");
/**
* The currency 'EUR' - Swiss Franc.
*/
public static final Currency CHF = of("CHF");
/**
* The currency 'AUD' - Australian Dollar.
*/
public static final Currency AUD = of("AUD");
/**
* The currency 'CAD' - Canadian Dollar.
*/
public static final Currency CAD = of("CAD");
// a selection of other currencies
/**
* The currency 'NZD' - New Zealand Dollar.
*/
public static final Currency NZD = of("NZD");
/**
* The currency 'DKK' - Danish Krone.
*/
public static final Currency DKK = of("DKK");
/**
* The currency 'DEM' - Deutsche Mark.
*/
public static final Currency DEM = of("DEM");
/**
* The currency 'CZK' - Czeck Krona.
*/
public static final Currency CZK = of("CZK");
/**
* The currency 'SEK' - Swedish Krona.
*/
public static final Currency SEK = of("SEK");
/**
* The currency 'SKK' - Slovak Korona.
*/
public static final Currency SKK = of("SKK");
/**
* The currency 'ITL' - Italian Lira.
*/
public static final Currency ITL = of("ITL");
/**
* The currency 'HUF' = Hugarian Forint.
*/
public static final Currency HUF = of("HUF");
/**
* The currency 'FRF' - French Franc.
*/
public static final Currency FRF = of("FRF");
/**
* The currency 'NOK' - Norwegian Krone.
*/
public static final Currency NOK = of("NOK");
/**
* The currency 'HKD' - Hong Kong Dollar.
*/
public static final Currency HKD = of("HKD");
/**
* The currency 'BRL' - Brazil Dollar.
*/
public static final Currency BRL = of("BRL");
/**
* The currency 'ZAR' - South African Rand.
*/
public static final Currency ZAR = of("ZAR");
/**
* The currency 'PLN' - Polish Zloty.
*/
public static final Currency PLN = of("PLN");
/**
* The currency 'SGD' - Singapore Dollar.
*/
public static final Currency SGD = of("SGD");
/**
* The currency 'MXN' - Mexican Peso.
*/
public static final Currency MXN = of("MXN");
/**
* The currency code, not null.
*/
private final String _code;
//-----------------------------------------------------------------------
  /**
   * Lists the available currencies.
   *
   * @return an immutable set containing all registered currencies, not null
   */
  public static Set<Currency> getAvailableCurrencies() {
    // Snapshot of the registry at the time of the call; later registrations
    // are not reflected in the returned set.
    return ImmutableSet.copyOf(INSTANCE_MAP.values());
  }
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Currency} matching the specified JDK currency.
* <p>
* This converts the JDK currency instance to a currency unit using the code.
*
* @param currency the currency, not null
* @return the singleton instance, not null
*/
public static Currency of(final java.util.Currency currency) {
ArgumentChecker.notNull(currency, "currency");
return of(currency.getCurrencyCode());
}
/**
* Obtains an instance of {@code Currency} for the specified ISO-4217
* three letter currency code dynamically creating a currency if necessary.
* <p>
* A currency is uniquely identified by ISO-4217 three letter code.
* This method creates the currency if it is not known.
*
* @param currencyCode the three letter currency code, ASCII and upper case, not null
* @return the singleton instance, not null
* @throws IllegalArgumentException if the currency code is not three letters
*/
@FromString
public static Currency of(final String currencyCode) {
ArgumentChecker.notNull(currencyCode, "currencyCode");
// check cache before matching
final Currency previous = INSTANCE_MAP.get(currencyCode);
if (previous != null) {
return previous;
}
if (!currencyCode.matches("[A-Z][A-Z][A-Z]")) {
throw new IllegalArgumentException("Invalid currency code: " + currencyCode);
}
INSTANCE_MAP.putIfAbsent(currencyCode, new Currency(currencyCode));
return INSTANCE_MAP.get(currencyCode);
}
/**
* Parses a string to obtain a {@code Currency}.
* <p>
* The parse is identical to {@link #of(String)} except that it will convert
* letters to upper case first.
*
* @param currencyCode the three letter currency code, ASCII, not null
* @return the singleton instance, not null
* @throws IllegalArgumentException if the currency code is not three letters
*/
public static Currency parse(final String currencyCode) {
ArgumentChecker.notNull(currencyCode, "currencyCode");
return of(currencyCode.toUpperCase(Locale.ENGLISH));
}
//-------------------------------------------------------------------------
  /**
   * Restricted constructor.
   * <p>
   * Instances are obtained via the static factories so that the cache in
   * {@code INSTANCE_MAP} remains the single source of instances.
   *
   * @param currencyCode the three letter currency code, not null
   */
  private Currency(final String currencyCode) {
    _code = currencyCode;
  }
  /**
   * Ensure singleton on deserialization.
   * <p>
   * Invoked by Java serialization; routing through {@link #of(String)} returns
   * the cached instance instead of the freshly deserialized object.
   *
   * @return the singleton, not null
   */
  private Object readResolve() {
    return of(_code);
  }
//-------------------------------------------------------------------------
  /**
   * Gets the three letter ISO code.
   * <p>
   * The {@code @ToString} annotation marks this value as the canonical
   * string form of the currency (presumably for Joda-Convert — confirm).
   *
   * @return the three letter ISO code, not null
   */
  @ToString
  public String getCode() {
    return _code;
  }
//-------------------------------------------------------------------------
  /**
   * Gets the object identifier for the currency.
   * <p>
   * This uses the scheme {@link #OBJECT_SCHEME CurrencyISO} with the ISO code
   * as the value.
   *
   * @return the object identifier, not null
   */
  @Override
  public ObjectId getObjectId() {
    return ObjectId.of(OBJECT_SCHEME, _code);
  }
  /**
   * Gets the unique identifier for the currency.
   * <p>
   * This uses the scheme {@link #OBJECT_SCHEME CurrencyISO} with the ISO code
   * as the value.
   *
   * @return the unique identifier, not null
   */
  @Override
  public UniqueId getUniqueId() {
    return UniqueId.of(OBJECT_SCHEME, _code);
  }
//-----------------------------------------------------------------------
  /**
   * Gets the JDK currency instance equivalent to this currency.
   * <p>
   * This attempts to convert a {@code Currency} to a JDK {@code Currency};
   * the lookup is performed by the ISO code held by this instance.
   *
   * @return the JDK currency instance, not null
   * @throws IllegalArgumentException if no matching currency exists in the JDK
   */
  public java.util.Currency toCurrency() {
    return java.util.Currency.getInstance(_code);
  }
//-----------------------------------------------------------------------
  /**
   * Compares this currency to another by alphabetical comparison of the code.
   * <p>
   * The ordering is consistent with {@link #equals(Object)}, as both are
   * based solely on the code.
   *
   * @param other the other currency, not null
   * @return negative if earlier alphabetically, 0 if equal, positive if greater alphabetically
   */
  @Override
  public int compareTo(final Currency other) {
    return _code.compareTo(other._code);
  }
/**
* Checks if this currency equals another currency.
* <p>
* The comparison checks the three letter currency code.
*
* @param obj the other currency, null returns false
* @return true if equal
*/
@Override
public boolean equals(final Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof Currency) {
return _code.equals(((Currency) obj)._code);
}
return false;
}
  /**
   * Returns a suitable hash code for the currency.
   * <p>
   * Based on the code only, keeping it consistent with {@link #equals(Object)}.
   *
   * @return the hash code
   */
  @Override
  public int hashCode() {
    return _code.hashCode();
  }
//-----------------------------------------------------------------------
  /**
   * Gets the three letter currency code as a string.
   * <p>
   * This is the same value returned by {@link #getCode()}.
   *
   * @return the three letter currency code, not null
   */
  @Override
  public String toString() {
    return _code;
  }
  /**
   * Gets the name of the currency, defined here as the three letter ISO code.
   *
   * @return the three letter ISO code, not null
   */
  @Override
  public String getName() {
    return getCode();
  }
}
| McLeodMoores/starling | projects/util/src/main/java/com/opengamma/util/money/Currency.java | Java | apache-2.0 | 9,526 |
/*
Copyright 2018 - 2020 Volker Berlin (i-net software)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.inetsoftware.jwebassembly.module;
import java.util.Iterator;
import javax.annotation.Nonnull;
import de.inetsoftware.classparser.Member;
import de.inetsoftware.jwebassembly.WasmException;
import de.inetsoftware.jwebassembly.wasm.AnyType;
import de.inetsoftware.jwebassembly.wasm.ValueTypeParser;
/**
* Described the name of WebAssembly function.
*
* @author Volker Berlin
*
*/
/**
 * Describes the name of a WebAssembly function.
 *
 * @author Volker Berlin
 *
 */
public class FunctionName {

    /**
     * The Java class name like "java/lang/String".
     */
    @Nonnull
    public final String className;

    /**
     * The method name like "hashCode".
     */
    @Nonnull
    public final String methodName;

    /**
     * The name in the WebAssembly. For example: "java/lang/String.hashCode"
     */
    @Nonnull
    public final String fullName;

    /**
     * The Java signature which is used in Java byte code to reference the method call. For example: "java/lang/String.hashCode()I"
     */
    @Nonnull
    public final String signatureName;

    /**
     * The signature part. For example: "()I"
     */
    @Nonnull
    public final String signature;

    /**
     * Create a new instance from the given reference in the ConstantPool or parsed method.
     *
     * @param methodOrField
     *            the Java method
     */
    FunctionName( @Nonnull Member methodOrField ) {
        this( methodOrField, methodOrField.getType() );
    }

    /**
     * Create a new instance from the given reference in the ConstantPool and a special signature.
     *
     * @param methodOrField
     *            the Java method
     * @param signature
     *            the Java signature
     */
    FunctionName( @Nonnull Member methodOrField, String signature ) {
        this( methodOrField.getClassName(), methodOrField.getName(), signature );
    }

    /**
     * Create a new instance from the given values.
     *
     * @param className
     *            the Java class name
     * @param methodName
     *            the Java method name
     * @param signature
     *            the Java signature
     */
    FunctionName( String className, String methodName, String signature ) {
        this.className = className;
        this.methodName = methodName;
        this.fullName = className + '.' + methodName;
        this.signatureName = fullName + signature;
        this.signature = signature;
    }

    /**
     * Create a new instance by parsing a full signature string.
     *
     * @param signatureName
     *            the full Java method signature like "com/foo/Bar.method()V"
     */
    public FunctionName( String signatureName ) {
        try {
            int idx1 = signatureName.indexOf( '.' );
            this.className = signatureName.substring( 0, idx1 );
            int idx2 = signatureName.indexOf( '(', idx1 );
            this.methodName = signatureName.substring( idx1 + 1, idx2 );
            this.fullName = signatureName.substring( 0, idx2 );
            this.signatureName = signatureName;
            this.signature = signatureName.substring( idx2 );
        } catch( IndexOutOfBoundsException ex ) {
            // a missing '.' or '(' makes one of the substring calls fail
            throw WasmException.create( "Invalid method signature: " + signatureName, ex );
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        return signatureName.hashCode();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals( Object obj ) {
        if( this == obj ) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        // Synthetic functions should compare equal to real functions with the
        // same signature, hence only the signature name is compared.
        if( !(obj instanceof FunctionName) ) {
            return false;
        }
        FunctionName other = (FunctionName)obj;
        return signatureName.equals( other.signatureName );
    }

    /**
     * Get the method signature iterator for parameter and return values.
     *
     * @param types
     *            the type manager
     * @return the iterator
     */
    @Nonnull
    public Iterator<AnyType> getSignature( TypeManager types ) {
        return new ValueTypeParser( signature, types );
    }
}
| i-net-software/JWebAssembly | src/de/inetsoftware/jwebassembly/module/FunctionName.java | Java | apache-2.0 | 4,747 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.pagemem.wal.record;
import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.pagemem.FullPageId;
import org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager;
import org.apache.ignite.internal.pagemem.wal.WALIterator;
import org.apache.ignite.internal.pagemem.wal.WALPointer;
import org.apache.ignite.internal.processors.cache.persistence.wal.FileDescriptor;
import org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.wal.record.RecordUtils;
import org.junit.Test;
import org.mockito.internal.matchers.apachecommons.ReflectionEquals;
import static org.apache.ignite.configuration.DataStorageConfiguration.DFLT_PAGE_SIZE;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.ZIP_SUFFIX;
/**
* Tests of serialization and deserialization of all WAL record types {@link org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType}.
*
* It checks that all records can be successfully deserialized from early serialized record included serialization via
* compaction.
*/
public class WALRecordSerializationTest extends GridCommonAbstractTest {
    /** Wal segment size. */
    private static final int WAL_SEGMENT_SIZE = 4 * 1024 * 1024;

    /** Whether WAL compaction is enabled in the configuration of the started node. */
    private boolean compactionEnabled;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String name) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(name);

        cfg.setDataStorageConfiguration(new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(new DataRegionConfiguration()
                .setPersistenceEnabled(true)
                .setMaxSize(200 * 1024 * 1024))
            .setWalSegmentSize(WAL_SEGMENT_SIZE)
            .setWalCompactionEnabled(compactionEnabled));

        cfg.setConsistentId(name);

        return cfg;
    }

    /** {@inheritDoc} **/
    @Override protected void beforeTest() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} **/
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();
    }

    /**
     * Logs one record of every type, stops the node, and verifies all logged
     * records can be read back from the WAL.
     *
     * @throws Exception If fail.
     */
    @Test
    public void testAllWalRecordsSerializedAndDeserializedSuccessfully() throws Exception {
        compactionEnabled = false;

        IgniteEx ignite = startGrid(0);

        ignite.cluster().active(true);

        WALRecord.RecordType[] recordTypes = WALRecord.RecordType.values();

        List<ReflectionEquals> serializedRecords = new ArrayList<>();

        IgniteWriteAheadLogManager wal = ignite.context().cache().context().wal();

        ignite.context().cache().context().database().checkpointReadLock();

        try {
            for (WALRecord.RecordType recordType : recordTypes) {
                WALRecord record = RecordUtils.buildWalRecord(recordType);

                if (RecordUtils.isIncludeIntoLog(record)) {
                    serializedRecords.add(new ReflectionEquals(record, "prev", "pos",
                        "updateCounter" //updateCounter for PartitionMetaStateRecord isn't serialized.
                    ));

                    wal.log(record);
                }
            }

            wal.flush(null, true);
        }
        finally {
            ignite.context().cache().context().database().checkpointReadUnlock();
        }

        stopGrid(0);

        assertAllExpectedRecordsFound(wal, serializedRecords);
    }

    /**
     * Same as {@link #testAllWalRecordsSerializedAndDeserializedSuccessfully()},
     * but additionally forces WAL archive compression and verifies that logical
     * records survive the compress/decompress round trip.
     *
     * @throws Exception If fail.
     */
    @Test
    public void testAllWalRecordsSerializedCompressedAndThenDeserializedSuccessfully() throws Exception {
        compactionEnabled = true;

        IgniteEx ignite = startGrid(0);

        ignite.cluster().active(true);

        WALRecord.RecordType[] recordTypes = WALRecord.RecordType.values();

        List<ReflectionEquals> serializedRecords = new ArrayList<>();

        IgniteWriteAheadLogManager wal = ignite.context().cache().context().wal();

        WALPointer lastPointer = null;

        ignite.context().cache().context().database().checkpointReadLock();

        try {
            for (WALRecord.RecordType recordType : recordTypes) {
                WALRecord record = RecordUtils.buildWalRecord(recordType);

                if (RecordUtils.isIncludeIntoLog(record) && (recordType.purpose() == WALRecord.RecordPurpose.LOGICAL ||
                    recordType == WALRecord.RecordType.CHECKPOINT_RECORD)) {
                    serializedRecords.add(new ReflectionEquals(record, "prev", "pos",
                        "updateCounter" //updateCounter for PartitionMetaStateRecord isn't serialized.
                    ));

                    lastPointer = wal.log(record);
                }
            }

            wal.flush(null, true);
        }
        finally {
            ignite.context().cache().context().database().checkpointReadUnlock();
        }

        String nodeFolderName = ignite.context().pdsFolderResolver().resolveFolders().folderName();

        File nodeArchiveDir = Paths.get(
            U.resolveWorkDirectory(U.defaultWorkDirectory(), "db", false).getAbsolutePath(),
            "wal",
            "archive",
            nodeFolderName
        ).toFile();

        File walSegment = new File(nodeArchiveDir, FileDescriptor.fileName(((FileWALPointer)lastPointer).index()));
        File walZipSegment = new File(nodeArchiveDir, FileDescriptor.fileName(((FileWALPointer)lastPointer).index()) + ZIP_SUFFIX);

        // Spam WAL to move all data records to compressible WAL zone.
        for (int i = 0; i < WAL_SEGMENT_SIZE / DFLT_PAGE_SIZE * 2; i++)
            wal.log(new PageSnapshot(new FullPageId(-1, -1), new byte[DFLT_PAGE_SIZE], 1));

        ignite.getOrCreateCache("generateDirtyPages");

        // WAL archive segment is allowed to be compressed when it's at least one checkpoint away from current WAL head.
        ignite.context().cache().context().database().wakeupForCheckpoint("Forced checkpoint").get();
        ignite.context().cache().context().database().wakeupForCheckpoint("Forced checkpoint").get();

        for (int i = 0; i < WAL_SEGMENT_SIZE / DFLT_PAGE_SIZE * 2; i++)
            wal.log(new PageSnapshot(new FullPageId(-1, -1), new byte[DFLT_PAGE_SIZE], 1));

        // Awaiting of zipping of the desirable segment.
        assertTrue(GridTestUtils.waitForCondition(walZipSegment::exists, 15_000));

        // Awaiting of removing of the desirable segment.
        assertTrue(GridTestUtils.waitForCondition(() -> !walSegment.exists(), 15_000));

        stopGrid(0);

        assertAllExpectedRecordsFound(wal, serializedRecords);
    }

    /**
     * Replays the whole WAL and asserts that every expected record appears, in
     * the order it was logged. Extracted from the two tests above, which
     * previously duplicated this verification loop.
     *
     * @param wal WAL manager used for replay.
     * @param serializedRecords Matchers for the logged records, in log order.
     * @throws Exception If replay fails.
     */
    private void assertAllExpectedRecordsFound(IgniteWriteAheadLogManager wal,
        List<ReflectionEquals> serializedRecords) throws Exception {
        Iterator<ReflectionEquals> serializedIter = serializedRecords.iterator();

        ReflectionEquals curExpRecord = serializedIter.hasNext() ? serializedIter.next() : null;

        try (WALIterator iter = wal.replay(null)) {
            while (iter.hasNext()) {
                WALRecord record = iter.nextX().get2();

                if (curExpRecord != null && curExpRecord.matches(record))
                    curExpRecord = serializedIter.hasNext() ? serializedIter.next() : null;
            }
        }

        assertNull("Expected record '" + curExpRecord + "' not found.", curExpRecord);
    }
}
| SomeFire/ignite | modules/core/src/test/java/org/apache/ignite/internal/pagemem/wal/record/WALRecordSerializationTest.java | Java | apache-2.0 | 9,160 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.lang.sqlpp.rewrites.visitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.ILangExpression;
import org.apache.asterix.lang.common.expression.CallExpr;
import org.apache.asterix.lang.common.expression.FieldAccessor;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
import org.apache.asterix.lang.sqlpp.clause.FromClause;
import org.apache.asterix.lang.sqlpp.clause.FromTerm;
import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
import org.apache.asterix.lang.sqlpp.clause.SelectClause;
import org.apache.asterix.lang.sqlpp.clause.SelectElement;
import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
/**
* An AST pre-processor to rewrite group-by sugar queries, which does the following transformations:
* 1. Rewrite the argument expression of an aggregation function into a subquery if the argument
* expression is not a subquery;
* 2. Turn a SQL-92 aggregate function into a SQL++ core aggregate function when performing 1.
*/
// For example, this visitor turns the following query
//
// FROM Employee e
// JOIN Incentive i ON e.job_category = i.job_category
// JOIN SuperStars s ON e.id = s.id
// GROUP BY e.department_id AS deptId
// GROUP AS eis(e AS e, i AS i, s AS s)
// SELECT deptId as deptId, SUM(e.salary + i.bonus) AS star_cost;
//
// into the following core-version query:
//
// FROM Employee e
// JOIN Incentive i ON e.job_category = i.job_category
// JOIN SuperStars s ON e.id = s.id
// GROUP BY e.department_id AS deptId
// GROUP AS eis(e AS e, i AS i, s AS s)
// SELECT ELEMENT {
// 'deptId': deptId,
// 'star_cost': array_sum( (FROM eis AS p SELECT ELEMENT p.e.salary + p.i.bonus) )
// };
//
// where SUM(e.salary + i.bonus) is turned into array_sum( (FROM eis AS p SELECT ELEMENT p.e.salary + p.i.bonus) ).
public class SqlppGroupBySugarVisitor extends AbstractSqlppExpressionScopingVisitor {

    // The GROUP AS variable whose content the rewritten aggregate subqueries iterate over.
    private final Expression groupVar;

    // Variables that are recorded as fields inside the group variable
    // (e.g. e, i, s in the example above).
    private final Collection<VariableExpr> fieldVars;

    /**
     * Creates a visitor that rewrites SQL-92 aggregate calls into SQL++ core aggregates.
     *
     * @param context the rewriting context, used to generate fresh variables
     * @param groupVar the group variable introduced by the GROUP AS clause
     * @param fieldVars the variables captured as fields of the group variable
     */
    public SqlppGroupBySugarVisitor(LangRewritingContext context, Expression groupVar,
            Collection<VariableExpr> fieldVars) {
        super(context);
        this.groupVar = groupVar;
        this.fieldVars = fieldVars;
    }

    /**
     * Rewrites a call expression: for SQL-92 aggregate functions each argument is
     * wrapped into a subquery over the group variable, and the function signature is
     * renamed to the SQL++ core counterpart (e.g., SUM becomes array_sum).
     *
     * @param callExpr the call expression to rewrite (mutated in place)
     * @param arg visitor argument, passed through to child visits
     * @return the (possibly rewritten) call expression
     * @throws CompilationException if the rewrite fails
     */
    @Override
    public Expression visit(CallExpr callExpr, ILangExpression arg) throws CompilationException {
        List<Expression> newExprList = new ArrayList<>();
        FunctionSignature signature = callExpr.getFunctionSignature();
        boolean aggregate = FunctionMapUtil.isSql92AggregateFunction(signature);
        boolean rewritten = false;
        for (Expression expr : callExpr.getExprList()) {
            // Only arguments of SQL-92 aggregates are wrapped; other arguments pass through.
            Expression newExpr = aggregate ? wrapAggregationArgument(expr) : expr;
            // Identity comparison: wrapAggregationArgument returns a new object when it rewrote.
            rewritten |= newExpr != expr;
            newExprList.add(newExpr.accept(this, arg));
        }
        if (rewritten) {
            // Rewrites the SQL-92 function name to core functions,
            // e.g., SUM --> array_sum
            callExpr.setFunctionSignature(FunctionMapUtil.sql92ToCoreAggregateFunction(signature));
        }
        callExpr.setExprList(newExprList);
        return callExpr;
    }

    /**
     * Wraps an aggregate argument into a SELECT ELEMENT subquery iterating over the
     * group variable, replacing references to grouped field variables with field
     * accesses on the fresh iteration variable.
     *
     * @param argExpr the original aggregate argument
     * @return a subquery expression evaluating the argument per group member
     * @throws CompilationException if variable substitution fails
     */
    private Expression wrapAggregationArgument(Expression argExpr) throws CompilationException {
        Expression expr = argExpr;
        Set<VariableExpr> freeVars = SqlppRewriteUtil.getFreeVariable(expr);
        VariableExpr fromBindingVar = new VariableExpr(context.newVariable());
        // FROM <groupVar> AS <fromBindingVar>
        FromTerm fromTerm = new FromTerm(groupVar, fromBindingVar, null, null);
        FromClause fromClause = new FromClause(Collections.singletonList(fromTerm));
        // Maps field variable expressions to field accesses.
        Map<Expression, Expression> varExprMap = new HashMap<>();
        for (VariableExpr usedVar : freeVars) {
            // Reference to a field in the group variable.
            if (fieldVars.contains(usedVar)) {
                // Rewrites to a reference to a field in the group variable.
                varExprMap.put(usedVar, new FieldAccessor(fromBindingVar,
                        SqlppVariableUtil.toUserDefinedVariableName(usedVar.getVar())));
            }
        }
        // Select clause.
        SelectElement selectElement =
                new SelectElement(SqlppRewriteUtil.substituteExpression(expr, varExprMap, context));
        SelectClause selectClause = new SelectClause(selectElement, null, false);
        // Construct the select expression.
        SelectBlock selectBlock = new SelectBlock(selectClause, fromClause, null, null, null, null, null);
        SelectSetOperation selectSetOperation = new SelectSetOperation(new SetOperationInput(selectBlock, null), null);
        // NOTE(review): the final boolean appears to mark this SelectExpression as a
        // subquery — confirm against the SelectExpression constructor.
        return new SelectExpression(null, selectSetOperation, null, null, true);
    }
}
| ty1er/incubator-asterixdb | asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java | Java | apache-2.0 | 6,393 |
package br.com.dlp.jazzav.anuncio.status;
import br.com.dlp.jazzav.anuncio.PaymentStatusEnum;
import br.com.uol.pagseguro.domain.TransactionStatus;
/**
 * Ad-hoc manual check, run via {@code main}, that prints the ordinals of the
 * local {@link PaymentStatusEnum} next to the integer values reported by
 * PagSeguro's {@link TransactionStatus}.
 * NOTE(review): appears to assume each printed pair should match — confirm
 * against the PagSeguro status-code mapping.
 */
public class EnumStatusTest {
	/**
	 * Prints ordinal/value pairs for visual comparison.
	 *
	 * @param args command line arguments, unused
	 */
	public static void main(String[] args) {
		// Each adjacent pair of lines is expected to show the same number;
		// a mismatch would indicate the local enum ordering has drifted.
		System.out.println(PaymentStatusEnum.valueOf("IN_DISPUTE").ordinal());
		System.out.println(TransactionStatus.IN_DISPUTE.getValue());
		System.out.println(PaymentStatusEnum.valueOf("WAITING_PAYMENT").ordinal());
		System.out.println(TransactionStatus.WAITING_PAYMENT.getValue());
	}
}
| darciopacifico/omr | modules/JazzAV/bsn/src/main/test/br/com/dlp/jazzav/anuncio/status/EnumStatusTest.java | Java | apache-2.0 | 585 |
package com.wonder.todotest_mvp.utils;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import com.wonder.todotest_mvp.R;
/**
* Created by wonder on 2016/11/24.
* 提供给Activity方法去加载它们的UI
*/
/**
 * Static helper methods that Activities use to load their UI.
 */
public class ActivityUtils {

    /**
     * Static utility class; not meant to be instantiated.
     */
    private ActivityUtils() {
        throw new AssertionError("No instances");
    }

    /**
     * Checks whether the device is considered a tablet, based on the
     * {@code isTablet} boolean resource.
     *
     * @param context context used to resolve the resource, not null
     * @return true if the current configuration qualifies as a tablet
     */
    public static boolean isTablet(Context context){
        return context.getResources().getBoolean(R.bool.isTablet);
    }

    /**
     * Adds {@code fragment} to the container view identified by {@code frameId}
     * and commits the transaction.
     *
     * @param fragmentManager manager used to perform the transaction, not null
     * @param fragment fragment to add, not null
     * @param frameId id of the container view hosting the fragment
     */
    public static void addFragmentToActivity(@NonNull FragmentManager fragmentManager,
                                             @NonNull Fragment fragment, int frameId){
        checkNotNull(fragmentManager);
        checkNotNull(fragment);
        FragmentTransaction transaction = fragmentManager.beginTransaction();
        transaction.add(frameId,fragment);
        transaction.commit();
    }

    /**
     * Returns {@code reference} if it is non-null, otherwise throws.
     *
     * @param reference value to check
     * @param <T> type of the checked reference
     * @return the non-null reference
     * @throws NullPointerException if {@code reference} is null
     */
    public static <T> T checkNotNull(T reference) {
        // Guard clause instead of if/else keeps the happy path unindented.
        if (reference == null) {
            throw new NullPointerException();
        }
        return reference;
    }

    /**
     * Returns {@code reference} if it is non-null, otherwise throws with the
     * given message.
     *
     * @param reference value to check
     * @param errorMessage exception message; converted with {@code String.valueOf}
     * @param <T> type of the checked reference
     * @return the non-null reference
     * @throws NullPointerException if {@code reference} is null
     */
    public static <T> T checkNotNull(T reference, @Nullable Object errorMessage) {
        if (reference == null) {
            throw new NullPointerException(String.valueOf(errorMessage));
        }
        return reference;
    }
}
| lzf-lamer/TODO | app/src/main/java/com/wonder/todotest_mvp/utils/ActivityUtils.java | Java | apache-2.0 | 1,481 |
package com.baxi.agrohelper.service;
/*-
* #%L
* agro-helper
* %%
* Copyright (C) 2017 University of Debrecen, Faculty of Informatics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
* #L%
*/
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baxi.agrohelper.dao.GenericDaoInterface;
import com.baxi.agrohelper.model.VarietyName;
/**
*
* Implementation of the {@code VarietyNameService} interface.
*
* @author Gergely Szabó
*
*/
public class VarietyNameServiceImpl implements VarietyNameService {

    // Loggers are constants: static AND final.
    private static final Logger logger = LoggerFactory.getLogger(VarietyNameServiceImpl.class);

    // DAO used for all VarietyName persistence operations; assigned once in the constructor.
    private final GenericDaoInterface<VarietyName, Integer> varietyNameDao;

    /**
     * Constructs a newly allocated {@code VarietyNameServiceImpl} object, and initializes its DAO.
     *
     * @param varietyNameDao {@link com.baxi.agrohelper.dao.GenericDaoInterface} object for initialization
     */
    public VarietyNameServiceImpl(GenericDaoInterface<VarietyName, Integer> varietyNameDao) {
        this.varietyNameDao = varietyNameDao;
    }

    /** {@inheritDoc} Persists the given entity. */
    @Override
    public VarietyName createVarietyName(VarietyName varietyName) {
        logger.info("Creating VARIETYNAME {}", varietyName.getName());
        varietyNameDao.persist(varietyName);
        return varietyName;
    }

    /** {@inheritDoc} Deletes the entity with the given id and returns it. */
    @Override
    public VarietyName deleteVarietyName(int id) {
        VarietyName varietyName = varietyNameDao.findById(id);
        logger.warn("Deleting VARIETYNAME {}", varietyName.getName());
        varietyNameDao.delete(varietyName);
        return varietyName;
    }

    /** {@inheritDoc} */
    @Override
    public VarietyName findVarietyNameById(int id) {
        return varietyNameDao.findById(id);
    }

    /** {@inheritDoc} */
    @Override
    public VarietyName updateVarietyName(VarietyName varietyName) {
        logger.info("Updating VARIETYNAME {}", varietyName.getName());
        varietyNameDao.update(varietyName);
        return varietyName;
    }

    /** {@inheritDoc} */
    @Override
    public List<VarietyName> findAllVarietyNames() {
        return varietyNameDao.findAll();
    }

    /** {@inheritDoc} Returns the distinct names of all varieties. */
    @Override
    public List<String> getAllVarietyNames() {
        return varietyNameDao.findAll().stream()
                .map(VarietyName::getName)
                .distinct()
                .collect(Collectors.toList());
    }
}
| Baxi0116/agro-helper | src/main/java/com/baxi/agrohelper/service/VarietyNameServiceImpl.java | Java | apache-2.0 | 2,641 |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 2011-02-17
*
*******************************************************************************/
package org.oscm.internal.types.exception;
import org.oscm.internal.types.exception.beans.ApplicationExceptionBean;
/**
* Exception thrown when a supplier, broker, or reseller organization cannot be
* added because it already exists.
*
*/
/**
 * Exception thrown when a supplier, broker, or reseller organization cannot be
 * added because it already exists.
 *
 */
public class OrganizationAlreadyExistsException extends
        SaaSApplicationException {

    // Keep stable so serialized forms remain compatible across releases.
    private static final long serialVersionUID = -5634737007588314715L;

    /**
     * Constructs a new exception with <code>null</code> as its detail message.
     * The cause is not initialized.
     */
    public OrganizationAlreadyExistsException() {

    }

    /**
     * Constructs a new exception with the specified detail message. The cause
     * is not initialized.
     *
     * @param message
     *            the detail message
     */
    public OrganizationAlreadyExistsException(final String message) {
        super(message);
    }

    /**
     * Constructs a new exception with the specified detail message and bean for
     * JAX-WS exception serialization.
     *
     * @param message
     *            the detail message
     * @param bean
     *            the bean for JAX-WS exception serialization
     */
    public OrganizationAlreadyExistsException(String message,
            ApplicationExceptionBean bean) {
        super(message, bean);
    }

    /**
     * Constructs a new exception with the specified detail message, cause, and
     * bean for JAX-WS exception serialization.
     *
     * @param message
     *            the detail message
     * @param bean
     *            the bean for JAX-WS exception serialization
     * @param cause
     *            the cause
     */
    public OrganizationAlreadyExistsException(String message,
            ApplicationExceptionBean bean, Throwable cause) {
        super(message, bean, cause);
    }
}
| opetrovski/development | oscm-extsvc-internal/javasrc/org/oscm/internal/types/exception/OrganizationAlreadyExistsException.java | Java | apache-2.0 | 2,231 |
/*
* Copyright (C) 2016 FormKiQ Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.formkiq.forms.dto;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
/**
* Class FormJson Section.
*
*/
/**
 * Class FormJson Section.
 *
 */
public class FormJSONSection implements Serializable {

    /** serialVersionUID. */
    private static final long serialVersionUID = 1416599460301680510L;

    /** Title of section. */
    private String title;

    /** UUID of Section. */
    private String uuid;

    /** {@link FormJSONSectionType}. */
    @JsonDeserialize(using = FormJSONSectionTypeDeserializer.class)
    private FormJSONSectionType type;

    /** List of Fields. */
    private List<FormJSONField> fields = new ArrayList<>();

    /**
     * default constructor.
     */
    public FormJSONSection() {
    }

    /**
     * Gets the Fields in the section.
     * <p>
     * Note: this returns the internal, mutable list; changes made by the
     * caller are reflected in this section.
     * @return List<FormJSONField>
     */
    public List<FormJSONField> getFields() {
        return this.fields;
    }

    /**
     * Get the Title of a Section.
     * @return {@link String}
     */
    public String getTitle() {
        return this.title;
    }

    /**
     * Whether to hide the section. A section is hidden when it contains no
     * visible field; a section without any fields is therefore hidden.
     * @return boolean
     */
    public boolean isHide() {
        // allMatch is vacuously true for an empty field list, which matches the
        // original "no visible field found" semantics.
        return this.fields.stream().allMatch(FormJSONField::isHide);
    }

    /**
     * Set the Fields in a Section.
     * @param list List<FormJSONField>
     */
    public void setFields(final List<FormJSONField> list) {
        this.fields = list;
    }

    /**
     * Set the Title of a Section.
     * @param text String
     */
    public void setTitle(final String text) {
        this.title = text;
    }

    /**
     * Gets the type of the section.
     * @return {@link FormJSONSectionType}
     */
    public FormJSONSectionType getType() {
        return this.type;
    }

    /**
     * Sets the type of the section.
     * @param sectiontype {@link FormJSONSectionType}
     */
    public void setType(final FormJSONSectionType sectiontype) {
        this.type = sectiontype;
    }

    /**
     * Gets UUID of @{link FormJSONSection}.
     * @return {@link String}
     */
    public String getUUID() {
        return this.uuid;
    }

    /**
     * Sets Form UUID.
     * @param id {@link String}
     */
    public void setUUID(final String id) {
        this.uuid = id;
    }
}
| formkiq/formkiq-server | forms/src/main/java/com/formkiq/forms/dto/FormJSONSection.java | Java | apache-2.0 | 2,896 |
package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.ovirt.engine.core.common.businessentities.DiskImage;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VmType;
import org.ovirt.engine.core.common.businessentities.network;
import org.ovirt.engine.core.common.businessentities.network_cluster;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.compat.StringHelper;
import org.ovirt.engine.core.compat.TimeZoneInfo;
import org.ovirt.engine.core.compat.WindowsJavaTimezoneMapping;
import org.ovirt.engine.core.dal.comparators.DiskImageByBootComparator;
import org.ovirt.engine.core.dal.comparators.DiskImageByDriveMappingComparator;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.utils.log.Log;
import org.ovirt.engine.core.utils.log.LogFactory;
import org.ovirt.engine.core.utils.vmproperties.VmPropertiesUtils;
import org.ovirt.engine.core.vdsbroker.xmlrpc.XmlRpcStruct;
public abstract class VmInfoBuilderBase {

    protected static Log log = LogFactory.getLog(VmInfoBuilderBase.class);
    // Struct sent to VDSM to create/run the VM; populated incrementally by the build* methods.
    protected XmlRpcStruct createInfo;
    protected VM vm;
    // IDE supports only 4 slots , slot 2 is preserved by VDSM to the CDROM
    protected int[] ideIndexSlots = new int[] { 0, 1, 3 };

    /**
     * Populates the creation struct with the general VM properties: identity, memory,
     * CPU topology, SPICE/SSL security parameters, custom properties, hibernation state,
     * keyboard layout and display tweaks.
     */
    protected void buildVmProperties() {
        createInfo.add(VdsProperties.vm_guid, vm.getId().toString());
        createInfo.add(VdsProperties.vm_name, vm.getvm_name());
        createInfo.add(VdsProperties.mem_size_mb, vm.getvm_mem_size_mb());
        // String.valueOf replaces the deprecated, allocation-heavy "new Integer(x).toString()".
        createInfo.add(VdsProperties.num_of_cpus, String.valueOf(vm.getnum_of_cpus()));
        if (Config.<Boolean> GetValue(ConfigValues.SendSMPOnRunVm)) {
            createInfo.add(VdsProperties.cores_per_socket, String.valueOf(vm.getcpu_per_socket()));
        }
        createInfo.add(VdsProperties.emulatedMachine, Config.<String> GetValue(
                ConfigValues.EmulatedMachine, vm
                        .getvds_group_compatibility_version().toString()));
        // send cipher suite and spice secure channels parameters only if ssl
        // enabled.
        if (Config.<Boolean> GetValue(ConfigValues.SSLEnabled)) {
            createInfo.add(VdsProperties.spiceSslCipherSuite,
                    Config.<String> GetValue(ConfigValues.CipherSuite));
            createInfo.add(VdsProperties.SpiceSecureChannels,
                    Config.<String> GetValue(ConfigValues.SpiceSecureChannels));
        }
        createInfo.add(VdsProperties.kvmEnable, vm.getkvm_enable().toString()
                .toLowerCase());
        createInfo.add(VdsProperties.acpiEnable, vm.getacpi_enable().toString()
                .toLowerCase());
        createInfo.add(VdsProperties.Custom,
                VmPropertiesUtils.getVMProperties(vm.getStaticData()));
        createInfo.add(VdsProperties.vm_type, "kvm"); // "qemu", "kvm"
        if (vm.getRunAndPause()) {
            createInfo.add(VdsProperties.launch_paused_param, "true");
        }
        if (vm.getvds_group_cpu_flags_data() != null) {
            createInfo.add(VdsProperties.cpuType,
                    vm.getvds_group_cpu_flags_data());
        }
        createInfo.add(VdsProperties.niceLevel, String.valueOf(vm.getnice_level()));
        // A suspended VM resumes from its hibernation volume.
        if (vm.getstatus() == VMStatus.Suspended
                && !StringHelper.isNullOrEmpty(vm.gethibernation_vol_handle())) {
            createInfo.add(VdsProperties.hiberVolHandle,
                    vm.gethibernation_vol_handle());
        }
        createInfo.add(VdsProperties.KeyboardLayout,
                Config.<String> GetValue(ConfigValues.VncKeyboardLayout));
        if (vm.getvm_os().isLinux()) {
            createInfo.add(VdsProperties.PitReinjection, "false");
        }
        if (vm.getdisplay_type() == DisplayType.vnc) {
            createInfo.add(VdsProperties.TabletEnable, "true");
        }
        createInfo.add(VdsProperties.transparent_huge_pages,
                vm.getTransparentHugePages() ? "true" : "false");
    }

    /**
     * Adds the display network name to the creation struct, if the VM's cluster has a
     * network cluster flagged as the display network and a matching network exists.
     */
    protected void buildVmNetworkCluster() {
        // set Display network
        List<network_cluster> all = DbFacade.getInstance()
                .getNetworkClusterDAO().getAllForCluster(vm.getvds_group_id());
        network_cluster networkCluster = null;
        for (network_cluster tempNetworkCluster : all) {
            if (tempNetworkCluster.getis_display()) {
                networkCluster = tempNetworkCluster;
                break;
            }
        }
        if (networkCluster != null) {
            network net = null;
            List<network> allNetworks = DbFacade.getInstance().getNetworkDAO()
                    .getAll();
            for (network tempNetwork : allNetworks) {
                if (tempNetwork.getId().equals(networkCluster.getnetwork_id())) {
                    net = tempNetwork;
                    break;
                }
            }
            if (net != null) {
                createInfo.add(VdsProperties.displaynetwork, net.getname());
            }
        }
    }

    /** Adds initrd/kernel boot options to the creation struct when configured. */
    protected void buildVmBootOptions() {
        // Boot Options
        if (!StringHelper.isNullOrEmpty(vm.getinitrd_url())) {
            createInfo.add(VdsProperties.InitrdUrl, vm.getinitrd_url());
        }
        if (!StringHelper.isNullOrEmpty(vm.getkernel_url())) {
            createInfo.add(VdsProperties.KernelUrl, vm.getkernel_url());
            // kernel params are only meaningful together with a kernel URL
            if (!StringHelper.isNullOrEmpty(vm.getkernel_params())) {
                createInfo.add(VdsProperties.KernelParams,
                        vm.getkernel_params());
            }
        }
    }

    /**
     * Adds the UTC offset (seconds) to the creation struct: the dynamic utc_diff when
     * present, otherwise computed from the VM's Windows-style timezone name mapped to a
     * Java zone id (offset 0 when the mapping is unknown).
     */
    protected void buildVmTimeZone() {
        // send vm_dynamic.utc_diff if exist, if not send vm_static.time_zone
        if (vm.getutc_diff() != null) {
            createInfo.add(VdsProperties.utc_diff, vm.getutc_diff().toString());
        } else {
            // get vm timezone
            String timeZone = TimeZoneInfo.Local.getId();
            if (!StringHelper.isNullOrEmpty(vm.gettime_zone())) {
                timeZone = vm.gettime_zone();
            }
            // convert to java & calculate offset
            String javaZoneId = WindowsJavaTimezoneMapping.windowsToJava
                    .get(timeZone);
            int offset = 0;
            if (javaZoneId != null) {
                offset = (TimeZone.getTimeZone(javaZoneId).getOffset(
                        new Date().getTime()) / 1000);
            }
            createInfo.add(VdsProperties.utc_diff, "" + offset);
        }
    }

    /**
     * Returns the VM's disk images ordered by drive mapping, with bootable disks first
     * (important for IDE, where the boot disk must land on index 0).
     */
    protected List<DiskImage> getSortedDiskImages() {
        // order first by drive numbers and then order by boot for the bootable
        // drive to be first (important for IDE to be index 0) !
        List<DiskImage> diskImages = new ArrayList<DiskImage>(vm.getDiskMap()
                .values());
        Collections.sort(diskImages, new DiskImageByDriveMappingComparator());
        Collections.sort(diskImages,
                Collections.reverseOrder(new DiskImageByBootComparator()));
        return diskImages;
    }

    /**
     * Resolves the sound device type for desktop VMs from the per-OS configuration
     * string, falling back to the "default" entry; server VMs always get "default".
     */
    protected String getSoundDevice() {
        // NOTE(review): "%1s" parses as a width-1 '%s' conversion, not the positional
        // "%1$s"; with the single non-empty argument passed here the output is identical,
        // so the original template is kept for behavioral compatibility.
        final String OS_REGEX = "^.*%1s,([^,]*).*$";
        final String DEFAULT_TYPE = "default";
        String ret = DEFAULT_TYPE;
        if (vm.getvm_type() == VmType.Desktop) {
            String soundDeviceTypeConfig = Config.<String> GetValue(
                    ConfigValues.DesktopAudioDeviceType, vm
                            .getvds_group_compatibility_version().toString());
            String vmOS = vm.getos().name();
            Pattern regexPattern = Pattern.compile(String
                    .format(OS_REGEX, vmOS));
            Matcher regexMatcher = regexPattern.matcher(soundDeviceTypeConfig);
            if (regexMatcher.find()) {
                ret = regexMatcher.group(1);
            } else {
                // no OS-specific entry: retry with the "default" entry
                regexPattern = Pattern.compile(String.format(OS_REGEX,
                        DEFAULT_TYPE));
                regexMatcher = regexPattern.matcher(soundDeviceTypeConfig);
                if (regexMatcher.find()) {
                    ret = regexMatcher.group(1);
                }
            }
        }
        return ret;
    }

    /** Logs that the ISCSI disk interface type is not supported by this builder. */
    protected void logUnsupportedInterfaceType() {
        log.error("Unsupported interface type, ISCSI interface type is not supported.");
    }

    // Hooks implemented by version/cluster-specific builders.
    protected abstract void buildVmVideoCards();

    protected abstract void buildVmCD();

    protected abstract void buildVmFloppy();

    protected abstract void buildVmDrives();

    protected abstract void buildVmNetworkInterfaces();

    protected abstract void buildVmSoundDevices();

    protected abstract void buildUnmanagedDevices();

    protected abstract void buildVmBootSequence();
}
| Dhandapani/gluster-ovirt | backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/vdsbroker/VmInfoBuilderBase.java | Java | apache-2.0 | 9,219 |
package practice;
import java.util.Collection;
import java.util.Collections;

import org.springframework.beans.BeanUtils;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
/**
 * Adapts the domain {@code User} to Spring Security's {@link UserDetails} contract.
 * Accounts are always considered enabled, unlocked and non-expired.
 */
public class AuthorizedUserDetails extends User implements UserDetails {

    /**
     * Creates the adapter by copying all bean properties of the given user onto this instance.
     *
     * @param user the authenticated domain user; must not be {@code null}
     */
    public AuthorizedUserDetails(User user) {
        BeanUtils.copyProperties(user, this);
    }

    /**
     * {@inheritDoc}
     *
     * <p>This application does not use role-based authorization, so an empty collection is
     * returned. The {@link UserDetails} contract forbids returning {@code null} here —
     * doing so can cause NullPointerExceptions inside Spring Security's voters.
     */
    @Override
    public Collection<? extends GrantedAuthority> getAuthorities() {
        return Collections.emptyList();
    }

    /** Always {@code true}: account expiry is not tracked. */
    @Override
    public boolean isAccountNonExpired() {
        return true;
    }

    /** Always {@code true}: account locking is not tracked. */
    @Override
    public boolean isAccountNonLocked() {
        return true;
    }

    /** Always {@code true}: credential expiry is not tracked. */
    @Override
    public boolean isCredentialsNonExpired() {
        return true;
    }

    /** Always {@code true}: all users are enabled. */
    @Override
    public boolean isEnabled() {
        return true;
    }
}
| tagbangers/spring-best-practices | spring-best-practice-security-multi-tenancy/src/main/java/practice/AuthorizedUserDetails.java | Java | apache-2.0 | 750 |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.dirspooler;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import com.google.common.base.Preconditions;
import com.streamsets.pipeline.api.PushSource;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService;
import com.streamsets.pipeline.lib.io.DirectoryPathCreationWatcher;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import static com.streamsets.pipeline.lib.dirspooler.PathMatcherMode.GLOB;
import static com.streamsets.pipeline.lib.dirspooler.PathMatcherMode.REGEX;
public class DirectorySpooler {
  private static final Logger LOG = LoggerFactory.getLogger(DirectorySpooler.class);
  // Name of the gauge counting files waiting in the queue.
  private static final String PENDING_FILES = "pending.files";
  // Immutable configuration captured from the Builder at construction time.
  private final PushSource.Context context;
  private final String spoolDir;
  private final int maxSpoolFiles;
  private final String pattern;
  private final PathMatcherMode pathMatcherMode;
  private final FilePostProcessing postProcessing;
  private final String archiveDir;
  private final long archiveRetentionMillis;
  private final String errorArchiveDir;
  // When true, files are ordered by last-modified timestamp; otherwise lexicographically by name.
  private final boolean useLastModified;
  private final Comparator<Path> pathComparator;
  private final boolean processSubdirectories;
  // What to do with a file once it has been fully consumed.
  public enum FilePostProcessing {NONE, DELETE, ARCHIVE}
  /**
   * Entry point for fluent construction of a {@link DirectorySpooler}.
   */
  public static Builder builder() {
    return new Builder();
  }
  /**
   * Fluent builder for {@link DirectorySpooler}. All setters validate eagerly; {@link #build()}
   * re-checks that the mandatory pieces (context, dir, max files, pattern — and archive dir
   * when ARCHIVE post-processing is selected) were provided.
   */
  public static class Builder {
    private PushSource.Context context;
    private String spoolDir;
    private int maxSpoolFiles;
    private String pattern;
    // GLOB is the default file-name matching mode.
    private PathMatcherMode pathMatcherMode = PathMatcherMode.GLOB;
    private FilePostProcessing postProcessing;
    private String archiveDir;
    private long archiveRetentionMillis;
    private String errorArchiveDir;
    private boolean waitForPathAppearance;
    private boolean useLastModifiedTimestamp;
    private boolean processSubdirectories;
    private Builder() {
      // Consumed files are left in place unless configured otherwise.
      postProcessing = FilePostProcessing.NONE;
    }
    /** Sets the (mandatory) stage context used for metrics and preview detection. */
    public Builder setContext(PushSource.Context context) {
      this.context = Preconditions.checkNotNull(context, "context cannot be null");
      return this;
    }
    /** Sets the (mandatory) absolute spool directory path. */
    public Builder setDir(String dir) {
      final String spoolDirInput = Preconditions.checkNotNull(dir, "dir cannot be null");
      final Path spoolDirPath = FileSystems.getDefault().getPath(spoolDirInput);
      Preconditions.checkArgument(spoolDirPath.isAbsolute(), Utils.formatL("dir '{}' must be an absolute path", dir));
      // normalize path to ensure no trailing slash
      spoolDir = spoolDirPath.toString();
      return this;
    }
    /** Sets the (mandatory) cap on queued/scanned files; exceeding it aborts the scan. */
    public Builder setMaxSpoolFiles(int maxSpoolFiles) {
      Preconditions.checkArgument(maxSpoolFiles > 0, "maxSpoolFiles must be greater than zero");
      this.maxSpoolFiles = maxSpoolFiles;
      return this;
    }
    /** Sets the (mandatory) file-name pattern; interpreted per the path-matcher mode. */
    public Builder setFilePattern(String pattern) {
      this.pattern = Preconditions.checkNotNull(pattern, "pattern cannot be null");
      return this;
    }
    /** Chooses GLOB or REGEX interpretation of the file pattern. */
    public Builder setPathMatcherMode(PathMatcherMode mode) {
      this.pathMatcherMode = Preconditions.checkNotNull(mode, "path matcher mode cannot be null");
      return this;
    }
    /** Sets what happens to files after they are consumed (NONE/DELETE/ARCHIVE). */
    public Builder setPostProcessing(FilePostProcessing postProcessing) {
      this.postProcessing = Preconditions.checkNotNull(postProcessing, "postProcessing mode cannot be null");
      return this;
    }
    /** Sets the absolute archive directory (mandatory when post-processing is ARCHIVE). */
    public Builder setArchiveDir(String dir) {
      this.archiveDir = Preconditions.checkNotNull(dir, "dir cannot be null");
      Preconditions.checkArgument(new File(dir).isAbsolute(), Utils.formatL("dir '{}' must be an absolute path", dir));
      return this;
    }
    /** Sets how long archived files are kept before purging; 0 disables the purger. */
    public Builder setArchiveRetention(long minutes) {
      return setArchiveRetention(minutes, TimeUnit.MINUTES);
    }
    //for testing only
    Builder setArchiveRetention(long time, TimeUnit unit) {
      Preconditions.checkArgument(time >= 0, "archive retention must be zero or greater");
      Preconditions.checkNotNull(unit, "archive retention unit cannot be null");
      archiveRetentionMillis = TimeUnit.MILLISECONDS.convert(time, unit);
      return this;
    }
    /** Sets the absolute directory where files that failed processing are moved. */
    public Builder setErrorArchiveDir(String dir) {
      this.errorArchiveDir = Preconditions.checkNotNull(dir, "edir cannot be null");
      Preconditions.checkArgument(new File(dir).isAbsolute(), Utils.formatL("dir '{}' must be an absolute path", dir));
      return this;
    }
    /** When true, the spooler tolerates the spool dir not existing yet and polls for it. */
    public Builder waitForPathAppearance(boolean waitForPathAppearance) {
      this.waitForPathAppearance = waitForPathAppearance;
      return this;
    }
    /** When true (and timestamp ordering is on), subdirectories are scanned recursively. */
    public Builder processSubdirectories(boolean processSubdirectories) {
      this.processSubdirectories = processSubdirectories;
      return this;
    }
    /** When true, files are consumed oldest-mtime-first instead of name order. */
    public Builder setUseLastModifiedTimestamp(boolean useLastModifiedTimestamp) {
      this.useLastModifiedTimestamp = useLastModifiedTimestamp;
      return this;
    }
    /** Validates mandatory settings and creates the spooler (not yet initialized). */
    public DirectorySpooler build() {
      Preconditions.checkArgument(context != null, "context not specified");
      Preconditions.checkArgument(spoolDir != null, "spool dir not specified");
      Preconditions.checkArgument(maxSpoolFiles > 0, "max spool files not specified");
      Preconditions.checkArgument(pattern != null, "file pattern not specified");
      if (postProcessing == FilePostProcessing.ARCHIVE) {
        Preconditions.checkArgument(archiveDir != null, "archive dir not specified");
      }
      return new DirectorySpooler(
          context,
          spoolDir,
          maxSpoolFiles,
          pattern,
          pathMatcherMode,
          postProcessing,
          archiveDir,
          archiveRetentionMillis,
          errorArchiveDir,
          waitForPathAppearance,
          useLastModifiedTimestamp,
          processSubdirectories
      );
    }
  }
  /**
   * Convenience constructor: waits for the spool path to appear and orders files by
   * name (not by last-modified timestamp).
   */
  public DirectorySpooler(
      PushSource.Context context,
      String spoolDir,
      int maxSpoolFiles,
      String pattern,
      PathMatcherMode pathMatcherMode,
      FilePostProcessing postProcessing,
      String archiveDir,
      long archiveRetentionMillis,
      String errorArchiveDir,
      boolean processSubdirectories
  ) {
    this(
        context,
        spoolDir,
        maxSpoolFiles,
        pattern,
        pathMatcherMode,
        postProcessing,
        archiveDir,
        archiveRetentionMillis,
        errorArchiveDir,
        true,
        false,
        processSubdirectories
    );
  }
  /**
   * Full constructor; normally reached through {@link Builder}. Also builds the path
   * comparator used for queue ordering: mtime-then-name when {@code useLastModified}
   * is set, plain name order otherwise.
   */
  public DirectorySpooler(
      PushSource.Context context,
      String spoolDir,
      int maxSpoolFiles,
      String pattern,
      PathMatcherMode pathMatcherMode,
      FilePostProcessing postProcessing,
      String archiveDir,
      long archiveRetentionMillis,
      String errorArchiveDir,
      boolean waitForPathAppearance,
      final boolean useLastModified,
      boolean processSubdirectories
  ) {
    this.context = context;
    this.spoolDir = spoolDir;
    this.maxSpoolFiles = maxSpoolFiles;
    this.pattern = pattern;
    this.pathMatcherMode = pathMatcherMode;
    this.postProcessing = postProcessing;
    this.archiveDir = archiveDir;
    this.archiveRetentionMillis = archiveRetentionMillis;
    this.errorArchiveDir = errorArchiveDir;
    this.waitForPathAppearance = waitForPathAppearance;
    this.useLastModified = useLastModified;
    this.processSubdirectories = processSubdirectories;
    pathComparator = new Comparator<Path>() {
      @Override
      public int compare(Path file1, Path file2) {
        try {
          if (useLastModified) {
            // if comparing with folder last modified timestamp, always return true
            if (file2.toString().isEmpty()) {
              return 1;
            }
            // Older mtime sorts first; ties fall through to name comparison below.
            int compares = Files.getLastModifiedTime(file1).compareTo(Files.getLastModifiedTime(file2));
            if (compares != 0) {
              return compares;
            }
          }
          return file1.getFileName().compareTo(file2.getFileName());
        } catch (NoSuchFileException ex) {
          // Logged later, so don't log here.
          // Wrapped so callers (see compare(Path,Path)) can unwrap and rethrow it.
          throw new RuntimeException(ex);
        } catch (IOException ex) {
          LOG.warn("Could not sort files due to IO Exception", ex);
          throw new RuntimeException(ex);
        }
      }
    };
  }
  // File currently being (or about to be) processed; volatile — read by the finder thread.
  private volatile Path currentFile;
  // Resolved absolute paths for the configured directories (set in init()).
  private Path spoolDirPath;
  private Path archiveDirPath;
  private Path errorArchiveDirPath;
  private PathMatcher fileMatcher;
  // Queue of files pending consumption, ordered by pathComparator (or natural order).
  private PriorityBlockingQueue<Path> filesQueue;
  // Last file handed to the caller; post-processed on the next poolForFile() call.
  private Path previousFile;
  // Runs the periodic FileFinder and (optionally) the FilePurger.
  private ScheduledExecutorService scheduledExecutor;
  private boolean waitForPathAppearance;
  private Meter spoolQueueMeter;
  private Counter pendingFilesCounter;
  private volatile boolean running;
  volatile FilePurger purger;
  volatile FileFinder finder;
  /**
   * Fails fast (IllegalStateException) when a configured directory is unusable:
   * it must be an absolute path to an existing directory.
   */
  private void checkBaseDir(Path path) {
    Preconditions.checkState(path.isAbsolute(), Utils.formatL("Path '{}' is not an absolute path", path));
    Preconditions.checkState(Files.exists(path), Utils.formatL("Path '{}' does not exist", path));
    Preconditions.checkState(Files.isDirectory(path), Utils.formatL("Path '{}' is not a directory", path));
  }
public static PathMatcher createPathMatcher(String pattern, PathMatcherMode mode) {
FileSystem fs = FileSystems.getDefault();
PathMatcher matcher;
if (mode == GLOB) {
matcher = fs.getPathMatcher("glob:" + pattern);
} else if (mode == REGEX) {
matcher = fs.getPathMatcher("regex:" + pattern);
} else {
throw new IllegalArgumentException("Unrecognized Path Matcher Mode: " + mode.getLabel());
}
return matcher;
}
  /**
   * Initializes the spooler: resolves directories, normalizes {@code sourceFile} (a file
   * name, partial path, or full path) into {@link #currentFile}, creates the queue and
   * metrics, and — unless waiting for the spool path to appear — starts spooling.
   */
  public void init(String sourceFile) {
    try {
      FileSystem fs = FileSystems.getDefault();
      spoolDirPath = fs.getPath(spoolDir).toAbsolutePath();
      if(StringUtils.isEmpty(sourceFile)) {
        sourceFile = "";
        this.currentFile = Paths.get(sourceFile);
      } else {
        // sourceFile can contain: a filename, a partial path (relative to spoolDirPath),
        // or a full path.
        this.currentFile = Paths.get(sourceFile);
        if (this.currentFile.getParent() == null
            || !(this.currentFile.getParent().toString().contains(spoolDirPath.toString()))) {
          // if filename only or not full path - add the full path to the filename
          this.currentFile = Paths.get(spoolDirPath.toString(), sourceFile);
        }
      }
      if (!waitForPathAppearance) {
        checkBaseDir(spoolDirPath);
      }
      if (postProcessing == FilePostProcessing.ARCHIVE) {
        archiveDirPath = fs.getPath(archiveDir).toAbsolutePath();
        checkBaseDir(archiveDirPath);
      }
      if (errorArchiveDir != null) {
        errorArchiveDirPath = fs.getPath(errorArchiveDir).toAbsolutePath();
        checkBaseDir(errorArchiveDirPath);
      }
      LOG.debug("Spool directory '{}', file pattern '{}', current file '{}'", spoolDirPath, pattern, currentFile);
      String extraInfo = "";
      if (postProcessing == FilePostProcessing.ARCHIVE) {
        extraInfo = Utils.format(", archive directory '{}', retention '{}' minutes", archiveDirPath,
            archiveRetentionMillis / 60 / 1000
        );
      }
      LOG.debug("Post processing mode '{}'{}", postProcessing, extraInfo);
      fileMatcher = createPathMatcher(pattern, pathMatcherMode);
      if (useLastModified) {
        // 11 is the DEFAULT_INITIAL_CAPACITY -- seems pretty random, but lets use the same one.
        filesQueue = new PriorityBlockingQueue<>(11, pathComparator);
      } else {
        filesQueue = new PriorityBlockingQueue<>();
      }
      spoolQueueMeter = context.createMeter("spoolQueue");
      pendingFilesCounter = context.createCounter(PENDING_FILES);
      if (!waitForPathAppearance) {
        startSpooling(currentFile);
      }
    } catch (IOException ex) {
      // Initialization failed part-way: release whatever was started before rethrowing.
      destroy();
      throw new RuntimeException(ex);
    }
  }
  /**
   * Starts the background machinery: post-processes files older than {@code currentFile}
   * (skipped in preview), seeds the queue with an initial scan, and schedules the periodic
   * finder plus — for ARCHIVE mode with a positive retention — the purger.
   */
  private void startSpooling(Path currentFile) throws IOException {
    running = true;
    if(!context.isPreview()) {
      handleOlderFiles(currentFile);
    }
    scheduledExecutor = new SafeScheduledExecutorService(1, "directory-spooler");
    findAndQueueFiles(currentFile, true, false);
    finder = new FileFinder();
    scheduledExecutor.scheduleAtFixedRate(finder, 5, 5, TimeUnit.SECONDS);
    if (postProcessing == FilePostProcessing.ARCHIVE && archiveRetentionMillis > 0) {
      // create and schedule file purger only if the retention time is > 0
      purger = new FilePurger();
      scheduledExecutor.scheduleAtFixedRate(purger, 1, 1, TimeUnit.MINUTES);
    }
  }
  /**
   * Stops the spooler and its background tasks. Safe to call multiple times and from
   * a partially-initialized state.
   */
  public void destroy() {
    running = false;
    try {
      if (scheduledExecutor != null) {
        scheduledExecutor.shutdownNow();
        scheduledExecutor = null;
      }
    } catch (RuntimeException ex) {
      LOG.warn("Error during scheduledExecutor.shutdownNow(), {}", ex.toString(), ex);
    }
  }
  /** Returns whether spooling has been started and not yet destroyed. */
  public boolean isRunning() {
    return running;
  }
  /** Returns the stage context this spooler reports metrics through. */
  public PushSource.Context getContext() {
    return context;
  }
  /** Returns the configured spool directory (normalized, no trailing slash). */
  public String getSpoolDir() {
    return spoolDir;
  }
  /** Returns the maximum number of files allowed in the queue. */
  public int getMaxSpoolFiles() {
    return maxSpoolFiles;
  }
  /** Returns the configured file-name pattern. */
  public String getFilePattern() {
    return pattern;
  }
  /** Returns the configured post-processing mode. */
  public FilePostProcessing getPostProcessing() {
    return postProcessing;
  }
  /** Returns the configured archive directory (may be null when not archiving). */
  public String getArchiveDir() {
    return archiveDir;
  }
  /**
   * Adds a file to the pending queue, enforcing the max-files cap and skipping duplicates.
   * When {@code checkCurrent} is set, the file is first compared against {@code currentFile}.
   */
  private void addFileToQueue(Path file, boolean checkCurrent) {
    Preconditions.checkNotNull(file, "file cannot be null");
    if (checkCurrent) {
      try {
        boolean valid = StringUtils.isEmpty(currentFile.toString()) || compare(currentFile, file) < 0;
        // NOTE(review): an "invalid" (not newer than currentFile) file is only logged —
        // it is still added below. Confirm whether a 'return' was intended here.
        if (!valid) {
          LOG.warn("File cannot be added to the queue: " + file.toString());
        }
      } catch (NoSuchFileException ex) {
        // Happens only in timestamp ordering.
        // Very unlikely this will happen, new file has to be added to the queue at the exact time when
        // the currentFile was consumed and archived while a new file has not yet been picked up for processing.
        // Ignore - we just add the new file, since this means this file is indeed newer
        // (else this would have been consumed and archived first)
      }
    }
    if (!filesQueue.contains(file)) {
      if (filesQueue.size() >= maxSpoolFiles) {
        throw new IllegalStateException(Utils.format("Exceeded max number '{}' of queued files", maxSpoolFiles));
      }
      filesQueue.add(file);
      spoolQueueMeter.mark(filesQueue.size());
    } else {
      LOG.warn("File '{}' already in queue, ignoring", file);
    }
  }
  /**
   * Returns whether files can be polled yet. When configured to wait for the spool path,
   * this polls for its appearance and lazily starts spooling the first time it shows up.
   */
  private boolean canPoolFiles() {
    if(waitForPathAppearance) {
      try {
        DirectoryPathCreationWatcher watcher = new DirectoryPathCreationWatcher(Arrays.asList(spoolDirPath), 0);
        if (!watcher.find().isEmpty()) {
          waitForPathAppearance = false;
          startSpooling(this.currentFile);
        } else {
          LOG.debug(Utils.format("Directory Paths does not exist yet: {}", spoolDirPath));
        }
      } catch (IOException e) {
        throw new RuntimeException(Utils.format("Some Problem with the file system: {}", e.toString()), e);
      }
    }
    return !waitForPathAppearance;
  }
  /**
   * Polls the queue for the next file to process, waiting up to the given time.
   * Before polling, the previously returned file is post-processed (kept, deleted or
   * archived) according to the configured mode — skipped in preview.
   * ("pool" is a historical typo for "poll"; the name is public API and kept as-is.)
   *
   * @return the next file, or {@code null} on timeout
   */
  public File poolForFile(long wait, TimeUnit timeUnit) throws InterruptedException {
    Preconditions.checkArgument(wait >= 0, "wait must be zero or greater");
    Preconditions.checkNotNull(timeUnit, "timeUnit cannot be null");
    if (!canPoolFiles()) {
      return null;
    }
    Preconditions.checkState(running, "Spool directory watcher not running");
    synchronized (this) {
      if (previousFile != null && !context.isPreview()) {
        switch (postProcessing) {
          case NONE:
            LOG.debug("Previous file '{}' remains in spool directory", previousFile);
            break;
          case DELETE:
            try {
              if (Files.exists(previousFile)) {
                LOG.debug("Deleting previous file '{}'", previousFile);
                Files.delete(previousFile);
              } else {
                LOG.error("failed to delete previous file '{}'", previousFile);
              }
            } catch (IOException ex) {
              throw new RuntimeException(Utils.format("Could not delete file '{}', {}", previousFile, ex.toString()),
                  ex);
            }
            break;
          case ARCHIVE:
            try {
              if (Files.exists(previousFile)) {
                LOG.debug("Archiving previous file '{}'", previousFile);
                moveIt(previousFile, archiveDirPath);
              } else {
                LOG.error("failed to Archive previous file '{}'", previousFile);
              }
            } catch (IOException ex) {
              throw new RuntimeException(Utils.format("Could not move file '{}' to archive dir {}, {}", previousFile,
                  archiveDirPath, ex.toString()), ex);
            }
            break;
          default:
            LOG.error("poolForFile(): switch failed. postProcesing " + postProcessing.name() + " " + postProcessing.toString());
        }
        previousFile = null;
      }
    }
    Path next = null;
    try {
      LOG.debug("Polling for file, waiting '{}' ms", TimeUnit.MILLISECONDS.convert(wait, timeUnit));
      next = filesQueue.poll(wait, timeUnit);
    } catch (InterruptedException ex) {
      next = null;
    } finally {
      LOG.debug("Polling for file returned '{}'", next);
      if (next != null) {
        // Remember the returned file so it gets post-processed on the next call.
        currentFile = next;
        previousFile = next;
      }
    }
    pendingFilesCounter.inc(filesQueue.size() - pendingFilesCounter.getCount());
    return (next != null) ? next.toFile() : null;
  }
  /**
   * Moves the file last returned by {@link #poolForFile} to the error-archive directory
   * (when one is configured and not in preview); otherwise leaves it in the spool dir.
   */
  public void handleCurrentFileAsError() throws IOException {
    if (errorArchiveDirPath != null && !context.isPreview()) {
      // previousFile is already absolute; resolve() then returns it unchanged.
      Path current = spoolDirPath.resolve(previousFile);
      LOG.error("Archiving file in error '{}' in error archive directory '{}'", previousFile, errorArchiveDirPath);
      moveIt(current, errorArchiveDirPath);
      // we need to set the currentFile to null because we just moved to error.
      previousFile = null;
    } else {
      LOG.error("Leaving file in error '{}' in spool directory", currentFile);
    }
  }
// This method is a simple wrapper that lets us find the NoSuchFileException if that was the cause.
private int compare(Path path1, Path path2) throws NoSuchFileException {
// why not just check if the file exists? Well, there is a possibility file gets moved/archived/deleted right after
// that check. In that case we will still fail. So fail, and recover.
try {
return pathComparator.compare(path1, path2);
} catch (RuntimeException ex) {
Throwable cause = ex.getCause();
if (cause != null && cause instanceof NoSuchFileException) {
LOG.debug("Starting file may have already been archived.", cause);
throw (NoSuchFileException) cause;
}
LOG.warn("Error while comparing files", ex);
throw ex;
}
}
private void moveIt(Path file, Path destinationRoot) throws IOException {
// wipe out base of the path - leave subdirectory portion in place.
String f = file.toString().replaceFirst(spoolDirPath.toString(), "");
Path dest = Paths.get(destinationRoot.toString(), f);
if(!file.equals(dest)) {
dest.toFile().getParentFile().mkdirs();
try {
if (Files.exists(dest)) {
Files.delete(dest);
}
Files.move(file, dest);
} catch (Exception ex) {
throw new IOException("moveIt: Files.delete or Files.move failed. " + file + " " + dest + " " + ex.getMessage() + " destRoot " + destinationRoot);
}
}
}
  /**
   * Scans the spool directory (and, with timestamp ordering, its subdirectories) for files
   * matching the pattern that are newer than {@code startingFile}, and queues them.
   * Only files with mtime strictly before the scan start are picked up (SDC-3551).
   *
   * @return the list of directories scanned, or {@code null} if the spooler stopped mid-scan
   * @throws IllegalStateException when more than {@code maxSpoolFiles} files are found
   */
  private List<Path> findAndQueueFiles(
      final Path startingFile, final boolean includeStartingFile, boolean checkCurrent
  ) throws IOException {
    final long scanTime = System.currentTimeMillis();
    DirectoryStream.Filter<Path> filter = new DirectoryStream.Filter<Path>() {
      @Override
      public boolean accept(Path entry) throws IOException {
        boolean accept = false;
        // SDC-3551: Pick up only files with mtime strictly less than scan time.
        if (entry != null && Files.getLastModifiedTime(entry).toMillis() < scanTime && fileMatcher.matches(entry.getFileName())) {
          if (startingFile == null || startingFile.toString().isEmpty()) {
            accept = true;
          } else {
            try {
              int compares = compare(entry, startingFile);
              accept = (compares == 0 && includeStartingFile) || (compares > 0);
            } catch (NoSuchFileException ex) {
              // This happens only if timestamp is used, when the mtime is looked up for the startingFile
              // which has been archived, so this file must be newer since it is still in the directory
              // (if it was older it would have been consumed and archived earlier)
              return true;
            }
          }
        }
        return accept;
      }
    };
    final List<Path> directories = new ArrayList<>();
    if (processSubdirectories && useLastModified) {
      // Recursive mode: collect every subdirectory, then scan each one below.
      EnumSet<FileVisitOption> opts = EnumSet.noneOf(FileVisitOption.class);
      try {
        Files.walkFileTree(spoolDirPath, opts, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
          @Override
          public FileVisitResult preVisitDirectory(
              Path dirPath, BasicFileAttributes attributes
          ) throws IOException {
            directories.add(dirPath);
            return FileVisitResult.CONTINUE;
          }
        });
      } catch (Exception ex) {
        throw new IOException("findAndQueueFiles(): walkFileTree error. startingFile " + startingFile + ex.getMessage(), ex);
      }
    } else {
      directories.add(spoolDirPath);
    }
    List<Path> foundFiles = new ArrayList<>(maxSpoolFiles);
    for (Path dir : directories) {
      try (DirectoryStream<Path> matchingFile = Files.newDirectoryStream(dir, filter)) {
        for (Path file : matchingFile) {
          if (!running) {
            // Spooler destroyed while scanning: abort.
            return null;
          }
          if (Files.isDirectory(file)) {
            continue;
          }
          LOG.trace("Found file '{}'", file);
          foundFiles.add(file);
          if (foundFiles.size() > maxSpoolFiles) {
            throw new IllegalStateException(Utils.format("Exceeded max number '{}' of spool files in directory",
                maxSpoolFiles
            ));
          }
        }
      } catch(Exception ex) {
        LOG.error("findAndQueueFiles(): newDirectoryStream failed. " + ex.getMessage(), ex);
      }
    }
    if (!useLastModified) { // Sorted in the queue, if useLastModified is true.
      Collections.sort(foundFiles);
    }
    for (Path file : foundFiles) {
      addFileToQueue(file, checkCurrent);
      if (filesQueue.size() > maxSpoolFiles) {
        throw new IllegalStateException(Utils.format("Exceeded max number '{}' of spool files in directory",
            maxSpoolFiles
        ));
      }
    }
    spoolQueueMeter.mark(filesQueue.size());
    pendingFilesCounter.inc(filesQueue.size() - pendingFilesCounter.getCount());
    LOG.debug("Found '{}' files", filesQueue.size());
    return directories;
  }
  /**
   * Applies the configured post-processing (DELETE/ARCHIVE) to files already in the spool
   * tree that are older than {@code startingFile}; files not matching the pattern are left
   * in place. No-op for NONE post-processing or in preview (callers guard the latter).
   */
  void handleOlderFiles(final Path startingFile) throws IOException {
    if (postProcessing != FilePostProcessing.NONE) {
      final ArrayList<Path> toProcess = new ArrayList<>();
      EnumSet<FileVisitOption> opts = EnumSet.noneOf(FileVisitOption.class);
      try {
        Files.walkFileTree(spoolDirPath, opts, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
          @Override
          public FileVisitResult visitFile(
              Path dirPath, BasicFileAttributes attributes
          ) throws IOException {
            // "Older" is defined by the queue ordering (name or mtime).
            if (compare(dirPath, startingFile) < 0) {
              toProcess.add(dirPath);
            }
            return FileVisitResult.CONTINUE;
          }
        });
      } catch (Exception ex) {
        throw new IOException("traverseDirectories(): walkFileTree error. startingFile "
            + startingFile
            + ex.getMessage(),
            ex
        );
      }
      for (Path p : toProcess) {
        switch (postProcessing) {
          case DELETE:
            if (fileMatcher.matches(p.getFileName())) {
              if (Files.exists(p)) {
                Files.delete(p);
                LOG.debug("Deleting old file '{}'", p);
              } else {
                LOG.debug("The old file '{}' does not exist", p);
              }
            } else {
              LOG.debug("Ignoring old file '{}' that do not match the file name pattern '{}'", p, pattern);
            }
            break;
          case ARCHIVE:
            if (fileMatcher.matches(p.getFileName())) {
              if (Files.exists(p)) {
                moveIt(p, archiveDirPath);
                LOG.debug("Archiving old file '{}'", p);
              } else {
                LOG.debug("The old file '{}' does not exist", p);
              }
            } else {
              LOG.debug("Ignoring old file '{}' that do not match the file name pattern '{}'", p, pattern);
            }
            break;
          case NONE:
            // no action required
            break;
          default:
            throw new IllegalStateException("Unexpected post processing option " + postProcessing);
        }
      }
    }
  }
  /**
   * Periodic task (every 5s) that re-scans the spool directory for files newer than
   * {@code currentFile} and queues them.
   */
  class FileFinder implements Runnable {
    public FileFinder(){
    }
    @Override
    public synchronized void run() {
      // by using current we give a chance to have unprocessed files out of order
      LOG.debug("Starting file finder from '{}'", currentFile);
      try {
        findAndQueueFiles(currentFile, false, true);
      } catch (Exception ex) {
        LOG.warn("Error while scanning directory '{}' for files newer than '{}': {}", archiveDirPath, currentFile,
            ex.toString(), ex);
      }
    }
  }
  /**
   * Periodic task (every minute) that deletes archived files older than the configured
   * retention. Only scheduled when post-processing is ARCHIVE with a positive retention.
   */
  class FilePurger implements Runnable {
    @Override
    @SuppressWarnings("unchecked")
    public void run() {
      LOG.debug("Starting archived files purging");
      final long timeThreshold = System.currentTimeMillis() - archiveRetentionMillis;
      final ArrayList<Path> toProcess = new ArrayList<>();
      EnumSet<FileVisitOption> opts = EnumSet.noneOf(FileVisitOption.class);
      int purged = 0;
      try {
        Files.walkFileTree(archiveDirPath, opts, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
          @Override
          public FileVisitResult visitFile(
              Path entry, BasicFileAttributes attributes
          ) throws IOException {
            // Candidates: matching files whose mtime is older than the retention threshold.
            if (fileMatcher.matches(entry.getFileName()) && (
                timeThreshold - Files.getLastModifiedTime(entry).toMillis() > 0
            )) {
              toProcess.add(entry);
            }
            return FileVisitResult.CONTINUE;
          }
        });
        for (Path file : toProcess) {
          if (running) {
            LOG.debug("Deleting archived file '{}', exceeded retention time", file);
            try {
              if(Files.exists(file)) {
                Files.delete(file);
                purged++;
              }
            } catch (IOException ex) {
              LOG.warn("Error while deleting file '{}': {}", file, ex.toString(), ex);
            }
          } else {
            LOG.debug("Spooler has been destroyed, stopping archived files purging half way");
            break;
          }
        }
      } catch (IOException ex) {
        LOG.warn("Error while scanning directory '{}' for archived files purging: {}", archiveDirPath, ex.toString(),
            ex
        );
      }
      LOG.debug("Finished archived files purging, deleted '{}' files", purged);
    }
  }
}
| z123/datacollector | basic-lib/src/main/java/com/streamsets/pipeline/lib/dirspooler/DirectorySpooler.java | Java | apache-2.0 | 29,154 |
/**
* Copyright 2016 Christof Rath <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iaik.privlog.sanitizers;
import org.slf4j.helpers.Util;
import ch.qos.logback.core.spi.ContextAwareBase;
import ch.qos.logback.core.spi.LifeCycle;
/**
* @author Christof Rath <[email protected]>
*/
public class IdentitySanitizerFactory extends ContextAwareBase implements IParamSanitizerFactory, LifeCycle {
	/**
	 * Pass-through sanitizer: the "sanitized" value is simply the critical value,
	 * i.e. no transformation is applied at all.
	 */
	public static class IdentitySanitizer extends ParamSanitizerBase {
		protected IdentitySanitizer(String tagName, Object parameter, int start, int startOriginal, int endOriginal) {
			super(tagName, parameter, start, startOriginal, endOriginal);
			// Output always equals the critical input for this sanitizer.
			equal = true;
		}

		@Override
		public String getSanitized() {
			// Lazily cache the critical value on first access.
			if (sanitized != null) {
				return sanitized;
			}
			sanitized = getCritical();
			return sanitized;
		}
	}

	// Lifecycle flag toggled by start()/stop().
	protected boolean started;

	@Override
	public IdentitySanitizer create(String tagName, Object parameter, int start, int startOriginal, int endOriginal) {
		if (isStarted()) {
			return new IdentitySanitizer(tagName, parameter, start, startOriginal, endOriginal);
		}
		// Not started: record the error and fall back to a placeholder parameter
		// of the form "{tagName}" instead of exposing the real value.
		addError("The identity sanitizer factory has not been started.");
		return new IdentitySanitizer(tagName, "{" + tagName + "}", start, startOriginal, endOriginal);
	}

	@Override
	public void start() {
		if (context == null) {
			final String noContextMsg = "IdentitySanitizer cannot be started w/o a context";
			Util.report(noContextMsg);
			throw new RuntimeException(noContextMsg);
		}
		// Loudly warn: this factory intentionally performs no sanitization.
		addWarn("=========================\n|| WARNING ||\n=========================\n\n"
				+ "The IdentitySanitizer does NOT sanitize at all!");
		started = true;
	}

	@Override
	public void stop() {
		started = false;
	}

	@Override
	public boolean isStarted() {
		return started;
	}
}
| nobecutan/privacy-aware-logging | src/main/java/iaik/privlog/sanitizers/IdentitySanitizerFactory.java | Java | apache-2.0 | 2,321 |
package com.google.ratel.deps.jackson.databind.jsontype.impl;
import com.google.ratel.deps.jackson.annotation.JsonTypeInfo.As;
import com.google.ratel.deps.jackson.databind.BeanProperty;
import com.google.ratel.deps.jackson.databind.JavaType;
import com.google.ratel.deps.jackson.databind.jsontype.TypeDeserializer;
import com.google.ratel.deps.jackson.databind.jsontype.TypeIdResolver;
/**
* Type deserializer used with {@link As#EXTERNAL_PROPERTY} inclusion mechanism.
* Actual implementation may look bit strange since it depends on comprehensive
* pre-processing done by {@link com.google.ratel.deps.jackson.databind.deser.BeanDeserializer}
* to basically transform external type id into structure that looks more like
* "wrapper-array" style inclusion. This intermediate form is chosen to allow
* supporting all possible JSON structures.
*/
public class AsExternalTypeDeserializer extends AsArrayTypeDeserializer
{
    private static final long serialVersionUID = 1L;

    /**
     * Primary constructor; all state is delegated to the array-style base class,
     * since pre-processing turns external type ids into wrapper-array form.
     */
    public AsExternalTypeDeserializer(JavaType bt, TypeIdResolver idRes,
            String typePropertyName, boolean typeIdVisible, Class<?> defaultImpl)
    {
        super(bt, idRes, typePropertyName, typeIdVisible, defaultImpl);
    }

    /**
     * Copy constructor used when (re)binding to a specific property.
     */
    public AsExternalTypeDeserializer(AsExternalTypeDeserializer src, BeanProperty property) {
        super(src, property);
    }

    @Override
    public TypeDeserializer forProperty(BeanProperty prop)
    {
        // Avoid allocating a copy when the property is unchanged (typically null).
        return (prop == _property) ? this : new AsExternalTypeDeserializer(this, prop);
    }

    @Override
    public As getTypeInclusion() {
        return As.EXTERNAL_PROPERTY;
    }
}
| sabob/ratel | ratel/src/com/google/ratel/deps/jackson/databind/jsontype/impl/AsExternalTypeDeserializer.java | Java | apache-2.0 | 1,695 |
package edu.teco.dnd.module.permissions;
import java.io.ObjectStreamClass;
import java.security.Permission;
import java.util.Arrays;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import edu.teco.dnd.blocks.FunctionBlock;
import edu.teco.dnd.module.Application;
import edu.teco.dnd.module.FunctionBlockSecurityDecorator;
import edu.teco.dnd.module.Module;
import edu.teco.dnd.module.UsercodeWrapper;
import edu.teco.dnd.module.messages.values.ValueMessageAdapter;
/**
* A SecurityManager used to restrict permissions for {@link FunctionBlock}s running on a {@link Module}. It allows
* getting the ClassLoader, getting environment variables and getting file system attributes. It also includes a
* workaround for the JIT ({@link JITPolicyRule}) and allows methods needed for sending values to FunctionBlocks running
* on remote Modules.
*/
public class ApplicationSecurityManager extends SecurityManager {
	private static final Logger LOGGER = LogManager.getLogger(ApplicationSecurityManager.class);

	/** Combined policy consulted for every permission check. */
	private static final PolicyRule rule;

	static {
		// NOTE: rule order matters — the combiner consults rules in insertion order.
		final PolicyRuleCombiner ruleCombiner = new PolicyRuleCombiner();

		// Baseline permissions every FunctionBlock is allowed to use.
		final GrantPermissionPolicyRule grantPermissionRule = new GrantPermissionPolicyRule();
		grantPermissionRule.addPermission(new RuntimePermission("getClassLoader"));
		grantPermissionRule.addPermission(new RuntimePermission("getenv.*"));
		grantPermissionRule.addPermission(new RuntimePermission("getFileSystemAttributes"));
		ruleCombiner.addRule(grantPermissionRule);

		// Workaround so the JIT compiler is not blocked by the security manager.
		ruleCombiner.addRule(new JITPolicyRule());

		// Stack-trace based rules: frames from user code are insecure, while
		// selected framework entry points are explicitly trusted.
		final StackTraceElementMatcherPolicyRule stemMatcher = new StackTraceElementMatcherPolicyRule();
		stemMatcher.addInsecureMatcher(new ClassMatcher(UsercodeWrapper.class));
		stemMatcher.addInsecureMatcher(new ClassMatcher(FunctionBlockSecurityDecorator.class));
		stemMatcher.addInsecureMatcher(new MethodMatcher(ObjectStreamClass.class, "invokeReadObject"));
		stemMatcher.addInsecureMatcher(new MethodMatcher(ObjectStreamClass.class, "invokeReadResolve"));
		stemMatcher.addSecureMatcher(new MethodMatcher(FunctionBlock.class, "doInit"));
		stemMatcher.addSecureMatcher(new MethodMatcher(Application.class, "sendValue"));
		stemMatcher.addSecureMatcher(new MethodMatcher(ClassLoader.class, "loadClass"));
		stemMatcher.addSecureMatcher(new ClassMatcher(ValueMessageAdapter.class));
		ruleCombiner.addRule(stemMatcher);

		rule = ruleCombiner;
	}

	/**
	 * Checks the given permission against the combined policy rules using the
	 * current thread's stack trace.
	 *
	 * @param permission
	 *            the permission being requested
	 * @throws SecurityException
	 *             if the policy explicitly denies the permission
	 */
	@Override
	public void checkPermission(final Permission permission) {
		LOGGER.entry(permission);
		final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
		final Boolean policy = rule.getPolicy(permission, stackTrace);
		// Allow when the policy has no opinion (null) or explicitly grants (TRUE).
		// Boolean.FALSE.equals avoids unboxing a possibly-null Boolean.
		if (!Boolean.FALSE.equals(policy)) {
			LOGGER.exit();
			return;
		}
		if (LOGGER.isWarnEnabled()) {
			LOGGER.warn("denying {} for {}", permission, Arrays.asList(stackTrace));
		}
		throw new SecurityException();
	}

	/**
	 * Delegates to {@link #checkPermission(Permission)}; the supplied context is
	 * intentionally ignored — only the current call stack is consulted.
	 */
	@Override
	public void checkPermission(final Permission perm, final Object context) {
		checkPermission(perm);
	}
}
| DesignAndDeploy/dnd | DND/src/edu/teco/dnd/module/permissions/ApplicationSecurityManager.java | Java | apache-2.0 | 3,022 |
package spring.aop;
import org.junit.Test;
import spring.SpringTxCase;
/**
 * Smoke test for Spring AOP through an interface-based proxy: retrieves the
 * proxied bean from the context and invokes a method so any configured advice
 * around {@code IBasicService} is exercised.
 *
 * @author Zhang.Ge
 * @version v1.0 2017-04-12 11:42:09
 */
public class AopInterfaceTest extends SpringTxCase {
    @Test
    public void test() {
        // Look up the AOP proxy by bean name and cast to the service interface;
        // calling delete() triggers the advice woven around the target bean.
        IBasicService business = (IBasicService) context.getBean("businessProxy");
        business.delete("123");
    }
}
| codemonkeykings/study | spring/src/test/java/spring/aop/AopInterfaceTest.java | Java | apache-2.0 | 404 |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.datamgr.actions;
import javax.swing.tree.TreePath;
import docking.ActionContext;
import docking.action.DockingAction;
import docking.action.MenuData;
import docking.widgets.tree.GTree;
import ghidra.app.plugin.core.datamgr.DataTypeManagerPlugin;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.app.plugin.core.datamgr.tree.DataTypeTreeNode;
import ghidra.program.model.data.*;
import ghidra.util.Msg;
/**
 * Action that packs a single selected composite data type with a packing value
 * of 1 (byte alignment). Enabled only when exactly one modifiable data type
 * node is selected in the data type tree.
 */
public class Pack1DataTypeAction extends DockingAction {

	private DataTypeManagerPlugin plugin;

	public Pack1DataTypeAction(DataTypeManagerPlugin plugin) {
		super("Pack1 Data Type", plugin.getName());
		this.plugin = plugin;

		setPopupMenuData(new MenuData(new String[] { "Pack (1)" }, "Edit"));
	}

	@Override
	public boolean isEnabledForContext(ActionContext context) {
		Object contextObject = context.getContextObject();
		if (!(contextObject instanceof GTree)) {
			return false;
		}

		GTree gTree = (GTree) contextObject;
		TreePath[] selectionPaths = gTree.getSelectionPaths();
		// Swing trees return null (not an empty array) when nothing is selected;
		// guard before dereferencing to avoid a NullPointerException.
		if (selectionPaths == null || selectionPaths.length != 1) {
			return false;
		}

		DataTypeTreeNode node = (DataTypeTreeNode) selectionPaths[0].getLastPathComponent();
		if (!(node instanceof DataTypeNode)) {
			return false;
		}
		setEnabled(node.isModifiable());
		return true;
	}

	@Override
	public void actionPerformed(ActionContext context) {
		GTree gTree = (GTree) context.getContextObject();
		TreePath[] selectionPaths = gTree.getSelectionPaths();
		if (selectionPaths == null || selectionPaths.length != 1) {
			Msg.error(this, "Pack is only allowed on an individual data type.");
			return;
		}

		TreePath treePath = selectionPaths[0];
		final DataTypeNode dataTypeNode = (DataTypeNode) treePath.getLastPathComponent();
		DataType dataType = dataTypeNode.getDataType();
		DataTypeManager dataTypeManager = dataType.getDataTypeManager();
		if (dataTypeManager == null) {
			Msg.error(this,
				"Can't pack data type " + dataType.getName() + " without a data type manager.");
			return;
		}

		// Start the transaction outside the try block so that a failure to start
		// does not trigger endTransaction() with an invalid (-1) transaction id.
		int transactionID = dataTypeManager.startTransaction("pack of " + dataType.getName());
		boolean commit = false;
		try {
			packDataType(dataType);
			commit = true;
		}
		finally {
			// Commit on success; roll back if packDataType threw.
			dataTypeManager.endTransaction(transactionID, commit);
		}
	}

	/** Applies pack(1) to the given data type; only composites can be packed. */
	private void packDataType(DataType dataType) {
		if (!(dataType instanceof Composite)) {
			Msg.error(this,
				"Can't pack data type " + dataType.getName() + ". It's not a composite.");
			return;
		}
		((Composite) dataType).pack(1);
	}
}
| NationalSecurityAgency/ghidra | Ghidra/Features/Base/src/main/java/ghidra/app/plugin/core/datamgr/actions/Pack1DataTypeAction.java | Java | apache-2.0 | 3,152 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.