/**
* Sets reminders for an assignment identified using its displayed index from ProductiveNus.
*/
public class RemindCommand extends Command {
public static final String COMMAND_WORD = "remind";
public static final String MESSAGE_USAGE = "Format: " + COMMAND_WORD
+ " INDEX [MORE_INDEXES] (must be positive integers)";
public static final String MESSAGE_REMIND_ASSIGNMENT_SUCCESS = "Set reminders for Assignment: %1$s";
public static final String MESSAGE_REMIND_ASSIGNMENTS_SUCCESS = "Set reminders for Assignments: %1$s";
// for single index
public static final String MESSAGE_REMINDED_ASSIGNMENT = "This assignment already has reminders set.";
// for multiple indexes
public static final String MESSAGE_MULTIPLE_REMINDED_ASSIGNMENT = "This assignment already has reminders set: %1$s";
public static final String MESSAGE_MULTIPLE_REMINDED_ASSIGNMENTS =
"These assignments already have reminders set: %1$s";
private final List<Index> targetIndexes;
/**
* Constructs a RemindCommand to set reminders to the specified assignment(s).
* @param targetIndexes indexes of assignments in the filtered assignment list to remind
*/
public RemindCommand(List<Index> targetIndexes) {
requireNonNull(targetIndexes);
this.targetIndexes = targetIndexes;
}
@Override
public CommandResult execute(Model model) throws CommandException {
requireNonNull(model);
List<Assignment> lastShownList = model.getFilteredAssignmentList();
List<Assignment> assignmentsToRemind = new ArrayList<>();
List<Integer> assignmentsAlreadyReminded = new ArrayList<>();
CommandLogic.checkForDuplicatedIndexes(targetIndexes);
CommandLogic.checkForInvalidIndexes(targetIndexes, model, RemindCommand.MESSAGE_USAGE);
boolean isMultipleIndexes = targetIndexes.size() > 1;
boolean hasException = false;
for (Index targetIndex : targetIndexes) {
Assignment assignmentToRemind = lastShownList.get(targetIndex.getZeroBased());
if (assignmentToRemind.isReminded() && model.hasAssignment(assignmentToRemind)) {
hasException = true;
assignmentsAlreadyReminded.add(targetIndex.getOneBased());
continue;
}
assert(!assignmentToRemind.isReminded());
Assignment remindedAssignment = createRemindedAssignment(assignmentToRemind);
model.setAssignment(assignmentToRemind, remindedAssignment);
assignmentsToRemind.add(assignmentToRemind);
}
if (hasException) {
if (isMultipleIndexes && assignmentsAlreadyReminded.size() > 1) {
throw new CommandException(String.format(MESSAGE_MULTIPLE_REMINDED_ASSIGNMENTS,
assignmentsAlreadyReminded));
} else if (isMultipleIndexes) {
throw new CommandException(String.format(MESSAGE_MULTIPLE_REMINDED_ASSIGNMENT,
assignmentsAlreadyReminded));
} else {
throw new CommandException(MESSAGE_REMINDED_ASSIGNMENT);
}
}
return isMultipleIndexes
? new CommandResult(String.format(MESSAGE_REMIND_ASSIGNMENTS_SUCCESS, assignmentsToRemind))
: new CommandResult(String.format(MESSAGE_REMIND_ASSIGNMENT_SUCCESS, assignmentsToRemind));
}
/**
* Creates and returns an {@code Assignment} with the details of {@code assignmentToRemind}, with its reminder set.
*/
private static Assignment createRemindedAssignment(Assignment assignmentToRemind) {
assert assignmentToRemind != null;
Name updatedName = assignmentToRemind.getName();
Time updatedDeadline = assignmentToRemind.getDeadline();
ModuleCode updatedModuleCode = assignmentToRemind.getModuleCode();
Remind updatedRemind = assignmentToRemind.getRemind().setReminder();
Schedule updatedSchedule = assignmentToRemind.getSchedule();
Priority priority = assignmentToRemind.getPriority();
Done updatedDone = assignmentToRemind.getDone();
return new Assignment(updatedName, updatedDeadline, updatedModuleCode, updatedRemind, updatedSchedule,
priority, updatedDone);
}
@Override
public boolean equals(Object other) {
return other == this // short circuit if same object
|| (other instanceof RemindCommand // instanceof handles nulls
&& targetIndexes.equals(((RemindCommand) other).targetIndexes)); // state check
}
}
#include <iostream>
#include <vector>
#include <algorithm>
#include <math.h>
#include <map>
#include <limits.h>
#define ll long long
#define pii pair<int,int>
#define vi vector<int>
#define map_it map<int,int> :: iterator
#define mod 1000000007
using namespace std;
int main()
{
ll n,k;
cin>>n>>k;
ll len = 2*k+1;
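// This appears to compute the minimum number of lamps of radius k needed to
// light positions 1..n, where a lamp at position p lights [p-k, p+k]
// (len = 2k+1 positions per lamp). Example: n=10, k=2 -> len=5, two lamps at 3 and 8.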
if(n<=k)
{
cout<<1<<endl;
cout<<1<<endl;
}
else if(n%len==0)
{
int ans=n/len;
cout<<ans<<endl;
int i=k+1;
while(ans--)
{
cout<<i<<" ";
i+=len;
}
}
else
{
if(n<len)
{
cout<<1<<endl;
cout<<n-k<<endl;
}
else
{
ll c=0;
ll m = n/len;
m--;
c+=m;
ll l = n-m*len;
//cout<<l<<endl;
ll left=len;
ll right=len;
bool flag=true;
while(l!=(left+right))
{
if(flag)
{
left--;
flag=false;
}
else
{
right--;
flag=true;
}
}
//cout<<left<<" "<<right<<endl;
cout<<c+2<<endl;
cout<<left-k<<" ";
int i=left-k+len;
while(c--)
{
cout<<i<<" ";
i+=len;
}
cout<<i<<endl;
}
}
}
/*
* @file
* @copyright defined in meycoin/LICENSE.txt
*/
package raftsupport
import (
"context"
"encoding/binary"
"github.com/meeypioneer/mey-library/log"
"github.com/meeypioneer/meycoin/consensus"
"github.com/meeypioneer/meycoin/p2p/p2pcommon"
"github.com/meeypioneer/meycoin/p2p/p2putil"
"github.com/meeypioneer/meycoin/types"
rtypes "github.com/meeypioneer/etcd/pkg/types"
"github.com/meeypioneer/etcd/raft/raftpb"
"github.com/golang/protobuf/proto"
"io"
)
const (
SnapRespHeaderLength = 4
)
// TODO consider the scope of type
type snapshotReceiver struct {
logger *log.Logger
pm p2pcommon.PeerManager
rAcc consensus.MeyCoinRaftAccessor
peer p2pcommon.RemotePeer
rwc io.ReadWriteCloser
}
func newSnapshotReceiver(logger *log.Logger, pm p2pcommon.PeerManager, rAcc consensus.MeyCoinRaftAccessor, peer p2pcommon.RemotePeer, sender io.ReadWriteCloser) *snapshotReceiver {
return &snapshotReceiver{logger: logger, pm: pm, rAcc: rAcc, peer: peer, rwc: sender}
}
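// Receive reads a single raft snapshot message from the stream, saves the
// snapshot through the raft accessor, and hands it to raft for processing.
// A SnapshotResponse is always sent back via the deferred sendResp, so every
// early return still reports a status to the peer.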
func (s *snapshotReceiver) Receive() {
resp := &types.SnapshotResponse{Status: types.ResultStatus_OK}
defer s.sendResp(s.rwc, resp)
dec := &RaftMsgDecoder{r: s.rwc}
// let snapshots be very large since they can exceed 512MB for large installations
m, err := dec.DecodeLimit(uint64(1 << 63))
if err != nil {
s.logger.Error().Str(p2putil.LogPeerName, s.peer.Name()).Err(err).Msg("failed to decode raft message")
resp.Status = types.ResultStatus_INVALID_ARGUMENT
resp.Message = "malformed message"
// TODO return error
//recvFailures.WithLabelValues(rwc.RemoteAddr).Inc()
//snapshotReceiveFailures.WithLabelValues(from).Inc()
return
}
from := rtypes.ID(m.From).String()
//receivedBytes.WithLabelValues(from).Add(float64(m.Size()))
if m.Type != raftpb.MsgSnap {
s.logger.Error().Str("type", m.Type.String()).Msg("unexpected raft message type on snapshot path")
resp.Status = types.ResultStatus_INVALID_ARGUMENT
resp.Message = "invalid message type"
//http.Error(w, "wrong raft message type", http.StatusBadRequest)
//snapshotReceiveFailures.WithLabelValues(from).Inc()
return
}
s.logger.Info().Uint64("index", m.Snapshot.Metadata.Index).Str("from", from).Msg("receiving database snapshot")
// save incoming database snapshot.
_, err = s.rAcc.SaveFromRemote(s.rwc, m.Snapshot.Metadata.Index, m)
if err != nil {
s.logger.Error().Err(err).Msg("failed to save KV snapshot")
resp.Status = types.ResultStatus_INTERNAL
//http.Error(w, msg, http.StatusInternalServerError)
//snapshotReceiveFailures.WithLabelValues(from).Inc()
return
}
//receivedBytes.WithLabelValues(from).Add(float64(n))
s.logger.Info().Str(p2putil.LogPeerName, s.peer.Name()).Uint64("index", m.Snapshot.Metadata.Index).Str("from", from).Msg("received and saved database snapshot successfully")
if err := s.rAcc.Process(context.TODO(), s.peer.ID(), m); err != nil {
switch v := err.(type) {
// Process may return codeError error when doing some
// additional checks before calling raft.Node.Step.
case codeError:
// TODO get resp
resp.Status = v.Status()
resp.Message = v.Message()
default:
s.logger.Warn().Err(err).Msg("failed to process raft message")
resp.Status = types.ResultStatus_UNKNOWN
//http.Error(w, msg, http.StatusInternalServerError)
//snapshotReceiveFailures.WithLabelValues(from).Inc()
}
return
}
// Write StatusNoContent header after the message has been processed by
// raft, which facilitates the client to report MsgSnap status.
//w.WriteHeader(http.StatusNoContent)
//snapshotReceive.WithLabelValues(from).Inc()
//snapshotReceiveSeconds.WithLabelValues(from).Observe(time.Since(start).Seconds())
}
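// sendResp marshals resp and writes it to w, framed as a 4-byte big-endian
// length prefix (SnapRespHeaderLength) followed by the protobuf-encoded body.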
func (s *snapshotReceiver) sendResp(w io.Writer, resp *types.SnapshotResponse) {
b, err := proto.Marshal(resp)
if err == nil {
bytebuf := make([]byte, SnapRespHeaderLength)
binary.BigEndian.PutUint32(bytebuf, uint32(len(b)))
w.Write(bytebuf)
w.Write(b)
} else {
s.logger.Info().Err(err).Msg("Failed to marshal snapshot response")
}
}
type codeError interface {
Status() types.ResultStatus
Message() string
}
n = int(input())
prices = list(map(int, input().split()))
sorted_prices = sorted(prices)
# Print the 1-based index of the highest price, then the second-highest price
# (this looks like a second-price auction: the winner pays the runner-up's bid).
print(prices.index(sorted_prices[n-1])+1, end=' ')
print(sorted_prices[n-2], end=' ')
A Pakistani immigrant who tried to set off a car bomb in Times Square was sentenced Tuesday to life in prison by a judge who said she hopes he spends time behind bars thinking "carefully about whether the Quran wants you to kill lots of people."
A defiant Faisal Shahzad smirked as he was given a mandatory life term that, under federal sentencing rules, will keep him behind bars until he dies.
"If I'm given 1,000 lives I will sacrifice them all for the life of Allah," he said at the start of a statement that lasted several minutes. "How can I be judged by a court that does not understand the suffering of my people?"
Shahzad - brought into the Manhattan courtroom in handcuffs and wearing a white skull cap - had instructed his attorney not to speak, and U.S. District Judge Miriam Goldman Cedarbaum told prosecutors she didn't need to hear from them. That left Shahzad and the judge free to spar over his reasoning for giving up his comfortable life in America to train in Pakistan and carry out a potentially deadly attack in the heart of Times Square.
Special Section: Terrorism in the U.S.
"You appear to be someone who was capable of education and I do hope you will spend some of the time in prison thinking carefully about whether the Quran wants you to kill lots of people," Cedarbaum told Shahzad after she announced his mandatory life sentence.
Shahzad, a 31-year-old former budget analyst from Connecticut who was born in Pakistan, responded that the "Quran gives us the right to defend. And that's all I'm doing."
Afterward, the head of the FBI's New York office, Janice K. Fedarcyk, cited evidence that Shahzad hoped to strike more than once.
"Shahzad built a mobile weapon of mass destruction and hoped and intended that it would kill large numbers of innocent people and planned to do it again two weeks later," Fedarcyk said in a statement. "The sentence imposed today means Shahzad will never pose that threat again."
Calling himself a Muslim soldier, a defiant Shahzad pleaded guilty in June to 10 terrorism and weapons counts.
For greatest impact, he chose a crowded section of the city by studying an online streaming video of Times Square, the so-called Crossroads of the World, prosecutors said.
On May 1, he lit the fuse of his crude, homemade bomb packed in a 1993 Nissan Pathfinder, then fled on foot, pausing along the way to listen for the explosion that never came, court papers said.
A street vendor spotted smoke coming from the sport utility vehicle and alerted police, who quickly cleared the area. The bomb attempt set off an intense investigation that culminated two days later with investigators plucking Shahzad off a Dubai-bound plane at a New York airport.
Shahzad has said the Pakistan Taliban provided him with more than $15,000 and five days of explosives training late last year and early this year, months after he became a U.S. citizen.
A few days later, Pakistani authorities arrested three men on charges they helped him meet leaders of the Pakistan Taliban, a militant group based in the northwest of the country that has claimed responsibility for the plot. They also are accused of sending him cash in the United States when he ran short of money.
The men's lawyer says there's no evidence to support the allegations and that the men had been forced to sign confessions. A trial date has yet to be set.
Three other men were detained in the northeastern U.S. on immigration charges in an investigation of an underground money transfer system used by Shahzad, but they were never charged with any crimes.
Prosecutors had introduced a dramatic video of an FBI-staged explosion they said demonstrated how deadly Shahzad's bomb could have been.
The FBI bomb - an identical vehicle fitted with 250 pounds (113 kilograms) of ammonium nitrate and diesel fuel, three 25-pound (11-kilogram) propane tanks and two five-gallon (19-liter) gasoline canisters - blew up with a force that ripped the vehicle in half. The explosion caused a giant fireball that overturned and shredded four other cars parked nearby in an open field, obliterated about a dozen dummies posed as pedestrians and shot fiery debris hundreds of feet in all directions.
"Had the bombing played out as Shahzad had so carefully planned, the lives of numerous residents and visitors of the city would have been lost and countless others would have been forever traumatized," prosecutors wrote in court papers.
At sentencing, Shahzad claimed the FBI's interrogation had violated his rights. He also warned that attacks on Americans will continue until the United States leaves Muslim lands.
"We are only Muslims ... but if you call us terrorists, we are proud terrorists and we will keep on terrorizing you," he said.
He added: "We do not accept your democracy or your freedom because we already have Sharia law and freedom."
The judge cut him off at one point to ask if he had sworn allegiance to the United States when he became an American citizen last year.
"I did swear but I did not mean it," said Shahzad.
"So you took a false oath," the judge told him.
She also reminded him that he was a failed terrorist.
"What you have done here, although happily, the training you sought in making bombs was unsuccessful and you were unsuccessful in your effort to kill many Americans," she said.
Asked by the judge if he had any final words, Shahzad said, "I'm happy with the deal that God has given me."
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.
package process
import "time"
// Config for fine-tuning a new process
type Config struct {
MinPortNumber int `yaml:"min_port" config:"min_port"`
MaxPortNumber int `yaml:"max_port" config:"max_port"`
SpawnTimeout time.Duration `yaml:"spawn_timeout" config:"spawn_timeout"`
// Transport is one of `unix` or `tcp`. `unix` uses unix sockets and is not supported on windows.
// Windows falls back to `tcp` regardless of configuration.
// With invalid configuration fallback to `tcp` is used as well.
Transport string
// TODO: cgroups and namespaces
}
// DefaultConfig creates a config with pre-set default values.
func DefaultConfig() *Config {
return &Config{
MinPortNumber: 10000,
MaxPortNumber: 30000,
SpawnTimeout: 30 * time.Second,
}
}
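// Example (hypothetical usage): start from the defaults and override a field.
//
//	cfg := DefaultConfig()
//	cfg.Transport = "tcp"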
def find_matching_allele_frequency(variant,
population_vcf_reader,
ref_reader,
padding_bases=0):
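"""Returns allele frequencies for the variant's alternate bases.

Summary (inferred from the code below): candidate and cohort variants are
projected onto a shared reference haplotype and matched; if no reference
haplotype can be derived, the reference allele gets frequency 1 and every
alternate gets 0.
"""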
query_region = ranges.make_range(
chrom=variant.reference_name,
start=variant.start - padding_bases,
end=variant.end + padding_bases)
cohort_variants = list(population_vcf_reader.query(query_region))
dict_allele_frequency = {}
for alt_base in variant.alternate_bases:
dict_allele_frequency[alt_base] = 0
try:
reference_haplotype, reference_offset = get_ref_haplotype_and_offset(
variant, cohort_variants, ref_reader)
except ValueError:
dict_allele_frequency = {}
dict_allele_frequency[variant.reference_bases] = 1
for alt in variant.alternate_bases:
dict_allele_frequency[alt] = 0
return dict_allele_frequency
candidate_haps = update_haplotype(variant, reference_haplotype,
reference_offset)
cohort_haps = []
for cohort_variant in cohort_variants:
cohort_haps.extend(
update_haplotype(cohort_variant, reference_haplotype, reference_offset))
for c in candidate_haps:
logging.debug('candidate %s, %s', c['haplotype'], c['alt'])
for c in cohort_haps:
logging.debug('cohort %s, %s', c['haplotype'], c['alt'])
dict_allele_frequency = match_candidate_and_cohort_haplotypes(
candidate_haps, cohort_haps)
if dict_allele_frequency:
logging.vlog(3, '%s:%d-%d, %s > %s', variant.reference_name, variant.start,
variant.end, variant.reference_bases, dict_allele_frequency)
return dict_allele_frequency
/**
* Reruns the workflow from a specific task
*
* @param workflowId the id of the workflow
* @param rerunWorkflowRequest the request containing the task to rerun from
* @return the id of the workflow
*/
public String rerunWorkflow(String workflowId, RerunWorkflowRequest rerunWorkflowRequest) {
Validate.notBlank(workflowId, "workflow id cannot be blank");
Validate.notNull(rerunWorkflowRequest, "RerunWorkflowRequest cannot be null");
return postForString("workflow/{workflowId}/rerun", rerunWorkflowRequest, null, workflowId);
}
def validated(self, base):
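"""Returns the decisions sorted by _sort_key in descending order, after
stripping any per-decision "strategy" entries. (base appears unused here.)
"""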
for d in self.decisions:
if "strategy" in d:
del d["strategy"]
return sorted(self.decisions, key=_sort_key, reverse=True) |
/**
* This mojo provides access to the Groovy sources (including stubs).
*
* @author Keegan Witt
* @since 1.0-beta-3
*/
public abstract class AbstractGroovyStubSourcesMojo extends AbstractGroovySourcesMojo {
/**
* Gets the set of stub files in the specified directory.
*
* @param outputDirectory the directory to write stubs to
* @return the set of stub files in the specified directory
*/
protected Set<File> getStubs(File outputDirectory) {
Set<File> files = new HashSet<>();
FileSetManager fileSetManager = new FileSetManager(getLog());
FileSet fileSet = new FileSet();
fileSet.setDirectory(outputDirectory.getAbsolutePath());
fileSet.setIncludes(singletonList(JAVA_SOURCES_PATTERN));
for (String file : fileSetManager.getIncludedFiles(fileSet)) {
files.add(new File(outputDirectory, file));
}
return files;
}
}
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
#define MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
#include <string.h> // size_t
#include "rtc_base/constructormagic.h"
#include "typedefs.h" // NOLINT(build/include)
namespace webrtc {
// This class generates pseudo-random samples.
class RandomVector {
public:
static const size_t kRandomTableSize = 256;
static const int16_t kRandomTable[kRandomTableSize];
RandomVector() : seed_(777), seed_increment_(1) {}
// Resets the generator state.
void Reset();
// Fills |output| with |length| pseudo-random samples.
void Generate(size_t length, int16_t* output);
// Increases the seed increment by |increase_by|.
void IncreaseSeedIncrement(int16_t increase_by);
// Accessors and mutators.
int16_t seed_increment() { return seed_increment_; }
void set_seed_increment(int16_t value) { seed_increment_ = value; }
private:
uint32_t seed_;
int16_t seed_increment_;
RTC_DISALLOW_COPY_AND_ASSIGN(RandomVector);
};
} // namespace webrtc
#endif // MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
import {
FindOneOptions,
FindOptions,
SimpleEntityRepository,
} from 'src/internals/databases/simple-entity/simple-entity.repository';
import { DatedEntity } from './dated.entity';
import { EntityManager } from 'typeorm';
export abstract class DatedEntityRepository<
Entity extends DatedEntity,
FieldsOmittedBeforePersistence extends keyof Entity = never,
> extends SimpleEntityRepository<
Entity,
'createdAt' | 'updatedAt' | FieldsOmittedBeforePersistence
> {
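// Unless the caller specifies an order, results default to most-recently-updated first.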
findOne(
query: FindOneOptions<Entity>,
options?: Partial<{ manager: EntityManager }>,
): Promise<Entity | undefined> {
return super.findOne(
{
...query,
order: query.order ?? {
updatedAt: 'DESC',
},
},
options,
);
}
async find(
query: FindOptions<Entity>,
options?: Partial<{ manager: EntityManager }>,
) {
return super.find(
{
...query,
order: query.order ?? {
updatedAt: 'DESC',
},
},
options,
);
}
}
/**
* This is a template method; subclasses implement their own logic.
*
* @param exchange the current server exchange {@linkplain ServerWebExchange}
* @param chain the current plugin chain {@linkplain SoulPluginChain}
* @param rule the rule to apply {@linkplain RuleZkDTO}
* @return {@code Mono<Void>} to indicate when request handling is complete
*/
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final SoulPluginChain chain, final RuleZkDTO rule) {
LOGGER.debug(".......... function plugin start..............");
return chain.execute(exchange);
}
//*******************************************************************************
// COPYRIGHT NOTES
// ---------------
// This is a part of BCGControlBar Library Professional Edition
// Copyright (C) 1998-2012 BCGSoft Ltd.
// All rights reserved.
//
// This source code can be used, distributed or modified
// only under terms and conditions
// of the accompanying license agreement.
//*******************************************************************************
//
// BCGPBaseTabbedBar.cpp : implementation file
//
#include "stdafx.h"
#include "bcgcbpro.h"
#include "BCGPBaseTabWnd.h"
#include "BCGPBaseTabbedBar.h"
#include "BCGPMiniFrameWnd.h"
#include "BCGPMultiMiniFrameWnd.h"
#include "BCGPDockingCBWrapper.h"
#include "BCGPAutoHideToolBar.h"
#include "BCGPDockBar.h"
#include "BCGPGlobalUtils.h"
#include "BCGPMDIFrameWnd.h"
#include "BCGPVisualManager.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
IMPLEMENT_DYNAMIC(CBCGPBaseTabbedBar, CBCGPDockingControlBar)
/////////////////////////////////////////////////////////////////////////////
// CBCGPBaseTabbedBar
CBCGPBaseTabbedBar::CBCGPBaseTabbedBar (BOOL bAutoDestroy)
{
m_bAutoDestroy = bAutoDestroy;
m_pTabWnd = NULL;
m_bEnableIDChecking = FALSE;
m_bSetCaptionTextToTabName = TRUE;
EnableDocking (CBRS_ALIGN_ANY);
}
//***********************************************************************************
CBCGPBaseTabbedBar::~CBCGPBaseTabbedBar()
{
}
BEGIN_MESSAGE_MAP(CBCGPBaseTabbedBar, CBCGPDockingControlBar)
//{{AFX_MSG_MAP(CBCGPBaseTabbedBar)
ON_WM_SIZE()
ON_WM_NCDESTROY()
ON_WM_ERASEBKGND()
ON_WM_SETFOCUS()
//}}AFX_MSG_MAP
ON_REGISTERED_MESSAGE(BCGM_CHANGE_ACTIVE_TAB, OnChangeActiveTab)
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// CBCGPBaseTabbedBar message handlers
void CBCGPBaseTabbedBar::OnSize(UINT nType, int cx, int cy)
{
CBCGPDockingControlBar::OnSize(nType, cx, cy);
CWnd* pUnderlinedWnd = GetUnderlinedWindow ();
if (pUnderlinedWnd != NULL && IsWindow (pUnderlinedWnd->GetSafeHwnd ()))
{
CRect rectClient;
GetClientRect (rectClient);
pUnderlinedWnd->SetWindowPos (NULL, 0, 0, rectClient.Width (), rectClient.Height (),
SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOREDRAW);
}
}
//***********************************************************************************
void CBCGPBaseTabbedBar::OnNcDestroy()
{
if (m_pTabWnd != NULL)
{
delete m_pTabWnd;
m_pTabWnd = NULL;
}
CBCGPDockingControlBar::OnNcDestroy();
if (m_bAutoDestroy)
{
delete this;
}
}
//***********************************************************************************
BOOL CBCGPBaseTabbedBar::OnEraseBkgnd(CDC* /*pDC*/)
{
return TRUE;
}
//***********************************************************************************
BOOL CBCGPBaseTabbedBar::AddTab (CWnd* pNewBar, BOOL bVisible, BOOL bSetActive,
BOOL bDetachable)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
ASSERT_VALID (pNewBar);
if (pNewBar->IsKindOf (RUNTIME_CLASS (CBCGPBaseTabbedBar)))
{
CBCGPBaseTabbedBar* pTabbedControlBar =
DYNAMIC_DOWNCAST (CBCGPBaseTabbedBar, pNewBar);
// it's false when the tabbed bar is added from miniframe to docksite
BOOL bSetInfoForSlider = (GetParentMiniFrame () != NULL);
ASSERT_VALID (pTabbedControlBar);
CBCGPBaseTabWnd* pWndTab = pTabbedControlBar->GetUnderlinedWindow ();
ASSERT_VALID (pWndTab);
int nTabsNum = pWndTab->GetTabsNum ();
ASSERT (nTabsNum > 0);
for (int i = 0; i < nTabsNum; i++)
{
CBCGPBaseControlBar* pWnd =
DYNAMIC_DOWNCAST (CBCGPBaseControlBar, pWndTab->GetTabWnd (i));
ASSERT_VALID (pWnd);
BOOL bVisible = pWndTab->IsTabVisible (i);
BOOL bDetachable = pWndTab->IsTabDetachable (i);
pWnd->EnableGripper (FALSE);
if (!AddTab (pWnd, bVisible, bVisible, bDetachable))
{
ASSERT (FALSE);
}
CBCGPDockingControlBar* pDockingBar =
DYNAMIC_DOWNCAST (CBCGPDockingControlBar, pWnd);
if (pDockingBar != NULL)
{
pDockingBar->m_recentDockInfo.SetInfo (bSetInfoForSlider,
pTabbedControlBar->m_recentDockInfo);
}
}
pWndTab->RemoveAllTabs ();
pNewBar->DestroyWindow ();
// stop processing - this function will be called
// from AttachToTabWnd
return FALSE;
}
else
{
if (pNewBar->IsKindOf (RUNTIME_CLASS (CBCGPControlBar)))
{
CBCGPControlBar* pNewControlBar =
DYNAMIC_DOWNCAST (CBCGPControlBar, pNewBar);
ASSERT_VALID (pNewControlBar);
CWnd* pOldParent = pNewControlBar->GetParent ();
pNewControlBar->OnBeforeChangeParent (m_pTabWnd, TRUE);
pNewControlBar->SetParent (m_pTabWnd);
pNewControlBar->OnAfterChangeParent (pOldParent);
if (pNewControlBar->IsKindOf (RUNTIME_CLASS (CBCGPDockingControlBar)))
{
((CBCGPDockingControlBar*) pNewControlBar)->EnableGripper (FALSE);
}
}
CString strText;
pNewBar->GetWindowText (strText);
m_pTabWnd->AddTab (pNewBar, strText, bSetActive, bDetachable);
int iTab = m_pTabWnd->GetTabsNum () - 1;
m_pTabWnd->SetTabHicon (iTab, pNewBar->GetIcon (FALSE));
m_pTabWnd->EnableTabDetach (iTab, bDetachable);
if (bVisible)
{
if (bSetActive)
{
m_pTabWnd->SetActiveTab (iTab);
}
}
else
{
ASSERT (!bSetActive);
m_pTabWnd->ShowTab (iTab, FALSE);
}
}
return TRUE;
}
//**************************************************************************************
CWnd* CBCGPBaseTabbedBar::FindBarByID (UINT uBarID)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
for (int i = 0; i < m_pTabWnd->GetTabsNum (); i++)
{
CWnd* pBar = m_pTabWnd->GetTabWnd (i);
ASSERT_VALID (pBar);
if ((UINT) pBar->GetDlgCtrlID () == uBarID)
{
return pBar;
}
}
return NULL;
}
//**************************************************************************************
CWnd* CBCGPBaseTabbedBar::FindBarByTabNumber (int nTabNum, BOOL bGetWrappedBar)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
if (nTabNum < 0 || nTabNum >= m_pTabWnd->GetTabsNum ())
{
return NULL;
}
CWnd* pWnd = m_pTabWnd->GetTabWnd (nTabNum);
ASSERT_VALID (pWnd);
if (bGetWrappedBar && pWnd->IsKindOf (RUNTIME_CLASS (CBCGPDockingCBWrapper)))
{
CBCGPDockingCBWrapper* pWrapper =
DYNAMIC_DOWNCAST (CBCGPDockingCBWrapper, pWnd);
pWnd = pWrapper->GetWrappedWnd ();
ASSERT_VALID (pWnd);
}
return pWnd;
}
//*******************************************************************************
BOOL CBCGPBaseTabbedBar::DetachControlBar (CWnd* pBar, BOOL bHide)
{
ASSERT_VALID (this);
ASSERT_VALID (pBar);
ASSERT_VALID (m_pTabWnd);
int nTabNumber = m_pTabWnd->GetTabFromHwnd (pBar->GetSafeHwnd ());
if (nTabNumber < 0)
{
return FALSE;
}
m_pTabWnd->DetachTab (BCGP_DM_UNKNOWN, nTabNumber, bHide);
return TRUE;
}
//*******************************************************************************
BOOL CBCGPBaseTabbedBar::RemoveControlBar (CWnd* pBar)
{
ASSERT_VALID (this);
ASSERT_VALID (pBar);
ASSERT_VALID (m_pTabWnd);
int nTabNumber = m_pTabWnd->GetTabFromHwnd (pBar->GetSafeHwnd ());
if (nTabNumber < 0 || nTabNumber >= m_pTabWnd->GetTabsNum ())
{
return FALSE;
}
m_pTabWnd->RemoveTab (nTabNumber);
if (m_pTabWnd->GetTabsNum () == 0)
{
if (AllowDestroyEmptyTabbedBar ())
{
if (IsDocked ())
{
UnDockControlBar ();
}
else
{
CBCGPMiniFrameWnd* pMiniFrame = GetParentMiniFrame ();
if (pMiniFrame != NULL)
{
pMiniFrame->RemoveControlBar (this);
}
}
DestroyWindow ();
return FALSE;
}
else
{
m_pTabWnd->ShowWindow (SW_HIDE);
}
}
return TRUE;
}
//*******************************************************************************
BOOL CBCGPBaseTabbedBar::ShowTab (CWnd* pBar, BOOL bShow, BOOL bDelay, BOOL bActivate)
{
ASSERT_VALID (this);
ASSERT_VALID (pBar);
ASSERT_VALID (m_pTabWnd);
int nTabNum = m_pTabWnd->GetTabFromHwnd (pBar->GetSafeHwnd ());
BOOL bResult = m_pTabWnd->ShowTab (nTabNum, bShow, !bDelay, bActivate);
BOOL bNowVisible = m_pTabWnd->GetVisibleTabsNum () > 0;
if (bNowVisible && !(m_pTabWnd->GetStyle () & WS_VISIBLE))
{
m_pTabWnd->ShowWindow (SW_SHOW);
}
CBCGPDockingControlBar::ShowControlBar (bNowVisible, bDelay, bActivate);
return bResult;
}
//*******************************************************************************
BOOL CBCGPBaseTabbedBar::FloatTab (CWnd* pBar, int nTabID,
BCGP_DOCK_METHOD dockMethod,
BOOL bHide)
{
ASSERT_VALID (this);
ASSERT_VALID (pBar);
ASSERT_VALID (m_pTabWnd);
CString strWndText;
pBar->GetWindowText (strWndText);
if (strWndText.IsEmpty ())
{
if (m_pTabWnd->GetTabLabel (nTabID, strWndText))
{
pBar->SetWindowText (strWndText);
}
}
m_pTabWnd->RemoveTab (nTabID);
if (dockMethod == BCGP_DM_MOUSE)
{
m_pTabWnd->SendMessage (WM_LBUTTONUP, 0, 0);
}
CBCGPDockingControlBar* pDockingBar =
DYNAMIC_DOWNCAST (CBCGPDockingControlBar, pBar);
if (pDockingBar != NULL)
{
pDockingBar->StoreRecentTabRelatedInfo ();
}
if (dockMethod == BCGP_DM_DBL_CLICK && pDockingBar != NULL)
{
CBCGPMultiMiniFrameWnd* pParentMiniFrame =
DYNAMIC_DOWNCAST (CBCGPMultiMiniFrameWnd, GetParentMiniFrame ());
if (pParentMiniFrame != NULL)
{
pParentMiniFrame->DockRecentControlBarToMainFrame (pDockingBar);
return TRUE;
}
else if (m_hDefaultSlider != NULL && IsWindow (m_hDefaultSlider))
{
CBCGPMultiMiniFrameWnd* pRecentMiniFrame =
DYNAMIC_DOWNCAST (CBCGPMultiMiniFrameWnd,
CWnd::FromHandlePermanent (pDockingBar->m_recentDockInfo.m_hRecentMiniFrame));
if (pRecentMiniFrame != NULL &&
pRecentMiniFrame->AddRecentControlBar (pDockingBar))
{
return TRUE;
}
}
}
if (pBar->IsKindOf (RUNTIME_CLASS (CBCGPControlBar)))
{
CBCGPControlBar* pControlBar =
DYNAMIC_DOWNCAST (CBCGPControlBar, pBar);
ASSERT_VALID (pControlBar);
pControlBar->FloatControlBar (pControlBar->m_recentDockInfo.m_rectRecentFloatingRect,
dockMethod, !bHide);
return TRUE;
}
return FALSE;
}
//**************************************************************************************
void CBCGPBaseTabbedBar::StoreRecentDockInfo ()
{
int nTabsNum = m_pTabWnd->GetTabsNum ();
for (int i = 0; i < nTabsNum; i++)
{
CBCGPDockingControlBar* pBar =
DYNAMIC_DOWNCAST (CBCGPDockingControlBar, m_pTabWnd->GetTabWnd (i));
if (pBar != NULL)
{
pBar->StoreRecentTabRelatedInfo ();
}
}
CBCGPDockingControlBar::StoreRecentDockInfo ();
}
//**************************************************************************************
BOOL CBCGPBaseTabbedBar::FloatControlBar (CRect rectFloat,
BCGP_DOCK_METHOD dockMethod,
bool bShow)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
if (!CBCGPDockingControlBar::FloatControlBar (rectFloat, dockMethod, bShow))
{
return FALSE;
}
CBCGPMiniFrameWnd* pParentFrame = GetParentMiniFrame ();
if (pParentFrame != NULL)
{
pParentFrame->SetIcon (m_pTabWnd->GetTabHicon (m_pTabWnd->GetActiveTab ()), FALSE);
}
return TRUE;
}
//**************************************************************************************
void CBCGPBaseTabbedBar::Serialize (CArchive& ar)
{
CBCGPDockingControlBar::Serialize (ar);
if (ar.IsLoading ())
{
ar >> m_bAutoDestroy;
}
else
{
ar << m_bAutoDestroy;
}
}
//**************************************************************************************
void CBCGPBaseTabbedBar::SerializeTabWindow (CArchive& ar)
{
if (m_pTabWnd != NULL)
{
m_pTabWnd->Serialize (ar);
}
}
//**************************************************************************************
void CBCGPBaseTabbedBar::LoadSiblingBarIDs (CArchive& ar, CList<UINT, UINT>& lstBarIDs)
{
ASSERT (ar.IsLoading ());
if (ar.IsLoading ())
{
int nTabsNum = 0;
ar >> nTabsNum;
for (int i = 0; i < nTabsNum; i++)
{
int nBarID = -1;
ar >> nBarID;
ASSERT (nBarID != -1);
lstBarIDs.AddTail (nBarID);
}
}
}
//**************************************************************************************
void CBCGPBaseTabbedBar::SaveSiblingBarIDs (CArchive& ar)
{
ASSERT_VALID (this);
ASSERT (ar.IsStoring ());
ASSERT_VALID (m_pTabWnd);
if (ar.IsStoring () && m_pTabWnd != NULL)
{
int nTabsNum = m_pTabWnd->GetTabsNum ();
// DO NOT SAVE empty tabbed bars
if (nTabsNum > 0)
{
ar << nTabsNum;
for (int i = 0; i < nTabsNum; i++)
{
CBCGPBaseControlBar* pWnd =
DYNAMIC_DOWNCAST (CBCGPBaseControlBar, m_pTabWnd->GetTabWnd (i));
ASSERT_VALID (pWnd);
ar << pWnd->GetDlgCtrlID ();
}
}
}
}
//**************************************************************************************
BOOL CBCGPBaseTabbedBar::LoadState (LPCTSTR lpszProfileName, int nIndex, UINT uiID)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
FillDefaultTabsOrderArray ();
// if initially tabbed bars were detached by user and exist only as regular
// docking control bars we need to give them a chance to load their state
// from the registry
CBCGPDockingControlBar::LoadState (lpszProfileName, nIndex, uiID);
int nTabsNum = m_pTabWnd->GetTabsNum ();
for (int i = 0; i < nTabsNum; i++)
{
CBCGPBaseControlBar* pWnd =
DYNAMIC_DOWNCAST (CBCGPBaseControlBar, m_pTabWnd->GetTabWnd (i));
if (pWnd != NULL && IsRestoreTabsState())
{
ASSERT_VALID (pWnd);
pWnd->LoadState (lpszProfileName, nIndex, uiID);
}
}
return TRUE;
}
//**************************************************************************************
BOOL CBCGPBaseTabbedBar::SaveState (LPCTSTR lpszProfileName, int nIndex, UINT uiID)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
CBCGPDockingControlBar::SaveState (lpszProfileName, nIndex, uiID);
int nTabsNum = m_pTabWnd->GetTabsNum ();
for (int i = 0; i < nTabsNum; i++)
{
CBCGPBaseControlBar* pWnd =
DYNAMIC_DOWNCAST (CBCGPBaseControlBar, m_pTabWnd->GetTabWnd (i));
if (pWnd != NULL)
{
ASSERT_VALID (pWnd);
if (!pWnd->SaveState (lpszProfileName, nIndex, uiID))
{
return FALSE;
}
}
}
return TRUE;
}
//**************************************************************************************
void CBCGPBaseTabbedBar::ApplyRestoredTabInfo (BOOL bUseTabIndexes)
{
ASSERT_VALID (this);
if (m_pTabWnd != NULL)
{
m_pTabWnd->ApplyRestoredTabInfo (bUseTabIndexes);
}
}
//**************************************************************************************
void CBCGPBaseTabbedBar::RecalcLayout ()
{
ASSERT_VALID (this);
CBCGPDockingControlBar::RecalcLayout ();
if (m_pTabWnd != NULL)
{
m_pTabWnd->RecalcLayout ();
}
}
//**************************************************************************************
BOOL CBCGPBaseTabbedBar::CanFloat () const
{
ASSERT_VALID (this);
return CBCGPDockingControlBar::CanFloat ();
}
//**************************************************************************************
void CBCGPBaseTabbedBar::OnSetFocus(CWnd* pOldWnd)
{
CBCGPDockingControlBar::OnSetFocus(pOldWnd);
// Pass the focus to the tab window
CWnd* pWndChild = GetUnderlinedWindow();
if (pWndChild != NULL)
pWndChild->SetFocus();
}
//**************************************************************************************
CBCGPAutoHideToolBar* CBCGPBaseTabbedBar::SetAutoHideMode (BOOL bMode, DWORD dwAlignment,
CBCGPAutoHideToolBar* pCurrAutoHideBar,
BOOL bUseTimer)
{
BOOL bHandleMinSize = CBCGPControlBar::m_bHandleMinSize;
if (bHandleMinSize)
{
CBCGPControlBar::m_bHandleMinSize = FALSE;
}
CBCGPAutoHideToolBar* pAutoHideBar = pCurrAutoHideBar;
CBCGPDockingControlBar* pActiveBar = NULL;
int nActiveTab = m_pTabWnd->GetActiveTab ();
int nTabsNum = m_pTabWnd->GetTabsNum ();
CObList lstTmp;
ShowControlBar (FALSE, TRUE, FALSE);
int nNonDetachedCount = 0;
for (int nNextTab = nTabsNum - 1; nNextTab >= 0; nNextTab--)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
m_pTabWnd->GetTabWnd (nNextTab));
ASSERT_VALID (pBar);
BOOL bIsVisible = m_pTabWnd->IsTabVisible (nNextTab);
BOOL bDetachable = m_pTabWnd->IsTabDetachable (nNextTab);
if (pBar != NULL && bIsVisible && bDetachable)
{
m_pTabWnd->RemoveTab (nNextTab, FALSE);
pBar->EnableGripper (TRUE);
pBar->StoreRecentTabRelatedInfo ();
CWnd* pOldParent = pBar->GetParent ();
pBar->OnBeforeChangeParent (m_pDockSite);
pBar->SetParent (m_pDockSite);
pBar->SetOwner (m_pDockSite);
pBar->OnAfterChangeParent (pOldParent);
lstTmp.AddHead (pBar);
if (nNextTab == nActiveTab)
{
pActiveBar = pBar;
}
}
else
{
nNonDetachedCount++;
}
}
BOOL bActiveSet = FALSE;
CBCGPControlBar* pNewAHBar = NULL;
for (POSITION pos = lstTmp.GetHeadPosition (); pos != NULL;)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
lstTmp.GetNext (pos));
BOOL bUseTimerForActiveBar = (pBar == pActiveBar) && bUseTimer;
pNewAHBar = pBar->SetAutoHideMode (TRUE, dwAlignment, NULL, bUseTimerForActiveBar);
if (pNewAHBar != NULL)
{
pNewAHBar->m_bFirstInGroup = (lstTmp.GetHead () == pBar);
pNewAHBar->m_bLastInGroup = (lstTmp.GetTail () == pBar);
pNewAHBar->m_bActiveInGroup = (pBar == pActiveBar);
if (!bActiveSet && pNewAHBar->m_bActiveInGroup)
{
bActiveSet = TRUE;
}
}
}
if (pNewAHBar != NULL)
{
if (!bActiveSet)
{
pNewAHBar->m_bActiveInGroup = TRUE;
}
CRect rect (0, 0, 0, 0);
pNewAHBar->GetParentDockBar ()->RepositionBars (rect);
}
if (nNonDetachedCount > 0)
{
if (m_pTabWnd->GetVisibleTabsNum () == 0)
{
ShowControlBar (FALSE, TRUE, FALSE);
}
else
{
if (m_pTabWnd->GetActiveTab () == -1)
{
int nVisibleTab = -1;
GetFirstVisibleTab (nVisibleTab);
m_pTabWnd->SetActiveTab (nVisibleTab);
}
m_pTabWnd->RecalcLayout ();
ShowControlBar (TRUE, TRUE, FALSE);
pAutoHideBar = CBCGPDockingControlBar::SetAutoHideMode (bMode, dwAlignment, pCurrAutoHideBar, bUseTimer);
}
}
if (pAutoHideBar != NULL)
{
pAutoHideBar->UpdateVisibleState();
}
CBCGPControlBar::m_bHandleMinSize = bHandleMinSize;
return pAutoHideBar;
}
//**************************************************************************************
CWnd* CBCGPBaseTabbedBar::GetFirstVisibleTab (int& iTabNum)
{
iTabNum = -1;
if (m_pTabWnd == NULL)
{
return NULL;
}
return m_pTabWnd->GetFirstVisibleTab (iTabNum);
}
//**************************************************************************************
HICON CBCGPBaseTabbedBar::GetBarIcon (BOOL bBigIcon)
{
HICON hIcon = GetIcon (bBigIcon);
if (hIcon == NULL && m_pTabWnd != NULL)
{
CWnd* pWnd = m_pTabWnd->GetActiveWnd ();
if (pWnd != NULL)
{
hIcon = pWnd->GetIcon (bBigIcon);
}
}
return hIcon;
}
//**************************************************************************************
LRESULT CBCGPBaseTabbedBar::OnChangeActiveTab (WPARAM wp, LPARAM)
{
int iTabNum = (int) wp;
CString strLabel;
if (m_pTabWnd != NULL && m_pTabWnd->GetTabLabel (iTabNum, strLabel) &&
m_bSetCaptionTextToTabName)
{
SetWindowText (strLabel);
}
OnActivateTab (iTabNum);
if (CBCGPControlBar::m_bHandleMinSize)
{
CBCGPMiniFrameWnd* pWnd = GetParentMiniFrame ();
if (pWnd != NULL)
{
pWnd->OnBarRecalcLayout ();
}
else
{
globalUtils.ForceAdjustLayout (globalUtils.GetDockManager (GetDockSite ()));
}
}
return 0;
}
//**************************************************************************************
BOOL CBCGPBaseTabbedBar::Dock (CBCGPBaseControlBar* pTargetBar, LPCRECT lpRect,
BCGP_DOCK_METHOD dockMethod)
{
BOOL bFloating = (GetParentMiniFrame () != NULL);
int nTabsNum = m_pTabWnd->GetTabsNum ();
BOOL bTabsHaveRecentInfo = TRUE;
if (bFloating)
{
for (int i = 0; i < nTabsNum; i++)
{
if (m_pTabWnd->IsTabDetachable (i))
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
m_pTabWnd->GetTabWnd (i));
if (pBar != NULL)
{
ASSERT_VALID (pBar);
if (pBar->m_recentDockInfo.GetRecentContainer (TRUE) == NULL &&
pBar->m_recentDockInfo.GetRecentTabContainer (TRUE) == NULL)
{
bTabsHaveRecentInfo = FALSE;
break;
}
}
}
}
}
if (dockMethod != BCGP_DM_DBL_CLICK || !bTabsHaveRecentInfo)
{
return CBCGPDockingControlBar::Dock (pTargetBar, lpRect, dockMethod);
}
if ((bFloating && m_recentDockInfo.GetRecentContainer (TRUE) != NULL) ||
(!bFloating && m_recentDockInfo.GetRecentContainer (FALSE) != NULL))
{
return CBCGPDockingControlBar::Dock (pTargetBar, lpRect, dockMethod);
}
ShowControlBar (FALSE, TRUE, FALSE);
int nNonDetachedCount = 0;
for (int nNextTab = nTabsNum - 1; nNextTab >= 0; nNextTab--)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
m_pTabWnd->GetTabWnd (nNextTab));
ASSERT_VALID (pBar);
BOOL bIsVisible = m_pTabWnd->IsTabVisible (nNextTab);
BOOL bDetachable = m_pTabWnd->IsTabDetachable (nNextTab);
if (pBar != NULL && bIsVisible && bDetachable)
{
m_pTabWnd->RemoveTab (nNextTab, FALSE);
pBar->EnableGripper (TRUE);
pBar->StoreRecentTabRelatedInfo ();
pBar->DockControlBar (pBar, lpRect, dockMethod);
}
else
{
nNonDetachedCount++;
}
}
if (nNonDetachedCount > 0)
{
if (m_pTabWnd->GetVisibleTabsNum () == 0)
{
ShowControlBar (FALSE, TRUE, FALSE);
}
else
{
if (m_pTabWnd->GetActiveTab () == -1)
{
int nVisibleTab = -1;
GetFirstVisibleTab (nVisibleTab);
m_pTabWnd->SetActiveTab (nVisibleTab);
}
m_pTabWnd->RecalcLayout ();
ShowControlBar (TRUE, TRUE, FALSE);
return CBCGPDockingControlBar::Dock (pTargetBar, lpRect, dockMethod);
}
}
else
{
DestroyWindow ();
return FALSE;
}
return TRUE;
}
//**************************************************************************************
void CBCGPBaseTabbedBar::FillDefaultTabsOrderArray ()
{
ASSERT_VALID (m_pTabWnd);
m_arDefaultTabsOrder.RemoveAll ();
const int nTabsNum = m_pTabWnd->GetTabsNum ();
for (int i = 0; i < nTabsNum; i++)
{
int nID = m_pTabWnd->GetTabID (i);
m_arDefaultTabsOrder.Add (nID);
}
}
//**************************************************************************************
void CBCGPBaseTabbedBar::GetMinSize (CSize& size) const
{
if (CBCGPControlBar::m_bHandleMinSize)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
m_pTabWnd->GetActiveWnd ());
if (pBar != NULL)
{
pBar->GetMinSize (size);
return;
}
}
CBCGPDockingControlBar::GetMinSize (size);
}
//**************************************************************************************
void CBCGPBaseTabbedBar::GetControlBarList (CObList& lst, CRuntimeClass* pRTCFilter)
{
CBCGPBaseTabWnd* pTabWnd = GetUnderlinedWindow ();
for (int i = 0; i < pTabWnd->GetTabsNum (); i++)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar, pTabWnd->GetTabWnd (i));
if (pBar != NULL)
{
ASSERT_VALID (pBar);
if (pRTCFilter == NULL || pBar->GetRuntimeClass () == pRTCFilter)
{
lst.AddTail (pBar);
}
}
}
}
//*******************************************************************************
void CBCGPBaseTabbedBar::ConvertToTabbedDocument (BOOL bActiveTabOnly)
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
CBCGPMDIFrameWnd* pMDIFrame = DYNAMIC_DOWNCAST (CBCGPMDIFrameWnd, GetDockSite ());
if (pMDIFrame == NULL)
{
ASSERT (FALSE);
return;
}
ASSERT_VALID (pMDIFrame);
HWND hwnd = GetSafeHwnd ();
if (bActiveTabOnly)
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar,
m_pTabWnd->GetActiveWnd ());
if (pBar == NULL)
{
return;
}
pBar->StoreRecentTabRelatedInfo ();
pMDIFrame->ControlBarToTabbedDocument (pBar);
RemoveControlBar (pBar);
}
else
{
CObList lst;
CBCGPBaseTabWnd* pTabWnd = GetUnderlinedWindow ();
for (int i = 0; i < pTabWnd->GetTabsNum (); i++)
{
if (pTabWnd->IsTabVisible (i))
{
CBCGPDockingControlBar* pBar = DYNAMIC_DOWNCAST (CBCGPDockingControlBar, pTabWnd->GetTabWnd (i));
if (pBar != NULL)
{
pBar->StoreRecentTabRelatedInfo ();
lst.AddTail (pBar);
}
}
}
for (POSITION pos = lst.GetHeadPosition (); pos != NULL;)
{
CBCGPDockingControlBar* pBar = (CBCGPDockingControlBar*) lst.GetNext (pos);
pMDIFrame->ControlBarToTabbedDocument (pBar);
RemoveControlBar (pBar);
}
}
if (IsWindow (hwnd) && GetVisibleTabsNum () == 0 && GetTabsNum () > 0)
{
ShowControlBar (FALSE, FALSE, FALSE);
}
}
//*******************************************************************************
void CBCGPBaseTabbedBar::OnChangeActiveState()
{
ASSERT_VALID (this);
ASSERT_VALID (m_pTabWnd);
m_pTabWnd->SetParentFocused(m_bActive);
if (CBCGPVisualManager::GetInstance ()->IsFocusedTabSeparateLook())
{
m_pTabWnd->RedrawWindow();
}
}
#
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
import copy
import time
import gym
import hydra
import torch
import torch.nn as nn
import torch.nn.functional as F
from gym.wrappers import TimeLimit
from omegaconf import DictConfig, OmegaConf
import salina
import salina.rl.functional as RLF
from salina import TAgent, Workspace, get_arguments, get_class, instantiate_class
from salina.agents import Agents, TemporalAgent
from salina.agents.gym import GymAgent
from salina.logger import TFLogger
def _index(tensor_3d, tensor_2d):
"""This function is used to index a 3d tensors using a 2d tensor"""
x, y, z = tensor_3d.size()
t = tensor_3d.reshape(x * y, z)
tt = tensor_2d.reshape(x * y)
v = t[torch.arange(x * y), tt]
v = v.reshape(x, y)
return v
class REINFORCEAgent(TAgent):
def __init__(self, observation_size, hidden_size, n_actions):
super().__init__()
self.model = nn.Sequential(
nn.Linear(observation_size, hidden_size),
nn.ReLU(),
nn.Linear(hidden_size, n_actions),
)
self.critic_model = nn.Sequential(
nn.Linear(observation_size, hidden_size),
nn.ReLU(),
nn.Linear(hidden_size, 1),
)
def forward(self, t, stochastic, **kwargs):
observation = self.get(("env/env_obs", t))
scores = self.model(observation)
probs = torch.softmax(scores, dim=-1)
critic = self.critic_model(observation).squeeze(-1)
if stochastic:
action = torch.distributions.Categorical(probs).sample()
else:
action = probs.argmax(1)
self.set(("action", t), action)
self.set(("action_probs", t), probs)
self.set(("baseline", t), critic)
def make_cartpole(max_episode_steps):
return TimeLimit(gym.make("CartPole-v0"), max_episode_steps=max_episode_steps)
def run_reinforce(cfg):
logger = instantiate_class(cfg.logger)
env_agent = GymAgent(
get_class(cfg.algorithm.env),
get_arguments(cfg.algorithm.env),
n_envs=cfg.algorithm.n_envs,
)
env = instantiate_class(cfg.algorithm.env)
observation_size = env.observation_space.shape[0]
n_actions = env.action_space.n
del env
a2c_agent = REINFORCEAgent(
observation_size, cfg.algorithm.architecture.hidden_size, n_actions
)
agent = Agents(env_agent, a2c_agent)
agent = TemporalAgent(agent)
agent.seed(cfg.algorithm.env_seed)
# 6) Configure the workspace to the right dimension. The time size is greater than the maximum episode size to be able to store all episode states
workspace = salina.Workspace()
# 7) Configure the optimizer over the a2c agent
optimizer_args = get_arguments(cfg.algorithm.optimizer)
optimizer = get_class(cfg.algorithm.optimizer)(
a2c_agent.parameters(), **optimizer_args
)
# 8) Training loop
epoch = 0
for epoch in range(cfg.algorithm.max_epochs):
# Execute the agent on the workspace to sample complete episodes
# Since not all the variables of workspace will be overwritten, it is better to clear the workspace
workspace.clear()
agent(workspace, stochastic=True, t=0, stop_variable="env/done")
# Get relevant tensors (size are timestep x n_envs x ....)
baseline, done, action_probs, reward, action = workspace[
"baseline", "env/done", "action_probs", "env/reward", "action"
]
r_loss = compute_reinforce_loss(
reward, action_probs, baseline, action, done, cfg.algorithm.discount_factor
)
# Log losses
[logger.add_scalar(k, v.item(), epoch) for k, v in r_loss.items()]
loss = (
-cfg.algorithm.entropy_coef * r_loss["entropy_loss"]
+ cfg.algorithm.baseline_coef * r_loss["baseline_loss"]
- cfg.algorithm.reinforce_coef * r_loss["reinforce_loss"]
)
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Compute the cumulated reward on final_state
creward = workspace["env/cumulated_reward"]
tl = done.float().argmax(0)
creward = creward[tl, torch.arange(creward.size()[1])]
logger.add_scalar("reward", creward.mean().item(), epoch)
def compute_reinforce_loss(
reward, action_probabilities, baseline, action, done, discount_factor
):
"""This function computes the reinforce loss, considering that episodes may have different lengths."""
batch_size = reward.size()[1]
# Find the first done occurence for each episode
v_done, trajectories_length = done.float().max(0)
trajectories_length += 1
assert v_done.eq(1.0).all()
max_trajectories_length = trajectories_length.max().item()
# Shorten trajectories for accelerate computation
reward = reward[:max_trajectories_length]
action_probabilities = action_probabilities[:max_trajectories_length]
baseline = baseline[:max_trajectories_length]
action = action[:max_trajectories_length]
# Create a binary mask to mask useless values (of size max_trajectories_length x batch_size)
arange = (
torch.arange(max_trajectories_length, device=done.device)
.unsqueeze(-1)
.repeat(1, batch_size)
)
mask = arange.lt(
trajectories_length.unsqueeze(0).repeat(max_trajectories_length, 1)
)
reward = reward * mask
# Compute discounted cumulated reward: G_t = r_t + discount_factor * G_{t+1}
cumulated_reward = [torch.zeros_like(reward[-1])]
for t in range(max_trajectories_length - 1, 0, -1):
cumulated_reward.append(discount_factor * cumulated_reward[-1] + reward[t])
cumulated_reward.reverse()
cumulated_reward = torch.cat([c.unsqueeze(0) for c in cumulated_reward])
# baseline loss
g = baseline - cumulated_reward
baseline_loss = (g) ** 2
baseline_loss = (baseline_loss * mask).mean()
# policy loss
log_probabilities = _index(action_probabilities, action).log()
policy_loss = log_probabilities * -g.detach()
policy_loss = policy_loss * mask
policy_loss = policy_loss.mean()
# entropy loss
entropy = torch.distributions.Categorical(action_probabilities).entropy() * mask
entropy_loss = entropy.mean()
return {
"baseline_loss": baseline_loss,
"reinforce_loss": policy_loss,
"entropy_loss": entropy_loss,
}
@hydra.main(config_path=".", config_name="main.yaml")
def main(cfg):
import torch.multiprocessing as mp
mp.set_start_method("spawn")
run_reinforce(cfg)
if __name__ == "__main__":
main()
CleanTechnica
Climate change deniers are getting angrier and angrier because there is less and less ground that they can even moderately stand upon. They are being forced off of multiple positions and the world is ignoring their opinions en masse.
People used to be able to believe that warming simply wasn't occurring without much cognitive dissonance.
In psychology, cognitive dissonance is the mental stress or discomfort experienced by an individual who holds two or more contradictory beliefs, ideas, or values at the same time, performs an action that is contradictory to one or more beliefs, ideas, or values, or is confronted by new information that conflicts with existing beliefs, ideas, or values.
More and more, their position is being challenged in multiple ways, and they are being forced to greater and greater mental leaps in order to hold on to their position. But even when they move, it’s stressful and embarrassing, leading to more anger.
Cognitive dissonance makes people mad. Being forced to change makes people mad. Being forced to admit, even subconsciously, that they were wrong and were often wrong publicly and loudly, makes people mad.
I created this continuum of positions on climate change a while ago. It ranges from the extreme of not believing that any warming is occurring at all, to believing in impacts above the range of IPCC scenarios.
Some people, in the face of overwhelming evidence, continue to hold to that position. Most who held it have been forced off of it. They have been forced to change by the sheer weight of evidence which says that they are wrong. But usually they just move slightly to the left on the continuum.
A lot of people who firmly held the belief that CO2 emissions from humans were insignificant have been forced off of that position too. And every position to the left of the chart. Basically, the leftmost positions are intellectually and empirically untenable, so anyone with a fragment of intellectual self-respect who holds them is confronted daily with evidence that gives them cognitive dissonance, and if they move to a slightly more moderate position for relief, it doesn’t help much.
What evidence of the shift to the right exists? Well, an Australian organization has surveyed people about their positions since 2010 and finds regular movement, and an acceleration in it.
And in the USA, climate change is shaping up to be a game-changing election issue, with denialists increasingly unable to get elected, and once again with recent rapid strides.
The new survey found a growing number of registered voters understand global warming is happening: “Three in four (73%, up 7 points since Spring 2014) now think it is happening. Large majorities of Democrats — liberal (95%) and moderate/conservative (80%) — think it is happening, as do three in four Independents (74%, up 15 points since Spring 2014) and the majority of liberal/moderate Republicans (71%, up 10 points).”
A counter-example of someone who has made lemonade out of the lemons they keep getting handed is Bjorn Lomborg. He has made a good fiscal career out of asserting a succession of positions on climate change from the left-hand side of the graph up to his current position of stating that we should be doing geo-engineering and continuing to burn fossil fuels. At the beginning of 1998 he claimed, “The greenhouse effect is extremely doubtful.” Later that year, after much intellectual abuse, he admitted that CO2 was causing some tiny rises in temperature. In 2001, he slipped to some warming, but no need to do anything about it. By 2010, he’d shifted to continued use of fossil fuel and geo-engineering, with maybe some token efforts to reduce carbon emissions.
Why do I say he’s made lemonade?
Lomborg’s Copenhagen Consensus Center (CCC), though long associated with his native Denmark, actually registered as a US-based non-profit organization back in 2008. That’s how we know Lomborg walked away with a cool $775,000 in pay from the CCC in 2012.
As a note, he hasn't made academic lemonade out of this. He has actually backslid academically from an associate professor on the tenure track to an adjunct professor off it, and recently a $4 million AUD governmental grant was refused by every university in Australia if it involved Lomborg setting up a 'research' facility among academics with actual intellectual integrity.
But most people aren’t as effective at happily getting their palms greased while being forced off of one intellectual position after another by cold, hard facts.
Patrick Moore is a fairly sad example of that. At one point he was president of Greenpeace Canada, although not a founder, as he continues to insist. He shifted to a potentially reasonable path of forming a consultancy to work with forestry industry firms to find more sustainable means of harvesting trees. However, over time he’s been fully co-opted by fairly egregious concerns, and has been denying climate change exists since at least 2006.
there is no scientific proof of causation between the human-induced increase in atmospheric CO2 and the recent global warming trend, a trend that has been evident for about 500 years, long before the human-induced increase in CO2 was evident.
More recently, his tone is increasingly angry.
“there will be a whole generation of people who are just blindly following this climate hysteria.”
And angrier.
What’s particularly absurd about this leftist conspiracy is that it is currently doing the exact opposite of the things left-wing people profess to care about: it is enriching crony capitalist fat cats at the expense of the world’s poor.
The rest of the world is strongly centered on the right side of the graph, within the IPCC range of scenarios. So much so that 195 countries agreed in Paris in December of 2015 to hold warming to 2 degrees above pre-industrial levels with an aspirational target of 1.5 degrees.
Marc Morano is another strident climate change denialist. It’s hard to say if his nastiness is actually increasing though, as he was an early adopter of the vicious Swift Boat allegations against John Kerry. Like Moore, he showed up at the poorly attended denialist counter-conference in Paris in 2015, mostly to pretend that his faux documentary was premiering to capacity crowds. Certainly his publishing the addresses of climate change scientists on his site is a very hostile action with no discernible redeeming qualities, and the actions of an increasingly isolated and embittered person.
All of the 170 COP21-signatory world leaders are showing in the starkest terms that they fundamentally disagree with the person holding positions on the left side of the chart. That exacerbates the cognitive dissonance of course, because in general, most people think that leaders of countries are often respectable and well-advised people, so their opinions likely hold weight. But it also makes them mad because they see an overwhelming majority of the world doing something that they think is unnecessary because of their un-empirical position.
They are being forced into an ugly corner. And they are painting themselves into it daily and weekly and monthly. And it’s painful. So they lash out.
A very similar dynamic is playing out with anti-wind energy advocates. The positions that they hold on issues like impacts on human health, livestock, real estate values and the like are just not supported by any facts, and study after study proves that they are wrong, so they get increasingly angry and bitter and hostile. And they are smaller in numbers as the sensible ones migrate to healthier mental positions.
Basically, the further to the left on the chart you are, the more likely you are to be bitter and angry. But anyone to the left of the low-end IPCC projections is likely to be annoyed and dismayed and lash out occasionally.
As to why they are so common, that’s simple. A group of self-interested companies and individuals set out on a course of creating uncertainty about climate change far beyond any that existed a couple of decades ago. It worked. Sadly.
Source: CleanTechnica. Reproduced with permission.
package jotnar
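// logConfig holds the settings used to initialize the logger.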
type logConfig struct {
FilePath string
Level string
Format string // text or json
Timeformat string
IsPretty bool
}
var defaultLogConfig *logConfig
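// InitLogger reads the log configuration from the currently active source
// ("default" or "viper") and then initializes logrus from it. It returns the
// receiver so calls can be chained.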
func (j *Jotnar) InitLogger() *Jotnar {
switch CurrentConfigType {
case "default":
logReadFromDefault()
case "viper":
logReadFromViper()
}
initLogrus()
return j
}
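// logReadFromDefault fills the config with hard-coded defaults.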
func logReadFromDefault() {
	defaultLogConfig = &logConfig{
Level: "debug",
Format: "text",
Timeformat: "2006-01-02 15:04:05",
IsPretty: true,
}
}
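// logReadFromViper fills the config from Viper keys under "log.default.*".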
func logReadFromViper() {
v := GetViper()
	defaultLogConfig = &logConfig{
		FilePath: v.GetString("log.default.file"),
		// GetString here is presumably a package-level helper that falls back
		// to the second argument when the key is unset.
		Level:      GetString("log.default.level", "info"),
		Format:     GetString("log.default.format", "text"),
		Timeformat: GetString("log.default.timeFormat", "2006-01-02 15:04:05"),
		IsPretty:   v.GetBool("log.default.isPretty"),
}
}
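// Example wiring (a sketch; assumes a Jotnar value exists and that
// CurrentConfigType has been set elsewhere in the package):
//
//	j := (&Jotnar{}).InitLogger()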
(Reuters) - A federal appeals court said the Environmental Protection Agency should not have been ordered by a lower court judge to assess how its air regulations affect coal industry jobs, in a defeat for Murray Energy Corp, a large coal miner.
The 4th U.S. Circuit Court of Appeals in Richmond, Virginia, ruled on Thursday that in passing the Clean Air Act, Congress did not authorize lawsuits such as Murray’s, depriving federal courts of jurisdiction to hear them.
Murray had accused the EPA, during the Obama administration, of overstepping its authority by imposing regulations without regard for their economic and employment impact.
The 3-0 decision was also a defeat for 16 U.S. states, all but one with Republican governors, that supported Murray.
Gary Broadbent, a Murray spokesman, said the St. Clairsville, Ohio-based company intends to appeal.
Murray had sued the EPA in 2014, citing a Clean Air Act provision directing it to “conduct continuing evaluations of potential loss or shifts of employment” when enforcing its regulations.
Chief Executive Bob Murray had been a persistent critic of the Obama administration over what he called its “war on coal.”
Last October, U.S. District Judge John Bailey in Wheeling, West Virginia, ordered the EPA to write a report on the jobs impact of coal industry regulations, rejecting its argument that it had discretion to decide when evaluations were needed.
But the appeals court said the Clean Air Act did not require the EPA to write such a report.
“The agency gets to decide how to collect a broad set of employment impact data, how to judge and examine this extensive data, and how to manage these tasks on an ongoing basis,” Circuit Judge Henry Floyd wrote. “A court is ill-equipped to supervise this continuous, complex process.”
EPA spokeswoman Amy Graham said in an email: “President Trump’s EPA will take the economic and job impacts of its proposed regulations into account consistent with its statutory requirements, regardless of the outcome of this particular case.”
Murray also drew support from the Cause of Action Institute, an advocacy group favoring limited government.
Joshua Schopf, a lawyer for the group, in an interview said courts should not “unduly defer to agencies’ interpretation of statutory language,” and the 4th Circuit “appeared to engage in verbal gymnastics” by ruling against Murray.
Former President Barack Obama appointed the judges on the appeals court panel. Floyd was previously a district court judge appointed by President George W. Bush.
The case is Murray Energy Corp et al v. EPA, 4th U.S. Circuit Court of Appeals, No. 16-2432.
Arnold‐Chiari malformation associated with sleep apnea and central dysregulation of arterial pressure
The natural history of symptomatic adult type I Arnold‐Chiari malformation (ACM1) is variable. Patients with this condition frequently develop corticospinal and sensory deficits, together with cerebellar signs and lower cranial nerve palsies in various combinations. In the present report we describe a patient with ACM1 in whom sleep apnea, together with disturbances in the central regulation of arterial pressure, was a major component of the symptomatology. Such paroxysmal blood pressure changes have not been previously reported. The decompression of our patient's medulla, which contains the primary respiratory centers and baroreceptors, resulted in a marked improvement and indicates that the origin may have been central.
#ifndef rosic_EllipseOscillator_h
#define rosic_EllipseOscillator_h
namespace rosic
{
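/** Double-precision ellipse oscillator based on the templated
RAPT::rsEllipseOscillator, adding detuning, frequency shifting and amplitude
scaling on top of the base class. */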
class rsEllipseOscillator : public RAPT::rsEllipseOscillator<double>
{
public:
//-----------------------------------------------------------------------------------------------
// \name Setup
void setFrequency(double newFrequency);
void setSampleRate(double newSampleRate);
/** Sets a detuning in semitones. */
inline void setDetune(double newDetune)
{
tuneFactor = RAPT::rsPitchOffsetToFreqFactor(newDetune);
updateOmega();
}
/** Sets an additive offset/shift for the frequency. */
inline void setFrequencyShift(double newShift)
{
freqShift = newShift;
updateOmega();
}
/** Sets amplitude of this oscillator (as raw multiplier). */
inline void setAmplitude(double newAmplitude)
{
amplitude = newAmplitude;
}
//setFrequencyScaleY, setFrequencyShiftY
//-----------------------------------------------------------------------------------------------
// \name Audio Processing
inline void getSampleFrameStereo(double* left, double* right)
{
//*left = *right = amplitude * getSample(); return; // test
getSamplePair(left, right);
*left *= amplitude;
*right *= amplitude;
}
protected:
INLINE void updateOmega() { setOmega(omegaFactor*(tuneFactor*frequency+freqShift)); }
double frequency = 1000;
double omegaFactor = 2*PI/44100; // to convert from frequency to normalized radian frequency
double tuneFactor = 1;
double freqShift = 0;
double amplitude = 1;
double midSide = 0; // not yet used
};
//=================================================================================================
class rsEllipseOscillatorVoice // : public rsPolyVoice (maybe)
{
protected:
rsEllipseOscillator* master;
};
class rsEllipseOscillatorPoly : public rsEllipseOscillator // public rsPolyModule
{
protected:
std::vector<rsEllipseOscillatorVoice> voices;
};
}
#endif
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package diagramas.logico;
import controlador.Editor;
import controlador.apoios.TreeItem;
import controlador.inspector.InspectorProperty;
import desenho.FormaElementar;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Stroke;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import javax.swing.ImageIcon;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * Represents a table constraint (primary key, unique or foreign key) in the
 * logical diagram, including its painting, validation and XML persistence.
 *
 * @author ccandido
 */
public class Constraint implements Serializable {
private static final long serialVersionUID = 2017041120493765643L;
private final Tabela tabela;
public Tabela getTabela() {
return tabela;
}
public Constraint(Tabela tbl) {
this.tabela = tbl;
this.tabela.Add(this);
getMotivoValidade = new String[]{
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.ok"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.cons_origem"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.qtd_cmp"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.tipo"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.rep"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.ligacao"),
Editor.fromConfiguracao.getValor("Inspector.obj.constraint.validacao.ku")
};
}
public Rectangle area = null;
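    /**
     * Paints this constraint's row (type icon plus formatted name) inside the
     * table body at the given offset, using the given row height.
     */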
public void Paint(int x, int y, int altura, Graphics2D g) {
int f = 2 * getTabela().distSelecao;
int imgl = 16;
Rectangle r = new Rectangle(getTabela().getLeft() + x, getTabela().getTop() + y, getTabela().getWidth() - x - 1, altura);
area = r;
if (r.y + altura > getTabela().getTopHeight()) {
return;
}
float alfa = 1f - getTabela().getAlfa();// 0.2f;
Composite originalComposite = g.getComposite();
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, alfa));
Paint bkpp = g.getPaint();
        g.setColor(getTabela().getMaster().getBackground()); //# Not: isDisablePainted()? disabledColor :
g.fill(area);
if (isSelecionado()) {
if (getTabela().isGradiente()) {
g.setColor(getTabela().getGradienteStartColor());
} else {
g.setColor(getTabela().getForeColor());
}
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, getTabela().getAlfa()));
g.fillRect(r.x, r.y, r.width + 1, r.height);
}
g.setPaint(bkpp);
g.setComposite(originalComposite);
Rectangle bkp = g.getClipBounds();
ImageIcon img;
switch (getTipo()) {
case tpPK:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_PK.img");
break;
case tpFK:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_FK.img");
break;
default:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_UN.img");
}
if (!isValidado()) {
g.drawRoundRect(r.x - 1 + f, r.y + 4 - 1, imgl + 1, imgl + 1, 4, 4);
}
if (roqued) {
Stroke bkps = g.getStroke();
g.setStroke(new BasicStroke(2f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 0, new float[]{1, 2}, 0));
//g.drawRoundRect(r.x - 1 + f + 2, r.y + f - 1 + 2, imgl + 1 - 4, imgl + 1 - 4, 4, 4);
g.drawRoundRect(r.x - 1 + f, r.y + 4 - 1, imgl + 1, imgl + 1, 4, 4);
g.setStroke(bkps);
}
if (getTabela().isDisablePainted()) {
img = new ImageIcon(util.TratadorDeImagens.dye(img, getTabela().getForeColor()));
}
g.drawImage(img.getImage(), r.x + f, r.y + 4, imgl, imgl, null);
g.clipRect(r.x, r.y, r.width, r.height);
g.setColor(getTabela().getForeColor());
String tx = getNomeFormatado();
g.drawString(tx, r.x + f + imgl + 2, r.y + altura / 2 + f);
g.setClip(bkp);
g.setPaint(bkpp);
}
public void Paint(int x, int y, Graphics2D g) {
int f = 3;
int imgl = 16;
Rectangle r = new Rectangle(getTabela().getLeft() + x, getTabela().getTop() + y, getTabela().cmpAltura, getTabela().cmpAltura);
area = r;
float alfa = 1f - getTabela().getAlfa();// 0.2f;
Composite originalComposite = g.getComposite();
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, alfa));
Paint bkpp = g.getPaint();
g.setColor(getTabela().getMaster().getBackground());
g.fill(area);
if (isSelecionado()) {
if (getTabela().isGradiente()) {
g.setColor(getTabela().getGradienteStartColor());
} else {
g.setColor(getTabela().getForeColor());
}
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, getTabela().getAlfa()));
g.fillRect(r.x, r.y, r.width + 1, r.height);
}
g.setPaint(bkpp);
g.setComposite(originalComposite);
Rectangle bkp = g.getClipBounds();
ImageIcon img;
switch (getTipo()) {
case tpPK:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_PK.img");
break;
case tpFK:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_FK.img");
break;
default:
img = Editor.fromControler().ImagemDeDiagrama.get("diagrama.Constraint_UN.img");
}
if (!isValidado()) {
g.drawRoundRect(r.x - 1 + f, r.y + f - 1, imgl + 1, imgl + 1, 4, 4);
}
if (roqued) {
Stroke bkps = g.getStroke();
g.setStroke(new BasicStroke(2f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 0, new float[]{1, 2}, 0));
g.drawRoundRect(r.x - 1 + f, r.y + 4 - 1, imgl + 1, imgl + 1, 4, 4);
g.setStroke(bkps);
}
if (getTabela().isDisablePainted()) {
img = new ImageIcon(util.TratadorDeImagens.dye(img, getTabela().getForeColor()));
}
g.drawImage(img.getImage(), r.x + f, r.y + f, imgl, imgl, null);
g.clipRect(r.x, r.y, r.width, r.height);
g.setColor(getTabela().getForeColor());
g.setClip(bkp);
g.setPaint(bkpp);
}
protected transient boolean roqued = false;
public boolean isMe(Point p) {
if (area == null) {
return false;
}
return area.contains(p);
}
public String getCamposStr(List<Campo> lst) {
if (lst == null || lst.isEmpty()) {
return "()";
}
String cmpsD = "";
cmpsD = lst.stream().map((cx) -> (cx == null ? "[]" : cx.getTexto().trim().isEmpty() ? "?" : cx.getTexto()) + ", ").reduce(cmpsD, String::concat);
return "(" + cmpsD.substring(0, cmpsD.length() - 2) + ")";
}
public String getCamposStrCheck(List<Campo> lst) {
if (lst == null || lst.isEmpty()) {
return "()";
}
String cmpsD = "";
cmpsD = lst.stream().map((cx) -> (cx == null ? "[]"
: (cx.getTexto().trim().isEmpty() ? "?"
: cx.getTexto())
+ (getOrigem(cx) == null ? "???" : ""))
+ ", ")
.reduce(cmpsD, String::concat);
return "(" + cmpsD.substring(0, cmpsD.length() - 2) + ")";
}
private final int V_MOTIVO_OK = 0;
private final int V_MOTIVO_CONS_ORIGEM = 1;
private final int V_MOTIVO_QTD_CMP = 2;
private final int V_MOTIVO_TIPO = 3;
private final int V_MOTIVO_CAMPO_REP = 4;
private final int V_MOTIVO_NAO_LIGADO = 5;
private final int V_MOTIVO_KEY_AND_UNIQUE = 6;
private String[] getMotivoValidade = null;
    /**
     * The reason why the constraint is not valid.
     */
private int motivoValidade = 0;
public boolean isAutoRelacionamento() {
return (getTabela() != null && getTabela() == getTabelaDeOrigem());
}
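    /**
     * Validates this constraint and records the reason when it is not valid.
     */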
public void Valide() {
motivoValidade = V_MOTIVO_OK;
if (getTipo() != CONSTRAINT_TIPO.tpFK) {
if (getCamposDeOrigem().size() == 1) {
Campo cx = getCamposDeOrigem().get(0);
if (cx.isKey() && cx.isUnique()) {
motivoValidade = V_MOTIVO_KEY_AND_UNIQUE;
setValidado(false);
return;
}
}
setValidado(true);
return;
}
if (getLigacao() == null) {
            //# self-relationship.
if (getTabela() != getTabelaDeOrigem()) {
setValidado(false);
motivoValidade = V_MOTIVO_NAO_LIGADO;
return;
}
}
if (getConstraintOrigem() == null) {
setValidado(false);
motivoValidade = V_MOTIVO_CONS_ORIGEM;
return;
}
if (getConstraintOrigem().getCamposDeOrigem().size() != getCamposDeOrigem().size()) {
setValidado(false);
motivoValidade = V_MOTIVO_QTD_CMP;
return;
}
boolean sn = true;
for (int i = 0; i < getCamposDeOrigem().size(); i++) {
sn = getCamposDeOrigem().get(i) != null && getCamposDeOrigem().get(i).getTipo().equals(getCamposDeDestino().get(i).getTipo());
if (!sn) {
motivoValidade = V_MOTIVO_TIPO;
break;
}
}
if (sn) {
HashSet<Campo> teste = new HashSet<>();
int tl = 0;
for (Campo c : getCamposDeOrigem()) {
if (c != null) {
tl++;
teste.add(c);
                    if (tl != teste.size()) { // not inserted: duplicate.
sn = false;
motivoValidade = V_MOTIVO_CAMPO_REP;
break;
}
}
}
}
setValidado(sn);
}
protected void MostreSeParaExibicao(TreeItem root) {
String img;
switch (getTipo()) {
case tpPK:
img = "Constraint_PK";
break;
case tpFK:
img = "Constraint_FK";
break;
default:
img = "Constraint_UN";
}
root.add(new TreeItem(((isNomeada() && !getNome().isEmpty())? getNome() : Editor.fromConfiguracao.getValor("diagrama.Constraint.nome")), getTabela().getID(), "diagrama." + img + ".img"));
}
public enum CONSTRAINT_TIPO {
tpPK, tpUNIQUE, tpFK
}
//<editor-fold defaultstate="collapsed" desc="Propriedades">
private boolean nomeada = false;
private String nome = "";
private final ArrayList<Campo> camposDeOrigem = new ArrayList<>();
private final ArrayList<Campo> camposDeDestino = new ArrayList<>();
private CONSTRAINT_TIPO tipo = CONSTRAINT_TIPO.tpPK;
//private HashMap<Campo, Campo> listaDeCamposKV = new HashMap<>();
private Constraint constraintOrigem = null;
public Constraint getConstraintOrigem() {
return constraintOrigem;
}
public void LigacaoDireta(Constraint constraintOrigem, LogicoLinha ligacao) {
if ((constraintOrigem == null) || (constraintOrigem.getTipo() == CONSTRAINT_TIPO.tpFK) || (this.constraintOrigem != null) || (getTipo() != CONSTRAINT_TIPO.tpFK)) {
setConstraintOrigem(constraintOrigem);
} else {
this.constraintOrigem = constraintOrigem;
Tabela ori = this.constraintOrigem.getTabela();
getTabela().PerformLigacao(ori, true);
int tl = camposDeOrigem.size() - 1;
while (tl > -1) {
if (camposDeOrigem.get(tl) != null && camposDeOrigem.get(tl).getTabela() != ori) {
camposDeOrigem.remove(tl);
camposDeOrigem.add(tl, null);
}
tl--;
}
this.ligacao = ligacao;
}
}
public void setConstraintOrigem(Constraint constraintOrigem) {
if (this.constraintOrigem != constraintOrigem) {
if (this.constraintOrigem != null) {
Tabela ori = this.constraintOrigem.getTabela();
getTabela().PerformLigacao(ori, false);
}
if (getTipo() == CONSTRAINT_TIPO.tpFK) {
if (constraintOrigem != null) {
if (constraintOrigem.getTipo() == CONSTRAINT_TIPO.tpFK) {
constraintOrigem = null;
}
}
this.constraintOrigem = constraintOrigem;
if (this.constraintOrigem != null) {
Tabela ori = this.constraintOrigem.getTabela();
getTabela().PerformLigacao(ori, true);
}
                //# Remove the fields from the old origin.
int tl = camposDeOrigem.size();
camposDeOrigem.clear();
for (int i = 0; i < tl; i++) {
camposDeOrigem.add(null);
}
if (!getTabela().getMaster().isCarregando && !novalide) {
Valide();
}
}
}
}
public CONSTRAINT_TIPO getTipo() {
return tipo;
}
public void setTipo(CONSTRAINT_TIPO tipo) {
if (this.tipo.equals(tipo)) {
return;
}
this.tipo = tipo;
if (tipo == CONSTRAINT_TIPO.tpFK) {
setValidado(false);
motivoValidade = V_MOTIVO_CONS_ORIGEM;
}
//InvalidateArea();
}
public void SetTipo(int tpForInspector) {
try {
setTipo(CONSTRAINT_TIPO.values()[tpForInspector]);
        } catch (Exception e) {
            // Ignore invalid ordinal values coming from the inspector.
        }
}
public boolean isNomeada() {
return nomeada;
}
public void setNomeada(boolean nomeada) {
if (this.nomeada == nomeada) {
return;
}
this.nomeada = nomeada;
Repaint();
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
if (this.nome.equals(nome)) {
return;
}
this.nome = nome;
Repaint();
}
public String getNomeFormatado() {
return (isNomeada() && !getNome().isEmpty()) ? getNome() : getTipoStr();
}
// public HashMap<Campo, Campo> getListaDeCamposKV() {
// return listaDeCamposKV;
// }
//
// public void setListaDeCamposKV(HashMap<Campo, Campo> listaDeCamposKV) {
// this.listaDeCamposKV = listaDeCamposKV;
// }
public void Add(Campo origem, Campo destino, LogicoLinha lig, Constraint orig) {
novalide = true;
setConstraintOrigem(orig);
novalide = false;
Add(origem, destino, lig);
}
public void Add(Campo origem, Campo destino, LogicoLinha lig) {
novalide = true;
setLigacao(lig);
novalide = false;
Add(origem, destino);
}
public void Add(Campo origem, Campo destino) {
if (getTipo() != CONSTRAINT_TIPO.tpFK) {
int idx = camposDeOrigem.indexOf(origem);
if (idx == -1) {
camposDeOrigem.add(origem);
camposDeDestino.add(destino);
}
            // # destination not used!
// else {
// camposDeDestino.remove(idx);
// camposDeDestino.add(idx, destino);
// }
} else {
int idx = camposDeDestino.indexOf(destino);
if (idx == -1) {
camposDeOrigem.add(origem);
camposDeDestino.add(destino);
} else {
camposDeOrigem.remove(idx);
camposDeOrigem.add(idx, origem);
}
if (!getTabela().getMaster().isCarregando && origem != null && destino != null) {
destino.setTipo(origem.getTipo());
}
}
if (!getTabela().getMaster().isCarregando && !novalide) {
Valide();
}
}
public Campo getOrigem(Campo destino) {
int idx = camposDeDestino.indexOf(destino);
if (idx > -1) {
return camposDeOrigem.get(idx);
}
return null;
}
public Campo getDestino(Campo origem) {
int idx = camposDeOrigem.indexOf(origem);
if (idx > -1) {
return camposDeDestino.get(idx);
}
return null;
}
public List<Campo> getCamposDeOrigem() {
return camposDeOrigem;
}
public List<Campo> getCamposDeDestino() {
return camposDeDestino;
}
public void RemoveFromDestino(Campo cmp) {
int idx = camposDeDestino.indexOf(cmp);
if (idx > -1) {
camposDeDestino.remove(idx);
camposDeOrigem.remove(idx);
Valide();
}
}
public void RemoveFromOrigem(Campo cmp) {
int idx = camposDeOrigem.indexOf(cmp);
if (idx > -1) {
camposDeDestino.remove(idx);
camposDeOrigem.remove(idx);
Valide();
}
}
public void Clear() {
camposDeDestino.clear();
camposDeOrigem.clear();
}
public String getDicionario() {
return dicionario;
}
public void setDicionario(String dicionario) {
this.dicionario = dicionario;
}
public String getObservacao() {
return observacao;
}
public void setObservacao(String observacao) {
this.observacao = observacao;
}
private String observacao = "";
private String dicionario = "";
private boolean selecionado = false;
private boolean validado = true;
private transient boolean novalide = false;
private LogicoLinha ligacao = null;
public LogicoLinha getLigacao() {
return ligacao;
}
public void setLigacao(LogicoLinha ligacao) {
if (this.ligacao != ligacao) {
this.ligacao = ligacao;
if (!getTabela().getMaster().isCarregando && !novalide) {
Valide();
}
}
}
public boolean isValidado() {
return validado;
}
public void setValidado(boolean validado) {
if (this.validado != validado) {
this.validado = validado;
InvalidateArea();
}
}
public boolean isSelecionado() {
return selecionado && getTabela().isSelecionado();
}
public void setSelecionado(boolean selecionado) {
this.selecionado = selecionado;
}
protected Tabela getTabelaDeOrigem() {
if (tipo == CONSTRAINT_TIPO.tpPK || tipo == CONSTRAINT_TIPO.tpUNIQUE) {
return getTabela();
}
return (getConstraintOrigem() == null) ? null : getConstraintOrigem().getTabela();
}
protected Tabela getTabelaDeDestino() {
if (tipo == CONSTRAINT_TIPO.tpPK || tipo == CONSTRAINT_TIPO.tpUNIQUE) {
return null;
}
return getTabela();
}
//</editor-fold>
    /**
     * Checks whether the table is selected, to apply the selection effect.
     *
     * @return
     */
public boolean SuperSelecionado() {
return (tabela.isSelecionado() && isSelecionado());
}
public void ToXmlValores(Document doc, Element root) {
Element me = doc.createElement("Constraint");
me.appendChild(util.XMLGenerate.ValorString(doc, "Nome", getNome()));
me.appendChild(util.XMLGenerate.ValorBoolean(doc, "Nomeada", isNomeada()));
me.appendChild(util.XMLGenerate.ValorInteger(doc, "Tipo", getTipo().ordinal()));
me.appendChild(util.XMLGenerate.ValorText(doc, "Dicionario", getDicionario()));
me.appendChild(util.XMLGenerate.ValorText(doc, "Observacao", getObservacao()));
me.appendChild(util.XMLGenerate.ValorText(doc, "DdlOnUpdate", getDdlOnUpdate()));
me.appendChild(util.XMLGenerate.ValorText(doc, "DdlOnDelete", getDdlOnDelete()));
me.appendChild(util.XMLGenerate.ValorRefFormElementar(doc, "LigacaoParaOrigem", getLigacao()));
Element lig = util.XMLGenerate.ValorText(doc, "ConstraintOrigem", getConstraintOrigem() == null ? "" : getConstraintOrigem().getNomeFormatado());
lig.setAttribute("ID", GeraCodToLocalise(getConstraintOrigem()));
ArrayList<Integer> cmps = new ArrayList<>();
camposDeOrigem.stream().forEach(c -> cmps.add(c == null ? -1 : c.getIndexOnTable()));
lig.setAttribute("CamposOrigem", Arrays.toString(cmps.toArray()));
cmps.clear();
camposDeDestino.stream().forEach(c -> cmps.add(c == null ? -1 : c.getIndexOnTable()));
lig.setAttribute("CamposDestino", Arrays.toString(cmps.toArray()));
me.appendChild(lig);
root.appendChild(me);
}
public void LoadFromXML(Element me, boolean colando) {
setObservacao(util.XMLGenerate.getValorTextoFrom(me, "Observacao"));
setDicionario(util.XMLGenerate.getValorTextoFrom(me, "Dicionario"));
setNome(util.XMLGenerate.getValorStringFrom(me, "Nome"));
setDdlOnUpdate(util.XMLGenerate.getValorTextoFrom(me, "DdlOnUpdate"));
setDdlOnDelete(util.XMLGenerate.getValorTextoFrom(me, "DdlOnDelete"));
SetTipo(util.XMLGenerate.getValorIntegerFrom(me, "Tipo"));
setNomeada(util.XMLGenerate.getValorBooleanFrom(me, "Nomeada"));
}
public boolean CommitXML(Element me, HashMap<Element, FormaElementar> mapa) {
Element lig = util.XMLGenerate.FindByNodeName(me, "ConstraintOrigem");
String idStr = lig.getAttribute("ID");
Constraint oric = LocaliseFomCod(idStr, mapa);
if (oric != null) {
setConstraintOrigem(oric);
}
String cmpStr = lig.getAttribute("CamposOrigem");
final Tabela ori = getTabelaDeOrigem();
String[] origens = cmpStr.replaceAll("\\[", "").replaceAll("\\]", "").replaceAll("\\s", "").split(",");
if (getTipo() != CONSTRAINT_TIPO.tpFK) {
for (String origen : origens) {
int v = util.Utilidades.TryIntStr(origen, -1);
if (v != -1) {
Add(ori.getCampos().get(v), null);
}
}
} else {
cmpStr = lig.getAttribute("CamposDestino");
String[] destinos = cmpStr.replaceAll("\\[", "").replaceAll("\\]", "").replaceAll("\\s", "").split(",");
final Tabela dest = getTabelaDeDestino();
if (dest != null) {
for (int i = 0; i < origens.length; i++) {
int vo = util.Utilidades.TryIntStr(origens[i], -1);
int vd = util.Utilidades.TryIntStr(destinos[i], -1);
if (vd != -1) {
Add(vo == -1 || ori == null ? null : ori.getCampos().get(vo), dest.getCampos().get(vd));
}
}
}
}
lig = util.XMLGenerate.FindByNodeName(me, "LigacaoParaOrigem");
idStr = lig.getAttribute("ID");
if (!"-1".equals(idStr)) {
FormaElementar liga = util.XMLGenerate.FindWhoHasID(idStr, mapa);
if (liga instanceof LogicoLinha) {
setLigacao((LogicoLinha) liga);
}
}
Valide();
return true;
}
public void Repaint() {
if (getTabela() == null || area == null) {
return;
}
getTabela().InvalidateArea();
}
public void InvalidateArea() {
if (getTabela() == null || area == null) {
return;
}
getTabela().InvalidateArea(area);
}
public boolean isFirst() {
return (getTabela().getConstraints().indexOf(this) == 0);
}
public boolean isLast() {
int tmp = getTabela().getConstraints().indexOf(this) + 1;
return (tmp == getTabela().getConstraints().size());
}
// /**
    // * Is validly linked/related to a table.
// *
// * @return
// */
// public boolean isLinkedToTable() {
// return getTabelaOrigem() != null && getTabela().getListaDeTabelasLigadas().indexOf(getTabelaOrigem()) > -1;
// }
public static final int TAG_COMMAND_PK = 120420170;
public static final int TAG_COMMAND_FK = 120420171;
public static final int TAG_COMMAND_UN = 120420172;
private final int DESCE_CONSTAN = +110417;
private final int SOBE_CONSTAN = -110417;
public ArrayList<InspectorProperty> CompleteGenerateProperty(ArrayList<InspectorProperty> res) {
res.add(InspectorProperty.PropertyFactorySeparador("constraint.selecionado"));
String relaName = "Constraint";
res.add(InspectorProperty.PropertyFactoryApenasLeituraTexto("constraint.tipo", getTipoStr()));
res.add(InspectorProperty.PropertyFactorySN("constraint.nomeada", relaName + ".setNomeada", isNomeada()).AddCondicaoForTrue(new String[]{relaName + ".setNome"}));
res.add(InspectorProperty.PropertyFactoryTexto("constraint.nome", relaName + ".setNome", getNome()));
res.add(InspectorProperty.PropertyFactoryTextoL("dicionario", relaName + ".setDicionario", getDicionario()));
res.add(InspectorProperty.PropertyFactoryTextoL("observacao", relaName + ".setObservacao", getObservacao()));
res.add(InspectorProperty.PropertyFactoryApenasLeituraTexto("constraint.validacao", getMotivoValidade[motivoValidade]));
if (tipo == CONSTRAINT_TIPO.tpFK) {
String txt = getTabelaDeOrigem() == null ? "[]" : getTabelaDeOrigem().getTexto();
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "constraint.tabelaorigem", txt).setTag(Constraint.TAG_COMMAND_FK));
txt = getConstraintOrigem() == null ? "[]" : getConstraintOrigem().getNomeFormatado();
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "constraint.ir.origem", txt).setTag(Constraint.TAG_COMMAND_FK));
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "constraint.campos.ir", getCamposStr(getCamposDeOrigem())).setTag(Constraint.TAG_COMMAND_FK));
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "constraint.tabeladestino", getTabelaDeDestino().getTexto()).setTag(Constraint.TAG_COMMAND_FK));
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "constraint.campos.ir", getCamposStr(getCamposDeDestino())).setTag(Constraint.TAG_COMMAND_FK));
res.add(InspectorProperty.PropertyFactoryTexto("constraint.ddlonupdate", relaName + ".setDdlOnUpdate", getDdlOnUpdate()));
res.add(InspectorProperty.PropertyFactoryTexto("constraint.ddlondelete", relaName + ".setDdlOnDelete", getDdlOnDelete()));
} else {
res.add(InspectorProperty.PropertyFactoryApenasLeituraTexto("constraint.campos.ir", getCamposStr(getCamposDeOrigem())));
}
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "tabela.edtitores")
.setTag(tipo == CONSTRAINT_TIPO.tpPK ? TAG_COMMAND_PK
: tipo == CONSTRAINT_TIPO.tpUNIQUE ? TAG_COMMAND_UN
: TAG_COMMAND_FK));
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdExcluirSubItem.name()));
if (getTabela().getConstraints().size() > 1) {
res.add(InspectorProperty.PropertyFactorySeparador("tabela.constraint.posicao", false));
if (!isFirst()) {
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "tabela.constraint.sobe").setTag(SOBE_CONSTAN));
}
if (!isLast()) {
res.add(InspectorProperty.PropertyFactoryCommand(FormaElementar.nomeComandos.cmdDoAnyThing.name(), "tabela.constraint.desce").setTag(DESCE_CONSTAN));
}
}
return res;
}
public String getTipoStr() {
String res;
switch (getTipo()) {
case tpFK:
res = Editor.fromConfiguracao.getValor("Inspector.obj.constraint.fkey");
break;
case tpPK:
res = Editor.fromConfiguracao.getValor("Inspector.obj.constraint.key");
break;
default:
res = Editor.fromConfiguracao.getValor("Inspector.obj.constraint.unique");
break;
}
return res;
}
public String getDDL() {
String txt = "";
String sepa = getTabela().getSepadorSql();
switch (getTipo()) {
case tpPK:
if (isNomeada() && !getNome().trim().isEmpty()) {
txt = "ALTER TABLE " + getTabela().getTexto() + " ADD CONSTRAINT " + getPrefixo() + getNome().trim() + " PRIMARY KEY " + getCamposStr(getCamposDeOrigem());
txt += sepa;
} else {
txt = "PRIMARY KEY " + getCamposStr(getCamposDeOrigem());
}
break;
case tpUNIQUE:
if (isNomeada() && !getNome().trim().isEmpty()) {
txt = "ALTER TABLE " + getPrefixo() + getTabela().getTexto() + " ADD CONSTRAINT " + getNome().trim() + " UNIQUE " + getCamposStr(getCamposDeOrigem());
txt += sepa;
} else {
txt = "UNIQUE " + getCamposStr(getCamposDeOrigem());
}
break;
case tpFK:
String nome = (isNomeada() && !getNome().trim().isEmpty()) ? getPrefixo() + getNome() : getPrefixo() + Editor.fromConfiguracao.getValor("Controler.interface.mensagem.msgcov.fk.prefix")
+ getTabela().getTexto() + "_" + String.valueOf(getTabela().getConstraints().indexOf(this) + 1);
String tmpCD = getCamposStr(getCamposDeOrigem()).replaceAll("\\[\\]", "???");
String tmpCO = getCamposStrCheck(getCamposDeDestino());
if (getConstraintOrigem() != null) {
if (getConstraintOrigem().getCamposDeOrigem().size() > getCamposDeOrigem().size()) {
tmpCO = tmpCO.substring(0, tmpCO.length() - 1) + (getCamposDeOrigem().size() > 0 ? ", " : "") + "???)";
}
if (getConstraintOrigem().getCamposDeOrigem().size() > getCamposDeDestino().size()) {
tmpCD = tmpCD.substring(0, tmpCD.length() - 1) + (getCamposDeDestino().size() > 0 ? ", " : "") + "???)";
}
}
txt = "ALTER TABLE " + getPrefixo() + getTabela().getTexto() + " ADD CONSTRAINT " + nome + "\nFOREIGN KEY " + tmpCO + "\n";
txt += "REFERENCES " + (getConstraintOrigem() == null ? "??? (???)" : getPrefixo() + getConstraintOrigem().getTabela().getTexto() + " " + tmpCD);
if (!getDdlOnDelete().isEmpty() && !getDdlOnUpdate().isEmpty()) {
txt += "\nON DELETE " + getDdlOnDelete() + " ON UPDATE " + getDdlOnUpdate();
} else if (!getDdlOnDelete().isEmpty() || !getDdlOnUpdate().isEmpty()) {
txt += "\n";
txt += !getDdlOnDelete().isEmpty() ? "ON DELETE " + getDdlOnDelete() : "";
txt += !getDdlOnUpdate().isEmpty() ? "ON UPDATE " + getDdlOnUpdate() : "";
}
txt += sepa;
break;
}
return txt;
}
    // Version 3.2!
public String getPrefixo() {
return getTabela().getPrefixo();
}
public void NotifiqueIR(Constraint cons, int msg, Campo cmp) {
}
private String ddlOnUpdate = "";
private String ddlOnDelete = "";
public String getDdlOnDelete() {
return ddlOnDelete;
}
public void setDdlOnDelete(String ddlOnDelete) {
if (this.ddlOnDelete == null ? ddlOnDelete != null : !this.ddlOnDelete.equals(ddlOnDelete)) {
this.ddlOnDelete = ddlOnDelete;
Repaint();
}
}
public String getDdlOnUpdate() {
return ddlOnUpdate;
}
public void setDdlOnUpdate(String ddlOnUpdate) {
if (this.ddlOnUpdate == null ? ddlOnUpdate != null : !this.ddlOnUpdate.equals(ddlOnUpdate)) {
this.ddlOnUpdate = ddlOnUpdate;
Repaint();
}
}
public Constraint LocaliseFomCod(String cod, HashMap<Element, FormaElementar> mapa) {
try {
            String[] cods = cod.split(",");
if (cods[0].trim().equals("-1")) {
return null;
}
FormaElementar e = util.XMLGenerate.FindWhoHasID(cods[0], mapa);
if (e == null) {
return null;
}
return ((Tabela) e).getConstraints().get(Integer.valueOf(cods[1].trim()));
} catch (Exception e) {
util.BrLogger.Logger("ERROR_DIAGRAMA_LOAD", e.getMessage());
return null;
}
}
public String GeraCodToLocalise(Constraint ori) {
if (ori == null) {
return "-1,-1";
}
return String.valueOf(ori.getTabela().getID()) + "," + String.valueOf(ori.getTabela().getConstraints().indexOf(ori));
}
}
// src/check.rs
//! Provides functionality for checking the availability of URLs.
use std::collections::HashSet;
use std::fmt;
use std::fs::read_to_string;
use std::path::{Path, PathBuf};
use log::{debug, warn};
use once_cell::sync::Lazy;
use regex::Regex;
use url::Url;
use cached::cached_key_result;
use cached::SizedCache;
use super::CheckContext;
use crate::{
parse::{parse_fragments, parse_redirect},
HttpCheck,
};
const PREFIX_BLACKLIST: [&str; 1] = ["https://doc.rust-lang.org"];
#[derive(Debug)]
pub enum IoError {
HttpUnexpectedStatus(ureq::Response),
HttpFetch(ureq::Transport),
FileIo(String, std::io::Error),
}
impl fmt::Display for IoError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
IoError::HttpUnexpectedStatus(resp) => write!(
f,
"Unexpected HTTP status fetching {}: {}",
resp.get_url(),
resp.status_text()
),
IoError::HttpFetch(e) => write!(f, "Error fetching {}", e),
IoError::FileIo(url, e) => write!(f, "Error fetching {}: {}", url, e),
}
}
}
#[derive(Debug, Clone)]
pub enum Link {
File(PathBuf),
Http(Url),
}
impl fmt::Display for Link {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Link::File(path) => write!(f, "{}", path.display()),
Link::Http(url) => f.write_str(url.as_str()),
}
}
}
impl Link {
/// Removes the fragment
fn without_fragment(&self) -> Link {
match self {
Link::Http(url) => {
let mut url = url.clone();
url.set_fragment(None);
Link::Http(url)
}
_ => self.clone(),
}
}
}
#[derive(Debug)]
pub enum CheckError {
/// An intra-doc link went unresolved by rustdoc and ended up in the final HTML
IntraDocLink(String),
/// A relatively linked file did not exist
File(PathBuf),
/// A linked HTTP URL did not exist
Http(Url),
/// An HTTP URL was encountered, but HTTP checking was forbidden
HttpForbidden(Url),
/// The linked file existed, but was missing the linked HTML anchor
Fragment(Link, String, Option<Vec<String>>),
    /// An error occurred while trying to find whether the file or URL existed
Io(Box<IoError>),
}
impl From<ureq::Error> for CheckError {
fn from(err: ureq::Error) -> Self {
let io_err = match err {
ureq::Error::Status(_, response) => IoError::HttpUnexpectedStatus(response),
ureq::Error::Transport(err) => IoError::HttpFetch(err),
};
CheckError::Io(Box::new(io_err))
}
}
impl fmt::Display for CheckError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
CheckError::IntraDocLink(text) => {
write!(f, "Broken intra-doc link to {}!", text)
}
CheckError::File(path) => {
write!(f, "Linked file at path {} does not exist!", path.display())
}
CheckError::Http(url) => write!(f, "Linked URL {} does not exist!", url),
CheckError::HttpForbidden(url) => write!(
f,
"Found HTTP link {}, but HTTP checking is forbidden!",
url
),
CheckError::Fragment(link, fragment, missing_parts) => match missing_parts {
Some(missing_parts) => write!(
f,
"Fragments #{} as expected by ranged fragment #{} at {} do not exist!\n\
This is likely a bug in rustdoc itself.",
missing_parts.join(", #"),
fragment,
link
),
None => write!(f, "Fragment #{} at {} does not exist!", fragment, link),
},
CheckError::Io(err) => err.fmt(f),
}
}
}
/// Check a single URL for availability. Returns `Err` if it is unavailable.
pub fn is_available(url: &Url, ctx: &CheckContext) -> Result<(), CheckError> {
match url.scheme() {
"file" => check_file_url(url, ctx),
"http" | "https" => check_http_url(url, ctx),
scheme @ "javascript" => {
debug!("Not checking URL scheme {:?}", scheme);
Ok(())
}
other => {
debug!("Unrecognized URL scheme {:?}", other);
Ok(())
}
}
}
cached_key_result! {
CHECK_FILE: SizedCache<String, HashSet<String>> = SizedCache::with_size(100);
Key = { link.without_fragment().to_string() };
// `fetch_html` is different depending on whether the link is being
// loaded from disk or from the network.
fn fragments_from(
link: &Link,
fetch_html: impl Fn() -> Result<String, CheckError>
) -> Result<HashSet<String>, CheckError> = {
fetch_html().map(|html| parse_fragments(&html))
}
}
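/// Checks whether `fragment` exists in the HTML returned by `fetch_html`,
/// also trying the percent-decoded form and Rust's ranged `#n-m` line anchors.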
fn is_fragment_available(
link: &Link,
fragment: &str,
fetch_html: impl Fn() -> Result<String, CheckError>,
) -> Result<(), CheckError> {
// Empty fragments (e.g. file.html#) are commonly used to reach the top
// of the document, see https://html.spec.whatwg.org/multipage/browsing-the-web.html#scroll-to-fragid
if fragment.is_empty() {
return Ok(());
}
let fragments = fragments_from(link, fetch_html)?;
if fragments.contains(fragment) {
return Ok(());
}
// Try again with percent-decoding.
// NOTE: This isn't done unconditionally because it's possible the fragment it's linking to was also percent-encoded.
match percent_encoding::percent_decode(fragment.as_bytes()).decode_utf8() {
Ok(cow) => {
if fragments.contains(&*cow) {
return Ok(());
}
}
// If this was invalid UTF8 after percent-decoding, it can't be in the file (since we have a `String`, not opaque bytes).
// Assume it wasn't meant to be url-encoded.
Err(err) => warn!("{} url-decoded to invalid UTF8: {}", fragment, err),
}
    // Rust documentation uses `#n-m` fragments and JavaScript to highlight
    // a range of lines in HTML of source code. An element with an `id`
    // attribute of (literal) "n-m" will not exist, but elements with
    // `id`s n through m should. This parses the ranged n-m anchor and
    // checks whether elements with `id`s n through m exist.
    static RUST_LINE_HIGHLIGHT_RX: Lazy<Regex> =
        Lazy::new(|| Regex::new(r#"^(?P<start>[0-9]+)-(?P<end>[0-9]+)$"#).unwrap());
    match RUST_LINE_HIGHLIGHT_RX.captures(fragment) {
Some(capture) => match (capture.name("start"), capture.name("end")) {
(Some(start_str), Some(end_str)) => {
                // NOTE: assumes there are fewer than 2^31 lines in a source file
let start = start_str.as_str().parse::<i32>().unwrap();
let end = end_str.as_str().parse::<i32>().unwrap();
let missing = (start..=end)
.map(|i| i.to_string())
.filter(|i| !fragments.contains(i))
.collect::<Vec<String>>();
if !missing.is_empty() {
Err(CheckError::Fragment(
link.clone(),
fragment.to_string(),
Some(missing),
))
} else {
Ok(())
}
}
_ => unreachable!("if the regex matches, it should have capture groups"),
},
None => Err(CheckError::Fragment(
link.clone(),
fragment.to_string(),
None,
)),
}
}
/// Check a URL with the "file" scheme for availability. Returns `Err` if it is unavailable.
fn check_file_url(url: &Url, ctx: &CheckContext) -> Result<(), CheckError> {
let path = url.to_file_path().unwrap();
// determine the full path by looking if the path points to a directory,
// and if so append `index.html`, this is needed as we'll try to read
// the file, so `expanded_path` should point to a file not a directory
let index_html;
let expanded_path = if path.is_file() {
&path
} else if path.is_dir() && path.join("index.html").is_file() {
index_html = path.join("index.html");
&index_html
} else {
debug!("Linked file at path {} does not exist!", path.display());
return Err(CheckError::File(path));
};
if !ctx.check_fragments {
return Ok(());
}
// The URL might contain a fragment. In that case we need a full GET
// request to check if the fragment exists.
match url.fragment() {
Some(fragment) => check_file_fragment(&path, expanded_path, fragment),
None => Ok(()),
}
}
fn check_file_fragment(
path: &Path,
expanded_path: &Path,
fragment: &str,
) -> Result<(), CheckError> {
debug!(
"Checking fragment {} of file {}.",
fragment,
expanded_path.display()
);
fn get_html(expanded_path: &Path) -> Result<String, CheckError> {
read_to_string(expanded_path).map_err(|err| {
CheckError::Io(Box::new(IoError::FileIo(
expanded_path.to_string_lossy().to_string(),
err,
)))
})
}
let fetch_html = || {
let html = get_html(expanded_path)?;
if let Some(redirect) = parse_redirect(&html) {
get_html(&expanded_path.parent().unwrap().join(redirect))
} else {
Ok(html)
}
};
is_fragment_available(&Link::File(path.to_path_buf()), fragment, fetch_html)
}
/// Check a URL with "http" or "https" scheme for availability. Returns `Err` if it is unavailable.
fn check_http_url(url: &Url, ctx: &CheckContext) -> Result<(), CheckError> {
if ctx.check_http == HttpCheck::Ignored {
warn!(
"Skip checking {} as checking of http URLs is turned off",
url
);
return Ok(());
}
for blacklisted_prefix in PREFIX_BLACKLIST.iter() {
if url.as_str().starts_with(blacklisted_prefix) {
warn!(
"Skip checking {} as URL prefix is on the builtin blacklist",
url
);
return Ok(());
}
}
if ctx.check_http == HttpCheck::Forbidden {
return Err(CheckError::HttpForbidden(url.clone()));
}
// The URL might contain a fragment. In that case we need a full GET
// request to check if the fragment exists.
if url.fragment().is_none() || !ctx.check_fragments {
match ureq::head(url.as_str()).call() {
Err(ureq::Error::Status(405, _)) => {
// If HEAD isn't allowed, try sending a GET instead
ureq::get(url.as_str()).call()?;
Ok(())
}
Err(other) => Err(other.into()),
Ok(_) => Ok(()),
}
} else {
// the URL might contain a fragment, in that case we need to check if
// the fragment exists, this issues a GET request
check_http_fragment(url, url.fragment().unwrap())
}
}
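/// Checks that `fragment` exists in the HTML served at `url`, following at
/// most one level of rustdoc redirect.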
fn check_http_fragment(url: &Url, fragment: &str) -> Result<(), CheckError> {
debug!("Checking fragment {} of URL {}.", fragment, url.as_str());
fn get_html(url: &Url) -> Result<String, CheckError> {
let resp = ureq::get(url.as_str()).call()?;
Ok(resp.into_string().unwrap())
}
let fetch_html = || {
let html = get_html(url)?;
// NOTE: only handles one level of nesting. Maybe we should have multiple levels?
let redirect = parse_redirect(&html).and_then(|s| {
Url::parse(&s)
.map_err(|err| {
warn!("failed to parse Rustdoc redirect: {}", err);
})
.ok()
});
if let Some(redirect) = redirect {
get_html(&redirect)
} else {
Ok(html)
}
};
is_fragment_available(&Link::Http(url.clone()), fragment, fetch_html)?;
Ok(())
}
#[cfg(test)]
mod test {
use crate::HttpCheck;
use super::{check_file_url, is_available, CheckContext, CheckError, Link};
use mockito::{self, mock};
use std::env;
use url::Url;
fn url_for(path: &str) -> Url {
let cwd = env::current_dir().unwrap();
let mut parts = path.split('#');
let file_path = parts.next().unwrap();
let mut url = if file_path.ends_with('/') {
Url::from_directory_path(cwd.join(file_path))
} else {
Url::from_file_path(cwd.join(file_path))
}
.unwrap();
url.set_fragment(parts.next());
assert_eq!(parts.count(), 0); // make sure the anchor was valid, not `a.html#x#y`
url
}
fn test_check_file_url(path: &str) -> Result<(), CheckError> {
check_file_url(&url_for(path), &CheckContext::default())
}
#[test]
fn test_file_path() {
test_check_file_url("tests/html/index.html").unwrap();
}
#[test]
fn test_directory_path() {
test_check_file_url("tests/html/").unwrap();
}
#[test]
fn test_anchors() {
test_check_file_url("tests/html/anchors.html#h1").unwrap();
}
#[test]
fn test_hash_fragment() {
test_check_file_url("tests/html/anchors.html#").unwrap();
}
#[test]
fn test_missing_anchors() {
match test_check_file_url("tests/html/anchors.html#nonexistent") {
Err(CheckError::Fragment(Link::File(path), fragment, None)) => {
assert!(path.ends_with("tests/html/anchors.html"));
assert_eq!("nonexistent", fragment);
}
x => panic!(
"Expected to report missing anchor (Err(CheckError::FileAnchor)), got {:?}",
x
),
}
}
#[test]
fn test_range_anchor() {
test_check_file_url("tests/html/range.html#2-4").unwrap();
}
#[test]
fn test_missing_range_anchor() {
match test_check_file_url("tests/html/range.html#4-6") {
Err(CheckError::Fragment(Link::File(path), fragment, Some(missing_parts))) => {
assert!(path.ends_with("tests/html/range.html"));
assert_eq!("4-6", fragment);
assert_eq!(missing_parts.len(), 1);
assert!(missing_parts.contains(&"6".to_string()));
}
x => panic!(
"Expected to report missing anchor (Err(CheckError::FileAnchorRange)), got {:?}",
x
),
}
}
#[test]
fn test_is_available_file_path() {
is_available(
&url_for("tests/html/index.html#i1"),
&CheckContext::default(),
)
.unwrap();
}
#[test]
fn test_is_available_directory_path() {
is_available(&url_for("tests/html/#i1"), &CheckContext::default()).unwrap();
}
#[test]
fn test_missing_dir_index_fragment() {
match is_available(
&url_for("tests/html/missing_index/#i1"),
&CheckContext::default(),
) {
Err(CheckError::File(path)) => assert!(path.ends_with("tests/html/missing_index")),
x => panic!(
"Expected to report missing anchor (Err(CheckError::File)), got {:?}",
x
),
}
}
#[test]
fn test_http_check() {
let root = mock("HEAD", "/test_http_check").with_status(200).create();
let mut url = mockito::server_url();
url.push_str("/test_http_check");
is_available(
&Url::parse(&url).unwrap(),
&CheckContext {
check_http: HttpCheck::Enabled,
..CheckContext::default()
},
)
.unwrap();
root.assert();
}
#[test]
fn test_http_check_fragment() {
let root = mock("GET", "/test_http_check_fragment")
.with_status(200)
.with_header("content-type", "text/html")
.with_body(
r#"<!DOCTYPE html>
<html>
<body id="r1" />
</html>"#,
)
.create();
let mut url = mockito::server_url();
url.push_str("/test_http_check_fragment#r1");
is_available(
&Url::parse(&url).unwrap(),
&CheckContext {
check_http: HttpCheck::Enabled,
..CheckContext::default()
},
)
.unwrap();
root.assert();
}
#[test]
fn test_missing_http_fragment() {
let root = mock("GET", "/test_missing_http_fragment")
.with_status(200)
.with_header("content-type", "text/html")
.with_body(
r#"<!DOCTYPE html>
<html />"#,
)
.create();
let mut url = mockito::server_url();
url.push_str("/test_missing_http_fragment#missing");
match is_available(
&Url::parse(&url).unwrap(),
&CheckContext {
check_http: HttpCheck::Enabled,
..CheckContext::default()
},
) {
Err(CheckError::Fragment(Link::Http(url), fragment, None)) => {
assert_eq!(
"http://127.0.0.1:1234/test_missing_http_fragment#missing",
url.to_string()
);
assert_eq!("missing", fragment);
}
x => panic!(
"Expected to report missing anchor (Err(CheckError::File)), got {:?}",
x
),
}
root.assert();
}
#[test]
fn test_disabling_fragment_checks_file() {
check_file_url(
&url_for("tests/html/anchors.html#nonexistent"),
&CheckContext {
check_fragments: false,
..CheckContext::default()
},
)
.unwrap();
}
#[test]
fn test_disabling_fragment_checks_http() {
let root = mock("HEAD", "/test_disabling_fragment_checks_http")
.with_status(200)
.create();
let mut url = mockito::server_url();
url.push_str("/test_disabling_fragment_checks_http#missing");
is_available(
&Url::parse(&url).unwrap(),
&CheckContext {
check_http: HttpCheck::Enabled,
check_fragments: false,
..CheckContext::default()
},
)
.unwrap();
root.assert();
}
}
/**
 * Acts as if heat was shared and returns the shared heat.<br/>
 * Note: this method drains heat from the component.
 * The closer {@code conduct} is to zero, the more energy is shared.
 * @param conduct conductivity divisor; negative values are clamped to 0
 * @return the amount of heat drained from this component
 */
public default double shareHeat(float conduct) {
if (conduct < 0)
conduct = 0;
double shared = getHeat()/(1+conduct);
setHeat(getHeat() - shared);
return shared;
}
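A minimal usage sketch follows; the HeatHolder interface name and the demo classes are assumptions for illustration, since the original snippet only shows the default method itself:

// Hypothetical host interface for the default method above; only
// getHeat()/setHeat() and shareHeat() mirror the original snippet.
interface HeatHolder {
    double getHeat();
    void setHeat(double heat);

    default double shareHeat(float conduct) {
        if (conduct < 0)
            conduct = 0;
        double shared = getHeat() / (1 + conduct);
        setHeat(getHeat() - shared);
        return shared;
    }
}

class SimpleHeatHolder implements HeatHolder {
    private double heat = 100.0;
    public double getHeat() { return heat; }
    public void setHeat(double heat) { this.heat = heat; }
}

class HeatDemo {
    public static void main(String[] args) {
        SimpleHeatHolder h = new SimpleHeatHolder();
        double shared = h.shareHeat(1.0f); // 100 / (1 + 1) = 50 drained
        System.out.println(shared);        // 50.0
        System.out.println(h.getHeat());   // 50.0
        shared = h.shareHeat(0.0f);        // conduct 0 drains everything left
        System.out.println(shared);        // 50.0
        System.out.println(h.getHeat());   // 0.0
    }
}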
/**
* @author Edgar Gonzalez
*
*/
public class FieldNotFoundException extends Exception {
/**
*
*/
private static final long serialVersionUID = 2153202784256590692L;
    /**
     * @param pojo the POJO class in which the field was looked up
     * @param field the name of the missing field
     */
public <T extends PrestaShopPojo> FieldNotFoundException(Class<T> pojo, String field) {
        super(String.format("Field %s does not exist in %s", field, pojo.getSimpleName()));
}
}
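A minimal sketch of how this exception might be raised during reflective field lookup; the FieldLookup helper is hypothetical, and only the constructor above comes from the original:

import java.lang.reflect.Field;

class FieldLookup {
    // Translates the low-level reflection error into the domain exception.
    static <T extends PrestaShopPojo> Field requireField(Class<T> pojo, String field)
            throws FieldNotFoundException {
        try {
            return pojo.getDeclaredField(field);
        } catch (NoSuchFieldException e) {
            throw new FieldNotFoundException(pojo, field);
        }
    }
}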
The efficiency of different chemoradiotherapy regimens in patients with paediatric nasopharynx cancer: review of 46 cases
The purpose of this study was to evaluate the role of combined modality treatment in patients with paediatric nasopharynx cancer (NPC). Forty‐six patients with paediatric NPC were retrospectively analysed. Forty‐four of 46 patients received combined modality treatment. Five‐year overall and progression‐free survivals were 70% and 72% for the whole group, and only three of 46 patients had loco‐regional relapse. Complete remission was obtained in 18 of 45 patients (40%), and overall survival (94% vs. 62% and 19%, p = 0.0009) and disease‐free survival (93% vs. 70% and 16%, p = 0.0002) were significantly better in complete responders than in patients who had a partial response or stable disease. The 5‐year overall and disease‐free survivals of the patients who received neoadjuvant chemotherapy (CT) and radiotherapy (RT) followed by CT were superior to those of the other groups (77% and 80%, respectively). The number of total CT cycles (p = 0.0001), nodal stage (p = 0.05) and treatment response (p = 0.0009) were significant prognostic factors for overall survival. The treatment type (p = 0.02), the number of total CT cycles (p = 0.0006), nodal stage (p = 0.05) and treatment response (p = 0.0002) were significant prognostic factors for disease‐free survival. The survival of patients receiving six or more CT cycles was also significantly better than that of patients receiving fewer than six cycles (p = 0.0001). In patients with locally advanced paediatric NPC, CT should be added to RT to improve outcome. However, a standard protocol is yet to be identified, and further studies evaluating the addition of interferon or immunotherapy to CT and RT should be performed.
/*
src/form/fieldtypes/alphabetic.rs
Copyright (c) 2019-2022 <NAME> All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
*/
use std::fmt;
use crate::form::{FieldType, FIELDTYPE_ALPHA, IsFieldType};
/// This field type accepts alphabetic data; no blanks, no digits, no special
/// characters (this is checked at character-entry time).
#[derive(PartialEq, Eq, Hash)]
pub struct Alphabetic<'a> {
fieldtype: &'a FieldType,
arguments: u8,
width: u16
}
impl<'a> Alphabetic<'a> {
pub fn new(width: u16) -> Self {
Self { fieldtype: &*FIELDTYPE_ALPHA, arguments: 1, width }
}
}
impl<'a> IsFieldType<'a, i32, i32, i32> for Alphabetic<'a> {
fn fieldtype(&self) -> &'a FieldType {
self.fieldtype
}
fn arguments(&self) -> u8 {
self.arguments
}
fn arg1(&self) -> i32 {
i32::from(self.width)
}
fn arg2(&self) -> i32 { 0 }
fn arg3(&self) -> i32 { 0 }
}
unsafe impl<'a> Send for Alphabetic<'a> { } // to make thread safe
unsafe impl<'a> Sync for Alphabetic<'a> { } // to make thread safe
impl <'a>AsRef<Alphabetic<'a>> for Alphabetic<'a> {
fn as_ref(&self) -> &Self {
self
}
}
impl <'a>AsMut<Alphabetic<'a>> for Alphabetic<'a> {
fn as_mut(&mut self) -> &mut Self {
self
}
}
impl<'a> fmt::Debug for Alphabetic<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{{ fieldtype: {:?}, arguments: {}, width: {} }}", self.fieldtype, self.arguments, self.width)
}
}
package com.github.freegeese.easymybatis.core.meta;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
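/**
 * Caches {@link MetaLambda} instances keyed by the runtime class name of the
 * functional interface instance, so the same lambda or method reference is
 * analyzed only once.
 */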
public class MetaLambdaCache {
private static Map<String, MetaLambda> cache = new ConcurrentHashMap<>();
    public static MetaLambda get(Function function) {
        // computeIfAbsent is atomic on ConcurrentHashMap and avoids the
        // check-then-act race of the containsKey/get/put pattern.
        return cache.computeIfAbsent(function.getClass().getName(), key -> MetaLambda.forFunction(function));
    }
}
package gotabcmd
import (
"context"
"fmt"
"os"
"os/exec"
"testing"
"time"
)
func TestNewTabcmd(t *testing.T) {
type args struct {
commandTimeout time.Duration
}
tests := []struct {
name string
args args
want *tabcmd
}{
{"timeout setting", args{5 * time.Hour}, &tabcmd{timeout: 5 * time.Hour, commandContext: exec.CommandContext}},
{"default timeout", args{0}, &tabcmd{timeout: CCommandTimeout, commandContext: exec.CommandContext}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := newTabcmd(tt.args.commandTimeout)
// compare timeout
if got.timeout != tt.want.timeout {
t.Errorf("NewTabcmd() = %v, want %v", got, tt.want)
}
})
}
}
// from https://github.com/golang/go/blob/master/src/os/exec/exec_test.go
func helperCommandContext(ctx context.Context, name string, args ...string) (cmd *exec.Cmd) {
// testenv.MustHaveExec(t)
cs := []string{"-test.run=TestHelperProcess", "--"}
cs = append(cs, append([]string{name}, args...)...)
if ctx != nil {
cmd = exec.CommandContext(ctx, os.Args[0], cs...)
} else {
cmd = exec.Command(os.Args[0], cs...)
}
cmd.Env = []string{"GO_WANT_HELPER_PROCESS=1"}
return cmd
}
// from https://github.com/golang/go/blob/master/src/os/exec/exec_test.go
func helperCommand(t *testing.T, name string, args ...string) *exec.Cmd {
return helperCommandContext(nil, name, args...)
}
func TestTabcmd_Run(t *testing.T) {
type fields struct {
timeout time.Duration
commandContext func(context.Context, string, ...string) *exec.Cmd
}
type args struct {
action string
args []string
}
tests := []struct {
name string
fields fields
args args
want string
wantErr bool
}{
{"success", fields{CCommandTimeout, helperCommandContext},
args{"succeed", []string{"param1", "param2", "param3"}},
"Success: [param1 param2 param3]", false},
{"stderr", fields{CCommandTimeout, helperCommandContext},
args{"fail", []string{"fail1", "fail2"}},
"Failure: [fail1 fail2]", true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tc := &tabcmd{
timeout: tt.fields.timeout,
commandContext: tt.fields.commandContext,
}
got, err := tc.run(tt.args.action, tt.args.args...)
if (err != nil) != tt.wantErr {
t.Errorf("Tabcmd.Run() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("Tabcmd.Run() = %v, want %v", got, tt.want)
}
})
}
}
// from https://github.com/golang/go/blob/dca707b2a040642bb46aa4da4fb4eb6188cc2502/src/os/exec/exec_test.go#L724
// TestHelperProcess isn't a real test. It's used as a helper process
// for TestTabcmd_Run.
func TestHelperProcess(*testing.T) {
if os.Getenv("GO_WANT_HELPER_PROCESS") != "1" {
return
}
defer os.Exit(0)
args := os.Args
for len(args) > 0 {
if args[0] == "--" {
args = args[1:]
break
}
args = args[1:]
}
if len(args) == 0 {
fmt.Fprintf(os.Stderr, "No command\n")
os.Exit(2)
}
cmd, action, args := args[0], args[1], args[2:]
if cmd != cTabcmd {
os.Stderr.WriteString("Invalid tabcmd executable")
os.Exit(1)
}
switch action {
case "succeed":
os.Stdout.WriteString(fmt.Sprintf("Success: %v", args))
os.Exit(0)
case "fail":
os.Stderr.WriteString(fmt.Sprintf("Failure: %s", args))
os.Exit(1)
default:
fmt.Fprintf(os.Stderr, "Unknown command %q\n", cmd)
os.Exit(2)
}
}
|
// Tests that ObjectAdded is forwarded to a multiplexer with the correct
// additional service name parameter.
TEST_F(ObjectManagerInterfaceMultiplexerTest, ObjectAdded) {
EXPECT_CALL(
*interface_multiplexer_,
ObjectAdded(kTestServiceName1, dbus::ObjectPath(kTestRootServicePath),
kTestInterfaceName))
.Times(1);
GetForwardingInterface(kTestServiceName1)
->ObjectAdded(dbus::ObjectPath(kTestRootServicePath), kTestInterfaceName);
EXPECT_CALL(
*interface_multiplexer_,
ObjectAdded(kTestServiceName2, dbus::ObjectPath(kTestRootServicePath),
kTestInterfaceName))
.Times(1);
GetForwardingInterface(kTestServiceName2)
->ObjectAdded(dbus::ObjectPath(kTestRootServicePath), kTestInterfaceName);
} |
package config
import (
"context"
)
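// Loader watches a configuration source and applies changes to cfg until the
// context is cancelled (behaviour inferred from the signature; implementations
// are not shown here).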
type Loader interface {
Watch(ctx context.Context, cfg Config) error
}
|
/**
* Maven goal which generates the metadata ending up in the package like {@code META-INF/MANIFEST.MF} as well as the
* files ending up in {@code META-INF/vault} like {@code filter.xml}, {@code properties.xml}, {@code config.xml} and
* {@code settings.xml}. Those files will be written to the directory given via parameter {@link #workDirectory}.
 * In addition, it performs some validations.
*/
@Mojo(
name = "generate-metadata",
defaultPhase = LifecyclePhase.PROCESS_CLASSES,
requiresDependencyResolution = ResolutionScope.COMPILE,
threadSafe = true
)
public class GenerateMetadataMojo extends AbstractMetadataPackageMojo {
/**
* A date format which is compliant with {@code org.apache.jackrabbit.util.ISO8601.parse(...)}
* @see <a href="https://www.w3.org/TR/NOTE-datetime">Restricted profile for ISO8601</a>
* @see <a href="https://issues.apache.org/jira/browse/JCR-4267">JCR-4267</a>
*/
private final DateFormat iso8601DateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
/**
* For m2e incremental build support
*/
@Component
private BuildContext buildContext;
/**
* For correct source of standard embedded path base name.
*/
@Component(hint = "default")
private ArtifactRepositoryLayout embedArtifactLayout;
/**
* The Maven session.
*/
@Parameter(property = "session", readonly = true, required = true)
private MavenSession session;
/**
* The groupId used for the generated content package. This will be part of
* the target installation path of the content package.
*/
@Parameter(
property = "vault.group",
defaultValue="${project.groupId}",
required = true)
String group;
/**
* The name of the content package
*/
@Parameter(
property = "vault.name",
defaultValue="${project.artifactId}",
required = true)
String name;
/**
* The version of the content package.
*/
@Parameter(
property = "vault.version",
defaultValue = "${project.version}",
required = true)
String version;
/**
* Defines the content of the filter.xml file.
* Each filter consists of the mandatory element {@code root} and the optional {@code mode} and {@code type} elements. All those elements are simple strings.
* In addition optionally a number of {@code include} and {@code exclude} elements are supported below {@code includes}/{@code excludes} respectively.
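     * <p>
     * A minimal example (hypothetical paths; element names as described above):
     * <pre>
     * <filters>
     *     <filter>
     *         <root>/apps/myproject</root>
     *         <includes>
     *             <include>/apps/myproject/.*</include>
     *         </includes>
     *     </filter>
     * </filters>
     * </pre>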
*/
@Parameter
private final Filters filters = new Filters();
/**
* Optional file that specifies the source of the workspace filter. The filters specified in the configuration
     * and injected via embeds or subpackages are merged into it.
*/
@Parameter
private File filterSource;
/**
* Controls if empty workspace filter fails the build.
* @deprecated This is no longer evaluated as every package is supposed to come with a non-empty filter
*/
@Deprecated
@Parameter(
property = "vault.failOnEmptyFilter",
defaultValue="true",
required = true)
private boolean failOnEmptyFilter;
/**
* Specifies additional properties to be set in the properties.xml file.
* These properties cannot overwrite the following predefined properties:
* <p>
* <table>
* <tr><td>group</td><td>Use <i>group</i> parameter to set</td></tr>
* <tr><td>name</td><td>Use <i>name</i> parameter to set</td></tr>
* <tr><td>version</td><td>Use <i>version</i> parameter to set</td></tr>
* <tr><td>groupId</td><td><i>groupId</i> of the Maven project descriptor</td></tr>
* <tr><td>artifactId</td><td><i>artifactId</i> of the Maven project descriptor</td></tr>
* <tr><td>dependencies</td><td>Use <i>dependencies</i> parameter to set</td></tr>
* <tr><td>createdBy</td><td>The value of the <i>user.name</i> system property</td></tr>
* <tr><td>created</td><td>The current system time</td></tr>
* <tr><td>requiresRoot</td><td>Use <i>requiresRoot</i> parameter to set</td></tr>
* <tr><td>allowIndexDefinitions</td><td>Use <i>allowIndexDefinitions</i> parameter to set</td></tr>
* <tr><td>packagePath</td><td>Automatically generated from the group and package name</td></tr>
* <tr><td>packageType</td><td>Set via the package type parameter</td></tr>
* <tr><td>acHandling</td><td>Use <i>accessControlHandling</i> parameter to set</td></tr>
* </table>
*/
@Parameter
private final Properties properties = new Properties();
/**
* Defines the list of dependencies
* A dependency is declared as a {@code <dependency>} element of a list
* style {@code <dependencies>} element:
* <pre>
* <dependency>
* <group>theGroup</group>
* <name>theName</name>
* <version>1.5</version>
* </dependency>
* </pre>
* <p>
     * The dependency can also reference a maven project dependency; this is preferred
     * as it yields more robust builds.
* <pre>
* <dependency>
* <groupId>theGroup</groupId>
* <artifactId>theName</artifactId>
* </dependency>
* </pre>
* <p>
* The {@code versionRange} may be indicated as a single version, in which
* case the version range has no upper bound and defines the minimal version
* accepted. Otherwise, the version range defines a lower and upper bound of
     * accepted versions, where the bounds are either included using brackets
     * {@code []} or excluded using parentheses {@code ()}.
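     * For example, a single version {@code 1.5} accepts every version from 1.5 upwards,
     * while a hypothetical range {@code [1.0,2.0)} accepts versions from 1.0 (inclusive)
     * up to, but excluding, 2.0.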
*/
@Parameter(property = "vault.dependencies")
private Collection<MavenBasedPackageDependency> dependencies = new LinkedList<>();
/**
* Defines the Access control handling. This will become the
* {@code acHandling} property of the properties.xml file.<br/>
* Possible values:
* <ul>
* <li>{@code ignore}: Ignores the packaged access control and leaves the target unchanged.</li>
     * <li>{@code overwrite}: Applies the access control provided with the package to the target. This also removes
* existing access control.</li>
* <li>{@code merge}: Merge access control provided with the package with the one in the content by replacing the
* access control entries of corresponding principals (i.e. package first). It never alters access control entries of
* principals not present in the package.</li>
* <li>{@code merge_preserve}: Merge access control in the content with the one provided with the package by
* adding the access control entries of principals not present in the content (i.e. content first). It never alters
* access control entries already existing in the content.</li>
* <li>{@code clear}: Clears all access control on the target system.</li>
* </ul>
*/
@Parameter(
property = "vault.acHandling",
alias = "acHandling",
required = false)
private AccessControlHandling accessControlHandling;
/**
* Defines whether the package requires root. This will become the
* {@code requiresRoot} property of the properties.xml file.
*/
@Parameter(
property = "vault.requiresRoot",
defaultValue="false",
required = true)
private boolean requiresRoot;
/**
     * Defines additional bundle dependencies via the OSGi Import-Package entry in the manifest.
*/
@Parameter(
property = "vault.importPackage",
defaultValue =
// exclude HTL compiler packages as they are never real dependencies of the content
"-org.apache.sling.scripting.sightly.compiler.expression.nodes," +
"-org.apache.sling.scripting.sightly.java.compiler," +
"-org.apache.sling.scripting.sightly.render"
)
private String importPackage;
/**
     * Defines the path under which the embedded bundles are placed. Defaults to '/apps/bundles/install'.
*/
@Parameter(property = "vault.embeddedTarget")
private String embeddedTarget;
/**
* List of filters for artifacts to embed in the package.
* The {@code Embedded} class represents one or multiple embedded artifact dependencies
* from the project descriptor.
* Each {@code <embedded>} element may configure any of the following fields
* <p>
* <table>
* <tr><td>groupId</td><td>String</td><td>Filter criterion against the group id of a project dependency. A pattern as described below.</td></tr>
* <tr><td>artifactId</td><td>String</td><td>Filter criterion against the artifact id of a project dependency. A pattern as described below.</td></tr>
     * <tr><td>scope</td><td>ScopeArtifactFilter</td><td>Filter criterion against the <a href="https://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html#Dependency_Scope">scope of a project dependency</a>. Possible values are <ul><li>{@code test}, which allows every scope</li><li>{@code compile+runtime} which allows every scope except {@code test}</li><li>{@code runtime+system} which allows every scope except {@code test} and {@code provided}</li><li>{@code compile} which allows only scope {@code compile}, {@code provided} and {@code system}</li><li>{@code runtime} which only allows scope {@code runtime} and {@code compile}.</li></ul></td></tr>
* <tr><td>type</td><td>String</td><td>Filter criterion against the type of a project dependency. A pattern as described below.</td></tr>
* <tr><td>classifier</td><td>String</td><td>Filter criterion against the classifier of a project dependency. A pattern as described below.</td></tr>
* <tr><td>filter</td><td>Boolean</td><td>If set to {@code true} adds the embedded artifact location to the package's filter.</td></tr>
* <tr><td>isAllVersionsFilter</td><td>Boolean</td><td>If {@code filter} is {@code true} and this is {@code true} as well, the filter entry will contain all versions of the same artifact (by creating an according filter pattern).</td></tr>
* <tr><td>target</td><td>String</td><td>The parent folder location in the package where to place the embedded artifact. Falls back to {@link #embeddedTarget} if not set.</td></tr>
* </table>
     * All fields are optional. All filter criteria are concatenated with AND logic (i.e. every criterion must match for a specific dependency to be embedded).
* <br>
* All filter patterns follow the format {@code <filter>{,<filter>}}.
* Each {@code filter} is a string which is either an exclude (if it starts with a {@code ~}) or an include otherwise. If the first {@code filter} is an include the pattern acts as whitelist,
     * otherwise as blacklist. The last matching filter determines the outcome. Only matching dependencies are being considered for being embedded.
* <br>
* <i>The difference between {@link #embeddeds} and {@link #subPackages} is that for the former an explicit target is given while for the latter the target is being computed from the artifact's vault property file.</i>
*/
@Parameter
private Embedded[] embeddeds = new Embedded[0];
/**
* Defines whether to fail the build when an embedded artifact is not
* found in the project's dependencies
*/
@Parameter(property = "vault.failOnMissingEmbed", defaultValue = "false", required = true)
private boolean failOnMissingEmbed;
/**
* Defines the list of sub packages to be embedded in this package.
* The {@code SubPackage} class represents one or multiple subpackage artifact dependencies
* from the project descriptor. Each {@code <subPackage>} element may configure any of the following fields
* <p>
* <table>
* <tr><td>groupId</td><td>String</td><td>Filter criterion against the group id of a project dependency. A pattern as described below.</td></tr>
* <tr><td>artifactId</td><td>String</td><td>Filter criterion against the artifact ids of a project dependency. A pattern as described below.</td></tr>
     * <tr><td>scope</td><td>ScopeArtifactFilter</td><td>Filter criterion against the <a href="https://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html#Dependency_Scope">scope of a project dependency</a>. Possible values are <ul><li>{@code test}, which allows every scope</li><li>{@code compile+runtime} which allows every scope except {@code test}</li><li>{@code runtime+system} which allows every scope except {@code test} and {@code provided}</li><li>{@code compile} which allows only scope {@code compile}, {@code provided} and {@code system}</li><li>{@code runtime} which only allows scope {@code runtime} and {@code compile}.</li></ul></td></tr>
     * <tr><td>type</td><td>String</td><td>Filter criterion against the type of a project dependency. A pattern as described below.</td></tr>
* <tr><td>classifier</td><td>String</td><td>Filter criterion against the classifier of a project dependency. A pattern as described below.</td></tr>
* <tr><td>isAllVersionsFilter</td><td>Boolean</td><td>If {@code filter} is {@code true} and this is {@code true} as well, the filter entry will contain all versions of the same artifact (by creating an according filter pattern).</td></tr>
* <tr><td>filter</td><td>Boolean</td><td>If set to {@code true} adds the embedded artifact location to the package's filter</td></tr>
* </table>
     * All fields are optional. All filter criteria are concatenated with AND logic (i.e. every criterion must match for a specific dependency to be embedded as a sub package).
* <br>
* All filter patterns follow the format {@code <filter>{,<filter>}}.
* Each {@code filter} within a filter pattern is a string which is either an exclude (if it starts with a {@code ~}) or an include otherwise. If the first {@code filter} is an include the pattern acts as whitelist,
* otherwise as blacklist. The last matching filter determines the outcome. Only matching dependencies are being considered for being embedded.
* <br>
* <i>The difference between {@link #embeddeds} and {@link #subPackages} is that for the former an explicit target is given while for the latter the target is being computed from the artifact's vault property file.</i>
*/
@Parameter
private SubPackage[] subPackages = new SubPackage[0];
/**
* File to store the generated manifest snippet.
*/
@Parameter(property = "vault.generatedImportPackage", defaultValue = "${project.build.directory}/vault-generated-import.txt")
private File generatedImportPackage;
/**
* The archive configuration to use. See <a
* href="http://maven.apache.org/shared/maven-archiver/index.html">the
* documentation for Maven Archiver</a>.
*
* All settings related to manifest are not relevant as this gets overwritten by the manifest in {@link AbstractMetadataPackageMojo#workDirectory}
*/
@Parameter
private MavenArchiveConfiguration archive;
/**
* Optional reference to PNG image that should be used as thumbnail for the content package.
*/
@Parameter
private File thumbnailImage;
/**
* Defines the content package type. This is either 'application', 'content', 'container' or 'mixed'.
* If omitted, it is calculated automatically based on filter definitions. Certain package types imply restrictions,
* for example, 'application' and 'content' packages are not allowed to contain sub packages or embedded bundles.<br>
* Possible values:
* <ul>
     * <li>{@code application}: An application package consists purely of application content. It serializes
     * entire subtrees with no inclusion or exclusion filters. It does not contain any subpackages nor OSGi
     * configuration or bundles.</li>
     * <li>{@code content}: A content package consists only of content and user defined configuration.
     * It usually serializes entire subtrees but can contain inclusion or exclusion filters. It does not contain
     * any subpackages nor OSGi configuration or bundles.</li>
* <li>{@code container}: A container package only contains sub packages and OSGi configuration and bundles.
* The container package is only used as container for deployment.</li>
* <li>{@code mixed}: Catch all type for a combination of the above.</li>
* </ul>
*/
@Parameter(property = "vault.packageType")
protected PackageType packageType;
/**
* Defines whether the package is allowed to contain index definitions. This will become the
* {@code allowIndexDefinitions} property of the properties.xml file.
*/
@Parameter(
property = "vault.allowIndexDefinitions",
defaultValue="false",
required = true)
boolean allowIndexDefinitions;
// take the first "-" followed by a digit as separator between version suffix and rest
private static final Pattern FILENAME_PATTERN_WITHOUT_VERSION_IN_GROUP1 = Pattern.compile("((?!-\\d).*-)\\d.*");
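    // e.g. for "my-artifact-1.2.3.jar" group 1 captures "my-artifact-"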
public GenerateMetadataMojo() {
super();
// always emit dates in UTC timezone
iso8601DateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
}
/**
* Sets the package type.
* @param type the string representation of the package type
* @throws MojoFailureException if an error occurs
*/
public void setPackageType(String type) throws MojoFailureException {
try {
packageType = PackageType.valueOf(type.toUpperCase());
} catch (IllegalArgumentException e) {
throw new MojoFailureException("Invalid package type specified: " + type +".\n" +
"Must be empty or one of 'application', 'content', 'container', 'mixed'");
}
}
/**
* Sets the access control handling.
* @param type the string representation of the ac handling
* @throws MojoFailureException if an error occurs
*/
public void setAccessControlHandling(String type) throws MojoFailureException {
try {
accessControlHandling = AccessControlHandling.valueOf(type.toUpperCase());
} catch (IllegalArgumentException e) {
// TODO: emit in lower case
throw new MojoFailureException("Invalid accessControlHandling specified: " + type +".\n" +
"Must be empty or one of '" + StringUtils.join(AccessControlHandling.values(), "','") + "'.");
}
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (buildContext.isIncremental()) {
getLog().debug("Incremental build");
// only execute in case of changes towards the filter.xml as the generated one contains a merge
if (filterSource != null) {
if (buildContext.hasDelta(filterSource)) {
getLog().debug("Detecting a change on '" + filterSource + "' therefore not cancelling build");
} else {
getLog().debug("'" + filterSource + "' unchanged therefore cancelling build");
return;
}
} else {
getLog().debug("No file change would be relevant therefore cancelling build");
return;
}
}
if (!failOnEmptyFilter) {
getLog().warn("The parameter 'failOnEmptyFilter' is no longer supported and ignored. Every package must have a non-empty filter!");
}
final File vaultDir = getGeneratedVaultDir();
vaultDir.mkdirs();
// JCRVLT-331 share work directory to expose vault metadata between process-classes and package phases for
// multi-module builds.
getArtifactWorkDirectoryLookup(getPluginContext())
.put(getModuleArtifactKey(project.getArtifact()), workDirectory);
try {
// find the meta-inf source directory
File metaInfDirectory = getMetaInfVaultSourceDirectory();
// generate the filter.xml
String sourceFilters = computeFilters(metaInfDirectory);
computeImportPackage();
// this must happen before the filter rules are extended
// but after filters have been consolidated
if (packageType == null) {
packageType = computePackageType();
}
// calculate the embeddeds and subpackages
Map<String, File> embeddedFiles = getEmbeddeds();
embeddedFiles.putAll(getSubPackages());
setEmbeddedFilesMap(embeddedFiles);
String dependenciesString = computeDependencies();
String dependenciesLocations = computeDependenciesLocations();
// generate properties.xml
final Properties vaultProperties = computeProperties(dependenciesString, dependenciesLocations);
try (FileOutputStream fos = new FileOutputStream(new File(vaultDir, Constants.PROPERTIES_XML))) {
vaultProperties.storeToXML(fos, project.getName());
}
writeFilters(sourceFilters);
copyFile("/vault/config.xml", new File(vaultDir, Constants.CONFIG_XML));
copyFile("/vault/settings.xml", new File(vaultDir, Constants.SETTINGS_XML));
// add package thumbnail
if (thumbnailImage != null && thumbnailImage.exists()) {
File vaultDefinitionFolder = new File(vaultDir, "definition");
if (!vaultDefinitionFolder.exists()) {
vaultDefinitionFolder.mkdir();
}
copyFile("/vault/definition/.content.xml", new File(vaultDefinitionFolder, ".content.xml"));
FileUtils.copyFile(thumbnailImage, new File(vaultDefinitionFolder, "thumbnail.png"));
}
writeManifest(getGeneratedManifestFile(), dependenciesString, dependenciesLocations, vaultProperties);
} catch (IOException | ManifestException | DependencyResolutionRequiredException | ConfigurationException e) {
throw new MojoExecutionException(e.toString(), e);
}
buildContext.refresh(vaultDir);
}
void writeManifest(File file, String dependenciesString, String dependenciesLocations, final Properties vaultProperties)
throws ManifestException, DependencyResolutionRequiredException, IOException, FileNotFoundException {
// generate manifest file
MavenArchiver mavenArchiver = new MavenArchiver();
mavenArchiver.setCreatedBy("Apache Jackrabbit FileVault - Package Maven Plugin", "org.apache.jackrabbit", "filevault-package-maven-plugin");
Manifest manifest = mavenArchiver.getManifest(session, project, getMavenArchiveConfiguration(vaultProperties, dependenciesString, dependenciesLocations));
try (OutputStream out = new FileOutputStream(file)) {
manifest.write(out);
}
}
/**
* Computes the package filters.
*
* Requirements:
* - backward compatibility: if a filter.xml is copied to vault-work with the resource plugin, then it should still "work" correctly.
* - if there are any comments in the original filter source, they should not get lost, if possible
* - if there are filters specified in the pom and in a filter source, they should get merged.
* - if the prefix property is set, it should be used if no filter is set.
     * - if both an inline filter and an implicit filter are present, the build fails.
* - re-run the package goal w/o cleaning the target first must work
*
     * @return the source filter string (if a filter has been given manually), otherwise {@code null}
* @throws IOException if an I/O error occurs
* @throws MojoExecutionException if the build fails
*/
private String computeFilters(File vaultMetaDir) throws IOException, MojoExecutionException {
// backward compatibility: if implicit filter exists, use it. but check for conflicts
File filterFile = getGeneratedFilterFile();
if (filterFile.exists() && filterFile.lastModified() != 0) {
        // if both an inline filter and an implicit filter are present, the build fails.
if (!filters.getFilterSets().isEmpty()) {
getLog().error("Refuse to merge inline filters and non-sourced filter.xml. If this is intended, specify the filter.xml via the 'filterSource' property.");
throw new MojoExecutionException("Conflicting filters, look at above log for details.");
}
// load filters for further processing
try {
filters.load(filterFile);
} catch (ConfigurationException e) {
throw new IOException("Error loading filter file '" + filterFile + "'", e);
}
getLog().warn("The project is using a filter.xml provided via the resource plugin.");
getLog().warn("This is deprecated and might no longer be supported in future versions.");
getLog().warn("Use the 'filterSource' property to specify the filter or use inline filters.");
return null;
}
// if last modified of vault-work/META-INF/vault/filter.xml == 0 -> delete it
if (filterFile.exists() && filterFile.lastModified() == 0) {
try {
Files.delete(filterFile.toPath());
} catch (IOException e) {
getLog().error("Unable to delete previously generated filter.xml. re-run the goals with a clean setup.");
throw new MojoExecutionException("Unable to delete file.", e);
}
}
// check for filters file in vaultDir
if (vaultMetaDir != null) {
File metaFilterFile = new File(vaultMetaDir, "filter.xml");
if (metaFilterFile.exists()) {
if (filterSource != null && !filterSource.equals(metaFilterFile)) {
getLog().error("Project contains filter.xml in META-INF/vault but also specifies a filter source.");
throw new MojoExecutionException("Conflicting filters, look at above log for details.");
}
filterSource = metaFilterFile;
}
}
// if filterSource exists, read the filters into sourceFilters
DefaultWorkspaceFilter sourceFilters = new DefaultWorkspaceFilter();
if (filterSource != null && filterSource.exists()) {
getLog().info("Loading filter from " + filterSource.getPath());
try {
sourceFilters.load(filterSource);
} catch (ConfigurationException e) {
throw new IOException(e);
}
if (!filters.getFilterSets().isEmpty()) {
getLog().info("Merging inline filters.");
mergeFilters(sourceFilters, filters);
}
// now copy everything from sourceFilter to filters (as the latter is supposed to contain the final filter rules)!
// sourceFilters.resetSource();
// there is no suitable clone nor constructor, therefore use a serialization/deserialization approach
try (InputStream serializedFilters = sourceFilters.getSource()) {
filters.load(serializedFilters);
} catch (ConfigurationException e) {
throw new IllegalStateException("cloning filters failed.", e);
}
// reset source filters for later. this looks a bit complicated but is needed to keep the same
// filter order as in previous versions
sourceFilters = new DefaultWorkspaceFilter();
try {
sourceFilters.load(filterSource);
} catch (ConfigurationException e) {
throw new IOException("Error loading filter file '" + filterSource + "'", e);
}
}
// if the prefix property is set, it should be used if no filter is set
if (filters.getFilterSets().isEmpty() && prefix.length() > 0) {
filters.add(new PathFilterSet(prefix));
}
return sourceFilters.getSourceAsString();
}
private void mergeFilters(DefaultWorkspaceFilter dst, WorkspaceFilter src) {
for (PathFilterSet fs: src.getFilterSets()) {
// check for collision
for (PathFilterSet mfs: dst.getFilterSets()) {
if (mfs.getRoot().equals(fs.getRoot())) {
throw new IllegalArgumentException("Merging of equal filter roots not allowed for: " + fs.getRoot());
}
}
dst.add(fs);
}
}
private void writeFilters(String sourceFilters) throws IOException, MojoExecutionException {
// if no filter is defined at all, fail
if (filters.getFilterSets().isEmpty()) {
throw new MojoExecutionException("No workspace filter defined!");
}
File filterFile = getGeneratedFilterFile();
// if the source filters and the generated filters are the same, copy the source file to retain the comments
if (filterSource != null && filters.getSourceAsString().equals(sourceFilters)) {
FileUtils.copyFile(filterSource, filterFile);
} else {
// generate xml and write to filter.xml
getLog().info("Generating filter.xml from plugin configuration");
FileUtils.fileWrite(filterFile.getAbsolutePath(), filters.getSourceAsString());
}
        // update the last modified time of filter.xml to 0 for generated filters
        if (!filterFile.setLastModified(0)) {
            getLog().warn("Unable to set last modified of filters file. Make sure to clean the project before the next run.");
}
}
/**
* Computes the import-package definition from the given bundles if not provided by the project.
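     * <p>
     * The merge logic in the method body supports three forms in {@code importPackage}
     * (summarized from the implementation below): {@code -*} clears all packages detected
     * by the analyzer, a leading {@code -} removes a single package or pattern, and a
     * trailing {@code *} matches the package itself as well as all of its sub packages.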
*/
private void computeImportPackage() throws IOException {
TreeMap<String, Attrs> importParams = new TreeMap<>();
if (generatedImportPackage.exists()) {
String importPackageStr = FileUtils.fileRead(generatedImportPackage);
if (importPackageStr.length() > 0) {
importParams.putAll(new Parameters(importPackageStr));
}
}
// override computed patterns
if (importPackage != null) {
getLog().debug("merging analyzer-packages with:\n" + importPackage + "\n");
for (Map.Entry<String, Attrs> entry : new Parameters(importPackage).entrySet()) {
boolean delete = false;
String pkg = entry.getKey();
if ("-*".equals(pkg)) {
importParams.clear();
continue;
}
if (pkg.charAt(0) == '-') {
pkg = pkg.substring(1);
delete = true;
}
if (pkg.endsWith("*")) {
String pkgDot = pkg.substring(0, pkg.length() - 1);
if (!pkgDot.endsWith(".")) {
                    // matches both the package itself and its sub packages
pkg = pkgDot;
pkgDot = pkg + ".";
}
Iterator<Map.Entry<String, Attrs>> iter = importParams.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, Attrs> e = iter.next();
String pkgName = e.getKey();
if (pkgName.equals(pkg) || pkgName.startsWith(pkgDot)) {
if (delete) {
iter.remove();
} else {
e.setValue(entry.getValue());
}
}
}
} else {
if (delete) {
importParams.remove(pkg);
} else {
importParams.put(pkg, entry.getValue());
}
}
}
}
importPackage = Processor.printClauses(importParams);
if (!importPackage.isEmpty()) {
getLog().info("Merged detected packages from analyzer with 'importPackage':");
for (Map.Entry<String, Attrs> e: importParams.entrySet()) {
StringBuilder report = new StringBuilder();
report.append(" ").append(e.getKey());
try {
Processor.printClause(e.getValue(), report);
} catch (IOException e1) {
throw new IllegalStateException("Internal error while generating report", e1);
}
getLog().info(report);
}
getLog().info("");
}
}
/**
* Computes the dependency string.
* @return the dependency string
     * @throws IOException if an I/O error occurs
*/
private String computeDependencies() throws IOException {
String dependenciesString = null;
if (!dependencies.isEmpty()) {
MavenBasedPackageDependency.resolve(project, getLog(), dependencies);
Dependency[] vaultDependencies = dependencies.stream().map(MavenBasedPackageDependency::getPackageDependency).toArray(Dependency[]::new);
dependenciesString = Dependency.toString(vaultDependencies);
}
return dependenciesString;
}
private String computeDependenciesLocations() throws IOException {
String dependenciesLocations = null;
if (!dependencies.isEmpty()) {
MavenBasedPackageDependency.resolve(project, getLog(), dependencies);
            // each entry has the form <packageId>=<uri>; entries are joined with ','
dependenciesLocations = dependencies.stream().filter(a -> a.getInfo() != null).map(a -> a.getInfo().getId().toString() + "=" + a.getLocation()).collect(Collectors.joining(","));
}
return dependenciesLocations;
}
/**
* Escapes multiline manifest values to work around bug <a href="https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8222547">JDK-8222547</a>
* Java itself only adds leading SPACE in case a line is longer than 72 chars.
*
* If the value contains a newline, suffix it with an additional space (continuation character)!
*
* Unfortunately although the generated manifests for such escaped values are perfectly valid according to the spec,
* when reading those via {@link java.util.jar.Manifest} the new lines are stripped.
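     * <p>
     * For example, {@code escapeManifestValue("foo\nbar")} returns {@code "foo\n bar"},
     * i.e. every line break is followed by a continuation space.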
*/
static final String escapeManifestValue(String value) {
return value.replaceAll("\n", "\n ") // this covers CRLF and LF
.replaceAll("\r(?!\n)", "\r "); // only CR (not followed by LF)
}
private MavenArchiveConfiguration getMavenArchiveConfiguration(Properties vaultProperties, String dependenciesString, String dependenciesLocations) throws IOException {
if (archive == null) {
archive = new MavenArchiveConfiguration();
archive.setManifest(new ManifestConfiguration());
archive.setAddMavenDescriptor(true);
archive.setCompress(true);
archive.setIndex(false);
archive.getManifest().setAddDefaultSpecificationEntries(false);
archive.getManifest().setAddDefaultImplementationEntries(true);
// TODO: split up manifest generation
PackageId id = new PackageId(group, name, version);
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_TYPE, escapeManifestValue(packageType.name().toLowerCase()));
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_ID, escapeManifestValue(id.toString()));
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_DESC, escapeManifestValue(vaultProperties.getProperty("description", "")));
if (dependenciesString != null && dependenciesString.length() > 0) {
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_DEPENDENCIES, escapeManifestValue(dependenciesString));
if (dependenciesLocations != null && dependenciesLocations.length() > 0) {
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_DEPENDENCIES_LOCATIONS, escapeManifestValue(dependenciesLocations));
}
}
// be sure to avoid duplicates
Set<String> rts = new TreeSet<>();
for (PathFilterSet p: filters.getFilterSets()) {
rts.add(p.getRoot());
}
String[] roots = rts.toArray(new String[rts.size()]);
Arrays.sort(roots);
archive.addManifestEntry(PackageProperties.MF_KEY_PACKAGE_ROOTS, escapeManifestValue(StringUtils.join(roots, ",")));
// import package is not yet there!
if (StringUtils.isNotEmpty(importPackage)) {
archive.addManifestEntry(PackageProperties.MF_KEY_IMPORT_PACKAGE, escapeManifestValue(StringUtils.deleteWhitespace(importPackage)));
}
}
return archive;
}
private Properties computeProperties(String dependenciesString, String dependenciesLocations) {
final Properties props = new Properties();
// find the description of the content package (bug #30546)
// this is allowed to be overwritten by the properties map (GRANITE-1527)
String description = project.getDescription();
if (description == null) {
description = project.getName();
if (description == null) {
description = project.getArtifactId();
}
}
props.put(PackageProperties.NAME_DESCRIPTION, description);
// add all user defined properties
// before the rest of the properties to prevent user
// overwriting of predefined properties
// (see JavaDoc of properties field for list)
// but make sure, that we don't have null values in there
for (Object o : properties.keySet()) {
if (properties.get(o) == null) {
properties.put(o, "");
}
}
props.putAll(properties);
// package descriptor properties
props.put(PackageProperties.NAME_GROUP, group);
props.put(PackageProperties.NAME_NAME, name);
props.put(PackageProperties.NAME_VERSION, version);
// maven artifact identification
props.put("groupId", project.getGroupId());
props.put("artifactId", project.getArtifactId());
// dependencies
if (dependenciesString != null && dependenciesString.length() > 0) {
props.put(PackageProperties.NAME_DEPENDENCIES, dependenciesString);
if (dependenciesLocations != null && dependenciesLocations.length() > 0) {
props.put(PackageProperties.NAME_DEPENDENCIES_LOCATIONS, dependenciesLocations);
}
}
MavenArchiver archiver = new MavenArchiver();
Date createdDate = archiver.parseOutputTimestamp(outputTimestamp);
if (createdDate == null) {
createdDate = new Date();
}
props.put(PackageProperties.NAME_CREATED, iso8601DateFormat.format(createdDate));
// configurable properties
props.put(PackageProperties.NAME_REQUIRES_ROOT, String.valueOf(requiresRoot));
props.put(PackageProperties.NAME_ALLOW_INDEX_DEFINITIONS, String.valueOf(allowIndexDefinitions));
props.put(PackageProperties.NAME_PACKAGE_TYPE, packageType.name().toLowerCase());
if (accessControlHandling != null) {
props.put(PackageProperties.NAME_AC_HANDLING, accessControlHandling.name().toLowerCase());
}
return props;
}
private Map<String, File> getEmbeddeds() throws MojoFailureException, ConfigurationException {
Map<String, File> fileMap = new HashMap<>();
for (Embedded emb : embeddeds) {
final Collection<Artifact> artifacts = emb.getMatchingArtifacts(project);
if (artifacts.isEmpty()) {
if (failOnMissingEmbed) {
throw new MojoFailureException("Embedded artifact specified " + emb + ", but no matching dependency artifact found. Add the missing dependency or fix the embed definition.");
} else {
getLog().warn("No matching artifacts for " + emb);
continue;
}
}
if (emb.getDestFileName() != null && artifacts.size() > 1) {
getLog().warn("destFileName defined but several artifacts match for " + emb);
}
String targetPath = emb.getTarget();
if (targetPath == null) {
targetPath = embeddedTarget;
if (targetPath == null) {
final String loc = (prefix.length() == 0)
? "/apps/"
: prefix;
targetPath = loc + "bundles/install/";
getLog().info("No target path set on " + emb + "; assuming default " + targetPath);
}
}
targetPath = makeAbsolutePath(targetPath);
targetPath = Constants.ROOT_DIR + "/" + targetPath;
targetPath = FileUtils.normalize(targetPath);
if (!targetPath.endsWith("/")) {
targetPath += "/";
}
getLog().info("Embedding --- " + emb + " ---");
for (final Artifact artifact : artifacts) {
final File source = artifact.getFile();
String destFileName = emb.getDestFileName();
// todo: add support for patterns
if (destFileName == null) {
// If the <destFileName> param is not specified...
if (!source.isDirectory()) {
// If the artifact file is not a directory, defer to File.getName().
destFileName = source.getName();
} else {
// If the dependency file is a directory, the final artifact file has not yet been packaged.
// Construct a fallback file name from the artifact coordinates.
final String layoutBaseName = Text.getName(embedArtifactLayout.pathOf(artifact));
// Look for a peer module in the session that the artifact is attached to.
final MavenProject peerModule = findModuleForArtifact(artifact);
if (peerModule != null) {
// determine the finalName of the artifact, which is ${artifactId}-${version} by default.
final Artifact attached = peerModule.getArtifact();
final String defaultFinalName = attached.getArtifactId() + "-" + attached.getVersion();
final String peerFinalName = peerModule.getBuild().getFinalName();
if (peerFinalName != null) {
// remove the default finalName from the beginning of the layout basename, and
// prepend the specified finalName to create the destFileName.
destFileName = peerFinalName + layoutBaseName.substring(defaultFinalName.length());
}
}
// If destFileName is still null, fallback to layoutBaseName.
if (destFileName == null) {
destFileName = layoutBaseName;
}
}
}
final String targetPathName = targetPath + destFileName;
final String targetNodePathName = targetPathName.substring(Constants.ROOT_DIR.length());
getLog().info(String.format("Embedding %s (from %s) -> %s", artifact.getId(), source.getAbsolutePath(), targetPathName));
fileMap.put(targetPathName, source);
if (emb.isFilter()) {
addEmbeddedFileToFilter(targetNodePathName, emb.isAllVersionsFilter());
}
}
}
return fileMap;
}
private Map<String, File> getSubPackages() throws MojoFailureException, ConfigurationException {
final String propsRelPath = Constants.META_DIR + "/" + Constants.PROPERTIES_XML;
Map<String, File> fileMap = new HashMap<>();
for (SubPackage pack : subPackages) {
final Collection<Artifact> artifacts = pack.getMatchingArtifacts(project);
if (artifacts.isEmpty()) {
getLog().warn("No matching artifacts for sub package " + pack);
continue;
}
// get the package path
getLog().info("Embedding subpackage --- " + pack + " ---");
for (Artifact artifact : artifacts) {
final Properties props = new Properties();
final File source = artifact.getFile();
if (source.isDirectory()) {
File otherWorkDirectory = null;
final MavenProject otherProject = findModuleForArtifact(artifact);
if (otherProject != null) {
final PluginDescriptor pluginDescriptor = (PluginDescriptor) this.getPluginContext().get("pluginDescriptor");
if (pluginDescriptor != null) {
Map<String, Object> otherContext = this.session.getPluginContext(pluginDescriptor, otherProject);
otherWorkDirectory = getArtifactWorkDirectoryLookup(otherContext).get(getModuleArtifactKey(artifact));
}
}
// if not identifiable as a filevault content-package dependency, assume a generic archive layout.
if (otherWorkDirectory == null) {
otherWorkDirectory = source; // points to "target/classes"
}
File propsXml = new File(otherWorkDirectory, propsRelPath);
if (!propsXml.exists()) {
// fallback to work dir (assuming the same folder name)
propsXml = new File(otherWorkDirectory.getParent(), workDirectory.getName() + "/" + propsRelPath);
}
try (InputStream input = new FileInputStream(propsXml)) {
props.loadFromXML(input);
} catch (IOException e) {
throw new MojoFailureException("Could not read META-INF/vault/properties.xml from directory '" +
otherWorkDirectory + "' to extract metadata: " + e.getMessage(), e);
}
} else {
// load properties
try (ZipFile zip = new ZipFile(source)) {
ZipEntry e = zip.getEntry(propsRelPath);
if (e == null) {
throw new IOException("Package does not contain 'META-INF/vault/properties.xml'");
}
try (InputStream in = zip.getInputStream(e)) {
props.loadFromXML(in);
}
} catch (IOException e) {
throw new MojoFailureException("Could not open subpackage '" + source + "' to extract metadata: " + e.getMessage(), e);
}
}
PackageId pid = new PackageId(
props.getProperty(PackageProperties.NAME_GROUP),
props.getProperty(PackageProperties.NAME_NAME),
props.getProperty(PackageProperties.NAME_VERSION)
);
final String targetNodePathName = pid.getInstallationPath() + ".zip";
final String targetPathName = "jcr_root" + targetNodePathName;
getLog().info(String.format("Embedding %s (from %s) -> %s", artifact.getId(), source.getAbsolutePath(), targetPathName));
fileMap.put(targetPathName, source);
if (pack.isFilter()) {
addEmbeddedFileToFilter(targetNodePathName, pack.isAllVersionsFilter());
}
}
}
return fileMap;
}
/**
* Establishes a session-shareable workDirectory lookup map for the given pluginContext.
*
* @param pluginContext a Map retrieved from {@link MavenSession#getPluginContext(PluginDescriptor, MavenProject)}.
* @return a lookup Map. The key is {@link Artifact#getId()} and value is {@link AbstractMetadataPackageMojo#workDirectory}.
*/
@SuppressWarnings("unchecked")
static Map<String, File> getArtifactWorkDirectoryLookup(final Map pluginContext) {
final String workDirectoryLookupKey = "workDirectoryLookup";
if (!pluginContext.containsKey(workDirectoryLookupKey)) {
pluginContext.put(workDirectoryLookupKey, new ConcurrentHashMap<String, File>());
}
return (Map<String, File>) pluginContext.get(workDirectoryLookupKey);
}
/**
* Find the other project which produces the provided artifact.
*
* @param artifact the dependency artifact needle
     * @return another project in the session that produces the given artifact, or {@code null} if none matches
*/
MavenProject findModuleForArtifact(final Artifact artifact) {
for (MavenProject otherProject : session.getProjects()) {
if (otherProject != this.project) {
final Artifact otherArtifact = otherProject.getArtifact();
if (getModuleArtifactKey(artifact).equals(getModuleArtifactKey(otherArtifact))) {
return otherProject;
}
}
}
return null;
}
/**
* Construct a handler-independent artifact disambiguation key. This helps with the issue
* of matching dependency artifacts, which cannot reliably reference their original artifact handler to match the
* correct packaging type, to multimodule artifacts, which include the packaging type in their getId() result.
*
     * I.e. {@link Artifact#getId()} contains either ":content-package:" or ":zip:" depending on whether it comes from
* this.session.getProjects() -> MavenProject#getArtifact() or from this.project.getArtifacts().
*
* @param artifact the module artifact ({@link MavenProject#getArtifact()}) to identify
* @return a handler-independent artifact disambiguation key
*/
String getModuleArtifactKey(final Artifact artifact) {
return this.embedArtifactLayout.pathOf(artifact);
}
private void addEmbeddedFileToFilter(String embeddedFile, boolean includeAllVersions) throws ConfigurationException {
filters.add(getPathFilterSetForEmbeddedFile(embeddedFile, includeAllVersions));
}
static PathFilterSet getPathFilterSetForEmbeddedFile(final String embeddedFile, boolean includeAllVersions)
throws ConfigurationException {
final PathFilterSet pathFilterSet;
if (includeAllVersions) {
String filename = FilenameUtils.getName(embeddedFile);
// shorten the filter root by one level
String rootName = StringUtils.chomp(embeddedFile, "/");
String extension = FilenameUtils.getExtension(filename);
        // now find a pattern which should apply to embeddedFile and to all similar
        // artifacts with different versions (filenames of the format ${artifactId}-${version})
Matcher matcher = FILENAME_PATTERN_WITHOUT_VERSION_IN_GROUP1.matcher(filename);
if (!matcher.matches()) {
throw new IllegalArgumentException("Could not figure out version part in filename '" + filename
+ "'. For this artifact you cannot use 'isAllVersionsFilter=true'");
}
// create new pattern which matches the same artifacts in all versions
String pattern = Pattern.quote(rootName + "/" + matcher.group(1)) + ".*" + "\\." + extension + "(/.*)?";
if (!embeddedFile.matches(pattern)) {
throw new IllegalArgumentException("Detected pattern '" + pattern + "' does not even match given filename '" + embeddedFile
+ "'. For this artifact you cannot use 'isAllVersionsFilter=true'");
}
pathFilterSet = new PathFilterSet(rootName);
pathFilterSet.addInclude(new DefaultPathFilter(pattern));
} else {
pathFilterSet = new PathFilterSet(embeddedFile);
}
return pathFilterSet;
}
private String makeAbsolutePath(final String relPath) {
final String absPath;
if (!relPath.startsWith("/")) {
absPath = ((prefix.length() == 0) ? "/" : prefix) + relPath;
getLog().info("Relative path resolved to " + absPath);
} else {
absPath = relPath;
}
return absPath;
}
private PackageType computePackageType() {
final PackageType packageType;
// auto detect...
boolean hasApps = false;
boolean hasOther = false;
for (PathFilterSet p : filters.getFilterSets()) {
if (PathFilterSet.TYPE_CLEANUP.equals(p.getType())) {
continue;
}
String root = p.getRoot();
if ("/apps".equals(root) || root.startsWith("/apps/") || "/libs".equals(root) || root.startsWith("/libs/")) {
hasApps = true;
getLog().debug("Detected /apps or /libs filter entry: " + p);
} else {
hasOther = true;
getLog().debug("Detected filter entry outside /apps and /libs: " + p);
}
}
// no embeds and subpackages?
getLog().debug("Detected " + embeddeds.length + " bundle(s) and " + subPackages.length + " sub package(s).");
if (embeddeds.length == 0 && subPackages.length == 0) {
if (hasApps && !hasOther) {
packageType = PackageType.APPLICATION;
} else if (hasOther && !hasApps) {
packageType = PackageType.CONTENT;
} else {
packageType = PackageType.MIXED;
}
} else {
if (!hasApps && !hasOther) {
packageType = PackageType.CONTAINER;
} else {
packageType = PackageType.MIXED;
}
}
getLog().info("Auto-detected package type: " + packageType.toString().toLowerCase());
return packageType;
}
    /**
     * Copies the class loader resource given by {@code source} into the file given in {@code target} in case
     * the target file does not exist yet.
     * @param source the name of the class loader resource
     * @param target the target file to copy to
     * @throws IOException if an I/O error occurs
     */
private void copyFile(String source, File target) throws IOException {
// nothing to do if the file exists
if (target.exists()) {
return;
}
target.getParentFile().mkdirs();
try (InputStream ins = getClass().getResourceAsStream(source);
OutputStream out = new FileOutputStream(target)) {
if (ins != null) {
IOUtil.copy(ins, out);
} else {
throw new IllegalArgumentException("Could not find resource " + source);
}
}
}
} |
#!/usr/bin/python
'''
This script imports users and groups from json to KYPO AAI database.
JSON files were generated by perun-service named kypo_portal.
author: <NAME>
date: 2016-05-02
version: 1.0.0
2016-05-13: In getting data from DB added code to ignore testing DB entries with no external_id
'''
import sys
import json
import psycopg2
'''Information to access DB'''
DB_NAME = 'SET DB NAME HERE'
DB_USER = 'SET DB USER HERE'
DB_HOST = 'SET DB HOST HERE'
DB_PSSWD = 'SET DB PASSWORD HERE'
CONN_STRING = "dbname='{0}' user='{1}' host='{2}' password='{3}'".format(DB_NAME, DB_USER, DB_HOST, DB_PSSWD)
''' Names of tables in DB'''
USER_TABLE = 'users'
GROUP_TABLE = 'idm_group'
IDENTITY_TABLE = 'user_identity'
USERINGROUP_TABLE = 'user_idm_group'
''' Source JSON files '''
USERS_SRC = '/tmp/users.scim'
GROUPS_SRC = '/tmp/groups.scim'
try:
    conn = psycopg2.connect(CONN_STRING)
except psycopg2.Error as e:
    print "Unable to connect to DB: {0}".format(e)
    sys.exit(1)
''' DEFINING VARIABLES '''
class User(object):
def __init__(self):
self.displayName = ""
self.mail = ""
self.status = ""
self.external_id = 0
def __eq__(self,other):
return self.displayName == other.displayName and self.mail == other.mail and self.status == other.status and self.external_id == other.external_id
class Group(object):
    def __init__(self):
        self.name = ""
        self.external_id = 0
        self.parrent_group_id = 0
def __eq__(self,other):
return self.name == other.name and self.external_id == other.external_id
class Identity(object):
def __init__(self):
self.user_id = 0
self.external_id = 0
self.login = ""
def __eq__(self,other):
return self.external_id == other.external_id and self.login == other.login
class UserInGroup(object):
def __init__(self):
self.group_id = 0
self.user_id = 0
self.group_external_id = 0
self.user_external_id = 0
def __eq__(self,other):
return self.group_external_id == other.group_external_id and self.user_external_id == other.user_external_id
usersDB = list()
groupsDB = list()
identitiesDB = list()
userInGroupDB = list()
userDB_ids = list()
userIdsToUpd = list()
userIdsToDis = list()
groupDB_ids = list()
groupIdsToUpd = list()
groupIdsToDel = list()
userJSON_ids = list()
groupJSON_ids = list()
identities_json = list()
users_list = list()
identities_list = list()
groups_list = list()
usersInGroups_list = list()
changedUsers = list()
changedGroups = list()
identitiesToDel = list()
''' GETTING DATA FROM JSON '''
json_users = open(USERS_SRC)
users_data = json.load(json_users)
json_users.close()
json_groups = open(GROUPS_SRC)
groups_data = json.load(json_groups)
json_groups.close()
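# Expected JSON shapes (inferred from the parsing code below, not from a schema):
# users.scim:  [{"id": ..., "displayName": ..., "mail": ..., "status": ...,
#                "identities": ["login1", ...]}, ...]
# groups.scim: [{"id": ..., "name": ..., "parrentGroupId": ... or null,
#                "members": [{"userId": ...}, ...]}, ...]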
''' PARSING DATA FROM users.scim '''
for item in users_data:
userJSON_ids.append(int(item['id']))
tmpUser = User()
tmpUser.displayName = (item['displayName']).encode('utf-8')
tmpUser.mail = (item['mail']).encode('utf-8')
tmpUser.status = (item['status']).encode('utf-8')
tmpUser.external_id = int(item['id'])
users_list.append(tmpUser)
for i in item['identities']:
identities_json.append(i.encode('utf-8'))
tmpIdentity = Identity()
tmpIdentity.login = i.encode('utf-8')
tmpIdentity.external_id = int(item['id'])
identities_list.append(tmpIdentity)
''' PARSING DATA FROM groups.scim '''
for item in groups_data:
groupJSON_ids.append(int(item['id']))
tmpGroup = Group()
tmpGroup.name = (item['name']).encode('utf-8')
tmpGroup.external_id = int(item['id'])
if item['parrentGroupId'] is None:
tmpGroup.parrent_group_id = 'default'
else:
tmpGroup.parrent_group_id = int(item['parrentGroupId'])
groups_list.append(tmpGroup)
for i in item['members']:
tmpUserInGroup = UserInGroup()
tmpUserInGroup.user_external_id = int(i['userId'])
tmpUserInGroup.group_external_id = int(item['id'])
usersInGroups_list.append(tmpUserInGroup)
''' WORK WITH DB '''
cur = conn.cursor()
''' GETTING ACTUAL USERS FROM DB '''
try:
cur.execute('SELECT * FROM {0};'.format(USER_TABLE));
except psycopg2.Error as e:
    print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
for row in cur:
    # Skip testing entries in the DB that have no external_id
if row[2] is None:
continue
userDB_ids.append(row[2])
tmpUser = User()
tmpUser.displayName = row[1]
tmpUser.mail = row[3]
tmpUser.status = row[4]
tmpUser.external_id = row[2]
usersDB.append(tmpUser)
''' GETTING USERS THAT HAVE BEEN CHANGED TO LIST '''
changedUsers = [item for item in users_list if item not in usersDB]
for item in changedUsers:
userIdsToUpd.append(item.external_id)
''' GETTING USERS THAT ARE NOT IN JSON BUT IN DB TO LIST '''
userIdsToDis = list(set(userDB_ids) - set(userJSON_ids))
''' INSERTS AND UPDATES OF USERS '''
for item in users_list:
if int(item.external_id) not in userDB_ids:
try:
cur.execute('INSERT INTO {0} (id, display_name, mail, status, external_id) VALUES (default, '"'{1}'"', '"'{2}'"', '"'{3}'"', '"'{4}'"');'
.format(USER_TABLE, item.displayName, item.mail, item.status, item.external_id))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
if int(item.external_id) in userIdsToUpd:
try:
cur.execute('UPDATE {0} SET display_name = '"'{1}'"', mail = '"'{2}'"', status = '"'{3}'"' WHERE external_id = '"'{4}'"';'
.format(USER_TABLE, item.displayName, item.mail, item.status, item.external_id))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' GETTING ACTUAL GROUPS FROM DB '''
try:
cur.execute('SELECT * FROM {0};'.format(GROUP_TABLE))
except psycopg2.Error as e:
    print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
for row in cur:
    # Skip testing entries in the DB that have no external_id
if row[1] is None:
continue
groupDB_ids.append(row[1])
tmpGroup = Group()
tmpGroup.name = row[2]
tmpGroup.external_id = row[1]
    tmpGroup.parrent_group_id = row[3]
groupsDB.append(tmpGroup)
''' GETTING GROUPS THAT HAVE BEEN CHANGED TO LIST'''
changedGroups = [item for item in groups_list if item not in groupsDB]
for item in changedGroups:
groupIdsToUpd.append(item.external_id)
''' GETTING GROUPS THAT ARE NOT IN JSON BUT IN DB TO LIST '''
groupIdsToDel = list(set(groupDB_ids) - set(groupJSON_ids))
''' INSERTS AND UPDATES OF GROUPS '''
for item in groups_list:
if int(item.external_id) not in groupDB_ids:
try:
cur.execute('INSERT INTO {0} (id, name, external_id, parent_group_id) VALUES (default, '"'{1}'"', '"'{2}'"', {3});'
.format(GROUP_TABLE, item.name, item.external_id, item.parrent_group_id))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
if int(item.external_id) in groupIdsToUpd:
try:
cur.execute('UPDATE {0} SET name = '"'{1}'"', parent_group_id = {2} WHERE external_id = '"'{3}'"';'
.format(GROUP_TABLE, item.name, item.parrent_group_id, item.external_id))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' GETTING ACTUAL IDENTITES FROM DB '''
try:
cur.execute('SELECT id, external_id, login FROM {0}, {1} WHERE id = user_id;'.format(USER_TABLE, IDENTITY_TABLE))
except psycopg2.Error as e:
    print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
for row in cur:
    # only the login is needed: identitiesDB is compared against the
    # plain login strings collected in identities_json
    identitiesDB.append(row[2])
''' GETTING IDENTITIES NOT IN JSON BUT IN DB '''
identitiesToDel = list(set(identitiesDB) - set(identities_json))
''' INSERTS OF IDENTITIES '''
for item in identities_list:
if item.login not in identitiesDB:
try:
cur.execute('SELECT id FROM {0} WHERE external_id = {1};'.format(USER_TABLE, item.external_id))
            user_id = cur.fetchone()[0]
            cur.execute('INSERT INTO {0} (user_id, login) VALUES ('"'{1}'"', '"'{2}'"');'.format(IDENTITY_TABLE, user_id, item.login))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' GETTING ACTUAL MEMBERSHIPS FROM DB '''
try:
cur.execute('SELECT {1}.id, {0}.id, {1}.external_id, {0}.external_id FROM {0}, {1}, {2} WHERE {0}.id = {2}.user_id and {1}.id = {2}.idm_group_id;'
.format(USER_TABLE, GROUP_TABLE, USERINGROUP_TABLE))
except psycopg2.Error as e:
    print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
for row in cur:
    # Skip testing entries in the DB that have no external_id
if row[2] is None or row[3] is None:
continue
tmpUserInGroup = UserInGroup()
tmpUserInGroup.group_id = int(row[0])
tmpUserInGroup.user_id = int(row[1])
tmpUserInGroup.group_external_id = int(row[2])
tmpUserInGroup.user_external_id = int(row[3])
userInGroupDB.append(tmpUserInGroup)
''' INSERTS NEW MEMBERSHIPS FROM JSON '''
for item in usersInGroups_list:
if item not in userInGroupDB:
try:
cur.execute('SELECT id FROM {0} WHERE external_id = {1};'.format(USER_TABLE, item.user_external_id))
user_id = cur.fetchone()[0]
cur.execute('SELECT id FROM {0} WHERE external_id = {1};'.format(GROUP_TABLE, item.group_external_id))
group_id = cur.fetchone()[0]
cur.execute('INSERT INTO {0} (user_id, idm_group_id) VALUES('"'{1}'"', '"'{2}'"');'.format(USERINGROUP_TABLE, user_id, group_id))
except psycopg2.Error as e:
            print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' MEMBERSHIPS NOT IN JSON WILL BE DELETED '''
userInGroupToDel = [item for item in userInGroupDB if item not in usersInGroups_list]
''' DELETING MEMBERSHIPS FROM DB '''
for item in userInGroupToDel:
try:
cur.execute('DELETE FROM {0} WHERE user_id = '"'{1}'"' and idm_group_id = '"'{2}'"';'.format(USERINGROUP_TABLE, item.user_id, item.group_id))
except psycopg2.Error as e:
        print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' DELETING IDENTITIES FROM DB '''
for item in identitiesToDel:
try:
cur.execute('DELETE FROM {0} WHERE login = '"'{1}'"';'.format(IDENTITY_TABLE, item))
except psycopg2.Error as e:
        print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' USERS NOT IN JSON ARE SET TO DISABLED '''
for item in userIdsToDis:
try:
cur.execute ('UPDATE {0} SET status = '"'disabled'"' WHERE external_id = '"'{1}'"';'.format(USER_TABLE, item))
except psycopg2.Error as e:
        print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
''' ALL USERS ARE DELETED FROM GROUPS NOT IN JSON '''
for item in groupIdsToDel:
try:
cur.execute('SELECT id FROM {0} WHERE external_id = '"'{1}'"';'.format(GROUP_TABLE, item))
group_id = cur.fetchone()[0]
        cur.execute('DELETE FROM {0} WHERE idm_group_id = '"'{1}'"';'.format(USERINGROUP_TABLE, group_id))
except psycopg2.Error as e:
        print('DB Error {0}'.format(e))
cur.close()
conn.close()
sys.exit(1)
conn.commit()
cur.close()
conn.close()
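# --- Hedged sketch: parameter binding instead of string formatting ---
# The statements above interpolate values directly into the SQL text.
# psycopg2 can bind values with %s placeholders, which also handles quoting
# and escaping. Placeholders work only for values, so table names are still
# interpolated. The helper below is illustrative only and is never called.
def _insert_identity_parameterized(cur, item):
    cur.execute('SELECT id FROM {0} WHERE external_id = %s;'.format(USER_TABLE),
                (item.external_id,))
    user_id = cur.fetchone()[0]
    cur.execute('INSERT INTO {0} (user_id, login) VALUES (%s, %s);'.format(IDENTITY_TABLE),
                (user_id, item.login))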
|
Carl Hannah Slaying Suspects.jpg
Arshakia Chanae Walker and Fernando Gulley are charged with manslaughter in the beating death of 56-year-old Carl Hannah Jr.
(Jefferson County Jail)
A man and a woman are charged with manslaughter in the beating death of a 56-year-old man earlier this year.
A Jefferson County grand jury has indicted Fernando Gulley, 37, and Arshakia Chanae Walker, 27. The pair was initially arrested in April, court records show, but those arrests were never announced.
They are charged in the death of Carl Hannah Jr. The assault happened Sunday, Feb. 17, in the 200 block of Fourth Terrace North. Police responded to the location on a report of an unresponsive man, and found him there unconscious.
Carl Hannah Jr.
Hannah was taken to UAB Hospital, where he remained in intensive care until he was pronounced dead Thursday, March 10, at 2:28 p.m. Before he died, Hannah told officers he was assaulted by two suspects.
According to the indictments against Walker and Gulley, the pair is accused of beating Hannah with "hands and/or feet and/or an unknown object." They "caused the death due to a sudden heat of passion caused by provocation recognized by law and before a reasonable time for the passion to cool and for reason to reassert itself."
Authorities said the suspects and the victim knew each other, and that Hannah at one point had dated Walker. No additional details were made available.
Walker and Gulley are both out of jail on $30,000 bond. No court dates have yet been announced. |
#ifndef __CMD_PROC__
#define __CMD_PROC__
#ifdef __cplusplus
extern "C" {
#endif
//*****************************************************************************
typedef struct
{
char *buffer;
int length;
} CmdResponse;
typedef int (*CmdHandler)(const char *argument, CmdResponse *resp);
typedef const struct
{
const char * const cmd; // full command word
CmdHandler vector;
} CmdTable;
int CP_Lookup(char *line, const CmdTable *tbl, CmdResponse *resp);
const char *CP_SkipChars(const char *ptr);
const char *CP_SkipSpace(const char *ptr);
//*****************************************************************************
#ifdef __cplusplus
}
#endif
#endif
|
The Impact of Drug Metabolism Gene Polymorphisms on Therapeutic Response and Survival in Diffuse Large B-Cell Lymphoma Patients.
BACKGROUND
Diffuse large B-cell lymphoma (DLBCL) accounts for 30% of all non-Hodgkin lymphomas (NHL) and 80% of aggressive lymphomas. Besides the traditional International Prognostic Index (IPI), some other factors may also influence the prognosis of DLBCL patients.
OBJECTIVES
To study how genetic polymorphisms in the metabolic pathway influence event-free and overall survival and therapeutic response in DLBCL.
METHODS
The study comprised 51 patients (32 men, 19 women). The average age was 53.1 years. DLBCL was diagnosed between 2011 and 2016 and the average follow-up time was 3.78 years. These patients received 1-8 cycles (an average of 6.2 cycles) of rituximab, cyclophosphamide, doxorubicin, vincristine, prednisolone (R-CHOP) immunochemotherapy. Real-time polymerase chain reaction was used to determine the genetic polymorphisms of CYP2E1, GSTP1, NAT1, and NAT2 genes.
RESULTS
Our results showed that the polymorphisms of CYP2E1, GSTP1, and NAT1 genes did not influence the prognosis of DLBCL patients significantly. In terms of the NAT2 gene, GG homozygous patients showed slightly better therapeutic response and survival results compared to those bearing an A allele; however, the differences were not statistically significant.
CONCLUSIONS
Our results could not confirm that genetic polymorphism in metabolic pathways has any predictive role in DLBCL. |
"""
Markdown extensions for freelinks and wikilinks, with
optional @target handling.
"""
import re
from markdown import util, inlinepatterns
from markdown.extensions.wikilinks import (WikiLinkExtension,
WikiLinks)
from tiddlyweb.fixups import quote
FRONTBOUND = r'(?:^|(?<=[\s|\(]))'
FREELINKRAW = r'\[\[([^]]+?)\]\]'
#FREELINKRAW = r'\[\[[^]]+?\]\]'
FREELINKB = FRONTBOUND + FREELINKRAW
WIKILINKB = FRONTBOUND + r'(~?[A-Z][a-z]+[A-Z]\w+\b)'
FREELINK = FREELINKB + '(?!@)'
WIKILINK = WIKILINKB + '(?!@)'
TARGETLINK_BASE = (r'(@(?:' + FREELINKRAW +
r'|([0-9A-Za-z][0-9A-Za-z\-]*[0-9A-Za-z])))(?=\b|$|[^]])')
TARGETLINK = FRONTBOUND + TARGETLINK_BASE
WIKITARGET = WIKILINKB + TARGETLINK_BASE
FREETARGET = FREELINKB + TARGETLINK_BASE
class MarkdownLinksExtension(WikiLinkExtension):
def __init__(self, *args, **kwargs):
self.config = {
'base_url': ['', 'String to append to beginning of URL.'],
'end_url': ['', 'String to append to end of URL.'],
'html_class': ['wikilink', 'CSS hook. Leave blank for none.'],
'environ': [{}, 'Base wsgi environ'],
}
for key, value in kwargs.items():
self.setConfig(key, value)
def extendMarkdown(self, md, md_globals):
self.md = md
configs = self.getConfigs()
tiddlywebconfig = configs['environ'].get('tiddlyweb.config', {})
interlinker = tiddlywebconfig.get('markdown.interlinker', None)
wikilinkPattern = MarkdownLinks(WIKILINK, configs)
wikilinkPattern.md = md
md.inlinePatterns.add('wikilink', wikilinkPattern, '<link')
freelinkPattern = MarkdownLinks(FREELINK, configs)
freelinkPattern.md = md
md.inlinePatterns.add('freelink', freelinkPattern, '<wikilink')
if interlinker:
wikitargetlinkPattern = TargetLinks(WIKITARGET, configs)
wikitargetlinkPattern.md = md
md.inlinePatterns.add('wikitargetlink', wikitargetlinkPattern,
'<wikilink')
freetargetlinkPattern = TargetLinks(FREETARGET, configs)
freetargetlinkPattern.md = md
md.inlinePatterns.add('freetargetlink', freetargetlinkPattern,
'<wikitargetlink')
targetlinkPattern = TargetLinks(TARGETLINK, configs)
targetlinkPattern.md = md
md.inlinePatterns.add('targetlink', targetlinkPattern,
'>wikitargetlink')
class TargetLinks(inlinepatterns.Pattern):
def __init__(self, pattern, config):
inlinepatterns.Pattern.__init__(self, pattern)
self.config = config
tiddlywebconfig = config['environ'].get('tiddlyweb.config', {})
self.interlinker = tiddlywebconfig.get('markdown.interlinker', None)
def handleMatch(self, m):
if m.lastindex == 6: # we have a wikitargetlink or freetarget
page = m.group(2)
target = m.group(3)
if page and target:
if '|' in page:
label, destination = page.split('|', 1)
else:
label = destination = page
a = util.etree.Element('a')
a.text = util.AtomicString(label)
# Target regexp returns a different group depending on
# the match.
target = m.group(5) or m.group(4)
target_base = self.interlinker(self.config['environ'], target)
if not target_base.endswith('/'):
target_base = target_base + '/'
a.set('href', target_base + encode_name(destination))
return a
else:
matched_text = m.group(2)
if matched_text:
a = util.etree.Element('a')
a.text = util.AtomicString(matched_text)
target = m.group(4) or m.group(3)
a.set('href', self.interlinker(self.config['environ'], target))
return a
return ''
class MarkdownLinks(WikiLinks):
def handleMatch(self, m):
matched_text = m.group(2)
if matched_text:
matched_text = matched_text.strip()
base_url, end_url, html_class = self._getMeta()
if '|' in matched_text:
label, target = matched_text.split('|', 1)
else:
# short circuit escaping of ~WikiLink
if (re.match(WIKILINK, matched_text)
and matched_text.startswith('~')):
return matched_text[1:]
label = target = matched_text
url = '%s%s%s' % (base_url, encode_name(target), end_url)
a = util.etree.Element('a')
a.text = util.AtomicString(label)
a.set('href', url)
if html_class:
a.set('class', html_class)
else:
a = ''
return a
def makeExtension(**kwargs):
return MarkdownLinksExtension(**kwargs)
def encode_name(name):
"""
Like the encode_name found in tiddlyweb, but does not escape #.
"""
return quote(name.encode('utf-8'), safe=".!~*'()#")
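# --- Hedged usage sketch (not part of the original module) ---
# Rendering a fragment with this extension registered, assuming the
# Markdown 2.x API that extendMarkdown(md, md_globals) above targets;
# the base_url, empty environ, and sample text are illustrative.
def _demo_render():
    import markdown
    return markdown.markdown(
        'See [[Some Page]] and CamelCase links.',
        extensions=[MarkdownLinksExtension(base_url='/wiki/', environ={})])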
|
//____________________________________________________________
//
// detect if it's a tape, rewind if so
// and set the buffer size
//
DESC NT_tape_open(const char* name, ULONG mode, ULONG create)
{
HANDLE handle;
TAPE_GET_MEDIA_PARAMETERS param;
DWORD size = sizeof(param);
BurpGlobals* tdgbl = BurpGlobals::getSpecific();
if (strnicmp(name, "\\\\.\\tape", 8))
{
handle = CreateFile(name, mode,
mode == MODE_WRITE ? 0 : FILE_SHARE_READ,
NULL, create, FILE_ATTRIBUTE_NORMAL, NULL);
}
else
{
// Note: we *want* to open the tape in FILE_EXCLUSIVE_WRITE, but
// during testing discovered that several NT tape drives do not
// work unless we specify FILE_SHARE_WRITE as the open mode.
// So it goes...
handle = CreateFile(name, mode | MODE_READ,
mode == MODE_WRITE ? FILE_SHARE_WRITE : FILE_SHARE_READ,
0, OPEN_EXISTING, 0, NULL);
if (handle != INVALID_HANDLE_VALUE)
{
// Emulate UNIX rewinding the tape on open:
// This MUST be done since Windows does NOT have anything
// like mt to allow the user to do tape management. The
// implication here is that we will be able to write ONLY
// one (1) database per tape. This is bad if the user wishes to
// backup several small databases.
// Note: We are intentionally NOT trapping for errors during
// rewind, since if we can not rewind, we are either a non-rewind
// device (then it is user controlled) or we have a problem with
// the physical media. In the latter case I would rather wait for
// the write to fail so that we can loop and prompt the user for
// a different file/device.
SetTapePosition(handle, TAPE_REWIND, 0, 0, 0, FALSE);
if (GetTapeParameters(handle, GET_TAPE_MEDIA_INFORMATION, &size, &param) == NO_ERROR)
{
tdgbl->io_buffer_size = param.BlockSize;
}
}
}
return handle;
} |
import * as lodash from "lodash";
import * as winston from "winston";
function normalizeError(error: any, level: string): void | Error | object {
if (lodash.isString(error)) {
return new Error(error);
}
if (!(error instanceof Error)) {
return;
}
const normalizedError: any = {};
Object.assign(
normalizedError,
lodash.omit(error, ["response", "constructor", "toString", "errors"])
);
// Restore common fields from the original error
Object.assign(normalizedError, {
name: error.name,
message: error.message,
});
if (level === "error") {
normalizedError.stack = error.stack;
}
return normalizedError;
}
const baseFormat = winston.format((context) => {
if (context.error) {
const normalizedError = normalizeError(context.error, context.level);
if (normalizedError) {
context.error = normalizedError;
}
}
return context;
});
type LogFunction = (message: string, context?: { [key: string]: any }) => void;
export interface LoggerInstance {
silly: LogFunction;
debug: LogFunction;
verbose: LogFunction;
info: LogFunction;
warn: LogFunction;
error: LogFunction;
}
export const logger = {
jsonLogger(level: string): LoggerInstance {
const transports = [new winston.transports.Console({ level })];
const format = winston.format.combine(baseFormat(), winston.format.json());
return winston.createLogger({
transports,
format,
});
},
};
|
#!/usr/bin/env python3
# ################################################################################
# edited WHS, OJ , 12.12.2020 #
# usage
# $1 roslaunch turtlebot3_gazebo turtlebot3_house.launch
# $2 roslaunch turtlebot3_navigation turtlebot3_navigation.launch /
# map_file:=$HOME/catkin_ws/src/rtc/rtc_maps/gazebo_house_map_2020_12_07
# $3 rosrun rtc turtlebot3_move_base_action_client.py (this file here)
# the Server is already started with move_base
# based on the code from
# https://hotblackrobotics.github.io/en/blog/2018/01/29/action-client-py/
# use the RViz arrow tool "Navigation Goal" to write the
# poses into the path file
# set a different topic via Tool Properties
# with publish_pose_2_file.py
import rospy
import actionlib # Brings in the SimpleActionClient
# Brings in the .action file and messages used by the move base action
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
# Initial coordinates for position x,y and orientation x,y,z,w
path = [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
# Full path of the file
filename = "/home/oj/catkin_ws/src/rtc/nodes/ue07_navigation_amcl/path.txt"
def read_path_from_file(filename):
rospy.loginfo("Reading Path from path.txt : ")
# Read in the given path, one goal per line
with open(filename, 'r') as fin:
for line in fin:
path.append(eval(line)) # append goal
del path[0] # remove the initial [0, 0] placeholder
rospy.loginfo(str(path))
def movebase_client():
# Create an action client called "move_base" /
# with action definition file "MoveBaseAction"
# /opt/ros/noetic/include/move_base_msgs
client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
# Waits until the action server has started up
# and is listening for goals.
client.wait_for_server()
read_path_from_file(filename) # fetch the path from file
# Creates a new goal with the MoveBaseGoal constructor
goal = MoveBaseGoal()
goal.target_pose.header.frame_id = "map"
goal.target_pose.header.stamp = rospy.Time.now()
for koord in path:
# set position (no z)
goal.target_pose.pose.position.x = koord[0]
goal.target_pose.pose.position.y = koord[1]
# set orientation - quaternion
goal.target_pose.pose.orientation.x = koord[2]
goal.target_pose.pose.orientation.y = koord[3]
goal.target_pose.pose.orientation.z = koord[4]
goal.target_pose.pose.orientation.w = koord[5]
# Sends the goal to the action server.
client.send_goal(goal)
# wait for the action to return, with timeout
finished_before_timeout = client.wait_for_result(
rospy.Duration(120.0))
if finished_before_timeout:
rospy.loginfo(" Reached Goal before Timeout ")
else:
rospy.loginfo(" Timeout ")
return client.get_result()
# If the python node is executed as main process (sourced directly)
if __name__ == '__main__':
try:
# let the SimpleActionClient publish and subscribe
rospy.init_node('movebase_client_py')
result = movebase_client()
if result:
rospy.loginfo("Path execution done!")
except rospy.ROSInterruptException:
rospy.loginfo("Navigation test finished.")
|
#!/usr/bin/env python3
import torch
import speechbrain as sb
from torch.utils.data import DataLoader
from speechbrain import Stage
from tqdm.contrib import tqdm
import os
import pandas as pd
import wandb
import numpy as np
import json
import losses.eval_metrics as em
class SpoofSpeechClassifier(sb.Brain):
def compute_forward(self, batch, stage):
"""Runs all the computation of that transforms the input into the
output probabilities over the N classes.
Arguments
---------
batch : PaddedBatch
This batch object contains all the relevant tensors for computation.
stage : sb.Stage
One of sb.Stage.TRAIN, sb.Stage.VALID, or sb.Stage.TEST.
Returns
-------
predictions : Tensor
Tensor that contains the posterior probabilities over the N classes.
"""
# We first move the batch to the appropriate device.
batch = batch.to(self.device)
# Compute features, embeddings, and predictions
feats, lens = self.prepare_features(batch.sig, stage)
embeddings = self.modules.embedding_model(feats.cuda(), lens.cuda())
predictions = self.modules.classifier(embeddings)
return predictions
def prepare_features(self, wavs, stage):
"""Prepare the features for computation, including augmentation.
Arguments
---------
wavs : tuple
Input signals (tensor) and their relative lengths (tensor).
stage : sb.Stage
The current stage of training.
"""
wavs, lens = wavs
if stage == sb.Stage.TRAIN:
if hasattr(self.modules, "env_corrupt"):
wavs_noise = self.modules.env_corrupt(wavs, lens)
wavs = torch.cat([wavs, wavs_noise], dim=0)
lens = torch.cat([lens, lens])
if hasattr(self.hparams, "augmentation"):
wavs = self.hparams.augmentation(wavs, lens)
# Choose what features we want to use
# todo: support multiple features and feature concat
target_feats = self.hparams.embedding_features
FEATURE_EXTRACTOR = {
# 'cqt': self.modules.cqt,
# 'fbanks': self.modules.fbanks
'fastaudiogauss': self.modules.fastaudiogauss
# 'ifr': self.modules.ifr
# 'mag': self.modules.mag
# 'mfcc': self.modules.mfcc
# 'leaf': self.modules.leaf
# 'tdfbanks': self.modules.tdfbanks
# 'pcen': self.modules.pcen
# 'sincnet': self.modules.sincnet
# 'trainable_fbanks': self.modules.trainable_fbanks
}
if len(target_feats) == 1:
# wavs = wavs.unsqueeze(1).cuda()
feats = FEATURE_EXTRACTOR[target_feats[0]](wavs)
# feats = torch.unsqueeze(feats, 1)
# feats = torch.transpose(feats, 1,2)
if target_feats[0]=='cqt':
log_spec = 10.0 * torch.log10(torch.clamp(feats, min=1e-30))
log_spec -= 10.0
feats=log_spec
feats = torch.transpose(feats, 1,2)
else:
feats = []
for target in target_feats:
temp = FEATURE_EXTRACTOR[target](wavs)
if target=='cqt':
temp = torch.transpose(temp, 1,2)
feats.append(temp)
f =feats[0]
for i in range(1, len(feats)):
f = torch.cat((f, feats[i]), dim=2)
feats = f
feats = self.modules.mean_var_norm(feats, lens)
return feats, lens
def compute_objectives(self, predictions, batch, stage):
"""Computes the loss given the predicted and targeted outputs.
Arguments
---------
predictions : tensor
The output tensor from `compute_forward`.
batch : PaddedBatch
This batch object contains all the relevant tensors for computation.
stage : sb.Stage
One of sb.Stage.TRAIN, sb.Stage.VALID, or sb.Stage.TEST.
Returns
-------
loss : torch.Tensor
A one-element tensor used for backpropagating the gradient.
"""
_, lens = batch.sig
spkid, _ = batch.key_encoded
# Concatenate labels (due to data augmentation)
if stage == sb.Stage.TRAIN and hasattr(self.modules, "env_corrupt"):
spkid = torch.cat([spkid, spkid], dim=0)
lens = torch.cat([lens, lens])
# Compute the cost function
loss = sb.nnet.losses.bce_loss(predictions, spkid, lens)
# Compute classification error at test time
if stage != sb.Stage.TRAIN:
self.error_metrics.append(batch.id, predictions, spkid, lens)
return loss
def on_stage_start(self, stage, epoch=None):
"""Gets called at the beginning of each epoch.
Arguments
---------
stage : sb.Stage
One of sb.Stage.TRAIN, sb.Stage.VALID, or sb.Stage.TEST.
epoch : int
The currently-starting epoch. This is passed
`None` during the test stage.
"""
# Set up statistics trackers for this stage
# Set up evaluation-only statistics trackers
if stage != sb.Stage.TRAIN:
self.error_metrics = self.hparams.error_stats()
if stage == sb.Stage.VALID:
label_encoder = sb.dataio.encoder.CategoricalEncoder()
lab_enc_file = os.path.join(self.hparams.save_folder, "label_encoder.txt")
label_encoder.load(path=lab_enc_file)
self.bona_index = label_encoder.encode_label('bonafide')
self.spoof_index = label_encoder.encode_label('spoof')
self.pd_out = {'files': [], 'scores': []}
def on_stage_end(self, stage, stage_loss, epoch=None):
def compute_det_curve(target_scores, nontarget_scores):
n_scores = target_scores.size + nontarget_scores.size
all_scores = np.concatenate((target_scores, nontarget_scores))
labels = np.concatenate((np.ones(target_scores.size), np.zeros(nontarget_scores.size)))
# Sort labels based on scores
indices = np.argsort(all_scores, kind='mergesort')
labels = labels[indices]
# Compute false rejection and false acceptance rates
tar_trial_sums = np.cumsum(labels)
nontarget_trial_sums = nontarget_scores.size - (np.arange(1, n_scores + 1) - tar_trial_sums)
frr = np.concatenate((np.atleast_1d(0), tar_trial_sums / target_scores.size)) # false rejection rates
far = np.concatenate(
(np.atleast_1d(1), nontarget_trial_sums / nontarget_scores.size)) # false acceptance rates
thresholds = np.concatenate(
(
np.atleast_1d(all_scores[indices[0]] - 0.001), all_scores[indices])) # Thresholds are the sorted scores
return frr, far, thresholds
def compute_eer(target_scores, nontarget_scores):
""" Returns equal error rate (EER) and the corresponding threshold. """
frr, far, thresholds = compute_det_curve(target_scores, nontarget_scores)
abs_diffs = np.abs(frr - far)
min_index = np.argmin(abs_diffs)
eer = np.mean((frr[min_index], far[min_index]))
return eer, thresholds[min_index]
def get_eer_tDCF(asv_score_file='losses/LA.asv.dev.scores.txt',
cm_target_score_file='predictions/target_score.json',
cm_nontarget_score_file='predictions/nontarget_score.json'
):
Pspoof = 0.05
cost_model = {
'Pspoof': Pspoof, # Prior probability of a spoofing attack
'Ptar': (1 - Pspoof) * 0.99, # Prior probability of target speaker
'Pnon': (1 - Pspoof) * 0.01, # Prior probability of nontarget speaker
'Cmiss_asv': 1, # Cost of ASV system falsely rejecting target speaker
'Cfa_asv': 10, # Cost of ASV system falsely accepting nontarget speaker
'Cmiss_cm': 1, # Cost of CM system falsely rejecting target speaker
'Cfa_cm': 10, # Cost of CM system falsely accepting spoof
}
# Load organizers' ASV scores
asv_data = np.genfromtxt(asv_score_file, dtype=str)
asv_sources = asv_data[:, 0]
asv_keys = asv_data[:, 1]
asv_scores = asv_data[:, 2].astype(float)
# Extract target, nontarget, and spoof scores from the ASV scores
tar_asv = asv_scores[asv_keys == 'target']
non_asv = asv_scores[asv_keys == 'nontarget']
spoof_asv = asv_scores[asv_keys == 'spoof']
# Extract bona fide (real human) and spoof scores from the CM scores
with open(cm_target_score_file, 'r') as f:
bona_cm = np.array(json.load(f)['score'])
with open(cm_nontarget_score_file, 'r') as f:
spoof_cm = np.array(json.load(f)['score'])
# EERs of the standalone systems and fix ASV operating point to EER threshold
eer_asv, asv_threshold = compute_eer(tar_asv, non_asv)
eer_cm = compute_eer(bona_cm, spoof_cm)[0]
[Pfa_asv, Pmiss_asv, Pmiss_spoof_asv] = em.obtain_asv_error_rates(tar_asv, non_asv, spoof_asv,
asv_threshold)
# Compute t-DCF
tDCF_curve, CM_thresholds = em.compute_tDCF(bona_cm, spoof_cm, Pfa_asv, Pmiss_asv, Pmiss_spoof_asv,
cost_model,
False)
# Minimum t-DCF
min_tDCF_index = np.argmin(tDCF_curve)
min_tDCF = tDCF_curve[min_tDCF_index]
return eer_cm, min_tDCF
def split_target_non_target():
pred_file = 'predictions/scores.txt'
gt_file = 'processed_data/la_cm_dev.json'
with open(gt_file, 'r') as f:
gt = json.load(f)
with open(pred_file, 'r') as f:
preds = f.readlines()
target_scores = []
non_target_scores = []
for pred in preds:
i, score = pred.split()
score = float(score)
if gt[i]['key'] == 'spoof':
non_target_scores.append(score)
else:
target_scores.append(score)
with open('predictions/target_score.json', 'w') as f:
json.dump({'score': target_scores}, f)
with open('predictions/nontarget_score.json', 'w') as f:
json.dump({'score': non_target_scores}, f)
"""Gets called at the end of an epoch.
Arguments
---------
stage : sb.Stage
One of sb.Stage.TRAIN, sb.Stage.VALID, sb.Stage.TEST
stage_loss : float
The average loss for all of the data processed in this stage.
epoch : int
The currently-starting epoch. This is passed
`None` during the test stage.
"""
# Store the train loss until the validation stage.
if stage == sb.Stage.TRAIN:
self.train_loss = stage_loss
wandb.log({"train_loss": self.train_loss})
# At the end of validation...
if stage == sb.Stage.VALID:
# old_lr, new_lr = self.hparams.lr_annealing(epoch)
old_lr, new_lr = self.hparams.lr_scheduler([self.optimizer], epoch, stage_loss)
# new_lr=self.hparams.lr_annealing(self.optimizer)
sb.nnet.schedulers.update_learning_rate(self.optimizer, new_lr)
# self.hparams.lr_annealing()
# The train_logger writes a summary to stdout and to the logfile.
pd.DataFrame(self.pd_out).to_csv('predictions/scores.txt', sep=' ', header=False, index=False)
split_target_non_target()
eer_cm, min_tDCF = get_eer_tDCF()
stats = {
"loss": stage_loss,
"min_tDCF": min_tDCF,
}
wandb.log({"stage_loss": stage_loss})
wandb.log({"error": self.error_metrics.summarize("average")})
# Save the current checkpoint and delete previous checkpoints,
wandb.log({"eer_cm": eer_cm})
wandb.log({"min_tDCF": min_tDCF})
self.hparams.train_logger.log_stats(
{"Epoch": epoch, "lr": new_lr},
train_stats={"loss": self.train_loss},
valid_stats=stats,
)
# self.checkpointer.save_and_keep_only(meta=stats, min_keys=["min_tDCF"])
self.checkpointer.save_and_keep_only(meta=stats, num_to_keep=5, keep_recent=True)
# We also write statistics about test data to stdout and to the logfile.
if stage == sb.Stage.TEST:
stats = {
"loss": stage_loss,
"error": self.error_metrics.summarize("average"),
}
self.hparams.train_logger.log_stats(
{"Epoch loaded": self.hparams.epoch_counter.current},
test_stats=stats,
)
def evaluate_batch(self, batch, stage):
"""
Overwrite evaluate_batch.
Keep same for stage in (TRAIN, VALID)
Output probability in TEST stage (from classify_batch)
"""
if stage != sb.Stage.TEST:
# Same as before
out = self.compute_forward(batch, stage=stage)
loss = self.compute_objectives(out, batch, stage=stage)
out_prob = out  # reuse the forward pass already computed above
out_prob = out_prob.squeeze(1)
score, index = torch.max(out_prob, dim=-1)
cm_scores = [out_prob[i].item() for i in range(out_prob.shape[0])]
self.pd_out['files'] += batch.id
self.pd_out['scores'] += cm_scores
return loss.detach().cpu()
else:
out_prob = self.compute_forward(batch, stage=stage)
out_prob = out_prob.squeeze(1)
score, index = torch.max(out_prob, dim=-1)
# text_lab = self.hparams.label_encoder.decode_torch(index)
return out_prob, score, index
# return out_prob, score, index, text_lab
def evaluate(
self,
test_set,
max_key=None,
min_key=None,
progressbar=None,
test_loader_kwargs={},
run_opts={"device": "cuda"}
):
def compute_det_curve(target_scores, nontarget_scores):
n_scores = target_scores.size + nontarget_scores.size
all_scores = np.concatenate((target_scores, nontarget_scores))
labels = np.concatenate((np.ones(target_scores.size), np.zeros(nontarget_scores.size)))
# Sort labels based on scores
indices = np.argsort(all_scores, kind='mergesort')
labels = labels[indices]
# Compute false rejection and false acceptance rates
tar_trial_sums = np.cumsum(labels)
nontarget_trial_sums = nontarget_scores.size - (np.arange(1, n_scores + 1) - tar_trial_sums)
frr = np.concatenate((np.atleast_1d(0), tar_trial_sums / target_scores.size)) # false rejection rates
far = np.concatenate(
(np.atleast_1d(1), nontarget_trial_sums / nontarget_scores.size)) # false acceptance rates
thresholds = np.concatenate(
(
np.atleast_1d(all_scores[indices[0]] - 0.001), all_scores[indices])) # Thresholds are the sorted scores
return frr, far, thresholds
def compute_eer(target_scores, nontarget_scores):
""" Returns equal error rate (EER) and the corresponding threshold. """
frr, far, thresholds = compute_det_curve(target_scores, nontarget_scores)
abs_diffs = np.abs(frr - far)
min_index = np.argmin(abs_diffs)
eer = np.mean((frr[min_index], far[min_index]))
return eer, thresholds[min_index]
def get_eer_tDCF(asv_score_file='losses/LA.asv.dev.scores.txt',
cm_target_score_file='predictions/target_score.json',
cm_nontarget_score_file='predictions/nontarget_score.json'
):
Pspoof = 0.05
cost_model = {
'Pspoof': Pspoof, # Prior probability of a spoofing attack
'Ptar': (1 - Pspoof) * 0.99, # Prior probability of target speaker
'Pnon': (1 - Pspoof) * 0.01, # Prior probability of nontarget speaker
'Cmiss_asv': 1, # Cost of ASV system falsely rejecting target speaker
'Cfa_asv': 10, # Cost of ASV system falsely accepting nontarget speaker
'Cmiss_cm': 1, # Cost of CM system falsely rejecting target speaker
'Cfa_cm': 10, # Cost of CM system falsely accepting spoof
}
# Load organizers' ASV scores
asv_data = np.genfromtxt(asv_score_file, dtype=str)
asv_sources = asv_data[:, 0]
asv_keys = asv_data[:, 1]
asv_scores = asv_data[:, 2].astype(float)
# Extract target, nontarget, and spoof scores from the ASV scores
tar_asv = asv_scores[asv_keys == 'target']
non_asv = asv_scores[asv_keys == 'nontarget']
spoof_asv = asv_scores[asv_keys == 'spoof']
# Extract bona fide (real human) and spoof scores from the CM scores
with open(cm_target_score_file, 'r') as f:
bona_cm = np.array(json.load(f)['score'])
with open(cm_nontarget_score_file, 'r') as f:
spoof_cm = np.array(json.load(f)['score'])
# EERs of the standalone systems and fix ASV operating point to EER threshold
eer_asv, asv_threshold = compute_eer(tar_asv, non_asv)
eer_cm = compute_eer(bona_cm, spoof_cm)[0]
[Pfa_asv, Pmiss_asv, Pmiss_spoof_asv] = em.obtain_asv_error_rates(tar_asv, non_asv, spoof_asv,
asv_threshold)
# Compute t-DCF
tDCF_curve, CM_thresholds = em.compute_tDCF(bona_cm, spoof_cm, Pfa_asv, Pmiss_asv, Pmiss_spoof_asv,
cost_model,
False)
# Minimum t-DCF
min_tDCF_index = np.argmin(tDCF_curve)
min_tDCF = tDCF_curve[min_tDCF_index]
return eer_cm, min_tDCF
def split_target_non_target():
pred_file = 'predictions/scores.txt'
gt_file = 'processed_data/la_cm_dev.json'
with open(gt_file, 'r') as f:
gt = json.load(f)
with open(pred_file, 'r') as f:
preds = f.readlines()
target_scores = []
non_target_scores = []
for pred in preds:
i, score = pred.split()
score = float(score)
if gt[i]['key'] == 'spoof':
non_target_scores.append(score)
else:
target_scores.append(score)
with open('predictions/target_score.json', 'w') as f:
json.dump({'score': target_scores}, f)
with open('predictions/nontarget_score.json', 'w') as f:
json.dump({'score': non_target_scores}, f)
if progressbar is None:
progressbar = not self.noprogressbar
if not isinstance(test_set, DataLoader):
test_loader_kwargs["ckpt_prefix"] = None
test_set = self.make_dataloader(
test_set, Stage.TEST, **test_loader_kwargs
)
self.on_evaluate_start(max_key=max_key, min_key=min_key)
self.on_stage_start(Stage.TEST, epoch=None)
self.modules.eval()
avg_test_loss = 0.0
label_encoder = sb.dataio.encoder.CategoricalEncoder()
lab_enc_file = os.path.join(self.hparams.save_folder, "label_encoder.txt")
label_encoder.load(path=lab_enc_file)
bona_index = label_encoder.encode_label('bonafide')
spoof_index = label_encoder.encode_label('spoof')
pd_out = {'files': [], 'scores': []}
with torch.no_grad():
for batch in tqdm(
test_set, dynamic_ncols=True, disable=not progressbar
):
self.step += 1
"""
Rewrite here
bonafide --> 0 , spoof -->
"""
out_prob, score, index = self.evaluate_batch(batch, stage=Stage.TEST)
cm_scores = [out_prob[i].item() for i in range(out_prob.shape[0])]
pd_out['files'] += batch.id
pd_out['scores'] += cm_scores
# Debug mode only runs a few batches
if self.debug and self.step == self.debug_batches:
break
"""
Rewrite Over
"""
pd.DataFrame(pd_out).to_csv('predictions/scores.txt', sep=' ', header=False, index=False)
self.step = 0
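# --- Toy illustration of the EER computation used above (made-up scores) ---
# With separable scores (bona fide high, spoof low) the false-rejection and
# false-acceptance rates cross at zero, so the equal error rate is 0.0.
# This is a simplified standalone sketch of compute_det_curve/compute_eer.
def _eer_toy_example():
    import numpy as np
    target = np.array([0.9, 0.8, 0.7, 0.6])     # bona fide scores
    nontarget = np.array([0.5, 0.4, 0.3, 0.2])  # spoof scores
    scores = np.concatenate((target, nontarget))
    labels = np.concatenate((np.ones(target.size), np.zeros(nontarget.size)))
    labels = labels[np.argsort(scores, kind='mergesort')]
    frr = np.cumsum(labels) / target.size
    far = (nontarget.size
           - (np.arange(1, scores.size + 1) - np.cumsum(labels))) / nontarget.size
    i = np.argmin(np.abs(frr - far))
    return float(np.mean((frr[i], far[i])))  # -> 0.0 for these scores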
|
On the mechanisms of genotoxicity and metabolism of quercetin.
Quercetin has been the subject of numerous studies on its genetic toxicity and carcinogenicity. Despite its well-proven genetic damaging activity for various genetic end-points (reverse mutations, induction of SOS functions, induction of sister chromatid exchanges, chromosomal aberrations and micronuclei), the mechanisms of genetic damage by quercetin remain, by and large, unknown. The present study aims to further extend the observations on the possible active oxygen species mediated DNA-damaging activity of quercetin and the role of cytochrome P450-dependent metabolism on the genotoxicity of quercetin. The results reported in this work show that quercetin can produce the OH. radical, as assessed by deoxyribose degradation in the presence of Fe3+/EDTA (ethylenediaminetetraacetic acid), and that it induces strand breakage in isolated plasmidic DNA (pUC18). The data support the hypothesis that the production of OH. is mediated by H2O2. The results with genetically engineered V79 cells expressing rat cytochromes 1A1, 1A2 and 2B1 failed to demonstrate metabolism of quercetin, as indicated by the fact that neither an enhancement nor a decrease in the genotoxicity of quercetin was observed. Results obtained on the pH dependence of the induction of chromosomal aberrations by quercetin in V79 cells show that, as the pH value of the medium is increased to 8.0, there is a significant increase in the number of aberrant cells, as expected if oxygen radicals are responsible for the formation of chromosomal aberrations. |
A Courtenay resident has been arrested after a 25-year-old man was found dead outside of a home Friday.
Police were called to the home, in the 500-block of 6th Street, early in the morning.
They found the deceased 25-year-old Courtenay man outside of the residence and then arrested the 37-year-old suspect, who remains in custody.
The victim and suspect were known to each other, according to RCMP.
A witness who spoke with CTV News but did not wish to be identified said he was inside the home when a violent confrontation took place between the two men.
He said the house was filled with many people, one of whom was trying to get another person out of the home.
“The guy who stabbed him just started hitting him with a knife and he got him in the jugular, went in one side and out the other,” the witness said.
CTV News has learned that Troy Galloway was the man killed in the stabbing.
“We tried to save his life, we tried to resuscitate him but he was gone already,” the witness said.
RCMP, fire and paramedics all arrived at the scene around 1 a.m. and took the 37-year-old into custody.
A Comox Valley RCMP General Investigation Section as well as forensic investigators are probing the incident.
The man arrested at the scene was expected to appear in court Friday afternoon. |
Complete wheels are very rare
3,000 year old wheel found in Cambridgeshire
Experts say it rivals the finds at Pompeii
Three Bronze Age houses found with their contents intact
The news
Archaeologists in the UK have unearthed what is thought to be the largest and oldest example of a wheel from the Bronze Age.
We've uncovered the oldest, complete Bronze Age wheel! Over the next few days we'll be sharing more detailed info. pic.twitter.com/2ZUBWCMQUN — Must Farm (@MustFarm) February 19, 2016
The 3,000-year-old discovery was made at a site in the county of Cambridgeshire which has been dubbed “Britain’s Pompeii”.
The historical heritage of the finds at Must Farm is said to rival that of the famous town buried by an erupting volcano in Italy.
The one-metre diameter wooden wheel has been dated to as far back as 1,100 BC.
Three Bronze Age round houses have been discovered at the site with their contents intact inside.
In pictures
This perfectly preserved 3,000-year-old wheel has been dug up in the UK: https://t.co/omb9roKUNl pic.twitter.com/t2BMgrW507 — Gizmodo (@Gizmodo) February 19, 2016
Archaeologists uncover most complete example of a Bronze Age wheel ever found https://t.co/0d4l3nLQhm UK pic.twitter.com/a3wVBx6AQN — Ticia Verveer (@ticiaverveer) February 19, 2016
In video
What they are saying
“Complete wheels are very rare. This is the first complete wheel from the UK. There are fragments of other wheels. So this puts us in the same context as our European colleagues.” – Mark Knight, Cambridge University archaeologist at the Must Farm dig.
“The existence of this wheel expands our understanding of late Bronze Age technology and the level of sophistication of the lives of people living on the edge of the Fens 3,000 years ago.” – Historic England Chief Executive Duncan Wilson. |
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
/*
* This file implements a light-weight OpDesc like the framework::OpDesc. We
* delete the unnecessary methods, and remove the underlying dependencies, such
 * as framework::Operator and boost::variant, to make it runnable on mobile.
*/
#include <algorithm>
#include <map>
#include <set>
#include <string>
#include <vector>
#include "lite/core/framework.pb.h"
#include "lite/core/model/base/apis.h"
#include "lite/utils/all.h"
namespace paddle {
namespace lite {
namespace pb {
using Attribute =
variant<int, float, bool, std::vector<std::string>, std::vector<int>>;
using VariableNameMap = std::map<std::string, std::vector<std::string>>;
/*
 * The lite::OpDesc, a light-weight wrapper of proto::OpDesc.
 * Unlike the original framework::OpDesc, we remove all local members
 * except desc_, to avoid the inconsistent state that is common in the
 * original interface and results in bugs.
*/
class OpDesc : public OpDescAPI {
public:
OpDesc() = delete;
explicit OpDesc(framework::proto::OpDesc *desc) : desc_(desc) {
CHECK(desc_);
}
framework::proto::OpDesc *Proto() { return desc_; }
const framework::proto::OpDesc &ReadonlyProto() const { return *desc_; }
std::string Type() const override { return desc_->type(); }
void SetType(const std::string &type) override { desc_->set_type(type); }
// Get the arguments of parameter called `param`
std::vector<std::string> Input(const std::string ¶m) const override {
return GetArguments(desc_->inputs(), param);
}
std::vector<std::string> InputArgumentNames() const override {
return GetArgumentNames(desc_->inputs());
}
void SetInput(const std::string ¶m,
const std::vector<std::string> &args) override {
SetArgument(desc_->mutable_inputs(), param, args);
}
std::vector<std::string> Output(const std::string ¶m) const override {
return GetArguments(desc_->outputs(), param);
}
std::vector<std::string> OutputArgumentNames() const override {
return GetArgumentNames(desc_->outputs());
}
void SetOutput(const std::string ¶m,
const std::vector<std::string> &args) override {
SetArgument(desc_->mutable_outputs(), param, args);
}
bool HasAttr(const std::string &name) const override {
const auto &xs = desc_->attrs();
auto it = std::find_if(
xs.begin(), xs.end(), [&](const framework::proto::OpDesc_Attr &x) {
return x.name() == name;
});
return it != xs.end();
}
AttrType GetAttrType(const std::string &name) const override {
const auto &xs = desc_->attrs();
auto it = std::find_if(
xs.begin(), xs.end(), [&](const framework::proto::OpDesc_Attr &x) {
return x.name() == name;
});
CHECK(it != xs.end());
#define DEF_ONE(type__) \
case framework::proto::AttrType::type__: \
return AttrType::type__;
switch (it->type()) {
DEF_ONE(INT);
DEF_ONE(FLOAT);
DEF_ONE(STRING);
DEF_ONE(INTS);
DEF_ONE(FLOATS);
DEF_ONE(STRINGS);
DEF_ONE(BOOLEAN);
DEF_ONE(BOOLEANS);
DEF_ONE(BLOCK);
DEF_ONE(LONG);
DEF_ONE(BLOCKS);
DEF_ONE(LONGS);
DEF_ONE(FLOAT64);
DEF_ONE(FLOAT64S);
default:
LOG(FATAL) << "Unknown attribute type for attr " << name;
return static_cast<AttrType>(-1);
}
#undef DEF_ONE
}
std::vector<std::string> AttrNames() const override {
std::vector<std::string> res;
const auto &xs = desc_->attrs();
std::transform(
xs.begin(),
xs.end(),
std::back_inserter(res),
[](const framework::proto::OpDesc_Attr &x) { return x.name(); });
return res;
}
template <typename T>
void SetAttr(const std::string &name, const T &v);
template <typename T>
T GetAttr(const std::string &name) const;
private:
std::vector<std::string> GetArguments(
const google::protobuf::RepeatedPtrField<framework::proto::OpDesc_Var>
&xs,
const std::string ¶m) const {
std::vector<std::string> res;
auto it = std::find_if(
xs.begin(), xs.end(), [&](const framework::proto::OpDesc_Var &it) {
return it.parameter() == param;
});
CHECK(it != xs.end());
const auto &ys = it->arguments();
std::transform(ys.begin(),
ys.end(),
std::back_inserter(res),
[](const std::string &x) { return x; });
return res;
}
void SetArgument(
google::protobuf::RepeatedPtrField<framework::proto::OpDesc_Var> *xs,
const std::string ¶m,
const std::vector<std::string> &args) {
auto it = std::find_if(
xs->begin(), xs->end(), [&](const framework::proto::OpDesc_Var &it) {
return it.parameter() == param;
});
if (it == xs->end()) {
auto *new_arg = xs->Add();
new_arg->set_parameter(param);
for (const auto &arg : args) {
*new_arg->mutable_arguments()->Add() = arg;
}
} else {
it->mutable_arguments()->Clear();
for (const auto &arg : args) {
*it->mutable_arguments()->Add() = arg;
}
}
}
std::vector<std::string> GetArgumentNames(
const google::protobuf::RepeatedPtrField<framework::proto::OpDesc_Var>
&xs) const {
std::vector<std::string> res;
std::transform(
xs.begin(),
xs.end(),
std::back_inserter(res),
[](const framework::proto::OpDesc_Var &x) { return x.parameter(); });
return res;
}
private:
framework::proto::OpDesc *desc_;
};
template <>
void OpDesc::SetAttr<std::string>(const std::string &name,
const std::string &v);
template <>
void OpDesc::SetAttr<std::vector<int>>(const std::string &name,
const std::vector<int> &v);
} // namespace pb
} // namespace lite
} // namespace paddle
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import json
import os
import subprocess
import sys
from pathlib import Path
from .utils import load_cluster_details
START_SERVICE_COMMAND = '''\
systemctl --user daemon-reload
systemctl --user start maro-master-agent.service
systemctl --user enable maro-master-agent.service
loginctl enable-linger {admin_username}
'''
if __name__ == "__main__":
# Load args
parser = argparse.ArgumentParser()
parser.add_argument('cluster_name')
args = parser.parse_args()
# Load details
cluster_details = load_cluster_details(cluster_name=args.cluster_name)
admin_username = cluster_details['user']['admin_username']
redis_port = cluster_details['master']['redis']['port']
# Dump master_agent.config
os.makedirs(os.path.expanduser("~/.maro-local/agents/"), exist_ok=True)
with open(os.path.expanduser("~/.maro-local/agents/master_agent.config"), 'w') as fw:
json.dump({
'cluster_name': args.cluster_name,
'redis_port': redis_port
}, fw)
# Load .service
with open(os.path.expanduser("~/.maro/lib/grass/agents/maro-master-agent.service"), 'r') as fr:
service_file = fr.read()
# Rewrite data in .service and write it to systemd folder
service_file = service_file.format(home_path=str(Path.home()))
os.makedirs(os.path.expanduser("~/.config/systemd/user/"), exist_ok=True)
with open(os.path.expanduser("~/.maro-local/agents/maro-master-agent.service"), 'w') as fw:
fw.write(service_file)
with open(os.path.expanduser("~/.config/systemd/user/maro-master-agent.service"), 'w') as fw:
fw.write(service_file)
# Exec command
command = START_SERVICE_COMMAND.format(admin_username=admin_username)
process = subprocess.Popen(
command, executable='/bin/bash', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8'
)
stdout, stderr = process.communicate()
if stderr:
sys.stderr.write(stderr.strip('\n'))
sys.stdout.write(stdout.strip('\n'))
|
export * from "./Progress.model";
export * from "./ProgressLog.model";
|
An Open-Source Toolkit To Expand Bioinformatics Training in Infectious Diseases
ABSTRACT As access to high-throughput sequencing technology has increased, the bottleneck in biomedical research has shifted from data generation to data analysis. Here, we describe a modular and extensible framework for didactic instruction in bioinformatics using publicly available RNA sequencing data sets from infectious disease studies, with a focus on host-parasite interactions. We highlight lessons learned from adapting this course for virtual learners during the coronavirus disease 2019 (COVID-19) pandemic.
There have been major developments in easy-to-use interactive graphics, dynamic reports, and Web apps in the R environment, making it easy for students to turn static plots into dynamic data visualizations (14).
Parasites are ideal tools for studying gene expression. Analysis of host-parasite gene expression data provides an excellent opportunity to teach fundamental concepts in both parasitology and immunology. Parasite life cycles involve complex developmental transitions that coincide with remarkable alterations in gene expression, and new single-cell technologies enable high-resolution profiling of these life cycles. RNA-seq data sets from different developmental stages provide learners with insight into mechanisms of host cell invasion, immune evasion, parasite maturation, sexual differentiation, and reproduction. In addition, parasites trigger robust immune and tissue repair responses in their hosts, providing an opportunity to move beyond parasite biology to consider and discuss how pathogens elicit immune responses and what the consequences of these responses may be for the outcome of infection and the development of pathological responses. To take full advantage of this concept, our course includes data-driven virtual labs derived from real infectious disease studies. Each lab was designed to highlight fascinating and unique aspects of host-pathogen biology, including "just-in-time" gene expression during the erythrocytic cycle of Plasmodium falciparum, the helminth response to praziquantel treatment, the expression of microexon genes in Schistosoma mansoni, the activation of canonical antiviral responses by some intracellular protozoa, parasite strain-specific polarization of macrophages via Toxoplasma gondii secreted virulence factors, and immune activation during severe acute respiratory syndrome coronavirus 2 (SARS-CoV-2) infection.
A modular approach to teaching coding. The course is organized into 13 to 16 2-h modules, each of which includes lecture videos and slides, learning objectives, R scripts, and reading materials. As learners move through these modules, simple "step" scripts facilitate the construction of a complete RNA-seq analysis pipeline (Fig. 1). Currently, eight step scripts are provided, which include code for data preprocessing (steps 1 and 2), data visualization (step 3), accessing public data (step 4), carrying out differential gene expression (DGE) analysis (steps 5 and 6), using functional enrichment methods such as gene ontology (GO) and gene set enrichment analysis (GSEA) (step 7), and bundling code and outputs into dynamic Rmarkdown documents for transparency and reproducibility (step 8). This approach provides an opportunity to introduce statistical concepts in the context of real challenges that commonly arise during data analyses. For example, the module on data exploration introduces learners to experimental design considerations and to multivariate statistics and dimensional reduction as critical methods for identifying biological and technical sources of variance. Similarly, multiple-testing correction, linear models, and Bayesian inference become key concepts in the differential gene expression module. The course website contains additional reading material and supplemental videos for learners who want to explore these and other statistical concepts in more depth or for instructors who wish to dedicate additional lecture time to statistics.
FIG 1 Complex workflows broken into modular "step" scripts. Learners progress through the course using a series of R step scripts. This process incrementally builds a computational workflow and culminates in learners producing an Rmarkdown report that summarizes all code and outputs from the course. Blue boxes indicate module topics covered in detail in the course. White boxes indicate topics discussed but not covered in detail. Red text denotes R packages used throughout the course, while black text denotes Web-based or command-line tools outside the R/Bioconductor environment.
At numerous points in this workflow, learners develop publication-quality graphics, opening the door to conversations about design aesthetics and crafting a narrative with genomic data. The course concludes by bundling all steps into an Rmarkdown document, providing an important context to discuss transparency and reproducibility in bioinformatics. To further emphasize the latter point, one module is dedicated to instruction on how to archive projects using GitHub and how to incorporate code into custom functions and R packages for reuse. Since all steps use the R programming language, learners build confidence and skills in coding as they progress through the course.
The modular structure of the course and stepwise nature of the coding not only accommodate learners with no prior experience in either RNA-seq or coding but also make it easy for instructors to modify the course content to include additional modules on statistics, related data types such as assay for transposase-accessible chromatin (ATAC-seq), or emerging technologies such as spatial transcriptomics. The next iteration of this course, for example, will include lectures and labs that explore single-cell RNA sequencing data from parasites and pathogen-infected host cells, to explore concepts around parasite development and host-pathogen interactions, respectively. Finally, many aspects of the course are generalizable well beyond transcriptomics data, and it would be feasible to adapt the course to focus on different 'omic data types, including but not limited to microbiome profiling.
Lessons learned from the COVID-19 pandemic. The coronavirus disease 2019 (COVID-19) pandemic had a dramatic and abrupt impact on in-person instruction at schools around the world and underscored a desperate need for high-quality, free, online educational content for biomedical trainees. To help meet this need, we modified our course to be run virtually for a full semester starting in April 2020. Although the course had a strong online presence since its inception in 2015, the pandemic accelerated a move to make the course completely virtual. Several advantages of this move became immediately apparent. First, in-person bioinformatics courses often require specialized "active learning" media classrooms that offer numerous power outlets for laptop computers, round tables for group work, and multiple display screens for improved visibility. Such classrooms are difficult to find and are limited in seating. In contrast, shifting our course online allowed us to double the class size from about 60 students to 120 students. Teaching assistants that once perused the classroom now monitored a class message board (Slack) for student questions and held virtual recitations via video conferencing software (Zoom). Similarly, labs were run via video conferencing using "breakout room" features to randomly split the ;120 students into small groups of 3 to 5 students. Teaching assistants and the instructor then circulated through each virtual breakout room to field questions and assist learners to overcome impediments. The virtual format also offered maximum flexibility during a time of great stress for learners. Furthermore, when offered in person, some learners struggled to keep up with modules that involved a mix of active coding and lecture. In contrast, virtual instruction with prerecorded video lectures made it simple to pause videos while coding. We also found that learners benefited from speed controls enabled on all videos, thus making it easy to move more quickly through familiar material while slowing down in more challenging areas.
To empirically test whether the transition from in-person to virtual instruction had a detrimental impact on the acquisition of skills by learners, we compared results from a 20-question skill self-assessment survey completed by 66 in-person learners from 2019 with responses from 65 virtual learners from 2020 (n = 131 learners total) (Fig. 2A). Prior to starting the course, both in-person and virtual learners reported low confidence in their understanding of RNA-seq data, using command-line tools, the R programming language, and general aspects of data science and reproducible coding. After 15 modules, all students reported significant increases in all areas measured, regardless of whether they received instruction in person or virtually, demonstrating that the virtual format did not adversely impact the overall acquisition of skills by learners. Furthermore, the move to virtual instruction opened the course to learners from around the world (Fig. 2B). Since January 2020, over 17,000 people have visited the site. Although the majority originate from IP addresses in and around Philadelphia, PA, where our university is based, there were many users accessing the site from across the United States, Europe, India, and South America. By hosting our lecture videos on Vimeo and collecting detailed analytics on interactions of users with videos, we found that lectures had been viewed over 33,000 times and watched to completion over 12,000 times by over 4,000 unique viewers worldwide.
Curriculum in the post-COVID-19 era. The apparent success of the virtual format for this course raises the question of what should be done in a post-COVID-19 era when schools resume in-person instruction. Should virtual content be maintained? If the course remains fully virtual, then how would in-person instruction be used, if at all? These are questions that we and other educators are now wrestling with. Switching back to in-person instruction at the expense of maintaining strong virtual content not only would exclude learners from outside our institution but also would make us vulnerable yet again to significant disruptions from future local, national, or global emergencies. In contrast, keeping the course fully virtual without an in-person component ignores both an opportunity and a responsibility to engage students at our institution. A blended learning model that brings both concepts together offers an appealing solution. In this model, learners at our institution or elsewhere can watch the videos and learn asynchronously rather than attending traditional synchronous lectures in a classroom, while in-person classes focus on the data-driven labs described above (so-called "flipped classroom"). Lab content will still be made available online, raising the intriguing possibility of labs that mix in-person with virtual learners in small groups. Depending on course credit load and classroom size, instructors could opt to require learners to attend only a portion of the labs in person. Finally, an alternative model, and one that has spontaneously developed with this course at our institution, relates to the notion of "nanocourses" (15). Nanocourses are a short-course format that typically involves small groups of learners (e.g., a peer group from the same graduate program or all members of a particular laboratory) taking only a portion of the course, usually totaling about 5 to 6 h of instruction. The modular structure of our course readily accommodates a nanocourse model, where learners could spend approximately 6 h covering two modules and a lab.
Extensible curriculum that can be adopted by LMICs. An unexpected outcome of the nanocourse format described above is that students have found it relatively straightforward to use their own domain-specific data sets from neurobiology, cell biology, and model organisms such as Drosophila and Caenorhabditis elegans. This highlights that although currently focused on infectious diseases, the course can be easily extended to other areas of science, particularly since RNA-seq data are commonplace across biomedical research. This extensibility proves particularly useful when trying to engage students in the ever-changing landscape of infectious diseases since new (and newsworthy) outbreaks in human or veterinary medicine can easily be used as the basis for developing new data-driven labs. Of great concern is how this type of course can be successfully deployed to researchers in lower- and middle-income countries (LMICs) where endemic and emerging infectious diseases are major causes of childhood morbidity and mortality. To address this, the entire DIYtranscriptomics course, even the website itself, is available as a single GitHub repository (https://github.com/DIYtranscriptomics/DIYtranscriptomics.github.io), making it easy for any instructor to clone the course, modify the code, and quickly host their own version of the course with little effort. There remains the challenge of what to do when students do not have access to laptops with sufficient computing resources to install or run the course software. One appealing solution is the availability of containerized software and cloud computing infrastructure. For example, we have used CodeOcean to bundle all the course code and data into a reproducible cloud computing environment that requires only a Web browser and Internet access to run (16). Finally, in many areas of LMICs, a reliable Internet connection is not available. In these cases, videos can be freely downloaded for offline viewing, and the course GitHub repository can be cloned and used to run a local version of the website. Together, these resources provide multiple options for learners in LMICs to access rich bioinformatics content for infectious diseases.
/**
 * Combine this {@link FilterContext} with another.
 * @param context the {@link FilterContext} to merge
 * @return the merged {@link FilterContext}
 */
public FilterContext merge(FilterContext context) {
    FilterContext filterContext = new FilterContext();
    // Copy this context's properties first, then the other's, so that on a
    // key collision the properties of the merged-in context win.
    filterContext.getProperties().putAll(this.getProperties());
    filterContext.getProperties().putAll(context.getProperties());
    return filterContext;
}
from datetime import datetime
from typing import List, Optional

from pymatgen.core.structure import Structure
from pymatgen.analysis.magnetism.analyzer import CollinearMagneticStructureAnalyzer

# StructureMetadata and MaterialsDoc are assumed to be defined elsewhere in
# the surrounding module/project (this looks like an emmet-style document
# model), so they are not imported here.


def from_structure(
    structure: Structure,
    material_id: str,
    fields: Optional[List[str]] = None,
    **kwargs
) -> "MaterialsDoc":
    # Derive searchable metadata and the collinear magnetic ordering
    # directly from the structure.
    meta = StructureMetadata.from_structure(structure, fields=fields)
    ordering = CollinearMagneticStructureAnalyzer(structure).ordering
    kwargs.update(**meta.dict())
    # Stamp creation/update times only when the caller did not supply them.
    if "last_updated" not in kwargs:
        kwargs["last_updated"] = datetime.utcnow()
    if "created_at" not in kwargs:
        kwargs["created_at"] = datetime.utcnow()
    return MaterialsDoc(
        structure=structure, material_id=material_id, ordering=ordering, **kwargs
    )
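A minimal usage sketch, assuming the surrounding project supplies MaterialsDoc and StructureMetadata as noted above; the lattice, species, and material_id are illustrative values only:

from pymatgen.core import Lattice, Structure

# Hypothetical rock-salt structure; every value below is for illustration.
structure = Structure(
    Lattice.cubic(4.2),
    ["Na", "Cl"],
    [[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]],
)

doc = from_structure(structure, material_id="mp-0000-example")
print(doc.ordering, doc.last_updated)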
//
// JGNRReachability.h
// JGNetworkReachability
//
// Created by 梅继高 on 2018/6/12.
//  Copyright © 2018 MeiJigao. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
 *  Network reachability status
 */
typedef NS_ENUM(NSInteger, JGNRReachabilityStatus) {
    JGNRReachabilityStatusNotReachable = 1,     // Not reachable
    JGNRReachabilityStatusViaWiFi,              // Wi-Fi
    JGNRReachabilityStatusViaWWAN,              // Cellular (WWAN)
};
/**
 *  Cellular (WWAN) network type
 */
typedef NS_ENUM(NSInteger, JGNRWWANType) {
JGNRWWANTypeUnknown = 201,
JGNRWWANTypeGPRS, // GPRS
JGNRWWANType2G, // 2G
JGNRWWANType3G, // 3G
JGNRWWANType4G, // 4G
};
typedef void (^JGNRReachabilityStatusChangeBlock)(JGNRReachabilityStatus status);
FOUNDATION_EXTERN NSNotificationName const JGNRReachabilityStatusChangedNotification;
typedef NSString *JGNRReachabilityNotificationKey NS_EXTENSIBLE_STRING_ENUM;
FOUNDATION_EXTERN JGNRReachabilityNotificationKey const JGNRReachabilityNotificationStatusKey;
#pragma mark - Deprecated
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachabilityStatus instead")
typedef NS_ENUM(NSInteger, JGNetworkReachabilityStatus) {
JGNetworkReachabilityStatusNotReachable = JGNRReachabilityStatusNotReachable,
JGNetworkReachabilityStatusReachableViaWiFi = JGNRReachabilityStatusViaWiFi,
JGNetworkReachabilityStatusReachableViaWWAN = JGNRReachabilityStatusViaWWAN,
};
DEPRECATED_MSG_ATTRIBUTE("Use JGNRWWANType instead")
typedef NS_ENUM(NSInteger, JGNetworkReachabilityWWAN) {
JGNetworkReachabilityWWANUnknown = JGNRWWANTypeUnknown,
JGNetworkReachabilityWWANGPRS = JGNRWWANTypeGPRS,
JGNetworkReachabilityWWAN2G = JGNRWWANType2G,
JGNetworkReachabilityWWAN3G = JGNRWWANType3G,
JGNetworkReachabilityWWAN4G = JGNRWWANType4G,
};
typedef void (^JGNetworkReachabilityStatusChangeAction)(JGNetworkReachabilityStatus status)
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachabilityStatusChangeBlock instead");
FOUNDATION_EXTERN NSNotificationName const JGNetworkReachabilityStatusChangedNotification
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachabilityStatusChangedNotification instead");
typedef JGNRReachabilityNotificationKey JGNetworkReachabilityNotificationKey
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachabilityNotificationKey instead");
FOUNDATION_EXTERN JGNRReachabilityNotificationKey const JGNetworkReachabilityNotificationStatusKey
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachabilityNotificationStatusKey instead");
@interface JGNRReachability : NSObject
/** Current network reachability status */
@property (nonatomic, assign, readonly) JGNRReachabilityStatus reachabilityStatus;
/** Whether the network is reachable */
@property (nonatomic, assign, readonly) BOOL reachable;
/** Whether the connection is Wi-Fi */
@property (nonatomic, assign, readonly) BOOL reachableViaWiFi;
/** Whether the connection is cellular (WWAN) */
@property (nonatomic, assign, readonly) BOOL reachableViaWWAN;
/** Cellular (WWAN) network type */
@property (nonatomic, assign, readonly) JGNRWWANType WWANType;
/** Human-readable status description, e.g. NoNetwork, WiFi, Mobile, GPRS, 2G, 3G, 4G */
@property (nonatomic, copy, readonly) NSString *reachabilityStatusString;
+ (instancetype)sharedInstance;
/**
 *  Starts global monitoring. Safe to call repeatedly; extra calls while
 *  monitoring is already running have no effect.
 */
- (void)startMonitor;
/**
 Registers a status-change block. Multiple observers may be added; beware of
 retain cycles inside the block.
 @param observer the observing object
 @param block    the block invoked on status changes
 */
- (void)addObserver:(id)observer statusChangeBlock:(nullable JGNRReachabilityStatusChangeBlock)block;
/**
 Removes a status-change block. Usually optional: if the block passed to
 addObserver:statusChangeBlock: creates no retain cycle, the observation is
 removed automatically when the observer is deallocated; if the block does
 retain the observer, this method must be called manually to remove the
 observation before the observer can be released.
 @param observer the observing object
 */
- (void)removeStatusChangeBlockWithObserver:(id)observer;
/**
 Registers a status-change selector. Multiple observers may be added.
 The selector may take a single optional parameter, which receives the
 JGNRReachability instance when invoked; if the selector declares several
 parameters, only the first is populated.
 @param observer the observing object
 @param selector the selector invoked on status changes
 */
- (void)addObserver:(id)observer selector:(SEL)selector;
/**
 Removes a status-change selector. Optional: the observation is removed
 automatically when the observer is deallocated.
 @param observer the observing object
 */
- (void)removeSelectorWithObserver:(id)observer;
#pragma mark - Deprecated
- (void)addObserver:(id)observer action:(nullable JGNetworkReachabilityStatusChangeAction)notification
DEPRECATED_MSG_ATTRIBUTE("Replaced by -addObserver:statusChangeBlock:");
- (void)removeActionWithObserver:(id)observer
DEPRECATED_MSG_ATTRIBUTE("Replaced by -removeStatusChangeBlockWithObserver:");
@property (nonatomic, assign, readonly) JGNRWWANType WWANStatus
DEPRECATED_MSG_ATTRIBUTE("Use WWANType instead");
@end
DEPRECATED_MSG_ATTRIBUTE("Use JGNRReachability instead")
@interface JGNetworkReachability : JGNRReachability
@end
NS_ASSUME_NONNULL_END
If it’s up to Robotbase, you’ll soon be coming home and a robot will greet you at the door. While you were away, the Robotbase Personal Robot patrolled your home, made sure the temperature was lowered when you left, maybe locked the door after you were gone and, through its built-in camera, allowed you to check in on your dog, too. Robotbase is officially launching its Kickstarter campaign today with an appearance in front of our CES Battlefield judges.
As Robotbase CEO Duy Huynh told me, the company’s mission is to build a platform for an autonomous robot with plenty of sensors, advanced computer vision and a good dose of artificial intelligence built in, so it doesn’t just react to what you’re telling it to do, but also proactively alerts you and handles tasks quietly in the background.
In its current form, the Robotbase is a 4-foot tall robot that sits on a wide base with a telescoping arm that holds its screen, camera and other sensors. Huynh likened it to a really smart smart-home hub that goes beyond the standard tasks of those devices because it doesn’t just aggregate data from those tools, but can also move around your home.
What really sets Robotbase’s efforts apart, however, is what the team calls “talents.” These are basically apps you can install on the Gen X. Say you are having a party. The Personal Robot, with its built-in image recognition skills and camera, can become your party photographer.
Huynh also told me that the company is working on a “security guard” skill that will have the robot patrol your home and a “storyteller” talent for lazy parents. Using that skill, the robot can tell your kids a bedtime story while you are spending some quality time with your Netflix queue. Once the robot notices your child has fallen asleep, it can turn off the light and roll back to its base station to charge.
The company purposely gave the robot a rather bland name — the “Artificial Intelligence Personal Robot” — because it wants people to name the robot themselves in order to personalize the experience. Users will also be able to create the faces that will pop up on the screen.
The robot will feature an always-on listening mode, which Huynh hopes will make it feel more natural for people to use voice to interact with it. Using its natural language processing skills, the system currently allows for some basic back-and-forth conversations. There will also be a touch interface on the screen and the team is working on gesture control, too (though that isn’t quite ready yet).
As Huynh admitted, it’s not easy to build this combination of speech recognition, natural language processing and computer vision, but he believes his team can overcome most of these challenges.
“These used to be really hard problems,” he said. “But with deep neural networks, this is now much easier.” He also sees this as a platform. The software can always be updated, after all.
The core of the robot is built around an Nvidia Tegra K1 chip that handles most of the computation — all of which happens on the unit and not in the cloud. It will include sensors to measure temperature, barometric pressure and CO2, as well as a smell sensor to alert you when it finds a potential gas leak.
The company hopes to eventually sell its robot for under $1,500 — and maybe even $1,000. The Kickstarter campaign starts at $995 (limited to the first 50 backers). It’ll likely go on sale by the end of 2015 and while Robotbase plans to sell directly to consumers, it is also exploring relationships with brick-and-mortar retailers.
Huynh has a background in manufacturing and ran a factory and global sourcing business with more than 400 employees in the past. Thanks to this experience, he has the contacts to source components directly, which should help keep the price low.
For now, the company is focusing on the consumer version of the Personal Robot, but the company has also seen some interest from business customers who want to test the robot in their stores, for example.
You can find Robotbase’s Kickstarter campaign here.
package io.cattle.platform.framework.encryption.impl;
import io.cattle.platform.framework.encryption.handler.impl.TransformationServiceImpl;
import io.github.ibuildthecloud.gdapi.model.Transformer;
import java.security.SecureRandom;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TransformerTest {
private TransformationServiceImpl transformationService;
static final char[] specialChars = new char[]{
'\'', '\"', '\\', '/', '[', ']', '*', ':', ';', '%', '$', ',',
'(', '-', ')', '#', '@', '!', '+'};
@Before
public void setUp() throws Exception {
transformationService = new TransformationServiceImpl();
Transformer transformer = new NoOpTransformer();
transformer.init();
transformationService.addTransformers(transformer);
transformer = new Sha256Hasher();
transformer.init();
transformationService.addTransformers(transformer);
transformer = new Aes256Encrypter();
transformer.init();
transformationService.addTransformers(transformer);
}
@Test
public void testCompare() throws Exception {
String toTest;
byte[] bytes = new byte[22];
SecureRandom rn = new SecureRandom();
for (Transformer transformer : transformationService.getTransformers().values()) {
for (int i = 0; i < 100; i++) {
rn.nextBytes(bytes);
toTest = String.valueOf(Hex.encodeHex(bytes));
String encrypted = transformer.transform(toTest);
Assert.assertFalse(StringUtils.equals(encrypted, toTest));
Assert.assertTrue(transformer.compare(toTest, encrypted));
Assert.assertFalse(transformer.compare("Garbage", encrypted));
}
}
for (Transformer transformer : transformationService.getTransformers().values()) {
for (int i = 0; i < 100; i++) {
String password = randomPass(rn);
String encrypted = transformer.transform(password);
Assert.assertFalse(StringUtils.equals(encrypted, password));
Assert.assertTrue(transformer.compare(password, encrypted));
Assert.assertFalse(transformer.compare("Garbage", encrypted));
}
}
}
private String randomPass(SecureRandom rn) {
byte[] bytes = new byte[10];
rn.nextBytes(bytes);
char[] rand = String.valueOf(Hex.encodeHex(bytes)).toCharArray();
StringBuilder password = new StringBuilder();
for (int i = 0; i < rand.length; i++) {
password.append(specialChars[rn.nextInt(specialChars.length)]);
password.append(rand[i]);
}
return password.toString();
}
}
package base
import (
"github.com/labstack/echo"
"github.com/labstack/echo/middleware"
"github.com/pavel-kiselyov/echo-logrusmiddleware"
log "github.com/sirupsen/logrus"
)
// SetupEcho creates a new Echo instance for HTTP
func SetupEcho() (*echo.Echo, error) {
e := echo.New()
e.HideBanner = true
e.HidePort = true
e.Logger = logrusmiddleware.Logger{Logger: log.StandardLogger()}
e.Pre(middleware.RemoveTrailingSlash())
e.Use(logrusmiddleware.Hook())
e.Use(middleware.Recover())
return e, nil
}
Variable Depth Bragg Peak Method for Single Event Effects Testing
The Variable Depth Bragg Peak (VDBP) method for measuring the Single Event Effects (SEE) cross-section of an integrated circuit (IC) in a closed package as a function of ion linear energy transfer (LET) is described. The method uses long-range, high-energy heavy ions that can penetrate the package and deposit charge in the device's sensitive volume (SV), the depth of which is not known. A series of calibrated energy degraders is used to vary the depth of the Bragg peak relative to the device's sensitive volume. When the Bragg peak is located at the sensitive volume, the measured SEE cross-section is a maximum, as is the LET, which is calculated using TRIM, a Monte Carlo-based program that takes both straggling and spread in beam energy and angle into account. Degrader thickness is varied and the change in LET is calculated while the corresponding cross-section is measured. Good agreement was obtained between the LET-dependence of the single event upset (SEU) cross-section for a 4 Mbit memory in an unopened package using the above method and that for an identical de-lidded part previously measured.
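The data handling behind the method is simple enough to sketch: scan the degrader thickness, record the measured cross-section at each setting, and take the thickness that maximizes the cross-section as the point where the Bragg peak coincides with the sensitive volume. A minimal illustration with made-up numbers standing in for measured cross-sections and TRIM-computed LET values (nothing here is real data):

import numpy as np

# Illustrative stand-ins: degrader thicknesses (um), the SEE cross-section
# measured at each setting (cm^2), and the TRIM-computed LET at the device
# for each setting. None of these numbers are real measurements.
thickness_um = np.array([0, 50, 100, 150, 200, 250])
cross_section = np.array([1e-8, 4e-8, 9e-8, 2e-7, 8e-8, 2e-8])
let_mev_cm2_mg = np.array([12.0, 18.0, 27.0, 41.0, 30.0, 15.0])

# The Bragg peak sits at the sensitive volume where the measured
# cross-section (and the LET) is maximal.
i_peak = int(np.argmax(cross_section))
print("Bragg peak at ~%d um of degrader, sigma_max = %.1e cm^2, LET = %.0f MeV*cm^2/mg"
      % (thickness_um[i_peak], cross_section[i_peak], let_mev_cm2_mg[i_peak]))

# Pairing each setting's LET with its cross-section traces out the
# sigma-vs-LET curve the abstract compares against the de-lidded part.
curve = sorted(zip(let_mev_cm2_mg, cross_section))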
def resolve():
    # The apples have flavors l, l+1, ..., l+n-1. Removing the one whose
    # flavor is closest to zero keeps the remaining total as close as
    # possible to the full sum, so print total minus that flavor.
    # Example: n=5, l=2 gives flavors 2..6 (total 20) and prints 18.
    n, l = map(int, input().split())
    flavors = [l + i for i in range(n)]
    total = sum(flavors)
    closest = min(flavors, key=abs)  # first flavor with minimum |flavor|
    print(total - closest)


resolve()
//! Weechat Hook module.
//!
//! Weechat hooks are used for many different things, to create commands, to
//! listen to events on a file descriptor, add completions to weechat, etc.
//! This module contains hook creation methods for the `Weechat` object.
#[cfg(feature = "unstable")]
mod signal;
mod bar;
mod commands;
mod completion;
mod fd;
mod timer;
pub use bar::{BarItemCallback, BarItemHandle};
pub use commands::{Command, CommandCallback, CommandRun, CommandSettings};
pub use completion::{Completion, CompletionHook, CompletionPosition};
pub use fd::{FdHook, FdHookCallback, FdHookMode};
#[cfg(feature = "unstable")]
pub use signal::{SignalHook, SignalHookValue};
pub use timer::TimerHook;
use crate::Weechat;
use weechat_sys::{t_hook, t_weechat_plugin};
/// Weechat Hook type. The hook is unhooked automatically when the object is
/// dropped.
pub(crate) struct Hook {
pub(crate) ptr: *mut t_hook,
pub(crate) weechat_ptr: *mut t_weechat_plugin,
}
impl Drop for Hook {
fn drop(&mut self) {
let weechat = Weechat::from_ptr(self.weechat_ptr);
let unhook = weechat.get().unhook.unwrap();
unsafe { unhook(self.ptr) };
}
}
import { LitElement } from 'lit-element';
declare class SnapbotButton extends LitElement {
icon: string;
hash: string;
preview: boolean;
title: string;
message: string;
titleFeedback: string;
messageFeedback: string;
type: string;
button: string;
textCancel: string;
accountNumber: string;
cc: string;
static styles: import("lit-element").CSSResult;
constructor();
render(): import("lit-element").TemplateResult;
firstUpdated(): void;
loadPreview(): void;
loadConfig(): Promise<void>;
getLinkToRedirect(): Promise<void>;
sendForm(form: any): Promise<void>;
__clickIcon(event: any): void;
__sendForm(): void;
__cancelForm(): void;
__openForm(form: any, time?: number, startBottom?: number, endBottom?: number): void;
__closeForm(form: any, time?: number, startBottom?: number, endBottom?: number): void;
__fadeIn(element: any, time: number): void;
__fadeOut(element: any, time: number): void;
__processFade(element: any, time: number, initial: number, end: number): void;
__mascara(event: any): void;
__telefone(element: any): any;
__submit(event: MouseEvent): void;
previewSend(): void;
disabledElement(element: any): void;
enbledElement(element: any): void;
}
export { SnapbotButton };
//# sourceMappingURL=SnapbotButton.d.ts.map
/*
* Copyright 2018 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package k8s
import (
"github.com/skydive-project/skydive/graffiti/graph"
"github.com/skydive-project/skydive/probe"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// AreLinked return true if (a, b) should be linked
type AreLinked func(a, b interface{}) bool
// ABLinker basis for a simple A to B linker
type ABLinker struct {
manager string
typ string
graph *graph.Graph
aCache *ResourceCache
bCache *ResourceCache
areLinked AreLinked
}
func (l *ABLinker) newEdge(parent, child *graph.Node) *graph.Edge {
id := graph.GenID(string(parent.ID), string(child.ID), "RelationType", l.typ)
m := NewEdgeMetadata(l.manager, l.typ)
return l.graph.CreateEdge(id, parent, child, m, graph.TimeUTC(), "")
}
// GetABLinks implementing graph.Linker
func (l *ABLinker) GetABLinks(aNode *graph.Node) (edges []*graph.Edge) {
namespace, _ := aNode.GetFieldString(MetadataField("Namespace"))
if a := l.aCache.GetByNode(aNode); a != nil {
for _, b := range l.bCache.getByNamespace(namespace) {
uid := b.(metav1.Object).GetUID()
if bNode := l.graph.GetNode(graph.Identifier(uid)); bNode != nil {
if l.areLinked(a, b) {
edges = append(edges, l.newEdge(aNode, bNode))
}
}
}
}
return
}
// GetBALinks implementing graph.Linker
func (l *ABLinker) GetBALinks(bNode *graph.Node) (edges []*graph.Edge) {
namespace, _ := bNode.GetFieldString(MetadataField("Namespace"))
if b := l.bCache.GetByNode(bNode); b != nil {
for _, a := range l.aCache.getByNamespace(namespace) {
uid := a.(metav1.Object).GetUID()
if aNode := l.graph.GetNode(graph.Identifier(uid)); aNode != nil {
if l.areLinked(a, b) {
edges = append(edges, l.newEdge(aNode, bNode))
}
}
}
}
return
}
// NewABLinker create and initialize an ABLinker based linker
func NewABLinker(g *graph.Graph, aManager, aType, bManager, bType string, areLinked AreLinked) probe.Probe {
aProbe := GetSubprobe(aManager, aType)
bProbe := GetSubprobe(bManager, bType)
if aProbe == nil || bProbe == nil {
return nil
}
innerLinker := new(ABLinker)
innerLinker.manager = aManager
innerLinker.typ = aType
innerLinker.graph = g
innerLinker.aCache = aProbe.(*ResourceCache)
innerLinker.bCache = bProbe.(*ResourceCache)
innerLinker.areLinked = areLinked
rl := graph.NewResourceLinker(
g,
[]graph.ListenerHandler{aProbe},
[]graph.ListenerHandler{bProbe},
innerLinker,
graph.Metadata{"RelationType": aType},
)
linker := &Linker{
ResourceLinker: rl,
}
rl.AddEventListener(linker)
return linker
}
|
import librosa
import numpy as np


def trim_silence(audio, threshold):
    # Frame-wise RMS energy; librosa renamed `rmse` to `rms` in 0.7, so use
    # `librosa.feature.rmse` on older versions.
    energy = librosa.feature.rms(y=audio)
    # Frames whose energy exceeds the threshold; np.nonzero returns
    # (row_indices, frame_indices) for the 1 x n_frames energy matrix.
    frames = np.nonzero(energy > threshold)
    # Convert frame indices to sample indices ([1] selects the frame axis).
    indices = librosa.core.frames_to_samples(frames)[1]
    # Empty slices keep the return types stable for all-silent input.
    audio_trim = audio[0:0]
    left_blank = audio[0:0]
    right_blank = audio[0:0]
    if indices.size:
        audio_trim = audio[indices[0]:indices[-1]]
        left_blank = audio[:indices[0]]
        right_blank = audio[indices[-1]:]
    return audio_trim, left_blank, right_blank
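A quick usage sketch, assuming a mono recording at the hypothetical path example.wav; the 0.01 threshold is arbitrary and would normally be tuned per corpus:

audio, sr = librosa.load("example.wav", sr=None, mono=True)  # hypothetical file
voiced, lead_in, tail = trim_silence(audio, threshold=0.01)  # arbitrary threshold
print(len(lead_in) / sr, "seconds of leading silence removed")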
Applying Improved Multiscale Fuzzy Entropy for Feature Extraction of MI-EEG
Electroencephalography (EEG) is considered the output of a brain and it is a bioelectrical signal with multiscale and nonlinear properties. Motor Imagery EEG (MI-EEG) not only has a close correlation with the human imagination and movement intention but also contains a large amount of physiological or disease information. As a result, it has been studied extensively in the field of rehabilitation. To correctly interpret and accurately extract the features of MI-EEG signals, many nonlinear dynamic methods based on entropy, such as Approximate Entropy (ApEn), Sample Entropy (SampEn), Fuzzy Entropy (FE), and Permutation Entropy (PE), have been proposed and exploited continuously in recent years. However, these entropy-based methods can only measure the complexity of MI-EEG on a single scale and therefore fail to account for the multiscale property inherent in MI-EEG. To solve this problem, Multiscale Sample Entropy (MSE), Multiscale Permutation Entropy (MPE), and Multiscale Fuzzy Entropy (MFE) have been developed by introducing a scale factor. However, MFE has not been widely used in the analysis of MI-EEG, and the same parameter values are employed when the MFE method is used to calculate the fuzzy entropy values on multiple scales. Actually, each coarse-grained MI-EEG series carries the characteristic information of the original signal at a different scale factor, so it is necessary to optimize the MFE parameters to discover more feature information. In this paper, the parameters of MFE are optimized independently for each scale factor, and the improved MFE (IMFE) is applied to the feature extraction of MI-EEG. Based on the event-related desynchronization (ERD)/event-related synchronization (ERS) phenomenon, IMFE features from multiple channels are fused to construct the feature vector. Experiments are conducted on a public dataset using a Support Vector Machine (SVM) as the classifier. The experimental results of 10-fold cross-validation show that the proposed method yields relatively high classification accuracy compared with other entropy-based and classical time–frequency–space feature extraction methods. A t-test is used to verify the significance of the improvement achieved by IMFE.
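A minimal sketch of the computation the abstract describes: coarse-grain the signal at each scale factor, then compute fuzzy entropy on each coarse-grained series, with the (m, r) parameters exposed per scale so they can be optimized independently, which is the change IMFE introduces. This is an illustration under standard FE conventions (mean-removed templates, exponential membership), not the authors' implementation, and all numeric values are made up.

import numpy as np

def coarse_grain(x, tau):
    # Average non-overlapping windows of length tau (the usual coarse-graining).
    n = len(x) // tau
    return x[: n * tau].reshape(n, tau).mean(axis=1)

def fuzzy_entropy(x, m=2, r=0.2):
    # Fuzzy entropy with mean-removed templates and exponential membership
    # exp(-(d/r)^2); r is scaled by the series' standard deviation.
    x = np.asarray(x, dtype=float)
    r = r * x.std()

    def phi(m):
        templ = np.array([x[i:i + m] for i in range(len(x) - m)])
        templ = templ - templ.mean(axis=1, keepdims=True)
        # Chebyshev distance between every pair of templates.
        d = np.max(np.abs(templ[:, None, :] - templ[None, :, :]), axis=2)
        sim = np.exp(-(d ** 2) / (r ** 2))
        n = len(templ)
        return (sim.sum() - n) / (n * (n - 1))  # average, excluding self-matches

    return np.log(phi(m)) - np.log(phi(m + 1))

def imfe(x, scales, params):
    # IMFE's key idea: an independent (m, r) pair per scale factor.
    return [fuzzy_entropy(coarse_grain(x, tau), *params[tau]) for tau in scales]

# Hypothetical single-channel MI-EEG segment and per-scale parameters.
rng = np.random.default_rng(0)
eeg = rng.standard_normal(1024)
params = {1: (2, 0.15), 2: (2, 0.20), 3: (3, 0.25)}  # illustrative values
print(imfe(eeg, scales=[1, 2, 3], params=params))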
t = int(input())
def is_zero(x):
return abs(x) < 1e-9
def EV(x, y, z, v):
if is_zero(x) and is_zero(y):
return 1
elif is_zero(x):
step = min(y, v)
return 1 + y * EV(x, y - step, z + step, v)
elif is_zero(y):
step = min(x, v)
return 1 + x * EV(x - step, y, z + step, v)
else:
step1 = min(x, v)
step2 = min(y, v)
return 1 + x * EV(x - step1, y + step1/2, z + step1/2, v) + y * EV(x + step2/2, y - step2, z + step2/2, v)
for test in range(t):
c, m, p, v = [float(x) for x in input().split()]
print(EV(c, m, p, v))
/**
* AMQPNotificationProcessor handles AMQP-specific notification processing.
*/
public class AMQPNotificationProcessor {
private static final Logger logger = LoggerFactory.getLogger(AMQPNotificationProcessor.class);
private boolean amqpEnabled = false;
private AMQPSender amqpSender = null;
private AMQPTopicSender amqpTopicSender = null;
private AMQPBroadcastSender amqpBroadcastSender = null;
public void init() {
String amqpEnabledAppSetting = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_ENABLE, "");
if (!amqpEnabledAppSetting.isEmpty() && (1 == Integer.parseInt(amqpEnabledAppSetting))) {
try {
String host = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_PROVIDER_HOST, "localhost");
String port = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_PROVIDER_PORT, "5672");
String username = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_PROVIDER_USERNAME, "guest");
String password = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_PROVIDER_PASSWORD, "guest");
Properties properties = new Properties();
properties.setProperty(AMQPUtil.CONFIG_AMQP_PROVIDER_HOST, host);
properties.setProperty(AMQPUtil.CONFIG_AMQP_PROVIDER_PORT, port);
properties.setProperty(AMQPUtil.CONFIG_AMQP_PROVIDER_USERNAME, username);
properties.setProperty(AMQPUtil.CONFIG_AMQP_PROVIDER_PASSWORD, password);
String className = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_SENDER, "");
Class<?> clazz = Class.forName(className);
amqpSender = (AMQPSender)clazz.getDeclaredConstructor(Properties.class).newInstance(properties);
className = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_TOPIC_SENDER, "");
clazz = Class.forName(className);
amqpTopicSender = (AMQPTopicSender)clazz.getDeclaredConstructor(Properties.class).newInstance(properties);
className = ApplicationSettings.getSetting(AMQPUtil.CONFIG_AMQP_BROADCAST_SENDER, "");
clazz = Class.forName(className);
amqpBroadcastSender = (AMQPBroadcastSender)clazz.getDeclaredConstructor(Properties.class).newInstance(properties);
Element routingKeys = AMQPUtil.loadRoutingKeys();
if (routingKeys != null) {
((AMQPRoutingAwareClient)amqpSender).init(routingKeys);
((AMQPRoutingAwareClient)amqpTopicSender).init(routingKeys);
((AMQPRoutingAwareClient)amqpBroadcastSender).init(routingKeys);
}
amqpEnabled = true;
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
}
}
}
public void notify(ProcessingContext ctx, OMNamespace protocolNs) throws OMException {
if (amqpEnabled) {
// Extract messages
List<OMElement> messages = new ArrayList<OMElement>();
if (NameSpaceConstants.WSNT_NS.equals(protocolNs)) {
// WSNT
OMElement messageElements = ctx.getSoapBody().getFirstElement();
for (Iterator<OMElement> ite = messageElements.getChildrenWithLocalName("NotificationMessage"); ite.hasNext(); ) {
try {
OMElement messageElement = ite.next();
OMElement message = messageElement.getFirstChildWithName(
new QName(NameSpaceConstants.WSNT_NS.getNamespaceURI(), "Message")).getFirstElement();
messages.add(message);
} catch (NullPointerException e) {
throw new OMException(e);
}
}
} else {
// WSE
OMElement message = ctx.getSoapBody().getFirstElement();
if (message != null) {
messages.add(message);
}
}
// Dispatch messages
try {
for (OMElement message : messages) {
amqpBroadcastSender.Send(message);
amqpTopicSender.Send(message);
amqpSender.Send(message);
}
} catch (AMQPException e) {
logger.warn("Failed to send AMQP notification.[Reason=" + e.getMessage() + "]");
}
}
}
}
def agent_next_turn(self, record_training_data=True):
    if self.fallback:
        sentence = 'I cannot understand. Can you please repeat that?'
    elif self.task_complete:
        self.conv_over = True
        sentence = 'Have a nice day!!!!\n\n\n**************************************************************************'
    elif self.cancel_check:
        sentence = "You are canceling the task. Thank you. Bye!!!!"
        self.conv_over = True
elif self.state_tracker.end_conv:
self.task_complete = True
self.agent_action = {'act_slot_response': {'diaact': 'inform', 'inform_slots': {'taskcomplete': 'Ticket Available'}, 'request_slots': {}}, 'act_slot_value_response': None}
sentence = self.agent.add_nl_to_taskcomplete(self.state_tracker.current_slots)
else:
self.state = self.state_tracker.get_state_for_agent()
self.agent_action = self.agent.state_to_action(self.state)
self.state_tracker.update(agent_action=self.agent_action)
sentence = self.agent.add_nl_to_action(self.agent_action)
if not self.fallback:
self.turn += 1
self.last_agent_action = self.agent_action['act_slot_response']['diaact']
self.lase_request_slot = self.agent_action['act_slot_response']['request_slots'].keys()
if self.conv_over:
self.full_output.close()
self.conv_output.close()
    return (sentence, self.conv_over)
class EventStream:
"""Represents a stream of events for an output plugin to process.
Properties:
_plugin: A reference to the plugin using this EventStream.
_plugin_api: An instance of a class implementing PluginAPI. Usually the
PluginSandbox of this plugin.
_count: The number of events retrieved so far through this EventStream.
"""
def __init__(self, plugin, plugin_api):
self._plugin = plugin
self._plugin_api = plugin_api
self._count = 0
def __iter__(self):
return self.iter()
def iter(self, *args, **kwargs):
"""Create an iterator to get events out of this EventStream.
Refer to EventStreamIterator for argument specification.
Returns:
EventStreamIterator instance.
"""
logging.debug('Creating a stream iterator...')
return EventStreamIterator(self, *args, **kwargs)
def GetCount(self):
"""The total number of events retrieved so far."""
return self._count
def Next(self, timeout=1):
"""Gets the next available event from the buffer.
Just like a normal Python iterable, should raise StopIteration when no
more events are available. However, in the case that the plugin has been
paused, a WaitException will be raised.
Args:
timeout: Seconds to wait for retrieving next event.
Returns:
None if timeout or no more events are currently available.
Raises:
WaitException if the plugin has been paused.
"""
ret = self._plugin_api.EventStreamNext(self._plugin, self, timeout)
if ret is not None:
self._count += 1
return ret
def Commit(self):
"""Commits the current batch of events as successfully processed.
Raises:
UnexpectedAccess if the plugin instance is in some unexpected state and
is trying to access core functionality that it should not.
"""
return self._plugin_api.EventStreamCommit(self._plugin, self)
def Abort(self):
"""Aborts the current batch of events as failed to process.
Raises:
UnexpectedAccess if the plugin instance is in some unexpected state and
is trying to access core functionality that it should not.
"""
    return self._plugin_api.EventStreamAbort(self._plugin, self)
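A minimal consumption sketch built only from the semantics documented above (Next returns None on timeout, Commit marks the batch processed, Abort rolls it back); `stream` is assumed to be wired to a real plugin and PluginAPI by the host framework, and `process` is a hypothetical downstream handler:

def drain(stream, process, batch_size=16):
    """Pull up to batch_size events, then commit or abort the batch."""
    batch = []
    while len(batch) < batch_size:
        event = stream.Next(timeout=1)
        if event is None:  # timeout: no more events available right now
            break
        batch.append(event)
    if not batch:
        return 0
    try:
        process(batch)
        stream.Commit()    # mark the batch as successfully processed
    except Exception:
        stream.Abort()     # return the batch to the buffer for retry
        raise
    return len(batch)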
/**
* Split an input string using the pattern as a token separator.
*
* @param input
* Input sequence to tokenize
* @param limit
* If positive, the maximum number of tokens to return. If
* negative, an indefinite number of tokens are returned. If
* zero, an indefinite number of tokens are returned but trailing
* empty tokens are excluded.
* @return A sequence of tokens split out of the input string.
*/
public String[] split(CharSequence input, int limit) {
    ArrayList<String> res = new ArrayList<String>();
    Matcher mat = matcher(input);
    int index = 0;
    int curPos = 0;
    if (input.length() == 0) {
        return new String[] {""};
    } else {
        while (mat.find() && (index + 1 < limit || limit <= 0)) {
            res.add(input.subSequence(curPos, mat.start()).toString());
            curPos = mat.end();
            index++;
        }
        res.add(input.subSequence(curPos, input.length()).toString());
        index++;
        /*
         * discard trailing empty strings
         */
        if (limit == 0) {
            while (--index >= 0 && res.get(index).length() == 0) {
                res.remove(index);
            }
        }
    }
    return res.toArray(new String[index >= 0 ? index : 0]);
}
/**
* Set current payment request and go to the index page.
*
* @return Index page URL.
*/
public String savePayment() {
main:
{
if (payment == null) {
break main;
}
if (clientId == null) {
break main;
}
BillingClient client = clients.stream()
.filter(c -> clientId.equals(c.getId()))
.findAny()
.orElse(null);
if (client == null) {
break main;
}
payment.setClient(client);
if (requestType == null) {
break main;
}
this.payment.setOperationType(PaymentOperationType.valueOf(requestType));
paymentsManager.save(payment);
}
indexPage.loadViewData();
return "index?faces-redirect=true";
}
import User from './user/User'
export { default as IUser } from './user/IUser'
const userStore = new User()
export {
    userStore
}
package com.datastax.fixmessage;
import java.util.concurrent.BlockingQueue;
import com.datastax.fixmessage.model.FixMessage;
import com.datastax.session.dao.FixMessageDao;
public class FixMessageWriter implements Runnable{
private BlockingQueue<FixMessage> insertQueue;
private FixMessageDao dao;
public FixMessageWriter(FixMessageDao dao, BlockingQueue<FixMessage> insertQueue){
        this.dao = dao;
this.insertQueue = insertQueue;
}
    @Override
    public void run() {
        while (true) {
            try {
                // take() blocks until a message is available, so no null
                // check is needed on the returned element.
                FixMessage message = this.insertQueue.take();
                dao.saveMessage(message);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop the writer loop.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}
/* MIT License
*
* Copyright (c) 2019 - 2020 <NAME> <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/****************************************************************************************************/
/**
@brief I/O abstraction
@author <NAME> <<EMAIL>>
@addtogroup HAL
@{
@file io.h
***************************************************************************************************/
#ifndef __IO_H__
#define __IO_H__
/* INCLUDES ***************************************************************************************/
#include <Arduino.h>
/* C-Interface ************************************************************************************/
extern "C"
{
}
/* FORWARD DECLARATIONS ***************************************************************************/
/* CONSTANTS **************************************************************************************/
/* TYPES ******************************************************************************************/
/**
* Standard i/o pin.
*/
class IoPin
{
public:
/**
* Constructs an i/o pin instance.
*
* @param[in] pinNo Arduino pin number
* @param[in] pinMode Arduino pin mode
*/
IoPin(uint8_t pinNo, uint8_t pinMode) : m_pinNo(pinNo),
m_pinMode(pinMode)
{
}
/**
* Destroys the i/o pin instance.
*/
~IoPin()
{
}
/**
* Initialize pin, which sets the mode.
*/
void init() const
{
pinMode(m_pinNo, m_pinMode);
return;
}
/**
* Get pin number.
*
* @return Arduino pin number
*/
uint8_t getPinNo() const
{
return m_pinNo;
}
/**
* Get pin mode.
*
* @return Arduino pin mode
*/
uint8_t getPinMode() const
{
return m_pinMode;
}
private:
const uint8_t m_pinNo; /**< Arduino pin number */
const uint8_t m_pinMode; /**< Arduino pin mode */
/**
* An instance shall not be copied.
*/
IoPin(const IoPin &ioPin);
/**
     * An instance shall not be assigned.
*/
IoPin &operator=(const IoPin &ioPin);
};
/**
* Digital output pin.
*/
template <uint8_t pinNo>
class DOutPin : public IoPin
{
public:
/**
     * Constructs a digital output pin instance.
*/
DOutPin() : IoPin(pinNo, OUTPUT)
{
}
/**
* Destroys the digital output pin instance.
*/
~DOutPin()
{
}
/**
* Read from digital output pin.
*
     * @return Digital output pin value.
*/
int read() const
{
return digitalRead(pinNo);
}
/**
* Write to digital output pin.
*
* @param[in] value Digital output pin value (LOW, HIGH).
*/
void write(uint8_t value) const
{
digitalWrite(pinNo, value);
return;
}
private:
/**
* An instance shall not be copied.
*/
DOutPin(const DOutPin &dOutPin);
/**
     * An instance shall not be assigned.
*/
DOutPin &operator=(const DOutPin &dOutPin);
};
/**
 * Digital input pin with an unsupported pin mode.
*/
template <uint8_t pinNo, uint8_t pinMode>
class DInPin : public IoPin
{
public:
private:
/**
     * Constructs a digital input pin instance.
*/
DInPin() : IoPin(pinNo, pinMode)
{
}
/**
     * Destroys the digital input pin instance.
*/
~DInPin()
{
}
/**
* An instance shall not be copied.
*/
DInPin(const DInPin &dInPin);
/**
     * An instance shall not be assigned.
*/
DInPin &operator=(const DInPin &dInPin);
};
/**
* Digital input pin without pull-up or pull-down.
*/
template <uint8_t pinNo>
class DInPin<pinNo, INPUT> : public IoPin
{
public:
/**
     * Constructs a digital input pin instance.
*/
    DInPin() : IoPin(pinNo, INPUT)
{
}
/**
     * Destroys the digital input pin instance.
*/
~DInPin()
{
}
/**
* Read from digital input pin.
*
     * @return Digital input pin value.
*/
int read() const
{
return digitalRead(pinNo);
}
private:
/**
* An instance shall not be copied.
*/
DInPin(const DInPin &dInPin);
/**
     * An instance shall not be assigned.
*/
DInPin &operator=(const DInPin &dInPin);
};
/**
* Digital input pin with pull-up.
*/
template <uint8_t pinNo>
class DInPin<pinNo, INPUT_PULLUP> : public IoPin
{
public:
/**
     * Constructs a digital input pin instance.
*/
DInPin() : IoPin(pinNo, INPUT_PULLUP)
{
}
/**
     * Destroys the digital input pin instance.
*/
~DInPin()
{
}
/**
* Read from digital input pin.
*
     * @return Digital input pin value.
*/
int read() const
{
return digitalRead(pinNo);
}
private:
/**
* An instance shall not be copied.
*/
DInPin(const DInPin &dInPin);
/**
     * An instance shall not be assigned.
*/
DInPin &operator=(const DInPin &dInPin);
};
/**
* Digital input pin with pull-down.
*/
template <uint8_t pinNo>
class DInPin<pinNo, INPUT_PULLDOWN> : public IoPin
{
public:
/**
     * Constructs a digital input pin instance.
*/
DInPin() : IoPin(pinNo, INPUT_PULLDOWN)
{
}
/**
     * Destroys the digital input pin instance.
*/
~DInPin()
{
}
/**
* Read from digital input pin.
*
     * @return Digital input pin value.
*/
int read() const
{
return digitalRead(pinNo);
}
private:
/**
* An instance shall not be copied.
*/
DInPin(const DInPin &dInPin);
/**
     * An instance shall not be assigned.
*/
DInPin &operator=(const DInPin &dInPin);
};
/**
* Analog pin.
*/
template <uint8_t pinNo>
class AnalogPin : public IoPin
{
public:
/**
* Constructs an analog input pin instance.
*/
AnalogPin() : IoPin(pinNo, ANALOG)
{
}
/**
* Destroys the analog input pin instance.
*/
~AnalogPin()
{
}
/**
* Read from analog input pin.
*
* @return Value in ADC digits.
*/
uint16_t read() const
{
return analogRead(pinNo);
}
private:
/**
* An instance shall not be copied.
*/
AnalogPin(const AnalogPin &analogPin);
/**
     * An instance shall not be assigned.
*/
AnalogPin &operator=(const AnalogPin &analogPin);
};
/* INLINE FUNCTIONS *******************************************************************************/
/* PROTOTYPES *************************************************************************************/
#endif /* __IO_H__ */
/** @} */
import { ClassComponent, GlobalComponentConstructor } from '../ts-helpers';
export interface AvatarGroupProps {
}
export interface AvatarGroupSlots {
}
export declare type AvatarGroupEmits = {
}
declare class AvatarGroup extends ClassComponent<AvatarGroupProps, AvatarGroupSlots, AvatarGroupEmits> { }
declare module '@vue/runtime-core' {
interface GlobalComponents {
AvatarGroup: GlobalComponentConstructor<AvatarGroup>
}
}
/**
*
* A set of Avatars can be displayed together using the AvatarGroup component.
*
* Helper Components:
*
* - Avatar
*
* Demos:
*
* - [AvatarGroup](https://www.primefaces.org/primevue/showcase/#/avatar)
*
*/
export default AvatarGroup;
use serde::{Deserialize, Serialize};
/// Identifier of a user.
#[derive(Clone, Copy)]
#[derive(Debug, derive_more::Display)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
pub struct UserId(pub u64);
impl UserId {
    /// Returns a URL that links to the user with this id in the form of
/// `tg://user/?id=<...>`.
pub fn url(self) -> reqwest::Url {
reqwest::Url::parse(&format!("tg://user/?id={}", self)).unwrap()
}
/// Returns `true` if this is the id of the special user used by telegram
/// bot API to denote an anonymous user that sends messages on behalf of
/// a group.
pub fn is_anonymous(self) -> bool {
// https://github.com/tdlib/td/blob/4791fb6a2af0257f6cad8396e10424a79ee5f768/td/telegram/ContactsManager.cpp#L4941-L4943
const ANON_ID: UserId = UserId(1087968824);
self == ANON_ID
}
/// Returns `true` if this is the id of the special user used by telegram
/// bot API to denote an anonymous user that sends messages on behalf of
/// a channel.
pub fn is_channel(self) -> bool {
// https://github.com/tdlib/td/blob/4791fb6a2af0257f6cad8396e10424a79ee5f768/td/telegram/ContactsManager.cpp#L4945-L4947
const ANON_CHANNEL_ID: UserId = UserId(136817688);
self == ANON_CHANNEL_ID
}
/// Returns `true` if this is the id of the special user used by telegram
/// itself.
///
/// It is sometimes also used as a fallback, for example when a channel post
/// is automatically forwarded to a group, bots in a group will get a
/// message where `from` is the Telegram user.
pub fn is_telegram(self) -> bool {
const TELEGRAM_USER_ID: UserId = UserId(777000);
self == TELEGRAM_USER_ID
}
}
#[cfg(test)]
mod tests {
use serde::{Deserialize, Serialize};
use crate::types::UserId;
/// Test that `UserId` is serialized as the underlying integer
#[test]
fn deser() {
let user_id = S {
user_id: UserId(17),
};
let json = r#"{"user_id":17}"#;
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
struct S {
user_id: UserId,
}
assert_eq!(serde_json::to_string(&user_id).unwrap(), json);
assert_eq!(user_id, serde_json::from_str(json).unwrap());
}
#[test]
fn url_works() {
let id = UserId(17);
assert_eq!(id.url(), "tg://user/?id=17".parse().unwrap());
}
}
package com.liveperson.ephemerals.deploy.unit;
import com.liveperson.ephemerals.deploy.DeploymentPort;
import com.liveperson.ephemerals.deploy.probe.Probe;
import com.liveperson.ephemerals.deploy.volume.Volume;
import com.liveperson.ephemerals.deploy.volume.VolumeMount;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Specification of deployment resources, runtime, network and health check.
*
* Created by waseemh on 9/4/16.
*/
public class DeploymentUnit {
/**
 * Name of the deployment unit
*/
private final String name;
/**
* Health Probe
*/
private final Probe healthProbe;
/**
* Readiness Probe
*/
private final Probe readinessProbe;
/**
* Deployment unit ports
*/
private final List<DeploymentPort> ports;
/**
* CPU resource requirements
*/
private final double cpu;
/**
* Memory resource requirements
*/
private final int mem;
/**
* Commandline arguments
*/
private final Map<String,String> cmdArgs;
/**
* Environment variables
*/
private final Map<String,String> envVars;
/**
* Volumes
*/
private final Map<VolumeMount,Volume> volumes;
public DeploymentUnit(Builder builder) {
this.name = builder.name;
this.healthProbe = builder.healthProbe;
this.readinessProbe = builder.readinessProbe;
this.ports = builder.ports;
this.cpu = builder.cpu;
this.mem = builder.mem;
this.cmdArgs = builder.cmdArgs;
this.envVars = builder.envVars;
this.volumes = builder.volumes;
}
public double getCpu() {
return cpu;
}
public int getMem() {
return mem;
}
public Map<String, String> getCmdArgs() {
return cmdArgs;
}
public Map<String, String> getEnvVars() {
return envVars;
}
public String getName() {
return name;
}
public Probe getHealthProbe() {
return healthProbe;
}
public Probe getReadinessProbe() { return readinessProbe; }
public List<DeploymentPort> getPorts() {
return ports;
}
public Map<VolumeMount,Volume> getVolumes() { return volumes; }
public static class Builder {
private String name;
private Probe healthProbe;
private Probe readinessProbe;
private List<DeploymentPort> ports = new ArrayList<>();
private double cpu = 0.5;
private int mem = 1024;
private Map<String, String> cmdArgs = new HashMap<>();
private Map<String, String> envVars = new HashMap<>();
private Map<VolumeMount,Volume> volumes = new HashMap<>();
public Builder(String name) {
this.name = name;
}
public Builder withHealthProbe(Probe probe) {
this.healthProbe = probe;
return this;
}
public Builder withReadinessProbe(Probe probe) {
this.readinessProbe = probe;
return this;
}
public Builder withPorts(List<DeploymentPort> ports) {
this.ports.addAll(ports);
return this;
}
public Builder withPort(DeploymentPort port) {
this.ports.add(port);
return this;
}
public Builder withCpu(double cpu) {
this.cpu = cpu;
return this;
}
public Builder withMem(int mem) {
this.mem = mem;
return this;
}
public Builder withCmdArgs(Map<String, String> cmdArgs) {
this.cmdArgs.putAll(cmdArgs);
return this;
}
public Builder withCmdArg(String key, String value) {
this.cmdArgs.put(key,value);
return this;
}
public Builder withEnvVars(Map<String, String> envVars) {
this.envVars.putAll(envVars);
return this;
}
public Builder withEnvVar(String key, String value) {
this.envVars.put(key,value);
return this;
}
public Builder withVolume(VolumeMount volumeMount, Volume volume) {
this.volumes.put(volumeMount,volume);
return this;
}
public DeploymentUnit build() {
return new DeploymentUnit(this);
}
}
}
#include <sys/types.h>
#include <sys/stat.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <sys/wait.h>
#include "parasite.h"
#define PARASITE_CMD_GETFD PARASITE_USER_CMDS
static void print_vmsg(unsigned int lvl, const char *fmt, va_list parms)
{
printf("\tLC%u: ", lvl);
vprintf(fmt, parms);
}
static int do_infection(int pid, int *stolen_fd)
{
#define err_and_ret(msg) do { fprintf(stderr, msg); return -1; } while (0)
int state;
struct parasite_ctl *ctl;
struct infect_ctx *ictx;
compel_log_init(print_vmsg, COMPEL_LOG_DEBUG);
printf("Stopping task\n");
state = compel_stop_task(pid);
if (state < 0)
err_and_ret("Can't stop task");
printf("Preparing parasite ctl\n");
ctl = compel_prepare(pid);
if (!ctl)
err_and_ret("Can't prepare for infection");
printf("Configuring contexts\n");
/*
* First -- the infection context. Most of the stuff
* is already filled by compel_prepare(), just set the
* log descriptor for parasite side, library cannot
* live w/o it.
*/
ictx = compel_infect_ctx(ctl);
ictx->log_fd = STDERR_FILENO;
parasite_setup_c_header(ctl);
printf("Infecting\n");
if (compel_infect(ctl, 1, sizeof(int)))
err_and_ret("Can't infect victim");
printf("Stealing fd\n");
if (compel_rpc_call(PARASITE_CMD_GETFD, ctl))
err_and_ret("Can't run cmd");
if (compel_util_recv_fd(ctl, stolen_fd))
err_and_ret("Can't recv fd");
if (compel_rpc_sync(PARASITE_CMD_GETFD, ctl))
		err_and_ret("Can't finalize cmd");
printf("Stole %d fd\n", *stolen_fd);
/*
* Done. Cure and resume the task.
*/
printf("Curing\n");
if (compel_cure(ctl))
err_and_ret("Can't cure victim");
if (compel_resume_task(pid, state, state))
err_and_ret("Can't unseize task");
printf("Done\n");
return 0;
}
static int check_pipe_ends(int wfd, int rfd)
{
struct stat r, w;
char aux[4] = "0000";
printf("Check pipe ends are at hands\n");
if (fstat(wfd, &w) < 0) {
perror("Can't stat wfd");
return 0;
}
if (fstat(rfd, &r) < 0) {
perror("Can't stat rfd");
return 0;
}
	if (w.st_dev != r.st_dev || w.st_ino != r.st_ino) {
		/* errno is not meaningful here, so don't use perror() */
		fprintf(stderr, "Pipe ends are not the same pipe\n");
		return 0;
	}
printf("Check pipe ends are connected\n");
write(wfd, "1", 2);
read(rfd, aux, sizeof(aux));
if (aux[0] != '1' || aux[1] != '\0') {
fprintf(stderr, "Pipe connectivity lost\n");
return 0;
}
return 1;
}
int main(int argc, char **argv)
{
int p_in[2], p_out[2], p_err[2], pid, pass = 1, stolen_fd = -1;
/*
* Prepare IO-s and fork the victim binary
*/
if (pipe(p_in) || pipe(p_out) || pipe(p_err)) {
perror("Can't make pipe");
return -1;
}
printf("Run the victim\n");
pid = vfork();
if (pid == 0) {
close(p_in[1]); dup2(p_in[0], 0); close(p_in[0]);
close(p_out[0]); dup2(p_out[1], 1); close(p_out[1]);
close(p_err[0]); dup2(p_err[1], 2); close(p_err[1]);
execl("./victim", "victim", NULL);
exit(1);
}
close(p_in[0]); close(p_out[1]); close(p_err[1]);
/*
* Now do the infection with parasite.c
*/
printf("Infecting the victim\n");
if (do_infection(pid, &stolen_fd))
return 1;
/*
* Stop the victim and check the infection went well
*/
printf("Closing victim stdin\n");
close(p_in[1]);
printf("Waiting for victim to die\n");
wait(NULL);
printf("Checking the result\n");
/*
* Stolen fd is the stderr of the task
* Check these are the ends of the same pipe
* and message passing works OK
*/
pass = check_pipe_ends(stolen_fd, p_err[0]);
if (pass)
printf("All OK\n");
else
printf("Something went WRONG\n");
return 0;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.pack.alpha.server.tcc;
import static com.seanyinx.github.unit.scaffolding.Randomness.uniquify;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import io.grpc.stub.StreamObserver;
import java.util.UUID;
import org.apache.servicecomb.pack.alpha.server.tcc.jpa.ParticipatedEvent;
import org.apache.servicecomb.pack.alpha.server.tcc.jpa.TccTxType;
import org.apache.servicecomb.pack.alpha.server.tcc.callback.OmegaCallbacksRegistry;
import org.apache.servicecomb.pack.alpha.server.tcc.callback.TccCallbackEngine;
import org.apache.servicecomb.pack.alpha.server.tcc.jpa.GlobalTxEvent;
import org.apache.servicecomb.pack.alpha.server.tcc.service.TccTxEventService;
import org.apache.servicecomb.pack.common.TransactionStatus;
import org.apache.servicecomb.pack.contract.grpc.GrpcServiceConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {TccApplication.class},
properties = {
"alpha.server.host=0.0.0.0",
"alpha.server.port=8092",
"alpha.compensation.retry.delay=30"
})
@ActiveProfiles("tccTest")
public class TccCallbackEngineTest {
@Autowired
private TccCallbackEngine tccCallbackEngine;
@Autowired
private TccTxEventService tccTxEventService;
private final String globalTxId = UUID.randomUUID().toString();
private final String localTxId = UUID.randomUUID().toString();
private final String parentTxId = UUID.randomUUID().toString();
private final String confirmMethod = "confirm";
private final String cancelMethod = "cancel";
private final String serviceName = uniquify("serviceName");
private final String instanceId = uniquify("instanceId");
private final GrpcServiceConfig serviceConfig = GrpcServiceConfig.newBuilder()
.setServiceName(serviceName)
.setInstanceId(instanceId)
.build();
private final GrpcServiceConfig serviceConfig2 = GrpcServiceConfig.newBuilder()
.setServiceName(serviceName)
.setInstanceId(uniquify("instanceId"))
.build();
private ParticipatedEvent participatedEvent;
private ParticipatedEvent participationStartedEvent;
private GlobalTxEvent tccEndEvent;
@Before
public void init() {
participationStartedEvent = new ParticipatedEvent(serviceName, instanceId, globalTxId, localTxId,
parentTxId, confirmMethod, cancelMethod, "");
participatedEvent = new ParticipatedEvent(serviceName, instanceId, globalTxId, localTxId,
parentTxId, confirmMethod, cancelMethod, TransactionStatus.Succeed.name());
tccEndEvent = new GlobalTxEvent(serviceName, instanceId, globalTxId,
localTxId, parentTxId, TccTxType.ENDED.name(), TransactionStatus.Succeed.name());
}
@After
public void teardown() {
}
@Test
public void sendCoordinateCommandAfterTccEnd() {
StreamObserver responseObserver = mock(StreamObserver.class);
OmegaCallbacksRegistry.register(serviceConfig, responseObserver);
tccTxEventService.onParticipationStartedEvent(participationStartedEvent);
tccTxEventService.onParticipationEndedEvent(participatedEvent);
tccTxEventService.onTccEndedEvent(tccEndEvent);
verify(responseObserver).onNext(any());
}
@Test
public void sendCoordinateFailedForOmegaDown() throws InterruptedException {
StreamObserver responseObserver = mock(StreamObserver.class);
doThrow(IllegalArgumentException.class).when(responseObserver).onNext(any());
OmegaCallbacksRegistry.register(serviceConfig, responseObserver);
tccTxEventService.onParticipationStartedEvent(participationStartedEvent);
tccTxEventService.onParticipationEndedEvent(participatedEvent);
boolean result = tccCallbackEngine.execute(tccEndEvent);
assertThat(result, is(false));
Thread.sleep(1000);
verify(responseObserver).onNext(any());
try {
OmegaCallbacksRegistry.retrieve(serviceName, instanceId);
} catch (Exception ex) {
assertThat(ex.getMessage().startsWith("No such omega callback found for service"), is(true));
}
}
@Test
public void doRetryCoordinateTillOmegaReceived() throws InterruptedException {
StreamObserver failedResponseObserver = mock(StreamObserver.class);
doThrow(IllegalArgumentException.class).when(failedResponseObserver).onNext(any());
OmegaCallbacksRegistry.register(serviceConfig, failedResponseObserver);
tccTxEventService.onParticipationStartedEvent(participationStartedEvent);
tccTxEventService.onParticipationEndedEvent(participatedEvent);
boolean result = tccCallbackEngine.execute(tccEndEvent);
assertThat(result, is(false));
Thread.sleep(1000);
StreamObserver succeedResponseObserver = mock(StreamObserver.class);
OmegaCallbacksRegistry.register(serviceConfig2, succeedResponseObserver);
Thread.sleep(1000);
verify(failedResponseObserver).onNext(any());
verify(succeedResponseObserver).onNext(any());
}
}
Former President Bill Clinton might want to keep the racism accusations to himself from now on -- after ripping Donald Trump for a slogan he's used repeatedly since his 'Comeback Kid' days.
The 42nd president on Wednesday, while stumping in Orlando for Hillary Clinton, suggested Trump’s campaign rallying cry, “Make America Great Again,” is racist code.
“I’m actually old enough to remember the good old days, and they weren’t all that good in many ways,” Clinton said. “That message where ‘I’ll give you America great again’ is if you’re a white Southerner, you know exactly what it means, don’t you?”
The crowd roared as Clinton continued. “What it means is ‘I’ll give you an economy you had 50 years ago, and I’ll move you back up on the social totem pole and other people down,’” he said.
The problem is, Clinton himself has used the same phrase several times in the past.
He used it repeatedly while running for president in 1991 and 1992, declaring at one Little Rock, Ark., event, "Together, we can make America great again."
And in a campaign ad for his wife in 2008, Bill Clinton said, "It's time for another comeback, time to make America great again."
Trump campaign manager Kellyanne Conway pointed to the inconvenient footage in calling the former president's allegations about Trump's slogan a "disgrace."
“That performance is really something,” she said Friday on “Fox & Friends.” “Bill Clinton is known as a very strong campaigner, a great voice in politics -- but not this cycle.”
package mirror;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
public class RefConstructor<T> {
private Constructor<?> ctor;
public RefConstructor(Class<?> cls, Field field) throws NoSuchMethodException {
if (field.isAnnotationPresent(MethodParams.class)) {
Class<?>[] types = field.getAnnotation(MethodParams.class).value();
ctor = cls.getDeclaredConstructor(types);
        } else if (field.isAnnotationPresent(MethodReflectParams.class)) {
            String[] values = field.getAnnotation(MethodReflectParams.class).value();
            Class<?>[] parameterTypes = new Class<?>[values.length];
            for (int i = 0; i < values.length; i++) {
                try {
                    parameterTypes[i] = Class.forName(values[i]);
                } catch (Exception e) {
                    // Skip unresolvable names instead of retrying forever; the
                    // original while loop never advanced past a failed lookup.
                    e.printStackTrace();
                }
            }
            ctor = cls.getDeclaredConstructor(parameterTypes);
} else {
ctor = cls.getDeclaredConstructor();
}
if (ctor != null && !ctor.isAccessible()) {
ctor.setAccessible(true);
}
}
public T newInstance() {
try {
return (T) ctor.newInstance();
} catch (Exception e) {
return null;
}
}
public T newInstance(Object... params) {
try {
return (T) ctor.newInstance(params);
} catch (Exception e) {
return null;
}
}
}
/**
* Accept.
*
* @param axioms the axioms
* @param logicGraphVersion the logic graph version
*/
@Override
public void accept(Set<Axiom> axioms, LogicalExpressionImpl logicGraphVersion) {
if (this.conceptSequences.get(logicGraphVersion.getConceptBeingDefinedNid())) {
axioms.addAll(generateAxioms(logicGraphVersion));
}
}
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.bytecode;
import static org.chromium.bytecode.TypeUtils.ASSERTION_ERROR;
import static org.chromium.bytecode.TypeUtils.BUILD_HOOKS;
import static org.chromium.bytecode.TypeUtils.VOID;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
/**
 * A ClassVisitor for replacing Java assert statements with a function call by
 * modifying Java bytecode.
 *
 * We do this in two steps; the first step is to enable asserts.
 * The following bytecode is generated for each class with assert statements:
 *  0: ldc #8 // class CLASSNAME
 *  2: invokevirtual #9 // Method java/lang/Class.desiredAssertionStatus:()Z
 *  5: ifne 12
 *  8: iconst_1
 *  9: goto 13
 * 12: iconst_0
 * 13: putstatic #2 // Field $assertionsDisabled:Z
 * Line #13 is replaced with the following:
 * 13: pop
 * Consequently, $assertionsDisabled keeps its default value FALSE.
 * This is done in the first if statement of the overridden visitFieldInsn,
 * once for each store to $assertionsDisabled.
 *
 * The second step is to replace the assert statement with a function call.
 * The following instructions are generated by a Java assert statement:
 * getstatic     #3 // Field $assertionsDisabled:Z
 * ifne          118 // Jump over the assertion, as when assertions are disabled
 * ...
 * ifne          19
 * new           #4 // class java/lang/AssertionError
 * dup
 * ldc           #5 // String (this line is absent if no assert message is given)
 * invokespecial #6 // Method java/lang/AssertionError.<init>
 * athrow
 * athrow is replaced with:
 * invokestatic  #7 // Method org/chromium/base/JavaExceptionReporter.assertFailureHandler
 * goto          118
 * JavaExceptionReporter.assertFailureHandler is a function that handles the
 * AssertionError; 118 is the instruction that would execute next if assertions
 * were disabled.
 */
class AssertionEnablerClassAdapter extends ClassVisitor {
AssertionEnablerClassAdapter(ClassVisitor visitor) {
super(Opcodes.ASM5, visitor);
}
@Override
public MethodVisitor visitMethod(final int access, final String name, String desc,
String signature, String[] exceptions) {
return new RewriteAssertMethodVisitor(
Opcodes.ASM5, super.visitMethod(access, name, desc, signature, exceptions));
}
static class RewriteAssertMethodVisitor extends MethodVisitor {
static final String ASSERTION_DISABLED_NAME = "$assertionsDisabled";
static final String INSERT_INSTRUCTION_NAME = "assertFailureHandler";
static final String INSERT_INSTRUCTION_DESC =
TypeUtils.getMethodDescriptor(VOID, ASSERTION_ERROR);
static final boolean INSERT_INSTRUCTION_ITF = false;
boolean mStartLoadingAssert;
Label mGotoLabel;
public RewriteAssertMethodVisitor(int api, MethodVisitor mv) {
super(api, mv);
}
@Override
public void visitFieldInsn(int opcode, String owner, String name, String desc) {
if (opcode == Opcodes.PUTSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
super.visitInsn(Opcodes.POP); // enable assert
} else if (opcode == Opcodes.GETSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
mStartLoadingAssert = true;
super.visitFieldInsn(opcode, owner, name, desc);
} else {
super.visitFieldInsn(opcode, owner, name, desc);
}
}
@Override
public void visitJumpInsn(int opcode, Label label) {
if (mStartLoadingAssert && opcode == Opcodes.IFNE && mGotoLabel == null) {
mGotoLabel = label;
}
super.visitJumpInsn(opcode, label);
}
@Override
public void visitInsn(int opcode) {
if (!mStartLoadingAssert || opcode != Opcodes.ATHROW) {
super.visitInsn(opcode);
} else {
super.visitMethodInsn(Opcodes.INVOKESTATIC, BUILD_HOOKS, INSERT_INSTRUCTION_NAME,
INSERT_INSTRUCTION_DESC, INSERT_INSTRUCTION_ITF);
super.visitJumpInsn(Opcodes.GOTO, mGotoLabel);
mStartLoadingAssert = false;
mGotoLabel = null;
}
}
}
} |
/**
Preprocessor listener, used to instantiate objects of interest
found by the parser.
<p>This listener is used extensively in preprocessing, where symbolics
are resolved according to arcane rules.
*/
public class PPListener extends JCLPPParserBaseListener {
private Logger LOGGER = null;
private TheCLI CLI = null;
private String myName = this.getClass().getName();
public ArrayList<PPJob> jobs = null;
public ArrayList<PPProc> procs = null;
public ArrayList<PPSetSymbolValue> sets = null;
public String fileName = null;
public String procName = null;
public PPJob currJob = null;
public PPProc currProc = null;
public PPJclStep currJclStep = null;
public int nbJobs = 0;
public int fileNb = 0;
public File baseDir = null;
public File tmpJobDir = null;
public File tmpProcDir = null;
public PPListener(
ArrayList<PPJob> jobs
, ArrayList<PPProc> procs
, String fileName
, int fileNb
, File baseDir
, File tmpJobDir
, File tmpProcDir
, Logger LOGGER
, TheCLI CLI
) {
super();
		if (jobs != null) {
			this.jobs = jobs;
		}
		if (procs != null) {
			this.procs = procs;
		}
this.fileName = fileName;
this.fileNb = fileNb;
this.LOGGER = LOGGER;
this.CLI = CLI;
this.baseDir = baseDir;
this.tmpJobDir = tmpJobDir;
this.tmpProcDir = tmpProcDir;
}
@Override public void enterJobCard(JCLPPParser.JobCardContext ctx) {
		if (this.currJob != null) {
			this.currJob.setEndLine(ctx.JOB().getSymbol().getLine() - 1);
		}
this.nbJobs++;
if (tmpJobDir == null && tmpProcDir == null) {
this.currJob =
new PPJob(
ctx
, this.fileName
, this.nbJobs
, this.fileNb
, this.baseDir
, this.LOGGER
, this.CLI);
} else {
this.currJob =
new PPJob(
ctx
, this.fileName
, this.nbJobs
, this.fileNb
, this.baseDir
, this.tmpJobDir
, this.tmpProcDir
, this.LOGGER
, this.CLI);
}
if (this.jobs == null) {
this.LOGGER.warning(this.myName + " ignoring job " + currJob);
} else {
this.jobs.add(this.currJob);
}
this.procName = null;
this.currProc = null;
this.currJclStep = null;
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
@Override public void enterJcllibStatement(JCLPPParser.JcllibStatementContext ctx) {
		if (this.jobs != null) {
			this.currJob.addJcllib(ctx);
		}
}
@Override public void enterCommandStatement(JCLPPParser.CommandStatementContext ctx) {
if (this.currProc == null) {
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
} else {
this.currProc.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterJclCommandStatement(JCLPPParser.JclCommandStatementContext ctx) {
if (this.currProc == null) {
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
} else {
this.currProc.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterScheduleStatement(JCLPPParser.ScheduleStatementContext ctx) {
if (this.currProc == null) {
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
} else {
this.currProc.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterNotifyStatement(JCLPPParser.NotifyStatementContext ctx) {
if (this.currProc == null) {
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
} else {
this.currProc.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterOutputStatement(JCLPPParser.OutputStatementContext ctx) {
if (this.currProc == null) {
this.currJob.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
} else {
this.currProc.addOp(new PPOp(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterXmitStatement(JCLPPParser.XmitStatementContext ctx) {
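		// No preprocessing action is taken for XMIT statements.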
}
/**
A SET statement is considered to be part of the current "owning" entity -
either the current Job or the current Proc.
*/
@Override public void enterSetOperation(JCLPPParser.SetOperationContext ctx) {
if (this.currProc == null) {
this.currJob.addSetSym(new PPSetSymbolValue(ctx, this.fileName, null, this.LOGGER, this.CLI));
} else {
this.currProc.addSetSym(new PPSetSymbolValue(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
}
@Override public void enterProcStatement(JCLPPParser.ProcStatementContext ctx) {
this.procName = ctx.procName().NAME_FIELD().getSymbol().getText();
this.currJclStep = null;
this.currProc = this.createCurrProc(ctx);
		if (this.currJob != null) {
			this.currJob.addInstreamProc(this.currProc);
		}
}
@Override public void enterDefineSymbolicParameter(JCLPPParser.DefineSymbolicParameterContext ctx) {
this.currProc.addSetSym(new PPSetSymbolValue(ctx, this.fileName, this.procName, this.LOGGER, this.CLI));
}
@Override public void enterPendStatement(JCLPPParser.PendStatementContext ctx) {
this.currProc.addPendCtx(ctx);
if (this.procs == null) {
if (this.currJob == null) {
this.LOGGER.warning(this.myName + " ignoring proc " + currProc);
}
} else {
if (this.currJob == null) {
this.procs.add(this.currProc);
}
}
this.procName = null;
this.currProc = null;
this.currJclStep = null;
}
/**
An IncludeStatement is considered to be part of the current "owning" entity -
either the current Job or the current Proc.
<p>Consider...<p>
<code>
//ZHANN JOB
// INCLUDE MEMBER=CHIANA
//RYGEL PROC
// INCLUDE MEMBER=DARGO
//PS01 EXEC PGM=CRAIS
// INCLUDE MEMBER=TALYN
// PEND
// INCLUDE MEMBER=CRICHTON
//JS01 EXEC PROC=RYGEL
// INCLUDE MEMBER=AERYN
</code>
<p>...the IncludeStatement CHIANA is attached to Job ZHANN. The
IncludeStatement DARGO is standalone and attached to Proc RYGEL. The
IncludeStatement TALYN is also attached to Proc RYGEL. The IncludeStatement
CRICHTON is attached to Job ZHANN. The IncludeStatement AERYN
is also attached to Job ZHANN.
*/
@Override public void enterIncludeStatement(JCLPPParser.IncludeStatementContext ctx) {
if (this.currProc == null && this.currJob == null) {
/**
A cataloged proc need not have a PROC statement. Thus it is
possible to encounter an INCLUDE without having an owning
entity.
*/
this.LOGGER.warning(
this.myName
+ " INCLUDE at line "
+ ctx.SS().getSymbol().getLine()
+ " encountered with this.currProc == null && this.currJob == null");
this.currProc = this.createCurrProc();
this.currProc.addInclude(
new PPIncludeStatement(
ctx
, this.fileName
, this.procName
, this.LOGGER
, this.CLI));
} else if (this.currProc == null) {
this.currJob.addInclude(
new PPIncludeStatement(
ctx
, this.fileName
, this.procName
, this.LOGGER
, this.CLI));
} else {
this.currProc.addInclude(
new PPIncludeStatement(
ctx
, this.fileName
, this.procName
, this.LOGGER
, this.CLI));
}
}
/**
A JCL step is considered to be part of the current "owning" entity -
either the current Job or the current Proc.
*/
@Override public void enterJclStep(JCLPPParser.JclStepContext ctx) {
if (this.currProc == null && this.currJob == null) {
/**
A cataloged proc need not have a PROC statement. Thus it is
possible to encounter a JclStep without having an owning
entity.
*/
this.currProc = this.createCurrProc();
this.currJclStep = new PPJclStep(ctx, this.fileName, this.currProc, this.LOGGER, this.CLI);
this.currProc.addJclStep(this.currJclStep);
} else if (this.currProc == null) {
this.currJclStep = new PPJclStep(ctx, this.fileName, this.currJob, this.LOGGER, this.CLI);
this.currJob.addJclStep(this.currJclStep);
} else {
this.currJclStep = new PPJclStep(ctx, this.fileName, this.currProc, this.LOGGER, this.CLI);
this.currProc.addJclStep(this.currJclStep);
}
}
/**
It is convenient to have the end line of the current Job or Proc.
<p>In-stream procs will have been ended by their PEND statement.
*/
@Override public void exitStartRule(JCLPPParser.StartRuleContext ctx) {
		if (this.currJob == null) {
			if (this.currProc != null) {
				this.currProc.setEndLine(ctx.getStop().getLine());
				if (this.procs == null) {
					this.LOGGER.warning(this.myName + " ignoring proc " + currProc);
				} else {
					this.procs.add(this.currProc);
				}
			}
		} else {
			this.currJob.setEndLine(ctx.getStop().getLine());
		}
}
private PPProc createCurrProc() {
PPProc aProc = null;
if (this.tmpProcDir == null) {
aProc = new PPProc(
this.fileName
, this.fileNb
, this.baseDir
, this.LOGGER
, this.CLI);
} else {
aProc = new PPProc(
this.fileName
, this.fileNb
, this.baseDir
, this.tmpProcDir
, this.LOGGER
, this.CLI);
}
return aProc;
}
private PPProc createCurrProc(JCLPPParser.ProcStatementContext ctx) {
PPProc aProc = null;
if (this.tmpProcDir == null) {
aProc = new PPProc(
ctx
, this.fileName
, this.fileNb
, this.baseDir
, this.LOGGER
, this.CLI);
} else {
aProc = new PPProc(
ctx
, this.fileName
, this.fileNb
, this.baseDir
, this.tmpProcDir
, this.LOGGER
, this.CLI);
}
return aProc;
}
} |
// vhost device i2c
//
// Copyright 2021 Linaro Ltd. All Rights Reserved.
// <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
use log::warn;
use std::mem::size_of;
use std::sync::Arc;
use std::{convert, io};
use thiserror::Error as ThisError;
use vhost::vhost_user::message::{VhostUserProtocolFeatures, VhostUserVirtioFeatures};
use vhost_user_backend::{VhostUserBackendMut, VringRwLock, VringT};
use virtio_bindings::bindings::virtio_net::{VIRTIO_F_NOTIFY_ON_EMPTY, VIRTIO_F_VERSION_1};
use virtio_bindings::bindings::virtio_ring::{
VIRTIO_RING_F_EVENT_IDX, VIRTIO_RING_F_INDIRECT_DESC,
};
use virtio_queue::DescriptorChain;
use vm_memory::{
ByteValued, Bytes, GuestMemoryAtomic, GuestMemoryLoadGuard, GuestMemoryMmap, Le16, Le32,
};
use vmm_sys_util::epoll::EventSet;
use vmm_sys_util::eventfd::{EventFd, EFD_NONBLOCK};
use crate::i2c::*;
/// Virtio I2C Feature bits
const VIRTIO_I2C_F_ZERO_LENGTH_REQUEST: u16 = 0;
const QUEUE_SIZE: usize = 1024;
const NUM_QUEUES: usize = 1;
type Result<T> = std::result::Result<T, Error>;
type VhostUserBackendResult<T> = std::result::Result<T, std::io::Error>;
#[derive(Copy, Clone, Debug, PartialEq, ThisError)]
/// Errors related to vhost-device-i2c daemon.
pub enum Error {
#[error("Failed to handle event, didn't match EPOLLIN")]
HandleEventNotEpollIn,
#[error("Failed to handle unknown event")]
HandleEventUnknown,
#[error("Received unexpected write only descriptor at index {0}")]
UnexpectedWriteOnlyDescriptor(usize),
#[error("Received unexpected readable descriptor at index {0}")]
UnexpectedReadableDescriptor(usize),
#[error("Invalid descriptor count {0}")]
UnexpectedDescriptorCount(usize),
#[error("Invalid descriptor size, expected: {0}, found: {1}")]
UnexpectedDescriptorSize(usize, u32),
#[error("Descriptor not found")]
DescriptorNotFound,
#[error("Descriptor read failed")]
DescriptorReadFailed,
#[error("Descriptor write failed")]
DescriptorWriteFailed,
#[error("Failed to send notification")]
NotificationFailed,
#[error("Failed to create new EventFd")]
EventFdFailed,
}
impl convert::From<Error> for io::Error {
fn from(e: Error) -> Self {
io::Error::new(io::ErrorKind::Other, e)
}
}
/// I2C definitions from Virtio Spec
/// The final status written by the device
const VIRTIO_I2C_MSG_OK: u8 = 0;
const VIRTIO_I2C_MSG_ERR: u8 = 1;
#[derive(Copy, Clone, Default)]
#[repr(C)]
struct VirtioI2cOutHdr {
addr: Le16,
padding: Le16,
flags: Le32,
}
unsafe impl ByteValued for VirtioI2cOutHdr {}
/// VirtioI2cOutHdr Flags
const VIRTIO_I2C_FLAGS_M_RD: u32 = 1 << 1;
#[derive(Copy, Clone, Default)]
#[repr(C)]
struct VirtioI2cInHdr {
status: u8,
}
unsafe impl ByteValued for VirtioI2cInHdr {}
pub struct VhostUserI2cBackend<D: I2cDevice> {
i2c_map: Arc<I2cMap<D>>,
event_idx: bool,
pub exit_event: EventFd,
}
type I2cDescriptorChain = DescriptorChain<GuestMemoryLoadGuard<GuestMemoryMmap<()>>>;
impl<D: I2cDevice> VhostUserI2cBackend<D> {
pub fn new(i2c_map: Arc<I2cMap<D>>) -> Result<Self> {
Ok(VhostUserI2cBackend {
i2c_map,
event_idx: false,
exit_event: EventFd::new(EFD_NONBLOCK).map_err(|_| Error::EventFdFailed)?,
})
}
/// Process the requests in the vring and dispatch replies
fn process_requests(
&self,
requests: Vec<I2cDescriptorChain>,
vring: &VringRwLock,
) -> Result<bool> {
let mut reqs: Vec<I2cReq> = Vec::new();
if requests.is_empty() {
return Ok(true);
}
// Iterate over each I2C request and push it to "reqs" vector.
for desc_chain in requests.clone() {
let descriptors: Vec<_> = desc_chain.clone().collect();
if (descriptors.len() != 2) && (descriptors.len() != 3) {
return Err(Error::UnexpectedDescriptorCount(descriptors.len()));
}
let desc_out_hdr = descriptors[0];
if desc_out_hdr.is_write_only() {
return Err(Error::UnexpectedWriteOnlyDescriptor(0));
}
if desc_out_hdr.len() as usize != size_of::<VirtioI2cOutHdr>() {
return Err(Error::UnexpectedDescriptorSize(
size_of::<VirtioI2cOutHdr>(),
desc_out_hdr.len(),
));
}
let out_hdr = desc_chain
.memory()
.read_obj::<VirtioI2cOutHdr>(desc_out_hdr.addr())
.map_err(|_| Error::DescriptorReadFailed)?;
let flags = match out_hdr.flags.to_native() & VIRTIO_I2C_FLAGS_M_RD {
VIRTIO_I2C_FLAGS_M_RD => I2C_M_RD,
_ => 0,
};
let desc_in_hdr = descriptors[descriptors.len() - 1];
if !desc_in_hdr.is_write_only() {
return Err(Error::UnexpectedReadableDescriptor(descriptors.len() - 1));
}
if desc_in_hdr.len() as usize != size_of::<u8>() {
return Err(Error::UnexpectedDescriptorSize(
size_of::<u8>(),
desc_in_hdr.len(),
));
}
let (buf, len) = match descriptors.len() {
// Buffer is available
3 => {
let desc_buf = descriptors[1];
let len = desc_buf.len();
if len == 0 {
return Err(Error::UnexpectedDescriptorSize(1, len));
}
let mut buf = vec![0; len as usize];
if flags != I2C_M_RD {
if desc_buf.is_write_only() {
return Err(Error::UnexpectedWriteOnlyDescriptor(1));
}
desc_chain
.memory()
.read(&mut buf, desc_buf.addr())
.map_err(|_| Error::DescriptorReadFailed)?;
} else if !desc_buf.is_write_only() {
return Err(Error::UnexpectedReadableDescriptor(1));
}
(buf, len)
}
_ => (Vec::<u8>::new(), 0),
};
reqs.push(I2cReq {
addr: out_hdr.addr.to_native() >> 1,
flags,
len: len as u16,
buf,
});
}
let in_hdr = {
VirtioI2cInHdr {
status: match self.i2c_map.transfer(&mut reqs) {
Ok(()) => VIRTIO_I2C_MSG_OK,
Err(_) => VIRTIO_I2C_MSG_ERR,
},
}
};
for (i, desc_chain) in requests.iter().enumerate() {
let descriptors: Vec<_> = desc_chain.clone().collect();
let desc_in_hdr = descriptors[descriptors.len() - 1];
let mut len = size_of::<VirtioI2cInHdr>() as u32;
if descriptors.len() == 3 {
let desc_buf = descriptors[1];
// Write the data read from the I2C device
if reqs[i].flags == I2C_M_RD {
desc_chain
.memory()
.write(&reqs[i].buf, desc_buf.addr())
.map_err(|_| Error::DescriptorWriteFailed)?;
}
if in_hdr.status == VIRTIO_I2C_MSG_OK {
len += desc_buf.len();
}
}
// Write the transfer status
desc_chain
.memory()
.write_obj::<VirtioI2cInHdr>(in_hdr, desc_in_hdr.addr())
.map_err(|_| Error::DescriptorWriteFailed)?;
if vring.add_used(desc_chain.head_index(), len).is_err() {
warn!("Couldn't return used descriptors to the ring");
}
}
Ok(true)
}
/// Process the requests in the vring and dispatch replies
fn process_queue(&self, vring: &VringRwLock) -> Result<bool> {
let requests: Vec<_> = vring
.get_mut()
.get_queue_mut()
.iter()
.map_err(|_| Error::DescriptorNotFound)?
.collect();
if self.process_requests(requests, vring)? {
// Send notification once all the requests are processed
vring
.signal_used_queue()
.map_err(|_| Error::NotificationFailed)?;
}
Ok(true)
}
}
/// VhostUserBackendMut trait methods
impl<D: 'static + I2cDevice + Sync + Send> VhostUserBackendMut<VringRwLock, ()>
for VhostUserI2cBackend<D>
{
fn num_queues(&self) -> usize {
NUM_QUEUES
}
fn max_queue_size(&self) -> usize {
QUEUE_SIZE
}
fn features(&self) -> u64 {
// this matches the current libvhost defaults except VHOST_F_LOG_ALL
1 << VIRTIO_F_VERSION_1
| 1 << VIRTIO_F_NOTIFY_ON_EMPTY
| 1 << VIRTIO_RING_F_INDIRECT_DESC
| 1 << VIRTIO_RING_F_EVENT_IDX
| 1 << VIRTIO_I2C_F_ZERO_LENGTH_REQUEST
| VhostUserVirtioFeatures::PROTOCOL_FEATURES.bits()
}
fn protocol_features(&self) -> VhostUserProtocolFeatures {
VhostUserProtocolFeatures::MQ
}
    fn set_event_idx(&mut self, enabled: bool) {
        self.event_idx = enabled;
    }
fn update_memory(
&mut self,
_mem: GuestMemoryAtomic<GuestMemoryMmap>,
) -> VhostUserBackendResult<()> {
Ok(())
}
fn handle_event(
&mut self,
device_event: u16,
evset: EventSet,
vrings: &[VringRwLock],
_thread_id: usize,
) -> VhostUserBackendResult<bool> {
if evset != EventSet::IN {
return Err(Error::HandleEventNotEpollIn.into());
}
match device_event {
0 => {
let vring = &vrings[0];
if self.event_idx {
// vm-virtio's Queue implementation only checks avail_index
// once, so to properly support EVENT_IDX we need to keep
// calling process_queue() until it stops finding new
// requests on the queue.
loop {
vring.disable_notification().unwrap();
self.process_queue(vring)?;
if !vring.enable_notification().unwrap() {
break;
}
}
} else {
// Without EVENT_IDX, a single call is enough.
self.process_queue(vring)?;
}
}
_ => {
warn!("unhandled device_event: {}", device_event);
return Err(Error::HandleEventUnknown.into());
}
}
Ok(false)
}
fn exit_event(&self, _thread_index: usize) -> Option<EventFd> {
self.exit_event.try_clone().ok()
}
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use virtio_queue::defs::{VIRTQ_DESC_F_NEXT, VIRTQ_DESC_F_WRITE};
use virtio_queue::{mock::MockSplitQueue, Descriptor};
use vm_memory::{Address, GuestAddress, GuestMemoryAtomic, GuestMemoryMmap};
use super::Error;
use super::*;
use crate::i2c::tests::{update_rdwr_buf, verify_rdwr_buf, DummyDevice};
use crate::AdapterConfig;
// Prepares a single chain of descriptors
fn prepare_desc_chain(
start_addr: GuestAddress,
buf: &mut Vec<u8>,
flag: u32,
client_addr: u16,
) -> I2cDescriptorChain {
let mem = GuestMemoryMmap::<()>::from_ranges(&[(start_addr, 0x1000)]).unwrap();
let vq = MockSplitQueue::new(&mem, 16);
let mut next_addr = vq.desc_table().total_size() + 0x100;
let mut index = 0;
// Out header descriptor
let out_hdr = VirtioI2cOutHdr {
addr: From::from(client_addr << 1),
padding: From::from(0x0),
flags: From::from(flag),
};
let desc_out = Descriptor::new(
next_addr,
size_of::<VirtioI2cOutHdr>() as u32,
VIRTQ_DESC_F_NEXT,
index + 1,
);
mem.write_obj::<VirtioI2cOutHdr>(out_hdr, desc_out.addr())
.unwrap();
vq.desc_table().store(index, desc_out);
next_addr += desc_out.len() as u64;
index += 1;
// Buf descriptor: optional
if !buf.is_empty() {
// Set buffer is write-only or not
let flag = if (flag & VIRTIO_I2C_FLAGS_M_RD) == 0 {
update_rdwr_buf(buf);
0
} else {
VIRTQ_DESC_F_WRITE
};
let desc_buf = Descriptor::new(
next_addr,
buf.len() as u32,
flag | VIRTQ_DESC_F_NEXT,
index + 1,
);
mem.write(buf, desc_buf.addr()).unwrap();
vq.desc_table().store(index, desc_buf);
next_addr += desc_buf.len() as u64;
index += 1;
}
// In response descriptor
let desc_in = Descriptor::new(next_addr, size_of::<u8>() as u32, VIRTQ_DESC_F_WRITE, 0);
vq.desc_table().store(index, desc_in);
// Put the descriptor index 0 in the first available ring position.
mem.write_obj(0u16, vq.avail_addr().unchecked_add(4))
.unwrap();
// Set `avail_idx` to 1.
mem.write_obj(1u16, vq.avail_addr().unchecked_add(2))
.unwrap();
// Create descriptor chain from pre-filled memory
vq.create_queue(GuestMemoryAtomic::<GuestMemoryMmap>::new(mem.clone()))
.iter()
.unwrap()
.next()
.unwrap()
}
// Validate descriptor chains after processing them, checks pass/failure of
// operation and the value of the buffers updated by the `DummyDevice`.
fn validate_desc_chains(desc_chains: Vec<I2cDescriptorChain>, status: u8) {
for desc_chain in desc_chains {
let descriptors: Vec<_> = desc_chain.clone().collect();
let in_hdr = desc_chain
.memory()
.read_obj::<VirtioI2cInHdr>(descriptors[descriptors.len() - 1].addr())
.unwrap();
// Operation result should match expected status.
assert_eq!(in_hdr.status, status);
let out_hdr = desc_chain
.memory()
.read_obj::<VirtioI2cOutHdr>(descriptors[0].addr())
.unwrap();
if (out_hdr.flags.to_native() & VIRTIO_I2C_FLAGS_M_RD) != 0 && descriptors.len() == 3 {
let mut buf = vec![0; descriptors[1].len() as usize];
desc_chain
.memory()
.read(&mut buf, descriptors[1].addr())
.unwrap();
// Verify the content of the read-buffer
verify_rdwr_buf(&buf);
}
}
}
// Prepares list of dummy descriptors, their content isn't significant
fn prepare_desc_chain_dummy(
addr: Option<Vec<u64>>,
flags: Vec<u16>,
len: Vec<u32>,
) -> I2cDescriptorChain {
let mem = GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x1000)]).unwrap();
let vq = MockSplitQueue::new(&mem, 16);
for (i, flag) in flags.iter().enumerate() {
let mut f = if i == flags.len() - 1 {
0
} else {
VIRTQ_DESC_F_NEXT
};
f |= flag;
let offset = match addr {
Some(ref addr) => addr[i],
_ => 0x100,
};
let desc = Descriptor::new(offset, len[i], f, (i + 1) as u16);
vq.desc_table().store(i as u16, desc);
}
// Put the descriptor index 0 in the first available ring position.
mem.write_obj(0u16, vq.avail_addr().unchecked_add(4))
.unwrap();
// Set `avail_idx` to 1.
mem.write_obj(1u16, vq.avail_addr().unchecked_add(2))
.unwrap();
// Create descriptor chain from pre-filled memory
vq.create_queue(GuestMemoryAtomic::<GuestMemoryMmap>::new(mem.clone()))
.iter()
.unwrap()
.next()
.unwrap()
}
#[test]
fn process_requests_success() {
let device_config = AdapterConfig::try_from("1:4,2:32:21,5:10:23").unwrap();
let i2c_map = I2cMap::<DummyDevice>::new(&device_config).unwrap();
let backend = VhostUserI2cBackend::new(Arc::new(i2c_map)).unwrap();
let mem = GuestMemoryAtomic::new(
GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x1000)]).unwrap(),
);
let vring = VringRwLock::new(mem, 0x1000);
// Descriptor chain size zero, shouldn't fail
backend
.process_requests(Vec::<I2cDescriptorChain>::new(), &vring)
.unwrap();
// Valid single read descriptor
let mut buf: Vec<u8> = vec![0; 30];
let desc_chain = prepare_desc_chain(GuestAddress(0), &mut buf, VIRTIO_I2C_FLAGS_M_RD, 4);
let desc_chains = vec![desc_chain];
backend
.process_requests(desc_chains.clone(), &vring)
.unwrap();
validate_desc_chains(desc_chains, VIRTIO_I2C_MSG_OK);
// Valid single write descriptor
let mut buf: Vec<u8> = vec![0; 30];
let desc_chain = prepare_desc_chain(GuestAddress(0), &mut buf, 0, 4);
let desc_chains = vec![desc_chain];
backend
.process_requests(desc_chains.clone(), &vring)
.unwrap();
validate_desc_chains(desc_chains, VIRTIO_I2C_MSG_OK);
// Valid mixed read-write descriptors
let mut buf: Vec<Vec<u8>> = vec![vec![0; 30]; 6];
let desc_chains = vec![
// Write
prepare_desc_chain(GuestAddress(0), &mut buf[0], 0, 4),
// Read
prepare_desc_chain(GuestAddress(0), &mut buf[1], VIRTIO_I2C_FLAGS_M_RD, 4),
// Write
prepare_desc_chain(GuestAddress(0), &mut buf[2], 0, 4),
// Read
prepare_desc_chain(GuestAddress(0), &mut buf[3], VIRTIO_I2C_FLAGS_M_RD, 4),
// Write
prepare_desc_chain(GuestAddress(0), &mut buf[4], 0, 4),
// Read
prepare_desc_chain(GuestAddress(0), &mut buf[5], VIRTIO_I2C_FLAGS_M_RD, 4),
];
backend
.process_requests(desc_chains.clone(), &vring)
.unwrap();
validate_desc_chains(desc_chains, VIRTIO_I2C_MSG_OK);
}
#[test]
fn process_requests_failure() {
let device_config = AdapterConfig::try_from("1:4,2:32:21,5:10:23").unwrap();
let i2c_map = I2cMap::<DummyDevice>::new(&device_config).unwrap();
let backend = VhostUserI2cBackend::new(Arc::new(i2c_map)).unwrap();
let mem = GuestMemoryAtomic::new(
GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x1000)]).unwrap(),
);
let vring = VringRwLock::new(mem, 0x1000);
        // One descriptor
let flags: Vec<u16> = vec![0];
let len: Vec<u32> = vec![0];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedDescriptorCount(1)
);
// Four descriptors
let flags: Vec<u16> = vec![0, 0, 0, 0];
let len: Vec<u32> = vec![0, 0, 0, 0];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedDescriptorCount(4)
);
// Write only out hdr
let flags: Vec<u16> = vec![VIRTQ_DESC_F_WRITE, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
1,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedWriteOnlyDescriptor(0)
);
// Invalid out hdr length
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![100, 1, size_of::<u8>() as u32];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedDescriptorSize(size_of::<VirtioI2cOutHdr>(), 100)
);
// Invalid out hdr address
let addr: Vec<u64> = vec![0x10000, 0, 0];
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
1,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(Some(addr), flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::DescriptorReadFailed
);
// Read only in hdr
let flags: Vec<u16> = vec![0, 0, 0];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
1,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedReadableDescriptor(2)
);
// Invalid in hdr length
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![size_of::<VirtioI2cOutHdr>() as u32, 1, 100];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedDescriptorSize(size_of::<u8>(), 100)
);
// Invalid in hdr address
let addr: Vec<u64> = vec![0, 0, 0x10000];
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
1,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(Some(addr), flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::DescriptorWriteFailed
);
// Invalid buf length
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
0,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedDescriptorSize(1, 0)
);
// Invalid buf address
let addr: Vec<u64> = vec![0, 0x10000, 0];
let flags: Vec<u16> = vec![0, 0, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
1,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(Some(addr), flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::DescriptorReadFailed
);
// Write only buf for write operation
let flags: Vec<u16> = vec![0, VIRTQ_DESC_F_WRITE, VIRTQ_DESC_F_WRITE];
let len: Vec<u32> = vec![
size_of::<VirtioI2cOutHdr>() as u32,
10,
size_of::<u8>() as u32,
];
let desc_chain = prepare_desc_chain_dummy(None, flags, len);
assert_eq!(
backend
.process_requests(vec![desc_chain], &vring)
.unwrap_err(),
Error::UnexpectedWriteOnlyDescriptor(1)
);
// Missing buffer for I2C rdwr transfer
let mut buf = Vec::<u8>::new();
let desc_chain = prepare_desc_chain(GuestAddress(0), &mut buf, VIRTIO_I2C_FLAGS_M_RD, 4);
let desc_chains = vec![desc_chain];
backend
.process_requests(desc_chains.clone(), &vring)
.unwrap();
validate_desc_chains(desc_chains, VIRTIO_I2C_MSG_ERR);
}
#[test]
fn verify_backend() {
let device_config = AdapterConfig::try_from("1:4,2:32:21,5:10:23").unwrap();
let i2c_map: I2cMap<DummyDevice> = I2cMap::new(&device_config).unwrap();
let mut backend = VhostUserI2cBackend::new(Arc::new(i2c_map)).unwrap();
assert_eq!(backend.num_queues(), NUM_QUEUES);
assert_eq!(backend.max_queue_size(), QUEUE_SIZE);
assert_eq!(backend.features(), 0x171000001);
assert_eq!(backend.protocol_features(), VhostUserProtocolFeatures::MQ);
assert_eq!(backend.queues_per_thread(), vec![0xffff_ffff]);
assert_eq!(backend.get_config(0, 0), vec![]);
backend.set_event_idx(true);
assert!(backend.event_idx);
assert!(backend.exit_event(0).is_some());
let mem = GuestMemoryAtomic::new(
GuestMemoryMmap::<()>::from_ranges(&[(GuestAddress(0), 0x1000)]).unwrap(),
);
backend.update_memory(mem.clone()).unwrap();
let vring = VringRwLock::new(mem, 0x1000);
assert_eq!(
backend
.handle_event(0, EventSet::OUT, &[vring.clone()], 0)
.unwrap_err()
.kind(),
io::ErrorKind::Other
);
assert_eq!(
backend
.handle_event(1, EventSet::IN, &[vring.clone()], 0)
.unwrap_err()
.kind(),
io::ErrorKind::Other
);
// Hit the loop part
backend.set_event_idx(true);
backend
.handle_event(0, EventSet::IN, &[vring.clone()], 0)
.unwrap();
// Hit the non-loop part
backend.set_event_idx(false);
backend.handle_event(0, EventSet::IN, &[vring], 0).unwrap();
}
}
|
def convert(bsigs, grpkey, bldkey, msg=ffi.NULL, mgrkey=ffi.NULL):
    """Converts a list of blinded group signatures.

    Allocates one converted signature per input blinded signature, then
    delegates to the native groupsig_convert; raises on IERROR.
    """
    _csigs = [
        lib.groupsig_blindsig_init(grpkey.scheme) for _ in range(len(bsigs))
    ]
    csigs = ffi.new("groupsig_blindsig_t* []", _csigs)
    if lib.groupsig_convert(csigs, bsigs, len(bsigs), grpkey, mgrkey, bldkey, msg) == lib.IERROR:
        raise Exception('Error converting signatures.')
    return csigs
// New creates a new instance of `IstioCAServiceServer`.
func New(ca ca.CertificateAuthority, hostname string, port int) *Server {
authenticators := []authenticator{&clientCertAuthenticator{}}
aud := fmt.Sprintf("grpc://%s:%d", hostname, port)
if jwtAuthenticator, err := newIDTokenAuthenticator(aud); err != nil {
glog.Errorf(
"failed to create JWT authenticator and JWT token will not be used for authentication (error %v)",
err)
} else {
authenticators = append(authenticators, jwtAuthenticator)
}
return &Server{
authenticators: authenticators,
authorizer: &simpleAuthorizer{},
ca: ca,
hostname: hostname,
port: port,
}
} |
Harmonic mitigation for a megawatt grid-tied fuel cell system
This research deals with the design of an LCL filter for a grid-tied megawatt fuel cell stack. In the adopted configuration, the LCL filter links a three-level neutral-point-clamped inverter with the grid. A typical inverter is a source of harmonics, as in general its output is not a pure sine wave. Standards such as IEEE 519 and IEC 61000-3-6 define the allowable harmonic distortion for both the current and the voltage in a power system, based on the current level and the voltage level respectively. This paper aims to design an LCL filter that reduces the harmonics produced by a three-level inverter serving as the interface between a fuel cell stack and a grid, and that meets the abovementioned standards. The input of the grid-tied inverter is connected to a 1.4 MW solid oxide fuel cell stack at 1400 V. The inverter is modelled and controlled to deliver 1.2 MW at 600 V between phases. To evaluate the performance of the designed filter, the system, comprising the 1.4 MW fuel cell stack, the inverter, the LCL filter, a grid and a load, is simulated in the Matlab/Simulink environment. The results show that the designed filter reduces the total harmonic distortion to 0.46 % and 24.87 % for the voltage and the current, respectively.
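As a quick illustration of how total harmonic distortion is measured on a simulated waveform, the following minimal NumPy sketch computes THD from an FFT. It is not part of the paper's Matlab/Simulink model: the sampling rate, fundamental frequency, and test signal below are invented placeholders.

import numpy as np

def thd_percent(signal, fs, f0, n_harmonics=40):
    """Total harmonic distortion of a sampled waveform, in percent."""
    spectrum = np.abs(np.fft.rfft(signal))
    freqs = np.fft.rfftfreq(len(signal), d=1.0 / fs)

    def mag_at(f):
        # Magnitude of the FFT bin closest to frequency f.
        return spectrum[np.argmin(np.abs(freqs - f))]

    fundamental = mag_at(f0)
    harmonics = [mag_at(k * f0) for k in range(2, n_harmonics + 1)]
    return 100.0 * np.sqrt(sum(h * h for h in harmonics)) / fundamental

# Placeholder test: a 50 Hz fundamental plus a 5 % fifth harmonic.
fs, f0 = 10_000, 50
t = np.arange(0, 1.0, 1.0 / fs)
v = np.sin(2 * np.pi * f0 * t) + 0.05 * np.sin(2 * np.pi * 5 * f0 * t)
print(f"THD = {thd_percent(v, fs, f0):.2f} %")  # prints roughly 5.00 %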
<filename>travel-buddy-app/src/03-components/ActivityDetail.tsx
import React from 'react';
import { Col, Row } from 'reactstrap';
import { Activity } from '../api';
import { formatDate, getDurationHours, TIME_FORMAT } from '../utils';
export interface ActivityDetailProps {
activities: Activity[];
}
export const ActivityDetail: React.FC<ActivityDetailProps> = ({ activities }) => {
const getDuration = (activity: Activity) => getDurationHours(activity.startDate, activity.endDate);
return (
<div className="activity-detail">
<Row>
<Col className="activity-detail__name text-primary">Activities</Col>
</Row>
<Row className="my-2">
<Col xs={12} sm={2}>
<p className="activity-detail__label">Time</p>
</Col>
<Col xs={12} sm={4}>
<p className="activity-detail__label">Name</p>
</Col>
<Col xs={12} sm={6}>
<p className="activity-detail__label">Notes</p>
</Col>
</Row>
{activities.map((activity) => (
<Row key={activity.id} className="activity">
<Col xs={12} sm={2}>
{formatDate(activity.startDate, TIME_FORMAT)}{' '}
<em className="activity__duration">
({getDuration(activity)} hr{getDuration(activity) > 1 ? 's' : ''})
</em>
</Col>
<Col xs={12} sm={4}>
<p className="activity__name text-primary m-0">
{activity.name}
{!!activity.port && (
<span className="activity__port">
({activity.port.city}, {!!activity.port.state ? activity.port.state : activity.port.country})
</span>
)}
</p>
<p className="activity__address m-0">
{activity.addressLine1}
{!!activity.addressLine2 ? ` ${activity.addressLine2}` : ''}
{!!activity.city ? `, ${activity.city}` : ''}
{!!activity.state ? `, ${activity.state}` : ''}
{/* {!!activity.postalCode ? activity.postalCode : ''}{' '} */}
</p>
</Col>
<Col xs={12} sm={6}>
<p className="activity__notes m-0">{activity.description}</p>
</Col>
</Row>
))}
</div>
);
};
|
By PoliceOne Staff
DULUTH, Ga. — Dash cam video has been released capturing a dangerous pursuit of two burglary suspects.
According to the Gwinnett Daily Post, two burglars broke into a hardware store Monday. A nearby officer was alerted to an alarm sounding at the store and began to pursue the suspects.
The suspects stole 18 weed trimmers, 15 chainsaws, six edgers and two pole pruners before fleeing in a stolen SUV.
As police pursued the suspects, one individual climbed into the back of the stolen vehicle and began hurling chainsaws and weed trimmers at the officer’s vehicle.
One officer was able to avoid hitting any of the stolen items, but a second officer hit one of the chainsaws, causing around $3,000 in damage, according to the report.
The burglars eventually abandoned the stolen SUV and most of the $19,000 worth of equipment was returned, according to WSB-TV. Police were unable to catch the suspects and are still working to find them. |
    def _calc_breach_prob_per_particle(
        self,
        shorezone: ShoreZone,
        convert_lon: bool = True,
    ) -> np.ndarray:
        """Returns an array with, for each vessel (particle), the breach
        probability at the location where it stranded, looked up from
        `shorezone`; zero for vessels that never stranded. Longitudes are
        converted from [0, 360) to [-180, 180) when `convert_lon` is True."""
        with xr.open_dataset(self.path) as ds:
            stranded_flag = utils.get_stranded_flag_from_status(ds)
            nvessels = len(ds.trajectory)
            breach_prob_per_particle = np.zeros((nvessels,))
            # (vessel, time) index pairs at which the status is "stranded".
            stranded = ds.status.values == stranded_flag
            stranded_ix = np.argwhere(stranded)
            vessel_ix = stranded_ix[:, 0]
            time_ix = stranded_ix[:, 1]
            lons = ds.lon.values[vessel_ix, time_ix]
            if convert_lon:
                lons = utils.lon360_to_lon180(lons)
            lats = ds.lat.values[vessel_ix, time_ix]
            # Stack into (n, 2) lon/lat pairs for the shorezone lookup.
            locs = np.vstack((lons, lats)).T
            breach_prob_per_particle[vessel_ix] = shorezone.get_breach_prob(locs)
        return breach_prob_per_particle
/**
* Removes/Destroys a guild audio manager, stopping players from just laying dormant and using resources.
* @param guild {@link Guild}
*/
public static void destroyGuildAudioManager(Guild guild) {
if(guildManagers.containsKey(guild.getId())) {
guildManagers.get(guild.getId()).destroyConnection();
guildManagers.remove(guild.getId());
}
} |
L.A. County Sheriff's Department intends to fire seven deputies
The seven belong to a secret law enforcement clique that allegedly celebrated shootings and branded members with matching tattoos.
The Sheriff's Department has a long history of secret cliques with members of the groups having reached high-ranking positions within the agency. Sheriff officials have sought to crack down on the groups, fearing that they tarnished the department's reputation and encouraged unethical conduct.
The seven worked on an elite gang-enforcement team that patrols neighborhoods where violence is high. The team makes a priority of taking guns off the street, officials said.
The Times reported last year about the existence of the clique, dubbed the Jump Out Boys, and the discovery of a pamphlet that described the group's creed, which required aggressive policing and awarded tattoo modifications for police shootings.
Seven Los Angeles County sheriff's deputies have been notified that the department intends to fire them for belonging to a secret law enforcement clique that allegedly celebrated shootings and branded its members with matching tattoos, officials said.
In the case of the Jump Out Boys, sheriff's investigators did not uncover any criminal behavior. But, sources said, the group clashed with department policies and image.
Their tattoos, for instance, depicted an oversize skull with a wide, toothy grimace and glowing red eyes. A bandanna with the unit's acronym is wrapped around the skull. A bony hand clasps a revolver. Smoke would be tattooed over the gun's barrel for members who were involved in at least one shooting, officials said.
One member, who spoke to The Times and requested anonymity, said the group promoted only hard work and bravery. He dismissed concerns about the group's tattoo, noting that deputies throughout the department get matching tattoos. He said there was nothing sinister about their creed or conduct. The deputy, who was notified of the department's intent to terminate him, read The Times several passages from the pamphlet, which he said supported proactive policing.
"We are alpha dogs who think and act like the wolf, but never become the wolf," one passage stated, comparing criminals to wolves. Another passage stated, "We are not afraid to get our hands dirty without any disgrace, dishonor or hesitation... sometimes (members) need to do the things they don't want to in order to get where they want to be."
Department spokesman Steve Whitmore said starting the termination process shows that Sheriff Lee Baca "does not take any of this lightly and will move forward with the appropriate action."
Investigators were less concerned about the tattoos, and more focused on the suspected admiration they showed for officer-involved shootings, which are expected to be events of last resort. The deputy told The Times, however, that investigators reviewed their shootings and arrests and found nothing unlawful.
"We get called a gang within the badge? It's unfair," he said. "People want to say you have a tattoo. So do fraternities. Go to Yale. Are they a gang?.... Boy Scouts have patches and they have mission statements, and so do we."
"We do not glorify shootings," he continued. "What we do is commend and honor the shootings. I have to remember them because it can happen any time, any day. I don't want to forget them because I'm glad I'm alive."
If the firings are upheld, it would be one of the largest terminations over one incident in the department's history. In 2011, the department fired about half a dozen deputies who were also said to have formed a clique. Those deputies worked on the third floor of Men's Central Jail and allegedly threw gang-like three-finger hand signs. They were fired after they fought two fellow deputies at an employee Christmas party and allegedly punched a female deputy in the face. |
// run expands an existing MinIO Tenant by adding a new pool of servers and volumes.
func (v *expandCmd) run() error {
client, err := helpers.GetKubeOperatorClient()
if err != nil {
return err
}
if v.tenantOpts.NS == "" || v.tenantOpts.NS == helpers.DefaultNamespace {
v.tenantOpts.NS, err = getTenantNamespace(client, v.tenantOpts.Name)
if err != nil {
return err
}
}
t, err := client.MinioV2().Tenants(v.tenantOpts.NS).Get(context.Background(), v.tenantOpts.Name, metav1.GetOptions{})
if err != nil {
return err
}
currentCapacity := helpers.TotalCapacity(*t)
volumesPerServer := helpers.VolumesPerServer(v.tenantOpts.Volumes, v.tenantOpts.Servers)
capacityPerVolume, err := helpers.CapacityPerVolume(v.tenantOpts.Capacity, v.tenantOpts.Volumes)
if err != nil {
return err
}
t.Spec.Pools = append(t.Spec.Pools, resources.Pool(&v.tenantOpts, volumesPerServer, *capacityPerVolume))
expandedCapacity := helpers.TotalCapacity(*t)
if !v.output {
fmt.Printf(Bold(fmt.Sprintf("\nExpanding Tenant '%s/%s' from %s to %s\n\n", t.ObjectMeta.Name, t.ObjectMeta.Namespace, currentCapacity, expandedCapacity)))
return addPoolToTenant(client, t)
}
o, err := yaml.Marshal(t)
if err != nil {
return err
}
fmt.Println(string(o))
return nil
} |
package webstomp
import (
"net"
"net/url"
"golang.org/x/net/websocket"
)
// Dial creates a client connection to the given target
func Dial(target string, protocol string) (net.Conn, error) {
u, err := url.Parse(target)
if err != nil {
return nil, err
}
origin, err := u.Parse("/")
if err != nil {
return nil, err
}
origin.Scheme = "https"
return websocket.Dial(u.String(), protocol, origin.String())
}
|
/**
 * This test class apes, in bare essentials, what Spring TestNG provides as a
 * base class for running Spring based tests. The following methods have been
 * duplicated from org.springframework.test.context.testng.AbstractTestNGSpringContextTests
 * to simulate the bug:
 * 1. throwAsUncheckedException() 2. getTestResultException() 3. throwAs()
 */
public class AbstractBaseTestCase implements IHookable {
@Override
public void run(IHookCallBack callBack, ITestResult testResult) {
callBack.runTestMethod(testResult);
Throwable t = getTestResultException(testResult);
if (t != null) {
throwAsUncheckedException(t);
}
}
@SuppressWarnings("unchecked")
private <T extends Throwable> void throwAs(Throwable t) throws T {
throw (T) t;
}
private void throwAsUncheckedException(Throwable t) {
throwAs(t);
}
private Throwable getTestResultException(ITestResult testResult) {
Throwable testResultException = testResult.getThrowable();
if (testResultException instanceof InvocationTargetException) {
testResultException = testResultException.getCause();
}
return testResultException;
}
} |
package database
import "github.com/jinzhu/gorm"
// Subscription model
type Subscription struct {
gorm.Model
UserID uint `gorm:"primary_key;not null"`
Address string `gorm:"primary_key;not null"`
Network string `gorm:"primary_key;not null"`
Alias string
WatchMask uint
SentryDSN string
}
func (d *db) GetSubscription(userID uint, address, network string) (s Subscription, err error) {
err = d.
Scopes(userIDScope(userID), networkScope(network), addressScope(address)).
First(&s).Error
return
}
func (d *db) GetSubscriptions(address, network string) ([]Subscription, error) {
var subs []Subscription
err := d.
Scopes(contract(address, network)).
Find(&subs).Error
return subs, err
}
func (d *db) ListSubscriptions(userID uint) ([]Subscription, error) {
var subs []Subscription
err := d.
Scopes(userIDScope(userID)).
Order("created_at DESC").
Find(&subs).Error
return subs, err
}
func (d *db) UpsertSubscription(s *Subscription) error {
return d.
Scopes(userIDScope(s.UserID), contract(s.Address, s.Network)).
Assign(Subscription{Alias: s.Alias, WatchMask: s.WatchMask, SentryDSN: s.SentryDSN}).
FirstOrCreate(s).Error
}
func (d *db) DeleteSubscription(s *Subscription) error {
return d.Unscoped().
Scopes(userIDScope(s.UserID), contract(s.Address, s.Network)).
Delete(Subscription{}).Error
}
func (d *db) GetSubscriptionsCount(address, network string) (count int, err error) {
err = d.
Model(&Subscription{}).
Scopes(contract(address, network)).
Count(&count).Error
return
}
|
# Read the count a and the centre value b.
a, b = map(int, input().split(' '))

# Collect b - i and b + i for i in 0..a-1: every integer within
# distance a - 1 of b (b itself appears in both passes).
c = []
for i in range(a):
    c.append(b - i)
for i in range(a):
    c.append(b + i)

# De-duplicate, sort, and print space-separated. Equivalent to
# print(*range(b - a + 1, b + a)).
m = sorted(set(c))
for i in m:
    print(i, end=' ')
/*
 * Request for a new transaction from a light client. Builds a transaction without a signature.
 * Also returns the list of previous transactions selected for input; it is used for signing on the client side.
 */
func (s *NodeServerRequest) handleTxRequest() error {
s.HasResponse = true
var payload nodeclient.ComRequestTransaction
err := s.parseRequestData(&payload)
if err != nil {
return err
}
result := nodeclient.ComRequestTransactionData{}
TXBytes, DataToSign, err := s.Node.GetTransactionsManager().
PrepareNewTransaction(payload.PubKey, payload.To, payload.Amount)
if err != nil {
return err
}
result.DataToSign = DataToSign
result.TX = TXBytes
s.Response, err = net.GobEncode(result)
if err != nil {
return err
}
return nil
} |
SPRINGFIELD, Ill. (AP) — Historic votes Tuesday in the Illinois Legislature positioned that state to become the largest in the heartland to legalize gay marriage, following months of arduous lobbying efforts by both sides in President Barack Obama’s home state.
Under the measure, which the state House approved 61-54 before sending it on to the Senate for technical changes, gay weddings could be held in Illinois starting in June. The bill heads next to Gov. Pat Quinn, who has pledged to sign it but didn’t immediately indicate when.
Fourteen states plus Washington D.C., allow same-sex marriage. Most recently, New Jersey, Minnesota and Rhode Island have legalized it.
The road to the Illinois vote was long with stalled attempts earlier this year, something that frustrated activists in the state where Democrats lead the House, Senate and governor’s office. Chicago Democratic Rep. Greg Harris, who is the sponsor of the bill, decided not to bring the bill for a vote in May because he said he simply didn’t have the support.
Then the U.S. Supreme Court ruled to strike down a provision of the federal Defense of Marriage Act, something he said resonated with lawmakers. Backers also launched a furious campaign, hiring a lobbyist from the state’s largest union, the former head of the Illinois Republican Party and field organizers spanning the state.
“To treat all our citizens equally in the eyes of the law we must change this,” Harris said on the floor. “Families have been kept apart.”
Debate lasted more than two hours, and the final roll call was met with hearty cheers and applause. Supporters’ speeches echoed themes of equality and civil rights with mentions of Obama, Martin Luther King Jr. and Matthew Shepard, a gay college student whose 1998 death sparked numerous hate crime bills.
Polls show support for gay marriage has surged since 1996, when Gallup found that 27 percent of Americans backed it. Now Gallup finds the majority support giving gay and lesbian couples the right to marry.
“Today the Illinois House put our state on the right side of history,” Quinn said in a statement. “Illinois is a place that embraces all people and today, we are an example for the nation.”
However, opponents of the legislation — which included some of the most powerful religious leaders in the state — have said marriage should remain between a man and a woman. A group of Chicago-area pastors vowed to line up primary challengers against some Chicago-area lawmakers who voted yes.
“This issue is not just about two adults and their emotional relational and financial commitment to another,” said Rep. Tom Morrison, a Palatine Republican. “Redefining marriage has far reaching implications in our society.”
Three Republicans joined those voting in favor, including former House Minority Leader Tom Cross of Oswego who had not revealed how he’d vote ahead of Tuesday. The representative stepped down from his leadership position earlier this year and is seeking statewide office as treasurer.
“For me, supporting marriage equality is not only the right decision, but also consistent with my belief in individual freedom, equality and limited government,” Cross said in a statement. He declined to talk with reporters.
Other lawmakers also came forward for the first time Tuesday, including Democratic state Rep. Jehan Gordon-Booth of Peoria, who wrote a newspaper opinion piece expressing her support as a matter of equality. State Rep. Linda Chapa LaVia told House members on the floor that she’d support the measure too.
The bill first cleared the Senate on Valentine’s Day with the support of 33 Democrats and a single Republican. Backers had expressed confidence that the bill would be approved by the House in mid-March. But it took the supporters months to secure enough yes votes to win the House’s approval.
In September, Minneapolis Mayor R.T. Rybak even went to Chicago to encourage gay couples to hold their nuptials in his city if they were tired of waiting for Illinois lawmakers to act.
Although Illinois once appeared poised to become the first Midwestern state to approve gay marriage in the Legislature, Minnesota did it sooner and started holding its first same-sex weddings over the summer. Iowa allows gay marriages too because of a court ruling, not a legislative vote.
The issue caused internal conflict among Illinois Republicans as the party works to balance efforts to appeal more to younger voters, minorities and women with the more socially conservative positions of some party members.
For months, the leaders of several black mega-churches lobbied the districts of black House members with an aggressive robocall campaign against gay marriage, placing an uncomfortable spotlight on the mostly Democratic black caucus. Many remained undecided until the vote neared.
On Tuesday, the African American Clergy Coalition praised those who voted against the measure.
“We will always believe that marriage is between one man and one woman,” said Bishop Larry Trotter of the coalition. “Yet we will still love the members of the LGBT community.”
Illinois approved civil unions in 2011.
__
The bill is SB10
Online: http://www.ilga.gov
# Copyright 2020 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface for graphs."""
import hashlib
from flask import current_app
import networkx as nx
from timesketch.lib.datastores.elastic import ElasticsearchDataStore
GRAPH_TYPES = {
'Graph': nx.Graph,
'MultiGraph': nx.MultiGraph,
'DiGraph': nx.DiGraph,
'MultiDiGraph': nx.MultiDiGraph
}
MAX_EVENTS_PER_EDGE = 500
class Graph:
"""Graph object with helper methods.
Attributes:
nx_instance: Networkx graph object.
"""
def __init__(self, graph_type):
"""Initialize Graph object.
Args:
graph_type: (str) Name of graph type.
"""
_nx_graph_class = GRAPH_TYPES.get(graph_type)
self.nx_instance = _nx_graph_class()
self._nodes = {}
self._edges = {}
def add_node(self, label, attributes):
"""Add node to graph.
Args:
label: (str) Label for the node.
attributes: (dict) Attributes to add to node.
Returns:
Instance of Node object.
"""
if not attributes:
attributes = {}
node = Node(label, attributes)
node.set_attribute('id', node.id)
if node.id not in self._nodes:
self._nodes[node.id] = node
return node
def add_edge(self, source, target, label, event, attributes=None):
"""Add edge to graph.
Args:
source: (Node) Node to use as source.
target: (Node) Node to use as target.
label: (str) Label for the node.
event: (dict): Elasticsearch event.
attributes: (dict) Attributes to add to node.
"""
if not attributes:
attributes = {}
attributes['id'] = ''.join([source.id, target.id, label]).lower()
edge = Edge(source, target, label, attributes)
if edge.node_counter < MAX_EVENTS_PER_EDGE:
index = event.get('_index')
doc_id = event.get('_id')
events = edge.attributes.get('events', {})
doc_ids = events.get(index, [])
doc_ids.append(doc_id)
edge.node_counter += 1
events[index] = doc_ids
edge.set_attribute('events', events)
self._edges[edge.id] = edge
def commit(self):
"""Commit all nodes and edges to the networkx graph object."""
for node_id, node in self._nodes.items():
self.nx_instance.add_node(
node_id, label=node.label, **node.attributes)
for _, edge in self._edges.items():
label = edge.label + f' ({edge.node_counter})'
self.nx_instance.add_edge(
edge.source.id, edge.target.id, label=label,
**edge.attributes)
def to_cytoscape(self):
"""Output graph in Cytoscape JSON format.
Returns:
Graph in Cytoscape JSON format.
"""
cy_json = nx.readwrite.json_graph.cytoscape_data(self.nx_instance)
return cy_json.get('elements', [])
class BaseGraphElement:
"""Base class for graph elements.
Attributes:
label (str): Node/Edge label to show in the UI.
attributes (dict): Attributed to add to the node/edge.
        id (str): Unique value generated from the label.
"""
def __init__(self, label='', attributes=None):
"""Initialize the base element object.
Args:
label (str): Node/Edge label to show in the UI.
attributes (dict): Attributes to add to the node/edge.
"""
self.label = label
self.attributes = attributes or {}
self.id = self._generate_id()
def _generate_id(self):
"""Generate ID for node/edge.
Returns:
MD5 hash (str): MD5 hash of the provided label.
"""
id_string = self.attributes.get('id', self.label)
return hashlib.md5(id_string.encode('utf-8')).hexdigest()
def set_attribute(self, key, value):
"""Add or replace an attribute to the element.
Args:
key (str): Attribute key.
value (str): Attribute value.
"""
self.attributes[key] = value
class Node(BaseGraphElement):
"""Graph node object."""
# TODO: Add logic for Nodes when needed.
class Edge(BaseGraphElement):
"""Graph edge object.
Attributes:
source (Node): Node to add as source node.
target (Node): Node to add as target node.
        node_counter (int): Counter for the number of events referenced by the edge.
"""
def __init__(self, source, target, label='', attributes=None):
"""Initialize the Edge object.
Args:
label (str): Node/Edge label to show in the UI.
attributes (dict): Attributes to add to the edge.
"""
self.source = source
self.target = target
self.node_counter = 0
super(Edge, self).__init__(label, attributes)
class BaseGraphPlugin:
"""Base class for a graph.
Attributes:
datastore (ElasticsearchDataStore): Elasticsearch datastore object.
graph (nx.Graph): NetworkX Graph object.
"""
# Name that the graph will be registered as.
NAME = 'name'
# Display name (used in the UI)
DISPLAY_NAME = 'display_name'
# Description of the plugin (used in the UI)
DESCRIPTION = 'description'
# Type of graph. There are four supported types: Undirected Graph,
# Undirected Multi Graph, Directed Graph, Directed Multi Graph.
# If you have multiple edges between nodes you need to use the multi graphs.
#
# See NetworkX documentation for details:
# https://networkx.org/documentation/stable/reference/classes/index.html
GRAPH_TYPE = 'MultiDiGraph'
def __init__(self, sketch=None):
"""Initialize the graph object.
Args:
sketch (Sketch): Sketch object.
Raises:
KeyError if graph type specified is not supported.
"""
self.datastore = ElasticsearchDataStore(
host=current_app.config['ELASTIC_HOST'],
port=current_app.config['ELASTIC_PORT'])
if not GRAPH_TYPES.get(self.GRAPH_TYPE):
raise KeyError(f'Graph type {self.GRAPH_TYPE} is not supported')
self.graph = Graph(self.GRAPH_TYPE)
self.sketch = sketch
def _get_all_sketch_indices(self):
"""List all indices in the Sketch.
Returns:
List of index names.
"""
active_timelines = self.sketch.active_timelines
indices = [t.searchindex.index_name for t in active_timelines]
return indices
# TODO: Refactor this to reuse across analyzers and graphs.
def event_stream(
self, query_string=None, query_filter=None, query_dsl=None,
indices=None, return_fields=None, scroll=True):
"""Search ElasticSearch.
Args:
query_string: Query string.
query_filter: Dictionary containing filters to apply.
query_dsl: Dictionary containing Elasticsearch DSL query.
indices: List of indices to query.
return_fields: List of fields to return.
scroll: Boolean determining whether we support scrolling searches
or not. Defaults to True.
Returns:
Generator of Event objects.
Raises:
            ValueError: if neither query_string nor query_dsl is provided.
"""
if not (query_string or query_dsl):
raise ValueError('Both query_string and query_dsl are missing')
# Query all sketch indices if none are specified.
if not indices:
indices = self._get_all_sketch_indices()
if not query_filter:
query_filter = {}
        if return_fields:
            # De-duplicate the requested fields; a None value is passed
            # through unchanged so it would raise no TypeError here.
            return_fields = list(set(return_fields))
event_generator = self.datastore.search_stream(
query_string=query_string,
query_filter=query_filter,
query_dsl=query_dsl,
indices=indices,
return_fields=return_fields,
enable_scroll=scroll,
)
return event_generator
def generate(self):
"""Entry point for the graph."""
raise NotImplementedError
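# Example subclass (a minimal sketch; the name, query string, and field names
# are illustrative assumptions, and events are assumed to be raw Elasticsearch
# hits with a '_source' payload):
#
#   class WinLoginGraph(BaseGraphPlugin):
#       NAME = 'win_login'
#       DISPLAY_NAME = 'Windows logins'
#       DESCRIPTION = 'Graph which users logged in to which machines.'
#
#       def generate(self):
#           events = self.event_stream(
#               query_string='event_identifier:4624',
#               return_fields=['username', 'computer_name'])
#           for event in events:
#               source = self.graph.add_node(
#                   event['_source'].get('username'), {'type': 'user'})
#               target = self.graph.add_node(
#                   event['_source'].get('computer_name'), {'type': 'computer'})
#               self.graph.add_edge(source, target, 'logged_in', event)
#           self.graph.commit()
#           return self.graph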
|
Comparative analysis using WP and TOPSIS method to find the best mountain for hiking
Mountain activities, or outdoor activities located in the mountains, are a popular form of ecotourism. One of the most common types of mountaineering is mountain hiking, since mountains are scattered across almost the entire Indonesian archipelago. Beginner hikers are advised to hike in groups, and to choose mountains that match their experience and ability in order to anticipate the subjective risks. This study presents the design of 'Go-Hiking', a decision support system for determining the best mountain for hiking, implemented on the web using the Weighted Product (WP) method and on Android using the TOPSIS method. WP and TOPSIS are examples of methods used to solve MADM problems. Multi Attribute Decision Making (MADM) is an approach for finding the most optimal alternative from a number of alternatives against certain criteria. The end result of applying the WP and TOPSIS methods is a ranked list of recommended mountains, intended only as a consideration in selecting a mountain.
Introduction
Mountaineering, or outdoor activity located in the mountains, is a popular form of ecotourism. One of the most common types of mountaineering is mountain climbing (hiking), because mountains are scattered across almost the entire Indonesian archipelago. According to statistical data from Gunung Gede Pangrango National Park (TNGGP), the number of visitors was 139,767 in 2013 and 162,184 in 2016 (Ministry of Forestry, Statistics 2016: 88). The number of mountaineers keeps increasing, and hiking has become very popular.
For novice climbers, the proper selection of mountains to climb should match the experience and ability of the climbers themselves, in order to anticipate the subjective risks. Climbers need to prepare physically and mentally, and also need hiking education, such as understanding the difficulty of the terrain, safety, and distance. Mountain climbing can generally be done solo, but this is not recommended for novice climbers because of the difficulty level and the high need for teamwork (Lee, 2006). It is therefore recommended to climb in groups. Group hiking also allows climbers to share accommodation and logistics, and to expand their network of climbing friends. Climbers who are connected to mountain climbing organizations will find climbing partners more easily than those who are not. The problem, then, is to match a climber's experience to the right mountain and to encourage climbing in groups. An information system is therefore needed that can help climbers determine the best mountain based on experience, altitude, mileage, travel time, and security, and that allows climbers to find climbing partners.
In this study, the TOPSIS and WP methods are compared to find which is better for a system that recommends the best mountains that beginners can climb in West Java. Based on the recommendation of APGI (Indonesian Mountain Guides Association), experience, sports, and disease history are used as the criteria. In the comparative analysis, the author compares the results of manual and system calculations; the system's results are then compared with the results from APGI and the percentage of success is calculated. TOPSIS has been applied in earlier work: one study used four criteria (style, life span, fuel economy, and cost), and in 2016 Assamaoi et al. used TOPSIS to decide which country to choose when expanding the market for a product, with five African countries as the alternative solutions and data covering 2000 to 2013. There are seven steps in the TOPSIS method:
The Present Methodology
Step 1: Construct the Decision Matrix (DM):

$$DM = \begin{bmatrix} x_{11} & x_{12} & \cdots & x_{1m} \\ x_{21} & x_{22} & \cdots & x_{2m} \\ \vdots & \vdots & \ddots & \vdots \\ x_{n1} & x_{n2} & \cdots & x_{nm} \end{bmatrix} \quad (1)$$

where $C$ refers to the criteria, with index $i = 1 \ldots n$ over the number of criteria. Four criteria are used in this paper: altitude, distance, security, and time. $L$ refers to the alternative solutions, with index $j = 1 \ldots m$ over the number of alternatives, so $x_{ij}$ is the rating of alternative $L_j$ with respect to criterion $C_i$.
Step 2: Find the Normalized Decision Matrix (NDM), which represents the relative performance of the generated design alternatives:

$$r_{ij} = \frac{x_{ij}}{\sqrt{\sum_{j=1}^{m} x_{ij}^{2}}} \quad (2)$$
Step 3: Calculate the weighted normalized decision matrix, obtained by multiplying each element of each column of the normalized decision matrix by the corresponding criterion weight:

$$v_{ij} = w_i \, r_{ij} \quad (3)$$

Step 4: Determine the ideal solution $A^{+}$ and the non-ideal solution $A^{-}$:

$$A^{+} = \{ (\max_j v_{ij} \mid i \in J), \; (\min_j v_{ij} \mid i \in J') \}$$
$$A^{-} = \{ (\min_j v_{ij} \mid i \in J), \; (\max_j v_{ij} \mid i \in J') \}$$

where $J$ refers to the beneficial attributes and $J'$ refers to the non-beneficial attributes.

Step 5: Calculate the separation distance of each competitive alternative from the ideal and non-ideal solutions:

$$S_j^{+} = \sqrt{\sum_{i=1}^{n} (v_{ij} - v_i^{+})^{2}}, \qquad S_j^{-} = \sqrt{\sum_{i=1}^{n} (v_{ij} - v_i^{-})^{2}}$$

where $i$ = criterion index and $j$ = alternative index.
Step 6: Measure the relative closeness of each alternative to the ideal solution. For each competitive alternative, the relative closeness to the ideal solution is computed as:

$$C_j = \frac{S_j^{-}}{S_j^{+} + S_j^{-}}, \qquad 0 \le C_j \le 1$$
Step 7: As the last step, order all candidate alternative solutions by their value of $C_j$. The first rank goes to the highest value, which is the alternative with the closest relative distance to the ideal solution.
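To make the seven steps concrete, a minimal Python sketch of the TOPSIS procedure follows. This is not the Go-Hiking implementation; the example ratings, the weights, and the benefit/cost designation of each criterion are illustrative assumptions only.

import numpy as np

def topsis(matrix, weights, benefit):
    """Rank alternatives with TOPSIS.

    Args:
        matrix: (m, n) array, rows = alternatives, columns = criteria.
        weights: length-n array of criterion weights (summing to 1).
        benefit: length-n boolean array, True for beneficial criteria.

    Returns:
        Relative closeness C for each alternative (higher is better).
    """
    matrix = np.asarray(matrix, dtype=float)
    # Step 2: vector-normalize each criterion column.
    norm = matrix / np.sqrt((matrix ** 2).sum(axis=0))
    # Step 3: apply the criterion weights.
    weighted = norm * np.asarray(weights, dtype=float)
    # Step 4: ideal and non-ideal solutions per criterion.
    ideal = np.where(benefit, weighted.max(axis=0), weighted.min(axis=0))
    non_ideal = np.where(benefit, weighted.min(axis=0), weighted.max(axis=0))
    # Step 5: Euclidean separation from the ideal and non-ideal solutions.
    s_plus = np.sqrt(((weighted - ideal) ** 2).sum(axis=1))
    s_minus = np.sqrt(((weighted - non_ideal) ** 2).sum(axis=1))
    # Step 6: relative closeness to the ideal solution.
    return s_minus / (s_plus + s_minus)

# Illustrative data: 3 mountains rated on altitude, distance, security, time.
# Distance and time are treated here as cost criteria (lower is better).
scores = [[2958, 10.0, 4, 8.0],
          [3078, 12.5, 3, 10.0],
          [2211, 7.5, 5, 6.0]]
c = topsis(scores, weights=[0.3, 0.2, 0.3, 0.2],
           benefit=[True, False, True, False])
print(c.argsort()[::-1])  # Step 7: indices of alternatives, best first.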
Weighted Product
The Weighted Product (WP) method is another scoring method, in which the weighted product of the criterion values is used to select the best alternative. WP has been applied in several decision support studies, such as food choice for people with special health conditions, and a decision system to determine recipients of government aid (PNPM). There are five steps in the WP procedure:

Step 1: Build the decision matrix from all alternative solutions and their criteria.

Step 2: Normalize the criterion weights so that they sum to one:

$$W_j = \frac{w_j}{\sum_{j} w_j} \quad (9)$$

Step 3: Construct the weighted normalized decision matrix.

Step 4: Calculate the score of each alternative as the weighted product of its criterion values, with positive exponents for benefit criteria:

$$S_i = \prod_{j=1}^{n} x_{ij}^{W_j} \quad (10)$$

Step 5: Select the best alternative, i.e. the one with the highest score.
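For comparison, a minimal Python sketch of the WP scoring follows, using the same illustrative data as the TOPSIS example. Handling cost criteria with negative exponents is a common WP convention and is an assumption here, not something specified by the paper.

import numpy as np

def weighted_product(matrix, weights, benefit):
    """Score alternatives with the Weighted Product method.

    Args:
        matrix: (m, n) array, rows = alternatives, columns = criteria.
        weights: length-n array of raw criterion weights.
        benefit: length-n boolean array, True for beneficial criteria.

    Returns:
        Relative preference V for each alternative (higher is better).
    """
    matrix = np.asarray(matrix, dtype=float)
    w = np.asarray(weights, dtype=float)
    w = w / w.sum()                      # Step 2: normalize the weights.
    w = np.where(benefit, w, -w)         # Negative exponent for cost criteria.
    s = np.prod(matrix ** w, axis=1)     # Step 4: weighted product per row.
    return s / s.sum()                   # Relative preference vector.

scores = [[2958, 10.0, 4, 8.0],
          [3078, 12.5, 3, 10.0],
          [2211, 7.5, 5, 6.0]]
v = weighted_product(scores, weights=[3, 2, 3, 2],
                     benefit=[True, False, True, False])
print(v.argmax())  # Step 5: index of the best alternative.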
Analysis and Results
The following sections describe how the recommendation system works in the Go-Hiking application.
Calculate with WP Method
This research implements the Weighted Product algorithm in the recommended mountain app for hiking. The mountain criteria and mountain alternatives can be seen in Table 1.
Calculate with TOPSIS Method
This research implements the TOPSIS algorithm in the recommended mountain app for hiking. The mountain criteria and mountain alternatives can be seen in Figure 1. |
/**
* Handles event distribution and listeners.
* @author Daniel
*/
public class EventSystem {
private final Map<Class<? extends Event>, List<EventListener>> listeners = new HashMap<>();
private final Queue<Event> events = new ArrayDeque<>();
/**
* Registers an event, adds it to the queue of events yet to process.
* @param event The event to register
*/
public void register(Event event) {
events.add(event);
}
/**
* Handles registered events.
*/
public void update() {
Event e;
while ((e = events.poll()) != null) {
handleEvent(e);
}
}
private void handleEvent(Event event) {
List<EventListener> eventListeners = listeners.get(event.getClass());
if (eventListeners != null) {
for (EventListener listener : eventListeners) {
listener.process(event);
}
}
if (!(event instanceof AfterEvent)) {
register(new AfterEvent(event.getClass()));
}
}
/**
* Removes the registered events from the queue.
*/
public void clear() {
events.clear();
}
/**
* Adds an event listener to an event.
* @param clazz The event
* @param listener The event listener
*/
public void addListener(Class<? extends Event> clazz, EventListener listener) {
        // Create the listener list on first registration for this event type.
        listeners.computeIfAbsent(clazz, k -> new ArrayList<>()).add(listener);
}
/**
* Removes an event listener.
* @param clazz The class of the event that the listener no longer will listen to
* @param listener The event listener
*/
public void removeListener(Class<? extends Event> clazz, EventListener listener) {
List<EventListener> list = listeners.get(clazz);
        if (list != null) {
            list.remove(listener);
            // Drop the mapping once no listeners remain for this event type.
            if (list.isEmpty()) {
                this.listeners.remove(clazz);
            }
        }
}
/**
* Add a list of event listeners to listen for an event.
* @param clazz The class of the event
* @param listeners The list of listeners
*/
public void addListeners(Class<? extends Event> clazz, List<EventListener> listeners) {
        // Copy into our own list so later external mutation of the caller's
        // list cannot affect the registered listeners.
        this.listeners.computeIfAbsent(clazz, k -> new ArrayList<>()).addAll(listeners);
}
/**
* Gets the number of events that are waiting to get processed.
* @return The event count
*/
public int getEventsPendingCount() {
return events.size();
}
/**
* Gets the number of event listeners for a certain event.
* @param clazz The event
* @return The event listener count
*/
public int getEventListenerCount(Class<? extends Event> clazz) {
        List<EventListener> list = listeners.get(clazz);
        return list != null ? list.size() : 0;
}
} |
// packages/amnis-display/src/Skeleton/index.ts
export * from './Skeleton';
export * from './Skeleton.types';
export * from './Skeleton.schema';
|
/**
 * returns true if the If-Range spec matches the reply, false otherwise
 */
static int
clientIfRangeMatch(ClientHttpRequest * http, HttpReply * rep)
{
const TimeOrTag spec = http->request->header.getTimeOrTag(HDR_IF_RANGE);
if (!spec.valid)
return 0;
if (spec.tag.str) {
ETag rep_tag = rep->header.getETag(HDR_ETAG);
debugs(33, 3, "clientIfRangeMatch: ETags: " << spec.tag.str << " and " <<
(rep_tag.str ? rep_tag.str : "<none>"));
if (!rep_tag.str)
return 0;
if (spec.tag.weak || rep_tag.weak) {
debugs(33, DBG_IMPORTANT, "clientIfRangeMatch: Weak ETags are not allowed in If-Range: " << spec.tag.str << " ? " << rep_tag.str);
return 0;
}
return etagIsStrongEqual(rep_tag, spec.tag);
}
if (spec.time >= 0) {
return http->storeEntry()->lastmod <= spec.time;
}
assert(0);
return 0;
} |