content
stringlengths 10
4.9M
|
---|
def plotVectorSectionsOctree(
    mesh, m, normal='X', ind=0, vmin=None, vmax=None,
    subFact=2, scale=1., xlim=None, ylim=None, vec='k',
    title=None, axs=None, actvMap=None, contours=None, fill=True,
    orientation='vertical', cmap='pink_r'
):
    """Plot a 2-D slice through a vector model defined on a 3-D octree mesh.

    A 2-D TreeMesh mirroring the octree refinement of the slice is built,
    the three model components are expanded onto the full 3-D mesh through
    ``actvMap``, sampled at the slice, and drawn as an amplitude image
    (optional) with the two in-plane components overlaid as arrows.

    :param mesh: 3-D TreeMesh the model lives on.
    :param m: array of vector-model components, one column per axis (x, y, z),
        sized to match ``actvMap``.
    :param normal: axis normal to the slice, one of 'X', 'Y', 'Z'.
    :param ind: slice index along ``normal``; middle slice when None.
    :param vmin: lower color limit forwarded to ``plotImage``.
    :param vmax: upper color limit forwarded to ``plotImage``.
    :param scale: quiver arrow scale (``scale_units="inches"``).
    :param vec: quiver arrow edge color.
    :param axs: target matplotlib axes; a new ``subplot(111)`` when None.
    :param actvMap: map expanding active-cell values onto the whole mesh.
    :param fill: when True, draw the amplitude image behind the arrows.
    :return: None; draws into ``axs``.

    NOTE(review): ``subFact``, ``xlim``, ``ylim``, ``title``, ``contours``,
    ``orientation`` and ``cmap`` are accepted but never used in the body --
    confirm whether they should be wired up or removed.
    """
    # Axis index normal to the slice, and the two in-plane axis indices.
    normalInd = {'X': 0, 'Y': 1, 'Z': 2}[normal]
    antiNormalInd = {'X': [1, 2], 'Y': [0, 2], 'Z': [0, 1]}[normal]
    # Cell widths and origin of the two in-plane axes define the 2-D geometry.
    h2d = (mesh.h[antiNormalInd[0]], mesh.h[antiNormalInd[1]])
    x2d = (mesh.x0[antiNormalInd[0]], mesh.x0[antiNormalInd[1]])
    # Number of base cells along the normal axis (used for the default slice).
    szSliceDim = len(mesh.h[normalInd])
    if ind is None:
        ind = int(szSliceDim//2)
    # Cell-center coordinates along each axis: cumulative node positions,
    # then pairwise midpoints.
    cc_tensor = [None, None, None]
    for i in range(3):
        cc_tensor[i] = np.cumsum(np.r_[mesh.x0[i], mesh.h[i]])
        cc_tensor[i] = (cc_tensor[i][1:] + cc_tensor[i][:-1])*0.5
    # Physical coordinate of the slice along the normal axis.
    slice_loc = cc_tensor[normalInd][ind]
    # 2-D tree mesh that will replicate the octree structure of the slice.
    temp_mesh = Mesh.TreeMesh(h2d, x2d)
    # Octree levels lost going from the 3-D mesh to the 2-D mesh.
    level_diff = mesh.max_level - temp_mesh.max_level
    # Full grid of sample points lying exactly on the slice plane.
    XS = [None, None, None]
    XS[antiNormalInd[0]], XS[antiNormalInd[1]] = np.meshgrid(
        cc_tensor[antiNormalInd[0]], cc_tensor[antiNormalInd[1]]
    )
    XS[normalInd] = np.ones_like(XS[antiNormalInd[0]])*slice_loc
    loc_grid = np.c_[XS[0].reshape(-1), XS[1].reshape(-1), XS[2].reshape(-1)]
    # Unique 3-D cells intersected by the slice ...
    inds = np.unique(mesh._get_containing_cell_indexes(loc_grid))
    grid2d = mesh.gridCC[inds][:, antiNormalInd]
    levels = mesh._cell_levels_by_indexes(inds) - level_diff
    # ... whose refinement levels are replicated in the 2-D mesh.
    temp_mesh.insert_cells(grid2d, levels)
    # 2-D cell centers lifted back into 3-D at the slice location.
    tm_gridboost = np.empty((temp_mesh.nC, 3))
    tm_gridboost[:, antiNormalInd] = temp_mesh.gridCC
    tm_gridboost[:, normalInd] = slice_loc
    # Expand each active-cell component onto the full mesh.
    mx = (actvMap*m[:, 0])
    my = (actvMap*m[:, 1])
    mz = (actvMap*m[:, 2])
    m = np.c_[mx, my, mz]
    # Sample the 3-D vectors at the boosted 2-D cell centers.
    ind_3d_to_2d = mesh._get_containing_cell_indexes(tm_gridboost)
    v2d = m[ind_3d_to_2d, :]
    # Vector amplitude used for the background image.
    amp = np.sum(v2d**2., axis=1)**0.5
    if axs is None:
        axs = plt.subplot(111)
    if fill:
        temp_mesh.plotImage(amp, ax=axs, clim=[vmin, vmax], grid=True)
    # Overlay the two in-plane vector components as arrows.
    axs.quiver(temp_mesh.gridCC[:, 0],
               temp_mesh.gridCC[:, 1],
               v2d[:, antiNormalInd[0]],
               v2d[:, antiNormalInd[1]],
               pivot='mid',
               scale_units="inches", scale=scale, linewidths=(1,),
               edgecolors=(vec),
               headaxislength=0.1, headwidth=10, headlength=30) |
CHARACTERISTICS OF FREE RADICAL LIPID PEROXIDATION AND ITS CONNECTION WITH IRON EXCHANGE IN EXPERIMENTAL HEPATITIS THERAPY
The purpose of the work is to study the processes of free radical lipid peroxidation, antioxidant system activity and their connection with the iron metabolism in white rats with experimental hepatitis, and after the use of hepatoprotectors. Materials and Methods. Studies were conducted on white Wistar rats: body weight – 180–200 g, age – 3 months. “Berlition” and “Legalon M” were administered intramuscularly, 0.2 ml per animal once a day (30 days). The authors registered the content of malonic dialdehyde (MDA), diene conjugates, catalase activity, serum iron level, total and unsaturated iron binding capacity. Results. In experimental animals, the level of diene conjugates increased 1.8 times if compared with healthy ones. After the administration of “Legalon M” and “Berlition”, the concentration of diene conjugates decreased by 58.6 % (by half) compared with the experimental animals. It was established that the malondialdehyde content significantly increased in all tissues. After the administration of “Legalon M” and “Berlition”, the malondialdehyde content decreased in the liver, lungs and muscle tissues. In the tissues of the intestine and in the blood serum, MDA level resolved to that of healthy animals. In experimental animals, catalase activity increased significantly if compared with healthy ones. After “Legalon M” and “Berlition” administration there was a decrease in enzyme activity in all body tissues. In animals with experimental hepatitis, there was a failure of iron metabolism in the body. The authors observed activation of iron metabolism in the body after drug administration to rats with experimental hepatitis. Conclusions. “Legalon M” and “Berlition” have an antioxidant effect and stimulate iron metabolism. Keywords: hepatitis, antioxidants, free radicals, malonic dialdehyde, catalase, diene conjugates, iron metabolism, transferrin, lipids. |
// DecodeHistory decodes the records in a page of data and
// returns them in reverse chronological order (most recent first),
// to match the order of the history pages themselves.
func DecodeHistory(data []byte, family Family) (History, error) {
var results History
var r HistoryRecord
var err error
for !allZero(data) {
r, err = DecodeHistoryRecord(data, family)
if err != nil {
break
}
results = append(results, r)
data = data[len(r.Data):]
}
ReverseHistory(results)
return results, err
} |
def remove_character(cls, character_id):
    """Remove all stored data for the given character.

    Delegates to the backing storage, dropping the whole category
    keyed by ``character_id``.
    """
    cls.storage.delete_category(character_id)
import React from 'react';
import Box from '@material-ui/core/Box';
import Typography from '@material-ui/core/Typography';
import Button from '@material-ui/core/Button';
import { Link, LinkProps } from 'react-router-dom';
// Ref-forwarding adapter so Material-UI's <Button component={...}> can pass
// its ref through to react-router's <Link>.
// NOTE(review): `innerRef` is the pre-react-router-5.1 API -- confirm the
// installed version before replacing it with a plain `ref`.
const AdapterLink = React.forwardRef<HTMLAnchorElement, LinkProps>(
  (props, ref) => <Link innerRef={ref} {...props} />
);
// Props for the VideoCard display component.
// Renamed from `VideoCard` so the type no longer shares its identifier with
// the component constant below.
interface VideoCardProps {
  title: string;
  subtitle: string;
  src: string;
  content: string;
}

// Card showing an embedded video (iframe) with a heading, subtitle and
// description text.
const VideoCard = ({ title, subtitle, src, content }: VideoCardProps) => (
  <Box my={4}>
    <Box mx={1}>
      <Typography variant="h6">{title}</Typography>
      <Typography variant="subtitle2">{subtitle}</Typography>
    </Box>
    <iframe title={title} src={src}></iframe>
    <Box mx={1}>
      <Typography variant="body2">{content}</Typography>
    </Box>
  </Box>
);
// Landing page: intro copy, two comparison entry points, and two explainer
// video cards.
const Home = () => (
  <div>
    <Box m={1} height="100vh">
      <Typography variant="h6">2020 投票指南</Typography>
      <Typography variant="body2">
        減少盲目支持,從更多的理解和認識,選擇你真正認同的候選人。
        <br />
        整合政府開放資料,提供民眾對照,快速比較出你支持的政黨、候選人。
      </Typography>
      {/* entry points; the first button has no route wired up yet */}
      <Box display="flex" justifyContent="center">
        <Button color="primary" variant="contained">
          <Typography variant="button">比較政黨</Typography>
        </Button>
        <Button
          color="primary"
          variant="contained"
          component={AdapterLink}
          to="/county"
        >
          <Typography variant="button">比較區域立委</Typography>
        </Button>
      </Box>
    </Box>
    {/* explainer videos */}
    <VideoCard
      title="立委工作說明"
      subtitle="The News Lens 關鍵評論網 - 立委的一天在幹嘛?"
      src="https://www.youtube.com/embed/lOCqaZ5Pb_w"
      content="身為不分區立委的助理A解釋道,「一般來說,在會期裡一週的一三四是委員會;二五則是院會。其中一三四的委員會,早上會是最緊繃的,有關質詢重點、議題都得會花很多時間找資料、準備,或是配合黨團開記者會等。而二五院會,從早上到下午2~3點之前都在立法院,結束後才會回地方跑選區。」
      「在會期中,委員2/3的時間、精神、功夫都在立法院,也非常注重問政品質、和自己上台質詢的表現。」但是真的每個立委都是這樣嗎?"
    />
    <VideoCard
      title="立委立委選舉投票規則工作說明"
      subtitle="The News Lens 關鍵評論網 - 什麼是政黨票?"
      src="https://www.youtube.com/embed/zPkX6cn4oMg"
      content="身為不分區立委的助理A解釋道,「一般來說,在會期裡一週的一三四是委員會;二五則是院會。其中一三四的委員會,早上會是最緊繃的,有關質詢重點、議題都得會花很多時間找資料、準備,或是配合黨團開記者會等。而二五院會,從早上到下午2~3點之前都在立法院,結束後才會回地方跑選區。」
      「在會期中,委員2/3的時間、精神、功夫都在立法院,也非常注重問政品質、和自己上台質詢的表現。」但是真的每個立委都是這樣嗎?"
    />
  </div>
);
|
/**
 * Configuration that is read from config.json
 *
 * @author Fritz Windisch
 */
public class Configuration {
    /** Directory used as the crawler's workspace. */
    String workspaceDir;
    /** Interval between crawl runs. NOTE(review): unit not shown here -- confirm (ms?). */
    long crawlInterval;
    /**
     * Creates a configuration holder.
     *
     * @param workspaceDir directory used as the crawler's workspace
     * @param crawlInterval interval between crawl runs
     */
    public Configuration(String workspaceDir, long crawlInterval) {
        this.workspaceDir = workspaceDir;
        this.crawlInterval = crawlInterval;
    }
    /** @return directory used as the crawler's workspace */
    public String getWorkspaceDir() {
        return workspaceDir;
    }
    /** @param workspaceDir directory used as the crawler's workspace */
    public void setWorkspaceDir(String workspaceDir) {
        this.workspaceDir = workspaceDir;
    }
    /** @return interval between crawl runs */
    public long getCrawlInterval() {
        return crawlInterval;
    }
    /** @param crawlInterval interval between crawl runs */
    public void setCrawlInterval(long crawlInterval) {
        this.crawlInterval = crawlInterval;
    }
}
class ExtraTaskAttacher:
    '''Decorator factory to insert extra tasks before and after method calls.'''

    # Methods on subclasses named 'wrap_<name>' wrap '<name>' on the target class.
    wrapper_prefix = 'wrap_'
    len_wrapper_prefix = len(wrapper_prefix)

    def __call__(self, wrapped_class):
        '''Wrap methods in wrapped_class with corresponding wrapper in self.'''
        for wrapper_name in dir(self):
            if not wrapper_name.startswith(self.wrapper_prefix):
                continue
            # 'wrap_foo' on the handler wraps 'foo' on the target class.
            target_name = wrapper_name[self.len_wrapper_prefix:]
            try:
                target = getattr(wrapped_class, target_name)
            except AttributeError:
                raise AssertionError(
                    'Tried to wrap `{form}.{wrapped}` because the handler `{handler}` ' \
                    'has `{wrapper}`, but `{form}.{wrapped}` was not found.'.format(
                        form=wrapped_class.__qualname__,
                        handler=self.__class__.__qualname__,
                        wrapped=target_name,
                        wrapper=wrapper_name,
                    )
                )
            wrapper = getattr(self, wrapper_name)
            setattr(wrapped_class, target_name, _wrap(target, wrapper))
        return wrapped_class
Europe cannot afford to let New Ukraine die, and the élan of a people fighting to join Europe should be an inspiration to Europe’s old guard to build a New Europe too.
Ukraine was already falling off the West’s radar last summer. In east Ukraine, major military offensives were prepared and trip-wires almost crossed; but there was no major outbreak of fighting comparable to the bloodshed of 2014. Something resembling a ceasefire was in place from September, albeit one with frequent violations and casualties. And while it may be in danger of collapsing once again, the migration crisis has increasingly preoccupied European leaders, even before the Paris atrocities in November led to a radical shift in focus.
But even if the West now has other priorities, Ukraine is still part of the bigger picture. It cannot be pushed into the background or left on the back-burner. Here are three reasons why.
Trading with Russia over Islamic State won’t work
The temptation, especially for France, to flirt with Putin over Syria will only work in the shortest of short-terms. Putin is Yin to Hollande’s Yang. Putin was able to provide the tough talk and illusion of rapid response that Western leaders might crave, but which their systems and political cultures constrain them from delivering. Even in terms of simple vocabulary, Hollande talked of “war”, but not of “revenge”; while Putin had no such compunctions. Putin could provide the instant bombing that is slow to arrange in the West – the TV version, at least – and plugged the gap between immediate symbolic sorties and the UK joining in by December.
But several Western politicians, especially in France, have rushed further ahead to endorse the idea of Russia as a global partner, putting to one side its actions in Ukraine. In October Nicolas Sarkozy declared in Moscow that "the world needs Russia" and reversed Barack Obama’s famous put-down that “Russia is [only] a regional power” to say "the destiny of Russia is to be a great global power and not just a regional power". Two days before the Paris attacks, Frank-Walter Steinmeier said that “We need Russia to be at the table of global political responsibility in order to be in a position to master the challenges facing us in other regions of the world. I say this with a view to Syria, the fight against international terrorism and the security architecture of the Middle East. We can only make progress here with, and not without, Russia.”
But there is nothing at play for Russia in Syria other than a short-term PR boost and a strengthened idea of Russia as a “great global player”. And in this it has partly succeeded. It has renewed its global ambitions and already succeeded in changing the story at home, even if only temporarily, away from its misadventure in Ukraine. But on the ground, analysis shows Russia has mainly been bombing Assad’s enemies, not Islamic State. Moreover, for all Putin’s talk of revenge, the pattern barely changed after the shooting down of Flight 9268. Even more significantly, Russia has not shifted its targeting decisively after the Paris atrocities on 13 November, apart from a few PR-driven sorties. Russia even has a short-term interest in keeping Islamic State going in Syria – it puts additional pressure on the anti-Assad opposition and diverts militants from the North Caucasus.
Russia is therefore not likely to join any anti-IS coalition in any meaningful way. Its priority is still to assist the Assad regime and its Iranian allies in stabilising their rule in the west and the north. And Russia is seeking to limit American influence as much as expand its own. It is enjoying showing off the fruits of its post-2008 military reform in Syria. But is also showing its limits. According to ECFR expert Gustav Gressel; ‘the Syrian deployment does not draw on the core strengths of the armed forces, or on Moscow’s military vision’ and Russian public opinion is wary of any involvement on the ground.
And finally, Russia does not have a very nuanced view of diplomatic trading. It wants to buy a free pass in Ukraine and shift the West towards its position on Assad’s survival. It is not prepared to sacrifice the latter to get the former, or vice-versa. Russia will only really cooperate on Syria, if we accept Moscow's vision and policy for the region, which doesn’t require any horse-trading. The talk about needing Russia for Syria has been a convenient excuse, an opportunity to blame Russia for the lack of workable western policy. Had we had the latter in place, Russia could have done very little to obstruct it.
Consigning Ukraine to a Russian ‘sphere of influence’ won’t work
And if the trade in mind is to forget about Ukraine, then that won’t work either.
Significant parts of the European left have bought the myth that the US-led expansion of NATO was the key cause of the crisis over Ukraine. Significant parts of the European right have bought the myth that a "militarist and expansionist" EU was the key cause, over-extending its flawed project to weak east European states without the capacity or desire to adopt the acquis, when the EU had neither the capacity nor the desire to protect them from Russia’s reaction. Logically but absurdly, both extremes therefore assume that the situation will be more stable without the Western interference that caused the problem in the first place, and would either de facto confine Ukraine to a Russian sphere of influence, or tell Ukraine that, even if it is nominally independent, it is forced to live with Putin.
So the West must change itself: it does not have the responsibility, the capacity or the desire to force Putin to change. Putin is not being told to live with Ukraine. But a Russian ‘sphere of influence’ is not a recipe for peace or stability. Too many Ukrainians would resist it; both for patriotic reasons and for the renewed corruption and loss of European perspective it would bring. And Russia wouldn’t understand why it was resisted and resented; because it doesn’t understand the animus of a new nation-state in the making, which has been the underlying force of the revolution in Ukraine since 2013. The Kremlin would assume that any acts of rebellion were being covertly supported by the West. So we would be back to confrontation. The problem could not be quietly consigned to a remoter part of Europe.
Weak states don’t just bleed away quietly in a corner. Granting Russia what it wants in Ukraine and elsewhere in the eastern neighbourhood would amount to acquiescing in the campaign of what Russia openly calls ‘de-sovereignisation’. The West is partly complicit for not taking the full sovereignty of states like Ukraine seriously enough; but Russia is not complaining about weak or even failing states on its borders – it is actively creating them. Pushed to its logical conclusion, full ‘de-sovereignisation’ would hollow out the OSCE framework (after the 22nd OSCE Ministerial meeting in Belgrade on 4 December ended acrimoniously), WTO and even Bretton Woods; from which everybody in the West would suffer. And if Russia is able to interpret its intervention as a ‘success’, then it will resort to the same means again elsewhere.
Disorder in Ukraine was not a problem when Putin intervened to “protect” local Russians in 2014. But it is more likely to become so in an isolated or Russia-dominated Ukraine. There have been many premature forecasts that Ukraine was developing a toxic mix of nationalists, militias, and oligarchs who sponsored militias to protect their own interests. And such forecasts may remain premature – Ukrainians still have an admirable sense of self-restraint embodied in the popular phrase “You can get rid of [sitting President] Poroshenko, but your next President would be Putin”. But add in the elements of a weak state and desperate opportunistic politicians who have tried to win popularity by jumping on the bandwagon of the recent Crimean blockade ex post facto, and you have a dangerous mixture, and potential gift to Russian propaganda. The major factor currently restraining potential disorder is the links these same politicians and oligarchs to the West.
A good example would be the Crimean Tatars, who previously had the longest history of non-violent protest in the former Soviet Union, dating back to the 1960s. Despite twenty-three years of frustration under an independent Ukraine, when the authorities in Kyiv never backed their cause as they should have done, constant predictions of radicalisation and violent protest were proven wrong. But now the Crimean Tatars are at the forefront of the blockade of Crimea. Whether their activists blew up the electricity supply or not, they certainly prevented the authorities from restoring it quickly. Again, the best solution to the Crimean Tatar problem is its internationalisation.
It’s too early for ‘Ukraine fatigue 2.0’
The first iteration of the term ‘Ukraine fatigue’ was caused by the disappointment after the ‘Orange Revolution’ in 2004. By 2008-10, European and American leaders were personally weary of the broken promises and machinations of Ukrainian politicians, and the Ukrainian electorate was sufficiently disillusioned to actually elect Yanukovych.
But the echoes of ‘Ukraine fatigue’ are being heard once again. Patience and attention spans are shorter in the West; but complacency on the domestic front in Ukraine is not helping either. Reforms are progressing painfully slowly. Accusations of corruption, increasingly coming from former Georgian President Mikheil Saakashvili, now the governor of Odesa, include many in the current government. For some, Ukraine is in danger of becoming a failed state.
But the internal dynamic this time is totally different. During the Orange Revolution in 2004 the protests aimed to ensure the right person was elected. Viktor Yushchenko duly took office and the protestors went home, placing their faith in a small number of leaders that proved fractious, incompetent and corrupt. The Euromaidan protests in 2013-14 were about much more than who ran the country. Civil society is much stronger and isn’t going to go away.
It may be a depressing reality that the Ukrainian system is capable of re-consolidating itself after a second attempted revolution. But the system is much weaker than it was, and it would still face opposition from a newly energetic, powerful and self-organised civic sector that knows that faith in leaders alone is an unaffordable luxury - you need to work at running the country, or the revolution will be stolen. Ukraine cannot stabilise as anything but a democracy – authoritarian stabilisation was tried, but resulted in revolution. The likely result of leaving Ukraine to Russia or to its own devices will only be to deepen dysfunctionality and conflict, creating a mess that we will still be forced to deal with. So it is better to be engaged from the beginning. Pro-reform forces are strong enough to keep opposing and weakening a non-reformist state. But they are not strong enough to succeed without international help.
Why you should care about Ukraine
Two years after Ukrainians took to the street to fight for their independence and European choice, Ukraine has proven remarkably resilient in the face of Russian aggression and massive reform challenges. But the country’s trajectory is far from certain. It could end fatally damaged externally by Russian aggression and internally if reform drivers remain in the wrong place. But Ukraine could also be a success story – and the EU has every interest in seeing this happen.
An open market of 45 million people would provide a tremendous boost to growth in the EU. The long-delayed Deep and Comprehensive Free Trade Agreement between the EU and Ukraine will finally come into force in January 2016. Russian arguments about the damage to Russo-Ukrainian trade can be dismissed, as Russia’s trade war against Ukraine has already reduced its share of Ukrainian exports from near 30 per cent to less than 10 per cent. If Ukraine gets its economic reforms right, Ukraine could take off as a low-cost manufacturing and IT hub for a European economy badly in need of new sources of dynamism. The boost to pan-European trade could rival that from the accession of central European and Baltic States economies in 2004.
A successful Ukraine would also bring much needed stability to the neighbourhood at a time when the EU is under pressure from a breakdown of order all around. Ukraine is a vital flank in controlling migration processes into Europe - both its own IDPs and migrants likely to be displaced north of Turkey if the EU-Turkey agreement holds. Cooperation over Ukraine ought to bring the EU and USA closer together.
The New Ukraine is struggling to be born; Old Ukraine is resisting and Russia is trying to strangle it at birth. Europe cannot afford to let New Ukraine die, and the élan of a people fighting to join Europe should be an inspiration to Europe’s old guard to build a New Europe too.
Read more on: Wider Europe,EaP,Ukraine |
import pydicom
from pydicom.dataset import Dataset
from pydicom.sequence import Sequence
import base64
import pandas as pd
import json
def df2dicom(df, outdir):
    """
    Fill up a directory with DICOM files rebuilt from a dataframe.

    @param df : dataframe containing the information needed to reconstruct
        the DICOMs (one row per file)
    @param outdir : output directory where the DICOMs will be generated
    """
    for nb_file in range(len(df)):
        ds = build_dicom(df, nb_file, parent_path='')
        # build_dicom returns None for rows marking an absent dataset;
        # skip those instead of crashing in save_as
        if ds is not None:
            ds.save_as(f"{outdir}/dicom_{nb_file}.dcm", write_like_original=False)
            # report success only after the file has actually been written
            # (the original printed before building anything)
            print(f"dicom n°{nb_file} has been rebuilt")
def get_ds_attr(df, parent_path, attr):
    """Return the sorted, distinct '@i.' markers found in the columns under parent_path+attr."""
    prefix = parent_path + attr
    # strip the common prefix from every matching column name
    suffixes = (col.replace(prefix, '') for col in df.columns if col.startswith(prefix))
    # keep only the '@i.' marker (first three characters), deduplicated
    markers = {suffix[:3] for suffix in suffixes}
    return sorted(markers)
def build_seq(df, index, parent_path, seq_attr):
    """
    Build and return a pydicom sequence plus a flag: 0 for a basic sequence,
    1 for an empty sequence that needs to be represented even if it is empty.
    """
    seq = Sequence()
    for ds_attr in get_ds_attr(df, parent_path, seq_attr):
        ds = build_dicom(df, index, parent_path+seq_attr+ds_attr)
        # `is not None` instead of `!= None`: pydicom Dataset overrides
        # equality, so identity is the reliable (and idiomatic) check here
        if ds is not None:
            seq.append(ds)
        else:
            # build_dicom signalled an intentionally empty dataset
            return [], 1
    return seq, 0
def getSeq_attr(attrs):
    """Return the unique sequence-attribute names, i.e. the part before the '@' marker."""
    unique_names = set()
    for attr in attrs:
        # everything before the first '@' (the whole string if no '@')
        name, _, _ = attr.partition('@')
        unique_names.add(name)
    return list(unique_names)
def getValue(df, index, parent_path, child_path):
    """Look up the value stored at row `index` of the column parent_path+child_path."""
    column = parent_path + child_path
    return df[column][index]
def getVR(column_name):
    """Return pydicom's Sequence type for 'SQ' columns, the empty string otherwise."""
    if 'SQ' in column_name:
        return pydicom.sequence.Sequence
    return ''
def add_file_meta(df, ds, meta_attrs, index, parent_path):
    """Creates and returns a dataset containing the meta-information of the dicom file"""
    ds.file_meta = Dataset()
    for attr in meta_attrs:
        # skip meta attributes that are absent (NaN) in this row
        if not pd.isna(getValue(df, index, parent_path, attr)):
            # column-name layout used here: <name>_<tag>_<VR>_<VM>
            attr_tag, attr_VR, attr_VM = attr.split('_')[1], attr.split('_')[2], attr.split('_')[3]
            attr_value = decode_unit(getValue(df, index, parent_path, attr), attr_VR, attr_VM)
            ds.file_meta.add_new(attr_tag, attr_VR, attr_value)
            #Fills 2 ds.properties needed in order to save the dicom file
            # (0x0002,0x0010) is the Transfer Syntax UID, which fixes
            # endianness and implicit/explicit VR for save_as
            if '0x00020010' in attr_tag:
                if '1.2.840.10008.1.2.1' in attr_value:
                    # Explicit VR Little Endian
                    ds.is_little_endian, ds.is_implicit_VR = True, False
                elif ('1.2.840.10008.1.2.2' in attr_value) or ('1.2.840.10008.1.2.99' in attr_value):
                    # NOTE(review): 1.2.840.10008.1.2.2 is Explicit VR Big
                    # Endian, but 1.2.840.10008.1.2.1.99 (deflated) is little
                    # endian in DICOM -- confirm grouping them as big endian
                    # is intended
                    ds.is_little_endian, ds.is_implicit_VR = False, False
                else:
                    # default: Implicit VR Little Endian
                    ds.is_little_endian, ds.is_implicit_VR = True, True
    return ds
def build_dicom(df, index, parent_path=''):
    """Build one DICOM dataset from row `index` of the dataframe.

    @param df : dataframe holding one column per flattened DICOM attribute
    @param index : row of the dataframe to rebuild
    @param parent_path : column-name prefix of the enclosing sequence ('' at
        the top level)
    @return a pydicom Dataset, or None when the row marks this dataset as
        absent (an 'empty' marker column with a non-NaN value)
    """
    # name of the column without the parent name
    child_attr = [col.replace(parent_path, '') for col in df.columns if col.startswith(parent_path)]
    # Partition child_attr into sequence and non-sequence attributes with a
    # plain loop (the original used a list comprehension for its side effects).
    seq_attrs, nonseq_attrs, meta_attrs = [], [], []
    for attr in child_attr:
        if getVR(attr) == pydicom.sequence.Sequence:
            seq_attrs.append(attr)
        else:
            nonseq_attrs.append(attr)
    ds = Dataset()
    #NON-SEQUENCE ATTRIBUTES
    for attr in nonseq_attrs:
        if not pd.isna(getValue(df, index, parent_path, attr)):
            if attr != 'empty':
                # column-name layout: <name>_<tag>_<VR>_<VM>
                attr_tag, attr_VR, attr_VM = attr.split('_')[1], attr.split('_')[2], attr.split('_')[3]
                if '0x0002' in attr_tag:
                    # group 0002 belongs to the file meta information, added below
                    meta_attrs.append(attr)
                else:
                    ds.add_new(attr_tag, attr_VR, decode_unit(getValue(df, index, parent_path, attr), attr_VR, attr_VM))
            else:
                # an 'empty' marker with a value means this dataset is absent
                return None
    #SEQUENCE ATTRIBUTES
    for attr in getSeq_attr(seq_attrs):
        #If the sequence is present in the DICOM, test_sequence would take a value != NaN
        # (keeps the LAST matching column, as the original did)
        # NOTE(review): if no column matches, test_sequence is unbound and the
        # next line raises NameError -- preserved from the original; confirm
        # this cannot happen in practice.
        for test in child_attr:
            if attr in test:
                test_sequence = test
        #If test_sequence == NaN, the sequence does not appear in this DICOM.
        if not pd.isna(getValue(df, index, parent_path, test_sequence)):
            seq, empty_but_present = build_seq(df, index, parent_path, attr)
            if not empty_but_present:
                #If the sequence is not empty then we add the sequence to the ds
                if seq:
                    ds.add_new(attr.split('_')[1], attr.split('_')[2], seq)
            #create an empty sequence (if the initial dicom had an empty sequence it has
            #to rebuild it
            else:
                ds.add_new(attr.split('_')[1], attr.split('_')[2], None)
    #META-FILE ATTRIBUTES
    ds = add_file_meta(df, ds, meta_attrs, index, parent_path)
    return ds
def decode_unit(value, VR, VM):
    """Decode one dataframe cell back into its DICOM value, based on the
    attribute's VR (value representation) and VM (value multiplicity)."""
    # the string 'None' encodes a missing value
    if value == str(None):
        return None
    integer_types = ('IS', 'SS', 'SL', 'US', 'UL')
    if VM != '1':
        # multi-valued: the cell holds a JSON-encoded list of strings
        decodable = VR in integer_types or VR in ('CS', 'DS', 'FD', 'UN')
        if decodable and VM != '0':
            return [decode_unit(element, VR, '1') for element in json.loads(value)]
        return value
    # single-valued cells
    if VR in ('OB', 'OW', 'UN'):
        # binary payloads were stored base64-encoded
        return base64.b64decode(value.encode("UTF-8"))
    if VR in integer_types:
        return int(value)
    if VR == 'FD':
        return float(value)
    return value
|
<filename>src/case.hs
-- | Partial head: first element of a list, via pattern-match equations.
-- Errors (like Prelude's head) on the empty list.
head' :: [a] -> a
head' [] = error "No head for empty list"
head' (x:_) = x
-- | Same contract as head', written with an explicit case expression.
head1 :: [a] -> a
head1 xs = case xs of
  [] -> error "No head for empty list"
  (x:_) -> x
-- | n-th Fibonacci number (fibonacci 0 = 0, fibonacci 1 = 1).
-- Rewritten with an iterative pair-stepping helper: O(n) instead of the
-- naive exponential double recursion. Same interface; negative inputs
-- still diverge, as before.
fibonacci :: Int -> Int
fibonacci n = go n 0 1
  where
    -- go k a b: a is the current Fibonacci number, b the next one
    go 0 a _ = a
    go k a b = go (k - 1) b (a + b)
-- | Largest element of a non-empty list; errors on [].
maximum' :: (Ord a) => [a] -> a
maximum' [] = error "Maximum of empty list"
maximum' [only] = only
maximum' (first:rest) = max first (maximum' rest)
-- | replicate written with an accumulator-passing worker (tail recursive).
replicate'' :: (Num a, Eq a) => a -> a1 -> [a1]
replicate'' n x = replicate' n x []
-- | Worker: prepend rp to acc n times.
-- NOTE(review): a negative count never reaches the 0 base case and loops
-- forever -- confirm callers only pass non-negative counts.
replicate' :: (Num a, Eq a) => a -> a1 -> [a1] -> [a1]
replicate' 0 rp acc = acc
replicate' n rp acc = replicate' (n-1) rp (rp:acc)
-- | Guard-based replicate; returns [] for non-positive counts.
replicate1 :: Int -> a -> [a]
replicate1 n x
  | n <= 0 = []
  | otherwise = x : replicate1 (n-1) x
|
import React from 'react';
import { Story, Meta } from '@storybook/react';
import { Field, Props } from './Field';
import { randomFill } from './filling';
// Storybook metadata: registers the Field component under 'Display/Display'.
export default {
  component: Field,
  title: 'Display/Display',
} as Meta;
// Template binding a story's args straight onto <Field>.
const Template: Story<Props> = (args) => <Field {...args} />;
export const DisplayExample = Template.bind({});
// 300x300 field pre-populated by randomFill.
DisplayExample.args = {
  width: 300,
  height: 300,
  field: randomFill(300, 300),
};
|
<filename>src/core/datasource.cpp
#include "datasource.h"
#include "data/entity.h"
#include "data/entitydatapool.h"
namespace SUCore {
// Stores a reference to the bank this data source draws entity pools from.
// The parameter was renamed from `m_dataBank` so it no longer shadows the
// member it initializes.
DataSource_I::DataSource_I(const EntityDataBank_C &dataBank) :
    m_dataBank(dataBank)
{
}
// Adds an entity to the pool registered in the bank for the given type;
// ownership of the entity transfers to the pool.
void DataSource_I::addEntity(EntityDataBank_C::EntityType type, std::unique_ptr<SUData::Entity_C> entity)
{
    SUData::EntityDataPool_C* pool = m_dataBank.entityDataPool(type);
    // Q_ASSERT is compiled out in release builds; a missing pool would then
    // dereference null below -- NOTE(review): confirm a pool always exists
    // for every EntityType.
    Q_ASSERT(pool);
    pool->addEntity(std::move(entity));
}
}
|
<reponame>jdxj/kiwivm-sdk-go
package kiwivm_sdk_go
type PrivateIPGetAvailableIPsRsp struct {
	Status
}

// PrivateIPGetAvailableIPs Returns all available (free) IPv4 addresses which you can activate on VM
// todo: test
func (c *Client) PrivateIPGetAvailableIPs() (*PrivateIPGetAvailableIPsRsp, error) {
	// this endpoint takes no parameters beyond the stored credentials
	resp := new(PrivateIPGetAvailableIPsRsp)
	err := c.do("/privateIp/getAvailableIps", c.auth, resp)
	return resp, err
}
type PrivateIpAssignReq struct {
	*Auth
	// optional
	IP string `json:"ip"`
}

type PrivateIpAssignRsp struct {
	Status
}

// PrivateIpAssign Assign private IP address.
// If IP address not specified, a random address will be assigned.
// todo: test
func (c *Client) PrivateIpAssign(req *PrivateIpAssignReq) (*PrivateIpAssignRsp, error) {
	// attach the client's credentials before issuing the call
	req.Auth = c.auth
	resp := new(PrivateIpAssignRsp)
	return resp, c.do("/privateIp/assign", req, resp)
}
type PrivateIpDeleteReq struct {
	*Auth
	IP string `json:"ip"`
}

type PrivateIpDeleteRsp struct {
	Status
}

// PrivateIpDelete Delete private IP address.
// todo: test
func (c *Client) PrivateIpDelete(req *PrivateIpDeleteReq) (*PrivateIpDeleteRsp, error) {
	// attach the client's credentials before issuing the call
	req.Auth = c.auth
	resp := new(PrivateIpDeleteRsp)
	return resp, c.do("/privateIp/delete", req, resp)
}
|
/*
* sldevice.c
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <signal.h>
#define LOG_TAG "sldevice"
#include "base/sl_log.h"
#include <base/sl_mutex.h>
#include <base/sl_cond.h>
#include <base/sl_thread.h>
#include <base/sl_time.h>
#include <base/sl_utils.h>
#include <sl/sl_service.h>
#include "mydev.h"
//static void my_sighandler(int s){
// ALOGD("my_sighandler:%d", s);
//}
/* Callback invoked when the channel's "mine" state changes; only logs it. */
static void chan_on_mine_changed(void *userdata, int state){
    ALOGD("chan_on_mine_changed:%d", state);
}
/* Service state listener: only the mine-state callback is handled; the
 * preconnect and message callbacks are intentionally left unset. */
sl_state_listener state_listener = {
    .on_mine_changed = chan_on_mine_changed,
    .on_preconnect_changed = NULL,
    .on_message = NULL,
};
extern sl_session_listener sessionlistener;
/*
 * Device entry point: parses <sid> <password> <configfile>, redirects the
 * log to ./<sid>.log, initialises the device and runs the service loop
 * until it stops.
 */
int main(int argc, char *argv[]){
    if(argc != 4){
        printf("usage:%s, devx 123456 configfile.json\n", argv[0]);
        return -1;
    }
    char sid[32] = {0};
    char passwd[32] = {0};
    char configfile[64] = {0};
    /* copy at most size-1 bytes so the buffers stay NUL-terminated even when
     * an argument is longer than the buffer (strncpy does not terminate on
     * truncation) */
    strncpy(sid, argv[1], sizeof(sid) - 1);
    strncpy(passwd, argv[2], sizeof(passwd) - 1);
    strncpy(configfile, argv[3], sizeof(configfile) - 1);
    /* -------- write the log to ./<sid>.log; comment these lines out if not wanted -------- */
    char fname[32] = {0};
    snprintf(fname, sizeof(fname), "./%s.log", sid);
    FILE* logf = fopen(fname, "wb");
    /* NOTE(review): logf may be NULL on fopen failure and is never fclose'd
     * here -- confirm sl_log_init/sl_log_deinit handle both cases */
    sl_log_init(logf);
    /* -------- end of log redirection -------- */
    // signal(SIGINT, &my_sighandler);
    // signal(SIGPIPE, &my_sighandler);
    // signal(SIGKILL, &my_sighandler);
    ALOGD("========login:%s=========", sid);
    MyDevice mydev;
    mydevice_init(&mydev, configfile);
    sl_device_info* myinfo = &mydev.myinfo;
    myinfo->_nettype = SL_NETTYPE_ETH;
    myinfo->_appOrdev = SL_DEV;
    strncpy(myinfo->_sid, sid, sizeof(myinfo->_sid));
    /* fix: the source contained the literal placeholder `<PASSWORD>`, which
     * does not compile; copy the parsed password argument instead */
    strncpy(myinfo->_passwd, passwd, sizeof(myinfo->_passwd));
    sl_status_t rs = sl_service_init(myinfo, &state_listener, NULL);
    ALOGD("sl_service_init:%d", rs);
    if(rs == SL_OK){
        sl_service_setSessionListener(&sessionlistener, &mydev);
        rs = sl_service_start(0);
        ALOGD("sl_service_start:%d", rs);
    }
    sl_service_stop();
    ALOGD("sl_service_stop");
    sl_service_deinit();
    mydevice_uninit(&mydev);
    sl_log_deinit();
    return 0;
}
|
<reponame>micheltobon/stencil-connect-4
import { newSpecPage, SpecPage } from '@stencil/core/testing';
import { MtBoardSlot } from './mt-connect-slot';
import { chip } from '../../../helpers/model'
describe('<mt-connect-slot>', () => {
let page: SpecPage;
let boardSlot;
beforeEach(async () => {
page = await newSpecPage({
components: [ MtBoardSlot ],
html: '<mt-connect-slot col="0"></mt-connect-slot>'
});
})
it('renders a basic slot', async () => {
expect(page.root.innerHTML).toEqualHtml(`<div class="chip"></div>`);
});
it('renders a red or blue chip', async () => {
const theChip = page.root.firstChild;
page.root.value = chip.red;
await page.waitForChanges();
expect(theChip).toHaveClass(chip.red);
expect(theChip).not.toHaveClass(chip.blue);
page.root.value = chip.blue;
await page.waitForChanges();
expect(theChip).toHaveClass(chip.blue);
expect(theChip).not.toHaveClass(chip.red);
});
it('renders a highlighed chip', async () => {
const theChip = page.root.firstChild;
expect(theChip).not.toHaveClass('highlight');
page.root.highlighted = true;
await page.waitForChanges();
expect(theChip).toHaveClass('highlight');
});
it('emits selectSlot when clicked', () => {
const selectSlotListener = jest.fn();
page.win.addEventListener('selectSlot', selectSlotListener);
page.root.click();
expect(selectSlotListener).toHaveBeenCalled();
const detail = selectSlotListener.mock.calls[0][0].detail;
expect(detail).toBe(0);
});
});
|
/**
* This method attempts to adjust MHP information locally, if that can be done,
* to model the effect of addition of
* {@code node} If successful, it returns {@code true}.
*
* @param node
* the node that has been added to the program, for which MHP
* information needs to be stabilized.
*
* @return {@code true}, if the MHP stabilization was successful.
*/
public static boolean stabilizeMHPLocallyUponAddition(Node node) {
node = Misc.getCFGNodeFor(node);
assert (!(node instanceof BeginNode));
if (node instanceof BarrierDirective || !node.getInfo().isControlConfined()
|| node.getInfo().getCFGInfo().getIntraTaskCFGLeafContentsOfSameParLevel().stream()
.anyMatch(n -> n.getNode() instanceof BarrierDirective)) {
return false;
}
Set<Node> predSet = node.getInfo().getCFGInfo().getInterProceduralLeafPredecessors();
for (Node pred : predSet) {
if (pred instanceof BarrierDirective
|| (pred instanceof BeginNode && pred.getParent() instanceof ParallelConstruct)
|| (pred instanceof EndNode && pred.getParent() instanceof ParallelConstruct)) {
return false;
}
}
hasBeenOtimized++;
if (Program.concurrencyAlgorithm == Program.ConcurrencyAlgorithm.ICON) {
boolean oldVal = BeginPhasePoint.stabilizationInProgress;
BeginPhasePoint.stabilizationInProgress = true;
Set<Node> internalContents = node.getInfo().getCFGInfo().getIntraTaskCFGLeafContents();
for (Node pred : predSet) {
Set<BeginPhasePoint> bppsOfPred = BeginPhasePoint.getRelatedBPPsNoStaleRemoval(pred);
for (BeginPhasePoint bpp : bppsOfPred) {
for (Node n : internalContents) {
bpp.getInternalReachables().add(n);
}
}
try {
for (AbstractPhase<?, ?> ph : pred.getInfo().getNodePhaseInfo().getStalePhaseSet()) {
for (Node n : internalContents) {
ph.addNode(n);
}
}
} catch (ConcurrentModificationException e) {
System.err.println("While attempting to add " + node + " a" + node.getClass().getSimpleName()
+ " we found an issue while dealing with its predecessor " + pred + " a "
+ pred.getClass().getSimpleName()
+ ". It's worth noting that phaseSet of the predecessor is object "
+ pred.getInfo().getNodePhaseInfo().getStalePhaseSet().hashCode()
+ ", whereas that of the node is "
+ node.getInfo().getNodePhaseInfo().getStalePhaseSet().hashCode());
System.exit(0);
}
}
BeginPhasePoint.stabilizationInProgress = oldVal;
} else {
Set<Node> internalContents = node.getInfo().getCFGInfo().getIntraTaskCFGLeafContents();
for (Node pred : predSet) {
try {
for (AbstractPhase<?, ?> ph : pred.getInfo().getNodePhaseInfo().getStalePhaseSet()) {
for (Node n : internalContents) {
ph.addNode(n);
}
}
} catch (ConcurrentModificationException e) {
System.err.println("While attempting to add " + node + " a" + node.getClass().getSimpleName()
+ " we found an issue while dealing with its predecessor " + pred + " a "
+ pred.getClass().getSimpleName()
+ ". It's worth noting that phaseSet of the predecessor is object "
+ pred.getInfo().getNodePhaseInfo().getStalePhaseSet().hashCode()
+ ", whereas that of the node is "
+ node.getInfo().getNodePhaseInfo().getStalePhaseSet().hashCode());
System.exit(0);
}
}
}
return true;
} |
def forecast_made_utc_offset_seconds(self) -> int:
    """Return the UTC offset of ``forecast_made_datetime`` in whole seconds.

    Returns 0 when the datetime is naive (``utcoffset()`` is ``None``) or
    when the offset is exactly zero.
    """
    delta = self.forecast_made_datetime.utcoffset()
    return int(delta.total_seconds()) if delta else 0
// ag-grid-enterprise v17.1.1
import { Component } from "ag-grid/main";
import { IToolPanel } from "ag-grid";
// Type declaration for the enterprise tool panel component.
// NOTE(review): this looks like a generated .d.ts (see the version banner
// above) — regenerate rather than hand-edit if the implementation changes.
export declare class ToolPanelComp extends Component implements IToolPanel {
    private context;
    private eventService;
    private gridOptionsWrapper;
    private buttonComp;
    private columnPanel;
    // Guards against repeated initialisation (see init()).
    private initialised;
    constructor();
    private postConstruct();
    init(): void;
    refresh(): void;
    // Shows or hides the tool panel.
    showToolPanel(show: boolean): void;
    isToolPanelShowing(): boolean;
}
|
Records, posters and toys are among the items in the New York home of a man who said he is making progress in controlling his hoarding. (Seth Wenig/AP)
Sandy Stark always loved pretty things. When she was a girl, she collected unusual rocks, birds’ nests, crooked sticks and dolls. As an adult, she gravitated to white ceramics and china, paperweights, kitchenware and art. Year by year, the treasures accumulated until the only way she could navigate her San Francisco apartment was through a narrow line of what she called “goat paths.”
That was when her two grown daughters swooped in and cleaned the place out. All her treasures, gone. On reentering her house, seeing it so sterile, so empty, Stark, now 71, says she felt traumatized. Almost immediately, she began reacquiring things — with a vengeance:
“You’re pulling everything in around you, building the hamster’s nest, building the wall. Part of it is for the high. It’s an addiction, sort of. But it’s also to fill a void. It fills a lot of void.”
[Updates to psychiatry’s guidebook change criteria for ADHD, autism, hoarding]
Within 18 months, Stark, who was at one time so organized she supervised payroll for the Pacific Stock Exchange, could barely negotiate the way to her bedroom. Everywhere she turned, boxes. She was then going on 60, and her life had become defined by “the hoard.”
Compulsive hoarding is more than just keeping your old baseball trophies from middle school. Psychologist Dr. Greg Chasson of Towson University outlines the symptoms of compulsive hoarding and how it can lead to debilitating stress. (Towson University)
While the stockpiling of stuff is often pinned on America’s culture of mass consumption, hoarding is nothing new. But it’s only in recent years that the subject has received the attention of researchers, social workers, psychologists, fire marshals and public-health officials.
They call it an emerging issue that is certain to grow with an aging population. That’s because, while the first signs often arise in adolescence, they typically worsen with age, usually after a divorce, the death of a spouse or another crisis.
Hoarding is different from merely living amid clutter, experts note. It’s possible to have a messy house and be a pack rat without qualifying for a diagnosis of hoarding behavior. The difference is one of degree. Hoarding disorder is present when the behavior causes distress to the individual or interferes with emotional, physical, social, financial or legal well-being.
[Hoarding drugs is bad medicine, even if your intentions are good]
“If you aren’t able to use the stove and your refrigerator is stockpiled with expired items, if you’re so disorganized you aren’t able to file for Medicare or make a primary-care appointment, [hoarding] becomes a major problem,” says Catherine Ayers, a geriatric psychologist at the University of California at San Diego who has developed a cognitive behavior therapy for older people with the disorder.
Studies show that compulsive hoarding affects up to 6 percent of the population, or 19 million Americans, and it has been found to run in families. The rate is twice that of obsessive-compulsive disorder, the condition under which hoarding was listed until 2013 in the Diagnostic and Statistical Manual of Mental Disorders, the bible of the American Psychiatric Association. The DSM’s latest version now categorizes it as a separate mental illness.
Brain-imaging studies of hoarders have revealed abnormally low activity in the anterior cingulate cortex, which governs thinking and emotion. When these people are shown trigger images — such as pictures of objects being shredded and discarded — that area of their brain lights up and turns hyperactive.
Hoarding is “underdiagnosed and undertreated,” says Sanjaya Saxena, director of the Obsessive-Compulsive Disorders Program at the UC San Diego health system. “Though people realize it’s a problem, they never conceive of it as a medical disorder rooted in brain abnormalities.”
Awareness, though, is growing. In the past five years, more than 100 task forces on hoarding have sprouted around the United States and Canada. Most involve training and education — teaching clinicians and community figures such as firefighters how to recognize and deal with the disorder.
Many programs use a team approach that may include a landlord, a home-health nurse, a code enforcement officer, firefighters, a family member, a neighbor and a social worker.
“Unlike some other mental-health disorders, many people with hoarding do not seek treatment,” says Michael Tompkins, a San Francisco psychologist and the author of “Digging Out: Helping Your Loved One Manage Clutter, Hoarding and Compulsive Acquiring.” “They don’t recognize the consequences of their condition or the fact that it affects other people in their apartment building and community.”
Compulsive hoarding is associated in various studies with serious health risks such as household falls, obesity, respiratory problems (caused by dust mites and squalor) and poor medication compliance.
A 2012 study in New York found that 22 percent of people threatened with eviction and seeking intervention had a hoarding problem, and the condition has been associated with homelessness.
Among the most serious concerns is the potential for fire. A 2009 study in Australia found that hoarding-related fires ranked among the most deadly of all blazes, with 48 such fires responsible for 10 fatalities over a 10-year period.
“A lot of these people don’t use their front door; a lot of times they don’t use a door at all,” says Ryan Pennington, a paramedic and firefighter who maintains a website called Chamber of Hoarders and who lectures widely to fire departments about the issue. “Firefighters who crawl into these houses often don’t get the full force of radiant heat. Many times, they don’t realize how hot the fire really is until it’s too late.”
Medications — most commonly antidepressants — have been used with some success, but the primary approach to hoarding behavior is psychotherapy and harm reduction. Cognitive behavior therapy is also used, to teach people how to organize, prioritize and plan while working to ease their emotional attachment to the objects they have collected.
The most common acquisitions are clothes and books. But often the stockpiling includes items that people ordinarily discard: junk mail, food packaging, shampoo bottles.
“I call it rubble without a cause,” says Fred Lipschultz, 78, a retired physicist from the University of Connecticut. Over the years, he has hung on to the ticket of virtually every show and concert he has attended. He says he collects papers, plastic containers and quart-size jars. “It does pain me to throw away something that’s useful.”
But in the past few years, he has found help at the Institute for Compulsive Hoarding and Cluttering at the Mental Health Association of San Francisco. He has identified techniques, such as setting limits on how many containers he accumulates, to keep his hoarding in check. He uses a computer to digitize his mementoes, photos, theater tickets and papers. Once something has been scanned into his computer, he allows himself to toss out the actual paper.
Cognitive behavior therapy has its limits, however, said Randy O. Frost, a Smith College researcher who has helped develop a model used around the country to help hoarders address their emotional reactions to reducing their possessions.
“We’ve developed a treatment program, and it does work — but not as well as we’d hoped,” Frost says. “Between 60 and 80 percent of people are improved after treatment, with an average decrease in symptoms of about 30 percent.”
And relapse is common.
Stark says her own experience is “several steps backward while moving forward.”
She dates the beginning of her recovery to the time, about seven years ago, when she saw Tompkins on TV, discussing hoarding behavior and characterizing the people who suffer from it as perfectionists.
“And I thought: ‘That’s me! I’m not this lazy, dirty person! I have a problem.’ Half the relief was knowing that I had an issue I had to address. It was something I had a word for.”
Group therapy proved helpful, and eventually Stark became a member of a peer-led counseling group. But it wasn’t until she found a comrade — a “clutter buddy” — that she began her real recovery.
“It took me three years to accept her offer, and even then I cried as she came over the threshold,” Stark says. It was the first time in many years that she had allowed anyone inside her house.
Now her goal is to open her home to a host of friends. And she knows just how she wants to do it.
“I’m going to have an old-fashioned cocktail party,” says Stark, who loved entertaining before her hoarding began. “That’s my eye on the prize.” |
package metronome
import (
"bytes"
"crypto/tls"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
"time"
log "github.com/behance/go-logrus"
)
// Constants to represent HTTP verbs used by the client's apiCall helpers.
const (
	HTTPGet    = "GET"    // HTTP GET verb
	HTTPPut    = "PUT"    // HTTP PUT verb
	HTTPDelete = "DELETE" // HTTP DELETE verb
	HTTPPost   = "POST"   // HTTP POST verb
)
// Metronome represents the client interface for interacting with the metronome API.
// Each method maps to a single HTTP endpoint, noted in the comment above it.
type Metronome interface {
	// POST /v1/jobs
	CreateJob(*Job) (*Job, error)
	// DELETE /v1/jobs/$jobId
	DeleteJob(jobID string) (interface{}, error)
	// GET /v1/jobs/$jobId
	GetJob(jobID string) (*Job, error)
	// GET /v1/jobs
	Jobs() (*[]Job, error)
	// PUT /v1/jobs/$jobId
	UpdateJob(jobID string, job *Job) (interface{}, error)
	//
	// Runs
	//
	// GET /v1/jobs/$jobId/runs
	// Technically, this rev of Runs() is a hack to get functionality from the undocumented api
	// - since is milliseconds from epoch
	Runs(jobID string, statusSince int64) (*Job, error)
	// POST /v1/jobs/$jobId/runs
	StartJob(jobID string) (interface{}, error)
	// GET /v1/jobs/$jobId/runs/$runId
	StatusJob(jobID string, runID string) (*JobStatus, error)
	// POST /v1/jobs/$jobId/runs/$runId/action/stop
	StopJob(jobID string, runID string) (interface{}, error)
	//
	// Schedules
	//
	// POST /v1/jobs/$jobId/schedules
	CreateSchedule(jobID string, new *Schedule) (interface{}, error)
	// GET /v1/jobs/$jobId/schedules/$scheduleId
	GetSchedule(jobID string, schedID string) (*Schedule, error)
	// GET /v1/jobs/$jobId/schedules
	Schedules(jobID string) (*[]Schedule, error)
	// DELETE /v1/jobs/$jobId/schedules/$scheduleId
	DeleteSchedule(jobID string, schedID string) (interface{}, error)
	// PUT /v1/jobs/$jobId/schedules/$scheduleId
	UpdateSchedule(jobID string, schedID string, sched *Schedule) (interface{}, error)
	// GET /v1/metrics
	Metrics() (interface{}, error)
	// GET /v1/ping
	Ping() (*string, error)
}
// TwentyFourHoursAgo returns the moment 24 hours ago, expressed as
// milliseconds since the Unix epoch.
func TwentyFourHoursAgo() int64 {
	const dayMs = 24 * 3600000
	nowMs := time.Now().UnixNano() / int64(time.Millisecond)
	return nowMs - dayMs
}
// A Client can make http requests against a metronome endpoint.
type Client struct {
	url    *url.URL     // parsed base URL of the metronome endpoint
	config Config       // client configuration (credentials, timeouts, TLS)
	http   *http.Client // underlying HTTP client used for every request
}
// NewClient returns a new client, initialized with the provided config.
// It verifies connectivity by listing jobs once before returning.
func NewClient(config Config) (Metronome, error) {
	log.Debugf("NewClient started %+v", config)
	parsed, err := url.Parse(config.URL)
	if err != nil {
		return nil, err
	}
	transport := &http.Transport{
		Proxy: http.ProxyFromEnvironment,
		TLSClientConfig: &tls.Config{
			// Honours the config flag for self-signed endpoints.
			InsecureSkipVerify: config.AllowUnverifiedTLS,
		},
	}
	client := &Client{
		url:    parsed,
		config: config,
		http: &http.Client{
			Timeout:   time.Duration(config.RequestTimeout) * time.Second,
			Transport: transport,
		},
	}
	// Verify you can reach metronome before handing the client out.
	if _, err := client.Jobs(); err != nil {
		return nil, errors.New("Could not reach metronome cluster: " + err.Error())
	}
	return client, nil
}
// apiGet issues a GET against uri and decodes the response into result.
func (client *Client) apiGet(uri string, queryParams map[string][]string, result interface{}) (status int, err error) {
	return client.apiCall(HTTPGet, uri, queryParams, "", result)
}
// apiDelete issues a DELETE against uri and decodes the response into result.
func (client *Client) apiDelete(uri string, queryParams map[string][]string, result interface{}) (status int, err error) {
	return client.apiCall(HTTPDelete, uri, queryParams, "", result)
}
// apiPut issues a PUT with putData JSON-encoded as the request body, decoding
// the response into result.
//
// Fix: a json.Marshal failure used to be silently discarded (the request went
// out with an empty body); it now aborts the call, mirroring apiPost's
// handling of encode failures.
func (client *Client) apiPut(uri string, queryParams map[string][]string, putData interface{}, result interface{}) (status int, err error) {
	var putDataString []byte
	if putData != nil {
		putDataString, err = json.Marshal(putData)
		if err != nil {
			return http.StatusBadRequest, err
		}
		log.Debugf("PUT %s", string(putDataString))
	}
	return client.apiCall(HTTPPut, uri, queryParams, string(putDataString), result)
}
// apiPost issues a POST with postData JSON-encoded as the request body,
// decoding the response into result. HTML escaping is disabled so payloads
// reach metronome verbatim.
func (client *Client) apiPost(uri string, queryParams map[string][]string, postData interface{}, result interface{}) (status int, err error) {
	var buf bytes.Buffer
	encoder := json.NewEncoder(&buf)
	encoder.SetEscapeHTML(false)
	if err = encoder.Encode(postData); err != nil {
		return http.StatusBadRequest, err
	}
	return client.apiCall(HTTPPost, uri, queryParams, buf.String(), result)
}
// apiCall executes an HTTP request against the metronome API and decodes the
// response body into result according to its Content-Type. It returns the
// HTTP status code, and an error for transport failures, decode failures, or
// non-2xx statuses.
//
// Fixes vs. the previous revision:
//   - the response body is now closed (it was leaked on every call);
//   - the raw-JSON fast path matches *json.RawMessage — the old
//     `case json.RawMessage:` could never match the pointer that the very
//     next type assertion required, so the branch was dead;
//   - server-provided error text is no longer routed through Fprintf as a
//     format string;
//   - a response with a body but no Content-Type header yields an error
//     instead of an index panic.
func (client *Client) apiCall(method string, uri string, queryParams map[string][]string, body string, result interface{}) (int, error) {
	log.Debugf("apiCall ... method: %v url: %v queryParams: %+v", method, uri, queryParams)
	url, _ := client.buildURL(uri, queryParams)
	status, response, err := client.httpCall(method, url, body)
	if err != nil {
		return 0, err
	}
	// The caller of http.Client.Do must close the body to release the
	// underlying connection.
	defer response.Body.Close()
	log.Debugf("%s result status: %+v", uri, response.Status)
	log.Debugf("Headers: %+v", response.Header)
	if response.ContentLength > 0 {
		ct := response.Header["Content-Type"]
		log.Debugf("content-type: %s", ct)
		if len(ct) == 0 {
			return status, fmt.Errorf("response has a body but no Content-Type header")
		}
		switch ct[0] {
		case "application/json":
			// Decode as a raw json message first, which fails fast if the
			// payload is not well-formed JSON.
			var msg json.RawMessage
			if err = json.NewDecoder(response.Body).Decode(&msg); err != nil {
				return status, err
			}
			switch tt := result.(type) {
			case *json.RawMessage:
				// Fast path: hand the raw payload back without re-unmarshalling.
				*tt = msg
				if status >= 400 {
					// metronome returns json error messages; surface them.
					return status, errors.New(string(msg))
				}
			default:
				err = json.Unmarshal(msg, result)
				if err != nil || status >= 400 {
					// metronome returns json error messages; surface them.
					return status, errors.New(string(msg))
				}
				log.Debugf("method %s uri: %s status: %d result type: %T", method, uri, status, result)
			}
		case "text/plain; charset=utf-8":
			htmlData, err := ioutil.ReadAll(response.Body)
			if err != nil {
				return status, err
			}
			v := result.(*string)
			*v = string(htmlData)
		default:
			return status, fmt.Errorf("Unknown content-type %s", ct[0])
		}
	}
	// Any non-2xx status that slipped past the JSON error path is an error.
	if status < 200 || status > 299 {
		return status, errors.New(response.Status)
	}
	return status, nil
}
// buildURL returns a copy of the client's base URL with reqPath joined onto
// the configured path prefix and queryParams merged into the query string.
func (client *Client) buildURL(reqPath string, queryParams map[string][]string) (*url.URL, error) {
	// Work on a copy so the client's base URL is never mutated.
	u := *client.url
	q := u.Query()
	log.Debugf("client.url.params %+v ; queryParams: %+v; client.config.URL: %+v base.url: %+v", q, queryParams, client.config.URL, u)
	master, _ := url.Parse(client.config.URL)
	for key, values := range queryParams {
		for _, v := range values {
			q.Add(key, v)
		}
	}
	u.RawQuery = q.Encode()
	u.Path = path.Join(master.Path, reqPath)
	return &u, nil
}
// applyRequestHeaders decorates the request with JSON content-negotiation
// headers plus whichever credentials (basic auth and/or auth token) the
// client configuration provides.
func (client *Client) applyRequestHeaders(request *http.Request) {
	request.Header.Add("Content-Type", "application/json")
	request.Header.Add("Accept", "application/json")
	cfg := client.config
	if cfg.User != "" && cfg.Pw != "" {
		request.SetBasicAuth(cfg.User, cfg.Pw)
	}
	if cfg.AuthToken != "" {
		request.Header.Add("Authorization", cfg.AuthToken)
	}
}
// newRequest constructs an HTTP request for the given method/URL/body with
// the client's standard headers applied, optionally dumping it when debug
// logging is enabled.
//
// Fixes: the debug dump was previously logged only when DumpRequest FAILED
// (`err != nil` was inverted), and the dump bytes were passed to Infof as a
// format string (any '%' in the request would corrupt the log line).
func (client *Client) newRequest(method string, url *url.URL, body string) (*http.Request, error) {
	request, err := http.NewRequest(method, url.String(), strings.NewReader(body))
	if err != nil {
		return nil, err
	}
	client.applyRequestHeaders(request)
	if client.config.Debug {
		if dump, err := httputil.DumpRequest(request, true); err == nil {
			log.Infof("%s", dump)
		}
	}
	return request, nil
}
// httpCall builds a request for the given method/URL/body, applies the
// configured headers, executes it, and returns the HTTP status code alongside
// the raw response. The caller owns (and must close) the response body.
func (client *Client) httpCall(method string, url *url.URL, body string) (int, *http.Response, error) {
	req, err := client.newRequest(method, url, body)
	if err != nil {
		return 0, nil, err
	}
	resp, err := client.http.Do(req)
	if err != nil {
		return 0, nil, err
	}
	return resp.StatusCode, resp, nil
}
// TODO: this better
// log emits a printf-style info message through the package logger,
// appending a trailing newline.
func (client *Client) log(message string, args ...interface{}) {
	log.Infof(message+"\n", args...)
}
|
<filename>src/main/java/io/streams/classes/highLevel/BufferedWriters.java
package io.streams.classes.highLevel;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
/**
 * Demonstrates writing characters and strings through a BufferedWriter.
 *
 * Fix: the writer was never flushed or closed, so the buffered output never
 * reached disk and the underlying file handle leaked. try-with-resources now
 * closes (and thereby flushes) it even if a write throws.
 */
public class BufferedWriters {
    public static void main(String[] args) throws IOException {
        File file = new File("doesnt_exist");
        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file))) {
            bufferedWriter.write("aaa".toCharArray()); // char[] overload
            bufferedWriter.write("aaa");               // String overload
            bufferedWriter.newLine();                  // platform line separator
        }
    }
}
|
// SubCmd returns a flag set for the named subcommand whose Usage function
// prints a "mirrorbits <name> <signature>" banner plus the description and
// the registered flag defaults.
func SubCmd(name, signature, description string) *flag.FlagSet {
	fs := flag.NewFlagSet(name, flag.ContinueOnError)
	fs.Usage = func() {
		fmt.Fprintf(os.Stderr, "\nUsage: mirrorbits %s %s\n\n%s\n\n", name, signature, description)
		fs.PrintDefaults()
	}
	return fs
}
import Driver from '../src/Driver';
import ExecuteDriverRoutine from '../src/ExecuteDriverRoutine';
import RunCommandRoutine from '../src/execute/RunCommandRoutine';
import DriverContext from '../src/contexts/DriverContext';
import {
getFixturePath,
createDriverContext,
createTestDebugger,
createTestDriver,
createTestTool,
} from '../../../tests/helpers';
jest.mock('../src/execute/RunCommandRoutine', () => jest.fn());
// Unit tests for ExecuteDriverRoutine: bootstrapping of per-driver command
// routines (including workspace fan-out and parallel argv variants), pooled
// execution, workspace filtering, and priority-graph ordering.
describe('ExecuteDriverRoutine', () => {
  let routine: ExecuteDriverRoutine;
  let driver: Driver;

  beforeEach(() => {
    const tool = createTestTool();
    driver = createTestDriver('primary', tool);
    routine = new ExecuteDriverRoutine('driver', 'Executing driver');
    routine.context = createDriverContext(driver);
    routine.tool = tool;
    routine.debug = createTestDebugger();
    // RunCommandRoutine is mocked, so use plain objects
    routine.routines = [
      // @ts-ignore
      { key: 'primary' },
      // @ts-ignore
      { key: 'foo' },
      // @ts-ignore
      { key: 'bar' },
      // @ts-ignore
      { key: 'baz' },
      // @ts-ignore
      { key: 'qux' },
    ];
    // Workspace package fixtures mirror the routine keys above, one package
    // per routine, so the graph/filter tests can relate the two.
    routine.workspacePackages = [
      {
        name: '@scope/primary',
        version: '0.0.0',
        workspace: tool.createWorkspaceMetadata('./packages/primary/package.json'),
      },
      {
        name: '@scope/foo',
        version: '0.0.0',
        workspace: tool.createWorkspaceMetadata('./packages/foo/package.json'),
      },
      {
        name: '@scope/bar',
        version: '0.0.0',
        workspace: tool.createWorkspaceMetadata('./packages/bar/package.json'),
      },
      {
        name: '@scope/baz',
        version: '0.0.0',
        workspace: tool.createWorkspaceMetadata('./packages/baz/package.json'),
      },
      {
        name: '@scope/qux',
        version: '0.0.0',
        workspace: tool.createWorkspaceMetadata('./packages/qux/package.json'),
      },
    ];
    // @ts-ignore
    RunCommandRoutine.mockClear();
  });

  describe('bootstrap()', () => {
    it('adds a routine for the primary driver', () => {
      routine.pipe = jest.fn();
      routine.bootstrap();
      expect(routine.pipe).toHaveBeenCalledWith(
        new RunCommandRoutine('primary', 'primary -a --foo bar baz'),
      );
    });

    it('adds multiple routines when parallel is used', () => {
      routine.context.parallelArgv = [['--one', '--two=2'], ['--three', '-f']];
      routine.pipe = jest.fn();
      routine.bootstrap();
      // One piped routine per parallel argv group, with the extra args both
      // appended to the command line and passed as additionalArgv.
      expect(routine.pipe).toHaveBeenCalledWith(
        new RunCommandRoutine('primary', 'primary -a --foo bar baz --one --two=2', {
          additionalArgv: ['--one', '--two=2'],
        }),
      );
      expect(routine.pipe).toHaveBeenCalledWith(
        new RunCommandRoutine('primary', 'primary -a --foo bar baz --three -f', {
          additionalArgv: ['--three', '-f'],
        }),
      );
    });

    it('adds a routine if parallel is empty', () => {
      routine.context.parallelArgv = [];
      routine.pipe = jest.fn();
      routine.bootstrap();
      expect(routine.pipe).toHaveBeenCalledWith(
        new RunCommandRoutine('primary', 'primary -a --foo bar baz'),
      );
    });

    describe('workspaces', () => {
      beforeEach(() => {
        // The workspaces-driver fixture contains the foo/bar/baz packages.
        routine.context.args.workspaces = '*';
        routine.context.workspaces = ['packages/*'];
        routine.context.workspaceRoot = getFixturePath('workspaces-driver');
        routine.context.root = getFixturePath('workspaces-driver');
      });

      it('adds a routine for each', () => {
        routine.pipe = jest.fn();
        routine.bootstrap();
        expect(routine.pipe).toHaveBeenCalledTimes(3);
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('foo', 'primary -a --foo bar baz', {
            forceConfigOption: true,
            packageRoot: './packages/foo',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('bar', 'primary -a --foo bar baz', {
            forceConfigOption: true,
            packageRoot: './packages/bar',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('baz', 'primary -a --foo bar baz', {
            forceConfigOption: true,
            packageRoot: './packages/baz',
          }),
        );
      });

      it('adds a routine for each when parallel is used', () => {
        routine.context.parallelArgv = [['--one', '--two=2'], ['--three', '-f']];
        routine.pipe = jest.fn();
        routine.bootstrap();
        // 3 workspaces x (1 base + 2 parallel argv groups) = 9 piped routines.
        expect(routine.pipe).toHaveBeenCalledTimes(9);
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('foo', 'primary -a --foo bar baz --one --two=2', {
            additionalArgv: ['--one', '--two=2'],
            forceConfigOption: true,
            packageRoot: './packages/foo',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('foo', 'primary -a --foo bar baz --three -f', {
            additionalArgv: ['--three', '-f'],
            forceConfigOption: true,
            packageRoot: './packages/foo',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('bar', 'primary -a --foo bar baz --one --two=2', {
            additionalArgv: ['--one', '--two=2'],
            forceConfigOption: true,
            packageRoot: './packages/bar',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('bar', 'primary -a --foo bar baz --three -f', {
            additionalArgv: ['--three', '-f'],
            forceConfigOption: true,
            packageRoot: './packages/bar',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('baz', 'primary -a --foo bar baz --one --two=2', {
            additionalArgv: ['--one', '--two=2'],
            forceConfigOption: true,
            packageRoot: './packages/baz',
          }),
        );
        expect(routine.pipe).toHaveBeenCalledWith(
          new RunCommandRoutine('baz', 'primary -a --foo bar baz --three -f', {
            additionalArgv: ['--three', '-f'],
            forceConfigOption: true,
            packageRoot: './packages/baz',
          }),
        );
      });

      it('errors if workspaces config is not set', () => {
        expect(() => {
          delete routine.context.workspaces;
          routine.bootstrap();
        }).toThrowErrorMatchingSnapshot();
      });

      it('errors if workspaces config is empty', () => {
        expect(() => {
          routine.context.workspaces = [];
          routine.bootstrap();
        }).toThrowErrorMatchingSnapshot();
      });
    });
  });

  describe('execute()', () => {
    let context: DriverContext;

    beforeEach(() => {
      context = createDriverContext(driver);
    });

    it('pools each routine', async () => {
      routine.poolRoutines = jest.fn(() => Promise.resolve({ errors: [], results: [] }));
      await routine.execute(context);
      expect(routine.poolRoutines).toHaveBeenCalledWith(null, {}, routine.routines);
    });

    it('passes concurrency to pooler', async () => {
      routine.poolRoutines = jest.fn(() => Promise.resolve({ errors: [], results: [] }));
      context.args.concurrency = 2;
      await routine.execute(context);
      expect(routine.poolRoutines).toHaveBeenCalledWith(null, { concurrency: 2 }, routine.routines);
    });

    it('passes concurrency option to pooler', async () => {
      routine.poolRoutines = jest.fn(() => Promise.resolve({ errors: [], results: [] }));
      routine.tool.config.execute.concurrency = 3;
      await routine.execute(context);
      expect(routine.poolRoutines).toHaveBeenCalledWith(null, { concurrency: 3 }, routine.routines);
    });

    it('throws an error if any failures', async () => {
      routine.poolRoutines = jest.fn(() =>
        Promise.resolve({ errors: [new Error('Failed'), new Error('Oops')], results: [] }),
      );
      try {
        await routine.execute(context);
      } catch (error) {
        expect(error).toEqual(
          new Error(
            'Failed to execute driver pipeline. The following errors have occurred:\n\nFailed\n\nOops',
          ),
        );
      }
    });

    it('returns results', async () => {
      routine.poolRoutines = jest.fn(() => Promise.resolve({ errors: [], results: [123] }));
      const response = await routine.execute(context);
      expect(response).toEqual([123]);
    });

    it('serializes priority routines before pooling other routines', async () => {
      routine.context.args.priority = true;
      routine.context.args.workspaces = '*';
      routine.serializeRoutines = jest.fn(() => Promise.resolve());
      routine.poolRoutines = jest.fn(() => Promise.resolve({ errors: [], results: [] }));
      // foo depends on... (peer dep makes 'foo' a priority routine below)
      routine.workspacePackages[1].peerDependencies = {
        '@scope/foo': '1.0.0',
      };
      await routine.execute(context);
      expect(routine.serializeRoutines).toHaveBeenCalledWith(null, [{ key: 'foo' }]);
      expect(routine.poolRoutines).toHaveBeenCalledWith(null, {}, [
        { key: 'primary' },
        { key: 'bar' },
        { key: 'baz' },
        { key: 'qux' },
      ]);
    });
  });

  describe('getFilteredWorkspaces()', () => {
    it('returns none for empty string', () => {
      routine.context.args.workspaces = '';
      expect(routine.getFilteredWorkspacePackages()).toEqual([]);
    });

    it('returns all for wildcard `*`', () => {
      routine.context.args.workspaces = '*';
      expect(routine.getFilteredWorkspacePackages()).toEqual([
        {
          name: '@scope/primary',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/primary/package.json'),
        },
        {
          name: '@scope/foo',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/foo/package.json'),
        },
        {
          name: '@scope/bar',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/bar/package.json'),
        },
        {
          name: '@scope/baz',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/baz/package.json'),
        },
        {
          name: '@scope/qux',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/qux/package.json'),
        },
      ]);
    });

    it('filters by package name', () => {
      routine.context.args.workspaces = 'foo|bar';
      expect(routine.getFilteredWorkspacePackages()).toEqual([
        {
          name: '@scope/foo',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/foo/package.json'),
        },
        {
          name: '@scope/bar',
          version: '0.0.0',
          workspace: routine.tool.createWorkspaceMetadata('./packages/bar/package.json'),
        },
      ]);
    });
  });

  describe('orderByWorkspacePriorityGraph()', () => {
    beforeEach(() => {
      routine.context.args.priority = true;
      routine.context.args.workspaces = '*';
    });

    it('returns all as `other` if priority is false', () => {
      routine.context.args.priority = false;
      routine.tool.config.execute.priority = false;
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'primary' }, { key: 'foo' }, { key: 'bar' }, { key: 'baz' }, { key: 'qux' }],
        priority: [],
      });
    });

    it('returns all as `other` if workspaces is empty', () => {
      routine.context.args.workspaces = '';
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'primary' }, { key: 'foo' }, { key: 'bar' }, { key: 'baz' }, { key: 'qux' }],
        priority: [],
      });
    });

    it('returns all as `other` if no dependents', () => {
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'primary' }, { key: 'foo' }, { key: 'bar' }, { key: 'baz' }, { key: 'qux' }],
        priority: [],
      });
    });

    it('prioritizes based on peerDependencies', () => {
      routine.workspacePackages[1].peerDependencies = {
        '@scope/bar': '1.0.0',
      };
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'primary' }, { key: 'foo' }, { key: 'baz' }, { key: 'qux' }],
        priority: [{ key: 'bar' }],
      });
    });

    it('prioritizes based on dependencies', () => {
      routine.workspacePackages[1].dependencies = {
        '@scope/bar': '1.0.0',
      };
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'primary' }, { key: 'foo' }, { key: 'baz' }, { key: 'qux' }],
        priority: [{ key: 'bar' }],
      });
    });

    it('sorts priority based on dependency count', () => {
      routine.workspacePackages[2].peerDependencies = {
        '@scope/primary': '2.0.0',
      };
      routine.workspacePackages[1].dependencies = {
        '@scope/bar': '1.0.0',
      };
      routine.workspacePackages[4].peerDependencies = {
        '@scope/bar': '1.0.0',
      };
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'foo' }, { key: 'baz' }, { key: 'qux' }],
        priority: [{ key: 'bar' }, { key: 'primary' }],
      });
    });

    it('sorts priority by taking `priority` package option into account', () => {
      routine.workspacePackages[2].peerDependencies = {
        '@scope/primary': '2.0.0',
      };
      routine.workspacePackages[1].dependencies = {
        '@scope/bar': '1.0.0',
      };
      routine.workspacePackages[4].priority = 3;
      routine.workspacePackages[4].peerDependencies = {
        '@scope/bar': '1.0.0',
      };
      routine.workspacePackages[0].priority = 100;
      expect(routine.orderByWorkspacePriorityGraph()).toEqual({
        other: [{ key: 'foo' }, { key: 'baz' }],
        priority: [{ key: 'primary' }, { key: 'qux' }, { key: 'bar' }],
      });
    });
  });
});
|
<filename>java-17/target/generated-sources/annotations/com/github/howaric/java17/java12/jmh_generated/T1_TestJMH2_jmhType.java
package com.github.howaric.java17.java12.jmh_generated;

// JMH-generated wrapper type for the T1_TestJMH2 benchmark class (note the
// generated-sources path); the _B* superclasses hold generated state/padding.
// Do not edit by hand -- regenerated on every build.
public class T1_TestJMH2_jmhType extends T1_TestJMH2_jmhType_B3 {
}
|
//////////////////////////////////////////////////////////////////////////////
//
// This file is part of the Corona game engine.
// For overview and more information on licensing please refer to README.md
// Home page: https://github.com/coronalabs/corona
// Contact: <EMAIL>
//
//////////////////////////////////////////////////////////////////////////////
package com.ansca.corona.listeners;
import com.ansca.corona.MailSettings;
import com.ansca.corona.permissions.PermissionsSettings;
import com.ansca.corona.SmsSettings;
/** The interface has the functions that will show a new window/overlay. */
public interface CoronaShowApiListener{
	/**
	 * Called from media.selectPhoto(). The lua script wants to select a photo from the gallery.
	 * How the information gets back to the lua side is up to the CoronaKit developer.
	 * @param destinationFilePath The location of the requested save location.
	 */
	public void showSelectImageWindowUsing(String destinationFilePath);

	/**
	 * Called from media.selectPhoto(). The lua script wants to receive a photo from the camera.
	 * How the information gets back to the lua side is up to the CoronaKit developer.
	 * @param destinationFilePath The location of the requested save location.
	 */
	public void showCameraWindowForImage(String destinationFilePath);

	/**
	 * Called from media.selectVideo(). The lua script wants to select a video from the gallery.
	 * How the information gets back to the lua side is up to the CoronaKit developer.
	 */
	public void showSelectVideoWindow();

	/**
	 * Called from media.selectVideo(). The lua script wants to retrieve a video from the camera.
	 * How the information gets back to the lua side is up to the CoronaKit developer.
	 * @param maxVideoTime the preferred maximum length of the video.
	 * @param videoQuality the quality of the video. 0 for medium/low quality and 1 for high quality.
	 */
	public void showCameraWindowForVideo(int maxVideoTime, int videoQuality);

	/**
	 * Called from native.showPopup(). The lua script wants to send an email with the settings.
	 * @param mailSettings the settings the lua script passed, made into an object.
	 */
	public void showSendMailWindowUsing(MailSettings mailSettings);

	/**
	 * Called from native.showPopup(). The lua script wants to send an SMS with the settings.
	 * @param smsSettings the settings the lua script passed, made into an object.
	 */
	public void showSendSmsWindowUsing(SmsSettings smsSettings);

	/**
	 * Called from native.showPopup(). The lua script wants to show an app store popup.
	 * @param settings A hash map of the app IDs and supported stores.
	 * @return Returns true if the window is about to be displayed.
	 * <p>
	 * Returns false if the App Store could not be found and is unable to display a window.
	 */
	public boolean showAppStoreWindow(java.util.HashMap<String, Object> settings);

	/**
	 * Called from native.showPopup(). The lua script wants to request permissions with the settings.
	 * @param permissionsSettings the settings the lua script passed, made into an object.
	 */
	public void showRequestPermissionsWindowUsing(PermissionsSettings permissionsSettings);
}
|
def play_tone_sequence_nonblocking(self, tones):
    """Start playing ``tones`` without blocking the caller.

    Pure delegation to ``self.tone_maker.play_tone_sequence_nonblocking``;
    see that method for the expected shape of ``tones``.
    """
    self.tone_maker.play_tone_sequence_nonblocking(tones)
<reponame>HappyFacade/komet<gh_stars>1-10
/*******************************************************************************
* Copyright (c) 2015 BestSolution.at and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* <NAME><<EMAIL>> - initial API and implementation
*******************************************************************************/
package sh.komet.fx.tabpane;
/**
 * Interface to be implemented by a tab-control so that tabs can be
 * queried, added, removed and selected generically.
 */
public interface GenericTabPane {
	/**
	 * Get the index of the tab.
	 *
	 * @param t
	 *            the tab
	 * @return the index, as defined by the implementation for a tab that is
	 *         not contained in this pane
	 */
	public int indexOf(GenericTab t);

	/**
	 * Remove the tab.
	 *
	 * @param t
	 *            the tab
	 * @return <code>true</code> if remove succeeded
	 */
	public boolean remove(GenericTab t);

	/**
	 * Add the tab at the end.
	 *
	 * @param t
	 *            the tab
	 */
	public void add(GenericTab t);

	/**
	 * Add the tab at a given index.
	 *
	 * @param index
	 *            the index
	 * @param t
	 *            the tab
	 */
	public void add(int index, GenericTab t);

	/**
	 * @return the number of tab elements
	 */
	public int getTabNumber();

	/**
	 * Select (activate) the given tab.
	 *
	 * @param draggedTab
	 *            the tab to select
	 */
	public void select(GenericTab draggedTab);
}
|
def stop(self):
    """Deactivate the proxy, undoing the GCS patch if one was installed.

    Raises:
        RuntimeError: if the proxy was never started.
    """
    if self._is_started:
        if self.mock_gcs:
            # Undo the file-io patch that start() installed.
            self.patched_file_io.stop()
        self._is_started = False
    else:
        raise RuntimeError('stop called on unstarted tf_file_io_proxy')
<filename>gopher_types.ts
// Gopher item-type characters (the canonical '0'-'9' set per the original
// Gopher protocol spec, RFC 1436) plus common de-facto extensions.
// The character is the first byte of a Gopher menu line.
export const TYPE_TEXT = '0';
export const TYPE_MENU = '1';
export const TYPE_CCSO_NAMESERVER = '2';
export const TYPE_ERROR = '3';
export const TYPE_BINHEX_FILE = '4';
export const TYPE_DOS_FILE = '5';
export const TYPE_UUENCODED_FILE = '6';
export const TYPE_FULL_TEXT_SEARCH = '7';
export const TYPE_TELNET = '8';
export const TYPE_BINARY_FILE = '9';
// Widely used extension types.
export const TYPE_MIRROR = '+';
export const TYPE_GIF = 'g';
export const TYPE_IMAGE = 'I';
export const TYPE_TELNET_3270 = 'T';
export const TYPE_DOC = 'd';
export const TYPE_HTML = 'h';
export const TYPE_INFO = 'i';
export const TYPE_AUDIO = 's';
// Gopher+ media types.
export const TYPE_PLUS_IMAGE = ':';
export const TYPE_PLUS_VIDEO = ';';
export const TYPE_PLUS_AUDIO = '<';

/**
 * An unknown Gopher menu type. Use this if you want to use a "new" type that
 * is not part of the common spec.
 */
export class UnknownType {
  constructor(public readonly character:string) {}
}

// Any known type constant, or an UnknownType wrapper for everything else.
export type ItemType = UnknownType | typeof TYPE_TEXT | typeof TYPE_MENU |
  typeof TYPE_CCSO_NAMESERVER | typeof TYPE_ERROR | typeof TYPE_BINHEX_FILE |
  typeof TYPE_DOS_FILE | typeof TYPE_UUENCODED_FILE | typeof TYPE_FULL_TEXT_SEARCH |
  typeof TYPE_TELNET | typeof TYPE_BINARY_FILE | typeof TYPE_MIRROR | typeof TYPE_GIF |
  typeof TYPE_IMAGE | typeof TYPE_TELNET_3270 | typeof TYPE_DOC | typeof TYPE_HTML |
  typeof TYPE_INFO | typeof TYPE_AUDIO | typeof TYPE_PLUS_IMAGE | typeof TYPE_PLUS_VIDEO |
  typeof TYPE_PLUS_AUDIO;
|
Serial pay-it-forward incidents involving between 4 and 24 cars have been reported at Wendy’s, McDonald’s, Starbucks, Del Taco, Taco Bell, KFC and Dunkin’ Donuts locations in Maryland, Florida, California, Texas, Louisiana, Pennsylvania, Oklahoma, Georgia, Alabama, North Dakota, Michigan, North Carolina and Washington.
More typically, though, it’s one customer acting alone and perhaps routinely. “We have a lady who always pays it forward in the drive-through, every day,” said Aaron Quinton, co-owner of Old School Bagel Cafe, in Tulsa, Okla. “I point at the person behind and she just nods.”
The anonymity of the drive-through makes it especially easy to pay it forward because it dispenses with any awkwardness and suspicion about motives. The payer pulls away before the next car pulls up and discovers a gift that is impossible to refuse.
“If you paid for someone inside a restaurant, they would see you,” said Jessica Kelishes, a marketing representative for an auto parts distributor, who pays it forward at Del Taco, McDonald’s and Starbucks drive-throughs in Banning, Calif. “I just do it out of kindness rather than for recognition.” She said her kindness stemmed from feeling blessed and wanting to share her good fortune. But others have told drive-through cashiers they wanted to pay it forward in gratitude to drivers who waved their car ahead of them in line or after noticing in the rearview mirror a woman weeping into her steering wheel, and wanting to make her smile. Cancer survivors have done it in appreciation of life, and new parents have done it to celebrate their baby.
But more often there is an expressed desire to do something good at a time when so much else in the world seems so dishearteningly bad. It’s a stark contrast, and perhaps a backlash, to the seemingly unremitting reports of unkindness in the news — politicians shutting down the government, N.S.A. spying, teenage suicides resulting from cyber-bullying, vicious slayings at a mall in Kenya, gas attacks in Syria.
“It’s about giving, and letting people see not everybody is bad, and there are nice people out there and maybe we can turn it around,” said Connie Herring, an optical technician in St. Pauls, N.C., who pays it forward at drive-throughs at least once a week.
But her generosity has its limits. “I don’t do it at Starbucks because I did it there once and that one time ended up costing me 12 bucks,” she said. “You can’t pay it forward if you’re broke.” |
/**
* Converts big-endian integer to byte array.
* @param integer Interger value (4-bytes)
* @return Byte array of length 4 created from parameter
*/
protected static byte[] intToByteArray(final int integer)
{
byte[] byteArray = new byte[4];
for (int i = 0; i < 4; ++i)
byteArray[3 - i] = (byte) (integer >>> (i * 8));
return byteArray;
} |
/**
 * A DTO for the Project entity.
 *
 * <p>Carries project metadata between layers. Well-known keys of the
 * free-form {@code attributes} map are exposed as constants. Equality is
 * based solely on the database {@code id}.
 */
public class ProjectDTO implements Serializable {

    private static final long serialVersionUID = 1L;

    // Well-known keys for the free-form `attributes` map.
    public static final String EXTERNAL_PROJECT_URL_KEY = "External-project-url";
    public static final String EXTERNAL_PROJECT_ID_KEY = "External-project-id";
    public static final String WORK_PACKAGE_KEY = "Work-package";
    public static final String PHASE_KEY = "Phase";
    public static final String HUMAN_READABLE_PROJECT_NAME = "Human-readable-project-name";
    public static final String PRIVACY_POLICY_URL = "Privacy-policy-url";

    // Database identifier; null for a not-yet-persisted project.
    private Long id;

    @NotNull
    private String projectName;

    // Optional display name (see HUMAN_READABLE_PROJECT_NAME attribute key).
    private String humanReadableProjectName;

    @NotNull
    private String description;

    private String organization;

    @NotNull
    private String location;

    private ZonedDateTime startDate;

    private ProjectStatus projectStatus;

    private ZonedDateTime endDate;

    // Omitted from serialized JSON when null.
    @JsonInclude(Include.NON_NULL)
    private Set<SourceTypeDTO> sourceTypes;

    // Free-form key/value metadata; see the *_KEY constants above.
    private Map<String, String> attributes;

    // Per-project override for the persistent-token timeout, if any.
    private Long persistentTokenTimeout;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getOrganization() {
        return organization;
    }

    public void setOrganization(String organization) {
        this.organization = organization;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public ZonedDateTime getStartDate() {
        return startDate;
    }

    public void setStartDate(ZonedDateTime startDate) {
        this.startDate = startDate;
    }

    public ProjectStatus getProjectStatus() {
        return projectStatus;
    }

    public void setProjectStatus(ProjectStatus projectStatus) {
        this.projectStatus = projectStatus;
    }

    public ZonedDateTime getEndDate() {
        return endDate;
    }

    public void setEndDate(ZonedDateTime endDate) {
        this.endDate = endDate;
    }

    public Set<SourceTypeDTO> getSourceTypes() {
        return sourceTypes;
    }

    public void setSourceTypes(Set<SourceTypeDTO> sourceTypes) {
        this.sourceTypes = sourceTypes;
    }

    public Map<String, String> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    public String getHumanReadableProjectName() {
        return humanReadableProjectName;
    }

    public void setHumanReadableProjectName(String humanReadableProjectName) {
        this.humanReadableProjectName = humanReadableProjectName;
    }

    public Long getPersistentTokenTimeout() {
        return persistentTokenTimeout;
    }

    public void setPersistentTokenTimeout(Long persistentTokenTimeout) {
        this.persistentTokenTimeout = persistentTokenTimeout;
    }

    /**
     * Two DTOs are equal iff both have a non-null, equal {@code id};
     * unsaved DTOs (null id) are never equal to anything.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        ProjectDTO projectDto = (ProjectDTO) o;

        if (id == null || projectDto.id == null) {
            return false;
        }

        return Objects.equals(id, projectDto.id);
    }

    // Consistent with equals: based on id only (0 for null id).
    @Override
    public int hashCode() {
        return Objects.hashCode(id);
    }

    // NOTE(review): omits humanReadableProjectName, sourceTypes, attributes
    // and persistentTokenTimeout -- confirm this is intentional.
    @Override
    public String toString() {
        return "ProjectDTO{"
                + "id=" + id
                + ", projectName='" + projectName + "'"
                + ", description='" + description + "'"
                + ", organization='" + organization + "'"
                + ", location='" + location + "'"
                + ", startDate='" + startDate + "'"
                + ", projectStatus='" + projectStatus + "'"
                + ", endDate='" + endDate + "'"
                + '}';
    }
}
use rt::{Force, ForceRef, IntBlocker};
use lists::{DList, SortedList};
use event::{Event, EventQueue};
use core::cmp::Ordering;
use core::iter::FromIterator;
use core::ptr::Shared;
/// Index of a timer slot inside `TimerManager::timer_pool`.
pub type TimerId = u16;

/// Action performed when a timer expires.
pub enum TimerHandler {
    /// Slot is free / not armed.
    Unset,
    /// Push `Event::Timer(id)` onto the given event queue.
    Queue(ForceRef<EventQueue>),
    /// Invoke the callback with the expiring timer's id.
    Callback(fn(TimerId) -> ())
}

/// Fixed-pool timer manager: 256 preallocated entities, split between a
/// free list and a list of armed timers sorted by expiry tick.
pub struct TimerManager {
    timer_pool: [TimerEntity; 256],
    free_timers: DList<TimerEntity>,         // unarmed slots
    ticking_timers: SortedList<TimerEntity>, // armed slots, ordered by `tick`
    counter: usize                           // current tick count (wrapping)
}

unsafe impl Send for TimerManager { }
unsafe impl Sync for TimerManager { }

impl TimerManager {
    /// Number every pool slot and thread them all onto the free list.
    #[inline(always)]
    pub fn init(&mut self) {
        for (i, timer) in self.timer_pool.iter_mut().enumerate() {
            *timer = TimerEntity::new(i as TimerId);
        }
        self.free_timers = DList::from_iter(self.timer_pool.iter_mut().map(|timer| unsafe { Shared::new(timer) }));
        self.ticking_timers = SortedList::new(TimerEntity::cmp);
        self.counter = 0;
    }

    /// Claim a free slot, store `handler` in it and return its id.
    /// Panics ("Not enough timers") when the pool is exhausted.
    fn with_handler(&mut self, handler: TimerHandler) -> TimerId {
        unsafe {
            let timer = self.free_timers.pop_front().expect("Not enough timers");
            (**timer).handler = handler;
            (**timer).id
        }
    }

    /// Return a slot to the free list, unlinking it from the armed list first.
    fn remove(&mut self, timer: Shared<TimerEntity>) {
        if self.ticking_timers.contains(&timer) {
            self.ticking_timers.remove(&timer);
        }
        self.free_timers.push_back(timer);
    }

    /// Advance the counter by `count` ticks and fire every due timer.
    pub fn tick(&mut self, count: usize) {
        unsafe {
            self.counter = self.counter.wrapping_add(count);
            while let Some(timer) = self.ticking_timers.front() {
                if (**timer).tick > self.counter {
                    break; // front is the earliest expiry; nothing else is due
                }
                self.ticking_timers.remove(&timer);
                match (**timer).handler {
                    TimerHandler::Unset => unreachable!(),
                    TimerHandler::Queue(ref mut queue) => queue.push(Event::Timer((**timer).id)),
                    TimerHandler::Callback(cb) => {
                        cb((**timer).id);
                        // NOTE(review): processing stops after one callback even
                        // if further timers are already due -- confirm intended.
                        break;
                    }
                }
            }
        }
    }

    /// Current tick counter value.
    #[inline(always)]
    pub fn counter(&self) -> usize {
        self.counter
    }
}
/// A single timer slot; doubly linked so it can live in either the free
/// list or the sorted list of armed timers.
struct TimerEntity {
    id: TimerId,
    handler: TimerHandler,
    tick: usize, // absolute counter value at which the timer fires (0 = unarmed)
    prev: Option<Shared<TimerEntity>>,
    next: Option<Shared<TimerEntity>>
}

impl_linked_node!(Shared<TimerEntity> { prev: prev, next: next });

impl TimerEntity {
    /// Fresh, unarmed slot with the given pool index.
    #[inline]
    fn new(id: TimerId) -> TimerEntity {
        TimerEntity {
            id: id,
            handler: TimerHandler::Unset,
            tick: 0,
            prev: None,
            next: None
        }
    }

    /// (Re)arm the timer to fire `delay` ticks from now.
    pub fn reset(this: Shared<TimerEntity>, delay: usize) {
        unsafe {
            let _blocker = IntBlocker::new();
            let mut man = manager();
            let counter = man.counter();
            if counter < (**this).tick {
                // Still pending: unlink so it can be re-inserted at its new
                // position (originally: "move to the end of the list").
                // NOTE(review): if the timer is due (tick <= counter) but not
                // yet processed by tick(), it is pushed again without being
                // removed first, which could leave a duplicate entry -- confirm.
                man.ticking_timers.remove(&this);
            }
            (**this).tick = counter + delay;
            man.ticking_timers.push(this);
        }
    }

    /// Disarm the timer if it is armed; the slot stays allocated.
    pub fn clear(this: Shared<TimerEntity>) {
        unsafe {
            let _blocker = IntBlocker::new();
            let mut man = manager();
            if man.ticking_timers.contains(&this) {
                man.ticking_timers.remove(&this);
            }
            (**this).tick = 0;
        }
    }

    /// Ordering for the armed list: earliest expiry first.
    fn cmp(a: &TimerEntity, b: &TimerEntity) -> Ordering {
        a.tick.cmp(&b.tick)
    }
}
/// RAII timer handle: the pool slot is released automatically on drop.
pub struct Timer(TimerId);

impl Timer {
    /// Create a timer that posts `Event::Timer` to `queue` on expiry.
    #[inline]
    pub fn with_queue(queue: ForceRef<EventQueue>) -> Timer {
        Timer(manager().with_handler(TimerHandler::Queue(queue)))
    }

    /// Create a timer that invokes `callback` on expiry.
    #[inline]
    pub fn with_callback(callback: fn(TimerId) -> ()) -> Timer {
        Timer(manager().with_handler(TimerHandler::Callback(callback)))
    }

    /// Raw pointer to this timer's slot in the manager's pool.
    #[inline]
    fn entity(&self) -> Shared<TimerEntity> {
        unsafe {
            Shared::new(&mut manager().timer_pool[self.0 as usize])
        }
    }

    /// Slot id of this timer.
    #[inline(always)]
    pub fn id(&self) -> TimerId {
        self.0
    }

    /// (Re)arm to fire `delay` ticks from now.
    #[inline(always)]
    pub fn reset(&self, delay: usize) {
        TimerEntity::reset(self.entity(), delay);
    }

    /// Disarm without releasing the slot.
    #[inline(always)]
    pub fn clear(&self) {
        TimerEntity::clear(self.entity());
    }
}

impl Drop for Timer {
    // Return the slot to the pool when the handle goes away.
    #[inline]
    fn drop(&mut self) {
        manager().remove(self.entity());
    }
}
/// Like `Timer` but without RAII: the slot must be released manually via
/// `drop()`, and handles may be freely `clone`d (clones share one slot),
/// hence the `unsafe` constructors.
pub struct UnmanagedTimer(TimerId);

impl UnmanagedTimer {
    /// Create a timer that posts `Event::Timer` to `queue` on expiry.
    #[inline]
    pub unsafe fn with_queue(queue: ForceRef<EventQueue>) -> UnmanagedTimer {
        UnmanagedTimer(manager().with_handler(TimerHandler::Queue(queue)))
    }

    /// Create a timer that invokes `callback` on expiry.
    #[inline]
    pub unsafe fn with_callback(callback: fn(TimerId) -> ()) -> UnmanagedTimer {
        UnmanagedTimer(manager().with_handler(TimerHandler::Callback(callback)))
    }

    /// Raw pointer to this timer's slot in the manager's pool.
    #[inline]
    fn entity(&self) -> Shared<TimerEntity> {
        unsafe {
            Shared::new(&mut manager().timer_pool[self.0 as usize])
        }
    }

    /// Slot id of this timer.
    #[inline(always)]
    pub fn id(&self) -> TimerId {
        self.0
    }

    /// (Re)arm to fire `delay` ticks from now.
    #[inline(always)]
    pub fn reset(&self, delay: usize) {
        TimerEntity::reset(self.entity(), delay);
    }

    /// Disarm without releasing the slot.
    #[inline(always)]
    pub fn clear(&self) {
        TimerEntity::clear(self.entity());
    }

    /// Release the slot back to the pool. Callers must ensure no clone of
    /// this handle is used afterwards.
    #[inline]
    pub fn drop(&self) {
        manager().remove(self.entity());
    }
}

impl Clone for UnmanagedTimer {
    // Clones share the same slot id; no new slot is allocated.
    #[inline]
    fn clone(&self) -> UnmanagedTimer {
        UnmanagedTimer(self.0)
    }
}

/// The single global timer manager instance.
static MANAGER: Force<TimerManager> = Force::new();

/// Initialize the global timer manager; must be called before any timer use.
#[inline]
pub fn init() {
    MANAGER.setup().init();
}

/// Accessor for the global timer manager.
#[inline(always)]
pub fn manager() -> ForceRef<TimerManager> {
    MANAGER.as_ref()
}
|
/*
 * ======== LoggerSM_setFilterLevel ========
 * Sets the filter level for the given diags categories.
 *
 * LoggerSM maintains a separate filter level for every diags category.
 * This is accomplished by maintaining three masks, one for each of the
 * levels 1 - 3, which store the diags categories currently at that level.
 * There is no mask for level 4; if a diags category is not found in levels
 * 1-3, its filtering level is assumed to be level 4.
 *
 * This API is an instance function per the IFilterLogger interface, but
 * LoggerSM only maintains module-wide filter levels, so `obj` is unused.
 *
 * TODO - Should this be conditional on the 'filterByLevel' config?
 */
Void LoggerSM_setFilterLevel(LoggerSM_Object *obj,
                             xdc_runtime_Diags_Mask mask,
                             xdc_runtime_Diags_EventLevel filterLevel)
{
    /* Clear the given categories from every per-level mask first.
     * (x &= ~mask is equivalent to the original ~(x & mask) & x.) */
    LoggerSM_module->level1 &= ~mask;
    LoggerSM_module->level2 &= ~mask;
    LoggerSM_module->level3 &= ~mask;

    /* Then record the categories in the mask for the requested level. */
    switch (filterLevel) {
        case Diags_LEVEL1:
            LoggerSM_module->level1 |= mask;
            break;

        case Diags_LEVEL2:
            LoggerSM_module->level2 |= mask;
            break;

        case Diags_LEVEL3:
            LoggerSM_module->level3 |= mask;
            break;

        case Diags_LEVEL4:
            /* Level 4 is the implicit default; nothing to set. */
            break;

        default: {
            /* Unknown level: raise an error. */
            Error_Block eb;
            Error_init(&eb);
            Error_raise(&eb, LoggerSM_E_badLevel, filterLevel, 0);
            break;
        }
    }
}
<gh_stars>1-10
import React, { useRef, useEffect } from 'react'
import { LocalVideoTrack, RemoteVideoTrack, Track } from 'twilio-video'
import useMediaStreamTrack from './useMediaStreamTrack'
import useVideoTrackDimensions from './useVideoTrackDimensions'
interface VideoTrackProps {
  track: LocalVideoTrack | RemoteVideoTrack;
  isLocal?: boolean;
  priority?: Track.Priority | null;
}

/**
 * Renders a Twilio video track into a <video> element, handling
 * attach/detach and (for remote tracks) publish priority.
 */
export default function VideoTrack ({ track, isLocal, priority }: VideoTrackProps) {
  const ref = useRef<HTMLVideoElement>(null!)
  const mediaStreamTrack = useMediaStreamTrack(track)
  const dimensions = useVideoTrackDimensions(track)
  const isPortrait = (dimensions?.height ?? 0) > (dimensions?.width ?? 0)

  useEffect(() => {
    const el = ref.current
    // Mute the element; only video is rendered here.
    el.muted = true
    // Only remote tracks expose setPriority.
    if ((track as RemoteVideoTrack).setPriority && priority) {
      (track as RemoteVideoTrack).setPriority(priority)
    }
    track.attach(el)
    return () => {
      track.detach(el)
      if ((track as RemoteVideoTrack).setPriority && priority) {
        // Passing `null` to setPriority will set the track's priority to that which it was published with.
        (track as RemoteVideoTrack).setPriority(null)
      }
    }
  }, [track, priority, mediaStreamTrack])

  // The local video track is mirrored if it is not facing the environment.
  const isFrontFacing = mediaStreamTrack?.getSettings().facingMode !== 'environment'
  const style = {
    transform: isLocal && isFrontFacing ? 'rotateY(180deg)' : '',
    objectFit: isPortrait || track.name.includes('screen') ? ('contain' as const) : ('cover' as const)
  }

  // eslint-disable-next-line jsx-a11y/media-has-caption
  return <video ref={ref} style={style} />
}
|
/*
 * Refreshes the context with properties satisfying to invoke update.
 */
private void forceUpdate() {
    // Change Eureka client properties so the environment differs from the
    // current configuration.
    changeProperty("eureka.client.use-dns-for-fetching-service-urls=false",
            "eureka.client.region=unavailable-region");
    // Publish a change event for the service-url key to trigger the
    // refresh/update path.
    this.context.publishEvent(
            new EnvironmentChangeEvent(Collections.singleton("eureka.client.service-url.defaultZone")));
}
def _random_uniform(shape, dtype, seed=None, seed2=None, name=None):
    """Emit a ``RandomUniform`` op via the generated op-def library.

    Thin generated-style wrapper: all arguments are forwarded unchanged
    to ``_op_def_lib.apply_op`` and its result is returned.
    """
    return _op_def_lib.apply_op(
        "RandomUniform", shape=shape, dtype=dtype, seed=seed, seed2=seed2,
        name=name)
Human development index modelling in South Kalimantan province using panel regression
Human development is a paradigm and becomes the focus and target of all development activities. Development is a way to improve welfare and a better quality of life. The Human Development Index (HDI) is one indicator to measure the success of a development. The purpose of this research is to describe the factors that are thought to influence HDI in South Kalimantan Province, estimate the parameters of the HDI panel regression model, and determine the best model. The data of this research is sourced from the Central Statistics Agency (BPS) of South Kalimantan Province with a period from 2015-2018. Based on the results of data analysis it can be concluded that the Fixed Effect Model with the time effect is the best model of the HDI panel regression in South Kalimantan Province with an R-Squared value of 99,81.
An analysis of the HDI of the Province of South Kalimantan in 2015-2018 was carried out again in this study using panel regression to measure the outcome of the development that has been carried out in the Province of South Kalimantan in the following year. Research has identified the factors that have a major impact on HDI estimates in the province of South Kalimantan. Other factors, however, may have influenced the HDI score in South Kalimantan Province. Using a panel regression approach, this study will estimate the regression parameters and determine the optimum HDI model in South Kalimantan Province, as well as estimate the parameters of the HDI panel regression model, and determine the best model.
Descriptive Statistics
Descriptive statistics is a strategy for collecting, classifying, and concisely presenting data so that it can be understood more easily .
Panel Regression Analysis
Panel regression analysis is a method for modeling the effect of the independent variable on the dependent variable in different research sectors over a period of time. Model of panel regression :
Estimation Panel Regression Model
Three types of estimation can be used to estimate a panel regression model. $H_0$ is rejected if $JB > \chi^2_{(\alpha,2)}$, meaning the residuals are not normally distributed.
Multicollinearity Test.
In linear regression, the multicollinearity test is used to measure the correlation of independent variables. One method of detecting multicollinearity symptoms is to examine the R2 value of the estimated regression model and analyze it using a correlation matrix of independent variables .
Conclusion: If > reject 0 this indicates that the independent variable has a concurrent effect on the dependent variable.
b. Time Effect Model Hypotheses: Test statistics using equation (10) Conclusion: If > reject 0 this indicates that the independent variable has a concurrent effect on the dependent variable.
The t-test (Partial).
The purpose of t-test testing will be how the independent variables in the Fixed Effect Model affect the dependent variable independently . Hypotheses: Conclusion : If | ℎ | > ( 2 , − ) or 0 is rejected, the independent variable has an individual effect.
Coefficient of Determination.
The coefficient of determination is used to estimate how much of the dependent variable's diversity is explained by the independent variable .
Descriptive Statistics
The pattern of HDI distribution in South Kalimantan Province, and also the factors that are thought to influence it.
Panel Regression Model
If the probability value for each variable is greater than the significance = 0,05 level in panel regression analysis, the model is not significant. Variables that are not significant will be excluded from the model one at a period, starting with the one with the highest probability value. The following are the results of the model parameter estimation that was repeated, and the results are as follows: The results of the school participation rate variable with a negative coefficient value can be seen in Table 3. This means that as the value of the school participation rate increases, the HDI value decreases or decreases. Meanwhile, HDI is positively influenced by variables such as expected years of schooling, mean years of school, life expectancy at birth, health facilities, and adjusted per capita expenditure.
According to the value of each coefficient, if the projected expected years of schooling, mean years of school, life expectancy, health facilities, and adjusted per capita expenditure increase, the HDI value will also increase. The estimated individual-effect FEM model, as can be seen in Table 4, is $\hat{Y}_{it} = \alpha_i + 0.9335X_{1,it} + 2.1831X_{3,it} + 1.3144X_{4,it} + \varepsilon_{it}$. According to Table 4, the variables expected years of schooling, mean years of school, and life expectancy at birth have a favorable effect on HDI: by the value of each coefficient, if expected years of schooling, mean years of schooling, or life expectancy increases, the HDI value will also increase. The estimated time-effect FEM model, as can be seen in Table 5, is $\hat{Y}_{it} = \hat{\alpha}_t + 0.7974273X_{1,it} - 0.0569077X_{2,it} + 1.15169314X_{3,it} + 0.63052511X_{4,it} + 0.0017921X_{5,it} + 0.1157241X_{6,it} + \varepsilon_{it}$. According to Table 5, the variables expected years of schooling, mean years of school, life expectancy at birth, health facilities, and adjusted per capita expenditure have a favorable effect on HDI; if any of them increases, the HDI value will also grow. The school participation rate variable ($X_2$), on the other hand, has a negative coefficient, meaning that as the school participation rate rises, the HDI value falls. Table 6 shows that the variables expected years of schooling, mean years of school, and life expectancy at birth all have positive coefficients, which means that when the value of each of these variables rises, the HDI value rises by the coefficient value. The variable percentage of poor people has a negative coefficient, so the HDI value will fall by the coefficient value if the percentage of poor people increases.
Table 7. Result Chow Test
Method of Regression Panel
The Fixed Effect Model is the best model if the probability value is less than the value of the significance level = 0,05 and it rejects H0, as shown in Table 7. Table 8 shows that the probability value is less than the significance level = 0,05, so it is rejected, indicating that the Fixed Effect Model is the best selection. The Fixed model was used in the study and was based on the Chow and Hausman tests.
Multicollinearity Test.
The multicollinearity test is used to see if an independent variable in one model correlates with other independent variables. There must be no correlation between the independent variables in a decent regression model. Table 9. Result Multicollinearity Test X1 X2 X3 X4 X5 X6 X7 X1 1 Table 9 values of the correlation coefficient between the independent variables < 0.9, it is free from multicollinearity symptoms.
4.4.3
Heteroscedasticity Test. The heteroscedasticity test is used to determine whether there is an inequality of variance between residuals across observations in the regression model, i.e. whether the residual variance–covariance structure is homoscedastic or heteroscedastic. The Breusch–Pagan statistic was used: $BP = 28.47 < \chi^2_{(0.05,19)} = 30.1435$, and the probability value $(0.07878) > \alpha = 0.05$, so $H_0$ is accepted, which indicates that the model's residual variance is homoscedastic.
4.4.4
Autocorrelation Test. The autocorrelation test is used to see if there is a correlation between confounding errors and residuals in the t-1 (prior) period in a linear regression model.
Effect Test
Statistic
4.5.6
Result Coefficient Determination Using Individual Effect Model. The results of measuring the coefficient of determination using the time effect model were obtained from the study's findings value coefficient 0,981or the independent variable in the individual impact model's Fixed Influence Model has a 98.1% effect.
Conclusion
The following conclusions are drawn from the findings and discussion. |
// GetClusters returns the cluster instances for an organization ID.
func (m *Manager) GetClusters(ctx context.Context, organizationID uint) ([]CommonCluster, error) {
logger := m.getLogger(ctx).WithFields(logrus.Fields{
"organization": organizationID,
})
logger.Debug("fetching clusters from database")
clusterModels, err := m.clusters.FindByOrganization(organizationID)
if err != nil {
return nil, err
}
var clusters []CommonCluster
for _, clusterModel := range clusterModels {
logger := logger.WithField("cluster", clusterModel.Name)
logger.Debug("converting cluster model to common cluster")
cluster, err := GetCommonClusterFromModel(clusterModel)
if err != nil {
logger.Errorf("converting cluster model to common cluster failed: %s", err.Error())
continue
}
clusters = append(clusters, cluster)
}
return clusters, nil
} |
// Intercept Runtime.getRuntime().exit, and check if the caller is allowed to use it; if not, wrap it in an ExitTrappedException
public static void runtimeExitCalled(Runtime runtime, int status)
{
    // Throws (via ExitVisitor) if the caller may not exit the JVM.
    ExitVisitor.checkAccess();
    // Permitted: delegate to the real Runtime.exit with the given status.
    runtime.exit(status);
}
package io.pivotal.labs.matchers;
import org.hamcrest.Matcher;
/**
 * Matches a value that can be treated as a JSON array (an {@link Iterable}),
 * optionally applying a further matcher to its elements.
 */
public class JsonArrayMatcher extends CastingMatcher<Iterable> {

    /** Matches any Iterable, with no constraint on its elements. */
    public static Matcher<Object> jsonArray() {
        return new JsonArrayMatcher(null);
    }

    /** Matches an Iterable that also satisfies {@code elementsMatcher}. */
    public static Matcher<Object> jsonArrayWhich(Matcher<? extends Iterable> elementsMatcher) {
        return new JsonArrayMatcher(elementsMatcher);
    }

    // Private: construct via the static factories above.
    private JsonArrayMatcher(Matcher<? extends Iterable> elementsMatcher) {
        super(Iterable.class, "an array", elementsMatcher);
    }
}
|
// 二叉排序树
#include <iostream>
#include <stack>
using namespace std;
typedef int KeyType;

// Payload stored in each BST node: a key plus a duplicate counter.
typedef struct {
    KeyType key;   // key field
    int count;     // number of inserted duplicates of this key
} ElemType;

// Binary search tree node.
typedef struct BiTNode{
    ElemType data;                    // data element
    struct BiTNode *lchild, *rchild;  // left / right subtrees
} BiTNode, *BiTree;
// Allocate and initialize a fresh node: empty children, count = 1.
// NOTE(review): data.key is left uninitialized; callers must assign it.
void InitBST (BiTree &T) {
    T = new BiTNode;
    T->rchild = NULL;
    T->lchild = NULL;
    T->data.count = 1;
}
// Return how many times `key` has been inserted (0 if absent).
int SearchBST (BiTree T, KeyType key)
{
    BiTree p = T;
    while (p) {
        if (key == p->data.key) {
            return p->data.count;
        } else if (p->data.key > key) {
            p = p->lchild;   // key is smaller: descend left
        } else {
            p = p->rchild;   // key is larger: descend right
        }
    }
    // Key not found.
    return 0;
}
// Insert `key` into the BST; a duplicate key just bumps the node's count.
void InsertBST (BiTree &T, KeyType key)
{
    int flag = 0; // set to 1 once the key is found to already exist
    BiTree p, q;
    BiTree S;
    InitBST(S);
    S->data.key = key;
    if (!T) T = S;   // empty tree: the new node becomes the root
    else {
        p = T;
        while (p) {
            q = p;   // q trails p as the would-be parent
            if (S->data.key == p->data.key) {
                p->data.count++;
                delete S; // key already present: discard the spare node
                flag = 1; break;
            } else if (S->data.key < p->data.key) {
                p = p->lchild;
            } else {
                p = p->rchild;
            }
        }
        if (!flag) {
            // Attach the new node under the last visited parent.
            if (S->data.key < q->data.key) q->lchild = S;
            else q->rchild = S;
        }
    }
    return ;
}// InsertBST
// Remove node p (whose parent is f, NULL if p is the root) from the tree.
// A duplicated key only decrements the counter; otherwise the node is
// unlinked using the standard BST deletion cases and its memory released.
void delNode (BiTree &T, BiTree p, BiTree f)
{
    BiTree s, q;
    int tag = 0; // 0: at most one child; 1: two-child case already handled
    if (p->data.count > 1) {
        // Duplicate entries exist: deleting one just decrements the count.
        (p->data.count)--;
    }
    else {
        if (!p->lchild) s = p->rchild;       // only a right child (or none)
        else if (!p->rchild) s = p->lchild;  // only a left child
        else {
            // Two children: copy the in-order predecessor's data (maximum
            // of the left subtree) into p, then splice that node out.
            q = p; s = p->lchild; // q is s's parent
            while (s->rchild) {
                q = s; s = s->rchild;
            }
            p->data = s->data;
            if (q == p) q->lchild = s->lchild;
            else q->rchild = s->lchild;
            delete s;
            tag = 1; // both children existed; predecessor node freed
        }
        if (!tag) {
            // Link p's single child (or NULL) into p's place...
            if (!f) T = s;                         // p was the root
            else if (f->lchild == p) f->lchild = s;
            else f->rchild = s;
            // ...and free p itself.
            // BUG FIX: the original executed `delete s;` here, which freed
            // the child subtree that was just relinked and leaked p.
            delete p;
        }
    }
    return ;
}// delNode
// Remove one occurrence of `key` from the tree rooted at T.
// Returns 1 when the key was found (delNode applied), 0 otherwise.
int DeleteBST (BiTree &T, KeyType key)
{
    BiTree cur = T;
    BiTree parent = NULL;
    while (cur) {
        if (cur->data.key == key) {
            delNode(T, cur, parent);
            return 1;
        }
        parent = cur;
        cur = (key < cur->data.key) ? cur->lchild : cur->rchild;
    }
    return 0;
}// DeleteBST
// Smallest key of a non-empty tree: follow left children to the bottom.
KeyType FindMin (BiTree T)
{
    BiTree cur = T;
    while (cur->lchild != NULL)
        cur = cur->lchild;
    return cur->data.key;
}// FindMin
// Return the in-order predecessor of `key`: the node holding the largest key
// strictly smaller than `key`, or NULL when none exists.
// Implementation: iterative in-order traversal with an explicit stack -- keys
// are visited in ascending order, so the last node seen before reaching a
// key >= `key` is the predecessor.
BiTree FrontBST (BiTree T, KeyType key)
{
    BiTree p, q = NULL, pre = NULL;   // q tracks the last in-order node < key
    stack<BiTree> S;
    p = T;
    do {
        while (p) {
            S.push(p); p = p->lchild;   // dive left, stacking the path
        }
        if (!S.empty()) {
            p = S.top(); S.pop();       // next node in ascending key order
            if (p->data.key >= key) {
                pre = q;                // first key >= target: q is the answer
                break;
            }
            q = p;                      // remember the latest key < target
            p = p->rchild;
        }
    } while (!S.empty() || p);
    // If every key in the tree is < `key`, the last node visited wins.
    if (q && q->data.key < key) pre = q;
    return pre;
}// FrontBST
// Read one command from stdin and apply it to the tree:
//   1 key -> insert; 2 key -> delete one copy ("None" if absent);
//   3 key -> print stored count; 4 -> print minimum key (silent when empty);
//   5 key -> print in-order predecessor of key ("None" if absent).
void OperatorBST (BiTree &T)
{
    int op;
    KeyType key;
    BiTree pre;   // predecessor node for op 5
    cin >> op;
    switch (op) {
    case 1: {   // insert
        cin >> key;
        InsertBST(T, key);
        break;
    }
    case 2: {   // delete; report "None" when the key is absent
        // (was `if (DeleteBST(T, key)) ; else ...` -- an empty statement;
        //  the condition is inverted instead, same behavior)
        cin >> key;
        if (!DeleteBST(T, key)) cout << "None" << endl;
        break;
    }
    case 3: {   // query the number of stored copies
        cin >> key;
        int cnt = SearchBST(T, key);
        cout << cnt << endl;
        break;
    }
    case 4: {   // minimum element (FindMin requires a non-empty tree)
        if (T) {
            KeyType minKey = FindMin(T);   // renamed: `min` shadowed std::min
            cout << minKey << endl;
        }
        break;
    }
    case 5: {   // predecessor
        cin >> key;
        pre = FrontBST(T, key);
        if (pre) cout << pre->data.key << endl;
        else cout << "None" << endl;
        break;
    }
    default: ;   // unknown op codes are silently ignored
    }
}
int main()
{
int n;
BiTree T = NULL;
cin >> n;
for (int i = 0; i < n; i++) {
OperatorBST(T);
}
return 0;
} |
<gh_stars>0
import * as React from "react";
import { connect } from "react-redux";
import { actionCreators, AppState } from "../../state-management";
import Button from "../common/Button";
// Props injected by the redux `connect` wrapper at the bottom of this file,
// plus the parent-supplied `done` callback (called after choosing a place).
interface Props {
    goto: typeof actionCreators.goto;
    currentPlace: AppState["currentPlace"];
    done: () => void;
}

// No local state; the component is purely prop-driven.
interface State {}
class Map extends React.Component<Props, State> {
render() {
return (
<div className="flex justify-between items-center">
<Button
onClick={() => {
this.props.goto("woods");
this.props.done();
}}
>
aller dans la foret
</Button>
<Button
onClick={() => {
this.props.goto("home");
this.props.done();
}}
>
aller à la maison
</Button>
</div>
);
}
}
// Wire the component to the store: expose `currentPlace` from state, and the
// `goto` action creator (bound to dispatch by react-redux's object shorthand).
export default connect(
    (state: AppState) => ({
        currentPlace: state.currentPlace
    }),
    {
        goto: actionCreators.goto
    }
)(Map);
|
<reponame>mingmoe/UtopiaServer-Cpp
//* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// The PacketClassifier.java is a part of project utopia, under MIT License.
// See https://opensource.org/licenses/MIT for license information.
// Copyright (c) 2021 moe-org All rights reserved.
//* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
package moe.kawayi.org.utopia.desktop.net;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.FastThreadLocal;
import moe.kawayi.org.utopia.core.log.LogManagers;
import moe.kawayi.org.utopia.core.log.Logger;
import moe.kawayi.org.utopia.core.net.PackageTypeEnum;
import moe.kawayi.org.utopia.core.net.packet.PingPacket;
import moe.kawayi.org.utopia.core.ubf.converter.BinaryConverter;
import moe.kawayi.org.utopia.core.util.NotNull;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.util.List;
/**
 * Packet classifier: dispatches inbound packets by their packet-type id.
 *
 * Frame length has already been handled by a {@link LengthFieldBasedFrameDecoder}
 * earlier in the pipeline, so no length checking is done here.
 *
 * Thread safe: the only mutable state is a per-thread converter held in a
 * {@link FastThreadLocal}.
 */
public class PacketClassifier extends ByteToMessageDecoder {
    /**
     * Name of the netty channel attribute under which the server version
     * carried by a PING packet is stored.
     */
    public static final String CHANNEL_SERVER_PING_VERSION = "utopia.client.received_ping_packet.server_version";

    private final Logger logger = LogManagers.getLogger(this.getClass());

    // One binary converter per event-loop thread, so decode() never shares
    // converter state across threads.
    @NotNull
    private final FastThreadLocal<BinaryConverter.ConvertFrom> converter = new FastThreadLocal<>(){
        @Override
        @NotNull
        protected BinaryConverter.ConvertFrom initialValue() throws Exception {
            return new BinaryConverter.ConvertFrom();
        }
    };

    /**
     * Classify one framed packet.
     *
     * Reads the 4-byte packet-type id, consumes the rest of the frame as the
     * packet body, and for PING packets stores the contained server version in
     * the channel attribute {@link #CHANNEL_SERVER_PING_VERSION}.  COMMAND and
     * unknown types are only logged.  Nothing is ever added to {@code out}.
     */
    @Override
    protected void decode(
            @NotNull ChannelHandlerContext ctx,
            @NotNull ByteBuf in,
            @NotNull List<Object> out) throws Exception {
        // Packet-type id: first 4 bytes of the frame.
        var packetType = in.readInt();

        // LengthFieldBasedFrameDecoder delivered the complete frame, so
        // readableBytes() is exactly the body length.
        byte[] data = new byte[in.readableBytes()];
        in.readBytes(data);

        // Dispatch on packet type.
        if(packetType == PackageTypeEnum.PING.getTypeId()){
            try(var byteArrayInputStream = new ByteArrayInputStream(data)){
                try(var dataInputStream = new DataInputStream(byteArrayInputStream)){
                    // Parse the body and publish the server version on the channel.
                    var nbt = converter.get().convert(dataInputStream);
                    var attr = ctx.channel().attr(AttributeKey.valueOf(CHANNEL_SERVER_PING_VERSION));
                    attr.set(nbt.get(PingPacket.UBF_VERSION_KEY).orElseThrow().getString().orElseThrow());
                }
            }
        } else if(packetType == PackageTypeEnum.COMMAND.getTypeId()){
            logger.debug("received command type packet");
        }
        else{
            logger.debug("received unknown type packet");
        }
    }
}
|
// extractS3cred tries to extract AWS access key and secret
// from an already parsed cred string
func extractS3cred() (accessKeyID string, secretAccessKey string) {
for _, p := range brf.Creds.Params {
if p.Key == "ACCESS_KEY_ID" {
accessKeyID = p.Value
}
if p.Key == "SECRET_ACCESS_KEY" {
secretAccessKey = p.Value
}
}
return accessKeyID, secretAccessKey
} |
/**
 * Writes the table record to the output stream or writer.
 *
 * Delimited tables go through the CSV writer; any other adapter type is
 * treated as fixed-width and written raw to the output stream.
 *
 * @param record the <code>TableRecord</code> object
 * @throws IOException if writing to the underlying stream fails
 */
public void write(TableRecord record) throws IOException {
    // NOTE(review): the branch tests the *adapter* type but casts the
    // *record*; this assumes a delimited adapter is always paired with a
    // DelimitedTableRecord (and fixed with FixedTableRecord) -- confirm with
    // callers, otherwise a ClassCastException is possible here.
    if (adapter instanceof TableDelimitedAdapter) {
        csvWriter.writeNext(((DelimitedTableRecord) record).getRecordValue());
    } else {
        outputStream.write(((FixedTableRecord) record).getRecordValue());
    }
}
/**
 * Tests the behavior of {@link Node#removeChild(Node)} on an element that has not been built
 * completely.
 */
public class TestRemoveChildIncomplete extends AxiomTestCase {
    public TestRemoveChildIncomplete(OMMetaFactory metaFactory) {
        super(metaFactory);
    }

    protected void runTest() throws Throwable {
        // Per the class doc the parsed element is deliberately not built
        // completely when removeChild is invoked.
        Element element = (Element)AXIOMUtil.stringToOM(metaFactory.getOMFactory(),
                "<parent><a/><b/><c/></parent>");
        // Remove the middle child <b>.
        Node b = element.getFirstChild().getNextSibling();
        element.removeChild(b);
        // Remaining children must be exactly <a> followed by <c>.
        Node child = element.getFirstChild();
        assertEquals("a", child.getLocalName());
        child = child.getNextSibling();
        assertEquals("c", child.getLocalName());
        assertNull(child.getNextSibling());
    }
}
<gh_stars>0
use std::io::{BufReader, Read};
use crate::convert_path_buf;
use crate::{Error, Result};
use regex::Regex;
use std::collections::HashSet;
use std::iter::FromIterator;
use std::path::PathBuf;
use std::str::FromStr;
// Consider using a parser combinator instead of regexes here, like Nom: https://crates.io/crates/nom
// One extraction regex per passport field; each captures the raw field value.
// (Unified on `.unwrap()` -- BYR and IYR previously used `.expect("ok")`,
// which carries no extra information; all patterns are compile-time constants.)
lazy_static! {
    static ref BYR: Regex = Regex::new(r"byr:(\d+)").unwrap();
    static ref IYR: Regex = Regex::new(r"iyr:(\d+)").unwrap();
    static ref EYR: Regex = Regex::new(r"eyr:(\d+)").unwrap();
    static ref HGT: Regex = Regex::new(r"hgt:(\S+)").unwrap();
    static ref HCL: Regex = Regex::new(r"hcl:(\S+)").unwrap();
    static ref ECL: Regex = Regex::new(r"ecl:(\S+)").unwrap();
    static ref PID: Regex = Regex::new(r"pid:(\S+)").unwrap();
    static ref CID: Regex = Regex::new(r"cid:(\S+)").unwrap();
}
/// One parsed passport field (see the extraction regexes above).
/// Year fields are stored numerically; everything else keeps the raw string
/// and is validated lazily by `is_valid_part_2`.
#[derive(Debug, Hash, Eq, PartialEq)]
enum Field {
    BirthYear(u16),
    IssueYear(u16),
    ExpirationYear(u16),
    Height(String),
    HairColor(String),
    EyeColor(String),
    PassportId(String),
    CountryId(String),
}
// These are the regexes we're using for validation:
lazy_static! {
    // "<digits>cm" or "<digits>in"; named groups consumed by Field::is_valid_part_2.
    static ref HEIGHT_PARSER: Regex = Regex::new(r"^(?P<value>\d+)(?P<unit>in|cm)$").unwrap();
    // '#' followed by exactly six lowercase hex digits.
    static ref HAIR_COLOR_PARSER: Regex = Regex::new(r"^\#([0-9a-f]{6})$").unwrap();
    // One of the seven allowed eye colors.
    static ref EYE_COLOR_PARSER: Regex = Regex::new(r"^(amb|blu|brn|gry|grn|hzl|oth)$").unwrap();
    // Exactly nine digits (leading zeroes allowed).
    static ref PASSPORT_ID_PARSER: Regex = Regex::new(r"^(\d{9})$").unwrap();
}
impl Field {
    /// Validate this field's value according to the part-2 rules.
    fn is_valid_part_2(&self) -> bool {
        use Field::*;
        // TODO: we can model this better with a "ValidPassportInput" struct...
        match self {
            BirthYear(1920..=2002) => true,
            IssueYear(2010..=2020) => true,
            ExpirationYear(2020..=2030) => true,
            Height(field) => HEIGHT_PARSER.captures(&field).map_or(false, |caps| {
                match (caps["value"].parse::<u16>(), &caps["unit"]) {
                    (Ok(value), "cm") => 150 <= value && value <= 193,
                    (Ok(value), "in") => 59 <= value && value <= 76,
                    _ => false,
                }
            }),
            // The old `captures(..).map_or(false, |c| c[0].parse::<String>().is_ok())`
            // chains were always-true once a capture existed (parsing to String
            // cannot fail), so they reduce to plain match tests.
            HairColor(field) => HAIR_COLOR_PARSER.is_match(field),
            EyeColor(field) => EYE_COLOR_PARSER.is_match(field),
            PassportId(field) => PASSPORT_ID_PARSER.is_match(field),
            CountryId(_) => true,
            // Year variants outside their valid ranges fall through to here.
            _ => false,
        }
    }
}
/// All fields found for a single passport record in the input.
#[derive(Debug, Eq, PartialEq)]
struct PassportInput {
    fields: HashSet<Field>,
}
impl PassportInput {
    /// Part-1 validity: all 8 fields present, or 7 with only CountryId missing.
    fn is_valid(&self) -> bool {
        match self.fields.len() {
            // All 8 fields are present:
            8 => true,
            // With 7 fields the passport is valid exactly when the missing one
            // is CountryId (the sole optional field), i.e. CountryId is absent.
            // (was: `.find(|f| match ...).is_none()` -- clippy's search_is_some)
            7 => !self
                .fields
                .iter()
                .any(|field| matches!(field, Field::CountryId(_))),
            _ => false,
        }
    }

    /// Part-2 validity: part-1 presence rules plus per-field value checks.
    fn is_valid_part_2(&self) -> bool {
        self.is_valid() && self.fields.iter().all(|field| field.is_valid_part_2())
    }
}
impl FromStr for PassportInput {
    type Err = Error;

    /// Parse one whitespace-separated passport record.
    ///
    /// Each field may appear at most once; a duplicate is an
    /// `Error::InvalidState` (messages unchanged from the original).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut fields = HashSet::new();
        use Field::*;

        // Resolves the old "DRY this up" TODO: one rule per field instead of
        // eight hand-copied blocks.  `stringify!` reproduces the exact
        // "too many matches of regex <NAME>" messages.
        macro_rules! extract_field {
            ($regex:ident, $variant:ident) => {
                if let Some(caps) = $regex.captures(s) {
                    if $regex.find_iter(s).count() > 1 {
                        return Err(Error::InvalidState(format!(
                            "too many matches of regex {}",
                            stringify!($regex)
                        )));
                    }
                    // The target type (u16 or String) is fixed by the variant.
                    fields.insert($variant(caps[1].parse()?));
                }
            };
        }

        extract_field!(BYR, BirthYear);
        extract_field!(IYR, IssueYear);
        extract_field!(EYR, ExpirationYear);
        extract_field!(HGT, Height);
        extract_field!(HCL, HairColor);
        extract_field!(ECL, EyeColor);
        extract_field!(PID, PassportId);
        extract_field!(CID, CountryId);

        Ok(Self { fields })
    }
}
// Read the whole input and parse each blank-line-separated chunk as a passport.
fn get_passports_from_buffer(
    mut buf_reader: BufReader<Box<dyn Read + '_>>,
) -> Result<Vec<PassportInput>> {
    let mut raw = String::new();
    buf_reader.read_to_string(&mut raw)?;
    raw.split("\n\n").map(str::parse).collect()
}
/// Part 1: count passports with all required fields present.
/// (Dropped the unused `mut` on the parameter -- it is moved, never mutated.)
pub fn part_1(buf_reader: BufReader<Box<dyn Read + '_>>) -> Result<usize> {
    let passports = get_passports_from_buffer(buf_reader)?;
    Ok(passports
        .iter()
        .filter(|passport| passport.is_valid())
        .count())
}
/// Part 2: count passports that also pass the per-field value checks.
/// (Dropped the unused `mut` on the parameter -- it is moved, never mutated.)
pub fn part_2(buf_reader: BufReader<Box<dyn Read + '_>>) -> Result<usize> {
    let passports = get_passports_from_buffer(buf_reader)?;
    Ok(passports
        .iter()
        .filter(|passport| passport.is_valid_part_2())
        .count())
}
#[test]
fn test_from_string() -> Result<()> {
    use Field::*;
    // A five-field record (with an embedded line break) must parse into
    // exactly those five fields.
    let passport = "iyr:2013 ecl:amb cid:350 \
                    \n eyr:2023 pid:028048884"
        .parse::<PassportInput>()?;
    let fields = HashSet::from_iter(
        vec![
            IssueYear(2013),
            EyeColor("amb".into()),
            CountryId("350".into()),
            ExpirationYear(2023),
            PassportId("028048884".into()),
        ]
        .into_iter(),
    );
    assert_eq!(passport, PassportInput { fields: fields });
    Ok(())
}
#[test]
fn test_from_string_err_duplicates() -> Result<()> {
    // `eyr:` appears twice, so parsing must fail.
    // (Removed an unused `use Field::*;` that triggered an unused-import warning.)
    let passport =
        "iyr:2013 ecl:amb cid:350 eyr:2103 pid:028048884 eyr:2023".parse::<PassportInput>();
    assert_eq!(passport.is_err(), true);
    Ok(())
}
#[test]
fn test_get_passports_from_buffer() -> Result<()> {
    // The sample file contains four blank-line-separated records.
    let p = Some(PathBuf::from("./src/exercises/day_04/test.txt"));
    let passports = get_passports_from_buffer(convert_path_buf(p)?)?;
    assert_eq!(passports.len(), 4);
    Ok(())
}
#[test]
fn test_height_field_validator() -> Result<()> {
    // Valid ranges: 150..=193 cm and 59..=76 in; malformed strings fail.
    let field1 = Field::Height("150cm".into());
    assert_eq!(field1.is_valid_part_2(), true);
    let invalid_field = Field::Height("149cm".into());
    assert_eq!(invalid_field.is_valid_part_2(), false);
    let field2 = Field::Height("76in".into());
    assert_eq!(field2.is_valid_part_2(), true);
    let field3 = Field::Height("x76in".into());
    assert_eq!(field3.is_valid_part_2(), false);
    Ok(())
}
#[test]
fn test_hair_field_validator() -> Result<()> {
    // Must be '#' plus exactly six lowercase hex digits.
    let field1 = Field::HairColor("#60292f".into());
    assert_eq!(field1.is_valid_part_2(), true);
    let invalid_field = Field::HairColor("1f7352".into());  // missing '#'
    assert_eq!(invalid_field.is_valid_part_2(), false);
    let field2 = Field::HairColor("#60292z".into()); // not a-f
    assert_eq!(field2.is_valid_part_2(), false);
    let field3 = Field::HairColor("#60292f0".into()); // 7 digits
    assert_eq!(field3.is_valid_part_2(), false);
    Ok(())
}
#[test]
fn test_eye_color_field_validator() -> Result<()> {
    // Exact match against the allowed color codes; extra characters fail.
    let field = Field::EyeColor("amb".into());
    assert_eq!(field.is_valid_part_2(), true);
    let field = Field::EyeColor("blu".into());
    assert_eq!(field.is_valid_part_2(), true);
    let field = Field::EyeColor("oth".into());
    assert_eq!(field.is_valid_part_2(), true);
    let field = Field::EyeColor("amb ".into());
    assert_eq!(field.is_valid_part_2(), false);
    let field = Field::EyeColor("ambx".into());
    assert_eq!(field.is_valid_part_2(), false);
    let field = Field::EyeColor("amb blu".into());
    assert_eq!(field.is_valid_part_2(), false);
    Ok(())
}
#[test]
fn test_passport_id_field_validator() -> Result<()> {
    // Exactly nine digits, leading zeroes allowed; padding/length errors fail.
    let field = Field::PassportId("157096267".into());
    assert_eq!(field.is_valid_part_2(), true);
    let field = Field::PassportId("000096267".into());
    assert_eq!(field.is_valid_part_2(), true);
    let field = Field::PassportId(" 000096267".into());
    assert_eq!(field.is_valid_part_2(), false);
    let field = Field::PassportId("00096267".into());
    assert_eq!(field.is_valid_part_2(), false);
    Ok(())
}
#[test]
fn test_part_1_example() -> Result<()> {
    // AoC sample input: two of the four passports are part-1 valid.
    let p = Some(PathBuf::from("./src/exercises/day_04/test.txt"));
    let res = part_1(convert_path_buf(p)?)?;
    assert_eq!(res, 2);
    Ok(())
}
#[test]
fn test_part_1() -> Result<()> {
    // Regression pin against the full puzzle input.
    let p = Some(PathBuf::from("./src/exercises/day_04/passports.txt"));
    let res = part_1(convert_path_buf(p)?)?;
    assert_eq!(res, 182);
    Ok(())
}
#[test]
fn test_part_2_example() -> Result<()> {
    // AoC part-2 sample input: four valid passports.
    let p = Some(PathBuf::from("./src/exercises/day_04/test_2.txt"));
    let res = part_2(convert_path_buf(p)?)?;
    assert_eq!(res, 4);
    Ok(())
}
#[test]
fn test_part_2() -> Result<()> {
    // Regression pin against the full puzzle input.
    let p = Some(PathBuf::from("./src/exercises/day_04/passports.txt"));
    let res = part_2(convert_path_buf(p)?)?;
    assert_eq!(res, 109);
    Ok(())
}
|
package com.davidmogar.quizzer.utils;
import org.junit.Test;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import static org.junit.Assert.*;
public class UrlReaderTest {
    /**
     * Smoke test: reading http://google.com must produce a non-empty string.
     *
     * NOTE(review): this performs real network I/O and will fail without
     * internet access; consider stubbing the URL source for hermetic runs.
     */
    @Test
    public void testGetStreamAsString() throws Exception {
        try {
            String result = UrlReader.getStreamAsString(new URL("http://google.com"));
            assertNotNull(result);
            assertNotEquals(result, "");
        } catch (IOException e) {
            fail("IOException was not expected for the given URL" + e);
        }
    }
}
def __parse_row(self, row_string, column_types, column_labels):
    """Parse one raw result row into a list of ResultColumn objects.

    Args:
        row_string: raw row text; a falsy value yields an empty list.
        column_types: per-column type descriptors, parallel to the row.
        column_labels: per-column labels, parallel to the row.

    Returns:
        list of ResultColumn, one per tokenized value in the row.
    """
    column_values = []
    if not row_string:
        return column_values
    for i, col_val in enumerate(self.__tokenize_row(row_string)):
        # Guard against rows wider than the declared schema.  (The original
        # compared an unused `current_column` that was never incremented, so
        # the assertion could never fire; `string_val` was also dead.)
        assert i < len(column_types), \
            'Number of columns returned > the number of column types: %s' % column_types
        column_values.append(ResultColumn(col_val, column_types[i], column_labels[i]))
    return column_values
def generate_index(
    index: "faiss.Index",
    embeddings: npt.NDArray,
) -> "faiss.Index":
    """Construct a faiss index sized to `embeddings` and populate it.

    `index` is used as a factory: it is called with the embedding
    dimensionality (number of columns) and the resulting index is filled
    with every row of `embeddings`.
    """
    built = index(embeddings.shape[1])
    built.add(embeddings)
    return built
Temple of the Silent Storm
With this week’s patch, you will be able to play a beta version of our new PvP map – Temple of the Silent Storm.
We’re really trying to push the verticality of the map. Taking inspiration from many of the shooting games that we’ve enjoyed, we created a map that has some of the vertical play normally seen in shooters, but still has strong regions of the map that play off our active combat and RPG elements.
The secondary mechanic for this map revolves around channeling different buffs throughout the map. These buffs will have different effects on the game:
Meditation on Ferocity : Bonus +3 team points when anyone on your team earns a kill. These can stack, which means that if you have both buffs, your team can earn 11 points for a kill! There are 2 of these buffs; one on each team’s side of the map.
: Bonus +3 team points when anyone on your team earns a kill. These can stack, which means that if you have both buffs, your team can earn 11 points for a kill! There are 2 of these buffs; one on each team’s side of the map. Meditation on Stillness : Cap points give you 2 points per pulse instead of 1.
: Cap points give you 2 points per pulse instead of 1. Meditation on Tranquility: Resets all cap points and gives them to your team. This meditation will only come up once the game has been running for a while, and it only appears once each game! If you’re really behind and see this meditation come up, grab it, because it could swing the outcome of the game!
sPvP Daily and Monthly Achievements
We’ll also be introducing daily and monthly Structured PvP achievements with this patch.
Daily Achievement:
Caps: 3
Kills: 5
Matches: 3
Top Stat: 1
Rewards:
1 x Glory Boost
1 x Tourney Ticket
Monthly Achievement:
Kill Variety: 8 unique professions killed
Rank points earned, organized in four levels: 200, 400, 800, 100
Tournament wins: 3
Monthly matches won: 10
Rewards:
5x Glory boosts
5x Salvage Kits
10 x Tourney tickets
New PvP Stats
The PvP hero panel now has more stats! You can access these stats by pressing the crossed swords icon in the top left-hand corner of your screen.
The current stats tracked are now:
Games played
Games won
Tournaments played
Tournaments won
Top stats awarded
Favorite profession
Favorite map
Glory
Qualifying points
Qualifying Point Leader Boards
Qualifying points are very important. We’ll display the current qualifying points for both EU and US datacenters here: https://forum-en.guildwars2.com/forum/pvp/pvp/Leaderboards-for-Qualifying-Points/first#post665723
These qualifying points will be used to determine entry into larger tournaments down the road.
Paid Tournament Entry Fees and Rewards
We’re going to revise the reward structure for paid tournaments. The rewards will break down like this:
1st place – 1 Gold Tournament chest, 500 glory, 500 rank points, 120 Gems, 5 Qualifying Points
2nd place – 1 Silver Tournament chest, 400 glory, 400 rank points, 80 Gems, 3 Qualifying Points
3rd place – 1 Silver Tournament chest, 300 glory, 300 rank points, 20 Gems, 1 Qualifying Points
4th place – 1 Silver Tournament chest, 300 glory, 300 rank points, 20 Gems, 1 Qualifying Points
5th place – 1 Copper Tournament chest
6th place – 1 Copper Tournament chest
7th place – 1 Copper Tournament chest
8th place – 1 Copper Tournament chest
Paid tournaments also provide each finishing place a small % chance to win gems in the following amounts. The chance of winning one of these is approximately 1 in 4. No player can win twice.
10 gems
35 gems
80 gems
120 gems
1000 gems
Smaller Changes
We’ve reorganized the PvP browser hot-join maps so that some are 5-on-5 and some are the current 8-on-8. We’ve made this change so that people who want to play 5-on-5 instead of 8-on-8 in public maps have the option to do so.
Players will also be able to use /rank in the PvP lobby to show off their PvP rank! While this can’t be used in matches (we don’t want people confusing them with finishing stomps) we wanted people to be able to show off how awesome they are in PvP.
For a full list of all PvP changes this patch, please check out the patch notes right here.
That’s it for now! Stay tuned and I’ll keep you guys updated as things are added to structured PvP! |
You read that correctly. This isn’t The Onion.
Fox NFL Sunday has issued an open casting call for Rams fans to come out for the pregame show, possibly (probably) due to fear the visiting Eagles will be better represented than the 9-3 home team.
Southern California wild fires have been an issue lately, but Rams attendance has been an issue more than lately; even causing head coach Sean McVay to institute a silent count in practice in case his team’s fans should be out-rooted in their own building.
And the game won’t be any slouch, considering you have arguably the league’s top two surprises squaring off in what could also be a playoff preview. But maybe we shouldn’t be surprised. After all, Philly to LA is a weekend drive lots of folks take for out of town games…
A portion of the casting call can be seen below.
Audio: Rams using a silent count in preparation for a home game? John Clayton has the details |
<filename>src/components/RFVideo.tsx
import React from 'react'
import bgVideo from "../video/bubble-video.mp4"
const RFVideo = () => {
return (
<video id="rfVideo" autoPlay muted loop>
<source src={bgVideo} type="video/mp4"/>
</video>
)
}
export default RFVideo |
/**
* Converts a RelBuilder into a sql string.
*
* @param sqlWriter The writer to be used when translating the {@link org.apache.calcite.rel.RelNode} to sql.
* @param relToSql The converter from {@link org.apache.calcite.rel.RelNode} to
* {@link org.apache.calcite.sql.SqlNode}.
* @param query The RelNode representing the query.
*
* @return the sql string built by the RelBuilder.
*/
protected String writeSql(SqlPrettyWriter sqlWriter, RelToSqlConverter relToSql, RelNode query) {
sqlWriter.reset();
SqlSelect select = relToSql.visitRoot(query).asSelect();
return sqlWriter.format(select);
} |
import math


def count_flagstones(n, m, a):
    """Return how many a-by-a flagstones are needed to cover an n-by-m area.

    Uses integer ceiling division instead of the original
    ``math.ceil(float(n) / a)``: float rounding can yield an off-by-one for
    large inputs (the classic problem allows values up to 1e9).
    """
    return ((n + a - 1) // a) * ((m + a - 1) // a)


if __name__ == "__main__":
    s = raw_input("")
    s = s.split(" ")
    print(str(count_flagstones(int(s[0]), int(s[1]), int(s[2]))))
def fit_model(self, all_parents, tau_max=None):
    """Fit the model for the given parent structure and cache derived results.

    Args:
        all_parents: parent sets passed through to ``self.get_fit``.
        tau_max: maximum time lag; ``None`` presumably lets ``get_fit``
            choose a default -- confirm in ``get_fit``.

    Side effects:
        Sets ``self.fit_results``, ``self.phi``, ``self.psi`` and
        ``self.all_psi_k``; returns None.
    """
    self.fit_results = self.get_fit(all_parents=all_parents,
                                    selected_variables=None,
                                    tau_max=tau_max)
    # Derived quantities are computed in dependency order:
    # coefficients -> phi -> psi / all_psi_k.
    coeffs = self.get_coefs()
    self.phi = self._get_phi(coeffs)
    self.psi = self._get_psi(self.phi)
    self.all_psi_k = self._get_all_psi_k(self.phi)
<reponame>kuujo/onos-ran
// Copyright 2020-present Open Networking Foundation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package c1
import (
"context"
"fmt"
"github.com/onosproject/onos-ran/api/nb"
"github.com/onosproject/onos-ran/api/sb"
"github.com/onosproject/onos-ran/pkg/manager"
"github.com/onosproject/onos-ran/pkg/service"
"google.golang.org/grpc"
)
// NewService returns a new device Service
func NewService() (service.Service, error) {
	svc := &Service{}
	return svc, nil
}
// Service is an implementation of C1 service.
// It embeds service.Service and is attached to a gRPC server via Register.
type Service struct {
	service.Service
}
// Register registers the C1 Service with the gRPC server.
func (s Service) Register(r *grpc.Server) {
	nb.RegisterC1InterfaceServiceServer(r, &Server{})
}
// Server implements the C1 gRPC service for administrative facilities.
// It is stateless; every RPC fetches what it needs from manager.GetManager().
type Server struct {
}
// ListStations returns a stream of base station records.
//
// Only the one-shot "list everything" form is implemented: subscribe mode and
// filtering by a specific ECGI both return an error.
func (s Server) ListStations(req *nb.StationListRequest, stream nb.C1InterfaceService_ListStationsServer) error {
	if req.Subscribe {
		return fmt.Errorf("subscribe not yet implemented")
	}
	if req.Ecgi == nil {
		ch := make(chan sb.ControlUpdate)
		// NOTE(review): req.Subscribe is always false past the guard above,
		// so this subscribe branch is dead code.
		if req.Subscribe {
			if err := manager.GetManager().SubscribeControlUpdates(ch); err != nil {
				return err
			}
		} else {
			// NOTE(review): the range below only terminates if the manager
			// closes ch -- confirm ListControlUpdates does so.
			if err := manager.GetManager().ListControlUpdates(ch); err != nil {
				return err
			}
		}
		for update := range ch {
			switch update.GetMessageType() {
			case sb.MessageType_CELL_CONFIG_REPORT:
				// Translate each southbound cell-config report into a
				// northbound StationInfo and stream it to the caller.
				cellConfigReport := update.GetCellConfigReport()
				ecgi := nb.ECGI{
					Ecid:   cellConfigReport.GetEcgi().GetEcid(),
					Plmnid: cellConfigReport.GetEcgi().GetPlmnId(),
				}
				baseStationInfo := nb.StationInfo{
					Ecgi: &ecgi,
				}
				baseStationInfo.MaxNumConnectedUes = cellConfigReport.GetMaxNumConnectedUes()
				if err := stream.Send(&baseStationInfo); err != nil {
					return err
				}
			}
		}
	} else {
		return fmt.Errorf("list stations for specific ecgi not yet implemented")
	}
	return nil
}
// ListStationLinks returns a stream of links between neighboring base stations.
//
// One StationLinkInfo is sent per cell-config report, carrying the reporting
// cell's ECGI plus the ECGIs of its candidate secondary cells.  Unlike
// ListStations, subscribe mode is honored here; per-ECGI filtering is not.
func (s Server) ListStationLinks(req *nb.StationLinkListRequest, stream nb.C1InterfaceService_ListStationLinksServer) error {
	if req.Ecgi == nil {
		ch := make(chan sb.ControlUpdate)
		if req.Subscribe {
			if err := manager.GetManager().SubscribeControlUpdates(ch); err != nil {
				return err
			}
		} else {
			// NOTE(review): the range below only terminates if the manager
			// closes ch -- confirm ListControlUpdates does so.
			if err := manager.GetManager().ListControlUpdates(ch); err != nil {
				return err
			}
		}
		for update := range ch {
			switch update.GetMessageType() {
			case sb.MessageType_CELL_CONFIG_REPORT:
				cellConfigReport := update.GetCellConfigReport()
				ecgi := nb.ECGI{
					Ecid:   cellConfigReport.GetEcgi().GetEcid(),
					Plmnid: cellConfigReport.GetEcgi().GetPlmnId(),
				}
				stationLinkInfo := nb.StationLinkInfo{
					Ecgi: &ecgi,
				}
				// Attach every candidate secondary cell as a neighbor.
				candScells := cellConfigReport.GetCandScells()
				for _, candScell := range candScells {
					candCellEcgi := candScell.GetEcgi()
					nbEcgi := nb.ECGI{
						Ecid:   candCellEcgi.GetEcid(),
						Plmnid: candCellEcgi.GetPlmnId(),
					}
					stationLinkInfo.NeighborECGI = append(stationLinkInfo.NeighborECGI, &nbEcgi)
				}
				if err := stream.Send(&stationLinkInfo); err != nil {
					return err
				}
			}
		}
	} else {
		return fmt.Errorf("req ecgi is not nil")
	}
	return nil
}
// ListUELinks returns a stream of UE and base station links; one-time or (later) continuous subscribe.
//
// Each RADIO_MEAS_REPORT_PER_UE telemetry message is flattened into one
// UELinkInfo: a ChannelQuality entry is emitted per (serving cell, CQI
// histogram bin) pair.  Per-ECGI filtering is not implemented.
func (s Server) ListUELinks(req *nb.UELinkListRequest, stream nb.C1InterfaceService_ListUELinksServer) error {
	if req.Ecgi == nil {
		ch := make(chan sb.TelemetryMessage)
		if req.Subscribe {
			if err := manager.GetManager().SubscribeTelemetry(ch); err != nil {
				return err
			}
		} else {
			// NOTE(review): the range below only terminates if the manager
			// closes ch -- confirm ListTelemetry does so.
			if err := manager.GetManager().ListTelemetry(ch); err != nil {
				return err
			}
		}
		for telemetry := range ch {
			switch telemetry.GetMessageType() {
			case sb.MessageType_RADIO_MEAS_REPORT_PER_UE:
				radioReportUe := telemetry.GetRadioMeasReportPerUE()
				ecgi := nb.ECGI{
					Ecid:   radioReportUe.GetEcgi().GetEcid(),
					Plmnid: radioReportUe.GetEcgi().GetPlmnId(),
				}
				// Flatten the per-serving-cell CQI histograms into a flat
				// list of channel-quality entries.
				radioReportServCells := radioReportUe.GetRadioReportServCells()
				var cqis []*nb.ChannelQuality
				for _, radioReportServCell := range radioReportServCells {
					servCellEcgi := radioReportServCell.GetEcgi()
					// Shadows the outer `ecgi` deliberately: this one is the
					// serving (target) cell, not the reporting cell.
					ecgi := nb.ECGI{
						Ecid:   servCellEcgi.GetEcid(),
						Plmnid: servCellEcgi.GetPlmnId(),
					}
					cqiHist := radioReportServCell.GetCqiHist()
					for _, cqi := range cqiHist {
						nbCqi := nb.ChannelQuality{
							TargetEcgi: &ecgi,
							CqiHist:    cqi,
						}
						cqis = append(cqis, &nbCqi)
					}
				}
				ueLinkInfo := nb.UELinkInfo{
					Ecgi:             &ecgi,
					Crnti:            radioReportUe.GetCrnti(),
					ChannelQualities: cqis,
				}
				if err := stream.Send(&ueLinkInfo); err != nil {
					return err
				}
			}
		}
	} else {
		return fmt.Errorf("UELinkListRequest is not empty")
	}
	return nil
}
// TriggerHandOver returns a hand-over response indicating success or failure.
//
// Sends an HO_REQUEST control response for the given CRNTI from the source to
// the destination station, then drops the cached telemetry for the UE at the
// source cell so stale measurements are not reused after the hand-over.
func (s Server) TriggerHandOver(ctx context.Context, req *nb.HandOverRequest) (*nb.HandOverResponse, error) {
	if req != nil {
		src := req.GetSrcStation()
		dst := req.GetDstStation()
		crnti := req.GetCrnti()
		// Translate northbound ECGIs into their southbound equivalents.
		srcEcgi := sb.ECGI{
			Ecid:   src.GetEcid(),
			PlmnId: src.GetPlmnid(),
		}
		dstEcgi := sb.ECGI{
			Ecid:   dst.GetEcid(),
			PlmnId: dst.GetPlmnid(),
		}
		ctrlResponse := sb.ControlResponse{
			MessageType: sb.MessageType_HO_REQUEST,
			S: &sb.ControlResponse_HORequest{
				HORequest: &sb.HORequest{
					Crnti: crnti,
					EcgiS: &srcEcgi,
					EcgiT: &dstEcgi,
				},
			},
		}
		err := manager.GetManager().SB.SendResponse(ctrlResponse)
		if err != nil {
			return nil, err
		}
		// Invalidate the UE's telemetry at the source cell.
		manager.GetManager().DeleteTelemetry(src.GetPlmnid(), src.GetEcid(), crnti)
	} else {
		return nil, fmt.Errorf("HandOverRequest is nil")
	}
	return &nb.HandOverResponse{}, nil
}
// SetRadioPower returns a response indicating success or failure.
//
// Maps the northbound power offset to the southbound XICICPA value and sends
// an RRM_CONFIG control response for the requested cell.
// NOTE(review): an unrecognized offset falls through the switch and an
// RRMConfig with an empty PA list is sent -- confirm that is intended.
func (s Server) SetRadioPower(ctx context.Context, req *nb.RadioPowerRequest) (*nb.RadioPowerResponse, error) {
	if req != nil {
		offset := req.GetOffset()
		var pa []sb.XICICPA
		switch offset {
		case nb.StationPowerOffset_PA_DB_0:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_0)
		case nb.StationPowerOffset_PA_DB_1:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_1)
		case nb.StationPowerOffset_PA_DB_2:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_2)
		case nb.StationPowerOffset_PA_DB_3:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_3)
		case nb.StationPowerOffset_PA_DB_MINUS3:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_MINUS3)
		case nb.StationPowerOffset_PA_DB_MINUS6:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_MINUS6)
		case nb.StationPowerOffset_PA_DB_MINUS1DOT77:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_MINUS1DOT77)
		case nb.StationPowerOffset_PA_DB_MINUX4DOT77:
			pa = append(pa, sb.XICICPA_XICIC_PA_DB_MINUX4DOT77)
		}
		ecgi := sb.ECGI{
			Ecid:   req.GetEcgi().GetEcid(),
			PlmnId: req.GetEcgi().GetPlmnid(),
		}
		ctrlResponse := sb.ControlResponse{
			MessageType: sb.MessageType_RRM_CONFIG,
			S: &sb.ControlResponse_RRMConfig{
				RRMConfig: &sb.RRMConfig{
					Ecgi: &ecgi,
					PA:   pa,
				},
			},
		}
		err := manager.GetManager().SB.SendResponse(ctrlResponse)
		if err != nil {
			return nil, err
		}
	} else {
		return nil, fmt.Errorf("SetRadioPower request cannot be nil")
	}
	return &nb.RadioPowerResponse{Success: true}, nil
}
|
MOD = 10 ** 9 + 7


def count_ab_strings(s):
    """Fold the string left to right, accumulating over maximal runs of 'a'.

    Each maximal run of k consecutive 'a's -- terminated by a 'b' or by the
    end of the string -- updates the total as
    ``ans = ans + k * (ans + 1)  (mod 1e9+7)``, which is the same recurrence
    the original loop applied; by induction this equals
    ``prod(k_i + 1) - 1`` over all runs.  Characters other than 'a'/'b' are
    ignored, exactly as before.
    """
    ans = 0
    run = 0
    for ch in s:
        if ch == 'a':
            run += 1
        elif ch == 'b':
            if run > 0:
                ans = (ans + run * ((ans + 1) % MOD)) % MOD
                run = 0
    # Flush a trailing run of 'a's not followed by any 'b'.
    if run > 0:
        ans = (ans + run * ((ans + 1) % MOD)) % MOD
    return ans % MOD


if __name__ == "__main__":
    # Guarded so importing this module does not block on stdin.
    print(count_ab_strings(input().strip()))
Visitors are searched at the music festival Rock am Ring in Nuerburg, Germany, on June 3. German authorities allowed the popular rock festival to go ahead after a scare over people with suspected links to Islamic extremism prompted them to curtail its opening night. (Photo: AP)
BERLIN — Germany's Family and Youth Minister Katarina Barley on Wednesday called for her country to strengthen its efforts to prevent all forms of extremism, calling for a federal law on the prevention of extremism to stabilize projects and initiatives against, for example, right-wing extremism.
Although there is now more money available for prevention, "we aren't yet on target," Barley said on Wednesday. Announcing the findings of a report into extremism prevention, Barley said at a press conference in Berlin that in fighting Islamist extremism, "we must not wait until young people have become radicalized."
"Security and prevention must go hand in hand," she added.
According to Barley, prevention work must begin where the threat is particularly high, for example in the school yard, on the Internet, and also in the prisons.
100-million-euro investment
As part of Germany's 2018 "national prevention program" against extremism, some 100 million euros ($112 million) will be invested into specifically combating Islamist extremism. Some funds will be allocated to supporting mosque communities, while money will also be invested in expanding the prevention of radicalization online.
"Every euro we invest (in prevention) is a very well-spent euro, as it serves to create security," Barley said.
In the crackdown on Islamist radicalization, Barley rejected demands made earlier this month, however, to allow the surveillance of minors who may be involved in Islamist groups.
"Minors have already committed serious acts of violence," Bavarian Interior Minister Joachim Herrmann told the Funke media group, adding that Germany "must consequently deal" with such cases.
Barley on Wednesday described Herrmann's demand as a "misguided approach," arguing that children should be protected from slipping into radicalized violence.
Germany's governing "grand coalition" has already reduced the minimum age for monitoring by Germany's intelligence agencies from 16 to 14 years.
National anti-racism action plan
Together with Parliamentary State Secretary in the Interior Ministry, Günter Krings, Barley also presented on Wednesday the updated National Action Plan against Racism.
In principle, the aim is to "show clear boundaries, regardless of where discrimination occurs, whether in leisure time, online or in the workplace," Barley said.
At the center of the new action plan are issues including human rights policy, protection against discrimination in daily life, for example in the workplace, as well as the punishment of criminal offenses.
Other elements include education and political education, as well as racism and hate speech online.
On the basis of the coalition agreement, the action plan has also been expanded to cover the issues of homosexuality and transphobia.
Greens politician Volker Beck criticized the plan, saying it was lacking in concrete proposals for action.
"Instead of binding measures, only the current situation of gays, lesbians, bi-, trans- and intersexuals was described," Beck said.
Petra Pau, a member of the executive committee of the leftist Linke political group, and member of the inquiry committee into the right-wing extremist National Socialist Underground, welcomed the action plan and increase in subsidies for social initiatives against right-wing extremism and racism.
"So far so good," she said in a statement, adding, however, that the subsidies are limited in time: "This is ineffective and short-sighted, as the fight against right-wing extremism and racism requires continuity and endurance."
RELATED COVERAGE
This article originally appeared in DW.com. Its content was created separately to USA TODAY.
Read or Share this story: https://usat.ly/2srjh3h |
def add_loss_op(self, preds):
    """Build the masked cross-entropy loss for the given logits.

    Args:
        preds: logits tensor scored against ``self.labels_placeholder``.

    Returns:
        A scalar tensor: the mean sparse-softmax cross-entropy over the
        positions selected by ``self.mask_placeholder``.
    """
    per_position_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=preds, labels=self.labels_placeholder
    )
    # Keep only the losses at masked-in positions before averaging.
    masked_loss = tf.boolean_mask(per_position_loss, self.mask_placeholder)
    return tf.reduce_mean(masked_loss)
The crowd mingles at the party sponsored by NJOY at The Jane Hotel. New York's Smoke-Free Air Act of 2002 does not address electronic cigarette smoking (Owen Kolasinski / BFAnyc.com)
At a private party inside The Jane Hotel's posh nightclub last month, a giddy procession of well-lubricated influencers snapped Instagram photos of themselves clowning around with a giant six-foot replica of an electronic cigarette. Chatty couples on tufted sofas exhaled vaporous plumes between bites of complimentary caviar, endive, and goat cheese. Playboy's Miss November 2012 floated through the fashionable crowd, which was liberally peppered with representatives from the Finance Set, their wrists bristling with Breitling watches. "They're trying to capture the essence of being a New York smoker, which is incredibly effective," one woman said, referring to NJOY, the "#1 electronic cigarette company in America" who sponsored the open bar event. "They're pushing the benefits of smoking without the health risks."
At 22, she might have been too young to remember bars before Bloomberg, but what benefits was she referring to? "Being social, being cool," she said while dragging thoughtfully on her menthol NJOY King, as if Don Draper's doppelganger might approach at any moment and ask her for a puff. "This is just nicotine, right?"
Though the first e-cigarette patent appeared in 1963, Draper wouldn't recognize today's battery-powered robot tube that vaporizes nicotine so it flows neatly into our lungs and bloodstream while staying out of our hair, clothing, and breath. But Draper's Lucky Strike tantrum aside, he'd certainly love selling it. Marketed as a safer alternative to traditional smokes, 3.5 million Americans spend around $400 million on e-cigs every year, and some analysts predict sales to surpass normal cigarettes in ten years. Better still, e-cigarettes are currently unregulated by the FDA, so they can be hawked on TV, in print, or at an expensive Manhattan hotel, while their essential competitive advantage—selling a drug that many doctors believe is one of the most addictive substances on earth—remains unchanged.
Brittany Nola, Playboy's Miss November 2012, at The Jane Hotel for NJOY's launch party (Owen Kolasinski / BFAnyc.com)
The modern incarnation of the e-cigarette was invented in China in 2003, and the basic structure hasn't changed much since. E-cigarettes use atomizers powered by rechargeable or disposable lithium batteries to heat up liquid nicotine without combustion. Users inhale that nicotine, along with water, flavorings, and a chemical used in fog machine juice (among other household products), without ingesting the smoke that contains toxic, carcinogenic ingredients the tobacco industry spent decades and billions of dollars denying (then admitting) killed you.
NJOY calls its product the "gold standard" of the field, and it's easy to see why. One of their disposable NJOY Kings smells sweetly of unlit tobacco, crackles like a lit cigarette when inhaled (think the "engine revving" noise electric cars artificially produce), and has an ashen-colored tip that resembles one of those fake rocks suburbanites hide their spare keys in, which glows bright red during a drag. The resulting cloud of vapor can be convincing enough to have you tossed out of the bar.
"We use a very well-regarded tobacco flavorist from North Carolina, so it has a terrific aroma and flavor to it," NJOY's Chief Marketing Executive, Andrew Beaver tells us. "Our proprietary technology is extraordinary. Everything about our productfrom how hard you have to draw on it, to how it tastes, to how it feels in your hand, to how you inhale, exhale, is meant to replicate the experience of smoking without all the things you don't like." Those features seemingly make them ideal tools for smokers desperate for ways to stop smoking without renouncing a comforting oral fixation and a drug their bodies crave.
Yet despite their proud sales pitch, NJOY sued the FDA to prevent e-cigarettes from being regulated as a smoking cessation device. While e-cigarette manufacturers make dubious claimsboth coy and bluntabout their products' health benefits, the medical and public health community remains deeply conflicted about whether atomized nicotine vapor is the safest or sanest way of addressing a public health crisis that kills 500,000 Americans each year. But if sales are any indication, smokers are rapidly drawing their own conclusions.
Ricco, a 32-year-old who has been smoking e-cigarettes for a year, uses his on the G train (Gretchen Robinette / Gothamist)
"I will never give up nicotine."
Linc Williams may have quit smoking, but not nicotine. "I will never give up nicotine, I enjoy my life with nicotine, and I would like to see its use encouraged long-term," Williams told the audience at a recent FDA hearing on the regulation of smoking cessation products. Williams, a self-described e-cig activist, said that he had spent nearly $17,000 on patches, gum, and prescriptions that didn't work, and credited e-cigarettes with helping him kick combustible cigarettes for good, noting that he had run a 5K, and felt his health was improving.
But Williams, who is making a documentary about e-cigarette use called We Are Vapers, continues to use snus and e-cigarettes. "[E-cigarettes] brought the pleasure back—the breathing in and out. That social aspect, the hand-eye-mouth." The perpetuation of the culture of smoking is one of the major reasons why some public health experts have a hard time endorsing e-cigarettes. They would prefer to see the quitting process be as speedy and unsexy as possible—e-cigarette smokers may not die of lung cancer, but why should they remain physically addicted to a product that has yet to be proven safe? And why would a Hollywood starlet blowing vapor from an e-cigarette look any less cool than a "real" cigarette to an adolescent, future customer watching at home?
"Having this thing people suck on that makes them addicted to nicotine, I don't think that does the public any good," Dr. Thomas Novotny, a medical epidemiologist, professor, and former Assistant Surgeon General says. "I really think that we shouldn't support any new nicotine delivery productit just won't change the culture of smoking. If anything, it glorifies it."
Jason, 35, smokes his e-cig in the speciality store VapeNY in Jamaica, Queens. He's been using them for two years, and says they helped him quit normal cigarettes (Gretchen Robinette / Gothamist)
A report published by The Royal College of Physicians [PDF] stated that "Nicotine itself is not especially hazardous," but many doctors, including those at the Center for Disease Control and the American Medical Association, disagree. Dr. Constantine Vardavas, a senior research scientist at Harvard's School of Public Health, tells us, "Nicotine is a toxic substance that is seriously harmful to one's heath, especially due to its effect on the nervous system and this heartbeat and arterial tone, too."
"Sometimes you feel a little ill... but that goes away in a minute."
There is also the matter of what else you are inhaling when you take a drag from an e-cigarette. Manufacturers are not yet required to list their ingredients to consumers, but Thomas Kiklas of the Tobacco Vapor Electronic Cigarette Association, rattled them off as being "Nicotine, water, propylene glycol, glycerol, nicotine flavorings, and that's it." Kiklas notes that propylene glycol "has been in the U.S. food supply for generations, they use it for sinus medications." Dr. Vardavas called it an "irritant," and noted that while the ingredients themselves may not be harmful, "the synergistic effect of them altogether may be harmful."
Dr. Vardavas was the lead researcher on a study published last year in Chest medical journal that showed changes in lung function to e-cigarette users in the first five minutes of use. "The Chest study is a small pilot study of limited scope, a fact we acknowledge," he wrote in an email. "However the significance of this research is that it indicates that e-cigarettes may not be as 'harmless' as they are promoted and may indeed impact the pulmonary system, at least in the short term that we assessed."
Michael Murphy, a 28-year-old who has used e-cigarettes for a year, admits he "can't really focus without nicotine. For me, I need it to pay attention to what I'm doing. It's just how I start my day—coffee, and a cigarette." Murphy, a 28-year-old former smoker who runs an after-school program for students in Queens, said he saves $90 a week on EonSmoke e-cigs compared to buying his former brand, Camels, and has had a mostly positive experience with e-cigarettes: "Before I go to bed, I take the biggest hit I possibly can, and sometimes you just feel a little ill, and your chest hurts, but that goes away in like, a minute."
In the case of NJOYs, that pain is a selling point: "The kick is out of this world!" one user writes in a review on the company's website. "Tons of flavor, vapor AND throat hit!!!!!!" another adds. User "mick," who states that he normally smokes Parliaments, wrote, "I didn't have great expectations but the hit to the back of the throat was almost too much this is the real deal. Haha I am hooked."
Andrew Beaver, NJOY's spokesman, explains, "That's the nicotine," and points out that the effect also happens when you take a sip of a soda after eating spicy food. "That's just a type of irritation on the back of your throat that can be caused by anything caustic being there." Asked to comment on the irony of a customer gushing that they are "hooked" on an intrinsically addictive product, Beaver says, "We're not here to tell you that nicotine is not addictive, we are marketing this product to people who are already addicted to nicotine."
Colleen, 28, enjoys an e-cigarette in the bathroom of The Charleston in Williamsburg. She said she uses them when she can't smoke a normal cigarette (Gretchen Robinette / Gothamist)
"What happens at the end-of-life stage for these products?"
Dr. Novotny, who has studied the effects of cigarette waste on public health as well as the environment, raises an alarming point in a world that is poised to be saturated with disposable e-cigarettes: "What happens at the end-of-life stage for these products? Cigarette butts are already toxic waste. These have batteries and other toxic materials in them." He believes their disposal should be "more carefully regulated just like we regulate paints or car batteries or other items we don't just dispose of in the street. It's very difficult to get rid of batteries. To think that people could be just tossing these on the ground like normal cigarette butts—I just don't know."
Kiklas, the e-cig lobbyist, admitted, "Are there knuckleheads? Of course. But stores have return programs, and we have processes for that." NJOY, for instance, will send users a free e-cigarette if they send back eight spent ones, and Beaver, the company's spokesman, claims, "We're getting a tremendous amount of recycling returns." NJOYs are also completely recyclable, a feature that eludes most of the 300 and counting brands of e-cigarettes on the market.
After comparing the nicotine in NJOY to the prodigious amount of caffeine the country doesn't bat an eye at ingesting, Beaver adds, "We are keenly aware that we are not as benign as a piece of chocolate on the checkout counter. There are those who choose to smoke and those who choose to live this very organic, risk adverse life. We do exist somewhere in the middle. Actually," Beaver pauses and corrects himself, "we're very far towards the side of being not as consequential as smoking."
Doctors and scientists who promote a theory of overall harm reduction agree, and say the risks of promoting and using e-cigarettes far outweigh the proven dangers of smoking. They point to the statistics that around half of the country's 46.6 million smokers try to quit smoking at least one day a year, and that most will fail.
These harm-reduction advocates argue that the FDA should embrace technologies like electronic cigarettes, and even extend the recommended quitting period of 12 weeks that's currently printed on the packaging for products like Nicorette gum or Nicoderm patches. They argue that this would allow smokers more time to wean themselves off nicotine, perhaps indefinitely.
Doing so would entice a "much larger segment of the smoking population" to use tools to help themselves quit smoking, according to Jonathan Foulds, a public health professor at Penn State University who spoke at the FDA hearing. "Right now [the smokers who are quitting are] primarily people who are making the decision to quit right now because that's what the labeling tells me I must do, and it actually implies that it would be dangerous to do it gradually with this product."
An e-cigarette display in a bodega window on Bedford Avenue in Williamsburg (Gretchen Robinette / Gothamist)
Perhaps the most compelling argument for embracing new technology like e-cigarettes is that the old technology doesn't work very well. A recent study published in the medical journal Tobacco Control showed that a large percentage of smokers trying to quit with nicotine replacement products like gum or patches started smoking again after several years. The report echoed what has been found in previous studies: gum and patches may work well in the short-term, but there's a good chance that ex-smokers using them will return to smoking in the long-run. Chantix, a non-nicotine based prescription drug made by Pfizer, has been linked to an increase in depression and an increased risk of suicide.
And then there is anecdotal evidence from longtime smokers that smoking e-cigarettes for long periods of time helped them quit combustible cigarettes. Brenda Smith had smoked Marlboro Ultra Light 100s for decades until a friend bought her an e-cigarette starter kit 18 months ago: "We were going on a cruise, and you couldn't smoke on the cruise, but we could smoke e-cigs." Smith, a 60-year-old registered nurse from Virginia, said that she had tried quitting smoking a few times before, but had never stopped longer than a few months.
"Every time I thought about quitting again," Smith says, "I knew that I'd just gain 30 pounds while I was trying to quit, then hate myself and start smoking again." While using e-cigarettes, Smith decreased her cigarette intake from a pack a day, to 10 a day, to exclusively using e-cigarettes. "It's been a positive experience so far. I definitely feel better now than when I was smoking cigarettes, but it's been a long process," she adds. Nowadays she'll "have a few puffs socially, with a glass of wine in one hand and an e-cigarette in the other," or take drags after a long day at work. Some days she doesn't use it at all. "I just needed something in my mouth to smoke to help me to quit, that certainly helps."
A "juice bar" of different fruity flavors of nicotine at VapeNY in Jamaica, Queens (Gretchen Robinette / Gothamist)
NJOY's spokesman, Andrew Beaver, calls Big Tobacco "one of the most criminal enterprises that have been out there," and said his company's goal is to "be a responsible player in this category, to bring the category into the mainstream, and to allow adults to make the decision to use our products on their own." Beaver added that the company does not sell to minors, does not sell flavors other than menthol, and engages in "self-regulation." But NJOY's rise to becoming the most popular e-cigarette brand in the country stemmed from a lawsuit they filed to prevent their product from strict government oversight, and they continue to reap profits in a vacuum where no regulation currently exists.
In 2010 NJOY sued the FDA to prevent electronic cigarettes from being regulated as a drug device that provided the "therapeutic benefit" of quitting smoking. Nicotine devices must undergo rigorous and costly testing before they can market themselves as products to help smokers quit. NJOY won the lawsuit, and the right to keep selling their product as a type of tobacco product. E-cigarettes were ordered to be regulated under the historic Tobacco Control Act of 2009.
That law, which President Obama called an "extraordinary accomplishment," prevents tobacco companies from advertising within 1,000 feet of a school, banned cigarette flavors (other than menthol), and for the first time gave the FDA the power to regulate an industry that had regularly been poisoning their customers. But the FDA, an agency that regulates more than $1 trillion in consumer goods—25% of all expenditures in the country—has been slow to respond to the e-cigarette market, and hasn't yet put any regulation in place.
“We’re moving to release for public comment a proposed rule to regulate additional categories of tobacco products,” said Jennifer Haliski, the spokeswoman for the FDA's Office for the Center of Tobacco Products. That proposed rule may impose restrictions on e-cigarettes that already existnew warning labels, a ban on flavors, advertising limitations, additional taxesand ones that may be specific to e-cigarettes, such as regulating nicotine intake or the ingredients that can be put into e-cigarettes.
The rule is due by April of this year, but once proposed, a lengthy public comment period would ensue, and months would stretch on before the regulations would be enforceable by law.
Steve, a 67-year-old poet and artist who runs a gallery on the Lower East Side, has been experimenting with an e-cigarette because he feels it'd be better for his health, but still enjoys regular cigarettes (Gretchen Robinette / Gothamist)
Until then, electronic cigarette companies can do what tobacco companies can only dream of doing: show celebrities puffing e-cigs on television, sell flavors like chocolate banana or cinnamon apple, and creep into the $1 billion market for smoking cessation products by brazenly touting the supposed health benefits of using e-cigarettes. Company names include SafeCig, and SaveASmoker, while another called E-HealthCigarettes, sold in 7-Eleven stores alongside NJOYs, states on its packaging, "Smoking is harmful for health. E-Cigarette is good for health."
"There are no handcuffs on the e-cigarette industry."
One of Brooklyn's homegrown e-cigarette companies, Bedford Slims, sidesteps the stigma by calling their products "vapourettes," and their cylinders substitute the staid masculinity of traditional tobacco designs with cheery patterns from New York-based artists. Their pitch is more tactful, but clear. After declining to make claims of cessation, the company writes on their website, "a little internet research will tell you what we would LOVE to say." The company adds, "there are no known negative side effects for vapourettes that could cause damage on the scale that cigarettes do."
"Compared to the traditional tobacco industry, there are no handcuffs on the e-cigarette industry," an institutional tobacco investor says. "At this point, the FDA still looks like a paper tiger." The investor, who would only speak to us anonymously because he is not authorized to comment to the media, stresses how dependent traditional cigarette companies are on "innovation," or gimmicks, to sell their products, a process that has ground to a halt by the FDA in part by the Tobacco Act's stipulation that every alteration to their product, however minute, must be FDA-approved.
"The thing about this industry is that they need to keep tweaking their product. That's how they keep gaining users. But tobacco companies have to get the FDA's approval if they want to do anything, even if they change the ink on their product," the investor says. "There's like 3,000 applications for modifications, and the FDA hasn't touched one. But the nice thing about e-cigs is that right now, you don't have to go through all that. There's a lot of potential."
Bonnie Herzog, the managing director for Wells Fargo's Beverage, Tobacco, and Consumer Research division, agrees. "Even when the tobacco industry had none of these new restrictions, it's not like they had a ton of huge innovationsthey couldn't even advertise. But this is a whole new game. It's a category where the margins could be quite substantial." As for NJOY, Herzog believes there is a good chance that one of the major tobacco companies will buy it up. "Keep in mind, Altria doesn't even have anything in the form of e-cigarettes."
Lacey, 26, has been using e-cigarettes for eight months after quitting smoking, and enjoys them because it's cheaper, and she believes better for her health (Gretchen Robinette / Gothamist)
Altria, Philip Morris' parent company, makes Marlboro and Parliament cigarettes, and is the largest cigarette manufacturer in the country. Its top two competitors, Reynolds American (Camel, American Spirit, and many others) and Lorillard (Newport) both have their own e-cigarette brands. NJOY's party at the Jane wasn't merely to spread the e-cigarette gospel to hip, attractive New Yorkers (although it did plenty of that through the dozens of free e-cigs it doled out) but to flex its muscles for prospective suitors. Another fact not lost on investors: NJOY's executive vice president, Roy Anise, and its senior VP of sales and distribution, are both Altria alums.
On its website, NJOY states that it is "the only e-cigarette company to have had its marketing practices reviewed by Federal District and Appellate Courts and found to not have made or implied health claims," and stresses that "NJOY products are not a smoking cessation product and have not been tested as such." Yet an insert inside a NJOY King reminds us, "Be sure to tell your friends and family about the positive impact NJOY products are having on your life."
We also received this pitch from NJOY's PR representative:
Every year at this time, people vow to “quit smoking” (often unsuccessfully) as a New Year’s resolution. Now modern technology has created a ground-breaking new invention designed to help smokers keep that resolution: meet NJOY Kings, a brand-new electronic-cigarette (e-cig) that now gives smokers a new alternative to tobacco cigarette Whether it’s the vow to “quit smoking” for real this time or you’re at a party where drinks are flowing, even the most casual of smokers enjoys a cig with a drink or two.
Beaver, the NJOY spokesman, said, "We do not market the product as a smoking cessation product. If you look at the Marist polls, quitting smoking has been one of the top New Year's resolutions for decades, so we believe it's appropriate to reference that fact."
Haliski, the FDA spokeswoman, said that the agency does not comment on the claims of specific companies, nor has it released any guidance or regulation on what a "therapeutic claim" is. So who regulates these statements? "It is the manufacturer's responsibility to review the applicable law."
"There are a few companies out there making sly claims, but we rigorously go after them," said Thomas Kiklas, of the TVECA. "We don't make claims of cessation, we don't make claims that it's a less harmful option." Asked whether NJOY's insert constituted "sly" marketing, Kiklas replied, "I think it just means if you enjoyed it, spread it around."
Beaver said that NJOY is working towards submitting itself to the type of regulations necessary to be marketed as a tool to help smokers quit, but declined to say when it would happen. "The thing to keep in mind is that, it's not like the FDA does this for you, it's quite costly. But we are in the process of putting together the protocols and the process and the formats to do it." The company is private, and so are their earnings, but Beaver noted, "Our product is doing extraordinarily well, even beyond our expectations."
The institutional investor explained the product's success: "You're selling this highly addictive substance to the consumer, but you're not killing them outright. I have to assume this is a product you can have a long runway with."
"I'm a better person on nicotine."
Linc Williams, the 40-year-old former smoker who testified at the FDA hearing and has been using e-cigarettes and snus for 30 months, credits the product with saving his life. "I was smoking one day, and had a random trucker come up to me—I had no idea who he was. This guy gave me an e-cigarette, told me to try it, and said 'It may save your life.' He was absolutely right." Williams says it spurred him to do something about his declining health: he has lost 100 pounds since quitting cigarettes and stresses that he is making his documentary to "pay it forward" and spread the gospel.
"I'm a better person on nicotine," Williams tells us. "These anti-nicotine zealots who oppose e-cigarettes don't like us because we're very vocal about [e-cigs'] success. They don't like that thousands of us come together online and talk about how using them is a profound, life-changing thing." When asked what he wants to accomplish as an e-cigarette activist, Williams seems to share NJOY's stated vision. "My ultimate goal, is for cigarettes to become abnormal, and for e-cigarettes to become the norm." |
package net.serenitybdd.core.webdriver.driverproviders;
import net.thucydides.core.util.EnvironmentVariables;
import net.thucydides.core.webdriver.appium.AppiumConfiguration;
import org.openqa.selenium.remote.DesiredCapabilities;
/**
 * Supplies the {@link DesiredCapabilities} used to start an Appium driver.
 * <p>
 * The capabilities are resolved by {@link AppiumConfiguration} from the
 * current environment variables together with the options string supplied
 * at construction time.
 */
public class AppiumDriverCapabilities implements DriverCapabilitiesProvider {
    // Environment the Appium configuration is read from.
    private final EnvironmentVariables environmentVariables;
    // Options string forwarded verbatim to AppiumConfiguration.getCapabilities.
    private final String options;
    /**
     * @param environmentVariables environment used to resolve Appium settings
     * @param options              additional capability options forwarded to
     *                             the Appium configuration (semantics defined
     *                             by AppiumConfiguration — presumably a
     *                             driver-options string; confirm at call site)
     */
    public AppiumDriverCapabilities(EnvironmentVariables environmentVariables, String options) {
        this.environmentVariables = environmentVariables;
        this.options = options;
    }
    /** Builds the capabilities from the Appium configuration for this environment. */
    @Override
    public DesiredCapabilities getCapabilities() {
        return AppiumConfiguration.from(environmentVariables).getCapabilities(options);
    }
}
|
Domain Transfer Learning for Hyperspectral Image Super-Resolution
A Hyperspectral Image (HSI) contains a great number of spectral bands for each pixel; however, the spatial resolution of HSI is low. Hyperspectral image super-resolution is effective to enhance the spatial resolution while preserving the high-spectral-resolution by software techniques. Recently, the existing methods have been presented to fuse HSI and Multispectral Images (MSI) by assuming that the MSI of the same scene is required with the observed HSI, which limits the super-resolution reconstruction quality. In this paper, a new framework based on domain transfer learning for HSI super-resolution is proposed to enhance the spatial resolution of HSI by learning the knowledge from the general purpose optical images (natural scene images) and exploiting the cross-correlation between the observed low-resolution HSI and high-resolution MSI. First, the relationship between low- and high-resolution images is learned by a single convolutional super-resolution network and then is transferred to HSI by the idea of transfer learning. Second, the obtained Pre-high-resolution HSI (pre-HSI), the observed low-resolution HSI, and high-resolution MSI are simultaneously considered to estimate the endmember matrix and the abundance code for learning the spectral characteristic. Experimental results on ground-based and remote sensing datasets demonstrate that the proposed method achieves comparable performance and outperforms the existing HSI super-resolution methods. |
In Vitro Activity of Trovafloxacin against Bacteroides fragilis in Mixed Culture with either Escherichia coli or a Vancomycin- Resistant Strain of Enterococcus faecium Determined by an Anaerobic Time-Kill Technique
ABSTRACT To determine the efficacy of trovafloxacin as a possible treatment for intra-abdominal abscesses, we have developed an anaerobic time-kill technique using different inocula to study the in vitro killing ofBacteroides fragilis in pure culture or in mixed culture with either Escherichia coli or a vancomycin-resistant strain of Enterococcus faecium (VREF). With inocula of 5 × 105 CFU/ml and trovafloxacin concentrations of ≤2 μg/ml, a maximum observed effect (Emax) of ≥6.1 (log10 CFU/ml) was attained with all pure and mixed cultures within 24 h. With inocula of 108CFU/ml, a similar Emax and a similar concentration to produce 50% of Emax(EC50) for B. fragilis were found in both pure cultures and mixed cultures with E. coli. However, to produce a similar killing of B. fragilis in the mixed cultures with VREF, a 14-fold increase in the concentration of trovafloxacin was required. A vancomycin-susceptible strain of E. faecium and a trovafloxacin-resistant strain of E. coli were also found to confer a similar “protective” effect on B. fragilis against the activity of trovafloxacin. Using inocula of 109 CFU/ml, the activity of trovafloxacin was retained for E. coli and B. fragilis and was negligible against VREF. We conclude that this is a useful technique to study the anaerobic killing of mixed cultures in vitro and may be of value in predicting the killing of mixed infections in vivo. The importance of using mixed cultures and not pure cultures is clearly shown by the difference in the killing of B. fragilis in the mixed cultures tested. Trovafloxacin will probably be ineffective in the treatment of infections involving large numbers of enterococci. However, due to its ability to retain activity against large cultures of B. fragilis and E. coli, trovafloxacin could be beneficial in the treatment of intra-abdominal abscesses. |
/**
* Constructs a {@link Date} object from the given string. <br />
* The String must be in the format: YYYY-MM-DDTHH:MM:SS.
*
* @param date String representing the date.
*/
public void fromString(String date) {
Pattern pattern = Pattern.compile("(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2})");
if(date != null) {
Matcher matcher = pattern.matcher(date);
if(matcher.matches()) {
int year = Integer.valueOf(matcher.group(1));
int month = Integer.valueOf(matcher.group(2)) - 1;
int day = Integer.valueOf(matcher.group(3));
int hour = Integer.valueOf(matcher.group(4));
int minute = Integer.valueOf(matcher.group(5));
int second = Integer.valueOf(matcher.group(6));
GregorianCalendar cal = new GregorianCalendar(year, month, day, hour, minute, second);
mValue = cal.getTime();
}
}
} |
// Package vedicextensions
// Block: Vedic Extensions
// Range: 1CD0..1CFF
//
// NOTE: machine-generated style table of rune constants for the Unicode
// "Vedic Extensions" block; each constant is the untyped rune for the
// codepoint named in its comment. Do not edit entries by hand.
package vedicextensions

const (
	// VedicToneKarshana VEDIC TONE KARSHANA
	// Codepoint: U+1CD0
	// Category: Mark, Nonspacing
	// String: ᳐
	VedicToneKarshana = '\u1cd0'
	// VedicToneShara VEDIC TONE SHARA
	// Codepoint: U+1CD1
	// Category: Mark, Nonspacing
	// String: ᳑
	VedicToneShara = '\u1cd1'
	// VedicTonePrenkha VEDIC TONE PRENKHA
	// Codepoint: U+1CD2
	// Category: Mark, Nonspacing
	// String: ᳒
	VedicTonePrenkha = '\u1cd2'
	// VedicSignNihshvasa VEDIC SIGN NIHSHVASA
	// Codepoint: U+1CD3
	// Category: Punctuation, Other
	// String: ᳓
	VedicSignNihshvasa = '\u1cd3'
	// VedicSignYajurvedicMidlineSvarita VEDIC SIGN YAJURVEDIC MIDLINE SVARITA
	// Codepoint: U+1CD4
	// Category: Mark, Nonspacing
	// String: ᳔
	VedicSignYajurvedicMidlineSvarita = '\u1cd4'
	// VedicToneYajurvedicAggravatedIndependentSvarita VEDIC TONE YAJURVEDIC AGGRAVATED INDEPENDENT SVARITA
	// Codepoint: U+1CD5
	// Category: Mark, Nonspacing
	// String: ᳕
	VedicToneYajurvedicAggravatedIndependentSvarita = '\u1cd5'
	// VedicToneYajurvedicIndependentSvarita VEDIC TONE YAJURVEDIC INDEPENDENT SVARITA
	// Codepoint: U+1CD6
	// Category: Mark, Nonspacing
	// String: ᳖
	VedicToneYajurvedicIndependentSvarita = '\u1cd6'
	// VedicToneYajurvedicKathakaIndependentSvarita VEDIC TONE YAJURVEDIC KATHAKA INDEPENDENT SVARITA
	// Codepoint: U+1CD7
	// Category: Mark, Nonspacing
	// String: ᳗
	VedicToneYajurvedicKathakaIndependentSvarita = '\u1cd7'
	// VedicToneCandraBelow VEDIC TONE CANDRA BELOW
	// Codepoint: U+1CD8
	// Category: Mark, Nonspacing
	// String: ᳘
	VedicToneCandraBelow = '\u1cd8'
	// VedicToneYajurvedicKathakaIndependentSvaritaSchroeder VEDIC TONE YAJURVEDIC KATHAKA INDEPENDENT SVARITA SCHROEDER
	// Codepoint: U+1CD9
	// Category: Mark, Nonspacing
	// String: ᳙
	VedicToneYajurvedicKathakaIndependentSvaritaSchroeder = '\u1cd9'
	// VedicToneDoubleSvarita VEDIC TONE DOUBLE SVARITA
	// Codepoint: U+1CDA
	// Category: Mark, Nonspacing
	// String: ᳚
	VedicToneDoubleSvarita = '\u1cda'
	// VedicToneTripleSvarita VEDIC TONE TRIPLE SVARITA
	// Codepoint: U+1CDB
	// Category: Mark, Nonspacing
	// String: ᳛
	VedicToneTripleSvarita = '\u1cdb'
	// VedicToneKathakaAnudatta VEDIC TONE KATHAKA ANUDATTA
	// Codepoint: U+1CDC
	// Category: Mark, Nonspacing
	// String: ᳜
	VedicToneKathakaAnudatta = '\u1cdc'
	// VedicToneDotBelow VEDIC TONE DOT BELOW
	// Codepoint: U+1CDD
	// Category: Mark, Nonspacing
	// String: ᳝
	VedicToneDotBelow = '\u1cdd'
	// VedicToneTwoDotsBelow VEDIC TONE TWO DOTS BELOW
	// Codepoint: U+1CDE
	// Category: Mark, Nonspacing
	// String: ᳞
	VedicToneTwoDotsBelow = '\u1cde'
	// VedicToneThreeDotsBelow VEDIC TONE THREE DOTS BELOW
	// Codepoint: U+1CDF
	// Category: Mark, Nonspacing
	// String: ᳟
	VedicToneThreeDotsBelow = '\u1cdf'
	// VedicToneRigvedicKashmiriIndependentSvarita VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
	// Codepoint: U+1CE0
	// Category: Mark, Nonspacing
	// String: ᳠
	VedicToneRigvedicKashmiriIndependentSvarita = '\u1ce0'
	// VedicToneAtharvavedicIndependentSvarita VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
	// Codepoint: U+1CE1
	// Category: Mark, Spacing Combining
	// String: ᳡
	VedicToneAtharvavedicIndependentSvarita = '\u1ce1'
	// VedicSignVisargaSvarita VEDIC SIGN VISARGA SVARITA
	// Codepoint: U+1CE2
	// Category: Mark, Nonspacing
	// String: ᳢
	VedicSignVisargaSvarita = '\u1ce2'
	// VedicSignVisargaUdatta VEDIC SIGN VISARGA UDATTA
	// Codepoint: U+1CE3
	// Category: Mark, Nonspacing
	// String: ᳣
	VedicSignVisargaUdatta = '\u1ce3'
	// VedicSignReversedVisargaUdatta VEDIC SIGN REVERSED VISARGA UDATTA
	// Codepoint: U+1CE4
	// Category: Mark, Nonspacing
	// String: ᳤
	VedicSignReversedVisargaUdatta = '\u1ce4'
	// VedicSignVisargaAnudatta VEDIC SIGN VISARGA ANUDATTA
	// Codepoint: U+1CE5
	// Category: Mark, Nonspacing
	// String: ᳥
	VedicSignVisargaAnudatta = '\u1ce5'
	// VedicSignReversedVisargaAnudatta VEDIC SIGN REVERSED VISARGA ANUDATTA
	// Codepoint: U+1CE6
	// Category: Mark, Nonspacing
	// String: ᳦
	VedicSignReversedVisargaAnudatta = '\u1ce6'
	// VedicSignVisargaUdattaWithTail VEDIC SIGN VISARGA UDATTA WITH TAIL
	// Codepoint: U+1CE7
	// Category: Mark, Nonspacing
	// String: ᳧
	VedicSignVisargaUdattaWithTail = '\u1ce7'
	// VedicSignVisargaAnudattaWithTail VEDIC SIGN VISARGA ANUDATTA WITH TAIL
	// Codepoint: U+1CE8
	// Category: Mark, Nonspacing
	// String: ᳨
	VedicSignVisargaAnudattaWithTail = '\u1ce8'
	// VedicSignAnusvaraAntargomukha VEDIC SIGN ANUSVARA ANTARGOMUKHA
	// Codepoint: U+1CE9
	// Category: Letter, Other
	// String: ᳩ
	VedicSignAnusvaraAntargomukha = '\u1ce9'
	// VedicSignAnusvaraBahirgomukha VEDIC SIGN ANUSVARA BAHIRGOMUKHA
	// Codepoint: U+1CEA
	// Category: Letter, Other
	// String: ᳪ
	VedicSignAnusvaraBahirgomukha = '\u1cea'
	// VedicSignAnusvaraVamagomukha VEDIC SIGN ANUSVARA VAMAGOMUKHA
	// Codepoint: U+1CEB
	// Category: Letter, Other
	// String: ᳫ
	VedicSignAnusvaraVamagomukha = '\u1ceb'
	// VedicSignAnusvaraVamagomukhaWithTail VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
	// Codepoint: U+1CEC
	// Category: Letter, Other
	// String: ᳬ
	VedicSignAnusvaraVamagomukhaWithTail = '\u1cec'
	// VedicSignTiryak VEDIC SIGN TIRYAK
	// Codepoint: U+1CED
	// Category: Mark, Nonspacing
	// String: ᳭
	VedicSignTiryak = '\u1ced'
	// VedicSignHexiformLongAnusvara VEDIC SIGN HEXIFORM LONG ANUSVARA
	// Codepoint: U+1CEE
	// Category: Letter, Other
	// String: ᳮ
	VedicSignHexiformLongAnusvara = '\u1cee'
	// VedicSignLongAnusvara VEDIC SIGN LONG ANUSVARA
	// Codepoint: U+1CEF
	// Category: Letter, Other
	// String: ᳯ
	VedicSignLongAnusvara = '\u1cef'
	// VedicSignRthangLongAnusvara VEDIC SIGN RTHANG LONG ANUSVARA
	// Codepoint: U+1CF0
	// Category: Letter, Other
	// String: ᳰ
	VedicSignRthangLongAnusvara = '\u1cf0'
	// VedicSignAnusvaraUbhayatoMukha VEDIC SIGN ANUSVARA UBHAYATO MUKHA
	// Codepoint: U+1CF1
	// Category: Letter, Other
	// String: ᳱ
	VedicSignAnusvaraUbhayatoMukha = '\u1cf1'
	// VedicSignArdhavisarga VEDIC SIGN ARDHAVISARGA
	// Codepoint: U+1CF2
	// Category: Letter, Other
	// String: ᳲ
	VedicSignArdhavisarga = '\u1cf2'
	// VedicSignRotatedArdhavisarga VEDIC SIGN ROTATED ARDHAVISARGA
	// Codepoint: U+1CF3
	// Category: Letter, Other
	// String: ᳳ
	VedicSignRotatedArdhavisarga = '\u1cf3'
	// VedicToneCandraAbove VEDIC TONE CANDRA ABOVE
	// Codepoint: U+1CF4
	// Category: Mark, Nonspacing
	// String: ᳴
	VedicToneCandraAbove = '\u1cf4'
	// VedicSignJihvamuliya VEDIC SIGN JIHVAMULIYA
	// Codepoint: U+1CF5
	// Category: Letter, Other
	// String: ᳵ
	VedicSignJihvamuliya = '\u1cf5'
	// VedicSignUpadhmaniya VEDIC SIGN UPADHMANIYA
	// Codepoint: U+1CF6
	// Category: Letter, Other
	// String: ᳶ
	VedicSignUpadhmaniya = '\u1cf6'
	// VedicSignAtikrama VEDIC SIGN ATIKRAMA
	// Codepoint: U+1CF7
	// Category: Mark, Spacing Combining
	// String: ᳷
	VedicSignAtikrama = '\u1cf7'
	// VedicToneRingAbove VEDIC TONE RING ABOVE
	// Codepoint: U+1CF8
	// Category: Mark, Nonspacing
	// String: ᳸
	VedicToneRingAbove = '\u1cf8'
	// VedicToneDoubleRingAbove VEDIC TONE DOUBLE RING ABOVE
	// Codepoint: U+1CF9
	// Category: Mark, Nonspacing
	// String: ᳹
	VedicToneDoubleRingAbove = '\u1cf9'
	// VedicSignDoubleAnusvaraAntargomukha VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
	// Codepoint: U+1CFA
	// Category: Letter, Other
	// String: ᳺ
	VedicSignDoubleAnusvaraAntargomukha = '\u1cfa'
)
def read_geometry(self, group):
    """Read a spherical polar grid from an HDF5-style group.

    Expects the group to carry byte-string attributes ``grid_type`` and
    ``geometry`` and datasets ``walls_1``/``walls_2``/``walls_3`` holding
    the radial (``r``), theta (``t``) and phi (``p``) wall positions.

    Raises:
        ValueError: if the stored grid type is not ``'sph_pol'``, or if the
            stored geometry hash does not match ``self.get_geometry_id()``.
    """
    if group.attrs['grid_type'].decode('utf-8') != 'sph_pol':
        raise ValueError("Grid is not spherical polar")
    self.set_walls(group['walls_1']['r'],
                   group['walls_2']['t'],
                   group['walls_3']['p'])
    # NOTE: the hash check runs after set_walls — presumably because
    # get_geometry_id derives from the walls just set; confirm before
    # reordering. ValueError (not bare Exception) lets callers catch the
    # mismatch precisely while remaining an Exception subclass.
    if group.attrs['geometry'].decode('utf-8') != self.get_geometry_id():
        raise ValueError("Calculated geometry hash does not match hash in file")
<reponame>pecigonzalo/kpt<filename>pkg/live/preprocess/process.go<gh_stars>1-10
// Copyright 2021 Google LLC.
// SPDX-License-Identifier: Apache-2.0
package preprocess
import (
"fmt"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"sigs.k8s.io/cli-utils/pkg/common"
"sigs.k8s.io/cli-utils/pkg/inventory"
"sigs.k8s.io/cli-utils/pkg/provider"
)
// PreProcess decides which inventory adoption policy to use before an apply.
//
// It looks up the cluster inventory object for inv:
//   - lookup/client errors (other than NotFound) are returned with the
//     strict InventoryPolicyMustMatch policy;
//   - a missing inventory object yields InventoryPolicyMustMatch with no error;
//   - an object labeled as managed by a different tool is rejected with an error;
//   - an object already managed by kpt yields InventoryPolicyMustMatch;
//   - otherwise the object is adopted: the managed-by label is written
//     (skipped for dry runs) and AdoptIfNoInventory is returned.
func PreProcess(p provider.Provider, inv inventory.InventoryInfo, strategy common.DryRunStrategy) (inventory.InventoryPolicy, error) {
	invClient, err := p.InventoryClient()
	if err != nil {
		return inventory.InventoryPolicyMustMatch, err
	}
	obj, err := invClient.GetClusterInventoryInfo(inv)
	if err != nil {
		if apierrors.IsNotFound(err) {
			return inventory.InventoryPolicyMustMatch, nil
		}
		return inventory.InventoryPolicyMustMatch, err
	}
	if obj == nil {
		return inventory.InventoryPolicyMustMatch, nil
	}
	managedByKey := "apps.kubernetes.io/managed-by"
	managedByVal := "kpt"
	labels := obj.GetLabels()
	// GetLabels returns a nil map when the object carries no labels, and
	// assigning to a nil map panics — allocate before the write below.
	if labels == nil {
		labels = map[string]string{}
	}
	val, found := labels[managedByKey]
	if found {
		if val != managedByVal {
			return inventory.InventoryPolicyMustMatch, fmt.Errorf("can't apply the current package since it is managed by %s", val)
		}
		return inventory.InventoryPolicyMustMatch, nil
	}
	labels[managedByKey] = managedByVal
	if strategy.ClientOrServerDryRun() {
		return inventory.AdoptIfNoInventory, nil
	}
	err = invClient.UpdateLabels(inv, labels)
	return inventory.AdoptIfNoInventory, err
}
|
# Competitive-programming style solution (Python 2: raw_input / print
# statements). Given n numbers, it appears to compute which value(s) could be
# added so that the multiset forms an arithmetic progression, printing the
# count of candidates followed by the candidates themselves ("-1" meaning
# infinitely many) — presumably a Codeforces-style problem; verify against
# the original statement.
rd = lambda : map(int, raw_input().split())

n = input()
a = sorted(rd())
# d: gap size -> left endpoint of (the last) pair with that gap.
d = {}
# c: gap size -> number of adjacent pairs with that gap.
c = {}
for i in range(1, len(a)):
    d[a[i] - a[i - 1]] = a[i - 1]
    if not a[i] - a[i - 1] in c:
        c[a[i] - a[i - 1]] = 0
    c[a[i] - a[i - 1]] += 1
if len(d) == 0:
    # Single element: any common difference works -> infinitely many answers.
    print -1
elif len(d) == 1:
    # All gaps equal: already an AP.
    dist = d.keys()[0]
    if dist == 0:
        # All elements identical: the only extension is that same value.
        print 1
        print a[0]
    elif len(a) == 2 and dist & 1 == 0:
        # Two elements with an even gap: extend left, split the middle,
        # or extend right.
        print 3
        print a[0] - dist, a[0] + dist / 2, a[-1] + dist
    else:
        # Otherwise only the two outside extensions are possible.
        print 2
        print a[0] - dist, a[-1] + dist
elif len(d) == 2:
    dists = sorted(d.keys())
    # Fixable only when exactly one gap is twice the common gap: the added
    # element fills that single double-sized hole.
    if dists[0] * 2 != dists[1] or c[dists[1]] > 1:
        print 0
        print
    else:
        print 1
        print d[dists[1]] + dists[0]
else:
    # Three or more distinct gaps can never be fixed by one insertion.
    print 0
    print
// SharedLock takes a co-operative (shared) lock on a directory.
// It will block if an exclusive lock is already held on the directory.
func SharedLock(dir string) (DirLock, error) {
l, err := newLock(dir)
if err != nil {
return nil, err
}
err = syscall.Flock(l.fd, syscall.LOCK_SH)
if err != nil {
return nil, err
}
return l, nil
} |
<reponame>AwesomestCode/DiscordBotPython
import discord

# Maps a command name (the text after "!") to the canned reply it triggers.
msgtable = {
    "hi": "Hello, world!",
    "ping": "Pong!",
    "about": "[Your about message here]",
    "customcmd": "Hey, why aren't you customizing me?"
}


class MyClient(discord.Client):
    async def on_ready(self):
        print('Logged on as', self.user)

    async def on_message(self, message):
        # don't respond to ourselves
        if message.author == self.user:
            return
        # startswith handles empty messages safely, unlike indexing [0]
        # which raised IndexError on empty content.
        if message.content.startswith('!'):
            # .get avoids a KeyError (and an unhandled exception in the
            # event handler) when an unknown command is issued.
            reply = msgtable.get(message.content[1:])
            if reply is not None:
                await message.channel.send(reply)


if __name__ == "__main__":
    client = MyClient()
    client.run('PUT CLIENT TOKEN HERE')
// String implements the Expression String interface.
func (da *DateArith) String() string {
var str string
if da.isAdd() {
str = "DATE_ADD"
} else {
str = "DATE_SUB"
}
return fmt.Sprintf("%s(%s, INTERVAL %s %s)", str, da.Date, da.Interval, strings.ToUpper(da.Unit))
} |
/**
* Event that signals that an object idtentified through the id should be
* reneamed
*
* @author Alexander Lex
*
*/
public class RenameEvent extends AEvent {
private Integer id;
/**
*
*/
public RenameEvent() {
}
public RenameEvent(Integer id) {
this.id = id;
}
/**
* @param id
* setter, see {@link #id}
*/
public void setID(Integer id) {
this.id = id;
}
/**
* @return the id, see {@link #id}
*/
public Integer getID() {
return id;
}
@Override
public boolean checkIntegrity() {
if (id == null)
return false;
return true;
}
} |
<gh_stars>10-100
import {
beforeEachProviders,
it,
describe,
expect,
inject,
fakeAsync,
tick
} from '@angular/core/testing';
import { MockBackend } from '@angular/http/testing';
import { provide } from '@angular/core';
import { Http, ConnectionBackend, BaseRequestOptions, Response, ResponseOptions } from '@angular/http';
import { SearchService } from './search.service';
// Unit tests for SearchService against a mocked Http backend.
// NOTE(review): beforeEachProviders/provide are Angular RC-era testing APIs
// that were removed in later Angular releases — migrating to TestBed would
// be required to run these on a modern toolchain.
export function main() {
  describe('Search Service', () => {

    beforeEachProviders(() => {
      // Wire Http to a MockBackend so no real network requests are made.
      return [BaseRequestOptions, MockBackend, SearchService,
        provide(Http, {
          useFactory: (backend:ConnectionBackend, defaultOptions:BaseRequestOptions) => {
            return new Http(backend, defaultOptions);
          }, deps: [MockBackend, BaseRequestOptions]
        }),
      ];
    });

    it('should retrieve all search results',
      inject([SearchService, MockBackend], fakeAsync((searchService:SearchService, mockBackend:MockBackend) => {
        var res:Response;
        mockBackend.connections.subscribe(c => {
          // The service is expected to read from the static JSON fixture.
          expect(c.request.url).toBe('app/shared/search/data/people.json');
          let response = new ResponseOptions({body: '[{"name": "<NAME>"}, {"name": "<NAME>"}]'});
          c.mockRespond(new Response(response));
        });
        searchService.getAll().subscribe((response) => {
          res = response;
        });
        // Flush the async work queued inside the fakeAsync zone before asserting.
        tick();
        expect(res[0].name).toBe('<NAME>');
      }))
    );

    it('should filter by search term',
      inject([SearchService, MockBackend], fakeAsync((searchService:SearchService, mockBackend:MockBackend) => {
        var res;
        mockBackend.connections.subscribe(c => {
          expect(c.request.url).toBe('app/shared/search/data/people.json');
          let response = new ResponseOptions({body: '[{"name": "<NAME>"}, {"name": "<NAME>"}]'});
          c.mockRespond(new Response(response));
        });
        searchService.search('john').subscribe((response) => {
          res = response;
        });
        tick();
        expect(res[0].name).toBe('<NAME>');
      }))
    );

    it('should fetch by id',
      inject([SearchService, MockBackend], fakeAsync((searchService:SearchService, mockBackend:MockBackend) => {
        var res;
        mockBackend.connections.subscribe(c => {
          expect(c.request.url).toBe('app/shared/search/data/people.json');
          let response = new ResponseOptions({body: '[{"id": 1, "name": "<NAME>"}, {"id": 2, "name": "<NAME>"}]'});
          c.mockRespond(new Response(response));
        });
        // NOTE(review): despite the test name, this calls search('2'), not a
        // dedicated get-by-id method — confirm SearchService treats numeric
        // terms as ids.
        searchService.search('2').subscribe((response) => {
          res = response;
        });
        tick();
        expect(res[0].name).toBe('<NAME>');
      }))
    );
  });
}
|
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2019 by <NAME> : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.kettle.plugins.hdfs.vfs;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemOptions;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.MessageBox;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.hadoop.shim.api.cluster.NamedClusterService;
import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.runtime.test.RuntimeTester;
import org.pentaho.runtime.test.action.RuntimeTestActionService;
import org.pentaho.vfs.ui.CustomVfsUiPanel;
import org.pentaho.vfs.ui.VfsFileChooserDialog;
/**
 * VFS file chooser panel that lets the user browse HDFS/MapR-FS/WASB file
 * systems through a selected Named Cluster.
 */
public class HadoopVfsFileChooserDialog extends CustomVfsUiPanel {

  // for message resolution
  private static final Class<?> PKG = HadoopVfsFileChooserDialog.class;

  // for logging
  private LogChannel log = new LogChannel( this );

  // Default root file - used to avoid NPE when rootFile was not provided
  // and the browser is resolved
  FileObject defaultInitialFile = null;

  // File objects to keep track of when the user selects the radio buttons
  FileObject hadoopRootFile = null;
  String hadoopOpenFromFolder = null;

  FileObject rootFile = null;
  FileObject initialFile = null;
  VfsFileChooserDialog vfsFileChooserDialog = null;

  // Active URL scheme; may switch to "wasb" when such a cluster is selected.
  String schemeName = "hdfs";

  private NamedClusterWidgetImpl namedClusterWidget = null;

  // Name of the cluster to preselect when the panel is activated.
  private String namedCluster = null;

  private final NamedClusterService namedClusterService;
  private final RuntimeTestActionService runtimeTestActionService;
  private final RuntimeTester runtimeTester;

  public HadoopVfsFileChooserDialog( String schemeName, String displayName, VfsFileChooserDialog vfsFileChooserDialog,
                                     FileObject rootFile, FileObject initialFile,
                                     NamedClusterService namedClusterService,
                                     RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) {
    super( schemeName, displayName, vfsFileChooserDialog, SWT.NONE );
    this.schemeName = schemeName;
    this.rootFile = rootFile;
    this.initialFile = initialFile;
    this.vfsFileChooserDialog = vfsFileChooserDialog;
    this.namedClusterService = namedClusterService;
    this.runtimeTestActionService = runtimeTestActionService;
    this.runtimeTester = runtimeTester;

    // Create the Hadoop panel
    GridData gridData = new GridData( SWT.FILL, SWT.CENTER, true, false );
    setLayoutData( gridData );
    setLayout( new GridLayout( 1, false ) );

    createConnectionPanel();
  }

  /** Builds the "Connection" group containing the named cluster selector. */
  private void createConnectionPanel() {
    // The Connection group
    Group connectionGroup = new Group( this, SWT.SHADOW_ETCHED_IN );
    connectionGroup.setText( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.ConnectionGroup.Label" ) ); //$NON-NLS-1$ ;
    GridLayout connectionGroupLayout = new GridLayout();
    connectionGroupLayout.marginWidth = 5;
    connectionGroupLayout.marginHeight = 5;
    connectionGroupLayout.verticalSpacing = 5;
    connectionGroupLayout.horizontalSpacing = 5;
    GridData gData = new GridData( SWT.FILL, SWT.FILL, true, false );
    connectionGroup.setLayoutData( gData );
    connectionGroup.setLayout( connectionGroupLayout );

    setNamedClusterWidget( new NamedClusterWidgetImpl( connectionGroup, true, namedClusterService, runtimeTestActionService, runtimeTester ) );
    getNamedClusterWidget().addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent evt ) {
        try {
          connect();
        } catch ( Exception e ) {
          // To prevent errors from multiple event firings.
        }
      }
    } );

    // The composite we need in the group
    Composite textFieldPanel = new Composite( connectionGroup, SWT.NONE );
    GridData gridData = new GridData( SWT.FILL, SWT.FILL, true, false );
    textFieldPanel.setLayoutData( gridData );
    textFieldPanel.setLayout( new GridLayout( 5, false ) );
  }

  public void initializeConnectionPanel( FileObject file ) {
    initialFile = file;
    /*
     * if ( initialFile != null && initialFile.getName().getScheme().equals( HadoopSpoonPlugin.HDFS_SCHEME ) ) { //TODO
     * activate HDFS }
     */
  }

  /** Shows a modal message box and logs the (possibly more detailed) error. */
  private void showMessageAndLog( String title, String message, String messageToLog ) {
    MessageBox box = new MessageBox( this.getShell() );
    box.setText( title ); // $NON-NLS-1$
    box.setMessage( message );
    log.logError( messageToLog );
    box.open();
  }

  /**
   * Returns the variable space of the active transformation or job, or a
   * fresh empty one when neither is open.
   */
  public VariableSpace getVariableSpace() {
    if ( Spoon.getInstance().getActiveTransformation() != null ) {
      return Spoon.getInstance().getActiveTransformation();
    } else if ( Spoon.getInstance().getActiveJob() != null ) {
      return Spoon.getInstance().getActiveJob();
    } else {
      return new Variables();
    }
  }

  public NamedClusterWidgetImpl getNamedClusterWidget() {
    return namedClusterWidget;
  }

  protected void setNamedClusterWidget( NamedClusterWidgetImpl namedClusterWidget ) {
    this.namedClusterWidget = namedClusterWidget;
  }

  public void setNamedCluster( String namedCluster ) {
    this.namedCluster = namedCluster;
  }

  public void activate() {
    vfsFileChooserDialog.setRootFile( null );
    vfsFileChooserDialog.setInitialFile( null );
    vfsFileChooserDialog.openFileCombo.setText( "hdfs://" );
    vfsFileChooserDialog.vfsBrowser.fileSystemTree.removeAll();
    getNamedClusterWidget().initiate();
    getNamedClusterWidget().setSelectedNamedCluster( namedCluster );
    super.activate();
  }

  /** Resolves the root of the currently selected named cluster and points the browser at it. */
  public void connect() {
    NamedCluster nc = getNamedClusterWidget().getSelectedNamedCluster();
    // Nothing selected yet (e.g. the widget was just cleared): bail out
    // instead of dereferencing null below.
    if ( nc == null ) {
      return;
    }
    // The Named Cluster may be hdfs, maprfs or wasb. We need to detect it here since the named
    // cluster was just selected.
    schemeName = "wasb".equals( nc.getStorageScheme() ) ? "wasb" : "hdfs";
    FileObject root = rootFile;
    try {
      root = KettleVFS.getFileObject( nc.processURLsubstitution( FileName.ROOT_PATH, Spoon.getInstance().getMetaStore(), getVariableSpace() ) );
    } catch ( KettleFileException exc ) {
      showMessageAndLog( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.error" ), BaseMessages.getString( PKG,
        "HadoopVfsFileChooserDialog.Connection.error" ), exc.getMessage() );
    }
    vfsFileChooserDialog.setRootFile( root );
    vfsFileChooserDialog.setSelectedFile( root );
    rootFile = root;
  }

  public FileObject resolveFile( String fileUri ) throws FileSystemException {
    try {
      return KettleVFS.getFileObject( fileUri, getVariableSpace(), getFileSystemOptions() );
    } catch ( KettleFileException e ) {
      throw new FileSystemException( e );
    }
  }

  public FileObject resolveFile( String fileUri, FileSystemOptions opts ) throws FileSystemException {
    try {
      return KettleVFS.getFileObject( fileUri, getVariableSpace(), opts );
    } catch ( KettleFileException e ) {
      throw new FileSystemException( e );
    }
  }

  protected FileSystemOptions getFileSystemOptions() throws FileSystemException {
    FileSystemOptions opts = new FileSystemOptions();
    return opts;
  }
}
|
def box(self, start, end, offset=(0, 0), color='b'):
    """Outline cells `start`..`end` (inclusive) with a dashed rectangle.

    The rectangle is one unit tall, shifted by `offset`, and drawn with no
    fill on the current axes.
    """
    dx, dy = offset
    axes = gca()
    left = dx - 0.5 + start
    bottom = dy - 0.5
    width = end - start + 1
    patch = Rectangle((left, bottom), width, 1,
                      facecolor='none', ls='--', lw=2, edgecolor=color)
    axes.add_patch(patch)
package sparqles.core;
import static org.junit.Assert.*;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.specific.SpecificRecordBase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LogTest {

	// slf4j logger used to check that log output is routed as configured.
	private static final Logger log = LoggerFactory.getLogger(LogTest.class);

	@Before
	public void setUp() throws Exception {
		// No fixture needed; present for JUnit lifecycle completeness.
	}

	@After
	public void tearDown() throws Exception {
		// Nothing to clean up.
	}

	/**
	 * Smoke test: emits one message at each slf4j level plus one directly
	 * through log4j. It passes as long as no exception is thrown — the
	 * actual output has to be inspected manually.
	 */
	@Test
	public void test() {
		System.out.println("Test");
		log.debug("DEBUG");
		log.info("INFO");
		log.warn("WARN");
		log.error("ERROR");
		// Log through log4j directly to compare against the slf4j binding.
		org.apache.log4j.Logger log4j = org.apache.log4j.Logger.getLogger(getClass());
		log4j.info("LOG4j info");
	}
}
|
Scalable Spherical Harmonics Hierarchies
Scalable Spherical Harmonics Hierarchies (SSPHH) is a real-time rendering solution to the global illumination problem. Our novel method is a system of components that enables the computation of light probes and the conversion to spherical harmonics coefficients, which are used as anisotropic time-varying light sources we call Spherical Harmonics Lights (SPHLs). The SPHLs encode irradiance information that can be used for image-based lighting. Our approach focuses on reconstructing scene lighting using diffuse illumination but is flexible to allow specular details. Furthermore, we consider the light transport from neighboring SPHLs by computing a transfer coefficient that estimates how much light from one probe is visible at another. SPHLs can be used for physically-based lighting using rendering methods similar to point lights and shadow maps. We created a reproducible testing methodology to compare our images with those of a commercial path tracer by automatically generating the appropriate scene information which gets used with absolute error metrics to determine remaining image defects. Our SSPHH method utilizes a scalable architecture to distribute the rendering of light probes between client and worker nodes using the ZeroMQ Majordomo protocol.
/// consume the event to
/// - maybe change the input
/// - build a command
/// then redraw the input field
pub fn on_event(
&mut self,
w: &mut W,
event: Event,
con: &AppContext,
sel: Selection<'_>,
) -> Result<Command, ProgramError> {
let cmd = self.get_command(event, con, sel);
self.input_field.display_on(w)?;
Ok(cmd)
} |
// findColumn searches the in-memory loaded column map using the specified parameters.
func findColumn(columns []ColumnMapper, columnName, tableName, schemaPrefix, schema,
dataType string) (col ColumnMapper) {
for _, col = range columns {
if col.ColumnName == columnName && col.TableName == tableName && col.TableSchema == schema &&
col.DataType == dataType {
return col
} else if col.ColumnName == columnName && col.TableName == tableName && col.TableSchema == schemaPrefix+"*" &&
col.DataType == dataType {
return col
}
}
return ColumnMapper{}
} |
<reponame>panyuan5056/flybeat<gh_stars>0
package plug
import (
"bytes"
"context"
"encoding/json"
"flybeat/core/topology"
"flybeat/pkg/logging"
"fmt"
"runtime"
"strings"
"time"
"github.com/elastic/go-elasticsearch/v7"
"github.com/elastic/go-elasticsearch/v7/esutil"
)
// NewElasticsearchOutput builds a topology.Output whose Messages channel is
// drained by a background goroutine that bulk-indexes documents into
// Elasticsearch.
//
// Recognized config keys:
//   - "index_type": optional, defaults to "doc". NOTE(review): the value is
//     computed but never used below — confirm whether it should be wired
//     into the bulk indexer.
//   - "index": required; either a plain index name or "prefix%<layout>"
//     where <layout> is a Go time layout expanded per batch (see index()).
//   - "address": comma-separated Elasticsearch node addresses.
//
// Invalid configuration or a failed client construction panics. The second
// return value is always true.
func NewElasticsearchOutput(config map[string]interface{}) (*topology.Output, bool) {
	indexType, ok := config["index_type"]
	if ok {
		indexType = indexType.(string)
	} else {
		indexType = "doc"
	}
	indexHome, ok1 := config["index"]
	indexHome2 := indexHome.(string)
	if !ok1 {
		panic("es index not found")
	} else {
		// Validate the "prefix%layout" template early: a trailing '%' with
		// no layout is rejected.
		if strings.Index(indexHome2, "%") > -1 {
			rows := strings.Split(indexHome2, "%")
			if len(rows) == 2 && len(rows[1]) == 0 {
				panic(fmt.Sprintf("index:%s format fail", indexHome2))
			}
		}
	}
	address := strings.Split(config["address"].(string), ",")
	cfg := elasticsearch.Config{
		Addresses: address,
	}
	es, err := elasticsearch.NewClient(cfg)
	if err != nil {
		panic(err.Error())
	}
	output := &topology.Output{
		Messages: make(chan map[string]interface{}, 10),
		Stop:     false,
		Config:   config,
	}
	// Consumer goroutine: each outer iteration creates a fresh bulk indexer
	// (re-resolving the possibly time-based index name), drains a batch of
	// messages from the channel, then flushes via Close.
	go func() {
		for {
			indexName := index(indexHome2)
			bi, err := esutil.NewBulkIndexer(esutil.BulkIndexerConfig{
				Index:         indexName,        // The default index name
				Client:        es,               // The Elasticsearch client
				NumWorkers:    runtime.NumCPU(), // The number of worker goroutines
				FlushBytes:    int(5e+6),        // The flush threshold in bytes
				FlushInterval: 30 * time.Second, // The periodic flush interval
			})
			if err != nil {
				logging.Error(err.Error())
			}
			// NOTE(review): "<=" runs 1001 iterations per batch — confirm
			// whether exactly 1000 was intended.
			for i := 0; i <= 1000; i++ {
				// Blocks until a message is produced on the channel.
				body := <-output.Messages
				//fmt.Println("body:", body)
				data, err1 := json.Marshal(body)
				if err1 != nil {
					logging.Error(err1.Error())
				}
				err2 := bi.Add(context.Background(), esutil.BulkIndexerItem{
					Action: "index",
					Index:  indexName,
					Body:   bytes.NewReader(data),
				})
				if err2 != nil {
					logging.Error(err2.Error())
				}
			}
			// Close flushes any buffered items for this batch.
			if err3 := bi.Close(context.Background()); err3 != nil {
				logging.Error(err3.Error())
			}
		}
	}()
	return output, true
}
// index resolves a configured index name, expanding an optional
// "%<layout>" suffix with the current time formatted by that Go layout.
// Names without '%' (or with an empty layout, or more than one '%') fall
// back to the text before the first '%'.
func index(name string) string {
	if !strings.Contains(name, "%") {
		return name
	}
	parts := strings.Split(name, "%")
	if len(parts) != 2 || len(parts[1]) == 0 {
		return parts[0]
	}
	return fmt.Sprintf("%s%s", parts[0], time.Now().Format(parts[1]))
}
|
def run_term_query(self, search_term: str):
    """Query the term index for ``search_term`` and log the matching ads.

    A trailing ``%`` makes the term a prefix wildcard (expanded against the
    stored byte-string keys via a regex). Matching is case-insensitive.

    WARNING(review): this method is destructive — every term that does NOT
    match the query is deleted from ``self.terms``, and non-matching ads are
    pruned via ``delete_non_matching_aids``. Confirm this filtering-in-place
    behavior is intended before reuse.
    """
    __log__.info("starting term query: search_term: {}".format(search_term))
    if search_term.endswith("%"):
        __log__.debug("wildcard detected in search_term: {}".format(search_term))
        # Expand the prefix wildcard against all stored term keys.
        base_term = search_term[:-1]
        searching_terms = list((key.decode("utf-8").lower() for key, val in self.terms.items() if re.match(r"{}[a-zA-Z0-9\-_]*".format(base_term), key.decode("utf-8"))))
    else:
        searching_terms = [search_term.lower()]
    searching_terms = set(searching_terms)
    __log__.info("running search_term query: searching_terms: {}".format(searching_terms))
    term_matches = set()
    # Iterate over a snapshot (set(...)) because the store is mutated below.
    for term, data in set(self.terms.items()):
        term_str = term.decode("utf-8")
        data_str = data.decode("utf-8")
        if term_str.lower() in searching_terms:
            __log__.info("found matching db_term: {} data: {}".format(term_str, data_str))
            # The stored value is presumably the ad id (aid) for this term.
            term_matches.add(self.terms[term].decode("utf-8"))
        else:
            # Destructive: drop every non-matching term from the store.
            self.terms.__delitem__(term)
    for aid in term_matches:
        # self.ads appears to be a dbm-style mapping (has_key, byte keys).
        if self.ads.has_key(bytes(aid, "utf-8")):
            if self.full_output:
                __log__.info("found matching term: search_term: {} aid: {} ad: {}".format(search_term, aid, self.ads[bytes(aid, "utf-8")].decode("utf-8")))
            else:
                __log__.info("found matching term: aid: {} title: {}".format(aid, get_title(self.ads[bytes(aid, "utf-8")].decode("utf-8"))))
        else:
            __log__.debug("found matching term but no valid full ad relates to the aid: {}".format(aid))
    # Prune ads whose aid was not matched by this query.
    self.delete_non_matching_aids(term_matches)
    __log__.info("total hits: {}".format(len(self.ads)))
//NewAPIController create new instance of BaseController
func NewAPIController(c interface{}, name string, v *web.APIVersion) *APIController {
b := APIController{}
b.Name = name
b.APIVersion = v
return &b
} |
def iterate_one_material(rootDirr, material, maxError, maxIterations, energyMesh=None, fluxDict=None, verbosity=False):
    # Bondarenko-style fixed-point iteration for one material: repeatedly
    # read total cross sections for every (Z, A) nuclide, rebuild the
    # background cross sections they imply, and stop when the largest
    # per-nuclide change drops below maxError or maxIterations is reached.
    # (Python 2 module: print statements.)
    sig0Vec = None
    if verbosity:
        print 'Performing Bondarenko iteration for material {0}'.format(material.longName)
    ZAList = sorted(material.ZAList)
    # First pass reads from the raw 'gendf' files; later passes reuse the
    # cached 'pickle' form.
    readerOpt = 'gendf'
    totalXSDict = {}
    backgroundXSDict = {}
    iterationCount = 0
    globalError = 1.0
    # Initial read with no background vector (sig0Vec is None) to seed the
    # total cross sections.
    for (Z,A) in ZAList:
        read_one_total_xs(rootDirr, Z, A, material, totalXSDict, readerOpt, sig0Vec, energyMesh, fluxDict, verbosity)
    build_all_background_xs(material, totalXSDict, backgroundXSDict, verbosity)
    print_bondarenko(iterationCount, maxIterations, globalError, maxError, verbosity)
    readerOpt = 'pickle'
    while globalError > maxError and iterationCount < maxIterations:
        # globalError tracks the worst (largest) per-nuclide change this sweep.
        globalError = 0.0
        for (Z,A) in ZAList:
            sig0Vec = backgroundXSDict[(Z,A)]
            localError = read_one_total_xs(rootDirr, Z, A, material, totalXSDict, readerOpt, sig0Vec, energyMesh, fluxDict, verbosity)
            globalError = max(localError, globalError)
        build_all_background_xs(material, totalXSDict, backgroundXSDict, verbosity)
        iterationCount += 1
        print_bondarenko(iterationCount, maxIterations, globalError, maxError, verbosity)
    return backgroundXSDict
/**
 * Abstract service controller implementation.
 * <p>
 * Adapts the generic {@code ConnectionBase} binder callback to the typed
 * positioning-service control interface.
 */
public abstract static class ApiConnection extends ConnectionBase {

    @Override
    protected void onBaseConnected(IBinder service) {
        // Convert the raw binder into the typed AIDL stub before notifying.
        onConnected(IPositioningServiceControl.Stub.asInterface(service));
    }

    /**
     * Called with a valid service controller AIDL instance once the service
     * connection is made.
     *
     * @param controller Service controller AIDL instance.
     */
    public abstract void onConnected(IPositioningServiceControl controller);
}
/**
 * Generates SQL to modify a column in a table
 *
 * Not supported by the Oracle dialect: the parameters are fetched only to
 * validate the call signature, then a Phalcon\Db\Exception is thrown.
 *
 * @param string $tableName
 * @param string $schemaName
 * @param Phalcon\Db\ColumnInterface $column
 * @return string
 */
PHP_METHOD(Phalcon_Db_Dialect_Oracle, modifyColumn){

	zval *table_name, *schema_name, *column;

	/* 3 required arguments, 0 optional, no memory frame needed. */
	phalcon_fetch_params(0, 3, 0, &table_name, &schema_name, &column);

	/* ALTER ... MODIFY is not implemented for Oracle in this dialect. */
	PHALCON_THROW_EXCEPTION_STRW(phalcon_db_exception_ce, "Not implemented yet");
	return;
}
// delete remove an item from the cache. It's not thread-safe.
// Only other functions of the bindCache can use this function.
func (c *bindCache) delete(key bindCacheKey) bool {
bindRecords := c.get(key)
if bindRecords != nil {
mem := calcBindCacheKVMem(key, bindRecords)
c.cache.Delete(key)
c.memTracker.Consume(-mem)
return true
}
return false
} |
WORCESTER, Mass. (AP) — According to recently filed court documents, a Massachusetts school is alleging that a student who was raped overseas is partially responsible because she was drinking that night and chose to follow a stranger onto a dark rooftop.
The Boston Globe reports (http://bit.ly/25JPPnw ) Worcester Polytechnic Institute made the arguments in response to a civil suit filed last year in which the victim alleges that the college failed to provide a safe environment for students.
The woman was assaulted by a security guard in April 2012 at a university-leased apartment building in Puerto Rico, where she was completing a research project.
The school’s attorneys argue the victim engaged in risky behavior including excessive drinking and disregarded training about how to protect herself from harm.
The woman’s lawyers say the school’s argument is an attempt to harass and intimidate her.
___
Information from: The Boston Globe, http://www.bostonglobe.com |
package com.nosvisuals.engine;
import processing.core.*;
import java.util.*;
/**
 * State holder for one visual-engine GUI parameter (knob, fader, toggle,
 * button, bang, or radio-button group), mirroring values to and from the
 * attached controlP5 controllers.
 */
public class VisualEngineParameter {
    public String name;      // parameter name
    public String type;      // widget type string, e.g. "knob", "fader", "toggle", "bang", "button", "radioButton"
    public int index;        // external index of this parameter
    public float min;        // lower bound of the value range
    public float max;        // upper bound of the value range
    public float value;      // current value
    public float valuePre;   // previous value, kept for change detection
    public String label;     // display label
    public ArrayList<controlP5.Controller> controllers;  // bound GUI widgets; for radio groups, one per option plus one aggregate (see renderRadio)
    public float[] radioValues;     // current per-option states (radioButton type only)
    public float[] radioValuesPre;  // previous per-option states
    public int length;              // number of radio options
    public int radioButtonIndex = -1;
    public boolean bangEnable = false;
    public boolean isChanged = false;
    // for Knob & Fader
    public VisualEngineParameter(String _name, String _type, int _index, float _min, float _max, String _label){
        name = _name;
        type = _type;
        index = _index;
        min = _min;
        max = _max;
        label = _label;
        controllers = new ArrayList<controlP5.Controller>();
    }
    // for Toggle & Button & Bang: binary widgets, so the range is fixed to [0, 1]
    public VisualEngineParameter(String _name, String _type, int _index, String _label){
        name = _name;
        type = _type;
        index = _index;
        label = _label;
        min = 0.f;
        max = 1.f;
        controllers = new ArrayList<controlP5.Controller>();
    }
    // for RadioButton: one state slot per option, all initialized to off
    public VisualEngineParameter(String _name, String _type, int _index, int _length, String _label){
        name = _name;
        type = _type;
        index = _index;
        label = _label;
        length = _length;
        min = 0.f;
        max = _length;
        controllers = new ArrayList<controlP5.Controller>();
        radioValues = new float[_length];
        radioValuesPre = new float[_length];
        for(int i = 0; i < _length; i++){
            radioValues[i] = 0;
            radioValuesPre[i] = 0;
        }
    }
    /**
     * Syncs radio state from the GUI. A newly activated option deactivates all
     * others and is written to the aggregate controller at index {@code length}
     * (assumed to exist after the per-option controllers — TODO confirm with
     * the code that populates {@code controllers}). When no option is active,
     * the aggregate value becomes -1.
     */
    public void renderRadio(){
        int isZero = 0;  // accumulates option states; stays 0 if nothing is active
        valuePre = value;
        for(int i = 0; i < length; i++){
            radioValuesPre[i] = radioValues[i];
            radioValues[i] = controllers.get(i).getValue();
            if(radioValues[i]>radioValuesPre[i]){
                // this option just transitioned off -> on
                deactivateOthers(i);
                controllers.get(length).setValue(i);
                value = i;
            }
            isZero += radioValues[i];
        }
        if(isZero == 0){
            controllers.get(length).setValue(-1);
            value = -1;
        }
    }
    /** Flips the state of one radio option when the incoming data is positive. */
    public void toggleRadio(float _data, int _radioIndex){
        if(_data > 0){
            controllers.get(_radioIndex).setValue((controllers.get(_radioIndex).getValue() == 0) ? 1 : 0);
        }
    }
    /**
     * Activates one radio option, records it in the aggregate controller, and
     * deactivates the rest. An out-of-range index skips the per-option write
     * but still updates the aggregate value (e.g. -1 to deselect).
     */
    public void activateRadio(int _radioIndex){
        if(_radioIndex < length){
            controlP5.Controller c = controllers.get(_radioIndex);
            c.setValue(1);
            radioValues[_radioIndex] = 1;
        }
        controlP5.Controller c = controllers.get(length);
        c.setValue(_radioIndex);
        value = _radioIndex;
        deactivateOthers(_radioIndex);
    }
    /** Deactivates a single radio option (no effect on the aggregate value). */
    public void deactivateRadio(int _radioIndex){
        if(_radioIndex < length){
            controlP5.Controller c = controllers.get(_radioIndex);
            c.setValue(0);
            radioValues[_radioIndex] = 0;
        }
    }
    /** Deactivates every radio option except the given one. */
    public void deactivateOthers(int _radioIndex){
        for(int i = 0; i < length; i++){
            if(i != _radioIndex){
                controlP5.Controller c = controllers.get(i);
                c.setValue(0);
                radioValues[i] = 0;
            }
        }
    }
    /** Deactivates every radio option. */
    public void deactivateAll(){
        for(int i = 0; i < length; i++){
            controlP5.Controller c = controllers.get(i);
            c.setValue(0);
            radioValues[i] = 0;
        }
    }
    /** Pulls the current value from the primary controller (non-radio types). */
    public void getValuesFromGUI(){
        if(!type.equals("radioButton")){
            valuePre = value;
            value = controllers.get(0).getValue();
        }
    }
    /** Pushes a value to the primary controller and caches it (non-radio types). */
    public void setValuesFromGUI(float _data){
        if(!type.equals("radioButton")){
            controllers.get(0).setValue(_data);
            valuePre = value;
            value = _data;
        }
    }
    /** Writes a raw value directly to the primary controller. */
    public void setToggle(float _data){
        controllers.get(0).setValue(_data);
    }
    /**
     * Applies an incoming value according to the widget semantics:
     * knob/fader set it directly, toggle flips on positive input, bang fires
     * on positive input, button follows positive/zero input.
     */
    public void update(float _data){
        if(type.equals("knob") || type.equals("fader")){
            controllers.get(0).setValue(_data);
        } else if(type.equals("toggle")){
            if(_data > 0){
                controllers.get(0).setValue((controllers.get(0).getValue() == 0) ? 1 : 0);
            }
        } else if(type.equals("bang")){
            if(_data > 0){
                controllers.get(0).setValue(1);
            }
        } else if(type.equals("button")){
            if(_data > 0){
                controllers.get(0).setValue(1);
            } else if(_data == 0){
                controllers.get(0).setValue(0);
            }
        }
    }
    /** Recomputes and returns whether the value changed since the last sync. */
    public boolean checkChange(){
        if(value != valuePre){
            isChanged = true;
        } else {
            isChanged = false;
        }
        return isChanged;
    }
}
|
package org.motechproject.event.it;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.impl.EventListenerRegistry;
import org.motechproject.event.listener.impl.ServerEventRelay;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Ignore
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath*:META-INF/motech/*.xml"})
public class AnnotationBasedHandlerIT {

    /** Flipped to true by the annotation-driven listeners under test. */
    private static boolean test = false;

    @Autowired
    private ServerEventRelay eventRelay;

    @Autowired
    private EventListenerRegistry eventListenerRegistry;

    /**
     * Relays a MotechEvent to the given subject, exposing the supplied
     * objects as parameters keyed "0", "1", ... in order.
     */
    private void send(String dest, Object... objects) {
        Map<String, Object> params = new HashMap<String, Object>();
        for (int pos = 0; pos < objects.length; pos++) {
            params.put(Integer.toString(pos), objects[pos]);
        }
        eventRelay.relayQueueEvent(new MotechEvent(dest, params));
    }

    /** Resets the shared flag before each scenario. */
    public static void clear() {
        test = false;
    }

    @Test
    public void testRegistry() {
        assertEquals(2, eventListenerRegistry.getListenerCount("sub_a"));
        assertEquals(1, eventListenerRegistry.getListenerCount("sub_b"));
        assertEquals(1, eventListenerRegistry.getListenerCount("sub_c"));
    }

    @Test
    public void testRelay() {
        clear();
        eventRelay.relayQueueEvent(new MotechEvent("sub_b", null));
        assertTrue(test);

        clear();
        eventRelay.relayQueueEvent(new MotechEvent("sub_c", null));
        assertTrue(test);
    }

    @Test
    public void testOrderedParams() {
        clear();
        send("params", 23, 44, null);
        assertTrue(test);
    }

    @Test
    public void testNamedParamsHappy() {
        clear();
        MotechEvent event = new MotechEvent("named");
        event.getParameters().put("id", "id0012");
        event.getParameters().put("key", "2354");
        eventRelay.relayQueueEvent(event);
        assertTrue(test);
    }
}
|
package org.wikipedia.analytics;
import android.support.annotation.NonNull;
import org.json.JSONObject;
import org.wikipedia.WikipediaApp;
import org.wikipedia.dataclient.WikiSite;
/**
 * Analytics funnel that counts swipe and click navigation events in the
 * randomizer feature and logs the totals in a single event when done.
 */
public class RandomizerFunnel extends TimedFunnel {
    private static final String SCHEMA_NAME = "MobileWikiAppRandomizer";
    private static final int REV_ID = 18118733;

    private final int source;
    private int swipeForwardCount;
    private int swipeBackCount;
    private int clickForwardCount;
    private int clickBackCount;

    public RandomizerFunnel(WikipediaApp app, WikiSite wiki, int source) {
        super(app, SCHEMA_NAME, REV_ID, Funnel.SAMPLE_LOG_ALL, wiki);
        this.source = source;
    }

    // Intentionally empty: this schema does not carry a session token.
    @Override protected void preprocessSessionToken(@NonNull JSONObject eventData) { }

    public void swipedForward() {
        swipeForwardCount++;
    }

    public void swipedBack() {
        swipeBackCount++;
    }

    public void clickedForward() {
        clickForwardCount++;
    }

    public void clickedBack() {
        clickBackCount++;
    }

    /** Emits one log event containing all accumulated counters. */
    public void done() {
        log(
                "source", source,
                "fingerSwipesForward", swipeForwardCount,
                "fingerSwipesBack", swipeBackCount,
                "diceClicks", clickForwardCount,
                "backClicks", clickBackCount
        );
    }
}
|
/**
* Utility class consisting of static methods that generate various generally
* useful messages that can be sent to the client. These messages are sent by
* a number of different classes, including, potentially, application-defined
* {@link Mod} classes, so the methods to construct these messages don't
* naturally belong with any particular server abstraction. Hence this bag of
* miscellany.
*/
public class Msg {
/**
* Suppress the Miranda constructor.
*/
private Msg() { }
/**
* Create a 'delete' message. This directs a client to delete an object.
*
* @param target Object the message is being sent to (the object being
* deleted).
*/
static public JSONLiteral msgDelete(Referenceable target) {
JSONLiteral msg = new JSONLiteral(target, "delete");
msg.finish();
return msg;
}
/**
* Create an 'error' message. This informs the client that something went
* wrong.
*
* @param target Object the message is being sent to (the object being
* informed).
* @param op Operation to be performed.
* @param error Contents of the error message.
*/
static public JSONLiteral msgError(Referenceable target, String op,
String error)
{
JSONLiteral msg = new JSONLiteral(target, op);
msg.addParameter("error", error);
msg.finish();
return msg;
}
/**
* Create an 'exit' message.
*
* @param target Object the message is being sent to.
* @param why Helpful text explaining the reason for the exit.
* @param whyCode Machine readable tag indicating the reason for the exit.
* @param reload True if client should attempt a reload.
*/
static JSONLiteral msgExit(Referenceable target, String why,
String whyCode, boolean reload)
{
JSONLiteral msg = new JSONLiteral(target, "exit");
msg.addParameterOpt("why", why);
msg.addParameterOpt("whycode", whyCode);
if (reload) {
msg.addParameter("reload", reload);
}
msg.finish();
return msg;
}
/**
* Create a 'make' message. This directs a client to create an object.
*
* @param target Object the message is being sent to (the object that is
* to be the container of the new object).
* @param obj The object that is to be created by the client.
* @param maker The user who is to be represented as the creator of the
* object, or null if none is.
* @param you If true, object being made is its recipient.
* @param sess The client context session ID, or null if there is none.
*/
static public JSONLiteral msgMake(Referenceable target, BasicObject obj,
User maker, boolean you, String sess)
{
JSONLiteral msg = new JSONLiteral(target, "make");
msg.addParameter("obj", (Encodable) obj);
msg.addParameterOpt("maker", (Referenceable) maker);
if (you) {
msg.addParameter("you", you);
}
msg.addParameterOpt("sess", sess);
msg.finish();
return msg;
}
/**
* Create a 'make' message. This directs a client to create an object.
* This method is a notational convenience; it is equivalent to the
* five-argument 'make' with the 'you' parameter set to false (which is, by
* far, the typical case) and no context session ID.
*
* @param target Object the message is being sent to (the object that is
* to be the container of the new object).
* @param obj The object that is to be created by the client.
* @param maker The user who is to be represented as the creator of the
* object, or null if none is.
*/
static public JSONLiteral msgMake(Referenceable target, BasicObject obj,
User maker)
{
return msgMake(target, obj, maker, false, null);
}
/**
* Create a 'make' message with a default creator and explicit session
* identifier. This method is exactly equivalent to:
*
* <p><tt>msgMake(target, obj, null, false, sess)</tt>
*
* <p>and is provided just for convenience.
*
* @param target Object the message is being sent to (the object that is
* to be the container of the new object).
* @param obj The object that is to be created by the client.
* @param sess The client context session ID, or null if there is none.
*/
static public JSONLiteral msgMake(Referenceable target, BasicObject obj,
String sess)
{
return msgMake(target, obj, null, false, sess);
}
/**
* Create a 'make' message with a default creator. This method is
* exactly equivalent to:
*
* <p><tt>msgMake(target, obj, null, false, null)</tt>
*
* <p>and is provided just for convenience.
*
* @param target Object the message is being sent to (the object that is
* to be the container of the new object).
* @param obj The object that is to be created by the client.
*/
static public JSONLiteral msgMake(Referenceable target, BasicObject obj) {
return msgMake(target, obj, null, false, null);
}
/**
* Create a 'push' message. This directs a client to push the browser to a
* different URL than the one it is looking at.
*
* @param target Object the message is being sent to (normally this will
* be a user or context).
* @param from Object the message is to be alleged to be from, or
* null if not relevant. This normally indicates the user who is doing
* the pushing.
* @param url The URL being pushed.
* @param frame Name of a frame to push the URL into, or null if not
* relevant.
* @param features Features string to associate with the URL, or null if
* not relevant.
*/
static public JSONLiteral msgPush(Referenceable target, Referenceable from,
String url, String frame, String features)
{
JSONLiteral msg = new JSONLiteral(target, "push");
msg.addParameterOpt("from", from);
msg.addParameter("url", url);
msg.addParameterOpt("frame", frame);
msg.addParameterOpt("features", features);
msg.finish();
return msg;
}
/**
* Create a 'ready' message.
*
* @param target Object the message is being sent to.
*/
static JSONLiteral msgReady(Referenceable target) {
JSONLiteral msg = new JSONLiteral(target, "ready");
msg.finish();
return msg;
}
/**
* Create a 'say' message. This directs a client to display chat text.
*
* @param target Object the message is being sent to (normally this will
* be a user or context).
* @param from Object the message is to be alleged to be from, or null if
* not relevant. This normally indicates the user who is speaking.
* @param text The text to be said.
*/
static public JSONLiteral msgSay(Referenceable target, Referenceable from,
String text)
{
JSONLiteral msg = new JSONLiteral(target, "say");
msg.addParameterOpt("from", from);
msg.addParameter("text", text);
msg.finish();
return msg;
}
} |
<gh_stars>0
package comm
import (
"net/rpc"
)
// Sender issues vote-request and append-entries RPCs to peer nodes.
type Sender struct {
	// Addr lists the network addresses of the peer nodes.
	Addr []string
}
// RequestVote invokes Service.RequestVote on the peer at addr, storing the
// peer's reply in result.
func (s *Sender) RequestVote(addr string, args VoteArgs, result *VoteResult) error {
	return rpcRequest(addr, "Service.RequestVote", args, result)
}
// AppEntries invokes Service.AppendEntries on the peer at addr, storing the
// peer's reply in result.
func (s *Sender) AppEntries(addr string, args AppEntryArgs, result *AppEntryResult) error {
	return rpcRequest(addr, "Service.AppendEntries", args, result)
}
// rpcRequest dials the RPC server at addr over HTTP, invokes the named
// method with args, and stores the reply in result.
//
// Fix: the original leaked one TCP connection per call because the
// *rpc.Client returned by DialHTTP was never closed; the deferred Close
// releases it on every path.
func rpcRequest(addr string, method string, args interface{}, result interface{}) error {
	client, err := rpc.DialHTTP("tcp", addr)
	if err != nil {
		return err
	}
	defer client.Close()
	return client.Call(method, args, result)
}
|
Obedience to normalcy
is what lobotomies are for.—Crass
Someone sent me a link to Tricycle magazine’s “Daily Dharma” for February 3-10. My first response, when I get such links from the Buddhist glossies is to hit delete. Ready for some procrastination, though, I read this one. The advice distilled in this “Wisdom Collection” confirmed a growing suspicion of mine: meditation/mindfulness in present-day North America is hardly distinguishable from lobotomy.
Consider this. Among the “good results” of a prefrontal lobotomy are calming of obsessive-compulsive states; reduction of chronic anxiety; lessening of recursive introspection; amelioration of affective disorders; reduction of feelings of inadequacy and self-consciousness; reduction of emotional tension. Sound familiar? Most significantly—Kabat-Zinnites take note!— prefrontal lobotomy
has also been used successfully to control pain secondary to organic lesions. In this case, the tendency has been to employ unilateral lobotomy, because of the evidence that a lobotomy extensive enough to reduce psychotic symptoms is not required to control pain. (My source for all of this is Leland E. Hinsie and Robert Jean Campbell [1970]. Psychiatric Dictionary. Fourth Edition. Oxford University Press.).
I am not saying that meditation has similar effects as a lobotomy. How could I? Pardon the pun, but “meditation” is nowhere near as cut and dry as “lobotomy.” My point is that the contemporary western rhetoric of meditation/mindfulness suggests a similarity. In case you think my comparison of the two is overly cute (as opposed to merely cute), here are some pearls of wisdom from Tricycle’s “Daily Dharma.”
In “Finding Sense in Sensation,” S. N. Goenka recommends that we attend to the “arising and passing” of sensation. Why? Well, precisely not to feel life more acutely; precisely not to be more alive to the rich, intricate textures of human existence. No. The “sense in sensation” is to “understand its flux,” in order to “learn not to react to it.” Fuck that is my reaction.
Goenka’s is a rhetoric of control, of resisting the demands of unruly, hence dangerous, sensation. It repeats the tendency of contemporary x-buddhistic meditation rhetoric to condemn strong emotions. In employing such rhetoric, x-buddhism’s roots show; and they have the fleshless hue of ascetic, world-renouncing moralizing.
Allan Lokos’s “Daily Dharma” of February 4 continues in this vein. The wisdom he imparts involves, as his title states, “Cooling Emotional Fires.” “Anger, annoyance, and impatience deplete energy,” he teaches.
So, what should we do to tame these quite natural and often exceptionally useful human responses to our environment? Well, first of all, we should just be patient, for “Patient effort strengthens our resources.” I find such tired x-buddhistic clichés exceedingly annoying. I suppose the protesters on Tahrir Square finally did, too. And they, alas, would not have cared for Lokos’s advice on what to do with their impatience and anger:
We need to practice cooling emotional fires and alleviating fierce disruptions from our lives.
Again, a crypto-ascetic rhetoric of human denial, emotional repression, and general lassitude. We don’t need no water—let the motherfucker burn is the fierce disruption from my life.
Sharon Salzberg and Joseph Goldstein reinforce this emotion-phobic rhetoric of x-buddhism in their February 6 “Daily Dharma,” titled “Cutting Through Anger.” Their use of the word “cutting” also, of course, unintentionally creates a parallel to lobotomy. Like 1940s-era doctors, they, too, want to cut off vibrant, pulsing expressions of human being in the name of some utopian, and anodyne, “well-being.” They call their lobotomy “mental noting:”
Mental noting takes us in a very different direction from getting lost in a story: “Oh, this anger is so miserable; I am such a terrible person because I’m always angry; this is just how I will always be,” and so on. Instead, we simply say to ourselves, “anger, anger”—and cut through all of that elaboration, the story, the judgment, the interpretation.
Sharon and Joseph, I have a question for you: how will you cut through all of that elaboration, through that story, through that judgment and that interpretation? Or are you two liberated from story?
Bullshit bullshit is the miserable story I’m getting lost in right now.
“Mental noting” is just another strategy of real-world renunciation; it is just more crypto-ascetic x-buddhistic rhetoric. Yet, no sooner do I say this than Clark Strand contradicts me in the very next “Daily Dharma,” titled “Living with the World.”
We are not called upon as Buddhists to deny the world, and certainly not to escape from it. We are called to live with it, and to make our peace with all that is.
Well, wait a minute; I take that back. Making “our peace with all that is” is not the same thing as “living with the world.” In fact, it is just the opposite. It is not living at all. It is merely operating under the yoke of vacuous spiritualized prescription. Strand’s “called upon/to” is about as close to Althusser’s “hailing/interpellation” as I’ve heard an x-buddhist come to admitting the hidden ideological claws of x-buddhism.
Again, this is a rhetoric of renunciation that veers toward the human-hostile. Do you want the promise of Buddhism to manifest in your life? Then you must make peace with all that is, goddamit! Oh, yes, that promise. Let us bow our heads:
The world of worries we wish to escape from in the beginning of Buddhist practice is found to be enlightenment itself in the end.
The “world of worries” is not fucking “enlightenment.” It is the world of worries.
We continue to get lobotomy-like results and instruction in Jason Siff‘s “Gentle Meditation” (“try approaching [meditation practices] in a softer, gentler manner,” etc.), in Peter Doobinin‘s employment of the “just do it” rhetoric (“You’re just walking. This is a good instruction: just walk…sense the joy in simply walking”). Brad Warner tops it all off by reminding us that “there are no magic solutions.” Ironically, though, he sprinkles fairy dust on his “no magic” by claiming for it the “one lesson that runs through pretty much every Buddhist tradition.”
In “Axiomatic Heresy,” Ray Brassier comments that François Laruelle sees “a philosopher” as a person who never says what he is really doing, and never does what he is really saying. Can we say the same for those x-buddhists who prescribe, and subscribe to, the formulations of contemporary x-buddhist meditation/mindfulness rhetoric? In what sense could any of them really be doing what they claim here? And do you really believe that they are honestly saying what they do do? What would other guests at the Great Feast of Knowledge—biology, physics, gastronomy, literature, political science—have to say about their claims?
“Obedience to normalcy is what lobotomies are for,” barks Steve Ignorant. Is that what meditation/mindfulness is for, too? Reading Tricycle’s “Daily Dharma,” you really have to wonder.
Tricycallergic? Yea. Try this instead:
Or this:
Tricycle’s “Wisdom Collection.”
Leland E. Hinsie and Robert Jean Campbell (1970). Psychiatric Dictionary. Fourth Edition. Oxford University Press (on Google books). |
<reponame>WambuaSimon/RoadQualityLab
package com.softteco.roadqualitydetector.sqlite.dao;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.softteco.roadqualitydetector.sqlite.DataBaseHelper;
import com.softteco.roadqualitydetector.sqlite.model.BaseModel;
import com.softteco.roadqualitydetector.sqlite.model.MeasurementItem;
import com.softteco.roadqualitydetector.sqlite.model.RoadModel;
import com.softteco.roadqualitydetector.sqlite.model.TagModel;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Aleksey on 17.04.2015.
*/
public class TagDAO extends BaseDAO implements UploadDAO {
    /** Creates a DAO for the tags table backed by the shared database helper. */
    public TagDAO(final Context context) {
        super(context);
    }
    // Column names of the tags table (original comment said "bump table";
    // these are used below for the tags table). The primary key reuses the
    // shared record-id column name from RecordDAO.
    public static final String COLUMN_TAG_ID = RecordDAO.COLUMN_RECORD_ID;
    public static final String COLUMN_TAG_FOLDER_ID = "folder_id";
    public static final String COLUMN_TAG_ROAD_ID = "road_id";
    public static final String COLUMN_TAG_MEASUREMENT_ID = "measurement_id";
    public static final String COLUMN_TAG_TIME = "time";
    public static final String COLUMN_TAG_SPEED = "speed";
    public static final String COLUMN_TAG_NAME = "name";
    public static final String COLUMN_TAG_DESCRIPTION = "description";
    public static final String COLUMN_TAG_LATITUDE = "latitude";
    public static final String COLUMN_TAG_LONGITUDE = "longitude";
    public static final String COLUMN_TAG_ALTITUDE = "altitude";
    public static final String COLUMN_TAG_UPLOADED = "uploaded";
    public static final String COLUMN_TAG_PENDING = "pending";
    public static final String COLUMN_TAG_FIRST_IMAGE = "first_image";
    public static final String COLUMN_TAG_SECOND_IMAGE = "second_image";
    public static final String COLUMN_TAG_THIRD_IMAGE = "third_image";
    public static final String COLUMN_TAG_AUDIO = "tag_audio";
    public static final String COLUMN_TAG_NOTES = "notes";
    public static final String COLUMN_TAG_SINGLE = "single"; //without measurement
    public static final String COLUMN_TAG_ROAD_CONDITION = "road_condition";
    public static final String COLUMN_TAG_IRI = "iri";
    // Every column of the tags table; used as the projection for all queries
    // and read back in cursorToTag().
    private String[] allColumns = {
            COLUMN_TAG_ID,
            COLUMN_TAG_FOLDER_ID,
            COLUMN_TAG_ROAD_ID,
            COLUMN_TAG_MEASUREMENT_ID,
            COLUMN_TAG_TIME,
            COLUMN_TAG_SPEED,
            COLUMN_TAG_NAME,
            COLUMN_TAG_DESCRIPTION,
            COLUMN_TAG_LATITUDE,
            COLUMN_TAG_LONGITUDE,
            COLUMN_TAG_ALTITUDE,
            COLUMN_TAG_UPLOADED,
            COLUMN_TAG_PENDING,
            COLUMN_TAG_FIRST_IMAGE,
            COLUMN_TAG_SECOND_IMAGE,
            COLUMN_TAG_THIRD_IMAGE,
            COLUMN_TAG_AUDIO,
            COLUMN_TAG_NOTES,
            COLUMN_TAG_SINGLE,
            COLUMN_TAG_ROAD_CONDITION,
            COLUMN_TAG_IRI};
    // DDL creating the tags table; presumably executed by DataBaseHelper on
    // database creation/upgrade (helper not visible here — confirm).
    public static final String SQL_CREATE_TABLE_TAGS = "CREATE TABLE " + DataBaseHelper.TABLE_TAGS + "("
            + COLUMN_TAG_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, "
            + COLUMN_TAG_FOLDER_ID + " INTEGER NOT NULL, "
            + COLUMN_TAG_ROAD_ID + " INTEGER NOT NULL, "
            + COLUMN_TAG_MEASUREMENT_ID + " INTEGER NOT NULL, "
            + COLUMN_TAG_TIME + " INTEGER NOT NULL, "
            + COLUMN_TAG_SPEED + " REAL NOT NULL, "
            + COLUMN_TAG_NAME + " TEXT, "
            + COLUMN_TAG_DESCRIPTION + " TEXT, "
            + COLUMN_TAG_LATITUDE + " REAL NOT NULL, "
            + COLUMN_TAG_LONGITUDE + " REAL NOT NULL, "
            + COLUMN_TAG_ALTITUDE + " REAL NOT NULL, "
            + COLUMN_TAG_UPLOADED + " INTEGER NOT NULL DEFAULT (0), "
            + COLUMN_TAG_PENDING + " INTEGER NOT NULL DEFAULT (0), "
            + COLUMN_TAG_FIRST_IMAGE + " TEXT, "
            + COLUMN_TAG_SECOND_IMAGE + " TEXT, "
            + COLUMN_TAG_THIRD_IMAGE + " TEXT, "
            + COLUMN_TAG_AUDIO + " TEXT, "
            + COLUMN_TAG_NOTES + " TEXT, "
            + COLUMN_TAG_SINGLE + " INTEGER NOT NULL DEFAULT (0), "
            + COLUMN_TAG_ROAD_CONDITION + " INTEGER, "
            + COLUMN_TAG_IRI + " REAL NOT NULL "
            + ");";
    // DDL dropping the tags table.
    public static final String SQL_DROP_TABLE_TAGS = "DROP TABLE IF EXISTS " + DataBaseHelper.TABLE_TAGS;
    /**
     * Inserts a tag row inside its own transaction.
     *
     * @param data the tag to persist
     * @return the new row id, or -1 if the insert failed
     */
    public long put(final TagModel data) {
        long insertId = -1;
        SQLiteDatabase db = getDatabase();
        try {
            db.beginTransaction();
            final ContentValues values = getContentValues(data);
            insertId = db.insert(DataBaseHelper.TABLE_TAGS, null, values);
            db.setTransactionSuccessful();
        } catch (Exception e) {
            Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
        } finally {
            db.endTransaction();
        }
        return insertId;
    }
    /**
     * Maps a TagModel onto ContentValues for insert/update. Booleans are
     * stored as 0/1 and the road condition as its numeric id.
     */
    private ContentValues getContentValues(final TagModel data) {
        final ContentValues values = new ContentValues();
        values.put(COLUMN_TAG_FOLDER_ID, data.getFolderId());
        values.put(COLUMN_TAG_ROAD_ID, data.getRoadId());
        values.put(COLUMN_TAG_MEASUREMENT_ID, data.getMeasurementId());
        values.put(COLUMN_TAG_TIME, data.getTime());
        values.put(COLUMN_TAG_SPEED, data.getSpeed());
        values.put(COLUMN_TAG_NAME, data.getName());
        values.put(COLUMN_TAG_DESCRIPTION, data.getDescription());
        values.put(COLUMN_TAG_LATITUDE, data.getLatitude());
        values.put(COLUMN_TAG_LONGITUDE, data.getLongitude());
        values.put(COLUMN_TAG_ALTITUDE, data.getAltitude());
        values.put(COLUMN_TAG_UPLOADED, data.isUploaded() ? 1 : 0);
        values.put(COLUMN_TAG_PENDING, data.isPending() ? 1 : 0);
        // The model carries exactly three image slots (see cursorToTag).
        values.put(COLUMN_TAG_FIRST_IMAGE, data.getImages()[0]);
        values.put(COLUMN_TAG_SECOND_IMAGE, data.getImages()[1]);
        values.put(COLUMN_TAG_THIRD_IMAGE, data.getImages()[2]);
        values.put(COLUMN_TAG_AUDIO, data.getAudioFile());
        values.put(COLUMN_TAG_NOTES, data.getNotes());
        values.put(COLUMN_TAG_SINGLE, data.isSingle() ? 1 : 0);
        values.put(COLUMN_TAG_ROAD_CONDITION, data.getRoadCondition().getId());
        values.put(COLUMN_TAG_IRI, data.getIri());
        return values;
    }
    /** Inserts all tags without reassigning their measurement id. */
    public void putList(final List<TagModel> items) {
        putList(items, -1);
    }
public void putList(final List<TagModel> items, final long measurementId) {
if (measurementId >= 0) {
deleteItemsWithMeasurementId(measurementId);
}
for (TagModel d : items) {
if (measurementId >= 0) {
d.setMeasurementId(measurementId);
}
put(d);
}
}
    /**
     * Counts tags matching the folder/road/measurement filter. getQueryStr
     * presumably treats negative ids as "no filter" (defined in BaseDAO —
     * confirm there).
     */
    public long getAllItemsCount(long folderId, long roadId, long measurementId) {
        String queryStr = getQueryStr(folderId, roadId, measurementId);
        return getAllItemsCountForId(DataBaseHelper.TABLE_TAGS, queryStr);
    }
public TagModel getLastTag() {
Cursor c = getAllTagsCursor();
TagModel tag = null;
try {
if (c != null) {
if (c.getCount() > 0) {
c.moveToLast();
tag = cursorToTag(c);
}
c.close();
}
} catch (Exception e) {
Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
}
return tag;
}
    /** Deletes every tag belonging to the given measurement, in a transaction. */
    public void deleteItemsWithMeasurementId(final long measurementId) {
        SQLiteDatabase db = getDatabase();
        try {
            db.beginTransaction();
            db.delete(DataBaseHelper.TABLE_TAGS, COLUMN_TAG_MEASUREMENT_ID + " = " + measurementId, null);
            db.setTransactionSuccessful();
        } catch (Exception e) {
            Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
        } finally {
            db.endTransaction();
        }
    }
public void deleteItemsWithRoadId(final long roadId) {
SQLiteDatabase db = getDatabase();
try {
db.beginTransaction();
db.delete(DataBaseHelper.TABLE_TAGS, COLUMN_TAG_ROAD_ID + " = " + roadId, null);
db.setTransactionSuccessful();
} catch (Exception e) {
Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
} finally {
db.endTransaction();
}
}
    /**
     * Deletes one tag by its id, in a transaction.
     *
     * @return the number of rows removed (0 on failure or if absent)
     */
    public int delete(final TagModel data) {
        long id = data.getId();
        int count = 0;
        SQLiteDatabase db = getDatabase();
        try {
            db.beginTransaction();
            count = db.delete(DataBaseHelper.TABLE_TAGS, COLUMN_TAG_ID + " = " + id, null);
            db.setTransactionSuccessful();
        } catch (Exception e) {
            Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
        } finally {
            db.endTransaction();
        }
        return count;
    }
    /**
     * Queries tags matching the folder/road/measurement filter and positions
     * the cursor on the first row. May return null if the query throws.
     * Caller owns (and must close) the returned cursor.
     */
    public Cursor getItemsCursor(long folderId, long roadId, long measurementId) {
        String queryStr = getQueryStr(folderId, roadId, measurementId);
        Cursor cursor = null;
        try {
            cursor = getDatabase().query(DataBaseHelper.TABLE_TAGS,
                    allColumns, queryStr, null, null, null, null);
            cursor.moveToFirst();
        } catch (Exception e) {
            Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
        }
        return cursor;
    }
public void moveTags(long folderId, long roadId, long measurementId) {
Cursor cursor = getItemsCursor(-1, -1, measurementId);
if (cursor != null) {
boolean moveToNext = cursor.getCount() > 0;
TagModel tag = null;
while(moveToNext) {
tag = cursorToTag(cursor);
moveTag(folderId, roadId, measurementId, tag);
moveToNext = cursor.moveToNext();
}
cursor.close();
}
}
    /**
     * Re-parents one tag: each non-negative id is written to the model, then
     * the row is persisted. Negative ids leave the corresponding field as-is.
     */
    public void moveTag(long folderId, long roadId, long measurementId, TagModel tag) {
        if (folderId >= 0) {
            tag.setFolderId(folderId);
        }
        if (roadId >= 0) {
            tag.setRoadId(roadId);
        }
        if (measurementId >= 0) {
            tag.setMeasurementId(measurementId);
        }
        updateItem(tag);
    }
public TagModel getItemById(final long id) {
TagModel tag = null;
try {
final Cursor cursor = getTagByIdCursor(id);
tag = cursorToTag(cursor);
} catch (Exception e) {
Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
}
return tag;
}
    /**
     * Queries one tag row by id with a parameterized selection and positions
     * the cursor on the first row. Caller owns (and must close) the cursor.
     */
    public Cursor getTagByIdCursor(final long tagId) {
        final Cursor cursor = getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
                COLUMN_TAG_ID + " = ?",
                new String[]{String.valueOf(tagId)}, null, null, null);
        cursor.moveToFirst();
        return cursor;
    }
public int updateItem(final TagModel tag) {
int rows = 0;
SQLiteDatabase db = getDatabase();
try {
db.beginTransaction();
ContentValues values = getContentValues(tag);
rows = getDatabase().update(DataBaseHelper.TABLE_TAGS, values,
COLUMN_TAG_ID + "=?", new String[]{String.valueOf(tag.getId())});
db.setTransactionSuccessful();
} catch (Exception e) {
Log.e(TAG, e != null && e.getMessage() != null ? e.getMessage() : "");
} finally {
db.endTransaction();
}
return rows;
}
    /**
     * Materializes a TagModel from the cursor's current row. Does not move or
     * close the cursor.
     */
    public TagModel cursorToTag(final Cursor cursor) {
        final TagModel data = new TagModel();
        data.setId(cursor.getLong(cursor.getColumnIndex(RecordDAO.COLUMN_RECORD_ID)));
        data.setFolderId(cursor.getLong(cursor.getColumnIndex(COLUMN_TAG_FOLDER_ID)));
        data.setRoadId(cursor.getLong(cursor.getColumnIndex(COLUMN_TAG_ROAD_ID)));
        data.setMeasurementId(cursor.getLong(cursor.getColumnIndex(COLUMN_TAG_MEASUREMENT_ID)));
        // Both time and date fields are populated from the same stored column.
        data.setTime(cursor.getLong(cursor.getColumnIndex(COLUMN_TAG_TIME)));
        data.setDate(cursor.getLong(cursor.getColumnIndex(COLUMN_TAG_TIME)));
        data.setSpeed(cursor.getFloat(cursor.getColumnIndex(COLUMN_TAG_SPEED)));
        data.setName(cursor.getString(cursor.getColumnIndex(COLUMN_TAG_NAME)));
        data.setDescription(cursor.getString(cursor.getColumnIndex(COLUMN_TAG_DESCRIPTION)));
        data.setLatitude(cursor.getDouble(cursor.getColumnIndex(COLUMN_TAG_LATITUDE)));
        data.setLongitude(cursor.getDouble(cursor.getColumnIndex(COLUMN_TAG_LONGITUDE)));
        data.setAltitude(cursor.getDouble(cursor.getColumnIndex(COLUMN_TAG_ALTITUDE)));
        // Booleans are stored as 0/1 integers.
        data.setUploaded(cursor.getInt(cursor.getColumnIndex(COLUMN_TAG_UPLOADED)) == 1);
        data.setPending(cursor.getInt(cursor.getColumnIndex(COLUMN_TAG_PENDING)) == 1);
        data.setImages(
                new String[] {
                        cursor.getString(cursor.getColumnIndex(COLUMN_TAG_FIRST_IMAGE)),
                        cursor.getString(cursor.getColumnIndex(COLUMN_TAG_SECOND_IMAGE)),
                        cursor.getString(cursor.getColumnIndex(COLUMN_TAG_THIRD_IMAGE))});
        data.setNotes(cursor.getString(cursor.getColumnIndex(COLUMN_TAG_NOTES)));
        data.setAudioFile(cursor.getString(cursor.getColumnIndex(COLUMN_TAG_AUDIO)));
        data.setSingle(cursor.getInt(cursor.getColumnIndex(COLUMN_TAG_SINGLE)) == 1);
        final int roadConditionId = cursor.getInt(cursor.getColumnIndex(COLUMN_TAG_ROAD_CONDITION));
        setRoadCondition(data, roadConditionId);
        data.setIri(cursor.getFloat(cursor.getColumnIndex(COLUMN_TAG_IRI)));
        return data;
    }
/**
 * Translates the stored integer road-condition code into the corresponding
 * {@link TagModel.RoadCondition} value and applies it to the model.
 * Codes 0..3 map to GOOD, FAIR, POOR and BAD; anything else means NONE.
 *
 * @param data            the tag model to update
 * @param roadConditionId the integer code read from the database
 */
private void setRoadCondition(final TagModel data, final int roadConditionId) {
    final TagModel.RoadCondition condition;
    if (roadConditionId == 0) {
        condition = TagModel.RoadCondition.GOOD;
    } else if (roadConditionId == 1) {
        condition = TagModel.RoadCondition.FAIR;
    } else if (roadConditionId == 2) {
        condition = TagModel.RoadCondition.POOR;
    } else if (roadConditionId == 3) {
        condition = TagModel.RoadCondition.BAD;
    } else {
        // Unknown or unset codes fall back to NONE.
        condition = TagModel.RoadCondition.NONE;
    }
    data.setRoadCondition(condition);
}
/**
 * Returns an unfiltered cursor over every row of the tags table.
 * The caller owns the cursor and must close it.
 */
public Cursor getAllTagsCursor() {
    final SQLiteDatabase db = getDatabase();
    return db.query(DataBaseHelper.TABLE_TAGS, allColumns, null, null, null, null, null);
}
/**
 * Returns a cursor over all tags attached to the given road, already moved
 * to the first row. The caller owns the cursor and must close it.
 *
 * @param roadId id of the road whose tags are requested
 */
public Cursor getTagsByRoadIdCursor(final long roadId) {
    final String where = COLUMN_TAG_ROAD_ID + " = ?";
    final String[] whereArgs = {String.valueOf(roadId)};
    final Cursor result = getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
            where, whereArgs, null, null, null);
    result.moveToFirst();
    return result;
}
/**
 * Returns a cursor over tags that are neither uploaded nor pending,
 * optionally narrowed by an additional SQL selection fragment.
 *
 * @param selection extra selection ANDed onto the base filter, or null
 */
private Cursor getBumpsForUploadCursor(final String selection) {
    final StringBuilder where = new StringBuilder()
            .append(COLUMN_TAG_UPLOADED).append("=0 and ")
            .append(COLUMN_TAG_PENDING).append("=0");
    if (selection != null) {
        where.append(" and ").append(selection);
    }
    return getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
            where.toString(), null, null, null, null);
}
/**
 * Returns a cursor over all tags belonging to the given measurement, already
 * moved to the first row. The caller owns the cursor and must close it.
 *
 * @param measurementId id of the measurement whose tags are requested
 */
public Cursor getTagsByMeasurementIdCursor(final long measurementId) {
    final String where = COLUMN_TAG_MEASUREMENT_ID + " = ?";
    final String[] whereArgs = {String.valueOf(measurementId)};
    final Cursor result = getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
            where, whereArgs, null, null, null);
    result.moveToFirst();
    return result;
}
/**
 * Returns a cursor over all tags stored under the given folder, already
 * moved to the first row. The caller owns the cursor and must close it.
 *
 * @param folderId id of the folder whose tags are requested
 */
public Cursor getTagsByFolderIdCursor(final long folderId) {
    final String where = COLUMN_TAG_FOLDER_ID + " = ?";
    final String[] whereArgs = {String.valueOf(folderId)};
    final Cursor result = getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
            where, whereArgs, null, null, null);
    result.moveToFirst();
    return result;
}
/**
 * Returns a cursor over "single" tags, i.e. tags recorded without an
 * associated measurement, already moved to the first row.
 * The caller owns the cursor and must close it.
 */
public Cursor getTagsWithoutMeasurement() {
    final String where = COLUMN_TAG_SINGLE + " = ?";
    final String[] whereArgs = {String.valueOf(1)};
    final Cursor result = getDatabase().query(DataBaseHelper.TABLE_TAGS, allColumns,
            where, whereArgs, null, null, null);
    result.moveToFirst();
    return result;
}
/**
 * Loads every tag attached to the given road as a list of measurement items.
 *
 * @param roadId id of the road whose tags are requested
 * @return the tags for the road, empty if there are none
 */
public List<MeasurementItem> getTagsByRoadId(final long roadId) {
    final List<MeasurementItem> items = new ArrayList<>();
    final Cursor cursor = getTagsByRoadIdCursor(roadId);
    try {
        // The cursor is already positioned on the first row (if any).
        boolean hasRow = cursor.getCount() > 0;
        while (hasRow) {
            items.add(cursorToTag(cursor));
            hasRow = cursor.moveToNext();
        }
    } finally {
        // Fix: the original never closed the cursor, leaking it on every call.
        cursor.close();
    }
    return items;
}
/**
 * Loads every tag attached to the given road as a list of tag models.
 *
 * @param roadId id of the road whose tags are requested
 * @return the tags for the road, empty if there are none
 */
public List<TagModel> getTagsListByRoadId(final long roadId) {
    final List<TagModel> items = new ArrayList<>();
    final Cursor cursor = getTagsByRoadIdCursor(roadId);
    try {
        // The cursor is already positioned on the first row (if any).
        boolean hasRow = cursor.getCount() > 0;
        while (hasRow) {
            items.add(cursorToTag(cursor));
            hasRow = cursor.moveToNext();
        }
    } finally {
        // Fix: the original never closed the cursor, leaking it on every call.
        cursor.close();
    }
    return items;
}
/**
 * Loads every tag belonging to the given measurement.
 *
 * @param id id of the measurement whose tags are requested
 * @return the tags for the measurement, empty if there are none
 */
public List<TagModel> getTagsByMeasurementId(final long id) {
    final List<TagModel> items = new ArrayList<>();
    final Cursor cursor = getTagsByMeasurementIdCursor(id);
    try {
        // The cursor is already positioned on the first row (if any).
        boolean hasRow = cursor.getCount() > 0;
        while (hasRow) {
            items.add(cursorToTag(cursor));
            hasRow = cursor.moveToNext();
        }
    } finally {
        // Fix: the original never closed the cursor, leaking it on every call.
        cursor.close();
    }
    return items;
}
/**
 * Collects all not-yet-uploaded, non-pending tags matching the extra query
 * fragment, for transfer to the server.
 *
 * @param query additional SQL selection ANDed onto the base filter, or null
 * @return the matching tags, empty if there are none
 */
@Override
public List<? extends BaseModel> getDataForUpload(final String query) {
    final List<TagModel> listData = new ArrayList<>();
    final Cursor cursor = getBumpsForUploadCursor(query);
    if (cursor != null) {
        try {
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                listData.add(cursorToTag(cursor));
                cursor.moveToNext();
            }
        } finally {
            // Fix: close in finally so the cursor is not leaked when
            // cursorToTag throws mid-iteration.
            cursor.close();
        }
    }
    return listData;
}
/**
 * Unsupported for tags — uploads are driven by the query-based overload.
 *
 * @return always {@code null}; callers must null-check the result
 */
@Override
public List<? extends BaseModel> getDataForUpload() {
return null;
}
/**
 * Sets the uploaded flag on every given tag and persists the change in a
 * single transaction.
 *
 * @param items tags to update (must actually be {@code List<TagModel>})
 * @param flag  new value of the uploaded flag
 * @return true if all rows were updated and the transaction committed
 */
@Override
public boolean updateUploadedDB(List<? extends BaseModel> items, boolean flag) {
    // Fix: primitive boolean instead of a boxed Boolean for a local flag.
    boolean result = false;
    @SuppressWarnings("unchecked")
    final List<TagModel> castedItems = (List<TagModel>) items;
    final SQLiteDatabase db = getDatabase();
    try {
        db.beginTransaction();
        for (TagModel item : castedItems) {
            item.setUploaded(flag);
            db.update(DataBaseHelper.TABLE_TAGS, getContentValues(item), COLUMN_TAG_ID + "=?",
                    new String[]{String.valueOf(item.getId())});
        }
        db.setTransactionSuccessful();
        result = true;
    } catch (Exception e) {
        // Fix: log instead of silently swallowing, matching this DAO's style elsewhere.
        Log.e(TAG, e.getMessage() != null ? e.getMessage() : "");
    } finally {
        db.endTransaction();
    }
    return result;
}
/**
 * Sets the pending flag on every given tag and persists the change in a
 * single transaction.
 *
 * @param items tags to update (must actually be {@code List<TagModel>})
 * @param flag  new value of the pending flag
 * @return true if all rows were updated and the transaction committed
 */
@Override
public boolean updatePendingDB(List<? extends BaseModel> items, boolean flag) {
    // Fix: primitive boolean instead of a boxed Boolean for a local flag.
    boolean result = false;
    @SuppressWarnings("unchecked")
    final List<TagModel> castedItems = (List<TagModel>) items;
    final SQLiteDatabase db = getDatabase();
    try {
        db.beginTransaction();
        for (TagModel item : castedItems) {
            item.setPending(flag);
            db.update(DataBaseHelper.TABLE_TAGS, getContentValues(item), COLUMN_TAG_ID + "=?",
                    new String[]{String.valueOf(item.getId())});
        }
        db.setTransactionSuccessful();
        result = true;
    } catch (Exception e) {
        // Fix: log instead of silently swallowing, matching this DAO's style elsewhere.
        Log.e(TAG, e.getMessage() != null ? e.getMessage() : "");
    } finally {
        db.endTransaction();
    }
    return result;
}
/**
 * Sets both the uploaded and pending flags on every given tag and persists
 * the change in a single transaction.
 *
 * @param items    tags to update (must actually be {@code List<TagModel>})
 * @param uploaded new value of the uploaded flag
 * @param pending  new value of the pending flag
 * @return true if all rows were updated and the transaction committed
 */
@Override
public boolean updateUploadedPendingDB(List<? extends BaseModel> items, boolean uploaded, boolean pending) {
    // Fix: primitive boolean instead of a boxed Boolean for a local flag.
    boolean result = false;
    @SuppressWarnings("unchecked")
    final List<TagModel> castedItems = (List<TagModel>) items;
    final SQLiteDatabase db = getDatabase();
    try {
        db.beginTransaction();
        for (TagModel item : castedItems) {
            item.setUploaded(uploaded);
            item.setPending(pending);
            db.update(DataBaseHelper.TABLE_TAGS, getContentValues(item), COLUMN_TAG_ID + "=?",
                    new String[]{String.valueOf(item.getId())});
        }
        db.setTransactionSuccessful();
        result = true;
    } catch (Exception e) {
        // Fix: log instead of silently swallowing, matching this DAO's style elsewhere.
        Log.e(TAG, e.getMessage() != null ? e.getMessage() : "");
    } finally {
        db.endTransaction();
    }
    return result;
}
/**
 * Bulk insert is not implemented for tags.
 *
 * @param items ignored
 * @return always {@code false}
 */
@Override
public boolean putDB(List<? extends BaseModel> items) {
return false;
}
}
|
package user
import (
"testing"
"fmt"
"strings"
)
// TestInsertandValidateUser inserts a user record and reads it back,
// verifying that the stored full name round-trips unchanged.
func TestInsertandValidateUser(t *testing.T) {
	db, err := InitUser("../sdk.json", "Database")
	if err != nil {
		t.Fatalf("%v\n", err)
	}
	fmt.Println("Successfully loaded!")

	model := UserModel{
		Fullname: "<NAME>",
		Email:    "<EMAIL>",
		Password: "Password",
	}
	if err = db.InsertOrUpdateData("habibiefaried", model); err != nil {
		t.Fatalf("%v\n", err)
	}

	stored, err := db.GetData("habibiefaried")
	if err != nil {
		t.Fatalf("%v\n", err)
	}
	if strings.Compare(stored.Fullname, model.Fullname) != 0 {
		t.Fatalf("The fullname is not the same value!")
	}
}
func TestNonExistentData(t *testing.T){
u, err := InitUser("../sdk.json", "Database")
if (err != nil){
t.Fatalf("%v\n",err)
}
fmt.Println("Successfully loaded!")
username := "nonexistent!_+$@)(2020+_'"
_, err = u.GetData(username)
if (strings.Compare(err.Error(),"Not found") != 0) {
t.Fatalf("The error 'Not found' should be here for username %v\n",username)
}
} |
// TestAuthCmd tests the auth command. Therefore, this test assumes a file "~/keptn/.keptnmock" containing
// the endpoint and api-token.
func TestAuthCmd(t *testing.T) {
credentialmanager.MockAuthCreds = true
endPoint, apiToken, err := credentialmanager.GetCreds()
if err != nil {
t.Error(err)
return
}
buf := new(bytes.Buffer)
rootCmd.SetOutput(buf)
args := []string{
"auth",
fmt.Sprintf("--endpoint=%s", endPoint.String()),
fmt.Sprintf("--api-token=%s", apiToken),
"--mock",
}
rootCmd.SetArgs(args)
err = rootCmd.Execute()
if err != nil {
t.Errorf("An error occured %v", err)
}
} |
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright 2020 Google LLC
 */
#ifndef __ASM_ACPI_VBNV_LAYOUT_H__
#define __ASM_ACPI_VBNV_LAYOUT_H__
/*
 * Byte layout of the vboot non-volatile (VBNV) storage block as exposed via
 * ACPI. *_OFFSET constants index bytes within the block; *_MASK and flag
 * constants select bits within the addressed byte.
 */
#define VBOOT_VBNV_BLOCK_SIZE 16 /* Size of NV storage block in bytes */
/* Constants for NV storage, for use with ACPI */
/* Byte 0: header/signature and settings-reset flags */
#define HEADER_OFFSET 0
#define HEADER_MASK 0xc0
#define HEADER_SIGNATURE 0x40
#define HEADER_FIRMWARE_SETTINGS_RESET 0x20
#define HEADER_KERNEL_SETTINGS_RESET 0x10
/* Byte 1: boot flags and try-B counter */
#define BOOT_OFFSET 1
#define BOOT_DEBUG_RESET_MODE 0x80
#define BOOT_DISABLE_DEV_REQUEST 0x40
#define BOOT_DISPLAY_REQUEST 0x20
#define BOOT_TRY_B_COUNT_MASK 0x0f
/* Bytes 2-3: recovery request and localization settings */
#define RECOVERY_OFFSET 2
#define LOCALIZATION_OFFSET 3
/* Byte 4: developer-mode flags */
#define DEV_FLAGS_OFFSET 4
#define DEV_BOOT_USB_MASK 0x01
#define DEV_BOOT_SIGNED_ONLY_MASK 0x02
#define DEV_ENABLE_UDC 0x40
/* Byte 8: miscellaneous flags */
#define MISC_FLAGS_OFFSET 8
#define MISC_FLAGS_BATTERY_CUTOFF_MASK 0x08
/* Byte 11: kernel field; byte 15 (last byte): CRC over the block */
#define KERNEL_FIELD_OFFSET 11
#define CRC_OFFSET 15
#endif /* __ASM_ACPI_VBNV_LAYOUT_H__ */
|
/**
 * An in-container integration test: the application is deployed and the
 * controller is exercised while it is actually executing, rather than in
 * isolation.
 */
public class InContainerIT extends WisdomTest {
/**
 * First inject your controller. The @Inject annotation is able to
 * inject (in tests) the bundle context, controllers, services and
 * templates.
 */
@Inject
WelcomeController controller;
/**
 * Invokes the welcome action with a custom header and verifies that it
 * responds with HTTP 200 and a body containing "Wisdom".
 */
@Test
public void testWelcomePage() {
// Wrap your controller invocation so you can configure the HTTP
// Context (parameter, header...)
Action.ActionResult result = action(new Invocation() {
@Override
public Result invoke() throws Throwable {
return controller.welcome();
}
}).header("foo", "bar").invoke();
assertThat(status(result)).isEqualTo(OK);
assertThat(toString(result)).contains("Wisdom");
}
}
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.